Merge branch 'master' into img_tool
diff --git a/.asf.yaml b/.asf.yaml
new file mode 100644
index 0000000..1e0b37f
--- /dev/null
+++ b/.asf.yaml
@@ -0,0 +1,28 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+github:
+  description: a distributed deep learning platform
+  labels:
+    - deep-learning
+  features:
+    # Enable wiki for documentation
+    wiki: true
+    # Enable issues on github
+    issues: true
+    # Enable settings on github
+    settings: true
diff --git a/python/rafiki/__init__.py b/.clang-format
similarity index 92%
copy from python/rafiki/__init__.py
copy to .clang-format
index 3aa745b..a7b90d1 100644
--- a/python/rafiki/__init__.py
+++ b/.clang-format
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +15,8 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
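+# clang-format configuration: format the project's C++ sources in the Google style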
+---
+Language:        Cpp
+BasedOnStyle:  Google
+...
+
diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 0000000..497c927
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,19 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
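+# Exclude files under proto/ from the coverage report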
+ignore:
+  - proto/*
diff --git a/.github/workflows/conda.yaml b/.github/workflows/conda.yaml
new file mode 100644
index 0000000..882a424
--- /dev/null
+++ b/.github/workflows/conda.yaml
@@ -0,0 +1,71 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a basic workflow to help you get started with Actions
+
+name: conda
+
+# Controls when the action will run. Triggers the workflow on every push and
+# pull request event
+on:
+  push:
+  pull_request:
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+  build-pytest-package-ubuntu:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v1
+      - name: install-conda-build
+        run: conda install conda-build anaconda-client
+      - name: conda-config
+        run: conda config --add channels conda-forge && conda config --add channels nusdbsystem && conda config --set anaconda_upload no
+      - name: build-pytest
+        run: conda build tool/conda/singa --python 3.6
+        env:
+          TEST_COMMAND: pytest --cov=$PREFIX/lib/python3.6/site-packages/singa --cov-report=xml && codecov --flags singa-python
+      - name: upload-package
+        env: 
+          ANACONDA_UPLOAD_TOKEN: ${{ secrets.ANACONDA_UPLOAD_TOKEN }}
+        if: ${{ env.ANACONDA_UPLOAD_TOKEN }}
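+        # Upload the built package only when the ANACONDA_UPLOAD_TOKEN secret is configured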
+        run: /usr/share/miniconda/bin/anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main /usr/share/miniconda/conda-bld/linux-64/singa-*.tar.bz2 --force
+
+
+  build-pytest-package-macos:
+    runs-on: macos-latest
+
+    steps:
+      - uses: actions/checkout@v1
+      - name: set permission
+        run: sudo chmod -R 777 /usr/local/miniconda 
+        # && xcrun --show-sdk-path
+      - name: install-conda-build
+        run: conda install conda-build anaconda-client
+      - name: conda-config
+        run: conda config --add channels conda-forge && conda config --add channels nusdbsystem && conda config --set anaconda_upload no
+      - name: build-pytest
+        run: conda build tool/conda/singa --python 3.6
+        env:
+          TEST_COMMAND: pytest --cov=$PREFIX/lib/python3.6/site-packages/singa --cov-report=xml && codecov --flags singa-python
+      - name: upload-package
+        env: 
+          ANACONDA_UPLOAD_TOKEN: ${{ secrets.ANACONDA_UPLOAD_TOKEN }}
+        if: ${{ env.ANACONDA_UPLOAD_TOKEN }}
+        run: /usr/local/miniconda/bin/anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main /usr/local/miniconda/conda-bld/osx-64/singa-*.tar.bz2 --force
\ No newline at end of file
diff --git a/.github/workflows/macOS.yaml b/.github/workflows/macOS.yaml
new file mode 100644
index 0000000..d38ce32
--- /dev/null
+++ b/.github/workflows/macOS.yaml
@@ -0,0 +1,56 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+name: Native-MacOS
+
+on:
+  push:
+  pull_request:
+
+jobs:
+  build-cpptest-cpu:
+    runs-on: macos-latest
+
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.7"
+      - name: install-build-dependencies
+        run: |
+         brew install protobuf swig opencv glog lmdb numpy
+         pip3 install numpy && wget https://github.com/oneapi-src/oneDNN/releases/download/v1.2/dnnl_mac_1.2.0_cpu_tbb.tgz -P /tmp
+         tar zxf /tmp/dnnl_mac_1.2.0_cpu_tbb.tgz -C /tmp
+      - name: configure
+        run: mkdir build && cd build && cmake -DUSE_PYTHON3=YES -DENABLE_TEST=YES -DUSE_DNNL=YES ..
+        env:
+          CMAKE_INCLUDE_PATH: /usr/local/opt/openblas/include:$CMAKE_INCLUDE_PATH
+          CMAKE_LIBRARY_PATH: /usr/local/opt/openblas/lib:$CMAKE_LIBRARY_PATH
+          DNNL_ROOT: /tmp/dnnl_mac_1.2.0_cpu_tbb/
+      - name: build
+        run: cd build && make
+        env:
+          CXXFLAGS: -I /Users/runner/hostedtoolcache/Python/3.7.8/x64/lib/python3.7/site-packages/numpy/core/include $CXXFLAGS
+          LD_LIBRARY_PATH: /usr/local/opt/openblas/lib:/tmp/dnnl_mac_1.2.0_cpu_tbb/lib:$LD_LIBRARY_PATH
+      - name: C++ test
+        run: |
+         brew install tbb
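+         # Repoint the libdnnl install names at the extracted oneDNN libraries so the test binary can load them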
+         install_name_tool -change libdnnl.1.dylib /tmp/dnnl_mac_1.2.0_cpu_tbb/lib/libdnnl.1.dylib /Users/runner/work/singa/singa/build/lib/libsinga.dylib
+         install_name_tool -change libdnnl.1.dylib /tmp/dnnl_mac_1.2.0_cpu_tbb/lib/libdnnl.1.dylib build/bin/test_singa
+         build/bin/test_singa
+        env:
+          LD_LIBRARY_PATH: /usr/local/opt/openblas/lib:/tmp/dnnl_mac_1.2.0_cpu_tbb/lib:$LD_LIBRARY_PATH
diff --git a/.github/workflows/rat.yaml b/.github/workflows/rat.yaml
new file mode 100644
index 0000000..b7c588d
--- /dev/null
+++ b/.github/workflows/rat.yaml
@@ -0,0 +1,44 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a basic workflow to help you get started with Actions
+
+name: License-Check
+
+# Controls when the action will run. Triggers the workflow on push or pull request 
+# events but only for the master branch
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+  # This workflow contains a single job called "CheckLicence"
+  CheckLicence:
+    # The type of runner that the job will run on
+    runs-on: ubuntu-latest
+
+    # Steps represent a sequence of tasks that will be executed as part of the job
+    steps:
+    # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+    - uses: actions/checkout@v2
+
+    # Runs a single command using the runners shell
+    - name: Licence check with Apache RAT
+      run: bash -ex tool/rat.sh
diff --git a/.github/workflows/ubuntu.yaml b/.github/workflows/ubuntu.yaml
new file mode 100644
index 0000000..b67fcda
--- /dev/null
+++ b/.github/workflows/ubuntu.yaml
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a basic workflow to help you get started with Actions
+
+name: Native-Ubuntu
+
+# Controls when the action will run. Triggers the workflow on every push and
+# pull request event
+on:
+  push:
+  pull_request:
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+  # build-ubuntu-cpp:
+  #  runs-on: ubuntu-latest
+
+  #  steps:
+  #    - uses: actions/checkout@v1
+  #    - name: install-build-dependencies
+  #      run: sudo apt-get install -y libgoogle-glog-dev libprotobuf-dev protobuf-compiler libncurses-dev libopenblas-dev gfortran libblas-dev liblapack-dev libatlas-base-dev swig libcurl3-dev cmake dh-autoreconf  
+  #    - name: configure
+  #      run: mkdir build && cd build && cmake -DUSE_PYTHON=NO -DENABLE_TEST=YES ..
+  #    - name: build
+  #      run: cd build && make
+  #    - name: C++ test
+  #      run: build/bin/test_singa
+ 
+  build-cpptest-on-cpu:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v1
+      - name: get-oneDNN
+        run: wget https://github.com/oneapi-src/oneDNN/releases/download/v1.1/dnnl_lnx_1.1.0_cpu_gomp.tgz -P /tmp/ && tar zxf /tmp/dnnl_lnx_1.1.0_cpu_gomp.tgz -C /tmp
+      - name: install-build-dependencies
+        run: sudo apt-get install -y libgoogle-glog-dev libprotobuf-dev protobuf-compiler libncurses-dev libopenblas-dev gfortran libblas-dev liblapack-dev libatlas-base-dev swig dh-autoreconf lcov
+      - name: configure
+        run: mkdir build && cd build && cmake -DUSE_PYTHON=NO -DENABLE_TEST=YES -DCODE_COVERAGE=YES -DUSE_DNNL=YES ..
+        env:
+         DNNL_ROOT: /tmp/dnnl_lnx_1.1.0_cpu_gomp/
+      - name: build
+        run: cd build && make
+      - name: C++ test
+        run: build/bin/test_singa
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v1
+        with:
+          flags: singa-cpp
diff --git a/.gitignore b/.gitignore
index 887c409..1025392 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,14 @@
+.DS_Store
+
+*.pyc
+__pycache__
 *.swp
+*~
+.\#*
+\#*\#
+
+
+
 *.o
 *.bin
 *.a
@@ -8,7 +18,14 @@
 *.pb.h
 *.pb.cc
 *.cxx
+
 build/
 thirdparty/*
 !thirdparty/install.sh
 test/samples/
+.idea/
+.vscode/
+
+# Sphinx and Doxygen Doc-Site
+doc/_build/*
+doc/en/docs/model_zoo/
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..c8df3de
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "rafiki"]
+	path = rafiki
+	url = https://github.com/nginyc/rafiki.git
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..ca4161a
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,606 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[MASTER]
+
+errors-only=yes
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=numpy
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=print-statement,
+        parameter-unpacking,
+        unpacking-in-except,
+        old-raise-syntax,
+        backtick,
+        long-suffix,
+        old-ne-operator,
+        old-octal-literal,
+        import-star-module-level,
+        non-ascii-bytes-literal,
+        raw-checker-failed,
+        bad-inline-option,
+        locally-disabled,
+        file-ignored,
+        suppressed-message,
+        useless-suppression,
+        deprecated-pragma,
+        use-symbolic-message-instead,
+        apply-builtin,
+        basestring-builtin,
+        buffer-builtin,
+        cmp-builtin,
+        coerce-builtin,
+        execfile-builtin,
+        file-builtin,
+        long-builtin,
+        raw_input-builtin,
+        reduce-builtin,
+        standarderror-builtin,
+        unicode-builtin,
+        xrange-builtin,
+        coerce-method,
+        delslice-method,
+        getslice-method,
+        setslice-method,
+        no-absolute-import,
+        old-division,
+        dict-iter-method,
+        dict-view-method,
+        next-method-called,
+        metaclass-assignment,
+        indexing-exception,
+        raising-string,
+        reload-builtin,
+        oct-method,
+        hex-method,
+        nonzero-method,
+        cmp-method,
+        input-builtin,
+        round-builtin,
+        intern-builtin,
+        unichr-builtin,
+        map-builtin-not-iterating,
+        zip-builtin-not-iterating,
+        range-builtin-not-iterating,
+        filter-builtin-not-iterating,
+        using-cmp-argument,
+        eq-without-hash,
+        div-method,
+        idiv-method,
+        rdiv-method,
+        exception-message-attribute,
+        invalid-str-codec,
+        sys-max-int,
+        bad-python3-import,
+        deprecated-string-function,
+        deprecated-str-translate-call,
+        deprecated-itertools-function,
+        deprecated-types-field,
+        next-method-defined,
+        dict-items-not-iterating,
+        dict-keys-not-iterating,
+        dict-values-not-iterating,
+        deprecated-operator-function,
+        deprecated-urllib-function,
+        xreadlines-attribute,
+        deprecated-sys-function,
+        exception-escape,
+        import-error,
+        comprehension-escape,
+        too-many-function-args, # false positive on np.array([...])
+        not-an-iterable, # false positive on tensor.shape
+        unsubscriptable-object, # false positive on tensor.shape
+        no-name-in-module, # proto object available after compilation
+        access-member-before-definition # conv layer re-instantiate handle when batch number changed
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'error', 'warning', 'refactor', and 'convention'
+# which contain the number of messages in each category, as well as 'statement'
+# which is the total number of statements analyzed. This score is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+          bar,
+          baz,
+          toto,
+          tutu,
+          tata
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style.
+#class-attribute-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+           j,
+           k,
+           ex,
+           Run,
+           _
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style.
+#variable-rgx=
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[LOGGING]
+
+# Format style used to check logging format string. `old` means using %
+# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+          _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local,numpy
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=numpy
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1  : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,
+               dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+      XXX,
+      TODO
+
+
+[STRING]
+
+# This flag controls whether the implicit-str-concat-in-sequence should
+# generate a warning on implicit string concatenation in sequences defined over
+# several lines.
+check-str-concat-over-line-jumps=no
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+                      __new__,
+                      setUp,
+                      __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+                  _fields,
+                  _replace,
+                  _source,
+                  _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled).
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled).
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+                       Exception
diff --git a/examples/cifar10/caffe/__init__.py b/.readthedocs.yml
similarity index 77%
rename from examples/cifar10/caffe/__init__.py
rename to .readthedocs.yml
index ffd1754..f5ad977 100644
--- a/examples/cifar10/caffe/__init__.py
+++ b/.readthedocs.yml
@@ -15,4 +15,17 @@
 # limitations under the License.
 #
 
+# .readthedocs.yml
+build:
+  image: stable
+conda:
+  environment: doc/environment.yml
+python:
+  version: 3.7
 
+# Required
+version: 2
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  configuration: doc/conf.py
\ No newline at end of file
diff --git a/python/rafiki/__init__.py b/.style.yapf
similarity index 95%
rename from python/rafiki/__init__.py
rename to .style.yapf
index 3aa745b..e31551b 100644
--- a/python/rafiki/__init__.py
+++ b/.style.yapf
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +15,5 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
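+# Configuration for the yapf Python code formatter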
+[style]
+based_on_style = google
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f71ce69..6a151f7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -19,13 +19,30 @@
 CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 
 PROJECT(singa)
-SET(PACKAGE_VERSION "1.1.1")
-SET(SINGA_MAJOR_VERSION 1)  # 0 -
-SET(SINGA_MINOR_VERSION 1)  # 0 - 9
-SET(SINGA_PATCH_VERSION 1)  # 0 - 99
-MATH(EXPR SINGA_VERSION "${SINGA_MAJOR_VERSION} * 1000 + ${SINGA_MINOR_VERSION} * 100 + ${SINGA_PATCH_VERSION}")
 
 LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/Thirdparty)
+
+#include(GetGitRevisionDescription)
+#git_describe(VERSION --tags --dirty=-d)
+#string(REGEX REPLACE "^([0-9]+)\\..*" "\\1" VERSION_MAJOR "${VERSION}")
+#string(REGEX REPLACE "^[0-9]+\\.([0-9]+).*" "\\1" VERSION_MINOR "${VERSION}")
+#string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.([0-9]+).*" "\\1" VERSION_PATCH "${VERSION}")
+
+
+SET(PACKAGE_VERSION 3.1.0) # ${VERSION}
+SET(VERSION 3.1.0)
+SET(SINGA_MAJOR_VERSION 3)
+SET(SINGA_MINOR_VERSION 1)
+SET(SINGA_PATCH_VERSION 0)
+#SET(SINGA_MAJOR_VERSION ${VERSION_MAJOR})  # 0 -
+#SET(SINGA_MINOR_VERSION ${VERSION_MINOR})  # 0 - 9
+#SET(SINGA_PATCH_VERSION ${VERSION_PATCH})  # 0 - 99
+
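+# Encode the version as a single integer, e.g. 3.1.0 -> 3100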
+MATH(EXPR SINGA_VERSION "${SINGA_MAJOR_VERSION} * 1000 + ${SINGA_MINOR_VERSION} * 100 + ${SINGA_PATCH_VERSION}")
+
+message(STATUS "SINGA git Version ${VERSION}")
+message(STATUS "SINGA Version ${SINGA_VERSION}")
+
 #message(STATUS "module path: ${CMAKE_MODULE_PATH}")
 
 # Flags
@@ -52,19 +69,23 @@
     "${CMAKE_SOURCE_DIR}/include;${PROJECT_BINARY_DIR}")
 INCLUDE_DIRECTORIES(${SINGA_INCLUDE_DIR})
 
-#OPTION(USE_CBLAS "Use CBlas libs" ON)
+
 OPTION(USE_CUDA "Use Cuda libs" OFF)
+OPTION(ENABLE_TEST "Enable unit test" OFF)
+option(CODE_COVERAGE "Enable coverage reporting" OFF)
+OPTION(USE_PYTHON "Generate py wrappers" ON)
+OPTION(USE_PYTHON3 "Python 3x" OFF)
+
 OPTION(USE_CUDNN "Use Cudnn libs" ON)
 OPTION(USE_OPENCV "Use opencv" OFF)
 OPTION(USE_LMDB "Use LMDB libs" OFF)
-OPTION(USE_PYTHON "Generate py wrappers" ON)
 OPTION(USE_JAVA "Generate java wrappers" OFF)
 OPTION(USE_OPENCL "Use OpenCL" OFF)
 OPTION(ENABLE_DIST "Enable distributed training" OFF)
-OPTION(ENABLE_TEST "Enable unit test" OFF)
 OPTION(DISABLE_WARNINGS "Disable warnings under windows" ON)
 OPTION(USE_MODULES "Compile dependent libs as submodules together with singa" OFF)
-#OPTION(USE_SHARED_LIBS "Use shared library" OFF)
+OPTION(USE_DNNL "Use dnnl libs" OFF)
+OPTION(USE_DIST "Use nccl distributed module" OFF)
 
 # TODO: remove all USE_CBLAS in codes
 SET(USE_CBLAS ON)
@@ -122,13 +143,13 @@
     #ENDIF()
     ExternalProject_Add(protobuf
         DOWNLOAD_COMMAND "wget"
-        "http://github.com/google/protobuf/releases/download/v2.6.1/protobuf-2.6.1.tar.gz"
-        "-O" "protobuf-2.6.1.tar.gz"
+        "https://github.com/google/protobuf/archive/v3.3.0.tar.gz"
+        "-O" "protobuf-3.3.0.tar.gz"
         UPDATE_COMMAND "tar" "zxvf"
-        "${CMAKE_BINARY_DIR}/protobuf-prefix/src/protobuf-2.6.1.tar.gz" "-C" ".."
-        SOURCE_DIR "protobuf-2.6.1/"
+        "${CMAKE_BINARY_DIR}/protobuf-prefix/src/protobuf-3.3.0.tar.gz" "-C" ".."
+        SOURCE_DIR "protobuf-3.3.0/"
         BUILD_IN_SOURCE 1
-        CONFIGURE_COMMAND "./configure" "--disable-shared"
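+        # The GitHub source archive ships without a generated configure script, so run autogen.sh first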
+        CONFIGURE_COMMAND "./autogen.sh" COMMAND "./configure" "--disable-shared"
         "--prefix=${CMAKE_BINARY_DIR}/" "CXXFLAGS=-fPIC"
         INSTALL_COMMAND "make" "install"
         )
@@ -139,7 +160,7 @@
         GIT_REPOSITORY "https://github.com/xianyi/OpenBLAS.git"
         #GIT_TAG "develop"
         #GIT_TAG "f3419e6"
-        GIT_TAG "optimized_for_deeplearning"
+        GIT_TAG "v0.2.20"
         SOURCE_DIR "openblas/"
         BUILD_IN_SOURCE 1
         CONFIGURE_COMMAND ""
@@ -165,9 +186,9 @@
     ADD_SUBDIRECTORY(java)
 ENDIF()
 
-INSTALL(DIRECTORY ${CMAKE_BINARY_DIR}/include DESTINATION ${CMAKE_INSTALL_PREFIX})
+INSTALL(DIRECTORY ${CMAKE_BINARY_DIR}/include DESTINATION ${CMAKE_INSTALL_PREFIX}/)
 INSTALL(DIRECTORY include/singa DESTINATION ${CMAKE_INSTALL_PREFIX}/include/)
-INSTALL(DIRECTORY ${CMAKE_BINARY_DIR}/lib DESTINATION ${CMAKE_INSTALL_PREFIX} PATTERN "*libgtest.a" EXCLUDE )
+INSTALL(DIRECTORY ${CMAKE_BINARY_DIR}/lib DESTINATION ${CMAKE_INSTALL_PREFIX}/ PATTERN "*libgtest.a" EXCLUDE )
 
 INSTALL(CODE "execute_process(COMMAND python setup.py install --prefix=${CMAKE_INSTALL_PREFIX} WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/python)")
 
@@ -192,7 +213,11 @@
 		SET(CORE_DEPENDENCIES "libgoogle-glog-dev, libprotobuf-dev, libopenblas-dev, libstdc++6, libc6")
 	ENDIF()
 
-	SET(PYTHON_DEPENDENCIES "${CORE_DEPENDENCIES}, python-dev, libpython2.7, python-pip, python-numpy, python-pillow")
+	IF(USE_PYTHON3)
+		SET(PYTHON_DEPENDENCIES "${CORE_DEPENDENCIES}, python3, python3-dev, python3-pip, python3-numpy, python3-pillow, python3-matplotlib")
+	ELSE()
+		SET(PYTHON_DEPENDENCIES "${CORE_DEPENDENCIES}, python-dev, libpython2.7, python-pip, python-numpy, python-pillow")
+	ENDIF()
 
 	SET(CPACK_GENERATOR "DEB")
 	SET(CPACK_DEBIAN_PACKAGE_MAINTAINER "Apache Incubator <dev@singa.incubator.apache.org>")
@@ -206,12 +231,24 @@
 		SET(CPACK_DEBIAN_PACKAGE_DEPENDS ${PYTHON_DEPENDENCIES})
 		SET(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${PROJECT_SOURCE_DIR}/tool/debian/postinst" )
 		SET(CPACK_DEBIAN_PACKAGE_PREDEPENDS "ca-certificates")
-		IF (USE_CUDA)
-			SET(CPACK_DEBIAN_PACKAGE_NAME "python-singa-cuda")
-			SET(CPACK_PACKAGE_FILE_NAME "python-singa-cuda-${PACKAGE_VERSION}")
+		IF(USE_PYTHON3)
+			SET(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${PROJECT_SOURCE_DIR}/tool/debian/postinst" )
+			IF (USE_CUDA)
+				SET(CPACK_DEBIAN_PACKAGE_NAME "python3-singa-cuda")
+				SET(CPACK_PACKAGE_FILE_NAME "python3-singa-cuda-${PACKAGE_VERSION}")
+			ELSE()
+				SET(CPACK_DEBIAN_PACKAGE_NAME "python3-singa")
+				SET(CPACK_PACKAGE_FILE_NAME "python3-singa-${PACKAGE_VERSION}")
+			ENDIF()
 		ELSE()
-			SET(CPACK_DEBIAN_PACKAGE_NAME "python-singa")
-			SET(CPACK_PACKAGE_FILE_NAME "python-singa-${PACKAGE_VERSION}")
+			SET(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${PROJECT_SOURCE_DIR}/tool/debian-python2/postinst" )
+			IF (USE_CUDA)
+				SET(CPACK_DEBIAN_PACKAGE_NAME "python-singa-cuda")
+				SET(CPACK_PACKAGE_FILE_NAME "python-singa-cuda-${PACKAGE_VERSION}")
+			ELSE()
+				SET(CPACK_DEBIAN_PACKAGE_NAME "python-singa")
+				SET(CPACK_PACKAGE_FILE_NAME "python-singa-${PACKAGE_VERSION}")
+			ENDIF()
 		ENDIF()
 	ELSE()
 		SET(CPACK_DEBIAN_PACKAGE_NAME "singa")
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6fd833c..1b855fa 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,21 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
 # How to contribute 
 
 [Getting Started](./doc/en/develop/how-contribute.md)
diff --git a/python/rafiki/__init__.py b/CPPLINT.cfg
similarity index 76%
copy from python/rafiki/__init__.py
copy to CPPLINT.cfg
index 3aa745b..2dd27cf 100644
--- a/python/rafiki/__init__.py
+++ b/CPPLINT.cfg
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +15,14 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+set noparent
+
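+# Disable the cpplint checks below, which do not match this project's C++ conventions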
+filter=-runtime/threadsafe_fn
+filter=-whitespace/braces
+filter=-readability/todo
+filter=-build/c++11
+filter=-build/c++14
+filter=-build/c++tr1
+filter=-build/namespaces
+filter=-build/header_guard
+filter=-runtime/explicit
diff --git a/KEYS b/KEYS
new file mode 100644
index 0000000..36a9da3
--- /dev/null
+++ b/KEYS
@@ -0,0 +1,316 @@
+pub   4096R/3059B92F 2015-09-10 [expires: 2019-09-10]
+uid       [ultimate] Anh Dinh <dinhtta@apache.org>
+sig 3        3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+sig 3        4E638F6B 2015-09-11  Leo Dang <truongdx2002@yahoo.com>
+sig          B74101D8 2015-09-11  Dan Ghica <dan@ghica.net>
+sig 3        E6480A7E 2015-09-11  [User ID not found]
+sig 3        3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+sig 3        3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+uid       [ultimate] Anh Dinh <ug93tad@gmail.com>
+sig 3        3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+sig 3        4E638F6B 2015-09-11  Leo Dang <truongdx2002@yahoo.com>
+sig          B74101D8 2015-09-11  Dan Ghica <dan@ghica.net>
+sig 3        E6480A7E 2015-09-11  [User ID not found]
+sig 3        3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+sub   4096R/2AB66957 2015-09-10 [expires: 2019-09-10]
+sig          3059B92F 2015-09-10  Anh Dinh <dinhtta@apache.org>
+
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Comment: GPGTools - https://gpgtools.org
+
+mQINBFXxJtkBEAC5QIfPvsVZeJ+/2Bk0G5a2em1Sat8XHBA8Nm/K8VQbB3OFvF+l
+n0NCym/3K7jO2J6HNkx0Vfyorka8Vycg8dMCpO616b2l0wXRnzuPswaiZ0xAlW06
+sNPV11Mw9MXg26+/jIIbcjrqHEbLqqwPGM3ija/5HCLQoTmmFI4ZmNplwbTSB7a+
+yQyxD36l+3up/r39HOQau1aUbW99qklb63mToDh+bgu2HuuQ1/qPje8b9SNkNrKU
+ROE1h6CcbsNoc/LcwB4cdIYkD5BcVUOm7AkTthg4pUsBiC9VtgUcKvRWci9anXPP
+1YmBDVHPujwPbuYHG1iWYF2Tl9bDxNURxLkPFFt21KpiEsP3Y6zx23v9KbnXso5z
+uu003/VS3NJxk3Cya1OM3uzxcm/mH7sRWx/ejWieWqHUJROZq4tBBxYCbC+wq33N
++NpcPSjMCpTtvcy1B2i306fGc5QkUgzP647NxL9f9ruprCvCW3LuTW5bugo3SKFg
+aCK6ggGrtMEXhp7EjreudL4vXu5dZBLvoLqzK762Wod+6aI3khVdGV6Rq4oB4PFi
+gShNwXVOnFQdr3XVauFjwd5xaMy/6GQdGFpnb1ol5EXw5IjXBmpFdk6Y9mtP/HKF
+x7qGV7my0JNPFaX3D16dbxLCJtmfF/uH8DcWI3AEqKS1qmM3ZWHqz3hPZwARAQAB
+tB1BbmggRGluaCA8ZGluaHR0YUBhcGFjaGUub3JnPokCQAQTAQoAKgIbAwUJB4Yf
+gAULCQgHAwUVCgkICwUWAgMBAAIeAQIXgAUCVfFQoAIZAQAKCRCp9X0YMFm5Lxf7
+EACfeP7TFKn9DEgDVkpBdRu35i6yjFp7+hdoo+KvHULkajIFlF8pp2wsklq67z+G
+OWcvu7qRg2RjLF2B7av40r3/iM31/UTgKlWz0A7IGTJkNWABWqIOor7gNx8616Uc
+Nb7P843hgeD6Vp+Zwl2SHzTwezWBwmUYeuBO+S99SMgEuPvO66EyOOok/FN7CLRO
+BR1dxGzLH6hJmwYVAbnxD37iy+NgNrJ02IN2qzRre/FhYTsrMGTLP78OFdMhVdAX
+2TOJJwezseKFhz7dljuO5IPMozsT4CXYLG3weZUF2RJfSB4QivZkOhYMme8/e2sk
+mINnIz1a668lrv9c4ua7a5TJYvzF1pziKINbLFQZSJE0QnwYCEWch1c+QAljds9M
+rbhKyejU+6Ke3xl0bhswv3T4QzbA+bIzkpub9cl6QltZU475V49sUeBoJOi1FI7N
+EdOGmZNd1jrtbmfzGNa1v+HumzyiLi2CA5WU21gZIIB5GU5lzAaNbyvif6mWGAwN
+XYenbElb4Wj8SAEcE7F8oAnPOIqz1QdNOTk0c+HqhY2dJmeAUeyqtK+QW9bpy6WP
+WBJvhSd/s++wPHN01qc8ASF3eQhDeWAxjH51bg6bDkWqqwSbosBgDs47lIARfgyc
+bFKc1aeel4tc2lDJok2TqrNgha6A9xcDw5dh6PbmWuLlRokCHAQTAQoABgUCVfLm
+JwAKCRD+O4xsTmOPa8wsEADAfLJgRKQPmm80V6I+mngY4ArwUjlONyMG//01pD9a
+CdbqyABORqSKxW4YWljoywr/N+hz7dRbkrJiJezf4Q6VGGIsc6BuJY7UrH0d2dRK
+8+WceWUN0tR5irlsWnIaNA8aH+7FVoE7V5kZxWy/8oPwLoOOOrpJclz9ZDVx6d+I
+iIttJRh+cItuYAGIZqZvWpB/WkJA+5Wqj4Q4/uXBgcD4aORVMEg3UJnshDpBnO5R
+b+hfE1xsrARw0dRcSQ3a6RwBtSWYjPIq8OMZDcc4gaeR44B2e893HUBh+sDhcGMT
+6MdRVzRNfXb21giiSPioB/28z2SP1A3Of7mmyqeBITqp1Lap7LfvE3L4yexNeEOF
+p3mg3ql0giFPm9n0CckQEaDjILLTVDftLDf1nzue6eaGvZU4GJnfux4ybZnlNpkp
+Gh8tQGkTcY7ZDfhwseDOdeyQbb6oHe64HInWU62GK6ZUsAjlQIhTbf7xEEzMLrPr
+y9h5fu25/iWckCPfxJVR04fL91Bf/mEgS8OR6BtneqPhIZbslK/1W/QGnFoQs18m
+DwMRrD2OCvxdwxXIUOw5n39XUSMqqcO4c18dEd6R9sEsQ39mOd+a2bdbrHCbrks0
+ljrsCa7m6tjNw8eUlWmYBsebATDEhOncRqV6f1qCrsbrrAgMSd/5f098AorqHhyo
+D4kBHAQQAQoABgUCVfLztAAKCRDNYPDbt0EB2NawB/sEqxVA86FgOBeMooQF8gt0
+uH0mLxzr90hQg8O5KjrDkkjHLxrOkTfczpRj0cOJfX63/O32VOlguXJIxjFRsQLd
+9fTVBole/EBI9KNdj0CtYd5ovx8PIXH4Izj14EN2COSRyAJzAafZUDdjGo6/v9vN
+IfJgv9Yi0ZKyEYdyWzAlTeuQe4N0V5sqP7Fk5O0oU82q/IdERifMrWtvYqD6nGFR
+ThNhsIDb+GTWlmrAv7wv4nDFD8Ap5E4kHnvQLfqDak5rs0yBCa6kpzhuyEQXn+fy
+3VpjJ4iwm0zcp+1sbLMHAhQnBeAY6uUOtglDKQenSsc3lmnxvMLWbWO+YA8HVsvt
+iQEiBBMBCgAMBQJV8o3VBYMHhh+AAAoJECnzPqjmSAp+qYAH/2/SdJjhFnTfx0j7
+wl3LIWdD34M/Ke4mk6/RMmsn4vVDHy3kE2DdkEUWu7RX5xu5HTrgqVgYZry4GrFf
+MViyYuz3aLZH6kYKfHeDOvLDcXumhjWWqkNjJRzJeTuxoBjHzCP39sCENdh9OVXR
+FAsl2IxMmSwmZ4fO5NRXN+Mg4YVqoCzAu8c4kr7nKJxJXRHjx7yU6WqMC6WW+bB8
+FdMNNbIp88tiiFKlM2Uue+AMisC80LfJ+p7QipRKmtG51TxJDvdC/Jt5tSgYRlgb
+Pp0oPsgeaoXNQyYwuxD/OGAbw/R6nnQeUv9tgr74+pCn0aIoqOrKhLDpVBr9hga8
+kzitrrKJAj0EEwEKACcFAlXxMrsCGwMFCQeGH4AFCwkIBwMFFQoJCAsFFgIDAQAC
+HgECF4AACgkQqfV9GDBZuS8+4g/+Naw4vtrHEEQozl2AoFqoXBbJE7BFOkdP9KeD
+QeJmwZ0UU4Bfle5S6BbcnZkI9w5rVo2djZ5L6j1du5ah2U58uNw+S74BCNfsrqtl
+Ix55hbiMMIt9IL6nstple8zkqfuSKzJf04Cl7IXjG/xXK25N0tkLgy+PSIRY7Dp/
+JnEYmcoyIrbUb2EXLoUX4R+HreZhKrwbHqbbEvDBWhJj/Cg/8N5ZxkduAnCboca6
+qupyi0D+hu0D3zt31EXVnnHmI82JV9aBIUru9kZOxl5NlzkR18Gx6iW2/WK+OAXa
+/4QX6mHTGeU6uFTxFD+BpWGm85fpax3ueGPDwGwvJgIVQvELnEH52K+F6ifgeBBV
+tUzgXfbD6Xveh+sgKISKAIgtZtit2IwuKdqTZtV7czLRyLypUUMXLoNtYnFWy/xT
+pDZRHr7ilGcGxjyrBBEX8lFqdoJj0D0kP4nCyChKbYS/WJPAaFcl+5TcV+07WUqW
+9uDCmWY0PUHEqtzrhL/XCJCJSfFjjUi52cS0vr0x7ZlthneJajn2oqxQ3NTaeNvg
+N5bNKh/ndmXP9umOmO3f8bTXGtdRRGNjj7JDQdG1eRum6et+0mfjAccp2YCauZ6/
+TMYADZJ35aV+JzQWjFOfiSv/hVBomOHCGln416pQHgElhoFz6GpLPAPe32BUBtCG
+wC2wEhyJAj0EEwEKACcFAlXxUZMCGwMFCQeGH4AFCwkIBwMFFQoJCAsFFgIDAQAC
+HgECF4AACgkQqfV9GDBZuS8dFg/9H6IIDS1Q0eeRXDlDW3L0NeFvU22Z2TE236nf
+TbbDpRXQfWxCVuoCSa0FRxMrk1r1ueVEQ9bO5bMwo4+KOiD+jH53LKX7oo/cxwDS
+S62mzRsH5eqb3KOdcKVDIfsqoPNYeLb1Tmswt9YM3J0qXHbskmp7BekwVS4Y6Nsv
+aFuNYjLRdGLO5Af5BVE5L+Zk6lUITiLp6NeFMQZ/9qsOENh0nPTA9JrlGYQ9xRHI
+RZD6L1wybFTJpm61hF71npPDn7jKjqCXsTO32E2uVkulj4UKt2I8eWxlLf18wnIQ
+6Joaof9azz/dhtxBM5qAs2KtQRn1ZNxBnpwiJzjsMBOBXXCAitP3ZPJKObvzzVdb
+jy1RmSHas2DhLmTslUptEPW1VyLaftjemnCPRM7O43ZyhtG/wD/Un1FZpTYzTF+E
+TSesus1ZP5Bp94Oj66yiBXbmSR1faBAaoyfe21cDimw64ZX6iqNLvbM2z4DTrHtN
+O7dcbinN88wKgyqeblgVk43uC1gtZ0537O1gVw9UYW3ffd7mIvQ058y5Mq8Cr/Vm
+caHPsyRYhaVePMm6k9GVhSUQ+78B3IahKPZJUDCzD3pm/x0hQZyNCh6oA2ql28nt
+KPxYJYOUGF4r/DPDYzrJ0PuA1ArwwgLBCdlJIfKCeXFamUxXO7NeAScxkGU4pkh9
+USC9V960HEFuaCBEaW5oIDx1ZzkzdGFkQGdtYWlsLmNvbT6JAj0EEwEKACcFAlXx
+TncCGwMFCQeGH4AFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQqfV9GDBZuS+Y
+Xg/7BIXPEsQbB9OAmvTWBrVOn4kDWVZbmZQZrXDwzqjChep2PLF7vmbu+BTS+0UU
+eJcVhSInTq83w91AYgs5a8fKrL1S60wvKLkUKB0XkPCRFEexZM6TKMKPGJJJgk3o
+W6G8pkc8SKjnvUIjYJlq9imwPj9e+nmGNHrLPvQbXK2Ufc24mCgcgkkGXG9skusm
+Lq7BoqUr8lq1VaRK9+plImTfxgNa8qHQPEOinXVIK6R6ZBZ3ua3neX8KZ8sYiSZO
+4BRHFUZg8UvFI+rsz9H/rMlNdt9iwx9buNIVDrBfJLzEoEvgwP8Cns89R3VRTO49
+Il6AwGwyw/TY5Si3x+QAALWuL2u/gM8/emCW5ew1+wL7XQUFzKvGOKUt3bwH5zVt
++/bLelVRWjALB5rPrf53LhVLynFsLL59wxKp4nbioTNU6YwcIX2NJN0STTne5NAx
+iov8n0hot/XbELv4aFIMVNUgSpb27Y4XJ1YlLm2ULsoF30U/sq0DdU1G7Sdf//uv
+Ys/5aMQReZvV6EmAVtRkGrrDu6paSkOk7WAogqZ1E29HIjzy8piomQVVKXSn9Zc6
+8MgDmyvUxno4cCN0IrxrLvhVpav/ULM71UIdzqfsPWqfiKtZQag1AzFTydR+UUS0
+o6A9m+WbNNAcjXc6Ru9LV+Zf8B4300MEjP2JGAH4yQCXazGJAhwEEwEKAAYFAlXy
+5jIACgkQ/juMbE5jj2vhXg//Wy/TCd6qMfa7N9U886Z6HDulN3S/jqOV7jynplh8
+kwK1GtWUs9AmC5w/zxWBrusqG0k/j66fE5x2MkDu1Gm6Ni7c0mHh0x5y0sSX0HMx
+Y66uviTpaOIJZqZ3fnn4VHFu8LESB6fyCLialXkODDqlCV/T/akO2MRpFYe3kmSz
+WUC3/CdP94KUX+bXffZqiKWUATDSbIVIYS6BGcd+YwE2zJPQfDGLMGKzLe2DYO8j
+XssAoX8ECk5reP2Ud/SOjGprSyaCgBuRNBwU1/UwS5GgJMFanirW/O4tE4yyzfXx
+mWnJcERvBxCqLRXq4cfdZazDUN1HJwBnmUY1E1SChp2H5xF3QLoEIvfbJNUJc/x7
+wJLwSKrUfZoPh+ea7IOtUJvR3RX0skH969JmvjREkjjEl6/OX5uEk97CK28Snwge
+iJJdrRguDQS10XhSSZWvZBF6i5JUFfsbauLmaSW2teLEXB0mVQu3fihEoPLCYMZL
+kkapxpckCj9c9wodA3oDMV5p9uahknWcI6jUV5J5oHzzumnS48pZeTCMoTfgOmo+
+GrBmehPr8VRAhuC0n+ZxMD0v1PtagsASg9y4mSk5uyN3B7wzvv/m5+545sB543nt
+Wx87GDhLkHN2x4lAJfrK8XBswy83tCMN6NTCVXqYlbebADSE53U5feOrWppVjdfM
+/biJARwEEAEKAAYFAlXy87UACgkQzWDw27dBAdj/fggAvUjLuIsxRPkFYof8Y3dO
+LU5+AfHP9Q0U5c1qumuq5H363VQn9VGPK0I1ry/8Kj2clHZkLSq7hNltN0iT7J8s
+sa4NhB4+9zuLTJsOqMDVP2fzIa2fHIQT3IWmbXWC9hI3f5ZsooeCA0upyvbanDD4
+YpbVkPg6vvIybDTVqSAAD8TaJcESa8W1Ucr3OIFDtgjugtICMTUAFHq+GySGp5cy
+uGKIQ8XRKN0CeKP+4tgH0fXZfpFrYP3yqYwxAmuUVEigzDmBZ8l2F/EygYyjIyTa
+8y5DvzGAPvM/W7udy0jLTh4TVv42d1wn+ZPORZoAXoNBL8hwOoZK/KcsmOgubIpB
+dokBIgQTAQoADAUCVfKN1gWDB4YfgAAKCRAp8z6o5kgKfgqCB/sF442bt8fJpyiL
+OUdZPL2mULFBcuqJMHeoJRgBCASUOXRGaBRvTOqRgMKeXT+3/2d4/ARzVLZ1O2vx
+7kBlpZHo9fA3Qf6V0/IReYhLhn4a4sVmEEK7Osp9DO4rjjKf3sr0lBY5aWBSQ+89
+ZNCivWkJzJFKDeJ78P9I/TJD/QprcNQ2feWjdZ966ZdyPgkMDFGvje+/sYz1Y5NO
+Tnu2CN7Mv1Yo/ejTqtPTPa63e25dwLfzDMk65AqL6/UtrOF80AWKWHutrqMn0ExS
+o3wVeb2ATEjot61NyybBui+rrgqd4QJzzijXq0wHlPhunvkIO3y1i/gbi2KSUSrw
+JsHYf7BUiQI9BBMBCgAnBQJV8SbZAhsDBQkHhh+ABQsJCAcDBRUKCQgLBRYCAwEA
+Ah4BAheAAAoJEKn1fRgwWbkvp60QAJNJTPNoW4AA3pCvmWaDCusx+uaCtZ5EkNn5
+Guw8Sj+2mu+dgL5AwSIyplkAjkZmOU0rIjA/NqTYrFs9/aqJvPBShlXiVP05I2Ib
+F+A0hWzQzsxLqdq4LAKOVbeW14OGLxWVTVVAq0TTWoiJu+Mw0ANrbP+qeFpoINOb
+XGjH6TSFZiSnMgKAKAX5Ovo8fRYoQKENAUDBaUtR3U+2DP52RvqQszMcbNciHSKC
+tUKUw7W4si0dnJO24w/P/e4oq+oagvt2UAEC1H9MNpDqV6WsawozKIpU79TvAQum
+4ge+KTx3nexZwDUxtOYzCZ/FU8MG7hjlPY8j9InPTMcvFUOshp9u5NwHPN4+fw/I
+5r2W9+UNNCxOaQO/2nBdAXa1RMwUuKDdPpHeMOMD1mSVDsYyyH5egGen+WBVO/ZS
+6K+NZSWKqs4kHxzXO6MOHEtz8BRDDy5nsX2v3B+wJTrJq4QPxVYhnyZ0IbKLC5SM
+MoULwHonyn8kcZToHAtX6hJHag6dd1HbUbaMXZ621M8s/eN3ScK6uS4uJsfxtBFc
+qSAJrDpVi3YxiYQfwtJtnobfxQ2dsYG75pUA1gXHyRgKKogp8LJk6r4WuvHaoB6h
+Skqqt6ifAX12AEzE85CLwonLbPSMh1heCY2UX3rEv4lEfpevAxQZibOi9mIMjajo
+8DPQ9V3/uQINBFXxJtkBEADWzdDuDzWASIWGqu65leM6uZopLEPDDeUYbYujbHDm
+nS5JA3sLEsbwad4qIp00QxsyR+58oDBYPJlIsjQtZGJq50SLZ8xDm6t+xohXwZfS
+vvVNttIOZuWkci3E9fTgxw+TPmyt9hxFi0J4X6z4Tv3FnKTIswF99F5GWQLQV3bz
+Z6r2EwUeCrV4LeMrazX5ElxeE3B6YisH9/8afORtT1FO5Uan8ZjqOtleXylmhDp6
+ODUZcbAe8Wt1zbeyk6T7IV8mXCTWRViu35FB0HTDSE4L5XS1oiNc/7HNgj/Br8Xj
+UMo9D78rSHg1m/IgW7A2XtDKGrgM535iQbNWroQVL6Vq8F2cXFkMiLWlKsH7VfiD
+2FvQBVdhoLPGjzSET7oubioEgDlH8JgLxRmUbSUQ7aXjC5a5tDgHwAft2Pcn5pNr
+n1p2mebViQ0DLQx1Ddg4mHy8xrX9/kZ/GObttulxzClR1HFJ6WK7YqJwNiNLPr32
+TKaDNG+8z1WpfO8bIN+tnZk7mtr5NpkWOrBfHA/Gyi3V4WMznHmR8prdtBg3ro3u
+cxbNu6zwkqf/AoDYL7+BfD80ovYNf1kPokdJWvnTZbNyesBLlgVpTtw3tUyp+rVd
+NbzVdqA/ntBhYF5V7DeXDxOy4uGRHuM1+AW3KYCQpCMTLOSDBjD8NSIXRU9sm4mj
+4QARAQABiQIlBBgBCgAPBQJV8SbZAhsMBQkHhh+AAAoJEKn1fRgwWbkvp48P/2pz
+9DdcxK44SSC1f9xUPVqWD274bx3FVoCPXCiSxTy66qCM4AsZ446XuOccSqWNchPJ
+QrBSDP3Z5wEO8kcGRENowndOqKlh/TitknHZf1Ib3eUBH1KdcdUy0STFDnSQFaxR
+pLLcR8ez2D8RgNP1A7GP6/f+p0UdS4xjxTIR18zrxnS+ig2y5OyOLfFxH5NPUXDI
+zhvhvC19zaNFgyiDrPKRlO4h8/bWifsf9QHbBMSYwnUIG4NCLO+nnAguVs9t0YOV
+UUJ9oe1ilUNL1CMQO0ZADj8OVdo0g42rc0+c5V3xVmkzdD3KZ6sxMrnjlDhuqOD/
+9C2a4sd3bdCYjqB0iwtRabqGKUrILqCeqrLHlNumudYvWHJpFehI63mALiwwhgvJ
+k2uvuFk7Np7Fmta3+mAbMc7/0RY/aa470JJvHuK5wyMSNK6gYXrCE8xkpav4A7xb
+SEtKr+aQ7EHkhRXiI08qps+WTo8tscuxAAF0uKAD71XH5Gy4akDKWJPfW9d7nFFf
+kgKGFqwAxeSSBAEACQTeBLNGya4XKayDybyikPy2fP9cYFKxQc5WV2UETjh/mWwM
+9UtFzNjxJa4OQRxWZC84zFNPlXrHlHsgBcoG0FMsVCsm0WPGhTla8kd2T2+z6Opt
+PzUX7blxl0y3pG/NrJICPg03LD4sWhaJREkRxpgk
+=Yx0U
+-----END PGP PUBLIC KEY BLOCK-----
+pub   rsa3072 2019-05-05 [SC] [expires: 2021-05-04]
+      10BA3B0553317D66F134077E0187F247D3B4224A
+uid           [ultimate] Moaz Reyad <moaz@apache.org>
+sig 3        0187F247D3B4224A 2019-05-05  Moaz Reyad <moaz@apache.org>
+sig          82B137F934DAAD6D 2019-05-05  Angel-Ventura MENDO GOMEZ <angelventura@gmail.com>
+sig          EA6C3728EA91C4AF 2019-05-07  Felix Schumacher <fschumacher@apache.org>
+sig 3        FABEEA09897258E5 2019-05-08  Giovanni Bechis <giovanni@paclan.it>
+sig          D4D8267B628A1BD8 2019-05-06  Niklas Merz <niklasmerz@linux.com>
+sub   rsa3072 2019-05-05 [E] [expires: 2021-05-04]
+sig          0187F247D3B4224A 2019-05-05  Moaz Reyad <moaz@apache.org>
+
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQGNBFzOi+gBDAC07qQEQ8f5Vf3toQg7ADM2ip7E1dZYKicQqq07oNeeIQm//Bi5
+8Ex2uiLgoF0IoePvoODahukiWy7uXekRH8f7NRg7g32tLY4/m2LNfVcvOgpIhohI
+LgWF9mSyZ9wpYfiZzF2bG6MGwXw80bssGa70aHMQloVXhwJGr6Uh1I9wt/HcleWl
+wpgC6P5HCX0/2H/E8Jxeqq6/kvuL1OrvvUxsm+0BF/Zzo4HzRYNqLcj7gLifELJ7
+7Ejr9ZsDscJ79ie8YfNnv7BgIt8VyMEFDJ+Rltjb9nt0NpEu8L49g/oWbRiOwBRe
+Az3uFozspMxaVdhZtxPvmTedXViOjZ7pR+/jp1i1cKiZeJYfDvX69jGVOTFWnD78
+N2aEN3J/+qXNbA4X/W0K0RViu0UkOc5W6zA2nqlpziyVtS2H2hjY11eo5wM2Zk5E
+5fxWz8+J1hfX21uzq1Fg6F94UR0ylkVBMTnJyTKf0DEKV+5LdSmADaU5861ckwZF
+0IOy+ZjTZ8VhJWcAEQEAAbQcTW9heiBSZXlhZCA8bW9hekBhcGFjaGUub3JnPokB
+1AQTAQoAPhYhBBC6OwVTMX1m8TQHfgGH8kfTtCJKBQJczovoAhsDBQkDwmcABQsJ
+CAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJEAGH8kfTtCJKa9EMAIa3D/J5CtzgMHFi
+zagzZjuubIt+dyJxCHEd/AdIHYx1pPp9iXGDA75clagbvCA9EROpZ3rIPmL/bJvJ
+ucDcy4ge4+VIvmvcAHVw33OgWvmPSbT0BlTarvWwZ9i9NNwK0tbv1f3UmM7SCJE5
+EkHiSN+1NNwJzuejnRwYdToihAtIOrkeTEOmSWN4o6F6hWGVG/aORSoKIKIHBNy5
+6O/YkDJ7nuyUc33BfuKBMjlv+KHEGvJx6Nka2+yD/9gqFcVHmsBHIfqQnDbMBd8A
+71KvWRCE2Evdys2DKpJjaG6T4kQOh9ePcNAcv6jfKJeeztzwIdqYfT60qtaNoXI1
+v8lneOW0aixy2Vj/x5jfnKiK6tJh+nHgwqXzOcvUe2fR6kHv3POT87e4yahJa6Bd
+wThnMPQL8iJEuh4uSRBDLb4W7gz5+I9DC0Tor/EmznUVjCA9wc9E0pNxl95OVPld
+S2htfoBbz1DD6cxw81EH3t7rhnmnSkVs14t3RtJhMwD50ZugA4kBMwQQAQgAHRYh
+BHM2/dWkm/efvF5yWoKxN/k02q1tBQJczuvhAAoJEIKxN/k02q1tdsMH/2Ug0DiJ
+7p72YzA9e6J+WdHZlRtde40uIw/BfFk3XGq4NnvyEQQR8sytJrLfj18/xl/gWex0
+gwL7JgNtAomzlQ2P+QmKwMKST8n52NPg1CJH6FaWsSgqSI89nfZHcPnfOwOA2TVL
+mV/VkGlAMRrxWxrXW12ePHo1xQh8NdRZ5lnBGfYz8pokyJM2HCwiacP9EK8Dy50h
+fwU/D8xz8KFcC0zRv0vwVqcSXD4T7hGHWHxBKr/WlmbQNM9hNDy2s9ZErorWGx65
+mxgJX9dWDrXW8bjywQ79Mwc/55ov7D16qFjR732uAhzFDKJUrGv4ZgVWJ3CgsTO/
+fNy5OWKjGOM0Fq2JAjMEEAEIAB0WIQSTr9+eZPnrbtOssCLqbDco6pHErwUCXNHD
+jQAKCRDqbDco6pHEr8bREACzTwyeQxsRDc189r5Lvj2mjBX4V2W3FEzTmMsTr5Z3
+9lkFkE8ATgAX/pJhCsLbPUAwcPLA3ZFHgDHi5tqO/sZEvqxUc/O5/tzRsmkeA4XL
+Dg1163cVHH/KjAQIZDqEeCIdD4VA4oi0FKN9a3/PMF32HxtHr7/Vs7sEhNPplIaP
+ZzOFILhN2bCPJOaz2RHADDChy00u5H9cQpdX8nmLDcJ1OoyUml1EWKBTxZUsL/aV
+MkgPO7ewb/qaukebFMb0aIACzafEU9UOOHnDfXn/wWqGUKdbWEGfoYNfFPPLStnb
+H41FXH3kViaePc475Cw5f5dL7P/ZxVKKS7weMOIdMDeTCz0J0M9HTCleDdG8obiK
+/+kEhoRNvOpgCfz602R7jp9DJ7LSNXuatjHT7qg+kz6CBYk88xKKbDKETf0Nm3Wj
+AtpyCCuHsfoOrIbE6K4VXHtMl8lbn3DDpKhRGuM6LyZSO6DBSVU2fjkcc0mwslTe
+8ftOVOnanFbkreQm8Dlkdp1TQEvD3JueoxmVGACxhhSILjxecKvJcWFGA1tHHVw/
+ldf7zkqz9KnhvdtCVCUjjLYa1R0OXI4iropbGWUMoQM00aHFjR0FMS2gmh98EgCy
+M5ttjn6oJCmzp4VDZopRny59WWUrrksOIrfCSS46oCqUs1WyGl4Rv/nEwLUOZwxB
+iYkCMwQTAQoAHRYhBKoN05xukd6mDMZfePq+6gmJcljlBQJc0nywAAoJEPq+6gmJ
+cljlNY4P/1IUOyD+ZaSIMphExWicC+purQpUdWY7FwAajkz7SGr94ZnvEOanns2p
+6L3WtFCVMWPuXMaVfhEGWIYvKubV9uNb4LF5K0z5McPh7LYK1hGueSS6BSZwY+K5
+UYPDj2JA6a4DNCpfUqQLxMoQDTov6gSHsbnzNfiyWUpeeNJLdgLr/mBU/KY65WIr
+1mlwGr93gUBXz9x+udT24euTLBQHDSEjxNi3A9+U4P6GCpJVCzoc+gYgshyD8WAf
+T+xUDcUkvS2hulQTYdL4m/OV90/bFI9+ujE1jL0sMuILfggZHOy2Ya8is/lqaHPq
+eXHCzISd9SaO5ry8x59HgMiu0tQej/8Ldgrz4JNSkeg9UT45NZO/tm8MinIVPeh2
+3zEQl7OpzLkIAmSqtDWOIC2Mb8GwZEVBT9wfTZHu8B+OR78dZw4QgLkXioS28i1I
+CI1iw95dQ/fc9CygSNWRGKE5lQXCfsWWayBv0b+GzESlH9C0yObqwxQA7aSn/7qW
+aY0O4K8vFyoiH/b5cwBNE2L+HXeo8H5rRbzl4mOsoNpx9JfTu8kbyvKcFSayOuOl
+kWbeNigBkufg0UKI1yc+Ec0yPDyANGhFqfE5vYtxhIp/Z2XTrXx1asxmdgdkInoe
+Qpkl7BG9g0onDQK0DFhMl9VpwUgR3pv5a/3p/pY9fQcnzWjj2rY9iQIzBBABCgAd
+FiEEad+kk35U+P9jYuOe1Ngme2KKG9gFAlzQYxcACgkQ1Ngme2KKG9i0FQ//aZeL
+bpcFL7Q+1cCANP98DOs3tia51YiOJ+fasuuGoaay7YaVssjVfYX/NOd8WKoN6Ju8
+6Rx92YCpxfJpGUVNG3k3KijsrkZRQs85J0Dw02ULHJsfwyPmBNKC/Al5Iv4ZOz1F
+vVMtQO2NdACCdbXFHpr85T1m1LmuWOlmtk+YbQBgudv5FsUq6DHkprgnvmN232Ra
+m5V3THQqSCrfY7FpTeEbvaj2RFjTtsv/U/mbNBM7wWxz2maNCctAM7d6XRuYKg7B
+zuiCn4VdpxHg+ubhpoWhshuYkeZ+vt57XgUBbmyvPnA6l2wn0HTLoDCDrB9+kR3/
+n+Qf2NaUoqWAc9f0S1AU+QCAXG8jaKRVQsQoD9m8pMFiXyAs/lX3bhs3ARkCl86H
+Dv5NnJLQ0Nvk+yz5dWOSCv9lKm1rJ0QkSH1wjNU8ZsohCJeHdwbV2CgmIKu40B1H
+lDum2IfpKaX1140wZnbolNl1540srYDCI4i0iEVGGCpdcHTXeBsxFUsU/d7WBUmh
+zOwasK2QVgt4M4KjsJucuw6KoIKrSiL0RUsjuH6Xrq5+SOQmZvfGb6pJbWDNe5Px
+qDQgIFqT375k80+cempo6eAJAdAl7Ba8dOme8/SWLXCXer+yvCq+eL6O96gaWUwN
+H3to2+yXqeif76BSujiKaDLye0YDuuvsLhzEmPy5AY0EXM6L6AEMAKU06gsb12Di
+Ny5OzN072/rRYIx+I3Hv+liNavPhOtud8qTUPsWDTmpaU4rEBy1mtTcSebWZN+V1
+WXVQuUMtY4zErR9DnN9ZXFhs/uj9O7oyGbVjN+ppXo37YRhzDvx++93SWwtNYA2A
+4uhU1YIGQNrNFMkw2siAggw3ahXo3kJqh3VI4T56lNWqJf20sGa6MLZ4VaN+KG45
+JEsChHL0RAJ2bo7E4Qh3BbZBKGDUzCXGjoIdQlPpkN1qIiof/Dgqun47zAAIg9Ss
+YDYXAadXsZkI8kVNPByzCo5uGvaoiFURLMNaH4lBLF8mIqamCMm8UBSkztirDnQ6
+IpaXgLETPB8XzkFfPdv88MUwTnNlIFbwtjgLac4kznMlsdSlu2NzsJX1XdIRgWHt
+R2yQXsV57drGM1ymacNvUxaW6v3H9ed8OPr54Gz79wlZx3DlJRf2zRM3iN2iaE90
+hNHXHDIqLTidCEAVQ0yuT/Md2+qKLhjQy9iyEhB8xUWOqgemwbtszQARAQABiQG8
+BBgBCgAmFiEEELo7BVMxfWbxNAd+AYfyR9O0IkoFAlzOi+gCGwwFCQPCZwAACgkQ
+AYfyR9O0IkolYAv/Tx7vLBnqt9cZp+OiCoetbo1AxgEQP6eRLJLsRcxiuguMEVBe
+Ep0LZQTcvWBeCeq3cSKUz7+Bnc8Yo8eY7xDcFa5lfD5D+zlJjkDtUuhs4yL/svp6
+F0THdMMn9KomsSdSX7NtavVI9yJDkU788sSfeWJtG9yEls4GPsAkADcravAQLVlx
+31+dbspQAPYjLMeJsK5LH3h1nxhoQzALk3wqGcr1zNA8Q4JdeoV1mlZHf7N5//s9
+TngRSlhjYw/I5V236ifm66I8QCNIY+uqSmhITnhVV1fnILTMz0yzjVI4DNORQcQ7
+CNqFT8/+fOfsQFyvolYz7Wcq+quxs79TwpP/lbl6Fqgwn/kCcjCuqZ/Z/6rWqDg+
+N5f0KGzTPXQEiQBP+r42VO9KYAc/MK0K3fOAl+gSbV8vmJtcFRrAe202AvJ9jrPl
+FhDuYxq7NL+JgEVDQW2OYeJrsfWkX2iiN9gALBYiPyNHBV8y7imAwL/6n1vnB+DF
+vKsYH6AUn9efksWn
+=1Ofs
+-----END PGP PUBLIC KEY BLOCK-----
+pub   rsa4096 2020-03-19 [SC]
+      04C5DF6C57964D5940D7BA2BE8F047DE6DABDD89
+uid           [ultimate] Wei Wang (Apache SINGA Release) <wangwei.cs@gmail.com>
+sub   rsa4096 2020-03-19 [E]
+
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBF5zCIoBEADloLJsDcxjXutVBoZq1WaeiJVggG6aPZMYTWLp63tQyKtMJwvs
+2TRNZC5UVhb4OPMmmlRWT5fOPrSon+MV4KO8jIpB5B/6khnailB/gjSmei63NLr5
+ti4mFSFQu8vm+pEGvk6olM5Xjc6wa0Jukwmz2fScbIpAJapoXhiN89WKwiSXhWaR
+w3z0wAsN3l0ZfH1tgPA2AQSXlp3h2Ul6OjVbNHC+vRQFP9nPAN7mZZCvuo0F6kji
+hERWLIo4tV8ds5a2JUbYc0e+gs7SA3H4NAY/hJnJgZwOBoGb2ubha2o0y+PIuDW2
+xqO1EjTmg66zsFMJeUrlQ7zoPxtFcvp09/p7OZFlQSIprw2P46/2rMgdwT15G2U0
+ELJfPCAXLCI/7dudns68j+7MBLbB2fSh88c78xPLihkT7ujZuGK0EEsHUUgIJyFM
+x1xM075fuauSeSt7/q4yYroSULQgCtUOE5PidmqoeNzzzHtv3yXDdqxfasuMFtUy
+2NpDSY96V+4iPifax7ENnvveUH3vp2pM3gqkjyh/dYR42k9DrcODz531Rf3WgtT9
+gpuBYiMLzjdhVz6lD1yM7QADl9gwRvF2zJ+jsL3MvpODSdJ9vwm5kzUpgx+Esbw9
+WmUvD8oTdweLCa0OKHT2AS/lVzW/8rPYEiuRhrKGgpmQ305viGJw9jdHiQARAQAB
+tDZXZWkgV2FuZyAoQXBhY2hlIFNJTkdBIFJlbGVhc2UpIDx3YW5nd2VpLmNzQGdt
+YWlsLmNvbT6JAk4EEwEKADgWIQQExd9sV5ZNWUDXuivo8EfebavdiQUCXnMIigIb
+AwULCQgHAwUVCgkICwUWAgMBAAIeAQIXgAAKCRDo8EfebavdiWQMD/9YxpvrQKMH
+M75S7FhfX/4EAgFiXIQQa4Ymyy0I0/pK3KIUKJLnng49tUpOcFkiMLVbUtXRw8k1
+jC1Ky+ramMzO2KZTa8TLPZmMI+iPEQMXyTAjgAx6ODj5to+AqlANVv0pATEJVTqb
+EiSHKTPATajH0z3goKP30AvseBodK81/D0uD+0AzloQbc7muZbE7LrJ3E5xPRUqS
+SNgodwjhKQPke7UeeGEWQOPzu/WqCJ9RpCzAhPrDOtKXgsdicRPDzuZY5M2d2T4H
+GLbTt7ShjK8RDSMs3FppkNqSZ2gdUnoMn/nqYYJe9M8IgXqc0rZOh45Naas8riPK
+9kZIK6baxLbG17vggVViwfhFYXMU1Aj4wXTO8Tna7gCl6TwYae6nWwvovI9WEGug
+vERRjt5saE5Y0KNw5nIaIwR6zQPmSj0tHx3WMo/OnFy33jww5JHUUce0p7/PLqxm
+RzkBB7SNE6kpt91FYYF5XqtQjP5m02WPWH31ed19WBq7bwh5pghA4vhFHGsmH8yY
+YSrG0ocXdhBJ4bz+iv5eNGYiJbY+N3DwIPQ1ZD05vA8zJXkwq2/TZ+g8+3iqoH5M
+tnJ1xSAcayVRHxrPCIyQ+GS0Pi7RGylbM3wLniyxo7t+l30wSaEkWZbjtjKTxbVZ
+CdeU4iOvvOO3mweTW7uqk68Anwz1+B+IdbkCDQRecwiKARAAx3AOwyzMhpYvtgNF
+RnLiTaooXEnyZpxp7DtfQ8kJtgq3vQJWxjBU6ZWztdLSK9hAi7pVtStlL1oEWCUb
+LKz7sfdoG/zGTD8EKATVX+A8mpomzy+RkR2Nx4DEyjSCPd3H0/Rr5P9hqcNbBYz0
+/7gzMhGo8WBzMNAKek7WS08CFk6JRrjvNs1vleg00s8paL+IoyP6Dc3NuApVf+//
+3GWviBH9cGzfqk1G5gz8paiaVNAZz8Y1iLrS8hUaLMX6kHYYoNTWS14gSa5RodKG
+bCNVDFP0hoUxALN2kIX/SyFAWGHcu7MpoR1MAPt1VnGh16MQf3gbRQCvoM30QdUM
+8h6yJob/aSSgMqBlMaOjwiHRKt5Fcwhe5QL3LqLjtzn5hfzs5xffSCsg8YTV6Y1D
+XXOSg89rxLLwZsoHSY+YjxZA1hwdSiYGiOaAxZDI/jFD/HFDSeSJekTi1Ohh9HxS
+2S42B91LhPGbJl2eZPOPB5fSF4vaWA+YxisBZhRyOIGEOvf66wClotDVaYs7ePuN
+c1bpiBZKZUo7kIHjHPTX3XjYhg+kHeZTfnCkEDLEabEQkb02/hxndNk0f1rpXTSC
+b+co9gBXMcgbQI1TvJDAAjRu2SYrwN5aKS/SXCylyWs/0fM7TH3VqyKbKdUhrt6x
+gB34naKm4Bxlv2YDtqVmW0VPI9UAEQEAAYkCNgQYAQoAIBYhBATF32xXlk1ZQNe6
+K+jwR95tq92JBQJecwiKAhsMAAoJEOjwR95tq92JIwIQALBHol5uKScxypvscxQW
+bXOiroOc0H58HDzxpV9XLCEHE919hRaU26PK7EpXOsnG851v4v5zXCtQeikULTqn
+OPX2a0k1IRkTShBS1W0SzCtV3U/qaBSHBaDmuXU0kK0ba8g4NWgcabeL5rYXJjnB
+hCJdUJoKP9iSFV2hAf0wJFUO5Nk0kmoMX0h3vKiF91ejuWJUXH5iCxTGIetn7bi0
+WRUOx76GHyjYh9rPgS0TAAS9TT2sopPvSgfgj/Q8hLLvF8PQ5XjbCWuJYpJkH1CO
+MF66iLs7ZfNLbRNXsee4ffDxzcbIprfHGQb4HbUZwvjhii40SPiK9PLu/Cqc6nIx
+xIFa614+smeJlgNuP895xmIxy92hF0CBdzdWFLfEJRmEbjWt6azzdTJw4zAAZ/hS
+2J7LqHx9vLHqEr2HkJNXuCdjJE+cVUVwiS4ISW0RHZCf7iAzpGKNkvhrDpwe75QJ
+VugJmm1TdV4I5Zv6eeELJ8ZXSd/xzx3pTSsmTBXiFMVd6rL6SwLJcShU8iZZ/kY2
+374F6C5FC3wPeQMeQYE6Bh0xlmR7io216Ex+z3GUaJltw9Njh/Xr1YNsHS9PC8ib
+Eg758rpH8qTwgyn24MgQoCqnNnTKYtxp4Jv3U4hNqzUw3XHRw50Ql3UVyA1Ha/if
+76eUv1NgGzx/5IDkZAhl3Ryu
+=O2O4
+-----END PGP PUBLIC KEY BLOCK-----
+
diff --git a/LICENSE b/LICENSE
index 9a119a7..e968ba3 100644
--- a/LICENSE
+++ b/LICENSE
@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright [yyyy] [name of copyright owner]
+   Copyright 2015-2020  Apache Software Foundation
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -200,12 +200,13 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 ============================================================================
-The SINGA project contains subcomponents with separate copyright
-notices and license terms. Your use of the source code for the these
-subcomponents is subject to the terms and conditions of the following
+
+The SINGA project contains subcomponents and dependencies with separate
+copyright notices and license terms. Your use of the source code for these
+subcomponents is subject to the terms and conditions of the following
 licenses.
 
-=====================================================================
+============================================================================
 SINGA bundles the following under BSD 2-clause license:
 include/singa/utils/tinydir.h
 
@@ -304,6 +305,7 @@
 is owned by The United States Government, and operated by UChicago Argonne, LLC
 under provision of a contract with the Department of Energy.
 
+
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
@@ -389,7 +391,7 @@
 =====================================================================
 SINGA bundles the following under New BSD license: src/api/numpy.i
 
-Copyright (c) 2005-2016, NumPy Developers.
+Copyright (c) 2005-2020, NumPy Developers.
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
@@ -419,3 +421,140 @@
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+=====================================================================
+SINGA bundles the following dependency under Apache License v2.0:
+oneDNN
+
+   Copyright 2016-2019 Intel Corporation
+   Copyright 2018 YANDEX LLC
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+   This distribution includes third party software ("third party programs").
+   This third party software, even if included with the distribution of
+   the Intel software, may be governed by separate license terms, including
+   without limitation, third party license terms, other Intel software license
+   terms, and open source software license terms. These separate license terms
+   govern your use of the third party programs as set forth in the
+   "THIRD-PARTY-PROGRAMS" file.
+
+=====================================================================
+SINGA bundles the following under the Boost Software License 1.0:
+cmake/Thirdparty/GetGitRevisionDescription.cmake
+cmake/Thirdparty/GetGitRevisionDescription.cmake.in
+
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+=======================================================================
+SINGA bundles the following file and dependency under MIT license:
+examples/onnx/test_onnx_backend.py
+ONNX
+
+MIT License
+
+Copyright (c) ONNX Project Contributors
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+===========================================================================
+SINGA bundles the following under Apache License Version 2.0:
+examples/onnx/tokenization.py
+examples/onnx/run_onnx_squad.py
+examples/onnx/inputs.json
+examples/onnx/gpt2/requirements.txt
+examples/onnx/ro_bert_a/requirements.txt
+
+# Copyright 2018 The Google AI Language Team Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+===============================================================================
+SINGA bundles the following under the BSD 3-clause license:
+tool/docker/devel/centos6/cuda10/cuda.repo
+
+Copyright (c) 2019,2020 NVIDIA CORPORATION. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+ * Neither the name of NVIDIA CORPORATION nor the names of its
+   contributors may be used to endorse or promote products derived
+   from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.md b/README.md
index a2788e7..6856881 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,33 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+![Logo](doc/_static/singa.png)
+
 # Apache SINGA
 
-[![Build Status](https://travis-ci.org/apache/incubator-singa.png)](https://travis-ci.org/apache/incubator-singa)
+![Native Ubuntu build status](https://github.com/apache/singa/workflows/Native-Ubuntu/badge.svg)
+![Native Mac build status](https://github.com/apache/singa/workflows/Native-MacOS/badge.svg)
+![conda build status](https://github.com/apache/singa/workflows/conda/badge.svg)
+[![Documentation Status](https://readthedocs.org/projects/apache-singa/badge/?version=latest)](https://apache-singa.readthedocs.io/en/latest/?badge=latest)
 ![License](http://img.shields.io/:license-Apache%202.0-blue.svg)
+[![Follow Apache SINGA on Twitter](https://img.shields.io/twitter/follow/apachesinga.svg?style=social&label=Follow)](https://twitter.com/ApacheSinga)
+[![Docker pulls](https://img.shields.io/docker/pulls/apache/singa.svg)](https://hub.docker.com/r/apache/singa/)
 
 Distributed deep learning system
 
@@ -9,14 +35,22 @@
 
 ## Quick Start
 
-* [Installation](doc/en/docs/installation.md)
+* [Installation](http://singa.apache.org/docs/installation/)
 * [Examples](examples)
 
 ## Issues
 
 * [JIRA tickets](https://issues.apache.org/jira/browse/SINGA)
 
+## Code Analysis
+
+![LGTM C++ Grade](https://img.shields.io/lgtm/grade/cpp/github/apache/singa)
+![LGTM Python Grade](https://img.shields.io/lgtm/grade/python/github/apache/singa)
+[![codecov](https://codecov.io/gh/apache/singa/branch/master/graph/badge.svg)](https://codecov.io/gh/apache/singa)
+
+[![Stargazers over time](https://starchart.cc/apache/singa.svg)](https://starchart.cc/apache/singa)
+
 ## Mailing Lists
 
-* [Development Mailing List](mailto:dev-subscribe@singa.incubator.apache.org) ([Archive](http://mail-archives.apache.org/mod_mbox/singa-dev/))
-* [Commits Mailing List](mailto:commits-subscribe@singa.incubator.apache.org) ([Archive](http://mail-archives.apache.org/mod_mbox/singa-commits/))
+* [Development Mailing List](mailto:dev-subscribe@singa.apache.org) ([Archive](http://mail-archives.apache.org/mod_mbox/singa-dev/))
+* [Commits Mailing List](mailto:commits-subscribe@singa.apache.org) ([Archive](http://mail-archives.apache.org/mod_mbox/singa-commits/))
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 43d4004..f7852ee 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -1,3 +1,231 @@
+Release Notes - SINGA - Version singa-3.1.0
+
+SINGA is a distributed deep learning library.
+
+This release includes the following changes:
+
+  * Tensor core:
+    * Support tensor transformation (reshape, transpose) for tensors up to 6 dimensions.
+    * Implement traverse_unary_transform in the CUDA backend, similar to the CPP backend implementation (see the sketch below).
+
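+    A minimal sketch of the new 6-D reshape/transpose support through the Python
+    tensor module (the helper names and the default-device call are assumptions
+    inferred from the API, not verbatim from this release):
+
+        from singa import device, tensor
+
+        dev = device.get_default_device()           # host device (assumed helper)
+        x = tensor.Tensor((2, 3, 4, 5, 6, 7), dev)  # 6-D tensor, now supported
+        x.gaussian(0.0, 1.0)                        # fill with random values
+        y = tensor.reshape(x, (6, 4, 5, 6, 7))      # same element count, new shape
+        z = tensor.transpose(y, (4, 3, 2, 1, 0))    # permute all axes
+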
+  * Add new tensor operators into the autograd module, including
+    CosSim, DepthToSpace, Embedding, Erf, Expand, Floor, Pad, Round, Rounde, SpaceToDepth, UpSample, and Where.
+    The corresponding ONNX operators are thus supported by SINGA.
+
+  * Add Embedding and Gemm into the layer module.
+
+  * Add new SGD-based optimizers, including RMSProp, Adam, and AdaGrad, to the opt module (see the sketch below).
+
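+    A construction sketch for the new optimizers (the constructor keyword
+    arguments are assumptions; only the optimizer names come from this note):
+
+        from singa import opt
+
+        sgd = opt.SGD(lr=0.01, momentum=0.9)  # pre-existing optimizer
+        rms = opt.RMSProp(lr=0.001)           # new in this release
+        adam = opt.Adam(lr=0.001)             # new in this release
+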
+  * Extend the sonnx module to support
+    DenseNet121, ShuffleNetv1, ShuffleNetv2, SqueezeNet, VGG19, GPT2, and RoBERTa.
+
+  * Refactor sonnx to
+    * Support creating operators from both layer and autograd.
+    * Rewrite SingaRep to provide a more powerful intermediate representation of SINGA.
+    * Add a SONNXModel, which inherits from Model, to provide a uniform API and features.
+
+  * Add one example that trains a BiLSTM model over the InsuranceQA data.
+
+  * Replace Travis CI with GitHub workflows. Add quality and coverage management.
+
+  * Add compiling and packaging scripts to create wheel packages for distribution.
+
+  * Fix bugs
+    * Fix IMDB LSTM model example training script.
+    * Fix the Tensor operation Mult on broadcasting use cases.
+    * The Gaussian function on Tensor can now run on tensors with an odd size.
+    * Update the testing helper function gradients() in autograd to look up param gradients by the param Python object id.
+
+
+----------------------------------------------------------------------------------------------
+
+Release Notes - SINGA - Version singa-3.0.0
+
+SINGA is a distributed deep learning library.
+
+This release includes the following changes:
+
+  * Code quality has been improved by introducing linting checks in CI and an auto code formatter.
+    For linting, the tools `cpplint` and `pylint` are used and configured to comply with the
+    [Google coding styles](http://google.github.io/styleguide/); details are in `tool/linting/`.
+    Similarly, the formatting tools `clang-format` and `yapf`, configured with the Google coding
+    styles, are recommended for developers to clean up code before submitting changes;
+    details are in `tool/code-format/`. [LGTM](https://lgtm.com) is enabled on GitHub for
+    code quality checks; license checking is also enabled.
+
+  * New Tensor APIs are added for naming consistency and feature enhancement (two are sketched after this list):
+    - size(), mem_size(), get_value(), to_proto(), l1(), l2(): added for the sake of naming consistency
+    - AsType(): convert data type between `float` and `int`
+    - ceil(): perform element-wise ceiling of the input
+    - concat(): concatenate two tensors
+    - index selector: e.g. tensor1[:,:,1:,1:]
+    - softmax(in, axis): perform softmax along a given axis of a multi-dimensional tensor
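+
+    A short sketch exercising the index selector and softmax(in, axis)
+    (the device setup and the module-level softmax helper are assumptions):
+
+        from singa import device, tensor
+
+        dev = device.get_default_device()
+        t = tensor.Tensor((2, 3, 4, 4), dev)
+        t.gaussian(0.0, 1.0)
+        s = t[:, :, 1:, 1:]               # index selector from the list above
+        p = tensor.softmax(t, axis=1)     # softmax along one axis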
+
+  * 14 new operators are added into the autograd module: Gemm, GlobalAveragePool, ConstantOfShape, 
+    Dropout, ReduceSum, ReduceMean, Slice, Ceil, Split, Gather, Tile, NonZero, Cast, OneHot. 
+    Their unit tests are added as well.
+
+  * 14 new operators are added to the sonnx module for both backend and frontend:
+    [Gemm](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Gemm), 
+    [GlobalAveragePool](https://github.com/onnx/onnx/blob/master/docs/Operators.md#GlobalAveragePool), 
+    [ConstantOfShape](https://github.com/onnx/onnx/blob/master/docs/Operators.md#ConstantOfShape), 
+    [Dropout](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Dropout), 
+    [ReduceSum](https://github.com/onnx/onnx/blob/master/docs/Operators.md#ReduceSum), 
+    [ReduceMean](https://github.com/onnx/onnx/blob/master/docs/Operators.md#ReduceMean), 
+    [Slice](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Slice), 
+    [Ceil](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Ceil), 
+    [Split](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Split), 
+    [Gather](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Gather), 
+    [Tile](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Tile), 
+    [NonZero](https://github.com/onnx/onnx/blob/master/docs/Operators.md#NonZero), 
+    [Cast](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Cast), 
+    [OneHot](https://github.com/onnx/onnx/blob/master/docs/Operators.md#OneHot). 
+    Their tests are added as well.
+
+  * Some ONNX models are imported into SINGA, including 
+    [Bert-squad](https://github.com/onnx/models/tree/master/text/machine_comprehension/bert-squad), 
+    [Arcface](https://github.com/onnx/models/tree/master/vision/body_analysis/arcface), 
+    [FER+ Emotion](https://github.com/onnx/models/tree/master/vision/body_analysis/emotion_ferplus), 
+    [MobileNet](https://github.com/onnx/models/tree/master/vision/classification/mobilenet), 
+    [ResNet18](https://github.com/onnx/models/tree/master/vision/classification/resnet), 
+    [Tiny Yolov2](https://github.com/onnx/models/tree/master/vision/object_detection_segmentation/tiny_yolov2), 
+    [Vgg16](https://github.com/onnx/models/tree/master/vision/classification/vgg), and Mnist.
+
+  * Some operators now support [multidirectional broadcasting](https://github.com/onnx/onnx/blob/master/docs/Broadcasting.md#multidirectional-broadcasting), 
+    including Add, Sub, Mul, Div, Pow, PRelu, and Gemm.
+
+  * Distributed training with communication optimization: [DistOpt](./python/singa/opt.py)
+    implements multiple optimization techniques, including gradient sparsification,
+    chunk transmission, and gradient compression.
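+
+    A usage sketch wrapping a local optimizer (the wrapper constructor and the
+    update call are assumptions based on this description):
+
+        from singa import opt
+
+        sgd = opt.SGD(lr=0.005, momentum=0.9)
+        dist_sgd = opt.DistOpt(sgd)           # adds communication optimization
+        # in the training loop:
+        # dist_sgd.backward_and_update(loss)  # all-reduce gradients, then update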
+
+  * Computational graph construction at the CPP level. The operations submitted to the Device are buffered.
+    After analyzing the dependencies, the computational graph is created, and it is further analyzed for
+    speed and memory optimization. To enable this feature, use the [Module API](./python/singa/module.py).
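+
+    A minimal sketch of the Module API (class and layer names follow common
+    SINGA usage; the graph() toggle is an assumption):
+
+        from singa import autograd, module
+
+        class MLP(module.Module):
+            def __init__(self):
+                super().__init__()
+                self.linear = autograd.Linear(784, 10)
+
+            def forward(self, x):
+                return self.linear(x)
+
+        m = MLP()
+        m.graph(True)  # buffer submitted operations into a computational graph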
+
+  * New website based on Docusaurus. The documentation files are moved to a separate repo [singa-doc](https://github.com/apache/singa-doc).
+    The static website files are stored at [singa-site](https://github.com/apache/singa-site).
+
+  * DNNL ([Deep Neural Network Library](https://github.com/intel/mkl-dnn)), powered by Intel,
+    is integrated into `model/operations/[batchnorm|pooling|convolution]`;
+    the changes are opaque to end users. The current version is DNNL v1.1,
+    which replaces the previous integration of MKL-DNN v0.18. The library can
+    boost the performance of deep learning operations when executing on CPU. The DNNL
+    dependency is installed through conda.
+
+  * Some Tensor APIs are marked as deprecated and can be replaced by broadcast, which provides
+    better support for multi-dimensional operations. These APIs are
+    add_column(), add_row(), div_column(), div_row(), mult_column(), and mult_row().
+
+  * Conv and Pooling are enhanced to support fine-grained padding like (2,3,2,3),
+    the [SAME_UPPER, SAME_LOWER](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv)
+    pad modes, and shape checking (see the sketch below).
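+
+    For example, a hypothetical layer construction with fine-grained padding
+    (the keyword name is an assumption and may differ in the actual API;
+    only the (2,3,2,3) form comes from this note):
+
+        from singa import autograd
+
+        conv = autograd.Conv2d(3, 16, kernel_size=3, padding=(2, 3, 2, 3))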
+
+  * Refactor sonnx (a loading sketch follows):
+    - Support two types of weight value (Initializer and Constant Node);
+    - For some operators (BatchNorm, Reshape, Clip, Slice, Gather, Tile, OneHot),
+      move some inputs to their attributes;
+    - Define and implement the type conversion map.
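+
+    A sketch of loading an ONNX model through sonnx (prepare() is assumed as the
+    backend entry point; the file name is hypothetical):
+
+        import onnx
+        from singa import device, sonnx
+
+        model = onnx.load("model.onnx")
+        dev = device.get_default_device()
+        rep = sonnx.prepare(model, device=dev)  # build the SINGA representation
+        # outputs = rep.run([input_tensor])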
+
+------------------------------------------------------------------------
+Release Notes - SINGA - Version singa-incubating-2.0.0
+
+SINGA is a general distributed deep learning platform for training big deep
+learning models over large datasets.
+
+This release includes the following features:
+
+  * Core components
+    * [SINGA-434] Support tensor broadcasting
+    * [SINGA-370] Improvement to tensor reshape and various misc. changes related to SINGA-341 and 351
+
+  * Model components
+    * [SINGA-333] Add support for Open Neural Network Exchange (ONNX) format
+    * [SINGA-385] Add new python module for optimizers
+    * [SINGA-394] Improve the CPP operations via Intel MKL DNN lib
+    * [SINGA-425] Add 3 operators, Abs(), Exp() and leakyrelu(), for Autograd
+    * [SINGA-410] Add two functions, set_params() and get_params(), for the Autograd Layer class
+    * [SINGA-383] Add Separable Convolution for autograd
+    * [SINGA-388] Develop some RNN layers by calling tiny operations like matmul, addbias.
+    * [SINGA-382] Implement concat operation for autograd    
+    * [SINGA-378] Implement maxpooling operation and its related functions for autograd
+    * [SINGA-379] Implement batchnorm operation and its related functions for autograd
+
+  * Utility functions and CI
+    * [SINGA-432] Update dependent lib versions in conda-build config
+    * [SINGA-429] Update docker images for latest cuda and cudnn
+    * [SINGA-428] Move Docker images under Apache user name
+
+  * Documentation and usability
+    * [SINGA-395] Add documentation for autograd APIs
+    * [SINGA-344] Add a GAN example
+    * [SINGA-390] Update installation.md
+    * [SINGA-384] Implement ResNet using autograd API
+    * [SINGA-352] Complete SINGA documentation in Chinese version
+      
+  * Bugs fixed
+    * [SINGA-431] Unit Test failed - Tensor Transpose
+    * [SINGA-422] ModuleNotFoundError: No module named "_singa_wrap"
+    * [SINGA-418] Unsupported type 'long' in python3.
+    * [SINGA-409] Basic `singa-cpu` import throws error
+    * [SINGA-408] Unsupported function definition in python3
+    * [SINGA-380] Fix bugs from Reshape  
+
+---------------------------------------------------------------
+Release Notes - SINGA - Version singa-incubating-1.2.0
+
+SINGA is a general distributed deep learning platform for training big deep
+learning models over large datasets.
+
+This release includes the following features:
+
+  * Core components
+      * [SINGA-290] Upgrade to Python 3
+      * [SINGA-341] Added stride functionality to tensors for CPP
+      * [SINGA-347] Create a function that supports einsum
+      * [SINGA-351] Added stride support and cudnn codes to cuda
+
+  * Model components
+      * [SINGA-300] Add residual networks for imagenet classification
+      * [SINGA-312] Rename layer parameters
+      * [SINGA-313] Add L2 norm layer
+      * [SINGA-315] Reduce memory footprint by Python generator for parameter
+      * [SINGA-316] Add SigmoidCrossEntropy
+      * [SINGA-324] Extend RNN layer to accept variant seq length across batches
+      * [SINGA-326] Add Inception V4 for ImageNet classification
+      * [SINGA-328] Add VGG models for ImageNet classification
+      * [SINGA-329] Support layer freezing during training (fine-tuning)
+      * [SINGA-346] Update cudnn from V5 to V7
+      * [SINGA-349] Create layer operations for autograd
+      * [SINGA-363] Add DenseNet for Imagenet classification
+
+  * Utility functions and CI
+      * [SINGA-274] Improve Debian packaging with CPack
+      * [SINGA-303] Create conda packages
+      * [SINGA-337] Add test cases for code
+      * [SINGA-348] Support autograd MLP Example
+      * [SINGA-345] Update Jenkins and fix bugs in compilation
+      * [SINGA-354] Update travis scripts to use conda-build for all platforms
+      * [SINGA-358] Consolidated RUN steps and cleaned caches in Docker containers
+      * [SINGA-359] Create alias for conda packages
+
+  * Documentation and usability
+      * [SINGA-223] Fix side navigation menu in the website
+      * [SINGA-294] Add instructions to run CUDA unit tests on Windows
+      * [SINGA-305] Add jupyter notebooks for SINGA V1 tutorial
+      * [SINGA-319] Fix link errors on the index page
+      * [SINGA-352] Complete SINGA documentation in Chinese version
+      * [SINGA-361] Add git instructions for contributors and committers
+
+  * Bugs fixed
+      * [SINGA-330] fix openblas building on i7 7700k
+      * [SINGA-331] Fix the bug of tensor division operation
+      * [SINGA-350] Error from python3 test
+      * [SINGA-356] Error using travis tool to build SINGA on mac os
+      * [SINGA-363] Fix some bugs in imagenet examples
+      * [SINGA-368] Fix the bug in Cifar10 examples
+      * [SINGA-369] Fix the errors of examples in testing
+
+---------------------------------------------------------------
 Release Notes - SINGA - Version singa-incubating-1.1.0
 
 SINGA is a general distributed deep learning platform for training big deep learning models over large datasets.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..4274f3c
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,28 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+# Security Policy
+
+## Reporting a Vulnerability
+
+Users can report security vulnerabilities to the [SINGA Security Team Mailing List](mailto:security@singa.apache.org).
+
+For more information, please visit:
+
+http://singa.apache.org/security
diff --git a/cmake/Cuda.cmake b/cmake/Cuda.cmake
index 35109aa..dc553a5 100644
--- a/cmake/Cuda.cmake
+++ b/cmake/Cuda.cmake
@@ -26,15 +26,15 @@
 SET(HAVE_CUDA TRUE)
 MESSAGE(STATUS "Found cuda_v${CUDA_VERSION}")
 #ADD_DEFINITIONS(-DUSE_CUDA)
-#message(STATUS "linking: ${CUDA_CUDART_LIBRARY} ${CUDA_curand_LIBRARY} ${CUDA_CUBLAS_LIBRARIES}")
+#message(STATUS "linking: ${CUDA_CUDART_LIBRARY} ${CUDA_curand_LIBRARY} ${CUDA_cusparse_LIBRARY} ${CUDA_CUBLAS_LIBRARIES}")
 
 IF(USE_CUDNN)
 #include(cmake/Modules/Cudnn.cmake)
     FIND_PACKAGE(CUDNN REQUIRED)
-    INCLUDE_DIRECTORIES(SYSTEM ${CUDNN_INCLUDE_DIR})
+    INCLUDE_DIRECTORIES(${CUDNN_INCLUDE_DIR})
     LIST(APPEND SINGA_LINKER_LIBS ${CUDNN_LIBRARIES})
 ENDIF()
 
-INCLUDE_DIRECTORIES(SYSTEM ${CUDA_INCLUDE_DIRS})
-LIST(APPEND SINGA_LINKER_LIBS ${CUDA_CUDART_LIBRARY} ${CUDA_curand_LIBRARY} ${CUDA_CUBLAS_LIBRARIES})
+INCLUDE_DIRECTORIES(${CUDA_INCLUDE_DIRS})
+LIST(APPEND SINGA_LINKER_LIBS ${CUDA_CUDART_LIBRARY} ${CUDA_curand_LIBRARY} ${CUDA_cusparse_LIBRARY} ${CUDA_CUBLAS_LIBRARIES})
 #MESSAGE(STATUS "libs " ${SINGA_LINKER_LIBS})
diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
index 83c5187..a4257a6 100644
--- a/cmake/Dependencies.cmake
+++ b/cmake/Dependencies.cmake
@@ -41,22 +41,22 @@
     SET(PROTOBUF_LIBRARY "${CMAKE_BINARY_DIR}/lib/libprotobuf.a")
     SET(PROTOBUF_PROTOC_LIBRARY "${CMAKE_BINARY_DIR}/lib/libprotobuf.a")
     SET(PROTOBUF_PROTOC_EXECUTABLE "${CMAKE_BINARY_DIR}/bin/protoc")
-    INCLUDE_DIRECTORIES(SYSTEM ${PROTOBUF_INCLUDE_DIR})
+    INCLUDE_DIRECTORIES(${PROTOBUF_INCLUDE_DIR})
     LIST(APPEND SINGA_LINKER_LIBS ${PROTOBUF_LIBRARY})
     #IF(USE_CBLAS)
     SET(CBLAS_INCLUDE_DIR "${CMAKE_BINARY_DIR}/include")
     SET(CBLAS_LIBRARIES "${CMAKE_BINARY_DIR}/lib/libopenblas.a")
-    INCLUDE_DIRECTORIES(SYSTEM ${CBLAS_INCLUDE_DIR})
+    INCLUDE_DIRECTORIES(${CBLAS_INCLUDE_DIR})
     LIST(APPEND SINGA_LINKER_LIBS ${CBLAS_LIBRARIES})
     #ENDIF()
     #ENDIF()
 ELSE()
-    FIND_PACKAGE( Protobuf REQUIRED )
+    FIND_PACKAGE( Protobuf 3.0 REQUIRED )
     #MESSAGE(STATUS "proto libs " ${PROTOBUF_LIBRARY})
     LIST(APPEND SINGA_LINKER_LIBS ${PROTOBUF_LIBRARY})
     #IF(USE_CBLAS)
     FIND_PACKAGE(CBLAS REQUIRED)
-    INCLUDE_DIRECTORIES(SYSTEM ${CBLAS_INCLUDE_DIR})
+    INCLUDE_DIRECTORIES(${CBLAS_INCLUDE_DIR})
     LIST(APPEND SINGA_LINKER_LIBS ${CBLAS_LIBRARIES})
     #MESSAGE(STATUS "Found cblas at ${CBLAS_LIBRARIES}")
     #ENDIF()
@@ -76,7 +76,7 @@
 
 IF(USE_LMDB)
     FIND_PACKAGE(LMDB REQUIRED)
-    INCLUDE_DIRECTORIES(SYSTEM ${LMDB_INCLUDE_DIR})
+    INCLUDE_DIRECTORIES(${LMDB_INCLUDE_DIR})
     LIST(APPEND SINGA_LINKER_LIBS ${LMDB_LIBRARIES})
     #MESSAGE(STATUS "FOUND lmdb at ${LMDB_INCLUDE_DIR}")
 ENDIF()
@@ -95,14 +95,14 @@
     IF(NOT OPENCL_FOUND)
         MESSAGE(SEND_ERROR "OpenCL was requested, but not found.")
     ELSE()
-        INCLUDE_DIRECTORIES(SYSTEM ${OPENCL_INCLUDE_DIR})
+        INCLUDE_DIRECTORIES(${OPENCL_INCLUDE_DIR})
         LIST(APPEND SINGA_LINKER_LIBS ${OPENCL_LIBRARIES})
         FIND_PACKAGE(ViennaCL REQUIRED)
         IF(NOT ViennaCL_FOUND)
             MESSAGE(SEND_ERROR "ViennaCL is required if OpenCL is enabled.")
         ELSE()
             #MESSAGE(STATUS "Found ViennaCL headers at ${ViennaCL_INCLUDE_DIR}")
-            INCLUDE_DIRECTORIES(SYSTEM ${ViennaCL_INCLUDE_DIR})
+            INCLUDE_DIRECTORIES(${ViennaCL_INCLUDE_DIR})
             LIST(APPEND SINGA_LINKER_LIBS ${ViennaCL_LIBRARIES})
         ENDIF()
     ENDIF()
@@ -116,7 +116,7 @@
 IF(USE_OPENCV)
     FIND_PACKAGE(OpenCV REQUIRED)
     MESSAGE(STATUS "Found OpenCV_${OpenCV_VERSION} at ${OpenCV_INCLUDE_DIRS}")
-    INCLUDE_DIRECTORIES(SYSTEM ${OpenCV_INCLUDE_DIRS})
+    INCLUDE_DIRECTORIES(${OpenCV_INCLUDE_DIRS})
     LIST(APPEND SINGA_LINKER_LIBS ${OpenCV_LIBRARIES})
 ENDIF()
 
@@ -124,9 +124,16 @@
 #MESSAGE(STATUS "link lib : " ${SINGA_LINKER_LIBS})
 
 IF(USE_PYTHON)
-    FIND_PACKAGE(PythonLibs 2.7 REQUIRED)
-    FIND_PACKAGE(PythonInterp 2.7 REQUIRED)
-    FIND_PACKAGE(SWIG 3.0 REQUIRED)
+    IF(USE_PYTHON3)
+        set(Python_ADDITIONAL_VERSIONS 3.6 3.5 3.4)
+        FIND_PACKAGE(PythonInterp 3 REQUIRED)
+        FIND_PACKAGE(PythonLibs 3 REQUIRED)
+        FIND_PACKAGE(SWIG 3.0.10 REQUIRED)
+    ELSE()
+        FIND_PACKAGE(PythonInterp 2.7 REQUIRED)
+        FIND_PACKAGE(PythonLibs 2.7 REQUIRED)
+        FIND_PACKAGE(SWIG 3.0.8 REQUIRED)
+    ENDIF()
 ENDIF()
 
 IF(USE_JAVA)
@@ -134,3 +141,29 @@
     FIND_PACKAGE(JNI REQUIRED)
     FIND_PACKAGE(SWIG 3.0 REQUIRED)
 ENDIF()
+
+IF(USE_DNNL)
+    FIND_PATH(DNNL_INCLUDE_DIR NAME "dnnl.hpp" PATHS "$ENV{DNNL_ROOT}/include")
+    FIND_LIBRARY(DNNL_LIBRARIES NAME "dnnl" PATHS "$ENV{DNNL_ROOT}/lib")
+    MESSAGE(STATUS "Found DNNL at ${DNNL_INCLUDE_DIR}")
+    INCLUDE_DIRECTORIES(${DNNL_INCLUDE_DIR})
+    LIST(APPEND SINGA_LINKER_LIBS ${DNNL_LIBRARIES})
+ENDIF()
+
+IF(USE_DIST)
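+    # NOTE: MPI is searched under a fixed mpich-3.3.2 build tree in $HOME, and
+    # NCCL under default system paths; adjust the PATHS below for other setups.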
+    FIND_PATH(MPI_INCLUDE_DIR NAME "mpi.h" PATHS "$ENV{HOME}/mpich-3.3.2/build/include/")
+    FIND_LIBRARY(MPI_LIBRARIES NAME "mpi" PATHS "$ENV{HOME}/mpich-3.3.2/build/lib")
+    FIND_LIBRARY(MPICXX_LIBRARIES NAME "mpicxx" PATHS "$ENV{HOME}/mpich-3.3.2/build/lib")
+    MESSAGE(STATUS "Found MPI at ${MPI_INCLUDE_DIR}")
+    INCLUDE_DIRECTORIES(${MPI_INCLUDE_DIR})
+    LIST(APPEND SINGA_LINKER_LIBS ${MPI_LIBRARIES})
+    LIST(APPEND SINGA_LINKER_LIBS ${MPICXX_LIBRARIES})
+    MESSAGE(STATUS "Found MPI lib at ${MPI_LIBRARIES}")
+    MESSAGE(STATUS "Found all lib at ${SINGA_LINKER_LIBS}")
+    FIND_PATH(NCCL_INCLUDE_DIR NAME "nccl.h" PATHS "/usr/include/")
+    FIND_LIBRARY(NCCL_LIBRARIES NAME "nccl" PATHS "/usr/lib/x86_64-linux-gnu/")
+    MESSAGE(STATUS "Found NCCL at ${NCCL_INCLUDE_DIR}")
+    INCLUDE_DIRECTORIES(${NCCL_INCLUDE_DIR})
+    LIST(APPEND SINGA_LINKER_LIBS ${NCCL_LIBRARIES})
+    MESSAGE(STATUS "Found NCCL lib at ${NCCL_LIBRARIES}")
+ENDIF()
diff --git a/cmake/Templates/singa_config.h.in b/cmake/Templates/singa_config.h.in
index e35230c..c2865e1 100644
--- a/cmake/Templates/singa_config.h.in
+++ b/cmake/Templates/singa_config.h.in
@@ -36,6 +36,8 @@
 
 #cmakedefine ENABLE_DIST
 
+#cmakedefine USE_DIST
+
 // lmdb
 #cmakedefine USE_LMDB
 
@@ -51,3 +53,5 @@
 // #cmakedefine CUDNN_MINOR_VERSION @CUDNN_MINOR_VERSION@
 // #cmakedefine CUDNN_PATCH_VERSION @CUDNN_PATCH_VERSION@
 // #cmakedefine CUDNN_VERSION @CUDNN_VERSION@
+
+#cmakedefine USE_DNNL
diff --git a/cmake/Thirdparty/FindCBLAS.cmake b/cmake/Thirdparty/FindCBLAS.cmake
index 76c9118..acc78e2 100644
--- a/cmake/Thirdparty/FindCBLAS.cmake
+++ b/cmake/Thirdparty/FindCBLAS.cmake
@@ -17,13 +17,12 @@
 # 
 
 
-FIND_PATH(CBLAS_INCLUDE_DIR NAMES cblas.h PATHS "$ENV{CBLAS_DIR}/include")
-FIND_LIBRARY(CBLAS_LIBRARIES NAMES openblas PATHS "$ENV{CBLAS_DIR}/lib")
+FIND_PATH(CBLAS_INCLUDE_DIR NAMES cblas.h PATHS "$ENV{CMAKE_INCLUDE_PATH}")
+FIND_LIBRARY(CBLAS_LIBRARIES NAMES openblas PATHS "$ENV{CMAKE_LIBRARY_PATH}")
 
 INCLUDE(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(CBLAS DEFAULT_MSG CBLAS_INCLUDE_DIR CBLAS_LIBRARIES)
 
 IF(CBLAS_FOUND)
-    #    MESSAGE(STATUS "Found cblas at ${CBLAS_INCLUDE_DIR}")
     MARK_AS_ADVANCED(CBLAS_INCLUDE_DIR CBLAS_LIBRARIES)
 ENDIF()
diff --git a/cmake/Thirdparty/FindGlog.cmake b/cmake/Thirdparty/FindGlog.cmake
index e18c602..35caefd 100644
--- a/cmake/Thirdparty/FindGlog.cmake
+++ b/cmake/Thirdparty/FindGlog.cmake
@@ -21,7 +21,7 @@
 FIND_LIBRARY(GLOG_LIBRARIES NAMES glog)
 
 INCLUDE(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(GLOG DEFAULT_MSG GLOG_INCLUDE_DIR GLOG_LIBRARIES)
+find_package_handle_standard_args(Glog DEFAULT_MSG GLOG_INCLUDE_DIR GLOG_LIBRARIES)
 
 IF(GLOG_FOUND)
     #    MESSAGE(STATUS "Found glog at ${GLOG_INCLUDE_DIR}")
diff --git a/cmake/Thirdparty/GetGitRevisionDescription.cmake b/cmake/Thirdparty/GetGitRevisionDescription.cmake
new file mode 100644
index 0000000..142e135
--- /dev/null
+++ b/cmake/Thirdparty/GetGitRevisionDescription.cmake
@@ -0,0 +1,195 @@
+# Boost Software License - Version 1.0 - August 17th, 2003
+#
+# Permission is hereby granted, free of charge, to any person or organization
+# obtaining a copy of the software and accompanying documentation covered by
+# this license (the "Software") to use, reproduce, display, distribute,
+# execute, and transmit the Software, and to prepare derivative works of the
+# Software, and to permit third-parties to whom the Software is furnished to
+# do so, all subject to the following:
+#
+# The copyright notices in the Software and this entire statement, including
+# the above license grant, this restriction and the following disclaimer,
+# must be included in all copies of the Software, in whole or in part, and
+# all derivative works of the Software, unless such copies or derivative
+# works are solely in the form of machine-executable object code generated by
+# a source language processor.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+# SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+# FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+
+# - Returns a version string from Git
+#
+# These functions force a re-configure on each git commit so that you can
+# trust the values of the variables in your build system.
+#
+#  get_git_head_revision(<refspecvar> <hashvar> [<additional arguments to git describe> ...])
+#
+# Returns the refspec and sha hash of the current head revision
+#
+#  git_describe(<var> [<additional arguments to git describe> ...])
+#
+# Returns the results of git describe on the source tree, and adjusting
+# the output so that it tests false if an error occurs.
+#
+#  git_get_exact_tag(<var> [<additional arguments to git describe> ...])
+#
+# Returns the results of git describe --exact-match on the source tree,
+# and adjusting the output so that it tests false if there was no exact
+# matching tag.
+#
+#  git_local_changes(<var>)
+#
+# Returns either "CLEAN" or "DIRTY" with respect to uncommitted changes.
+# Uses the return code of "git diff-index --quiet HEAD --".
+# Does not regard untracked files.
+#
+# Requires CMake 2.6 or newer (uses the 'function' command)
+#
+# Original Author:
+# 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net>
+# http://academic.cleardefinition.com
+# Iowa State University HCI Graduate Program/VRAC
+#
+# Copyright Iowa State University 2009-2010.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
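+
+# Usage sketch (assumes this module's directory is on CMAKE_MODULE_PATH):
+#   include(GetGitRevisionDescription)
+#   get_git_head_revision(GIT_REFSPEC GIT_SHA1)
+#   git_describe(VERSION)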
+
+if(__get_git_revision_description)
+	return()
+endif()
+set(__get_git_revision_description YES)
+
+# We must run the following at "include" time, not at function call time,
+# to find the path to this module rather than the path to a calling list file
+get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH)
+
+function(get_git_head_revision _refspecvar _hashvar)
+	set(GIT_PARENT_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+	set(GIT_DIR "${GIT_PARENT_DIR}/.git")
+	while(NOT EXISTS "${GIT_DIR}")	# .git dir not found, search parent directories
+		set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}")
+		get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH)
+		if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT)
+			# We have reached the root directory, we are not in git
+			set(${_refspecvar} "GITDIR-NOTFOUND" PARENT_SCOPE)
+			set(${_hashvar} "GITDIR-NOTFOUND" PARENT_SCOPE)
+			return()
+		endif()
+		set(GIT_DIR "${GIT_PARENT_DIR}/.git")
+	endwhile()
+	# check if this is a submodule
+	if(NOT IS_DIRECTORY ${GIT_DIR})
+		file(READ ${GIT_DIR} submodule)
+		string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" GIT_DIR_RELATIVE ${submodule})
+		get_filename_component(SUBMODULE_DIR ${GIT_DIR} PATH)
+		get_filename_component(GIT_DIR ${SUBMODULE_DIR}/${GIT_DIR_RELATIVE} ABSOLUTE)
+	endif()
+	if(NOT IS_DIRECTORY "${GIT_DIR}")
+		file(READ ${GIT_DIR} worktree)
+ 		string(REGEX REPLACE "gitdir: (.*)worktrees(.*)\n$" "\\1" GIT_DIR ${worktree})
+ 	endif()
+	set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data")
+	if(NOT EXISTS "${GIT_DATA}")
+		file(MAKE_DIRECTORY "${GIT_DATA}")
+	endif()
+
+	if(NOT EXISTS "${GIT_DIR}/HEAD")
+		return()
+	endif()
+	set(HEAD_FILE "${GIT_DATA}/HEAD")
+	configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY)
+
+	configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in"
+		"${GIT_DATA}/grabRef.cmake"
+		@ONLY)
+	include("${GIT_DATA}/grabRef.cmake")
+
+	set(${_refspecvar} "${HEAD_REF}" PARENT_SCOPE)
+	set(${_hashvar} "${HEAD_HASH}" PARENT_SCOPE)
+endfunction()
+
+function(git_describe _var)
+	if(NOT GIT_FOUND)
+		find_package(Git QUIET)
+	endif()
+	get_git_head_revision(refspec hash)
+	if(NOT GIT_FOUND)
+		set(${_var} "GIT-NOTFOUND" PARENT_SCOPE)
+		return()
+	endif()
+	if(NOT hash)
+		set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE)
+		return()
+	endif()
+
+	# TODO sanitize
+	#if((${ARGN}" MATCHES "&&") OR
+	#	(ARGN MATCHES "||") OR
+	#	(ARGN MATCHES "\\;"))
+	#	message("Please report the following error to the project!")
+	#	message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}")
+	#endif()
+
+	#message(STATUS "Arguments to execute_process: ${ARGN}")
+
+	execute_process(COMMAND
+		"${GIT_EXECUTABLE}"
+		describe --abbrev=0
+		WORKING_DIRECTORY
+		"${CMAKE_CURRENT_SOURCE_DIR}"
+		RESULT_VARIABLE
+		res
+		OUTPUT_VARIABLE
+		out
+		ERROR_QUIET
+		OUTPUT_STRIP_TRAILING_WHITESPACE)
+	if(NOT res EQUAL 0)
+		set(out "${out}-${res}-NOTFOUND")
+	endif()
+
+	set(${_var} "${out}" PARENT_SCOPE)
+endfunction()
+
+function(git_get_exact_tag _var)
+	git_describe(out --exact-match ${ARGN})
+	set(${_var} "${out}" PARENT_SCOPE)
+endfunction()
+
+function(git_local_changes _var)
+	if(NOT GIT_FOUND)
+		find_package(Git QUIET)
+	endif()
+	get_git_head_revision(refspec hash)
+	if(NOT GIT_FOUND)
+		set(${_var} "GIT-NOTFOUND" PARENT_SCOPE)
+		return()
+	endif()
+	if(NOT hash)
+		set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE)
+		return()
+	endif()
+
+	execute_process(COMMAND
+		"${GIT_EXECUTABLE}"
+		diff-index --quiet HEAD --
+		WORKING_DIRECTORY
+		"${CMAKE_CURRENT_SOURCE_DIR}"
+		RESULT_VARIABLE
+		res
+		OUTPUT_VARIABLE
+		out
+		ERROR_QUIET
+		OUTPUT_STRIP_TRAILING_WHITESPACE)
+	if(res EQUAL 0)
+		set(${_var} "CLEAN" PARENT_SCOPE)
+	else()
+		set(${_var} "DIRTY" PARENT_SCOPE)
+	endif()
+endfunction()
diff --git a/cmake/Thirdparty/GetGitRevisionDescription.cmake.in b/cmake/Thirdparty/GetGitRevisionDescription.cmake.in
new file mode 100644
index 0000000..7d376e1
--- /dev/null
+++ b/cmake/Thirdparty/GetGitRevisionDescription.cmake.in
@@ -0,0 +1,66 @@
+# Boost Software License - Version 1.0 - August 17th, 2003
+#
+# Permission is hereby granted, free of charge, to any person or organization
+# obtaining a copy of the software and accompanying documentation covered by
+# this license (the "Software") to use, reproduce, display, distribute,
+# execute, and transmit the Software, and to prepare derivative works of the
+# Software, and to permit third-parties to whom the Software is furnished to
+# do so, all subject to the following:
+#
+# The copyright notices in the Software and this entire statement, including
+# the above license grant, this restriction and the following disclaimer,
+# must be included in all copies of the Software, in whole or in part, and
+# all derivative works of the Software, unless such copies or derivative
+# works are solely in the form of machine-executable object code generated by
+# a source language processor.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+# SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+# FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+
+#
+# Internal file for GetGitRevisionDescription.cmake
+#
+# Requires CMake 2.6 or newer (uses the 'function' command)
+#
+# Original Author:
+# 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net>
+# http://academic.cleardefinition.com
+# Iowa State University HCI Graduate Program/VRAC
+#
+# Copyright Iowa State University 2009-2010.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+set(HEAD_HASH)
+
+file(READ "@HEAD_FILE@" HEAD_CONTENTS LIMIT 1024)
+
+string(STRIP "${HEAD_CONTENTS}" HEAD_CONTENTS)
+if(HEAD_CONTENTS MATCHES "ref")
+	# named branch
+	string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}")
+	if(EXISTS "@GIT_DIR@/${HEAD_REF}")
+		configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
+	else()
+		configure_file("@GIT_DIR@/packed-refs" "@GIT_DATA@/packed-refs" COPYONLY)
+		file(READ "@GIT_DATA@/packed-refs" PACKED_REFS)
+		if(${PACKED_REFS} MATCHES "([0-9a-z]*) ${HEAD_REF}")
+			set(HEAD_HASH "${CMAKE_MATCH_1}")
+		endif()
+	endif()
+else()
+	# detached HEAD
+	configure_file("@GIT_DIR@/HEAD" "@GIT_DATA@/head-ref" COPYONLY)
+endif()
+
+if(NOT HEAD_HASH)
+	file(READ "@GIT_DATA@/head-ref" HEAD_HASH LIMIT 1024)
+	string(STRIP "${HEAD_HASH}" HEAD_HASH)
+endif()
diff --git a/doc/README.md b/doc/README.md
deleted file mode 100644
index 3652fa2..0000000
--- a/doc/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# How to Contribute to Documentation
-
-
-## Website
-This document gives step-by-step instructions for deploying [SINGA website](http://singa.incubator.apache.org).
-
-SINGA website is built by [Sphinx](http://www.sphinx-doc.org) >=1.4.4 from a source tree stored in git: https://github.com/apache/incubator-singa/tree/master/doc.
-
-To install Sphinx:
-
-    $ pip install -U Sphinx
-
-To install the markdown support for Sphinx:
-
-    $ pip install recommonmark
-
-To install the rtd theme:
-
-    $ pip install sphinx_rtd_theme
-
-You can build the website by executing the following command from the doc folder:
-
-    $ ./build.sh html
-
-Committers can update the [SINGA website](http://singa.apache.org/en/index.html) by following these steps:
-
-    $ cd _build
-    $ svn co https://svn.apache.org/repos/asf/incubator/singa/site/trunk
-    $ cp -r html/* trunk
-    # svn add <newly added html files>
-    $ svn commit -m "commit messages" --username  <committer ID> --password <password>
-
-
-## CPP API
-
-To generate docs, run "doxygen" from the doc folder (Doxygen >= 1.8 recommended)
diff --git a/doc/_static/apache.png b/doc/_static/apache.png
index 338169e..714166c 100644
--- a/doc/_static/apache.png
+++ b/doc/_static/apache.png
Binary files differ
diff --git a/doc/_static/images/benchmark.png b/doc/_static/images/benchmark.png
new file mode 100644
index 0000000..711e6b5
--- /dev/null
+++ b/doc/_static/images/benchmark.png
Binary files differ
diff --git a/doc/_static/images/imda2019_1.png b/doc/_static/images/imda2019_1.png
new file mode 100644
index 0000000..cd2a7b7
--- /dev/null
+++ b/doc/_static/images/imda2019_1.png
Binary files differ
diff --git a/doc/_static/images/imda2019_2.png b/doc/_static/images/imda2019_2.png
new file mode 100644
index 0000000..52ff4d2
--- /dev/null
+++ b/doc/_static/images/imda2019_2.png
Binary files differ
diff --git a/doc/_static/singa-logo-updated.ai b/doc/_static/singa-logo-updated.ai
new file mode 100644
index 0000000..d1022e3
--- /dev/null
+++ b/doc/_static/singa-logo-updated.ai
Binary files differ
diff --git a/doc/_static/singa-logo-v3.ai b/doc/_static/singa-logo-v3.ai
new file mode 100644
index 0000000..78217cc
--- /dev/null
+++ b/doc/_static/singa-logo-v3.ai
Binary files differ
diff --git a/doc/_static/singa-logo-v3.png b/doc/_static/singa-logo-v3.png
new file mode 100644
index 0000000..1f030f9
--- /dev/null
+++ b/doc/_static/singa-logo-v3.png
Binary files differ
diff --git a/doc/_static/singa.png b/doc/_static/singa.png
old mode 100755
new mode 100644
index 30be5c1..eb755be
--- a/doc/_static/singa.png
+++ b/doc/_static/singa.png
Binary files differ
diff --git a/doc/_static/style.css b/doc/_static/style.css
index b07bdb1..57ad9fe 100644
--- a/doc/_static/style.css
+++ b/doc/_static/style.css
@@ -1,3 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
 .wy-nav-content {
     max-width: none;
 }
+.wy-nav-side {
+    padding-bottom:12em;
+}
diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html
index 1b07f5b..cd80335 100755
--- a/doc/_templates/layout.html
+++ b/doc/_templates/layout.html
@@ -18,38 +18,76 @@
 {% extends "!layout.html" %}
 
 {% block extrahead %}
-    <link href="{{ pathto("_static/style.css", True) }}" rel="stylesheet" type="text/css">
+<link href="{{ pathto("_static/style.css", True) }}" rel="stylesheet" type="text/css">
+<!--link href="{{ pathto("_static/fontawesome-all.min.css", True) }}" rel="stylesheet" type="text/css"-->
+<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.13/css/all.css"
+    integrity="sha384-DNOHZ68U8hZfKXOrtjWvjxusGo9WQnrNx2sqG0tfsghAvtVlRW3tvkXWZh58N9jp" crossorigin="anonymous">
+<style>
+    .fa:hover {
+        opacity: 0.7;
+    }
+
+    .fab:hover {
+        opacity: 0.7;
+    }
+</style>
 {% endblock %}
 
 {% block footer %}
 
-<div class="rst-versions shift-up" data-toggle="rst-versions" role="note" aria-label="versions">
-  <span class="rst-current-version" data-toggle="rst-current-version">
-    <span class="fa fa-book"> incubator-singa </span>
-    v: {{ version }}
-    <span class="fa fa-caret-down"></span>
-  </span>
-  <div class="rst-other-versions">
-      <dl>
-          <dt>Languages</dt>
-          <dd><a href="{{ pathto('../en/index.html', 1) }}">English</a></dd>
-          <dd><a href="{{ pathto('../zh/index.html', 1) }}">中文</a></dd>
-      </dl>
-      <dl>
-          <dt>Versions</dt>
-          <dd><a href="http://singa.apache.org/v0.3.0/">0.3</a></dd>
-          <dd><a href="http://singa.apache.org/v1.1.0/">1.1</a></dd>
-      </dl>
+<div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+        <span class="fa fa-book"> singa </span>
+        v: {{ version }}
+        <span class="fa fa-caret-down"></span>
+    </span>
+    <div class="rst-other-versions">
+        <dl>
+            <dt>Languages</dt>
+            <dd><a href="{{ pathto('./index.html', 1) }}">English</a></dd>
+            <dd><a href="{{ pathto('./zh/index.html', 1) }}">中文</a></dd>
+        </dl>
+        <dl>
+            <dt>Versions</dt>
+            <dd><a href="http://singa.apache.org/v0.3.0/">0.3</a></dd>
+            <dd><a href="http://singa.apache.org/v1.1.0/">1.1</a></dd>
+        </dl>
 
-  </div>
-  <a href="http://incubator.apache.org/"> <img src= "{{pathto('_static/'+ 'apache.png' , 1) }}" style="background-color:white;"> </a>
+    </div>
+
+    <a href="http://www.apache.org"
+        style="color:lightblue;padding: 5px; font-size: 10px; text-align: center; text-decoration: none; margin: 5px 2px;">Foundation</a>
+    <a href="http://www.apache.org/events/current-event"
+        style="color:lightblue;padding: 5px; font-size: 10px; text-align: center; text-decoration: none; margin: 5px 2px;">Events</a>
+    <a href="http://www.apache.org/foundation/thanks.html"
+        style="color:lightblue;padding: 5px; font-size: 10px; text-align: center; text-decoration: none; margin: 5px 2px;">Thanks</a>
+    <a href="http://www.apache.org/foundation/sponsorship.html"
+        style="color:lightblue;padding: 5px; font-size: 10px;  text-align: center; text-decoration: none; margin: 5px 2px;">Sponsorship</a>
+    <a href="http://www.apache.org/licenses/"
+        style="color:lightblue;padding: 5px; font-size: 10px;  text-align: center; text-decoration: none; margin: 5px 2px;">License</a>
+    <br>
+    <a href="https://github.com/apache/singa" class="fa fa-github"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://aws.amazon.com/marketplace/seller-profile?id=5bcac385-12c4-4802-aec7-351e09b77b4c"
+        class="fab fa-aws"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://hub.docker.com/r/apache/singa/" class="fab fa-docker"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://www.linkedin.com/groups/13550034" class="fa fa-linkedin"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://twitter.com/ApacheSinga" class="fa fa-twitter"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://www.facebook.com/Apache-SINGA-347284219056544/" class="fa fa-facebook"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+    <a href="https://www.researchgate.net/project/Apache-SINGA" class="fab fa-researchgate"
+        style="padding: 10px; font-size: 20px; width: 30px; text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+
 </div>
 
- <a href="https://github.com/apache/incubator-singa">
+<a href="https://github.com/apache/singa">
     <img style="position: absolute; top: 0; right: 0; border: 0; z-index: 10000;"
-        src="https://s3.amazonaws.com/github/ribbons/forkme_right_orange_ff7600.png"
-        alt="Fork me on GitHub">
+        src="https://s3.amazonaws.com/github/ribbons/forkme_right_orange_ff7600.png" alt="Fork me on GitHub">
 </a>
 
 {{ super() }}
-{% endblock %}
+{% endblock %}
\ No newline at end of file
diff --git a/doc/en/docs/loss.rst b/doc/autograd.rst
similarity index 84%
copy from doc/en/docs/loss.rst
copy to doc/autograd.rst
index 18c587a..448bb4f 100644
--- a/doc/en/docs/loss.rst
+++ b/doc/autograd.rst
@@ -1,4 +1,4 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
+.. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
@@ -16,10 +16,11 @@
    under the License.
 
 
-Loss
-=========
+Autograd
+========
 
 
-.. automodule:: singa.loss
-   :members:
-   :show-inheritance:
+Python API
+----------
+.. automodule:: singa.autograd
+   :members:
\ No newline at end of file
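The new autograd page pulls its content from the `singa.autograd` docstrings, so a short usage sketch helps show what the documented module covers. This is a rough sketch only: the operator and helper names (`matmul`, `add_bias`, `mse_loss`, `backward`), the `requires_grad`/`stores_grad` flags, and the module-level `training` switch are assumptions drawn from SINGA 3.x examples, not verified signatures.

    # Rough sketch of the workflow the autograd page documents; all names
    # below are assumptions based on SINGA 3.x examples.
    from singa import autograd, tensor

    autograd.training = True        # gradients are only recorded in training mode

    x = tensor.Tensor((3, 5))
    x.gaussian(0.0, 1.0)            # random input batch
    w = tensor.Tensor((5, 2), requires_grad=True, stores_grad=True)
    w.gaussian(0.0, 0.1)            # weight to be learned
    b = tensor.Tensor((2,), requires_grad=True, stores_grad=True)
    b.set_value(0.0)

    y = autograd.add_bias(autograd.matmul(x, w), b)   # recorded ops
    target = tensor.Tensor((3, 2))
    target.gaussian(0.0, 1.0)
    loss = autograd.mse_loss(y, target)

    # backward() walks the recorded graph and yields (param, gradient) pairs
    for p, g in autograd.backward(loss):
        print(p.shape, g.shape)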
diff --git a/doc/build.sh b/doc/build.sh
deleted file mode 100755
index 44eb1c2..0000000
--- a/doc/build.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-SPHINXBUILD="sphinx-build"
-BUILDDIR="_build"
-LANG_ARR=(en zh)
-
-if [ "$1"x = "clean"x ]; then
-  rm -rf $BUILDDIR/*
-  rm -rf en/docs/examples
-  echo "clean up $BUILDDIR"
-fi
-
-
-if [ "$1"x = "html"x ]; then
-  cp -rf ../examples en/docs/model_zoo
-  cp README.md en/develop/contribute-docs.md
-  for (( i=0; i<${#LANG_ARR[@]}; i++)) do
-    echo "building language ${LANG_ARR[i]} ..."
-    $SPHINXBUILD -b html -c . -d $BUILDDIR/doctree ${LANG_ARR[i]} $BUILDDIR/html/${LANG_ARR[i]}
-  done
-  echo "<script language=\"javascript\" type=\"text/javascript\">window.location.href='en/index.html';</script>" > $BUILDDIR/html/index.html
-fi
diff --git a/doc/conf.py b/doc/conf.py
index 280fae6..dfeea13 100755
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# incubator-singa documentation build configuration file, created by
+# singa documentation build configuration file, created by
 # sphinx-quickstart on Sat Jul  9 20:36:57 2016.
 #
 # Licensed to the Apache Software Foundation (ASF) under one
@@ -19,6 +19,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# Directives to run the spell checker only on Python comments
+# cSpell:includeRegExp #.*
+# cSpell:includeRegExp ("""|''')[^]*
+
 # This file is execfile()d with the current directory set to its
 # containing dir.
 #
@@ -34,8 +38,11 @@
 #
 import os
 import sys
-#sys.path.insert(0, os.path.abspath('.'))
-#sys.path.insert(1, os.path.abspath('../build/python/'))
+
+# If the user has the SINGA codebase,
+# the documentation will be built from code comments in these locations
+sys.path.append(os.path.abspath('../python/'))
+sys.path.append(os.path.abspath('../build/python/'))
 
 # -- General configuration ------------------------------------------------
 from recommonmark.parser import CommonMarkParser
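The two `sys.path.append` calls above only help if a built copy of the package actually sits in one of those locations; autodoc silently produces empty API pages otherwise. A quick standalone check, an illustrative sketch rather than part of conf.py, might look like:

    # Verify that the paths appended in conf.py make singa importable,
    # which is what sphinx autodoc needs to generate the API pages.
    import os
    import sys

    for p in ('../python/', '../build/python/'):
        sys.path.append(os.path.abspath(p))

    try:
        import singa
        print('singa importable from', os.path.dirname(singa.__file__))
    except ImportError:
        print('singa not importable; autodoc pages will come out empty')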
@@ -70,9 +77,9 @@
 master_doc = 'index'
 
 # General information about the project.
-project = u'incubator-singa'
-copyright = u'2017 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.'
-author = u'moaz'
+project = u'singa'
+copyright = u'2020 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.'
+author = u'Apache Software Foundation'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -81,7 +88,7 @@
 # The short X.Y version.
 version = u'latest'
 # The full version, including alpha/beta/rc tags.
-release = u'1.1.0'
+release = u'3.0.0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -135,7 +142,6 @@
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
 
-
 # -- Options for HTML output ----------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
@@ -164,7 +170,7 @@
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
 #
-html_logo = '/singa.png'
+html_logo = '_static/singa.png'
 
 # The name of an image file (relative to this directory) to use as a favicon of
 # the docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
@@ -260,29 +266,28 @@
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
-     # The paper size ('letterpaper' or 'a4paper').
-     #
-     # 'papersize': 'letterpaper',
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
 
-     # The font size ('10pt', '11pt' or '12pt').
-     #
-     # 'pointsize': '10pt',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
 
-     # Additional stuff for the LaTeX preamble.
-     #
-     # 'preamble': '',
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
 
-     # Latex figure (float) alignment
-     #
-     # 'figure_align': 'htbp',
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'incubator-singa.tex', u'incubator-singa Documentation',
-     u'moaz', 'manual'),
+    (master_doc, 'singa.tex', u'singa Documentation', u'moaz', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -311,30 +316,24 @@
 #
 # latex_domain_indices = True
 
-
 # -- Options for manual page output ---------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'incubator-singa', u'incubator-singa Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, 'singa', u'singa Documentation', [author], 1)]
 
 # If true, show URL addresses after external links.
 #
 # man_show_urls = False
 
-
 # -- Options for Texinfo output -------------------------------------------
 
 # Grouping the document tree into Texinfo files. List of tuples
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'incubator-singa', u'incubator-singa Documentation',
-     author, 'incubator-singa', 'One line description of project.',
-     'Miscellaneous'),
+    (master_doc, 'singa', u'singa Documentation', author, 'singa',
+     'One line description of project.', 'Miscellaneous'),
 ]
 
 # Documents to append as an appendix to all manuals.
diff --git a/doc/en/docs/data.rst b/doc/data.rst
similarity index 100%
rename from doc/en/docs/data.rst
rename to doc/data.rst
diff --git a/doc/en/docs/device.rst b/doc/device.rst
similarity index 98%
rename from doc/en/docs/device.rst
rename to doc/device.rst
index 57993f9..400907d 100644
--- a/doc/en/docs/device.rst
+++ b/doc/device.rst
@@ -48,7 +48,3 @@
    host = device.get_default_device()  # get the default host device (a CppCPU)
    ary1 = device.create_cuda_gpus(2)  # create 2 devices, starting from ID 0
    ary2 = device.create_cuda_gpus([0,2])  # create 2 devices on ID 0 and 2
-
-
-CPP API
----------
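The device snippet above shows how devices are created; a small follow-up sketch shows how a created device is typically used, namely constructing a tensor on it or moving one over. It assumes only the documented `singa.device` API plus a `to_device()` method taken from SINGA examples.

    # Allocate a tensor on the host device and (optionally) move it to a GPU.
    from singa import device, tensor

    host = device.get_default_device()   # the default host device (a CppCPU)
    x = tensor.Tensor((2, 3), host)      # tensor allocated on the host
    x.set_value(1.0)

    # with a CUDA build, the tensor could be moved onto a GPU device:
    # gpu = device.create_cuda_gpu()
    # x.to_device(gpu)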
diff --git a/doc/en/_templates/layout.html b/doc/en/_templates/layout.html
deleted file mode 100755
index 2f9ca0d..0000000
--- a/doc/en/_templates/layout.html
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-#}
-{% extends "!layout.html" %}
-
-{% block extrahead %}
-    <link href="{{ pathto("_static/style.css", True) }}" rel="stylesheet" type="text/css">
-{% endblock %}
-
-{% block footer %}
-
-<div class="rst-versions shift-up" data-toggle="rst-versions" role="note" aria-label="versions">
-<a href="http://incubator.apache.org/">
-<img src= "{{pathto('_static/'+ 'apache.jpg' , 1) }}">
-</a>
-
-  <span class="rst-current-version" data-toggle="rst-current-version">
-    <span class="fa fa-book"> incubator-singa </span>
-    v: {{ version }}
-    <span class="fa fa-caret-down"></span>
-  </span>
-  <div class="rst-other-versions">
-    <dl>
-       <dd><a href="">English</a></dd>
-       <dd><a href="{{pathto('zh/'+ 'index.html' , 1) }}">中文</a></dd>
-	  <!--dd><a href="/jp/latest/">日本語</a></dd>
-	  <dd><a href="/kr/latest/">한국어</a></dd>
-	  <dd><a href="/it/latest/">Italiano</a></dd>
-	  <dd><a href="/ar/latest/">العربية</a></dd-->
-    </dl>
-    </dl>
-  </div>
-</div>
-
- <a href="https://github.com/apache/incubator-singa">
-    <img style="position: absolute; top: 0; right: 0; border: 0; z-index: 10000;"
-        src="https://s3.amazonaws.com/github/ribbons/forkme_right_orange_ff7600.png"
-        alt="Fork me on GitHub">
-</a>
-
-{{ super() }}
-{% endblock %}
diff --git a/doc/en/community/issue-tracking.md b/doc/en/community/issue-tracking.md
deleted file mode 100644
index 26b23dd..0000000
--- a/doc/en/community/issue-tracking.md
+++ /dev/null
@@ -1,9 +0,0 @@
-## Issue Tracking
-
-___
-
-SINGA uses [JIRA](https://www.atlassian.com/software/jira), a J2EE-based issue tracking and project management application.
-
-Issues, bugs, and feature requests should be submitted to the following issue tracking system for this project.
-
-* https://issues.apache.org/jira/browse/singa
diff --git a/doc/en/community/mail-lists.rst b/doc/en/community/mail-lists.rst
deleted file mode 100644
index a170042..0000000
--- a/doc/en/community/mail-lists.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Project Mailing Lists
-=====================
-
-These are the mailing lists that have been established for this project. For each list, there is a subscribe, unsubscribe, and an archive link.
-
-.. csv-table:: Mailing Lists
-	:header: "Name", "Post", "Subscribe", "Unsubscribe", "Archive"
-
-        "Development", "dev@singa.incubator.apache.org", "`Subscribe <mailto:dev-subscribe@singa.incubator.apache.org>`_", "`Unsubscribe <mailto:dev-unsubscribe@singa.incubator.apache.org.>`_", "`mail-archives.apache.org <http://mail-archives.apache.org/mod_mbox/singa-dev/>`_"
-        "Commits", "commits@singa.incubator.apache.org", "`Subscribe <mailto:commits-subscribe@singa.incubator.apache.org>`_", "`Unsubscribe <mailto:commits-unsubscribe@singa.incubator.apache.org>`_", "`mail-archives.apache.org  <http://mail-archives.apache.org/mod_mbox/singa-commits/>`_"
diff --git a/doc/en/community/source-repository.md b/doc/en/community/source-repository.md
deleted file mode 100644
index 8864629..0000000
--- a/doc/en/community/source-repository.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# Source Repository
-
-___
-
-This project uses [Git](http://git-scm.com/) to manage its source code. Instructions on Git use can be found at [http://git-scm.com/documentation](http://git-scm.com/documentation).
-
-## Web Access
-
-The following is a link to the online source repository.
-
-* [https://git-wip-us.apache.org/repos/asf?p=incubator-singa.git;a=summary](https://git-wip-us.apache.org/repos/asf?p=incubator-singa.git;a=summary)
-
-
-## Upstream for committers
-
-Committers need to set the upstream endpoint to the Apache git (not github) repo address, e.g.,
-
-    $ git remote add asf https://git-wip-us.apache.org/repos/asf/incubator-singa.git
-
-Then you (committer) can push your code in this way,
-
-    $ git push asf <local-branch>:<remote-branch>
diff --git a/doc/en/community/team-list.rst b/doc/en/community/team-list.rst
deleted file mode 100644
index 668810d..0000000
--- a/doc/en/community/team-list.rst
+++ /dev/null
@@ -1,79 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-The SINGA Team
-==============
-
-A successful project requires many people to play many roles. Some members write code or documentation, while others are valuable as testers, submitting patches and suggestions.
-
-Mentors
--------
-
-==================   ============
-Name                 Email
-==================   ============
-Daniel Dai           daijy@apache.org
-Ted Dunning          tdunning@apache.org
-Alan Gates           gates@apache.org
-Thejas Nair          thejas@apache.org
-==================   ============
-
-
-Developers
-----------
-
-+--------------------+--------------------------------+-----------------------------------------------+
-| Name               | Email                          | Organization                                  |
-+====================+================================+===============================================+
-| Gang Chen          | cg@zju.edu.cn                  | Zhejiang University                           |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Haibo Chen         | hzchenhaibo@corp.netease.com   | NetEase                                       |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Anh Dinh           | dinhtta@apache.org             | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Jinyang Gao        | jinyang@apache.org             | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Xing Ji            | jixin@comp.nus.edu.sg          | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Chonho Lee         | chonho@gmail.com               | Osaka University                              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Zhaojing Luo       | zhaojing@apache.org            | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Beng Chin Ooi      | ooibc@comp.nus.edu.sg          | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Kian-Lee Tan       | tankl@apache.org               | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Anthony K. H. Tung | atung@comp.nus.edu.sg          | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Ji Wang            | wangji@comp.nus.edu.sg         | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Sheng Wang         | wangsh@apache.org              | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Wei Wang           | wangwei@apache.org             | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Yuan Wang          | wangyuan@corp.netease.com      | NetEase                                       |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Wenfeng Wu         | wuwf@comp.nus.edu.sg           | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Zhongle Xie        | zhongle@apache.org             | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Meihui Zhang       | meihui_zhang@sutd.edu.sg       | Singapore University of Technology and Design |
-+--------------------+--------------------------------+-----------------------------------------------+
-| Kaiping Zheng      | kaiping@apache.org             | National University of Singapore              |
-+--------------------+--------------------------------+-----------------------------------------------+
-
diff --git a/doc/en/develop/contribute-code.md b/doc/en/develop/contribute-code.md
deleted file mode 100644
index 39d11f8..0000000
--- a/doc/en/develop/contribute-code.md
+++ /dev/null
@@ -1,59 +0,0 @@
-## How to Contribute Code
-
-
-### Coding Style
-
-The SINGA codebase follows the [Google C++ Style Guide](http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml).
-
-To check if your code follows the style, you can use the provided cpplint tool:
-
-    $ ./tool/cpplint.py YOUR_FILE
-
-
-### JIRA format
-
-Like other Apache projects, SINGA uses JIRA to track bugs, improvements and
-other high-level discussions (e.g., system design and features).  Github pull requests are
-used for implementation discussions, e.g., code review and code merge.
-
-* Provide a descriptive Title.
-* Write a detailed Description. For bug reports, this should ideally include a
-  short reproduction of the problem. For new features, it may include a design
-  document.
-* Set [required fields](https://cwiki.apache.org/confluence/display/SPARK/Contributing+to+Spark#ContributingtoSpark-JIRA)
-
-### Pull Request
-
-The workflow is
-
-* Fork the [SINGA Github repository](https://github.com/apache/incubator-singa) to
-your own Github account.
-
-* Clone your fork, create a new branch (e.g., feature-foo or fixbug-foo),
- and work on it. When you are done,
- [rebase](https://git-scm.com/book/en/v2/Git-Branching-Rebasing) it onto the
- latest master and push the commits to the new branch in your own Github
- account.
-
-* Open a pull request against the master branch of apache/incubator-singa.
-The PR title should be of the form SINGA-xxxx Title, where
-SINGA-xxxx is the relevant JIRA number, and Title may be the JIRA's title or a
-more specific title describing the PR itself, for example, "SINGA-6 Implement thread-safe singleton". Detailed description can be copied from the JIRA.
-Consider identifying committers or other contributors who have worked on the
-code being changed. Find the file(s) in Github and click "Blame" to see a
-line-by-line annotation of who changed the code last.  You can add @username in
-the PR description to ping them immediately.
-Please state that the contribution is your original work and that you license
-the work to the project under the project's open source license. Further commits (e.g., bug fix)
-to your new branch will be added to this pull request automatically by Github.
-
-* Wait for one committer to review the patch. If there are no conflicts, the committers will merge it into
-the master branch. The merge should (a) not use rebase, (b) disable fast-forward merge, and (c) check the
-commit message format and test the code/feature.
-
-* If there are too many small commits, you will be told to squash them into fewer meaningful
-commits. If your commit message does not follow the format (i.e., SINGA-xxxx), you will be told to
-reword your commit message. Both changes can be done using interactive git rebase. Once you
-get the commits corrected, push them to your own Github account again. Your pull request
-will be automatically updated. For details, please refer to
-[Rebase Pull Requests](https://github.com/edx/edx-platform/wiki/How-to-Rebase-a-Pull-Request).
diff --git a/doc/en/develop/how-contribute.md b/doc/en/develop/how-contribute.md
deleted file mode 100644
index 639f14e..0000000
--- a/doc/en/develop/how-contribute.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# How to Contribute to SINGA
-
-As with any open source project, there are several ways you can help:
-
-* Join the [mailing list](http://singa.apache.org/en/community/mail-lists.html) and answer other users' questions.
-* [Build SINGA](http://singa.apache.org/en/docs/installation.html) by yourself.
-* Report bugs, feature requests and other issues in the [issue tracking](http://singa.apache.org/en/community/issue-tracking.html) application.
-* Check SINGA's [development schedule](http://singa.apache.org/en/develop/schedule.html) and [contribute code](http://singa.apache.org/en/develop/contribute-code.html) by providing patches.
-* [Help with the documentation](http://singa.apache.org/en/develop/contribute-docs.html) by updating webpages that are lacking or unclear.
diff --git a/doc/en/develop/schedule.rst b/doc/en/develop/schedule.rst
deleted file mode 100644
index 0eb4f90..0000000
--- a/doc/en/develop/schedule.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Development Schedule
-====================
-
-.. csv-table::
-  :header: "Release","Module","Feature"
-
-  "v0.1 Sep 2015      ","Neural Network               ","Feed forward neural network, including CNN, MLP                                                                     "
-  "                   ","                             ","RBM-like model, including RBM                                                                                       "
-  "                   ","                             ","Recurrent neural network, including standard RNN                                                                    "
-  "                   ","Architecture                 ","One worker group on single node (with data partition)                                                               "
-  "                   ","                             ","Multi worker groups on single node using `Hogwild <http://www.eecs.berkeley.edu/~brecht/papers/hogwildTR.pdf>`_     "
-  "                   ","                             ","Distributed Hogwild"
-  "                   ","                             ","Multi groups across nodes, like `Downpour <http://papers.nips.cc/paper/4687-large-scale-distributed-deep-networks>`_"
-  "                   ","                             ","All-Reduce training architecture like `DeepImage <http://arxiv.org/abs/1501.02876>`_                                "
-  "                   ","                             ","Load-balance among servers                                                                                          "
-  "                   ","Failure recovery             ","Checkpoint and restore                                                                                              "
-  "                   ","Tools                        ","Installation with GNU auto Tools                                                                                    "
-  "v0.2 Jan 2016      ","Neural Network               ","Feed forward neural network, including AlexNet, cuDNN layers,Tools                                                  "
-  "                   ","                             ","Recurrent neural network, including GRULayer and BPTT                                                               "
-  "                   ","                             ","Model partition and hybrid partition                                                                                "
-  "                   ","Tools                        ","Integration with Mesos for resource management                                                                      "
-  "                   ","                             ","Prepare Docker images for deployment"
-  "                   ","                             ","Visualization of neural net and debug information "
-  "                   ","Binding                      ","Python binding for major components "
-  "                   ","GPU                          ","Single node with multiple GPUs "
-  "v0.3 April 2016    ","GPU                          ","Multiple nodes, each with multiple GPUs"
-  "                   ","                             ","Heterogeneous training using both GPU and CPU `CcT <http://arxiv.org/abs/1504.04343>`_"
-  "                   ","                             ","Support cuDNN v4 "
-  "                   ","Installation                 ","Remove dependency on ZeroMQ, CZMQ, Zookeeper for single node training"
-  "                   ","Updater                      ","Add new SGD updaters including Adam, AdamMax and AdaDelta"
-  "                   ","Binding                      ","Enhance Python binding for training"
-  "v1.0 Sep 2016      ","Programming abstraction      ","Tensor with linear algebra, neural net and random operations "
-  "                   ","                             ","Updater for distributed parameter updating "
-  "                   ","Hardware                     ","Use Cuda and Cudnn for Nvidia GPU"
-  "                   ","                             ","Use OpenCL for AMD GPU or other devices"
-  "                   ","Cross-platform               ","To extend from Linux to MacOS"
-  "                   ","                             ","Large image models, e.g., `VGG <https://arxiv.org/pdf/1409.1556.pdf>`_ and `Residual Net <http://arxiv.org/abs/1512.03385>`_"
-  "v1.1 Jan 2017      ","Model Zoo                    ","GoogleNet; Health-care models"
-  "                   ","Caffe converter              ","Use SINGA to train models configured in caffe proto files"
-  "                   ","Model components             ","Add concat and slice layers; accept multiple inputs to the net"
-  "                   ","Compilation and installation ","Windows suppport"
-  "                   ","                             ","Simplify the installation by compiling protobuf and openblas together with SINGA"
-  "                   ","                             ","Build python wheel automatically using Jenkins"
-  "                   ","                             ","Install SINGA from Debian packages"
-  "v1.2 April 2017    ","Numpy API                    ","Implement functions for the tensor module of PySINGA following numpy API"
-  "                   ","Distributed training         ","Migrate distributed training frameworks from V0.3"
-  "v1.3 July 2017     ","Memory optimization          ","Replace CNMEM with new memory pool to reduce memory footprint"
-  "                   ","Execution optimization       ","Runtime optimization of execution scheduling"
diff --git a/doc/en/docs.rst b/doc/en/docs.rst
deleted file mode 100644
index 1b94d02..0000000
--- a/doc/en/docs.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Documentation
-=============
-
-.. toctree::
-   docs/index
diff --git a/doc/en/docs/cnn.md b/doc/en/docs/cnn.md
deleted file mode 100755
index 6609137..0000000
--- a/doc/en/docs/cnn.md
+++ /dev/null
@@ -1,141 +0,0 @@
-# Quickstart - Cifar10 example
-Convolution neural network (CNN) is a type of feed-forward artificial neural network widely used for image classification. In this example, we will use a deep CNN model to do image classification for the [CIFAR10 dataset](http://www.cs.toronto.edu/~kriz/cifar.html).
-
-## Running instructions for CPP version
-Please refer to the [Installation](installation.html) page for how to install SINGA. Currently, the CNN example requires cuDNN, hence both CUDA and cuDNN should be installed and SINGA should be compiled with CUDA and cuDNN.
-
-The Cifar10 dataset could be downloaded by running
-
-    # switch to cifar10 directory
-    $ cd ../examples/cifar10
-    # download data for CPP version
-    $ python download_data.py bin
-
-'bin' is for downloading the binary version of the Cifar10 data.
-
-During downloading, you should see the detailed output like
-
-     Downloading CIFAR10 from http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz
-     The tar file does exist. Extracting it now..
-     Finished!
-
-Now that you have prepared the data for this Cifar10 example, the final step is to execute the `run.sh` script,
-
-    # in SINGA_ROOT/examples/cifar10/
-    $ ./run.sh
-
-You should see detailed output as follows: it first reads the data files in order, shows the statistics of the training and testing data, then shows the details of the neural net structure with some parameter information, and finally reports the performance details during the training and validation process. The number of epochs can be specified in the `run.sh` file.
-
-    Start training
-    Reading file cifar-10-batches-bin/data_batch_1.bin
-    Reading file cifar-10-batches-bin/data_batch_2.bin
-    Reading file cifar-10-batches-bin/data_batch_3.bin
-    Reading file cifar-10-batches-bin/data_batch_4.bin
-    Reading file cifar-10-batches-bin/data_batch_5.bin
-    Reading file cifar-10-batches-bin/test_batch.bin
-    Training samples = 50000, Test samples = 10000
-    conv1(32, 32, 32, )
-    pool1(32, 16, 16, )
-    relu1(32, 16, 16, )
-    lrn1(32, 16, 16, )
-    conv2(32, 16, 16, )
-    relu2(32, 16, 16, )
-    pool2(32, 8, 8, )
-    lrn2(32, 8, 8, )
-    conv3(64, 8, 8, )
-    relu3(64, 8, 8, )
-    pool3(64, 4, 4, )
-    flat(1024, )
-    ip(10, )
-    conv1_weight : 8.09309e-05
-    conv1_bias : 0
-    conv2_weight : 0.00797731
-    conv2_bias : 0
-    conv3_weight : 0.00795888
-    conv3_bias : 0
-    ip_weight : 0.00798683
-    ip_bias : 0
-    Messages will be appended to an existed file: train_perf
-    Messages will be appended to an existed file: val_perf
-    Epoch 0, training loss = 1.828369, accuracy = 0.329420, lr = 0.001000
-    Epoch 0, val loss = 1.561823, metric = 0.420600
-    Epoch 1, training loss = 1.465898, accuracy = 0.469940, lr = 0.001000
-    Epoch 1, val loss = 1.361778, metric = 0.513300
-    Epoch 2, training loss = 1.320708, accuracy = 0.529000, lr = 0.001000
-    Epoch 2, val loss = 1.242080, metric = 0.549100
-    Epoch 3, training loss = 1.213776, accuracy = 0.571620, lr = 0.001000
-    Epoch 3, val loss = 1.175346, metric = 0.582000
-
-The training details are stored in `train_perf` file in the same directory and the validation details in `val_perf` file.
-
-
-## Running instructions for Python version
-To run the CNN example in Python, we need to compile SINGA with the Python binding,
-
-    $ mkdir build && cd build
-    $ cmake -DUSE_PYTHON=ON ..
-    $ make
-
-Now download the Cifar10 dataset,
-
-    # switch to cifar10 directory
-    $ cd ../examples/cifar10
-    # download data for Python version
-    $ python download_data.py py
-
-During downloading, you should see the detailed output like
-
-     Downloading CIFAR10 from http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
-     The tar file does exist. Extracting it now..
-     Finished!
-
-Then execute the `train.py` script to build the model
-
-    $ python train.py
-
-You should see output as follows, including the details of the neural net structure with some parameter information, the reading of data files, and the performance details during the training and testing process.
-
-    (32L, 32L, 32L)
-    (32L, 16L, 16L)
-    (32L, 16L, 16L)
-    (32L, 16L, 16L)
-    (32L, 16L, 16L)
-    (32L, 16L, 16L)
-    (32L, 8L, 8L)
-    (32L, 8L, 8L)
-    (64L, 8L, 8L)
-    (64L, 8L, 8L)
-    (64L, 4L, 4L)
-    (1024L,)
-    Start intialization............
-    conv1_weight gaussian 7.938460476e-05
-    conv1_bias constant 0.0
-    conv2_weight gaussian 0.00793507322669
-    conv2_bias constant 0.0
-    conv3_weight gaussian 0.00799657031894
-    conv3_bias constant 0.0
-    dense_weight gaussian 0.00804364029318
-    dense_bias constant 0.0
-    Loading data ..................
-    Loading data file cifar-10-batches-py/data_batch_1
-    Loading data file cifar-10-batches-py/data_batch_2
-    Loading data file cifar-10-batches-py/data_batch_3
-    Loading data file cifar-10-batches-py/data_batch_4
-    Loading data file cifar-10-batches-py/data_batch_5
-    Loading data file cifar-10-batches-py/test_batch
-    Epoch 0
-    training loss = 1.881866, training accuracy = 0.306360 accuracy = 0.420000
-    test loss = 1.602577, test accuracy = 0.412200
-    Epoch 1
-    training loss = 1.536011, training accuracy = 0.441940 accuracy = 0.500000
-    test loss = 1.378170, test accuracy = 0.507600
-    Epoch 2
-    training loss = 1.333137, training accuracy = 0.519960 accuracy = 0.520000
-    test loss = 1.272205, test accuracy = 0.540600
-    Epoch 3
-    training loss = 1.185212, training accuracy = 0.574120 accuracy = 0.540000
-    test loss = 1.211573, test accuracy = 0.567600
-
-This script calls the `alexnet.py` file to build the AlexNet model. After the training is finished, SINGA saves the model parameters into a checkpoint file `model.bin` in the same directory. We can then use this `model.bin` file for prediction.
-
-    $ python predict.py
diff --git a/doc/en/docs/converter.rst b/doc/en/docs/converter.rst
deleted file mode 100644
index 16a81b8..0000000
--- a/doc/en/docs/converter.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Caffe Converter
-================
-
-.. automodule:: singa.converter
-   :members:
diff --git a/doc/en/docs/dependencies.md b/doc/en/docs/dependencies.md
deleted file mode 100644
index a812c05..0000000
--- a/doc/en/docs/dependencies.md
+++ /dev/null
@@ -1,92 +0,0 @@
-# Dependent library installation
-
-## Windows
-
-This section describes how to compile and install the dependent libraries from
-source under Windows. The following instructions ONLY work for Visual Studio 2015, as
-earlier VS versions do not support [C++11 features](https://msdn.microsoft.com/en-us/library/hh567368.aspx) well (including generic lambdas, auto, and non-static
-data member initializers). If you intend to generate a 32-bit/64-bit singa solution, please configure all the
-VS projects for the dependent libraries as 32-bit/64-bit. This can be done via
-"Configuration Manager" in VS 2015 or by using the corresponding generator in cmake. When compiling the following libraries, you
-may get system-specific warnings/errors. Please fix them according to the
-prompts from VS.
-
-### Google Logging
-The glog library is optional for the singa project, but it is currently required for Windows compilation.
-Since the latest release version of glog encounters error C2084 on the sprintf function
-under VS2015, we tested the compilation and installation using the master branch from [github](https://github.com/google/glog).
-
-Step 1: Download and decompress the source code. Or use `git clone
-https://github.com/google/glog` to get the code.
-
-Step 2: Open "glog.sln" file under project folder. You will get a conversion
-dialog and please finish it by the prompts. Compile all the projects in the solution after
-proper configuration, especially "libglog" and "libglog_static" projects.
-
-Step 3: Copy all the header files and the entire directory named "glog" under
-"src\windows\" folder into the installation include folder (or system folder).
-Copy all the generated library files into the installation library folder (or
-system folder).
-
-Step 4: Done.
-
-
-### Google protobuf
-
-Tested on version 2.6.1:
-
-Step 1: Download and decompress the source code.
-
-Step 2: Open "protobuf.sln" file under "vsprojects" folder. You will get a conversion
-dialog and please finish it by the prompts. Compile all the projects in the solution after proper
-configuration. Especially "libprotobuf", "libprotobuf-lite", "libprotoc" and
-"protoc" projects.
-
-Step 3: Run "extract_includes.bat" script under "vsprojects" folder, you will
-get a new "include" folder with all the headers.
-
-Step 4: Copy the library files, such as "libprotobuf.lib",
-"libprotobuf-lite.lib", "libprotoc.lib", etc., into your installation library folder (or
-system folder). Copy the binary file "protoc" into your installation binary
-folder (or system folder). Copy all the headers and folders in "include" folder into your
-installation include folder (or system folder).
-
-Step 5: Done.
-
-### CBLAS
-
-There are ready-to-use binary packages online
-([link](https://sourceforge.net/projects/openblas/files/)). However, we still build
-OpenBLAS version 0.2.18 from source as a test:
-
-Step 1: Download and decompress the source code.
-
-Step 2: Start a cmd window under the OpenBLAS folder then run the following
-commands to generate the solution:
-
-    $ md build && cd build
-    $ cmake -G "Visual Studio 14" ..
-
-Or run `cmake -G "Visual Studio 14 Win64"` as you wish.
-
-Step 3: Install Perl into your system and put perl.exe on your path. Open "OpenBlas.sln" and build the solution, especially "libopenblas"
-project.
-
-Step 4: Copy the library files under "build\lib" folder and all header files
-under OpenBLAS folder into installation library and include folders (or system
-folders).
-
-Step 5: Done.
-
-
-## FAQ
-
-1. Error C2375 'snprintf': redefinition; different linkage
-
-    Add “HAVE_SNPRINTF” to “C/C++ - Preprocessor - Preprocessor definitions”
-
-2. Error due to hash map
-
-    Add "_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS" to Preprocessor Definitions.
-
-
diff --git a/doc/en/docs/docker.md b/doc/en/docs/docker.md
deleted file mode 100644
index 8e5743e..0000000
--- a/doc/en/docs/docker.md
+++ /dev/null
@@ -1,58 +0,0 @@
-# Docker Images
-
-
-## Available tags
-
-* `devel`, with SINGA and the development packages installed on Ubuntu16.04 (no GPU)
-* `devel-cuda`, with SINGA, CUDA8.0, CUDNN5, and other development packages installed on Ubuntu16.04
-
-## Use the existing Docker images
-
-Users can pull the Docker images from Dockerhub via
-
-    docker pull apache/singa:devel
-    # or
-    docker pull apache/singa:devel-cuda
-
-Run the docker container using
-
-    docker run -it apache/singa:devel /bin/bash
-    # or
-    docker run -it apache/singa:devel-cuda /bin/bash
-
-The latest SINGA code is under the `incubator-singa` folder.
-
-## Create new Docker images from Dockerfile
-
-New Docker images could be created by executing the following command within the
-Dockerfile folder, e.g., tool/docker/devel/
-
-    docker build -t singa:<TAG> -f Dockerfile .
-
-The `<TAG>` is named as
-
-    devel|runtime[-OS][-CUDA|OPENCL][-CUDNN]
-
-* devel: development images with all dependent libs' header files installed and SINGA's source code; runtime: the minimal images which can run SINGA programs.
-* OS: ubuntu, ubuntu14.04, centos, centos6
-* CUDA: cuda, cuda8.0, cuda7.0
-* CUDNN: cudnn, cudnn5, cudnn4
-* OPENCL: opencl, opencl1.2
-
-By default, if the version is not included in the tag, the latest stable version is used.
-The default OS is Ubuntu, at its latest stable version (e.g., 16.04 for now).
-For the -cuda versions, **cudnn** is included by default; their versions are also the latest stable ones, i.e., cuda8.0 and cudnn5 for now.
-
-Here are some example tags,
-
-`devel`, `devel-cuda`, `runtime`, `runtime-cuda`, `devel-centos7-cuda`, `devel-ubuntu14.04`, `devel-ubuntu14.04-cuda7.5-cudnn4`
-
-Please follow the existing Dockerfiles under tool/docker/ to create other Dockerfiles.
-The folder structure is like
-
-    level1: devel|runtime
-    level2: Dockerfile, OS
-    level3: Dockerfile, CUDA|OPENCL
-    level4: CUDNN
-
-For example, the path of the Dockerfile for `devel-cuda` is `tool/docker/devel/cuda/Dockerfile`.
diff --git a/doc/en/docs/image_tool.rst b/doc/en/docs/image_tool.rst
deleted file mode 100644
index 764f036..0000000
--- a/doc/en/docs/image_tool.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Image Tool
-==========
-
-.. automodule:: singa.image_tool
-   :members:
diff --git a/doc/en/docs/initializer.rst b/doc/en/docs/initializer.rst
deleted file mode 100644
index 6790a8e..0000000
--- a/doc/en/docs/initializer.rst
+++ /dev/null
@@ -1,30 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Initializer
-===========
-
-Python API
-----------
-
-.. automodule:: singa.initializer
-   :members: uniform, gaussian
-   :member-order: bysource
-
-CPP API
---------
diff --git a/doc/en/docs/installation.md b/doc/en/docs/installation.md
deleted file mode 100755
index d60e572..0000000
--- a/doc/en/docs/installation.md
+++ /dev/null
@@ -1,437 +0,0 @@
-# Installation
-
-## From Conda
-
-Conda is a package manager provided by [Anaconda](https://www.continuum.io/downloads) or [Miniconda](https://conda.io/miniconda.html).
-Currently, SINGA has conda packages (Python2.7) for Linux and MacOSX.
-
-### Linux
-
-1. CPU only
-
-        conda install -c nusdbsystem singa
-
-2. GPU via CUDA+cuDNN
-
-        conda install -c nusdbsystem singa-cudax.y-cudnnz
-
-    where `x.y, z` is one of <8.0, 5>, <7.5, 5>, and <7.5, 4>.
-    Users need to install CUDA and cuDNN before installing SINGA.
-    If cuDNN is not in system folders (e.g., /usr/local), export the folder of libcudnn.so to LD_LIBRARY_PATH
-
-
-### Mac OSX
-
-Only the CPU version is available,
-
-    conda config --add channels conda-forge
-    conda install -c nusdbsystem singa
-
-
-If there is no error message from
-
-    python -c "from singa import tensor"
-
-then SINGA is installed successfully.
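A slightly fuller post-install check than the one-line import also exercises basic tensor arithmetic; this is a sketch that assumes only the documented `singa.tensor` module.

    from singa import tensor

    a = tensor.Tensor((2, 3))   # 2x3 tensor on the default (CPU) device
    a.set_value(1.0)
    b = a + a                   # element-wise addition
    print(tensor.to_numpy(b))   # expect a 2x3 array filled with 2.0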
-
-## From Debian Package
-
-The following Debian packages (on architecture: amd64) are available
-
-<table border="1">
-  <tr>
-    <th>OS</th>
-    <th>Device</th>
-    <th>CUDA/cuDNN</th>
-    <th>Link</th>
-  </tr>
-  <tr>
-    <td>Ubuntu14.04</td>
-    <td>CPU</td>
-    <td>-</td>
-    <td><a href="http://comp.nus.edu.sg/~dbsystem/singa/assets/file/debian/latest/ubuntu14.04-cpp/python-singa.deb">latest</a>, <a href="http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/debian">history</a></td>
-  </tr>
-  <tr>
-    <td>Ubuntu14.04</td>
-    <td>GPU</td>
-    <td>CUDA7.5+cuDNN4</td>
-    <td>-</td>
-  </tr>
-  <tr>
-    <td>Ubuntu14.04</td>
-    <td>GPU</td>
-    <td>CUDA7.5+cuDNN5</td>
-    <td>-</td>
-  </tr>
-  <tr>
-    <td>Ubuntu14.04</td>
-    <td>GPU</td>
-    <td>CUDA8.0+cuDNN5</td>
-    <td>-</td>
-  </tr>
-</table>
-
-Download the deb file and install it via
-
-    apt-get install <path to the deb file, e.g., ./python-singa.deb>
-
-Note that the path must include `./` if the file is inside the current folder.
-
-## From source
-
-The source files could be downloaded either as a [tar.gz file](https://dist.apache.org/repos/dist/dev/incubator/singa/), or as a git repo
-
-    $ git clone https://github.com/apache/incubator-singa.git
-    $ cd incubator-singa/
-
-### Pre-requisite
-
-The following libraries are required
-* cmake (>=2.8)
-* gcc (>=4.8.1) or Clang
-* google protobuf (>=2.5)
-* blas (tested with openblas >=0.2.10)
-* swig(>=3.0.10) for compiling PySINGA
-* numpy(>=1.11.0) for compiling PySINGA
-
-The following libraries are optional
-* opencv (tested with 2.4.8)
-* lmdb (tested with 0.9)
-* glog
-
-### Instructions
-
-1. create a `build` folder inside incubator-singa and go into that folder
-2. run `cmake [options] ..`
-  by default all options are OFF except `USE_PYTHON`
-
-    * `USE_MODULES=ON`, used if protobuf and blas are not installed beforehand
-    * `USE_CUDA=ON`, used if CUDA and cuDNN are available
-    * `USE_PYTHON=ON`, used for compiling PySINGA
-    * `USE_OPENCL=ON`, used for compiling with OpenCL support
-3. compile the code, e.g., `make`
-4. go to the python folder
-5. run `pip install .`
-6. [optional] run `python setup.py bdist_wheel` to generate the wheel file
-
-Steps 4 and 5 install PySINGA.
-Details on the installation of dependent libraries and the instructions for each OS are given in the following sections.
-
-### Linux and Mac OS
-
-Most of the dependent libraries could be installed from source or via package managers like
-apt-get, yum, and homebrew. Please refer to FAQ for problems caused by the path setting of the dependent libraries.
-
-The following instructions are tested on Ubuntu 14.04 and 16.04 for installing the dependent libraries.
-
-    # required libraries
-    $ sudo apt-get install libprotobuf-dev libopenblas-dev protobuf-compiler
-
-    # optional libraries
-    $ sudo apt-get install python2.7-dev python-pip python-numpy
-    $ sudo apt-get install libopencv-dev libgoogle-glog-dev liblmdb-dev
-
-The following instructions are tested on Mac OS X (10.11 and 10.12) for installing the dependent libraries.
-
-    # required libraries
-    $ brew tap homebrew/science
-    $ brew install openblas
-    $ brew install protobuf260
-
-    # optional libraries
-    $ brew tap homebrew/python
-    $ brew install python
-    $ brew install opencv
-    $ brew install -vd glog lmdb
-
-By default, openblas is installed into /usr/local/opt/openblas. To let the compiler (and cmake) know the openblas
-path,
-
-    $ export CMAKE_INCLUDE_PATH=/usr/local/opt/openblas/include:$CMAKE_INCLUDE_PATH
-    $ export CMAKE_LIBRARY_PATH=/usr/local/opt/openblas/lib:$CMAKE_LIBRARY_PATH
-
-To let the runtime know the openblas path,
-
-    $ export LD_LIBRARY_PATH=/usr/local/opt/openblas/lib:$LD_LIBRARY_PATH
-
-
-#### Compile with USE_MODULES=ON
-
-If protobuf and openblas are not installed, you can compile SINGA together with them
-
-    # In the SINGA root folder
-    $ mkdir build
-    $ cd build
-    $ cmake -DUSE_MODULES=ON ..
-    $ make
-
-cmake will download OpenBLAS and Protobuf (2.6.1) and compile them together
-with SINGA.
-
-After compiling SINGA, you can run the unit tests by
-
-    $ ./bin/test_singa
-
-You can see all the testing cases with testing results. If SINGA passes all
-tests, then you have successfully installed SINGA.
-
-You can use `ccmake ..` to configure the compilation options.
-If some dependent libraries are not in the system default paths, you need to export
-the following environment variables
-
-    export CMAKE_INCLUDE_PATH=<path to the header file folder>
-    export CMAKE_LIBRARY_PATH=<path to the lib file folder>
-
-#### Compile with USE_PYTHON=ON
-swig and numpy can be installed via
-
-    # Ubuntu 14.04 and 16.04
-    $ sudo apt-get install python-numpy
-    # Ubuntu 16.04
-    $ sudo apt-get install swig
-
-Note that swig has to be installed from source on Ubuntu 14.04.
-After installing numpy, export the header path of numpy.i as
-
-    $ export CPLUS_INCLUDE_PATH=`python -c "import numpy; print(numpy.get_include())"`:$CPLUS_INCLUDE_PATH
-
-Similar to compiling the C++ code, PySINGA is compiled via
-
-    $ cmake -DUSE_PYTHON=ON ..
-    $ make
-    $ cd python
-    $ pip install .
-
-Developers can build the wheel file via
-
-    # under the build directory
-    $ cd python
-    $ python setup.py bdist_wheel
-
-The generated wheel file is under the "dist" directory.
-
-
-#### Compile SINGA with USE_CUDA=ON
-
-Users are encouraged to install CUDA and
-[cuDNN](https://developer.nvidia.com/cudnn) for running SINGA on GPUs to
-get better performance.
-
-SINGA has been tested with CUDA (7, 7.5, 8) and cuDNN (4 and 5). If cuDNN is
-decompressed into a non-system folder, e.g., /home/bob/local/cudnn/, the following
-commands should be executed so that cmake and the runtime can find it
-
-    $ export CMAKE_INCLUDE_PATH=/home/bob/local/cudnn/include:$CMAKE_INCLUDE_PATH
-    $ export CMAKE_LIBRARY_PATH=/home/bob/local/cudnn/lib64:$CMAKE_LIBRARY_PATH
-    $ export LD_LIBRARY_PATH=/home/bob/local/cudnn/lib64:$LD_LIBRARY_PATH
-
-The cmake options for CUDA and cuDNN should be switched on
-
-    # dependent libs are installed already
-    $ cmake -DUSE_CUDA=ON ..
-
-#### Compile SINGA with USE_OPENCL=ON
-
-SINGA uses opencl-headers and viennacl (version 1.7.1 or newer) for OpenCL support, which
-can be installed via
-
-    # On Ubuntu 16.04
-    $ sudo apt-get install opencl-headers libviennacl-dev
-    # On Fedora
-    $ sudo yum install opencl-headers viennacl
-
-Additionally, you will need the OpenCL Installable Client Driver (ICD) for the platforms that you want to run OpenCL on.
-
-* For AMD and nVidia GPUs, the driver package should also install the correct OpenCL ICD.
-* For Intel CPUs and/or GPUs, get the driver from the [Intel website](https://software.intel.com/en-us/articles/opencl-drivers). Note that the drivers provided on that website only support recent CPUs and Iris GPUs.
-* For older Intel CPUs, you can use the `beignet-opencl-icd` package.
-
-Note that running OpenCL on CPUs is not currently recommended because it is slow: memory transfers take on the order of whole seconds (thousands of milliseconds on CPUs, compared to single milliseconds on GPUs).
-
-More information on setting up a working OpenCL environment may be found [here](https://wiki.tiker.net/OpenCLHowTo).
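-
-One way to check that an ICD is visible is the `clinfo` tool, which lists the
-detected OpenCL platforms and devices (assuming the package is available in
-your distribution's repositories),
-
-    # On Ubuntu 16.04
-    $ sudo apt-get install clinfo
-    $ clinfo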
-
-If the package version of ViennaCL is not at least 1.7.1, you will need to build it from source:
-
-Clone [the repository from here](https://github.com/viennacl/viennacl-dev), checkout the `release-1.7.1` tag and build it.
-Remember to add its directory to `PATH` and the built libraries to `LD_LIBRARY_PATH`.
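-
-A sketch of these steps (the install prefix $HOME/local is an assumption;
-adjust it to your setup):
-
-    $ git clone https://github.com/viennacl/viennacl-dev.git
-    $ cd viennacl-dev
-    $ git checkout release-1.7.1
-    $ mkdir build && cd build
-    $ cmake -DCMAKE_INSTALL_PREFIX=$HOME/local ..
-    $ make && make install
-    $ export PATH=$HOME/local/bin:$PATH
-    $ export LD_LIBRARY_PATH=$HOME/local/lib:$LD_LIBRARY_PATH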
-
-To build SINGA with OpenCL support, you need to pass the flag during cmake:
-
-    cmake -DUSE_OPENCL=ON ..
-
-### Compile SINGA on Windows
-
-For the dependent library installation, please refer to [Dependencies](dependencies.md).
-After all the dependencies are successfully installed, run the following commands
-in cmd under the singa folder to generate the VS solution:
-
-    $ md build && cd build
-    $ cmake -G "Visual Studio 14" -DUSE_CUDA=OFF -DUSE_PYTHON=OFF ..
-
-The command above generates a 32-bit project by default. You can
-generate a 64-bit project by:
-
-    $ md build && cd build
-    $ cmake -G "Visual Studio 14 Win64" -DUSE_CUDA=OFF -DUSE_PYTHON=OFF ..
-
-If you get error outputs like "Could NOT find xxxxx" indicating that a dependent
-library is missing, configure the library and include paths for cmake or the system.
-For example, suppose you get the error "Could NOT find CBLAS" and you installed the
-openblas header files at "d:\include" and the openblas library at "d:\lib". Run the
-following command to specify your cblas parameters in cmake:
-
-    $ cmake -G "Visual Studio 14" -DUSE_CUDA=OFF -DUSE_PYTHON=OFF -DCBLAS_INCLUDE_DIR="d:\include" -DCBLAS_LIBRARIES="d:\lib\libopenblas.lib" -DProtobuf_INCLUDE_DIR=<include dir of protobuf> -DProtobuf_LIBRARIES=<path to libprotobuf.lib> -DProtobuf_PROTOC_EXECUTABLE=<path to protoc.exe> -DGLOG_INCLUDE_DIR=<include dir of glog> -DGLOG_LIBRARIES=<path to libglog.lib> ..
-
-To find out the parameters you need to specify for some special libraries, you
-can run the following command:
-
-    $ cmake -LAH
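-
-For example, to show only the protobuf-related entries (using `findstr`, since
-this is the Windows cmd shell),
-
-    $ cmake -LAH | findstr /i protobuf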
-
-If you use the cmake GUI tool on Windows, make sure you configure the right
-parameters for the singa solution by selecting the "Advanced" box. After generating the VS project,
-open the "singa.sln" project file under
-the "build" folder and compile it as a normal VS solution. You will find the
-unit test file named "test_singa" in the project binary folder.
-If you get errors when running test_singa.exe due to a missing libglog.dll/libopenblas.dll,
-just copy the dll files into the same folder as test_singa.exe.
-
-## FAQ
-
-* Q: Error from 'import singa' using PySINGA installed from wheel.
-
-    A: Please check the detailed error from `python -c "from singa import _singa_wrap"`. Sometimes it is
-    caused by the dependent libraries, e.g. multiple versions of protobuf, a missing cudnn, or a numpy version mismatch. The following
-    steps show the solutions for different cases.
-    1. Check the cudnn, cuda and gcc versions; cudnn5, cuda7.5 and gcc4.8/4.9 are preferred. If gcc is 5.0, then downgrade it.
-       If cudnn is missing or does not match the wheel version, you can download the correct version of cudnn into ~/local/cudnn/ and
-
-            $ echo "export LD_LIBRARY_PATH=/home/<yourname>/local/cudnn/lib64:$LD_LIBRARY_PATH" >> ~/.bashrc
-
-    2. If the problem is related to protobuf, then download the newest whl files, which have [protobuf and openblas compiled into the whl](https://issues.apache.org/jira/browse/SINGA-255) file of PySINGA.
-       Or you can install protobuf from source into a local folder, say ~/local/:
-       decompress the tar file, and then
-
-            $ ./configure --prefix=/home/<yourname>/local
-            $ make && make install
-            $ echo "export LD_LIBRARY_PATH=/home/<yourname>/local/lib:$LD_LIBRARY_PATH" >> ~/.bashrc
-            $ source ~/.bashrc
-
-    3. If it cannot find other libs, including python, then create a virtual env using pip or conda;
-
-    4. If it is not caused by the above reasons, go to the folder of `_singa_wrap.so`,
-
-            $ python
-            >>> import importlib
-            >>> importlib.import_module('_singa_wrap')
-
-      Check the error message. For example, if the numpy version mismatches, the error message would be,
-
-            RuntimeError: module compiled against API version 0xb but this version of numpy is 0xa
-
-      Then you need to upgrade numpy.
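-
-      For example, numpy can be upgraded via
-
-            $ pip install --upgrade numpy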
-
-
-* Q: Error from running `cmake ..`, which cannot find the dependent libraries.
-
-    A: If you haven't installed the libraries, install them. If you installed
-    the libraries in a folder that is outside of the system folder, e.g. /usr/local,
-    you need to export the following variables
-
-        $ export CMAKE_INCLUDE_PATH=<path to your header file folder>
-        $ export CMAKE_LIBRARY_PATH=<path to your lib file folder>
-
-
-* Q: Error from `make`, e.g. the linking phase
-
-    A: If your libraries are in folders other than the system default paths, you need
-    to export the following variables
-
-        $ export LIBRARY_PATH=<path to your lib file folder>
-        $ export LD_LIBRARY_PATH=<path to your lib file folder>
-
-
-* Q: Error from header files, e.g. 'cblas.h: No such file or directory'
-
-    A: You need to add the folder containing cblas.h to CPLUS_INCLUDE_PATH,
-    e.g.,
-
-        $ export CPLUS_INCLUDE_PATH=/opt/OpenBLAS/include:$CPLUS_INCLUDE_PATH
-
-* Q: While compiling SINGA, I get the error `SSE2 instruction set not enabled`
-
-    A: You can try the following command:
-
-        $ make CFLAGS='-msse2' CXXFLAGS='-msse2'
-
-* Q: I get `ImportError: cannot import name enum_type_wrapper` from google.protobuf.internal when I try to import .py files.
-
-    A: You need to install the python binding of protobuf, which could be installed via
-
-        $ sudo apt-get install python-protobuf
-
-    or from source
-
-        $ cd /PROTOBUF/SOURCE/FOLDER
-        $ cd python
-        $ python setup.py build
-        $ python setup.py install
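-
-    After installing, you can verify that the binding is importable via
-
-        $ python -c "from google.protobuf.internal import enum_type_wrapper"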
-
-* Q: When I build OpenBLAS from source, I am told that I need a Fortran compiler.
-
-    A: You can compile OpenBLAS by
-
-        $ make ONLY_CBLAS=1
-
-    or install it using
-
-        $ sudo apt-get install libopenblas-dev
-
-* Q: When I build protocol buffer, it reports that GLIBCXX_3.4.20 is not found in /usr/lib64/libstdc++.so.6.
-
-    A: This means the linker found libstdc++.so.6, but that library
-    belongs to an older version of GCC than the one used to compile and link the
-    program. The program depends on code defined in
-    the newer libstdc++ that belongs to the newer version of GCC, so the linker
-    must be told how to find the newer libstdc++ shared library.
-    The simplest way to fix this is to find the correct libstdc++ and export it to
-    LD_LIBRARY_PATH. For example, if GLIBCXX_3.4.20 is listed in the output of the
-    following command,
-
-        $ strings /usr/local/lib64/libstdc++.so.6 | grep GLIBCXX
-
-    then you just set your environment variable as
-
-        $ export LD_LIBRARY_PATH=/usr/local/lib64:$LD_LIBRARY_PATH
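-
-    If you are not sure where a newer libstdc++ is located, one way to look for
-    candidates (a sketch) is
-
-        $ sudo find / -name "libstdc++.so.6*" 2>/dev/null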
-
-* Q: When I build glog, it reports that "src/logging_unittest.cc:83:20: error: ‘gflags’ is not a namespace-name"
-
-    A: It may be that you have installed gflags with a different namespace such as "google", so glog cannot find the 'gflags' namespace.
-    Since gflags is not necessary for building glog, you can change the configure.ac file to ignore gflags:
-
-        1. cd to glog src directory
-        2. change line 125 of configure.ac  to "AC_CHECK_LIB(gflags, main, ac_cv_have_libgflags=0, ac_cv_have_libgflags=0)"
-        3. autoreconf
-
-    After this, you can build glog again.
-
-* Q: When using a virtual environment, every time I run pip install, it reinstalls numpy. However, that numpy is not used when I `import numpy`.
-
-    A: It could be caused by `PYTHONPATH`, which should be set to empty when you are using a virtual
-    environment, to avoid conflicts with the path of the virtual environment.
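-
-    For example,
-
-        $ echo $PYTHONPATH    # inspect the current value
-        $ unset PYTHONPATH    # clear it for this shell session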
-
-* Q: When compiling PySINGA from source, there is a compilation error due to the missing <numpy/arrayobject.h>
-
-    A: Please install numpy and export the path of numpy header files as
-
-        $ export CPLUS_INCLUDE_PATH=`python -c "import numpy; print numpy.get_include()"`:$CPLUS_INCLUDE_PATH
-
-* Q: When I run PySINGA in Mac OS X, I got the error "Fatal Python error: PyThreadState_Get: no current thread  Abort trap: 6"
-
-    A: This error typically happens when you have multiple versions of Python on your system and you installed SINGA via pip (this problem is resolved for installation via conda),
-    e.g., the one that comes with the OS and the one installed by Homebrew. The Python linked by PySINGA must be the same as the Python interpreter.
-    You can check your interpreter by `which python` and check the Python linked by PySINGA via `otool -L <path to _singa_wrap.so>`.
-    To fix this error, compile SINGA with the correct version of Python.
-    In particular, if you build PySINGA from source, you need to specify the paths when invoking [cmake](http://stackoverflow.com/questions/15291500/i-have-2-versions-of-python-installed-but-cmake-is-using-older-version-how-do)
-
-        $ cmake -DPYTHON_LIBRARY=`python-config --prefix`/lib/libpython2.7.dylib -DPYTHON_INCLUDE_DIR=`python-config --prefix`/include/python2.7/ ..
-
-    If you installed PySINGA from binary packages, e.g. debian or wheel, then you need to change the python interpreter, e.g., reset $PATH to put the correct path of Python at the front.
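-
-    For example, to compare the two (the path to _singa_wrap.so is a placeholder),
-
-        $ which python
-        $ otool -L <path to _singa_wrap.so> | grep -i python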
diff --git a/doc/en/docs/layer.rst b/doc/en/docs/layer.rst
deleted file mode 100644
index 1a576f1..0000000
--- a/doc/en/docs/layer.rst
+++ /dev/null
@@ -1,32 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Layer
-======
-
-Python API
------------
-.. automodule:: singa.layer
-   :members:
-   :member-order: bysource
-   :show-inheritance:
-   :undoc-members:
-
-
-CPP API
---------
diff --git a/doc/en/docs/metric.rst b/doc/en/docs/metric.rst
deleted file mode 100644
index 20a7144..0000000
--- a/doc/en/docs/metric.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-Metric
-=========
-
-
-.. automodule:: singa.metric
-   :members:
-   :show-inheritance:
-   :member-order: bysource
diff --git a/doc/en/docs/neural-net.md b/doc/en/docs/neural-net.md
deleted file mode 100644
index 0a97f21..0000000
--- a/doc/en/docs/neural-net.md
+++ /dev/null
@@ -1,326 +0,0 @@
-# Neural Net
-
-
-`NeuralNet` in SINGA represents an instance of a user's neural net model. As a
-neural net typically consists of a set of layers, `NeuralNet` comprises
-a set of unidirectionally connected [Layer](layer.html)s.
-This page describes how to convert a user's neural net into
-the configuration of `NeuralNet`.
-
-<img src="../_static/images/model-category.png" align="center" width="200px"/>
-<span><strong>Figure 1 - Categorization of popular deep learning models.</strong></span>
-
-## Net structure configuration
-
-Users configure the `NeuralNet` by listing all layers of the neural net and
-specifying each layer's source layer names. Popular deep learning models can be
-categorized as Figure 1. The subsequent sections give details for each
-category.
-
-### Feed-forward models
-
-<div align = "left">
-<img src="../_static/images/mlp-net.png" align="center" width="200px"/>
-<span><strong>Figure 2 - Net structure of a MLP model.</strong></span>
-</div>
-
-Feed-forward models, e.g., CNN and MLP, can easily get configured as their layer
-connections are directed without cycles. The
-configuration for the MLP model shown in Figure 2 is as follows,
-
-    net {
-      layer {
-        name : "data"
-        type : kData
-      }
-      layer {
-        name : "image"
-        type : kImage
-        srclayer: "data"
-      }
-      layer {
-        name : "label"
-        type : kLabel
-        srclayer: "data"
-      }
-      layer {
-        name : "hidden"
-        type : kHidden
-        srclayer: "image"
-      }
-      layer {
-        name : "softmax"
-        type : kSoftmaxLoss
-        srclayer: "hidden"
-        srclayer: "label"
-      }
-    }
-
-### Energy models
-
-<img src="../_static/images/rbm-rnn.png" align="center" width="500px"/>
-<span><strong>Figure 3 - Convert connections in RBM and RNN.</strong></span>
-
-
-For energy models including RBM, DBM,
-etc., their connections are undirected (i.e., Category B). To represent these models using
-`NeuralNet`, users can simply replace each connection with two directed
-connections, as shown in Figure 3a. In other words, for each pair of connected layers, their source
-layer field should include each other's name.
-The full [RBM example](rbm.html) has
-detailed neural net configuration for an RBM model, which looks like
-
-    net {
-      layer {
-        name : "vis"
-        type : kVisLayer
-        param {
-          name : "w1"
-        }
-        srclayer: "hid"
-      }
-      layer {
-        name : "hid"
-        type : kHidLayer
-        param {
-          name : "w2"
-          share_from: "w1"
-        }
-        srclayer: "vis"
-      }
-    }
-
-### RNN models
-
-For recurrent neural networks (RNN), users can remove the recurrent connections
-by unrolling the recurrent layer.  For example, in Figure 3b, the original
-layer is unrolled into a new layer with 4 internal layers. In this way, the
-model is like a normal feed-forward model and thus can be configured similarly.
-The [RNN example](rnn.html) has a full neural net
-configuration for a RNN model.
-
-
-## Configuration for multiple nets
-
-Typically, a training job includes three neural nets for the
-training, validation and test phases respectively. The three neural nets share most
-layers except the data layer, loss layer or output layer. To avoid
-redundant configurations for the shared layers, users can use the `exclude`
-field to filter out a layer in the neural net, e.g., the following layer will be
-filtered when creating the testing `NeuralNet`.
-
-
-    layer {
-      ...
-      exclude : kTest # filter this layer for creating test net
-    }
-
-
-
-## Neural net partitioning
-
-A neural net can be partitioned in different ways to distribute the training
-over multiple workers.
-
-### Batch and feature dimension
-
-<img src="../_static/images/partition_fc.png" align="center" width="400px"/>
-<span><strong>Figure 4 - Partitioning of a fully connected layer.</strong></span>
-
-
-Every layer's feature blob is considered a matrix whose rows are feature
-vectors. Thus, one layer can be split on two dimensions. Partitioning on
-dimension 0 (also called batch dimension) slices the feature matrix by rows.
-For instance, if the mini-batch size is 256 and the layer is partitioned into 2
-sub-layers, each sub-layer would have 128 feature vectors in its feature blob.
-Partitioning on this dimension has no effect on the parameters, as every
-[Param](param.html) object is replicated in the sub-layers. Partitioning on dimension
-1 (also called feature dimension) slices the feature matrix by columns. For
-example, suppose the original feature vector has 50 units, after partitioning
-into 2 sub-layers, each sub-layer would have 25 units. This partitioning may
-result in [Param](param.html) objects being split, as shown in
-Figure 4. Both the bias vector and weight matrix are
-partitioned into two sub-layers.
-
-
-### Partitioning configuration
-
-There are 4 partitioning schemes, whose configurations are given below,
-
-  1. Partitioning each single layer into sub-layers on the batch dimension (see
-  below). It is enabled by configuring the partition dimension of the layer to
-  0, e.g.,
-
-          # with other fields omitted
-          layer {
-            partition_dim: 0
-          }
-
-  2. Partitioning each single layer into sub-layers on the feature dimension (see
-  below).  It is enabled by configuring the partition dimension of the layer to
-  1, e.g.,
-
-          # with other fields omitted
-          layer {
-            partition_dim: 1
-          }
-
-  3. Partitioning all layers into different subsets. It is enabled by
-  configuring the location ID of a layer, e.g.,
-
-          # with other fields omitted
-          layer {
-            location: 1
-          }
-          layer {
-            location: 0
-          }
-
-
-  4. Hybrid partitioning of strategy 1, 2 and 3. The hybrid partitioning is
-  useful for large models. An example application is to implement the
-  [idea proposed by Alex](http://arxiv.org/abs/1404.5997).
-  Hybrid partitioning is configured like,
-
-          # with other fields omitted
-          layer {
-            location: 1
-          }
-          layer {
-            location: 0
-          }
-          layer {
-            partition_dim: 0
-            location: 0
-          }
-          layer {
-            partition_dim: 1
-            location: 0
-          }
-
-Currently SINGA supports strategy-2 well. Other partitioning strategies are
-under test and will be released in a later version.
-
-## Parameter sharing
-
-Parameters can be shared in two cases,
-
-  * sharing parameters among layers via user configuration. For example, the
-  visible layer and hidden layer of an RBM share the weight matrix, which is configured through
-  the `share_from` field as shown in the above RBM configuration. The
-  configurations must be the same (except the name) for shared parameters.
-
-  * due to neural net partitioning, some `Param` objects are replicated into
-  different workers, e.g., partitioning one layer on batch dimension. These
-  workers share parameter values. SINGA controls this kind of parameter
-  sharing automatically, users do not need to do any configuration.
-
-  * the `NeuralNet` for training and testing (and validation) share most layers,
-  thus share `Param` values.
-
-If the shared `Param` instances reside in the same process (possibly in different
-threads), they use the same chunk of memory space for their values. But they
-have different memory spaces for their gradients. In fact, their
-gradients will be averaged by the stub or server.
-
-## Advanced user guide
-
-### Creation
-
-    static NeuralNet* NeuralNet::Create(const NetProto& np, Phase phase, int num);
-
-The above function creates a `NeuralNet` for a given phase, and returns a
-pointer to the `NeuralNet` instance. The phase is in {kTrain,
-kValidation, kTest}. `num` is used for net partitioning which indicates the
-number of partitions.  Typically, a training job includes three neural nets for
-training, validation and test phase respectively. The three neural nets share most
-layers except the data layer, loss layer or output layer. The `Create`
-function takes in the full net configuration including layers for training,
-validation and test.  It removes layers for phases other than the specified
-phase based on the `exclude` field in
-[layer configuration](layer.html):
-
-    layer {
-      ...
-      exclude : kTest # filter this layer for creating test net
-    }
-
-The filtered net configuration is passed to the constructor of `NeuralNet`:
-
-    NeuralNet::NeuralNet(NetProto netproto, int npartitions);
-
-The constructor first creates a graph representing the net structure in
-
-    Graph* NeuralNet::CreateGraph(const NetProto& netproto, int npartitions);
-
-Next, it creates a layer for each node and connects layers if their nodes are
-connected.
-
-    void NeuralNet::CreateNetFromGraph(Graph* graph, int npartitions);
-
-Since the `NeuralNet` instance may be shared among multiple workers, the
-`Create` function returns a pointer to the `NeuralNet` instance.
-
-### Parameter sharing
-
- `Param` sharing
-is enabled by first sharing the Param configuration (in `NeuralNet::Create`)
-to create two similar (e.g., the same shape) Param objects, and then calling
-(in `NeuralNet::CreateNetFromGraph`),
-
-    void Param::ShareFrom(const Param& from);
-
-It is also possible to share `Param`s of two nets, e.g., sharing parameters of
-the training net and the test net,
-
-    void NeuralNet::ShareParamsFrom(NeuralNet* other);
-
-It will call `Param::ShareFrom` for each Param object.
-
-### Access functions
-`NeuralNet` provides a couple of access functions to get the layers and params
-of the net:
-
-    const std::vector<Layer*>& layers() const;
-    const std::vector<Param*>& params() const;
-    Layer* name2layer(string name) const;
-    Param* paramid2param(int id) const;
-
-
-### Partitioning
-
-
-#### Implementation
-
-SINGA partitions the neural net in the `CreateGraph` function, which creates one
-node for each (partitioned) layer. For example, if one layer's partition
-dimension is 0 or 1, then it creates `npartitions` nodes for it; if the
-partition dimension is -1, a single node is created, i.e., no partitioning.
-Each node is assigned a partition (or location) ID. If the original layer is
-configured with a location ID, then the ID is assigned to each newly created node.
-These nodes are connected according to the connections of the original layers.
-Some connection layers will be added automatically.
-For instance, if two connected sub-layers are located at two
-different workers, then a pair of bridge layers is inserted to transfer the
-feature (and gradient) blob between them. When two layers are partitioned on
-different dimensions, a concatenation layer which concatenates feature rows (or
-columns) and a slice layer which slices feature rows (or columns) would be
-inserted. These connection layers help make the network communication and
-synchronization transparent to the users.
-
-#### Dispatching partitions to workers
-
-Each (partitioned) layer is assigned a location ID, based on which it is dispatched to one
-worker. Particularly, the pointer to the `NeuralNet` instance is passed
-to every worker within the same group, but each worker only computes over the
-layers that have the same partition (or location) ID as the worker's ID.  When
-every worker computes the gradients of the entire model parameters
-(strategy-2), we refer to this process as data parallelism.  When different
-workers compute the gradients of different parameters (strategy-3 or
-strategy-1), we call this process model parallelism.  The hybrid partitioning
-leads to hybrid parallelism where some workers compute the gradients of the
-same subset of model parameters while other workers compute on different model
-parameters.  For example, to implement the hybrid parallelism for the
-[DCNN model](http://arxiv.org/abs/1404.5997), we set `partition_dim = 0` for
-lower layers and `partition_dim = 1` for higher layers.
-
diff --git a/doc/en/docs/notebook/README.md b/doc/en/docs/notebook/README.md
deleted file mode 100644
index 14b7333..0000000
--- a/doc/en/docs/notebook/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-These are some examples in IPython notebooks.
-
-You can open them in [notebook viewer](http://nbviewer.jupyter.org/github/apache/incubator-singa/blob/master/doc/en/docs/notebook/index.ipynb).
diff --git a/doc/en/docs/notebook/cnn.ipynb b/doc/en/docs/notebook/cnn.ipynb
deleted file mode 100644
index 64a3a25..0000000
--- a/doc/en/docs/notebook/cnn.ipynb
+++ /dev/null
@@ -1,907 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Classify images from MNIST using LeNet"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Dataset\n",
-    "\n",
-    "Download the [dataset](http://deeplearning.net/data/mnist/mnist.pkl.gz) to your workspace (i.e. the notebook folder)."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import cPickle, gzip\n",
-    "\n",
-    "# Load the dataset\n",
-    "f = gzip.open('mnist.pkl.gz', 'rb')\n",
-    "train_set, valid_set, _ = cPickle.load(f)\n",
-    "f.close()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(50000, 784) (50000,)\n",
-      "(10000, 784) (10000,)\n"
-     ]
-    }
-   ],
-   "source": [
-    "print train_set[0].shape, train_set[1].shape\n",
-    "print valid_set[0].shape, valid_set[1].shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "train_x = np.reshape(train_set[0], (50000, 1, 28, 28)).astype(np.float32, copy=False)\n",
-    "train_y = np.array(train_set[1]).astype(np.int32, copy=False)\n",
-    "valid_x = np.reshape(valid_set[0], (10000, 1, 28, 28))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<matplotlib.image.AxesImage at 0x7f0747e21410>"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADn9JREFUeJzt3X9sXfV5x/HPU8dxlhDauCmeSzMSIC3QsIbtKoCIgImR\npQgpoKqhUVWljDVdC3RsmQTLpjWb2JRNLVXKGJJZsyQVv0oLIn+wVmBV0GrgYbIQfpVfwV0TjE1w\nIYHSxLGf/eGTygXf73XuPfeeaz/vl2T53vOcc8+jk3x87r3fe8/X3F0A4vlA0Q0AKAbhB4Ii/EBQ\nhB8IivADQRF+ICjCDwRF+IGgCD8Q1IxG7mymtfkszWnkLoFQfq13dNgP2WTWrSn8ZrZS0mZJLZL+\nw903pdafpTk62y6qZZcAEnq8e9LrVv2038xaJN0i6dOSzpC0xszOqPbxADRWLa/5l0l6yd33uPth\nSXdJWpVPWwDqrZbwnyjpF+Pu782W/RYzW2dmvWbWO6xDNewOQJ7q/m6/u3e5e8ndS61qq/fuAExS\nLeHfJ2nBuPsfy5YBmAJqCf/jkhab2SIzmynpc5J25NMWgHqreqjP3Y+Y2TWSfqSxob4t7v5Mbp0B\nqKuaxvnd/QFJD+TUC4AG4uO9QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ER\nfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANB\nEX4gKMIPBFXTLL1m1ifpoKQRSUfcvZRHU8iPzUj/E7d8ZH5d9//8Xy8sWxuZPZrc9qRTBpP12V+1\nZP21m2aWre0s3Z3cdv/IO8n62fesT9ZP/avHkvVmUFP4M3/k7vtzeBwADcTTfiCoWsPvkh4ysyfM\nbF0eDQFojFqf9i93931mdoKkB83sZ+7+yPgVsj8K6yRplmbXuDsAeanpzO/u+7Lfg5Luk7RsgnW6\n3L3k7qVWtdWyOwA5qjr8ZjbHzOYevS1phaSn82oMQH3V8rS/Q9J9Znb0ce5w9x/m0hWAuqs6/O6+\nR9Kncuxl2mo5fXGy7m2tyfqrF3woWX/3nPJj0u0fTI9X/+RT6fHuIv3Xr+Ym6//ybyuT9Z4z7yhb\ne2X43eS2mwYuTtY/+hNP1qcChvqAoAg/EBThB4Ii/EBQhB8IivADQeXxrb7wRi78g2T9pq23JOsf\nby3/1dPpbNhHkvW/v/mLyfqMd9LDbefec03Z2tx9R5Lbtu1PDwXO7u1J1qcCzvxAUIQfCIrwA0ER\nfiAowg8ERfiBoAg/EBTj/Dloe/7VZP2JXy9I1j/eOpBnO7la339Osr7n7fSlv7ee8v2ytbdG0+P0\nHd/+72S9nqb+F3Yr48wPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0GZe+NGNI+3dj/bLmrY/prF0JXn\nJusHVqYvr92y+7hk/cmv3nzMPR114/7fT9YfvyA9jj/y5lvJup9b/urufV9LbqpFa55Mr4D36fFu\nHfCh9NzlGc78QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUxXF+M9si6VJJg+6+JFvWLuluSQsl9Ula\n7e6/rLSzqOP8lbTM/3CyPvLGULL+yh3lx+qfOX9Lcttl/3xtsn7CLcV9px7HLu9x/q2S3jsR+g2S\nut19saTu7D6AKaRi+N39EUnvPfWskrQtu71N0mU59wWgzqp9zd/h7v3Z7dckdeTUD4AGqfkNPx97\n06DsGwdmts7Mes2sd1iHat0dgJxUG/4BM+uUpOz3YLkV3b3L3UvuXmpVW5W7A5C3asO/Q9La7PZa\nSffn0w6ARqkYfjO7U9Kjkj5hZnvN7CpJmyRdbGYvSvrj7D6AKaTidfvdfU2ZEgP2ORnZ/0ZN2w8f\nmFn1tp/8/LPJ+uu3tqQfYHSk6n2jWHzCDwiK8ANBEX4gKMIPBEX4gaAIPxAUU3RPA6df/0LZ2pVn\npkdk//Ok7mT9gs9enazPvfuxZB3NizM/EBThB4Ii/EBQhB8IivADQRF+ICjCDwTFOP80kJom+42v\nnJ7c9v92vJus33Dj9mT9b1Zfnqz7/36wbG3BPz2a3FYNnD4+Is78QFCEHwiK8ANBEX4gKMIPBEX4\ngaAIPxBUxSm688QU3c1n6E/PTdZv//o3kvVFM2ZVve9Pbr8mWV98W3+yfmRPX9X7nq7ynqIbwDRE\n+IGgCD8QFOEHgiL8QFCEHwiK8ANBVRznN7Mtki6VNOjuS7JlGyV9SdLr2Wob3P2BSjtjnH/q8fOW\nJuvHb9qbrN958o+q3vdpP/6zZP0T/1D+OgaSNPLinqr3PVXlPc6/VdLKCZZ/y92XZj8Vgw+guVQM\nv7s/ImmoAb0AaKBaXvNfa2a7zWyLmc3LrSMADVFt+G+VdLKkpZL6JX2z3Ipmts7Mes2sd1iHqtwd\ngLxVFX53H3D3EXcflXSbpGWJdbvcveTupVa1VdsngJxVFX4z6xx393JJT+fTDoBGqXjpbjO7U9KF\nkuab2V5JX5d0oZktleSS+iR9uY49AqgDvs+PmrR0nJCsv3rFqWVrPddvTm77gQpPTD//yopk/a3l\nbyTr0xHf5wdQEeEHgiL8QFCEHwiK8ANBEX4gKIb6UJjv7U1P0T3bZibrv/LDyfql115X/rHv60lu\nO1Ux1AegIsIPBEX4gaAIPxAU4QeCIvxAUIQfCKri9/kR2+jy9KW7X/5seoruJUv7ytYqjeNXcvPQ\nWcn67Pt7a3r86Y4zPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ExTj/NGelJcn6C19Lj7Xfdt62ZP38\nWenv1NfikA8n648NLUo/wGh/jt1MP5z5gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiCoiuP8ZrZA0nZJ\nHZJcUpe7bzazdkl3S1ooqU/Sanf/Zf1ajWvGopOS9Zev/GjZ2sYr7kpu+5nj9lfVUx42DJSS9Yc3\nn5Osz9uWvu4/0iZz5j8iab27nyHpHElXm9kZkm6Q1O3uiyV1Z/cBTBEVw+/u/e6+M7t9UNJzkk6U\ntErS0Y9/bZN0Wb2aBJC/Y3rNb2YLJZ0lqUdSh7sf/fzkaxp7WQBgiph0+M3sOEk/kHSdux8YX/Ox\nCf8mnPTPzNaZWa+Z9Q7rUE3NAsjPpMJvZq0aC/7t7n5vtnjAzDqzeqekwYm2dfcudy+5e6lVbXn0\nDCAHFcNvZibpO5Kec/ebxpV2SFqb3V4r6f782wNQL5P5Su95kr4g6Skz25Ut2yBpk6TvmdlVkn4u\naXV9Wpz6Ziz8vWT9rT/sTNav+McfJut//qF7k/V6Wt+fHo579N/LD+e1b/2f5LbzRhnKq6eK4Xf3\nn0oqN9/3Rfm2A6BR+IQfEBThB4Ii/EBQhB8IivADQRF+ICgu3T1JMzp/t2xtaMuc5LZfWfRwsr5m\n7kBVPeXhmn3Lk/Wdt6an6J7/
/aeT9faDjNU3K878QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUmHH+\nw3+Svkz04b8cStY3nPpA2dqK33mnqp7yMjDybtna+TvWJ7c97e9+lqy3v5kepx9NVtHMOPMDQRF+\nICjCDwRF+IGgCD8QFOEHgiL8QFBhxvn7Lkv/nXvhzHvqtu9b3jwlWd/88Ipk3UbKXTl9zGk3vlK2\ntnigJ7ntSLKK6YwzPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EZe6eXsFsgaTtkjokuaQud99sZhsl\nfUnS69mqG9y9/JfeJR1v7X62Mas3UC893q0DPpT+YEhmMh/yOSJpvbvvNLO5kp4wswez2rfc/RvV\nNgqgOBXD7+79kvqz2wfN7DlJJ9a7MQD1dUyv+c1soaSzJB39zOi1ZrbbzLaY2bwy26wzs14z6x3W\noZqaBZCfSYffzI6T9ANJ17n7AUm3SjpZ0lKNPTP45kTbuXuXu5fcvdSqthxaBpCHSYXfzFo1Fvzb\n3f1eSXL3AXcfcfdRSbdJWla/NgHkrWL4zcwkfUfSc+5+07jlneNWu1xSerpWAE1lMu/2nyfpC5Ke\nMrNd2bINktaY2VKNDf/1SfpyXToEUBeTebf/p5ImGjdMjukDaG58wg8IivADQRF+ICjCDwRF+IGg\nCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUxUt357ozs9cl/XzcovmS9jesgWPTrL01\na18SvVUrz95OcvePTGbFhob/fTs363X3UmENJDRrb83al0Rv1SqqN572A0ERfiCoosPfVfD+U5q1\nt2btS6K3ahXSW6Gv+QEUp+gzP4CCFBJ+M1tpZs+b2UtmdkMRPZRjZn1m9pSZ7TKz3oJ72WJmg2b2\n9Lhl7Wb2oJm9mP2ecJq0gnrbaGb7smO3y8wuKai3BWb2YzN71syeMbO/yJYXeuwSfRVy3Br+tN/M\nWiS9IOliSXslPS5pjbs/29BGyjCzPkkldy98TNjMzpf0tqTt7r4kW/avkobcfVP2h3Oeu1/fJL1t\nlPR20TM3ZxPKdI6fWVrSZZK+qAKPXaKv1SrguBVx5l8m6SV33+PuhyXdJWlVAX00PXd/RNLQexav\nkrQtu71NY/95Gq5Mb03B3fvdfWd2+6CkozNLF3rsEn0VoojwnyjpF+Pu71VzTfntkh4ysyfMbF3R\nzUygI5s2XZJek9RRZDMTqDhzcyO9Z2bppjl21cx4nTfe8Hu/5e6+VNKnJV2dPb1tSj72mq2Zhmsm\nNXNzo0wws/RvFHnsqp3xOm9FhH+fpAXj7n8sW9YU3H1f9ntQ0n1qvtmHB45Okpr9Hiy4n99oppmb\nJ5pZWk1w7Jppxusiwv+4pMVmtsjMZkr6nKQdBfTxPmY2J3sjRmY2R9IKNd/swzskrc1ur5V0f4G9\n/JZmmbm53MzSKvjYNd2M1+7e8B9Jl2jsHf+XJf1tET2U6etkSU9mP88U3ZukOzX2NHBYY++NXCXp\nw5K6Jb0o6SFJ7U3U23clPSVpt8aC1llQb8s19pR+t6Rd2c8lRR+7RF+FHDc+4QcExRt+QFCEHwiK\n8ANBEX4gKMIPBEX4gaAIPxAU4QeC+n8DZI6NXofNrQAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f077003c810>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "%matplotlib inline\n",
-    "import matplotlib.pyplot as plt\n",
-    "plt.imshow(train_x[0][0])"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Create the CNN model\n",
-    "\n",
-    "TODO: plot the net structure"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "conv1 (32, 14, 14)\n",
-      "relu1 (32, 14, 14)\n",
-      "conv2 (32, 7, 7)\n",
-      "relu2 (32, 7, 7)\n",
-      "pool (32, 4, 4)\n",
-      "flat (512,)\n",
-      "dense (10,)\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<singa.layer.Dense at 0x7f0735355990>"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from singa import net as ffnet\n",
-    "from singa.layer import Conv2D, MaxPooling2D, Dropout, Activation, Flatten, Dense\n",
-    "from singa import optimizer, loss, metric\n",
-    "from singa import layer\n",
-    "layer.engine = 'singacpp'\n",
-    "net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())\n",
-    "net.add(Conv2D('conv1', 32, 3, 2, input_sample_shape=(1,28,28)))\n",
-    "net.add(Activation('relu1'))\n",
-    "net.add(Conv2D('conv2', 32, 3, 2))\n",
-    "net.add(Activation('relu2'))\n",
-    "net.add(MaxPooling2D('pool', 3, 2))\n",
-    "net.add(Flatten('flat'))\n",
-    "net.add(Dense('dense', 10))\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Initialize the parameters\n",
-    "\n",
-    "* weight matrix - guassian distribution\n",
-    "* bias - 0"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "conv1_weight (32, 9) 0.0764843672514\n",
-      "conv1_bias (32,) 0.0\n",
-      "conv2_weight (32, 288) 0.0803024768829\n",
-      "conv2_bias (32,) 0.0\n",
-      "dense_weight (512, 10) 0.0795410946012\n",
-      "dense_bias (10,) 0.0\n"
-     ]
-    }
-   ],
-   "source": [
-    "for pname, pval in zip(net.param_names(), net.param_values()):\n",
-    "    if len(pval.shape) > 1:\n",
-    "        pval.gaussian(0, 0.1)\n",
-    "    else:\n",
-    "        pval.set_value(0)\n",
-    "    print pname, pval.shape, pval.l1()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Set up the optimizer and tensors"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from singa import tensor\n",
-    "#from singa.proto import core_pb2\n",
-    "from singa import device\n",
-    "from singa import utils\n",
-    "cpu = device.get_default_device()\n",
-    "\n",
-    "opt = optimizer.SGD(momentum=0.9, weight_decay=1e-4)\n",
-    "batch_size = 32\n",
-    "num_train_batch = train_x.shape[0] / batch_size\n",
-    "\n",
-    "tx = tensor.Tensor((batch_size, 1, 28, 28))\n",
-    "ty = tensor.Tensor((batch_size,), cpu , tensor.int32)\n",
-    "\n",
-    "# for progress bar\n",
-    "from tqdm import tnrange\n",
-    "idx = np.arange(train_x.shape[0], dtype=np.int32)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Conduct SGD\n",
-    "\n",
-    "1. process the training data multile time, each time is called on epoch; \n",
-    "2. for each epoch, read the data as mini-batches in random order\n",
-    "3. for each mini-batch, do BP and update the parameters  "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Epoch = 0, training loss = 0.292792, training accuracy = 0.906530\n",
-      "\n",
-      "Epoch = 1, training loss = 0.109958, training accuracy = 0.965589\n"
-     ]
-    }
-   ],
-   "source": [
-    "for epoch in range(2):\n",
-    "    np.random.shuffle(idx)\n",
-    "    loss, acc = 0.0, 0.0\n",
-    "    \n",
-    "    bar = tnrange(num_train_batch, desc='Epoch %d' % epoch)\n",
-    "    for b in bar:\n",
-    "        x = train_x[idx[b * batch_size: (b + 1) * batch_size]]\n",
-    "        y = train_y[idx[b * batch_size: (b + 1) * batch_size]]\n",
-    "        tx.copy_from_numpy(x)\n",
-    "        ty.copy_from_numpy(y)\n",
-    "        grads, (l, a) = net.train(tx, ty)\n",
-    "        loss += l\n",
-    "        acc += a\n",
-    "        for (s, p, g) in zip(net.param_names(), net.param_values(), grads):\n",
-    "            opt.apply_with_lr(epoch, 0.01, g, p, str(s), b)\n",
-    "        # update progress bar\n",
-    "        bar.set_postfix(train_loss=l, train_accuracy=a)\n",
-    "    print 'Epoch = %d, training loss = %f, training accuracy = %f' % (epoch, loss / num_train_batch, acc / num_train_batch)    \n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Save model to disk"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "net.save('checkpoint')"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load model from disk"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "collapsed": false,
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "NOTE: If your model was saved using pickle, then set use_pickle=True for loading it\n",
-      "conv2_bias\n",
-      "conv2_weight\n",
-      "dense_weight\n",
-      "conv1_bias\n",
-      "dense_bias\n",
-      "conv1_weight\n"
-     ]
-    }
-   ],
-   "source": [
-    "for pval in net.param_values():\n",
-    "    pval.set_value(0)\n",
-    "net.load('checkpoint')"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Do prediction"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from PIL import Image\n",
-    "img = Image.open('static/digit.jpg').convert('L')\n",
-    "img = img.resize((28,28))\n",
-    "img = np.array(img, dtype=np.float32)/255\n",
-    "img = tensor.from_numpy(img)\n",
-    "img.reshape((1,1,28,28))\n",
-    "y=net.predict(img)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[<matplotlib.lines.Line2D at 0x7f07356e4210>]"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAG7pJREFUeJzt3WuMY+dZB/D/Y3s8F/vsbOZ2nJ29zG5iuyyX0HZIy0VQ\nKKVJC4RKICVcKipQiGigIKQ2IAEfyhdUQFARulqVghCIqGojCNXS9EOh/YBasoHSNEntmU72mswZ\nz+zteO62Hz7YZ9bjjMdnxsc+9jn/nxLtnOMzPs9a4/+cfc/j9xVVBRERBUvE7wKIiMh7DHciogBi\nuBMRBRDDnYgogBjuREQBxHAnIgoghjsRUQAx3ImIAojhTkQUQDG/TjwxMaEzMzN+nZ6IqC+9+OKL\ny6o62eo438J9ZmYGFy9e9Ov0RER9SUQuuzmOwzJERAHEcCciCiCGOxFRADHciYgCqGW4i8hnRGRJ\nRL7V5HERkU+KyLyIfFNE3uZ9mUREdBBurtz/HsBD+zz+MIB07f/HAXyq/bKIiKgdLcNdVb8K4MY+\nhzwC4B+06msAjorIvV4VSEREB+fFmPs0gKt129dq+4hCqVSu4Jn/voLtcsXvUijEunpDVUQeF5GL\nInKxUCh089REXfPVuQKeevYlfPnbS36XQiHmRbhfB3Cibvt4bd+bqOp5VZ1V1dnJyZafniXqS99e\ntAEA+dqfRH7wItyfA/DBWtfMOwHcVtU3PHheor7khHrOYriTf1rOLSMi/wzgXQAmROQagD8GMAAA\nqnoOwAUA7wMwD2ANwIc6VSxRP8hZRQBAnuFOPmoZ7qr6WIvHFcCHPauIqI+VyhV8Z6mIaESwUFjF\nVqmCeIyfFaTu408dkYcu31jDVrmCH7pvHKWK4tLKqt8lUUgx3Ik85Iy3/8wDxwAAOd5UJZ8w3Ik8\nlLNsiADvPZtCRDjuTv5huBN5KG/ZODU2gtGRAcxMJHjlTr5huBN5KLdoI2MaAICsafDKnXzDcCfy\nyGapjEsra8imquGeMQ1cvrGGje2yz5VRGDHciTyyUFhFuaJ3r9xTBlSB+aWiz5VRGDHciTziDME4\n4Z4xkwDYMUP+YLgTeSS3aCMWEZyeSAAATo0nEI9GOO5OvmC4E3kkb9k4M5nY+UTqQDSCM5MJzjFD\nvmC4E3kkbxV3hmQc2ZSBOYtj7tR9DHciD6xtlXDlxhqyDeGeMQ1cv7UOe2Pbp8oorBjuRB5wrs7T\ne4Q7UL2qJ+omhjuRB5xxdafH3ZHdCXeOu1N3MdyJPDBn2RiMRXBybGTX/uP3DGN4IMpwp65juBN5\nIGcVkTaTiEZk1/5IRJAxkwx36jqGO5EH8nVzyjTKmAZyixxzp+5iuBO16fbaNhbvbOwb7svFTawU\nN7tcGYUZw52oTfml2s3UZuGeYscMdR/DnahNO3PKpPYOdyf055Y47k7dw3AnalN+0UZyMIZjo0N7\nPm4eGcSRoRgnEKOuYrgTtSln2ciYSYjIno+LCLIpLtxB3cVwJ2qDqu5afamZtGkgt2hDVbtUGYUd\nw52oDcvFLdxc224Z7lnTwJ2NEqw77Jih7mC4E7Vhrsm0A40ynIaAuozhTtSGXMPqS804qzIx3Klb\nGO5EbchbNsYScUwk4/seN54cxERykB0z1DUMd6I25BZtpKead8rU4xwz1E0Md6JDUlXMWcWW4+2O\njGkgbxVRqbBjhjqP4U50SG/c3oC9WWo53u7Ipgysb5dx/dZ6hysjYrgTHVqzBTqacX4JcNydusFV\nuIvIQyKSE5F5EXlqj8dHReTfROT/RORlEfmQ96US9ZZ8LaQzU27Dvdoxk+O4O3VBy3AXkSiApwE8\nDOAsgMdE5GzDYR8G8IqqPgDgXQD+XET2bx8g6nM5y4Z5ZBCjIwOujjeGBnBsdIg3Vakr3Fy5Pwhg\nXlUXVHULwDMAHmk4RgEYUm0ZSAK4AaDkaaVEPSZvtZ52oFEmZXBYhrrCTbhPA7hat32ttq/eXwP4\nLgCvA3gJwEdUteJJhUQ9qFxRzC8Vm87h3kzWNLBQWEWpzLcHdZZXN1TfC+AbAI4B+H4Afy0iRxoP\nEpHHReSiiFwsFAoenZqo+67eWMPGdqXpHO7NZEwDW+UKLq2sdagyoio34X4dwIm67eO1ffU+BOBZ\nrZoH8BqAtzQ+kaqeV9VZVZ2dnJw8bM1EvnM77UCjbIpzzFB3uAn3FwCkReR07SbpowCeazjmCoB3\nA4CImACyABa8LJSolzidMump5IG+777JJETYDkmdF2t1gKqWRORJAM8DiAL4jKq+LCJP1B4/B+Dj\nAP5eRF4CIAA+pqrLHaybyFc5y8aJsWEkBlu+hXYZjkdxamyEV+7Uca5+MlX1AoALDfvO1X39OoCf\n8rY0ot41Zx38ZqqjOg0Bw506i59QJTqgrVIF3ykUDzze7simDFxaWcPGdtnjyojuYrgTHdCllVWU\nKnrocM+YBsoVxUJh1ePKiO5iuBMdkHMztJ1wB9gxQ53FcCc6oLxlIxoRnJlMHOr7T08kEIsI55ih\njmK4Ex1Q3rIxMz6CoYHoob4/HovgzGRiZ/1Vok5guBMdUP4AC3Q0kzENXrlTRzHciQ5gY7uMSyur\nSLuc5reZrGng6o11rG5yfj3qDIY70QHMLxWh6n6BjmbStZuqc0tFL8oiehOGO9EBtNsp49iZY4bT\nEFCHMNyJDiC/ZCMejWBmfKSt5zk5NoLBWITtkNQxDHeiA8gv2rhvKolYtL23TjQiSJtJ3lSljmG4\nEx1A3irurIXaLs4xQ53EcCdyyd7YxvVb622PtzsypgHrziZurW158nxE9RjuRC7lrWpny2Fng2yU\n3ZmGgB0z5D2GO5FLzidK222DdGS4KhN1EMOdyKWcZWMkHsX00WFPnu/Y6BCSgzGGO3UEw53Ipbxl\nIz2VRCQinjyfiCBjJrnkHnUEw53Ipdzi4RfoaMbpmFFVT5+XiOFO5MKN1S0sFzc9G293ZEwDN9e2\nUShuevq8RAx3IheccXGvr9ydXxZz7JghjzHciVzIe9wp43B+WXDcnbzGcCdyIbdo48hQDFPGoKfP\nO5GMYywRZ8cMeY7hTuRC3rKRTRkQ8aZTxiEiSE9xjhnyHsOdqAVVrc0p4+2QjCObMjBnFdkxQ55i\nuBO1sGRv4vb6tufj7Y6MaaC4WcLrtzc68vwUTgx3oha8WqCjGS7cQZ3AcCdqoVNtkI5MbT1WjruT\nlxjuRC3kFm1MJAcxloh35PlHRwZgHhnklTt5iuFO1EJ+qYhsypsFOprJmAbySwx38g7DnWgflYpi\nzrI7NiTjyJrVjplyhR0z5A2GO9E+rt9ax9pW2bMFOprJpAxsliq4cmOto+eh8HAV7iLykIjkRGRe\nRJ5qcsy7ROQbIvKyiHzF2zKJ/OF0yqS7cOVefz6idrUMdxGJAngawMMAzgJ4TETONhxzFMDfAPhZ\nVf1uAL/QgVqJui630ynT2TH3
+6eqz89pCMgrbq7cHwQwr6oLqroF4BkAjzQc84sAnlXVKwCgqkve\nlknkjznLxvTRYRhDAx09T2IwhhNjwwx38oybcJ8GcLVu+1ptX70MgHtE5D9F5EUR+eBeTyQij4vI\nRRG5WCgUDlcxURflrGLHr9od2drCHURe8OqGagzA2wG8H8B7AfyhiGQaD1LV86o6q6qzk5OTHp2a\nqDNK5Qq+s1TcWci60zKmgYXCKrZKla6cj4LNTbhfB3Cibvt4bV+9awCeV9VVVV0G8FUAD3hTIpE/\nLq2sYatc2fkEaadlUwZKFcVry6tdOR8Fm5twfwFAWkROi0gcwKMAnms45l8B/IiIxERkBMA7ALzq\nbalE3dWpBTqaSXMaAvJQrNUBqloSkScBPA8gCuAzqvqyiDxRe/ycqr4qIl8E8E0AFQCfVtVvdbJw\nok7LWzZE7naydNqZyQSiEcEcw5080DLcAUBVLwC40LDvXMP2JwB8wrvSiPyVt2zMjCcwNBDtyvmG\nBqKYGR9hrzt5gp9QJWoit2gj3aWrdkc2xY4Z8gbDnWgPG9tlXFpZ69p4uyNjGrh8Yw3rW+WunpeC\nh+FOtIeFwirKFe34hGGNsqYBVWB+qdjV81LwMNyJ9jC31N1OGYczhw2HZqhdDHeiPeQWbQxEBTPj\nia6ed2Z8BPFohOFObWO4E+0hb9k4PZFAPNbdt0gsGsF9U0n2ulPbGO5Ee8h1YYGOZrJmkkvuUdsY\n7kQNVjdLuHpjveMLdDSTSRl4/fYG7mxs+3J+CgaGO1EDp1OlWxOGNXLmspmz2DFDh8dwJ2rgjHf7\ndeXudOjwpiq1g+FO1CC/aGMwFsGJsRFfzj99dBgj8SinIaC2MNyJGuQsG2kziWhEfDl/JCJIc+EO\nahPDnahB3sdOGUfWTDLcqS0Md6I6t9e2Yd3Z9G283ZExDSwXt7BS3PS1DupfDHeiOvnatAN+dco4\nMjvTELBjhg6H4U5Ux7mJ6fuwDDtmqE0Md6I6ectGcjCGY6NDvtYxZQxidHiA0xDQoTHcierkFm1k\nzCRE/OmUcYgIsqbBaQjo0BjuRDWqirxld32a32bStY4ZVfW7FOpDDHeimuXiFm6ubfs+3u7Ipgzc\n2SjBusOOGTo4hjtRjXPzslfC3amD4+50GAx3oppe6ZRx7LRDctydDoHhTlSTt2yMJeKYSMb9LgUA\nMJaIY9IY5JU7HQrDnaimOu2A/50y9TJmEnMMdzoEhjsRnE6Zou/TDjTKmAbyVhGVCjtm6GAY7kQA\nXr+9geJmCekeC/esaWB9u4xrN9f9LoX6DMOdCHdvWvZKj7vDmeOG4+50UAx3ItS1QU71Vrinp5IA\nOMcMHRzDnQjVK+PUkSGMjgz4XcouxtAApo8OM9zpwBjuRKh1yvTYkIwjYya55B4dmKtwF5GHRCQn\nIvMi8tQ+x/2AiJRE5Oe9K5Gos8oVxZxVRKY2BNJrMikDC4VVbJcrfpdCfaRluItIFMDTAB4GcBbA\nYyJytslxfwrgS14XSdRJV26sYbNU6dkr96xpYKtcweWVVb9LoT7i5sr9QQDzqrqgqlsAngHwyB7H\n/RaAzwNY8rA+oo5zxrN7rcfdwVWZ6DDchPs0gKt129dq+3aIyDSADwD4lHelEXWH0waZNntzWOb+\nqSQiAo6704F4dUP1LwF8TFX3HRQUkcdF5KKIXCwUCh6dmqg9OcvGybERjMRjfpeyp6GBKE6NJ9gx\nQwfi5qf5OoATddvHa/vqzQJ4pjYnxwSA94lISVX/pf4gVT0P4DwAzM7O8vPU1BOcOWV6WcZM8oNM\ndCBurtxfAJAWkdMiEgfwKIDn6g9Q1dOqOqOqMwA+B+A3G4OdqBdtlSpYKKz2zDS/zWRNA5eWV7Gx\nXfa7FOoTLcNdVUsAngTwPIBXAXxWVV8WkSdE5IlOF0jUSZdWVlGqaM9NO9AokzJQUWChwI4ZcsfV\nIKOqXgBwoWHfuSbH/mr7ZRF1R68t0NHM3Y4ZG2ePHfG5GuoH/IQqhVreshGNCM5MJvwuZV8z4wkM\nRIXj7uQaw51CLbdoY2Z8BIOxqN+l7Csei+DMRJJL7pFrDHcKtbxl9/x4uyOTMnjlTq4x3Cm0NrbL\nuHxjrefH2x1ZM4lrN9exulnyuxTqAwx3Cq35pSJUe3fagUbOKlFzS5yGgFpjuFNo5XamHeiPcHd+\nCXHcndxguFNo5S0b8WgEM+MjfpfiyomxEQwNRDjuTq4w3Cm0cpaN+6aSiEX7420QjQjSUwbnmCFX\n+uOnmqgD5qwisj0+p0yjjMlwJ3cY7hRK9sY2rt9a79kFOprJmElYdzZxa23L71KoxzHcKZSchS8y\nU30W7iku3EHuMNwplHZWX+qzK3enY4Y3VakVhjuFUm7Rxkg8iumjw36XciD3jg7BGIyxHZJaYrhT\nKM0t2UibBiIR8buUAxERZFK8qUqtMdwplHKL/dcp48iYSeQtG6pczIyaY7hT6KwUN7Fc3OybOWUa\nZUwDN9e2UShu+l0K9TCGO4XOTqdMn4b73WkI2DFDzTHcKXT6tVPG4bRDsmOG9sNwp9DJWzZGhwcw\nZQz6XcqhTCQHMZ6IY47hTvtguFPo5C0bWdOASH91ytRLm0leudO+GO4UKqqK3KKNdJ92yjiypoH8\nIjtmqDmGO4WKdWcTdzZKfTve7sikDKxulXH91rrfpVCPYrhTqDhDGf3aKePY6Zjh0Aw1wXCnUJkL\nSLinTU4gRvtjuFOo5BZtTBqDGEvE/S6lLaPDA0gdGeIcM9QUw51CJW/ZyPT5zVRHJmWwY4aaYrhT\naFQqirxV7PshGUfWTGJuqYhyhR0z9GYMdwqNazfXsb5d3rkZ2e8ypoGtUgWXV1b9LoV6EMOdQsPp\nLOm3pfWayXJVJtoHw51CwxmfTk8FY8z9/trfg+2QtBeGO4VG3rIxfXQYxtCA36V4YiQew8mxEd5U\npT25CncReUhEciIyLyJP7fH4L4nIN0XkJRH5LxF5wPtSidqTWwxOp4wjU5uGgKhRy3AXkSiApwE8\nDOAsgMdE5GzDYa8B+DFV/V4AHwdw3utCidpRKlewUFgNzHi7I5tK4rXlVWyVKn6XQj3GzZX7gwDm\nVXVBVbcAPAPgkfoDVPW/VPVmbfNrAI57WyZRey6trGGrXAlMp4wjYxooVRSvLbNjhnZzE+7TAK7W\nbV+r7Wvm1wD8+14PiMjjInJRRC4WCgX3VRK1KR+QaQcaOX8fjrtTI09vqIrIj6Ma7h/b63FVPa+q\ns6o6Ozk56eWpifaVW7QRkbsdJkFxZjKBaEQ47k5vEnNxzHUAJ+q2j9f27SIi3wfg0wAeVtUVb8oj\n8kbesnFqPIGhgajfpXhqMBbF6YkEr9zpTdxcub8AIC0ip0UkDuBRAM/VHyAiJwE8C+BXVDXvfZlE\n7QnSnDKNsqbBJffoTVqGu6qWADwJ4HkArwL4rKq+LCJPiMgTtcP+CMA4gL8RkW+IyMWOV
Ux0QBvb\nZVxaWQvczVRHxjRw+cYa1rfKfpdCPcTNsAxU9QKACw37ztV9/esAft3b0oi8sVBYRbmigWuDdGTM\nJFSB+aUivvf4qN/lUI/gJ1Qp8ILaKeNwfmlx3J3qMdwp8HKWjYGoYGY84XcpHXFqbATxWIRzzNAu\nDHcKvDnLxpmJJOKxYP64x6IR3D+ZZLjTLsH8aSeqk7PswI63O7IpzjFDuzHcKdBWN0u4emMd2YC2\nQTrSZhKv397AnY1tv0uhHsFwp0CbW6ouZJEO6M1Uh9PmyX53cjDcKdCcoYqg9rg7duaYWeSqTFTF\ncKdAy1s2hgYiODE24ncpHTV9dBiJeJQ3VWkHw50CLWfZSE8ZiEbE71I6KhIRpE2D4U47GO4UaNU5\nZYI9JOPImGyHpLsY7hRYt9a2YN3ZDOyEYY0ypoHl4haWi5t+l0I9gOFOgZW3qjcXg97j7sjW/p68\neieA4U4B5oRc0DtlHHfbIdkxQwx3CrC8ZcMYjOHe0SG/S+mKSWMQR0cGOIEYAWC4U4DlFm2kzSRE\ngt0p4xARZKY4DQFVMdwpkFQVecveGYcOi0wqiZxlQ1X9LoV8xnCnQCoUN3FzbTs0bZCOrGnA3ihh\n8c6G36WQzxjuFEjOTcWw3Ex1OL/M8rypGnoMdwqkXG3cOSxtkI6dcOe4e+gx3CmQ8paNsUQcE8lB\nv0vpqnsScUwag+yYIYY7BVPOskPzydRGWc4xQ2C4UwCpKuasYujG2x0Z08CcVUSlwo6ZMGO4U+C8\nfnsDxc1S6MbbHdlUEuvbZVy7ue53KeQjhjsFTlgW6GhmZ+EODs2EGsOdAscJtaAvrddM2uQEYsRw\npwDKL9pIHRnC6PCA36X4IjkYw/TR4Z12UAonhjsFTs6yQzve7sim2DETdgx3CpRyRTG/VEQ2pG2Q\njoxpYKGwiu1yxe9SyCcMdwqUKzfWsFmqhG5OmUbZVBJb5Qour6z6XQr5hOFOgbIz7UDIwz09VeuY\nWeQcM2HlKtxF5CERyYnIvIg8tcfjIiKfrD3+TRF5m/elErWW3+mUCfewzP1TSUSE7ZBh1jLcRSQK\n4GkADwM4C+AxETnbcNjDANK1/x8H8CmP6yRyJWfZODk2gpF4zO9SfDU0EMXMeIITiIWYmyv3BwHM\nq+qCqm4BeAbAIw3HPALgH7TqawCOisi9HtdK1NKcZYd+SMaRMQ3klxjuYeXm8mYawNW67WsA3uHi\nmGkAb7RV3R6+ki/gT77witdP29fanUGkl1btaXdJvIVCEe85a3pUTX/LpAw8/8oi3vMXX/G7lJ6g\nqP6sa22jflsVUCgqteaixv1af/xe39vs6ybf+xs/egYffegtHf37dvXfriLyOKrDNjh58uShniM5\nGAv9eOpeBG2uE9oLy4x68Dvmu+49gg+8dbr9JwqAn/v+Y7i0vIpShe2QDoGg9h9EpPZndTsizmOy\ns0/qt6XJftQ/1nDMzjl273/w9HjH/65uwv06gBN128dr+w56DFT1PIDzADA7O3uot/LbT92Dt596\n+2G+lShUzkwm8cnH3up3GeQTN2PuLwBIi8hpEYkDeBTAcw3HPAfgg7WumXcCuK2qng/JEBGROy2v\n3FW1JCJPAngeQBTAZ1T1ZRF5ovb4OQAXALwPwDyANQAf6lzJRETUiqsxd1W9gGqA1+87V/e1Aviw\nt6UREdFh8ROqREQBxHAnIgoghjsRUQAx3ImIAojhTkQUQOLXR89FpADg8iG/fQLAsofl9Du+Hrvx\n9biLr8VuQXg9TqnqZKuDfAv3dojIRVWd9buOXsHXYze+HnfxtdgtTK8Hh2WIiAKI4U5EFED9Gu7n\n/S6gx/D12I2vx118LXYLzevRl2PuRES0v369cicion30Xbi3Wqw7TETkhIj8h4i8IiIvi8hH/K7J\nbyISFZH/FZEv+F2L30TkqIh8TkS+LSKvisgP+l2TX0Tkd2vvkW+JyD+LyJDfNXVaX4W7y8W6w6QE\n4PdU9SyAdwL4cMhfDwD4CIBX/S6iR/wVgC+q6lsAPICQvi4iMg3gtwHMqur3oDp1+aP+VtV5fRXu\ncLdYd2io6huq+j+1r21U37yhXWNORI4DeD+AT/tdi99EZBTAjwL4WwBQ1S1VveVvVb6KARgWkRiA\nEQCv+1xPx/VbuDdbiDv0RGQGwFsBfN3fSnz1lwA+CoCLhgKnARQA/F1tmOrTIpLwuyg/qOp1AH8G\n4AqAN1BdKe5L/lbVef0W7rQHEUkC+DyA31HVO37X4wcR+WkAS6r6ot+19IgYgLcB+JSqvhXAKoBQ\n3qMSkXtQ/Rf+aQDHACRE5Jf9rarz+i3cXS3EHSYiMoBqsP+Tqj7rdz0++mEAPysil1AdrvsJEflH\nf0vy1TUA11TV+Zfc51AN+zD6SQCvqWpBVbcBPAvgh3yuqeP6LdzdLNYdGiIiqI6pvqqqf+F3PX5S\n1d9X1eOqOoPqz8WXVTXwV2fNqOoigKsikq3tejeAV3wsyU9XALxTREZq75l3IwQ3l12todormi3W\n7XNZfvphAL8C4CUR+UZt3x/U1rwl+i0A/1S7EFpASBeuV9Wvi8jnAPwPqh1m/4sQfFKVn1AlIgqg\nfhuWISIiFxjuREQBxHAnIgoghjsRUQAx3ImIAojhTkQUQAx3IqIAYrgTEQXQ/wNXckf+AMbmGAAA\nAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f0747e45890>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "prob=tensor.to_numpy(y)[0]\n",
-    "plt.plot(range(10), prob)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Debug\n",
-    "\n",
-    "Print l1 norm or parameter and layer feature\n",
-    "\n",
-    "1. parameter initialization\n",
-    "2. learning rate\n",
-    "3. weight decay\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "conv1_weight (32, 9) 7.97165679932\n",
-      "conv1_bias (32,) 0.0\n",
-      "conv2_weight (32, 288) 8.00566577911\n",
-      "conv2_bias (32,) 0.0\n",
-      "dense_weight (512, 10) 7.92119693756\n",
-      "dense_bias (10,) 0.0\n",
-      "\n",
-      "\n",
-      "Epoch 0\n",
-      "-->conv1: 3.886751\n",
-      "conv1-->relu1: 2.299547\n",
-      "relu1-->conv2: 603.797852\n",
-      "conv2-->relu2: 295.446167\n",
-      "relu2-->pool: 955.442017\n",
-      "pool-->flat: 955.442017\n",
-      "flat-->dense: 284901.062500\n",
-      "-->dense: 0.311904\n",
-      "dense-->flat: 0.311904\n",
-      "flat-->pool: 0.086828\n",
-      "pool-->relu2: 0.050518\n",
-      "relu2-->conv2: 9.024708\n",
-      "conv2-->relu1: 2.790344\n",
-      "relu1-->conv1: 336.182007\n",
-      "\n",
-      " loss = 79.148743, params\n",
-      "conv1_weight 10.1786603928\n",
-      "conv1_bias 12.2705039978\n",
-      "conv2_weight 8.05525493622\n",
-      "conv2_bias 0.13293106854\n",
-      "dense_weight 8.18883609772\n",
-      "dense_bias 0.00134002871346\n",
-      "\n",
-      "\n",
-      "Epoch 1\n",
-      "-->conv1: 20.362625\n",
-      "conv1-->relu1: 4.780766\n",
-      "relu1-->conv2: 2066.139404\n",
-      "conv2-->relu2: 488.272614\n",
-      "relu2-->pool: 1144.980225\n",
-      "pool-->flat: 1144.980225\n",
-      "flat-->dense: 1392742.250000\n",
-      "-->dense: 0.286131\n",
-      "dense-->flat: 0.286131\n",
-      "flat-->pool: 0.084796\n",
-      "pool-->relu2: 0.034586\n",
-      "relu2-->conv2: 7.695237\n",
-      "conv2-->relu1: 2.117975\n",
-      "relu1-->conv1: 487.341522\n",
-      "\n",
-      " loss = 70.960945, params\n",
-      "conv1_weight 16.7161483765\n",
-      "conv1_bias 34.0985298157\n",
-      "conv2_weight 8.74045848846\n",
-      "conv2_bias 0.33584010601\n",
-      "dense_weight 9.48363685608\n",
-      "dense_bias 0.00340566551313\n",
-      "\n",
-      "\n",
-      "Epoch 2\n",
-      "-->conv1: 52.302490\n",
-      "conv1-->relu1: 10.072969\n",
-      "relu1-->conv2: 12706.870117\n",
-      "conv2-->relu2: 1381.310059\n",
-      "relu2-->pool: 1888.169067\n",
-      "pool-->flat: 1888.169067\n",
-      "flat-->dense: 4740897.500000\n",
-      "-->dense: 0.350905\n",
-      "dense-->flat: 0.350905\n",
-      "flat-->pool: 0.111108\n",
-      "pool-->relu2: 0.023244\n",
-      "relu2-->conv2: 7.156909\n",
-      "conv2-->relu1: 1.548402\n",
-      "relu1-->conv1: 516.079651\n",
-      "\n",
-      " loss = 73.690216, params\n",
-      "conv1_weight 30.0410804749\n",
-      "conv1_bias 60.256187439\n",
-      "conv2_weight 11.2280254364\n",
-      "conv2_bias 0.536676049232\n",
-      "dense_weight 13.0333833694\n",
-      "dense_bias 0.00553257204592\n",
-      "\n",
-      "\n",
-      "Epoch 3\n",
-      "-->conv1: 96.670937\n",
-      "conv1-->relu1: 2.220010\n",
-      "relu1-->conv2: 16051.786133\n",
-      "conv2-->relu2: 705.551208\n",
-      "relu2-->pool: 846.462280\n",
-      "pool-->flat: 846.462280\n",
-      "flat-->dense: 4615773.000000\n",
-      "-->dense: 0.454912\n",
-      "dense-->flat: 0.454912\n",
-      "flat-->pool: 0.146553\n",
-      "pool-->relu2: 0.012394\n",
-      "relu2-->conv2: 6.071361\n",
-      "conv2-->relu1: 0.411632\n",
-      "relu1-->conv1: 328.999054\n",
-      "\n",
-      " loss = 81.878014, params\n",
-      "conv1_weight 44.8700485229\n",
-      "conv1_bias 111.767730713\n",
-      "conv2_weight 14.0987415314\n",
-      "conv2_bias 0.919560790062\n",
-      "dense_weight 18.0457553864\n",
-      "dense_bias 0.00845981575549\n",
-      "\n",
-      "\n",
-      "Epoch 4\n",
-      "-->conv1: 165.898941\n",
-      "conv1-->relu1: 0.000018\n",
-      "relu1-->conv2: 1.004575\n",
-      "conv2-->relu2: 0.049885\n",
-      "relu2-->pool: 0.214831\n",
-      "pool-->flat: 0.214831\n",
-      "flat-->dense: 1963.049194\n",
-      "-->dense: 0.560196\n",
-      "dense-->flat: 0.560196\n",
-      "flat-->pool: 0.182800\n",
-      "pool-->relu2: 0.026711\n",
-      "relu2-->conv2: 10.339769\n",
-      "conv2-->relu1: 0.006092\n",
-      "relu1-->conv1: 11.064970\n",
-      "\n",
-      " loss = 45.199768, params\n",
-      "conv1_weight 58.6796875\n",
-      "conv1_bias 162.814819336\n",
-      "conv2_weight 16.8376598358\n",
-      "conv2_bias 1.47756028175\n",
-      "dense_weight 23.0595436096\n",
-      "dense_bias 0.01112665236\n",
-      "\n",
-      "\n",
-      "Epoch 5\n",
-      "-->conv1: 241.562866\n",
-      "conv1-->relu1: 0.000000\n",
-      "relu1-->conv2: 1.477560\n",
-      "conv2-->relu2: 0.000000\n",
-      "relu2-->pool: 0.000000\n",
-      "pool-->flat: 0.000000\n",
-      "flat-->dense: 0.011127\n",
-      "-->dense: 0.668576\n",
-      "dense-->flat: 0.668576\n",
-      "flat-->pool: 0.218311\n",
-      "pool-->relu2: 0.000000\n",
-      "relu2-->conv2: 0.000000\n",
-      "conv2-->relu1: 0.000000\n",
-      "relu1-->conv1: 0.000000\n",
-      "\n",
-      " loss = 2.299366, params\n",
-      "conv1_weight 71.2618331909\n",
-      "conv1_bias 208.757034302\n",
-      "conv2_weight 19.3444824219\n",
-      "conv2_bias 2.00746393204\n",
-      "dense_weight 27.6571712494\n",
-      "dense_bias 0.0137531962246\n",
-      "\n",
-      "\n",
-      "Epoch 6\n",
-      "-->conv1: 296.653534\n",
-      "conv1-->relu1: 0.000000\n",
-      "relu1-->conv2: 2.007464\n",
-      "conv2-->relu2: 0.000000\n",
-      "relu2-->pool: 0.000000\n",
-      "pool-->flat: 0.000000\n",
-      "flat-->dense: 0.013753\n",
-      "-->dense: 0.910015\n",
-      "dense-->flat: 0.910015\n",
-      "flat-->pool: 0.297148\n",
-      "pool-->relu2: 0.000000\n",
-      "relu2-->conv2: 0.000000\n",
-      "conv2-->relu1: 0.000000\n",
-      "relu1-->conv1: 0.000000\n",
-      "\n",
-      " loss = 2.301979, params\n",
-      "conv1_weight 82.6378097534\n",
-      "conv1_bias 250.104797363\n",
-      "conv2_weight 21.6229515076\n",
-      "conv2_bias 2.484375\n",
-      "dense_weight 31.8290367126\n",
-      "dense_bias 0.0165074821562\n",
-      "\n",
-      "\n",
-      "Epoch 7\n",
-      "-->conv1: 348.356262\n",
-      "conv1-->relu1: 0.000000\n",
-      "relu1-->conv2: 2.484375\n",
-      "conv2-->relu2: 0.000000\n",
-      "relu2-->pool: 0.000000\n",
-      "pool-->flat: 0.000000\n",
-      "flat-->dense: 0.016507\n",
-      "-->dense: 0.927903\n",
-      "dense-->flat: 0.927903\n",
-      "flat-->pool: 0.302989\n",
-      "pool-->relu2: 0.000000\n",
-      "relu2-->conv2: 0.000000\n",
-      "conv2-->relu1: 0.000000\n",
-      "relu1-->conv1: 0.000000\n",
-      "\n",
-      " loss = 2.302492, params\n",
-      "conv1_weight 92.8762283325\n",
-      "conv1_bias 287.317565918\n",
-      "conv2_weight 23.6872882843\n",
-      "conv2_bias 2.9135928154\n",
-      "dense_weight 35.6038131714\n",
-      "dense_bias 0.0192515775561\n",
-      "\n",
-      "\n",
-      "Epoch 8\n",
-      "-->conv1: 394.178162\n",
-      "conv1-->relu1: 0.000000\n",
-      "relu1-->conv2: 2.913593\n",
-      "conv2-->relu2: 0.000000\n",
-      "relu2-->pool: 0.000000\n",
-      "pool-->flat: 0.000000\n",
-      "flat-->dense: 0.019252\n",
-      "-->dense: 1.218609\n",
-      "dense-->flat: 1.218609\n",
-      "flat-->pool: 0.397913\n",
-      "pool-->relu2: 0.000000\n",
-      "relu2-->conv2: 0.000000\n",
-      "conv2-->relu1: 0.000000\n",
-      "relu1-->conv1: 0.000000\n",
-      "\n",
-      " loss = 2.300582, params\n",
-      "conv1_weight 102.090713501\n",
-      "conv1_bias 320.808746338\n",
-      "conv2_weight 25.5548191071\n",
-      "conv2_bias 3.29988527298\n",
-      "dense_weight 39.0138893127\n",
-      "dense_bias 0.0218270029873\n",
-      "\n",
-      "\n",
-      "Epoch 9\n",
-      "-->conv1: 430.156555\n",
-      "conv1-->relu1: 0.000000\n",
-      "relu1-->conv2: 3.299885\n",
-      "conv2-->relu2: 0.000000\n",
-      "relu2-->pool: 0.000000\n",
-      "pool-->flat: 0.000000\n",
-      "flat-->dense: 0.021827\n",
-      "-->dense: 1.221773\n",
-      "dense-->flat: 1.221773\n",
-      "flat-->pool: 0.398946\n",
-      "pool-->relu2: 0.000000\n",
-      "relu2-->conv2: 0.000000\n",
-      "conv2-->relu1: 0.000000\n",
-      "relu1-->conv1: 0.000000\n",
-      "\n",
-      " loss = 2.302244, params\n",
-      "conv1_weight 110.383636475\n",
-      "conv1_bias 350.950500488\n",
-      "conv2_weight 27.2405776978\n",
-      "conv2_bias 3.64754581451\n",
-      "dense_weight 42.0904846191\n",
-      "dense_bias 0.0241855494678\n"
-     ]
-    }
-   ],
-   "source": [
-    "np.random.shuffle(idx)\n",
-    "ffnet.verbose=True\n",
-    "for pname, pval in zip(net.param_names(), net.param_values()):\n",
-    "    if len(pval.shape) > 1:\n",
-    "        pval.gaussian(0, 10)\n",
-    "    else:\n",
-    "        pval.set_value(0)\n",
-    "    print pname, pval.shape, pval.l1()\n",
-    "for b in range(10):\n",
-    "    print \"\\n\\nEpoch %d\" % b\n",
-    "    x = train_x[idx[b * batch_size: (b + 1) * batch_size]]\n",
-    "    y = train_y[idx[b * batch_size: (b + 1) * batch_size]]\n",
-    "    tx.copy_from_numpy(x)\n",
-    "    ty.copy_from_numpy(y)\n",
-    "    grads, (l, a) = net.train(tx, ty)\n",
-    "    print '\\n loss = %f, params' % l\n",
-    "    for (s, p, g) in zip(net.param_names(), net.param_values(), grads):\n",
-    "        opt.apply_with_lr(epoch, 0.01, g, p, str(s), b)\n",
-    "        print s, p.l1()\n"
-   ]
-  },
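The run above bottoms out near loss ≈ 2.30 from Epoch 5 onward while every layer-to-layer gradient goes to zero; 2.30 ≈ ln(10) is the loss of an uninformative 10-class softmax, which suggests the std-10 Gaussian initialization drove the ReLUs dead. A minimal sketch of a fan-in-scaled alternative, assuming the same SINGA v1 `net` object used in this cell:

```python
import numpy as np

# Sketch: He-style re-initialization in place of the fixed std=10 Gaussian
# above (assumes the same `net` with param_names()/param_values()).
for pname, pval in zip(net.param_names(), net.param_values()):
    if len(pval.shape) > 1:
        fan_in = int(np.prod(pval.shape[1:]))    # inputs feeding each unit
        pval.gaussian(0, np.sqrt(2.0 / fan_in))  # std = sqrt(2 / fan_in)
    else:
        pval.set_value(0)                        # zero the biases
```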
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "def vis_square(data):\n",
-    "    \"\"\"Take an array of shape (n, height, width) or (n, height, width, 3)\n",
-    "       and visualize each (height, width) thing in a grid of size approx. sqrt(n) by sqrt(n)\"\"\"\n",
-    "    \n",
-    "    # normalize data for display\n",
-    "    data = (data - data.min()) / (data.max() - data.min())\n",
-    "    \n",
-    "    # force the number of filters to be square\n",
-    "    n = int(np.ceil(np.sqrt(data.shape[0])))\n",
-    "    padding = (((0, n ** 2 - data.shape[0]),\n",
-    "               (0, 1), (0, 1))                 # add some space between filters\n",
-    "               + ((0, 0),) * (data.ndim - 3))  # don't pad the last dimension (if there is one)\n",
-    "    data = np.pad(data, padding, mode='constant', constant_values=1)  # pad with ones (white)\n",
-    "    \n",
-    "    # tile the filters into an image\n",
-    "    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))\n",
-    "    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])\n",
-    "    \n",
-    "    plt.imshow(data); plt.axis('off')"
-   ]
-  },
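A quick self-contained sanity check of `vis_square`'s tiling, assuming `numpy` and `matplotlib.pyplot` are imported as in the earlier cells:

```python
import numpy as np
import matplotlib.pyplot as plt

# 10 random 8x8 "feature maps": ceil(sqrt(10)) = 4, so vis_square pads the
# stack to 16 maps and tiles them into a 4x4 grid separated by white lines.
fake = np.random.rand(10, 8, 8)
vis_square(fake)
plt.show()
```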
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "NOTE: If your model was saved using pickle, then set use_pickle=True for loading it\n",
-      "conv2_bias\n",
-      "conv2_weight\n",
-      "dense_weight\n",
-      "conv1_bias\n",
-      "dense_bias\n",
-      "conv1_weight\n"
-     ]
-    }
-   ],
-   "source": [
-    "np.random.shuffle(idx)\n",
-    "ffnet.verbose=False\n",
-    "net.load('checkpoint')\n",
-    "b=1\n",
-    "x = train_x[idx[b * batch_size: (b + 1) * batch_size]]    \n",
-    "tx.copy_from_numpy(x)\n",
-    "\n",
-    "r = net.forward(False, tx, ['relu1', 'relu2'])\n",
-    "\n",
-    "\n",
-    "    "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnWl0HNd1529VNYDGDhA7AYLgAoCLuEiUKFp2bEsek44k\nJ854mTO2Mx88drxEmjnOzJmcyWTiLJOTyXHG8thJTjKyYseWJpHO+EgxJdlwrMV2xEUkRYmSuIIL\nSCxcQOxAo4Guqvlwq957QDcagFBV3dT7/770w6vXXRfV9freeu8uhuu6BADQDzPXAgAAcgMmPwCa\ngskPgKZg8gOgKZj8AGgKJj8AmoLJD4CmYPIDoCmY/ABoSizKk33Y+lReuBN29R0nIqJ9q3fmWBKm\nq/910c43mSBPZuZ8Zy27ciiJpKv3GBERmY3njKWMh+YHQFMw+QHQFEx+ADQFkx8ATcHkB0BTMPkB\n0JRIt/oyEmQyEWNJOxzZPyImL4mbSq3484IgtnYNERGd/UoLERGlym1xzJjl32/DkePdAr6m8QGL\niIjW/OmBUOQyd2wW7WRDKTcc+X0aXtMuYhljCSm39fM3vPGyL5fEWppFO9Xbt+LPsyoriIjIbW4g\nIiJzaloccy1P5xYo029mloiI7O6L6R9mWiuWJxPQ/ABoCiY/AJqSc7PfN7fGdreIvviNGSIicmPS\njDen2TycqSokIiKnUB4r/qdXA5PHrKoUbbeFTTbn9ZPcoTxWGLECHl9aLPqm79xIREQFPzsWmDxE\nRJTi/720l8/f9OgJcciZnk4f78l58U/3EBFRrK1VftSly4GJ5bxxSrQLsozLdJOZt23iz3jrdGDy\nqJz9691ERFR/SJrMZb3p91XBT48SEVHvJ9aKvsZvrtzsp5oqIiLq27uKiIjWPNUjDhkxTyZbPqvZ\n1fzY1PNH9xAR0dqvhfOopgLND4Cm5Fzzp/r6iYiorGtE9LnJJL/a6YtBRd6rVVsr+oJcMrIHb8o/\nhkbmHlQWJ91Z1iL2yIzos5IOhYF/jRr+6hoRETmLLZJ5chobJvn9AWr7wOi+FOrHb/69M0REZI+M\nLmn82G3ye2wM4Pz2Bb7mzY8OEhFRanxcHsy0gHeJX2IfvjuAsy8NaH4ANAWTHwBNybnZ7+NMTaV3\nZvEBsG/cEG0zHufP8B4XghNqrnltlpTIQ768igln/vJ4sOdfRB4iIud9HOI60i4XHgfv5nG1Pwln\nfzgbZnm5aBsN/GiWquM9b+PgG+JYxoXKAFmquR9r5EXd+l9kW7J85ziT6fdJrLmJiIjsq9flwB0d\nRETU+jT3ReH9AM0PgKbkjebPyO5tommX8i+za/I2TewFuZ2WbbsrSJztG0XbOj/ApymUGsNfmIuS\nyRa2emJJaSWVdfPXOtrOfdZn94hjlY8fClUeR13Y8tpGN/9p7twiDrkWfz/usbdDlUcldR8n3SgY\nl4t7zizr2Jofd0vZSnnbzZmcDEWO4ffytraVlF6F01Wsh2vezPAG3+IL2NMPmh8ATclrzW+eOCfb\nJv9OmdXsPJG89w5xrKhniIiIUhcuhSvQYfmzbPvbaQWFos+qYYcOqq+R407J/yEMyp86wg1lPaDC\n0xCmZ5Wc+V87xLHKx0MVJyvCWUrBaqgXbfva9bTjQVJ45CwRzYvfWMPP/Ooa0rWH2dGm4dvhONpU\n/oidtNxZGTtS7jmXOW280Xj1q/eIY42PhBSbEcqnAgDyHkx+ADQlr83+TFt3TiJBRESWurjmbdeE\nToatR9/Tj4jIvsmPH9TekjYuNDJ5+3l9jhcPEW8IZ+EqCNypRGTnmrMY6TM8nNY1upXN8bDuKmc6\n/b4W9473morA0w+aHwBNyWvNv1Sc8Ylci0BE0sHFOZxpvyZ6jF1biYgo1V2+yMhoUBdHfYspozbO\nMeVnczctfAegyvPhxImoQPMDoCmY/ABoyq1n9mdYdDMsz/NJ9YBywzebiObuGZte3rZcm7J+gpST\nX2F//84vHhHHclkvzSyOi7atLJTmA5f/UO6rr/s2JxiJMrugVVFGREQje/i7K38yXE9MImh+ALQl\nvzV/lqg+a+M60XaueNt+IWv7TAtWaobfILK+Lhc/s+/JP5AbU+Y4f61b/rCX5cpRFmL3PexZaE15\niU+UtF/5wvWHWOO3Pi+tNbHtFhJWNXvz2cqWsDvB16jqEN9Dc74xZO8FAARJXmt+4StPRKMf4njn\n+CDnNzd+KbfT3NRstIIRUax5NRHlJpJPxY8J3/SwTD/m5xrIddUBP34/mtWXxbE/yPEgw51Foq/p\np5wazT57PnJ5ZiqlJVnS5zn3XGFrLSxtrwLND4CmYPIDoCmGG2S5rEVwrrbncqcJAC0wG88tKZMN\nND8AmhLpgt/92+6L8nQL8vybLxIR0b7VO3MsCdPV/7po72u+PYeSSLr6OBnpvpZdOZaE6erltG35\ndn2I8vM+WgrQ/ABoCiY/AJqCyQ+ApmDyA6ApmPwAaAomPwCagskPgKbkPLAniPBJv1CnUVqyyMjF\nsaqrRdvu5HBZOnRixZ+7EoxCDgAZ+DLvubd//Kw4trWCy4YNz8r/Pel45bpmOZnH+cc6xbFV3115\nkgijgD8/sVcWAxnq9G4lxbfM8LJhOF5FM0fGsdCaLi+E9mh6IY9lk8lL1S9csqVddKVq+HoUXFXC\nd890UxoBlnqzKrwipauq0j9/NkPolVdoxR0dkzKGFGIMzQ+ApuRc8/tpsNSkGJf/gBMslPYrxScH\n+LhfqDO+/1VxLPEhLuhZcmjlYZm2msf9UHpO91zgevULCsb5ehx7e704NvUIaxRjVGoz/1om7uRx\nL33nEXHs4999T2DyqN/B6v3L+4yB/8TfcdPRFYtDsbZW0b74F6xpYzE2O1JHpCVXcpWvX3K3LGc+\ntYMzGzfsl2G+5U8dXrlQvnZv5iQrU62V4lB8gLNNm+OyZoHrF3yNscUyuUUWhY0/K69zkEDzA6Ap\nOdf8ftJL9Tlr/eOcIEN97pmPmlxxaBP/apaEn/MwK1YtF+i0B28uMnKZeNem5rGDRERU9w/y+d5O\nLFzxJpZgjfj1mwH75/uJJjJVC8qWhEIZX/NWcAk8Uz1XRLvtc16NAm9dwh46nfW9yfvvJCKiG9ul\nHgykyoG3DmGfuUBEREXd8ro4fs2CDG+zvIKdVnP4tRag+QHQFEx+ADQl52a/QNmu8c39pW5xFI5G\nkyMktr5N/uHJ61ry99PuvhiuAJ757yimvuU9Nl3+0lbRl6hng/K3PvwCERF1ffX94lgBvRacPBlM\nfKusVLTH9m4mIiK7kOWOJeT3VPL04QU/YyXY82smLJKsZriDHxnrXg8pD6T3qONmeESy6urSxs1s\nbyMiouLuQXEorFyM0PwAaEr+aP5FMO7i7Txz0itvfENuw9V8hxfC3HK5SGL4WycBkrpwKfDPXCnu\nDGusxFZpDRSX8oJSSyFbTi1/
ck4cuz7CFoJ77O1Q5LHH5CJt2X5OLuFvDSY+tlsc87dz1z96QfSl\nrt0IXJ7k/XeJ9vRDfM+sr5QLsmdPsF5tfOQIpRGgs0+mz7XXNYoup5inopHyLMqYtIh8JzZnejpQ\nMaD5AdAUTH4ANCWvzX5/35yIyBicu+ef2tAk/7jB5uL0PdKHvfjoBdIBv0BH++cVH3mbF48eL+Tr\n0fvQbeJQ8R/xtap+MHzZfHNfnPsZ6anW+gy/Dv6m9DiseiJ4s7/45/K6lBxh/4iRVfK+qtvD5vXZ\nv5GPJFv+51Uimus/EAbGGzJGI+YVmzVXsUeis0rZ5+/0StMFXO4Mmh8ATclrzT9n+29s7haOpfzt\nb6IMbpNhY2sC8Bl/p/iRXERzF8BCwVs8cmfSPeZcb0uw+ozcLDL/VcilzZa6decVVS3rC7dUt28Z\nERGR3x6U22jVXpkua+Zu0Tf4fi6TXfWDkDS/v02sWEb+ne7LayXlNuBsJ8sTtKaG5gdAUzD5AdCU\n/Db7s5DJ+69gMrfVwGJNvG+bGriaUzl8zBJe4Or9DeldVvkMVxeup57o5YnLsFljLZuysXPXRF9O\nqgp7JvhYm9SDDUeSC42ODGdtg2gXXPM8XgM+BzQ/AJpyy2n+TBp/7NN7iIio8YXrUYsjQjCJMi+6\n5QI/7Vf/5znNVn29vC7Vf89pvzKFk4Ymjxde6/pbVkRkxz2PtlPS+zCKmvQsiLQQzR0cf2AoRmP8\nOMdoBK1pl4IIC49Jvexc7gvlXND8AGhKXmv+bFF9yQekz3bRsPcbfTPctFuqlnc2cnJP++hboZ5z\nMXwtf/G/3yH6HvxVjpjrPsPbRi2/Lbf3UurWVxjyFMhbauJBLmA52cgavalrQBxzPe0aurZXtLyf\nMo5u3yz6zn+8jIiI1j85KvrsoWjSt1k1q+Qftdy2KznFWOySshaSDGcNApofAE3B5AdAU/La7Fd9\n+2e2rSUioptbOLyx7vikOBY7E64Pto89Ik1DOjq68MAI8b3E1v+ZrC1w8h87iIio4wbnQkxdD95n\nfjF5iIhKf8iPH356jzkLaBEt7qkJM3o+zzn87bg8vvEx3pYNPRGLgv/4YVRKT1AnziHo1tnLRESU\nGgn//oLmB0BTDHeRNEdB4lxtz60XDgAaYDaeW1IWEmh+ADQFkx8ATYl0we+Be34tytMtyHMHfkRE\nRPtW78yxJExX/+uinW8y5Zs8D9x1f44lYZ478rxo72u+PYeSSLr6ji9rPDQ/AJqCyQ+ApmDyA6Ap\nmPwAaAomPwCagskPgKbk3Lff9cpqjW2vFX12ATsolV2R5YkKhjkTbbKBQzCHNsmUUDWneFzRRZmV\n9d2EH0o8+LEtREQ0dJt0lLSrOPmVUSDTc7gp/k0v6uNru+F7MpQ2iJJjfsHSs1+UtRNcT40UDUnn\nMstz85/1XNhNJTK15c8OrFgOgSP/99n1jZ48LIfhKIk7pjm02ZiVUQauly/fuh5sGK9ZxPfn5Ee2\n82u9jGUovcbnLx6QJdZS5RyaPdzOrzVvyWPmK28EKpv43FA+FQCQ9+Re83spikq9/OkL4f9Wx7z6\nkvUvymPWlo4QJMsf/GKc5ixrsTX/LFNdFnYtXKDAL1J58dOrRd+a/3Fp5fKMc0Rl27PSMjN/ubiD\niVqoM0jclNTkxivsDJTJud2d96pi3ymrGlkDSysNnw2/qKZfpah4kfH+RKzjquo0/VF5reLpwwMB\nmh8ATcHkB0BTcm72m431/DqtlC6a9RZmyssWfJ/d2y/bJ7ngYaytNQwRl43IFUdEbmrl2ej9Ek6V\nTxxa1vum6liOiovB5uq1vcKo1rBcJFtKrHbvv5bXov2Z4OQxvEVjIiKrge8nw/T0mrmIfovxQlxK\nzcXYvHqBwcsRynvwyBQybywccevnZIzvf1XpXFKE7rKB5gdAU3Ku+QVqNRev7VSUiL4bu7l0cdEo\na7GqpMyRny8VcnyC0PZLJbaWswhPbJPbbiPt/LUmq1nrrPv62+JYkDZApv8zdd8u0R7uZC022sly\nND8Tfi4X1eoiIlGunIjIXsNWgV0kt90KzrEFOSeTbqACpWttyyvDffN+WVLe8YyXmh8cC0eODEDz\nA6ApmPwAaEr+mP0ZcE6cFu2aE3OPqQbn0OfeQ0REta/JjKfmyESYouUNTiXnxh3ukF/lrLdOmmzg\nq3T54W3i2Nq/5IUte2wsFHlcRZ00/D17pjW2tfA5y+Sj3cSnuMRa2VPLW8RctjzK46F75E0imqfx\nvOy+Tqt8bLKu3uTGYouF7xC/GE3VDw6mHfMfjEY/s0f01RxgD83UpcuBygHND4Cm5LXmjylbLu4U\n+zo7E+xd5s7KX/RVf8e/oM5uqeF0+VXzraOmEwuPuf7b94j2xL2biIio+J9eXWj4iih8WfqhO/6C\noLcVq5Lcztbawpu5wWAUFYp2rJHLXjuTsmSZv21pqHnya6pDlmpx1G3diQfZ2y8OzQ8ACIK81vyk\nFH00KsuJiMjyXlXEs9Crb8rOPHH4EZVpnFwUfGaspBLZlgp3u22p25xDv8JOXTXfCVMaIrLSKwOZ\nFeXp7Zgcl+rhClCxIJx9MrEUpx3FOajsaA8RzV3nCgJofgA0BZMfAE3Jb7M/C67iz53X5NDcjzVx\nYovkKmlm1r/MCU9yIZVxl1yQLTkVVqDqMvE8AN1SGXRrlqc/WkaFWcZLoM74uOhzp6cXGr6yc4Xy\nqQCAvOeW0/yuvwiYyWc6LP/sZWIUSWcWtWR1VFgVnDer/zfWExFR+WXp0R9lKWofP/7g1GdLRV/7\nfwwwjddyUdJ+pdp4+48Oyb3SWEszNyIsYuun/XI7uRQ9KVGG9lg4DmvQ/ABoCiY/AJqS12a/q+zz\nT3XUEBFR4YiX6OOV19PGZ0v+EQV+OGmUpr6f2ffKF7aKvsk2XsSqP8Dmbfk/hus/rxJbt1a0T/4u\nm9TFV/i65MTUV0z86S1szltTcsfcOMgeiXP29EM2930TP/EhuQBqF7IeLnn6cPob3GCTsQg5QvlU\nAEDek9ean0bkdkfRcxfmHjOlR1astTkqibISZRIPH9vzSV/99RwuoCk4126IdseXenIoCeOnhCMi\nir2QnigjNC++LPiZfYueO7LwoJBSd6lA8wOgKZj8AGiK4Ua4l+lcbY/uZABoitl4bknPDND8AGhK\npAt++1bvjPJ0C9LVz9uED+x+IMeSMM+9+pxo72u+PYeSSLr6uPxWvn1nH1kbTsmv5fKTHpkMJd++\ns6UCzQ+ApmDyA6ApmPwAaAomPwCagskPgKZg8gOgKTn37fdLKp/76gbR13Q7F94sK5TRcVWFnLe/\nuXiEiIiuJGRu9ZGHuNqKe1wWpHyn2DcGRfudRueZ2zfJ9tB4lpFLwy/b7MtjlsgCpqZX9NEtkWmx\njASPS13pzfBhwfmM+0lDiIiMao4unN5YL/oStZxqrWiEYx6Kj8lEIvbgzcDkUFHrORARm
aUygYi4\nVlNK3n6veo6KUVCY1rdczGJOC+Zu4YQqyRr5/Uw28XVJVsrvomCC/d9qX+NKSnPuZZToBgAECSY/\nAJqSc7PfvnadiIiaDraJvt4SNh07H5PFJEf6OTRzaIxNsmufl6b1f33yCSIieqxj3YrlMdWEIBtZ\npr69nEikrE8mVfBNWScmTbLiF7loyFSrNIfLAjD7TS9kue9BzsZb1S1Dh0uuePndlBiN6dYqIiIa\n/CQXLmn6RrDhvn4Y7Kn/IgujFI6wHmn+hXxUKrvCj2oz1fyd9f2mrEdfd5zDWq2XX1uxPKZXCJSI\n6PTv8/9uWHw9is7IrLzF17kvUSu/M+MODolu/ZrMZ+yemRc+/k5kauACoDN/zp9/5arMNl1+kK9H\n6TUl0UgVy2R+Y5iIiMa/dbc4VvJMOKXVoPkB0JSca34ftXBk54ucN13NXT6fyksyScP/HfDLGV9b\nsRxGXMknP8ELQ83PeqnDEtkXAFNekoahzfKylqVnG1s2fsbd5u/yIpk9Ki2iTAmefB1T0sjaw0/1\nNf+975RUXz8REXX+nixu6S9GZkpo4ucyLiuVZaftItY76cW0lo9zSS5sbvrP3j3jpb5yRrNbXqOf\nuIOIiG7uktbAqjMrl8kvIRf7GFuS7TQi5Z1Iz8brVwroqeYCptZ6eawkbXQwQPMDoCmY/ABoSt6Y\n/Sq+ua/ul/d8lAtyJJo9s1L52Ur+DS/0VQRg9s/BZtNR7Jv39mUd7ryPw18bjoZTXimTye7v+TuT\nk2l9JVf5ccVWa88HuGesnjMbpvcoNdYmv7TW73Ml3KCzHjrq/zqP+T4ARESu99xR0RNOxuVMJr7l\n+xtMy3PaOzYSEVGqjBcl1/w/+V2HlQEHmh8ATclLze/jnDgt2omvcBKHgirWqmff/31x7GOb9xER\n0cyPlYWtLBogCJL338XyTErdVTDgeWcNXBd9bhNvWxpT4VgDE3tvIyKiRK38HZ8tZe2++kX2Xgsn\n63tm7HvvEO3zn+Tby6pijVuoOK25ZWEtY81leu8O0b78Kd7Oq6mR2njyEF+3ysfllqOvaYPw9MvE\n8N4OIiJq+JL0eNxdzbUV/vkaW7vGk3KLUqTaC9jTD5ofAE3Ja82v0vkQ/zL7W0n7SKaXOvftNj72\nDemo0fG5o4HLIAo4ElHsNOend+NSO8w0s+VhnT0v+pLv41/y0jf7A5eHiKjsZyf5VclPb3oFS5Pt\n7BSU+JTcYit7yqveE5K/eMHBk6K96TDrFrOaHW+mO5vEscuf4Paabw6IPieEUtQlB7pFe/NrvOno\n1FWJPvse1qpn/1qmB9v0O1y007XDsZmqnub935kfy4Ku/1K7hYiIEndzlaPCR+T2Zeyz/D2mrga7\npgXND4CmYPIDoCm3jNmfrRRWwwE2Ycc/mYhKHKIZz+tvRprbRWO89aVKOr6GL3Hpm+GIkWkryeln\nU9ryXu3PSD9xw68DP5O+7RWIPBlMd8cLoY31yUef+Dr2ZDPWrZEDT50LXp4JZTvSbw/KMN76U/xq\n/rtdom/6A7yIWvTiicDlISJy/FBxNWTc28atPH+JiIgubJePak27+XG2+Ecw+wEAAXDLaP75xNa3\nifZ4K/+GGYcqFxgdDb4TkOUtuBERVVyOvninj1nGfuXFg0rEmq9tIigEmY2pJj6/MbLyqMeVYFh8\n79hF8noUDkdXYn0+vgNQqkreN6UX+RoFvfwIzQ+ApmDyA6Apt5zZb9VyYo3ej8q66pOdvHi1+Xfk\ngpFN0eBkyEXX/2kZk9D8XDj7+9kQOf86ONlGvFea1lF6+80ndZ9cVKu8wJKkrt3IlThMRxsREVmz\n0oPeOs+Pb1FeK9NbiL3x65zwpPiy8ljWfTmcc4byqQCAvCevNb9VVyfap/6EI/fKGnhra+Zt+Uu9\n6Vu8lRS2P/9imX1v/nvevmr6xbDsVLYCw8DXGOpWqLmeNf5UA/vPFz1/RL4hwoU+q5Mj1RLreBFr\ncJtMZdXyV+zl5jgh22im/H/dWb5GsQZ5X116gBdnW/fL7T9nLH37NFCRvKjLxAe3iL7JBp6Kw1v4\nvu7831fEsVQinC1saH4ANCWvNb99Qz4PdnzJ86V/L/v0F/TLZ+nUxZ5I5LHqakV79G5OGjlbIn8/\n6w/y878xtrQ49yAwvRRdI7/SJvoqT3LKKKHxo9T2yjbnyO18veKDbP00f+uYOOa8w5oIy5anXmr5\nc19mhyJT2X3d8D2+j+wr0a3N+GsyqiVkej5XHX/OsQipwcG09wUNND8AmoLJD4CmGK4bVpKgdJyr\n7dGdDABNMRvPLek5D5ofAE2JdMHvA1/8rShPtyA//9v/Q0RE+1bvXGRkNHT1y+T++SZTvsnzwS98\nIceSMC8/+qho37/13hxKInn+7ZeWNR6aHwBNweQHQFMw+QHQFEx+ADQFkx8ATcHkB0BTMPkB0JTc\nB/Z4Pn8lP5OZUpdbvCG2bi0REU3c1hCYWHmFydUkzW1c5ml4m8xV6JfpcgqU4V7gSnyQL27ty0p4\n6BVZDCJszNJSIlLCjW0Zvmu1cmBU6sKlFZ8nUSNv4+vv887lsJNb5Ul5LH6T03OoBUOdHZzopOSl\nMtEXRN5Fo4TLbaUGvIy7KwhdtqqrVyxPJqD5AdCUnGv+4gEOf82k7c2dMtmB64WlWiOcaEEN4xXt\nd6vm97SGXcaJOwxFiTQ/wanLnGGZQMTXtFYDFwk99d/WiWPt/yEczT/+bzjPfPUB+fmul6/fMDl5\nhRqiPbybS1CVB6D5a/9Flvyq+v7in5cpx7NfeJWIiAKIgM6m8Y0YT7tstSiiAJofAE3JueZPrObn\nQqtaJne04/yMm/EXuIXHxyNK4JFPGK+wf3vlUVng0c6SFMO+xqXCzeSGcAUjovInuQDoUnVZsoK/\n3PIAzj25WSbsKGxkve56+fhnK7Lf4qVvXeWGkuos+cBdC4xeOlZlBcvhVUYyiuPyYJYCoLZiwYUN\nND8AmoLJD4Cm5Nzs97GLrbQ+w5G5P0peOcvjMmToTX1oV1rfuxk1c7BfeNPcsFb0OcW873d9N5ue\nHX8n88FFVc8gE+aOzaJdf4DN26Bz489UF875e7hd7oFO3sVZcO1Jedt3PMfboH49iKDx8/Wppr5R\nxd9L6pKSj39eUh1/+5qIyB0ZC0U2aH4ANCVvNH8m4l3HRdvOsi0Se4Gzwsb2bBd903XxhYa/q/Ct\ngOvvUTSX95M+vJOvmZmSWYfrenmByxkPv0CmsWsrnz/hlTMfUs7p5dM3quTGWxh1Fxq/eWBJ405/\nbaNo1x9i2eIj4dhJS8k2rY4xCth6MMtKA5UDmh8ATcHkB0BT8trsn7lP5o9zvT1/K8kLJ4XXpAlp\nn/IKdL76tnzzA3otAtY8djC9z0szN/Zv94i+qQ/yolt8/6uhy+Qe4+/DN57NuHwU8z06Jz9+t+gr\n/eHhwGVwPnC7aJtJliTWc130pQb4Maj9YXnu
0c/I6xUGfsyDUSCnn5Pg65GpDJw761X0IJj9AIAA\nyGvNbxel/zY5hZ7nVrmMdIqf4leroixtPCCq+IdDon3jy1xMNBfLoZniNxI18jsOVq8xs2XKLe61\nkzVrlBHcjitRpdVPcztx79YQJFK2/xT84p3kvaqefv52btBA8wOgKZj8AGhKXpv92TDs9Mpf9qa1\nGUZqjJcEZOITMlCl/ggvlOZL3bTKCzOLDwqRiUaeAkXKQpu1vi1H0hAZ/l6+YvZbTRyq7o4G65sB\nzQ+Aptx6mt9TWUVKCKZffz5ZE87CyC2Hp/GtavaeG9kof+PLnz6bE5HmM/FJ3uIr++HRyM89XS3j\nSEpupnvxJdeuilIcIiIy4nzvZkqz5k4mQjknND8AmpLfml95MI0/u7BTSvIjd/JwM4D8S7cantUz\ns1c6NSVq+Wud8RJmrH30nDhmz+buGXv6wd2iXflSNxER2StIbLmkc1ZJLX/dO33dUXmfFD/D95W1\nuV30JUvSI0yDRGh5z8EoE7GWZtF2J6dCkQOaHwBNweQHQFPy2uwvHJ0V7VhbKxERpep5EStZK33U\nXEtDc9/HSwJR2CUXzkq8RBDuTd4ussfCSQaxGL4Pu1nLC2glL8nYC3tyMhIZqk7K7bHKJ95OO+6H\nHU81lkQwjbyaAAADhUlEQVQiDxGRPZSep8/34rNWsedqWKa+CjQ/AJpiuG507h7O1fZ88S0B4F2L\n2XhuSaYwND8AmoLJD4CmRLrg95G1uxcfFAE/6eG93X2rdy4yMhq6+l/PtQhAQ6D5AdAUTH4ANAWT\nHwBNweQHQFMw+QHQFEx+ADQl9779Xhhupnzly8UvaxQUVgUXVJze08Gv1fJy+eGydoF0prJm2YGx\n7qD03XbeOh2oTAAEBTQ/AJqSc83vJyccuatJ9MWHuMDkZKMsr1w0ypV6klX8ezW6Xv5utf7x0oox\nLhc/7qHnV/kybXxSRlrFBjztPiMjD/1IrML9xaIv8YFQRANgxUDzA6ApmPwAaErOzX67j/OYVao5\nyW3O61aQSC/v5BvUFXduDls0UcO+4/ff5L+VBBSpLO87P9Qi2qvpWiiyAbBSoPkB0JSca34fZyJ7\nWid3XtZZ4+Aboh3z0lbZvQPBC0ZzNb44p1fVxR2VKbJu/FonEREVPRuKGAAECjQ/AJqCyQ+ApuSN\n2Z8J++4tom142f/8eutqttrUxZ609wbt7Tefgb3sl+Baq0Xf6BZeqCzulUUfrGrOxqrWWwcgH4Dm\nB0BT8lrzF5y4oPzBohZUsr/9+K8rKcE893q/9FIUNHzvOBEROdNyO7Lee7U2rhN9p/+Y4wLaHz4c\nmWwALAVofgA0Ja81v6M6+fhViscmiIio5Eq/OOTu2hShVIyq8edjd18U7aLGsijEAWDZQPMDoCmY\n/ABoSl6b/dkwCqTo7mH2vTfjSvFO24lcJh+rqlL+8VZ5zuQAIBvQ/ABoyq2n+b20X85Uegljo1A6\n9rgZIgKjovt3pXPSxr/gNF52roQBYAGg+QHQFEx+ADQlv81+U2bGnZ/d1ywtle3aVUREZPdHlzjD\nqq0hIqKJ924QfSMb+HK27ZchwPbNochkAmA5QPMDoCl5rflNZcvs+kdZw1peTo/an0q//yg1vjjn\n4E0iIioaahV9LadZy9tnuiOXB4DlAs0PgKZg8gOgKYZfmCIKnKvt0Z0MAE0xG88Zi4+C5gdAWyLV\n/ACA/AGaHwBNweQHQFMw+QHQFEx+ADQFkx8ATcHkB0BTMPkB0BRMfgA0BZMfAE3B5AdAUzD5AdAU\nTH4ANAWTHwBNweQHQFMw+QHQFEx+ADQFkx8ATcHkB0BTMPkB0BRMfgA0BZMfAE3B5AdAUzD5AdCU\n/w99eh37iWlJHwAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f0747eafc90>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "r1 = tensor.to_numpy(r['relu1'])[0]\n",
-    "vis_square(r1)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADrhJREFUeJzt3WlsHsUdx/F9DseOYydxnMN2Qsj1OIFwJA0JSYqIKkET\naKsWWrWiqOIqVUpVRBFVD6RW6iFatUK8KUUF2qJCqFAPoSKIK1SUCkJIyAV1aOwcDrEdxzmcA9ux\n/Ty7fVnN/jc8m83uPM+T//fzbkbjnSH2j9GMdmdSnuc5APRJl3oAAEqD8ANKEX5AKcIPKEX4AaUI\nP6AU4QeUIvyAUoQfUCprs7O19XeL1wlTVfaG8OqeTeZ4WpZY6ztIW+8uUXfLvJVG2R0dszUcp617\nu6grt3+jchuP45R2TEHjSTd1psL8LDM/oBThB5Qi/IBShB9QyuqGnzs4GMtzhr+wQtSNf3mbWVGh\nnyr7P7FO11SLNnt/u8gor1nYKdrkavuN8lufyYk2+cPdUYaIIjJXyH/rk8sajfLEA8OiTWrz7sTG\nFISZH1CK8ANKEX5AKatr/qj2/2qVUa4/KN9hGF+ha/xi3KEhUZe7a4dR7g34uV5nvFHe9/gs0WbB\nw/Gs+VNZ+WeUrq83yoWBAdHGXbPU/JlNOyP13/3XxUa5fdULos01W+8wyvUbJok2dS9tidS/X+qM\n3Nua9Lzcl/FL19Ya5aDffZyY+QGlCD+gFOEHlCL8gFIVseE3/7tvF23T8fRyo9x6/7bztIxfpqFB\n1AVtcNnU8ZT5IlTr+ng2s4J4+byoC/PfX9V31vyZiP3P+mK7UV7ryK/smp0PIj79wuV75BZsaqm5\nKbn3wRrRpvUe+VVlkpj5AaUIP6AU4QeUqog1f99Dq43y5bcdEG1a19hb4/ulJtTKygTX/KmqcUbZ\nGxsVbVrXb02s/zBO3mu+mDXl93LfptB50NZwhCMPrxZ1zY9vTqw/b6e5L9F6T2JdhcbMDyhF+AGl\nCD+gFOEHlKqIDb+mJ8yNmJEnZJtM4xSjXDhxMskhGfLdPaIuM2O6qCsc7Rd1UQRt8JWboA0+v66f\nmi8izXm0+M/EJcnNvVDSGVF18m7z3yPMv+FFDSHRpwMoW4QfUIrwA0pVxJo/jP7bFxrlxqftrR+D\nxLW+v1Rk58wWdUmt8YNOFup7wFxPt7x+XLQp7OlIZDyO4ziZyebJQYe+uVi0mfWY3X0IZn5AKcIP\nKEX4AaUIP6BUyn89VJLcvtyleb42UEbSTZ3ybPugdkkPBEB5IvyAUoQfUMrqSz5rW+SpqkEfOCSl\nrds8HTVwPBa19e4SdaUcU7mNx3HkmMptPI5Tfr+zsJj5AaUIP6AU4QeUIvyAUqX/qs+NeklTEalQ\n7zlUpEzrfKM81jxRtIl6131SMtOmibrCsWOxPDu95EqjfOTGyaLN1PfPGeWqk8Oijbs7uSu9spfN\nMspefcBx7wXXLO7dl9h4HIeZH1CL8ANKEX5AqZKv+Yduu94oNzx0SLS5fcYOo/ziopbExjP4JXM8\nE/7yTmJ9RVXo2G+U08kdQBOboPX9mvfMdfema8ZHera7a49RnhHivRe3eJNY5Q93W+6xOGZ+QCnC\nDyhF+AGlCD+gVMk3/Ope222UR/5+TrR50Sm+wZdtbjLK+b6jkcbj3+BLT5gg2riDg5GenZTj31gl\n6qb+rrRHl4fx7O7VRnmBk9yLSanqaqPsjYyINkO3Xy/qav9mb8P3xNfN32PjM1zXBSABhB9QivAD\nSpV8ze/61l7e6mtFm4Efmi+DDG6ZKtpc9jPfVUcxfdgTtL5P19QY5dn/ln11rZAfjiQlaH2fWmZe\nB+Vtb7c1nNBq34v2Uk8UYo0f8Pdhc30fJOk1vh8zP6AU4QeUIvyAUoQfUKrkG35+qbffE3VTPmve\n8jXFCfiMzeLJPe4580WkrhXnaVhCp66oN8qTtp+nYQkNXi1f6LIlMz3gZKGj/SUYSekw8wNKEX5A\nKcIPKEX4AaXKbsPP8byiTfxHVzuO4wwubDTKNa9si21I5S47U371OOn5LSUYycdYeY2oWvSdLqOc\n0CHugYI290bXLRd14zZeun9HzPyAUoQfUIrwA0qlvBBr7Li4fTl7nQFKpZs6Q73xxswPKEX4AaUI\nP6AU4QeUsvqSz9qWJTa7E9p6zUvcym08jlPaMZXbeByH31kxQeMJi5kfUIrwA0oRfkCp8vuwB4Z0\nba2oc4eGSjCS//NffTV8szxuva7dd13a6Jhok+/pjXVc5SyzeKFRLrTvFW1SWTOOXj6f6JiY+QGl\nCD+gFOEHlCL8gFIVueH30cZ5oq5u3YESjOT8hj8vz/Me//LWC39QJhNL/5H6Pg//vXc1r8hnJ7tV\n9fG8T8qXblJvmS/DdPxhmWjTeu+OgIdd+Ieo2XlzRF0+YINPdJXwBp8fMz+gFOEHlCL8gFIVseZ3\nbzDXcHXron/MYEv3bfIs2tzLF/4c9+xZUZeury/aJs41fqXxr+8dx3E++vJKo9x6T3KnG+cPdIm6\n7KyZRrnQPEW08ba9n9SQAjHzA0oRfkApwg8oRfgBpSpiwy/9prmB8+GPVos2s3+y2dZwBG+1/Kot\nd/f2xPrzb/BlcvKlp6GceX1Z9at2r51y1yw1yulNO63271f3UvENvlNfWyXqJv/p7Vj69wbNLzG9\nbT2xPPdiMPMDShF+QCnCDyhVEWt+v1Ku74OkNu8WdZkFc0VdYd/BRPovdMqPmkaXTTfK1aJFskq9\nxo8irvV9kMLAQNE2vY+Ye1ktv07275yZH1CK8ANKEX5AKcIPKFURG35jn77OKFf9890SjSS8Dx6Z\nJupa1yez4Ze+apGoq/9zcl+tVaLuH5ibabMes7xpnDZPZMrUTRBNkt7g82PmB5Qi/IBShB9QKuVF\nOJ00KrcvZ68zQKl0U2cqVLukBwKgPBF+QCnCDyhF+AGlrL7ks7ZFXqNkU1uveSJQuY3HcUo7pqDx\nrJt9naizea2U+J3NXCobWdy0Dvo3unXRjUbZHT5nazjOxkPRj2hn5geUIvyAUoQfUIrwA0qV/Ku+\ndE2NUT7xFbmhkx0xN3Rqj46KNplNvqO0XHlXXhQjtywXdbVdp8yKMbkBltSRXY7jOKnlVxvl0QZ5\nSFdcXz4mubmXmTzJKBdOnS7+QwGbe6nrrjKbvPufixrXhfIK5t9aespk2eajQaPsDg6KNrYx8wNK\nEX5AKcIPKFXyNb97znwhouE5eXyyf41r8x7z6tfkNVfx7CZE5//vr7Lcf8/3zVNxZv4i2gk0odb4\nIWR6jhtle68gBSsc7S/aJrN4ofy59r1JDOe8mPkBpQg/oBThB5Qi/IBSJd/wCyPMBt/pO1ca5Ukv\nJHd0dXbu5Ub51PJm0SbMffBxybTOF3XDcxuMcnX/sGjj7WyP1F/UDb6k5I/0GeXsrJmyTXePreGI\nF9ccR25s297cC8LMDyhF+AGl
CD+gVEWs+bPz5hjl3nUtos30J+2tQwvdR4xy3cFD1voOUujYL+rG\ndZjl/b9cJdrM25nUiErL5vo+iH997zjyyrl//fEZ0WbVI+uN8sQNye4bMfMDShF+QCnCDyhF+AGl\nKmLDL3+gyyhPf7IrsJ0t3pg8Sajczfue/FpSs8y0aaKucOxYYv35T1YKOqJ9omPvxTDHYeYH1CL8\ngFKEH1CqItb8YWSubDXKhT0d52kJG/ofME/7sfkSVhhJru8rBTM/oBThB5Qi/IBShB9QKuVZvNvc\n7cvZ6wxQKt3UmQrVLumBAChPhB9QivADSll9ySfoYwab2np3GeVyG4/jlHZM5TYex6mM39mti240\nyoUzZ2wNJ3A8YTHzA0oRfkApwg8oRfgBpS6Zr/pgT+FTnzDKPWvk9VTnmseM8oINedEmval0Z4en\nli0Wdd72aNeXuSMjFzscx3EcJ9s0wyh7Q/KKtTg3E5n5AaUIP6AU4QeUumTW/IcfNU+Ouezn0U6O\nGbtpmVGe8mN5FdeejeapQUF9ZefMFnX5rg8jjancZN7YYZRnvxHtOf3f8p3285t4Tvs597kVom7C\nvgGjXAhY36eqxok6myc15/uOGuUDQVesxXgKMzM/oBThB5Qi/IBShB9Q6pLZ8Jvz5AdGuRDxOVWv\nbzfKZ1+XbS5zjhd9zqWyuRfVvidWGuXUdHln/fyvJnOcd80/too6/9/DsfVyM23aU+V1pVnumaOi\nLurfdRBmfkApwg8oRfgBpSpyzW/zBZqgvgavMD/AqH5tm2iTWn61qPO2vR/LmPzXSw/cPF+0mbjB\n7nXPfgseKt5/trnJKOeP9CU1HMdbfa1RDlrfZxYvFHWF9r2JjamY/35bXiOee/BAbM9n5geUIvyA\nUoQfUIrwA0pV5IZf0ObeifvMlzYan43nhY2gvqpDbC66VfL/q6HuUArBf7f8xA2Vedd8kht8fqnN\nu4u28aoyFkYSXu7BdxJ9PjM/oBThB5Qi/IBShB9QqiI3/ILEtcEXxpk7zC/W+m6Sx1K33veureGU\npXR9vVF2z54t0UiCDdwlv+preK68vupLGjM/oBThB5Qi/IBSKc/zrHXm9uXsdQYolW7qDPU+GTM/\noBThB5Qi/IBShB9QyupLPmtbltjsTmjr3WWUy208gE3M/IBShB9QivADSlXkhz3ZuZeLuvzBQyUY\nCVC5mPkBpQg/oBThB5Qi/IBSZbfhl66tFXXu0JBRTnJzz3+nW5gjn4OMrlsu6sZtlHf6AaXCzA8o\nRfgBpQg/oFTZrfn96/sgJ+6XJ682Ph3Pyav+Nb7/DnnHCXfNVO3uw/Lnog8LiB0zP6AU4QeUIvyA\nUoQfUKrsNvzCCNrcO32neYXWpBe2xNKXNzxctI17gzwRKP8mp/SgvDHzA0oRfkApwg8oVZFr/iBu\nNtQNRRescOp0Is8FSo2ZH1CK8ANKEX5AKcIPKFWRG36ZqY2iruG5eL7qiyId8EJP0BgLx0/YGA4Q\nCjM/oBThB5Qi/IBSKc/zrHXm9uXsdQYolW7qDPXGGzM/oBThB5Qi/IBShB9QyuqGH4DywcwPKEX4\nAaUIP6AU4QeUIvyAUoQfUIrwA0oRfkApwg8oRfgBpQg/oBThB5Qi/IBShB9QivADShF+QCnCDyhF\n+AGlCD+gFOEHlCL8gFKEH1CK8ANK/Q/zQX1soXEu2gAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f07356cc790>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "r2 = tensor.to_numpy(r['relu2'])[0]\n",
-    "vis_square(r2)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(32, 288)\n"
-     ]
-    }
-   ],
-   "source": [
-    "p=net.param_values()[2]\n",
-    "print p.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAACwNJREFUeJzt3etv1vUdxvHvfZeWHjkptIUB0lJAJohyUvFQNVuGWZhh\nLjjnBupQqqDL3OKcPlh4sGxL5kaioImniUg2N6fZnBPnsFMHBhy2Ujy05ShQKgXKobS0ve/9B7u+\nD375ueR6vx5fuX8f7pur3yeffH+ZfD4fAPjJftEDAPhiUH7AFOUHTFF+wBTlB0xRfsAU5QdMUX7A\nFOUHTA1J82Hz/v6gXCfsbDtPfk5Fe4HMND2wVmamNC6VmeKSczIzrLhPZkII4Z2ZL8nMvYfmysxf\nd82QmWxnkcy03fK4zNS+eZvMzJ60X2a2fTJJZvYsfFJmpjzXIDNX1X8oM1sPTpSZEEJouXyDzJzO\n9crMxZv13CMbi2Vm++p1MpOtas3IUODkB2xRfsAU5QdMUX7AFOUHTFF+wBTlB0xRfsBUqks+Y8u7\nZaYz6CWffEJTD3TppYq+7lKZOTQiF/fAmTryyZx+mZmw6ZjM7OsZEzORVHBAf0dd1WUyUz6qJ4lx\nQm68XqhpX32hzGSXn0linBBCCPPfu0NmcqcLZebE1CSmicfJD5ii/IApyg+YovyAKcoPmKL8gCnK\nD5ii/ICpVJd89q+fLDMVpfoSkomLdycxTiipPi0z+SPDZWZ07edJjBNCCKHzlWkyc6p5hMxc+ESn\nftgiHXnm5sdkZvlTK2WmvyLinZDzdSRk9Od0LNOLQANt+ncNIUTNlPtQf1ZBmZ57WDL/raNx8gOm\nKD9givIDpig/YIryA6YoP2CK8gOmKD9gKpPPRyxfJCTXUZfewwBTvK4LwP9E+QFTlB8wRfkBU5Qf\nMEX5AVOUHzBF+QFTqd7kU/PHu2Qme36fzuwrkZlPl66Tmdo/rJCZ3HD9+qySCj1zCCHsuuJ5mdna\nOygz33vvdpnJ5/WeR2v9szLzVHeVzHzY8yWZ2XygTmaa5m2UmQ/69He9+G/3ykz57gKZCSGE5vvX\nykzNi/r/Ue1FB2XmcPcwmdl52QaZicXJD5ii/IApyg+YovyAKcoPmKL8gCnKD5ii/ICpVJd8Cs7q\nvzXFJedkZqC3NIlxwh3XbZaZjc9fLzM9M3NJjBNCCOH+n9wjMyMiXml2fHoS04Tw6GOLZebELP2b\nFVXoTIwfLr9bZqY/vF9mWkrGJTFOCCGErN4DC+27xspMQU/EWXxZxECROPkBU5QfMEX5AVOUHzBF\n+QFTlB8wRfkBU5QfMJXqks+Y9/XbuspeLpSZ3Tcls1Tzp8euk5meK8/KTMEQfftOrOI7D8nMod2V\nMlPZGHFTza06Url4n8ycaNcLLEXbyvXDrtKR41OHysxnWy6QmdKTUW+0CuFrEZlqfbvQhBd01Q5c\nHzlTQjj5AVOUHzBF+QFTlB8wRfkBU5QfMEX5AVOUHzBF+QFTmXxeb90lJddRl97DAFPZqtaoVUFO\nfsAU5QdMUX7AFOUHTFF+wBTlB0xRfsAU5QdMpXqN14IfrJCZY986IzPFjRUys+Ona2Vm0mvfl5kh\nR/W1YuX74q5f2vGwnmlrr74S7LsvrpSZohN6ppZVep5pTzbITCann9U7Xr+rb8/CJ2Vm0it3ykyM\nwhMR15yFED5duk5mLtl2s8zMq9bXoTW+donMfLxc/2axOPkBU5QfMEX5AVOUHzBF+QFTlB8wRfkB\nU5QfMJXqks/ha/RFPvnjxTLTf0VPEuOEuqcHZKb9m/orGihN7h1rK3+uF3gGZ+jv8Yobm5MYJwxv\n05nT3+iWmZIdwxOYJoRh1adkpq9ppMz01+h3MMY61TZCZjYdjXhXYaX+/5gkTn7AFOUHTFF+wBTl\nB0xRfsAU5QdMUX7AFOUHTKW65FN4TP+tKbpIL3EMeUMvVYR6Hdlzj86Ub9Mzj7rhoP6gSFc2bJOZ\nv7w9R2a2/+5i/bCH35WRuav+IzOvNs+QmeF6DyjKmZ6hMjN63hGZOdo0JolxQgghZPsilrxK9AJP\n5vOSBKaJx8kPmKL8gCnKD5ii/IApyg+YovyAKcoPmKL8gKlMPq9vhUlKrqMuvYcBprJVrVFXS3Hy\nA6YoP2CK8gOmKD9givIDpig/YIryA6YoP2Aq1Zt8bt1bLzMrq96UmYZfr5KZHQ+tlZkpjUtlpr9b\n3xzzwNWvykwIIawYoW/8qXtrmczkD+lXmg3t0n/XW1bp7+iGT26QmQvKj8nM5r11MvPRgvUyE6N+\n540y07FlbNRnfbxcf0e37LlWZra2TZKZa6a2yswzE96WmVic/IApyg+YovyAKcoPmKL8gCnKD5ii\n/IApyg+YSnXJp2XDdJlZ/fsCmTn5UC6JcaKMu+CozPzqH1+P+qwVNz0hM8M361c2dc3Vr36qnbs3\nZiRp8MHzZeb1JRNkZlRzxOUyC3Tky4/eLTMDJfrCqNHzO/TDIm39YIrMZM/qf3/1zITeaRaJkx8w\nRfkBU5QfMEX5AVOUHzBF+QFTlB8wRfkBU6ku+Sy6s1FmNl41W39QQvsZZaV9+lHHhsnMy4vWRD5R\n38DTNXtQZgqP6p+t642JepyIsVu/o5eO8kV65vpV2/XDIvROPyszBQV6CexIU2XcA2fqyLxZ+gae\nptenycw7nbX6YZXNOhOJkx8wRfkBU5QfMEX5AVOUHzBF+QFTlB8wRfkBU5l8Xt96kpRcR116DwNM\nZataI65N4uQHbFF+wBTlB0xRfsAU5QdMUX7AFOUHTFF+wFSqN/l85du3yUzvqCKZ6Rmt/2a9/7N1\nMnPp6gaZORlxuUp2/BkdCiF8evVzMnPXZ5fLTNPRcTJzWeVemflttb5dZ/ILK2SmYo/+PfrLZSTs\nvG+tzNS8qOfJntM7LqWHo/ZgQvOP9EyT31omM9fW6tt+Wn4zQ2b+/cjjMhOLkx8wRfkBU5QfMEX5\nAVOUHzBF+QFTlB8wRfkBU6ku+bQtKdShQv2qpdnT2hOYJoRTNTpz0bzdMtPUEvFqrEi7fqHfD9V/\nvv6bvemr+tVgIWLJJzOol2FOj9cXNA2W6t81Rr5YvxqsamqXzAw0jUlinBBCCL+c85LMrNlzvcxM\nvm9XEuNE4+QHTFF+wBTlB0xRfsAU5QdMUX7AFOUHTFF+wFSqSz6ZsgGZKdupl1PGzDqdxDhhsFgv\nnnSf0/Nk+5L7G9p7+3GZWV7zrsw80qyXSmKU79NLPrmh+nN6qpL5jmpqj8hMWeE5mWmfkNxvtubH\nN8vMgYX6cw6WjdChif+KmCgOJz9givIDpig/YIryA6YoP2CK8gOmKD9givIDpig/YCqTz+srmJKS\n66hL72GAqWxVa9SLCDn5AVOUHzBF+QFTlB8wRfkBU5QfMEX5AVOUHzCV6jVeU59ukJlMxCvdsv16\nh2FXw1qZuXT7Epk5cbxMZoaW
9stMCCF8tGB9VA5IAyc/YIryA6YoP2CK8gOmKD9givIDpig/YIry\nA6ZSXfIpmHZKZmZVH5SZ/adGJjFOGPzneTIz6oy+fOjkdYNJjAOkipMfMEX5AVOUHzBF+QFTlB8w\nRfkBU5QfMEX5AVOpLvkM+3O5zGypnywzY8d3JTFOOHlxn8yUfzRUZgY6S5IYB0gVJz9givIDpig/\nYIryA6YoP2CK8gOmKD9givIDpjL5vL6pJim5jrr0HgaYyla16vfZBU5+wBblB0xRfsAU5QdMUX7A\nFOUHTFF+wBTlB0yluuQD4P8HJz9givIDpig/YIryA6YoP2CK8gOmKD9givIDpig/YIryA6YoP2CK\n8gOmKD9givIDpig/YIryA6YoP2CK8gOmKD9givIDpig/YIryA6YoP2Dqv/x3CAzTt8/UAAAAAElF\nTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f07355967d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "vis_square(tensor.to_numpy(p)[0].reshape(32, 3,3))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  },
-  "widgets": {
-   "state": {
-    "0678ea185e8c48a9ab20bdb956e6b18a": {
-     "views": [
-      {
-       "cell_index": 13
-      }
-     ]
-    },
-    "1373fb7b9e754b639a3f27ebf0372d70": {
-     "views": [
-      {
-       "cell_index": 13
-      }
-     ]
-    },
-    "49561f2ab00b457f82357766d967ca8b": {
-     "views": [
-      {
-       "cell_index": 13
-      }
-     ]
-    }
-   },
-   "version": "1.2.0"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/core.ipynb b/doc/en/docs/notebook/core.ipynb
deleted file mode 100644
index c16cc1f..0000000
--- a/doc/en/docs/notebook/core.ipynb
+++ /dev/null
@@ -1,1151 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "# SINGA Core Classes\n",
-    "\n",
-    "<img src=\"http://singa.apache.org/en/_static/images/singav1-sw.png\" width=\"500px\"/>"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "# Device\n",
-    "\n",
-    "A device instance represents a hardware device with multiple execution units, e.g.,\n",
-    "* A GPU which has multile cuda streams\n",
-    "* A CPU which has multiple threads\n",
-    "\n",
-    "All data structures (variables) are allocated on a device instance. Consequently, all operations are executed on the resident device.\n",
-    "\n",
-    "## Create a device instance"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.singa_wrap.Device; proxy of <Swig Object of type 'std::shared_ptr< singa::Device > *' at 0x7f69a05ff330> >"
-      ]
-     },
-     "execution_count": 1,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from singa import device\n",
-    "default_dev = device.get_default_device()\n",
-    "gpu = device.create_cuda_gpu()  # the first gpu device\n",
-    "gpu"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "** NOTE: currently we can only call the creating function once due to the cnmem restriction.**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "gpu = device.create_cuda_gpu_on(1)  # use the gpu device with the specified GPU ID\n",
-    "gpu_list1 = device.create_cuda_gpus(2)  # the first two gpu devices\n",
-    "gpu_list2 = device.create_cuda_gpus([0,2]) # create the gpu instances on the given GPU IDs\n",
-    "opencl_gpu = device.create_opencl_device()  # valid if SINGA is compiled with USE_OPENCL=ON"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "3"
-      ]
-     },
-     "execution_count": 2,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "device.get_num_gpus()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(0, 1, 2)"
-      ]
-     },
-     "execution_count": 3,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "device.get_gpu_ids()"
-   ]
-  },
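Together, `get_num_gpus` and `get_gpu_ids` allow device selection to degrade gracefully on CPU-only machines; a sketch using only the `singa.device` calls shown above:

```python
from singa import device

# Pick the first visible GPU, or fall back to the default host device.
if device.get_num_gpus() > 0:
    dev = device.create_cuda_gpu_on(device.get_gpu_ids()[0])
else:
    dev = device.get_default_device()
```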
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Tensor\n",
-    "\n",
-    "A tensor instance represents a multi-dimensional array allocated on a device instance.\n",
-    "It provides linear algbra operations, like +, -, *, /, dot, pow ,etc\n",
-    "\n",
-    "NOTE: class memeber functions are inplace; global functions are out-of-place."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Create tensor instances"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(2, 3)"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from singa import tensor\n",
-    "import numpy as np\n",
-    "a = tensor.Tensor((2, 3))\n",
-    "a.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.singa_wrap.Device; proxy of <Swig Object of type 'std::shared_ptr< singa::Device > *' at 0x7f69a02f8a50> >"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a.device"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "gb = tensor.Tensor((2, 3), gpu)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.singa_wrap.Device; proxy of <Swig Object of type 'std::shared_ptr< singa::Device > *' at 0x7f69a05ff330> >"
-      ]
-     },
-     "execution_count": 7,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "gb.device"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Initialize tensor values"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "a.set_value(1.2)\n",
-    "gb.gaussian(0, 0.1)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### To and from numpy"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 1.20000005,  1.20000005,  1.20000005],\n",
-       "       [ 1.20000005,  1.20000005,  1.20000005]], dtype=float32)"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.24042693, -0.21337385, -0.0969397 ],\n",
-       "       [-0.010797  , -0.07642138, -0.09220808]], dtype=float32)"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.to_numpy(gb)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(2,)"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "c = tensor.from_numpy(np.array([1,2], dtype=np.float32))\n",
-    "c.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 3.,  4.], dtype=float32)"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "c.copy_from_numpy(np.array([3,4], dtype=np.float32))\n",
-    "tensor.to_numpy(c)"
-   ]
-  },
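The conversions above round-trip float32 data exactly; a small self-contained check (sketch, using only `from_numpy`, `copy_from_numpy` and `to_numpy`):

```python
import numpy as np
from singa import tensor

# numpy -> Tensor -> numpy should reproduce the float32 input exactly.
x = np.arange(6, dtype=np.float32).reshape(2, 3)
t = tensor.from_numpy(x)
assert (tensor.to_numpy(t) == x).all()

t.copy_from_numpy(2 * x)   # overwrite in place; shapes must match
assert (tensor.to_numpy(t) == 2 * x).all()
```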
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Move tensor between devices"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.singa_wrap.Device; proxy of <Swig Object of type 'std::shared_ptr< singa::Device > *' at 0x7f69a05ff330> >"
-      ]
-     },
-     "execution_count": 13,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "gc = c.clone()\n",
-    "gc.to_device(gpu)\n",
-    "gc.device"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.singa_wrap.Device; proxy of <Swig Object of type 'std::shared_ptr< singa::Device > *' at 0x7f69a02f8a50> >"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "b = gb.clone()\n",
-    "b.to_host()  # the same as b.to_device(default_dev)\n",
-    "b.device"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Operations\n",
-    "\n",
-    "**NOTE: tensors should be initialized if the operation would read the tensor values**"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Summary"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.12169448286294937"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "gb.l1()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0.4898979663848877"
-      ]
-     },
-     "execution_count": 16,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a.l2()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "False"
-      ]
-     },
-     "execution_count": 17,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "e = tensor.Tensor((2, 3))\n",
-    "e.is_empty()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "6L"
-      ]
-     },
-     "execution_count": 18,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "gb.size()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "24L"
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "gb.memsize()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "False"
-      ]
-     },
-     "execution_count": 20,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# note we can only support matrix multiplication for tranposed tensors; \n",
-    "# other operations on transposed tensor would result in errors\n",
-    "c.is_transpose()  "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "True"
-      ]
-     },
-     "execution_count": 21,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "et=e.T()\n",
-    "et.is_transpose()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(3L, 2L)"
-      ]
-     },
-     "execution_count": 22,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "et.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "2L"
-      ]
-     },
-     "execution_count": 23,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "et.ndim()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Member functions (in-place)\n",
-    "\n",
-    "These functions would change the content of the tensor"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 1.44042695,  0.98662621,  1.10306036],\n",
-       "       [ 1.18920302,  1.12357867,  1.10779202]], dtype=float32)"
-      ]
-     },
-     "execution_count": 24,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a += b\n",
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 1.20000005,  1.20000005,  1.20000005],\n",
-       "       [ 1.20000005,  1.20000005,  1.20000005]], dtype=float32)"
-      ]
-     },
-     "execution_count": 25,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a -= b\n",
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 2.4000001,  2.4000001,  2.4000001],\n",
-       "       [ 2.4000001,  2.4000001,  2.4000001]], dtype=float32)"
-      ]
-     },
-     "execution_count": 26,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a *= 2\n",
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.80000007,  0.80000007,  0.80000007],\n",
-       "       [ 0.80000007,  0.80000007,  0.80000007]], dtype=float32)"
-      ]
-     },
-     "execution_count": 27,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a /= 3\n",
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 0.62944734, -0.72904599,  0.81158388], dtype=float32)"
-      ]
-     },
-     "execution_count": 28,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "d = tensor.Tensor((3,))\n",
-    "d.uniform(-1,1)\n",
-    "tensor.to_numpy(d)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 1.42944741,  0.07095408,  1.61158395],\n",
-       "       [ 1.42944741,  0.07095408,  1.61158395]], dtype=float32)"
-      ]
-     },
-     "execution_count": 29,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "a.add_row(d)\n",
-    "tensor.to_numpy(a)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Global functions (out of place)\n",
-    "\n",
-    "These functions would not change the memory of the tensor, instead they return a new tensor\n",
-    "\n",
-    "**Unary functions**"
-   ]
-  },
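The contrast between the two styles in one place (sketch, using only calls demonstrated in this notebook):

```python
from singa import tensor

a = tensor.Tensor((2, 3))
a.set_value(1.2)

a *= 2                # member function: mutates a itself (2.4 everywhere)
h = tensor.relu(a)    # global function: returns a new tensor, a is unchanged
```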
-  {
-   "cell_type": "code",
-   "execution_count": 30,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 1., -1.,  1.], dtype=float32)"
-      ]
-     },
-     "execution_count": 30,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "h = tensor.sign(d)\n",
-    "tensor.to_numpy(h)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 0.62944734, -0.72904599,  0.81158388], dtype=float32)"
-      ]
-     },
-     "execution_count": 31,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.to_numpy(d)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 0.62944734,  0.72904599,  0.81158388], dtype=float32)"
-      ]
-     },
-     "execution_count": 32,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "h = tensor.abs(d)\n",
-    "tensor.to_numpy(h)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 0.62944734,  0.        ,  0.81158388], dtype=float32)"
-      ]
-     },
-     "execution_count": 33,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "h = tensor.relu(d)\n",
-    "tensor.to_numpy(h)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(3L,)"
-      ]
-     },
-     "execution_count": 34,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "g = tensor.sum(a, 0)\n",
-    "g.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(2L,)"
-      ]
-     },
-     "execution_count": 35,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "g = tensor.sum(a, 1)\n",
-    "g.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([ 1.,  0.], dtype=float32)"
-      ]
-     },
-     "execution_count": 36,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.bernoulli(0.5, g)\n",
-    "tensor.to_numpy(g)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([-0.12226005, -0.05827543], dtype=float32)"
-      ]
-     },
-     "execution_count": 37,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "g.gaussian(0, 0.2)\n",
-    "tensor.gaussian(0, 0.2, g)\n",
-    "tensor.to_numpy(g)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Binary functions"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 1.66987431, -0.14241977,  1.51464427],\n",
-       "       [ 1.41865039, -0.0054673 ,  1.51937592]], dtype=float32)"
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "f = a + b\n",
-    "tensor.to_numpy(f)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.,  0.,  0.],\n",
-       "       [ 0.,  0.,  0.]], dtype=float32)"
-      ]
-     },
-     "execution_count": 39,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "g = a < b\n",
-    "tensor.to_numpy(g)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 40,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 7.66987419,  5.85758018,  7.51464415],\n",
-       "       [ 9.41865063,  7.99453259,  9.5193758 ]], dtype=float32)"
-      ]
-     },
-     "execution_count": 40,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.add_column(2, c, 1, f)   # f = 2 *c + 1* f\n",
-    "tensor.to_numpy(f)\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### BLAS\n",
-    "\n",
-    "BLAS function may change the memory of input tensor"
-   ]
-  },
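Concretely, `axpy` and the five-argument form of `mult` accumulate into an existing output tensor instead of allocating a new one; a sketch of the semantics used in the cells below:

```python
from singa import tensor

a = tensor.Tensor((2, 3)); a.set_value(1.0)
b = tensor.Tensor((2, 3)); b.set_value(2.0)

f = tensor.mult(a, b.T())        # out-of-place: new 2x2 result a * b^T
tensor.mult(a, b.T(), f, 2, 1)   # in place: f = 2 * a * b^T + 1 * f
tensor.axpy(0.5, b, a)           # in place: a = 0.5 * b + a
```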
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.24042693, -0.21337385, -0.0969397 ],\n",
-       "       [-0.010797  , -0.07642138, -0.09220808]], dtype=float32)"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.axpy(2, a, f)  # f = 2a + f\n",
-    "tensor.to_numpy(b)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.17231143, -0.16945721],\n",
-       "       [ 0.17231143, -0.16945721]], dtype=float32)"
-      ]
-     },
-     "execution_count": 42,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "f = tensor.mult(a, b.T())\n",
-    "tensor.to_numpy(f)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.51693428, -0.50837165],\n",
-       "       [ 0.51693428, -0.50837165]], dtype=float32)"
-      ]
-     },
-     "execution_count": 43,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "tensor.mult(a, b.T(), f, 2, 1)  # f = 2a*b.T() + 1f\n",
-    "tensor.to_numpy(f)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "## Next: [SINGA model classes](./model.ipynb)"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:pysinga]",
-   "language": "python",
-   "name": "conda-env-pysinga-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.12"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/index.ipynb b/doc/en/docs/notebook/index.ipynb
deleted file mode 100644
index 0c29187..0000000
--- a/doc/en/docs/notebook/index.ipynb
+++ /dev/null
@@ -1,134 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "![Apache Singa](http://singa.apache.org/en/_static/singa.png)\n",
-    "\n",
-    "# A Tutorial of SINGA V1\n",
-    "\n",
-    "Welcome to this tutorial for Apache Incubator-singa.\n",
-    "Please install [conda and SINGA](./installation.ipynb) before running the notebooks.\n",
-    "\n",
-    "## Outline\n",
-    "\n",
-    "* Introduction and installation\n",
-    "* SINGA classes\n",
-    "  * [Core classes](./core.ipynb) - Tensor and Device\n",
-    "  * [Model classes](./model.ipynb) - Layer, Loss, Metric, FeedForwardNet, Optimzier, Initializer\n",
-    "* Simple models \n",
-    "  * [Linear Regression](./regression.ipynb)\n",
-    "  * [Multi-layer Perceptron](./mlp.ipynb)\n",
-    "* Deep learning models\n",
-    "  * [Convolutional Neural Network (CNN)](./cnn.ipynb)\n",
-    "  * [Recurrent Neural Networks (RNN)](./rnn.ipynb) (WIP)\n",
-    "  * [Restricted Boltzmann Machine (RBM)](./rbm.ipynb)\n",
-    "* [Distributed training](./distributed.ipynb) (WIP)\n",
-    "* [Rafiki](./rafiki.ipynb) (WIP)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Introduction \n",
-    "### Training and Inference of deep learning models\n",
-    "\n",
-    "#### Training objective\n",
-    "\n",
-    "$\\min_{\\theta} \\sum_{<x, y>\\in D}L(f(x|\\theta), y)$\n",
-    "\n",
-    "Training procedure\n",
-    "1. randomly initialize $\\theta$\n",
-    "2. get a mini-batch of data $\\{<x, y>\\}$ and compute $\\frac{\\partial L}{\\partial \\theta}$\n",
-    "3. $\\theta \\leftarrow \\theta - \\alpha \\frac{\\partial L}{\\partial \\theta}$\n",
-    "4. repeat 2-3 until converge, i.e., $||\\frac{\\partial L}{\\partial \\theta}|| < \\epsilon$ \n",
-    "\n",
-    "<img src=\"./static/sgd.png\" width=\"400px\"/>\n",
-    "\n",
-    "#### Back-propagation for gradient computation\n",
-    "\n",
-    "<img src=\"./static/bp.PNG\" width=\"300px\"/> \n",
-    "\n",
-    "#### Popular deep learning models\n",
-    "\n",
-    "<img src=\"./static/models.PNG\" width=\"200px\"/>\n",
-    "\n",
-    "\n",
-    "\n"
-   ]
-  },
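A minimal numpy illustration of steps 1-4 above, on hypothetical linear-regression data with a squared-error loss (not SINGA code):

```python
import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(256, 3)                                   # hypothetical inputs
y = X.dot(np.array([1.5, -2.0, 0.5])) + 0.01 * rng.randn(256)

theta = rng.randn(3)                                    # 1. random init
alpha, batch = 0.1, 32
for step in range(200):
    i = rng.randint(0, 256, batch)                      # 2. sample a mini-batch
    grad = 2.0 / batch * X[i].T.dot(X[i].dot(theta) - y[i])
    theta -= alpha * grad                               # 3. gradient step
    if np.linalg.norm(grad) < 1e-4:                     # 4. stop at convergence
        break
```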
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### SINGA V1\n",
-    "#### Software Stack\n",
-    "\n",
-    "<img src=\"http://singa.apache.org/en/_static/images/singav1-sw.png\" width=\"500px\"/>\n",
-    "\n",
-    "#### Progress\n",
-    "\n",
-    "|Components|Feature|Status|\n",
-    "|:---------|:------|:-----|\n",
-    "|Core |Tensor implementation|V1.0|\n",
-    "|     |Device implementation (CppCPU, CudaGPU, OpenclGPU) |V1.1|\n",
-    "|Model|Basic layers for CNN and RNN| V1.0|\n",
-    "|     |Basic loss and metric functions|V1.0|\n",
-    "|     |Basic optimizer and initializers |V1.0|\n",
-    "|IO   |Message paasing via socket| V1.0|\n",
-    "|Installation| From source on Linux and Mac|V1.0|\n",
-    "|            | From source on Windows|V1.1|\n",
-    "|            | From wheel on Linux and Mac|V1.0|\n",
-    "|            | From debian package on Ubuntu |V1.1|\n",
-    "|            | Docker images | V1.1|\n",
-    "|            | AWS AMI       | V1.1|\n",
-    "\n",
-    "#### Schedule\n",
-    "|Components|Feature|Status|\n",
-    "|:---------|:------|:-----|\n",
-    "|Core     |Tensor API = Numpy API| V1.2|\n",
-    "| |Operation scheduling and memory optimization| V1.3|\n",
-    "|Model | To support more vison, NLP examples| V1.2|\n",
-    "|      | General neural net API| V1.2|\n",
-    "|      | To support reinforcement learning examples| V1.3|\n",
-    "|      | Auto-grad + Functional API | TBD|\n",
-    "|IO | HDFS integration  |V1.3|\n",
-    "|Server| Consistency and communication optimization |V1.2|   \n",
-    "|      |Message passing via Infiniband |V1.2|  \n",
-    "|Installation | From wheel on Windows| V1.2|\n",
-    "|             | From debian on Linux | V1.2|\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Next: [SINGA core classes](./core.ipynb)"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/installation.ipynb b/doc/en/docs/notebook/installation.ipynb
deleted file mode 100644
index 65093dc..0000000
--- a/doc/en/docs/notebook/installation.ipynb
+++ /dev/null
@@ -1,93 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The easiast way to install SINGA is via [conda](https://conda.io/docs/).\n",
-    "\n",
-    "## Install Conda\n",
-    "\n",
-    "If you haven't install conda, you can install it via\n",
-    "\n",
-    "* Linux 64\n",
-    "\n",
-    "      wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh\n",
-    "      bash miniconda.sh\n",
-    "\n",
-    "* Mac OSX\n",
-    "\n",
-    "      wget https://repo.continuum.io/miniconda/Miniconda2-latest-MacOSX-x86_64.sh  miniconda.sh\n",
-    "      bash miniconda.sh\n",
-    "        \n",
-    "* Windows (not supported for the moment) \n",
-    "\n",
-    "## Install SINGA\n",
-    "\n",
-    "With Conda installed, you can install SINGA via\n",
-    "\n",
-    "* SINGA for Linux\n",
-    "  * CPU Version\n",
-    "      \n",
-    "        conda install -c nusdbsystem singa\n",
-    "      \n",
-    "  * GPU Version\n",
-    "      \n",
-    "        conda install -c nusdbystem singa-cudax.y-cudnnz\n",
-    "    \n",
-    "    where <x,y,z> specifies the cuda and cuDNN version, which could be (cuda8.0, cudnn5), (cuda7.5, cudnn5) or (cuda7.5, cudnn4)\n",
-    "    \n",
-    "    \n",
-    "* SINGA for MacOSX (CPU only)\n",
-    "  \n",
-    "      conda config --add channels conda-forge\n",
-    "      conda install -c nusdbsystem singa\n",
-    "      \n",
-    "      \n",
-    "## Install Jupyter Notebook and Other Packages\n",
-    "\n",
-    "      conda install --file requirements.txt\n",
-    "    \n",
-    "To learn more about Jupyter, please check [IPython in Depth](https://www.youtube.com/watch?v=xe_ATRmw0KM).\n",
-    "\n",
-    "\n",
-    "## Run the notebooks\n",
-    "\n",
-    "Download the [notebooks](https://github.com/apache/incubator-singa/tree/master/doc/en/docs/notebook) and start the jupyter\n",
-    "\n",
-    "    jupyter notebook"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/mlp.ipynb b/doc/en/docs/notebook/mlp.ipynb
deleted file mode 100755
index 5dcbe28..0000000
--- a/doc/en/docs/notebook/mlp.ipynb
+++ /dev/null
@@ -1,419 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Train a multi-layer perceptron (MLP) model \n",
-    "\n",
-    "In this notebook, we are going to use PySINGA to train a MLP model for classifying 2-d points into two categories (i.e., positive and negative). We use this example to illustrate the usage of PySINGA's modules. Please refer to the [documentation page](http://singa.apache.org/en/docs/index.html) for the functions of each module."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "To import PySINGA modules"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from singa import tensor\n",
-    "from singa import optimizer\n",
-    "from singa import loss\n",
-    "from singa import layer\n",
-    "#from singa.proto import model_pb2"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Task is to train a MLP model to classify 2-d points into the positive and negative categories.\n",
-    "\n",
-    "## Training data generation\n",
-    "\n",
-    "The following steps would be conducted to generate the training data.\n",
-    "1. draw a boundary line in the 2-d space \n",
-    "2. generate data points in the 2-dspace\n",
-    "3. label the data points above the boundary line as positive points, and label other points as negative points."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We draw the boundary line as $y=5x+1$"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# generate the boundary\n",
-    "f = lambda x: (5 * x + 1)\n",
-    "bd_x = np.linspace(-1., 1, 200)\n",
-    "bd_y = f(bd_x)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We generate the datapoints by adding a random noise to the data points on the boundary line"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "# generate the training data\n",
-    "x = np.random.uniform(-1, 1, 400)\n",
-    "y = f(x) + 2 * np.random.randn(len(x))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We label the data points above the boundary line as positive points with label 1 and other data points with label 0 (negative)."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD8CAYAAAB+UHOxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXt4VNW9sN9FkslkMkhUVFDgiApoW2prQdt6qZfWC3gj\nB63SiooKavUrVU6PRwU88OmxWq1VW6VVo9BQbau0qKD1gkLVU8B+4qWCCmJBvAICSSYJSdb3x5pN\ndib7svbM3nNJ1vs8+5nbnrXX3rNn/db6XYWUEoPBYDD0PvoUugMGg8FgKAxGABgMBkMvxQgAg8Fg\n6KUYAWAwGAy9FCMADAaDoZdiBIDBYDD0UowAMBgMhl6KEQAGg8HQSzECwGAwGHop5YXugBf9+/eX\n+++/f6G7YTAYDCXDq6+++rmUci+dfYtaAOy///6sXLmy0N0wGAyGkkEI8YHuvkYFZDAYDL0UIwAM\nBoOhl2IEgMFgMPRSitoG4MTOnTvZuHEjzc3Nhe5KSROPxxk0aBAVFRWF7orBYCgQJScANm7cSN++\nfdl///0RQhS6OyWJlJLNmzezceNGhg4dWujuGAw9mlQKFiyAtWvhwAOhthbi8UL3SqGtAhJCPCCE\n+FQI8abtvT2EEM8IId5NP+7u8t2ThRBrhBDvCSGuyaXDzc3N7LnnnmbwzwEhBHvuuadZRRkMEbNi\nBey7L0yZAjNnqseBA9X7xUAQG8CDwMkZ710DPCelHAY8l37dBSFEGfAr4BTgS8C5QogvZdXbzjZz\n+boBcw0NhqhJpeDEE+GLL6ChAaRUj198od4vhvmXtgpISrlUCLF/xttnAMemnz8EvAD8Z8Y+hwPv\nSSnXAQghHk5/75+Be2swGAxFSqaqp7UV2tqc921rg8cegwkT8tvHTHK1Aewjpfwo/fxjYB+HffYD\nNthebwSOyPG4BWP9+vWceuqpvPnmm/47h8ANN9xAMplk2rRpeTmewWAIzooValbf1gaNjVBdrZ63\ntDjv39gI69blt49OhGYEllJKIUTOFeaFEJOByQBDhgzJuV+9jba2NsrLS862bzCULHZVj0VDg/d3\nqqvhgAOi7ZcOucYBfCKEGAiQfvzUYZ8PgcG214PS7zkipfyNlHKUlHLUXntppbPIO21tbfzgBz/g\nkEMOYfz48TQ1NfHcc8/x9a9/nZEjRzJp0iRa0qJ///335/PPPwdg5cqVHHvssYCa2U+aNIljjz2W\nAw44gDvvvHNX+zfeeCPDhw/nqKOOYs2aNbve/+1vf8vo0aM59NBD+fd//3eampoAuOCCC7j00ks5\n4ogj+OlPf8qwYcP47LPPAOjo6OCggw7a9dpgMITLggXuqh43ysuVN1ChyXWquBA4H7g5/fgXh31W\nAMOEEENRA/85QCiar6lTp/Laa6+F0dQuvva1r3HHHXd47rNmzRruv/9+jjzySCZNmsTtt9/OnDlz\neO655xg+fDgTJ07knnvuYerUqZ7trF69miVLlrBjxw5GjBjBZZddxuuvv87DDz/Ma6+9RltbG4cd\ndhjf+MY3AKitreWSSy4B4Prrr+f+++/nyiuvBJR77Msvv0xZWRn9+vWjvr6eqVOn8uyzz3LooYdS\nrMLUYCh11q5VKh034nE14FuqofJy+Otfi8MVNIgb6O+BV4ARQoiNQoiLUAP/94QQ7wLfTb9GCLGv\nEGIRgJSyDbgCeBp4G/iDlPKtcE8jvwwePJgjjzwSgB/+8Ic899xzDB06lOHDhwNw/vnns3TpUt92\nxo4dS2VlJf3792fvvffmk08+YdmyZYwbN45EIsFuu+3G6aefvmv/N998k6OPPpqRI0dSX1/PW291\nXsazzjqLsrIyACZNmsTcuXMBeOCBB7jwwgtDO3eDwdCVAw9UA7sTySTccw/MmQOzZqnHjz6C0aPz\n20c3gngBnevy0QkO+24CxtheLwIWBe6dD34z9ajIdKGsqalh8+bNjvuWl5fT0dEB0M3vvrKyctfz\nsrIy2nzWkRdccAF//vOfOfTQQ3nwwQd54YUXdn1WbbsDBw8ezD777MPzzz/P8uXLqa+v1zovg8EQ\nnNpa+NGPnD9Lz8lYvRo2b4adO5X3T7EEg5lcQFnwr3/9i1deeQWA+fPnM2rUKNavX897770HwLx5\n8/jOd74DKBvAq6++CsCjjz7q2/YxxxzDn//8Z1KpFDt27ODxxx/f9dmOHTsYOHAgO3fu9B3UL774\nYn74wx92WRkYDIbwiceVSqemRs34hVCPySR0dMDll8Ps2fDrX6tVwCWXFE8wmBEAWTBixAh+9atf\nccghh7B161Z+8pOfUFdXx1lnncXIkSPp06cPl156KQAzZ87kxz/+MaNGjdIaiA877DC+//3vc+ih\nh3LKKacw2rZWnD17NkcccQRHHnkkBx98sGc7p59+Og0NDUb9YzDkgdGjYdOmTlXPXXcpXf+OHcpL\nyE5TU/EEgwkpc/bcjIxRo0bJzIIwb7/9NoccckiBelQ6rFy5kp/85CcsW7bMdR9zLQ2GaJg/X6V9\n8HIHTSaVwAg7GEwI8aqUcpTOvsZhvAdy8803c8899xjdv8FQIPw8g6A4gsGMAOiBXHPNNVxzTU45\n9wwlQDFnmeztWJ5BXiuAYggGK0kBIKU0ycxypJhVfwZ/nFIP/OhHyhhZLC6GvRkvzyCLYggGKzkj\ncDweZ/PmzWYAywGrHkDcTBdLklLIMtnbsXsGVVV1/SyRUO8XQzBYya0ABg0axMaNG01qgxyxKoIZ\nSg+v1APFkmXS0OkZtGABrFkDn38Oe+0Fw4cXj7qu5ARARUWFqWJl6NV4GRiLwbBo6KSqqriFccmp\ngAyG3o5X6oFiMCwaSgcjAAyGEqO2VhkQnSgGw2KpkEopf/3Zs9Vjb7SdlJwKyGDo7VgGxkwvoGLK\nMlnsGC8qhREABkMJYjcwrlun1D7FYlgsdrwKuJx4osrW2VuuoxEABkOJUuwGxmLFeFF1YgSAwWAo\nadwiot3eD8OLqqdEYRsBYDAYShY3Xf4dd8DUqd3fX7gQPv4YKiqgtbV7ezpeVD3JflBy2UANBkPv\nwmuGv+++XXX5FkKoCGmn9xMJ9xVATY1aASxe7Dy79zpmTU1x2A9MNlCDwdAj8Jptv/uuuy7fbV4r\npfPgX12tVgV33KFWAG6z+wULnFcOoN4vNfuBiQMwGAxFiV/Oo9Wr/VMu6xCLwfnnq5n/1KneOZZW\nr1YFXZxoaoJ33sm9P/kkZwEghBghhHjNtm0XQkzN2OdYIcQ22z4zcj2uwWDo2fh562ze7B4RHYSd\nO1WJxsWL/b2DXEp/76LUUpTlrAKSUq4BvgYghCgDPgQWOOy6TEp5aq7HMxgMvQM/b5299nKPiHaz\nAThhGX51vIP23NO7rf799Y5ZLIStAjoBWCul/CDkdg0GQw8nMzXD4MHeOY+GD3cuxl5TA3V1Xd+v\nrlaPTljpM3RyLB1wgFIZOVFV
BSNGBD/vQhK2Efgc4Pcun31bCPE6aoUwTUr5ltNOQojJwGSAIUOG\nhNw9g8HgRdT+7W7tOxl73Wb30Dlox+PuEdFnn931/SFD4LTT3NNneBVxKS9X3z/1VHcjcGVl6eVh\nCs0NVAgRAzYBX5ZSfpLx2W5Ah5SyQQgxBvillHKYX5vGDdRgyB9ug3BY/u1u7S9cCKef7uxamUh0\nN7oKoWb4558fvA+WAHJLn5FNH6GzwEsxxAEEcQMNUwCcAfxISnmixr7rgVFSys+99jMCwGDID7r+\n7dmuELzaj8WUvn7nTv3+Rulz7yQkHnsMpkxxrvFbWQn33gsXXBB+X7KhUHEA5+Ki/hFCDAA+kVJK\nIcThKNuDjz3dYDDkC538OMOGZR8B69W+m0rFiyhz9jjlWPIyELe2wsaN4fcjH4QiAIQQ1cD3gCm2\n9y4FkFLeC4wHLhNCtAEp4BxZzCHIBkMvw88DZs0aNdhnm0HTq/1ssLxy8pWTxzIQO60ASrkITygC\nQErZCOyZ8d69tud3A3eHcSyDwRA+fgPc55/nlkHTq/1sqK5WaqN99w0/J4+TUPEzEDsZf0shYZxJ\nBWEwGHwHuD33zC2Dplf7TsRiygDc1uYsNMrL4bbbYNu2zvfCyOlvNwI3NCj9/kUXwbXXwuOPe3sR\nubWTKZy+8pXiEQxGABgMBt8qY+++m5sKxKl9NyVwLAYXX6wG+DfecO7TVVfBLbc4fz9b+4BToZiW\nFvU4Y4YyPC9cCBs2eBfh8So4c/zxqv+WgInF4JJL4Omn4aijgvU3DIwAMBgMgHeVsZEjg6tAvNp/\n5x249VbnvDqJhBr843H3Pt16a+45/TPxMlSDGtBPP915dWFX93z8sbtHU6YAbW1V2zHHwNKl+RcC\nRgAYDIZduFUZC6sOsb39sWP12nPqUxRGWR1DtdPqIlPd41ZrwAsp4aSTVK6hfKqDjAAwGAxahF2H\nOJf2sjHK+qFjqM5cXTipe7JxawW1ash3OmkjAAyGiCgFLxBdMs9l2rTszsXpmmQz4IW1IrGjY6i2\nVhfWeSxYoJ6Hwc6dSg2UTwFgKoIZDBEQdVqFMPETVGGdSxTXxC+1Q1BWrIDvfa+rd5Gdmpqu3kB+\nbq2xmBrYq6uhrEwZlZub3fdPJHJXAxUkFUQUGAFgKEVKoWyghd+gHNa5FPs1sQvBwYOVQLn1VvVZ\nS4t+TiA7ySRMnKhqDVjCaeVKOPpo7+/MmZPbKsCUhDQYCohOWoViKBvo5a5o+dJney6Zq4rWVr12\nCqE28xKCmS6fjz3m7Slkx4pVsPf/qKNUXMH99zt/J1sPpmwxAsBgCBmdwiLFgM7gns25OA2obW2d\nPvVu7XgFT0WlNvMSgk4un6tX+6t9kkl3W0QqpYLLYjFnY3G+00qYmsAGQ8joFBYpBnQG96Dn4lbH\nt7nZPfCruhoGDfKu/+ulN88FHSFosWKFmtG7EY/D+PFKhfPRR92F1ooVSg12//3unkJ9+rSzZs3/\ncNFFFwU7kSwxAsBgCJnaWvdiJtm6KEaBzuAe9Fz8gqnc2gH9gThMdFc4lmBzKwgP0NGhagK/9FKn\nwLKqnM2YoYK9vvjCaSUkicVaKS/fwfbt32TWrGtZv349rdn6kwbACACDIWQsF0WnUoXZuihGgc7g\n7ncuUnYt47hmjXcwVTzu3M6GDd4D8Tvv5H6+TuiucHQEW2srvPgi/PrXsMceMHOmmvFPmaKuj9cq\nprX1fg444ChuuqmWDz74gOeee46YW+3JEDFeQAZDRITtohgFuq6ZTufilKeno0Pt7zRTTibhrruU\n/jvzmsyf715wBZR75AsvhG8LaG5WXjp+3kmzZ6sBPZrhUnL88Vt49tk9EG6FiwNgvIAMhjzh5bXi\nllahmNCNxs08Fy/jqdsY1tEBZ56pBtZM/IKwmpq8s3w6/Q5S+nsUSakSy910k3ptd/m0VmuplMrv\nk02KB1322GNP1+sWKVLKot2+8Y1vSIOhWFm+XMqaGimTSSmFUI81Ner9nk59vTpfNYR23aqqpEwk\n1KP9/UTC+/osX672cWoT1PHq652/l/k79O2rHr1+G/v3QMrKSinjcSlnzZIyleq6T3W1e7/ctw7t\n/eqmrw3ttwFWSs0x1tgADIYscPN2idprpVjwMp6mUmo2nzmjbWpyvj6WofSpp1RCNDec3E7dfocd\nO9Sj22+T+T3ojNK9/fbOtr/3PbWP07kmk0qdlStJtnPOnd8uyE1jVEAGQxaUSrBXVPglTvvlL1Xq\nAyfs1yfTBlFZ6X7MTLfTVErlJPLyzHE7tvXca5+1a91TQsRiKsp3+nQYOFACQfQ3ylBSSTNxmnmG\nk4i3NxbkpjECwGBwwUu/XyrBXlHhp7P30pXb6/lm2hG8JsF2t1NLcDQ1BdPLW8eW0r8GslvBGYDW\nVsn777/MmDFXAiOBOuxCQAjB9DPf4M4Fg2mjjEaqqaaRctq5itsQwAGso5bHiNMCjaIgN01YReHX\nAzuAdqBNZlighTJt/xIYAzQBF0gp/xHGsQ2GKPCLSu2pRcJ1sdxDjz022AwcOq9PkJiBZLKrUTZT\ncAQ9tvXcqwayF31IsXjxrxg9upw77xzFSSdt5pe/3Is1a2DECLjxRqi56y9cs+AmFjCOdRzQdcDP\npKJCRcPlG11jgdcGrAf6e3w+BliMEpHfBP6u064xAhsKQVOTMvw5GexqapSBMJXy3ydffa2vV4bL\n+vqux/X6LCymTw9uHLWuz6xZykAb5DtSehugddvx+/2mT5dSCHcjbpxGuerFv3tfnPp6KSsq9DvX\nr18oHgQUoRH4DGBuun//C9QIIQbm6dgGQyB09PvFEOxlpRaYMkX5qE+ZonzaV6zw/ixMDj5YnbcT\niYTarOtjFXp//HF1fbyCsDKx6+51Knc5EYt1Htvv99ttt03EhdvSpoNruYmvPvKQ9wFra90NIU5s\n26aWVDNnKqt4PozCupLCawPeB14DXgUmO3z+BHCU7fVzwCiXtiYDK4GVQ4YMyVkaGgxB8ZqZCiHl\n7Nmd+1qz7Nmzo5tlO+G3SunXLz+rE7+Z9DPPKNfOWEy9V13d6Y7p9V2n6z5jhrrG48crl82gs/+K\niu6uoPbf7957t8tf/OIeecQRR0iolEk2O58XW2SKSilPOMH/As2ald1SJQefYgKsAMISAPulH/cG\nVgHHZHyuLQDsm1EBGQqBl4rBzRc9W7JV03j1MR53HyCD9l+nf27xEEuX+qvJMn3x3baqKiVELEGS\ny1aTaJGp6TdKWV8vm7/4Qj766KPyzDPPlBUVFRKQh48cKZ+YMEG+NHyCrGGzTLJNCtpkkm2yhs1y\nOaNUQ+PH+1/AVMpdGmt1NrjEzrsA6NIg3ABMy3hvDnCu7fUaYKBfW0YAGApBvvT7uQSSBdGfZ86k\n7SsYJ6xBf8oUNXvX6Z/TSkhXkFrfnTHDOxDMb8Lcr58KAPPdl+3yd0yQqYoKuVUIOQrkg
AED5LRp\n0+Q79fVdJFITcVnPuXI218l6zlUzf6uhfv30bgZdKRfSjCOvAgCoBvranr8MnJyxz1i6GoGX67Rt\nBIChUEQd5atjaPYiqhWATuRrIiHl1q3+5+gnpGbMcD++/bp7jY/xuJqIW0LH+r7XSkHQJmdz3a43\nWpNJuXPHDu8fJdfB2S5VM0Okc5XYGQQRAGEYgfcB/iaEWAUsB56UUj4lhLhUCHFpep9FwDrgPeC3\nwOUhHNdgiAwrR86cOTBrlnuO92x55BH3YuI66Y+9MnlaRk4nvNJR290rvYysTU2w337+BmU/I+/P\nf969jczrPnGi8pB0o6VFciirmLB2NvHH5jN6ZDObNsHFF7tH6VbTyAF0+txXAOULFwbPZR0k4MNK\npnTvvaro76xZ6kfyinyD6H2KdSVFITazAjD0RJYvVzNXr4mfzqTPa5WSzQomqHul30pFx8jrt5rw\ns6HGaJb1lRd2OcmdL78sH330SVlRscO532zuqsqxZtlBDba5GoSamqS8/HJvV9FEIlIbgIkENhhC\nQLeWrTXL9vPwk9L/mH6ZPHWyfNoJ6l7pl/JCJ1jMWk24pXoePNi9fCJAOTupbZkPyF1RXQ1HHskE\nKUn0+y7NzX+hrKySVKoP1bKBcnbyV07qGoxlzbLXrtU99fTBc6zuU1UFe+4JO3e673PeedH6FOtK\nikJsZgVgKAWCzLZ1Z9n5DCYL2jc/PX4mOsFiTudrXVd3XX67XMa3u33QVF4uX736atna2tppnJ7Z\nKusTF3ed+dsPvmVLMP1/GAYhv/Sn1dVZrTAowkAwg6FosbJRWlWtgsTfBM0KqjvLjrIMohtB45bA\nWY+fiVewmEUqBQ8/3PW1e7oHSRUNvMDRHMXL3T6tam/nsJoaKioqdqner7+hggkvTCZeU+Uc+bV4\nsb7+v7JSRZTlYhDSqTFZURF5/VAjAAy9mlwjZoMUFQf96NdCJJSLx+Hqq4N9xyrU4iU0vQzWFi0t\ncNllndfd67rGaebX/IijHQZ/wN1w6mXZD6L/ammB007LLVLXz+CcSOQlpNwIAEOvJYyc/kGKis+f\nD6tXd5ZN9MJtDEul4MEH4ayz1Pbgg+FnDAhamcpvtWLZAhIJ73aamzuvu9d1bSHGRga7N1RW5j5z\n3rUkuF49WgNskLwUkPsSzU/gTJsWfv1LB4wAMPRags7endApKm5fZfzf/6sEjRBqLHLDyb64YgXs\nsw9ceCH86U9qu/BC2Guv8HL8BB0HQW+1Mno0bNzoLwTa2uCBB75gzZpFCOE8QFpunK5yqqNDFSwO\ngs4yxU6uSzSvC51MqpSiecAIAEOvJYyc/l7jRnk5nHJK91VGKtUpBC6+uGvCNLeEctZqZceO7sdp\naFCVq8JYCWRjB9B1Vd99d+Xt46XVaGjo4IorbqO+vhYh2h33KaeNWjyk844dwcuyxeOwcKH6MawA\ngiDVaYLideO0tSm3pzwkgzMCwNBr0Zm9++GXVdLLttinDxx3HGzYoAKejjtOPb7/fvfV/4IF3uNB\nc3M4RuM33gieabOjA8aM0dt39Gi45x53ISBEE6efPpLVq1fxyiu7UVMjSbIDQTtJtlPDlu5unE60\ntSmDhpdl3279nz1b6fX79FGDbyymJKHbDZKrC2jmjWOnpQWuvDKa9K2Z6LoLFWIzbqCGKAkz549b\nVlC/zKJTpui5kOrEKAXMGOB4DjqekH36dH1dVRXMK/K111bLysoml+ve0b2mwfjHnHPx+G2xmF5F\neK98FX37qpw/QXOC6Gb5a2qSsq7OPTIw4mRwJhDM0GuxJmGZlb/Ky4M7YFi2xUy8KoclEjBvXldP\nQGu/E09UDip2G2U87r4KqKzMPWOAbiaETCNxKtWporL3edeHCxbQ+MYbvPTRR8x6801eevVV+vQ5\ngvLyZygvj9PSUk51tUhfd9Hl+1VVMGHeKfDMhe4Fet2woscyL6qU+iXFpIQ77lArAqeIOqcIwDfe\n8C4nZ6eqSrXtpQ6KsFawEQCGXo3lGfjII/Dkk+q9sWNh5Mhw2veqnWtN85xobe36v7facRMA8XhX\njYRuZLIdXU/Iykpn9/XMsar1pZfgpJNob26mqr2dbwGLysp4YupUjv/P/6Rfv756kcrxODzzTNdB\ntaIiWDFgewet5zo0Nirr9fXXd//MrW5oW1tXie8m1S0KWWBad6lQiM2ogAz5IOrMn27tX3SRtwZj\n5szu7TilO04mvbUbuuejEwk8bpy3SmvWrA750ksvySsuukhuddsx2zBnu57t8su9U5a6ddDK+aOb\nS9st30/QzKFebYVcgIJC1gMIczMCoPjIR53ZfJJrWuYgx8m0EUye7D1eTJ7s3E5dnUqBPH68ep6p\nM8/2fLxsIrGYlB995D1WxWLNcu+9p0pAXhCLyaby8tAGtUCd9RuAg+S8cLto9fXBixe4pXYOuQCF\nEQCGSIh6phwFfgIrn9W/Mvt08MHe48W4ccHb9jqfykolMLzw+429x90t8thjT5YPPfSQbL7uOv26\nmk4XR2eGkVm8IBZTFmm3C2ANplu2+A/efje3ToKjIDdUiH+uIALA2AAMWjjlZvFTbeazb076bjcV\nrd0Wl2/1q71PToZhO0H98cEngrYFJk+Ggw6Co45y3scvw2hZ2U6mT3+Fa645jJ07JZBAiGbi8TL+\n8IdWTj11sdpx/nx367ebj63OD5bZ2YUL4eSTO1OG2n1xobtl3zLQStm1Lcv//6qr1PftJ+50g23e\n7HwBvfByHfW78FGhKykKsZkVQPFQiJmyDrnUo7XOy20ymEiEd15NTVLOmeOd+j1z85utO6Gr3Vi2\nTL/Njo4OuXz5cnnllVfK/v37S0D27z9YnnTSQ/LSSzfK3/2uo/tEPahaIxvdld936uq66ty89ncr\nTJCtAaeysmBLZYwKyBA2fv7sufqgZ4Pf/1lHYG3Z4n1eOqUP/Vi+PHia5WQyO/uDrmq8stK//Q8+\n+EDedNNN8uCDD5aArKyslGeffbZ84oknZGtrq96JB8mT7WbUDct4GnR/vxvMrbRjVZUSPk6BIXkg\niAAwKiCDFl7+7FFXrXPDL5ePm5egXbWzeHHnKj+TeBwWLcrNBTuVUmka/NQ9FhUVShvxzDPZrf6t\n2IZjjvGOHG5pUemXL7ig6/vbt2/n0UcfZd68ebzwwgtIKTn66KO5+uqrGT9+PDU1NfqdCaLWWLo0\nuC4uqP4u6P5eN5hXxrxYDM45p3A60QDknApCCDFYCLFECPFPIcRbQogfO+xzrBBimxDitfQ2I9fj\nGvKLX86biNOWO7J2rfvAakXzO2EXWGvXug+Uzc252wD8UjhkUlEB69fnnghSJ6OnFffQ1tbGU089\nxQ9+8AMGDBjApEmT2LBhA//93//NunXrWLp0KRdffHGwwd/CLfumnVRKRcS5kUg4zzCC5vIIur+X\nwGhqUtW6nAodZJOMrlDoLhXcNmAgcFj6eV/g
HeBLGfscCzwRtG2jAiouis0LqK7OW83hptvPtAGE\nYdtwc14pRJlZPe/IDvnd726VV199tRwwYIAE5O677y4vu+wy+corr8iOjo7sOxEUP8NFPO6sQglq\nZwi6v9/NUVeXHx/igJBPFZCU8iPgo/TzHUKIt4H9gH/m2rahuCiUo0K2XH013HVXV6eSsjLl6HHr\nrWpCOGZM7iubZcuUI4qldkomO51XgpKr55FuOgeQvPfsj3nxxd8zduxYJk6cyJgxY6j0yoAZFX4h\nyG4FFOy5PJqb1RaPd76feWNa71upU1taVFiz2/61tXD55c7Htm4av3ziEaVwCItQbQBCiP2BrwN/\nd/j420KI14EPgWlSyrfCPLbBnWzSArjhlPMmzPaDsGGDUnWoRWZXhFAqILvAkhJuvx1uuaWrd+Ad\nd8DUqdnlA1q2DL7zna59sNRSxxwT/JxytafopnPoSwP/L76Q9vfeZ8/99vPeOeof+MAD1UDspisT\nwnswtebdmc/dEKJTR2Z/nskbbzgLn2RS3RxPPVW4FA5hobtU8NuAJPAqUOvw2W5AMv18DPCuRzuT\ngZXAyiFDhkS1Suo1OMXKJBLd3QC94m+cPrPemzJFtVddnX+1UBD1jZ/H4JYtwZ02mpqCB4P6bblq\nDry1KR3Dm0beAAAgAElEQVSyghZZwxa5nFF6+iYvP9tsQsKdbqZUyj0bprU5uZkFdR318+qZPl3P\nZdRqu0h9o8m3GyhQATwNXKW5/3qgv99+xgaQG173sBCdQsBLt+/0Wd++6tFLbZsPFWgQlW7Y/9Wm\nJpWOpqws+8Fe11VcOzi2qUm+d+NtMlm23fF4cRplHRM7Uyr7+e/63UBBjUFeN5qXscTtB6qrUxdR\n9zs6QRJWn2bN8r9hQk7hEBZ5FQCAAOYCd3jsMwAQ6eeHA/+yXnttRgDkhpdrtTXp2bLF/R7u1y94\nupUgg2oYeYV0DdNhxjFYx4zFsh/8LRui36pD5/w+/fRT+furrpLbysrkdpD/yyhZwxaZZLsUtMsk\n22UNm9Wsv4tEiHtHmwXJmeM36PnNqLduVTecbrvLl3uvGpx+1CBJ4OJxvRum2DwjZDABEIYN4Ejg\nPOANIcRr6feuBYakVUz3AuOBy4QQbUAKOCfdUUOE+OmD29qUd56bHau5OXiBcAs/FWjQqH83dA3T\nYcUxOKXEyIbycn9Xce/0G5K7736MRx55iCWLFvFBezu7pfc5gpVsYl8WMI515SM4QLxP7c6Hu1fR\nam5Wxo8vf9n5ousaFCy8DJ9+QRuLFnVP+exmjLEujJd/rZcbqE5QhpTudgl722F5RhTKkKYrKQqx\nmRVAbtTXe89ShZDy+OP1J0VhrQDylYHTTlir9aCT4szrHWSC6H2s7RLOlfvtt5+cf+qpsi0XY4RX\nxssgJ+u1lNJdgrmVVrMvF3VSQQd1A3VbBeTjJg15FUGAFYCpCZxn7GVI3UqVhoVX8BaoiczBB7vH\nxlgectng5ULpNxkMo7ZtJk61ey2Pwauu0m8n6KTYIpGAGTNgzhyVOG/XhNvthkilWLvgdRob3BbK\n1VxxzIX865JLODcep8wplFkXt4vudwN165LHUsorCMse6GW5mV19tXp9001w1lmwxx4waZK6iPfd\n5/0j+LmB2m8CNyor4T/+w73Yc1izc/syr6FBiZiGBvU6aGH7bNCVFIXYSnUF4KbbLoS6cNky94mX\npXoN0wZQXe1/ToXMK9TUpI4fj3faD8OalcdiKke/dglZtxviwQdle79+8qHyiTKJs0E3yXZZX3mh\n+p6bITSMmXsQg4fXzNhr9m33SLAf0y3Xjt+s3S+LnvUHnTnT243L7u0UVU6fCDyJMMngCkeu2Smj\nYOlSdZ+71cjW8QLyG2NiMeUSqvMfKaT3XK7qJx1V0ubNSktx/PHq0TGhnEdHOtKPKSplDZudj8Xm\nYAXSvTa/i265PHkJgUTCX4IuXeou+a2B28srQWcL+mdavtzd+JyPP2cEsyEjAAqEn5tx0GSHYffN\nayLjpXqtq5PyS19y/48I0b18oReF9J4LQ/gEdZt1XAF4dKTD9nw5o2QNm2WSbVLQJpMVqU4/fqeT\nCJJvOizJ55ZKOcjFt4RAIhEsuMJtVhOEujp3fX/Uf84CrwBMNtAQCSM7ZVQ4RfD6fa5bvKS6Gvbf\nX6mwdZwY7BH82UTf5kIYBWDcHD+khH331Sua88Wrr9KvoQE/J6vRrGQT+ymPHg7kgEGS2vd/3t2j\nx0JK/xOorFQ3Y5CL7vej6SSK8zOgBNV3J5MwcSIMHJhbXpING1RaCCei/nPW1ir3NyfykGXRCIAQ\n8bq/d+7sLFqUSaHSKXsRxN1RCOVN2N6u79JZqLxCYbmDOgnM+fO9JwD19U0I8TDz5s1j4AsvMAeV\nPTGTTKFQRTMT+L3q4Ic7AZeZhHUgP047DQ49NPhFt360Rx7pTCU6diyMHKn3/SBumDqUl8Ntt+V+\n0xQy13khZ0NgVEBh4reac1vZ9utXfMXV/Vbr9lW3XwlWP/JZaD5s9ZNd33/44V7ainZZWTZTAnLY\nsGHyphkzZNtuuwVT1VRV+bs/+m25qjRy8WTIppC727bbbuF5T+jcFFHfpH462gBgbACFwe8+WrbM\nebDs27f4Cqv7BU2ecIK6T+vqvAsjZZtqJqrrkasXkJ0HH9SPoUiyTT7Y54fyzXvv7Uy1nHnyfvlw\nwgjayMXIEkYAh45XQVWVt8E5FsuuXqZOv3Iy7BQHQQSAUQGFiN9q7itfcXar3rGj+AqrDx7svSoe\nMULt+//+n3M1LavNNWu8j5nPQvN2m4Y9E/BPf6pcvt1qlTgFaG7ZAhdeqEYkHcpp4/sdfyR+Wb2K\nvD3qqO56sFWr4E9/cm5ACOULn60KJZnMXa3gZeTauRN+/GN1YUCphpxCne1qpMsuc9b7V1aqz/7n\nf9yPtXFjdufgRhiGnVJEV1IUYiu1FYCF22quSJMHuk5w3PpqzwPm53Ry+eXux83n9Qgyec3MdOo0\n8bv8cq/z7pDltCivHbZ1z8OTSASPvK2slHLcOHc9otvKIJGQcsaMcNQWQXLp6CxtvWbWxfJnKZZ+\nBACzAigsbh43YXighI3XLLxvX+jXr9O4m0ioSnhSdu6zc6d3+3vt1fVY9tn0mjX5ux460ccTJnSu\nEnbu7N43ex6ef/u3rcDudDfZKkawhnN4hANYRy2PdfXaaWlR5QTHjetqhPXyCGlpgT//Wd1cVhhz\nc7NeUYNc60taBDXi+i1tvTwBRo4sqHfMLorxTxsiRgDkkWItrO7mntreDnfeqbyX1q1T/+O6OneV\nTyaJBAwfrp5bA6u9cFOfPmo8a2rq/l3d66GbQ0vnf6zr+bRtWyOrVq0Evue6z3fE37he3uj8YXu7\nUvU88QRcdBFce22nDsquQ8y8UaTsvFhCwPTpShdnnfTZZ0frVuUloNxobvYu5uI2Wyq0d4xFMf5p\nw0R
3qVCIrVRVQG4UY/rw6dO9V/H2AC+/fd3OqalJaQPCtlUGsc3pxPro5z5rl2ee+aYUosNFG9Mh\nt8YHBDvhfv06O27poI4/3n3/QqkfMi+6ToqIXHJ7hOgdkxXF+Kf1AaMCKk6cJniVlWoyFyQhmRPZ\nZpPdvNn7888+62z/lVe8962oUOeVOVF78EGlDXAjkVCrgSCTvCAG5BUrlH3SLc7I0ijceqteordk\nsg9nnfVlzjyzuyFYCKirE9Qc+EdVE9L+oRfbtnV2vKpKqYcuush9/0KpHzLVNh99BPff7x5IFY/n\nNkv2i2AMG6c/UjGsRKJCV1IUYutpKwALuyuiVXciF8+yXLzU/Gb1M2Z0tu9n8B03znmiNn68//eC\nTvJ0bXNNTf6Vy5Yvl7K9vV1On/62LC9P+a9O4k0yVfd7KVOpXXEAJ5zgkPfHnoRJZxVg73h9vber\nZDzunW87n4EVhcylEyZef6R8rERC+t0wcQDFTZj58HNtq77e24+/rk4/dsfNNfvMM72/N368/vla\n6ObQqqvzPvbs2R/J6dOny6FDh0qolLDFZd+OdGWtLXI5o/WlrPWnHj/e38/f3nGvEomWAMi2hFjY\nLF/uLGVjMXUepSAAClGkwk6Iv1sQAWDqARSAMPPh67TlVYOgtlapoZyw3tfJLuDGihXw9NPe+4wd\n6/25U/+90svbbXNWxgJnJNOnL+XGG29k2LBhzJt3Hy++GKembxtJdiBop5rtJGhgCnOYwxQ+YiCj\nWaGXs92uThg71l9dYO/4gQcq3303rr22e3uFyi0/ejR8+qnyEDj+eKULtPIN3XKLytWzYkU0xw6L\nQhSpsChkTQBdSVGIraeuAMLMAOvX1pQp/hMLv1rduq7fmf32mlTZtR5ekyu3vi1bpjdh81Y/dciR\nI9+WGzdu7NLppn4DZD3nytlcJ+s51zvtspsx1qnjyaR3Kgd7x72Mj265Q3T0YlGqhwo9i86FQhap\nCDnWAGMELm7C9CzzaiuRgHnzurpZWvsdc4xy8ayoUMkQf/EL9f7GjV09CN99V8/1O5ns3m+vSRUo\n+97zz7tPjL0MvaeeCuPHK/ujHSGUS3w8riY3Bx+8DjgAN3/9q646mP3269rpqvYGlXxNBydjrFfH\noXtWwKoqNWO2GxWzcYP083VdulS5ceZaiNkN3WCLYqSQ7p6FjDXQlRReG3AysAZ4D7jG4XMB3Jn+\n/HXgMJ12e+oKIIhnmd+EzS9Nu25JV2tmPWtW12OlUnpt9OunannU1alZ9/jxyrjr9Z0ZM7yvU7b1\nd3fbrU3OmHGTHD58uDyMSplkm+N+ffs6TEqDRrs6zdCCdtwrn34Q46NfJLGbsSes2XkhZ9G5UupF\nKmyQTyMwUAasRU2zYsAq4EsZ+4wBFqcFwTeBv+u0HYYAyKdDRBC8KodZ/Z01S88u5NbWlCnZ5w6z\n90fHhz9oHQ+d+zroWNy5bZPf4lz54L/9m2yNxdJFVbbIOE0S2mWcJlnDVrn8ue3dDxp08HYaHIJ2\nvLJSScxcb9Bss22GFVMQdtqEfP95C5X0LWThk28B8C3gadvr/wL+K2OfOcC5ttdrgIF+becqAIo9\niV/m5M7Sa/uNP073hNNEMdsZdObAnmsb2d7X2fZf0CZnVfx3lzebiHfX6zuVMdQdRCsru7sIWgNV\nXV12HQ/jBvUrceh4wXKYndvPva7O/djZlGosxJ+3UIFnmS7DOsW1Xci3ABgP3Gd7fR5wd8Y+TwBH\n2V4/B4zyazsXAVBoe1TQyYuOwTToZCqM9Ou6LuxBxxt7DfCw+59km6znXL2dnW6G5cv9XTbHj1ff\nC5pJL9s+BcEr7DmXGyoTp3Pv27fT4J3twF3oP2++sV9H60+XSOj9SRwIIgCKzg1UCDFZCLFSCLHy\nMysMNQsK6dW1YoXKIDtlCsycqR79POH8DKZ2dO1Clh2xpib7gMXWVmWzDJPqavjXv/z3s/e/uroD\n6ACa04/ulNNGLZo/sNPNMHq0ys3jRiwGe+/t7b7X2qqMu9XVyjJtPWbbpyB4lTh0Ipvkam7nvmOH\nau+uu2DWLJgzR0ULBzEyF/LPm28yryOoe6epSVVui9IFlHCSwX0IDLa9HpR+L+g+AEgpfwP8BmDU\nqFEy204FNaxnm0ohE78UBevWweLF3Y/jVy7VThCnBJ30637HktI9YVw26AqwhoYGVq9ewNe//jBL\nltQAQznwwDibNv0HqZRT8IKkhi/4Kye518zV6UwqpVyk3GhthYceUjkuvPax8nwcd5zK5X/IIWpg\n9CuynKvnh1/WzmxqAmfiN0jHYnD99cHatOjhGTi7UGDPqTAEwApgmBBiKGpQPwfI7PFC4AohxMPA\nEcA2KeVHIRzblSBeXfZCIbl6xy1Y4J4iubUVBg3qmvfGOk6QTLvl5aqt2bP1hFVVFVxwgapDYj9P\nqSFeKyrg8cfVZMRv3NLFS4C1t7ezZMkS5s6dy2OPPUZjYyNDhw5lxozzOO+88zjooIM6f6/Wdhqa\noJJWBHBtn5v5j46f6Q/+oC7CoEFd31uwQGXs9EJHWluz8OefV4/JJJSVqQo0r7+usoE6SeRc3Q69\nsnbW1Cif30x/36BEOUj39Aycdgos7HIWAFLKNiHEFcDTKI+gB6SUbwkhLk1/fi+wCOUJ9B7QBFyY\n63H98PoP2Fe8YVelWrrU/ffMTHtsP8777ztXC7OwaxHa2uDKK4MJq1RK+fRfeaVKANe/Pwwdqj7b\nuFGNg7fd1rWwuz2dvJX/a80a+NnPgmkYMikvh9pTUjC/c8n11ogRzP3DH/jd737Hpk2b6NevHxMm\nTGDixIkceeSRCJv6ZPRXUmz62Z9YcMVzrGNQZ879jiw7tXChmrFmsxwLgvWD3367+sGHDnUWAEFU\nMm5LV68YgjB8/qMcpHX/vD2BQgs7XWNBIbZ8eAGF6bnW1BTM9pZ5HKf+9uunDMmzZ2fvZOF3HSyD\n9fTpKqGZVwEpr+sVi6kMxuPGKRvp9Okux33wLSlramR7dbXsANnQp4/cDPKbZWXy1FNPlX/4wx9k\nyu9kvBKlZbPpVqMKY4vH1QXS9fN1Q/eHjcKjJWq/+WJ34QuLCK4jJhJY4VVwyCLMFdgjj2Rns7GO\nM2GCd3/nz3fXTLipC/1WOHb1Tq7Bpq2t8Le/qQhka6L5X/8FCx5pZd2Tb3MA6zj12C3EL/8/0NS0\nywOhuqODauClZJI+f/xjsNBgXTKjbzMJshzLleZmVRDmqac6VUJCdKqiHn9c2Rf69+9a8MWOztI1\nylTKURds0fnz9gQKXPimRwsA8P8PhLkC8048pnT/HQ4OLPbjePU3G8P2tGnOFbdA2SpOOsk5VYSb\n+svPVtHaqrZdY9CbKzj3JyfS3tJCn1SKlj9BhfNX6dPeDg8/rAZrJ2t8EFcpO8mkuvA6luy2Nli0\nSP35jjzSv+ZlLthVQpYkbmnpWnItkXDW8RVD2oWoB+l81wIo
FIUUdrpLhUJs+UgFkc0KzM3H3y/v\nvVs+fd2VXhB1lbWC9vPjd/s8mVQqp8zz1PXNTyalvO1/PpBNQVU18bj7sj/b0GB79jg/1Y49KMov\nj3WfPtmHWtu36mr/MOrMm6SU0y4YIgVTDyAYQdSNXvv65Z531YlrqjV1hZVuUJmXcBDCfSzWEy7t\n8jtcJztyHRztJ+enm4/FVGerqrx14l65+e2S1O8HnTRJz1bglQFU58fI7JeU4addMPQYjADIAh17\nmV+A4tat3v9Jqz5uLna5XA3bmWOOztjkNBY3NSmDsdu41UfskA/1+WGwxv0GPy8JGI+rAXvLFvcL\nbLd2u8247ZJ02TLvPm3d6i1p7cmdxo93XwKWl/tfg8xZfQnWqjXkByMAIkJn0mWlYrE0H5WVXet9\nuxEkdYSfENHVlFgT5WzG4o6ODrlkySsyFmtwHoPYIlMVPlnkKis7pZjfcsQa/LL1Dsn8nnXyVobM\nzPB7v2WUtV9mGH9lpRJGmZWwtmzxvth+kthpVt9bPGUMgQgiAHq8EThMdIywEyYo46eTPcfNZTto\nIFouhm07lq3RijFobFRBom1t7vbFxkbJ/fe/wMyZk3nvvffoGzuSBI/TRhmNVFNNI+W0qWjcnR6V\n4ONxuOceFYQwaJA6YTcjbSLRaSXPxmDm5DFjnXwq1ekhFIspQ+xf/6qCJtwugj2XhW5/Fi9WP5yT\nRb6qSg3zXjj5v/cWTxlDdOhKikJspbgCcMMr/bNXPn8vn3w3giZRq67uVIv722y3SThXHnfccfKB\nBx6QjffdJ5uq++tX0LK2WbP0Lqx1IXJRaWST4nn69HCNrLql2zJz9icSZlZvCARmBRANXgGKHR0w\nZozzZ14u2yef7J4jrKlJ5dNKJvVTU1irjO9/H+bO7epR6EZTk3tWgkzi8QpWrbqZ4cOHqDdmz4am\nzfoVtEClI7AnW/OLvD3vvNxmtUEje9vaVLh0mBGafv7GxxyjUjRY4daffw577QXDh5tZvSEyjAAI\ngD1mw0rYZyGliux3GqT9XLb93NN1U1NkqpJ0s3hWVoLs6ADH5LASkCQSklisD3+96kWGP7KiU4d1\n4IGd+i2nhsvKuiY/Ki+Hq66CW2/t2obb4JhMqsExF4IkWgLV1732cg8GyyYdgU56g3g8HL/3sDIb\nGno+ukuFQmzFogLKNNB++KGeE4mF3+pfN+e+l5opSD2BzK2ysl1Cu3P/aJcnHNcm62e9J1P99umu\nw3r2We+T27Sp02I9a5ayiGdb4T1bgurEvHJz5KKOyYfR1hiGez0YFVB4OBlonaJ5LZqblcZi3Dg1\n8ZISPv5YZdV0mulb7ekEqVqGZqcJXvAgWUlF2U4qZIrjWm7lRf6TBvp27x+NTBr2DybcfiZsc9Bh\nnX66+wqgqgqWLFGz2lRKFUnYtq17G6edFjwnRRCcwu0TCbWEk7L7/vYZuZuRNZtZdqbR1kr98NRT\nyuic60w97MyGhh6PEQAeeP2f3LCneZkyRen3vQb48nL9dMtWbv599+3uMfT97wdTc8dp5hftlzKR\nR+joU8bgjquc+0cbtZ/NyU6HlUp15qfw04P961/RDo5OHjNDhvgLHSeXq1zyh1vthZmD3KIY0kMY\nSgvdpUIhtkKrgKJMCukUpFpXpzJpeqWMcMsGmkh4uZKrQuiCNplkm6xhs1zOqC47qcLpm2WSbV33\nqzpGuQhlo8Oy66yCpC7IpxojSGSe9SO5RRHrqquiKnlo0kMYpFEBhUZUaeHLyuCb34Q//lE5xAC8\n+Sb85CdqombPP2b56Fu201tucW7TLdEcQD++4A5+wkYGd+bOzyiaMpqVbGI/FjCOdRzQuV9lFYz9\nhZqJu3mwuC1x7MZS3ax7+VZj6CYcs2bszc3u7lK6s+yoZuqFzi1vKD10JUUhtmJeAVRVqVl3tiuE\nWKxzUus1IbQyHKRS/hO8iy5qlYlEiywra5TQJmMus33fjmXOuv3SDtgTrbnN2HVTFxRjjhtdC7vu\nLDuqmbpJD2GQZgUQGl6ee5WVnfV9FyxQevwgVbLsaZN/8Qt/A2487j3BKytLUV9/Bc3N89ljj4v5\nyldO5+xBLVz0lwuIN27W61QyCRMnqgr2mVGlfhWm/CJSdfOeh1GgIWw3SF0Lu+4sO6qZeoFzyxtK\nEF1JUYit0CsAKfXU0UG9DDMntV4qdmsV4DcRhy3yvPMukUuWLJHt7e3Zdcwv/7VVMmzmzOwrTG3e\nrNo4/nj1uHVr189zXQFEYT/QTa6kO8uOeqYeZSUwQ9GDSQanCJJgTacdr/+T07iTTErZt69/nrPx\n4/1VSTU1Um7Y8JmcNu0RWVa2TaqUDG2yrKxRJhItcunSAPUgrY4FyX9tGT7j8ewH1Fylqd/gGJVx\n1c8boLIy+DUx/vqGiAgiAITaPzuEELcCpwGtwFrgQillt3p9Qoj1wA6gHWiTUo7SaX/UqFFy5cqV\nWfXNycsuzJrY0KlpWL1aZQ7o10+5udsLrq9Zo9Is/POfzkbaZBLuuksZgL0qHVaUNdIuL6Wj43eM\nHHk4I0fOYNCgYzj00L7uGo7MDtpTC0jZVWVzyilKn2VXm0gJ++wDOxySuvXtC59+6lyq0En9YsUB\nOJ1kTU1X4262P96DD8JllzkbaZNJmDOnq3FVV1XU3KzUYk59t5LanXNOcBWLdXyTyM0QIkKIV3XH\n2Jxm6MCJQHn6+c+An7nstx7oH7T9bFcAUU0E7VgTOKfcXdbkOvMzr/4sXy5lvLJDQofzSoE2efS3\nnpSrVq0K1sFcqtxMn+7d+bo6/WMGVe0EVWMsX+7unmkttXJxNTUzdkOJQCFUQMA4oN7ls7wKgPp6\nd5/4zLEmGzVRLmkXrK26OmP8WL5c3l9+saykybnfbJf1l/9N7wIEkYBe+5aVeZ/E+PH6xww7u2bQ\nH8T+w2c7QzC6dUMJEEQAOGX/ypZJwGK3hQbwrBDiVSHE5BCP6cjSpe6OJA0NSi0DStOw774qYnfm\nTPU4cKB634tsa5NbxGJw/vlK63HQQVu5/+672f6tbzGhbR5VOKfvLKeN2v5L9Q6g42eus297u97x\ndI5pZdd0IlcfdZ0fxB6T8Mgj/r78TlgxA9dfrx7t6ppUCubPV9lR58/XS61qMBQYXzdQIcSzwACH\nj66TUv4lvc91QBtQ79LMUVLKD4UQewPPCCFWSykdR7O0gJgMMGTIEI1T6EoqBfPmee9z883w4Yfw\n+993zeipG2+Ua4DYzp2SbdvWcN5503n88cepbWnhHCGI08JfOYkTeZo2yrsWWKkaR3zEFL0DBHGl\nzOVkxo7VP2bY2TXt+J2D5R5pVd9xsxNYfdVxNbUTRVoHgyEP+AoAKeV3vT4XQlwAnAqckF5+OLXx\nYfrxUyHEAuBwwFEASCl/A/wGlBHYr3+ZLFigomK9aG2F++93/9wvGDNoduFMpGygvn4W/fu/wJQp\nU/hpSwuJ3/wG8In
I1R0kg/iZZ3sysZgyfOoec/jw6HzUvY5dWamMtKNHd0YZe83Og65GTAI2Qymj\nqyty2oCTgX8Ce3nsUw30tT1/GThZp/1sbAC6Lttem59KOhe/f5CyomKHfPTRJ2Vra6tq0M/NsE8f\nKf/937v7zGfTwUwdd7YnY6/oFeSYUejRw4gy1rEBOFGMkcuGXg15tAHcDfRFqXVeE0LcCyCE2FcI\nsSi9zz7A34QQq4DlwJNSyqdyPK4r1mQwF6TsTEbphKVRqKlRamE7iQQkkx0k2U4VDZAuqAKSBI3U\n8AUvPV9Bbe0YKioq1Jf8ZvYdHfDoo7DHHvDQQ8772HXQjz2mQpNrapT7oxDqsaam+2zbfjL2ffv1\nU66eTmRW9PJqJ/OYXnp0L7x07LrHDqIq0iWMyGWDoUDklApCSnmQy/ubgDHp5+uAQ3M5ThC80jeE\niT37wZo18OmnHezYsY7333+aA5Zfw2208SzjWMNwPqc/e/E5w3mH2uq/Ev/nTfD7Vco//+CD1WDo\nVhfSjpRw4YVwxhmdWeTA3W9+4ULYsMHfz9wtlcMbbwRT2URVpFxHx65zbF1VURBMAjZDCZNTIFjU\nZBsIZh8vstXTz56txmU/Xn/9debOnUt9fT0ff/wxNTU1PDRsGKetXIkIcm0rK/WTCV1+OfzqV+p5\nkACrbHALVspX2cEwz88roCvbaxVFmwZDDuQtECzqLZdUEJaq+fjjg6u3/VS3mzZtkj//+c/lV7/6\nVQnI8vJyecYZZ8hHH31UNjc3R1tIAKQ89tjO4IXLL8+/DjqfQVFh69ij6LsJEjMUEZhcQJ1kY+N0\nsgM2NjbK+vp6edJJJ8k+ffpIQB5++OHy7rvvlp999lnuBw2yxWKdg41foqGwi4DkI8zajp9Vf8qU\n7M4hbEO0CRIzFAlGAGRgn6CByt0Vj6uxw6lGuTVxa29vl88//7y84IILZDKZlIAcMmSIvO666+Tq\n1av1D2o1Xl4enVDI1wog314vfqupRMIMtgaDjSACoFfUA/CyD/7iF93ff//9t7n22nn87ne/Y8OG\nDfTt25ezzz6biRMncvTRR9PHL9DA7aAvvghpf39HYjG1NTYqm0Cu0aS5Blg5kW+vl9pauOQS98+F\nKL1at/mynxgMPvQKAQDulf+s9z/77DMefvhhjj56LitXrqSsrIwTTzyRW265hdNPP51EIpH7QceM\ngfAWm8QAAA+VSURBVN/+Vs1dMxEC1q+HJUs6i6JPnarSi+oSi6l6klEWAcmH10vmAHnuue6Re01N\npeVqaaKGDUVErxEATrOuZuCJJ55g7ty5LF68mLa2Nr72ta9x++23c+655zJggFMGjByoqYG6OuXK\naRcCQqj3Bw7sFBjz5wfLxVNdrRIMOVXzChMvP9swVhxOA2RHhxKmKYc8SaXkammihg1FRu8QALZB\nRTY20h6P03LhhdSWl7NnUxNHJZOcc9JJfPWGG/jKKD3vqaw5/3w47TSYPl0FEIwYATfe2NWvH4Ln\n6KmogNtui34AibLsoNcA6RYnEYWaKyqiKgZvMGRJzxcAGYOKAMpTKcqBp1pbaY/HKWtsRLz4Inzv\ne9Euxe2rkCOP9B6w/XL0VFaqpEaFqPkaVcCX1wAZjysh0KdP6da6NVHDhiKjRwuArVu38o9p0/jm\n9u04ZYcQQLllaI16Ke6k2rj8crj6avV5pjHQS9WSSMAdd8Ann3QOvlIqtVG+DItuRpVc8Bogm5vV\nqmnEiNKtoGWihg1FRo+LBG5ra2PRokXMnTuXxx9/nJ+2tvLfoJ/0qLoavvUtpXe20jQsWZLbwOoV\nzQpqZutU9tASGq2tXfNWV1WpFYC1b5ASisXsgTJ/virK4DRAOpV0LDVM1LAhDwSJBO5xAqC5uZkB\nAwYQi8WYMGEC/2evvRh6882IbHNCgLO65Stf0R9IvQa2TGpqlG/qhg2q3WOPhWHDugoA+77WbDjK\nWrv5ojcMkMX+GxhKnl4tAADeeOMNDj74YJVt02tQyZa+fZUuur1d7088e7YqOaZ7re0Cp6NDfc/J\nAyaZhIkTYe5c/1lz1DmDwqI3DJCmGLwhQoIIgB5pAxg5cmTni3gcrroKZswI7wA7dnR97Wc/CFp0\nxUoK57d/Y6PKKKpjWCwVD5SoDMzFRBT2E4MhC3qkACgYbgNpVDmqq6uVnWL5cn/DYil5oGQOkFYt\ngCjsFsVsEzEYIqbnCgD7H/vjj4OlW84Wt4E003c+F3uEnfJyFUMwf77755aPfKl6oEQZOWuicg29\nnB5pA3D8Yzc26uvgs8XPU8Wu+5USbr+9s49euX+sNBROPvC6XkClaGCN0m5RKjYRgyEgvdsG4BVN\n6kYiob4XizmvEjK9gNxm8X5RqZmqjWnTOgWCV+6fWEzts3ixs15cR28eZQRvVERptygVm4jBECE9\nTwB4/bGTSfWn3rJFvR47VpVXXLxYpWX4/HMlDF57TXn4HHJIZxyAfWBdsQJOPlkdJ5fka5kC4ctf\ndh+gd9/de0DSMSyWmoE1SrtFKdlEDIaIyEkACCFuAC4BPku/da2UcpHDficDvwTKgPuklDfnclxP\nvP7YDQ1q8J83r+ugN2yY0v1mDrw/+1nXBG2gBv/TT1fBW62tanbe0aGKsDvpjYMYGbMZoIMaMUvJ\nAyVKu0Wp2kQMhjDRLRzgtAE3ANN89ikD1gIHADFgFfAlnfazKgjjV0AkHu9a9SVIhaug1bCiLhXY\n00sRelVWy7X6WJRtGwwFhAAFYbQzJOTA4cB7Usp1UspW4GHgjMiOVlvrnjkSlDH0iy+UqqW5WU8X\nbLFggVL56Oxrt0U0NKihpaGh67FzIer2w8Zy5Zw9Wz3q9M+yW9TUKPWdEOqxpiZ3u0WUbRsMJUIY\nNoArhRATgZXA1VLKrRmf7wdssL3eCBwRwnGdkdJ9QLfT1qYSsa1e7W4kztQFL12qrzeO2shYSkbM\nXNwto7RblJpNxGAIGV8BIIR4FnCqjHIdcA8wG5Dpx9uASbl0SAgxGZgMMGTIkOANPPKIc9qETBoa\n4L77lB7fjUSiUxecSinbgc6+EL2RsVSMmGEUQYnSblFKNhGDIWR8BYCU8rs6DQkhfgs84fDRh8Bg\n2+tB6ffcjvcb4Deg4gB0jt2FJ5/U39dr8AeVgM0SQgsWKD98N6Ts6gIatZGxVIyYpbRSMRh6GTnZ\nAIQQA20vxwFvOuy2AhgmhBgqhIgB5wALczlu3pBSVe9qbvav0HXeeV1nsrW1ypPIiTCqWGXbvpcu\nPhs9vR+lslIxGHohudoAbhFCfA2lAloPTAEQQuyLcvccI6VsE0JcATyN8gh6QEr5Vo7HdWfsWPjT\nn8Jrz5qles24k0k45piu70UdeJVN+166eIgmLUKprFQMhl5Iz0sFsXUr7LFHeJ0QAmbNUlG72aRS\niDr1r277fqkPpHSOQs41LUIppqAwGEqY3p0KYvFiZdjTMQTrYM1Ss53RZ2Nk
DBLcpdu+ly6+udk9\nT1KuevpSTEFhMPQSep4AWLvWX3edWdAlkVAGX6dB0K5Pz4fbYNDawbr41dt1Iww9vXG3NBiKkp4n\nAPyKryQS8NxznSUdrQFpyBBl8PWbpUbpNujlMjljRmft4Gx0817XJR5Xws8pEV5YenrjbmkwFB09\nzwbgpXNOJODDD5XuOZNUSsUQWG6kY8fCOeeogTFfBUOC1g4Ooj/308VHZQMwGAx5pXfbAPx0zk6D\nv5Pa5dln1Yx76tT8FQzxczW1E1Q373ddwOjpDYZeRs8TABBM5+yldrnwwq52gaARrEEJUjs4G928\n33UxenqDoVfRMwUAhOMdE5VnDDh7+gSpHZytbt7ruhg9vcHQq+i5AkCXIGoXi1w9Y7wCsnRrBweN\nJjbFzw0GQwZGAARRu1jk4hmjkxzNrorJrB2cjW7eFD83GAwO9DwvoKB4eccI4awGysUzxsvTx62o\nfC7RxKb4ucHQqwjiBZSPgjDFjVdhkLq68AuGZJMczdLNX3+9egxy7CAFbwwGQ6/CqIDA2zvm7LPD\n9YzJd3I0k43TYDC4YASAhZsHTNieMV6ePmGkic7EZOM0GAwuGBVQvsl3Ldqo6xIYDIaSpXevAArl\nGpnP5GgmG6fBYHCh93oBOblGWoNiT3SNjLougcFgKAp6dy4gHcIoVF5qmChfg8GQQe+0ARjXSIPB\nYOilAsC4RhoMBkNuKiAhxCPAiPTLGuALKeXXHPZbD+wA2oE2Xf1UZBjXSIPBYMhNAEgpv289F0Lc\nBjhUFNnFcVLKz3M5Xmjk2xffYDAYipBQjMBCCAGcDRwfRnuRkw/XSJN902AwFDmhuIEKIY4BbndT\n7Qgh3ketDtqBOVLK33i0NRmYDDBkyJBvfPDBBzn3z5WoXCN7m4upwWAoGoK4gfoKACHEs8AAh4+u\nk1L+Jb3PPcB7UsrbXNrYT0r5oRBib+AZ4Eop5VK/zuUlG2jYmOybBoOhgIQaByCl/K7PwcqBWuAb\nHm18mH78VAixADgc8BUAJYmOi6nxxzcYDEVAGG6g3wVWSyk3On0ohKgWQvS1ngMnAm+GcNzixLiY\nGgyGEiEMAXAO8Hv7G0KIfYUQi9Iv9wH+JoRYBSwHnpRSPhXCcYsTy8XUCeNiajAYiojemwsoKrwq\njBkbgMFgiBhTEayQ5Dvds8FgMGRJ70wGFzX5TPdsMBgMWWIEQFSY7JsGg6HIMSogg8Fg6KUYAWAw\nGAy9FCMADAaDoZdiBIDBYDD0Uoo6DkAI8RmQbTa4/kBxpJ/uiulXMEy/gmH6FYye2K9/k1LupbNj\nUQuAXBBCrCx44RkHTL+CYfoVDNOvYPT2fhkVkMFgMPRSjAAwGAyGXkpPFgCuRWcKjOlXMEy/gmH6\nFYxe3a8eawMwGAwGgzc9eQVgMBgMBg9KWgAIIc4SQrwlhOgQQrhazIUQJwsh1ggh3hNCXGN7fw8h\nxDNCiHfTj7uH1C/fdoUQI4QQr9m27UKIqenPbhBCfGj7bEy++pXeb70Q4o30sVcG/X4U/RJCDBZC\nLBFC/DP9m//Y9llo18vtXrF9LoQQd6Y/f10IcZjud3NBo18/SPfnDSHEy0KIQ22fOf6eeezbsUKI\nbbbfZ4budyPu13/Y+vSmEKJdCLFH+rNIrpkQ4gEhxKdCCMeiWHm/v6SUJbsBhwAjgBeAUS77lAFr\ngQOAGLAK+FL6s1uAa9LPrwF+FlK/ArWb7uPHKP9dgBuAaRFcL61+AeuB/rmeV5j9AgYCh6Wf9wXe\nsf2OoVwvr3vFts8YYDEggG8Cf9f9bsT9+jawe/r5KVa/vH7PPPbtWOCJbL4bZb8y9j8NeD7qawYc\nAxwGvOnyeV7vr5JeAUgp35ZSrvHZ7XBUwfp1UspW4GHgjPRnZwAPpZ8/BJwZUteCtnsCsFZKmW3Q\nmy65nm/BrpeU8iMp5T/Sz3cAbwP7hXR8C697xd7XuVLxv0CNEGKg5ncj65eU8mUp5db0y/8FBoV0\n7Jz7FtF3w277XDIqG0aBlHIpsMVjl7zeXyUtADTZD9hge72RzoFjHynlR+nnH6PKV4ZB0Ha7ldUE\nrkwvAR8IS9USoF8SeFYI8aoQYnIW34+qXwAIIfYHvg783fZ2GNfL617x20fnu9kStO2LULNIC7ff\nM599+3b691kshPhywO9G2S+EEAngZOBR29tRXjMv8np/FX09ACHEs8AAh4+uk1L+JazjSCmlEELb\nJcqrX0HaFULEgNOB/7K9fQ8wG3UTzgZuAyblsV9HSSk/FELsDTwjhFidnrnofj+qfiGESKL+qFOl\nlNvTb2d9vXoaQojjUALgKNvbvr9nxPwDGCKlbEjbZ/4MDMvj8f04DXhJSmmfmRf6muWFohcAUsrv\n5tjEh8Bg2+tB6fcAPhFCDJRSfpReZn0aRr+EEEHaPQX4h5TyE1vbu54LIX4LPJHPfkkpP0w/fiqE\nWIBafi6lwNdLCFGBGvzrpZSP2drO+npl4HWv+O1TofHdbNHpF0KIrwL3AadIKTdb73v8nnnpm01Q\nI6VcJIT4tRCiv853o+yXjW4r8IivmRd5vb96gwpoBTBMCDE0Pds+B1iY/mwhcH76+flAWCuKIO12\n0z2mB0GLcYCjx0AU/RJCVAsh+lrPgRNtxy/Y9RJCCOB+4G0p5e0Zn4V1vbzuFXtfJ6a9Nb4JbEur\nr3S+my2+bQshhgCPAedJKd+xve/1e+arbwPSvx9CiMNR485mne9G2a90f/oB38F2z+XhmnmR3/sr\nbCt3PjfUn30j0AJ8Ajydfn9fYJFtvzEor5G1KNWR9f6ewHPAu8CzwB4h9cuxXYd+VaP+CP0yvj8P\neAN4Pf0jD8xXv1BeBqvS21vFcr1QKg2ZviavpbcxYV8vp3sFuBS4NP1cAL9Kf/4GNu8zt/sspGvk\n16/7gK22a7PS7/fMY9+uSB97FcpA/e1iuGbp1xcAD2d8L7JrhprsfQTsRI1dFxXy/jKRwAaDwdBL\n6Q0qIIPBYDA4YASAwWAw9FKMADAYDIZeihEABoPB0EsxAsBgMBh6KUYAGAwGQy/FCACDwWDopRgB\nYDAYDL2U/w9rzmhkh+BocQAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68af50c890>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "# convert training data to 2d space\n",
-    "label = np.asarray([5 * a + 1 > b for (a, b) in zip(x, y)])\n",
-    "data = np.array([[a,b] for (a, b) in zip(x, y)], dtype=np.float32)\n",
-    "\n",
-    "plt.plot(bd_x, bd_y, 'k', label = 'boundary')\n",
-    "plt.plot(x[label], y[label], 'ro', ms=7)\n",
-    "plt.plot(x[~label], y[~label], 'bo', ms=7)\n",
-    "plt.legend(loc='best')\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Create the MLP model\n",
-    "\n",
-    "1. We will create a MLP by with one dense layer (i.e. fully connected layer).\n",
-    "2. We use the Softmax function to get compute the probability of each category for every data point.\n",
-    "3. We use the cross-entropy as the loss function.\n",
-    "4. We initialize the weight matrix following guassian distribution (mean=0, std=0.1), and set the bias to 0.\n",
-    "5. We creat a SGD updater to update the model parameters.\n",
-    "\n",
-    "2 and 3 are combined by the SoftmaxCrossEntropy."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(2, 2) (2,)\n"
-     ]
-    }
-   ],
-   "source": [
-    "# create layers\n",
-    "layer.engine = 'singacpp'\n",
-    "dense = layer.Dense('dense', 2, input_sample_shape=(2,))\n",
-    "p = dense.param_values()\n",
-    "print p[0].shape, p[1].shape\n",
-    "\n",
-    "# init parameters\n",
-    "p[0].gaussian(0, 0.1) # weight matrix\n",
-    "p[1].set_value(0) # bias\n",
-    "\n",
-    "# setup optimizer and loss func\n",
-    "opt = optimizer.SGD(lr=0.05)\n",
-    "lossfunc = loss.SoftmaxCrossEntropy()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "* Each layer is created with a layer name and other meta data, e.g., the dimension size for the dense layer. The last argument is the shape of a single input sample of this layer.\n",
-    "* **param_values()** returns a list of tensors as the parameter objects of this layer\n",
-    "* SGD optimzier is typically created with the weight decay, and momentum specified. The learning rate could be specified at creation or passed in when the optimizer is applied."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Train the model\n",
-    "\n",
-    "We run 1000 iterations to train the MLP model. \n",
-    "1. For each iteration, we compute the gradient of the models parameters and use them to update the model parameters.\n",
-    "2. Periodically, we plot the prediction from the model."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "training loss =  0.705488\n",
-      "training loss =  0.479273\n",
-      "training loss =  0.414510\n",
-      "training loss =  0.370120\n",
-      "training loss =  0.337781\n",
-      "training loss =  0.313082\n",
-      "training loss =  0.293514\n",
-      "training loss =  0.277555\n",
-      "training loss =  0.264237\n",
-      "training loss =  0.252912\n"
-     ]
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYFdWZuN/T6+3u23YnkhgQiIKKUWIwgD+jBlAjimvo\nYSaik+jECIqauI9GUdNMzAIuiYkZGCNmwdGMkXGJUTSOgBkTQMc1ggbRAC6End637/fHuUVX367l\n1L11l4bzPk89d6ntVN265zvnW5WIYLFYLBaLm5JCN8BisVgsxYcVDhaLxWLphxUOFovFYumHFQ4W\ni8Vi6YcVDhaLxWLphxUOFovFYumHFQ4WS0wopQ5QSolSqqzQbbFYssUKB4ulQCjND5RSW1LLD5RS\nqtDtslgA7AjHYikcM4AvA58DBHgaWAf8eyEbZbGAnTlY9mCUUkOUUr9VSv1dKbVOKfVN17pblFIP\nKaUeVErtUkq9pJT6nGv9Z5RSzymltiul3lBKnelaV6WUuk0p9Z5SaodS6nmlVJXr1Ocqpf6mlNqs\nlLohoInnAbeJyAYR2QjMA86P7w5YLJljhYNlj0QpVQI8BrwC7A+cCFyulDrZtdlZwH8BHwfuB/5b\nKVWulCpP7bsE+CRwGbBIKTUqtd88YCxwTGrfa4Ee13GPA0alznmTUuozPs08PNU+h1dS31ksBccK\nB8ueynjgEyLSKCIdIvIO8B/A2a5tXhSRh0SkE7gdSABHp5Yk8P3Uvs8CjwPTU0Ln68C3RGSjiHSL\nyP+KSLvruN8RkVYReQXd4X8Ob5LADtfnnUDS2h0sxYC1OVj2VD4NDFFKbXd9Vwosd31e77wRkR6l\n1AZgiLNORNyzgffQM5BBaCGyNuDcH7ret6CFgBdNwD6uz3VAk9hsmJYiwAoHy57KemCdiBwcsM0w\n501qRjAUeN9Zp5QqcQmI4cBbwGagDRhJX5VQJryBnlWsSH3+XOo7i6XgWLWSZU9lBbBLKfWvKQNy\nqVJqtFJqvGubsUqphlRcwuVAO/An4M/oEf+1KRvEJOAM4IGUsLgXuD1l8C5VSn1BKVWZQRt/CVyp\nlNpfKbU/cBVwX6YXbLHEiRUOlj0SEekGTgfGoN1DNwP3oFU3Do8AXwG2AV8FGkSkU0Q60MJgSmq/\nu4Gvicjq1H5XA68BK4GtwA/I7L80H234fi21PJ76zmIpOMqqNy17I0qpW4CDROSfC90Wi6UYsTMH\ni8VisfTDCgeLxWKx9MOqlSwWi8XSDztzsFgsFks/rHCwWCwWSz+KOghu0KBBcsABBxS6GRaLxTJg\nePHFFzeLyCeyPU5RC4cDDjiAVatWFboZFovFMmBQSr0Xx3GsWslisVgs/bDCwWKxWCz9KGq1ksVi\nsezNtLbC4sWwdi2MHAkNDZBI5OfcxsJBKXUvOlfNJhEZnfru48CDwAHAu8A/icg2j31PAX6ETpl8\nj4h8P9MGd3Z2smHDBtra2jI9xF5PIpFg6NChlJeXF7opFovFh5UrYfJk6OqC5maoqYFLLoElS2D8\n+PD9s8U4CE4pNQGdf/6XLuHwQ2CriHxfKXUd8DER+de0/UrRqY5PAjagk5VNF5G/hJ1z3Lhxkm6Q\nXrduHbW1tey7777YmijRERG2bNnCrl27OPDAAwvdHItlr8drdiACQ4bA9u39t6+vhw8+8J9BKKVe\nFJFx2bbL2OYgIsvQGSjdnAX8IvX+F+hi6ekcBfxVRN5JZbt8ILVfRrS1tVnBkAVKKfbdd18787JY\nioCVK7UQmDkTbr5Zvw4eDPPm6RmDF11d8PDDuW9btjaH/UTkg9T7D4H9PLbZH1fFLfTs4f/5HVAp\nNQOYATB8+HC/bTJpqyWFvX8WS+FpbdVqI/fsoKlJv956K7S3e+/X3AzvvJP79sXmrZQqbZh1oiYR\nWSAi40Rk3Cc+kXUcR+xs376du+++O6N977vvPt5///3dnw844AA2b94cuM9zzz3H6aefDsCjjz7K\n97+fsbnGYrEUEYsX+88ORKDSp3xUTQ2MGJG7djlkKxw+UkoNBki9bvLYZiOucozoUowbszxvwQgS\nDl1+v3SKdOEQlTPPPJPrrrsu4/0tFkvxsHatngV40d4O3d3e68rKtF0i12QrHB4Fzku9Pw9dWSud\nlcDBSqkDlVIVwNmp/QYk1113HWvXrmXMmDFcc801PPfcc3zxi1/kzDPP5LDDDuPdd99l9OjRu7ef\nN28et9xyCw899BCrVq3i3HPPZcyYMbS2tgJw11138fnPf57PfvazrF692u+0gBYul156KQDnn38+\n3/zmNznmmGMYMWIEDz300O7t5s6dy/jx4zniiCO4+eabc3AXLBZLtowcqWcBfqRrfysqtDF6yZL8\nuLNGcWX9T2ASMEgptQG4Gfg+8Bul1AXAe8A/pbYdgnZZPVVEupRSlwJPoV1Z7xWRWIqoX3755bz8\n8stxHGo3Y8aM4c477/Rd//3vf5/XX39993mfe+45XnrpJV5//XUOPPBA3n33Xc/9pk2bxk9+8hPm\nzZvHuHG9jgSDBg3ipZde4u6772bevHncc889xm394IMPeP7551m9ejVnnnkm06ZNY8mSJbz99tus\nWLECEeHMM89k2bJlTJgwwfi4Fosl9zQ0aNdUPzo6+n+uqsptm9wYCwcRme6z6kSPbd8HTnV9fgJ4\nInLrBghHHXVUxm6hDan54dixY3k4ogvCl7/8ZUpKSjjssMP46KOPAFiyZAlLlizhyCOPBKCpqYm3\n337bCgeLpchIJPQswB3LUFHhb4gG2LFDbx/kyhoXAzpCOmiEn09qXHPDsrIyenp6dn8OcxmtTFmd\nSktLQ20WfvuCjl9wXq+//npmzpwZ6ViWgU8ho2ktmTF+PLz/vv7d3nkHXnkFfvtbbZD2w3FlPeec\n3LbN5laKSG1tLbt27fJdv99++7Fp0ya2bNlCe3s7jz/+uPG+cXDyySdz77330pTyidu4cSObNnn5\nCVj2JPz85VeuLHTLLGFUVemO/sYbYerUYDsE5M+VdUDPHArBvvvuy7HHHsvo0aOZMmUKp512Wp/1\n5eXl3HTTTRx11FHsv//+HHroobvXnX/++Vx00UVUVVXxwgsv5KR9kydP5s033+QLX/gCAMlkkl//\n+td88pOfzMn5LIUnyF8+XyoISzyE2SEgf66sRV1D2it9xptvvslnPvOZArVoz8Hexz2H++/XMwVH\nILhJJmH+/NyrICzxsXIlnHSSti94UXTpMywWS3ES5C+fLxXEnk5rqxbCc+bo11xmnxk/Xnf+jY1a\nACQS2q01mSxSV1aLxVKcOP7yXjOHfKkg9mQKkR21qgpmz4arr+41Vo8YUaQpuy0WS3ESpKfOVzTt\nnkqh7TmOsboQDEi1UjHbSQYC9v7tWTj+8vX1WvVQCBXEnkpQ/qN8ZUctFANu5pBIJNiyZYtN250h\nTj2HhO0x9ijS/eXzrYIYCPjF
gYR976Wug+j2nIEWhzLgvJVsJbjssZXgLHsaYR2vl92grAzuvBMu\nv9z/+7Y2f+Oz4wk2dWp4p++c3zmeY2jOhd0iLm+lASccLBaLxY1fx+90vK2t/lXVlPKORvb73k19\nPTz2GJxxhv+5QZ9/v/3AK/61thY2bYp3BlE0rqxKqVFKqZddy06l1OVp20xSSu1wbXNTtue1WCwW\nt8G4qUl36E1N+rMzUg+rmxDle9B1Furr4dFHtWAIOjfAgw96CwbQ3z/wgPn15pOshYOIrBGRMSIy\nBhgLtACLPTZd7mwnIo3ZntdisVhMDMZBcSCZcMYZ2ktp/XozY/Xvfhd8vLD1hSJug/SJwFoReS/m\n41osFks/28Lq1eEBgEFxIFFJJrWNIZEwDz6MmE+zaIhbOJwN/KfPumOUUq+iq8BdHVdNB4vFklvy\n6WUTdC4v20JPT69nUTpOAGBQHEhUm4M7bsQk+HDlSnjqqeBrTkvPVjyISCwLUAFsBvbzWLcPkEy9\nPxV4O+A4M4BVwKrhw4eLxWIpHCtWiNTXiySTIkrp1/p6/b2blhaRRYtEGhv1a2trfOdatkxk4UKR\nREJEd9lmS21tbzv8jn3ffdG+d193a6v+zuvc9fUiW7f6r3eWZDKzexUEsEpi6NNj81ZSSp0FXCIi\nkw22fRcYJyKbg7az3koWS+EI8vJxJ38L8xbK9lxKQXl5/8poYSST8Pe/9848nFlJehxI1O/dBF37\n22/7J0QEHf28dGnxurLGqVaajo9KSSn1KeAjERGl1FFoQ/iWGM9tsVhixsTYO3VqPOklwjyKogoG\nB6coTpC6yi9FhUnqivHj9TFnz9b2j0MPhe9+VwvPJ58MNoRfc423YCiWYLlYhINSqgY4CZjp+u4i\nABH5d2AacLFSqgtoBc6WuKYsFoslJ5gYXE0EiEluoLg9itxtzGXiPPexm5rgj3+Ee++Fb38bhg3z\nt0kkkzBqVPDxvNqaT8ERi3AQkWZg37Tv/t31/ifAT+I4l8ViyQ8mBte40oXH6VHkbuPQoblLnOeV\nlM+p/3zTTVBX57+vY9h2d/bDhumobHcdB3dbnYA7d5R1LrPDDsjEexaLJfc0NOhOzAunc3M6dS+i\npAsPOpcfiURvskG/NkLuEucFzZpAd/JKaSHhlRDxtdf6lna9+GL/Aj+dnboA0PbtvcF1bW3684kn\n5qa+hBUOFksOyWeRmLgxyfZqIkCyOZdfbs1EAn72Mz3yf/ZZ/zauX5+7QkgmqjARnadp/nxdvGf+\nfN3m0aP7R3YHPRvNzf7rcxVlPeCyslosA4VCFImJk9ZW7XFz2WWwZQsMGqT15G49dyKh00iccoq+\nzo4OfZ3l5dHThXsZdxsaYNq0YE+ooIy0b7+du0JIJqqw5mbYsAFuvLFXhTR3Lnz4oZ4NmFJWFjxL\n+dWv4PzzzY9ngk28Z7HkAFM30GLF1D013SBbUaG3e+opOO64eM756KN6BpBJKvK2Nhg8ON7fwenk\n16yBefOgpcV/Wydz68EH9722qK655eXBwqS8HHbudEqKFp8rq8ViSRGXF08hMK1+5rVdR4denPxD\nfh1vutfNlCn+5zzzzL7ndPIlmXjrOOoqP0EXVTCkC7CqquDty8pg0iQtHNxCJEwwVFb2zsLKyvTs\nbc4c/+1LSuJ/pqxwsFhyQFxePIXAVLBlKgD90mD44RwrffRtqqaLqxCSlzB0OvxEQtsNlNIeS+66\nEOmCIYz6erjjDq2OctoK8L3v+d/vjo74nykrHCyWHGDiBlqsmAq2TARg0KzEj+ZmrcK55JLMXVLj\nqMUcJAzLyuCuu7RazRFAU6bo1zDB4ORxcttqRo/unVk9/LC+B0E2h5oa+NjHtvHAAyGJnCJghYPF\nkgOCkr1F8eIpBKaCLWy7oUO1h5ZbBRTm/ulFTQ1s3uy/X1sbfPWrOlo7l0FhYcLQMTw73H+/2bU6\nZt/mZliwQL8fMqR3hlRdHe4V1dKyg0svHQy0h5/QECscLJYs8ItYjVvXnU9MBVtYttPLL4fu7r4q\noK98JXokdFmZ9pTy26+tDR56SKerMFEzef1mIuGRx1Fng5lEfc+cqavD7dzZ+13wMQRo59hjv8NZ\nZ32XiRMnMj4uV7g4svflahk7dmw2yQktlpxikrHUyVY6Z07m2UoLgWk2Vr/tkknvLKTV1f7rnKWq\nqv85Fy0K38+dEdXvPnu1t7ZWv4Zd69atuv0m52xpEZk1S6SiIlomWRApK4uyfY9MmtTdp53ElJXV\nzhwslgww9eiJQ9ddCEyNuF7bdXRo7xovurvDazMrpWMd3DEVBx0UbLR2k24Md2YKq1fDbbf1tQF4\nzQK8fkfHiJ7e9upqbWdwZoOtrdq99dZbdXujJwwUuroE8/hkxWGH+UQKZokVDhZLBgxkV1VTTAVb\n+nZz5virQtrbdWdaWdmbhyidkhItGJxj+nXMfriN4elxGFEIyjzrZt067WHknMtvO9BxD9rdVwC/\nTr0LXR7HjNmzjTeNRCzpM5RS7yqlXlNKvayU6he1pjQ/Vkr9VSn1qlLq83Gc12LJFWFpLwayq2qu\nCcq3BLpz9EuLAX3vn3uG5lXtzQtH/+/eN5OEfiaZZ0tK4IknvGeS6ZSW9jBmzCuceOLX0bYCbxKJ\n8kjtvPvuSJsbE2dupeNFZIx4R+ZNAQ5OLTOAn8V4XoslVlau7JsQbeZMHWW7cmXvNnElnIubMKGW\nj1xPpkn0Kiu9v3ffv0y8mxyjeSb7erUj29TlDt3diueff4w33/wfjj3252gB4SxaYN53n+K886Kp\niebMgeefj7SLGXEYLoB3gUEB6+cD012f1wCDw45rDdKWfNPSElz60TE6hpWILIThOcyIbGpkjqst\nQWU9lfJfr5TI8uX6OI2N+rOpgbauTpcVXbRI5IQTohuDvX7HIGN4MqnX33JLlyjVE2o8Pu64lt33\naMsWbbQ+8UT9um2b/n7RIpHS0mhtrarSpVQbG0Vg0DsSR78ey0FgHfAy8CIww2P948Bxrs9/QJcJ\n9TqWrSFtKRgmHYFDnJ1ttjWYw4RaUD3jTISZSXuD6j4nkyKzZ/t3/CYdc/qSSIhcf732KMrESyh9\nqanR1zd7tr+XUiLRIl/60ulSWTlDoCX0mJMmhd/b1lb/84Vdv76fR3ZLEQmH/VOvnwReASakrTcW\nDu7Fzhws+SZopKqUdkl1E4erahxCJkyozZplLvS8cAuDxsb+7a2uFpk5s+89CJtdLVyoO+CgNgUd\nw+v3Me1Iy8vNt1VKpKqqR6BHysvbBboFdgpsERgnI0Z8RcrLmwXCZg4i06aZ/Z7Ll0cXDr3LWJEY\n+vW4KsFtTL1uUkotBo4Clrk22QgMc30emvrOYikqogY6ZeuqauoSG0aYXnz16swN6GEeP8538+fD\nr3/dNxAtKBDwySf9U0s0NemUGenBhEGGZT3uDCaZ1Oe/8kr44Q/NDNUi0Nqq7QCdnR188
pM/Z8yY\nas4/fx+OPfYJPve5Txin33766d4qbkEcdxwsWwYnn6wN+N3dZsePlWylC1AD1Lre/y9wSto2pwG/\nR/tuHQ2sMDm2nTlY8k2+bQkLF4pUVnqfr7JSrzchVzOHIHVV0OK+V36zqzCVUUVF72xk61b9etNN\nmalcQOTww/XIfeFCrd/P5LqSye4+9yro94t6r/3uv3PvZs82PU88M4c4hMMItCrpFeAN4IbU9xcB\nF6XeK+CnwFrgNQxUSmKFg6VAZKrmiWo3CDPagl5vol4KE2pBnWGQ0Iui83cvNTXhnaCpyqimJtyw\nbqImqqzs3b6urkeuvPIVqaxsFqXaxEQlBH1Viya/X9D+mbBsmYlNpUiEQy4XKxwsuSSoM49qS4gq\nUKKMyE1nLLnwVorqLeReZs40a7PpTKCurv9sZOZMvX+U0XvvskVKSvaVkSMbpbS03WgfZ+Sf6Ywq\n6szBC0dA+Hs0WeFgsWRM3J5GUUflUUbkfh1KS4tWa0yb1qsucdQvfkItqtBbtMjfaBy2VFebCTVz\ndYkWVib3ve/iPSuoquqUe+/VDXQ/DyaCOtMZVbaqSbNrLiKDtMUykIjLCOxgmkrDnQ301VfNM3Z6\nGYxXroQTT9TF5R0eekgbXJ991t9IHtWA3tAAF15ovr0b0+pkhx6q221iHL71VrjmGv37ZBvk1tpa\nxsaNugtMzxElArff7p9RN5OMq/X12Wfkzfaao2CFg2WvI+68SCYRtOnVzyoqdAdkQrqXlCPc3ILB\noakJTjpJF7CPIy14IqFrJcyfH31f0zQiQam/vXj4YTjxxI945JG/09R0GOGJHvwjjt2/QbrgvPpq\n/8SDI0ea1VlwqKzU1d2yzaYdJpRKS6G72zRFYTBWOFj2OuLOi2RS9CZ9puKXdM6L9OJAixcHp71o\na4s38d/++2e2X3m5noWFuW5GcVdta+vhW9/6MZs3XwFMRydfqM2sgegsrcOGwfr1/es4uIVFeu3q\n/faLVvqzo0MXA8qWoGcN4nV5tcLBstcRdwnPsOI4EKwKSCT8O/tksr8qYu3aYOHQ3h5f4r/WVt2B\nZkJHB/ziFzqHU5Q6zz//OTz7rOA94m9m6NBOrr32hxx99CTOPDMZmOwujB074KKLdFv9alJ71bxu\nbjaf+UHf58qvQFTYOjCdZZXEkzMvDsNFrhZrkLbkgiAXyurq3hw3UQgycId5/Eyd6u8S6WXAXLQo\n2IWysrK/ATvT9BxRDK9B7pV+htiWFpFf/7pHrrhiq8yY8Zz88z9/Q4YPP0Rgq89xevocx7nvcaTL\n8Gprpl5JfscMek4yKbCUS4N0QTv/sMUKh8KRba6fYsf5g1VV9f1jVVdnnx8pSrCXE6Dm585ZXd2/\now+LD3C7fLqvNRPPLFNX1ooKkS99yb+Tdntc9fT0yFtvvSXf/vZiqahoEqV2iZOSQqltMmnSNXLl\nlf8ptbWdkkz2hLbZ8dqKS0C42xrVK8lJKOh1r8O82urq/Nf5eZ15Jxe0wsGSI/KZvTNXmAi3LVvM\nyz5mQ1iA2vXXB3c4N9/c/5grVujyll4dW3qZUr9zJxK6Uw26TtPOsa4uOJGeUj1yxhl/lrPPPlsG\nDx4skBCdm8j/3vsJW7/fdsUK3Q4n5qGyUgv/bALVosZ5JJP6nvoNEvyet4oK/1iNoNgI79/HCgdL\nDsjEZ79QBHUSJsItSgbWbPFrU2OjyNixwR3OjBn+158e5+Clggrq3MvKdIflpMhOxySK2RFIwefa\nITBdhgwZItOnT5evf/0Zqa7uinzvw37bdIGSSZoM9/l1CmzzxcmA6/VcRonn8BNWZr+PFQ6WHJDP\nDjMb/DqJZcvMhVvUDKzZ4nTmU6eKHHaY7phNInunTs38nKYjX3cNhXTS73UiodNVnHCCvp7m5m75\nv//7P5k79y4pL9/lefzq6jZ5/fW/Sk9PT2i7/O59pgMXp/2mMwj3zMVP1eO0s6am7/N3333+wmvW\nrODzlpVl9r9L/33iStltvZUsfRgI5S+DgthOOcXfVyM9hmHYMP3380JEu6DGyeuvw2WX9fWSMglo\nKi3N/Jxhro8OIjoD6JYt/d1O0wPEDjighxEjXuZPf3qOxYuXcuWVy9m2bRsAQ4Y8w+bNiygpKae9\nvTwVPKZYsqSSww8fadQuP4+xxYvxzX4aFJ8yfrx+rocN67/OjZOx1fEOu//+YNfQG2/UAXxOHMSU\nKfrVL7hy1qzg8/s9t+muzOmk/z6zZ69/L/hMZljhYOlD3G6euSAsiK2jw3tdIYVba6sOTsuklvFp\np2V+3oYG7appQmendwfb2dnJq6++xIYNS3nhhaXMnfs8O3fuBOCggw6ioaGBiRMnMnHiRIYPH77b\nHdMreMzdriD3X6/OcNmyzAcuTz7p3/kmEnD66TB1at+2hgWcVVT0vVf33x/8XG7frmMn/Gphl5Ro\nAeVcT3pUdhDumIzZszdvDd7ajKyFg1JqGPBLYD9AgAUi8qO0bSYBj6ArxgE8LCKN2Z7bEj+Z/Gnz\nzdq1/p1sR4f+03oJiHThtn598HmiBi0F+ajPm6d96qOSTMLZZ0ffz8FRTpjQ2ak72I6ODlatWsXS\npUt57rnn+OMf/0hzqpc89NBDmT59+m5hMGTIkH7HMUnRkR74FtYZtrbqmAk/qquDBy5BHX17O3zu\nc/3bHBQFnUz2P1/YrHvQIB0p7Scc2tp0io077tDPnp9gzRdxzBy6gKtE5CWlVC3wolLqaRH5S9p2\ny0Xk9BjOZ3ERFjQTlaA/7aOP9o0SLdSDG6YeKCvzFg49PbqAzP3367aPHOmf08frz+9Ha6vu/G+9\nVX9ub+8bUDV6dO+6KFRV6TxJ2ebiMUWpLh544Ifceuu/0ZrqwQ4//HDOO+88Jk2axIQJE9hvv/12\nb9/aqu9lps9DujokqDN88MHgwD+R4IFLJjPi4cP9o6C9BkojR+q2e3X+iQSMGqWfh0mT/I/b1aUH\nNzfe6H8t+SJr4SAiHwAfpN7vUkq9CewPpAsHS8ykV+iqrIQLLoBvf7s3OZmbMEHiXn/HHfq7detg\n82bd4Z1yCiilH2y/aNJi4Kqr4K67eoWbOwJ5zpzetj/2WG8EczomsyRHKHz3u/3TYTid0IQJ8LWv\nmY/eHZSCa6/NfS4eNyKlwBNceOGFTJw4kQkTJjBo0CDPbb2ihi+5RA8g1q83Fxjpsww/gfO73wW3\n/eijg88zfLj/ffD6rVtb4Ywz/H+3yy6DuXP7tnHKFH8B1tYGp56qZwZXXaWfQy+Kxa4HxOutBBwA\n/A3YJ+37ScBW4FV0RbjDA44xA1gFrBo+fLi/iX4vJyxyM911M5Nc/7W1+jXIFbIQ7q0mni6OS2NQ\n5bD6eu2hk0lMh+NTb+L94p93338JStNtGpzY0qI9ZCoqzArZlJX1yKxZ4b9n2LPneGBFjY8Jekan\nTQtue1Bt5qB4Fj8vrTAXYHfhIKeNQXEM7mDG
XHsEAqskjv48joPo9pAEXgQaPNbtAyRT708F3jY5\npnVl9cckOMntkhfkArh1a+YpAqI8zHFFXUf5c5lsG7XGQVwpFUx+Ozem8Rs7d+6UO+/8o1RWNktJ\niVnhe2epqAjv0KNGDZsMIMKe0QULgs/hV041rJiQX8W6qMFv9fV6IGLinht3Kdr0/xWUvCgG/WvY\nEou3klKqHPgtsEhEHvaYnex0vX9CKXW3UmqQiGyO4/x7IybqAse9z3nvt82NN2aeI950GuynhshE\nLRXFaB5mJHzrreg1DuLMqV9ZqVUS3d3BRtkg992TTurh3nuf5IUX/oelS5fy4ot/oafnb0B15PZ0\ndOglqK5F1FoGJmnQwzzQysuD7UNnn91fbTplir6OoOypLS3ez6+pC7C7jZs3m9k1ohrjg/D6X8ER\nnzM/gj9xeCsp4OfAmyJyu882nwI+EhFRSh2FTsC+Jdtz55NsMinmApOH1+m4RYI7yNWroxcucTBx\nb427uE6UP1fQfRLReuPTTosmoDIp9OJQUaG9gtztHT1aPz9r1ugOZt994e234bOf1W1cvDg4TfeO\nHU38wz/8htLSHoYOncGRR47mjTf2CTTghhHUoUftOE0GEEH3tKlJe+88+6x2B25r0zaeykrt/vmV\nr+iaE4/rgI9tAAAgAElEQVQ/ru9Xe7sWGD09+nMQfs9v1BoTTU3aG8nUhhXFGO+H//8qm8gYF9lO\nPYDjAEHbE15OLacCFwEXpba5FHgDeAX4E3CMybGLRa0URybFuDFNa7BokVnit0xKHppOg3OlYzVR\nB5ncp6hT+UxLRNbU6Hvt1d4gm09NTXdKNeSnHuqRioqu3Unq4khAFz1lQ/hzmM09PeEEvY2TmsKp\nHZ2eODHO59crMjzoWI2N+e0P/O+ZTZ+RF+LMpBg3YUZRpw1hOs733zcv8u7u6OLI7JmLNBVe9yno\n+qIKqKido/s8fmmrc23DyKStJikbTNJ/mPwXWlvDDfwmKVKiLNXV4c+vewCycKHIPvuEX2dUG1am\n+P+vrHDIC0EjmkQis0yKcdLSoh+SREIvfqMVvxGNkwvGdARWXi5ywQV6BHzTTeZF6gudryko6Vkm\nAio9p35lpb7/s2f7X2ttrfe9uuuuLZJIdBRcIETt0J1cUUEj6ro681GzaZK76urMZ7ruY2RStyOo\njfnOPZbrmYNNnxFCkC40SKfb1JQff+WqKpg9O7jeLXjrOL1ywQSRSMAVV8CPftSbpsLEsBykv+3p\n0f7fuSaoiH0maUGc+/ngg9q3f+NGXU7zo4/039OLzk544AE4/vj3WLp06e5l7drpwHciX5Pf9USl\nokIvmaRsOP98OPxwrfvu6Ohr/C0vj94Wpfzvn0NQipR0qlM2+ZKS/tdXXx+9fUFtTLet5NoWGdUu\nEpk4JEyuloE8cwA9yihmourOs4l3yEVxnShs3Rq9dkOY6+2KFVFHsD0C7QLHCCAf+9jH5KyzzpJz\nz/2db/pqv2XqVD17M1HrhC3f/nb2apCgWIK6Oj3DCHNhjqvqXPpv69gp4lDzmM6C82V78DoPjOmS\nGPrfrA+Qy6UYhEOYvt5EB1kspHd2QcVZnD+gY+Csrg7uiPz8xd3kq7hOOs4fKP3cVVX9/7DOPXIM\nnkF1A0yD4LyExL33viXd3d0ikpkNw+lso/jiey1VVfGoQsI6dkf15KjfGhv7/96m96GmxsxGVlUV\nf2dsEqOQ75oo6TaOuOIcsj5ALpdiEA4i4fWB/R7OYqp/sGyZ/kM5I65kMtjbI5HQ2zo55v1yzbsX\nt97ea9RdCNtD0B81Xe/s/M41Nf7X6Py5Fy3KbtReVdV3NJ0eqR02Mj7hBD1zCGqryRJXZ5WJoPIa\nSZv+BsuXh3sPeVXQi4OwWUGhbWzEFCFtbQ4GBPkkP/mkuQ6yUCxfDhMn9m2jo6tWynufdHtKWNBX\nRUWv3t4JzGlr00sioXWjX/lKPLUiouhygxK2lZTAE09oX34vn3EvurqEO+9cz/PPv097uxOy44Vz\ns71vcGsrXHxxr78+6HxYI0Zon/4PPtBZSP3u17PPwooV/gFeYbr79NoF2RI19gH0vU6Pc3H/15Yt\ng1/9ytteMH48/Oxn+h56/b7JJBxySPbX5UVYjMJAqIliRBwSJldLscwcgsj1KCHblBMtLcFT8Kqq\nvuqTmprM/OSrq3un1F61jZ3ZSLb3Kooud8WK4NGl20vJXN/dJXCDwHSBVt/tysszU/k43j1RVE1R\nKpI1NubGxTIb996g3z3ILTTuNBQZ4fEH3VNmDlkfIJfLQBAOQQ9opu5yDnEYtRYtCu7slep1SXX0\n7FETxbmTly1cGLxtNjaHKLpck9iB9FrBJp15aWm7jBu3TubO3RRoc6iryzxAy7kW5/c36WC9guvc\nyQdnzdI2plz63ac/ryZqt2zjXAoVhBp08tblK/MvtFxCahC8IzH0v7F15LlYBoJwEMmNJ05cRq0w\n3/GKit7kc6Yjv9LSvoZqd1bLqVOD9z3hhMz/zFET7oV1zvX1Ijt3dsgLL7wgZ5/9iJSWNht34E62\nWi9hV1urr2f5crNjBV3LwoXhnWxQB5vvzjM9aCzsmUok/JPmZXLOXAq/ficN+IOuWN6Wv/ue9iMf\nCbaGdLHgV6O2pUUvmeQPCktE5uS9CdO/O7rgsFz2Dz9snkzuggv0tabrWleu1Pltgvj4x8Nzyvhd\nUxRd7urV/hW3QCgt7WLUqCsZPHhhqspZJSUlH5ndAHp163V1MH8+PP20/nzaaToJXCKh70cmsQju\na1m/Ptyn3y9OwzinVYwO+elJDA8/XOdD8quC19YGl1+ut8u0dkXUxImxEPIHHf+33/L+++dklTvJ\nCI8fucTfEBYJKxxiIqhGrUlWynRMOkK/TKdXXqm3GzZMB175FUlXCp56qrfjNenEKip0EJxXIaHJ\nk/0LwDucdlrwnzkoe+uwYfr86YV1oH8HuSUkrWN39z00Nz/H+eefv7uwzVtv1fUz3IfR3a0FwH/9\nV9/vnfuRSZCa+1pMDL1+hYmMBhgHx5gu14Px47UQcirleRmPd+zIbABVUAz+oHkRWnGmCE4jrpTd\npwA/AkqBe0Tk+2nrVWr9qUALcL6IvBTlHIXIfBqFqB4KpiN+v4jeoUP9R4U33RTc1tJSvTzxBBx3\nXPi1ubnxRt153ndfb3Wu007Tr2HPaFg95KCR7gkn6DZ7CQaHhgZoaWnhT3/6Ey+8sA8wFj9voauv\n/hpz517c57s//MG/ZrAfft4n2fxn3Z19WBRsfb2/x1HwMym880YbXBJjulwfnCj+YcPgoou8f8NM\nBlAFJZO6o7kgmxTBIcSRsrsU+ClwErABWKmUelT61pCeAhycWv4f8LPUqxFx1gLIFSaduVP+EOD2\n24OvJ6xmAWTe+XR36//8tGm9KaNv90y23p9PfQr22w927er97qGH9Ig+SP1RWRleDzmoQw0bgXd0\ntHH
yxJP59It/ZEh3Oa/yd/wEQ1UVHHlkTb/v164Nzv3vRU0NjBjaAfc/1EfSr12byOA/K1SXd7Lk\nymdJMAlI9EtP7pSDVcq/HKxD4DMpTYyY900o9ZG2cfTWaSOg9ev+kY4O75waGbt4FmrUGKWoSC4Z\nOTL6iMaUbI0WwBeAp1yfrweuT9tmPjDd9XkNMDjs2GPHjs17tKEfYS6lQV5LtbXae8W0cptDWPBd\nttGxzjkXLjRPW5DJOSsrzYyO2VxTCTvkHnWutJaXy6+ZLjXs9N22miZpXb6y3/kzScVdX9sprXX7\n9fuRFjX+NfKxFN1yMzd7Wi8zMboGunqyRVoJsHTnwI1oUfU3JOmTJiQjF8+CuioVwflFtNdD2p9m\nLIiE9K0mS/YHgGloVZLz+avAT9K2eRw4zvX5D8C4sGOPHTs2ss9wXKUo3Zg+A37bmXYSQdeT3ilk\nWlPA65zTpsUjaPyWujqz3yH4moJLXSq6ZA43iIA0cqMoun2PM5O7PUcW5r76PVJR1iX1dd2yInm8\n50atVR+T+kRLtN+CHbKI6b1fxDD6WbFCpL66XZLsFEWXJNkh9WyRFYyL/jCa4jOia6VS6tU2z9NF\nvtRiGzXm1VXKdW6PexCXcCg6g7RSagYwA2D48OGRdPm5UD+ZeHyI9M5s77hDr9uwQasdOzrgssvM\nztXcDO+s6YT7/2v3NLmqoYFzzuk/TY4rI6Nzb6NGt0bhqqvMZvoNDTBrVg/ezhaCn5oIoIYWRqAf\nhpGspYZmmqj12G4XE1iuf9C5c7UyPEUiAUsebWPyxDbapJI2Ep7nrKCDb3Aft3VcT0J5q2USrdtY\nwgQms4QuymhWSWoquyiVLnpUGbva+qtXyuiiAVeV3RhUO+PHw/tX3cbiOa/zDgcygndo4GESBBhv\nIDvViI9+MEE7SxJnMVk9TVdJRVblMY3d+XJNQVylUuTQGA3xGKQ3Am4nzqGp76JuA4CILAAWAIwb\nN05M7T5xl6J0WLwY2tu9O6b2dt2/pNsP3CH+c+aYqwNrqroZMW8WlDwQKt366qKF5mYtpII6UM9z\n1miD8jPPmO4R3EmnoxSorr4Cz60X3rRpE8uWLdudvnrHjkrgKaAcqKGiopPy8hKUKg322KFzd8fa\nwMNcwk88tyt3d8CNjdpK6vieAuPXP8z71d/iweZTuZif0eZRh7maZm7r+haJruAOdjyreJ8hLGYq\n78hIRsgGGtof4LWqo5isHqGrqpbmFkUNTZTRxRJO7ttpR1XE++jfqw79NOckb/X+EwXltM5Udx8w\nohvftpz3Z9/O4lHXZefiucfkqMiCHBqjgVjUSmXAO8CBQAW6FOjhaducBvwe3ascDawwOfbYsWON\nQ+RzFbI++4L1gSoNvwAld4I2U/VPvdrmrQcOmCa3LFspi6q/IXMqviON3Ch1bNUqBJXeZu9rSI/G\n1W31L0npr67xuffVXbKo+hu7dW3dNTXSXlMj32tokM985jOCljZSXV0tJ510kvzbv/2bPPPMH+W+\n+zp6Z+rLV8qK5PFSz1apYperfT1STZOnmmQF46SeLZJkR7A6JZHoqyN0GT6Mj5HB0kJCFpWfJ3PG\n/bcsqvwX7989kdA6PxN1RZDuM+xPFGdOa5H8ZJ4rdI6KYsDnHhSNzUFEQLuovgWsBW5IfeeuIa3Q\nHk1rgdcwsDeI9EZIm+j8c1KKsqVFZlUsCBAOPb7ZSp3nM0yPvft6qttlRdWEaA+7Rw7sFhKyiOky\nJzFHFi5ol4ULRebMXC+NlXNSgsPp6HZKfW2nZ7rqOdNelsby70h9n+11x3gfX5NadgiE1TVO9T14\nC7ytIGecdJLMbWyUt265Rbpuvtm7Y3LpVZ1ru4mbZRZ3yc3cLIuY7mtY3X0vuCFwu92dpIc0j3SM\nTBYnL3rQNkF5rtPuke915dN4mo+kR0WRWKnA+NyDohIOuVrc6TPC7D4ZDySCLNiLFslN5bcGCoeg\nTt8RSF7/y7q6tCRos78bTbqZFkb26Fx3d3R1+/WOGt3Xn5K0fh1jCwlZyHkyjQdlWvnDsnBBuyxf\n8IbUs62f4XOpzyi7J5nU5wnrsLK1vI8zHOWbSvNcLc71hwkKrw7d9OHPp/E0H8KoGLyFCo3HPRgD\nttiPm4wGEmEPV2OjLOIcqabJ87gVpZ3GNaRD/5dRpJtpIqRp04L9VKurvSvaNDYad8jd1dWy9F/+\nRVrKyjyFSU/Q/n4pU90/WDb+rTU1+vpNqvKkS3OTajJxLjU1OjvetGnhyZTSH+icTJtjIB/CqJDe\nQsVC2j0oAVvsJ51IAwmTqfiiRdJas6/Us8V7s+p2335n9//X1Lc2inQzHU0nEnqJ2rnW1RmPnntA\n/pB6jXSOigqd19prnVsYmpQY87s+t0qlri64YlF6MMbWrf7tc19DnEJkzhwzYeiVZXBv179bdkNM\nKbtjSdBULDhFOObP144o8+drLyVPN1YTV7iGBhLl3SzhZOrZSpKdKLpJspN6tZ0lTwlPP9ZGfXUH\nyYp2lBKSSelNafDaShgyBGbOhJtv1q+DB2uf23Qc96P6ep1nQin96pUfwdRLwam2I2J0/3bT3Q1X\nXonU19NdXa0txj6bKmCSX1KpIDo6/BMxub1NGhp6Q8LTSSR0xZdly8Lvm1I6/4Yf7e1wxRW9v00i\nocO+/aiuhvfeC94mkdAP4vLluj1OVR8/hg7tDWsOIt0bJ+ge5TNa17JHUXRxDtli7HZs4gqX6rDH\nT57M+52jWNw8mXcqRjGibD0NT80kUSkweTLvqzIWd6TW9ayn4bGZJEYfDkN8fGsnTYKrr4ZRo/r6\n8YWVmALtrvjhh1BeHp6uM0N6mpp48IEH+NeyMo7bvp1DgOsAP2/DEr9ycg7l5bpjbG7WnWpLS7DA\ncvsop+eP8PIXBv/7ZlriDfqWJlu82L9MHsB11+lcIqa5Ld5/X6cfXbAguA0mASzpuXvC7lExJSGz\nDBj2OOFgTFAARXm57iDa2nZ32FWLF3PO7o7nWt25DRkC27dTBZzD/dCBXs54SEfD+c1MWlr0iDKZ\n7B/HYJKytLMzZ4IBoAno+NvfuOOgg9h36lSGX345lT/6kX/H5pf21WHKFD0qHjQINm+GX/4yWppR\nE6Hpd9+iBgo5s8aw2ZkjOEza5rRv6FD/+p1K6chJp6MPynPtNRsIakexZ620FCdx6KZyteSk2I9j\nA5g9O1hfXFMT7PkQpueNkpMi2zJoYUsiEan2Z49zDelG6myq2TvHmTkz+L4kEvF6m0Q1aDsG3Fzo\n8YOOWVnZN6ahpUW33W03ysQbx3r07HVgy4RmQPofpaqq99Wvs6iu7ltf0REuJ5wQ3MFMmxY9qVKI\nW22mLp2dJSXSHSYMHIEYZNwNuk9OMeqgos3O/fS7jvJykS9/OV6vk6j3zcSl1R04FiWJl3HQi6vz\nzsYbp1jyD1nyihUOUQn6o1RUhI+snbqQtbXhnU0yaVYj0d0p
zJwZ6lYbZQTc41raMPAmOvFE7UoZ\nNFoOE4hOMeogd8yamnAPnzhHt1HjFtydpt+o+777Mh+N9w1FN2tHplgvpr2SuITDHuWtFEiY7jlM\nh9/UpJddu8Iz1JWV6Xw9S5ZAbf/kb/2oqYFf/UobRJua9N+3qanXQNrWZubF4kK5lkpCsiElk/D1\nr2sDa5CR/uMf19v6XcOoUVrvf8QR/vezpQW++tW+3kXppF97Nvh5gSWT+rcJ8nByu7/Nng1f+xpc\nfDHMmhX8WwXhPua0af66f8f2kQ02/5AlC/Ye4RD0R+noCHZJjEJtbW8HM3q0f+1QNz09/p4xLrda\n8XNXTEMiNBfoNXAGCSAnQ5+Jy2TYcSZM6O0g/+Ef/F084+ggwdvH+e9/h48+Cvd7rqqCgw+GH/9Y\nG9K/9z3/ikCm7XWM50cc4V/aLo7OO+x3yFe1MsvAJI7pR66WWNVKQVNsE1WH6eIuXmASwBVipO1R\nSpZMmCDHHHOMHF1aKltAdoB0m6iKvFQ/5eW9+XzS1SFbt/rfhyg5eqIE9MUZ3ZuLYh5RHAGitjfX\nah+bf2ivhL1GrdTaqutrzpmjXzNVMwQFCpWXw1NPBas6TOnu7h09hrlDdnXBY4/pkbTPCG+XCL9Y\nvpzu7m4mXHUVKxcvpuyeeyi56SZUdf900oHU1Gh31IUL+4+Wly/Xrpbp6qDq6r7qFmcUftddetR/\nyinabfezn+3dx1HleKnUurrgtdd6P8c1ul0ZIeAwClFcYaOOxnMdvBYlsNJiSScbyQLMBVYDrwKL\ngXqf7d5FZ2N9mQhSbeyhh8brhhc26t2yRRtlv/hFkdLSzGYO7tGjgadMRyIh3541S3b4nK8jmZRd\nf/97/2vxc3Wsq9NGc7/R4sKF/UfWTz/t38bqapFt26LdR6d9oblFJPvRbUuLviaTPE3p+5nMMqI4\nAmQyGs+Hq6nNP7RXEaWPDVqyFQ6TgbLU+x8AP/DZ7l1gUNTjj/XroL3+hKZ/dr8/ivMnDXLXNFnc\n6oDWVv+OOrV0g1xQWiozjjxSWhIJ6ayqkp6wTiLd4yU9pbNXh+N4WaV3QvPnm1+Pc//iLrCRaQfp\n7BeUqM5LPRPlfCausNl26LbztsRIUQiHPgeCqcAin3WZCQe/P2N6krRsR1/ZBJgFdJAfrlsnHSF+\n/z0gnQ0Nve0I6yRMO2f3sUwzk3ot6Xp0004/qi0hagdp+pu5z5XJLCNoZlNd3eu+m0ncg8WSA+IS\nDnGmz/g68KCf9gp4RinVDcwXXQrUE3cN6bF+G7W3w4UXwrp1+m962219PUii1geNoxZrRQU9VVU8\nfdVVLL78cpYuXcqRq1czH13w0g8FlDkeTSaJoUxr57qPdf/94Sku/Cgv76tHN3WPNK3v6hC1Fq/p\nb+acy0k90trq7yHkVXvYJLfTypX6HHEWL7dYCkyocFBKPQN8ymPVDSLySGqbG4AuYJHPYY4TkY1K\nqU8CTyulVovIMq8NxV1DWinxbVhXlzaqBmFaaDzLWqydJSU8mEjwjR07aJ89m9raWo477jguHjaM\n5DPPaAEWxPHHm58sE9/1bK4v3TBq2ukHJZCLw9hqek1lZTq304gR4cn3/O5fWN6iXBQvt1gKTKhw\nEJEvBa1XSp0PnA6cmJrSeB1jY+p1k1JqMXAU4CkcYsXUVzyowzM5TU8Pv/vCF/jtvvsypraWTx17\nLKX/+I9aMD3/vO5AgrjkEn3+887zXu9OnPbhh9FG5JDd9T31VN/OzbTTjyNTaFDCuLBrSiR62/D7\n30ebZXiRSWI/08GJxVKMZKOTAk4B/gJ8ImCbGqDW9f5/gVNMju9rc4iyuG0TfhikWPCLKegB6b7w\nQq3TdwyjlZX68zPPmHu6KNXfK0ikvz0lLP+Rl6476PqC2ve733nr0aPYeDI1toadI+iaEgn9u0et\nJpeJt1GxVmGz7LVQDAZp4K/AerSL6svAv6e+HwI8kXo/AngltbyBVkcZHT9vwkFEev78Z+naZx/p\nKC/vk5eoCaQVAhPX+eZlqqyM5v00a1Zfr6ug/EzumsMmBnjTPEFOdbMFC4I751x62Jga3U2FlGkw\nYibeRjZ/kaXIiEs4KH2s4mRcWZmsqqrKWN0D6OC5G2/s97WI8Je//IWlS5fuXnZ89BFTgbE1NYwe\nPJiPH3IIgydOZP9PfAL1zW96t6Oy0t/AGZXx4+Htt3vVMBUV/sd21FCDB/vXEEjHUdP45ft3vg/S\n0dfX516Pfv/9OojN73dvbNS5joKuyU1bm75PXtfjVJM7++zMrino2Pm4VxZLGkqpF0VkXNYHikPC\n5GoZe+SReuT1jW9kNmtwjdy6u7vl5Zdflh/96EfS0NAggwYNEtAVMIcOHSrnnnuuLFiwQNasWSM9\nPT19RXGQCiOsznBQ3WLTGUi+VRaFHg2HqYESieIKNrM1EyxFBEXoyho/JSXamHfOOToj5imn6FG1\nO8WDX2UtoEspfrp+Pc+edRbLly9n27ZtAHz605/m1FNPZeLEiUyaNIkDDzwQFZQyI8i4+vnPw7PP\n+u8bJRVHlFrMuUyclu9snumG52HD9IzML1WKSHRDr2nFtkzI9Ni2QpuliClu4eDmi1+E9eu1OmH1\najjoIBg3TmfW7OhA5s1Durqgs5O2khI6eno4adcuVl13HSNHjmTq1KlMnDiRiRMn8ulPfzr6+f06\ngAceCBYO112n8xCZqIqi5I3KZeH4qDEK2eDEH7iFbmmpzlTrR3t7ZgIqaixFLo/tdd02NsJSRBS3\nzWHcOFm1apX+kPZnkpoaupXi95MnM+mRR6CrixqgE+hWirtOO41h06czYcIEhg4dmrtGbtumE755\ndey1tbBpkx7pOkLlgw/gF7+IHneQSOhOMT0AKxfkS4/e2rq7Dnc/gmw5yaROGjhQXUSDrtvaKSxZ\nEpfNofizsgK0tiJOoFGqwIpqaqJs1y5O++1vqe3qohZ9MZVAtQj/+vzznNPRwdCFC7PL5hqEExmb\nrjoqL9fZTM85R6s/lNLvb7xRF9TxqwfgR12dVqsdf7x+Xbcut6PLfGXzDIoRKCvTo3G/dbmaNeUD\nk9gIi6XAFLVaadeuXdxyyy2U/eY3fGv7drxqqvlq9Ldv1x4vTiGfCy+ERx7RI3kvHW9U/a9XZKxD\nZ6c+54IFsGhRX3VBlIA0p2Pu7tYCrrkZVqzQ74NmDnHosnOpo3cIsm20tMCMGfDgg5kH0RUrtkKb\nZSAQh1U7VwsgSin56eDBwXEGUZaqqv4eJZl4m0QtXF9dLTJ7tnlt6URCxxqYpL12M5A8Z0y8ovbE\njKWF9gaz7NGwN8Q5HHzwwbJy5Urqn3gi2O89G+rr9d9yxw7vdX763zlzdFGZqPfPqcGslJ5h+Km7\nkkmtQvrlL72v20vvPtB02UG2jbo6uPNO7YSwp3ny2NgISw7ZK2wOdbW1WjCsXu1fsD5b2tr8O+gg\n/W9QBbMgmpr0UlK
iXXP9aG7W1x1F/TDQdNl+to1kUnsrXXZZvFXd0omrymBUbIU2ywCgqG0OvPqq\n7hiam7V7Yy4I6hCC9L9BCehM6O7Wo/xk0t9l9NBDtY3B1KV0IOqy020bQ4fCFVfkPstpoV1J82HT\nsViyoLiFQ3d3b8eQbb0FPxIJrRrycpsM8un3CoyrrtaGVBNVU3MzfOITwTWEv/tdPaL1W5/usZPP\n+IQ4Sa89kessp8WSZjuXcRcWS5ZkpVZSSt2ilNqolHo5tZzqs90pSqk1Sqm/KqWuy+acseOkdvbC\nz2XSUUc8+STccYcOcmts1N5Jy5b1VRf4UVMDhxwSrF6orIQrr9Ttq6zU+wWpH3JdsD4f5GP2M9DU\nbxZLAYhj5nCHiMzzW6mUKgV+CpwEbABWKqUeFZG/ZHzG0tK+lc0qK7VNIijlQlWVXpdI6FfHLfLO\nO+HSS/tvX1vr3QF7qSPSg9IcdcFbb8Hcud5xDU5nnUh4qxdee02rnbq69KymslJve+21cM013gIt\njhoKhSYfs5+BqH6zWPJMVt5KSqlbgKYQ4fAF4BYROTn1+XoAEfle2PHHKSWrvFaUl2sBcdppcPjh\nMGoUTJoEBx/s3RE7mTfPOksXfnE64aB96up0YR13h5qJN5CJMEknW68jk0yluSbTWIt8ePIEZX0d\n6NHXlr2eosjKCtwCvAe8CtwLfMxjm2nAPa7PXwV+YnL80HoO9fW9hd2nTesttmPiO75ihY49MMjo\nuptM/dOj+uoPdD/4bGMtch2rEZRlN5OCPxZLEUG+srIG1ZAGfgbMAST1ehvw9WyElVJqBjADYGzY\nxh0d2rulpCQ4BqKpCdas6f3sGCSD0lh4qRcyVUdENTwOZLVHHMbeXHvy7AnqN4slx2RdQ9pBKfUf\nwOMeqzYCw1yfh6a+8zvfAmABaLVS4Emj5CiaNw9OP113PEEGSQcv/Xa+vIGyOU+YOifXaaLjqqmc\na08e60pqsQSTzbQDGOx6fwXwgMc2ZcA7wIFABbpc6OEmx4+lTKiXysCkprCXeiFf6ohMzxOmjslH\nag1bU9liKSjEpFbKNkL6h0qp15RSrwLHpwQESqkhSqknUsKnC7gUeAp4E/iNiLyR5Xkzwxm5hkU3\nV1d7qxeyjWw1jcjN5DxudU4qcy1NTfrz5Mk6tXjQ+riig4PubTHHWlgslj4UdW4lX28lh7KyaMFx\nSt2VA7YAAAkWSURBVOl4hKuv9veIqa6GjRt1R+xHJt5AfkVtrrpKrw9SAZmcJ8wDJ2qepkyxeYMs\nloISl7dScUdIB+Hk34kiHGpqdOc0d64OLrv9dm+DZJBggOj68CAj7U03aaHllb4hynnCjNhR8zRl\nijX2Wix7BANXOJSVwWOPwRln9HZCiYTuiP1obtZV2Fpaekfu116rO+dcGiTDDOCOigcyT98QZsSO\nmqcpG6yx12IZ8BS3WqmsTFZVVPTt8KurdSEdZ4TtqF7WrNEeSVGrrMVd9tLLEyhKeu9MVTxh6px1\n6+DAA626x2LZw9k71EpHHKHtA2vWwObNOlHdIYf0HYU6qpf779fxDl6Ul+t1Xsn14krmFpTlM0r1\nt0xVPCJaVXbrrfpzer1px5ht1T0Wi8WA4hYOJSXx6Nw7O/33i0PfHhb4tW6df0K8dDJR8bgFU1ub\nzsNUWdk/D5NV91gsFkOKWzhEIWh0nmlablPCAr+eeKLvqD1oBhE1e6qXYHKu8/bbtXBwY9NEWywW\nA4q6ElwkgtJVZ5KWOwom6S6cUfv8+doG0dgYTyUwm37aYrHkgD1n5hDmQgm507ebprtIH7VffXX2\nKp6BnIfJYrEULXuGcHB7Cd1xh/5uw4b+HW6u9O1BJUODZiZxqHgGavU3i8VS1BS3K+u4cbJqVWCM\ndGb1EnJBodphI5ItFouLvcOVNYxiqQUMhfMEshHJFoslBwxs4RBXeui4KJQnkHVRtVgsMZOVcFBK\nPQiMSn2sB7aLyBiP7d4FdgHdQFccUx7AGmPdWBdVi8USI1kJBxH5ivNeKXUbsCNg8+NFZHM25+uH\nNcZaLBZLToglzkEppYB/Av4zjuMZExTbEEf8ghvTWgwWi8WyBxCXzeGLwEci8rbPegGeUUp1A/NF\nlwLNnnwZY4PyJuXTI8pisVjyRKhwUEo9A3zKY9UNIvJI6v10gmcNx4nIRqXUJ4GnlVKrRWSZz/lm\nADMAhg8fHta83Btji8kjymKxWPJEqHAQkS8FrVdKlQENwNiAY2xMvW5SSi0GjgI8hUNqVrEAdJxD\nWPuA3Bpji80jymKxWPJAHDaHLwGrRWSD10qlVI1SqtZ5D0wGXo/hvPnBekRZLJa9kDiEw9mkqZSU\nUkOUUk+kPu4HPK+UegVYAfxORJ6M4bz5wfGI8sJ6RFkslj2UgZ8+I9fY9BQWi2UAEVf6jD0nZXeu\ncDyi4kivbbFYLAOEgZ0+I1/Y9BQWi2UvwwoHU2x6CovFshdh1UoWi8Vi6YcVDhaLxWLpR1F7Kyml\ndgFrCt2OEAYB8SYUzA22nfFi2xkvtp3xMUpEarM9SLHbHNbElt47RyilVhV7G8G2M25sO+PFtjM+\nlFKx+P9btZLFYrFY+mGFg8VisVj6UezCIZ7U3rllILQRbDvjxrYzXmw74yOWNha1QdpisVgshaHY\nZw4Wi8ViKQAFFQ5KqX9USr2hlOpRSvl6ACilTlFKrVFK/VUpdZ3r+48rpZ5WSr2dev1YjtoZeh6l\n1Cil1MuuZadS6vLUuluUUhtd604tVDtT272rlHot1ZZVUffPRzuVUsOUUv+jlPpL6hn5lmtdzu6n\n37PmWq+UUj9OrX9VKfV5033jxKCd56ba95pS6n+VUp9zrfP8/QvUzklKqR2u3/Im033z3M5rXG18\nXSnVrZT6eGpdXu6nUupepdQmpZRnyYPYn00RKdgCfAYYBTwHjPPZphRYC4wAKoBXgMNS634IXJd6\nfx3wgxy1M9J5Um3+EPh06vMtwNV5uJ9G7QTeBQZle525bCcwGPh86n0t8Jbrd8/J/Qx61lzbnAr8\nHlDA0cCfTffNczuPAT6Wej/FaWfQ71+gdk4CHs9k33y2M237M4BnC3A/JwCfB173WR/rs1nQmYOI\nvCkiYUFuRwF/FZF3RKQDeAA4K7XuLOAXqfe/AL6cm5ZGPs+JwFoReS9H7fEj2/tRNPdTRD4QkZdS\n73cBbwL756g9DkHPmsNZwC9F8yegXik12HDfvLVTRP5XRLalPv4JGJqjtgSRzT0pqvuZRlhZ5Jwg\nurTy1oBNYn02B4LNYX9gvevzBno7if1E5IPU+w/RhYVyQdTz9CuABFyWmurdmyt1DebtFOAZpdSL\nStfsjrp/vtoJgFLqAOBI4M+ur3NxP4OetbBtTPaNi6jnugA9onTw+/3jxrSdx6R+y98rpQ6PuG8c\nGJ9LKVUNnAL81vV1vu5nGLE+mzmPkFZKPQN8ymPVDSLySFznERFRSmXsehXU
zijnUUpVAGcC17u+\n/hkwB/0QzQFuA75ewHYeJyIblVKfBJ5WSq1OjUpM989XO1FKJdF/xMtFZGfq69ju556OUup4tHA4\nzvV16O+fR14ChotIU8p29N/AwQVqiwlnAH8UEfcIvpjuZ2zkXDiIyJeyPMRGYJjr89DUdwAfKaUG\ni8gHqenTpkxPEtROpVSU80wBXhKRj1zH3v1eKfUfwOOFbKeIbEy9blJKLUZPO5dRZPdTKVWOFgyL\nRORh17Fju59pBD1rYduUG+wbFybtRCl1BHAPMEVEtjjfB/z+eW+nS+AjIk8ope5WSg0y2Tef7XTR\nTyuQx/sZRqzP5kBQK60EDlZKHZgalZ8NPJpa9yhwXur9eUBsM5E0opynnz4y1QE6TAU8vQ1iILSd\nSqkapVSt8x6Y7GpP0dxPpZQCfg68KSK3p63L1f0Metbcbf9ayjPkaGBHSkVmsm9chJ5LKTUceBj4\nqoi85fo+6PcvRDs/lfqtUUodhe6Ttpjsm892ptpXB0zE9bzm+X6GEe+zmWsLe9CC/mNvANqBj4Cn\nUt8PAZ5wbXcq2ltlLVod5Xy/L/AH4G3gGeDjOWqn53k82lmDfrDr0vb/FfAa8GrqRxlcqHaiPRZe\nSS1vFOv9RKtBJHXPXk4tp+b6fno9a8BFwEWp9wr4aWr9a7i87Pye0xzdw7B23gNsc927VWG/f4Ha\neWmqHa+gDefHFOP9TH0+H3ggbb+83U/0oPMDoBPdb16Qy2fTRkhbLBaLpR8DQa1ksVgsljxjhYPF\nYrFY+mGFg8VisVj6YYWDxWKxWPphhYPFYrFY+mGFg8VisVj6YYWDxWKxWPphhYPFYrFY+vH/AZ8m\nCS/bM+YRAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68ac0e3750>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXmYVNWZ+P95e63u6rY7ijEgEMU1ahIN4C8xBjBG427o\nYSZiJtGJEQxqYtxGo6iBiclEo2ZMzMAYMUsTna/KuEbROAJmA3RcI2hwCaCRAAr0Qq/v749Tl75d\nddeqW9XVcD7Pc59a7nburVvnPeddRVWxWCwWi8VNxVA3wGKxWCzlhxUOFovFYsnBCgeLxWKx5GCF\ng8VisVhysMLBYrFYLDlY4WCxWCyWHKxwsFgiIiL7iIiKSNVQt8ViKTZWOFgsRUJEjhGR/xWRLSLy\npsf6fTLrO0RklYh8Lmv9mSLyloi0i8j/iMjuJWu8ZZfHCgeLpXi0A3cAl/ms/zXwf8AewFXAPSKy\nJ4CIHArMA74M7AV0ALcVu8EWi4MVDpZhi4iMEpF7ReTvIvKGiHzDte46EblHRO4WkW0i8qyIfNy1\n/iMi8pSIvC8iL4vIaa51dSLyw8yofYuIPC0ida5Tf0lE/ioiG0XkKr/2qepyVf0l8LpH2w8EPgFc\nq6qdqnov8ALwD845gAdVdamqtgGzgRYRaczvblks8bDCwTIsEZEK4EHgeWBv4FjgIhH5vGuz04H/\nB+wOLAT+R0SqRaQ6s+9i4IPAhUCriByU2e9GYDxwVGbfy4F+13GPBg7KnPMaEflIHpdwKPC6qm5z\nffd85ntn/fPOClVdA3QBB+ZxLoslNlY4WIYrE4E9VXWOqnar6uvAfwFnuLZ5RlXvUdUe4CYgBXwy\nszQA38/s+yTwEDA9I3S+CnxTVderap+q/l5Vu1zH/U5mtP88pgP/OPFpALZkfbcVaIy43mIpKtbr\nwjJc+TAwSkTed31XCSxzfV7rvFHVfhFZB4xy1qmqezbwFmYGMgIjRNYEnPtvrvcdmI48Lm3Ablnf\nNQHbIq63WIqKFQ6W4cpa4A1VPSBgmzHOm8yMYDTwtrNORCpcAmIs8CqwEdgO7IdLrVMEXgbGiUij\nS7X0caDVtd5tI9kPqMm00WIpOlatZBmuLAe2ici/ZgzIlSJymIhMdG0zXkRaMnEJF2F09n8E/oQZ\n8V+esUFMAU4F7soIizuAmzIG70oR+ZSI1MZtoIhUiEgKqDYfJSUiNQCq+irwHHBt5vsW4KPAvZnd\nW4FTReQzIpIG5gL3ZdkoLJaiYYWDZViiqn3AKcDhwBuYEf/tGNWLw/3AF4H3MC6hLarao6rdGGFw\nYma/24CvqOqqzH6XAi8CK4DNwL+T339lEtAJPIKZmXRijOAOZwATMu37HjBNVf+eub6XgfMwQmID\nkAZm5dEGiyUvxBb7seyMiMh1wP6q+s9D3RaLZThiZw4Wi8ViycEKB4vFYrHkYNVKFovFYsnBzhws\nFovFkoMVDhaLxWLJoayD4EaMGKH77LPPUDfDYrFYhg3PPPPMRlXds9DjlLVw2GeffVi5cuVQN8Ni\nsViGDSLyVhLHsWoli8ViseRghYPFYrFYcihrtZLFYrHsynR2wqJFsGYN7LcftLRAKlWac0cWDiJy\nByaXzQZVPSzz3e7A3cA+wJvAP6nqex77ngD8CJNS+XZV/X6+De7p6WHdunVs374930Ps8qRSKUaP\nHk11dfVQN8VisfiwYgUcfzz09kJ7O6TTcP75sHgxTJwYvn+hRA6CE5FJmBzzv3AJhx8Am1X1+yJy\nBfABVf3XrP0qMWmGjwPWYZKZTVfVP4edc8KECZptkH7jjTdobGxkjz32QEQitd0ygKqyadMmtm3b\nxr777jvUzbFYdnm8ZgeqMGoUvP9+7vbNzfDOO/4zCBF5RlUnFNquyDYHVV2KyVDp5nTg55n3Pwe+\n4LHrkcBfVPX1TDbMuzL75cX27dutYCgAEWGPPfawMy+LpQxYscIIgZkz4dprzevIkXDjjWbG4EVv\nL9x3X/HbVqjNYS9VfSfz/m/AXh7b7I2rIhdm9vD/+R1QRGYAMwDGjh3rt00+bbVksPfPYhl6OjuN\n2sg9O2hrM6/XXw9dXd77tbfD668Xv32JeSup0U8VnKhJVeer6gRVnbDnngXHcSTO+++/z2233ZbX\nvnfeeSdvv/32js/77LMPGzduDNznqaee4pRTTgHggQce4Pvfz9tcY7FYyohFi/xnB6pQ61NeKp2G\nceOK1y6HQoXDuyIyEiDzusFjm/W4yjViSjWuL/C8Q0aQcOj1+6UzZAuHuJx22mlcccUVee9vsVjK\nhzVrzCzAi64u6OvzXldVZewSxaZQ4fAAcFbm/VmYylvZrAAOEJF9MyUSz8jsNyy54oorWLNmDYcf\nfjiXXXYZTz31FJ/5zGc47bTTOOSQQ3jzzTc57LDDdmx/4403ct1113HPPfewcuVKvvSlL3H44YfT\n2dkJwK233sonPvEJPvrRj7Jq1Sq/0wJGuFxwwQUAnH322XzjG9/gqKOOYty4cdxzzz07trvhhhuY\nOHEiH/vYx7j22muLcBcsFkuh7LefmQX4ka39rakxxujFi0vjzhrHlfXXwBRghIisA64Fvg/8t4ic\nA7wF/FNm21EYl9WTVLVXRC4AHsO4st6RKYFYMBdddBHPPfdcEofaweGHH84tt9ziu/773/8+L730\n0o7zPvXUUzz77LO89NJL7Lvvvrz55pue+02bNo0f//jH3HjjjUyYMOBIMGLECJ599lluu+02brzx\nRm6//fbIbX3nnXd4+umnWbVqFaeddhrTpk1j8eLFvPbaayxfvhxV5bTTTmPp0qVMmjQp8nEtFkvx\naWkxrql+dHfnfq6rK26b3EQWDqo63WfVsR7bvg2c5Pr8CKaO7k7JkUcembdbaEtmfjh+/Hjui+mC\n8IUvfIGKigoOOeQQ3n33XQAWL17M4sWLOeKIIwBoa2vjtddes8LBYikzUikzC3DHMtTU+BuiAbZs\nMdsHubImxbCOkA4a4ZeStGtuWFVVRX9//47PYS6jtRmrU2VlZajNwm9fMPELzuuVV17JzJkzYx3L\nMvwZymhaS35MnAhvv21+t9dfh+efh3vvNQZpPxxX1jPPLG7bbG6lmDQ2NrJt2zbf9XvttRcbNmxg\n06ZNdHV18dBDD0XeNwk+//nPc8cdd9CW8Ylbv349GzZ4+QlYdib8/OVXrBjqllnCqKszHf3VV8PU\nqcF2CCidK+uwnjkMBXvssQef/vSnOeywwzjxxBM5+eSTB62vrq7mmmuu4cgjj2Tvvffm4IMP3rHu\n7LPP5rzzzqOuro4//OEPRWnf8ccfzyuvvMKnPvUpABoaGvjVr37FBz/4waKczzL0BPnLl0oFYUmG\nMDsElM6VtaxrSHulz3jllVf4yEc+MkQt2nmw93HnYeFCM1NwBIKbhgaYN6/4KghLcqxYAccdZ+wL\nXpRd+gyLxVKeBPnLl0oFsbPT
2WmE8Ny55rWY2WcmTjSd/5w5RgCkUsattaGhTF1ZLRZLeeL4y3vN\nHEqlgtiZGYrsqHV1MHs2XHrpgLF63LgyTdltsVjKkyA9damiaXdWhtqe4xirh4JhqVYqZzvJcMDe\nv50Lx1++udmoHoZCBbGzEpT/qFTZUYeKYTdzSKVSbNq0yabtzhOnnkPK9hg7Fdn+8qVWQQwH/OJA\nwr73UtdBfHvOcItDGXbeSrYSXOHYSnCWnY2wjtfLblBVBbfcAhdd5P/99u3+xmfHE2zq1PBO3zm/\nczzH0FwMu0VS3krDTjhYLBaLG7+O3+l4Ozv9q6qJeEcj+33vprkZHnwQTj3V/9xgzr/XXuAV/9rY\nCBs2JDuDKBtXVhE5SESecy1bReSirG2miMgW1zbXFHpei8VicRuM29pMh97WZj47I/WwuglxvgdT\nZ6G5GR54wAiGoHMD3H23t2AA8/1dd0W/3lJSsHBQ1dWqeriqHg6MBzqARR6bLnO2U9U5hZ7XYrFY\nohiMg+JA8uHUU42X0tq10YzVDz8cfLyw9UNF0gbpY4E1qvpWwse1WCyWHNvCqlXhAYBBcSBxaWgw\nNoZUKnrwYcx8mmVD0sLhDODXPuuOEpEXMFXgLk2qpoPFYikupfSyCTqXl22hv3/AsygbJwAwKA4k\nrs3BHTcSJfhwxQp47LHga85Kz1Y+qGoiC1ADbAT28li3G9CQeX8S8FrAcWYAK4GVY8eOVYvFMnQs\nX67a3Kza0KAqYl6bm833bjo6VFtbVefMMa+dncmda+lS1QULVFMpVdNlR1saGwfa4XfsO++M9737\nujs7zXde525uVt282X+9szQ05HevggBWagJ9emLeSiJyOnC+qh4fYds3gQmqujFoO+utZLEMHUFe\nPu7kb2HeQoWeSwSqq3Mro4XR0AB///vAzMOZlWTHgcT93k3Qtb/2mn9CRDDRz0uWlK8ra5Jqpen4\nqJRE5EPAu6qqInIkxhC+KcFzWyyWhIli7J06NZn0EmEeRXEFg4NTFCdIXeWXoiJK6oqJE80xZ882\n9o+DD4bvftcIz0cfDTaEX3aZt2Aol2C5RISDiKSB44CZru/OA1DV/wSmAV8XkV6gEzhDk5qyWCyW\nohDF4BpFgETJDZS0R5G7jcVMnOc+dlsb/O53cMcd8O1vw5gx/jaJhgY46KDg43m1tZSCIxHhoKrt\nwB5Z3/2n6/2PgR8ncS6LxVIaohhck0oXnqRHkbuNo0cXL3GeV1I+p/7zNddAU5P/vo5h293Zjxlj\norLddRzcbXUC7txR1sXMDjssE+9ZLJbi09JiOjEvnM7N6dS9iJMuPOhcfqRSA8kG/doIxUucFzRr\nAtPJixgh4ZUQ8cUXB5d2/frX/Qv89PSYAkDvvz8QXLd9u/l87LHFqS9hhYPFUkRKWSQmaaJke40i\nQAo5l19uzVQKfvpTM/J/8kn/Nq5dW7xCSFFUYaomT9O8eaZ4z7x5ps2HHZYb2R30bLS3+68vVpT1\nsMvKarEMF4aiSEySdHYaj5sLL4RNm2DECKMnd+u5UymTRuKEE8x1dneb66yujp8u3Mu429IC06YF\ne0IFZaR97bXiFUKKogprb4d16+DqqwdUSDfcAH/7m5kNRKWqKniW8stfwtlnRz9eFGziPYulCER1\nAy1XorqnZhtka2rMdo89Bkcfncw5H3jAzADySUW+fTuMHJns7+B08qtXw403QkeH/7ZO5tYDDhh8\nbXFdc6urg4VJdTVs3eqUFC0/V1aLxZIhKS+eoSBq9TOv7bq7zeLkH/LreLO9bk480f+cp502+JxO\nvqQo3jqOuspP0MUVDNkCrK4uePuqKpgyxQgHtxAJEwy1tQOzsKoqM3ubO9d/+4qK5J8pKxwsliKQ\nlBfPUBBVsOUrAP3SYPjhHCt79B1VTZdUISQvYeh0+KmUsRuIGI8ld12IbMEQRnMz3HyzUUc5bQX4\n3vf873d3d/LPlBUOFksRiOIGWq5EFWz5CMCgWYkf7e1GhXP++fm7pCZRizlIGFZVwa23GrWaI4BO\nPNG8hgkGJ4+T21Zz2GEDM6v77jP3IMjmkE7DBz7wHnfdFZLIKQZWOFgsRSAo2VscL56hIKpgC9tu\n9GjjoeVWAYW5f3qRTsPGjf77bd8OX/6yidYuZlBYmDB0DM8OCxdGu1bH7NveDvPnm/ejRg3MkOrr\nw72iOjq2cMEFI4Gu8BNGxAoHi6UA/CJWk9Z1l5Kogi0s2+lFF0Ff32AV0Be/GD8SuqrKeEr57bd9\nO9xzj0lXEUXN5PWbqYZHHsedDeYT9T1zpqkOt3XrwHfBx1Cgi09/+jucfvp3mTx5MhOTcoVLIntf\nsZbx48cXkpzQYikqUTKWOtlK587NP1vpUBA1G6vfdg0N3llI6+v91zlLXV3uOVtbw/dzZ0T1u89e\n7W1sNK9h17p5s2l/lHN2dKjOmqVaUxMvkyyoVlXF2b5fp0zpG9ROEsrKamcOFkseRPXoSULXPRRE\nNeJ6bdfdbbxrvOjrC6/NLGJiHdwxFfvvH2y0dpNtDHdmCqtWwQ9/ONgG4DUL8PodHSN6dtvr642d\nwZkNdnYa99brrzftjZ8wUOntVaLHJwuHHOITKVggVjhYLHkwnF1VoxJVsGVvN3euvyqkq8t0prW1\nA3mIsqmoMILBOaZfx+yH2xieHYcRh6DMs27eeMN4GDnn8tsOTNyDcfdVwK9T78WUx4nG7NmRN41F\nIukzRORNEXlRRJ4TkZyoNTH8h4j8RUReEJFPJHFei6VYhKW9GM6uqsUmKN8SmM7RLy0GDL5/7hma\nV7U3Lxz9v3vffBL6Rck8W1EBjzziPZPMprKyn8MPf55jj/0qxlbgTSpVHaudt90Wa/PIJJlb6RhV\nPVy9I/NOBA7ILDOAnyZ4XoslUVasGJwQbeZME2W7YsXANkklnEuaMKFWilxPUZPo1dZ6f+++f/l4\nNzlG83z29WpHoanLHfr6hKeffpBXXvlfPv3pn2EEhLMYgXnnncJZZ8VTE82dC08/HWuXaCRhuADe\nBEYErJ8HTHd9Xg2MDDuuNUhbSk1HR3DpR8foGFYicigMz2FG5KhG5qTaElTWU8R/vYjqsmXmOHPm\nmM9RDbRNTaasaGur6mc/G98Y7PU7BhnDGxrM+uuu61WR/lDj8dFHd+y4R5s2GaP1scea1/feM9+3\ntqpWVsZra12dKaU6Z44qjHhdk+jXEzkIvAE8BzwDzPBY/xBwtOvzbzFlQr2OZWtIW4aMKB2BQ5Kd\nbaE1mMOEWlA943yEWZT2BtV9bmhQnT3bv+OP0jFnL6mU6pVXGo+ifLyEspd02lzf7Nn+XkqpVId+\n7nOnaG3tDIWO0GNOmRJ+bzs7/c8Xdv3mfh7Rp2UkHPbOvH4QeB6YlLU+snBwL3bmYCk1QSNVEeOS\n6iYJV9UkhEyYUJs1K7rQ88ItDObMyW1vfb3qzJmD70HY7GrBAtMBB7Up6Bhev0/UjrS6O
vq2Iqp1\ndf0K/Vpd3aXQp7BVYZPCBB037otaXd2uEDZzUJ02LdrvuWxZfOEwsIxXTaBfT6oS3PrM6wYRWQQc\nCSx1bbIeGOP6PDrzncVSVsQNdCrUVTWqS2wYYXrxVavyN6CHefw4382bB7/61eBAtKBAwEcf9U8t\n0dZmUmZkBxMGGZbNuDOYhgZz/osvhh/8IJqhWhU6O40doKenmw9+8Gccfng9Z5+9G5/+9CN8/ON7\nRk6//fjjA1Xcgjj6aFi6FD7/eWPA7+uLdvxEKVS6AGmg0fX+98AJWducDPwG47v1SWB5lGPbmYOl\n1JTalrBggWptrff5amvN+igUa+YQpK4KWtz3ym92FaYyqqkZmI1s3mxer7kmP5ULqB56qBm5L1hg\n9Pv5XFdDQ9+gexX0+8W9137337l3s2dHPU8yM4ckhMM4jCrpeeBl4KrM9+cB52XeC/ATYA3wIhFU\nSmqFg2WIyFfNE9duEGa0BbM+inopTKgFdYZBQi+Ozt+9pNPhnWBUlVE6HW5Yj6Imqq0d2L6pqV8v\nvvh5ra1tV5HtGkUlBINVi1F+v6D982Hp0ig2lTIRDsVcrHCwFJOgzjyuLSGuQIkzIo86YymGt1Jc\nbyH3MnNmtDZHnQk0NeXORmbONPvHGb0PLJu0omIP3W+/OVpZ2RVpH2fkn++MKu7MwQtHQPh7NFnh\nYLHkTdKeRnFH5XFG5H4dSkeHUWtMmzagLnHUL35CLa7Qa231NxqHLfX10YRadHWJEVZR7vvgxXtW\nUFfXo3fcYRrofh6iCOp8Z1SFqiajXXMZGaQtluFEUkZgh6ipNNzZQF94IXrGTi+D8YoVcOyxpri8\nwz33GIPrk0/6G8njGtBbWuDcc6Nv7yZqdbKDDzbtjmIcvv56uOwy8/sUGuTW2VnF+vWmC8zOEaUK\nN93kn1E3n4yrzc2FZ+Qt9JrjYIWDZZcj6bxIUSJos6uf1dSYDigK2V5SjnBzCwaHtjY47jhTwD6J\ntOCplKmVMG9e/H2jphEJSv3txX33wbHHvsv99/+dtrZDCE/04B9x7P4NsgXnpZf6Jx7cb79odRYc\namtNdbdCs2mHCaXKSujri5qiMBgrHCy7HEnnRYpS9CZ7puKXdM6L7OJAixYFp73Yvj3ZxH97753f\nftXVZhYW5roZx111+/Z+vvnN/2Djxm8B0zHJFxrzayAmS+uYMbB2bW4dB7ewyK5dvdde8Up/dneb\nYkCFEvSsQbIur1Y4WHY5ki7hGVYcB4JVAamUf2ff0JCrilizJlg4dHUll/ivs9N0oPnQ3Q0//7nJ\n4RSnzvPPfgZPPql4j/jbGT26h8sv/wGf/OQUTjutITDZXRhbtsB555m2+tWk9qp53d4efeYHg58r\nvwJRYesg6iyrIpmceUkYLoq1WIO0pRgEuVDW1w/kuIlDkIE7zONn6lR/l0gvA2Zra7ALZW1trgE7\n3/QccQyvQe6VfobYjg7VX/2qX7/1rc06Y8ZT+s///DUdO/ZAhc0+x+kfdBznvieRLsOrrfl6Jfkd\nM+g5yafAUjEN0kPa+YctVjgMHYXm+il3nD9YXd3gP1Z9feH5keIEezkBan7unPX1uR19WHyA2+XT\nfa35eGZFdWWtqVH93Of8O2m3x1V/f7+++uqr+u1vL9KamjYV2aZOSgqR93TKlMv04ot/rY2NPdrQ\n0B/aZsdrKykB4W5rXK8kJ6Gg170O82pravJf5+d15p1c0AoHS5EoZfbOYhFFuG3aFL3sYyGEBahd\neWVwh3PttbnHXL7clLf06tiyy5T6nTuVMp1q0HVG7RybmoIT6Yn066mn/knPOOMMHTlypEJKTW4i\n/3vvJ2z9ftvly007nJiH2loj/AsJVIsb59HQYO6p3yDB73mrqfGP1QiKjfD+faxwsBSBfHz2h4qg\nTiKKcIuTgbVQ/No0Z47q+PHBHc6MGf7Xnx3n4KWCCurcq6pMh+WkyM4mShSzI5CCz7VFYbqOGjVK\np0+frl/96hNaX98b+96H/bbZAiWfNBnu85sU2NEXJwOu13MZJ57DT1hF+32scLAUgVJ2mIXg10ks\nXRpduMXNwFooTmc+darqIYeYjjlKZO/UqfmfM+rI111DIZvse51KmXQVn/2suZ729j79v//7P73h\nhlu1unqb5/Hr67frSy/9Rfv7+0Pb5Xfv8x24OO2POoNwz1z8VD1OO9Ppwc/fnXf6C69Zs4LPW1WV\n3/8u+/dJKmW39VayDGI4lL8MCmI74QR/X43sGIYxY8zfzwtV44KaJC+9BBdeONhLKkpAU2Vl/ucM\nc310UDUZQDdtynU7zQ4Q22effsaNe44//vEpFi1awsUXL+O9994DYNSoJ9i4sZWKimq6uqozwWPC\n4sW1HHrofpHa5ecxtmgRvtlPg+JTJk40z/WYMbnr3DgZWx3vsIULg11Dr77aBPA5cRAnnmhe/YIr\nZ80KOrtSWQm9vbkeWtmuzNlk/z6zZ699K/BCI2KFg2UQSbt5FoOwILbubu91QyncOjtNcFo+tYxP\nPjn/87a0GFfNKPT0eHewPT09vPDCs6xbt4Q//GEJN9zwNFu3bgVg//33p6WlhcmTJzN58mTGjh27\nwx3TK3jM3a4g91+vznDp0vwHLo8+6j9oSKXglFNg6tTBbQ0LOKupGXyvFi4Mfi7ff9/ETnjVwq6j\ngyu5mZsar6BXqzyjsoNwx2TMnr1xc/DW0ShYOIjIGOAXwF6AAvNV9UdZ20wB7sdUjAO4T1XnFHpu\nS/Lk86ctNWvW+Hey3d3mT+slILKF29q1weeJG7QU5KN+443Gpz4uDQ1wxhnx93NwlBNR6OkxHWx3\ndzcrV65kyZIlPPXUU/zud7+jPdNLHnzwwUyfPn2HMBg1alTOcaKk6MgOfAvrDDs7TcyEH/X1wQOX\noI6+qws+/vHcNpuBktLWljuab2jIPV/YrHvECBMp7SUcaunisq5/49LUT1h0y1u8vq7GV7CWiiRm\nDr3AJar6rIg0As+IyOOq+ues7Zap6ikJnM/iIixoJi5Bf9oHHhgcJTpUD26YeqCqyls49PebAjIL\nF5q277eff04frz+/H52dpvO//nrzuatrcEDVYYcNrItDXZ3Jk1RoLp6oiPRy110/4Prr/43OTA92\n6KGHctZZZzFlyhQmTZrEXnvttWP7zk5zL/N9HrLVIUGd4d13Bwf+qQYPXPKZEbeMXcn57fsDzTnr\nvAZK++0H6bo+2jpy9YDpuj4OOqjS/LemdNPb0UU79aRpp4peFvN5UnRBXxtn1twDVycU3l4ABQsH\nVX0HeCfzfpuIvALsDWQLB0vCZFfoqq2Fc86Bb397IDmZmzBB4l5/883muzfegI0bTYd3wgkgYtIG\n+EWTlgOXXAK33jog3NwRyHPnDrT9wQcHIpiziTJLcoTCd7+bmw7D6YQmTYKvfCX66N1BBC6/vPi5\neNyoVgKPcO655zJ58mQmTZrEiBEjPLf1iho+
/3wzgFi7NrrAyJ5l+Amchx8ObvsnPxl8nrFj/e+D\n52/d2Unq1ONYrPtzPI/RSxXtpE1nLv0svnAZqRueG9TIlhM7Ob+zC09h0rmNlpNSpJpTvH3JD1k0\n9yVeZ1/G8Tot3GcEA5SPYQ+S9VYC9gH+CuyW9f0UYDPwAqYi3KEBx5gBrARWjh071t9Ev4sTFrmZ\n7bqZT67/xkbzGuQKORTurVE8XRyXxqDKYc3NxkMnn5gOx6c+iveLf959/yUoTXfU4MSODuMhU1MT\nrZBNVVW/zpoV/nuGPXuOB1bc+JigZ3TatOC2B9Vm7tjUoc313vUafL20XG57HaS0lek6l6u0lena\nSe3gykFOI1tbdXndJG1mkzawRYVebWCLNrNJl9dNihZZl4BLILBSk+jPkziIaQ8NwDNAi8e63YCG\nzPuTgNeiHNO6svoTJTjJ7ZIX5AK4eXP+KQLiPMtJRV3H+W9F2TZujYOkUipE+e3cRI3f2Lp1q95y\ny++0trZdKyqiFb53lpqa8A49btRwlAFE2DM6f37wOXzLqS5frq31X9MGtnru51uxLm70W3OzGYmI\neAsTt39u0rVos/5YFfCMRuhfw5ZEvJVEpBq4F2hV1fs8ZidbXe8fEZHbRGSEqm5M4vy7IlHUBY57\nn/Peb5vaGD5tAAAgAElEQVSrr84/R3zUWbCfGiIftVQco3mYkfDVV+PXOEgyp35trVGH9PUFG2WD\n3HePO66fO+54lD/84X9ZsmQJzzzzZ/r7/wrUx25Pd7dZgupaxK1lECUNepgHWnV1sH3ojDPI1Zue\neCIcfzxrOi6gnbTnsTs6fJ7fqD7A7kZu3AjpNHVtbZzJrwevT7uMWHGt8UF4/LE+Bh+PfgB/kvBW\nEuBnwCuqepPPNh8C3lVVFZEjMQnYNxV67lJSSCbFYhDl2XU6btXgDnLVqviFSxyiuLcmXVwnzn8r\n6D6pwg03GFfROAIqn0IvDjU1xivI3d7DDjPPz+rVpn/ZYw947TX46EdNGxctCk7TvWVLG//wD/9N\nZWU/o0fP4IgjDuPll3cLNOCGEdShx+03owwggoW4sm6d8OSTxh14+3Zj46mtVVIVPTz+xTtIffm3\n8NBD5oZ1dRmJ0d8PquzHGtK00+aR2tv3+Y1bZKKtzbgjRTVixbHG++Hzx6qEAiJjXBQ69QCOBhRj\nT3gus5wEnAecl9nmAuBl4Hngj8BRUY5dLmqlJDIpJk3UtAatrdESv+VT8jDqLLhYKtYo6qAo9ynu\nTD7fEpHptLnXXu0Nsvmk030Z1ZCfeqhfa2p6dySpSyIBXfyUDeHPYb73tIGt2vrZ21VbW7Vjc6f5\nzWeu1db6r2lnKrwhndRqc0geJ0+8QsODzjVnTmk7BJ+bNh5UI/bfQUvBByjmUg7CIclMikkTZhR1\n2hCm4nz77ehF3t0dXRKZPYuRpsLrPgVdX1wBFbdzdJ/HL211sW0Y+bQ1SsqGKOk/ovwXzD31Fn7N\nbDJ6+yg5UnyW5UzIMhRv1eb6rvDn1z0CWbBAdbfdwi80rhErX3z+WFY4lIigEU0qlV8mxSTp6DDP\nSCplFr/Bit+AxskFk5262m+prlY95xwzAr7mmuhF6oc6X1NQ0rN8BFR2Tv3aWnP/Z8/2v9bGRu97\ndeutmzSV6h5ygRC3Q3dyRfkPqPu1qak/urfSnN9oM5tzPX2YMPjA9fV5Td0GGYrrv6ad7+XRaQdl\n4it18rEizxxs+owQgnShQTrdtrbSuCvX1cHs2cH1bsFbxemVCyaIVAq+9S340Y8G0lREMSwHqW/7\n++Gkk+Jdcz4EFbHPJy2Icz/vvtv49q9fb8ppvvuu+Yd60dMDd90FxxzzFkuWLNmxrFkzHfhO7Gvy\nu5641NSYJZ+UDWefDYcemrH/dPfR3gG1mAjEb1ffwGXcRooHgXCjzkRW8DYtLOILvM643BgAh6Ac\nKdnUZ4zyFRXUtbdzZvrBgQtsztMwKOL9I2cbV4ptjIxrF4lLEhKmWMtwnjmAGWSUM3F154XEOxSj\nuE4cNm+OX7shzPV2+fK4A9h+hS6FoxTQD3zgA3r66afrl770sG/6ar9l6lQze4ui1glbvv3twrUg\nHZs6tLX+a4PdN50TNDWZKUaYD3NSZeeyf1wnh3YSap6o0+BS2R48znM49GoC/W/BByjmUg7CIUxf\nH0UFWS5kd3ZBxVmc/59j4KyvD+6IfP3FXZSquE42zv8n+9x1dbn/V+cezZw5oL3wqxsQNQjOS0jc\nccer2tfXp6r52TCcvjaOK77XUleXkCYkrGN3dE+O/m3OnNwfPOqNSKejGcnq6pLvjKPEKJS6KEqW\njSOpOIeCD1DMpRyEg2p4fWC/Z7Oc6h8sXWr+T86Aq6HBfPazNaRSZlsnx7xfrnn34tbbe426h8L2\nEPQ/za4X7fzO6bT/NTr/7dbWwkbt9fX92rng1ztu0PJl2wc9Y0EDY6Ff5372SW2d9bQ2NEQPcCtq\nX5WPpPIaSUf9EZYtC/ce8iqhlwRhs4IhNrKRUIS0tTlEIMgl+dFHo6sgh4ply2Dy5MFtdHTVkptw\nEsi1p4QFfdXUDOjtnbic7dvNkkoZ1egXv5hMrYg4qtyghG0VFfDII8aX38tl3IveXuWWW9by9NNv\n09XlhOx44dxs7xvc29HNfect5szuO6G2lomcw9uXXcOicZfw+roa3nnHZCH1ul9p2hj35O20/Okx\nzu/4C165fPyeSYfs2gUFEzf4AczNzg50cf/Zli6FX/7S/FDZBpGJE+GnP4Wvf937B25ogAMPTODC\nPAiLURgORVGikISEKdZSLjOHIIo9SCg05URHR/AMvK5usPoknc7PT76+fmBG7VXb2JmNFHqv4qhy\nly8PHly6vZSiq7t7Fa5SmK7Q6btddXXYQLpP53JV7oqmJtXly4O1F45rJxkXTXlvR4xDWEWyOXOK\n5GFZiH9v0A8f5BaadBqKfCiXKbILyi23UjGW4SAcgp7PbLVFXJKwabW2hqgoZMAl1dGzx00U505e\ntmBB8LaF2BziqHKjxA5k1wqOohWprWzT7064Sf96w63a1OSv0mlqCnYPTrNFW5keeDHm9+/XBrYG\nunZ2pEdo66ync/rPHf3qNd3aOutp7Zz93eL63Wc/sFH0boUGugxVFGrQuZ1sjqUUWi4hNQJe1wT6\n35J19Pksw0E4qBbHEycpm1ZYgfSamoHkc1EHfpWVgw3V7qyWU6cG7/vZz+b/X46dcC/EA6i5WXXr\n1m79wx/+oGeccb9WVraHXnszm7UTMwVa3nCMNtT35GzT2GiuZ9kyVZGQwK6Qi+lYcJe21v6LtxdQ\nlA621J1ndtBY2EOVSgVkzcvjnMUUftnnDPqD5pvuNx+yfuMjwNaQLhf8atR2dJgln/xBYYnInLw3\nYfp3RxUclsv+vvuiJ5M75xxzrdmq1hUrTHqbIHbfPUJKGZ+LiqPKXbOqh3aPoisGpaqql4MOupiR\nIxd
kqpzVUlHxru/2DbRRRU+mKMt2aIOJ/C8bmsZw97y3ePjxGsDkaTrjjMz1rFjBkvrZnNB+D71U\n0U0Nadqpdhd3CbmYurWvGrvEDhuGB36BGlGTWiXpj5+dxfDQQ01CJL8yeNu3w0UXme3yLV4RN3Ni\nEoT9Qf/618JzJ0XB4zeu8DeExcIKh4QIqlEbJStlNlE6Qr9MpxdfbLYbM8YEXvkVSReBxx4zz2tQ\n6U03NTUmCM6rkNDxx/sXgHc4+eSQ/3JA+tb9xnycdK3Qtr06Z7fs/nG/TctJ8zHPZGu1dHJ479dp\nb3+Ws88+e0dhm7++Wsvxk9+nVytoJ0097SgVfJlfMollngFZdX1tnN1wD2f/v6wLytyQz7S/z0b2\nZBFTgwO7/C4miqHXrzJRlBHGAQckly7Xi4kTjRBySuV5GY+3bMlvBDWURPmDlkJoJZkiOIukUnaf\nAPwIkw3wdlX9ftZ6yaw/CegAzlbVZ+OcYygyn8YhroNC1BG/X0Tv6NH+g8Jrrglua2WlWR55BI4+\nOvza3Fx9NaS0E+68e6A818kns4gz6e2tCdw3tB5y0Ej3s5+lpTLN+dtfAT6Qs2uV9NLSUkVHRwd/\n/OMfGfv7q6jCu3xYHV08fOlY9rhhcFHivX67kLfrv8mi9uOid+R+3ieuP20d23NTOAfh7uzDomCb\nm/1djoIeyrY2ePllc+yk0uX64YTxjxkD552XWzYP8htBDSX51B0tBoWkCA6jUL0URiCsAcYBNZjM\nq4dkbXMSpgKcAJ8E/hTl2I7NYShtTlEJ04cvWDDg1DBnTvj1hDliLFiQfyZVGJw4L469Ydn8lz3d\nkebUzPHVr4OxTYb+XhFchnITqBkj7dLqo/TzRx6pX66s1OtAOwO2HVSVy00+vvrZP66j8843Qq26\nOjdAzC+Rk1cgWZz7WVPjbzUvhqtdUNRlvobppCpIxaUcPKVUzTVnxYSUTeI94FPAY67PVwJXZm0z\nD5ju+rwaGBl27PHjx5c82NCPsGcw6FlpbDTeK1ErtzmEBd8VGh2bj6Bplvc8jaKtTPettlVbG9Hm\nGPGivCptbQPtqKjQzupq7QftD9hWwbs2ZD65uJ0fxctnNF/p7TVayMfomq97aSGdtYPXwxsUdZmP\nMBrqUeNQn1/V8XoYdC/LSThMw6iSnM9fBn6ctc1DwNGuz78FJoQde/z48bFdhosxkIj6DPhtF7WP\nCLqe7D4h35oCXuecNi26oGlgq6cLZnDO/P5ov0PARfWHNCxsvUej8k/fAMZla7fd/IM6wvKN5NvG\nuITlK4/zMEYlaETn96DFvdZyGzWW0lPKfW6Pe7DTZmUVkRnADICxY8fG0uUnWYrSIYrDh+qA/eDm\nm826deuM2rG7Gy68MNq52ttNNbCFCwfbIrzUsEklZHTubdTg1nbqeZ1cfWqKLhbzeY7nMXqpop00\nadqpoo/FF/+JVOqE8IO3tNA/a5anq4XiF2tsCFrnyfvvmzJws2cPfJdKmRSr2eHkXlRUBBsCOzpy\nv0ulzHFFglP6OiShh584ES65BObOjbefn5E7CkFG0ro68+oV9VwMd75iMxSeUg5FNEZDMgbp9YDb\niXN05ru42wCgqvOB+QATJkzQqHafpEtROgSVZ+zqMv3LTTd5l6ucONH8J6Pai+rqjFOH+3/jJ9wG\nl8pU2tud/ixeN5lOGw+iJ56IuD0djMM7/H8iK3mbvbM8cxaR6r0cFm72tL5v2LCBpUuX7khfXbtl\nC48B1UAa6KmpoaK6miqRZPJTu5kzxxhJd/ieAmvXmjTPYT9aT0+4a1Y2/f1mtFBXZwREXZ23EHGI\nm2rBz8shKF+5K6V1QZ21m6ARXWenEcgHHVSYi+fOkqKiEIppjAYKnnpgBMzrwL4MGKQPzdrmZAYb\npJdHOfb48eMj232KFbF+zjnBs28/zYE7QVtU9U8+M+6OpStMquSa7+gcrtYmNpuI2hzjsE8wVubY\ng22e/iUpA4O3vJas/Bx96bR2pdP6vZYW/chHPqKYSYHW19frcccdp//2b/+mv3viCe2+886Bqfqy\nZcno0LyWVGqwjjApY06UpbpadcIE/4colTI6vyjqiiDdZ9ifKMmU1qqlSR9RDhWkhprhUCYU4430\nKsZr6arMd+4a0gL8JLP+RSLYG1TjeSsVoxRlR0d4niG/bKXO8xmmxi7IVueRA3uHATY1VxfM79IF\nC0y93Tm1czOCw1UmsbHH2+Y57TmdU/2daFW5QhY/W8Bm0FOPO05vmDNHX73uOu299lrvjqlUNTTz\nkeZJLE5e9KBtwryToujfS2k8LYUnT7l4Cw0lPvegrIRDsRZ3+owwu0++A4kgA3ZYXqKgxS2QvP6X\nTU2Dk6DF9vKLWhjZ1XHkeO407TUwavRww/T19HEv1dWq8+ebJSMM3EuHT/v6Gxqi+fQW2llPiCjM\nokrzYi3O9YcJCq8OPerDX0rjaSmEUTl4Cw01tthPOPkMJMKerbC8RJWV0WtIJyrcoo6mp00L9lOt\nr/euaBPDDbOvvl6X/Mu/aIfPFCrQi8gvZar7BytEzZNOm+uPUpUnW5rn4+FTyJJOm9Ju06aFezll\nP9DFmDYnQSmE0VB6C5ULtthPOHEGElFm4h7xJTl9q1+/s+P/G9G3NpZwizqaTqXMErdzbWqKPHru\nB/1tmBDwWmpqzKzDa51bGkapMBZmrFm+3FxTUMWi7GCMzZv92+e+hiSFyNy50YShZ5bBXVz/btkB\nCaXsTiRBU7ng1OCYN884osybZ7yUvNxYo3jCtbRAdW4aH2AgL9HjD26nub6bhpouRJSGBh3IaPDi\nChg1CmbOhGuvNa8jRxqf2ywc76PmZuNYImJePbMjRPVScKrtqIZv66avDy6+GG1upq++3liMfTYV\nYIpfUqkgurv9vX3c3iYtLcZ7xotUyhR8Wbo0/MaJmJwhfnR1wbe+NfDbpFImkZQf9fXw1lvB26RS\n5kFctsy0p7bWf1swOVEc97wgsr1xgu5RIS6pll2bJCRMsZZipuyOOhPPrlo4KE11ZmVHeoS2cqbO\nrfmOttZ/TTuXrQivT+kUUciaSYTOkjs6jPohX2NIhKUPdOEhh+iYESN0Oui1mHQUvvuEFYCorh6Y\nzqXT8UfGUaaEfjcurkE7qmF6zpzctjkzEC/jcUeH6owZwedesCCazcNrNmD175YM2DKhhREYP1Hd\nxbh3VsL28UycmPLOvKudMMoEVtQBZ7IQujHLqfeYaDi/qUlHhxlRNjTkBDJEylja02NG3kWiDej+\n61+5ef/92WPqVMZedBG1P/oRzJ/vvYNf2leHE080o+IRI2DjRvjFL+JlGQ0rywj+Ny5uoJAzbQyb\nnTn1VaO0zWnf6NH+9TtFTOSkM4UMSnPtNRsIake5Z620lCdJSJhiLUWZOWRGmJ2zv6vN9V3eg0c2\na2d6j+CRV5ieN05Oiiiud4W4dKZSsWYa/c41ZBupgwwwYYtz
nJkzg+9LKpXsaDeuQduZNhZDjx90\nzNrawTENHR2m7W67UT6zATuj2OXAlgnNg6w/yvK6Sab+bl2Pvy9/fb3xM3X/aVtbTUmzoA5m2rT4\nSZXC/GrzdOnsqajQvjBhAMEqH69Sd+7FCXYLKtrs3E+/66iuVv3CF5L1Ool736K4tLoDx+Ik8Yoa\n9OLuvAvxximX/EOWkmKFQ1x8/igdpLS15mxjL/Dz5Xc6jYYGk2gtrLNx0jhHHemLmBF1mF9tjBGw\nO9ZgOxG8iY491tgygkbLYQLRsaMEuWOm0+EePkmObuPGLbg7Tb9R95135j8az7ZRRGlHvlgvpl2S\npITDrmNz8NE9m0IsC8N1+HHy+lRVmXw9hx4Kxx4L27YFb59Owy9/OTjPTnZiqCgVwVy4MyyF+MgY\n28dXvxqer2b33f1z9KTTJl/OmWea49x7r/dxOjpgxgy4++6BhFSqg7dJsuDM4CRUA/mDYED/75dT\nyK3HX73a2EuammDWrODfKqi97mMuWmTqqnol70oieZzNP2QpgJ3KlTWQoD9Kd3ewS2IcGhsHOpjD\nDvOvHeqmv3/AwJmNy69W/dwVs9DwTQbjGDiD3CidDH1RXCbDjjNp0oDP8T/8g7+Lp3PtheLl4/z3\nv8O774b7PdfVmVKa//EfxpD+ve/5J8uL2l7HeP6xj3lXRYNkOu+w36FU1cosw5Mkph/FWhJVKwVN\nsaOoOqIuTU3Riy44Sd8CjLT9Irp40iQ96qij9JOVlboJdAvG3TR24JmI0es7+Xyy1SGbN/vfhzg5\neuJE9CUZ3VuMYh5xHAHitrfYah+bf2iXhF0mCK6z0xQ4mDvXvEbJg+9FUKBQdbWJaHMHUuVLX9/A\n6DHMHbK3Fx580IykfUZ421T5+bJl9PX1MemSS1ixaBFVt99OxTXXIE665aik08YddcGC3NHysmXG\n1TJbvVZfPzigzBmF33qrGfWfcIJx2/3oRwf2cVQ5jY3e1/ziiwOfkxrdrogecBiLOK6wcUfjxQ5e\nixVZabFkUYhkAW4AVgEvAIuAZp/t3sRkY32OGFJt/MEHJ+uGFzbq3bTJGGU/85nwwK4oo8cInjLd\nqZR+e9Ys3eJzvu6GBt3297/nXoufq2NTk391suZmYyjPHlk//rh/G+vrVd97L959dNoXmltECx/d\ndnSYa4qSpyl7vyizjDiOAPmMxkvhamrzD+1SxOljg5ZChcPxQFXm/b8D/+6z3ZvAiLjHH+/XQXv9\nCaP+2f3+KM6fNMhdM8riVgd0dvp31JmlD/ScykqdccQR2pFKaU9dnfaHdRJhUbleHY7jZZXdCc2b\nF/16nPuXdIGNfDtIZ7+gRHWFRhNHcYUttEO3nbclQcpCOAw6EEwFWn3W5Scc/P6M2UnSCh19JVkz\nwNVB/u2NN7Q7xO+/H7SnpWWgHWGdRNTO2X2sqJlJvZZsPXrUTj+uLSFuBxn1N3OfK59ZRtDMxp0G\nJZ+4B4ulCCQlHJJ0Zf0qcLef9gp4QkT6gHlqSoF64q4hPd5vo64uOPdceOMN8zf94Q/zdy2EZGqx\n1tTQX1fH45dcwqKLLmLJkiUcsWoV8zAlL/0QoMrxaIpSjzZq7Vz3sRYuDE9x4Ud19WA9elT3yKj1\nXR3i1uKN+ps553JSj3R2+nsIebmP+rnCumvBrlhhzpFk8XKLZYgJFQ4i8gTwIY9VV6nq/ZltrgJ6\ngVafwxytqutF5IPA4yKySlWXem2o7hrSIurbsN5eY1QNIqqveIG1WHsqKrg7leJrW7bQNXs2jY2N\nHH300Xx9zBgannjCCLAgjjkm+sny8V0v5PqyDaNRO/2WFtNBRjlmPkS9pqoqk9tp3LjBBca98Lt/\nYXmLilG83GIZYkKFg6p+Lmi9iJwNnAIcm5nSeB1jfeZ1g4gsAo4EPIVDokT1FY8ZYJZzmv5+Hv7U\np7h3jz04vLGRD33601T+4z8awfT006YDCeL88835zzrLe707cdrf/hZvRA6FXd9jjw3u3KJ2+mEj\n7igdZlDCuLBrSqUG2vCb38SbZXiRT2K/JALZLJahohCdFHAC8Gdgz4Bt0kCj6/3vgROiHN/X5hBn\ncdsm/IiQYsEvpqAftO/cc41O3zGM1taaz088Ed3TRSTXK0g1154Slv/IS9cddH1B7Xv4YW89etyq\nSvkYW8POEXRNqdRA+mvV6B5H+XgblWsVNssuC+VgkAb+AqzFuKg+B/xn5vtRwCOZ9+OA5zPLyxh1\nVKTjl0w4qGr/n/6kvbvtpt3V1YPyErVhahkEJa7zzXhaWxvP+2nWrMFeV0H5mdw1h6MY4KPmCXIK\nVsyfH9w5F9PDJqrRPaqQihqMmI+3kc1fZCkzkhIOYo5VnkyoqtKVdXV5q3sAEzx39dU5X6sqf/7z\nn1myZMmOZcu77zIVGJ9Oc9jIkex+4IGMnDyZvffcE/nGN7zbUVvrb+CMy8SJ8NprA2qYmhr/Yztq\nqJEj/WsIZOOoafzy/TvfB+nom5uLr0dfuNAEsfn97nPmwOzZwdfkZvt2c5+8rsepJnfGGfldU9Cx\nS3GvLJYsROQZVZ1Q8IGSkDDFWsYfcYQZeX3ta/nNGlwjt76+Pn3uuef0Rz/6kba0tOiIESMUTAXM\n0aNH65e+9CWdP3++rl69Wvv7+weL4iAVRlid4aC6xVFnIKVWWQz1aDhMDZRKlVewma2ZYCkjKENX\n1uSpqDDGvDPPhK98xaRq6O0dnOLBr7IW0CvCT9au5cnTT2fZsmW89957AHz4wx/mpJNOYvLkyUyZ\nMoV9990XCUqZEWRc/cQn4Mkn/feNk4ojTi3mYiZOK3U2z2zD85gxZkbmlypFNb6hN2rFtnzI99i2\nQpuljClv4eDmM5+BtWuNOmHVKth/f5gwwWTW7O5Gb7wR7e2Fnh62V1TQ3d/Pcdu2sfKKK9hvv/2Y\nOnUqkydPZvLkyXz4wx+Of36/DuCuu4KFwxVXmDxEUVRFcfJGFbNwfNwYhUJw4g/cQrey0mSq9aOr\nKz8BFTeWopjH9rpuGxthKSPK2+YwYYKuXLnSfMj6M2k6TZ8Ivzn+eKbcfz/09pIGeoA+EW49+WTG\nTJ/OpEmTGD16dPEa+d57JuGbV8fe2AgbNpiRriNU3nkHfv7z+HEHqZTpFLMDsIpBqfTonZ3m3nmd\nJ8iW09BgkgYOVxfRoOu2dgpLgSRlcyj/rKwAnZ2oE2jU1gaqSFsbVdu2cfK999LY20sj5mJqgXpV\n/vXppzmzu5vRCxYUls01CCcyNlt1VF1tspmeeaZRf4iY91dfDR/6kH89AD+amoxa7ZhjzOsbbxR3\ndFmqbJ5BMQJVVWY07reuWLOmUhAlNsJiGWLKWq20bds2rrvuOqr++7/55vvv45EAGl+N/vvvG48X\np5DPuefC/febkbyXjjeu/tcrMtahp8ecc/58aG0drC6IE5DmdMx9fUbAtbfD8uXmfdDMIQlddjF1\n9A5Btg2vinFxg+j
KFVuhzTIcSMKqXawFUBHRn4wcGRxnEGepq8v1KMnH2yRu4fr6etXZs6PXlk6l\nTKxBlLTXboaT50wUr6idMWPpUHuDWXZq2BXiHA444ABdsWIFzY88Euz3XgjNzeZvuWWL9zo//e/c\nuaaoTNz719BgXkXMDMNP3dXQYFRIv/iF93V76d2Hmy47yLbR1AS33GKcEHY2Tx4bG2EpIruEzaGp\nsdEIhlWrciuUJcX27f4ddJD+N6iCWRBtbWapqDCuuX60t5vrjqN+GG66bD/bRkOD8Va68MJkq7pl\nk1SVwbjYCm2WYUBZ2xx44QXTMbS3G/fGYhDUIQTpf4MS0EWhr8+M8hsa/F1GDz7Y2BiiupQOR112\ntm1j9Gj41reKn+V0qF1JS2HTsVgKoLyFQ1/fQMdQaL0FP1IpoxrycpsM8un3CoyrrzeG1CiqpvZ2\n2HPP4BrC3/2uGdH6rc/22CllfEKSZNeeKHaW03JJs13MuAuLpUAKUiuJyHUisl5EnsssJ/lsd4KI\nrBaRv4jIFYWcM3Gc1M5e+LlMOuqIRx+Fm282QW5z5hjvpKVLB6sL/Ein4cADg9ULtbVw8cWmfbW1\nZr8g9UOxC9aXglLMfoab+s1iGQKSmDncrKo3+q0UkUrgJ8BxwDpghYg8oKp/zvuMlZWDK5vV1hqb\nRFDKhbo6sy6VMq+OW+Qtt8AFF+Ru39jo3QF7qSOyg9IcdcGrr8INN3jHNTiddSrlrV548UWjdurt\nNbOa2lqz7eWXw2WXeQu0JGooDDWlmP0MR/WbxVJiCvJWEpHrgLYQ4fAp4DpV/Xzm85UAqvq9sONP\nENGVXiuqq42AOPlkOPRQOOggmDIFDjjAuyN2Mm+efrop/OJ0wkH7NDWZwjruDjUfb6AowiSbQr2O\nomQqLTb5xlqUwpMnKOvrcI++tuzylEVWVuA64C3gBeAO4AMe20wDbnd9/jLw4yjHD63n0Nw8UNh9\n2rSBYjtRfMeXLzexBxEyuu4gX//0uL76w90PvtBYi2LHagRl2c2n4I/FUkZQqqysQTWkgZ8CcwHN\nvP4Q+GohwkpEZgAzAMaHbdzdbbxbKiqCYyDa2mD16oHPjkEyKI2Fl3ohX3VEXMPjcFZ7JGHsLbYn\nz86gfrNYikzBNaQdROS/gIc8Vq0Hxrg+j85853e++cB8MGqlwJPGyVF0441wyimm4wkySDp46bdL\n5YfjCccAAAm6SURBVA1UyHnC1DnFThOdVE3lYnvyWFdSiyWYQqYdwEjX+28Bd3lsUwW8DuwL1GDK\nhR4a5fiJlAn1UhlEqSnspV4olToi3/OEqWNKkVrD1lS2WIYUElIrFRoh/QMReVFEXgCOyQgIRGSU\niDySET69wAXAY8ArwH+r6ssFnjc/nJFrWHRzfb23eqHQyNaoEbn5nMetzslkrqWtzXw+/niTWjxo\nfVLRwUH3tpxjLSwWyyDKOreSr7eSQ1VVvOA4EROPcOml/h4x9fWwfr3piP3IxxvIr6jNJZeY9UEq\noCjnCfPAiZunKV9s3iCLZUhJylupvCOkg3Dy78QRDum06ZxuuMEEl910k7dBMkgwQHx9eJCR9ppr\njNDySt8Q5zxhRuy4eZryxRp7LZadguErHKqq4MEH4dRTBzqhVMp0xH60t5sqbB0dAyP3yy83nXMx\nDZJhBnBHxQP5p28IM2LHzdNUCNbYa7EMe8pbrVRVpStragZ3+PX1ppCOM8J2VC+rVxuPpLhV1pIu\ne+nlCRQnvXe+Kp4wdc4bb8C++1p1j8Wyk7NrqJU+9jFjH1i9GjZuNInqDjxw8CjUUb0sXGjiHbyo\nrjbrvJLrJZXMLSjLZ5zqb/mqeFSNquz6683n7HrTjjHbqnssFksEyls4VFQko3Pv6fHfLwl9e1jg\n1xtv+CfEyyYfFY9bMG3fbvIw1dbm5mGy6h6LxRKR8hYOcQganeebljsqYYFfjzwyeNQeNIOImz3V\nSzA513nTTUY4uLFpoi0WSwTKuhJcLILSVeeTljsOUdJdOKP2efOMDWLOnGQqgdn00xaLpQjsPDOH\nMBdKKJ6+PWq6i+xR+6WXFq7iGc55mCwWS9mycwgHt5fQzTeb79aty+1wi6VvDyoZGjQzSULFM1yr\nv1kslrKmvF1ZJ0zQlSsDY6Tzq5dQDIaqHTYi2WKxuNg1XFnDKJdawDB0nkA2ItlisRSB4S0ckkoP\nnRRD5QlkXVQtFkvCFCQcRORu4KDMx2bgfVU93GO7N4FtQB/Qm8SUB7DGWDfWRdVisSRIQcJBVb/o\nvBeRHwJbAjY/RlU3FnK+HKwx1mKxWIpCInEOIiLAPwG/TuJ4kQmKbUgifsFN1FoMFovFshOQlM3h\nM8C7qvqaz3oFnhCRPmCemlKghVMqY2xQ3qRSekRZLBZLiQgVDiLyBPAhj1VXqer9mffTCZ41HK2q\n60Xkg8DjIrJKVZf6nG8GMANg7NixYc0rvjG2nDyiLBaLpUSECgdV/VzQehGpAlqA8QHHWJ953SAi\ni4AjAU/hkJlVzAcT5xDWPqC4xthy84iyWCyWEpCEzeFzwCpVXee1UkTSItLovAeOB15K4LylwXpE\nWSyWXZAkhMMZZKmURGSUiDyS+bgX8LSIPA8sBx5W1UcTOG9pcDyivLAeURaLZSdl+KfPKDY2PYXF\nYhlGJJU+Y+dJ2V0sHI+oJNJrWywWyzBheKfPKBU2PYXFYtnFsMIhKjY9hcVi2YWwaiWLxWKx5GCF\ng8VisVhyKGtvJRHZBqwe6naEMAJINqFgcbDtTBbbzmSx7UyOg1S1sdCDlLvNYXVi6b2LhIisLPc2\ngm1n0th2JottZ3KISCL+/1atZLFYLJYcrHCwWCwWSw7lLhySSe1dXIZDG8G2M2lsO5PFtjM5Emlj\nWRukLRaLxTI0lPvMwWKxWCxDwJAKBxH5RxF5WUT6RcTXA0BEThCR1SLyFxG5wvX97iLyuIi8lnn9\nQJHaGXoeETlIRJ5zLVtF5KLMuutEZL1r3UlD1c7Mdm+KyIuZtqyMu38p2ikiY0Tkf0Xkz5ln5Juu\ndUW7n37Pmmu9iMh/ZNa/ICKfiLpvkkRo55cy7XtRRH4vIh93rfP8/YeonVNEZIvrt7wm6r4lbudl\nrja+JCJ9IrJ7Zl1J7qeI3CEiG0TEs+RB4s+mqg7ZAnwEOAh4Cpjgs00lsAYYB9QAzwOHZNb9ALgi\n8/4K4N+L1M5Y58m0+W/AhzOfrwMuLcH9jNRO4E1gRKHXWcx2AiOBT2TeNwKvun73otzPoGfNtc1J\nwG8AAT4J/CnqviVu51HABzLvT3TaGfT7D1E7pwAP5bNvKduZtf2pwJNDcD8nAZ8AXvJZn+izOaQz\nB1V9RVXDgtyOBP6iqq+rajdwF3B6Zt3pwM8z738OfKE4LY19nmOBNar6VpHa40eh96Ns7qeqvqOq\nz2bebwNeAfYuUnscgp41h9OBX6jhj0CziIyMuG/J2qmqv1fV9zIf/wiMLlJb
gijknpTV/cwirCxy\nUVBTWnlzwCaJPpvDweawN7DW9XkdA53EXqr6Tub93zCFhYpB3PPkFEACLsxM9e4olrqG6O1U4AkR\neUZMze64+5eqnQCIyD7AEcCfXF8X434GPWth20TZNyninusczIjSwe/3T5qo7Twq81v+RkQOjblv\nEkQ+l4jUAycA97q+LtX9DCPRZ7PoEdIi8gTwIY9VV6nq/UmdR1VVRPJ2vQpqZ5zziEgNcBpwpevr\nnwJzMQ/RXOCHwFeHsJ1Hq+p6Efkg8LiIrMqMSqLuX6p2IiINmD/iRaq6NfN1YvdzZ0dEjsEIh6Nd\nX4f+/iXkWWCsqrZlbEf/AxwwRG2JwqnA71TVPYIvp/uZGEUXDqr6uQIPsR4Y4/o8OvMdwLsiMlJV\n38lMnzbke5KgdopInPOcCDyrqu+6jr3jvYj8F/DQULZTVddnXjeIyCLMtHMpZXY/RaQaIxhaVfU+\n17ETu59ZBD1rYdtUR9g3KaK0ExH5GHA7cKKqbnK+D/j9S95Ol8BHVR8RkdtEZESUfUvZThc5WoES\n3s8wEn02h4NaaQVwgIjsmxmVnwE8kFn3AHBW5v1ZQGIzkSzinCdHH5npAB2mAp7eBgkQ2k4RSYtI\no/MeON7VnrK5nyIiwM+AV1T1pqx1xbqfQc+au+1fyXiGfBLYklGRRdk3KULPJSJjgfuAL6vqq67v\ng37/oWjnhzK/NSJyJKZP2hRl31K2M9O+JmAyrue1xPczjGSfzWJb2IMWzB97HdAFvAs8lvl+FPCI\na7uTMN4qazDqKOf7PYDfAq8BTwC7F6mdnufxaGca82A3Ze3/S+BF4IXMjzJyqNqJ8Vh4PrO8XK73\nE6MG0cw9ey6znFTs++n1rAHnAedl3gvwk8z6F3F52fk9p0W6h2HtvB14z3XvVob9/kPUzgsy7Xge\nYzg/qhzvZ+bz2cBdWfuV7H5iBp3vAD2YfvOcYj6bNkLaYrFYLDkMB7WSxWKxWEqMFQ4Wi8ViycEK\nB4vFYrHkYIWDxWKxWHKwwsFisVgsOVjhYLFYLJYcrHCwWCwWSw5WOFgsFoslh/8f94DYIuHzcncA\nAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68ac0e36d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ruamkVRRCIQlyixmAAf4kaQA2uUUOH\nmYhOohMTMMQkxmhGo6gDE5NRomZMzMBkxCxtdEZlNGoUHSNgNkDHNaIGXACNCMjSG719vz9OXfp2\n9V2rblVXw3mf5z613O3cW7fOd863iqpisVgsFoubsoFugMVisVhKDyscLBaLxdIPKxwsFovF0g8r\nHCwWi8XSDyscLBaLxdIPKxwsFovF0g8rHCyWiIjIQSKiIlIx0G2xWAqNFQ4WS4EQkStE5CUR2SEi\nb4jIFVnrDxKR34lIq4isFpFPZ60/T0TeEpEWEfkfEdmnuFdg2ZOxwsFiKRwCfBHYGzgNuEREznWt\n/zXwf8C+wNXAvSKyH4CIHAksAL4ADANagduL13TLno4VDpZBi4iMEJH7ROT9zMj8G65114vIvSJy\nT2bk/qyIfMy1/iMi8pSIbBWRl0XkbNe6GhH5YWbUvk1EnhaRGtepzxeRt0Vkk4hc7dc+Vb1RVZ9V\n1S5VfRV4ADg+c45DgY8D16lqm6reB7wAfM45B/AbVV2mqs3AHKBRROrzvnEWSwSscLAMSkSkDPgN\n8DxwIHAycKmInOra7Bzgv4F9gLuA/xGRShGpzOy7BNgf+DrQJCKHZfabD4wHjsvs+x2gx3XcE4DD\nMue8VkQ+EqG9AnwKeDnz1ZHAWlXd4drs+cz3zvrnnRWqugbYCRwadi6LJQmscLAMViYC+6nqXFXt\nUNW1wH8AbrXNM6p6r6p2AjcDKeATmSUN/CCz75PAQ8CMjND5EvBNVd2gqt2q+gdV3ek67j9nRvvP\nYzrwjxHO9Zj/26LM5zSwLWub7UB9xPUWS0GxXheWwcqHgBEistX1XTmw3PV5nfNGVXtEZD0wwlmn\nqu7ZwFuYGchQjBBZE3Duv7net2I6cl9E5BKM7eFTLiHTDOyVtekQYEfE9RZLQbHCwTJYWQe8oaqH\nBGwzynmTmRGMBN5x1olImUtAjAZeAzYB7cBYXGqdXBGRLwFXApNUdb1r1cvAGBGpd6mWPgY0uda7\nbSRjgapMGy2WgmPVSpbBygpgh4j8U8aAXC4iR4nIRNc240WkMROXcClGZ/8n4M+YEf93MjaIKcBZ\nwN0ZYXEHcHPG4F0uIp8Ukeq4DRSR84EbgKkZtdcuVPU14DngOhFJiUgj8FHgvswmTcBZIvIpEakD\n5gH3Z9koLJaCYYWDZVCiqt3AZ4BxwBuYEf/PMKoXhweAzwMfYFxCG1W1U1U7MMLg9Mx+twNfVNXV\nmf0uB14EVgJbgH8lt//Kv2DcVFeKSHNm+XfX+nOBCZn2fR+YrqrvZ67vZeBijJDYCNQBs3Nog8WS\nE2KL/Vh2R0TkeuDDqvoPA90Wi2UwYmcOFovFYumHFQ4Wi8Vi6YdVK1ksFoulH3bmYLFYLJZ+WOFg\nsVgsln6UdBDc0KFD9aCDDhroZlgsFsug4Zlnntmkqvvle5ySFg4HHXQQq1atGuhmWCwWy6BBRN5K\n4jhWrWSxWCyWfljhYLFYLJZ+lLRayWKxWPZk2tpg8WJYswbGjoXGRkilinPuyMJBRO7A5LLZqKpH\nZb7bB7gHOAh4E/h7Vf3AY9/TgB9hUir/TFV/kGuDOzs7Wb9+Pe3t7bkeYo8nlUoxcuRIKisrB7op\nFovFh5Ur4ZRToKsLWlqgrg6+9jVYsgQmTgzfP18iB8GJyCRMjvlfuITDjcAWVf2BiFwJ7K2q/5S1\nXzkmzfBUYD0mmdkMVf1L2DknTJig2QbpN954g/r6evbdd19McS1LHFSVzZs3s2PHDg4++OCBbo7F\nssfjNTtQhREjYOvW/ts3NMC77/rPIETkGVWdkG+7ItscVHUZJkOlm3OAn2fe/xz4rMeuxwJ/VdW1\nmWyYd2f2y4n29nYrGPJARNh3333tzMtiKQFWrjRCYNYsuO468zp8OMyfb2YMXnR1wf33F75t+doc\nhqnqu5n3fwOGeWxzIK6KXJjZw//zO6CIzARmAowePdpvm1zaaslg75/FMvC0tRm1kXt20NxsXm+4\nAXbu9N6vpQXWrvVelySJeSup0U/lnahJVReq6gRVnbDffnnHcSTO1q1buf3223Pa98477+Sdd97Z\n9fmggw5i06ZNgfs89dRTfOYznwHgwQcf5Ac/yNlcY7FYSojFi/1nB6pQ7VNeqq4OxowpXLsc8hUO\n74nIcIDM60aPbTbgKteIKdW4Ic/zDhhBwqHL75fOkC0c4nL22Wdz5ZVX5ry/xWIpHdasMbMAL3bu\nhO5u73UVFcYuUWjyFQ4PAhdk3l+AqbyVzUrgEBE5WESqMNWvHszzvAPGlVdeyZo1axg3bhxXXHEF\nTz31FJ/61Kc4++yzOeKII3jzzTc56qijdm0/f/58rr/+eu69915WrVrF+eefz7hx42hrawPgtttu\n4+Mf/zgf/ehHWb16td9pASNcLrnkEgAuvPBCvvGNb3DccccxZswY7r333l3b3XTTTUycOJGjjz6a\n6667rgB3wWKx5MvYsWYW4Ee29reqyhijlywpjjtrHFfWXwNTgKEish64DvgB8F8ichHwFvD3mW1H\nYFxWz1DVLhG5BHgM48p6R6YEYt5ceumlPPfcc0kcahfjxo3j1ltv9V3/gx/8gJdeemnXeZ966ime\nffZZXnrpJQ4++GDefPNNz/2mT5/Oj3/8Y+bPn8+ECb2OBEOHDuXZZ5/l9ttvZ/78+fzsZz+L3NZ3\n332Xp59+mtWrV3P22Wczffp0lixZwuuvv86KFStQVc4++2yWLVvGpEmTIh/XYrEUnsZG45rqR0dH\n/881NYVtk5vIwkFVZ/isOtlj23eAM1yfHwEeid26QcKxxx6bs1toY2Z+OH78eO6P6YLw2c9+lrKy\nMo444gjee+89AJYsWcKSJUs45phjAGhubub111+3wsFiKTFSKTMLcMcyVFX5G6IBtm0z2we5sibF\noI6QDhrhF5M619ywoqKCnp6eXZ/DXEarM1an8vLyUJuF375g4hec16uuuopZs2bFOpZl8DOQ0bSW\n3Jg4Ed55x/xua9fC88/DffcZg7QfjivreecVtm02t1JM6uvr2bFjh+/6YcOGsXHjRjZv3szOnTt5\n6KGHIu+bBKeeeip33HEHzRmfuA0bNrBxo5efgGV3ws9ffuXKgW6ZJYyaGtPRX3MNTJsWbIeA4rmy\nDuqZw0Cw7777cvzxx3PUUUdx+umnc+aZZ/ZZX1lZybXXXsuxxx7LgQceyOGHH75r3YUXXsjFF19M\nTU0Nf/zjHwvSvlNOOYVXXnmFT37ykwCk02l+9atfsf/++xfkfJaBJ8hfvlgqCEsyhNkhoHiurCVd\nQ9orfcYrr7zCRz7ykQFq0e6D
vY+7D3fdZWYKjkBwk07DggWFV0FYkmPlSpg61dgXvCi59BkWi6U0\nCfKXL5YKYnenrc0I4XnzzGshs89MnGg6/7lzjQBIpYxbazpdoq6sFoulNHH85b1mDsVSQezODER2\n1JoamDMHLr+811g9ZkyJpuy2WCylSZCeuljRtLsrA23PcYzVA8GgVCuVsp1kMGDv3+6F4y/f0GBU\nDwOhgthdCcp/VKzsqAPFoJs5pFIpNm/ebNN254hTzyFle4zdimx/+WKrIAYDfnEgYd97qesgvj1n\nsMWhDDpvJVsJLn9sJTjL7kZYx+tlN6iogFtvhUsv9f++vd3f+Ox4gk2bFt7pO+d3jucYmgtht0jK\nW2nQCQeLxWJx49fxOx1vW5t/VTUR72hkv+/dNDTAb34DZ53lf24w5x82DLziX+vrYePGZGcQJePK\nKiKHichzrmW7iFyatc0UEdnm2ubafM9rsVgsboNxc7Pp0JubzWdnpB5WNyHO92DqLDQ0wIMPGsEQ\ndG6Ae+7xFgxgvr/77ujXW0zyFg6q+qqqjlPVccB4oBVY7LHpcmc7VZ2b73ktFoslisE4KA4kF846\ny3gprVsXzVj98MPBxwtbP1AkbZA+GVijqm8lfFyLxWLpZ1tYvTo8ADAoDiQu6bSxMaRS0YMPY+bT\nLBmSFg7nAr/2WXeciLyAqQJ3eVI1HSwWS2EpppdN0Lm8bAs9Pb2eRdk4AYBBcSBxbQ7uuJEowYcr\nV8JjjwVfc1Z6ttJBVRNZgCpgEzDMY91eQDrz/gzg9YDjzARWAatGjx6tFotl4FixQrWhQTWdVhUx\nrw0N5ns3ra2qTU2qc+ea17a25M61bJnqokWqqZSq6bKjLfX1ve3wO/add8b73n3dbW3mO69zNzSo\nbtniv95Z0unc7lUQwCpNoE9PzFtJRM4Bvqaqp0TY9k1ggqpuCtrOeitZLANHkJePO/lbmLdQvucS\ngcrK/pXRwkin4f33e2cezqwkOw4k7vdugq799df9EyKCiX5eurR0XVmTVCvNwEelJCIHAO+pqorI\nsRhD+OYEz22xWBImirF32rRk0kuEeRTFFQwOTlGcIHWVX4qKKKkrJk40x5wzx9g/Dj8cvvc9Izwf\nfTTYEH7FFd6CoVSC5RIRDiJSB0wFZrm+uxhAVf8dmA58VUS6gDbgXE1qymKxWApCFINrFAESJTdQ\n0h5F7jYWMnGe+9jNzfD738Mdd8B3vwujRvnbJNJpOOyw4ON5tbWYgiMR4aCqLcC+Wd/9u+v9j4Ef\nJ3Eui8VSHKIYXJNKF56kR5G7jSNHFi5xnldSPqf+87XXwpAh/vs6hm13Zz9qlInKdtdxcLfVCbhz\nR1kXMjvsoEy8Z7FYCk9jo+nEvHA6N6dT9yJOuvCgc/mRSvUmG/RrIxQucV7QrAlMJy9ihIRXQsQX\nX+xb2vWrX/Uv8NPZaQoAbd3aG1zX3m4+n3xyYepLWOFgsRSQYhaJSZoo2V6jCJB8zuWXWzOVgp/+\n1Iz8n3zSv43r1hWuEFIUVZiqydO0YIEp3rNggWnzUUf1j+wOejZaWvzXFyrKetBlZbVYBgsDUSQm\nSdrajMfN178OmzfD0KFGT+7Wc6dSJo3EaaeZ6+zoMNdZWRk/XbiXcbexEaZPD/aECspI+/rrhSuE\nFEUV1tIC69fDNdf0qpBuugn+9jczG4hKRUXwLOWXv4QLL4x+vCjYxHsWSwGI6gZaqkR1T802yFZV\nme0eewxOOCGZcz74oJkB5JKKvL0dhg9P9ndwOvlXX4X586G11X9bJ3PrIYf0vba4rrmVlcHCpLIS\ntm93SoqWniurxWLJkJQXz0AQtfqZ13YdHWZx8g/5dbzZXjenn+5/zrPP7ntOJ19SFG8dR13lJ+ji\nCoZsAVZTE7x9RQVMmWKEg1uIhAmG6ureWVhFhZm9zZvnv31ZWfLPlBUOFksBSMqLZyCIKthyFYB+\naTD8cI6VPfqOqqZLqhCSlzB0OvxUytgNRIzHkrsuRLZgCKOhAW65xaijnLYCfP/7/ve7oyP5Z8oK\nB4ulAERxAy1Vogq2XARg0KzEj5YWo8L52tdyd0lNohZzkDCsqIDbbjNqNUcAnX66eQ0TDE4eJ7et\n5qijemdW999v7kGQzaGuDvbe+wPuvjskkVMMrHCwWApAULK3OF48A0FUwRa23ciRxkPLrQIKc//0\noq4ONm3y36+9Hb7wBROtXcigsDBh6BieHe66K9q1OmbflhZYuNC8HzGid4ZUWxvuFdXauo1LLhkO\n7Aw/YUSscLBY8sAvYjVpXXcxiSrYwrKdXnopdHf3VQF9/vPxI6ErKoynlN9+7e1w770mXUUUNZPX\nb6YaHnkcdzaYS9T3rFmmOtz27b3fBR9DgZ0cf/w/c84532Py5MlMTMoVLonsfYVaxo8fn09yQoul\noETJWOpkK503L/dspQNB1Gysftul095ZSGtr/dc5S01N/3M2NYXv586I6nefvdpbX29ew651yxbT\n/ijnbG1VnT1btaoqXiZZUK2oiLN9j06Z0t2nnSSUldXOHCyWHIjq0ZOErnsgiGrE9dquo8N413jR\n3R1em1nExDq4Yyo+/OFgo7WbbGO4M1NYvRp++MO+NgCvWYDX7+gY0bPbXltr7AzObLCtzbi33nCD\naW/8hIFKV5cSPT5ZOOIIn0jBPLHCwWLJgcHsqhqVqIIte7t58/xVITt3ms60uro3D1E2ZWVGMDjH\n9OuY/XAbw7PjMOIQlHnWzRtvGA8j51x+24GJezDuvgr4depdmPI40ZgzJ/KmsUgkfYaIvCkiL4rI\ncyLSL2pNDP8mIn8VkRdE5ONJnNdiKRRhaS8Gs6tqoQnKtwSmc/RLiwF97597huZV7c0LR//v3jeX\nhH5RMs+WlcEjj3jPJLMpL+9h3LjnOfnkL2FsBd6kUpWx2nn77bE2j0ySuZVOVNVx6h2ZdzpwSGaZ\nCfw0wfNaLImycmXfhGizZpko25Ure7dJKuFc0oQJtWLkeoqaRK+62vt79/3LxbvJMZrnsq9XO/JN\nXe7Q3S08/fRveOWV33H88f+JERDOYgTmnXcKF1wQT000bx48/XSsXaKRhOECeBMYGrB+ATDD9flV\nYHjYca1B2lJsWluDSz86RsewEpEDYXgOMyJHNTIn1Zagsp4i/utFVJcvN8eZO9d8jmqgHTLElBVt\nalI96aT4xmCv3zHIGJ5Om/XXX9+lIj2hxuMTTmjddY82bzZG65NPNq8ffGC+b2pSLS+P19aaGlNK\nde5cVRi6VpPo1xM5CLwBPAc8A8z0WP8QcILr8/9iyoR6HcvWkLYMGFE6AockO9t8azCHCbWgesa5\nCLMo7Q2q+5xOq86Z49/xR+mYs5dUSvWqq4xHUS5eQtlLXZ25vjlz/L2UUqlW/fSnP6PV1TMVWkOP\nOWVK+L1ta/M/X9j1m/t5TLeWkHA4MPO6P/A8MClrfWTh4F7szMFSbIJGqiLGJdVNEq6qS
QiZMKE2\ne3Z0oeeFWxjMndu/vbW1qrNm9b0HYbOrRYtMBxzUpqBjeP0+UTvSysro24qo1tT0KPRoZeVOhW6F\n7QqbFSbomDGf18rKFoWwmYPq9OnRfs/ly+MLh95lvGoC/XpSleA2ZF43ishi4FhgmWuTDcAo1+eR\nme8slpIibqBTvq6qUV1iwwjTi69enbsBPczjx/luwQL41a/6BqIFBQI++qh/aonmZpMyIzuYMMiw\nbMadwaTT5vyXXQY33hjNUK0KbW3GDtDZ2cH++/8n48bVcuGFe3H88Y/wsY/tFzn99uOP91ZxC+KE\nE2DZMjj1VGPA7+6OdvxEyVe6AHVAvev9H4DTsrY5E/gtxnfrE8CKKMe2MwdLsSm2LWHRItXqau/z\nVVeb9VEo1MwhSF0VtLjvld/sKkxlVFXVOxvZssW8XnttbioXUD3ySDNyX7TI6Pdzua50urvPvQr6\n/eLea7/779y7OXOinieZmUMSwmEMRpX0PPAycHXm+4uBizPvBfgJsAZ4kQgqJbXCwTJA5KrmiWs3\nCDPaglkfRb0UJtSCOsMgoRdH5+9e6urCO8GoKqO6unDDehQ1UXV17/ZDhvToZZc9r9XVLSrSrlFU\nQtBXtRjl9wvaPxeWLYtiUykR4VDIxQoHSyEJ6szj2hLiCpQ4I/KoM5ZCeCvF9RZyL7NmRWtz1JnA\nkCH9ZyOzZpn944zee5fNWla2r44dO1fLy3dG2scZ+ec6o4o7c/DCERD+Hk1WOFgsOZO0p1HcUXmc\nEblfh9LaatQa06f3qksc9YufUIsr9Jqa/I3GYUttbTShFl1dYoRVlPved/GeFdTUdOodd5gGup+H\nKII61xlVvqrJaNdcQgZpi2UwkZQR2CFqKg13NtAXXoiesdPLYLxyJZx8siku73Dvvcbg+uST/kby\nuAb0xkb4yleib+8manWyww837Y5iHL7hBrjiCvP75Bvk1tZWwYYNpgvMzhGlCjff7J9RN5eMqw0N\n+Wfkzfea42CFg2WPI+m8SFEiaLOrn1VVmQ4oCtleUo5wcwsGh+ZmmDrVFLBPIi14KmVqJSxYEH/f\nqGlEglJ/e3H//XDyye/xwAPv09x8BOGJHvwjjt2/QbbgvPxy/8SDY8dGq7PgUF1tqrvlm007WCgp\nVeXddHZ3R0xRGIwVDpY9jqTzIkUpepM9U/FLOudFdnGgxYuD0160tyeb+O/AA3Pbr7LSzMLCXDfj\nuKu2t/fwzW/+G5s2fQuYgUm+UJ9bAzFZWkeNgnXr+tdxcAuL7NrVw4bFK/3Z0WGKAeVL0LNWRQdf\n7v4Zv+f5/E8E1uZg2fOIEwUdhSjBXkH66SCPl3S6vx3EpEgIXvLxiHHT2moMwbnYHKC/p1HYuXrT\nXvh5D23XceNu1BtvvFGXLVuhDQ3RvIyCFrcXU9SaFXGN9H2eqyieED4ub4HPGpu1jWodD6oJ9L8D\n1vFHWaxwsBSCoD9YbW1vjps4BBm4wzx+pk3zFxBeBsympmCBUl3dX8Dlmp4jjuE1yL3SzxDb2qr6\nq1/16Le+tUVnznxK/+EfvqyjRx+qsMXnOD19juPc9yTSZXi1NVevJN9jBj0oEb0k+m7Wo2m2awOb\ndQUTVMEKB0thyTfXT6nj/MFqavr+iWtr88+PFCfYywlQ83PnrK3t39GHxQe4XT7d15qLZ1ZUV9aq\nKtVPf9q/k3aPnHt6evS1117T7353sVZVNavIDnVSUoh8oFOmXKGXXfZrra/v1HS6J7TNjtdWUgLC\n3da4XklOQkHPex3m1uY3RfOQrLuetZOe1CbO0zZ6fXmtcLAUjGJm7ywUUYTb5s3Ryz7mQ5ja6aqr\ngjuc667rf8wVK0x5S6+OLbtMqd+5UynTqQZdZ9TOcciQ4ER6Ij161ll/1nPPPVeHDx+ukFKTm8j/\n3vsJW7/fdsUK0w4n5qG62gj/fALVjHCMrrpKp8099XQXbmryf+CqqvyDNYJ0nR4/kBUOloKQi8/+\nQBHUSUQRbknbHoLwa9Pcuarjxwd3ODNn+l9/dpyDlwoqqHOvqDD9lZMiO5soUcyOQAo+1zaFGTpi\nxAidMWOGfulLT2htbVfsex/222YLlOA0Gd6dfp+Zw9y/aprtkYVDQ4Nq2xafBzNOQIeftIrwA1nh\nYCkIxeww88Gvk1i2LLpwi5uBNV+cznzaNNUjjjAdc5TI3mnTcj9nVLWQu4ZCNtn3OpUy6SpOOslc\nT0tLt/7f//2f3nTTbVpZucPz+LW17frSS3/Vnp6e0Hb53ftcBy5O++tquhS6tYp2rWGHb6e/61it\nrdo2ZJg2+MxwRHq0ri5LSN35sr/0mj07+EeoqMjtj5f1Ax0DiaTstq6slj4MhvKXQUFsp51mgq+8\nyI5hGDXK/Pu8UDUuqEny0kvw9a/3dUOMEtBUXp77OYNcH92omgygmzf3dzvNDhA76KAexox5jj/9\n6SkWL17KZZct54MPPgBgxIgn2LSpibKySnburMwEjwlLllRz5JFjI7XLr5KeiU9RvOIWguJTJk6E\nd9a0sXjUN1jLMMawlkbu50U+yik8RheVtEiaujrpE+jGXYtJdbewhFMz21XQQh11tFBBNw9es4p1\nh0/tjYM4vY3UmOP9oytnzw7+Efwe3GxfZs8L7P2B1s2Z81bwiaJhhYOlD7n8aYtNWBBbR4f3uoEU\nbm1tJjgtl1rGZ56Z+3kbG+Hii6Nt29np3cF2dnbywgvPsn79Uv74x6XcdNPTbN++HYAPf/jDNDY2\nMnnyZCZPnszo0aN3RYJ7BY+52+UX+ObXF65Ztp6W5hF4CYew37bm0cWcV3Y30PsDTGQV73Agi6tn\nsPasbzJm2sf6tjUzUtq1HdNYy5iMcFlMqupqOG9q70nuCnkwt241wRN+xbDLykyouHNB2WHZQbiC\nMjbNmbMleONo5C0cRGQU8AtgGKDAQlX9UdY2U4AHMBXjAO5X1bn5ntuSPLn8aYvNmjX+nWxHh4k+\n9hIQ2cJt3brg88QNWnKnx8gOqJo/H7Zti3c8MH3FuefG38/B0U1EobPTdLAdHR2sWrWKpUuX8tRT\nT/H73/+elsx08vDDD2fGjBm7hMGIESP6HSdKio7swLfQvrCtjbE/v446bqXZI+itrk4ZMyag9rLP\nlLiGds7ruBM+NgbO+1jfla4w6BraOY9f965Lp/uPlMKm3UOHmlBpP+HQ3m5ybNxyi3n4/CRrkUhi\n5tAFfFtVnxWReuAZEXlcVf+Std1yVf1MAuezuAjqkHIh6E/74IN9o0QH6rkdNSp4fUWFt3Do6TEF\nZO66y7R97Fj/nD5e/30/2tpM53/DDebzzp3mnjlFb446qnddHGpqTJ6kfHPxREWki7vvvpEbbvgX\n2jId2JFHHskFF1zAlClTmDRpEsOGDdu1fVubuZe5Pg/Z6qrAvvCee2hsb+Jr3OR5rIqeDhobq/1P\nlsuUePRo/zBor5HS2LGm8V6dfyoFhx1mHogp
U/yP29VlRjfXXON/LcUiCcOFe8HMEKZmfTcFeCju\nsaxBOpjsTJLV1cZYOHeuf8BRkHune/2iRWa59lpjR7voIuPV0s8ANwDurYsWBdv15szpaxOsqTGv\ntbV92758eX6eWa2t5l4FGZVTKeNtFDeltIi3C2tc4qXc7tEjjzxev/GNb+h9992n77//vu9xfb2G\nlrXlXwzba//p01VBVzBBG9isabap0KVptpkAsJP+Kfi4y5eHF6zObkeQm9acOf3buGVLsJXdia4M\n8lpKwBMCWKVJ9OVJHGTXweAg4G1gr6zvpwBbgBcwFeGODDjGTGAVsGr06NF53aTdmbBnN7vjziXX\nf329eQ1yhRwI99Yoni5OHxNUOcwtIOLGdDg+9VE6Xf+8+/5LUJruqH1va6sR7FVV0fz0Kyp6dPbs\naELRP4XDFm2r2qv3IuKMIIIe0oxwUNBWUtrEDJ3H1drEDBMAFlScOSigxc9NK8wH2CvnRlAcgzua\nscAugSUnHIA08AzQ6LFuLyCdeX8G8HqUY9qZgz9RgpPcwURBI+QtW3JPERDnWU4q6jrOfyvKtnFr\nHCSVUiHKb+cmavzG9u3b9dZbf6/V1S1aVhat8L2zVFWF9+eB95Rt2sSM8IuJc1MbGlQXLgxuuF89\n1bBqQn4wMzriAAAgAElEQVQl6+JWOWpoMCORKP65SdeizfpjlcEzGqF/DVsS8VYSkUrgPqBJVe/3\nUF1td71/RERuF5GhqropifPviUTJJ++49znv/ba55prcc8RH9QDKTlnt1snHTWMcx2geZiN87bX4\nNQ6SzKlfXW3U0d3dwUbZIPfdqVN7uOOOR/njH3/H0qVLeeaZv9DT8zZQG7s9HR1mCaprEXhPqWMt\nWfr7KHnQw1zQKiuDDUTnntvfAHf66eZCgtKntrZ6P8BRfYDdbdy0KZpdI7Y1PgCPP9bR8LHwHcNJ\nwltJgP8EXlHVm322OQB4T1VVRI7FJGDfnO+5i0mQ4Tdpo3AUojy7TsetGtxBrl4dv3CJQxT31qSL\n68T5bwXdJ1W46SbjKhpHQOVS6MWhqsp4Bbnbe9RR5vl59VXTv+y7L7z+Onz0o6aNixcHp+netq2Z\nz33uvygv72HkyJkcc8xRvPzyXoFpvcMI6s8Dbbu0MIaszjbKCCLopjY3G++dJ580/sDt7cbqX11t\n3D8//3lTdOKhh8wN27nTCIyeHvM5CL8HOG6RieZm441U4dOlZo9aYlnjffD5Y5VDHpExLvKdegAn\nAIqxJzyXWc4ALgYuzmxzCfAy8DzwJ+C4KMcuFbVSAokUEydqWoOmpmiJ33IpeRh1FlwoFWsUdVCU\n+xR3Jp9rici6OnOvvdobZPOpq+vOqIb81EM9WlXVtStJXRIJ6GJmbOi9l5m00bF/5LCbetJJZhun\nDqpTPDo7c2KSD7BXaHjQsebOLW6H4HPPbPqMIpFgIsXECTOKOm0IU3G+8070Iu/uji6JzJ6FSFPh\ndZ+Cri+ugIoicPzO4+dFlp8NI5+aBuH5hfzuaUODajrV0ddrKJM2Ovafoa0t3MIfJUdKnKW2NvwB\ndo9AFi1S3Wuv8D9cXCNWrvj8saxwKBJBA5pUKrdEikniuFOmUmbxG6z4DWjuvNM7dbXfUllp3Fpn\nzzb2t6hF6gc6X1PS3oN+bsRz5vhfa32997267bbNmkp1lJRwiGpDblq0U+el5vV6DWUfaMiQ6KPm\nKFWMnE4916mu+xi5FO4IamOxk48VeOZg02eEEKQKDdLpNjcXJ1VDTQ3MmRNc7xa8VZynn25e3SrL\nIFIp+Na34Ec/6k1TEcWwHKS+7emBM86Id825EFTEPpe0IM79vOceExy4YYMpp/nee+Yf6kVnJ9x9\nN5x44lssXbp017JmzQzgn2OcXQEhnVa6mttppxrvOslKFTvpoCqzvv82tbRCVTVlVRW5ZWy4sAqO\nPBVO+SF0lIPb9ltZGeOaMoj430CHoBwp2dRmjPJlZf2NUw0N8dsX1MZs20qhjZFx7SJxSULCFGoZ\nzDMHMIOMUiau7jyfeIdCFNeJw5Yt8Ws3hLnerlgRdwDbo+W0aiUTFNC9995bzznnHD3//Id901d7\nLdW06vRpndo0+2ldVPUV3+yidWzT2dym13Kd1uKdLbWBLbrluzfmrwUJiiUYMsSoZMJ8mJMqO5f9\n4zp2iiTUPFGnwcWyPXicZxx0aQL9b6KdedJLKQiHMH19FBVkqZDd2QUVZ3H+f46Bs7Y2WBD6uYu7\nKVZxnWyc/0/2uWtq+v9fnXvk2DuD6gbkWlu5gS36wh13aXd3t6rGt2E0sEXbFv1ade5cbSPlm1La\nbRz2jSyumZSMKiSsY3eMuUFh/FFvRF1dNCNZTU3ynXGUGIViF0XJsnEkFeeQeIee5FIKwkE1vD6w\n37NZSvUPli0z/ydnwJVOBzt7pFJmWyfFvF+qeffi1tt7jboHwvYQ9D/NVjvvyvtf53+Nzn+7qSl+\nSoxd18o2baq5qM9oesXy9r7PWFW71rNN02zt36EzwXjvzJ6tWlfn3/FnGYc9I4uT6qziBo05NzO7\n8476IyxfHu49lET+ES/CZgUDbGQjoQhpa3OIQJBL8qOPRldBDhTLl8PkyX3b6OjexSeRZbY9JSzo\nq6qqV2/vxOW0t5sllTKq0c9/PplaEXFUuffc428bKiuDRx4xvvxeLuNedHUpt966jqeffoedO52Q\nHS+cm+2RXpo61rYdAF/96i5//YlcxDtXXMviMd9m7foqxry7isafn4O2tGSlir6fFDvhSWDFCmht\n9U8pTd8L75NZNJ2Gipr4QVd+xA0aA3OzswNd3H+2Zcvgl7/0thdMnAg//am5h14/cDoNhx6a/3V5\nERajMBiKokQhCQlTqKVUZg5BFHqQkG/KidbW4Bl4TU1f9UldXW5+8rW1vTNqr9rGzmwk33sVR5W7\nYkXw4NLtpRRd3d2lcLXCDIU23+0qK1UrpNN/5pCdYsJZHO+eOLom54fzckPzqktaCBfLfPx7g374\nILfQpNNQ5EKpTJFdUGq5lQqxDAbhEPR85uot55CETaupKbizF+l1SXX07HETxblzl4VlTM3H5hBH\nlRsldsD9P42qFakub9bvTbhZ377pNh0yxN+FdMgQ1SE1O73b6hUo5nUxzgMQpYP1iq5zZx+cPdsY\nmQrpd5/9wEbRu+Ub6DJQUahB58433W8uuITUUFirCfS/iXfoSS6DQTioFsYTJymbVpjreFVVb/K5\nqAO/8vK+hmp3Ustp04L3Pemk3P/LsRPuhXgANTSobt/eoX/84x/13HMf0PLyltBrb2CLtmGmQCvS\nJ2q6tv/soL4+Ez2/vF0b+CDUFhB4MYsWhXeyQR1ssTvP7KCxsIcqlfJPmpfLOQsp/LLPGfQHzTXd\nby7YGtKly8SJRs2YXYSmtdUsueQPCstD5uS9CdO/O6pgPxWok/Ll/vujJ5O76CJzrdmq1pUrTXqb\nIPb
ZJzyljN81xVHlrlndSUurX4oZpaKii8MOu4zhwxdlqpxVU1b2nu/2aZqpoJMlnGp0+c0wkd+x\nccgo7lnwFg8/XgWYPE3nnpu5npUv8E76TBY3f7q/zSAI98WsWxfu0+8XqBE1qVWS/vjZWQyPPNLk\nQ/Irg9feDpdearaLm4HR75zFIOwP+vbb+edOioLHb1zmbwiLhRUOCfHoo9EL20chSkfol+n0ssvM\ndqNGmcCr7m7v44jAY4/1drxRbIlVVSYILvsZd57Rzs7g/c88M/i/HJS9deyoDuqqheb2/sFV2f3j\n2M0rqONoz5KS1bQxruurtLQ8y4UXXsjkyZOZNGkSb79WzSmTt9KlZbRQRy0tKGV8gV8yieWeHXtN\ndzMXpu/lwv/OuqDMDalp3tq3vGQU3BcTxdDrV781ygjjkEOSS5frxcSJRgg5pfK8jMfbtuU2ghpI\novxBiyG0kkwRnEVSKbtPA36EyQb4M1X9QdZ6yaw/AxNDeaGqPhvnHAOR+TQOcR0Uoo74/SJ6R470\nHxRee21wW8vLzfLII3DCCeHX5uaaa8zc+c474eGHzXdnnmlew57RsHrIgQPdk7t4Qw6hov05YO9+\n+1ZIF42NFbS2tvKnP/2J0X+4mgoe9jxPDTt5+PLR7HvTz/t8P+x/7+Kd2m+yuGVq9JG+n/dJPn9a\nd2cfFgXb0ODvcRSW6fTll82xk0qX64cTxj9qFFx8sfHQyiaXEdRAkkvZ0UKQT4rgEJJI2V0O/ASY\nCqwHVorIg9q3hvTpwCGZ5f8BP828RiLJWgCFIkpn7tTbBbj55uDrCatZALn3Pd3d5j8/fXpvyuib\nPZOt9+eAA2DYMNixo/e7e+81M4qODsU7jYPJrhxWDzlwoLujjUc4niWcwik8RhcVtFBHHS1U0MWD\n7Wfx2cld7P/MM4zt7uafgCWc6rntkppp7HvMrP4nWbOGmtbN8Ub62T+uI+lz/dNWVvZO/aB/fvLm\nZnMzReC734UrrvC/qWGzjvnzzSjBiyQ66+wR0Nq1/iqyXF08B2rUGKeoSCEZO9akCCmEgMjXaAF8\nEnjM9fkq4KqsbRYAM1yfXwWGhx17/PjxRQ829CPMpTTIa6m+3nivRK3c5hAWfBc35sjvnIsWRc9a\nEHxOb++d6upoNsfA7K106TyuVsU7mGsHaGtZmbZVVmoPaCaTv3fgF+RWGtLPeOznMpprcjgv42Uu\nRtdc3UvDjNxR8Hp4g6Iuc3HxHEhPpVI4v6pnbeySycoKTMeokpzPXwB+nLXNQ8AJrs//C0wIO/b4\n8eNjuwwnVYrSTdRnwG+7qH1E0PVk9wm51hTwOuf06ckIGuj2/L6hoSfS7xC7/KRr6YnbWK+RRZzO\ntLzc5E7xC+oIyzeSaxvjEpavPM7DGJWgEZ3fgxb3Wktt1FhMTyn3uT3uwW6blVVEZgIzAUaPHh1L\nl18I9VMUhw/V3pntLbeYdevXG7VjRwd8/evRztXSYqqBZWsovGb2SSVkdO5t3OBWP1K0UkGXS43T\nzZLL/kwqdVrovo2NMHt2D17OFuV00Ui/CrS78An09mfrVlMGbs4cV+NTJsVqdji5F2VlwXo9r9KU\nqZQ5rkhwSl+HJFQ7EyfCt78N8+bF2y8f1UiQfrCmxrx6RT0Xwp2v0AyEp5RDAY3RkIxBegPgduIc\nmfku7jYAqOpCYCHAhAkTNKrdJ+lSlA5B5Rl37jT9S7b9wB3hP29edHVgTY1RA7v/N37CzatUZlh/\n5kVdnTEoP/FE1D0UX7sC7fyU2VTR0TeNQ9d34K4tnnrhjRs3smzZsl3pq7dtqwYeAyqBOqqqOqmq\nKmOJfo5US4gLaFzmzjVG0l2+pxjX0Sg63M7OcNesbHp6zGihpsYIiJqa4PrGcfXwfvr3oHzlQSmt\nc9XdB43o2tqMQD7ssPxcPHeXFBX5UEBjNEDeUw+MgFkLHAxUYUqBHpm1zZnAbzG9yieAFVGOPX78\n+MgR8oWKWL/oouDZt5/mwJ2gLV99ftAsOTuhnv8SXNSlb/Ea/5KU4qM6AtUhXpG/Wfk5uuvqdGdd\nnX6/sVE/8pGPKEbaaG1trU6dOlX/5V/+RZ944vd6550dvTP15SuT0aF5LalUXx1hUsacKEtlpeqE\nCf4PUSpldH5R1BVBus+wP1GSKa1Vi5M+ohQqSA00g6FMKMZF9TVgDXB15jt3DWnBeDStAV4kgr1B\ntTdCOorOvxClKFtbwztdv2ylzvMZpsbOx1bXurlVG2q9UzSkUqoLFxpD8LxZ63Ru9TwdwhZXtO52\nbajv9LZ5Tn9O51b+szb02d5E997JF7WebWrsCz27ljTbPSN//WwBW0DPmjpVb5o7V1+7/nrtuu46\n744p/xqa0ZZcpHkSi5MXPWiboDTXYffIS/oX2nhajJxHpZBXaaDxuQclJRwKtbjTZ4TZfXIdSAQZ\nsMPyEgUtboHk9b8cMqRvDrSg2gqewm3FCm2q/bJvoZdd1+zqOPp57gwZ1jtqdN+AjKT18/RpJaWL\nuECnc49Or7xfFy3cqW0Lf6GOMHAvrT43qCedNucJ67Dy7awnhKSqyL5h+Xj45LM41x8mKLw69KgP\nfzGNp8UQRqXgLTTQ2GI/4eQykAh7tsLyEpWXR68hnahwy3T4c7nGV80jdOu86c8F+6nW1npXtInh\nhtldW6tL//EftdVnChXoReSXMtX9g+Wj5qmrM9cfpSpPtjTPxcMnn6WuziTHmz493Msp+4EuxLQ5\nCYohjAbSW6hUsMV+wokzkIgyE29qCq45Ulvr3+/s+v9G9K2NJdwykqSJGf4zB7ZpU/U/mg44buc6\nZEjk0XMP6P+GCQGvparK6Nw9G++ShlEqjIUZa1asMNcUVLEoOxhjyxb/9rmvIUkhMm9eNGHomWVw\nD9e/W3ZBQim7E0nQVCo4NTgWLDCOKAsWGC8lLzfWKJ5wjY3+NdKdvES/+Y1x+Kgyedeoq3NlNHhx\nJYwYAbNmwXXXmdfhw43PbRaO91FDg3EsETGvntkRMl4KjdxPBd4eMxV00bjzLuNqpRpw1zzo7obL\nLkMbGuiurTUWY59NBZjil1QqiI4Of28ft7dJY2NvSHg2qZQp+LJsWfiNE/GPBgbjevatb/X+NqlU\n74/qRW0tvPVW8DaplHkQly837amu9t8WTKS1454XRLY3TtA9Kma0rmX3IgkJU6ilkCm7o87Es6sW\nutNU9/XwyUphHVaf0imikDWTCJ0lt7Ya9UPGGBK1RGScpRv0riOO0FFDh+oM0OtA24L2CSsAUVnZ\nt5pQ3JFxlCmh342La9COapieO7d/25wZiJfxuLVVdebM4HMvWhTN5uE1G7D6d0sGbJnQ/AiKn6is\nNDOO9nb/ioCqZlLgjqvo6DDLWWfBu7c8QMpvatLaakaU6XS/QIbAmBonyq+zc1eOGu8SkRHSQgfQ\nDHS8/Ta3fPjD7DttGqMvvZTqH/0IFi703sEv7avD6aebUfHQ
obBpE/ziF/GyjIaVZQT/Gxc3UMiZ\nNob5kDv1VaO0zWnfyJH+NWVFTOSkM4UMSnPtNRsIakepZ620lCZJSJhCLQWZOWRGmG1zvufrBgpm\ngBs08ApV806/P7quP98yaGFLKhXL7arHuYhsI3WQASZscY4za1bwfUmlkh3txjVoO9PGQujxg45Z\nXd03pqG11bQ9leq1q+QyG7Azij0ObJnQHMj6o6yomaQN8oGma/wrhjXU7tS2Od/r+6dtatK5J/1O\nxS9QTEysQOykSmF+tTm6dHaWlWl3mDBwJGKQcdcvEAN6g92CijZDr3eU17rKStXPfjZZr5O49y2K\nS6s7cCxOEq+oQS/uzjsfb5xSyT9kKSpWOMTF54/SSkpnVy3Uqirvjj7Ndm3iPPOnTadNorV0Wps4\nLzjGYNHO6CN9ETOiDvOrjTECdscatBPBm+jkk40tI2i0fNJJwdfg2FGC3DHr6sI9fJIc3caNW3B3\nmn6j7jvvzH00nm2jiNKOXLFeTHskSQmH3cpbKRAf3XMN7RzA3+j0SzNPLWs52OjIm5tNIYPmZhq5\nz99TqAIaz60yuuP6/pXI+lFXB7/8pTFgNDebv29zs/l8yinG+BHFi8WFuJZqQhLTpdPwpS+ZYg1B\n+Wr22cds63cNhx1m9P5HH+2ft7+1Fb7whb7eRdlkX3s++LmBpdPmtwnycHK7v82ZA1/8Inz1qzB7\ndvBvFYT7mNOn++v+HdtHPtj8Q5Y82HOEQ8AfZWzHauqqvDuzOloYQ/8/UYqdLOFUGthCmu0I3aTZ\nQUN9V2//ctRR/rVD3fT0eHeS0MevVv3cFbPQSFu5cAycQQLIydAXxWUy7DiTJvV2kJ/7nL+LZxId\nJHj7OL//Prz3Xrjfc02NKaX5b/9mDOnf/75/sryo7XWM50cf7V0VDZLpvMN+h2JVK7MMTpKYfhRq\nSVStFDDFbqvb19c43eCVTC5LLdUvJUXUogtO0rcAI22PiC6ZNEmPO+44/UR5uW4G3YZxN40deCZi\n9PpOPp9sdciWLf4qnzg5euJE9CUZ3VuIYh5xHAHitrfQah+bf2iPhD3GlTUpN7yAAgipym6W/EY5\n5Swn/bVSp81U0MkSTg10C62hvW9Zye50by75MHfIri4TRff229DU5OneuUOVny9fTvexxzL5299m\n5Sc/yac2b6bs7bdNfu+glM/Z1NXBbbeZwK1sd8fly+G00/qrg5wIP2c65IzC77mnbxHpj360dx9H\nlXPyyX3riTrX/OKLvSP0pGrxFqqWbBxX2Lij8UKXmvTK655ESm7LnkE+kgW4CVgNvAAsBhp8tnsT\nk431OWJItfGHH56sG17IqLd1c6s2zX5a501aok3lXwicMUQaPUbwlOlIpfS7s2frNp9Aso50Wne8\n/37/a/FzdRwyxL86WUODCbTKHlk//rh/G2trVT/4INZ93NW+0Nwimv/otrXVXFOUPE3Z+0WZZcRx\nBMhlNF4MV1Obf2iPIk4fG7TkKxxOASoy7/8V+Fef7d4EhsY9/ni/yFuvP2HUP7vfH8X5kwa5a0ZZ\n3OqAtjb/jjqzdINeVF6uM485RltTKe2sqdGesE4iLCrXq8PJeFn164QWLIh+Pc79S7rARq4dpLNf\nUKK6fKOJo7jC5tuh287bkiAlIRz6HAimAU0+63ITDn5/xuwkafmOvpKsGeDqIP/2xhvaEeL33wPa\n2djY246wTiJq5+w+VtTMpF5Lth49aqcf15YQt4OM+pu5z5XLLCNoZuNOg5JL3IPFUgCSEg5J2hy+\nBNzjp70CnhCRbmCBmlKgnrhrSI/322jnTvjKV+CNN8zf9Ic/7Kt7j1sfNIlarFVV9NTU8Pi3v83i\nSy9l6dKlHLN6NQswBS/9EKDC8WiKUo82au1c97Huuis8xYUflZV99ehR3SPj2hLi1uKN+ps553Js\nEm1t/h5CXrWHw/T2EyeaY48Zk7y9w2IZQEKFg4g8ARzgsepqVX0gs83VQBfQ5HOYE1R1g4jsDzwu\nIqtVdZnXhuquIS2ivg3r6jIuiEFELTSeZy3WzrIy7kml+PK2beycM4f6+npOOOEEvjpqFOknnjAC\nLIgTT4x+slx81/O5vmzDaNROv9DG1qjXVFFhcjuNGdM3EZYXfvcvLG9RIYqXWywDTKhwUNVPB60X\nkQuBzwAnZ6Y0XsfYkHndKCKLgWMBT+GQKFF9xYM6vCin6enh4U9+kvv23Zdx9fUccPzxlP/d3xnB\n9PTTpgMJ4mtfM+e/4ALv9W6Prb/9Lb53Tz7X99hjfTu3qJ1+Ep4yQZ5qYdeUSvW24be/jTfL8CKX\nxH5RBycWSymSj04KOA34C7BfwDZ1QL3r/R+A06Ic39fmEGdx2yb8iJBiwS+moAe0+ytfMTp9xzBa\nXW0+P/FEdE8Xkf5eQar97Slh+Y+8dN1B1xfUvocf9tajx62qlIuxNewcQdeUSvWmv1aN7nGUi7dR\nqVZhs+yxUAoGaeCvwDqMi+pzwL9nvh8BPJJ5PwZ4PrO8jFFHRTp+0YSDqvb8+c/atdde2lFZ2Scv\nUTOmlkFQ4jrfjKfV1fG8n2bP7ut1tWhRcKcexwAfNU+QU5Ri4cLgzrmQHjZRje5RhVTUYMRcvI1s\n/iJLiZGUcBBzrNJkQkWFrqqpyVndA8C8eXDNNf2+VlX+8pe/sHTp0l3LtvfeYxowvq6Oo4YPZ59D\nD2X45MkcuN9+yDe+4d2O6mp/A2dcJk6E11/vVcNUVfkf21FDDR/uX0MgG0dN45fv3/k+SEff0FB4\nPfpdd5mqeX6/+9y5JtdR0DW5aW8398nrepxqcueem9s1BR27GPfKYslCRJ5R1Ql5HygJCVOoZfwx\nx5iR15e/nNuswTVy6+7u1ueee05/9KMfaWNjow4dOlTBVMAcOXKknn/++bpw4UJ99dVXtaenp68o\nDlJhhNUZDqpbHHUGUmyVxUCPhsPUQKlUaQWb2ZoJlhKCEnRlTZ6yMmPMO+88kxHztNPMqNqd4sGv\nshbQJcJP1q3jyXPOYfny5XzwwQcAfOhDH+KMM85g8uTJTJkyhYMPPhjxS3wHwcbVj38cnnzSf9+g\n43pdb1QKmTit2Nk8sw3Po0aZGZlfhlPV+IbeqBXbciHXY9sKbZYSprSFg5tPfQrWrTPqhNWr4cMf\nhgkTTGbNjg50/ny0qws6O2kvK6Ojp4epO3aw6sorGTt2LNOmTWPy5MlMnjyZD33oQ/HP79cB3H13\nsHC48kqTzyiKqihOeupCFo5PKt9RFLxyIpWXm0y1fuzcmZuAihtLUchjFyoXlMWSEKVtc5gwQVet\nWmU+ZP2ZtK6ObhF+e8opTHngAejqog7oBLpFuO3MMxk1YwaTJk1i5MiRhWvkBx+YYtJeHXt9PWzc\naEa6jlB59134+c/jxx2kUqZTzA7AKgTF0qO3tfUvxO0QZMtJp02K7cHqIhp03dZOYcmTpGwOg6Oe\nQ1sb6gQaZQqsSHM
zFTt2cOZ991Hf1UU95mKqgVpV/unppzmvo4ORixYZA2e+RWO8cCJjs1VHlZUm\nm+l55xn1h4h5f801pqBOnEyqAEOGGLXaiSea1zfeKOzo0q9ATnYxnHwJihGoqDCjcb91hZo1FYMo\nsREWywBT0mqlHTt2cP3111PxX//FN7duxaummq9Gf+tW4/HS0WFUOV/5CjzwgBnJe+l44+p/vSJj\nHTo7zTkXLjSpuN3qgjgBaU7H3N1tBFxLC6xYYd4HzRyS0GUXUkfvEGTbaG2FmTNNavDdLd20rdBm\nGQwkYdUu1AKoiOhPhg8PjjOIs9TU9PcoycXbJG7h+tpa1TlzgmMXsj1yFi6MlvbazWDynIniFbU7\nZiwdaG8wy24Ne0KcwyGHHKIrV66k4ZFHgv3e86Ghwfwtt23zXuen/503D667ztdTyhenBrOImWH4\nqbvSaaNC+sUvvK/bS+8+2HTZQbaNIUPg1luNE8Lu5sljYyMsBWSPsDkMqa83gmH1av+C9fnS3u7f\nQQfpf4Pq8wbR3GyWsjLjmutHS4u57jjqh8Gmy/azbaTTxlvp6183AnjWLNOZrlyZ7Pnb2oyKbt68\nwtmlvCiWTcdiyYOStjnwwgumY2hpMe6NhSCoQwjS/wYloItCd7cZ5afT/i6jhx9ubAxRXUoHoy47\n27YxciR861uFz3I60K6kxbDpWCx5UNrCobu7t2PIt96CH6mUUQ15uU0G+fR7BcbV1hpDahRVU0sL\n7LefMbB6UVEB3/ueGdH6rc/22ClmfEKSZNeeKHSW01JJs13IuAuLJU/yUiuJyPUiskFEnsssZ/hs\nd5qIvCoifxWRK/M5Z+I4qZ298HOZdNQRjz4Kt9xigtzmzjXeScuW9VUX+FFXB4ceGqxeqK6Gyy4z\n7auuNvsFqR8aG4OFzWBw/yzG7Gewqd8slgEgiZnDLao632+liJQDPwGmAuuBlSLyoKr+Jeczlpf3\nrWxWXW1sEkEpF2pqzLpUyrw6bpG33gqXXNJ/+/p67w7YSx2RHZTmqAteew1uusk7rsHprFMpb/XC\niy8atVNXl5nVVFebbb/zHbjiCm+BlkQNhYGmGLOfwah+s1iKTF7eSiJyPdAcIhw+CVyvqqdmPl8F\noKrfDzv+BBFd5bWistIIiDPPhCOPhMMOgylT4JBDvDtiJ/PmOeeYwi9OJxy0z5AhprCOu0PNxRso\nipHG61UAAAqoSURBVDDJJl+voyiZSgtNrrEWxfDkCcr6Otijry17PCWRlRW4HngLeAG4A9jbY5vp\nwM9cn78A/DjK8UPrOTQ09BZ2nz69t9hOFN/xFStM7EGEjK67yNU/Pa6v/mD3g8831qLQsRpBWXZz\nKfhjsZQQFCsra1ANaeCnwDxAM68/BL6Uj7ASkZnATIDxYRt3dBjvlrKy4BiI5mZ49dXez45BMiiN\nhZd6IVd1RFzD42BWeyRh7C20J8/uoH6zWApM3jWkHUTkP4CHPFZtAEa5Po/MfOd3voXAQjBqpcCT\nxslRNH8+fOYzpuMJMkg6eOm3i+UNlM95wtQ5hU4TnVRN5UJ78lhXUoslmHymHcBw1/tvAXd7bFMB\nrAUOBqow5UKPjHL8RMqEeqkMotQU9lIvFEsdket5wtQxxUitYWsqWywDCgmplfKNkL5RRF4UkReA\nEzMCAhEZISKPZIRPF3AJ8BjwCvBfqvpynufNDWfkGhbdXFvrrV7IN7I1akRuLudxq3MymWtpbjaf\nTznFpBYPWp9UdHDQvS3lWAuLxdKHks6t5Out5FBRES84TsTEI1x+ub9HTG0tbNhgOmI/cvEG8itq\n8+1vm/VBKqAo5wnzwImbpylXbN4gi2VAScpbqbQjpINw8u/EEQ51daZzuukmE1x2883eBskgwQDx\n9eFBRtprrzVCyyt9Q5zzhBmx4+ZpyhVr7LVYdgsGr3CoqIDf/AbOOqu3E0qlTEfsR0uLqcLW2to7\ncv/Od0znXEiDZJgB3FHxQO7pG8KM2HHzNOWDNfZaLIOe0lYrVVToqqqqvh1+ba0ppOOMsB3Vy6uv\nGo+kuFXWki576eUJFCe9d64qnjB1zhtvwMEHW3WPxbKbs2eolY4+2tgHXn0VNm0yieoOPbTvKNRR\nvdx1l4l38KKy0qzzSq6XVDK3oCyfcaq/5ariUTWqshtuMJ+z6007xmyr7rFYLBEobeFQVpaMzr2z\n03+/JPTtYYFfb7zhnxAvm1xUPG7B1N5u8jBVV/fPw2TVPRaLJSKlLRziEDQ6zzUtd1TCAr8eeaTv\nqD1oBhE3e6qXYHKu8+abjXBwY9NEWyyWCJR0JbhYBKWrziUtdxyipLtwRu0LFhgbxNy5yVQCs+mn\nLRZLAdh9Zg5hLpRQOH171HQX2aP2yy/PX8UzmPMwWSyWkmX3EA5uL6FbbjHfrV/fv8MtlL49qGRo\n0MwkCRXPYK3+ZrFYSprSdmWdMEFXrQqMkc6tXkIhGKh22Ihki8XiYs9wZQ2jVGoBw8B5AtmIZIvF\nUgAGt3BIKj10UgyUJ5B1UbVYLAmTl3AQkXuAwzIfG4CtqjrOY7s3gR1AN9CVxJQHsMZYN9ZF1WKx\nJEhewkFVP++8F5EfAtsCNj9RVTflc75+WGOsxWKxFIRE4hxERIC/B36dxPEiExTbkET8gpuotRgs\nFotlNyApm8OngPdU9XWf9Qo8ISLdwAI1pUDzp1jG2KC8ScX0iLJYLJYiESocROQJ4ACPVVer6gOZ\n9zMInjWcoKobRGR/4HERWa2qy3zONxOYCTB69Oiw5hXeGFtKHlEWi8VSJEKFg6p+Omi9iFQAjcD4\ngGNsyLxuFJHFwLGAp3DIzCoWgolzCGsfUFhjbKl5RFksFksRSMLm8Glgtaqu91opInUiUu+8B04B\nXkrgvMXBekRZLJY9kCSEw7lkqZREZISIPJL5OAx4WkSeB1YAD6vqowmctzg4HlFeWI8oi8WymzL4\n02cUGpuewmKxDCKSSp+x+6TsLhSOR1QS6bUtFotlkDC402cUC5uewmKx7GFY4RAVm57CYrHsQVi1\nksVisVj6YYWDxWKxWPpR0t5KIrIDeHWg2xHCUCDZhIKFwbYzWWw7k8W2MzkOU9X6fA9S6jaHVxNL\n710gRGRVqbcRbDuTxrYzWWw7k0NEEvH/t2oli8VisfTDCgeLxWKx9KPUhUMyqb0Ly2BoI9h2Jo1t\nZ7LYdiZHIm0saYO0xWKxWAaGUp85WCwWi2UAGFDhICJ/JyIvi0iPiPh6AIjIaSLyqoj8VUSudH2/\nj4g8LiKvZ173LlA7Q88jIoeJyHOuZbuIXJpZd72IbHCtO2Og2pnZ7k0ReTHTllVx9y9GO0VklIj8\nTkT+knlGvulaV7D76fesudaLiPxbZv0LIvLxqPsmSYR2np9p34si8gcR+ZhrnefvP0DtnCIi21y/\n5bVR9y1yO69wtfElEekWkX0y64pyP0XkDhHZKCKeJQ8SfzZVdcAW4CPAYcBT
wASfbcqBNcAYoAp4\nHjgis+5G4MrM+yuBfy1QO2OdJ9PmvwEfyny+Hri8CPczUjuBN4Gh+V5nIdsJDAc+nnlfD7zm+t0L\ncj+DnjXXNmcAvwUE+ATw56j7FrmdxwF7Z96f7rQz6PcfoHZOAR7KZd9itjNr+7OAJwfgfk4CPg68\n5LM+0WdzQGcOqvqKqoYFuR0L/FVV16pqB3A3cE5m3TnAzzPvfw58tjAtjX2ek4E1qvpWgdrjR773\no2Tup6q+q6rPZt7vAF4BDixQexyCnjWHc4BfqOFPQIOIDI+4b9Haqap/UNUPMh//BIwsUFuCyOee\nlNT9zCKsLHJBUFNaeUvAJok+m4PB5nAgsM71eT29ncQwVX038/5vmMJChSDuefoVQAK+npnq3VEo\ndQ3R26nAEyLyjJia3XH3L1Y7ARCRg4BjgD+7vi7E/Qx61sK2ibJvUsQ910WYEaWD3++fNFHbeVzm\nt/ytiBwZc98kiHwuEakFTgPuc31drPsZRqLPZsEjpEXkCeAAj1VXq+oDSZ1HVVVEcna9CmpnnPOI\nSBVwNnCV6+ufAvMwD9E84IfAlwawnSeo6gYR2R94XERWZ0YlUfcvVjsRkTTmj3ipqm7PfJ3Y/dzd\nEZETMcLhBNfXob9/EXkWGK2qzRnb0f8AhwxQW6JwFvB7VXWP4EvpfiZGwYWDqn46z0NsAEa5Po/M\nfAfwnogMV9V3M9OnjbmeJKidIhLnPKcDz6rqe65j73ovIv8BPDSQ7VTVDZnXjSKyGDPtXEaJ3U8R\nqcQIhiZVvd917MTuZxZBz1rYNpUR9k2KKO1ERI4Gfgacrqqbne8Dfv+it9Ml8FHVR0TkdhEZGmXf\nYrbTRT+tQBHvZxiJPpuDQa20EjhERA7OjMrPBR7MrHsQuCDz/gIgsZlIFnHO008fmekAHaYBnt4G\nCRDaThGpE5F65z1wiqs9JXM/RUSA/wReUdWbs9YV6n4GPWvutn8x4xnyCWBbRkUWZd+kCD2XiIwG\n7ge+oKqvub4P+v0Hop0HZH5rRORYTJ+0Ocq+xWxnpn1DgMm4ntci388wkn02C21hD1owf+z1wE7g\nPeCxzPcjgEdc252B8VZZg1FHOd/vC/wv8DrwBLBPgdrpeR6PdtZhHuwhWfv/EngReCHzowwfqHZi\nPBaezywvl+r9xKhBNHPPnsssZxT6fno9a8DFwMWZ9wL8JLP+RVxedn7PaYHuYVg7fwZ84Lp3q8J+\n/wFq5yWZdjyPMZwfV4r3M/P5QuDurP2Kdj8xg853gU5Mv3lRIZ9NGyFtsVgsln4MBrWSxWKxWIqM\nFQ4Wi8Vi6YcVDhaLxWLphxUOFovFYumHFQ4Wi8Vi6YcVDhaLxWLphxUOFovFYumHFQ4Wi8Vi6cf/\nB4BnN9zhNXgeAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aabb26d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ruamkVNSAQBbeoURTwl6gB1GhcooYe\nZiJmEp2YgCGauGY0ihpITEaJmjExA5MRs7TRGZVxjaIxCmYDdFwjasAVjQaQpTd6+35/nLr07aq7\nVt2qrobzPs99arnbubdune+cbxVVxWKxWCwWNxWD3QCLxWKxlB9WOFgsFoslByscLBaLxZKDFQ4W\ni8ViycEKB4vFYrHkYIWDxWKxWHKwwsFiiYiI7CUiKiJVg90Wi6XYWOFgsRQJEblQRNaIyGYReVdE\nbnQLloyw+Z2ItIvIKhH5dNb+Z4rImyLSJiL/KyK7lP4qLDsqVjhYLMXjPmCSqu4EHAwcCnzDtf7X\nwP8BuwJXAHeJyG4AInIQsAD4IrAH0A7cUrqmW3Z0rHCwDFlEZKSI3C0ifxeR10XkG65114jIXSJy\np4hsEZFnRORQ1/qPicgTIrJRRF4SkdNc6+pE5IeZUfsmEXlKROpcp/6CiLwlIutE5Aq/9qnqalVd\n7xwW6AP2yZxjP+Bw4GpV7VDVu4HngX9wzgHcr6pLVbUVmAM0i0hjIffMYomKFQ6WIYmIVAD3A88B\newLHAReIyGdcm50O/A+wC3A78L8iUi0i1Zl9lwC7A+cDLSKyf2a/+cAE4MjMvt/CdOwORwP7Z855\nlYh8LKCdZ4rIZmAdZuawILPqIGCNqm5xbf5c5ntn/XPOClVdDWwF9gu+MxZLMljhYBmqTAJ2U9W5\nqtqlqmuA/wTOcG3ztKrepardwA1ACvhEZkkDP8js+zjwADAjI3S+DHxTVdeqaq+q/kFVt7qO+53M\naP85TAd+KD6o6u0ZtdJ+wH8A72dWpYFNWZtvBhojrrdYior1urAMVT4KjBSRja7vKoFlrs9vO29U\ntU9E3gFGOutU1T0beBMzAxmOESKrA879N9f7dkxHHoiqviYiL2HsBs1AK7BT1mbDAGcmEbbeYikq\nVjhYhipvA6+r6r4B24x23mRmBKOAd511IlLhEhBjgFcx6p9OYBwutU5CVGWOC/ASMFZEGl2qpUOB\nFtd6t41kHFCTaaPFUnSsWskyVFkObBGRf80YkCtF5GARmeTaZoKINGfcRy/A6Oz/BPwZM+L/VsYG\nMRU4FbgjIyxuBW7IGLwrReSTIlIbt4Ei8hUR2T3z/kDgcuC3AKr6KvAscLWIpESkGfg4cHdm9xbg\nVBH5lIg0APOAe7JsFBZL0bDCwTIkUdVe4LPAeOB1zIj/ZxjVi8O9wOeBDzEuoc2q2q2qXRhhcFJm\nv1uAL6nqqsx+lwAvACuADcC/kd9/5SjgBRFpAx7KLN92rT8DmJhp3/eB6ar698z1vQScixESHwAN\nwOw82mCx5IXYYj+W7RERuQbYR1X/ebDbYrEMRezMwWKxWCw5WOFgsVgslhysWslisVgsOdiZg8Vi\nsVhysMLBYrFYLDmUdRDc8OHDda+99hrsZlgsFsuQ4emnn16nqrsVepyyFg577bUXK1euHOxmWCwW\ny5BBRN5M4jhWrWSxWCyWHKxwsFgsFksOZa1Wslgslh2Zjg5YvBhWr4Zx46C5GVKp0pw7snAQkVsx\nuWw+UNWDM9/tAtwJ7AW8AfyTqn7ose+JwI8wKZV/pqo/yLfB3d3dvPPOO3R2duZ7iB2eVCrFqFGj\nqK6uHuymWCwWH1asgBNOgJ4eaGuDhgb4+tdhyRKYNCl8/0KJHAQnIpMxOeZ/4RIO1wEbVPUHInIZ\nsLOq/mvWfpWYNMPHA+9gkpnNUNW/hJ1z4sSJmm2Qfv3112lsbGTXXXdFRCK13dKPqrJ+/Xq2bNnC\n3nvvPdjNsVh2eLxmB6owciRs3Ji7fVMTvPee/wxCRJ5W1YmFtiuyzUFVl2IyVLo5Hfh55v3Pgc95\n7HoE8FdVXZPJhnlHZr+86OzstIKhAESEXXfd1c68LJYyYMUKIwRmzYKrrzavI0bA/PlmxuBFTw/c\nc0/x21aozWEPVX0v8/5vwB4e2+yJqyIXZvbw//wOKCIzgZkAY8aM8dsmn7ZaMtj7Z7EMPh0dRm3k\nnh20tprXa6+FrVu992trgzVrit++xLyV1OinCk7UpKoLVXWiqk7cbbeC4zgSZ+PGjdxyyy157Xvb\nbbfx7rvvbvu81157sW7dusB9nnjiCT772c8CcN999/GDH+RtrrFYLGXE4sX+swNVqPUpL9XQAGPH\nFq9dDoUKh/dFZARA5vUDj23W4irXiCnVuLbA8w4aQcKhx++XzpAtHOJy2mmncdlll+W9v8ViKR9W\nrzazAC+2boXeXu91VVXGLlFsChUO9wFnZd6fham8lc0KYF8R2VtEajDVr+4r8LyDxmWXXcbq1asZ\nP348l156KU888QSf+tSnOO200zjwwAN54403OPjgg7dtP3/+fK655hruuusuVq5cyRe+8AXGjx9P\nR0cHADfffDOHH344H//4x1m1apXfaQEjXM477zwAzj77bL7xjW9w5JFHMnbsWO66665t211//fVM\nmjSJQw45hKuvvroId8FisRTKuHFmFuBHtva3psYYo5csKY07axxX1l8DU4HhIvIOcDXwA+C/ReQc\n4E3gnzLbjsS4rJ6sqj0ich7wCMaV9dZMCcSCueCCC3j22WeTONQ2xo8fz0033eS7/gc/+AEvvvji\ntvM+8cQTPPPMM7z44ovsvffevPHGG577TZ8+nR//+MfMnz+fiRP7HQmGDx/OM888wy233ML8+fP5\n2c9+Frmt7733Hk899RSrVq3itNNOY/r06SxZsoTXXnuN5cuXo6qcdtppLF26lMmTJ0c+rsViKT7N\nzcY11Y+urtzPdXXFbZObyMJBVWf4rDrOY9t3gZNdn536udslRxxxRN5uoc2Z+eGECRO4J6YLwuc+\n9zkqKio48MADef/99wFYsmQJS5Ys4bDDDgOgtbWV1157zQoHi6XMSKXMLMAdy1BT42+IBti0yWwf\n5MqaFEM6QjpohF9KGlxzw6qqKvr6+rZ9DnMZrc1YnSorK0NtFn77golfcF4vv/xyZs2aFetYlqHP\nYEbTWvJj0iR4913zu61ZA889B3ffbQzSfjiurGeeWdy22dxKMWlsbGTLli2+6/fYYw8++OAD1q9f\nz9atW3nggQci75sEn/nMZ7j11ltpzfjErV27lg8+8PITsGxP+PnLr1gx2C2zhFFXZzr6K6+EadOC\n7RBQOlfWIT1zGAx23XVXjjrqKA4++GBOOukkTjnllAHrq6urueqqqzjiiCPYc889OeCAA7atO/vs\nszn33HOpq6vjj3/8Y1Had8IJJ/Dyyy/zyU9+EoB0Os2vfvUrdt9996KczzL4BPnLl0oFYUmGMDsE\nlM6VtaxrSHulz3j55Zf52Mc+
Nkgt2n6w93H74fbbzUzBEQhu0mlYsKD4KghLcqxYAccfb+wLXpRd\n+gyLxVKeBPnLl0oFsb3T0WGE8Lx55rWY2WcmTTKd/9y5RgCkUsatNZ0uU1dWi8VSnjj+8l4zh1Kp\nILZnBiM7al0dzJkDl1zSb6weO7ZMU3ZbLJbyJEhPXapo2u2VwbbnOMbqwWBIqpXK2U4yFLD3b/vC\n8ZdvajKqh8FQQWyvBOU/KlV21MFiyM0cUqkU69evt2m788Sp55CyPcZ2Rba/fKlVEEMBvziQsO+9\n1HUQ354z1OJQhpy3kq0EVzi2EpxleyOs4/WyG1RVwU03wQUX+H/f2elvfHY8waZNC+/0nfM7x3MM\nzcWwWyTlrTTkhIPFYrG48ev4nY63o8O/qpqIdzSy3/dumprg/vvh1FP9zw3m/HvsAV7xr42N8MEH\nyc4gysaVVUT2F5FnXctmEbkga5upIrLJtc1VhZ7XYrFY3Abj1lbTobe2ms/OSD2sbkKc78HUWWhq\ngvvuM4Ih6NwAd97pLRjAfH/HHdGvt5QULBxU9RVVHa+q44EJQDuw2GPTZc52qjq30PNaLBZLFINx\nUBxIPpx6qvFSevvtaMbqBx8MPl7Y+sEiaYP0ccBqVX0z4eNaLBZLjm1h1arwAMCgOJC4pNPGxpBK\nRQ8+jJlPs2xIWjicAfzaZ92RIvI8pgrcJUnVdLBYLMWllF42Qefysi309fV7FmXjBAAGxYHEtTm4\n40aiBB+uWAGPPBJ8zVnp2coHVU1kAWqAdcAeHut2AtKZ9ycDrwUcZyawElg5ZswYtVgsg8fy5apN\nTarptKqIeW1qMt+7aW9XbWlRnTvXvHZ0JHeupUtVFy1STaVUTZcdbWls7G+H37Fvuy3e9+7r7ugw\n33mdu6lJdcMG//XOkk7nd6+CAFZqAn16Yt5KInI68HVVPSHCtm8AE1V1XdB21lvJYhk8grx83Mnf\nwryFCj2XCFRX51ZGCyOdhr//vX/m4cxKsuNA4n7vJujaX3vNPyEimOjnJ58sX1fWJNVKM/BRKYnI\nR4D3VVVF5AiMIXx9gue2WCwJE8XYO21aMuklwjyK4goGB6coTpC6yi9FRZTUFZMmmWPOmWPsHwcc\nAN/7nhGeDz8cbAi/9FJvwVAuwXKJCAcRaQCOB2a5vjsXQFX/A5gOfE1EeoAO4AxNaspisViKQhSD\naxQBEiU3UNIeRe42FjNxnvvYra3w+9/DrbfCt78No0f72yTSadh//+DjebW1lIIjEeGgqm3Arlnf\n/Yfr/Y+BHydxLovFUhqiGFyTSheepEeRu42jRhUvcZ5XUj6n/vNVV8GwYf77OoZtd2c/erSJynbX\ncXC31Qm4c0dZFzM77JBMvGexWIpPc7PpxLxwOjenU/ciTrrwoHP5kUr1Jxv0ayMUL3Fe0KwJTCcv\nYoSEV0LEF14YWNr1a1/zL/DT3W0KAG3c2B9c19lpPh93XHHqS1jhYLEUkVIWiUmaKNleowiQQs7l\nl1szlYKf/tSM/B9/3L+Nb79dvEJIUVRhqiZP04IFpnjPggWmzQcfnBvZHfRstLX5ry9WlPWQy8pq\nsQwVBqNITJJ0dBiPm/PPh/XrYfhwoyd367lTKZNG4sQTzXV2dZnrrK6Ony7cy7jb3AzTpwd7QgVl\npH3tteIVQoqiCmtrg3fegSuv7FchXX89/O1vZjYQlaqq4FnKL38JZ58d/XhRsIn3LJYiENUNtFyJ\n6p6abZCtqTHbPfIIHH10Mue87z4zA8gnFXlnJ4wYkezv4HTyr7wC8+dDe7v/tk7m1n33HXhtcV1z\nq6uDhUl1NWze7JQULT9XVovFkiEpL57BIGr1M6/turrM4uQf8ut4s71uTjrJ/5ynnTbwnE6+pCje\nOo66yk/QxRUM2QKsri54+6oqmDrVCAe3EAkTDLW1/bOwqioze5s3z3/7iorknykrHCyWIpCUF89g\nEFWw5SsA/dJg+OEcK3v0HVVNl1QhJC9h6HT4qZSxG4gYjyV3XYhswRBGUxPceKNRRzltBfj+9/3v\nd1dX8s+UFQ4WSxGI4gZarkQVbPkIwKBZiR9tbUaF8/Wv5++SmkQt5iBhWFUFN99s1GqOADrpJPMa\nJhicPE5uW83BB/fPrO65x9yDIJtDQwPsvPOH3HFHSCKnGFjhYLEUgaBkb3G8eAaDqIItbLtRo4yH\nllsFFOb+6UVDA6xb579fZyd88YsmWruYQWFhwtAxPDvcfnu0a3XMvm1tsHCheT9yZP8Mqb4+3Cuq\nvX0T5503AtgafsKIWOFgsRSAX8Rq0rruUhJVsIVlO73gAujtHagC+vzn40dCV1UZTym//To74a67\nTLqKKGomr99MNTzyOO5sMJ+o71mzTHW4zZv7vws+hgJbOeqo73D66d9jypQpTErKFS6J7H3FWiZM\nmFBIckKLpahEyVjqZCudNy//bKWDQdRsrH7bpdPeWUjr6/3XOUtdXe45W1rC93NnRPW7z17tbWw0\nr2HXumGDaX+Uc7a3q86erVpTEy+TLKhWVcXZvk+nTu0d0E4SyspqZw4WSx5E9ehJQtc9GEQ14npt\n19VlvGu86O0Nr80sYmId3DEV++wTbLR2k20Md2YKq1bBD3840AbgNQvw+h0dI3p22+vrjZ3BmQ12\ndBj31muvNe2NnzBQ6elRoscnCwce6BMpWCBWOFgseTCUXVWjElWwZW83b56/KmTrVtOZ1tb25yHK\npqLCCAbnmH4dsx9uY3h2HEYcgjLPunn9deNh5JzLbzswcQ/G3VcBv069B1MeJxpz5kTeNBaJpM8Q\nkTdE5AUReVZEcqLWxPDvIvJXEXleRA5P4rwWS7EIS3sxlF1Vi01QviUwnaNfWgwYeP/cMzSvam9e\nOPp/9775JPSLknm2ogIeesh7JplNZWUf48c/x3HHfRljK/AmlaqO1c5bbom1eWSSzK10jKqOV+/I\nvJOAfTPLTOCnCZ7XYkmUFSsGJkSbNctE2a5Y0b9NUgnnkiZMqJUi11PUJHq1td7fu+9fPt5NjtE8\nn3292lFo6nKH3l7hqafu5+WXf8dRR/0XRkA4ixGYt90mnHVWPDXRvHnw1FOxdolGEoYL4A1geMD6\nBcAM1+dXgBFhx7UGaUupaW8PLv3oGB3DSkQOhuE5zIgc1cicVFuCynqK+K8XUV22zBxn7lzzOaqB\ndtgwU1a0pUX12GPjG4O9fscgY3g6bdZfc02PivSFGo+PPrp92z1av94YrY87zrx++KH5vqVFtbIy\nXlvr6kwp1blzVWH4Gk2iX0/kIPA68CzwNDDTY/0DwNGuz7/FlAn1OpatIW0ZNKJ0BA5JdraF1mAO\nE2pB9YzzEWZR2htU9zmdVp0zx7/jj9IxZy+plOrllxuPony8hLKXhgZzfXPm+HsppVLt+ulPf1Zr\na2cqtIcec+rU8Hvb0eF/vrDrN/fzsF4tI+GwZ+Z1d+A5YHLW+sjCwb3YmYOl1ASNVEWMS
6qbJFxV\nkxAyYUJt9uzoQs8LtzCYOze3vfX1qrNmDbwHYbOrRYtMBxzUpqBjeP0+UTvS6uro24qo1tX1KfRp\ndfVWhV6FzQrrFSbq2LGf1+rqNoWwmYPq9OnRfs9ly+ILh/5lgmoC/XpSleDWZl4/EJHFwBHAUtcm\na4HRrs+jMt9ZLGVF3ECnQl1Vo7rEhhGmF1+1Kn8DepjHj/PdggXwq18NDEQLCgR8+GH/1BKtrSZl\nRnYwYZBh2Yw7g0mnzfkvugiuuy6aoVoVOjqMHaC7u4vdd/8vxo+v5+yzd+Koox7i0EN3i5x++9FH\n+6u4BXH00bB0KXzmM8aA39sb7fiJUqh0ARqARtf7PwAnZm1zCvAbjO/WJ4DlUY5tZw6WUlNqW8Ki\nRaq1td7nq60166NQrJlDkLoqaHHfK7/ZVZjKqKamfzayYYN5veqq/FQuoHrQQWbkvmiR0e/nc13p\ndO+AexX0+8W9137337l3c+ZEPU8yM4ckhMNYjCrpOeAl4IrM9+cC52beC/ATYDXwAhFUSmqFg2WQ\nyFfNE9duEGa0BbM+inopTKgFdYZBQi+Ozt+9NDSEd4JRVUYNDeGG9Shqotra/u2HDevTiy56Tmtr\n21SkU6OohGCgajHK7xe0fz4sXerYVPoybfZqd5kIh2IuVjhYiklQZx7XlhBXoMQZkUedsRTDWymu\nt5B7mTUrWpujzgSGDcudjcyaZfaPM3rvX9ZrRcWuOm7cXK2s3BppH2fkn++MKu7MwYv2pSu0pf4r\nemzl42rsH1Y4WCyJkbSnUdxReZwRuV+H0t5u1BrTp/erSxz1i59Qiyv0Wlr8jcZhS319NKEWXV1i\nhFWU+z5w8Z4V1NV16623mga6n4cogjrfGVXBqknXRbcwQ9NsLppwsOkzLDscSRmBHaKm0nBnA33+\n+egZO70MxitWwHHHmeLyDnfdZQyujz/ubySPa0BvboavfjX69m6iVic74ADT7ijG4WuvhUsvNb9P\noUFuHR1VrF1rusDsHFGqcMMN/hl188m42tSUQEZe10U3cw9f58cFHCwYKxwsOxxJ50WKEkGbXf2s\npsZ0QFHI9pJyhJtbMDi0tsLxx5sC9kmkBU+lTK2EBQvi7xs1jUhQ6m8v7rkHjjvufe699++0th5I\neKIH/4hj92+QLTgvucQ/8aDxalNaW6NFM9fWmupuBWfTdj1sKbayhM9wAo/QQxVtNNBQuZW23t6I\nKQqDscLBssORdF6kKEVvsmcqfknnvMguDrR4cXDai87OZBP/7blnfvtVV5tZWJjrZhx31c7OPr75\nzX9n3boLgRmY5AuN+TUQk6V19Gh4++3cOg4DhEVHB9zTX/CheY+P8vW2g4CmSOfp6jLFgAom62Gb\nxEreZU8WM401jGVs7xqu47kEToS1OVh2POJEQUchSrBXkH46yOMlnc61g5gUCcFLIR4xbtrbjSE4\nH5sD5HoahZ2rP+2Fn/fQZh0//jq97rrrdOnS5drUFM3LKGhxezF5ttXLQCWiy5moTazXNJtU6NEU\n7b7tHvBcRfGE8HN5i+DiNQFUE+h/B10ABC1WOFiKQdD/q76+P8dNHIIM3GEeP9Om+QsILwNmS0uw\nQKmtzRVw+abniGN4DUpZ4WeIbW9X/dWv+vTCCzfozJlP6D//81d0zJj9FDb4HKdvwHGc+55EugzP\ntoZYvdtJaQszdB5X6CK+pMN82505ZtCDkk+FJSscLKWm0Fw/5Y7z/6qryxUOheZHihPs5QSo+blz\n1tfndvRhg0e3y6f7WvPxzIrqylpTo/rpT/t30u6Rc19fn7766qv67W8v1pqaVhXZok5KCpEPderU\nS/Wii36tjY3dmk73hbbZ8dpKSkAMGOXHdEtaziRtSrV73+swtza/KZqXZB04zRqwWOFgKRqlzN5Z\nLKIIt/Xro5d9LIQwtdPllwf3OVdfnXvM5ctNeUuvji27TKnfuVMp06kGXWfUvnHYsOBEeiJ9euqp\nf9YzzjhDR4wYoZBSk5vI/977ut36/LjLl5t2ODEPtbVG+BcUqBY30COd1vZFd3i3u6XF/4GrqfEP\n1gjSdXr8QFY4WIpCPj77g4WfAIgq3JK2PQTh16a5c1UnTAjub2bO9L/+7DgHLxVUUOdeVWX6KydF\ndjZRopgdgRR8rk0KM3TkyJE6Y8YM/fKXH9P6+p749z7kx80WKMFpMiLYB6IYeLL/JE6wSfaDGSeg\nw1dahf9AVjhYikIpO8xC8Osjli6NLtziZmAtFKcznzZN9cADTcccJbJ32rT8zxl14OuuoZBN9r1O\npUy6imOPNdfT1tar//d//6fXX3+zVldv8Tx+fX2nvvjiX7Wvry+0Xb73Ps+Ri9P+hroehV6toVPr\n2OITQOY6VJg1XsRY3N0P4G23+Quv2bODf4Sqqvz+eFk/0GGQSMpu68pqGcBQKH8ZFMR24okm+MqL\n7BiG0aPNv88LVeOCmiQvvgjnnz/QVTNKEFdlZf7nDHKzdaNqMoCuX5/rdpodILbXXn2MHfssf/rT\nEyxe/CQXXbSMDz/8EICRIx9j3boWKiqq2bq1OhM8JixZUstBB42L1C7fSnqLF+Ob/jQgQGXSJHh3\ndQeLR3+DNezBWNbQzD28wMczMQLVtEmahgYZEOjG7YuD06FeeaWJ4HMCIU46ybz6RVfOnu1/LPB/\ncLN9mT0vsP8HenvOnDeDTxQNKxwsA8jrT1tiwoLYurq81w2mcOvoMMFp+dQyPuWU/M/b3Aznnhtt\n2+5u7/61u7ub559/hnfeeZI//vFJrr/+KTZv3gzAPvvsQ3NzM1OmTGHKlCmMGTNmWyS4V/CYu11+\ngW++feHSpXmPXOoeXsyZFXcA/T/AthiB2hmsOfWbjJ126MC2hoVB19QMvFm33x78YG7caIIn/Iph\nV1SYUHHnerLDsoNwBWWsmzNnQ/DG0ShYOIjIaOAXwB6AAgtV9UdZ20wF7sVUjAO4R1XnFnpuS/Lk\n9actMatX+3eyXV3mP+slILKF29tvB58nbtCSOz1GdkDV/PmwaVO844HpK844I/5+Do5uIgrd3aZ/\n7erqYuXKlTz55JM88cQT/P73v6ct00kecMABzJgxY5swGDlyZM5xoqToyA58C+0LOzrg5z/3P2B9\nffDIxaejr6OTM7tug0PHwpmHDlw5bpw5rpeASKdzzxc27R4+3IRK+wmHzk6TY+PGG83D5ydZS0QS\nM4ce4GJVfUZEGoGnReRRVf1L1nbLVPWzCZzP4iKoQ8qHoD/tffeZkWVS58qX0aOD11dVeQuHvj5T\nQOb2203bx43zz+nj9d/3o6PDdP7XXms+b91q7plT9Obgg/vXxaGuzuRJKuQeL14cfVuRHu644zqu\nvfa7dGQ6sIMOOoizzjqLqVOnMnnyZPbYY4/+HTo6zM3M84HIVlcF9oV33hkcFq4aPHLJZ0o8Zox/\nNSKvkdK4cabxXp1/KgX7728eiKlT/Y/b02NG
N1de6X8tJaJg4aCq7wHvZd5vEZGXgT2BbOFgSZjs\nCl21tXDOOfDtb/cnJ3MTJkjc62+80Xz3+uuwbp3p8E48EUTMc+3u/ArOF5MwF18MN9/cL9xSqf5+\nZd68/rbff7/5j3sRZZbkCIXvfS83HYbTB02eDF/6UvTRu4MIfOtbhd/bOAniVCuBh/jqV7/KlClT\nmDx5MsOHD/feODtZlHNT77vPTMkiCoycWUZHh9H1Z+//4IPBjf/EJ4IF05gx/jfC68fu6IBTT/X/\n4c4/H66/fmAbTzrJX4B1dsLJJ5uZwcUXmwfRi3Ix7EGy3krAXsBbwE5Z308FNgDPYyrCHRRwjJnA\nSmDlmDFj/C30Ozhh6YqzXTfzyfXf2Gheg1whB8O9NYqni+PSGFQ5rKnJeOjkE9Ph+NRH8USsrIy2\nXRQHlTjBie3txkHGFIYJP2dd1VZtmf1U+A8a9vA5LlhxA2SCHtLp04MbH1ScOSigxc9NK8wH2Cvn\nRlAcgzuascgugcBKTaI/T+Igpj2kgaeBZo91OwHpzPuTgdeiHNO6svoTJTjJHUwU5AG4YUN+hUvi\nPstJRV3H+W9F2TZujYN8C73EWbyEbtT4jc2bN+tNN/1ea2vbtKIiWuF7UBV6dF7Nd8I79LjFDKKM\nIMIe0oU0wHciAAAgAElEQVQLg8/hV081rJqQX8m6uMFvTU1mJBLFPzfpWrRZf6wKeFoj9K9hSyLe\nSiJSDdwNtKjqPR6zk82u9w+JyC0iMlxV1yVx/h2RKOoCx7vPee+3zZVX5p8XP+os2E8LkY9aKo7R\nPMxG+Oqr8WscFFpHwE1trdFI9PYGG2WD3HePP76PW299mD/+8Xc8+eSTPP30X+jrewuo9zijZl5z\nU0030MbYrlXQtTG4sEXcYgZR8qCHuaBVVwcbiM44I1dvetJJ5jr89Ptg1nk9wFF9gN1tXLcuml0j\ntjU+AI8/1iFwaPiO4SThrSTAfwEvq+oNPtt8BHhfVVVEjsAkYF9f6LlLSZC+PmmjcBSiPLtOx60a\n3EGuWhW/cIlDFPfWpIvrxPlvBd0nVaM2PuWUeAIqn0IvDjU1xivI3d6DDzbPzyuvmP5l113htdfg\n4x83bVy8ODhN96ZNrZzxD7/ijMr/ZuaoUbxw2Fz+86Wd6PDYvpouBKWL3BteRQ/NuEYTfh163I4z\nyggi6Ka2thrvnccfN/7AnZ3GyFNba9w/P/95U3TigQfMDdu61QiMvj7zOQi/BzhukYnWVuONFNWI\nFcsa74PPH6sSCoiMcVHo1AM4GjMceR54NrOcDJwLnJvZ5jzgJeA54E/AkVGOXS5qpSQSKSZN1LQG\nLS3REr/lU/Iw6iy4WCrWKOqgKPcp7kw+3xKRDQ3mXnu1N8jm09DQq/7F5FWhV6+p+Y72ZXaeWzNX\nxbO2sFEdzeKWAamm02zSJtbrciZ6q0Hyualxf+Swm3rssWYbJzWFUzw6O3Nikg+wV2h40LHmzi1t\nh+Bzz2z6jBKRZCLFpAkzijptCFNxvvtu9CLv7o4uicyexUhT4XWfgq4vroCK2ze6z+OXtroQG0aa\nzdrCjG1f+NcWVk2zSVuYMSDVdAsztIPa3MZGSdkQJf9HlD9DR0e4hT9KjpQ4S319+APsHoEsWqS6\n007h1xnXiJUvPn8sKxxKRNCAJpXKL5FikrS3m2cklTKL32DFb0DjpIKJOgCrrlY95xwzAr7qquhF\n6gc7X1NQzrN8BFR2Sv3aWnP/58zxv9bGRu97dfPN6zWV6sq7j2ti/YDOvYNabfLLeJq1bUEdupMs\nKmhEPWxY9FFz1CR39fX5T3Xdx8incEdQG0udfKzIMwebPiOEIFVoUExOa2tp3JXr6mDOnOB6t+Ct\n4vRKBRNEKgUXXgg/+lF/mooohuUg9W1fn3H/LjZBRezzSQvi3M877zSu/WvXmnKa779v/qFeaHc3\n99yhHHXMezz55JPbltWrZwDfiXF2BYR0Wqlq3cgSPkOK/kALz9rCtFFFT862gDGE1NTkl7Lh7LPh\noIOM7rura6Dxt7o6xjVlEPG/gQ5BOVKyqc8Y5Ssqcq+vKVqJz8htzLatFNsYGdcuEpckJEyxlqE8\ncwAzyChn4urOC4l3KEZxnThs2BC/dkOY6+3y5fHun9CjV3GlHml6d91555319NNP1y984UHf9NVe\nSy3tOn1at7bMfko7av1VMaGqI2f59rcLV4MExRIMG2ZmGGE+zEmVncv+cR07RRJqnqjT4FLZHjzO\nMx56NIH+N9HOPOmlHIRDmL4+igqyXMju7IKKszj/PxHzWl8fLAj93MXdlKq4TjbO/yf73HV1uf9X\n5x459k6//3Y+tZUdfX8f6Ku33qq9vb2qGt+G0cQG7Vj06/i++F5LXV0yqpCwjt1RPTn6t7lz86qP\nvO1hi2Ikq6tLvjOOEqNQ6qIoWTaOpOIcCj5AMZdyEA6q4fWBfTuDMqp/sHSp+T85A650OtjZI5Uy\n2zop5v1SzbsXt97ea9Q9GLaHoP9pttp5W97/Bv9rdP7bLS3RbLEDO3WXvr+ubsBoevmyzoHPWE2n\nNrJJ02z09io69lhj+AlqbKRGJdRZ5SOovEbSUX+EZcvCvYe8SuglQdisYJCNbCQUIW1tDhEIckl+\n+OHoKsjBYtkymDJlYBsd3bvkxkIBufaUsKCvmpp+vb0Tl9PZaZZUyqhGP//5ZGpFxFHlBuVrq6iA\nhx4yrvxeLuNe9PQoN930Nk899S5btzohO14oNWylm2pvfX9HB3zta9v89SdxDu9eehWLx17Mmndq\nGPveSpp/fjra1sZiprGGsdvqEKTYCo8Dy5f7B3iF6e7T6fyCrvyIG/sA5mZnB7q4/2xLl8Ivf+lt\nL5g0CX76U3MPvX7gdBr226/w6/IiLEZhKBRFiUISEqZYS7nMHIIo9iCh0JQT7e3BM/C6uoHqk4aG\n/Aq119f3z6i9ahs7s5FC71UcVe7y5cGDS7eXUnR1d4/CFQozFDp8t0tVd+tsuSVc35+9ON49cXRN\ncSqSzZ1bHBfLQvx7g374ILfQpNNQ5EO5TJFdUG65lYqxDAXhEPR85ust55CETaulJbizF+l3SXX0\n7HETxblzly1aFLxtITaHOKrcKLED7v9pVK1IbWWrfm/iDfrW9TfrsGH+OYuahvVpR93O8W5k9sU4\nD0CUDtYrus6dfXD2bGNkKqbfffYDG0XvVmigy2BFoQad28nmWEqh5RJSw2GNJtD/JtaRF2MZCsJB\ntTieOEnZtMJcx2tq+pPPRR34VVYONFS7k1pOmxa877HH5v9fjp1wL8QDqKlJdfPmLv3jH/+oZ5xx\nr1ZWtoX322zQDswUaHn6GE3Xd+ds09iYuZ5ly6Ld0KCLWbQovJMN6mBL3XlmB42FPVSplH/SvHzO\nWUzhl33OoD9ovul+88HWkC5fJk0yasbsIjTt7WbJJ39QWB4yJ+1NmP7dUQWHpbK/557oyeTOOcdc\na7a
qdcUKk94miF12CU8p43dNcVS5q1d109bul2JGqarqYf/9L2LEiEWZKme1VFS877t9mlaq6M7Y\nDTqhFSbxOz4YNpo7F7zJg4/WACZP0xlnZK5nRa1/cEUQ7ot5++1wn36/QI2oSa2S9MfPzmJ40EEm\nH5JfGbzOTrjgArNdvsUr4mZOTIKwP+hbbxWeOykKHr9xhb8hLBZWOCTEww9HL2wfhSgdoV+m04su\nMtuNHm2SvPnVSBeBRx7p73ij9GE1NSYIzquQ0Akn+Nd/dzjllOD/clD21nGju2ioFVo7c4OrsvvH\nceuX08AhtNKYs20tHYzv+Rptbc9w9tlnbyts89artZwwZSM9WkEbDdTThlLBF/klk1nWbwx2Udfb\nytnpuzj7f7IuyLkh+RSNdl9MFEOvX2WiKCOMffdNLl2uF5MmGSHklMrzMh5v2pTfCGowifIHLYXQ\nSjJFcBZJpew+EfgRJhvgz1T1B1nrJbP+ZKAdOFtVn4lzjsHIfBqHuA4KUUf8fhG9o0b5Dwqvuiq4\nrZWVZnnoITj66PBrc3PllWbufNtt/cW5TjnFvIY9o2H1kAMHusf18LrsS1Xns8DOOftWSQ/NzVW0\nt7fzpz/9iTF/uIIqvKuH1bGVBy8Zw67XD6xJvMdvb+fd+m+yuO34XO8gP/y8Twr507o7+7Ao2KYm\nf4+jsEynL71kjp1Uulw/nDD+0aPh3HNzy+ZBfiOowSSfsqPFoJAUwSEkkbK7EvgJcDzwDrBCRO7T\ngTWkTwL2zSz/D/hp5jUSSdYCKBZROnOn3C7ADTcEX09YzQLIv+/p7TX/+enT+1NG3+CZbD2Xj3wE\n9tgDtmzp/+6uu8yMIkj7UVsbXg85cKC7pYOHOIolnOCZFuK+zlP53JQedn/6acb19vKv4J9Com4a\nux42K/ckq1dT176eM/l1pHsB5P64jqTP909bXd0/9YPc/OROPVgR/3qwDmGzjvnzzSjBiyQ66+wR\n0Jo1/g9Jvi6egzVqjFNUpJiMG2dShBRDQBRqtAA+CTzi+nw5cHnWNguAGa7PrwAjwo49YcKEkgcb\n+hHmUhrktdTYaDwUo1ZucwgLvis0ONY556JF0bMW5HPO2tpoNsfA7K306DyuUMU7LcQW0PaKCu2o\nrtY+0Ewmf/8UEvmUhvQzHvu5jOabHM7LeJmP0TVf99IwI3cUvB7eoKjLfFw8B9NTqRzOr2qe46w/\nTdlkZQWmY1RJzucvAj/O2uYB4GjX598CE8OOPWHChNguw0mVonQT9Rnw2y5qHxF0Pdl9Qr41BbzO\nOX16MoLGbxk2LNrvEPhbZ1JP+J2kL26jvEYWcTrTykqTO8UvqCMs30i+bYxLWL7yOA9jVIJGdH4P\nWtxrLbdRYyk9pdzn9rgH221WVhGZCcwEGDNmTCxdfjHUT1EcPlT7Z7Y33mjWvfOOUTt2dcH550c7\nV1ubqQaWraHwmtknlZDRubdxg1vjcPHF0Wb6zc0we3YfXs4Wle4qZR74BHr7s3GjKQM3Z07/d6mU\nSbGaHU7uRUVFsF7PK3I5lTLHFQlO6euQhGpn0iTzA8ybF2+/QlQjQfrBujrz6hX1XAx3vmIzGJ5S\nDkU0RkMyBum1gNuJc1Tmu7jbAKCqC4GFABMnTtSodp+kS1E6BJVn3LrV9C/Z9gN3hP+8edHVgXV1\nRg3s/t/4CTevUplh/ZkXDQ3GoPzYY/H3jYIISE833P4/nnrhDz74gKVLl25LX71pUy3wCFANNFBT\n001NTQVL9B9ItQUYhvNh7lxjJN3me4pxHY2iw+3uDnfNyqavz4wW6urMjamrC65vHFcP76d/D8pX\nHpTSOl/dfdCIrqPDCOT99y/MxXN7SVFRCEU0RgMUPPXACJg1wN5ADaYU6EFZ25wC/AYzwPsEsDzK\nsSdMmBA5Qr5YEevnnBM8+/bTHLgTtBWqzw+aJWcn1MtXc5FdvMZ/8Y8K9rz39T3aUv+Vbbq23oYG\n3drQoN9vbtaPfexjCiZ9dX19vR5//PH63e9+Vx977Pd6221d/TP1ZSuS0aF5LanUQB1hUsacKEt1\nterEif4PUSpldH5R1BVBus+wP1GSKa1VS5M+ohwqSA02Q6FMKMZF9VVgNXBF5jt3DWnBeDStBl4g\ngr1BtT9COorOvxilKNvbwztdv2ylzvMZpsYuxFYXlAI7lVJduNAYgmfN8s4xlE572zynT/e/7lra\nNUWbQlhd40zfw4eeeYU2gJ56/PF6/dy5+uo112jP1Vd7d0yF1tCMuuQjzZNYnLzoQdsEpbkOu0de\n0r/YxtNS5Dwqh7xKg43PPSgr4VCsxZ0+I8zuk+9AIsiAHZaXKGhxCySv/+WwYQNzoAXVVvASblHr\nIof2Gxtyb0CY19BVXK2LOEunc6dOr75HFy3cqssWvqRNfKhpNg9IL/2ku2i9a+lLp805wzqsQjvr\nid7n971hhXj4FLI41x8mKLw69KgPfymNp6UQRuXgLTTY2GI/4eQzkAh7tsLyElVWRq8hnaRwizKY\nFnp13vRntWXRVv/jZql8nBvQMvevkb2Geuvr9cl/+Rdtr6rydBsN9CLyS5nq/sEKUfM0NJipU5Sq\nPNnSPB8Pn0KWhgaTHG/69HAvp+wHuhjT5iQohTAaTG+hcsEW+wknzkAiyky8pSW45kh9vX+/4xwj\nqmttHOEWZTCdZpO21P6Lzk19V0W8VT/u2AH30jFsD21q8t4nu0B9H+hvycOVtKbG6Nw9G++ShlEq\njIUZa5YvNz9UUMWi7GCMDRv82+e+hiSFyLx50YShZ5bBHVz/btkGCaXsTiRBU7ng1OBYsMA4oixY\nYLyUvNxYo3jCNTf710h38hLdf79x+KgxeddoaOjPaPDCCzByJMyaBVdfbV5HjDAut9k43kdNTcax\nRMS8emVHiOKkUEUPzVtvZ1znSzSot49qA+2MJderI9XbxpKLHqGpSamv70XoI81mmtiQU6BegKl+\nSaWC6Ory9/Zxe5s0N/eHhOc0NGUKvixdGn7jRPyjgcG4nl14Yf+Pk0r1/6he1NfDm28Gb5NKmQdx\n2TLTntpa/23BRFo77nlBZHvjBN2jUkbrWrYvkpAwxVqKmbI76kw8u2qhO011toePe13gzKR+q3bM\n+Z7nVCJ0ltzeri2zn9J0TafPwLJP69liSkmCdlCrTWyINAtwll7Q2w88UIcPH60wQyu5Qm8NKlgT\nVgCiunpgNaG4I+MoU0K/GxfXoB3VMD13bm7bnBmIl/G4vV115szgcy9aFM3m4TUbsPp3SwZsmdDC\nCIqfqK42M47OTv+KgKpmVuCOq+jqMsupp5pgON+ZSftW7pn3Ememv58TyBAYU5OJ8mvuruTrXa8B\nuSPRetpYy540sRmAFFtdOYYqB+YYypoFOLQCXW+9xX/ssw+7Tksz5oIz2ftHP0IW+sQZ+KV9dTjp\nJDMqHj4c1q2DX/wiXpbRsLKM4H/j4gYKOdPGsOmZU181Stuc
9o0a5V++U8RETjpTyKA0116zgaB2\nlHvWSkt5koSEKdZSjJmDM8CcMydYXdzQEDzwClPzBqWkyNH151EGbTkTtYn13sXns0+YSml7zTDv\nHEMeS59zEdm5ggopZu8cZ9as4JlDKpXsaDeuQduZNhZDjx90zNragTEN7e2m7alUv10ln9mAnVHs\ncGDLhMYn+39SV9f/6tdX1NdnVVfMSJe5x/5OxcfHX8T8zyPnCXL7nQb51WYd0DepXNbSXVGhvQEd\n4jZjcpDKx6vUnXtxilEHFW12bqjfjamuVv3c55L1OonrChvFpdUdOBYniVfUoBd3512IN0655B+y\nlBQrHGIS9D+pqQmPZ0inVZsau3V5+hjVdFpbOFPTbPbdNqhCYo6uX8SMqMP8amOMgPtcSycRvImO\nO864UgaNlo89Nrhjc4pRB7ljNjSEe/gkObqNG7fg7jT9Rt233Zb/aDxqKHoSnbf1YtohSUo4bFfe\nSkGEqZ7DKjG2tsLGLVWc0HoXna3dNHM3VXh721RVmXQ9S5ZAU2MPabYg9Pp6/NDQAL/8pTFgtLaa\nv29rq/l8wgnG+BHFi8WFuJZaQhLTpdPw5S+bYg1B+Wp22cVs60VDg8mXc+aZcMgh/je0vR2++MWB\n3kXZZF97Ifi5gaXT0NgY7OHkdn+bMwe+9CX42tdg9uzg3yoI9zGnT/fX/Tu2j0Kw+YcsBbDDCIeg\n/0lXV7BHopseqriH5m2G3iY2kGZzpvPfQlNjz7b+ZdLBHbxbMZoFzGIuV7OAc3mPkUxi5cCD9vV5\nd5IwwK9W/dwVs9Bol9KPY+AMEkBOhr4oLpNhx5k8ub+D/Id/8HfxTKKDBG8f57//Hd5/P9zvua7O\nlNL89383hvTvf98/WV7U9jrG80MO8a6KBsl03mG/Q6mqlVmGJklMP4q1JKlWCpphR9F0bNOeZBmT\nc/T+w/aIXnTBSfoWYKTtE9ElkyfrkUceqZ+orNT1oJsw7qaxA89EjF7fyeeTrQ7ZsMH/RsTJ0RMn\noi/J6N5iFPOI4wobt73FVvvY/EM7JOworqxJeeEF1T+orjbBbKeeGp7+uoG2AYFjdXQOLCvZm+7P\nJR/mDtnTY0781lvQ0uLp3rlFlZ8vW0bvEUcw5eKLWfHJT/Kp9eupeOstk987KOVzTuMb4OabzTQp\n291x2TI48cRcdZAT4bdtOpQZhd9558Ai0h//eP8+jirnuOMG1hN1rvmFF/pH6EnV4i1WLdk4rrBx\nR+PFLjXpldc9iZTclh2DQiQLcD2wCngeWAw0+Wz3BiYb67PEkGoHHDAhUS+8sEFv+3oTYHb15Me1\nvtI7yMwvcMxz9BjBU6YrldJvz56tm3wCybrSad3y97/nXoyfq+OwYf7VyZqajKU8e2T96KP+bayv\nV/3ww3g30mlfWG4R1cJHt+3t5pqi5GnK3i/KLCOOI0A+o/FSuJra/EM7FHH62KClUOFwAlCVef9v\nwL/5bPcGMDzu8SsrJ0T/D0b8s/v+T5w/acZdsz+OYHN4HIGfOqCjw7+jziy9oOdUVurMww7T9lRK\nu+vqtC+skwiLyvXqcBobzWt2J7RgQfTrcW5g0gU28u0gnf2CEtUVGk0cKZFVgR267bwtCVIWwmHA\ngWAa0OKzLi/hUMFh3v/FVJe2LNrafzcKHX35dHhR4wj8Osi/vf66doX4/feBdjc397cjrJOI2jm7\njxU1M6nXkq1Hj9rpx7UlxO0go9oC3OfKZ5YRNLOpr+93380n7sFiKQJJCYckbQ5fBu70014Bj4lI\nL7BATSlQT9w1pGGC5zZtnRWs+ep34fWMSfaHPxyoe49bH9RHr5xjTwiipoa+ujoevfhiFl9wAU8+\n+SSHrVrFAkzBSz8EqHIS10WpRxu1dq77WLffHp7iwo/q6oF69KjukXFtCXFr8Ua1BTjncmwSHR3+\nHkJetYfD9PaTJpljjx2bvL3DYhlEQoWDiDwGfMRj1RWqem9mmyuAHqDF5zBHq+paEdkdeFREVqnq\nUq8N1VVDulIO1z6PbRpoY2zPKzA3pOOOWmi8wFqs3RUV3JlK8ZVNm9g6Zw6NjY0cffTRfG30aNKP\nPWYEWBDHHBP9ZPn4rhdyfdmG0aidfrGNrVGvqarK5HYaO3ZgIiwv/O5fWN6iYhQvt1gGmVDhoKqf\nDlovImcDnwWOy0xpvI6xNvP6gYgsBo4APIXDgGPjJRoy6aiJ4E8e1Vc8qMOLQFtfHw9+8pPcveuu\njG9s5CNHHUXlP/6jEUxPPWU6kCC+/nVz/rPO8l7vdtn629/ie/cUcn2PPDKwc4va6SfhKRPkqhZ2\nTalUfxt+85t4swwv8knsF3VwYrGUI4XopIATgb8AuwVs0wA0ut7/ATgxyvEPoD56cjm/xV3AxY8I\nKRb8Ygr6QHu/+lWj03cMo7W15vNjj0X3dBHJ9QpSzbWnhOU/8tJ1B11fUPsefNBbjx63qlI+xtaw\ncwRdUyrVn/5aNbrHUT7eRuVahc2yw0I5GKSBvwJvY1xUnwX+I/P9SOChzPuxwHOZ5SWMOirS8SeQ\np1E4rnBQ1b4//1l7dtpJu6qrB+QlagXtgMDEdb6JmWprg5PVZS+zZw/0ugpK0OSuORzFAB81T5BT\nlGLhwhC/3yJ62EQ1ukcVUlGDEfPxNrL5iyxlRlLCQcyxypOJVVW6sq4ub3UPAPPmwZVX5nytqvzl\nL3/hySef3LZsev99pgETGho4eMQIdtlvP0ZMmcKeu+2GfOMb3u2orfU3cMZl0iR47bV+NUxNjf+x\nHTXUiBH+NQSycdQ0fvn+ne+DdPRNTcXXo99+uymb5/e7z51rch0FXZObzk5zn7yux6kmd8YZ+V1T\n0LFLca8slixE5GlVnVjwgZKQMMVaJhx2mBl5feUr8WYLHiO33t5effbZZ/VHP/qRNjc36/DhwxVQ\nQEeNGqVf+MIXdOHChfrKK69oX1/fQFEcpMIIqzMcVLc46gyk1CqLwR4Nh6mBUqnyCjazNRMsZQRl\n6MqaPBUVxph35pkmI+aJJ5pRtTvFg19lLaBHhJ+8/TaPn346y5Yt48MPPwTgox/9KCeffDJTpkxh\n6tSp7L333ohf4jsINq4efjg8/rj/vkHH9breqBQzcVqps3lmG55HjzYzMr8Mp6rxDb1RK7blQ77H\nthXaLGVMeQsHN5/6FLz9tlEnrFoF++wDEyeazJpdXej8+WhPD3R301lRQVdfH8dv2cLKyy5j3Lhx\nTJs2jSlTpjBlyhQ++tGPxj+/Xwdwxx3BwuGyy0w+oyiqojjpqYtZOD6pfEdR8MqJVFlpMtX6sXVr\nfgIqbixFMY9drFxQFktClLfNYeJEXbkyk94668+kDQ30ivCbE05g6r33Qk8PDUA30CvCzaecwugZ\nM5g8eTKjRo0qXiM//NAUk/bq2Bsb4YMPzEjXESrvvQc//3n8uINUynSK2QFYxaBUevSOjtxC3A5B\ntpx02qTYHqouokH
Xbe0UlgJJyuYwNOo5dHSgTqBRpsCKtLZStWULp9x9N409PTRiLqYWqFflX596\nijO7uhi1aJExcBZaNMYLJzI2W3VUXW2ymZ55plF/iJj3V15pCurEyaQKMGyYUasdc4x5ff314o4u\n/QrkZBfDKZSgGIGqKjMa91tXrFlTKYgSG2GxDDJlrVbasmUL11xzDVX//d98c+NGGj228dXob9xo\nPF6cSj5f/Srce68ZyXvpeOPqf70iYx26u805Fy40qbjd6oI4AWlOx9zbawRcWxssX27eB80cktBl\nF1NH7xBk22hvh5kzTWrw7S3dtK3QZhkKJGHVLtYCqIjoT0aMCI4ziLPU1eV6lOTjbRK3cH19veqc\nOcGxC9keOQsXRkt77WYoec5E8YraHjOWDrY3mGW7hh0hzmHffffVFStW0PTQQ8F+74XQ1GT+lps2\nea/z0//OmwdXX+3rKeWLU4NZxMww/NRd6bRRIf3iF97X7aV3H2q67CDbxrBhcNNNxglhe/PksbER\nliKyQ9gchjU2GsGwapV/wfpC6ez076CD9L9B9XmDaG01S0WFcc31o63NXHcc9cNQ02X72TbSaeOt\ndP75RgDPmmU60xUrkj1/R4dR0c2bVzy7lBelsulYLAVQ1jYHnn/edAxtbca9sRgEdQhB+t+gBHRR\n6O01o/x02t9l9IADjI0hqkvpUNRlZ9s2Ro2CCy8sfpbTwXYlLYVNx2IpgPIWDr29/R1D1Dq+cUml\njGrIy20yyKffKzCuvt4YUqOomtraYLfdjIHVi6oq+N73zIjWb322x04p4xOSJLv2RLGznJZLmu1i\nxl1YLAVSkFpJRK4RkbUi8mxmOdlnuxNF5BUR+auIXFbIORPHSe3shZ/LpKOOePhhuPFGE+Q2d67x\nTlq6dKC6wI+GBthvv2D1Qm0tXHSRaV9trdkvSP3Q3BwsbIaC+2cpZj9DTf1msQwCScwcblTV+X4r\nRaQS+AlwPPAOsEJE7lPVv+R9xsrKgZXNamuNTSIo5UJdnVmXSplXxy3yppvgvPNyt29s9O6AvdQR\n2UFpjrrg1Vfh+uu94xqczjqV8lYvvPCCUTv19JhZTW2t2fZb34JLL/UWaEnUUBhsSjH7GYrqN4ul\nxBTkrSQi1wCtIcLhk8A1qvqZzOfLAVT1+2HHnyiiK71WVFcbAXHKKXDQQbD//jB1Kuy7r3dH7GTe\nPGJodZkAAAq+SURBVP10U/jF6YSD9hk2zBTWcXeo+XgDRREm2RTqdRQlU2mxyTfWohSePEFZX4d6\n9LVlh6cssrIC1wBvAs8DtwI7e2wzHfiZ6/MXgR9HOf6EsFiApqb+wu7Tp/cX24niO758uYk9iJDR\ndRv5+qfH9dUf6n7whcZaFDtWIyjLbj4FfyyWMoJSZWUNqiEN/BSYB2jm9YfAlwsRViIyE5gJMCFs\n464u491SUREcA9HaCq+80v/ZMUgGpbHwUi/kq46Ia3gcymqPJIy9xfbk2R7UbxZLkSm4hrSDiPwn\n8IDHqrXAaNfnUZnv/M63EFgIRq0UeNI4OYrmz4fPftZ0PEEGSQcv/XapvIEKOU+YOqfYaaKTqqlc\nbE8e60pqsQRTyLQDGOF6fyFwh8c2VcAaYG+gBlMu9KAoxw9VK8VdHJVBlJrCXuqFUqkj8j1PmDqm\nFKk1bE1li2VQISG1UqER0teJyAsi8jxwTEZAICIjReShjPDpAc4DHgFeBv5bVV8q8Lz54Yxcw6Kb\n6+u91QuFRrZGjcjN5zxudU4mcy2trebzCSeY1OJB65OKDg66t+Uca2GxWAZQ1rmVfL2VHKqq4gXH\niZh4hEsu8feIqa+HtWtNR+xHPt5AfkVtLr7YrA9SAUU5T5gHTtw8Tfli8wZZLINKUt5K5R0hHYST\nfyeOcGhoMJ3T9deb4LIbbvA2SAYJBoivDw8y0l51lRFaXukb4pwnzIgdN09Tvlhjr8WyXTB0hUNV\nFdx/P5x6an8nlEqZjtiPtjZTha29vX/k/q1vmc65mAbJMAO4o+KB/NM3hBmx4+ZpKgRr7LVYhjzl\nrVaqqtKVNTUDO/z6elNIxxlhO6qXV14xHklxq6wlXfbSyxMoTnrvfFU8Yeqc11+Hvfe26h6LZTtn\nx1ArHXKIsQ+88gqsW2cS1e2338BRqKN6uf12E+/gRXW1WeeVXC+pZG5BWT7jVH/LV8WjalRl115r\nPmfXm3aM2VbdY7FYIlDewqGiIhmde3e3/35J6NvDAr9ef90/IV42+ah43IKps9PkYaqtzc3DZNU9\nFoslIuUtHOIQNDrPNy13VMICvx56aOCoPWgGETd7qpdgcq7zhhuMcHBj00RbLJYIlHUluFgEpavO\nJy13HKKku3BG7QsWGBvE3LnJVAKz6actFksR2H5mDmEulFA8fXvUdBfZo/ZLLilcxTOU8zBZLJay\nZfsQDm4voRtvNN+9805uh1ssfXtQydCgmUkSKp6hWv3NYrGUNeXtyjpxoq5cGRgjnV+9hGIwWO2w\nEckWi8XFjuHKGka51AKGwfMEshHJFoulCAxt4ZBUeuikGCxPIOuiarFYEqYg4SAidwL7Zz42ARtV\ndbzHdm8AW4BeoCeJKQ9gjbFurIuqxWJJkIKEg6p+3nkvIj8ENgVsfoyqrivkfDlYY6zFYrEUhUTi\nHEREgH8Cfp3E8SITFNuQRPyCm6i1GCwWi2U7ICmbw6eA91X1NZ/1CjwmIr3AAjWlQAunVMbYoLxJ\npfSIslgslhIRKhxE5DHgIx6rrlDVezPvZxA8azhaVdeKyO7AoyKySlWX+pxvJjATYMyYMWHNK74x\ntpw8oiwWi6VEhAoHVf100HoRqQKagQkBx1ibef1ARBYDRwCewiEzq1gIJs4hrH1AcY2x5eYRZbFY\nLCUgCZvDp4FVqvqO10oRaRCRRuc9cALwYgLnLQ3WI8piseyAJCEcziBLpSQiI0XkoczHPYCnROQ5\nYDnwoKo+nMB5S4PjEeWF9YiyWCzbKUM/fUaxsekpLBbLECKp9BnbT8ruYuF4RCWRXttisViGCEM7\nfUapsOkpLBbLDoYVDlGx6SksFssOhFUrWSwWiyUHKxwsFovFkkNZeyuJyBbglcFuRwjDgWQTChYH\n285kse1MFtvO5NhfVRsLPUi52xxeSSy9d5EQkZXl3kaw7Uwa285kse1MDhFJxP/fqpUsFovFkoMV\nDhaLxWLJodyFQzKpvYvLUGgj2HYmjW1nsth2JkcibSxrg7TFYrFYBodynzlYLBaLZRAYVOEgIv8o\nIi+JSJ+I+HoAiMiJIvKKiPxVRC5zfb+LiDwqIq9lXncuUjtDzyMi+4vIs65ls4hckFl3jYisda07\nebDamdnuDRF5IdOWlXH3L0U7RWS0iPxORP6SeUa+6VpXtPvp96y51ouI/Htm/fMicnjUfZMkQju/\nkGnfCyLyBxE51LXO8/cfpHZOFZFNrt/yqqj7lridl7ra+KKI9IrILpl1Jbmf
InKriHwgIp4lDxJ/\nNlV10BbgY8D+wBPARJ9tKoHVwFigBngOODCz7jrgssz7y4B/K1I7Y50n0+a/AR/NfL4GuKQE9zNS\nO4E3gOGFXmcx2wmMAA7PvG8EXnX97kW5n0HPmmubk4HfAAJ8Avhz1H1L3M4jgZ0z709y2hn0+w9S\nO6cCD+SzbynbmbX9qcDjg3A/JwOHAy/6rE/02RzUmYOqvqyqYUFuRwB/VdU1qtoF3AGcnll3OvDz\nzPufA58rTktjn+c4YLWqvlmk9vhR6P0om/upqu+p6jOZ91uAl4E9i9Qeh6BnzeF04Bdq+BPQJCIj\nIu5bsnaq6h9U9cPMxz8Bo4rUliAKuSdldT+zCCuLXBTUlFbeELBJos/mULA57Am87fr8Dv2dxB6q\n+l7m/d8whYWKQdzz5BRAAs7PTPVuLZa6hujtVOAxEXlaTM3uuPuXqp0AiMhewGHAn11fF+N+Bj1r\nYdtE2Tcp4p7rHMyI0sHv90+aqO08MvNb/kZEDoq5bxJEPpeI1AMnAne7vi7V/Qwj0Wez6BHSIvIY\n8BGPVVeo6r1JnUdVVUTydr0Kamec84hIDXAacLnr658C8zAP0Tzgh8CXB7GdR6vqWhHZHXhURFZl\nRiVR9y9VOxGRNOaPeIGqbs58ndj93N4RkWMwwuFo19ehv38JeQYYo6qtGdvR/wL7DlJbonAq8HtV\ndY/gy+l+JkbRhYOqfrrAQ6wFRrs+j8p8B/C+iIxQ1fcy06cP8j1JUDtFJM55TgKeUdX3Xcfe9l5E\n/hN4YDDbqaprM68fiMhizLRzKWV2P0WkGiMYWlT1HtexE7ufWQQ9a2HbVEfYNymitBMROQT4GXCS\nqq53vg/4/UveTpfAR1UfEpFbRGR4lH1L2U4XOVqBEt7PMBJ9NoeCWmkFsK+I7J0ZlZ8B3JdZdx9w\nVub9WUBiM5Es4pwnRx+Z6QAdpgGe3gYJENpOEWkQkUbnPXCCqz1lcz9FRID/Al5W1Ruy1hXrfgY9\na+62fynjGfIJYFNGRRZl36QIPZeIjAHuAb6oqq+6vg/6/QejnR/J/NaIyBGYPml9lH1L2c5M+4YB\nU3A9ryW+n2Ek+2wW28IetGD+2O8AW4H3gUcy348EHnJtdzLGW2U1Rh3lfL8r8FvgNeAxYJcitdPz\nPB7tbMA82MOy9v8l8ALwfOZHGTFY7cR4LDyXWV4q1/uJUYNo5p49m1lOLvb99HrWgHOBczPvBfhJ\nZv0LuLzs/J7TIt3DsHb+DPjQde9Whv3+g9TO8zLteA5jOD+yHO9n5vPZwB1Z+5XsfmIGne8B3Zh+\n85xiPps2QtpisVgsOQwFtZLFYrFYSowVDhaLxWLJwQoHi8ViseRghYPFYrFYcrDCwWKxWCw5WOFg\nsVgslhyscLBYLBZLDlY4WCwWiyWH/w/tMdHGd3OaBwAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aabb2750>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYXFWZuN+v1+ruatNAAANJhEQWARFMwk8BkwiC7Jie\nzEhwFEY0wQjK6qAQwI6iI5FlUJxkHIJLR5hBMqxCYBhIcEsCwyoBDGvCZhKy9L59vz9O3fTtqrtW\n3aquTs77PPep5W7n3rp1vnO+VVQVi8VisVjcVAx3AywWi8VSfljhYLFYLJYcrHCwWCwWSw5WOFgs\nFoslByscLBaLxZKDFQ4Wi8ViycEKB4slIiKyj4ioiFQNd1sslmJjhYPFUmREpEZEXhCRdVnf7yMi\n/ysiHSKyRkQ+k7X+TBF5XUTaReS/RWTX0rbcsjNjhYPFUnwuBf7m8f1vgP8DdgMuB+4Qkd0BRORg\nYCHwRWBPoAO4uSSttViwwsEyghGRvUTktyLyNxF5VUS+4Vp3tYjcISK3i8g2EXlSRD7mWv8REXlU\nRDaLyPMicpprXZ2I/Dgzat8iIo+LSJ3r1F8QkTdEZIOIXB7Sxn2BfwR+kPX9/sDHgatUtVNVfws8\nA/ydcw7gHlVdrqptwDygWUQa87pZFktMrHCwjEhEpAK4B3ga2Bs4FrhARD7r2ux04L+AXYElwH+L\nSLWIVGf2XQbsAZwPtIrIAZn9FgCTgCMz+34LGHAd92jggMw5rxSRjwQ09SbgO0Bn1vcHA6+o6jbX\nd09nvnfWP+2sUNW1QDewf8C5LJbEsMLBMlKZAuyuqi2q2qOqrwD/Dpzh2uYJVb1DVXuB64AU8InM\nkgZ+mNn3EeBeYFZG6HwZ+KaqrlfVflX9g6p2u4773cxo/2lMB/4xPBCRGUClqi71WJ0GtmR9txVo\njLjeYikq1uvCMlL5ELCXiGx2fVcJrHB9ftN5o6oDGYPwXs46VXXPBl7HzEBGY4TI2oBzv+N634Hp\nyIcgIg3Aj4CTfI7RBnwg67tRwLaI6y2WomKFg2Wk8ibwqqruF7DNOOdNZkYwFnjLWSciFS4BMR54\nCdgAdAETcal18mA/YB9ghYgA1ACjROQdzMzleWCCiDS6VEsfA1oz75/HNSMRkYmZY7xUQJsslshY\ntZJlpLIS2CYi/5wxIFeKyCEiMsW1zSQRac7EJVyA0dn/CfgzZsT/rYwNYjpwKnBbRljcAlyXMXhX\nisgnRaQ2ZvuewwinwzLLV4B3M+/fVNWXgKeAq0QkJSLNwEeB32b2bwVOFZFPZWYh84E7s2wUFkvR\nsMLBMiJR1X7gFExn+ypmxP9zjOrF4S7g88D7GJfQZlXtVdUejDA4MbPfzcCXVHVNZr9LgGeBVcAm\n4F+I+V9R1T5VfcdZMscZyHzuz2x2BjA5074fADNV9W+Z/Z8HzsUIifeABmBunDZYLIUgttiPZUdE\nRK4GPqyq/zjcbbFYRiJ25mCxWCyWHKxwsFgsFksOVq1ksVgslhzszMFisVgsOVjhYLFYLJYcyjoI\nbvTo0brPPvsMdzMsFotlxPDEE09sUNXdCz1OWQuHffbZh9WrVw93MywWi2XEICKvJ3Ecq1ayWCwW\nSw5WOFgsFoslh7JWK1ksFsvOTGcnLF0Ka9fCxInQ3AypVGnOHVk4iMgtmFw276nqIZnvdgVux2Sf\nfA34B1V932PfE4AbMSmVf66qP8y3wb29vaxbt46urq58D7HTk0qlGDt2LNXV1cPdFIvF4sOqVXD8\n8dDXB+3t0NAAX/86LFsGU6aE718okYPgRGQqJsf8L13C4UfAJlX9oYhcBuyiqv+ctV8lJs3wccA6\nTDKzWar6l7BzTp48WbMN0q+++iqNjY3stttuZFIhW2KgqmzcuJFt27ax7777DndzLJadHq/ZgSrs\ntRds3py7fVMTvP22/wxCRJ5Q1cmFtiuyzUFVl2MyS7o5HfhF5v0vgM957HoE8FdVfSWTDfO2zH55\n0dXVZQVDAYgIu+22m515WSxlwKpVRgjMmQNXXWVex4yBBQvMjMGLvj64887it61Qm8Oeqvp25v07\nwJ4e2+yNqyIXZvbw//wOKCKzgdkA48eP99smn7ZaMtj7Z7EMP52dRm3knh20tZnXa66B7m7v/drb\n4ZVXit++xLyV1OinCk7UpKqLVHWyqk7effeC4zgSZ/Pmzdx888157Xvrrbfy1ltvbf+8zz77sGHD\nhsB9Hn30UU455RQA7r77bn74w7zNNRaLpYxYutR/dqAKtT7lpRoaYMKE4rXLoVDh8K6IjAHIvL7n\nsc16XOUaMaUa1xd43mEjSDj0+f3SGbKFQ1xOO+00Lrvssrz3t1gs5cPatWYW4EV3N/T3e6+rqjJ2\niWJTqHC4Gzgr8/4sTOWtbFYB+4nIviJSg6l+dXeB5x02LrvsMtauXcthhx3GpZdeyqOPPsqnPvUp\nTjvtNA466CBee+01DjnkkO3bL1iwgKuvvpo77riD1atX84UvfIHDDjuMzs5OAG666SY+/vGP89GP\nfpQ1a9b4nRYwwuW8884D4Oyzz+Yb3/gGRx55JBMmTOCOO+7Yvt21117LlClTOPTQQ7nqqquKcBcs\nFkuhTJxoZgF+ZGt/a2qMMXrZstK4s8ZxZf0NMB0YLSLrgKuAHwL/KSLnAK8D/5DZdi+My+pJqton\nIucBD2JcWW/JlEAsmAsuuICnnnoqiUNt57DDDuOGG27wXf/DH/6Q5557bvt5H330UZ588kmee+45\n9t13X1577TXP/WbOnMlPfvITFixYwOTJg44Eo0eP5sknn+Tmm29mwYIF/PznP4/c1rfffpvHH3+c\nNWvWcNpppzFz5kyWLVvGyy+/zMqVK1FVTjvtNJYvX87UqVMjH9disRSf5mbjmupHT0/u57q64rbJ\nTWThoKqzfFYd67HtW8BJrs/3A/fHbt0I4YgjjsjbLbQ5Mz+cNGkSd8Z0Qfjc5z5HRUUFBx10EO++\n+y4Ay5YtY9myZRx++OEAtLW18fLLL1vhYLGUGamUmQW4YxlqavwN0QBbtpjtg1xZk2JER0gHjfBL\nSYNrblhVVcXAwMD2z2Euo7UZq1NlZWWozcJvXzDxC87rt7/9bebMmRPrWJaRz3BG01ryY8oUeOst\n87u98go8/TT89rfGIO2H48p65pnFbZvNrRSTxsZGtm3b5rt+zz335L333mPjxo10d3dz7733Rt43\nCT772c9yyy230JbxiVu/fj3vveflJ2DZkfDzl1+1arhbZgmjrs509FdcATNmBNshoHSurCN65jAc\n7Lbbbhx11FEccsghnHjiiZx88slD1ldXV3PllVdyxBFHsPfee3PggQduX3f22Wdz7rnnUldXxx//\n+MeitO/444/nhRde4JOf/CQA6XSaX//61+yxxx5FOZ9l+Anyly+VCsKSDGF2CCidK2tZ15D2Sp/x\nwgsv8JGPfGSYWrTjYO/jjsOS
JWam4AgEN+k0LFxYfBWEJTlWrYLjjjP2BS/KLn2GxWIpT4L85Uul\ngtjR6ew0Qnj+fPNazOwzU6aYzr+lxQiAVMq4tabTZerKarFYyhPHX95r5lAqFcSOzHBkR62rg3nz\n4JJLBo3VEyaUacpui8VSngTpqUsVTbujMtz2HMdYPRyMSLVSOdtJRgL2/u1YOP7yTU1G9TAcKogd\nlaD8R6XKjjpcjLiZQyqVYuPGjTZtd5449RxStsfYocj2ly+1CmIk4BcHEva9l7oO4ttzRlocyojz\nVrKV4ArHVoKz7GiEdbxedoOqKrjhBrjgAv/vu7r8jc+OJ9iMGeGdvnN+53iOobkYdoukvJVGnHCw\nWCwWN34dv9Pxdnb6V1UT8Y5G9vveTVMT3HMPnHqq/7nBnH/PPcEr/rWxEd57L9kZRNm4sorIASLy\nlGvZKiIXZG0zXUS2uLa5stDzWiwWi9tg3NZmOvS2NvPZGamH1U2I8z2YOgtNTXD33UYwBJ0b4Pbb\nvQUDmO9vuy369ZaSgoWDqr6oqoep6mHAJKADWOqx6QpnO1VtKfS8FovFEsVgHBQHkg+nnmq8lN58\nM5qx+r77go8Xtn64SNogfSywVlVfT/i4FovFkmNbWLMmPAAwKA4kLum0sTGkUtGDD2Pm0ywbkhYO\nZwC/8Vl3pIg8g6kCd0lSNR0sFktxKaWXTdC5vGwLAwODnkXZOAGAQXEgcW0O7riRKMGHq1bBgw8G\nX3NWerbyQVUTWYAaYAOwp8e6DwDpzPuTgJcDjjMbWA2sHj9+vFosluFj5UrVpibVdFpVxLw2NZnv\n3XR0qLa2qra0mNfOzuTOtXy56uLFqqmUqumyoy2NjYPt8Dv2rbfG+9593Z2d5juvczc1qW7a5L/e\nWdLp/O5VEMBqTaBPT8xbSUROB76uqsdH2PY1YLKqbgjaznorWSzDR5CXjzv5W5i3UKHnEoHq6tzK\naGGk0/C3vw3OPJxZSXYcSNzv3QRd+8sv+ydEBBP9/Nhj5evKmqRaaRY+KiUR+SDwrqqqiByBMYRv\nTPDcFoslYaIYe2fMSCa9RJhHUVzB4OAUxQlSV/mlqIiSumLKFHPMefOM/ePAA+H73zfC84EHgg3h\nl17qLRjKJVguEeEgIg3AccAc13fnAqjqvwEzga+JSB/QCZyhSU1ZLBZLUYhicI0iQKLkBkrao8jd\nxmImznMfu60Nfv97uOUW+M53YNw4f5tEOg0HHBB8PK+2llJwJCIcVLUd2C3ru39zvf8J8JMkzmWx\nWEpDFINrUunCk/Qocrdx7NjiJc7zSsrn1H++8koYNcp/X8ew7e7sx40zUdnuOg7utjoBd+4o62Jm\nhx2RifcsFkvxaW42nZgXTufmdOpexEkXHnQuP1KpwWSDfm2E4iXOC5o1genkRYyQ8EqI+OyzQ0u7\nfu1r/gV+entNAaDNmweD67q6zOdjjy1OfQkrHCyWIlLKIjFJEyXbaxQBUsi5/HJrplLws5+Zkf8j\nj/i38c03i1cIKYoqTNXkaVq40BTvWbjQtPmQQ3Iju4OejfZ2//XFirIecVlZLZaRwnAUiUmSzk7j\ncXP++bBxI4webfTkbj13KmXSSJxwgrnOnh5zndXV8dOFexl3m5th5sxgT6igjLQvv1y8QkhRVGHt\n7bBuHVxxxaAK6dpr4Z13zGwgKlVVwbOUX/0Kzj47+vGiYBPvWSxFIKobaLkS1T012yBbU2O2e/BB\nOProZM55991mBpBPKvKuLhgzJtnfwenkX3wRFiyAjg7/bZ3MrfvtN/Ta4rrmVlcHC5Pqati61Skp\nWn6urBaLJUNSXjzDQdTqZ17b9fSYxck/5NfxZnvdnHii/zlPO23oOZ18SVG8dRx1lZ+giysYsgVY\nXV3w9lVVMH26EQ5uIRImGGprB2dhVVVm9jZ/vv/2FRXJP1NWOFgsRSApL57hIKpgy1cA+qXB8MM5\nVvboO6qaLqlCSF7C0OnwUyljNxAxHkvuuhDZgiGMpia4/nqjjnLaCvCDH/jf756e5J8pKxwsliIQ\nxQ20XIkq2PIRgEGzEj/a240K5+tfz98lNYlazEHCsKoKbrrJqNUcAXTiieY1TDA4eZzctppDDhmc\nWd15p7kHQTaHhgbYZZf3ue22kEROMbDCwWIpAkHJ3uJ48QwHUQVb2HZjxxoPLbcKKMz904uGBtiw\nwX+/ri744hdNtHYxg8LChKFjeHZYsiTatTpm3/Z2WLTIvN9rr8EZUn19uFdUR8cWzjtvDNAdfsKI\nWOFgsRSAX8Rq0rruUhJVsIVlO73gAujvH6oC+vzn40dCV1UZTym//bq64I47TLqKKGomr99MNTzy\nOO5sMJ+o7zlzTHW4rVsHvws+hgLdHHXUdzn99O8zbdo0piTlCpdE9r5iLZMmTSokOaHFUlSiZCx1\nspXOn59/ttLhIGo2Vr/t0mnvLKT19f7rnKWuLvecra3h+7kzovrdZ6/2Njaa17Br3bTJtD/KOTs6\nVOfOVa2piZdJFlSrquJsP6DTp/cPaScJZWW1MweLJQ+ievQkoeseDqIacb226+kx3jVe9PeH12YW\nMbEO7piKD3842GjtJtsY7swU1qyBH/94qA3Aaxbg9Ts6RvTsttfXGzuDMxvs7DTurddcY9obP2Gg\n0tenRI9PFg46yCdSsECscLBY8mAku6pGJapgy95u/nx/VUh3t+lMa2sH8xBlU1FhBINzTL+O2Q+3\nMTw7DiMOQZln3bz6qvEwcs7ltx2YuAfj7quAX6fehymPE4158yJvGotE0meIyGsi8qyIPCUiOVFr\nYvhXEfmriDwjIh9P4rwWS7EIS3sxkl1Vi01QviUwnaNfWgwYev/cMzSvam9eOPp/9775JPSLknm2\nogLuv997JplNZeUAhx32NMce+2WMrcCbVKo6VjtvvjnW5pFJMrfSp1X1MPWOzDsR2C+zzAZ+luB5\nLZZEWbVqaEK0OXNMlO2qVYPbJJVwLmnChFopcj1FTaJXW+v9vfv+5ePd5BjN89nXqx2Fpi536O8X\nHn/8Hl544X856qj/wAgIZzEC89ZbhbPOiqcmmj8fHn881i7RSMJwAbwGjA5YvxCY5fr8IjAm7LjW\nIG0pNR0dwaUfHaNjWInI4TA8hxmRoxqZk2pLUFlPEf/1IqorVpjjtLSYz1ENtKNGmbKira2qxxwT\n3xjs9TsGGcPTabP+6qv7VGQg1Hh89NEd2+/Rxo3GaH3sseb1/ffN962tqpWV8dpaXz+gnYt/o9rS\noqPhFU2iX0/kIPAq8BTwBDDbY/29wNGuz/+DKRPqdSxbQ9oybETpCByS7GwLrcEcJtSC6hnnI8yi\ntDeo7nM6rTpvnn/HH6Vjzl5SKdVvf9t4FOXjJZS9NDSY65s3z99LKZXq0M985hStrZ2t0BF6zOnT\nw+9tZ6f/+fyWGrq0tfafVEX0cOjXMhIOe2de9wCeBqZmrY8sHNyLnTlYSk3QSFXEuKS6S
cJVNQkh\nEybU5s6NLvS8cAuDlpbc9tbXq86ZM/QehM2uFi82HXBQm4KO4fX7RO1Mq6ujbyuiWlc3oDCg1dXd\nCv0KWxU2KkzWCRM+r9XV7QphMwfVmTOj/Z4rVsQTDkK/zudyVdBJoJpAv55UJbj1mdf3RGQpcASw\n3LXJemCc6/PYzHcWS1kRN9CpUFfVqC6xYYTpxdesyd+AHubx43y3cCH8+tdDA9GCAgEfeMA/tURb\nm0mZkR1MGGRYNuPOYNJpc/6LLoIf/SiaoVoVOjuNHaC3t4c99vgPDjusnrPP/gBHHXU/H/vY7pHT\nbz/00GAVtyCOPhqWL4fPftYY8Pv7g7dvoJ0JJOwFUah0ARqARtf7PwAnZG1zMvA7jO/WJ4CVUY5t\nZw6WUlNqW8Lixaq1td7nq60166NQrJlDkLoqaHHfK7/ZVZjKqKZmcDayaZN5vfLK+CoXZzn4YDNy\nX7zY6Pfzua50un/IvQr6/eLea7/779y7efMC7jcbtZPaRGcOSQiHCRhV0tPA88Dlme/PBc7NvBfg\np8Ba4FkiqJTUCgfLMJGvmieu3SDMaAtmfRT1UphQC+oMg4ReHJ2/e2loCO8Eo6qMGhrCDetR1ES1\ntYPbjxo1oBdd9LTW1rarSJdGUQnBUNVilN8vaP98WL7csakMqDCgabZqExt1JZO3n6RshEMxFysc\nLMUkqDOPa0uIK1DijMijzliK4a0U11vIvcyZE63NUWcCo0blzkbmzDH7xxm9Dy4btaJiN504sUUr\nK7sj7eOM/POdUcWdOXjRsXyVttZ/RedXXqmtzNo+Y7DCwWJJgKQ9jeKOyuOMyP06lI4Oo9aYOXNQ\nXeKoX/yEWlyh19rqbzQOW+rrowm1IHVJ9tLSEu2+D128ZwV1db16yy2mge7nIYqgzndGVbBqMsJF\nl5VB2mIZSSRlBHaImkrDnQ30mWeiZ+z0MhivWgXHHmuKyzvccYcxuD7yiL+RPK4BvbkZvvrV6Nu7\niVqd7MADTbujGIevuQYuvdT8PoUGuXV2VrF+vekCs3NEqcJ11/ln1A3OuKo539TTSU1THcuWSWEZ\neQu96BhY4WDZ6Ug6L1KUCNrs6mc1NaYDikK2l5Qj3NyCwaGtDY47zhSwTyIteCplaiUsXBh/36hp\nRIJSf3tx551w7LHvctddf6Ot7SDCEz34Rxy7f4NswXnJJf6JB41Xm9LWlnvsWro4i1v5IO+ygdHs\nzgb2r32D5uuPJzXljOgX6kVYHvDKSgb6+yOmKAzGCgfLTkfSeZGiFL3Jnqn4JZ3zIrs40NKlwWkv\nurqSTfy399757VddbWZhYa6bcdxVu7oG+OY3/5UNGy4EZmGSLzTm10Dguh8rl467ndSbL+cUchgi\nLDo74c7Bgg/Ne36Ir7cfDDTlHLOOTm7kQlLuwjs9AusSyKkS9LBBuM9rDJLMrWSxjAiSzosUlEvI\n+T5IExDUcabTucWB1q4NFg7d3ckl/uvsNGmu86GnB37xi9y8VF44ap2FC+GYY8BLNWNoZ+zYXn70\nox+xfPmFNDWl82tchr4tbdx57jL/JFrgmWwrddynWKbH0cQm0mxF6CfNVprYxDI+O1QwwNAHKyjB\nVVjyqwiJqyqS6teTMFwUa7EGaUsxCHKhrK8fzHEThyADd5jHz4wZ/i6RXgbM1tZgF8ra2lwDdr7p\nOeIYXoNSVvgZYjs6VH/96wG98MJNOnv2o/qP//gVHT9+f4VNPscZGHIc577nmy7DHVns2dgQA3AH\nKW1lls7nck/PoZxjBj0o+VRYKqJBetgFQNBihcPwUWiun3LH+X/V1eUKh0LzI8UJ9nIC1PzcOevr\nczv6sPgAt8un+1rz8cyK6spaU6P6mc/4d9Juj6uBgQF96aWX9DvfWao1NW0qsk2dlBQi7+v06Zfq\nRRf9RhsbezWdHghts+O1lY+AqKFTW5nl39i4bklORkGvmx3m1jZqVLBg8XrYPLILWuFgKRqlzN5Z\nLKIIt40bo5d9LISwALVvfzu4v7nqqtxjrlxpylt6dcLZZUr9zp1KmU416Dqj9o2jRgUn0hMZ0FNP\n/bOeccYZOmbMGIWUmtxE/vfe1+3W58ddudK0w4l5qK01wj9ollXPttzRvjtSLW6gRzptbqrfKMHv\ngaup8Q/WCAqO8PiBrHCwFIV8fPaHCz8BEFW4xcnAWih+bWppUZ00Kbi/mT3b//qz4xy8VFBBnXtV\nlemvnBTZ2USJYnYEUvC5tijM0r322ktnzZqlX/7yw1pf3xf/3of8uNkCJSgyXOjXFRwZ3ICWluiC\nwfmTOMEm2Q9mnIAOP2EV4QeywsFSFErZYRaCXx+xfHl04RY3A2uhOJ35jBmqBx1kOuYokb0zZuR/\nzqgDX3cNhWyy73UqZdJVHHOMuZ729n79v//7P7322pu0unqb5/Hr67v0uef+qgMDA6Ht8r33eY5c\nnPY31PUp9GsNXVrPNm/B4D5WR4e/qsdpaEPD0Afw1lv9hdfcucE/QlVVfn+8rB8oqZTd1pXVMoSR\nUP4yKIjthBNM8JUX2TEM48aZf58XqsYFNUmeew7OP3+oF2KUeKbKyvzPGeb56KBqMoBu3JjrPZUd\nILbPPgNMmPAUf/rToyxd+hgXXbSC999/H4C99nqYDRtaqaiopru7OhM8JixbVsvBB0+M1C5fj7Gl\nS/FNfxoQoDJlCry1tpOl477BK+zJBF6hmTtzPYqclK2Oe9iSJcGuoVdcYSL4nECIE080r37RlXPn\n+h8L/B/cbF9mzwsc/IHenDfv9eATRcMKB8sQ8vrTlpiwILaeHu91wyncOjtNcFo+tYxPPjn/8zY3\nw7nnRtu2t9e7f+3t7eWZZ55k3brH+OMfH+Paax9n69atAHz4wx+mubmZadOmMW3aNMaPH789Etwr\neMzdLr/AN9++cPnyvEcudQ8s5cyK2wCPHyCVglNOgRkzhjY2LOCspmbozVqyJPjB3LzZBE/4FcOu\nqDACyrme7LDsIFxBGRvmzdsUvHE0ChYOIjIO+CWwJ6DAIlW9MWub6cBdmIpxAHeqakuh57YkT15/\n2hKzdq1/J9vTY/6zXgIiW7i9+Wbwedati9cud3qMrHgqFiyALVviHQ9MX3FGAUG1jm4iCr29pn/t\n6elh9erVPPbYYzz66KP8/ve/pz3TSR544IHMmjVruzDYa6+9co4TJUVHduBbaF/Y2WmCJvyorw8e\nuQR19N3d8LGP5TZ64kRzXK/90unc84VNu0ePNoWz/YRDVxc0NcH115uHz0+ylogkZg59wMWq+qSI\nNAJPiMhDqvqXrO1WqOopCZzP4iKoQ8qHoD/t3XebkWVS58qXceOC11dVeQuHgQFTQGbJEtP2iRP9\nc/p4/ff96Ow0nf8115jP3d3mnjlFbw45ZHBdHOrqTJ6kQu7x0qXRtxXp47bbfsQ113yPzkwHdvDB\nB3PWWWcxffp0pk6dyp577jm4gxOwlecDka2u
CuwLb789OPJPNXjkks+UePx4/2pEXiOliRNN4706\n/1QKDjjAPBDTp/sft6/PjG6uuML/WkpEwcJBVd8G3s683yYiLwB7A9nCwZIw2RW6amvhnHPgO98Z\nTE7mJkyQuNdff7357tVXYcMG0+GdcAKImOfa3flNmVK6a47CxRfDTTcNCrdUarBfmT9/sO333BMc\n2Rw2S3KEwve/n5sOw+mDpk6FL30p+ujdQQS+9a3C722YZsSNaiVwP1/96leZNm0aU6dOZfTo0d4b\nZyeLcm7q3XebKVlEgZEzy+jshCUeD+l99wU3/hOfCBZM48f73wivH7uzE0491f+HO/98uPbaoW08\n8UR/AdbVBSedZGYGF19sHkQvysWwB8l6KwH7AG8AH8j6fjqwCXgGUxHu4IBjzAZWA6vHjx/vb6Hf\nyQnL3JvtuplPrv/GRvMa5Ao5HO6tUTxdHJfGoMphTU3GQyefmA7Hpz6KJ2JlZbTtojioxAlO7Ogw\nDjI1NdEK2dRVdWvr3MfDf9Cwh89xwYobIBP0kM6cGdz4oOLMQQEtfm5aYT7A7spBThuD4hjc0YxF\ndgkEVmsS/XkSBzHtIQ08ATR7rPsAkM68Pwl4OcoxrSurP1GCk9weeUEegJs25Ve4JO6znFTUdZz/\nVpRt49Y2rEixAAAgAElEQVQ4yLfQS5zFS+hGjd/YunWr3nDD77W2tl0rKqIVvgdVoU/n13w3vEOP\nGzUcZQQR9pAuWhR8Dr96qmHVhPxK1sUNfmtqMiORKP65SdeizfpjVcATGqF/DVsS8VYSkWrgt0Cr\nqt7pMTvZ6np/v4jcLCKjVXVDEuffGYmiLnC8+5z3fttccUX+KeKjzoL9tBD5qKXiGM3DbIQvvRS/\nxkGSKfVra41Gor8/2Cgb5L573HED3HLLA/zxj//LY489xhNP/IWBgTeAeo8zauY1N9V0A+1M6FkD\nPZuDC1vE0VVBtDzoYS5o1dXBBqIzzsjVm554orkOP/0+mHVeD3BUH2B3GzdsiGbXiG2ND8Djj3Uo\nfCz6AfxJwltJgP8AXlDV63y2+SDwrqqqiByByRq4sdBzl5IgfX3SRuEoRHl2nY5bNbiDXLMm3n/d\nTRT31qSL68T5bwXdJ1WjNj755HgCKm7f6KamxngFudt7yCHm+XnxRdO/7LYbvPwyfPSjpo1Llwan\n6d6ypY0z/u7XnFH5n8weO5ZnD2/h35//AJ0e21fTg6D0kHvDq+ijGddowq9Dj9txRhlBBN3Utjbj\nvfPII8YfuKvLGHlqa4375+c/b4pO3HuvuWHd3UZgDAyYz0H4PcBxi0y0tRlvpKhGrFjWeB98/liV\nUEBkjItCpx7A0ZjhyDPAU5nlJOBc4NzMNucBzwNPA38Cjoxy7HJRKyWRSDFpoqY1aG2Nlvgtn5KH\nUWfBxVKxRlEHRblPcWfy+ZaIbGgw99qrvUE2n4aG/oxqyE891K9X13xXBzI7t9S0qNDvqzqaw83a\nxEZNs0WFPk2zJadIfdyUDZEexEJu6jHHmG2c1BRO8ejszIlJPsBeoeFBx2ppKW2H4HPPbPqMEpFk\nIsWkCTOKOm0IU3G+9Vb0Iu/uji6JzJ7FSFPhdZ+Cri+ugIrbN7rP45e2uhAbRpqtQzKLtjJL02z1\n2XaLtjIrPNV01JQNUfJ/RPkzdHaGW/ij5EiJs9TXhz/A7hHI4sWqH/hA+HXGNWLli88fywqHEhE0\noEml8kukmCQdHeYZSaXM4jdY8RvQOKlgog7AqqtVzznHjICvvDJ6kfrhztcUlPMsHwGVnVK/ttbc\n/3nz/K+1sdH7Xt1000ZNpXry7uOa2Dikc++kVpv8Mp5mbVtQh+4kiwoaUY8aFX3UHDXJXX19/lNd\n9zHyKdwR1MZSJx8r8szBps8IIUgVGhST09ZWGnflujqYNy+43i14qzi9UsEEkUrBhRfCjTcOpqmI\nYlgOUt8ODBj372ITVMQ+n7Qgzv28/Xbj2r9+vSmn+e675h/qhfb2cudtylGffpvHHnts+7J27Szg\nuzHOroCQTitVbZtzKo+l6GYZn+V4HqSPKtppoIF2qujzrlJWU2OWfFI2nH02HHyw0X339Aw1/lZX\nx7imDCL+N9AhKEdKNvUZo3xFRe71NeWW+Cyojdm2lWIbI+PaReKShIQp1jKSZw5gBhnlTFzdeSHx\nDsUorhOHTZvi124Ic71duTJmHRj69Equ0CNN76677LKLnn766fqFL9znm77aa6mlQ2fO6NXWuY9r\nZ62/KiZylbLvfKdwNUhQLMGoUWaGEebDnFTZuewf17FTJKHmiToNLpXtweM8h0GfJtD/JtqZJ72U\ng3AI09dHUUGWC9mdXVBxFuf/J2Je6+uDBaGfu7ibUhXXycb5/2Sfu64u9//q3CPH3un33w7L5uzZ\nd2T0/QOgL91yi/b396tqfBtGE5u0c/Fv4vviey11dcmoQsI6dkf15OjfWlpyf/CoN6KhIZqRrK4u\n+c44SoxCqYuiZNk4kopzKPgAxVzKQTiohtcH9u0Myqj+wfLl5v/kDLjS6WBnj1TKbOukmPdLNe9e\n3Hp7r1H3cNgegv6n2Wrn7Xn/G/yv0flvt7ZGs8UO7dRd+v66uiGj6ZUruoY+YzVd2sgWTbPZ26vo\nmGOM4SeosZEalVBnlY+g8hpJR/0RVqwI9x7yKqGXBGGzgmE2spFQhLS1OUQgyCX5gQeiqyCHixUr\nYNq0oW10dO+SGwsF5NpTwoK+amoG9fZOXE5Xl1lSKaMa/fznk6kVEUeVG5SvraIC7r/fuPJ7uYx7\n0den3HDDmzz++Ft0dzshO14oNXTTS7W3vr+zE772te3++lM4h7cuvZKlEy7mlXU1THh7Nc2/OB1t\nb2cpM3iFCUPrEDwCrFzpH+AVprvPrl1QKHFjH8Dc7OxAF/efbfly+NWvvO0FU6bAz35m7qHXD5xO\nw/77F35dXoTFKIyEoihRSELCFGspl5lDEMUeJBSacqKjI3gGXlc3VH3S0JBfofb6+sEZtVdtY2c2\nUui9iqPKXbkyeHDp9lKKru7uU7hcYZZCp+92qepenSs3h+v7sxfHuyeOrilORbKWluK4WBbi3xv0\nwwe5hSadhiIfymWK7IJyy61UjGUkCIeg5zNfbzmHJGxara3Bnb3IoEuqo2ePmyjOnbts8eLgbQux\nOcRR5UaJHcguFRxFK1Jb2abfn3ydvnHtTTpqlH/OoqZRA9pZt0u8G5l9Mc4DEKWD9Yquc2cfnDvX\nGJmK6Xef/cBG0bsVGugyXFGoQed2sjmWUmi5hNRoeEUT6H8T68iLsYwE4aBaHE+cpGxaYa7jNTWD\nyeeiDvwqK4caqt1JLWfMCN73mGPy/y/HTrgX4gHU1KS6dWuP/vGPf9QzzrhLKyvbw/ttNmknZgq0\nMv1pTdf35mzT2Ji5nhUrot3QoItZvDi8kw3qYEvdeWYHjYU9VKmUf9K8fM5ZTOGXfc6gP2i+6X7z\nwda
QLl+mTDFqxuwiNB0dZsknf1BYHjIn7U2Y/t1RBYelsr/zzujJ5M45x1xrtqp11SqT3iaIXXcN\nTynjd01xVLlr1/TS3uGXYkapqurjgAMuYsyYxZkqZ7VUVLzru32aNqrozdgNuqANpvC/vDdqHLcv\nfJ37HqoBTJ6mM87IXM+qWv/giiDcF/Pmm+E+/X6BGlGTWiXpj5+dxfDgg00+JL8yeF1dcMEFZrt8\ni1fEzZyYBGF/0DfeKDx3UhQ8fuMKf0NYLKxwSIgHHohe2D4KUTpCv0ynF11kths3ziR586uRLgIP\nPjjY8Ubpw2pqTBCcVyGh44/3r//ucPLJwf/loOytE8f10FArtHXlBldl948TN66kgUNpozFn21o6\nOazva7S3P8nZZ5+9vbDNGy/Vcvy0zfRpBe00UE87SgVf5FdMZYVnUfq6/jbOTt/B2f+VdUHODcmn\naLT7YqIYev0qE0UZYey3X3Lpcr2YMsUIIadUnpfxeMuW/EZQw0mUP2gphFaSKYKzSCpl9wnAjZhs\ngD9X1R9mrZfM+pOADuBsVX0yzjmGI/NpHOI6KEQd8ftF9I4d6z8ovPLK4LZWVprl/vvh6KPDr83N\nFVeYufOttw4W5zr5ZPMa9oyG1UMOHOge28ersh9VXU8Bu+TsWyV9NDdX0dHRwZ/+9CfG/+FyqvCu\nHlZHN/ddMp7drh1ak3jP/1nCW/XfZGn7cbneQX74eZ8U8qd1d/ZhUbBNTf4eR2GZTp9/3hw7qXS5\nfjhh/OPGwbnn5pbNg/xGUMNJPmVHi0EhKYJDSCJldyXwU+A4YB2wSkTu1qE1pE8E9sss/w/4WeY1\nEknWAigWUTpzp9wuwHXXBV9PWM0CyL/v6e83//mZMwdTRl/nmWw9lw9+EPbcE7ZtG/zujjvMjCJI\n+1FbG14POXCgu62T+zmKZRzvmRbi7q5T+dy0PvZ44gkm9vfzz+CfQqJuBrsdPif3JGvXUtexkTP5\nTaR7AeT+uI6kz/dPW109OPWD3PzkTj1YEf96sA5hs44FC8wowYskOuvsEdArr/g/JPm6eA7XqDFO\nUZFiMnGiSRFSDAFRqNEC+CTwoOvzt4FvZ22zEJjl+vwiMCbs2JMmTSp5sKEfYS6lQV5LjY3GQzFq\n5TaHsOC7QoNjnXMuXhw9a0E+56ytjWZzDMzeSp/O53JVvNNCbAPtqKjQzupqHQDNZPL3TyGRT2lI\nP+Oxn8tovsnhvIyX+Rhd83UvDTNyR8Hr4Q2KuszHxXM4PZXK4fyq5jnO+tOUTVZWYCZGleR8/iLw\nk6xt7gWOdn3+H2By2LEnTZoU22U4qVKUbqI+A37bRe0jgq4nu0/It6aA1zlnzkxG0Pgto0ZF+x0C\nf+tM6gm/kwzEbZTXyCJOZ1pZaXKn+AV1hOUbybeNcQnLVx7nYYxK0IjO70GLe63lNmospaeU+9we\n92CHzcoqIrOB2QDjx4+PpcsvhvopisOH6uDM9vrrzbp164zasacHzj8/2rna2001sGwNhdfMPqmE\njM69jRvcGoeLL442029uhrlzB/Bytqh0VynzwCfQ25/Nm00ZuHnzBr9LpUyK1exwci8qKoL1el6R\ny6mUOa5IcEpfhyRUO1OmmB9g/vx4+xWiGgnSD9bVmVevqOdiuPMVm+HwlHIoojEakjFIrwfcTpxj\nM9/F3QYAVV0ELAKYPHmyRrX7JF2K0iGoPGN3t+lfsu0H7gj/+fOjqwPr6owa2P2/8RNuXqUyw/oz\nLxoajEH54Yfj7xsFEZC+XljyX5564ffee4/ly5dvT1+9ZUst8CBQDTRQU9NLTU0Fy/TvSLUHGIbz\noaXFGEm3+55iXEej6HB7e8Nds7IZGDCjhbo6c2Pq6oLrG8fVw/vp34PylQeltM5Xdx80ouvsNAL5\ngAMKc/HcUVJUFEIRjdEABU89MALmFWBfoAZTCvTgrG1OBn6HGeB9AlgZ5diTJk2KHCFfrIj1c84J\nnn37aQ7cCdoK1ecHzZKzE+rlq7nILl7jv/hHBXve+/o+ba3/ynZdW39Dg3Y3NOgPmpv1Ix/5iIJJ\nX11fX6/HHXecfu9739OHH/693nprz+BMfcWqZHRoXksqNVRHmJQxJ8pSXa06ebL/Q5RKGZ1fFHVF\nkO4z7E+UZEpr1dKkjyiHClLDzUgoE4pxUX0JWAtcnvnOXUNaMB5Na4FniWBvUB2MkI6i8y9GKcqO\njvBO1y9bqfN8hqmxC7HVBaXATqVUFy0yhuA5c7xzDKXT3jbPmTP9r7uWDk3RrhBW1zjT9/C+Z16h\nTaCnHnecXtvSoi9dfbX2XXWVd8dUaA3NqEs+0jyJxcmLHrRNUJrrsHvkJf2LbTwtRc6jcsirNNz4\n3IOyEg7FWtzpM8LsPvkOJIIM2GF5iYIWt0Dy+l+OGjU0B1pQbQUv4Ra1LnJov7Ep9waEeQ1dyVW6\nmLN0JrfrzOo7dfGibl2x6Hlt4n1Ns3VIeunH3EXrXctAOm3OGdZhFdpZT/Y+v+8NK8TDp5DFuf4w\nQeHVoUd9+EtpPC2FMCoHb6Hhxhb7CSefgUTYsxWWl6iyMnoN6SSFW5TBtNCv82c+pa2Lu/2Pm6Xy\ncW5Aa8tfI3sN9dfX62P/9E/aUVXl6TYa6EXklzLV/YMVouZpaDBTpyhVebKleT4ePoUsDQ0mOd7M\nmeFeTtkPdDGmzUlQCmE0nN5C5YIt9hNOnIFElJl4a2twzZH6ev9+xzlGVNfaOMItymA6zRZtrf0n\nbUl9T0W8VT/u2AH30jlqT21q8t4nu0D9AOj/kIcraU2N0bl7Nt4lDaNUGAsz1qxcaX6ooIpF2cEY\nmzb5t899DUkKkfnzowlDzyyDO7n+3bIdEkrZnUiCpnLBqcGxcKFxRFm40HgpebmxRvGEa272r5Hu\n5CW65x7j8FFj8q7R0DCY0eDZZ2GvvWDOHLjqKvM6Zoxxuc3G8T5qajKOJSLm1Ss7QhQnhSr6aO5e\nwsSu52lQbx/VBjqYQK5XR6q/nWUXPUhTk1Jf348wQJqtNLEpp0C9ANP9kkoF0dPj7+3j9jZpbh4M\nCc9paMoUfFm+PPzGifhHA4NxPbvwwsEfJ5Ua/FG9qK+H118P3iaVMg/iihWmPbW1/tuCibR23POC\nyPbGCbpHpYzWtexYJCFhirUUM2V31Jl4dtVCd5rqbA8f97qw8pRODQUv+2vgLLmjQ1vnPq7pmi6f\ngeWA1rPNlJIE7aRWm9gUaRbgLP2gSw46SEePHqcwSyu5XG8JKlgTVgCiunpoNaG4I+MoU0K/GxfX\noB3VMN3Skts2ZwbiZTzu6FCdPTv43IsXR7N5eM0GrP7dkgFbJrQwguInqqvNjKOry78ioKqZFbjj\nKnp6zHLqqSYYzm9m0tFhBpTpdG4cQ2BMTSbKr7m3kq/3vAzkjkTraWc9e9PEVgBSdLtyDFUOzTGU\nNQtwaAN63niDf/vwh9ltRprxF5zJvjfeiCzyiTPwS/vqcOKJ
ZlQ8ejRs2AC//GW8LKNhZRnB/8bF\nDRRypo1h0zOnvmqUtjntGzvWv3yniImcdKaQQWmuvWYDQe0o96yVlvIkCQlTrKUYMwdngDlvXrC6\nuKEheOAVpuaNk5Iikudd1gh4JZO1iY3exeezT5BKaUfNKO8cQx7LgHMR2bmCCilm7xxnzpzgG5NK\nJTvajWvQdqaNxdDjBx2ztnZoTENHh2l7KjVoV8lnNmBnFDsd2DKh8cn+n9TVDb769RX19UOrKzrC\n5ZhjgvuXmTPzyKkU5lebdUDfpHJZS29FhfYHNGC7MTlI5eNV6s69OMWog4o2OzfU78ZUV6t+7nPJ\nep3EdYWN4tLqDhyLk8QratCLu/MuxBunXPIPWUqKFQ4xCfqf1NSExzOk02ZpbIzgKZSOViFxyGB1\nzpvhfrUxRsADrqWLCN5Exx5rXCmDRsthEtExpAS5YzY0hHv4JDm6jRu34O40/Ubdt96a/2g8aih6\nEp239WLaKUlKOOxQ3kpBhKmewyoxtrWZZdu28AR1VVUmXc+yZdDU2EeabcAAoJ7bNzQoE371XWPA\naGszf9+2NvP5+OON8SOKF4sLcS21hCSmS6fhy182xRqC8tXsuqvZ1vsiTL6cM8+EQw/1v6EdHfDF\nLw71Lsom+9oLwc8NLJ2GxsZgDye3+9u8efClL8HXvgZz5wb/VkG4jzlzpr/u37F9FILNP2QpgJ1G\nOAT9T3p6gj0Sw9GMu+c2mhr7tvcvUw7p5K2KcSxkDlfSQj3eDaga6KVZlnof2uVXq37uijmtiYlj\n4AwSQE6Gviguk2HHmTp1sIP8u7/zd/FMooMEbx/nv/0N3n033O+5rs6U0vzXfzWG9B/8wD9ZXtT2\nOsbzQw/1rooGyXTeYb9DqaqVWUYmSUw/irUkqVYKmmFH0XQEL/16LA8Zvf+oPX2zAeYYkVPdRhsx\n5999VUYDIrps6lQ98sgj9ROVlboRdAvG3TR24JmI0es7+Xyy1SGbNvnfiDg5euJE9CUZ3VuMYh5x\nXGHjtrfYah+bf2inhJ3FlTUpL7yg+gfV1SaY7dRT80t/naaNL3OLKS/Znx7MJZ81XZnCat5ib5Yy\nw9Qo7nuT5nvOJ/VGPbR6+9VuU+UXK1bQf8QRTLv4YlZ98pN8auNGKt54w+T3Dkr5nE1DA9x0k5km\nZbs7rlgBJ5yQqw5yIvy2T4cyo/Dbbx9aRPqjHx3cx1HlHHvs0HqiYG7ws88OjtCTqsVbrFqycVxh\n447Gi11q0iuvexIpuS07B4VIFuBaYA3wDLAUaPLZ7jVMNtaniCHVDjxwUqJeeGGD3o6NJsDsqqmP\naH2lX5CZxyDMHUzmHj1G8JTpSaX0O3Pn6hafQLKedFq3/e1vuRfj5+o4apR/dbKmJmMpzx5ZP/SQ\nfxvr61Xffz/ejXTaF5ZbRLXw0W1Hh7mmKHmasveLMsuI4wiQz2i8FK6mNv/QTkWcPjZoKVQ4HA9U\nZd7/C/AvPtu9BoyOe/zKyknR/4MR/+y+/xPnT5px1xxUAQ1mGU2zWRvZEhxb4FYHdHb6d9SZpR/0\nnMpKnX344dqRSmlvXZ0OhHUSYVG5Xh2O42aV3QktXBjc4cXJ+pdvgY18O0hnv6BEdYVGE0dKZFVg\nh247b0uClIVwGHIgmAG0+qzLSziAt3BIp3q0dXH34N0odPTl0+F5xRGExha4Osh3Xn1Ve0L8/gdA\ne5ubB9sR1klE7Zzdx4qamdRrydajR+3049oS4naQUW0B7nPlM8sImtm486DkE/dgsRSBpIRDkjaH\nLwO3+2mvgIdFpB9YqKYUqCfuGtIwyXObtq4KXvnq9+DVjEn2xz8eqnuPWx/UR69cR5exI2Th9R01\nNQzU1fHQxRez9IILeOyxxzh8zRoWYgpe+iFAlZO4Lko92qi1c93HWrIkPMWFH9XVQ/XoUd0j49oS\n4tbijWoLcM7l2CQ6O/09hLxqD4fp7adMMceeMCF5e4fFMoyECgcReRj4oMeqy1X1rsw2lwN9QKvP\nYY5W1fUisgfwkIisUdXlXhuqq4a0yGT12qaWbib0vQgtHp20m6iFxgusxdpbUcHtqRRf2bKF7nnz\naGxs5Oijj+Zr48aRfvhhI8CC+PSno58sH9/1Qq4v2zAatdMvtrE16jVVVZncThMmDE2E5YXf/QvL\nW1SM4uUWyzATKhxU9TNB60XkbOAU4NjMlMbrGOszr++JyFLgCMBTOERBgGYi+JNH9RUP6vAi0D4w\nwH2f/CS/3W03Dmts5INHHUXl3/+9EUyPP246kCC+/nVz/rPO8l7vdtl655343j2FXN+DDw7t3KJ2\n+kl4ygS5qoVdUyo12Ibf/S7eLMOLfBL7RR2cWCzlSCE6KeAE4C/A7gHbNACNrvd/AE6Idnwvm0O/\ntngUqPFd3AVc/IiQYsEvpmAAtP+rXzU6fccwWltrPj/8cHRPF5FcryDVXHtKWP4jL1130PUFte++\n+7z16HGrKuVjbA07R9A1pVKD6a9Vo3sc5eNtVK5V2Cw7LZSDQRr4K/AmxkX1KeDfMt/vBdyfeT8B\neDqzPI9RR0U8fq5waGJTYEbRvISDqg78+c/a94EPaE919ZC8RG2gnRCYuM43MVNtbXCyuuxl7tyh\nXldBCZrcNYejGOCj5glyilIsWhTi91tED5uoRveoQipKNbl8vY1s/iJLmZGUcBBzrPKkqmqy1tWt\nor1tYEgNgimsjn6Q+fPhiityvlZV/vKXv/DYY49tX7a8+y4zgEkNDRwyZgy77r8/Y6ZNY+/dd0e+\n8Q1vFUZtrb+BMy5TpsDLLw+qYWpq/I/tqKHGjPGvIZCNo6bxy/fvfB+ko29qKr4efckSUzbPT2XU\n0mJyHQVdk5uuLnOfvK7HqSZ3xhn5XVPQsUtxryyWLETkCVWdXPCBkpAwxVoOP3ySGZx+5TVt5cx4\nM4askVt/f78+9dRTeuONN2pzc7OOHj1aAQV07Nix+oUvfEEXLVqkL774og4MDAwVxUEqjLA6w0F1\ni6POQEqtshju0XCYGiiVKq9gM1szwVJGUIaurIlTUZGx5Z35IfjSuXDCf0NfzdAUD36VtYA+EX76\n5ps8cvrprFixgvfffx+AD33oQ5x00klMmzaN6dOns++++yJe2UEdgoyrH/84PPKI/75Bx/W64KgU\nM3FaqbN5Zhuex40zMzK/DKeq8Q29USu25UO+x7YV2ixlTFkLhyF86lPw5ptGnbBmDXz4wzB5ssms\n2dODLliA9vVBby9dFRX0DAxw3LZtrL7sMiZOnMiMGTOYNm0a06ZN40Mf+lD88/t1ALfdFiwcLrvM\n5DOKoiqKk566mIXjk8p3FAWvnEiVlTAw4L9Pd3d+AipuLEUxj12sXFAWS0KUtc1h8uTJunp1xr6Q\n9WfShgb6Rfjd8ccz/a67oK+PBqAX6BfhppNPZtysWUydOpWxY8cWr5Hvv2+KSXt17I2N8N57ZqTr\nCJW334Zf/CJ+3EE
qZTrF7ACsYlAqPXpnZ24hbocgW046bVJsj1QX0aDrtnYKS4EkZXMYGfUcOjtR\nJ9AoU2BF2tqo2raNk3/7Wxr7+mjEXEwtUK/KPz/+OGf29DB28WJj4Cy0aIwXTmRstuqoutpkMz3z\nTKP+EDHvr7jCFNSJk0kVYNQoU2jm0582r6++WtzRpV+BnOxiOIUSFCNQVWVG437rijVrKgVRYiMs\nlmGmrNVK27Zt4+qrr6bqP/+Tb27eTKPHNr4a/c2bjceLU8nnq1+Fu+4yI3kvHW9c/a9XZKxDb685\n56JF0No6VF0QJyDN6Zj7+42Aa2+HlSvN+6CZQxK67GLq6B2CbBsdHTB7tkkNvqOlm7YV2iwjgSSs\n2sVaABUR/emYMcFxBnGWurpcj5J8vE3iFq6vr1edNy96celUysQaREl77WYkec5E8YraETOWDrc3\nmGWHhp0hzmG//fbTVatW0XT//cF+74XQ1GT+llu2eK/z0//Onw9XXeXrKeWLU4NZxMww/NRd6bRR\nIf3yl97X7aV3H2m67CDbxqhRcMMNxglhR/PksbERliKyU9gcRjU2GsGwZo1/wfpC6ery76CD9L9B\n9XmDaGszS0WFqbzmR3u7ue446oeRpsv2s22k08Zb6fzzjQCeM8d0pqtWJXv+zk6jops/v3h2KS9K\nZdOxWAqgrG0OPPOM6Rja2417YzEI6hCC9L9BCeii0N9vRvnptL/L6IEHGhtDVJfSkajLzrZtjB0L\nF15Y/Cynw+1KWgqbjsVSAOUtHPr7BzuGqHV845JKGdWQl9tkkE+/V2Bcfb0xpEZRNbW3w+67GwOr\nF1VV8P3vmxGt3/psj51SxickSXbtiWJnOS2XNNvFjLuwWAqkILWSiFwtIutF5KnMcpLPdieIyIsi\n8lcRuayQcyaOk9rZCz+XSUcd8cADcP31JsitpcV4Jy1fPlRd4EdDA+y/f7B6obYWLrrItK+21uwX\npH5obg4WNiPB/bMUs5+Rpn6zWIaBJGYO16vqAr+VIlIJ/BQ4DlgHrBKRu1X1L3mfsbJyaGWz2lpj\nkwhKuVBXZ9alUubVcYu84QY477zc7RsbvTtgL3VEdlCaoy546SW49lrvuAans06lvNULzz5r1E59\nfWZWU1trtv3Wt+DSS70FWhI1FIabUsx+RqL6zWIpMQV5K4nI1UBbiHD4JHC1qn428/nbAKr6g7Dj\nT98o/jgAAArcSURBVBZRz/yr1dVGQJx8Mhx8MBxwAEyfDvvt590RO5k3Tz/dFH5xOuGgfUaNMoV1\n3B1qPt5AUYRJNoV6HUXJVFps8o21KIUnT1DW15EefW3Z6SmLrKzA1cDrwDPALcAuHtvMBH7u+vxF\n4CdRjj8pLBagqWmwsPvMmYPFdqL4jq9caWIPImR03U6+/ulxffVHuh98obEWxY7VCMqym0/BH4ul\njKBUWVmDakgDPwPmA5p5/THw5UKElYjMBmYDTArbuKfHeLdUVATHQLS1wYsvDn52DJJBaSy81Av5\nqiPiGh5HstojCWNvsT15dgT1m8VSZAquIe0gIv8O3Ouxaj0wzvV5bOY7v/MtAhaBUSsFnjROjqIF\nC+CUU0zHE2SQdPDSb5fKG6iQ84Spc4qdJjqpmsrF9uSxrqQWSzCFTDuAMa73FwK3eWxTBbwC7AvU\nYMqFHhzl+KFqpbiLozKIUlPYS71QKnVEvucJU8eUIrWGralssQwrJKRWKjRC+kci8qyIPAN8OiMg\nEJG9ROT+jPDpA84DHgReAP5TVZ8v8Lz54Yxcw6Kb6+u91QuFRrZGjcjN5zxudU4mcy1tbebz8ceb\n1OJB65OKDg66t+Uca2GxWIZQ1rmVfL2VHKqq4gXHiZh4hEsu8feIqa+H9etNR+xHPt5AfkVtLr7Y\nrA9SAUU5T5gHTtw8Tfli8wZZLMNKUt5K5R0hHYSTfyeOcGhoMJ3Ttdea4LLrrvM2SAYJBoivDw8y\n0l55pRFaXukb4pwnzIgdN09Tvlhjr8WyQzByhUNVFdxzD5x66mAnlEqZjtiP9nZTha2jY3Dk/q1v\nmc65mAbJMAO4o+KB/NM3hBmx4+ZpKgRr7LVYRjzlrVaqqtLVNTVDO/z6elNIxxlhO6qXF180Hklx\nq6wlXfbSyxMoTnrvfFU8YeqcV1+Fffe16h6LZQdn51ArHXqosQ+8+CJs2GAS1e2//9BRqKN6WbLE\nxDt4UV1t1nkl10sqmVtQls841d/yVfGoGlXZNdeYz9n1ph1jtlX3WCyWCJS3cKioSEbn3tvrv18S\n+vawwK9XX/VPiJdNPioet2Dq6jJ5mGprc/MwWXWPxWKJSHkLhzgEjc7zTcsdlbDAr/vvHzpqD5pB\nxM2e6iWYnOu87jojHNzYNNEWiyUCZV0JLhZB6arzScsdhyjpLpxR+8KFxgbR0pJMJTCbftpisRSB\nHWfmEOZCCcXTt0dNd5E9ar/kksJVPCM5D5PFYilbdgzh4PYSuv568926dbkdbrH07UElQ4NmJkmo\neEZq9TeLxVLWlLcr6+TJunp1YIx0fvUSisFwtcNGJFssFhc7hytrGOVSCxiGzxPIRiRbLJYiMLKF\nQ1LpoZNiuDyBrIuqxWJJmIKEg4jcDhyQ+dgEbFbVwzy2ew3YBvQDfUlMeQBrjHVjXVQtFkuCFCQc\nVPXzznsR+TGwJWDzT6vqhkLOl4M1xlosFktRSCTOQUQE+AfgN0kcLzJBsQ1JxC+4iVqLwWKxWHYA\nkrI5fAp4V1Vf9lmvwMMi0g8sVFMKtHBKZYwNyptUSo8oi8ViKRGhwkFEHgY+6LHqclW9K/N+FsGz\nhqNVdb2I7AE8JCJrVHW5z/lmA7MBxo8fH9a84htjy8kjymKxWEpEqHBQ1c8ErReRKqAZmBRwjPWZ\n1/dEZClwBOApHDKzikVg4hzC2gcU1xhbbh5RFovFUgKSsDl8Blijquu8VopIg4g0Ou+B44HnEjhv\nabAeURaLZSckCeFwBlkqJRHZS0Tuz3zcE3hcRJ4GVgL3qeoDCZy3NDgeUV5YjyiLxbKDMvLTZxQb\nm57CYrGMIJJKn7HjpOwuFo5HVBLptS0Wi2WEMLLTZ5QKm57CYrHsZFjhEBWbnsJisexEWLWSxWKx\nWHKwwsFisVgsOZS1t5KIbANeHO52hDAaSDahYHGw7UwW285kse1MjgNUtbHQg5S7zeHFxNJ7FwkR\nWV3ubQTbzqSx7UwW287kEJFE/P+tWslisVgsOVjhYLFYLJYcyl04JJPau7iMhDaCbWfS2HYmi21n\nciTSxrI2SFssFotleCj3mYPFYrFYhoFhFQ4i8vci8ryIDIiIrweAiJwgIi+KyF9F5DLX97uKyEMi\n8nLmdZcitTP0PCJygIg85Vq2isgFmXVXi8h617qThqudme1eE5FnM21ZHXf/UrRTRMaJyP+KyF8y\nz8g3XeuKdj/9njXXehGRf82sf0ZEPh513ySJ0M4vZNr3rIj8QUQ+5lrn+fsP
Uzuni8gW1295ZdR9\nS9zOS11tfE5E+kVk18y6ktxPEblFRN4TEc+SB4k/m6o6bAvwEeAA4FFgss82lcBaYAJQAzwNHJRZ\n9yPgssz7y4B/KVI7Y50n0+Z3gA9lPl8NXFKC+xmpncBrwOhCr7OY7QTGAB/PvG8EXnL97kW5n0HP\nmmubk4DfAQJ8Avhz1H1L3M4jgV0y70902hn0+w9TO6cD9+azbynbmbX9qcAjw3A/pwIfB57zWZ/o\nszmsMwdVfUFVw4LcjgD+qqqvqGoPcBtwembd6cAvMu9/AXyuOC2NfZ5jgbWq+nqR2uNHofejbO6n\nqr6tqk9m3m8DXgD2LlJ7HIKeNYfTgV+q4U9Ak4iMibhvydqpqn9Q1fczH/8EjC1SW4Io5J6U1f3M\nIqwsclFQU1p5U8AmiT6bI8HmsDfwpuvzOgY7iT1V9e3M+3cwhYWKQdzz5BRAAs7PTPVuKZa6hujt\nVOBhEXlCTM3uuPuXqp0AiMg+wOHAn11fF+N+Bj1rYdtE2Tcp4p7rHMyI0sHv90+aqO08MvNb/k5E\nDo65bxJEPpeI1AMnAL91fV2q+xlGos9m0SOkReRh4IMeqy5X1buSOo+qqojk7XoV1M445xGRGuA0\n4Nuur38GzMc8RPOBHwNfHsZ2Hq2q60VkD+AhEVmTGZVE3b9U7URE0pg/4gWqujXzdWL3c0dHRD6N\nEQ5Hu74O/f1LyJPAeFVty9iO/hvYb5jaEoVTgd+rqnsEX073MzGKLhxU9TMFHmI9MM71eWzmO4B3\nRWSMqr6dmT69l+9JgtopInHOcyLwpKq+6zr29vci8u/AvcPZTlVdn3l9T0SWYqadyymz+yki1RjB\n0Kqqd7qOndj9zCLoWQvbpjrCvkkRpZ2IyKHAz4ETVXWj833A71/ydroEPqp6v4jcLCKjo+xbyna6\nyNEKlPB+hpHoszkS1EqrgP1EZN/MqPwM4O7MuruBszLvzwISm4lkEec8OfrITAfoMAPw9DZIgNB2\nikiDiDQ674HjXe0pm/spIgL8B/CCql6Xta5Y9zPoWXO3/UsZz5BPAFsyKrIo+yZF6LlEZDxwJ/BF\nVX3J9X3Q7z8c7fxg5rdGRI7A9Ekbo+xbynZm2jcKmIbreS3x/Qwj2Wez2Bb2oAXzx14HdAPvAg9m\nvt8LuN+13UkYb5W1GHWU8/1uwP8ALwMPA7sWqZ2e5/FoZwPmwR6Vtf+vgGeBZzI/ypjhaifGY+Hp\nzPJ8ud5PjBpEM/fsqcxyUrHvp9ezBpwLnJt5L8BPM+ufxeVl5/ecFukehrXz58D7rnu3Ouz3H6Z2\nnpdpx9MYw/mR5Xg/M5/PBm7L2q9k9xMz6Hwb6MX0m+cU89m0EdIWi8ViyWEkqJUsFovFUmKscLBY\nLBZLDlY4WCwWiyUHKxwsFovFkoMVDhaLxWLJwQoHi8ViseRghYPFYrFYcrDCwWKxWCw5/H/vyhPu\nJQ0Z5wAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aabb2650>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ru6tAqakAgCnGJGqMB/CVqgEgk7oYe\nZiJmEp04AUM0cc2YKGpgYjKRuIyJGZiMmKWNZlTGNYqOUTAboOMaUYMraCSAAr1v3++PU5e+XXXX\nqlvV1XDe57lPLXc799at853zraKqWCwWi8XipmKoG2CxWCyW8sMKB4vFYrHkYIWDxWKxWHKwwsFi\nsVgsOVjhYLFYLJYcrHCwWCwWSw5WOFgsERGRfUVERaRqqNtisRQbKxwsliIhIleJSI+ItLqW8a71\n+4rIb0WkXUTWishnsvY/Q0TeEJE2EfkfEdm99Fdh2VWxwsFiKS63q2ratbzqWvcr4P+APYDLgDtE\nZE8AETkEWAx8EdgbaAduKm3TLbsyVjhYhi0iMlpE7hSRv4nIayLydde6q0TkDhG5XUS2i8hTIvIx\n1/qPiMhjIvK+iLwgIqe61tWJyA8zo/atIvKEiNS5Tv0FEXlTRDaJyGV5tv0A4OPAlaraoap3As8C\nf+ecA7hXVVeoaiswH2gWkcZ8zmexxMUKB8uwREQqgHuBZ4B9gOnA+SLyWddmpwH/DewO3Ar8j4hU\ni0h1Zt/lwF7AeUCLiByY2W8RMBE4KrPvN4F+13GPAQ7MnPMKEflIQFNPEZEtGQH0Vdf3hwCvqup2\n13fPZL531j/jrFDVdUAXcEDAuSyWxLDCwTJcmQzsqaoLVLU7o675T+B01zZPquodqtoDXAukgE9k\nljTw/cy+jwL3AbMzQufLwDdUdYOq9qnq71W1y3Xc72RG+89gOvCP4c2vgY8AewJfwQiS2Zl1aWBr\n1vbbgMaI6y2WomK9LizDlQ8Bo0Xkfdd3lcBK1+e3nDeq2i8i64HRzjpVdc8G3sDMQEZihMi6gHP/\n1fW+HdOR56Cqf3Z9/L2I3ADMwtgaWoEPZO0yAnBmEmHrLZaiYoWDZbjyFvCaqu4fsM1Y501mRjAG\neNtZJyIVLgExDngZ2AR0AhNwqXUSQgHJvH8BGC8ijS7V0seAFtd6t41kAlCTaaPFUnSsWskyXFkF\nbBeRf8kYkCtF5FARmezaZqKINGfiEs7H6Oz/CPwJM+L/ZsYGMQ04BbgtIyxuBq7NGLwrReSTIlIb\nt4EicpqI7CaGI4FvAHcDqOrLwNPAlSKSEpFm4KPAnZndWzD2ik+JSAOwELgry0ZhsRQNKxwswxJV\n7QNOBg4HXsOM+H+KUb043A18HngP4xLarKo9qtqNEQYnZPa7CfiSqq7N7Hcx8BywGtgC/Bv5/VdO\nB/6CUQX9HGPj+FnW+kmZ9n0PmKWqf8tc3wvAORghsRFoAObl0QaLJS/EFvux7IyIyFXAh1X1H4e6\nLRbLcMTOHCwWi8WSgxUOFovFYsnBqpUsFovFkoOdOVgsFoslByscLBaLxZJDWQfBjRw5Uvfdd9+h\nbobFYrEMG5588slNqrpnoccpa+Gw7777smbNmqFuhsVisQwbROSNJI5j1UoWi8ViycEKB4vFYrHk\nUNZqJYvFYtmV6eiAZctg3TqYMAGamyGVKs25IwsHEbkZk8tmo6oemvlud+B2YF/gdeAfVPU9j32P\nB27ApFT+qap+P98G9/T0sH79ejo7O/M9xC5PKpVizJgxVFdXD3VTLBaLD6tXw4wZ0NsLbW3Q0ABf\n+xosXw6TJ4fvXyiRg+BEZAomx/zPXcLhB8AWVf2+iFwK7Kaq/5K1XyUmzfBxwHpMMrPZWbnuPZk0\naZJmG6Rfe+01Ghsb2WOPPRARnz0tfqgqmzdvZvv27ey3335D3RyLZZfHa3agCqNHw/vv527f1ATv\nvOM/gxCRJ1V1UqHtimxzUNUVmAyVbk4DnCyTPwM+57HrkcBfVPXVTDbM2zL75UVnZ6cVDAUgIuyx\nxx525mWxlAGrVxshMHcuXHmleR01ChYtMjMGL3p74a67it+2Qm0Oe6vqO5n3fwX29thmH1wVuTCz\nh//nd0ARmQPMARg3bpzfNvm01ZLB3j+LZejp6DBqI/fsoLXVvF59NXR1ee/X1gavvlr89iXmraRG\nP1VwoiZVXaKqk1R10p57FhzHkTjvv/8+N910U1773nLLLbz99ts7Pu+7775s2rQpcJ/HHnuMk08+\nGYB77rmH738/b3ONxWIpI5Yt858dqEKtT3mphgYYP7547XIoVDi8KyKjADKvGz222YCrXCOmVOOG\nAs87ZAQJh16/XzpDtnCIy6mnnsqll16a9/4Wi6V8WLfOzAK86OqCvj7vdVVVxi5RbAoVDvcAZ2be\nn0mmBGIWq4H9RWQ/EanBVL+6p8DzDhmXXnop69at4/DDD+eSSy7hscce41Of+hSnnnoqBx98MK+/\n/jqHHnroju0XLVrEVVddxR133MGaNWv4whe+wOGHH05HRwcAN954Ix//+Mf56Ec/ytq1a/1OCxjh\ncu655wJw1lln8fWvf52jjjqK8ePHc8cdd+zY7pprrmHy5MkcdthhXHnllUW4CxaLpVAmTDCzAD+y\ntb81NcYYvXx5adxZ47iy/gqYBowUkfXAlcD3gV+LyNnAG8A/ZLYdjXFZPVFVe0XkXOAhjCvrzZkS\niAVz/vnn8/TTTydxqB0cfvjhXH/99b7rv//97/P888/vOO9jjz3GU089xfPPP89+++3H66+/7rnf\nrFmz+NGPfsSiRYuYNGnAkWDkyJE89dRT3HTTTSxatIif/vSnkdv6zjvv8MQTT7B27VpOPfVUZs2a\nxfLly3nllVdYtWoVqsqpp57KihUrmDJlSuTjWiyW4tPcbFxT/ejuzv1cV1fcNrmJLBxUdbbPquke\n274NnOj6/ADwQOzWDROOPPLIvN1CmzPzw4kTJ3JXTBeEz33uc1RUVHDwwQfz7rvvArB8+XKWL1/O\nEUccAUBrayuvvPKKFQ4WS5mRSplZgDuWoabG3xANsHWr2T7IlTUphnWEdNAIv5Q0uOaGVVVV9Pf3\n7/gc5jJam7E6VVZWhtos/PYFE7/gvH7rW99i7ty5sY5lGf4MZTStJT8mT4a33za/26uvwjPPwJ13\nGoO0H44r6xlnFLdtNrdSTBobG9m+fbvv+r333puNGzeyefNmurq6uO+++yLvmwSf/exnufnmm2nN\n+MRt2LCBjRu9/AQsOxN+/vKrVw91yyxh1NWZjv7yy2HmzGA7BJTOlXVYzxyGgj322IOjjz6aQw89\nlBNOOIGTTjpp0Prq6mquuOIKjjzySPbZZx8OOuigHevOOusszjnnHOrq6vjDH/5QlPbNmDGDF198\nkU9+8pMApNNpfvnLX7LXXnsV5XyWoSfIX75UKghLMoTZIaB0rqxlXUPaK33Giy++yEc+8pEhatHO\ng72POw+33mpmCo5AcJNOw+LF
xVdBWJJj9Wo47jhjX/Ci7NJnWCyW8iTIX75UKoidnY4OI4QXLjSv\nxcw+M3my6fwXLDACIJUybq3pdJm6sloslvLE8Zf3mjmUSgWxMzMU2VHr6mD+fLj44gFj9fjxZZqy\n22KxlCdBeupSRdPurAy1PccxVg8Fw1KtVM52kuGAvX87F46/fFOTUT0MhQpiZyUo/1GpsqMOFcNu\n5pBKpdi8ebNN250nTj2HlO0xdiqy/eVLrYIYDvjFgYR976Wug/j2nOEWhzLsvJVsJbjCsZXgLDsb\nYR2vl92gqgquvx7OP9//+85Of+Oz4wk2c2Z4p++c3zmeY2guht0iKW+lYSccLBaLxY1fx+90vB0d\n/lXVRLyjkf2+d9PUBPfeC6ec4n9uMOffe2/win9tbISNG5OdQZSNK6uIHCgiT7uWbSJyftY200Rk\nq2ubKwo9r8VisbgNxq2tpkNvbTWfnZF6WN2EON+DqbPQ1AT33GMEQ9C5AW6/3VswgPn+ttuiX28p\nKVg4qOpLqnq4qh4OTATagWUem650tlPVBYWe12KxWKIYjIPiQPLhlFOMl9Jbb0UzVt9/f/DxwtYP\nFUkbpKcD61T1jYSPa7FYLDm2hbVrwwMAg+JA4pJOGxtDKhU9+DBmPs2yIWnhcDrwK591R4nIs5gq\ncBcnVdPBYrEUl1J62QSdy8u20N8/4FmUjRMAGBQHEtfm4I4biRJ8uHo1PPRQ8DVnpWcrH1Q1kQWo\nATYBe3us+wCQzrw/EXgl4DhzgDXAmnHjxqnFYhk6Vq1SbWpSTadVRcxrU5P53k17u2pLi+qCBea1\noyO5c61Yobp0qWoqpWq67GhLY+NAO/yOfcst8b53X3dHh/nO69xNTapbtvivd5Z0Or97FQSwRhPo\n0xPzVhKR04CvqeqMCNu+DkxS1U1B21lvJYtl6Ajy8nEnfwvzFir0XCJQXZ1bGS2MdBr+9reBmYcz\nK8mOA4n7vZuga3/lFf+EiGCinx9/vHxdWZNUK83GR6UkIh8E3lVVFZEjMYbwzQme22KxJEwUY+/M\nmcmklwjzKIorGBycojhB6iq/FBVRUldMnmyOOX++sX8cdBB897tGeD74YLAh/JJLvAVDuQTLJSIc\nRKQBOA6Y6/ruHABV/Q9gFvBVEekFOoDTNakpi8ViKQpRDK5RBEiU3EBJexS521jMxHnuY7e2wu9+\nBzffDN/+Nowd62+TSKfhwAODj+fV1lIKjkSEg6q2AXtkffcfrvc/An6UxLksFktpiGJwTSpdeJIe\nRe42jhlTvMR5Xkn5nPrPV1wBI0b47+sYtt2d/dixJirbXcfB3VYn4M4dZV3M7LDDMvGexWIpPs3N\nphPzwuncnE7dizjpwoPO5UcqNZBs0K+NULzEeUGzJjCdvIgREl4JEZ97bnBp169+1b/AT0+PKQD0\n/vsDwXWdnebz9OnFqS9hhYPFUkRKWSQmaaJke40iQAo5l19uzVQKfvITM/J/9FH/Nr71VvEKIUVR\nhamaPE2LF5viPYsXmzYfemhuZHfQs9HW5r++WFHWwy4rq8UyXBiKIjFJ0tFhPG7OOw82b4aRI42e\n3K3nTqVMGonjjzfX2d1trrO6On66cC/jbnMzzJoV7AkVlJH2lVeKVwgpiiqsrQ3Wr4fLLx9QIV1z\nDfz1r2Y2EJWqquBZyi9+AWedFf14UbCJ9yyWIhDVDbRcieqemm2Qrakx2z30EBxzTDLnvOceMwPI\nJxV5ZyeMGpXs7+B08i+9BIsWQXu7/7ZO5tb99x98bXFdc6urg4VJdTVs2+aUFC0/V1aLxZIhKS+e\noSBq9TOv7bq7zeLkH/LreLO9bk44wf+cp546+JxOvqQo3jqOuspP0MUVDNkCrK4uePuqKpg2zQgH\ntxAJEwy1tQOzsKoqM3tbuNB/+4qK5J8pKxwsliKQlBfPUBBVsOUrAP3SYPjhHCt79B1VTZdUISQv\nYeh0+KmUsRuIGI8ld12IbMEQRlMTXHedUUc5bQX43vf873d3d/LPlBUOFksRiOIGWq5EFWz5CMCg\nWYkfbW1GhfO1r+XvkppELeYgYVhVBTfeaNRqjgA64QTzGiYYnDxOblvNoYcOzKzuusvcgyCbQ0MD\n7Lbbe9x2W0gipxhY4WCxFIGgZG9xvHiGgqiCLWy7MWOMh5ZbBRTm/ulFQwNs2uS/X2cnfPGLJlq7\nmEFhYcLQMTw73HprtGt1zL5tbbBkiXk/evTADKm+Ptwrqr19K+eeOwroCj9hRKxwsFgKwC9iNWld\ndymJKtjCsp2efz709Q1WAX3+8/EjoauqjKeU336dnXDHHSZdRRQ1k9dvphoeeRx3NphP1PfcuaY6\n3LZtA98FH0OBLo4++jucdtp3mTp1KpOTcoVLIntfsZaJEycWkpzQYikqUTKWOtlKFy7MP1vpUBA1\nG6vfdum0dxbS+nr/dc5SV5d7zpaW8P3cGVH97rNXexsbzWvYtW7ZYtof5Zzt7arz5qnW1MTLJAuq\nVVVxtu/XadP6BrWThLKy2pmDxZIHUT16ktB1DwVRjbhe23V3G+8aL/r6wmszi5hYB3dMxYc/HGy0\ndpNtDHdmCmvXwg9/ONgG4DUL8PodHSN6dtvr642dwZkNdnQY99arrzbtjZ8wUOntVaLHJwsHH+wT\nKVggVjhYLHkwnF1VoxJVsGVvt3Chvyqkq8t0prW1A3mIsqmoMILBOaZfx+yH2xieHYcRh6DMs25e\ne814GDnn8tsOTNyDcfdVwK9T78WUx4nG/PmRN41FIukzROR1EXlORJ4WkZyoNTH8u4j8RUSeFZGP\nJ3Fei6VYhKW9GM6uqsUmKN8SmM7RLy0GDL5/7hmaV7U3Lxz9v3vffBL6Rck8W1EBDzzgPZPMprKy\nn8MPf4bp07+MsRV4k0pVx2rnTTfF2jwySeZW+rSqHq7ekXknAPtnljnATxI8r8WSKKtXD06INneu\nibJdvXpgm6QSziVNmFArRa6nqEn0amu9v3ffv3y8mxyjeT77erWj0NTlDn19whNP3MuLL/6Wo4/+\nL4yAcBYjMG+5RTjzzHhqooUL4YknYu0SjSQMF8DrwMiA9YuB2a7PLwGjwo5rDdKWUtPeHlz60TE6\nhpWIHArDc5gROaqROam2BJX1FPFfL6K6cqU5zoIF5nNUA+2IEaasaEuL6rHHxjcGe/2OQcbwdNqs\nv+qqXhXpDzUeH3NM+457tHmzMVpPn25e33vPfN/SolpZGa+t9fX92rH0V6oLFuhIeFWT6NcTOQi8\nBjwNPAnM8Vh/H3CM6/P/YsqEeh3L1pC2DBlROgKHJDvbQmswhwm1oHrG+QizKO0NqvucTqvOn+/f\n8UfpmLOXVEr1W98yHkX5eAllLw0N5vrmz/f3Ukql2vUznzlZa2vnKLSHHnPatPB729Hhfz6/pYZO\nban9J1URPQL6tIyEwz6Z172AZ4ApWesjCwf3YmcOllITNFIVMS6pbpJwVU1CyIQJtXnzo
gs9L9zC\nYMGC3PbW16vOnTv4HoTNrpYuNR1wUJuCjuH1+0TtTKuro28rolpX16/Qr9XVXQp9CtsUNitM0vHj\nP6/V1W0KYTMH1Vmzov2eK1fGEw5Cny7kMlXQiaCaQL+eVCW4DZnXjSKyDDgSWOHaZAMw1vV5TOY7\ni6WsiBvoVKiralSX2DDC9OJr1+ZvQA/z+HG+W7wYfvnLwYFoQYGADz7on1qitdWkzMgOJgwyLJtx\nZzDptDn/hRfCD34QzVCtCh0dxg7Q09PNXnv9F4cfXs9ZZ32Ao49+gI99bM/I6bcffnigilsQxxwD\nK1bAZz9rDPh9fcHbN9DGeBL2gihUugANQKPr/e+B47O2OQn4DcZ36xPAqijHtjMHS6kptS1h6VLV\n2lrv89XWmvVRKNbMIUhdFbS475Xf7CpMZVRTMzAb2bLFvF5xRXyVi7MccogZuS9davT7+VxXOt03\n6F4F/X5x77Xf/Xfu3fz5AfebzdpBbaIzhySEw3iMKukZ4AXgssz35wDnZN4L8GNgHfAcEVRKaoWD\nZYjIV80T124QZrQFsz6KeilMqAV1hkFCL47O3700NIR3glFVRg0N4Yb1KGqi2tqB7UeM6NcLL3xG\na2vbVKRTo6iEYLBqMcrvF7R/PqxY4dhU+lXo1zTbtInNuopJO05SNsKhmIsVDpZiEtSZx7UlxBUo\ncUbkUWcsxfBWiust5F7mzo3W5qgzgREjcmcjc+ea/eOM3geWzVpRsYdOmLBAKyu7Iu3jjPzznVHF\nnTl40b5itbbU/7MurLxCW5i9Y8ZghYPFkgBJexrFHZXHGZH7dSjt7UatMWvWgLrEUb/4CbW4Qq+l\nxd9oHLbU10cTakHqkuxlwYJo933w4j0rqKvr0ZtvNg10Pw9RBHW+M6qCVZMRLrqsDNIWy3AiKSOw\nQ9RUGu5soM8+Gz1jp5fBePVqmD7dFJd3uOMOY3B99FF/I3lcA3pzM3zlK9G3dxO1OtlBB5l2RzEO\nX301XHKJ+X0KDXLr6KhiwwbTBWbniFKFa6/1z6gbnHFVc76pp4OapjqWL5fCMvIWetExsMLBssuR\ndF6kKBG02dXPampMBxSFbC8pR7i5BYNDayscd5wpYJ9EWvBUytRKWLw4/r5R04gEpf724q67YPr0\nd7n77r/R2now4Yke/COO3b9BtuC8+GL/xIPGq01pbc09di2dnMktfJB32cRI9mQTB9S+SfN1M0hN\nPj36hXoRlge8spL+vr6IKQqDscLBssuRdF6kKEVvsmcqfknnvMguDrRsWXDai87OZBP/7bNPfvtV\nV5tZWJjrZhx31c7Ofr7xjX9n06YLgNmY5AuNIS1R/ATEtT9ULhl7O6m3Xskp5DBIWHR0wF0DBR+a\n9/4QX2s7BGjKOWYdHdzABaTchXe6BdYnkFMl6GGDcJ/XGCSZW8liGRYknRcpKJeQ832QJiCo40yn\nc4sDrVsXLBy6upJL/NfRYdJc50N3N/zsZ7l5qbxw1DqLF8Oxx4KXasbQxpgxPfzgBz9gxYoLaGpK\nR2yN9/F6t7Zy1znL/ZNogWeyrdRxn2K5HkcTW0izDaGPNNtoYgvL+exgwQCDH6ygBFdhya8iJK6q\nSKpfT8JwUazFGqQtxSDIhbK+fiDHTRyCDNxhHj8zZ/q7RHoZMFtagl0oa2tzDdj5pueIY3gNSlnh\nZ4htb1f95S/79YILtuicOY/pP/7jP+u4cQcobPE5Tv+g4zj3PThdRr+aqObcdULvjshiz8aGGIDb\nSWkLs3Uhl3l6DuUcM+hByafCUhEN0kMuAIIWKxyGjkJz/ZQ7zv+rri5XOBSaHylOsJcToObnzllf\nn9vRh8UHuF0+3deaj2dWVFfWmhrVz3zGv5N2e1z19/fryy+/rN/+9jKtqWlVke3qpKQQeU+nTbtE\nL7zwV9rY2KPpdH9omx2vLb9z19KmabZ5t4ut2sJs/8bGdUtyMgp63ewwt7YRI4IFi9fD5pFd0AoH\nS9EoZfbOYhFFuG3eHL3sYyGEBah961vB/c2VV+Yec9UqU97SqxPOLlPqd+5UynSqQdcZtW8cMSI4\nkZ5Iv55yyp/09NNP11GjRimk1OQm8r/3vm63Pj/uqlWmHU7MQ22t6oi6Ll2Z+ow2+Z3LFVk8qIN3\nItXiBnqk0+am+o0S/B64mhr/YI2g4AiPH8gKB0tRyMdnf6jwEwBRhVucDKyF4temBQtUJ04M7m/m\nzPG//uw4By8VVFDnXlVl+isnRXY2UaKYHYEUfK6tCrN19OjROnv2bP3ylx/R+vre+Pc+5MfNESjv\nmQtYxSRtYrOm2apCr6bZmhNZ7NmABQuiCwbnT+IEm2Q/mHECOvyEVYQfyAoHS1EoZYdZCH59xIoV\n0YVb3AysheJ05jNnqh58sOmYo0T2zpyZ/zmjDnzdNRSyyb7XqZRJV3HsseZ62tr69P/+7//0mmtu\n1Orq7Z7Hr6/v1Oef/4v29/eHtsv33uc7cslcQHtqt3j2gfZ2f1WP09CGhsEP4C23+AuvefOCf4Sq\nqvz+eFk/UFIpu60rq2UQw6H8ZVAQ2/HHm+ArL7JjGMaONf8+L1SNC2qSPP88nHfeYC/EKPFMlZX5\nnzPM89FB1WQA3bw513sqO0Bs3337GT/+af74x8dYtuxxLrxwJe+99x4Ao0c/wqZNLVRUVNPVVZ0J\nHhOWL6/lkEMmRGqXr8fYsmX4pj8NClCZPBnWraNu7FjO4Ff+N8FJ2eq4h916a7Br6OWXmwg+JxDi\nhBPMq1905bx5/scC/wc325c5m6wf6K35898IPlE0rHCwDCKvP22JCQti6+72XjeUwq2jwwSn5VPL\n+KST8j9vczOcc060bXt6vPvXnp4enn32Kdavf5w//OFxrrnmCbZt2wbAhz/8YZqbm5k6dSpTp05l\n3LhxOyLBvYLH3O3yC3zz7QtXrMh/5PLgg/6dbyoFJ58MM2cObmxYwFlNzeCbdeutwQ/m+++b4Am/\nYtgVFUZAOdeTHZYdhCsoY9P8+VuCN45GwcJBRMYCPwf2BhRYoqo3ZG0zDbgbUzEO4C5VXVDouS3J\nk9eftsSsW+ffyXZ3m/+sl4DIFm5vvRV8nvXr47XLnR4jK56KRYtg69Z4xwPTV5xeQFCto5uIQk+P\n6V+7u7tZs2YNjz/+OI899hi/+93vaMt0kgcddBCzZ8/eIQxGjx6dc5woKTqyA99C+8KODhM04Ud9\nffDIJaij7+qCj30st9ETJpjjeu2XTueeL2zaPXKkKZztJxw6O6GpCa67zjx8fpK1RCQxc+gFLlLV\np0SkEXhSRB5W1T9nbbdSVU9O4HwWF0EdUj4E/WnvuceMLJM6V76MHRu8vqrKWzj095sCMrfeato+\nYYJ/Th+v/74fHR2m87/6avO5q8vcM6fozaGHDqyLQ12dyZNUyD1etiz6tiK93HbbD7j66n+lI9OB\nHXLIIZx55plMmzaNKVOmsPfeew/s4ARs5flA
ZKurAvvC228PjvxTDR655DMlHjfOvxqR10hpwgTT\neK/OP5WCAw80D8S0af7H7e01o5vLL/e/lhJRsHBQ1XeAdzLvt4vIi8A+QLZwsCRMdoWu2lo4+2z4\n9rcHkpO5CRMk7vXXXWe+e+012LTJdHjHHw8i5rl2d36TJ5fumqNw0UVw440Dwi2VGuhXFi4caPu9\n9wZHNofNkhyh8N3v5qbDcPqgKVPgS1+KPnp3EIFvfrPwexumGXGjWgk8wFe+8hWmTp3KlClTGDly\npPfG2cminJt6zz1mShZRYOTMMjo64FaPh/T++4Mb/4lPBAumceP8b4TXj93RAaec4v/DnXceXHPN\n4DaecIK/AOvshBNPNDODiy4yD6IX5WLYg2S9lYB9gTeBD2R9Pw3YAjyLqQh3SMAx5gBrgDXjxo3z\nt9Dv4oRl7s123cwn139jo3kNcoUcCvfWKJ4ujktjUOWwpibjoZNPTIfjUx/FE7GyMtp2URxU4gQn\ntrcbB5mammiFbOqqurRl3hPhP2jYw+e4YMUNkAl6SGfNCm58UHHmoIAWPzetMB9gd+Ugp41BcQzu\naMYiuwQCazSJ/jyJg5j2kAaeBJo91n0ASGfenwi8EuWY1pXVnyjBSW6PvCAPwC1b8itcEvdZTirq\nOs5/K8pE5w8SAAAgAElEQVS2cWsc5FvoJc7iJXSjxm9s27ZNr7/+d1pb26YVFdEK30MmlUTNd8I7\n9LhRw1FGEGEP6ZIlwefwq6caVk3Ir2Rd3OC3piYzEonin5t0LdqsP1YFPKkR+tewJRFvJRGpBu4E\nWlT1Lo/ZyTbX+wdE5CYRGamqm5I4/65IFHWB493nvPfb5vLL808RH3UW7KeFyEctFcdoHmYjfPnl\n+DUOkkypX1trNBJ9fcFG2SD33eOO6+fmmx/kD3/4LY8//jhPPvln+vvfBOo9zqiZ19wspQ20Mb57\nLXS/H1zYIo6uCqLlQQ9zQauuDjYQnX56rt70hBPMdfjp98Gs83qAo/oAu9u4aVM0u0Zsa3wAHn+s\nw+Bj0Q/gTxLeSgL8F/Ciql7rs80HgXdVVUXkSEzWwM2FnruUBOnrkzYKRyHKs+t03KrBHeTatfH+\n626iuLcmXVwnzn8r6D6pGrXxSSfFE1Bx+0Y3NTXGK8jd3kMPNc/PSy+Z/mWPPeCVV+CjHzVtXLYs\nOE331q2tnP53v+T0yl8zZ8wYnjtiAf/5wgfo8Ni+mm4EpZvcG15FL824RhN+HXrcjjPKCCLopra2\nGu+dRx81/sCdncbIU1tr3D8//3lTdOK++8wN6+oyAqO/33wOwu8BjltkorXVeCNFNWLFssb74PPH\nqoQCImNcFDr1AI7BDEeeBZ7OLCcC5wDnZLY5F3gBeAb4I3BUlGOXi1opiUSKSRM1rUFLS7TEb/mU\nPIw6Cy6WijWKOijKfYo7k8+3RGRDg7nXXu0Nsvk0NPRlVEN+6qE+varmO9qf2XlBzQKVgCykc7kp\nPJVEzJQNkR7EQm7qsceabZzUFE7x6OzMiUk+wF6h4UHHWrCgtB2Czz2z6TNKRJKJFJMmzCjqtCFM\nxfn229GLvLs7uiQyexYjTYXXfQq6vrgCKm7f6D6PX9rqQmwYabYNyizawuzQLKShqaajpmyIkv8j\nyp+hoyPcwh8lR0qcpb4+/AF2j0CWLlX9wAfCrzOuEStffP5YVjiUiKABTSqVXyLFJGlvN89IKmUW\nv8GK34DGSQUTdQBWXa169tlmBHzFFdGL1A91vqagnGf5CKjslPq1teb+z5/vf62Njd736sYbN2sq\n1Z13H5edWbSD2nhZSPPt0J1kUUEj6hEjoo+aoya5q6/Pf6rrPkY+hTuC2ljq5GNFnjnY9BkhBKlC\ng2JyWltL465cVwfz5wfXuwVvFadXKpggUim44AK44YaBNBVRDMtB6tv+fuP+XWyCitjnkxbEuZ+3\n325c+zdsMOU0333X/EO90J4e7rpNOfrT7/D444/vWNatmw18J8bZFRDSaaWq9f2cymMpuljOZ5nB\nQ/RSRRsNNNBGFb3eVcpqasyST8qGs86CQw4xuu/u7sHG3+rqGNeUQcT/BjoE5UjJpj5jlK+oyL2+\nptwSnwW1Mdu2UmxjZFy7SFySkDDFWobzzAHMIKOcias7LyTeoRjFdeKwZUv82g1hrrerVsWsA0Ov\nXsHlepTp3XW33XbT0047Tb/whft901d7LbW066yZPdoy7wntqPVXxUSuUvbtbxeuBgmKJRgxwsww\nwnyYkyo7l/3jOnaKJNQ8UafBpbI9eJzncOjVBPrfRDvzpJdyEA5h+vooKshyIbuzCyrO4vz/RMxr\nfX2wIPRzF3dTquI62Tj/n+xz19Xl/l+de+TYO/3+22HZnD37joy+vx/05Ztv1r6+PlWNb8NoYot2\nLP1VfF98r6WuLhlVSFjH7qieHP3bggW5P3jUG9HQEM1IVleXfGccJUah1EVRsmwcScU5FHyAYi7l\nIBxUw+sD+3YGZVT/YMUK839yBlzpdLCzRypltnVSzPulmncvbr2916h7KGwPQf/TbLWz8zs3NPhf\no/PfbmmJZosd3Km79P11dYNG06tWdg5+xmo6tZGtmuZ9b6+iY481hp+gxkZqVEKdVT6CymskHfVH\nWLky3HvIq4ReEoTNCobYyEZCEdLW5hCBIJfkBx+MroIcKlauhKlTB7fR0b1LbiwUkGtPCQv6qqkZ\n0Ns7cTmdnWZJpYxq9POfT6ZWRBxVblC+tooKeOAB48rv5TLuRW+vcv31b/HEE2/T1eWE7Hih1NBF\nD9Xe+v6ODvjqV3f460/mbN6+5AqWjb+IV9fXMP6dNTT/7DS0rY1lzORVxjOeV2nmLnOMR4FVq/wD\nvMJ099m1CwolbuwDmJudHeji/rOtWAG/+IW3vWDyZPjJT8w99PqB02k44IDCr8uLsBiF4VAUJQpJ\nSJhiLeUycwii2IOEQlNOtLcHz8Dr6garTxoaoqtzs0fhzozaq7axMxsp9F7FUeWuWhU8uHR7KUVX\nd/cqXKYwW6HDd7tUdY/Ok5vC9f3Zi+PdE0fXFKci2YIFxXGxLMS/N+iHD3ILTToNRT6UyxTZBeWW\nW6kYy3AQDkHPZ77ecg5J2LRaWoI7e5EBl1RHzx43UZw7d9nSpcHbFmJziKPKjRI7kF0qOIpWpLay\nVb876Vp985obdcQI/5xFTSP6taNut3g3MvtinAcgSgfrFV3nzj44b54xMhXT7z77gY2idys00GWo\nolCDzu1kcyyl0HIJqZHwqibQ/ybWkRdjGQ7CQbU4njhJ2bTCXMdragaSz0Ud+FVWDjZUu5NazpwZ\nvO+xx+b/X46bcC8sdqOpSXXbtm79wx/+oKeffrdWVraF99ts0Q7MFGhV+tOaru/J2aaxMXM9K1dG\nu6FBF7N0aXgnG9TBlrrzzA4aC3uoUin/pHn5nLOYwi/7nEF/0HzT/eaDrSFdvmRK1OYUoWlvN0s+\n+YP
C8pA5aW/C9O+OKjgslf1dd0VPJnf22eZas1Wtq1eb9DZB7L57eEoZv2uKo8pdu9a/4BYolZW9\nHHjghYwatTRT5ayWiop3fbdP00oVPRm7QSe0wmR+y8YRY7l98Rvc/3ANYPI0nX565npW1/oHVwTh\nvpi33gr36fcL1Iia1CpJf/zsLIaHHGLyIfmVwevshPPPN9vlW7wibubEJAj7g775ZuG5k6Lg8RtX\n+BvCYmGFQ0IElaiNkpQymygdoV+m0wsvNNuNHWuSvPnVSBeBhx4a6Hij9GE1NSYIzquQ0IwZ/vXf\nHU46Kfi/HJS9dcLYbhpqhdbO3OCq7P5x82YgEyzmRV/fT2lre4yzzjprR2GbN1+uZcbU9+nVCtpo\noJ42lAq+yC+YwsoBY7CLur5WzkrfwVn/nXVBzg3Jp2i0+2KiGHr9KhNFGWHsv39y6XK9mDzZCCGn\nVJ6X8Xjr1vxGUENJlD9oKYRWkimCs0gqZffxwA2YbIA/VdXvZ62XzPoTgXbgLFV9Ks45hiLzaRzi\nOihEHfH7RfSOGeM/KLziiuC2Vlaa5YEH4Jhjwq/NzeWXm7nzLbcMFOc66STzGvaMhtVDDhzoTu/l\nNdmfqs6ngd1y9q2SXpqbq2hvb+ePf/wjb/y+FTjF91wXX/wlrrnmq4O+2/t/b+Xt+m+wrO24XO8g\nP/y8Twr507o7+7Ao2KYmf4+jsEynL7xgjp1Uulw/nDD+sWPhnHNyy+ZBfiOooSSfsqPFoJAUwSEk\nkbK7EvgxcBywHlgtIvfo4BrSJwD7Z5b/B/wk8xqJJGsBFIsonblTbhfg2muDryesZgHk3/f09Zn/\n/KxZAymjr/VMtp7LBz8Ie+8N27cPfHfHHWZGEaT9qK0Nr4ccONDd3sEDHM1yZnimhbin8xQ+N7WX\nvZ58kgl9fTQzm98ynXYaco5VXwdHHJH7PevWUde+mTP4lX8js8n+cR1Jn++ftrp6YOoHufnJnXqw\nIv71YB3CZh2LFplRghdJdNbZI6BXX/V/SPJ18RyqUWOcoiLFZMIEkyKkGAKiUKMF8EngIdfnbwHf\nytpmMTDb9fklYFTYsSdOnFjyYEM/wlxKg7yWGhuNh2LUym0OYcF3hQbHOudcujR61oJ8zllbG83m\nGJi9lV5dyGWqeKeF2A7aXlGhHdXV2g/aHph8bot2rFyd24B8cnE7P4qXy2i+yeG8jJf5GF3zdS8N\nM3JHwevhDYq6zMfFcyg9lcrh/KrG8J31pymbrKzALIwqyfn8ReBHWdvcBxzj+vy/wKSwY0+cODG2\ny3BSpSjdRH0G/LaL2kcEXU92n5BvTQGvc86alYyg8VtGjIj2OwT+1pnUE34n6ff4bhWT/OsWeI0s\n4nSmlZUmd4pfUEdYvpEoSxKjn7B85XEexqgEjej8HrS411puo8ZSekq5z+1xD3barKwiMgeYAzBu\n3LhYuvxiqJ+iOHyoDsxsr7vOrFu/3qgdu7vhvPOinautzVQDy9ZQeM3sk0rI6NzbuMGtcbjoomgz\n/eZmmDevHy9ni0p3lTIPvMzOk1nD2+zjHWH8PqYM3Pz5AzukUibFanY4uRcVFcF6Pa/I5VTKHFck\nOKWvQxKqncmTzQ+wcGG8/QpRjQTpB+vqzKtX1HMx3PmKzVB4SjkU0RgNyRikNwBuJ84xme/ibgOA\nqi4BlgBMmjRJo9p9ki5F6RBUnrGry/Qv2fYDd4T/woXR1YF1dUYN7P7f+Ak3r1KZYf2ZFw0NxqD8\nyCPx942CCEhvD9z635564Y0bN7JixYod6au3bq0FHgKqgQZqanqoqalguf4dqbYAw7APdXT62xAW\nLDBG0h2+pxjX0Sg63J6ecNesbPr7zWihrs7cmLq64PrGcfXwfvr3oHzlQSmt89XdB43oOjqMQD7w\nwMJcPHeWFBWFUERjNEDBUw+MgHkV2A+owZQCPSRrm5OA32AGeJ8AVkU59sSJEyNHyBcrYv3ss4Nn\n336aA3eCtkL1+UGz5OyEevlqLrKL1/gv/lHBnve+vldb6v95h66tr6FBuxoa9HvNzfqRj3xEwaSv\nrq+v1+OOO07/9V//VR955Hd6yy3dAzP1lauT0aF5LanUYB1hUsacKEt1teqkSf4PUSpldH5R1BVB\nus+wP1GSKa1VS5M+ohwqSA01w6FMKMZF9WVgHXBZ5jt3DWnBeDStA54jgr1BdSBCOorOvxilKNvb\nwztdv2ylzvMZpsYuxFYXlAI7lVJdssQYgufO9c4xlE572zxnzfK/7lraNUWbQlhd40zfw3ueeYW2\ngJ5y3HF6zYIF+vJVV2nvlVd6d0yF1tCMuuQjzZNYnLzoQdsEpbkOu0de0r/YxtNS5Dwqh7xKQ43P\nPSgr4VCsxZ0+I8zuk+9AIsiAHZaXKGhxCySv/+WIEYNzoAXVVvASblHrIof2G1tyb0CY19AVXKlL\nOVNncbvOqr5Lly7p0pVLXtAm3tM02wYZfx93F613Lf3ptDlnWIdVaGc9yfv8vjesEA+fQhbn+sME\nhVeHHvXhL6XxtBTCqBy8hYYaW+wnnHwGEmHPVlheosrK6DWkkxRuUQbTQp8unPW0tizt8j9ulsrH\nuQEtC/4S2Wuor75eH/+nf9L2qipPF1MvL6Idi1/KVPcPVoiap6HBTJ2iVOXJlub5ePgUsjQ0mOR4\ns2aFezllP9DFmDYnQSmE0VB6C5ULtthPOHEGElFm4i0twTVH6uv9+x3nGFFda+MItyiD6TRbtaX2\nn3RB6l9VxFv1444dcC8dI/bWpibvfbIL1PeD/i/erqSBS02N0bl7Nt4lDaNUGAsz1qxaZX6ooIpF\n2cEYW7b4t899DUkKkYULowlDryyDu7r+3bIDEkrZnUiCpnLBqcGxeLFxRFm82HgpebmxRvGEa272\nr5Hu5CW6917j8FFj8q7R0DCQ0eC552D0aJg7F6680ryOGmVcbrNxvI+amoxjiYh59cqOEMVJoYpe\nmrtuZULnCzSot49qA+2MJ9erI9XXxvILH6KpSamv70PoJ802mtiSU6BegGl+SaWC6O729/Zxe5s0\nNw+EhOc0NGUKvqxYEX7jRPyjgcG4nl1wwcCPk0oN/Khe1NfDG28Eb5NKmQdx5UrTntpa/23BRFo7\n7nlBZHvjBN2jUkbrWnYukpAwxVqKmbI76kw8u2qhO011toePe11YeUqnhoKX/TVwltzeri3zntB0\nTafPwLJf69luAr1AO6jVJrZEmgU4Sx/orQcfrCNHjlWYrZVcpjcHFawJKwBRXT24mlDckXGUKaHf\njYtr0I5qmF6wILdtzgzEy3jc3q46Z07wuZcujWbz8JoNWP27JQO2TGhhBMVPVFebGUdnp39FQFUz\nK3DHVXR3m+WUU0wwnN/MpL3dDCjT6dw4hsCYmkyUX3NPJV/rfgXIHYnW08YG9qGJbQCk6GI5n83k\nI6oclI8oexbg0Ap0v/km//HhD7PHzDTjzj+D/W64AVniE2fg
l/bV4YQTzKh45EjYtAl+/vN4WUbD\nyjKC/42LGyjkTBvDpmdOfdUobXPaN2aMf/lOERM56Uwhg9Jce80GgtpR7lkrLeVJEhKmWEsxZg7O\nAHP+/GB1cUND8MArTM0bJyVFJM+7rBFwYGqI7BOkUtpeMyLHWOzXoH7nIrJzBRVSzN45zty5wTcm\nlUp2tBvXoO1MG4uhxw86Zm3t4JiG9nbT9lRqwK6Sz2zAzih2ObBlQuOT/T+pqxt49esr6usHV1d0\nhMuxxwb3L7Nm5ZFTKcyvNuuAXt5BXifoqajQvoAG7DAmB6l8vErduRenGHVQ0WbnhvrdmOpq1c99\nLlmvk7iusFFcWt2BY3GSeEUNenF33oV445RL/iFLSbHCISZB/5OamvB4hnTaLI2NETyF0tEqJA4a\nrM59K9yvNsYIuN+1dBLBm2j6dONKGTRaDpOIjiElyB2zoSHcwyfJ0W3cuAV3p+k36r7llvxH41FD\n0ZPovK0X0y5JUsJhp/JWCiJM9RxWibG11Szbt4cnqKuqMul6li+HpsZe0mwH+gH13L6hQRn/i+8Y\nA0Zrq/n7traazzNmGONHFC8WF+JaavGrh5YhnYYvf9kUawjKV7P77mZb74sw+XLOOAMOO8z/hra3\nwxe/ONi7KJvsay8EPzewdBoaG4M9nNzub/Pnw5e+BF/9KsybF/xbBeE+5qxZ/rp/x/ZRCDb/kKUA\ndhnhEPQ/6e4O9kgMRzPunttpauzd0b9MPrSDtyvGspi5XMEC6vFuQFV/D82yzPvQLr9a9XNXzGlN\nTBwDZ5AAcjL0RXGZDDvOlCkDHeTf/Z2/i2cSHSR4+zj/7W/w7rvhfs91daaU5r//uzGkf+97/sny\norbXMZ4fdph3VTRIpvMO+x1KVa3MMjxJYvpRrCVJtVLQDDuKpiN46dPpPGz0/iP29s0GmGNETnUZ\nbcTc//RVGfWL6PIpU/Soo47ST1RW6mbQrRh309iBZyJGr+/k88lWh2zZ4n8j4uToiRPRl2R0bzGK\necRxhY3b3mKrfWz+oV0SdhVX1qS88ILqH1RXm2C2U07JL/11mla+zM0mNXRfeiCXfNZ0Jae+QO9b\nNN97Hqk366HF2692uyo/W7mSviOPZOpFF7H6k5/kU5s3U/Hmmya/d1DK52waGuDGG800KdvdceVK\nOP74XHWQE+G3YzqUGYXffvvgItIf/ejAPo4qZ/r0wfVEwdzg554bGKEnVYu3WLVk47jCxh2NF7vU\npFde9yRSclt2DQqRLMA1wFrgWWAZ0OSz3euYbKxPE0OqHXTQxES98MIGve2bTYDZlVMe1fpKvyAz\nj0GYO5jMPXqM4CnTnUrpt+fN060+gWTd6bRu/9vfci/Gz9VxxAj/6mRNTcZSnj2yfvhh/zbW16u+\n9168G+m0Lyy3iGrho9v2dnNNUfI0Ze8XZZYRxxEgn9F4KVxNbf6hXYo4fWzQUqhwmAFUZd7/G/Bv\nPtu9DoyMe/zKyomR/4NR/+u+/xPnT5px1xxQAQ1kGU3zvjayNTi2wK0O6Ojw76gzSx/o2ZWVOueI\nI7Q9ldKeujrtD+skwqJyvTocx80quxNavDi4w4uT9S/fAhv5dpDOfkGJ6gqNJo6UyKrADt123pYE\nKQvhMOhAMBNo8VmXl3AAb+GQnSOt4MGXT4fnFUcQGlvg6iD/+tpr2h3i998P2tPcPNCOsE4iaufs\nPlbUzKReS7YePWqnH9eWELeDjGoLcJ8rn1lG0MzGnQcln7gHi6UIlKNwuBf4R591r2VUSk8Cc0KO\nMwdYYxZv4QD9mqru0VXz79H2+VdrU31X5P+6J0kUeKmp0b4RI/TBhQt17ty5etBBB+ls0G1R9p01\nK/ovn48Rs5Drq6kZfMyonX6xja1Rr8k5V76zDNXw0YeNQraUEUkJh1CDtIg8AnzQY9Vlqnp3ZpvL\ngF6gxecwx6jqBhHZC3hYRNaq6gqvDdVVQ1pkkvq0is6eKmYsPJrruJNeujAVSgcTuc54gbVYeyoq\nuD2V4p+3bqVr/nwaGxs55phj+OrYsaQfecR0PUF8+tPRT5aP73oh15dtGI1qQC62sTXqNVVVmdxO\n48cPToTlhd/9C8tbVIzi5RbLEBMqHFT1M0HrReQs4GRgekZqeR1jQ+Z1o4gsA44EPIVDHHqp4n5O\nog1vX+62NuXVVwPDvwxBHV4E2vr7uf+Tn+TOPfbg8MZGPnj00VT+/d8byfTEE6YDCeJrXzPnP/NM\n7/Vul62//jW+d08h1/fQQ4M7t6idfhKeMkGuamHXlEoNtOE3v4nmcRTkbZRPYr/IoxOLpQwpZNoB\nHA/8GdgzYJsGoNH1/vfA8dGO76dWymgx6NVZ3K5ptnlrCdimLUu7wudhEVIs+MUU9IP2feUrRqfv\nqCxqa83nRx6J7ukikusVpJqrsgjLf+SlRwu6vqD23X+/tx49blWlfIytYecIuqZUaiD9tWp0j6N8\nvI3KtQqbZZeFcrA5AH8B3sLYE54G/iPz/Wjggcz78cAzmeUFjDoq4vGDhUOarbqUL2kTm73/62zR\njqW/inRD+//0J+39wAe0u7p6UF6iVtAOCExc55uYqbY2OFld9jJv3mC3q6AETe6aw1F03FHzBDlF\nKZYsCfH7LaKHTVSje1QhFaWaXL42Apu/yFJmJCUcxByrPKmqmqR1dWtobVW8sgM1sYV3GM1zfDRT\nr6Aqq17B8UxeeCpcfnnOvqrKn//8Zx5//PEdy9Z332UmMLGhgUNHjWL3Aw5g1NSp7LPnnsjXv+6t\nwqit9U+BEJfJk+GVVwbUMDU1/sd21FCjRvnXEMjGUdP45ft3vg/S0Tc1FV+Pfuutpmyen8powQKT\n6yjomtx0dpr75HU9TjW500/P75qCjl2Ke2WxZCEiT6rqpIIPlISEKdZyxBETtaVFde4/92g9rdoQ\nEF/g6WLqGrn19fXp008/rTfccIM2NzfryJEjFVBAx4wZo1/4whd0yZIl+tJLL2l/f/9gURykwgir\nMxxUtzjqDKTUKouhHg2HqYFSqfIKNrPeSpYygl0hfUZFhbHlnXFGFdd96WmWHb+YV3vHMr57Lc3c\nZaqYZSpr1dFp0le46JUafvzWWzx62mmsXLmS9957D4APfehDnHjiiUydOpVp06ax3377IV7ZQR2C\njKsf/zg8+qj/vkHH9brgqBQzcVqps3lmG57HjjUzMr8Mp6rxDb1RK7blQ77HthXaLGVMWQsHN3Wf\nmsQZb4036oS178KHz4RJk0xmze5udNEitLcXenrorKigu7+f47ZvZ82llzJhwgRmzpzJ1KlTmTp1\nKh/60IfiN8CvA7jttmDhcOmlJp9RFFVRnPTUxSwcn1S+oyh45USqrIT+fv99urryE1CBNVgLJO6x\ni5ULymJJiLK2OUyaNEnXrFljPmT9mbShgT4RfjNjBtPuvht6e2kAeoA+EW486STGzp7NlClTGDNm\nTPEa+d57ppi0V8f
e2AgbN5qRriNU3nkHfvaz+HEHqZTpFN3uoMXqREqlR+/oyC3E7RBky0mnTYrt\n4eoiGnTd1k5hKZCkbA7Do55DRwfqBBplCqxIaytV27dz0p130tjbSyPmYmqBelX+5YknOKO7mzFL\nlxoDZ6FFY7xYvdqMorNVR9XVJpvpGWcY9YeIeX/55aagTpxMqgAjRphCM5/+tHl97bXiji79CuRk\nF8MplKAYgaoqMxr3W1esWVMpiBIbYbEMMWWtVtq+fTtXXXUVVb/+Nd94/30aPbbx1ei//77xeHEq\n+XzlK3D33WYk76Xjjav/9YqMdejpMedcsgRaWgarC+IEpDkdc1+fEXBtbbBqlXkfNHNIQpddTB29\nQ5Bto70d5swxqcF3tnTTtkKbZTiQhFW7WAugIqI/HjUqOM4gzlJXl+tRko+3Sdx8RfX1qvPnRy8u\nnUqZWIMoaa/dDCfPmSheUTtjxtKh9gaz7NSwK8Q57L///rp69WqaHngg2O+9EJqazN9y61bvdX76\n34UL4corzb5xcGowi5gZhp+6K502KqSf/9z7ur307sNNlx1k2xgxAq6/Ht56a+fz5LGxEZYiskvY\nHEY0NhrBsHatf8H6Quns9O+gg/S/QfV5g2htNUtFham85kdbm7nuOOqH4abL9rNtpNPGW+m884wA\nnjvXdKarVyd7/o4Oo6JbuLB4dikvSmXTsVgKoKxtDjz7rOkY2tqMe2MxCOoQgvS/QQnootDXZ0b5\n6bS/y+hBBxkbQ1SX0uGoy862bYwZAxdcUPwsp0PtSloKm47FUgDlLRz6+gY6hqh1fOOSShnVkJfb\nZJBPv1dgXH29MaRGUTW1tcGeexoDqxdVVfDd75oRrd/6bI+dUsYnJIk7RuDWW4uf5bRc0mwXM+7C\nYimQgtRKInKViGwQkaczy4k+2x0vIi+JyF9E5NJCzpk4TmpnL/xcJh11xIMPwnXXmSC3BQuMd9KK\nFYPVBX40NMABBwSrF2pr4cILTftqa81+QeqH5uZgYTMc3D9LMfsZbuo3i2UISGLmcJ2qLvJbKSKV\nwI+B44D1wGoRuUdV/5z3GSsrzazCobbW2CSCUi7U1Zl1qZR5ddwir78ezj03d/vGRu8O2EsdkR2U\n5qgLXn4ZrrnGO67B6axTKW/1wnPPGbVTb6+Z1dTWmm2/+U245BJvgZZEDYWhphSzn+GofrNYSkxB\n3haiU5EAAArxSURBVEoichXQGiIcPglcpaqfzXz+FoCqfi/s+JNEdI3XiupqIyBOOgkOOQQOPBCm\nTYP99/fuiJ3Mm6edZgq/OJ1w0D4jRpjCOu4ONR9voCjCJJtCvY6iZCotNvnGWpTCkyco6+twj762\n7PKURVZW4CrgDeBZ4GZgN49tZgE/dX3+IvCjKMefGBYL0NQ0UNh91iz/+sBevuOrVpnYA79je+2T\nr396XF/94e4HX2isRbFjNYKy7OZT8MdiKSMohxrSwE+AhYBmXn8IfLkQYSUic4A5ABPDNu7uNt4t\nFRXBMRCtrfDSSwOfHYNkUBoLL/VCvuqIuIbH4az2SMLYW2xPnp1B/WaxFJmCa0g7iMh/Avd5rNoA\njHV9HpP5zu98S4AlYNRKgSeNk6No0SI4+WTT8QQZJB289Nul8gYq5Dxh6pxip4lOqqZysT15rCup\nxRJMIdMOYJTr/QXAbR7bVAGvAvsBNZhyoYdEOX6oWinu4qgMotQU9lIvlEodke95wtQxpUitYWsq\nWyxDCgmplQqNkP6BiDwnIs8Cn84ICERktIg8kBE+vcC5wEPAi8CvVfWFAs+bH87INSy6ub7eW71Q\naGRr1IjcfM7jVudkMtfS2mo+z5hhUosHrU8qOjjo3pZzrIXFYhlEWedW8vVWcqiqihccJ2LiES6+\n2N8jpr4eNmwwHbEf+XgD+RW1uegisz5IBRTlPGEeOHHzNOWLzRtksQwpSXkrlXeEdBBO/p04wqGh\nwXRO11xjgsuuvdbbIBkkGCC+PjzISHvFFUZoeaVviHOeMCN23DxN+WKNvRbLTsHwFQ5VVXDvvXDK\nKQOdUCplOmI/2tpMFbb29oGR+ze/aTrnYhokwwzgjooH8k/fEGbEjpunqRCssddiGfaUt1qpqkrX\n1NQM7vDr600hHWeE7aheXnrJeCTFrbKWdNlLL0+gOOm981XxhKlzXnsN9tvPqnsslp2cXUOtdNhh\nxj7w0kuwaZNJVHfAAYNHoY7q5dZbTbyDF9XVZp1Xcr2kkrkFZfmMU/0tXxWPqlGVXX21+Zxdb9ox\nZlt1j8ViiUB5C4eKimR07j09/vsloW8PC/x67TX/hHjZ5KPicQumzk6Th6m2NjcPk1X3WCyWiJS3\ncIhD0Og837TcUQkL/HrggcGj9qAZRNzsqV6CybnOa681wsGNTRNtsVgiUNaV4GIRlK46n7TccYiS\n7sIZtS9ebGwQCxYkUwnMpp+2WCxFYOeZOYS5UELx9O1R011kj9ovvrhwFc9wzsNksVjKlp1DOLi9\nhK67zny3fn1uh1ssfXtQydCgmUkSKp7hWv3NYrGUNeXtyjppkq5ZExgjnV+9hGIwVO2wEckWi8XF\nruHKGka51AKGofMEshHJFoulCAxv4ZBUeuikGCpPIOuiarFYEqYg4SAitwMHZj42Ae+r6uEe270O\nbAf6gN4kpjyANca6sS6qFoslQQoSDqr6eee9iPwQ2Bqw+adVdVMh58vBGmMtFoulKCQS5yAiAvwD\n8KskjheZoNiGJOIX3EStxWCxWCw7AUnZHD4FvKuqr/isV+AREekDFqspBVo4pTLGBuVNKqVHlMVi\nsZSIUOEgIo8AH/RYdZmq3p15P5vgWcMxqrpBRPYCHhaRtaq6wud8c4A5AOPGjQtrXvGNseXkEWWx\nWCwlIlQ4qOpngtaLSBXQDEwMOMaGzOtGEVkGHAl4CofMrGIJmDiHsPYBxTXGlptHlMVisZSAJGwO\nnwHWqup6r5Ui0iAijc57YAbwfALnLQ3WI8piseyCJCEcTidLpSQio0XkgczHvYEnROQZYBVwv6o+\nmMB5S4PjEeWF9YiyWCw7KcM/fUaxsekpLBbLMCKp9Bk7T8ruYuF4RCWRXttisViGCcM7fUapsOkp\nLBbLLoYVDlGx6SksFssuhFUrWSwWiyUHKxwsFovFkkNZeyuJyHbgpaFuRwgjgWQTChYH285kse1M\nFtvO5DhQVRsLPUi52xxeSiy9d5EQkTXl3kaw7Uwa285kse1MDhFJxP/fqpUsFovFkoMVDhaLxWLJ\nodyFQzKpvYvLcGgj2HYmjW1nsth2JkcibSxrg7TFYrFYhoZynzlYLBaLZQgYUuEgIn8vIi+ISL+I\n+HoAiMjxIvKSiPxFRC51fb+7iDwsIq9kXncrUjtDzyMiB4rI065lm4icn1l3lYhscK07cajamdnu\ndRF5LtOWNXH3L0U7RWSsiPxWRP6ceUa+4VpXtPvp96y51ouI/Htm/bMi8vGo
+yZJhHZ+IdO+50Tk\n9yLyMdc6z99/iNo5TUS2un7LK6LuW+J2XuJq4/Mi0iciu2fWleR+isjNIrJRRDxLHiT+bKrqkC3A\nR4ADgceAST7bVALrgPFADfAMcHBm3Q+ASzPvLwX+rUjtjHWeTJv/Cnwo8/kq4OIS3M9I7QReB0YW\nep3FbCcwCvh45n0j8LLrdy/K/Qx61lzbnAj8BhDgE8Cfou5b4nYeBeyWeX+C086g33+I2jkNuC+f\nfUvZzqztTwEeHYL7OQX4OPC8z/pEn80hnTmo6ouqGhbkdiTwF1V9VVW7gduA0zLrTgN+lnn/M+Bz\nxWlp7PNMB9ap6htFao8fhd6PsrmfqvqOqj6Veb8deBHYp0jtcQh61hxOA36uhj8CTSIyKuK+JWun\nqv5eVd/LfPwjMKZIbQmikHtSVvczi7CyyEVBTWnlLQGbJPpsDgebwz7AW67P6xnoJPZW1Xcy7/+K\nKSxUDOKeJ6cAEnBeZqp3c7HUNURvpwKPiMiTYmp2x92/VO0EQET2BY4A/uT6uhj3M+hZC9smyr5J\nEfdcZ2NGlA5+v3/SRG3nUZnf8jcickjMfZMg8rlEpB44HrjT9XWp7mcYiT6bRY+QFpFHgA96rLpM\nVe9O6jyqqiKSt+tVUDvjnEdEaoBTgW+5vv4JsBDzEC0Efgh8eQjbeYyqbhCRvYCHRWRtZlQSdf9S\ntRMRSWP+iOer6rbM14ndz50dEfk0Rjgc4/o69PcvIU8B41S1NWM7+h9g/yFqSxROAX6nqu4RfDnd\nz8QounBQ1c8UeIgNwFjX5zGZ7wDeFZFRqvpOZvq0Md+TBLVTROKc5wTgKVV913XsHe9F5D+B+4ay\nnaq6IfO6UUSWYaadKyiz+yki1RjB0KKqd7mOndj9zCLoWQvbpjrCvkkRpZ2IyGHAT4ETVHWz833A\n71/ydroEPqr6gIjcJCIjo+xbyna6yNEKlPB+hpHoszkc1Eqrgf1FZL/MqPx04J7MunuAMzPvzwQS\nm4lkEec8OfrITAfoMBPw9DZIgNB2ikiDiDQ674EZrvaUzf0UEQH+C3hRVa/NWles+xn0rLnb/qWM\nZ8gngK0ZFVmUfZMi9FwiMg64C/iiqr7s+j7o9x+Kdn4w81sjIkdi+qTNUfYtZTsz7RsBTMX1vJb4\nfoaR7LNZbAt70IL5Y68HuoB3gYcy348GHnBtdyLGW2UdRh3lfL8H8L/AK8AjwO5FaqfneTza2YB5\nsEdk7f8L4Dng2cyPMmqo2onxWHgms7xQrvcTowbRzD17OrOcWOz76fWsAecA52TeC/DjzPrncHnZ\n+T2nRbqHYe38KfCe696tCfv9h6id52ba8QzGcH5UOd7PzOezgNuy9ivZ/cQMOt8BejD95tnFfDZt\nhLTFYrFYchgOaiWLxWKxlBgrHCwWi8WSgxUOFovFYsnBCgeLxWKx5GCFg8VisVhysMLBYrFYLDlY\n4WCxWCyWHKxwsFgsFksO/x+935zOn2NHSQAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aabb2790>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ruamkVRRCIYlyiJlEBf4kaIBqNu6GH\nmYiZqBMTMEQT14xGQQOJk4lEzZiYgcmIWdplRmVco+gYBbMBOq4RNLgBGgkgS2803f39/jh16dtV\nd626VV0N532e+9Ryt3Nv3TrfOd8qqorFYrFYLG4qBroBFovFYik/rHCwWCwWSw5WOFgsFoslBysc\nLBaLxZKDFQ4Wi8ViycEKB4vFYrHkYIWDxRIREdlPRFREqga6LRZLsbHCwWIpIiJylIgsFpFWEflA\nRL7lWrefiPxWRNpFZIWIfC5r33NE5B0RaROR/xGRPUp/BZZdFSscLJYiISJDgceAecCewEeBRa5N\n7gL+L7PuGuBeEdkrs+9hmf2+DAwD2oHbStZ4yy6PFQ6WQYuIjBCR+0TkbyLyloh807XuehG5V0Tu\nEZGtIvK8iHzStf5jIvK0iGwSkVdF5EzXujoR+VFm1L5ZRJ4VkTrXqb8kIu+KyHoRuSagiZcBj6tq\ni6puU9Wtqvpa5hwHAUcB16lqh6reB7wE/J1zDuAhVV2sqq3ATKBZRBoLvG0WSySscLAMSkSkAngI\neBHYFzgBuEREPu/a7Czgv4E9gDuB/xGRahGpzuy7CNgbuBhoEZGDM/vNBcYCx2T2/TbQ6zruccDB\nmXPOEpGP+TTzU8BGEfm9iKwTkYdEZHRm3WHAm6q61bX9i5nvnfUvOitUdRWwDTgo9OZYLAlghYNl\nsDIe2EtVZ6tql6q+CfwHcLZrm+dU9V5V3Q7cBKQwHfangDTwg8y+TwEPA1MzQucrwLdUda2q9qjq\n71V1m+u4382M9l/EdOCfxJuRwHnAt4DRwFsYVRKZ82/O2n4L0BhxvcVSVKzXhWWw8hFghIhscn1X\nCSxxfV7tvFHVXhFZA4xw1qmqezbwDmYGMhQjRFYFnPuvrvftmI7ciw5goaouAxCR7wLrRWQI0Ars\nlrX9EMCZSYStt1iKihUOlsHKauAtVT0wYJtRzpvMjGAk8J6zTkQqXAJiNPA6sB7oBA7ApdbJk5cA\nd9pj9/tXgTEi0uhSLX0SaHGtd9tIDgBqMm20WIqOVStZBitLga0i8s8ZA3KliBwuIuNd24wVkeZM\nXMIlGJ39H4E/YUb8387YICYBZwB3Z4TF7cBNGYN3pYh8WkRq82jjAmCyiByRsXPMBJ5V1c2q+jrw\nAnCdiKREpBn4OHBfZt8W4AwR+YyINABzgPuzbBQWS9GwwsEyKFHVHuB04AiMLn898HOM6sXhAeCL\nwIcYl9BmVd2uql0YYXBKZr/bgHNVdUVmvyuAl4FlwEbgX8njv5KxZXwHeARYh3FlPce1ydnAuEz7\n/gWYoqp/y+z7KnAhRkisAxqAGXHbYLHki9hiP5adERG5Hvioqv7jQLfFYhmM2JmDxWKxWHKwwsFi\nsVgsOVi1ksVisVhysDMHi8ViseRghYPFYrFYcijrILihQ4fqfvvtN9DNsFgslkHDc889t15V9yr0\nOGUtHPbbbz+WL18+0M2wWCyWQYOIvJPEcaxayWKxWCw5WOFgsVgslhzKWq1ksVgsuzIdHbBwIaxa\nBQccAM3NkEqV5tyRhYOI3I7JZbNOVQ/PfLcHcA+wH/A28A+q+qHHvicDP8akVP65qv4g3wZv376d\nNWvW0NnZme8hdnlSqRQjR46kurp6oJtisVh8WLYMTjoJuruhrQ0aGuAb34BFi2D8+PD9CyVyEJyI\nTMDkmP+lSzj8ENioqj8QkauA3VX1n7P2q8SkGT4RWINJZjZVVf8cds5x48ZptkH6rbfeorGxkT33\n3BMRidR2Sx+qyoYNG9i6dSv777//QDfHYtnl8ZodqMKIEbBpU+72TU3w/vv+MwgReU5VxxXarsg2\nB1VdjMlQ6eYs4BeZ978AvuCx69HAX1T1zUw2zLsz++VFZ2enFQwFICLsueeeduZlsZQBy5YZITB9\nOlx3nXkdPhzmzjUzBi+6u+H++4vftkJtDsNU9f3M+78Cwzy22RdXRS7M7OH/+R1QRKYB0wBGjx7t\nt00+bbVksPfPYhl4OjqM2sg9O2htNa833ADbtnnv19YGb75Z/PYl5q2kRj9VcKImVZ2vquNUddxe\nexUcx5E4mzZt4rbbbstr3zvuuIP33ntvx+f99tuP9evXB+7z9NNPc/rppwPw4IMP8oMf5G2usVgs\nZcTChf6zA1Wo9Skv1dAAY8YUr10OhQqHD0RkOEDmdZ3HNmtxlWvElGpcW+B5B4wg4dDt90tnyBYO\ncTnzzDO56qqr8t7fYrGUD6tWmVmAF9u2QU+P97qqKmOXKDaFCocHgfMy78/DVN7KZhlwoIjsLyI1\nmOpXDxZ43gHjqquuYtWqVRxxxBFceeWVPP3003zmM5/hzDPP5NBDD+Xtt9/m8MMP37H93Llzuf76\n67n33ntZvnw5X/rSlzjiiCPo6OgA4NZbb+Woo47i4x//OCtWrPA7LWCEy0UXXQTA+eefzze/+U2O\nOeYYxowZw7333rtjuxtvvJHx48fziU98guuuu64Id8FisRTKAQeYWYAf2drfmhpjjF60qDTurHFc\nWe8CJgFDRWQNcB3wA+C/ROQC4B3gHzLbjsC4rJ6qqt0ichHwOMaV9fZMCcSCueSSS3jhhReSONQO\njjjiCG655Rbf9T/4wQ945ZVXdpz36aef5vnnn+eVV15h//335+233/bcb8qUKfzkJz9h7ty5jBvX\n50gwdOhQnn/+eW677Tbmzp3Lz3/+88htff/993n22WdZsWIFZ555JlOmTGHRokW88cYbLF26FFXl\nzDPPZPHixUyYMCHycS0WS/FpbjauqX50deV+rqsrbpvcRBYOqjrVZ9UJHtu+B5zq+vwo8Gjs1g0S\njj766LzdQpsz88OxY8dyf0wXhC984QtUVFRw6KGH8sEHHwCwaNEiFi1axJFHHglAa2srb7zxhhUO\nFkuZkUqZWYA7lqGmxt8QDbB5s9k+yJU1KQZ1hHTQCL+UNLjmhlVVVfT29u74HOYyWpuxOlVWVoba\nLPz2BRO/4LxeffXVTJ8+PdaxLIOfgYymteTH+PHw3nvmd3vzTXjxRbjvPmOQ9sNxZT3nnOK2zeZW\nikljYyNbt271XT9s2DDWrVvHhg0b2LZtGw8//HDkfZPg85//PLfffjutGZ+4tWvXsm6dl5+AZWfC\nz19+2bKBbpkljLo609Ffey1Mnhxsh4DSubIO6pnDQLDnnnty7LHHcvjhh3PKKadw2mmn9VtfXV3N\nrFmzOProo9l333055JBDdqw7//zzufDCC6mrq+MPf/hDUdp30kkn8dprr/HpT38agHQ6za9//Wv2\n3nvvopzPMvAE+cuXSgVhSYYw
OwSUzpW1rGtIe6XPeO211/jYxz42QC3aebD3cefhzjvNTMERCG7S\naZg3r/gqCEtyLFsGJ55o7AtelF36DIvFUp4E+cuXSgWxs9PRYYTwnDnmtZjZZ8aPN53/7NlGAKRS\nxq01nS5TV1aLxVKeOP7yXjOHUqkgdmYGIjtqXR3MnAlXXNFnrB4zpkxTdlsslvIkSE9dqmjanZWB\ntuc4xuqBYFCqlcrZTjIYsPdv58Lxl29qMqqHgVBB7KwE5T8qVXbUgWLQzRxSqRQbNmywabvzxKnn\nkLI9xk5Ftr98qVUQgwG/OJCw773UdRDfnjPY4lAGnbeSrQRXOLYSnGVnI6zj9bIbVFXBLbfAJZf4\nf9/Z6W98djzBJk8O7/Sd8zvHcwzNxbBbJOWtNOiEg8Visbjx6/idjrejw7+qmoh3NLLf926amuCh\nh+CMM/zPDeb8w4aBV/xrYyOsW5fsDKJsXFlF5GARecG1bBGRS7K2mSQim13bzCr0vBaLxeI2GLe2\nmg69tdV8dkbqYXUT4nwPps5CUxM8+KARDEHnBrjnHm/BAOb7u++Ofr2lpGDhoKorVfUIVT0CGAu0\nAws9Nl3ibKeqsws9r8VisUQxGAfFgeTDGWcYL6XVq6MZqx95JPh4YesHiqQN0icAq1T1nYSPa7FY\nLDm2hRUrwgMAg+JA4pJOGxtDKhU9+DBmPs2yIWnhcDZwl8+6Y0TkJUwVuCuSqulgsViKSym9bILO\n5WVb6O3t8yzKxgkADIoDiWtzcMeNRAk+XLYMHn88+Jqz0rOVD6qayALUAOuBYR7rdgPSmfenAm8E\nHGcasBxYPnr0aLVYLAPH0qWqTU2q6bSqiHltajLfu2lvV21pUZ0927x2dCR3rsWLVRcsUE2lVE2X\nHW1pbOxrh9+x77gj3vfu6+7oMN95nbupSXXjRv/1zpJO53evggCWawJ9emLeSiJyFvANVT0pwrZv\nA+NUdX3QdtZbyWIZOIK8fNzJ38K8hQo9lwhUV+dWRgsjnYa//a1v5uHMSrLjQOJ+7ybo2t94wz8h\nIpjo52eeKV9X1iTVSlPxUSmJyD7AB6qqInI0xhC+IcFzWyyWhIli7J08OZn0EmEeRXEFg4NTFCdI\nXeWXoiJK6orx480xZ8409o9DDoHvf98Iz8ceCzaEX3mlt2Aol2C5RISDiDQAJwLTXd9dCKCq/w5M\nAb4uIt1AB3C2JjVlsVgsRSGKwTWKAImSGyhpjyJ3G4uZOM997NZW+N3v4Pbb4TvfgVGj/G0S6TQc\nfHDw8bzaWkrBkYhwUNU2YM+s7/7d9f4nwE+SOJfFYikNUQyuSaULT9KjyN3GkSOLlzjPKymfU/95\n1iwYMsR/X8ew7e7sR40yUdnuOg7utjoBd+4o62Jmhx2UifcsFkvxaW42nZgXTufmdOpexEkXHnQu\nP1KpvmSDfm2E4iXOC5o1genkRYyQ8EqI+PLL/Uu7fv3r/gV+tm83BYA2beoLruvsNJ9POKE49SWs\ncLBYikgpi8QkTZRsr1EESCHn8sutmUrBz35mRv5PPeXfxtWri1cIKYoqTNXkaZo3zxTvmTfPtPnw\nw3Mju4OejbY2//XFirIedFlZLZbBwkAUiUmSjg7jcXPxxbBhAwwdavTkbj13KmXSSJx8srnOri5z\nndXV8dOFexl3m5thypRgT6igjLRvvFG8QkhRVGFtbbBmDVx7bZ8K6cYb4a9/NbOBqFRVBc9SfvUr\nOP/86MeLgk28Z7EUgahuoOVKVPfUbINsTY3Z7vHH4bjjkjnngw+aGUA+qcg7O2H48GR/B6eTX7kS\n5s6F9nb/bZ3MrQce2P/a4rrmVlcHC5PqatiyxSkpWn6urBaLJUNSXjwDQdTqZ17bdXWZxck/5Nfx\nZnvdnHKK/znPPLP/OZ18SVG8dRx1lZ+giysYsgVYXV3w9lVVMGmSEQ5uIRImGGpr+2ZhVVVm9jZn\njv/2FRXJP1NWOFgsRSApL56BIKpgy1cA+qXB8MM5VvboO6qaLqlCSF7C0OnwUyljNxAxHkvuuhDZ\ngiGMpia4+WajjnLaCvAv/+J/v7u6kn+mrHCwWIpAFDfQciWqYMtHAAbNSvxoazMqnG98I3+X1CRq\nMQcJw6oquPVWo1ZzBNApp5jXMMHg5HFy22oOP7xvZnX//eYeBNkcGhpg990/5O67QxI5xcAKB4ul\nCAQle4vjxTMQRBVsYduNHGk8tNwqoDD3Ty8aGmD9ev/9Ojvhy1820drFDAoLE4aO4dnhzjujXatj\n9m1rg/nzzfsRI/pmSPX14V5R7e2bueii4cC28BNGxAoHi6UA/CJWk9Z1l5Kogi0s2+kll0BPT38V\n0Be/GD8SuqrKeEr57dfZCffea9JVRFEzef1mquGRx3Fng/lEfU+fbqrDbdnS913wMRTYxrHHfpez\nzvo+EydOZHxSrnBJZO8r1jJ27NhCkhNaLEUlSsZSJ1vpnDn5ZysdCKJmY/XbLp32zkJaX++/zlnq\n6nLP2dISvp87I6rfffZqb2OjeQ271o0bTfujnLO9XXXGDNWamniZZEG1qirO9r06aVJPv3aSUFZW\nO3OwWPIgqkdPErrugSCqEddru64u413jRU9PeG1mERPr4I6p+OhHg43WbrKN4c5MYcUK+NGP+tsA\nvGYBXr+jY0TPbnt9vbEzOLPBjg7j3nrDDaa98RMGKt3dSvT4ZOHQQ30iBQvECgeLJQ8Gs6tqVKIK\ntuzt5szxV4Vs22Y609ravjxE2VRUGMHgHNOvY/bDbQzPjsOIQ1DmWTdvvWU8jJxz+W0HJu7BuPsq\n4Nepd2PK40Rj5szIm8YikfQZIvK2iLwsIi+ISE7Umhj+TUT+IiIvichRSZzXYikWYWkvBrOrarEJ\nyrcEpnP0S4sB/e+fe4bmVe3NC0f/7943n4R+UTLPVlTAo496zySzqazs5YgjXuSEE76CsRV4k0pV\nx2rnbbfF2jwySeZW+qyqHqHekXmnAAdmlmnAzxI8r8WSKMuW9U+INn26ibJdtqxvm6QSziVNmFAr\nRa6nqEn0amu9v3ffv3y8mxyjeT77erWj0NTlDj09wrPPPsRrr/2WY4/9T4yAcBYjMO+4QzjvvHhq\nojlz4NlnY+0SjSQMF8DbwNCA9fOAqa7PK4HhYce1BmlLqWlvDy796Bgdw0pEDoThOcyIHNXInFRb\ngsp6ivivF1FdssQcZ/Zs8zmqgXbIEFNWtKVF9fjj4xuDvX7HIGN4Om3WX399t4r0hhqPjzuufcc9\n2rDBGK1POMG8fvih+b6lRbWyMl5b6+t7tWPBXaqzZ+tQeFOT6NcTOQi8BbwAPAdM81j/MHCc6/P/\nYsqEeh3L1pC2DBhROgKHJDvbQmswhwm1oHrG+QizKO0NqvucTqvOnOnf8UfpmLOXVEr16quNR1E+\nXkLZS0ODub6ZM/29lFKpdv3c507X2tppCu2hx5w0KfzednT4n89vqaFTW2r/SVVEj4QeL
SPhsG/m\ndW/gRWBC1vrIwsG92JmDpdQEjVRFjEuqmyRcVZMQMmFCbcaM6ELPC7cwmD07t7319arTp/e/B2Gz\nqwULTAcc1KagY3j9PlE70+rq6NuKqNbV9Sr0anX1NoUehS0KGxTG6ZgxX9Tq6jaFsJmD6pQp0X7P\nJUviCQehR+dwjSroWFBNoF9PqhLc2szrOhFZCBwNLHZtshYY5fo8MvOdxVJWxA10KtRVNapLbBhh\nevEVK/I3oId5/DjfzZsHv/51/0C0oEDAxx7zTy3R2mpSZmQHEwYZls24M5h02pz/ssvghz+MZqhW\nhY4OYwfYvr2Lvff+T444op7zz9+NY499lE9+cq/I6befeKKvilsQxx0HixfD5z9vDPg9PcHbN9DG\nGBL2gihUugANQKPr/e+Bk7O2OQ34DcZ361PA0ijHtjMHS6kptS1hwQLV2lrv89XWmvVRKNbMIUhd\nFbS475Xf7CpMZVRT0zcb2bjRvM6aFV/l4iyHHWZG7gsWGP1+PteVTvf0u1ctC7ZpOtUVY//wWVr2\n/Xfu3cyZAfebDdpBbaIzhySEwxiMKulF4FXgmsz3FwIXZt4L8FNgFfAyEVRKaoWDZYDIV80T124Q\nZrQFsz6KeilMqAV1hkFCL47O3700NIR3glFVRg0N4Yb1KGqi2tq+7YcM6dXLLntRa2vbVKRTo6iE\nIEu1uHSpzk59T4WeyPfFSzUZh8WLHZtKrwq9mmaLNrFBlzJux0nKRjgUc7HCwVJMgjrzuLaEuAIl\nzog86oylGN5Kcb2F3Mv06dHaHHUmMGRI7mxk+nSzv9/sK3jZoBUVe+oBB8zWysptkfbZMfLP/IAt\nTNU0WyKfM+7MwYv2xcu0pf6rOqdylrYwdceMwQoHiyUBkvY0ijsqjzMi9+tQ2tuNimTKlD51iaN+\n8RNqcYVeS4u/0Thsqa+PJtSC1CXZy+zZ0e57/8V7VlBXt11vv9000P08RBLUmR+wg1ptYkPk9hes\nmoxw0WVlkLZYBhNJGYEdoqbScGcDfeml6Bk7vQzGy5bBCSeY4vIO995rDK5PPeVvJI9rQG9uhq99\nLfr2bqJWJzvkENPuKMbhG26AK680v0+hQW4dHVWsXWu6wOwcUapw000BGXUzHgAplEV8npN4nG6q\naKWBvrQYfcFs9XRQ01THokVSWEbeQi86BlY4WHY5ks6LFCWCNrv6WU2N6YCikO0l5Qg3t2BwaG2F\nE080BeyTSAueSplaCfPmxd83ahqRoNTfXtx/P5xwwgc88MDfaG09lPBED/4Rx+7fIFtwXnFFQOLB\nAw7YUWhhPMt5j31ZyGTeZAwjWQ3AW+zPeoayF+s5qPZdmm8+idT4s6NfqBdhecArK+nt6YmYojAY\nKxwsuxxJ50WKUvQme6bil3TOi+ziQAsXBqe96OxMNvHfvvvmt191tZmFhbluxnFX7ezs5Vvf+jfW\nr78UmIpJvtAY0hLFT0Dc9CPlylH3kFr9Rk4hh37CoqMD7ncVfBg2rJ8fbh2dnMNd/k3oEliTQE6V\noIcNwn1eY5BkbiWLZVCQdF6koFxCzvdBmoCgjjOdzi0OtGpVsHDYti25xH8dHSbNdT50dcEvfpGb\nl8oLR60zbx4cfzzgm5iujZEjt/PDH/6QxYsvpakpHbE13sfr3tzK/Rcu8k+iBd7Jtk48MfrUD/o/\nWEEJrsKSX0VIXFWRVL+ehOGiWIs1SFuKQZALZX19X46bOAQZuMM8fiZP9ndp9TJgtrQEu8DW1uYa\nsPNNzxHHcB6UssLPENvervrrX/fqpZdu1GnTntZ//Mev6ujRByls9DlOb7/jOPc9OF1Gr+Ljbip0\n74gs9mxsvoEefjcg6EHJp8JSEQ3SAy4AghYrHAaOQnP9lDvO/6uurv9/q76+8PxIcYK9nAA1P3fO\n+vrcjj4sPsDt8um+1nw8s6K6stbUqH7uc/6dtNvjqre3V19//XX9zncWak1Nq4psVSclhciHOmnS\nlXrZZXdpY+N2Tad7Q9vseG35nbuWNl930zSbtYWp/o2NG+jhZBT0utlhbm1DhgQLFq+HzSO7oBUO\nlqJRyuydxSKKcNuwIXrZx0IIC1C7+urg/ua663KPuXSpKW/p1Qlnlyn1O3cqZTrVoOuM2jcOGRKc\nSE+kV88440969tln6/DhwxVSio8LqHPvfd1ufX7cpUtNO5yYh9pa1SF123RJ6nO+7qbuyOJ+HbwT\nqRY30COdNjfVb5Tg98DV1PgHawQFR3j8QFY4WIpCPj77A4WfAIgq3OJkYC0UvzbNnq06dmxwfzNt\nmv/1Z8c5eKmggjr3qirTXzkpsrOJEsXsCKTgc21WmKojRozQqVOn6le+8qTW13fHv/chP26OQPnQ\nXMBSxmkTGzTNZhW6Nc3mnMhizwbMnh1dMDh/EifYJPvBjBPQ4SesIvxAVjhYikIpO8xC8OsjFi+O\nLtziZmAtFKcznzxZ9dBDTcccJbJ38uT8zxl14OuuoZBN9r1OpUy6iuOPN9fT1taj//d//6c33nir\nVldv9Tx+fX2nvvLKX7S3tze0Xb73Pt+RS+YC2lO7awtTdQ7XeEYW5xyrvd1f1eM0tKGh/wN4xx3+\nwmvGjOAfoaoqvz9e1g+UVMpu68pq6cdgKH8ZFMR28skm+MqL7BiGUaPMv88LVeOCmiSvvAIXX9zf\nCzFKPFNlZf7nDPN8dFA1GUA3bMj1nsoOENtvv17GjHmBP/7xaRYufIbLLlvChx9+CMCIEU+yfn0L\nFRXVbNtWnQkeExYtquWwww6I1C5fj7GFC/FNfxoUoDJ+PKxaRd2oUcHupk7KVsc97M47g11Dr73W\nRPA5gRCnnGJe/aIrZ8zwPxb4P7jZvszZZP1Aq2fOfCf4RNGwwsHSj7z+tCUmLIitq8t73UAKt44O\n4/2YTy3j007L/7zNzXDhhdG23b7du3/dvn07L730PGvWPMMf/vAMN974LFu2bAHgox/9KM3NzUyc\nOJGJEycyevToHZHgnsFjrnb5Bb759oWLF+c/cnnsMf/ON5WC00+HyZP7NzYs4Kympv/NuvPO4Adz\n0yYTPOFXDLuiwggo53pywrIDcAVlrJ85c2PwxtEoWDiIyCjgl8AwQIH5qvrjrG0mAQ9gKsYB3K+q\nsws9tyV58vrTlphVq/w72a4u85/1EhDZwm316uDzrFkTr13u9BhZ8VTMnQubN8c7Hpi+4uwCgmod\n3UQUtm83/WtXVxfLly/nmWee4emnn+Z3v/sdbZlO8pBDDmHq1Kk7hMGIESNyjhMlRUd24FtoX9jR\nYYIm/KivDx65BHX027bBJz+Z22hXFHQO6XTu+cKm3UOHmsLZfsKhsxOamuDmm83D5ydZS0QSM4du\n4HJVfV5EGoHnROQJVf1z1nZLVPX0BM5ncRHUIeVD0J/2wQfNyDKpc+XLqFHB66uqvIVDb68pIHPn\nnabtBxzgn9PH67/vR0eH6fxvuMF83rbN3DOn
6M3hh/eti0NdncmTVMg9Xrgw+rYi3dx99w+54Ybv\n0ZHpwA477DDOO+88Jk2axIQJExg2bFjfDk7AVp4PRLa6KrAvvOee4Mg/1eCRSz5T4tGj/asReY2U\nDjjANN6r80+l4OCDzQMxaZL/cbu7zejm2mv9r6VEFCwcVPV94P3M+60i8hqwL5AtHCwJk12hq7YW\nLrgAvvOdvuRkbsIEiXv9zTeb7956C9avNx3eySeDiHmu3Z3f+PGlu+YoXH453Hprn3BLpfr6lTlz\n+tr+0EPBkc1hsyRHKHz/+7npMJw+aMIEOPfc6KN3BxH49rcLv7dhmhE3qpXAo3zta19j4sSJTJgw\ngaFDh3pvnJ0syrmpDz5opmQRBUbOLKOjA+70eEgfeSS48Z/6VLBgGj3a/0Z4/dgdHXDGGf4/3MUX\nw4039m/jKaf4C7DOTjj1VDMzuPxy8yB6US6GPUjWWwnYD3gX2C3r+0nARuAlTEW4wwKOMQ1YDiwf\nPXq0v4V+FycscDPbdTOfXP+NjeY1yBVyINxbo3i6OC6NQZXDmpqMh04+MR2OT30UT8TKymjbRXFQ\niRWc2N6uLTOe1XRNZ6Rz1lVt05YZz4b/oGEPn+OCFTdAJughnTIluPFBxZmDAlr83LTCfIDdlYOc\nNgbFMbhrVfrrAAAgAElEQVSjGYvsEggs1yT68yQOYtpDGngOaPZYtxuQzrw/FXgjyjGtK6s/UYKT\n3B55QR6AGzfmnyEgzrOcVNR1nP9WlG3j1jhIKqNClN/OTdT4jS1btujvbrlF22prdb3URa43IHTr\nnJrvhnfocaOGo4wgwh7S+fODz+FXTzWsmpBfybq4wW9NTWYkEsU/N+latFl/rAp4TiP0r2FLIt5K\nIlIN3Ae0qOr9HrOTLa73j4rIbSIyVFXXJ3H+XZEo6gLHu89577fNtdfmnyI+6izYTwuRj1oqjtE8\nzEb4+uvxaxwkmVK/ttZoJHp6go2yQe67J57Yy+23P8Yf/vBbnnnmGf783HO829tLPVAPWfUG0pgM\npblZShtoY0zXCujaFFzYIo6uCqLlQQ9zQauuDjYQnX12rt70lFPMdfjp98Gs83qAo/oAu9u4fn00\nu0Zsa3wAHn+sT8Anox/AnyS8lQT4T+A1Vb3JZ5t9gA9UVUXkaEzWwA2FnruUBOnrkzYKRyHKs+t0\n3KrBHeSKFfH+626iuLcmXVwnzn8r6D6pGrXxaafFE1Bx+0Y3NTXGK8jd3sMPN8/PypWmf9lzT3jj\nDfj4x00bFy4MTtO9eXMrZ//drzm78r+YNnIkhx95JLu9+uqOHdz1BlZyEHO5gnZys5lW0U0zrtGE\nX4cet+OMMoIIuqmtrcZ756mnjD9wZ6cx8tTWGvfPL37RFJ14+GFzw7ZtMwKjt9d8DsLvAY5bZKK1\n1XgjRTVixbLG++Dzx6qEAiJjXBQ69QCOAxRjT3ghs5wKXAhcmNnmIuBV4EXgj8AxUY5dLmqlJBIp\nJk3UtAYtLdESv+VTRD7qLLhYKtYo6qAo9ynuTD6uVsWtwZgxw7u9QTafhoYeNZlFvctdQo9eX/Nd\n7XV2Dk5RGi2VRMyUDZEexEJu6vHHm22c1BRO8ejszIlJPsBeoeFBx5o9u7Qdgs89s+kzSkSSiRST\nJswo6rQhTMX53nvRi7y7O7okMnsWI02F130Kur64Aipu3+g+j1/a6kJsGGm25GYWDVnaSQWnkoia\nsiFK/o8of4aOjnALf5QcKXGW+vrwB9g9AlmwQHW33cKvM64RK198/lhWOJSIoAFNKpVfIsUkaW83\nz0gqZRa/wYrfgMZJBRN1AFZdrXrBBWYEPGtW9CL1A52vKSjnWT4CKjulfm2tuf8zZ/pfa2Oj9726\n9dYNmkp15d3HeWYWLXSJakResCB4RD1kSPRRc9Qkd/X1+U913cfIp3BHUBtLnXysyDMHmz4jhCBV\naFBMTmtradyV6+pg5syQerd4qzi9UsEEkUrBpZfCj3/cl6YiimE5SH3b22vcv4tNUBH7fNKCOPfz\nnnuMa//ataac5gcfmH+oF7p9O/ffrRz72fd55plndiyrVk0Fvhvj7AoI6bRS1bqJRXyeFDHqjmZT\nU2OWfFI2nH8+HHaY0X13dfU3/lZXx2+LiP8NdAjKkZJNfb15rajIvb6mpvjtC2pjtm2l2MbIuHaR\nuCQhYYq1DOaZA5hBRjkTV3deSLxDMYrrxGHjxvi1G8Jcb5cujVkHhm6dxbV6jOnddffdd9ezzjpL\nv/SlR3zTV3sttbTrlMnbtWXGs9pRGzHYImj5zncKV4MExRIMGWJmGGE+zEmVncv+cR07RRJqnqjT\n4FLZHjzOcwR0awL9b8EHKOZSDsIhTF8fRQVZLmR3dkHFWZz/n2PfrK8PFoR+7uJuSlVcJxvn/5N9\n7rq63P+rc48ce6fffzssm7Nn35GpOtYL+vrtt2tPT4+qxrdhNLFROxbcFd8X32upq0tGFRLWsTuq\nJ0f/Nnt27g8e9UY0NEQzktXVJd8ZR4lRKHVRlCwbR1JxDgUfoJhLOQgH1fD6wL6dQRnVP1i82Pyf\nnAFXOh3s7JFKmW2dFPN+qebdi1tv7zXqHgjbQ9D/NFvt7PzODQ3+1+j8t1taotli+3fqLttAXV2/\n0fTSJZ39n7GaTm1ks6bZ5O1VdPzxxvAT1NhIjUqos8pHUHmNpKP+CEuWhHsPeZXQS4KwWcEAG9lI\nKELa2hwiEOSS/Nhj0VWQA8WSJTBxYv82Orp3yY2FAnLtKWFBXzU1fXp7Jy6ns9MsqZRRjX7xi8nU\nioijyg3K11ZRAY8+alz5vVzGvejuVm65ZTXPPvse27Y5ITteKDVsYzvVNNBGFd39bQMdHfD1r+/w\n1x/PBbx35SwWjrmcN9fUMOb95TT/4iy0rY2FTOZNxjCGN2nmfnOMp4ClS/0DvMJ099m1CwolbuwD\nmJudHeji/rMtXgy/+pW3vWD8ePjZz8w99PqB02k46KDCr8uLsBiFwVAUJQpJSJhiLeUycwii2IOE\nQlNOtLcHz8Dr6vqrTxoaoqtzs0fhzozaq7axMxsp9F7FUeUuXRo8uHR7KUVXd3crXKMwVaHDd7tU\n9XadIbeFVx3LXhzvnji6pjgVyWbPLo6LZSH+vUE/fJBbaNJpKPKhXKbILii33ErFWAaDcAh6PvP1\nlnNIwqbV0hLc2Yv0uaQ6eva4ieLcucsWLAjethCbQxxVbpTYgexSwVG0IrWVrfr9cTfpuzfeqkOG\n+AWlqTYN6dWOut3j3cjsi3EegCgdrFd0nTv74IwZxshUTL/77Ac2it6t0ECXgYpCDTq3k82xlELL\nJaSGwpuaQP+bWEdejGUwCAfV4njiJGXTCnMdr6npSz4XdeBXWdnfUO1Oajl5cvC+xx+f/385bsK9\nsNiNpibVLVu69A9/+IOeffYDWlnZFt5vs1E7MFOgpenParp+e842jY2Z61myJNoNDbqYBQvCO9mg\nDrb
UnWd20FjYQ5VK+SfNy+ecxRR+2ecM+oPmm+43H2wN6fIlU6I2pwhNe7tZ8skfFJaHzEl7E6Z/\nd1TBYans778/ejK5Cy4w15qtal22zKS3CWKPPcJTyvhdUxxV7ooV/gW3QKms7Obggy9j+PAFmSpn\ntVRUfOC7fZpWqtiesRt0QiuM57esGzKKe+a9wyNP1AAmT9PZZ2euZ1mtf3BFEO6LWb063KffL1Aj\nalKrJP3xs7MYHnaYyYfkVwavsxMuucRsl2/xiriZE5Mg7A/67ruF506KgsdvXOFvCIuFFQ4JEVSi\nNkpSymyidIR+mU4vu8xsN2qUSfLmVyNdBB5/vK/jjdKH1dSYIDivQkInneRf/93htNOC/8tB2VsP\nGNVFQ63Q2pkbXJXdP27YAGSCxbzo6fk5bW1Pc/755+8obPPu67WcNHET3VpBGw3U04ZSwZf5FRNY\n0mcMdlHX08r56Xs5/7+zLsi5IfkUjXZfTBRDr19loigjjAMPTC5drhfjxxsh5JTK8zIeb96c3whq\nIInyBy2F0EoyRXAWSaXsPhn4MSYb4M9V9QdZ6yWz/lSgHThfVZ+Pc46ByHwah7gOClFH/H4RvSNH\n+g8KZ80KbmtlpVkefRSOOy782txce62ZO99xR19xrtNOM69hz2hYPeTAge4J3bwlB1LV+QKwe86+\nVdJNc3MV7e3t/PGPf+Sd37cCZ/ie64orzuXGG7/e77th/3sn79V/i4VtJ+Z6B/nh531SyJ/W3dmH\nRcE2Nfl7HIVlOn31VXPspNLl+uGE8Y8aBRdemFs2D/IbQQ0k+ZQdLQaFpAgOIYmU3ZXAT4ETgTXA\nMhF5UPvXkD4FODCz/D/gZ5nXSCRZC6BYROnMnXK7ADfdFHw9YTULIP++p6fH/OenTOlLGX2TZ7L1\nXPbZB4YNg61b+767914zowjSftTWhtdDDhzobu3gUY5lESftqE3QRsMON9EHO8/gCxO72fu55zig\np4dmpvJbTqCdhpxj1dfBkUfmfs+qVdS1b+Ac7vJvZDbZP64j6fP901ZX9039IDc/uVMPVsS/HqxD\n2Kxj7lwzSvAiic46ewT05pv+D0m+Lp4DNWqMU1SkmBxwgEkRUgwBUajRAvg08Ljr89XA1VnbzAOm\nuj6vBIaHHXvs2LElDzb0I8ylNMhrqbHReChGrdzmEBZ8V2hwrHPOBQuiZy3I55y1tdFsjoHZW+nW\nOVyjindG0a2g7RUV2lFdrb2g7dT6VkBrYqN2LFmW24B8cnE7P4qXy2i+yeG8jJf5GF3zdS8NM3JH\nwevhDYq6zMfFcyA9lcrh/KrG8J31pymbrKzAFIwqyfn8ZeAnWds8DBzn+vy/wLiwY48dOza2y3BS\npSjdRH0G/LaL2kcEXU92n5BvTQGvc06Zkoyg8VuGDIn2OwT+1pnUE34n6fX4LrBugdfIIk5nWllp\ncqf4BXWE5RuJsiQx+gnLVx7nYYxK0IjO70GLe63lNmospaeU+9we92CnzcoqItOAaQCjR4+Opcsv\nhvopisOHat/M9uabzbo1a4zasasLLr442rna2kw1sGwNhdfMPqmEjM69jRvcGofLL482029uhhkz\nevFytqh0VynzwMvs7K6AlmND2IQpAzdzZt8OqZRJsZodTu5FRUWwXs8rcjmVMscVCU7p65CEamf8\nePMDzJkTb79CVCNB+sG6OvPqFfVcDHe+YjMQnlIORTRGQzIG6bWA24lzZOa7uNsAoKrzgfkA48aN\n06h2n6RLUToElWfcts30L9n2A3eE/5w50dWBdXVGDez+3/gJN69SmWH9mRcNDcag/OST8feNgghI\n93a487899cLr1q1j8eLFO9JXb95cCzwOVAMN1NRsp6amgkX6d6Ta4qelrqPT34Ywe7Yxku7wPcW4\njkbR4W7fHu6alU1vrxkt1NWZG1NXF1zfOK4e3k//HpSvPCildb66+6ARXUeHEcgHH1yYi+fOkqKi\nEIpojAYoeOqBETBvAvsDNZhSoIdlbXMa8BvMAO9TwNIoxx47dmzkCPliRaxfcEHw7NtPc+BO0Fao\nPj9olpydUC9fzUV28Rr/xT8q2PPe13drS/1Xd+jaehoadFtDg/5Lc7N+7GMfUzDpq+vr6/XEE0/U\n733ve/rkk7/TO+7o6pupL1mWjA7Na0ml+usIkzLmRFmqq1XHjfN/iFIpo/OLoq4I0n2G/YmSTGmt\nWpr0EeVQQWqgGQxlQjEuqq8Dq4BrMt+5a0gLxqNpFfAyEewNqn0R0lF0/sUoRdneHt7p+mUrdZ7P\nMDV2Iba6oBTYqZTq/PnGEDx9uneOoXTa2+Y5ZYr/ddfSrinaFMLqGmf6Hj70zCu0EfSME0/UG2fP\n1tevv167r7vOu2MqtIZm1CUfaZ7E4uRFD9omKM112D3ykv7FNp6WIudROeRVGmh87kFZCYdiLe70\nGWF2n3wHEkEG7LC8REGLWyB5/S+HDOmfAy2otoKXcItaFzm039iYewPCvIZmcZ0u4Dydwj06pfp+\nXTB/my6Z/6o28aGm2dLP+PuMu2i9a+lNp805wzqsQjvrcd7n971hhXj4FLI41x8mKLw69KgPfymN\np6UQRuXgLTTQ2GI/4eQzkAh7tsLyElVWRq8hnaRwizqYnjIl2E01W+Xj3ICW2X+J7DXUU1+vz/zT\nP2l7VZWni6mXF9GOxS9lqvsHK0TN09BgbkCUqjzZ0jwfD59CloYGkxxvypRwL6fsB7oY0+YkKIUw\nGkhvoXLBFvsJJ85AIspMvKUluOZIfb1/v+McI6prbRzhFnUwnUqZJUrsgHvpGDJMm5q81UXZxex7\nQf8Xb1fSwKWmxujcvda5pWGUCmNhxpqlS80PFVSxKDsYY+NG//a5ryFJITJnTjRh6JVlcFfXv1t2\nQEIpuxNJ0FQuODU45s0zjijz5hkvJS831iiecM3N/jXSnbxEDz1kHD5qTN41Ghr6Mhq8/DKMGAHT\np8N115nX4cONy202jvdRU5NxLBExr17ZEaI6KTjFdlS91zfQzhhyvTpSPW0suuxxmpqU+voehF7S\nbKGJjTnF7AWY5JdUKoiuLn9vH7e3SXNzX0h4TkNTpuDL4sXhN07EPxoYjOvZpZf2/TipVN+P6kV9\nPbzzTvA2qZR5EJcsMe2prfXfFkykteOeF0S2N07QPSpltK5l5yIJCVOspZgpu6POxLOrFrrTVGd7\n+LjXhZWndGooeNlfA2fJ7e3aMuNZTdd0FjxQzZ4FOEsP6J2HHqpDh45SmKqVXKO3BxWsCSsAUV3d\nv5pQ3JFxlCmh342La9COapiePTu3bc4MxMt43N6uOm1a8LkXLIhm8/CaDVj9uyUDtkxoYQTFT1RX\nmxlHZ6d/RUBVMytwx1V0dZnljDNMMJzfzKS93Qwo0+ncOIbAmJpMlF/z9kq+0fUGEDIS3YGSopMq\ntvfLR5Q9C3BoBbrefZd//+hH2XNymtGXnMP+P/4xMt8nzsAv
7avDKaeYUfHQobB+Pfzyl/GyjIaV\nZQT/Gxc3UMiZNoZNz5z6qlHa5rRv5Ej/8p0iJnLSmUIGpbn2mg0EtaPcs1ZaypMkJEyxlmLMHJwB\n5syZwerihobggVeYmjdOSopInndZI+Ds1BC1O9xLPdqT6tIFNV/LMRb7NajXuYjsXEGFFLN3jjN9\nevCNSaWSHe3GNWg708Zi6PGDjllb2z+mob3dtN1tOMpnNmBnFLsc2DKh8cn+n9TV9b369RX19f2r\nKzrC5fjjg/uXKVPyyKkU5lebdUC3d9ACztUmNnoef7eKTdoWJgwciRhk3A26UU4x6qCizc4N9bsx\n1dWqX/hCsl4ncV1ho7i0ugPH4iTxihr04u68C/HGKZf8Q5aSYoVDTIL+JzU14fEM6bRZGhvD+5p0\nOlqFxH6D1emrw/1qQ0bAfonmFjMu3JvohBOMK2XQaDlMIjqGlCB3zIaGcA+fJEe3ceMW3J2m36j7\njjvyH41HDUVPovO2Xky7JEkJh13G5hCmeg6rxBgnKV1VlUnXc9hhpkhN99YOWmnA+PbkpohraFDG\n/Oq70B6QGCpCRbDARHNBpNPwla+E56vZYw//HD0NDSZfzjnnmOPcd5/3cdrbYdo0uOce/6RQSRac\n8UpC5XgDOfp/v5xCbj3+ypXGXjJkCMyY0T8nUpz2uo+5cKGpq+qVvCuJ5HE2/5ClAHYZ4RD0P+nq\nCi9WE4wiqDH0NtaxaFEVqRSMP7yD9yrGsJDPspKDmMsVtJPO2buqdzvNstD70C6/Wv3GN3yKXvZR\nRydTuSt0u/4NcBWSDspyGJShz20kDcuWOGGCsdg7HeRDDxW3OliQV0EUQ/KBBxrPAafYjh9R2+sY\nz4OEaBKdd7lUK7MMTpKYfhRrSVKtFDTDjqLpCF569ASeMIbeIcN8swHmqH1S24w2Yvp/+KqMekV0\n0YQJeswxx+inKit1A+hmjLtp7MAzEaPXd/L5ZKtDNm70vxFxcvTEiehLMrq3GMU84rjCxm1vsdU+\nNv/QLgm7ilopKS+8oPoH1dVm8HrGGfmlv07Tyle43aSG7kn3jR6zpis5ap/u1TQ/dDGpd+uhxXuE\nt1WVXyxZQs/RRzPx8stZ9ulP85kNG6h4912T3zso5XM2DQ1w661mmpQ9Wl6yBE4+OXf65ET4OeoW\nZxR+zz39i0h//ON9+ziqnBNO6F9PFMwNfvnlPt/dpEa3xaolG8cVNu5ovNilJv1UaoWm5LbsGhQi\nWYAbgRXAS8BCoMlnu7cx2VhfIIZUO+SQsYl64YUNets3mACz6yY8pfWV0YPM+gWTuUePETxlulIp\n/c6MGbrZJ5CsK53WrX/7W+7F+Lk6DhniX52sqclYyrNH1k884d/G+nrVDz+MdyOd9oXlFlEtfHTb\n3m6uKUqepuz9oswy4rjC5jMaL4Wrqc0/tEsRp48NWgoVDicBVZn3/wr8q892bwND4x6/snJs5P9g\n1P+67//E+ZNm3DX7VEB9WUbTbNJGNnuXnfRSB3R0+HfUmaUH9ILKSp125JHankrp9ro67Q3rJMKi\ncr06HMfNKrsTmjcvuMOLk/Uv3wIb+XaQzn5BieoKjSaO4gpbaIduO29LgpSFcOh3IJgMtPisy0s4\ngLdwyM6RVvDgy6fD88oy6vWdXwf517fe0q4Qv/9e0O3NzX3tCOskonbO7mNFzUzqtWTr0aN2+nFt\nCXE7yKi2APe58pllBM1s3HlQ8ol7sFiKQDkKh4eAf/RZ91ZGpfQcMC3kONOA5WbxFg5gEmzOmhUc\n6Rx5lp9EgZeaGu0ZMkQfmzNHp0+frocccohOBd0SZd8pU6L/8vkYMQu5vpqa/seM2ukX29ga9Zqc\nc+U7y1ANH33YKGRLGZGUcAg1SIvIk8A+HquuUdUHMttcA3QDLT6HOU5V14rI3sATIrJCVRd7baiu\nGtIi49SvXd3dJj9REJE9IQusxbq9ooJ7Uim+unkz22bOpLGxkeOOO46vjxpF+sknTdcTxGc/G/1k\n+fiuF3J92YbRqAbkYhtbo15TVZXJ7TRmTP9EWF743b+wvEXFKF5usQwwocJBVT8XtF5EzgdOB07I\nSC2vY6zNvK4TkYXA0YCncEiStjblzTcjePxHCDALPE9vL498+tPct+eeHNHYyD7HHkvl3/+9kUzP\nPms6kCC+8Q1z/vPO817vdtn661/je/cUcn2PP96/c4va6SfhKRPkqhZ2TalUXxt+85toHkdB3kb5\nJPZLKk7DYhkICpl2ACcDfwb2CtimAWh0vf89cHK04/urlaIsabZoy4Jt4fOwCCkW/GIKekF7vvY1\no9N3VBa1tebzk09G93QRyfUKUs1VWYTlP/LSowVdX1D7HnnEW48et6pSPsbWsHMEXVMq1Zf+WjW6\nx1E+3kblWoXNsstCOdgcgL8AqzH2hBeAf898PwJ4NPN+DPBiZnkVo46KePzChEMTG7VjwV2Rbmjv\nn/6k3bvtpl3V1dqb6fR7QVtBOzBeRb4n8kvMVFsbnKwue5kxo7/bVVCCJnfN4Sg67qh5gpyiFPPn\nh/j9FtHDJqrRPaqQilJNLl8bgc1fZCkzkhIOYo5VnlRVjdO6uuUxtCEKKGlaM/UKTmb8nDPh2mtz\nt1Tlz3/+M88888yOZfMHHzAZGNvQwOHDh7PHQQcxfOJE9t1rL+Sb3/RWYdTWeqd+yIfx4+GNN/rU\nMDU1/sd21FDDh/unfsjGUdP45ft3vg/S0Tc1FV+Pfuedpmye3w8/ezbMnBl8TW46O8198roep5rc\n2Wfnd01Bxy7FvbJYshCR51R1XMEHSkLCFGs58six2tKi+tWvRht417NVZ3Fdn4upa+TW09OjL7zw\ngv74xz/W5uZmHTp0qJEkoCNHjtQvfelLOn/+fF25cqX29vb2F8VBKoywOsNBdYujzkBKrbIY6NFw\nmBoolSqvYDPrrWQpI9gV0mdUVBhb3jnnwLnnmuwO3d39MzyIKA3aShXbWcTnGc/yHeu6pYafrl7N\nU2edxZIlS/jwww8B+MhHPsKpp57KxIkTmTRpEvvvvz8iAYbrIOPqUUfBU0/57xt0XK8LjkoxE6eV\nOptntuF51CgzI/PKVgpGRMQ19Eat2JYP+R7bVmizlDFlLRzcfOYzsH51BwtnPs+bK7oY+dEUjBvL\nmg9qGNO1lslzj6G2u5Xe7dBZUUFXby8nbt3K8quu4oADDmDy5MlMnDiRiRMn8pGPfCR+A/w6gLvv\nDhYOV11l8hlFURX5dYZeFLNwfCmzeXrlRKqshN5e/322bctPQAXWYC2QuMcuVi4oiyUhytrmMG7c\nOF2+PDMTyPozaUMDPSL85qSTmPTAA9DdTQOwHegR4dbTTmPU1KlMmDCBkSNHFq+RH35oikl7deyN\njbBunRnpOkLl/ffhF7+IH3eQSplO0e0OWqxOpFR69I6O3ELcDkG2nHQa5s0bvC6iQddt7RSWAknK\n5hBDjzGAdHSgTqB
RayuoIq2tVG3dymn33UdjdzeNmIupBepV+ednn+Wcri5GLlhgDJxxRuVRWbbM\njKKzVUfV1Sab6TnnGPWHiHl/7bWwzz7xMqmCKTBz7rkmWO7cc+Gtt4o7unTUaE1NpiMWMa9NTclm\n8wyKEaiqMqNxv3XFmjWVgiixERbLAFPWaqWtW7dy/fXXU/Vf/8W3Nm2i0WMbX43+pk3G48Wp5PO1\nr8EDD5iRvJeON67+1ysy1mH7dnPO+fOhpaW/uiBOQJrTMff0GAHX1gZLl5r3QTOHJHTZxdTROwTZ\nNrwqxu0s6aZthTbLYCAJq3axFkBFRH86fHhwnEGcpa4u16MkH2+TuPmK6utNIqioxaVTKRNrECXt\ntZvB5DkTxStqZ8xYOtDeYJadGnaFOIcDDzxQly1bRtOjjwb7vRdCU5P5W27e7L3OT/87Zw5cd53Z\nNw7pTJlQETPD8FN3pdNGhfTLX3pft5fefbDpsoNsG0OGwC23wOrVO58nj42NsBSRXcLmMKSx0QiG\nFSsKKfAcTGenfwcdpP911ENxaW01S0WF8c31o63NXHcc9cNg02X72TbSaeOtdPHFRgBPn24602XL\nkj1/R4dR0c2ZUzy7lBelsulYLAVQ1jYHXnrJdAxtbca9sRgEdQhB+t+gBHRR6Okxo/x02t9l9JBD\njI0hqkvpYNRlZ9s2Ro6ESy8tfpbTgXYlLYVNx2IpgPIWDj09fR1D1Dq+cUmljGrIy20yyKffKzCu\nvt4YUqOomtraYK+9jIHVi6oq+P73zYjWb322x04p4xOSxB0jcOedxc9yWi5ptosZd2GxFEhBaiUR\nuV5E1orIC5nlVJ/tThaRlSLyFxG5qpBzJo6T2tkLP5dJRx3x2GNw880myG32bOOdtHhxf3WBHw0N\ncNBBweqF2lq47DLTvtpas1+Q+qG5OVjYDAb3z1LMfgab+s1iGQCSmDncrKpz/VaKSCXwU+BEYA2w\nTEQeVNU/533Gykozq3CorTU2iaCUC3V1Zl0qZV4dt8hbboGLLsrdvrHRuwP2UkdkB6U56oLXX4cb\nb/SOa3A661TKW73w8stG7dTdbWY1tbVm229/G6680lugJVFDYaApxexnMKrfLJYSU5C3kohcD7SG\nCIdPA9er6uczn68GUNV/CTv+OBFd7rWiutoIiNNOg8MOg4MPhkmT4MADvTtiJ/PmWWeZwi9OJxy0\nz7uRRQIAAAqtSURBVJAhprCOu0PNxxsoijDJplCvoyiZSotNvrEWpfDkCcr6Otijry27PGWRlRW4\nHngHeAm4HdjdY5spwM9dn78M/CTK8ceGxQI0NfUVdp8yxb8+sJfv+NKl/sWn/fbJ1z89rq/+YPeD\nLzTWotixGkFZdvMp+GOxlBGUQw1p4GfAHEAzrz8CvlKIsBKRacA0gLFhG3d1Ge+WiorgGIjWVli5\nsu+zY5AMSmPhpV7IVx0R1/A4mNUeSRh7i+3JszOo3yyWIlNwDWkHEfkP4GGPVWuBUa7PIzPf+Z1v\nPjAfjFop8KRxchTNnQunn246niCDpIOXfrtU3kCFnCdMnVPsNNFJ1VQutiePdSW1WIIpZNoBDHe9\nvxS422ObKuBNYH+gBlMu9LAoxw9VK8VdHJVBlJrCXuqFUqkj8j1PmDqmFKk1bE1li2VAISG1UqER\n0j8UkZdF5CXgsxkBgYiMEJFHM8KnG7gIeBx4DfgvVX21wPPmhzNyDYturq/3Vi8UGtkaNSI3n/O4\n1TmZzLW0tprPJ51kUosHrU8qOjjo3pZzrIXFYulHWedW8vVWcqiqihccJ2LiEa64wt8jpr4e1q41\nHbEf+XgD+RW1ufxysz5IBRTlPGEeOHHzNOWLzRtksQwoSXkrlXeEdBBO/p04wqGhwXRON95ogstu\nusnbIBkkGCC+PjzISDtrlhFaXukb4pwnzIgdN09Tvlhjr8WyUzB4hUNVFTz0EJxxRl8nlEqZjtiP\ntjZTha29vW/k/u1vm865mAbJMAO4o+KB/NM3hBmx4+ZpKgRr7LVYBj3lrVaqqtLlNTX9O/z6elNI\nxxlhO6qXlSuNR1LcKmtJl7308gSKk947XxVPmDrnrbdg//2tusdi2cnZNdRKn/iEsQ+sXAnr15tE\ndQcd1H8U6qhe7rzTxDt4UV1t1nkl10sqmVtQls841d/yVfGoGlXZDTeYz9n1ph1jtlX3WCyWCJS3\ncKioSEbnvn27/35J6NvDAr/eess/IV42+ah43IKps9PkYaqtzc3DZNU9FoslIuUtHOIQNDrPNy13\nVMICvx59tP+oPWgGETd7qpdgcq7zppuMcHBj00RbLJYIlHUluFgEpavOJy13HKKku3BG7fPmGRvE\n7NnJVAKz6actFksR2HlmDmEulFA8fXvUdBfZo/YrrihcxTOY8zBZLJayZecQDm4voZtvNt+tWZPb\n4RZL3x5UMjRoZpKEimewVn+zWCxlTXm7so4bp8uXB8ZI51cvoRgMVDtsRLLFYnGxa7iyhlEutYBh\n4DyBbESyxWIpAoNbOCSVHjopBsoTyLqoWiyWhClIOIjIPcDBmY9NwCZVPcJju7eBrUAP0J3ElAew\nxlg31kXVYrEkSEHCQVW/6LwXkR8BmwM2/6yqri/kfDlYY6zFYrEUhUTiHEREgH8A7krieJEJim1I\nIn7BTdRaDBaLxbITkJTN4TPAB6r6hs96BZ4UkR5gnppSoIVTKmNsUN6kUnpEWSwWS4kIFQ4i8iSw\nj8eqa1T1gcz7qQTPGo5T1bUisjfwhIisUNXFPuebBkwDGD16dFjzim+MLSePKIvFYikRocJBVT8X\ntF5EqoBmYGzAMdZmXteJyELgaMBTOGRmFfPBxDmEtQ8orjG23DyiLBaLpQQkYXP4HLBCVdd4rRSR\nBhFpdN4DJwGvJHDe0mA9oiwWyy5IEsLhbLJUSiIyQkQezXwcBjwrIi8CS4FHVPWxBM5bGhyPKC+s\nR5TFYtlJGfzpM4qNTU9hsVgGEUmlz9h5UnYXC8cjKon02haLxTJIGNzpM0qFTU9hsVh2MaxwiIpN\nT2GxWHYhrFrJYrFYLDlY4WCxWCyWHMraW0lEtgIrB7odIQwFkk0oWBxsO5PFtjNZbDuT42BVbSz0\nIOVuc1iZWHrvIiEiy8u9jWDbmTS2ncli25kcIpKI/79VK1ksFoslByscLBaLxZJDuQuHZFJ7F5fB\n0Eaw7Uwa285kse1MjkTaWNYGaYvFYrEMDOU+c7BYLBbLADCgwkFE/l5EXhWRXhHx9QAQkZNFZKWI\n/EVErnJ9v4eIPCEib2Redy9SO0PPIyIHi8gLrmWLiFySWXe9iKx1rTt1oNqZ2e5tEXk505blcfcv\nRTtFZJSI/FZE/px5Rr7lWle0++n3rLnWi4j8W2b9SyJyVNR9kyRCO7+Uad/LIvJ7Efmka53n7z9A\n7ZwkIptdv+WsqPuWuJ1Xutr4ioj0iMgemXUluZ8icruIrBMRz5IHiT+bqjpg
C/Ax4GDgaWCczzaV\nwCpgDFADvAgcmln3Q+CqzPurgH8tUjtjnSfT5r8CH8l8vh64ogT3M1I7gbeBoYVeZzHbCQwHjsq8\nbwRed/3uRbmfQc+aa5tTgd8AAnwK+FPUfUvczmOA3TPvT3HaGfT7D1A7JwEP57NvKduZtf0ZwFMD\ncD8nAEcBr/isT/TZHNCZg6q+pqphQW5HA39R1TdVtQu4Gzgrs+4s4BeZ978AvlCclsY+zwnAKlV9\np0jt8aPQ+1E291NV31fV5zPvtwKvAfsWqT0OQc+aw1nAL9XwR6BJRIZH3Ldk7VTV36vqh5mPfwRG\nFqktQRRyT8rqfmYRVha5KKgprbwxYJNEn83BYHPYF1jt+ryGvk5imKq+n3n/V0xhoWIQ9zw5BZCA\nizNTvduLpa4hejsVeFJEnhNTszvu/qVqJwAish9wJPAn19fFuJ9Bz1rYNlH2TYq457oAM6J08Pv9\nkyZqO4/J/Ja/EZHDYu6bBJHPJSL1wMnAfa6vS3U/w0j02Sx6hLSIPAns47HqGlV9IKnzqKqKSN6u\nV0HtjHMeEakBzgSudn39M2AO5iGaA/wI+MoAtvM4VV0rInsDT4jIisyoJOr+pWonIpLG/BEvUdUt\nma8Tu587OyLyWYxwOM71dejvX0KeB0aramvGdvQ/wIED1JYonAH8TlXdI/hyup+JUXThoKqfK/AQ\na4FRrs8jM98BfCAiw1X1/cz0aV2+Jwlqp4jEOc8pwPOq+oHr2Dvei8h/AA8PZDtVdW3mdZ2ILMRM\nOxdTZvdTRKoxgqFFVe93HTux+5lF0LMWtk11hH2TIko7EZFPAD8HTlHVDc73Ab9/ydvpEvio6qMi\ncpuIDI2ybynb6SJHK1DC+xlGos/mYFArLQMOFJH9M6Pys4EHM+seBM7LvD8PSGwmkkWc8+ToIzMd\noMNkwNPbIAFC2ykiDSLS6LwHTnK1p2zup4gI8J/Aa6p6U9a6Yt3PoGfN3fZzM54hnwI2Z1RkUfZN\nitBzicho4H7gy6r6uuv7oN9/INq5T+a3RkSOxvRJG6LsW8p2Zto3BJiI63kt8f0MI9lns9gW9qAF\n88deA2wDPgAez3w/AnjUtd2pGG+VVRh1lPP9nsD/Am8ATwJ7FKmdnufxaGcD5sEekrX/r4CXgZcy\nP8rwgWonxmPhxczyarneT4waRDP37IXMcmqx76fXswZcCFyYeS/ATzPrX8blZef3nBbpHoa18+fA\nh657tzzs9x+gdl6UaceLGMP5MeV4PzOfzwfuztqvZPcTM+h8H9iO6TcvKOazaSOkLRaLxZLDYFAr\nWSwWi6XEWOFgsVgslhyscLBYLBZLDlY4WCwWiyUHKxwsFovFkoMVDhaLxWLJwQoHi8ViseRghYPF\nYrFYcvj/hyKvsgKKKHUAAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68ac190b90>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ru6tAqLiAQxbhETaIB/CVqgGhccA09\nzETMRJ04AYOauGY0ChqYOJlI1IyJGZiMmKWNzqiMS4wSxyiYDdBxjaBBVEAjYac3mu7+fn+cuvTt\nqrtW3aquhvM+z31qudu5t26d75xvFVXFYrFYLBY3FQPdAIvFYrGUH1Y4WCwWiyUHKxwsFovFkoMV\nDhaLxWLJwQoHi8ViseRghYPFYrFYcrDCwWKJiIgcJCIqIlUD3RaLpdhY4WCxFAkR+ZWItLqWLhF5\nxbX+IBH5jYi0i8gKEflc1v7ni8g7ItImIv8jInuX/ioseypWOFgsRUJVJ6lq2lmA3wH/7drkF8D/\nAfsANwAPiMi+ACJyFDAP+BKwP9AO3FXK9lv2bKxwsAxaRGS4iDwoIn8VkdUi8jXXuptF5AERuV9E\ntovICyLyCdf6j4rIMyKyRUReE5FzXOvqROR7mVH7VhF5TkTqXKf+ooi8KyIbROSGiG09CPgM8NPM\n58OATwI3qWqHqj4IvAz8jXMO4FFVXayqrcBMoFlEGvO4VRZLbKxwsAxKRKQCeBR4CTgQOBm4QkRO\nc212LmakvjdwL/A/IlItItWZfRcB+wGXAy0icnhmv7nAGOD4zL7fAHpdxz0RODxzzlki8tEITb4A\nWKKqb2c+HwW8parbXdu8lPneWf+Ss0JVVwE7gMMinMtiKRgrHCyDlXHAvqo6W1W7VPUt4D+A81zb\nPK+qD6jqTuA2IAV8KrOkge9k9n0aeAyYmhE6Xwa+rqrrVLVHVX+nqjtcx/1WZrT/EqYD/wThXADc\n4/qcBrZmbbMNaIy43mIpKtbrwjJY+TAwXES2uL6rBJa4Pq9x3qhqr4isBYY761TVPRt4BzMDGYoR\nIqsCzv0X1/t2TEfui4icCBwAPOD6uhX4UNamQ4DtEddbLEXFCgfLYGUNsFpVDw3YZqTzJjMjGAG8\n56wTkQqXgBgFvAFsADqBQ3CpdQrkQuChjO3A4TVgtIg0ulRLnwBaXOvdNpJDgJpMGy2WomPVSpbB\nylJgu4j8U8aAXCkiR4vIONc2Y0SkOROXcAVGZ/8H4I+YEf83MjaIicDZwH0ZYXE3cFvG4F0pIp8W\nkdp8GpkxZP8d/VVKqOobwIvATSKSEpFm4GPAg5lNWoCzReQzItIAzMEIGDtzsJQEKxwsgxJV7QHO\nAo4BVmNG/D/GqF4cHga+AGzGuIQ2q+pOVe3CCINJmf3uAi5Q1RWZ/a4BXgGWAZuAfyX//8rngS3A\nbzzWnQeMzbTvX4ApqvrXzPW9BlyCERLrgQZgRp5tsFhiI7bYj2V3RERuBj6iqn8/0G2xWAYjduZg\nsVgslhyscLBYLBZLDlatZLFYLJYc7MzBYrFYLDlY4WCxWCyWHMo6CG7o0KF60EEHDXQzLBaLZdDw\n/PPPb1DVfQs9TlkLh4MOOojly5cPdDMsFotl0CAi7yRxHKtWslgsFksOVjhYLBaLJYeyVitZLBbL\nnkxHByxcCKtWwSGHQHMzpFKlOXdk4SAid2Ny2axX1aMz3+0N3A8cBLwN/J2qbvbY93Tg+5iUyj9W\n1e/k2+CdO3eydu1aOjs78z3EHk8qlWLEiBFUV1cPdFMsFosPy5bBqadCdze0tUFDA1x6KSxaBOPG\nhe9fKJGD4ERkPCbH/E9dwuG7wCZV/Y6IXAfspar/lLVfJSbN8CnAWkwys6mq+qewc44dO1azDdKr\nV6+msbGRffbZBxGJ1HZLH6rKxo0b2b59OwcffPBAN8di2ePxmh2owvDhsGVL7vZNTfD++/4zCBF5\nXlXHFtquyDYHVV2MyVDp5lzgJ5n3P8FkoMzmOODPqvpWJhvmfZn98qKzs9MKhgIQEfbZZx8787JY\nyoBly4wQmD4dbrrJvA4bBnPnmhmDF93d8NBDxW9boTaH/VX1/cz7vwD7e2xzIK6KXJjZw//zO6CI\nTAOmAYwaNcpvm3zaaslg75/FMvB0dBi1kXt20JopB3XLLbBjh/d+bW3w1lvFb19i3kpq9FMFJ2pS\n1fmqOlZVx+67b8FxHImzZcsW7rrrrrz2veeee3jvvfd2fT7ooIPYsGFD4D7PPPMMZ511FgCPPPII\n3/lO3uYai8VSRixc6D87UIVan/JSDQ0wenTx2uVQqHD4QESGAWRe13tssw5XuUZMqcZ1BZ53wAgS\nDt1+v3SGbOEQl3POOYfrrrsu7/0tFkv5sGqVmQV4sWMH9PR4r6uqMnaJYlOocHgEUx+XzOvDHtss\nAw4VkYNFpAZT/eqRAs87YFx33XWsWrWKY445hmuvvZZnnnmGz3zmM5xzzjkceeSRvP322xx99NG7\ntp87dy4333wzDzzwAMuXL+eLX/wixxxzDB0dHQDceeedfPKTn+RjH/sYK1as8DstYITLZZddBsBF\nF13E1772NY4//nhGjx7NAw/01a6/9dZbGTduHB//+Me56aabinAXLBZLoRxyiJkF+JGt/a2pMcbo\nRYtK484ax5X1F8BEYKiIrAVuAr4D/JeIXAy8g6mVi4gMx7isnqGq3SJyGfAkxpX17kwJxIK54oor\nePHFF5M41C6OOeYY7rjjDt/13/nOd3j11Vd3nfeZZ57hhRde4NVXX+Xggw/m7bff9txvypQp/OAH\nP2Du3LmMHdvnSDB06FBeeOEF7rrrLubOncuPf/zjyG19//33ee6551ixYgXnnHMOU6ZMYdGiRbz5\n5pssXboUVeWcc85h8eLFjB8/PvJxLRZL8WluNq6pfnR15X6uqytum9xEFg6qOtVn1cke274HnOH6\n/DjweOzWDRKOO+64vN1CmzPzwzFjxvBQTBeEz3/+81RUVHDkkUfywQcfALBo0SIWLVrEscceC0Br\naytvvvmmFQ4WS5mRSplZgDuWoabG3xANsHWr2T7IlTUpBnWEdNAIv5Q0uOaGVVVV9Pb27voc5jJa\nm7E6VVZWhtos/PYFE7/gvF5//fVMnz491rEsg5+BjKa15Me4cfDee+Z3e+steOklePBBY5D2w3Fl\nPf/84rbN5laKSWNjI9u3b/ddv//++7N+/Xo2btzIjh07eOyxxyLvmwSnnXYad999N60Zn7h169ax\nfr2Xn4Bld8LPX37ZsoFumSWMujrT0d94I0yeHGyHgNK5sg7qmcNAsM8++3DCCSdw9NFHM2nSJM48\n88x+66urq5k1axbHHXccBx54IEccccSudRdddBGXXHIJdXV1/P73vy9K+0499VRef/11Pv3pTwOQ\nTqf5+c9/zn777VeU81kGniB/+VKpICzJEGaHgNK5spZ1DWmv9Bmvv/46H/3oRweoRbsP9j7uPtx7\nr5kpOALBTToN8+YVXwVhSY5l
y+CUU4x9wYuyS59hsVjKkyB/+VKpIHZ3OjqMEJ4zx7wWM/vMuHGm\n85892wiAVMq4tabTZerKarFYyhPHX95r5lAqFcTuzEBkR62rg5kz4Zpr+ozVo0eXacpui8VSngTp\nqUsVTbu7MtD2HMdYPRAMSrVSOdtJBgP2/u1eOP7yTU1G9TAQKojdlaD8R6XKjjpQDLqZQyqVYuPG\njTZtd5449RxStsfYrcj2ly+1CmIw4BcHEva9l7oO4ttzBlscyqDzVrKV4ArHVoKz7G6EdbxedoOq\nKrjjDrjiCv/vOzv9jc+OJ9jkyeGdvnN+53iOobkYdoukvJUGnXCwWCwWN34dv9PxdnT4V1UT8Y5G\n9vveTVMTPPoonH22/7nBnH///cEr/rWxEdavT3YGUTaurCJyuIi86Fq2icgVWdtMFJGtrm1mFXpe\ni8VicRuMW1tNh97aaj47I/WwuglxvgdTZ6GpCR55xAiGoHMD3H+/t2AA8/1990W/3lJSsHBQ1ZWq\neoyqHgOMAdqBhR6bLnG2U9XZhZ7XYrFYohiMg+JA8uHss42X0po10YzVv/xl8PHC1g8USRukTwZW\nqeo7CR/XYrFYcmwLK1aEBwAGxYHEJZ02NoZUKnrwYcx8mmVD0sLhPOAXPuuOF5GXMVXgrkmqpoPF\nYikupfSyCTqXl22ht7fPsygbJwAwKA4krs3BHTcSJfhw2TJ48snga85Kz1Y+qGoiC1ADbAD291j3\nISCdeX8G8GbAcaYBy4Hlo0aNUovFMnAsXara1KSaTquKmNemJvO9m/Z21ZYW1dmzzWtHR3LnWrxY\ndcEC1VRK1XTZ0ZbGxr52+B37nnvife++7o4O853XuZuaVDdt8l/vLOl0fvcqCGC5JtCnJ+atJCLn\nApeq6qkRtn0bGKuqG4K2s95KFsvAEeTl407+FuYtVOi5RKC6OrcyWhjpNPz1r30zD2dWkh0HEvd7\nN0HX/uab/gkRwUQ/P/ts+bqyJqlWmoqPSklEDgA+UFUVkeMwhvCNCZ7bYrEkTBRj7+TJyaSXCPMo\niisYHJyiOEHqKr8UFVFSV4wbZ445c6axfxxxBHz720Z4PvFEsCH82mu9BUO5BMslIhxEpAE4BZju\n+u4SAFX9d2AK8FUR6QY6gPM0qSmLxWIpClEMrlEESJTcQEl7FLnbWMzEee5jt7bCb38Ld98N3/wm\njBzpb5NIp+Hww4OP59XWUgqORISDqrYB+2R99++u9z8AfpDEuSwWS2mIYnBNKl14kh5F7jaOGFG8\nxHleSfmc+s+zZsGQIf77OoZtd2c/cqSJynbXcXC31Qm4c0dZFzM77KBMvGexWIpPc7PpxLxwOjen\nU/ciTrrwoHP5kUr1JRv0ayMUL3Fe0KwJTCcvYoSEV0LEV17pX9r1q1/1L/Czc6cpALRlS19wXWen\n+XzyycWpL2GFg8VSREpZJCZpomR7jSJACjmXX27NVAp+9CMz8n/6af82rllTvEJIUVRhqiZP07x5\npnjPvHmmzUcfnRvZHfRstLX5ry9WlPWgy8pqsQwWBqJITJJ0dBiPm8svh40bYehQoyd367lTKZNG\n4vTTzXV2dZnrrK6Ony7cy7jb3AxTpgR7QgVlpH3zzeIVQoqiCmtrg7Vr4cYb+1RIt94Kf/mLmQ1E\npaoqeJbys5/BRRdFP14UbOI9i6UIRHUDLVeiuqdmG2Rrasx2Tz4JJ56YzDkfecTMAPJJRd7ZCcOG\nJfs7OJ38ypUwdy60t/tv62RuPfTQ/tcW1zW3ujpYmFRXw7ZtTknR8nNltVgsGZLy4hkIolY/89qu\nq8ssTv4hv4432+tm0iT/c55zTv9zOvmSonjrOOoqP0EXVzBkC7C6uuDtq6pg4kQjHNxCJEww1Nb2\nzcKqqszsbc4c/+0rKpJ/pqxwsFiKQFJePANBVMGWrwD0S4Phh3Os7NF3VDVdUoWQvISh0+GnUsZu\nIGI8ltx1IbIFQxhNTXD77UYd5bQV4F/+xf9+d3Ul/0xZ4WCxFIEobqDlSlTBlo8ADJqV+NHWZlQ4\nl16av0tqErWYg4RhVRXceadRqzkCaNIk8xomGJw8Tm5bzdFH982sHnrI3IMgm0NDA+y112buuy8k\nkVMMrHCwWIpAULK3OF48A0FUwRa23YgRxkPLrQIKc//0oqEBNmzw36+zE770JROtXcygsDBh6Bie\nHe69N9q1OmbftjaYP9+8Hz68b4ZUXx/uFdXevpXLLhsG7Ag/YUSscLBYCsAvYjVpXXcpiSrYwrKd\nXnEF9PT0VwF94QvxI6GrqoynlN9+nZ3wwAMmXUUUNZPXb6YaHnkcdzaYT9T39OmmOty2bX3fBR9D\ngR2ccMK3OPfcbzNhwgTGJeUKl0T2vmItY8aMKSQ5ocVSVKJkLHWylc6Zk3+20oEgajZWv+3Sae8s\npPX1/uucpa4u95wtLeH7uTOi+t1nr/Y2NprXsGvdtMm0P8o529tVZ8xQramJl0kWVKuq4mzfqxMn\n9vRrJwllZbUzB4slD6J69CSh6x4Iohpxvbbr6jLeNV709ITXZhYxsQ7umIqPfCTYaO0m2xjuzBRW\nrIDvfa+/DcBrFuD1OzpG9Oy219cbO4MzG+zoMO6tt9xi2hs/YaDS3a1Ej08WjjzSJ1KwQKxwsFjy\nYDC7qkYlqmDL3m7OHH9VyI4dpjOtre3LQ5RNRYURDM4x/TpmP9zG8Ow4jDgEZZ51s3q18TByzuW3\nHZi4B+Puq4Bfp96NKY8TjZkzI28ai0TSZ4jI2yLyioi8KCI5UWti+DcR+bOIvCwin0zivBZLsQhL\nezGYXVWLTVC+JTCdo19aDOh//9wzNK9qb144+n/3vvkk9IuSebaiAh5/3HsmmU1lZS/HHPMSJ5/8\nZYytwJtUqjpGK5X/+FGMUOsYJJlb6bOqeox6R+ZNAg7NLNOAHyV4XoslUZYt658Qbfp0E2W7bFnf\nNkklnEuaMKFWilxPUZPo1dZ6f+++f/l4NzlG83z29WpHoanLHXp6hOeee5TXX/8NJ5zwnxgB4SxG\nYN5zj3DhhXHURMIts7vpfK4ImSSSMFwAbwNDA9bPA6a6Pq8EhoUd1xqkLaWmvT249KNjdAwrETkQ\nhucwI3JUI3NSbQkq6yniv15EdckSc5zZs83nqAbaIUNMWdGWFtWTTopvDPb6HYOM4em0WX/zzd0q\n0htqPD7xxPZd92jjRmO0Pvlk87p5s/m+pUW1sjJ6O2tp05a6i00t1dmzdSi8pUn064kcBFYDLwLP\nA9M81j8GnOj6/L+YMqFex7I1pC0DRpSOwCHJzrbQGsxhQi2onnE+wixKe4PqPqfTqjNn+nf8UTrm\n7CWVUr3+euNRlI+XUPbS0GCub+ZMfy+lVKpdP/e5s7S2dppCe+gxJ04Mv7cdHf7n8166dQ43mBsg\nosdCj5aRcDgw87of8BIwPmt9ZOHgXuzMwVJqgkaqIsYl1U0SrqpJCJkwoTZjRnSh54VbG
Myendve\n+nrV6dP734Ow2dWCBaYDDmpT0DG8fp+oHWp1dfRtRVTr6noVerW6eodCj8I2hY0KY3X06C9odXWb\nQtjMQXXKlGi/55Il0duXZqu2MHXXF2NAtVyEQ78Dws3ANVnfWbWSZVAQZ+aQBFHVWGGECbWTToon\n9Ny4hVeU0XZUVVaYymjWrPjnD+1IM+efPTvf423X/fa7S0899R69996H9J131kcWXmDUXlF/08WL\nTcxHn4rJW/g0sVE7qN31RdkIB6ABaHS9/x1wetY2ZwK/wvhufQpYGuXYVjhYSk2pbQkLFqjW1nqf\nr7bWrI9CsWYOQcIraHHfK7/ZVZjKqKambzayaZN5nTUrrsqlbznqKDNyX7DA6Pfzua50uqffvWpZ\nsEPTqa5YwinOAMN972bP6tIhbNY0W1Xo1jRbtYmNupSx/U5STsJhdEaV9BLwGnBD5vtLgEsy7wX4\nIbAKeCWKSkmtcLAMEPmqeeLaDcKMtmDWR1EvhQm1oM4wSOjF0fm7l4aG8E4wqsooymwkipqotrZv\n+yFDevWqq17S2to2FenUKCohyJplLV2qs1P/rEJP5PsSNksLo33xMm2p/0edU/MtbWFqvxlD2QmH\nYi5WOFiKSVBnHteWEFegxBmRR52xFMNbKa63kHuZPj1am6POBNwqGef3mT7d7O83+wpeNmpFxT56\nyCGztbJyR6R9do38Mz9gC1M1zbbI50xENbl4sbloH5cmKxwslgJI2tMo7qg8zojcr0NpbzcqkilT\n+tQljvrFT6jFFXotLf5G47Clvj6aUJs5M/oxZ8+Odt/7L96zgrq6nXr33aaBUe0au37PzA/YQa02\nsTFy+wtWTUa46KSEg02fYdnjiJoXKSpRU2m4s4G+/HL0jJ1eEdfLlsHJJ5vi8g4PPGDSMzz9tH/a\ni7i5npqb4Stfib69m6jVyY44wrQ7ShTzLbfAtdea36fQILeOjirWrTNdYHaOKFW47baAjLqZyLgU\nyiJO41SepJsqWmmgLy1GXzBbPR3UNNWxaJEUlpG30IuOgRUOlj2OpPMiRYmgza5+VlNjOqAoZEdc\nO8LNLRgcWlvhlFNMAfsk0oKnUqZWwrx58feNmkYkKPW3Fw89BCef/AEPP/xXWluPJDzRg3/Esfs3\nyBac11wTkHjwkEN2FVoYx3Le40AWMpm3GM0I1gCwmoPZwFD2ZQOH1b5L8+2nkhp3XvQL9SIsD3hl\nJb09PRFTFAZjhYNljyPpvEhRit5kz1T8ks55kV0caOHC4LQXnZ3JJv478MD89quuNrOwzs54dZ6D\nZhCdnb18/ev/xoYNVwJTMV7yjSEtUfwExG3fU64deT+pNW/mFHLoJyw6OuAhV8GH/ffvl961jk7O\n5xf+TegSWJtATpWghw1M2tuESDK3ksUyKEg6L1JQLiHn+yBNQFDHmU7nFgdatSpYOOzYkVziv44O\nk+Y6H7q64Cc/yc1L5YWj1pk3D046CfBNTNfGiBE7+e53v8vixVfS1JSO2Brv43VvbeWhSxb5J9EC\n72Rbp5wSfeoH/R+soARXYcmvIiSuqkiqX0/CcFGsxRqkLcUgyIWyvr4vx00cCgn2mjzZ36XVy4DZ\n0hLsAltbm2vAzjc9RxzDeVDKCj9DbHu76s9/3qtXXrlJp017Rv/+7/9RR406TGGTz3F6+x3Hue/B\n6TJ6FR93U3FST/g1Nt9AD78bEPSg5FNhqYgG6QEXAEGLFQ4DR6G5fsod5/9VV9f/v1VfX3h+pDjB\nXk6Amp87Z319bkcfFh+QHYVbiGdWVFfWmhrVz33Ov5N2e1z19vbqG2+8od/85kKtqWlVke3qpKQQ\n2awTJ16rV131C21s3KnpdG9omx2vLb9z19Lm626anXoip7FxAz2cjIJeNzvMrW3IkGDB4vWweWQX\ntMLBUjRKmb2zWEQRbhs3Ri/7WAhhAWrXXx/c39x0U+4xly415S29OuHsMqV+506lTKcadJ1R+8Yh\nQ4IT6Yn06tln/1HPO+88HTZsmEJK8XEBde69r9utz4+7dKlphxPzUFurOqRuhy5Jfc7X3TQ79cSu\nDt6JVIsb6JFOm5vqN0rwe+BqavyDNYKCIzx+ICscLEUhqVw/pcBPAEQVbqXMo+TXptmzVceMCe5v\npk3zv/7sOAcvFVRQ515VZforJ0V2NlGimB2BFHyurQpTdfjw4Tp16lT98pef0vr67vj3PuTHzREo\nm80FLGWsNrExNPVETgNmz44uGJw/iRNskv1gxgno8BNWEX4gKxwsRaHUiefyxa+PWLw4unCLm4G1\nUJzOfPJk1SOPNB1zlMjeyZPzP2fUga+7hkI22fc6lTLpKk46yVxPW1uP/t///Z/eeuudWl293fP4\n9fWd+uqrf9be3t7Qdvne+3xHLpkLaE/tpS1M1Tnc4Jt6ot+x2tv9VT1OQxsa+j+A99zjL7xmzAj+\nEaqq8vvjZf1ASaXstq6sln4MhvKXQUFsp59ugq+8yI5hGDnS/Pu8UDUuqEny6qtw+eX9vRCjxDNV\nVuZ/zjDPRwdVOO002Lgx13sqO0DsoIN6GT36Rf7wh2dYuPBZrrpqCZs3bwZg+PCn2LChhYqKanbs\nqM4EjwmLFtVy1FGHRGqXr8fYwoWw06ckZlCAyrhxsGoVdSNHBrubptP9I93uvTfYNfTGG00EnxMI\nMWmSefWLrpwxw/9Y4P/gZvsyZ5P1A62ZOfOd4BNFwwoHSz/y+tOWmLAgtq4u73UDKdw6Ooz3Yz61\njM88M//zNjfDJZdE23bnTu/+defOnbz88gusXfssv//9s9x663Ns27YNgI985CM0NzczYcIEJkyY\nwKhRo3ZFgnsGj7na5Rf45tsXLl6c/8jliSf8O99UCs46CyZP7t/YsICzmpr+N+vee4MfzC1bTPCE\nXzHsigojoJzryQnLDsAVlLFh5sxNwRtHo2DhICIjgZ8C+wMKzFfV72dtMxF4GFMxDuAhVZ1d6Lkt\nyZPXn7bErFrl38l2dZn/rJeAyBZua9YEn2ft2njtcqfHyIqnYu5c2Lo13vHA9BXnFRBU6+gmorBz\np+lfu7q6WL58Oc8++yzPPPMMv/3tb2nLdJJHHHEEU6dO3SUMhg8fnnOcKCk6sgPfQvvCjg4TNOFH\nfX3wyCWoo9+xAz7xidxGu6Kgc0inc88XNu0eOtQUzvYTDp2d0NQEt99uHj4/yVoikpg5dANXq+oL\nItIIPC8iv1bVP2Vtt0RVz0rgfBYXQR1SPgT9aR95xIwskzpXvowcGby+qspbOPT2wsqVZoDX3Gyu\nwS+nj9d/34+ODtP533KL+bxjh7lnl15q7uXRR/eti0NdncmTVMg9Xrgw+rYi3dx333e55ZZ/piPT\ngR111FFceOGFTJw4kfHjx7P//vv37eAEbOX5QGSrqwL7wvvvD478Uw0eueQzJR41ql8UdD+8RkqH\nHGIa79X5p1Jw+OHmgZg40f+43d1mdHPjjf7X
UiIKFg6q+j7wfub9dhF5HTgQyBYOloRx5+tpbTWD\nkosvhm9+sy85mZswQeJef/vt5rvVq2HDBtPhnX46iJjn2t35jRtXumuOwtVXw5139gm3VKqvX5kz\np6/tjz4aHNkcNktyhMK3v52bDsPpg8aPhwsuiD56dxCBb3yj8Hsbphlxo1oJPM5XvvIVJkyYwPjx\n4xk6dKj3xtnJopyb+sgjZkoWUWDkzDI6OuBej4f0l78MbvynPhUsmEaN8r8RXj92Rwecfbb/D3f5\n5XDrrf3bOGmSvwDr7IQzzjAzg6uvNg+iF+Vi2INkvZWAg4B3gQ9lfT8R2AS8jKkId1TAMaYBy4Hl\no0aN8rfQ7+GEBW5mu27mk+u/sdG8BrlCDoR7axRPF8elMahyWFOT8dDJJ6bD8amP4onok3Y/cAlK\n0x05OLG9XVtmPKfpms5I56yr2qEtM54L/0HDHj7HBStugEzQQzplSnDjg4ozBwW0+LlphfkAuysH\nOW0MimNwRzMW2SUQWK5J9OdJHMS0hzTwPNDsse5DQDrz/gzgzSjHtK6s/kQJTnJ75AV5AG7alH+G\ngDjPclJR13H+W1G2jVvjIKmMClF+OzdR4ze2bdumv73jDm2rrdUNUhe53oDQrXNqvhXeoceNGo4y\nggh7SOfUBCOlAAAgAElEQVTPDz6HXz3VsGpCfiXr4ga/NTWZkUgU/9yka9Fm/bEq4HmN0L+GLYl4\nK4lINfAg0KKqD3nMTra53j8uIneJyFBV3ZDE+fdEoqgLHO8+573fNjfemH+K+KizYD8tRD5qqThG\n8zAb4RtvxK9xkGRK/dpao5Ho6Qk2yga5755ySi933/0Ev//9b3j22Wf50/PP825vL/VAPWTVG0hj\nMpTmZiltoI3RXSuga0twYYs4uiqIlgc9zAWtujrYQHTeebl600mTzHX46ffBrPN6gKP6ALvbuGFD\nNLtGbGt8AB5/rI/DJ6IfwJ8kvJUE+E/gdVW9zWebA4APVFVF5DhM1sCNhZ67lATp65M2CkchyrPr\ndNyqwR3kihXx/utuori3Jl1cJ85/K+g+qRq18ZlnxhNQcftGNzU1xivI3d6jjzbPz8qVpn/ZZx94\n80342MdMGxcuDE7TvXVrK+f9zc85r/K/mDZiBEcfeywfeu21XTu46w2s5DDmcg3t5GYzraKbZlyj\nCb8OPW7HGWUEEXRTW1uN987TTxt/4M5OY+SprTXun1/4gik68dhj5obt2GEERm+v+RyE3wMct8hE\na6vxRopqxIpljffB549VCQVExrgodOoBnAgoxp7wYmY5A7gEuCSzzWXAa8BLwB+A46Mcu1zUSkkk\nUkyaqGkNWlqiJX7Lp4h81FlwsVSsUdRBUe5T3Jl8XK2KW4MxY4Z3e4NsPg0NPWoyi3qXu4Qevbnm\nW9rr7BycojRaKomYKRsiPYiF3NSTTjLbOKkpnOLR2ZkTk3yAvULDg441e3ZpOwSfe2bTZ5SIJBMp\nJk2YUdRpQ5iK8733ohd5d3d0SWT2LEaaCq/7FHR9cQVU3L7RfR6/tNWF2DDSbMvNLBqytJMKTiUR\nNWVDlPwfUf4MHR3hFv4oOVLiLPX14Q+wewSyYIHqhz4Ufp1xjVj54vPHssKhRAQNaFKp/BIpJkl7\nu3lGUimz+A1W/AY0TiqYqAOw6mrViy82I+BZs6IXqR/ofE1BOc/yEVDZKfVra839nznT/1obG73v\n1Z13btRUqivvPs4zs2ihS1Qj8oIFwSPqIUOij5qjJrmrr89/qus+Rj6FO4LaWOrkY0WeOdj0GSEE\nqUKDYnJaW0vjrlxXBzNnhtS7xVvF6ZUKJohUCq68Er7//b40FVEMy0Hq295e4/5dbIKK2OeTFsS5\nn/ffb1z7160z5TQ/+MD8Q73QnTt56D7lhM++z7PPPrtrWbVqKvCtGGdXQEinlarWLSziNFLEqDua\nTU2NWfJJ2XDRRXDUUUb33dXV3/hbXR2/LSL+N9AhKEdKNvX15rWiIvf6mprity+ojdm2lWIbI+Pa\nReKShIQp1jKYZw5gBhnlTFzdeSHxDsUorhOHTZvi124Ic71dujRmHRi6dRY36vGmd9e99tpLzz33\nXP3iF3/pm77aa6mlXadM3qktM57TjtqIwRZByze/WbgaJCiWYMgQM8MI82FOquxc9o/r2CmSUPNE\nnQaXyvbgcZ5joFsT6H8LPkAxl3IQDmH6+igqyHIhu7MLKs7i/P8c+2Z9fbAg9HMXd1Oq4jrZOP+f\n7HPX1eX+X5175Ng7/f7bYdmcPfuOTNWxXtA37r5be3p6VDW+DaOJTdqx4BfxffG9lrq6ZFQhYR27\no3py9G+zZ+f+4FFvRENDNCNZXV3ynXGUGIVSF0XJsnEkFedQ8AGKuZSDcFANrw/s2xmUUf2DxYvN\n/8kZcKXTwc4eqZTZ1kkx75dq3r249fZeo+6BsD0E/U+z1c7O79zQ4H+Nzn+7pSWaLbZ/p+6yDdTV\n9RtNL13S2f8Zq+nURrZqmi3eXkUnnWQMP0GNjdSohDqrfASV10g66o+wZEm495BXCb0kCJsVDLCR\njYQipK3NIQJBLslPPBFdBTlQLFkCEyb0b6Oje5fcWCgg154SFvRVU9Ont3ficjo7zZJKGdXoF76Q\nTK2IOKrcoHxtFRXw+OPGld/LZdyL7m7ljjvW8Nxz77FjhxOy44VSww52Uk0DbVTR3d820NEBX/3q\nLn/9cVzMe9fOYuHoq3lrbQ2j319O80/ORdvaWMhk3mI0o3mLZh4yx3gaWLrUP8ArTHefXbugUOLG\nPoC52dmBLu4/2+LF8LOfedsLxo2DH/3I3EOvHzidhsMOK/y6vAiLURgMRVGikISEKdZSLjOHIIo9\nSCg05UR7e/AMvK6uv/qkoSG6Ojd7FO7MqL1qGzuzkULvVRxV7tKlwYNLt5dSdHV3t8INClMVOny3\nS1Xv1BlyV3jVsezF8e6Jo2uKU5Fs9uziuFgW4t8b9MMHuYUmnYYiH8pliuyCcsutVIxlMAiHoOcz\nX285hyRsWi0twZ29SJ9LqqNnj5sozp27bMGC4G0LsTnEUeVGiR3ILhUcRStSW9mq3x57m7576506\nZIhfUJpq05Be7ajbK96NzL4Y5wGI0sF6Rde5sw/OmGGMTMX0u89+YKPo3QoNdBmoKNSgczvZHEsp\ntFxCaii8pQn0v4l15MVYBoNwUC2OJ05SNq0w1/Gamr7kc1EHfpWV/Q3V7qSWkycH73vSSfn/l+Mm\n3AuL3WhqUt22rUt///vf63nnPayVlW3h/TabtAMzBVqa/qym63fmbNPYmLmeJUui3dCgi1mwILyT\nDepgS915ZgeNhT1UqZR/0rx8zllM4Zd9zqA/aL7pfvPB1pAuXzIlanOK0LS3myWf/EFhecictDdh\n+ndHFRyWyv6hh6Ink7v4YnOt2arWZctMepsg9t47PKWM3zXFUeWuWOFfcAuUyspuDj/8KoYNW5Cp\nclZ
LRcUHvtunaaWKnRm7QSe0wjh+w/ohI7l/3jv88tc1gMnTdN55metZVusfXBGE+2LWrAn36fcL\n1Iia1CpJf/zsLIZHHWXyIfmVwevshCuuMNvlW7wibubEJAj7g777buG5k6Lg8RtX+BvCYmGFQ0IE\nlaiNkpQymygdoV+m06uuMtuNHGmSvPnVSBeBJ5/s63ij9GE1NSYIzquQ0Kmn+td/dzjzzOD/clD2\n1kNGdtFQK7R25gZXZfePGzcCmWAxL3p6fkxb2zNcdNFFuwrbvPtGLadO2EK3VtBGA/W0oVTwJX7G\neJb0GYNd1PW0clH6AS7676wLcm5IPkWj3RcTxdDrV5koygjj0EOTS5frxbhxRgg5pfK8jMdbt+Y3\nghpIovxBSyG0kkwRnEVSKbtPB76PyQb4Y1X9TtZ6yaw/A2gHLlLVF+KcYyAyn8YhroNC1BG/X0Tv\niBH+g8JZs4LbWllplscfhxNPDL82NzfeaObO99zTV5zrzDPNa9gzGlYPOXCge3I3q+VQqjpfBPbK\n2bdKumlurqK9vZ0//OEPvPO7VuBs33Ndc80F3HrrV/t9t///3st79V9nYdspud5Bfvh5nxTyp3V3\n9mFRsE1N/h5HYZlOX3vNHDupdLl+OGH8I0fCJZfkls2D/EZQA0k+ZUeLQSEpgkNIImV3JfBD4BRg\nLbBMRB7R/jWkJwGHZpb/B/wo8xqJJGsBFIsonblTbhfgttuCryesZgHk3/f09Jj//JQpfSmjb/NM\ntp7LAQfA/vvD9u193z3wgJlRBGk/amvD6yEHDnS3d/A4J7CIU3fVJmijYZeb6COdZ/P5Cd3s9/zz\nHNLTQzNT+Q0n005DzrHq6+DYY3O/Z9Uq6to3cj6/8G9kNtk/riPp8/3TVlf3Tf0gNz+5Uw9WxL8e\nrEPYrGPuXDNK8CKJzjp7BPTWW/4PSb4ungM1aoxTVKSYHHKISRFSDAFRqNEC+DTwpOvz9cD1WdvM\nA6a6Pq8EhoUde8yYMSUPNvQjzKU0yGupsdF4KEat3OYQFnxXaHCsc84FC6JnLcjnnLW10WyOgdlb\n6dY53KCKd0bR7aDtFRXaUV2tvaDt1PpWQGtik3YsWZbbgHxycTs/ipfLaL7J4byMl/kYXfN1Lw0z\nckfB6+ENirrMx8VzID2VyuH8qsbwnfWnKZusrMAUjCrJ+fwl4AdZ2zwGnOj6/L/A2LBjjxkzJrbL\ncFKlKN1EfQb8tovaRwRdT3afkG9NAa9zTpmSjKDxW4YMifY7BP7WmdQTfifp9fgusG6B18giTmda\nWWlyp/gFdYTlG4myJDH6CctXHudhjErQiM7vQYt7reU2aiylp5T73B73YLfNyioi04BpAKNGjYql\nyy+G+imKw4dq38z29tvNurVrjdqxqwsuvzzaudraTDWwbA2F18w+qYSMzr2NG9wah6uvjjbTb26G\nGTN68XK2qHRXKfPAy+zsroCWY0PYgikDN3Nm3w6plEmxmh1O7kVFRbBezytyOZUyxxUJTunrkIRq\nZ9w48wPMmRNvv0JUI0H6wbo68+oV9VwMd75iMxCeUg5FNEZDMgbpdYDbiXNE5ru42wCgqvOB+QBj\nx47VqHafpEtROgSVZ9yxw/Qv2fYDd4T/nDnR1YF1dUYN7P7f+Ak3r1KZYf2ZFw0NxqD81FPx942C\nCEj3Trj3vz31wuvXr2fx4sW70ldv3VoLPAlUAw3U1OykpqaCRfo3pNrip6Wuo9PfhjB7tjGS7vI9\nxbiORtHh7twZ7pqVTW+vGS3U1ZkbU1cXXN84rh7eT/8elK88KKV1vrr7oBFdR4cRyIcfXpiL5+6S\noqIQimiMBih46oERMG8BBwM1mFKgR2VtcybwK8wA71PA0ijHHjNmTOQI+WJFrF98cfDs209z4E7Q\nVqg+P2iWnJ1QL1/NRXbxGv/FPyrY897Xd2tL/T/u0rX1NDTojoYG/ZfmZv3oRz+qYNJX19fX6ymn\nnKL//M//rE899Vu9556uvpn6kmXJ6NC8llSqv44wKWNOlKW6WnXsWP+HKJUyOr8o6oog3WfYnyjJ\nlNaqpUkfUQ4VpAaawVAmFOOi+gawCrgh8527hrRgPJpWAa8Qwd6g2hchHUXnX4xSlO3t4Z2uX7ZS\n5/kMU2MXYqsLSoGdSqnOn28MwdOne+cYSqe9bZ5Tpvhfdy3tmqJNIayucabvYbNnXqFNoGefcore\nOnu2vnHzzdp9003eHVOhNTSjLvlI8yQWJy960DZBaa7D7pGX9C+28bQUOY/KIa/SQONzD8pKOBRr\ncafPCLP75DuQCDJgh+UlClrcAsnrfzlkSP8caEG1FbyEW9S6yKH9xqbcGxDmNTSLm3QBF+oU7tcp\n1Q/pgvk7dMn817SJzZpmWz/j77PuovWupTedNucM67AK7azHep/f94YV4uFTyOJcf5ig8OrQoz78\npTSelkIYlYO30EBji/2Ek89AIuzZCstLVFkZvYZ0ksIt6mB6ypRgN9VslY9zA1pm/zmy11BPfb0+\n+w//oO1VVZ4upl5eRLsWv5Sp7h+sEDVPQ4O5AVGq8mRL83w8fApZGhpMcrwpU8K9nLIf6GJMm5Og\nFMJoIL2FygVb7CecOAOJKDPxlpbgmiP19f79jnOMqK61cYRb1MF0KmWWKLED7qVjyP7a1OStLsou\nZt8L+r94u5IGLjU1Rufutc4tDaNUGAsz1ixdan6ooIpF2cEYmzb5t899DUkKkTlzoglDryyDe7r+\n3bILEkrZnUiCpnLBqcExb55xRJk3z3gpebmxRvGEa272r5Hu5CV69FHj8FFj8q7R0NCX0eCVV2D4\ncJg+HW66ybwOG2ZcbrNxvI+amoxjiYh59cqOENVJwSm2o+q9voF2RpPr1ZHqaWPRVU/S1KTU1/cg\n9JJmG01syilmL8BEv6RSQXR1+Xv7uL1Nmpv7QsJzGpoyBV8WLw6/cSL+0cBgXM+uvLLvx0ml+n5U\nL+rr4Z13grdJpcyDuGSJaU9trf+2YCKtHfe8ILK9cYLuUSmjdS27F0lImGItxUzZHXUmnl210J2m\nOtvDx70urDylU0PBy/4aOEtub9eWGc9puqaz4IFq9izAWXpA7z3ySB06dKTCVK3kBr07qGBNWAGI\n6ur+1YTijoyjTAn9blxcg3ZUw/Ts2bltc2YgXsbj9nbVadOCz71gQTSbh9dswOrfLRmwZUILIyh+\norrazDg6O/0rAqqaWYE7rqKryyxnn22C4fxmJu3tZkCZTufGMQTG1GSi/Jp3VnJp15tAyEh0F0qK\nTqrY2S8fUfYswKEV6Hr3Xf79Ix9hn8lpRl1xPgd///vIfJ84A7+0rw6TJplR8dChsGED/PSn8bKM\nhpVlBP8bFzdQyJk2hk3PnPqqUdrmtG/ECP/ynSImctKZQgalufaaDQS1o9yzVlrKkyQkTLGWYswc\nnAHmzJnB6uKGhuCBV5iaN05Kikied1kj4OzUELW73Es92pPq
0gU1X8kxFvs1qNe5iOxcQYUUs3eO\nM3168I1JpZId7cY1aDvTxmLo8YOOWVvbP6ahvd203W04ymc2YGcUexzYMqHxyf6f1NX1vfr1FfX1\n/asrOsLlpJOC+5cpU/LIqRTmV5t1QLd30AIu0CY2eR7/QxVbtC1MGDgSMci4G3SjnGLUQUWbnRvq\nd2Oqq1U///lkvU7iusJGcWl1B47FSeIVNejF3XkX4o1TLvmHLCXFCoeYBP1PamrC4xnSabM0Nob3\nNel0tAqJ/Qar09eE+9WGjID9Es0tZmy4N9HJJxtXyqDRcphEdAwpQe6YDQ3hHj5Jjm7jxi24O02/\nUfc99+Q/Go8aip5E5229mPZIkhIOe4zNIUz1HFaJMU5Suqoqk67nqKNMkZru7R200oDx7clNEdfQ\noIz+2begPSAxVISKYIGJ5oJIp+HLXw7PV7P33v45ehoaTL6c8883x3nwQe/jtLfDtGlw//3+SaGS\nLDjjlYTK8QZy9P9+OYXcevyVK429ZMgQmDGjf06kOO11H3PhQlNX1St5VxLJ42z+IUsB7DHCIeh/\n0tUVXqwmKo2Nff3LuKM7eK9iNAv5LCs5jLlcQzvpnH2qenfSLAu9D+jyq9VLL/UpetlHHZ1M5Reh\n2/VvgKuQdFCWw6AMfW4jaVi2xPHjjcXe6SAffbS41cGCvAqiGJIPPdR4DjjFdvyI2l7HeB4kRJPo\nvMulWpllcJLE9KNYS5JqpaAZdhRNR9SlX+2CrJPmqH1SO4w2Yvp/+KqMekV00fjxevzxx+unKit1\nI+hWjLtp7MAzEaPXd/L5ZKtDNm3yvxFxcvTEiehLMrq3GMU84rjCxm1vsdU+Nv/QHgl7ilopKS+8\noPoH1dVm8Hr22YWlvwbj1blr8Jg1XclR+3SvofnRy0m9Ww8t3iO87ar8ZMkSeo47jglXX82yT3+a\nz2zcSMW775r83kEpn7NpaIA77zTTpOzR8pIlcPrpudMnJ8Jv13QoMwq///7+RaQ/9rG+fRxVzskn\n968nCuYGv/JKn+9uUqPbYtWSjeMKG3c0XuxSk34qtUJTclv2DAqRLMCtwArgZWAh0OSz3duYbKwv\nEkOqHXHEmES98MIGve0bTYDZTeOf1vrK/ILM+g0eI3jKdKVS+s0ZM3SrTyBZVzqt2//619yL8XN1\nHDLEvzpZU5OxlGePrH/9a/821terbt4c70Y67QvLLaJa+Oi2vd1cU5Q8Tdn7RZllxHGFzWc0XgpX\nU5t/aI8iTh8btBQqHE4FqjLv/xX4V5/t3gaGxj1+ZeWYyP/BqP913/+J8yfNuGv2qYD6soym2aKN\nbNUU7eqXqrqfNqCjw7+jziw9oBdXVuq0Y4/V9lRKd9bVaW9YJxEWlevV4ThuVtmd0Lx5wR1enKx/\n+RbYyLeDdPYLSlRXaDRxFFfYQjt023lbEqQshEO/A8FkoMVnXV7CAbyFQ3aOtIIHXz4dnleW0XZS\nuoALMjUNgvvHv6xerV0hfv+9oDubm/vaEdZJRO2c3ceKmpnUa8nWo0ft9OPaEuJ2kFFtAe5z5TPL\nCJrZuPOg5BP3YLEUgXIUDo8Cf++zbnVGpfQ8MC3kONOA5WbxFg5gEmzOmhUc6Rx5lp9HzYAc43JN\npw4Z0qNz5jyh06dP1yOOOEKngm6LcrwpU6L/8vkYMQupiVBT0/+YUTv9Yhtbo16Tc658Zxmq4aMP\nG4VsKSOSEg6hBmkReQo4wGPVDar6cGabG4BuoMXnMCeq6joR2Q/4tYisUNXFXhuqq4a0yFj1a1d3\nt8lPFERkT8g8arG6jcsrKw5lWWo9v9q6gJkzd9DY2MiJJ57IV0eOJP3UU6brCeKzn41+4nx81wup\nNZttGI1qQC62sTXqNVVVmdxOo0f3T4Tlhd/9C8tbVIzi5RbLABMqHFT1c0HrReQi4Czg5IzU8jrG\nuszrehFZCBwHeAqHJGlrU956K4LHf4QAMy+c4vVbeuGrnz6Nx/b5G45pbOSAE06g8m//1kim554z\nHUgQl15qzn/hhd7r3S5bf/lLfO+ePK8PMHnJ3Z1b1E4/CU+ZIFe1sGtKpfra8KtfRfM4CvI2yiex\nX1JxGhbLQFDItAM4HfgTsG/ANg1Ao+v974DTox3fX60UZUmzTVsW7Aifh0VIseAXU9AL2vOVrxid\nvqOyqK01n596Krqni0iuV5BqrsoiLP+Rlx4t6PqC2vfLX3rr0eNWVcrH2Bp2jqBrSqX60l+rRvc4\nysfbqFyrsFn2WCgHmwPwZ2ANxp7wIvDvme+HA49n3o8GXsosr2HUURGPX5hwaGKTdiz4RaQb2vvH\nP2r3hz6kXdXV2pvp9HtBW0E7MF5FvifyS8xUWxucrC57mTGjv9tVUIImd83hKDruqHmCnKIU8+eH\n+P0W0cMmqtE9qpCKUk0uXxuBzV9kKTOSEg5ijlWeVFWN1bq65TG0IQooaVoz9QpOZ9ycc+DGG3O3\nVOVPf/oTzz777K5l6wcfMBkY09DA0cOGsfdhhzFswgQO3Hdf5Gtf81Zh1NZ6p37Ih3Hj4M03+9Qw\nNTX+x3bUUMOG+ad+yMZR0/jl+3e+D9LRNzUVX49+772mbJ7fDz97NsycGXxNbjo7zX3yuh6nmtx5\n5+V3TUHHLsW9sliyEJHnVXVswQdKQsIUazn22DHa0qL6j/8YbeBdz3adxU199QpcI7eenh598cUX\n9fvf/742Nzfr0KFDjSQBHTFihH7xi1/U+fPn68qVK7W3t7e/KA5SYYTVGQ6qWxx1BlJqlcVAj4bD\n1ECpVHkFm1lvJUsZwZ6QPqOiwtjyzj8fLrjAZHfo7u6f4UFEadBWqtjJIk5jHMt3reuWGn64Zg1P\nn3suS5YsYfPmzQB8+MMf5owzzmDChAlMnDiRgw8+GJEAw3WQcfWTn4Snn/bfN+i4XhcclWImTit1\nNs9sw/PIkWZG5pWtFIyIiGvojVqxLR/yPbat0GYpY8paOLj5zGdgw5oOFs58gbdWdDHiIykYO4a1\nH9Qwumsdk+ceT213K707obOigq7eXk7Zvp3l113HIYccwuTJk5kwYQITJkzgwx/+cPwG+HUA990X\nLByuu87kM4qiKvLrDL0oZuH4Umbz9MqJVFkJvb3+++zYkZ+ACqzBWiBxj12sXFAWS0KUtc1h7Nix\nunx5ZiaQ9WfShgZ6RPjVqacy8eGHobubBmAn0CPCnWeeycipUxk/fjwjRowoXiM3bzbFpL069sZG\nWL/ejHQdofL++/CTn8SPO0ilTKfodgctVidSKj16R0duIW6HIFtOOg3z5g1eF9Gg67Z2CkuBJGVz\niKHHGEA6OlAn0Ki1FVSR1laqtm/nzAcfpLG7m0bMxdQC9ar803PPcX5XFyMWLDAGzjij8qgsW2ZG\n0dmqo+pqk830/PON+kPEvL/xRjjggHiZVMEUmLngAhMsd8EFsHp1cUeXjhqtqcl0xCLmtakp2Wye\nQTECVVVmNO63rli
zplIQJTbCYhlgylqttH37dm6++Waq/uu/+PqWLTR6bOOr0d+yxXi8OJV8vvIV\nePhhM5L30vHG1f96RcY67Nxpzjl/PrS09FcXxAlIczrmnh4j4NraYOlS8z5o5pCELruYOnqHINuG\nV8W43SXdtK3QZhkMJGHVLtYCqIjoD4cNC44ziLPU1eV6lOTjbRI3X1F9vUkEFbW4dCplYg2ipL12\nM5g8Z6J4Re2OGUsH2hvMslvDnhDncOihh+qyZctoevzxYL/3QmhqMn/LrVu91/npf+fMgZtuMvvG\nIZ0pEypiZhh+6q502qiQfvpT7+v20rsPNl12kG1jyBC44w5Ys2b38+SxsRGWIrJH2ByGNDYawbBi\nRTIFnr3o7PTvoIP0v456KC6trWapqDC+uX60tZnrjqN+GGy6bD/bRjptvJUuv9wI4OnTTWe6bFmy\n5+/oMCq6OXOKZ5fyolQ2HYulAMra5sDLL5uOoa3NuDcWg6AOIUj/G5SALgo9PWaUn077u4wecYSx\nMUR1KR2Muuxs28aIEXDllcXPcjrQrqSlsOlYLAVQ3sKhp6evY4haxzcuqZRRDXm5TQb59HsFxtXX\nG0NqFFVTWxvsu68xsHpRVQXf/rYZ0fqtz/bYKWV8QpK4YwTuvbf4WU7LJc12MeMuLJYCKUitJCI3\ni8g6EXkxs5zhs93pIrJSRP4sItcVcs7EcVI7e+HnMumoI554Am6/3QS5zZ5tvJMWL+6vLvCjoQEO\nOyxYvVBbC1ddZdpXW2v2C1I/NDcHC5vB4P5ZitnPYFO/WSwDQBIzh9tVda7fShGpBH4InAKsBZaJ\nyCOq+qe8z1hZaWYVDrW1xiYRlHKhrs6sS6XMq+MWeccdcNlluds3Nnp3wF7qiOygNEdd8MYbcOut\n3nENTmedSnmrF155xaidurvNrKa21mz7jW/Atdd6C7QkaigMNKWY/QxG9ZvFUmIK8lYSkZuB1hDh\n8GngZlU9LfP5egBV/Zew448V0eVeK6qrjYA480w46ig4/HCYOBEOPdS7I3Yyb557rin84nTCQfsM\nGWIK67g71Hy8gaIIk2wK9TqKkqm02OQba1EKT56grK+DPfrassdTFllZgZuBd4CXgbuBvTy2mQL8\n2PX5S8APohx/TFgsQFNTX2H3KVP86wN7+Y4vXepffNpvn3z90+P66g92P/hCYy2KHasRlGU3n4I/\nFuRxQ1gAAAo6SURBVEsZQTnUkAZ+BMwBNPP6PeDLhQgrEZkGTAMYE7ZxV5fxbqmoCI6BaG2FlSv7\nPjsGyaA0Fl7qhXzVEXENj4NZ7ZGEsbfYnjy7g/rNYikyBdeQdhCR/wAe81i1Dhjp+jwi853f+eYD\n88GolQJPGidH0dy5cNZZpuMJMkg6eOm3S+UNVMh5wtQ5xU4TnVRN5WJ78lhXUoslmEKmHcAw1/sr\ngfs8tqkC3gIOBmow5UKPinL8ULVS3MVRGUSpKeylXiiVOiLf84SpY0qRWsPWVLZYBhQSUisVGiH9\nXRF5RUReBj6bERCIyHAReTwjfLqBy4AngdeB/1LV1wo8b344I9ew6Ob6em/1QqGRrVEjcvM5j1ud\nk8lcS2ur+XzqqSa1eND6pKKDg+5tOcdaWCyWfpR1biVfbyWHqqp4wXEiJh7hmmv8PWLq62HdOtMR\n+5GPN5BfUZurrzbrg1RAUc4T5oETN09Tvti8QRbLgJKUt1J5R0gH4eTfiSMcGhpM53TrrSa47Lbb\nvA2SQYIB4uvDg4y0s2YZoeWVviHOecKM2HHzNOWLNfZaLLsFg1c4VFXBo4/C2Wf3dUKplOmI/Whr\nM1XY2tv7Ru7f+IbpnItpkAwzgDsqHsg/fUOYETtunqZCsMZei2XQU95qpaoqXV5T07/Dr683hXSc\nEbajelm50ngkxa2ylnTZSy9PoDjpvfNV8YSpc1avhoMPtuoei2U3Z89QK33848Y+sHIlbNhgEtUd\ndlj/Uaijern3XhPv4EV1tVnnlVwvqWRuQVk+41R/y1fFo2pUZbfcYj5n15t2jNlW3WOxWCJQ3sKh\noiIZnfvOnf77JaFvDwv8Wr3aPyFeNvmoeNyCqbPT5GGqrc3Nw2TVPRaLJSLlLRziEDQ6zzctd1TC\nAr8ef7z/qD1oBhE3e6qXYHKu87bbjHBwY9NEWyyWCJR1JbhYBKWrzictdxyipLtwRu3z5hkbxOzZ\nyVQCs+mnLRZLEdh9Zg5hLpRQPH171HQX2aP2a64pXMUzmPMwWSyWsmX3EA5uL6HbbzffrV2b2+EW\nS98eVDI0aGaShIpnsFZ/s1gsZU15u7KOHavLlwfGSOdXL6EYDFQ7bESyxWJxsWe4soZRLrWAYeA8\ngWxEssViKQKDWzgklR46KQbKE8i6qFosloQpSDiIyP3A4ZmPTcAWVT3GY7u3ge1AD9CdxJQHsMZY\nN9ZF1WKxJEhBwkFVv+C8F5HvAVsDNv+sqm4o5Hw5WGOsxWKxFIVE4hxERIC/A36RxPEiExTbkET8\ngpuotRgsFotlNyApm8NngA9U9U2f9Qo8JSI9wDw1pUALp1TG2KC8SaX0iLJYLJYSESocROQp4ACP\nVTeo6sOZ91MJnjWcqKrrRGQ/4NciskJVF/ucbxowDWDUqFFhzSu+MbacPKIsFoulRIQKB1X9XNB6\nEakCmoExAcdYl3ldLyILgeMAT+GQmVXMBxPnENY+oLjG2HLziLJYLJYSkITN4XPAClVd67VSRBpE\npNF5D5wKvJrAeUuD9YiyWCx7IEkIh/PIUimJyHAReTzzcX/gORF5CVgK/FJVn0jgvKXB8YjywnpE\nWSyW3ZTBnz6j2Nj0FBaLZRCRVPqM3Sdld7FwPKKSSK9tsVgsg4TBnT6jVNj0FBaLZQ/DCoeo2PQU\nFotlD8KqlSwWi8WSgxUOFovFYsmhrL2VRGQ7sHKg2xHCUCDZhILFwbYzWWw7k8W2MzkOV9XGQg9S\n7jaHlYml9y4SIrK83NsItp1JY9uZLLadySEiifj/W7WSxWKxWHKwwsFisVgsOZS7cEgmtXdxGQxt\nBNvOpLHtTBbbzuRIpI1lbZC2WCwWy8BQ7jMHi8VisQwAAyocRORvReQ1EekVEV8PABE5XURWisif\nReQ61/d7i8ivReTNzOteRWpn6HlE5HARedG1bBORKzLrbhaRda51ZwxUOzPbvS0ir2Tasjzu/qVo\np4iMFJHfiMifMs/I113rinY//Z4113oRkX/LrH9ZRD4Zdd8kidDOL2ba94qI/E5EPuFa5/n7D1A7\nJ4rIVtdvOSvqviVu57WuNr4qIj0isndmXUnup4jcLSLrRcSz5EHiz6aqDtgCfBQ4HHgGGOuzTSWw\nChgN1AAvAUdm1n0XuC7z/jrgX4vUzljnybT5L8CHM59vBq4pwf2M1E7gbWBooddZzHYCw4BPZt43\nAm+4fvei3M+gZ821zRnArwABPgX8Meq+JW7n8cBemfeTnHYG/f4D1M6JwGP5
7FvKdmZtfzbw9ADc\nz/HAJ4FXfdYn+mwO6MxBVV9X1bAgt+OAP6vqW6raBdwHnJtZdy7wk8z7nwCfL05LY5/nZGCVqr5T\npPb4Uej9KJv7qarvq+oLmffbgdeBA4vUHoegZ83hXOCnavgD0CQiwyLuW7J2qurvVHVz5uMfgBFF\naksQhdyTsrqfWYSVRS4KakorbwrYJNFnczDYHA4E1rg+r6Wvk9hfVd/PvP8LprBQMYh7npwCSMDl\nmane3cVS1xC9nQo8JSLPi6nZHXf/UrUTABE5CDgW+KPr62Lcz6BnLWybKPsmRdxzXYwZUTr4/f5J\nE7Wdx2d+y1+JyFEx902CyOcSkXrgdOBB19elup9hJPpsFj1CWkSeAg7wWHWDqj6c1HlUVUUkb9er\noHbGOY+I1ADnANe7vv4RMAfzEM0Bvgd8eQDbeaKqrhOR/YBfi8iKzKgk6v6laiciksb8Ea9Q1W2Z\nrxO7n7s7IvJZjHA40fV16O9fQl4ARqlqa8Z29D/AoQPUliicDfxWVd0j+HK6n4lRdOGgqp8r8BDr\ngJGuzyMy3wF8ICLDVPX9zPRpfb4nCWqniMQ5zyTgBVX9wHXsXe9F5D+Axwaynaq6LvO6XkQWYqad\niymz+yki1RjB0KKqD7mOndj9zCLoWQvbpjrCvkkRpZ2IyMeBHwOTVHWj833A71/ydroEPqr6uIjc\nJSJDo+xbyna6yNEKlPB+hpHoszkY1ErLgENF5ODMqPw84JHMukeACzPvLwQSm4lkEec8OfrITAfo\nMBnw9DZIgNB2ikiDiDQ674FTXe0pm/spIgL8J/C6qt6Wta5Y9zPoWXO3/YKMZ8ingK0ZFVmUfZMi\n9FwiMgp4CPiSqr7h+j7o9x+Idh6Q+a0RkeMwfdLGKPuWsp2Z9g0BJuB6Xkt8P8NI9tkstoU9aMH8\nsdcCO4APgCcz3w8HHndtdwbGW2UVRh3lfL8P8L/Am8BTwN5FaqfneTza2YB5sIdk7f8z4BXg5cyP\nMmyg2onxWHgps7xWrvcTowbRzD17MbOcUez76fWsAZcAl2TeC/DDzPpXcHnZ+T2nRbqHYe38MbDZ\nde+Wh/3+A9TOyzLteAljOD++HO9n5vNFwH1Z+5XsfmIGne8DOzH95sXFfDZthLTFYrFYchgMaiWL\nxWKxlBgrHCwWi8WSgxUOFovFYsnBCgeLxWKx5GCFg8VisVhysMLBYrFYLDlY4WCxWCyWHKxwsFgs\nFksO/x+LYx8tjVwE5wAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aaae8510>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ru6tBGXEAgCnGJmkQDOIkaIBr3LfQw\nEzET9RcnYFAT45LRKGggcZxI1IyJGZhEzNJGZ1TGJUaJYxTMBui4RtAAKqCRALL0RtPd3++PU5e+\nXXXXqlvV1XDe57lPLXc799at853zraKqWCwWi8XipmKgG2CxWCyW8sMKB4vFYrHkYIWDxWKxWHKw\nwsFisVgsOVjhYLFYLJYcrHCwWCwWSw5WOFgsERGRA0VERaRqoNtisRQbKxwsliIhIrUi8h8i8p6I\nbBaRR0TkANf6A0XktyLSLiIrROSzWfufJyJviUibiPyPiHyw9Fdh2VOxwsFiKR5fAz4FfAwYDrwP\n3OFa/0vg/4C9geuA+0VkHwAROQKYB3wR2A9oB+4sWcstezxWOFgGLSIyXEQeEJG/icgaEfmqa92N\nInK/iNwnIttF5HkR+bhr/UdE5GkR2SIir4rI2a51dSLyvcyofauIPCsida5Tf0FE3haRjSJyXUAT\nDwKeUNX3VLUTuA84InOOQ4BPADeoaoeqPgC8BPy9cw7gEVVdrKqtwEygWUQaC7ppFktErHCwDEpE\npAJ4BHgROAA4EbhcRE5xbXYO8N/AB4F7gP8RkWoRqc7suwjYF7gMaBGRQzP7zQXGAsdm9v0G0Os6\n7vHAoZlzzhKRj/g08yfAcRkhVo/p8H+dWXcEsFpVt7u2fzHzvbP+RWeFqq4CdgCHhNwaiyURrHCw\nDFbGA/uo6mxV7VLV1cB/Aue6tnlOVe9X1Z3ArUAK+GRmSQM3Z/Z9CngUmJoROl8Cvqaq61W1R1V/\nr6o7XMf9Vma0/yKmA/843rwBrAXWA9uAjwCzM+vSwNas7bcBjRHXWyxFxXpdWAYrHwKGi8gW13eV\nwBLX57XOG1XtFZF1GN0/wFpVdc8G3sLMQIZihMiqgHP/1fW+HdORe/HDzLH2BtowM5BfA38HtAIf\nyNp+CODMJMLWWyxFxc4cLIOVtcAaVW1yLY2qerprm5HOm8yMYATwTmYZmfnOYRRmhL8R6ATGJNDG\no4AFqro5M/O4AzhGRIYCrwKjs2wIH898T+bVbSMZA9QAryfQLoslFCscLIOVpcB2EfmXjAG5UkSO\nFJHxrm3GikhzJi7hcozO/o/AnzAj/m9kbBCTgLOAezOzibuAWzO2gkoR+ZSI1ObRxmXA+SIyJGPn\nmAG8o6obVfV14AXgBhFJiUgz8FHggcy+LcBZIvJpEWkA5gAPZtkoLJaiYYWDZVCiqj3AmZjR+RrM\niP/HGNWLw0PA5zEupF8EmlV1p6p2YYTBaZn97gTOV9UVmf2uAl7GdO6bgX8jv//KVZhZyBvA34DT\ngcmu9ecC4zLt+1dgiqr+LXN9rwIXY4TEBqABI1wslpIgttiPZXdERG4EPqyq/zTQbbFYBiN25mCx\nWCyWHKxwsFgsFksOVq1ksVgslhzszMFisVgsOVjhYLFYLJYcyjpCeujQoXrggQcOdDMsFotl0PDc\nc89tVNV9Cj1OWQuHAw88kOXLlw90MywWi2XQICJvJXEcq1ayWCwWSw5WOFgsFoslh7JWK1ksFsue\nTEcHLFwIq1bBmDHQ3AypVGnOHVk4iMhdmFw2G1T1yMx3H8RUtzoQeBP4R1V932PfU4HvY1Iq/1hV\nb863wTt37mTdunV0dnbme4g9nlQqxYgRI6iurh7oplgsFh+WLYOTT4bubmhrg4YGuOQSWLQIxo8P\n379QIgfBicgETI75n7mEw3eBzap6s4hcA+ylqv+StV8lJs3wScA6TDKzqar657Bzjhs3TrMN0mvW\nrKGxsZG9994bEYnUdksfqsqmTZvYvn07Bx100EA3x2LZ4/GaHajC8OGwZUvu9k1N8O67/jMIEXlO\nVccV2q7INgdVXYzJUOnmHOCnmfc/BT7nsesxwF9UdXUmG+a9mf3yorOz0wqGAhAR9t57bzvzsljK\ngGXLjBCYPh1uuMG8DhsGc+eaGYMX3d3w4IPFb1uhNof9VPXdzPu/Avt5bHMAropcmNnD3/kdUESm\nAdMARo0a5bdNPm21ZLD3z2IZeDo6jNrIPTtobTWvN90EO3Z479fWBqtXF799iXkrqdFPFZyoSVXn\nq+o4VR23zz4Fx3EkzpYtW7jzzjvz2vfuu+/mnXfe2fX5wAMPZOPGjYH7PP3005x55pkAPPzww9x8\nc97mGovFUkYsXOg/O1CFWp/yUg0NMHp08drlUKhweE9EhgFkXjd4bLMeV7lGTKnG9QWed8AIEg7d\nfr90hmzhEJezzz6ba665Ju/9LRZL+bBqlZkFeLFjB/T0eK+rqjJ2iWJTqHB4GLgg8/4CTOWtbJYB\nB4vIQSJSg6l+9XCB5x0wrrnmGlatWsVRRx3F1VdfzdNPP82nP/1pzj77bA4//HDefPNNjjzyyF3b\nz507lxtvvJH777+f5cuX84UvfIGjjjqKjo4OAO644w4+8YlP8NGPfpQVK1b4nRYwwuXSSy8F4MIL\nL+SrX/0qxx57LKNHj+b+++/ftd0tt9zC+PHj+djHPsYNN9xQhLtgsVgKZcwYMwvwI1v7W1NjjNGL\nFpXGnTWOK+svgUnAUBFZB9wA3Az8l4hcBLwF/GNm2+EYl9XTVbVbRC4FnsC4st6VKYFYMJdffjkv\nvPBCEofaxVFHHcXtt9/uu/7mm2/mlVde2XXep59+mueff55XXnmFgw46iDfffNNzvylTpvCDH/yA\nuXPnMm5cnyPB0KFDef7557nzzjuZO3cuP/7xjyO39d133+XZZ59lxYoVnH322UyZMoVFixbxxhtv\nsHTpUlSVs88+m8WLFzNhwoTIx7VYLMWnudm4pvrR1ZX7ua6uuG1yE1k4qOpUn1Unemz7DqZervP5\nMeCx2K0bJBxzzDF5u4U2Z+aHY8eO5cGYLgif+9znqKio4PDDD+e9994DYNGiRSxatIijjz4agNbW\nVt544w0rHCyWMiOVMrMAdyxDTY2/IRpg61azfZAra1IM6gjpoBF+KWlwzQ2rqqro7e3d9TnMZbQ2\nY3WqrKwMtVn47QsmfsF5vfbaa5k+fXqsY1kGPwMZTWvJj/Hj4Z13zO+2ejW8+CI88IAxSPvhuLKe\nd15x22ZzK8WksbGR7du3+67fb7/92LBhA5s2bWLHjh08+uijkfdNglNOOYW77rqL1oxP3Pr169mw\nwctPwLI74ecvv2zZQLfMEkZdnenor78eJk8OtkNA6VxZB/XMYSDYe++9Oe644zjyyCM57bTTOOOM\nM/qtr66uZtasWRxzzDEccMABHHbYYbvWXXjhhVx88cXU1dXxhz/8oSjtO/nkk3nttdf41Kc+BUA6\nneYXv/gF++67b1HOZxl4gvzl
S6WCsCRDmB0CSufKWtY1pL3SZ7z22mt85CMfGaAW7T7Y+7j7cM89\nZqbgCAQ36TTMm1d8FYQlOZYtg5NOMvYFL8oufYbFYilPgvzlS6WC2N3p6DBCeM4c81rM7DPjx5vO\nf/ZsIwBSKePWmk6XqSurxWIpTxx/ea+ZQ6lUELszA5Edta4OZs6Eq67qM1aPHl2mKbstFkt5EqSn\nLlU07e7KQNtzHGP1QDAo1UrlbCcZDNj7t3vh+Ms3NRnVw0CoIHZXgvIflSo76kAx6GYOqVSKTZs2\n2bTdeeLUc0jZHmO3IttfvtQqiMGAXxxI2Pde6jqIb88ZbHEog85byVaCKxxbCc6yuxHW8XrZDaqq\n4Pbb4fLL/b/v7PQ3PjueYJMnh3f6zvmd4zmG5mLYLZLyVhp0wsFisVjc+HX8Tsfb0eFfVU3EOxrZ\n73s3TU3wyCNw1ln+5wZz/v32A6/418ZG2LAh2RlE2biyisihIvKCa9kmIpdnbTNJRLa6tplV6Hkt\nFovFbTBubTUdemur+eyM1MPqJsT5HkydhaYmePhhIxiCzg1w333eggHM9/feG/16S0nBwkFVV6rq\nUap6FDAWaAcWemy6xNlOVWcXel6LxWKJYjAOigPJh7POMl5Ka9dGM1b/6lfBxwtbP1AkbZA+EVil\nqm8lfFyLxWLJsS2sWBEeABgUBxKXdNrYGFKp6MGHMfNplg1JC4dzgV/6rDtWRF7CVIG7KqmaDhaL\npbiU0ssm6FxetoXe3j7PomycAMCgOJC4Ngd33EiU4MNly+CJJ4KvOSs9W/mgqoksQA2wEdjPY90H\ngHTm/enAGwHHmQYsB5aPGjVKLRbLwLF0qWpTk2o6rSpiXpuazPdu2ttVW1pUZ882rx0dyZ1r8WLV\nBQtUUylV02VHWxob+9rhd+y77473vfu6OzrMd17nbmpS3bzZf72zpNP53asggOWaQJ+emLeSiJwD\nXKKqJ0fY9k1gnKpuDNrOeitZLANHkJePO/lbmLdQoecSgerq3MpoYaTT8Le/9c08nFlJdhxI3O/d\nBF37G2/4J0QEE/38zDPl68qapFppKj4qJRHZH3hPVVVEjsEYwjcleG6LxZIwUYy9kycnk14izKMo\nrmBwcIriBKmr/FJUREldMX68OebMmcb+cdhh8J3vGOH5+OPBhvCrr/YWDOUSLJeIcBCRBuAkYLrr\nu4sBVPU/gCnAV0SkG+gAztWkpiwWi6UoRDG4RhEgUXIDJe1R5G5jMRPnuY/d2gq/+x3cdRd885sw\ncqS/TSKdhkMPDT6eV1tLKTgSEQ6q2gbsnfXdf7je/wD4QRLnslgspSGKwTWpdOFJehS52zhiRPES\n53kl5XPqP8+aBUOG+O/rGLbdnf3IkSYq213Hwd1WJ+DOHWVdzOywgzLxnsViKT7NzaYT88Lp3JxO\n3Ys46cKDzuVHKtWXbNCvjVC8xHlBsyYwnbyIERJeCRFffrl/adevfMW/wM/OnaYA0JYtfcF1nZ3m\n84knFqe+hBUOFksRKWWRmKSJku01igAp5Fx+uTVTKfjRj8zI/6mn/Nu4dm3xCiFFUYWpmjxN8+aZ\n4j3z5pk2H3lkbmR30LPR1ua/vlhR1oMuK6vFMlgYiCIxSdLRYTxuLrsMNm2CoUONntyt506lTBqJ\nU08119nVZa6zujp+unAv425zM0yZEuwJFZSR9o03ilcIKYoqrK0N1q2D66/vUyHdcgv89a9mNhCV\nqqrgWcrPfw4XXhj9eFGwifcsliIQ1Q20XInqnpptkK2pMds98QQcf3wy53z4YTMDyCcVeWcnDBuW\n7O/gdPIrV8LcudDe7r+tk7n14IP7X1tc19zq6mBhUl0N27Y5JUXLz5XVYrFkSMqLZyCIWv3Ma7uu\nLrM4+Yf8Ot5sr5vTTvM/59ln9z+nky8pireOo67yE3RxBUO2AKurC96+qgomTTLCwS1EwgRDbW3f\nLKyqysze5szx376iIvlnygoHi6UIJOXFMxBEFWz5CkC/NBh+OMfKHn1HVdMlVQjJSxg6HX4qZewG\nIsZjyV0XIlswhNHUBLfdZtRRTlsB/vVf/e93V1fyz5QVDhZLEYjiBlquRBVs+QjAoFmJH21tRoVz\nySX5u6QmUYs5SBhWVcEddxi1miOATjvNvIYJBiePk9tWc+SRfTOrBx809yDI5tDQAHvt9T733huS\nyCkGVjhYLEUgKNlbHC+egSCqYAvbbsQI46HlVgGFuX960dAAGzf679fZCV/8oonWLmZQWJgwdAzP\nDvfcE+1aHbNvWxvMn2/eDx/eN0Oqrw/3impv38qllw4DdoSfMCJWOFgsBeAXsZq0rruURBVsYdlO\nL78cenr6q4A+//n4kdBVVcZTym+/zk64/36TriKKmsnrN1MNjzyOOxvMJ+p7+nRTHW7btr7vgo+h\nwA6OO+5bnHPOd5g4cSLjk3KFSyJ7X7GWsWPHFpKc0GIpKlEyljrZSufMyT9b6UAQNRur33bptHcW\n0vp6/3XOUleXe86WlvD93BlR/e6zV3sbG81r2LVu3mzaH+Wc7e2qM2ao1tTEyyQLqlVVcbbv1UmT\nevq1k4SystqZg8WSB1E9epLQdQ8EUY24Xtt1dRnvGi96esJrM4uYWAd3TMWHPxxstHaTbQx3Zgor\nVsD3vtffBuA1C/D6HR0jenbb6+uNncGZDXZ0GPfWm24y7Y2fMFDp7laixycLhx/uEylYIFY4WCx5\nMJhdVaMSVbBlbzdnjr8qZMcO05nW1vblIcqmosIIBueYfh2zH25jeHYcRhyCMs+6WbPGeBg55/Lb\nDkzcg3H3VcCvU+/GlMeJgjJzZnGEQyLpM0TkTRF5WUReEJGcqDUx/LuI/EVEXhKRTyRxXoulWISl\nvRjMrqrFJijfEpjO0S8tBvS/f+4Zmle1Ny8c/b9733wS+kXJPFtRAY895j2TzKayspejjnqRE0/8\nEsZW4E0qVR25jbV08tSPVkbePg5J5lb6jKoepd6ReacBB2eWacCPEjyvxZIoy5b1T4g2fbqJsl22\nrG+bpBLOJU2YUCtFrqeoSfRqa72/d9+/fLybHKN5Pvt6taPQ1OUOPT3Cs88+wmuv/ZbjjvsJRkA4\nixGYd98tXHBB9JlAFzWsnv0LePbZyPtEJgnDBfAmMDRg/TxgquvzSmBY2HGtQdpSatrbg0s/OkbH\nsBKRA2F4DjMiRzUyJ9WWoLKeIv7rRVSXLDHHmT3bfI5qoB0yxJQVbWlRPeGE+MZgr98xyBieTpv1\nN97YrSK9ocbj449v33WPNm0yRusTTzSv779vvm9pUa2sjNbGNFu1hanGir9ggers2ToUVmsS/Xoi\nB4E1wAvAc8A0j/WPAse7Pv8vpkyo17FsDWnLgBGlI3BIsrMttAZzmFALqmecjzCL0t6gus/ptOrM\nmf4df5SOOXtJpVSvvdZ4FOXjJZS9NDSY65s5099LKZVq189+9kytrZ2m0B56zEmTwu9tR
4f/+XLu\nE5u0g9q+GyCiR0OPlpFwOCDzui/wIjAha31k4eBe7MzBUmqCRqoixiXVTRKuqkkImTChNmNGdKHn\nhVsYzJ6d2976etXp0/vfg7DZ1YIFpgMOalPQMbx+n6gdf3V19G1FVOvqehV6tbp6h0KPwjaFTQrj\ndPToz2t1dZtC2MxBdcqUaL/nkiVBx+nVNNu0iU26lHE5G4wF1QT69aQqwa3PvG4QkYXAMcBi1ybr\ngZGuzyMy31ksZUXcQKdCXVWjusSGEaYXX7EifwN6mMeP8928efCLX/QPRAsKBHz8cf/UEq2tJmVG\ndjBhkGHZjDuDSafN+a+4Ar773WiGalXo6DB2gJ07u9h3359w1FH1XHjhBzjuuMf4+Mf3iZx++ze/\n6aviFsTxx8PixXDKKcaA39Oj1NBJJT2cz8+ZwBKaeZBUghHRORQqXYAGoNH1/vfAqVnbnAH8GuO7\n9UlgaZRj25mDpdSU2pawYIFqba33+WprzfooFGvmEKSuClR3uO6V3+wqTGVUU9M3G9m82bzOmhVd\n5ZK9HHGEGbkvWGD0+/lcVzrd0+9etSzYoelUV4z9w2dp2fd/172btUI7CDDkJDxzSEI4jMaokl4E\nXgWuy3x/MXBx5r0APwRWAS8TQaWkVjhYBoh81Txx7QZhRlsw66Ool8KEWlBnGCT04uj83UtDQ3gn\nGFVl1NAQbliPoiaqre3bfsiQXr3iihe1trZNRTo1ikoIslSLS5fq7NS3VeiJfF+8VJOxWLw41KhS\nNsKhmIsVDpZiEtSZx7UlxBUocUbkUWcsxfBWiust5F6mT4/W5qgzgSFDcmcj06eb/f1mX8HLJq2o\n2FvHjJmtlZU7Iu2za+Sf+QFbmKpptkU+Z9yZgyeOgPBxabLCwWIpgKQ9jeKOyuOMyP06lPZ2oyKZ\nMqVPXeKoX/yEWlyh19LibzQOW+rrowm1mTOjH3P27Gj3vf/iPSuoq9upd91lGuh+HiIJ6swP2EGt\nNrEpcvsLVk1GuOiyMkhbLIOJpIzADlFTabizgb70UvSMnV4G42XL4MQTTXF5h/vvNwbXp57yN5LH\nNaA3N8OXvxx9ezdRq5MddphpdxTj8E03wdVXm9+n0CC3zs5K1q83XWB2jihVuPXWgIy6GQ+AFMoi\nTuFknqCbKlppoC8tRl8wWz0d1DTVsWiRFJaRt9CLjoEVDpY9jqTzIkWJoM2uflZTYzqgKGR7STnC\nzS0YHFpb4aSTTAH7JNKCp1KmVsK8efH3jZpGJCj1txcPPggnnvgeDz30N1pbDyc80YN3xHGDtjJa\nNwBjgFzBedVVAYkHx4zZVWhhPMt5hwNYyGRWM5oRrAVgDQexkaHsw0YOqX2b5ttOJjX+3OgX6kVY\nHvDKSnp7eiKmKAzGCgfLHkfSeZGiFL3Jnqn4JZ3zIrs40MKFwWkvOjuTTfx3wAH57VddbWZhYa6b\ncdxVOzt7+drX/p2NG78OTMUkX2gMaYniJSCq2Enz946DkTfD2rU5hRz6CYuODnjQVfBhv/36+eHW\n0cl5/NK/CV0C6xLIqRL0sIFJe5sQVjhY9jiSLuEZVhwHgjUBqZR/Z59O5xYHWrUqWDjs2JFc4r+O\nDpPmOh+6uuCnPzU5nOLUef7JT+Cpp7w7dGhjxIidfOMb3+WTn5zE2WenA5PdOVSzg1p20EYDDbRR\nRTeLOIXU1vfg4otNY/2KUnsVvW5riz71g/4Pll+FqLB1EGmaVZFUzrwkDBfFWqxB2lIMglwo6+v7\nctzEIcjAHebxM3myv0urlwGzpSXYBba2NteAnW96jjiG86CUFX6G2PZ21V/8ole//vXNOm3a0/pP\n//TPOmrUIQqbfY7T2+84zn0PTpfRq/P5Z21hqs7hOm1hal/KibDG5hvo4XfMoAclnwpLRTRID7gA\nCFqscBg4Cs31U+44/6+6uv7/rfr6wvMjxQn2cgLU/Nw56+tzO/qw+AC3y6f7WvPxzIrqylpTo/rZ\nz/p30m6Pq97eXn399df1m99cqDU1rSqyXZ2UFCLv66RJV+sVV/xSGxt3ajrdG9pmx2vL99xsCRYG\nQY2NG+jhZBT0utlhbm1DhgQLFq+HzSO7oBUOlqJRyuydxSKKcNu0KXrZx0IIC1C79trg/uaGG3KP\nuXSpKW/p1a9llyn1O3cqZTrVoOuM2jcOGRKcSE+kV88660967rnn6rBhwxRSio8LqHPvfd1ufX7c\npUtNO5yYh9pa1SF1O3Rp6tPRO3eng3ci1eIGeqTT5qb6jRL8HriaGv9gjaDgCI8fyAoHS1HIx2d/\noPATAFGFW5wMrIXi16bZs1XHjg3ub6ZN87/+7DgHLxVUUOdeVWX6KydFdjZRopgdgRR8rq0KU3X4\n8OE6depU/dKXntT6+u749z7kx80RKO/HyNzn1YDZs+Pt66TA9Xow4wR0+AmrCD+QFQ6WolDKDrMQ\n/PqIxYujC7e4GVgLxenMJ09WPfxw0zFHieydPDn/c0Yd+LprKGSTfa9TKZOu4oQTzPW0tfXo//3f\n/+ktt9yh1dXbPY9fX9+pr7zyF+3t7Q1tl++9z3fk4lxAWK6S7GO1t/urepyGNjT0fwDvvttfeM2Y\nEXzeqqr8/nhZP1BSKbutt5KlH4Oh/GVQENupp5rgKy+yYxhGjjT/Pi9UjQtqkrzyClx2WX8vqSjx\nTJWV+Z8zzPPRQdVkAN20KdftNDtA7MADexk9+gX++MenWbjwGa64Ygnvv/8+AMOHP8nGjS1UVFSz\nY0d1JnhMWLSoliOOGBOpXb4eYwsX4pv+NChAZfx482CPHJm7zo2TstVxD7vnnmDX0OuvNxF8TiDE\naaeZV7/oyhkzgs/v9+Bm+zJnk/UDrZ05863gE0XDCgdLP5J28ywGYUFsXV3e6wZSuHV0mOC0fGoZ\nn3FG/udtbjaemlHYudO7f925cycvvfQ869Y9wx/+8Ay33PIs27ZtA+DDH/4wzc3NTJw4kYkTJzJq\n1Khd3piewWOudgW5/3r2hYsX5z9yefxx/843lYIzz4TJk/s3NizgrKam/826557gB3PLFhM84VcM\nu6LCCCjnenLCsgNwBWVsnDlzc/DG0ShYOIjISOBnwH6AAvNV9ftZ20wCHsJUjAN4UFVnF3puS/Lk\n9actMatW+XeyXV3mP+slILKF29q1wedZty5eu4Jc1OfOha1b4x0PTF9xbgFBtY5uIgo7d5r+taur\ni+XLl/PMM8/w9NNP87vf/Y62TCd52GGHMXXq1F3CYPjw4TnHiZKiIzvwLbQv7OgwQRN+1NcHj1yC\nOvodO+DjH89ttCsKOod0Ovd8YdPuoUNN4Ww/4dDZCU1NcNtt5uHzk6wlIomZQzdwpao+LyKNwHMi\n8htV/XPWdktU9cwEzmdxERYzE5egP+3DD5uRZVLnypcw7UBVlbdw6O01BWTuuce0fcwY/5w+Xv99\nPzo6TOd/003m844d/eOpjjyyb10c6upMnqRC
7vHChdG3Fenm3nu/y003fZuOTAd2xBFHcMEFFzBp\n0iQmTJjAfvvt17dDR4e5mXk+ENnqqsC+8L77giP/VINHLvlMiUeN8q9G5DVSGjPGNN6r80+l4NBD\nzQMxaZL/cbu7zejm+uv9r6VEFCwcVPVd4N3M++0i8hpwAJAtHCwJk12hq7YWLroIvvnNvuRkbsIE\niXv9bbeZ79asgY0bTYd36qkgYp5rv2DScuDKK+GOO/qEmzsCec6cvrY/8khfBHM2UWZJjlD4zndy\n02E4fdCECXD++dFH7w4i8I1vFH5vwzQjblQrgcf48pe/zMSJE5kwYQJDhw713tgraviSS8wIYu3a\nyAIjZ5bR0QH3eDykv/pVcOM/+clgwTRqlP+N8PqxOzrgrLP8f7jLLoNbbunfxtNO8xdgnZ1w+ulm\nZnDlleZB9KJcDHuQrLcScCDwNvCBrO8nAZuBlzAV4Y4IOMY0YDmwfNSoUf4W+j2csMDNbNfNfHL9\nNzaa1yBXyIFwb43i6eK4NAZVDmtqMh46+cR0OD71UZxffNLuBy5BabojBye2t2vLjGc1XdMZ6Zx1\nVTu0Zcaz4T9o2MPnuGDFDZAJekinTAlufFBx5qCAFj83rTAfYHflIKeNQXEM7mjGIrsEAss1if48\niYOY9pDItpLPAAAgAElEQVQGngOaPdZ9AEhn3p8OvBHlmNaV1Z8owUluj7wgD8DNm/PPEBDnWU4q\n6jrOfyvKtnFrHCSVUSHKb+cmavzGtm3b9He3365ttbW6Ueoi1xsQunVOzbfCO/S4UcNRRhBhD+n8\n+cHn8KunGlZNyK9kXdzgt6YmMxKJ4p+bdC3arD9WBTynEfrXsCURbyURqQYeAFpU9UGP2ck21/vH\nROROERmqqhuTOP+eSBR1gePd57z32+b66/NPER91FuynhchHLRXHaB5mI3z99fg1DpJMqV9bazQS\nPT3BRtkg992TTurlrrse5w9/+C3PPPMMf37uOd7u7aUeqIesegNpTEK73KR2DbQxumsFdG0JLmwR\nR1cF0fKgh7mgVVcHG4jOPTdXb3raaeY6/PT7YNZ5PcBRfYDdbdy4MZpdI7Y1PgCPP9bH4OPRD+BP\nEt5KAvwEeE1Vb/XZZn/gPVVVETkGkzVwU6HnLiWFJFIsBlGeXafjVg3uIFesiPdfdxPFvTXp4jpx\n/ltB90nVqI3POCOegIrbN7qpqTFeQe72HnmkeX5WrjT9y957wxtvwEc/Cik1D9fChXV0d54JVOcc\nc+vWVs79+19wbuV/MW3ECI48+mg+8Oqru/Tf7noDKzmEuVxFO+mc41TRTTOu0YRfhx6344wyggi6\nqa2txnvnqaeMP3BnpzHy1NYa98/Pf94UnXj0UfOj7thhBEZvr/kchN8DHLfIRGur8UaKasSKZY33\nweePVQkFRMa4KHTqARwPKMae8EJmOR24GLg4s82lwKvAi8AfgWOjHLtc1EpJJFJMmqhpDVpaoiV+\ny6eIfNRZcLFUrFHUQVHuU9yZfFytiluDMWOGd3s9n6PGnbo0/RntaWjQb3F9QCH7Hr2x5lva6+wc\nnKJUlzJOm9ikabaq0K1ptmoTm3Qp47zVIPnc1Lg/cthNPeEEs42TmsIpHp2dOTHJB9grNDzoWLNn\nl7ZD8LlnNn1GiUgykWLShBlFnTaEqTjfeSd6kXd3R5dEZs9ipKnwuk9B1xdXQMXtG93n8Utb7fv7\nsEk7qA0sZJ9mm7YwNVZj2kkFp7COmrIhSv6PKH+Gjo5wC3+UHClxlvr68AfYPQJZsED1Ax8Iv864\nRqx88fljWeFQIoIGNKlUfokUk6S93TwjqZRZ/AYrfgMaJxVM1AFYdbXqRReZEfCsWdGL1A90vqag\nnGf5CKjslPq1teb+z5zpf62Njd736o47Nmkq1eXT8W/d1Xn7GZYdAVJwZxm3Q3eSRQWNqIcMiT5q\njprkrr4+/6mu+xj5FO4IamOpk48VeeZg02eEEKQKDYrJaW0tjbtyXR3MnBlS7xZvFadXKpggUin4\n+tfh+9/vS1MRxbAcpL7t7TXu38UmqIh9PmlBnPt5333GtX/9elNO8733zD/UC925kwfvVY77zLs8\n88wzu5ZVq6YC3/Lcp40GVjOaFDv6GZbbSNOQFqpat5iKZsSoO5pNTY1Z8knZcOGFcMQRRvfd1dXf\n+Fudax8JRcT/BjoE5UjJpr7evFZU5F5fU1P89gW1Mdu2UmxjZFy7SFySkDDFWgbzzAHMIKOcias7\nLyTeoRjFdeKweXP82g1hrrdLl8asA0O3zuJ6PRYU0L322kvPOecc/cIXfuWfvjozc3C+2KUOmvyc\ntsx4VjtqIwZbBC3f/GbhapCgWIIhQ8wMI8yHOamyc9k/rmOnSELNE3UaXCrbg8d5joJuTaD/TaQT\nL9ZSDsIhTF8fRQVZLmR3dkHFWZz/n2PfrK8PFoR+7uJuSlVcJxvn/5N97rq63P+rc48ce6fffzss\nm3NQR98L+vpdd2lPT4+qhjxjfiojp7ON44vvtdTVJaMKCevYHdWTo3+bPTv3B49qzGloiGYkq6tL\nvjOOEqNQ6qIoWTaOpOIcCj5AMZdyEA6q4fWBfTuDMqp/sHix+T85A650OtjZI5Uy2zop5v1SzbsX\nt97ea9Q9ELaHoP9pttrZ+Z0bGvyv0flvt7REs8X6dvR1df1G00uXdPZ/xmo6tYnN/T2I3MsJJxjD\nT1BjIzUqoc4qH0HlNZKO+iMsWRLuPeRVQi8JwmYFA2xkI6EIaWtziECQS/Ljj0dXQQ4US5bAxIn9\n2+jo3iU3FgrItaeEBX3V1PTp7Z24nM5Os6RSRjX6+c8nUysijio3KF9bRQU89phx5fdyGfeiu1u5\n/fa1PPvsO+zY4YTseKHUsIOdVNNAG1V097cNdHTAV76yy19/PBfxztWzWDj6Slavq2H0u8tp/uk5\npNp8woGeegqWLvUP8ArT3WfXLiiUuLEPYG52dqCL+8+2eDH8/Ofe9oLx4+FHPzL30OsHTqfhkEMK\nvy4vwmIUBkNRlCgkIWGKtZTLzCGIYg8SCk050d4ePAOvq+uvPmloiK7OzR6FOzNqr9rGzmyk0HsV\nR5W7dGnw4NLtpRRd3d2tcJ3CVIUO3+1S1Tt1htzp7yrqtzjePXH8ZeNUJJs9uzguloX49wb98EFu\noUmnociHcpkiu6DccisVYxkMwiHo+czXW84hCZtWS0twZy/S55Lq6NnjJopz5y5bsCB420JsDnFU\nuVHyH2WXCo6iFamtbNXvjLtV377lDh0ypNd3u6YhvdpRt1e8G5l9Mc4DEKWD9Yquc2cfnDHDGJmK\n6Xef/cBG0bsVGugyUFGoQed2sjmWUmi5hNRQWK0J9L+JdeTFWAaDcFAtjidOUjatMNfxmpq+5HNR\nB36Vlf0N1e6klpMnB+97wgn5/5fjJtwLi91oalLdtq1L//CHP+i55z6klZVt4f02m7UDMwVamv6M\nput
35mzT2Ji5niVLot3QoItZsCC8kw3qYEvdeWYHjYU9VKmUf9K8fM5ZTOGXfc6gP2i+6X7zwdaQ\nLl/8StS2t5sln/xBYXnInLQ3Yfp3RxUclsr+wQejJ5O76CJzrdmq1mXLTHqbID74wfCUMn7XFEeV\nu2KFf8EtUCoruzn00CsYNmxBpspZLRUV7/lun6aVKnZm7Aad0Arj+S0bhozkvnlv8avf1AAmT9O5\n52auZ1mtf3BFEO6LWbs23KffL1AjalKrJP3xs7MYHnGEyYfkVwavsxMuv9xsl2/xiriZE5Mg7A/6\n9tuF506KgsdvXOFvCIuFFQ4JEVSiNkpSymyidIR+mU6vuMJsN3KkSfLmVyNdBJ54oq/jjdKH1dSY\nIDivQkInn+xf/93hjDOC/8tB2VvHjOyioVZo7cwNrsruHzdtAlC8so8C9PT8mLa2p7nwwgt3FbZ5\n+/VaTp64hW6toI0G6mlDqeCL/JwJLKGZB3OCzep6WrkwfT8X/nfWBTk3JJ+i0e6LiWLo9atMFGWE\ncfDByaXL9WL8eCOEnFJ5XsbjrVvzG0ENJFH+oKUQWkmmCM4iqZTdpwLfx2QD/LGq3py1XjLrTwfa\ngQtV9fk45xiIzKdxiOugEHXE7xfRO2KE/6Bw1qzgtlZWmuWxx+D448Ovzc3115u589139xXnOuMM\n8xr2jIbVQw4c6J7YzRo5mKrOF4C9cvatkm6am6tob2/nj3/8I2/9vhU4y/dcV111Prfc8pV+3+33\nv/fwTv3XWNh2EqsZzWhWewqEfvh5nxTyp3V39mFRsE1N/h5HYZlOX33VHDupdLl+OGH8I0fCxRfn\nls2D/EZQA0k+ZUeLQSEpgkNIImV3JfBD4CRgHbBMRB7W/jWkTwMOzix/B/wo8xqJJGsBFIsonblT\nbhfg1luDryesZgHk3/f09Jj//JQpfSmjb/VMtp7L/vvDfvvB9u19391/v5lRBGk/amvD6yEHDnS3\nd/AYx7GIk10pJBp2uYk+3HkWn5vYzb7PPceYnh6amcpvOZF2GnKOVV8HRx+d+z2rVlHXvonz+KV/\nI7PJ/nEdSZ/vn7a6um/qB7n5yZ16sCL+9WAdwmYdc+eaUYIXSXTW2SOg1av9H5J8XTwHatQYp6hI\nMRkzxqQIKYaAKNRoAXwKeML1+Vrg2qxt5gFTXZ9XAsPCjj127NiSBxv6EeZSGuS11NhoPBSjVm5z\nCAu+KzQ41jnnggXRsxbkc87a2mg2x8DsrXTrHK5TxTuj6HbQ9ooK7aiu1l7Q9sBEdZu1Y8my3Abk\nk4vb+VG8XEbzTQ7nZbzMx+iar3tpmJE7Cl4Pb1DUZT4ungPpqVQO51c1hu+sP03ZZGUFpmBUSc7n\nLwI/yNrmUeB41+f/BcaFHXvs2LGxXYaTKkXpJuoz4Ldd1D4i6Hqy+4R8awp4nXPKlGQEjd8yZEi0\n3yHwt87KMZS99Hp8F1i3wGtkEaczraw0uVP8gjrC8o1EWZIY/YTlK4/zMEYlaETn96DFvdZyGzWW\n0lPKfW6Pe7DbZmUVkWnANIBRo0bF0uUXQ/0UxeFDtW9me9ttZt26dUbt2NUFl10W7VxtbaYaWLaG\nwmtmn1RCRufexg1ujcOVV0ab6Tc3w4wZvXg5W1S6q5R54GV2dldAy7EhbMGUgZs5s2+HVMqkWM0O\nJ/eioiJYr+cVuZxKmeOKBKf0dUhCtTN+vPkB5syJt18hqpEg/WBdnXn1inouhjtfsRkITymHIhqj\nIRmD9HrA7cQ5IvNd3G0AUNX5wHyAcePGaVS7T9KlKB0WLvT/H+/YYfqXbPuBO8J/zpzo6sC6OqMG\ndv9v/ISbV6nMsP7Mi4YGY1B+8sn4+0ZBBKR7J9zz35564Q0bNrB48eJd6au3bq0FnsCUw2ygpmYn\nNTUVLNK/J9UWPy11HZ3+NoTZs42RdJfvKcZ1NIoOd+fOcNesbHp7zWihrs7cmLq64PrGcfXwfvr3\noHzlQSmt89XdB43oOjqMQD700MJcPHeXFBWFUERjNEDBUw+MgFkNHATUYEqBHpG1zRnArzEDvE8C\nS6Mce+zYsZEj5IsVsX7RRcGzbz/NgTtBW6H6/KBZcnZCvXw1F9nFa/wX/6hgz3tf360t9f+8S9fW\n09CgOxoa9F+bm/UjH/mIgklfXV9fryeddJJ++9vf1ief/J3efXdX30x9ybJkdGheSyrVX0eYlDEn\nylJdrTpunP9DlEoZnV8UdUWQ7jPsT5RkSmvV0qSPKIcKUgPNYCgTinFRfR1YBVyX+c5dQ1owHk2r\ngJeJYG9Q7YuQjqLzL0Ypyvb28E7XL1up83yGqbELsdUFpcBOpVTnzzeG4OnTvXMMpdPeNs8pU/yv\nu5Z2TdGm0KNGUAQLiybe98wrtBn0rJNO0ltmz9bXb7xRu2+4wbtjihO6XciSjzRPYnHyogdtE5Tm\nOuweeUn/YhtPS5HzqBzyKg00PvegrIRDsRZ3+owwu0++A4kgA3ZYXqKgxS2QvP6XQ4b0z4EWVFvB\nS7hFrYsc2m9szr0BYV5Ds7hBF3CBTuE+nVL9oC6Yv0OXzH9Vm3hf02zrZ/x9xifldG86bc4Z1mEV\n2lmP80l57XfDCvHwKWRxrj9MUHh16FEf/lIaT0shjMrBW2igscV+wslnIBH2bIXlJaqsjF5DOknh\nFnUwPWVKsJtqtsrHuQEts/8S2Wuop75en/l//0/bq6o8XUy9vIh2LX4pU90/WCFqnoYGcwOiVOXJ\nlub5ePgUsjQ0mOR4U6aEezllP9DFmDYnQSmE0UB6C5ULtthPOHEGElFm4i0twTVH6uv9+x3nGFFd\na+MIt6iD6VTKLFFiB9xLx5D9tKnJW12UXZmsF/R/8XYlDVxqaozO3WudWxpGqTAWZqxZutT8UEEV\ni7KDMTZv9m+f+xqSFCJz5kQThl5ZBvd0/btlFySUsjuRBE3lglODY94844gyb57xUvJyY43iCdfc\n7F8j3clL9MgjxuGjxuRdo6GhL6PByy/D8OEwfTrccIN5HTbMuNxm43gfNTUZxxIR8+qVHSGqk4JT\nbEfVe30D7Ywm16sj1dPGoiueoKlJqa/vQeglzTaa2JxTzF6ASX5JpYLo6vL39nF7mzQ394WE5zQ0\nZQq+LF4cfuNE/KOBwbieff3rfT9OKtX3o3pRXw9vvRW8TSplHsQlS0x7amv9twUTae245wWR7Y0T\ndI9KGa1r2b1IQsIUaylmyu6oM/HsqoXuNNXZHj7udWHlKZ0aCl7218BZcnu7tsx4VtM1nQUPVP3q\nE/eA3nP44Tp06EiFqVrJdXpXUMGasAIQ1dX9qwnFHRlHmRL63bi4Bu2ohunZs3Pb5sxAvIzH7e2q\n06YFn3vBgmg2D6/ZgNW/WzJgy4QWRlD8RHW1mXF0dvpXBFQ1swJ3XEVXl1nOOssEw/nNTNrbzYAy\nnc6NYwiMqclE+TXvrOSSrjeAkJHoLpQUnVSxs18+ouxZgEMr
0PX22/zHhz/M3pPTjLr8PA76/veR\n+T5xBn5pXx1OO82MiocOhY0b4Wc/i5dlNKwsI/jfuLiBQs60MWx65tRXjdI2p30jRviX7xQxkZPO\nFDIozbXXbCCoHeWetdJSniQhYYq1FGPm4AwwZ84MVhc3NAQPvMLUvHFSUkTyvMsaAWenhqjd5V7q\n0Z5Uly6o+XKOsdivQb3ORWTnCiqkmL1znOnTg29MKpXsaDeuQduZNhZDjx90zNra/jEN7e2m7W7D\nUT6zATuj2OPAlgmNT/b/pK6u79Wvr6iv719d0REuJ5wQ3L9MmZJHTqUwv9qsA7q9gxZwvjax2fP4\nH6jYom1hwsCRiEHG3aAb5RSjDira7NxQvxtTXa36uc8l63US1xU2ikurO3AsThKvqEEv7s67EG+c\ncsk/ZCkpVjjEJOh/UlMTHs+QTpulsTG8r0mno1VI7DdYnb423K82ZATsl2huMePCvYlOPNG4UgaN\nlsMkomNICXLHbGgI9/BJcnQbN27B3Wn6jbrvvjv/0XjUUPQkOm/rxbRHkpRw2GNsDmGq57BKjHGS\n0lVVmXQ9RxxhitR0b++glQaMb09uiriGBmX0z78F7QGJoSJUBAtMNBdEOg1f+lJ4vpoPftA/R09D\ng8mXc9555jgPPOB9nPZ2mDYN7rvPPylUkgVnvJJQOd5Ajv7fL6eQW4+/cqWxlwwZAjNm9M+JFKe9\n7mMuXGjqqnol70oieZzNP2QpgD1GOAT9T7q6wovVRKWxsa9/GX9kB+9UjGYhn2ElhzCXq2gnnbNP\nVe9OmmWh9wFdfrV6ySU+RS/7qKOTqfwydLv+DXAVkg7KchiUoc9tJA3LljhhgrHYOx3kI48UtzpY\nkFdBFEPywQcbzwGn2I4fUdvrGM+DhGgSnXe5VCuzDE6SmH4Ua0lSrRQ0w46i6Yi69KtdkHXSHLVP\naofRRkz/T1+VUa+ILpowQY899lj9ZGWlbgLdinE3jR14JmL0+k4+n2x1yObN/jciTo6eOBF9SUb3\nFqOYRxxX2LjtLbbax+Yf2iNhT1ErJeWFF1T/oLraDF7POquw9NdgvDp3DR6zpis5ap/utTQ/chmp\nt+uhxXuEt12Vny5ZQs8xxzDxyitZ9qlP8elNm6h4+22T3zso5XM2DQ1wxx1mmpQ9Wl6yBE49NXf6\n5ET47ZoOZUbh993Xv4j0Rz/at4+jyjnxxP71RMHc4Jdf7vPdTWp0W6xasnFcYeOOxotdatJPpVZo\nSm7LnkEhkgW4BVgBvAQsBJp8tnsTk431BWJItcMOG5uoF17YoHfTJmOTnTShW2squ/OaOfQbPEbw\nlOlKpfSbM2boVp9Asq50Wrf/7W+5F+Pn6jhkiH91sqYmYynPHln/5jf+bayvV33//Xg30mlfWG4R\n1cJHt+3t5pqi5GnK3i/KLCOOK2w+o/FSuJra/EN7FHH62KClUOFwMlCVef9vwL/5bPcmMDTu8Ssr\nx0b+D0b9r/v9T3b9R+u7VejRetpU6NE6Wnd5/qTZoo1s1RTt6pequp82oKPDv6POLD2gF1VW6rSj\nj9b2VEp31tVpb1gnERaV69XhOG5W2Z3QvHnBHV6crH/5FtjIt4N09gtKVFdoNHEUV9hCO3TbeVsS\npCyEQ78DwWSgxWddXsIBvIVDdo60Qgdfgaku2K6zuGFX4Fg7KV3A+ZmaBsH941/XrNGuEL//XtCd\nzc19DQnrJKJ2zu5jRc1M6rVk69GjdvpxbQlxO8iotgD3ufKZZQTNbNx5UPKJe7BYikA5CodHgH/y\nWbcmo1J6DpgWcpxpwHKzeAsHMAk2Z80KjnSOOsvPp7B9jnG5plOHDOnROXMe1+nTp+thhx2mU0G3\nRemAp0yJ/svnY8QspCZCTU3/Y0bt9IttbI16Tc658p1lqIaPPmwUsqWMSEo4hBqkReRJYH+PVdep\n6kOZba4DuoEWn8Mcr6rrRWRf4DciskJVF3ttqK4a0iLj1K9d3d0mP1EQUT0LA93BaWA1uUZGt3F5\nZcXBLEtt4NdbFzBz5g4aGxs5/vjj+crIkaSffNJ0PUF85jPB6yM31sf9sZBas9mG0agG5GIbW6Ne\nU1WVye00enT/RFhe+N2/sLxFxShebrEMMKHCQVU/G7ReRC4EzgROzEgtr2Osz7xuEJGFwDGAp3BI\nkrY2ZfXqcI//wP6ONs+01tBXvH5LL3zlU6fw6N5/z1GNjex/3HFU/sM/GMn07LOmAwnikktMAy64\nwHu922Xrr3+N790TIYDOlyee6N+5Re30k/CUCXJVC7umVKqvDb/+dTSPoyBvo3wS+yUVp2GxDASF\nTDuAU4E/A/sEbNMANLre/x44Ndrx/dVKUZY027RlwY7QaVigw0wmrbVfTEEvaM+Xv2x0+o7KorbW\nfH7yyeieLiK5XkGquSqLsPxHXnq0oAsMat+vfuWtR49bVSkfY2vYOYKuKZXqS3+tGt3jKB9vo3Kt\nwmbZY6EcbA7AX4C1GHvCC8B/ZL4fDjyWeT8aeDGzvIpRR0U8fmHCoYnN2rHgl5Fu6J/+1Ksf+EC3\npqo7+uUleopx2oHxKvI9kV9iptra4GR12cuMGf3droISNLlrDkfRcUfNE+QUpZg/P7hzLqaHTVSj\ne1QhFaWaXL42Apu/yFJmJCUcxByrPKmqGqd1dctjaEMUUNK0ZuoVnMr4OWfD9dfnbqnKn//8Z555\n5pldy3vvbQUm09TwMc4e1sFFh7zC6Il/xwH77IN89aveKozaWu/UD/kwfjy88UafGqamxv/Yjhpq\n2DD/1A/ZOGoav3z/zvdBOvqmpuLr0e+5x5TN8/vhZ8+GmTODr8lNZ6e5T17X41STO/fc/K4p6Nil\nuFcWSxYi8pyqjiv4QElImGItRx89VltaVP/5n6MNvLPdTt0jt56eHn3hhRf0+9//vjY3N+vQoUON\nJAEdMWKEfuELX9D58+frypUrtbe3t78oDlJhhNUZDqpbHHUGUmqVxUCPhsPUQKlUeQWbWW8lSxnB\nnpA+o6LC2PLOOw/OP99kd+ju7p/hQURp0Faq2MkiTmE8y3et65Yafrh2LU+dcw5Llizh/fffB+BD\nH/oQp59+OhMnTmTSpEkcdNBBiAQYroOMq5/4BDz1lP++Qcf1uuCoFDNxWqmzeWYbnkeONDMyr2yl\nYEREXENv1Ipt+ZDvsW2FNksZU9bCwc2nPw0b13awcObzrF7RxYgPp2DcWNa9V8PorvVMnnsstd2t\n9O6EzooKunp7OWn7dpZfcw1jxoxh8uTJTJw4kYkTJ/KhD30ofgP8OoB77w0WDtdcY/IZRVEV+XWG\nXhSzcHwps3l65USqrITeXv99duzIT0AF1mAtkLjHLlYuKIslIcra5jBu3DhdvjwzE8j6M2lDAz0i\n/Prkk5n00EPQ3U0DsBPoEeGOM85g5NSpTJgwgREjRhSvke+/b4pJe3XsjY2wYYMZ6TpC5d134ac/\njR93kEqZTtHtDlq
sTqRUevSOjtxC3A5Btpx0GubNG7wuokHXbe0UlgJJyuYQQ48xgHR0oE6gUWsr\nqCKtrVRt384ZDzxAY3c3jZiLqQXqVfmXZ5/lvK4uRixYYAyccUblUVm2zIyis1VH1dUmm+l55xn1\nh4h5f/31sP/+8TKpgikwc/75Jlju/PNhzZriji4dNVpTk+mIRcxrU1Oy2TyDYgSqqsxo3G9dsWZN\npSBKbITFMsCUtVpp+/bt3HjjjVT913/xtS1baPTYxlejv2WL8XhxKvl8+cvw0ENmJO+l442r//WK\njHXYudOcc/58aGnpry6IE5DmdMw9PUbAtbXB0qXmfdDMIQlddjF19A5Btg2vinG7S7ppW6HNMhhI\nwqpdrAVQEdEfDhsWHGcQZ6mry/UoycfbJG6+ovp6kwgqanHpVMrEGkRJe+1mMHnORPGK2h0zlg60\nN5hlt4Y9Ic7h4IMP1mXLltH02GPBfu+F0NRk/pZbt3qv89P/zpkDN9xg9o1DOlMmVMTMMPzUXem0\nUSH97Gfe1+2ldx9suuwg28aQIXD77bB27e7nyWNjIyxFZI+wOQxpbDSCYcWKZAo8e9HZ6d9BB+l/\nHfVQXFpbzVJRYXxz/WhrM9cdR/0w2HTZfraNdNp4K112mRHA06ebznTZsmTP39FhVHRz5hTPLuVF\nqWw6FksBlLXNgZdeMh1DW5txbywGQR1CkP43KAFdFHp6zCg/nfZ3GT3sMGNjiOpSOhh12dm2jREj\n4OtfL36W04F2JS2FTcdiKYDyFg49PX0dQ9Q6vnFJpYxqyMttMsin3yswrr7eGFKjqJra2mCffYyB\n1YuqKvjOd8yI1m99tsdOKeMTksQdI3DPPcXPclouabaLGXdhsRRIQWolEblRRNaLyAuZ5XSf7U4V\nkZUi8hcRuaaQcyaOk9rZCz+XSUcd8fjjcNttJsht9mzjnbR4cX91gR8NDXDIIcHqhdpauOIK077a\nWrNfkPqhuTlY2AwG989SzH4Gm/rNYhkAkpg53Kaqc/1Wikgl8EPgJGAdsExEHlbVP+d9xspKM6tw\nqK01NomglAt1dWZdKmVeHbfI22+HSy/N3b6x0bsD9lJHZAelOeqC11+HW27xjmtwOutUylu98PLL\nRu3U3W1mNbW1ZttvfAOuvtpboCVRQ2GgKcXsZzCq3yyWElOQt5KI3Ai0hgiHTwE3quopmc/XAqjq\nv44R5fYAAArZSURBVIYdf5yILvdaUV1tBMQZZ8ARR8Chh8KkSXDwwd4dsZN585xzTOEXpxMO2mfI\nEFNYx92h5uMNFEWYZFOo11GUTKXFJt9Yi1J48gRlfR3s0deWPZ6yyMoK3Ai8BbwE3AXs5bHNFODH\nrs9fBH4Q5fhjw2IBmpr6CrtPmeJfH9jLd3zpUv/i03775OufHtdXf7D7wRcaa1HsWI3A6k55FPyx\nWMoIyqGGNPAjYA6gmdfvAV8qRFiJyDRgGsDYsI27uox3S0VFcAxEayusXNn32TFIBqWx8FIv5KuO\niGt4HMxqjySMvcX25Nkd1G8WS5EpuIa0g4j8J/Cox6r1wEjX5xGZ7/zONx+YD0atFHjSODmK5s6F\nM880HU+QQdLBS79dKm+gQs4Tps4pdpropGoqF9uTx7qSWizBFDLtAIa53n8duNdjmypgNXAQUIMp\nF3pElOOHqpXiLo7KIEpNYS/1QqnUEfmeJ0wdU4rUGramssUyoJCQWqnQCOnvisjLIvIS8JmMgEBE\nhovIYxnh0w1cCjwBvAb8l6q+WuB588MZuYZFN9fXe6sXCo1sjRqRm8953OqcTOZaWlvN55NPNqnF\ng9YnFR0cdG/LOdbCYrH0o6xzK/l6KzlUVcULjhMx8QhXXeXvEVNfD+vXm47Yj3y8gfyK2lx5pVkf\npAKKcp4wD5y4eZryxeYNslgGlKS8lco7QjoIJ/9OHOHQ0GA6p1tuMcFlt97qbZAMEgwQXx8eZKSd\nNcsILa/0DXHOE2bEjpunKV+ssddi2S0YvMKhqgoeeQTOOquvE0qlTEfsR1ubqcLW3t43cv/GN0zn\nXEyDZJgB3FHxQP7pG8KM2HHzNBWCNfZaLIOe8lYrVVXp8pqa/h1+fb0ppOOMsB3Vy8qVxiMpbpW1\npMteenkCxUnvna+KJ0yds2YNHHSQVfdYLLs5e4Za6WMfM/aBlSth40aTqO6QQ/qPQh3Vyz33mHgH\nL6qrzTqv5HpJJXMLyvIZp/pbvioeVaMqu+km8zm73rRjzLbqHovFEoHyFg4VFcno3Hfu9N8vCX17\nWODXmjX+CfGyyUfF4xZMnZ0mD1NtbW4eJqvusVgsESlv4RCHoNF5vmm5oxIW+PXYY/1H7UEziLjZ\nU70Ek3Odt95qhIMbmybaYrFEoKwrwcUiKF11Pmm54xAl3YUzap83z9ggZs9OphKYTT9tsViKwO4z\ncwhzoYTi6dujprvIHrVfdVXhKp7BnIfJYrGULbuHcHB7Cd12m/lu3brcDrdY+vagkqFBM5MkVDyD\ntfqbxWIpa8rblXXcOF2+PDBGOr96CcVgoNphI5ItFouLPcOVNYxyqQUMA+cJZCOSLRZLERjcwiGp\n9NBJMVCeQNZF1WKxJExBwkFE7gMOzXxsArao6lEe270JbAd6gO4kpjyANca6sS6qFoslQQoSDqr6\neee9iHwP2Bqw+WdUdWMh58vBGmMtFoulKCQS5yAiAvwj8MskjheZoNiGJOIX3EStxWCxWCy7AUnZ\nHD4NvKeqb/isV+BJEekB5qkpBVo4pTLGBuVNKqVHlMVisZSIUOEgIk8C+3usuk5VH8q8n0rwrOF4\nVV0vIvsCvxGRFaq62Od804BpAKNGjQprXvGNseXkEWWxWCwlIlQ4qOpng9aLSBXQDIwNOMb6zOsG\nEVkIHAN4CofMrGI+mDiHsPYBxTXGlptHlMVisZSAJGwOnwVWqOo6r5Ui0iAijc574GTglQTOWxqs\nR5TFYtkDSUI4nEuWSklEhovIY5mP+wHPisiLwFLgV6r6eALnLQ2OR5QX1iPKYrHspgz+9BnFxqan\nsFgsg4ik0mfsPim7i4XjEZVEem2LxWIZJAzu9BmlwqansFgsexhWOETFpqewWCx7EFatZLFYLJYc\nrHCwWCwWSw5l7a0kItuBlQPdjhCGAskmFCwOtp3JYtuZLLadyXGoqjYWepBytzmsTCy9d5EQkeXl\n3kaw7Uwa285kse1MDhFJxP/fqpUsFovFkoMVDhaLxWLJodyFQzKpvYvLYGgj2HYmjW1nsth2Jkci\nbSxrg7TFYrFYBoZynzlYLBaLZQAYUOEgIv8gIq+KSK+I+HoAiMipIrJSRP4iIte4vv+giPxGRN7I\nvO5VpHaGnkdEDhWRF1zLNhG5PLPuRhFZ71p3+kC1M7PdmyLycqYty+PuX4p2ishIEfmtiPw584x8\nzbWuaPfT71lzrRcR+ffM+pdE5BNR902SCO38QqZ9L4vI70Xk4651nr//ALVz
kohsdf2Ws6LuW+J2\nXu1q4ysi0iMiH8ysK8n9FJG7RGSDiHiWPEj82VTVAVuAjwCHAk8D43y2qQRWAaOBGuBF4PDMuu8C\n12TeXwP8W5HaGes8mTb/FfhQ5vONwFUluJ+R2gm8CQwt9DqL2U5gGPCJzPtG4HXX716U+xn0rLm2\nOR34NSDAJ4E/Rd23xO08Ftgr8/40p51Bv/8AtXMS8Gg++5aynVnbnwU8NQD3cwLwCeAVn/WJPpsD\nOnNQ1ddUNSzI7RjgL6q6WlW7gHuBczLrzgF+mnn/U+BzxWlp7POcCKxS1beK1B4/Cr0fZXM/VfVd\nVX0+83478BpwQJHa4xD0rDmcA/xMDX8EmkRkWMR9S9ZOVf29qr6f+fhHYESR2hJEIfekrO5nFmFl\nkYuCmtLKmwM2SfTZHAw2hwOAta7P6+jrJPZT1Xcz7/+KKSxUDOKeJ6cAEnBZZqp3V7HUNURvpwJP\nishzYmp2x92/VO0EQEQOBI4G/uT6uhj3M+hZC9smyr5JEfdcF2FGlA5+v3/SRG3nsZnf8tcickTM\nfZMg8rlEpB44FXjA9XWp7mcYiT6bRY+QFpEngf09Vl2nqg8ldR5VVRHJ2/UqqJ1xziMiNcDZwLWu\nr38EzME8RHOA7wFfGsB2Hq+q60VkX+A3IrIiMyqJun+p2omIpDF/xMtVdVvm68Tu5+6OiHwGIxyO\nd30d+vuXkOeBUaramrEd/Q9w8AC1JQpnAb9TVfcIvpzuZ2IUXTio6mcLPMR6YKTr84jMdwDvicgw\nVX03M33akO9JgtopInHOcxrwvKq+5zr2rvci8p/AowPZTlVdn3ndICILMdPOxZTZ/RSRaoxgaFHV\nB13HTux+ZhH0rIVtUx1h36SI0k5E5GPAj4HTVHWT833A71/ydroEPqr6mIjcKSJDo+xbyna6yNEK\nlPB+hpHoszkY1ErLgINF5KDMqPxc4OHMuoeBCzLvLwASm4lkEec8OfrITAfoMBnw9DZIgNB2ikiD\niDQ674GTXe0pm/spIgL8BHhNVW/NWles+xn0rLnbfn7GM+STwNaMiizKvkkRei4RGQU8CHxRVV93\nfR/0+w9EO/fP/NaIyDGYPmlTlH1L2c5M+4YAE3E9ryW+n2Ek+2wW28IetGD+2OuAHcB7wBOZ74cD\nj7m2Ox3jrbIKo45yvt8b+F/gDeBJ4INFaqfneTza2YB5sIdk7f9z4GXgpcyPMmyg2onxWHgxs7xa\nrvcTowbRzD17IbOcXuz76fWsARcDF2feC/DDzPqXcXnZ+T2nRbqHYe38MfC+694tD/v9B6idl2ba\n8SLGcH5sOd7PzOcLgXuz9ivZ/cQMOt8FdmL6zYuK+WzaCGmLxWKx5DAY1EoWi8ViKTFWOFgsFosl\nByscLBaLxZKDFQ4Wi8ViycEKB4vFYrHkYIWDxWKxWHKwwsFisVgsOVjhYLFYLJYc/j82lc1AOUzH\nkQAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aa9c8490>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAADSCAYAAAChKgyOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYVNWZuN+v1+ruamkVNSAQhbhEjdEA/hIXIBqJu6HD\njKKT6MQJKNHEuGQ0ChpIjBOJmjExA5OIJoHojJFxiVHiGAWzATquETS4ARoJIEtvNN39/f44denb\nVXetulVdDed9nvvUcrdzb9063znfKqqKxWKxWCxuKvq7ARaLxWIpP6xwsFgsFksOVjhYLBaLJQcr\nHCwWi8WSgxUOFovFYsnBCgeLxWKx5GCFg8USERE5QERURKr6uy0WS7GxwsFiKRIi0iQi94jI+sxy\nY9b6A0TkdyLSJiIrReQzWevPE5G3RaRVRP5HRPYq6QVYdmuscLBYisdtQD1wAHAM8AUR+WfX+l8C\n/wfsDVwH3C8i+wCIyOHAXOALwH5AG3BnyVpu2e2xwsEyYBGRoSLyKxH5u4i8KSJfda27UUTuF5H7\nRGSbiDwnIh93rf+oiDwlIptF5BUROcu1rk5Evp8ZtW8RkWdEpM516vNF5B0R2SAi1wU08UzgFlVt\nU9W3gJ8CX8qc42DgE8ANqtquqr8CXgQ+75wDeFhVl6hqCzADaBaRxkLumcUSFSscLAMSEakAHgZe\nAPYHTgIuF5HPujY7G/hvYC9gIfA/IlItItWZfRcD+wKXAQtE5JDMfnOA0cCxmX2/AfS4jns8cEjm\nnDNF5KNRmw0ckXl/OPCGqm5zrX8h872z/gVnhaquBrYDB0c8l8VSEFY4WAYqY4F9VHWWqnaq6hvA\nfwLnurZ5VlXvV9UdwK1ACvhkZkkDN2f2fRJ4BJiSETpfAr6mqutUtVtV/6Cq213H/VZmtP8CpgP/\nON48BvyriDSKyEcyx63PrEsDW7K23wo0RlxvsRQV63VhGah8GBgqIptd31UCS12f1zhvVLVHRNYC\nQ511quqeDbyNmYEMxgiR1QHn/pvrfRumI/fiq8AdwOvARoyNYUpmXQuwR9b2g4BtEddbLEXFzhws\nA5U1wJuq2uRaGlX1NNc2w503mRnBMODdzDI8853DCGAdsAHoAEYV2kBV3aSq56vqh1T1cMz/bVlm\n9SvAyCwbwscz3zvr3TaSUUAN8Fqh7bJYomCFg2WgsgzYJiL/mjEgV4rIESIy1rXNaBFpzsQlXI7R\n2f8J+DNmxP+NjA1iAsZ4fG9mNnEXcGvG4F0pIp8Skdq4DRSRUSKyd+YYpwJTgW8DqOprwPPADSKS\nEpFm4GPArzK7LwDOFJETRKQBmA08kGWjsFiKhhUOlgGJqnYDZwBHAW9iRvw/waheHB4EzgE+wLiE\nNqvqDlXtxAiDUzP73Ql8UVVXZva7CngJWA5sAv6N/P4rozPH2QZ8FzhfVV9xrT8XGJNp33eByar6\n98z1vQJcjBES64EGYHoebbBY8kJssR/Lrkgm4OwjqvpP/d0Wi2UgYmcOFovFYsnBCgeLxWKx5GDV\nShaLxWLJwc4cLBaLxZKDFQ4Wi8ViyaGsI6QHDx6sBxxwQH83w2KxWAYMzz777AZV3afQ45S1cDjg\ngANYsWJFfzfDYrFYBgwi8nYSx7FqJYvFYrHkYIWDxWKxWHIoa7WSxWKx7M60t8OiRbB6NYwaBc3N\nkEqV5tyRhYOI3IXJZbNeVY/IfLcXcB+mDOJbwD+q6gce+54C/ACTUvknqnpzvg3esWMHa9eupaOj\nI99D7PakUimGDRtGdXV1fzfFYrH4sHw5TJwIXV3Q2goNDfCVr8DixTB2bPj+hRI5CE5ExmFyzP/M\nJRy+B2xS1ZtF5BpgT1X916z9KjFphk8G1mKSmU1R1b+EnXPMmDGabZB+8803aWxsZO+990ZEIrXd\n0ouqsnHjRrZt28aBBx7Y382xWHZ7vGYHqjB0KGzenLt9UxO8957/DEJEnlXVMYW2K7LNQVWXYDJU\nujkbuCfz/h7gcx67HgP8VVXfyGTDvDezX150dHRYwVAAIsLee+9tZ14WSxmwfLkRAtOmwQ03mNch\nQ2DOHDNj8KKrCx54oPhtK9TmsJ+qvpd5/zdgP49t9sdVkQsze/h/fgcUkamYvPeMGDHCb5t82mrJ\nYO+fxdL/tLcbtZF7dtDSYl5vugm2b/fer7UV3nij+O1LzFtJjX6q4ERNqjpPVceo6ph99ik4jiNx\nNm/ezJ133pnXvnfffTfvvvvuzs8HHHAAGzZsCNznqaee4owzzgDgoYce4uab8zbXWCyWMmLRIv/Z\ngSrU+pSXamiAkSOL1y6HQoXD+yIyBCDzut5jm3W4yjViSjWuK/C8/UaQcOjy+6UzZAuHuJx11llc\nc801ee9vsVjKh9WrzSzAi+3bobvbe11VlbFLFJtChcNDwAWZ9xdgKm9lsxw4SEQOFJEaTPWrhwo8\nb79xzTXXsHr1ao466iiuvvpqnnrqKU444QTOOussDjvsMN566y2OOOKIndvPmTOHG2+8kfvvv58V\nK1Zw/vnnc9RRR9He3g7AHXfcwSc+8Qk+9rGPsXLlSr/TAka4XHrppQBceOGFfPWrX+XYY49l5MiR\n3H///Tu3u+WWWxg7dixHHnkkN9xwQxHugsViKZRRo8wswI9s7W9NjTFGL15cGnfWOK6svwQmAINF\nZC1wA3Az8F8ichHwNvCPmW2HYlxWT1PVLhG5FHgc48p6V1apxLy5/PLLef7555M41E6OOuoobr/9\ndt/1N998My+//PLO8z711FM899xzvPzyyxx44IG89dZbnvtNnjyZH/7wh8yZM4cxY3odCQYPHsxz\nzz3HnXfeyZw5c/jJT34Sua3vvfcezzzzDCtXruSss85i8uTJLF68mNdff51ly5ahqpx11lksWbKE\ncePGRT6uxWIpPs3NxjXVj87O3M91dcVtk5vIwkFVp/isOslj23eB01yfHwUejd26AcIxxxyTt1to\nc2Z+OHr0aB6I6YLwuc99joqKCg477DDef/99ABYvXszixYs5+uijAWhpaeH111+3wsFiKTNSKTML\ncMcy1NT4G6IBtmwx2we5sibFgI6QDhrhl5IG19ywqqqKnp6enZ/DXEZrM1anysrKUJuF375g4hec\n12uvvZZp06bFOpZl4NOf0bSW/Bg7Ft591/xub7wBL7wAv/qVMUj74biynndecdtmcyvFpLGxkW3b\ntvmu32+//Vi/fj0bN25k+/btPPLII5H3TYLPfvaz3HXXXbRkfOLWrVvH+vVefgKWXQk/f/nly/u7\nZZYw6upMR3/99TBpUrAdAkrnyjqgZw79wd57781xxx3HEUccwamnnsrpp5/eZ311dTUzZ87kmGOO\nYf/99+fQQw/due7CCy/k4osvpq6ujj/+8Y9Fad/EiRN59dVX+dSnPgVAOp3mF7/4Bfvuu29Rzmfp\nf4L85UulgrAkQ5gdAkrnylrW
NaS90me8+uqrfPSjH+2nFu062Pu467BwoZkpOALBTToNc+cWXwVh\nSY7ly+Hkk419wYuyS59hsVjKkyB/+VKpIHZ12tuNEJ4927wWM/vM2LGm8581ywiAVMq4tabTZerK\narFYyhPHX95r5lAqFcSuTH9kR62rgxkz4Kqreo3VI0eWacpui8VSngTpqUsVTbur0t/2HMdY3R8M\nSLVSOdtJBgL2/u1aOP7yTU1G9dAfKohdlaD8R6XKjtpfDLiZQyqVYuPGjTZtd5449RxStsfYpcj2\nly+1CmIg4BcHEva9l7oO4ttzBlocyoDzVrKV4ArHVoKz7GqEdbxedoOqKrj9drj8cv/vOzr8jc+O\nJ9ikSeGdvnN+53iOobkYdoukvJUGnHCwWCwWN34dv9Pxtrf7V1UT8Y5G9vveTVMTPPwwnHmm/7nB\nnH+//cAr/rWxEdavT3YGUTaurCJyiIg871q2isjlWdtMEJEtrm1mFnpei8VicRuMW1pMh97SYj47\nI/WwuglxvgdTZ6GpCR56yAiGoHMD3Heft2AA8/2990a/3lJSsHBQ1VWqepSqHgWMBtqARR6bLnW2\nU9VZhZ7XYrFYohiMg+JA8uHMM42X0po10YzVv/518PHC1vcXSRukTwJWq+rbCR/XYrFYcmwLK1eG\nBwAGxYHEJZ02NoZUKnrwYcx8mmVD0sLhXOCXPuuOFZEXMVXgrkqqpoPFYikupfSyCTqXl22hp6fX\nsygbJwAwKA4krs3BHTcSJfhw+XJ4/PHga85Kz1Y+qGoiC1ADbAD281i3B5DOvD8NeD3gOFOBFcCK\nESNGqMVi6T+WLVNtalJNp1VFzGtTk/neTVub6oIFqrNmmdf29uTOtWSJ6vz5qqmUqumyoy2Njb3t\n8Dv23XfH+9593e3t5juvczc1qW7a5L/eWdLp/O5VEMAKTaBPT8xbSUTOBr6iqhMjbPsWMEZVNwRt\nZ72VLJb+I8jLx538LcxbqNBziUB1dW5ltDDSafj733tnHs6sJDsOJO73boKu/fXX/RMigol+fvrp\n8nVlTVKtNAUflZKIfAh4X1VVRI7BGMI3Jnhui8WSMFGMvZMmJZNeIsyjKK5gcHCK4gSpq/xSVERJ\nXTF2rDnmjBnG/nHoofCd7xjh+dhjwYbwq6/2FgzlEiyXiHAQkQbgZGCa67uLAVT1P4DJwCUi0gW0\nA+dqUlMWi8VSFKIYXKMIkCi5gZL2KHK3sZiJ89zHbmmB3/8e7roLvvlNGD7c3yaRTsMhhwQfz6ut\npRQciQgHVW0F9s767j9c738I/DCJc1ksltIQxeCaVLrwJD2K3G0cNqx4ifO8kvI59Z9nzoRBg/z3\ndQzb7s5++HATle2u4+BuqxNw546yLmZ22AGZeM9isRSf5mbTiXnhdG5Op+5FnHThQefyI5XqTTbo\n10YoXuK8oFkTmE5exAgJr4SIL73Ut7TrJZf4F/jZscMUANq8uTe4rqPDfD7ppOLUl7DCwWIpIqUs\nEpM0UbK9RhEghZzLL7dmKgU//rEZ+T/5pH8b16wpXiGkKKowVZOnae5cU7xn7lzT5iOOyI3sDno2\nWlv91xcrynrAZWW1WAYK/VEkJkna243HzWWXwcaNMHiw0ZO79dyplEkjccop5jo7O811VlfHTxfu\nZdxtbobJk4M9oYIy0r7+evEKIUVRhbW2wtq1cP31vSqkW26Bv/3NzAaiUlUVPEv5+c/hwgujHy8K\nNvGexVIEorqBlitR3VOzDbI1NWa7xx+H449P5pwPPWRmAPmkIu/ogCFDkv0dnE5+1SqYMwfa2vy3\ndTK3HnRQ32uL65pbXR0sTKqrYetWp6Ro+bmyWiyWDEl58fQHUaufeW3X2WkWJ/+QX8eb7XVz6qn+\n5zzrrL7ndPIlRfHWcdRVfoIurmDIFmB1dcHbV1XBhAlGOLiFSJhgqK3tnYVVVZnZ2+zZ/ttXVCT/\nTFnhYLEUgaS8ePqDqIItXwHolwbDD+dY2aPvqGq6pAoheQlDp8NPpYzdQMR4LLnrQmQLhjCamuC2\n24w6ymkrwHe/63+/OzuTf6ascLBYikAUN9ByJapgy0cABs1K/GhtNSqcr3wlf5fUJGoxBwnDqiq4\n4w6jVnME0KmnmtcwweDkcXLbao44ondm9cAD5h4E2RwaGmDPPT/g3ntDEjnFwAoHi6UIBCV7i+PF\n0x9EFWxh2w0bZjy03CqgMPdPLxoaYMMG//06OuALXzDR2sUMCgsTho7h2WHhwmjX6ph9W1th3jzz\nfujQ3hlSfX24V1Rb2xYuvXQIsD38hBGxwsFiKQC/iNWkdd2lJKpgC8t2evnl0N3dVwV0zjnxI6Gr\nqoynlN9+HR1w//0mXUUUNZPXb6YaHnkcdzaYT9T3tGmmOtzWrb3fBR9Dge0cd9y3OPvs7zB+/HjG\nJuUKl0T2vmIto0ePLiQ5ocVSVKJkLHWylc6enX+20v4gajZWv+3Sae8spPX1/uucpa4u95wLFoTv\n586I6nefvdrb2Ghew6510ybT/ijnbGtTnT5dtaYmXiZZUK2qirN9j06Y0N2nnSSUldXOHCyWPIjq\n0ZOErrs/iGrE9dqus9N413jR3R1em1nExDq4Yyo+8pFgo7WbbGO4M1NYuRK+//2+NgCvWYDX7+gY\n0bPbXl9v7AzObLC93bi33nSTaW/8hIFKV5cSPT5ZOOwwn0jBArHCwWLJg4HsqhqVqIIte7vZs/1V\nIdu3m860trY3D1E2FRVGMDjH9OuY/XAbw7PjMOIQlHnWzZtvGg8j51x+24GJezDuvgr4depdmPI4\nUVBmzCiOcEgkfYaIvCUiL4nI8yKSE7Umhn8Xkb+KyIsi8okkzmuxFIuwtBcD2VW12ATlWwLTOfql\nxYC+9889Q/Oq9uaFo/9375tPQr8omWcrKuDRR71nktlUVvZw1FEvcNJJX8LYCrxJpaojt7GWDp78\n8arI28chydxKn1bVo9Q7Mu9U4KDMMhX4cYLntVgSZfnyvgnRpk0zUbbLl/duk1TCuaQJE2qlyPUU\nNYleba339+77l493k2M0z2dfr3YUmrrcobtbeOaZh3n11d9x3HE/xQgIZzEC8+67hQsuiD4T6KSG\nN2b9Ap55JvI+kUnCcAG8BQwOWD8XmOL6vAoYEnZca5C2lJq2tuDSj47RMaxEZH8YnsOMyFGNzEm1\nJaisp4j/ehHVpUvNcWbNMp+jGmgHDTJlRRcsUD3xxPjGYK/fMcgYnk6b9Tfe2KUiPaHG4+OPb9t5\njzZuNEbrk04yrx98YL5fsEC1sjJaG9Ns0QVMMVb8+fNVZ83SwfCGJtGvJ3IQeBN4HngWmOqx/hHg\neNfn/8WUCfU6lq0hbek3onQEDkl2toXWYA4TakH1jPMRZlHaG1T3OZ1WnTHDv+OP0jFnL6mU6rXX\nGo+ifLyEspeGBnN9M2b4eymlUm36mc+cobW1UxXaQo85YUL4vW1v9z9fzn1io7ZT23sDR
PRo6NYy\nEg77Z173BV4AxmWtjywc3IudOVhKTdBIVcS4pLpJwlU1CSETJtSmT48u9LxwC4NZs3LbW1+vOm1a\n33sQNruaP990wEFtCjqG1+8TteOvro6+rYhqXV2PQo9WV29X6FbYqrBRYYyOHHmOVle3KoTNHFQn\nT472ey5dGnScHk2zVZvYqMsYk7PBaFBNoF9PqhLcuszrehFZBBwDLHFtsg4Y7vo8LPOdxVJWxA10\nKtRVNapLbBhhevGVK/M3oId5/DjfzZ0Lv/hF30C0oEDAxx7zTy3R0mJSZmQHEwYZls24M5h02pz/\niivge9+LZqhWhfZ2YwfYsaOTfff9KUcdVc+FF+7Bccc9ysc/vk/k9Nu//W1vFbcgjj8eliyBz37W\nGPC7u5UaOqikmy/yc8axlGYeIJVgRHQOhUoXoAFodL3/A3BK1janA7/B+G59ElgW5dh25mApNaW2\nJcyfr1pb632+2lqzPgrFmjkEqasC1R2ue+U3uwpTGdXU9M5GNm0yrzNnRle5ZC+HH25G7vPnG/1+\nPteVTnf3uVcL5m/XdKozxv7hs7Ts+7/z3s1cqe0EGHISnjkkIRxGYlRJLwCvANdlvr8YuDjzXoAf\nAauBl4igUlIrHCz9RL5qnrh2gzCjLZj1UdRLYUItqDMMEnpxdP7upaEhvBOMqjJqaAg3rEdRE9XW\n9m4/aFCPXnHFC1pb26oiHRpFJQRZqsVly3RW6tsqdEe+L16qyVgsWRJqVCkb4VDMxQoHSzEJ6szj\n2hLiCpQ4I/KoM5ZieCvF9RZyL9OmRWtz1JnAoEG5s5Fp08z+frOv4GWjVlTsraNGzdLKyu2R9tk5\n8s/8gAuYomm2Rj5n3JmDJ46A8HFpssLBYimApD2N4o7K44zI/TqUtjajIpk8uVdd4qhf/IRaXKG3\nYIG/0Thsqa+PJtRmzIh+zFmzot33vov3rKCubofedZdpoPt5iCSoMz9gO7XaxMbI7S9YNRnhosvK\nIG2xDCSSMgI7RE2l4c4G+uKL0TN2ehmMly+Hk04yxeUd7r/fGFyffNLfSB7XgN7cDF/+cvTt3USt\nTnbooabdUYzDN90EV19tfp9Cg9w6OipZt850gdk5olTh1lsDMupmPABSKIv5LBN5nC6qaKGB3rQY\nvcFs9bRT01TH4sVSWEbeQi86BlY4WHY7ks6LFCWCNrv6WU2N6YCikO0l5Qg3t2BwaGmBk082BeyT\nSAueSplaCXPnxt83ahqRoNTfXjzwAJx00vs8+ODfaWk5jPBED94Rxw3awkhdD4wCcgXnVVcFJB4c\nNWpnoYWxrOBd9mcRk3iDkQxjDQBvciAbGMw+bODg2ndovm0iqbHnRr9QL8LygFdW0tPdHTFFYTBW\nOFh2O5LOixSl6E32TMUv6ZwX2cWBFi0KTnvR0ZFs4r/9989vv+pqMwsLc92M467a0dHD177272zY\n8HVgCib5QmNISxQvAVHFDpq/fxwMvxnWrMkp5NBHWLS3wwOugg/77dfHD7eODs7jl/5N6BRYm0BO\nlaCHDUza24SwwsGy25F0Cc+w4jgQrAlIpfw7+3Q6tzjQ6tXBwmH79uQS/7W3mzTX+dDZCffcY3I4\nxanz/NOfwpNPenfo0MqwYTv4xje+xyc/OYGzzkoHJrtzqGY7tWynlQYaaKWKLhbzWVJb3oeLLzaN\n9StK7VX0urU1+tQP+j5YfhWiwtZBpGlWRVI585IwXBRrsQZpSzEIcqGsr+/NcROHIAN3mMfPpEn+\nLq1eBswFC4JdYGtrcw3Y+abniGM4D0pZ4WeIbWtT/cUvevTrX9+kU6c+pf/0T/+iI0YcrLDJ5zg9\nfY7j3PfgdBk9Oo9/0QVM0dlcpwuY0ptyIqyx+QZ6+B0z6EHJp8JSEQ3S/S4AghYrHPqPQnP9lDvO\n/6uuru9/q76+8PxIcYK9nAA1P3fO+vrcjj4sPsDt8um+1nw8s6K6stbUqH7mM/6dtNvjqqenR197\n7TX95jcXaU1Ni4psUyclhcgHOmHC1XrFFb/UxsYdmk73hLbZ8dryPTebg4VBUGPjBno4GQW9bnaY\nW9ugQcGCxeth88guaIWDpWiUMntnsYgi3DZujF72sRDCAtSuvTa4v7nhhtxjLltmylt69WvZZUr9\nzp1KmU416Dqj9o2DBgUn0hPp0TPP/LOee+65OmTIEIWU4uMC6tx7X7dbnx932TLTDifmobZWdVDd\ndl2WOiF65+508E6kWtxAj3Ta3FS/UYLfA1dT4x+sERQc4fEDWeFgKQr5+Oz3F34CIKpwi5OBtVD8\n2jRrluro0cH9zdSp/tefHefgpYIK6tyrqkx/5aTIziZKFLMjkILPtUVhig4dOlSnTJmiX/rSE1pf\n3xX/3of8uDkC5YMYmfu8GjBrVrx9nRS4Xg9mnIAOP2EV4QeywsFSFErZYRaCXx+xZEl04RY3A2uh\nOJ35pEmqhx1mOuYokb2TJuV/zqgDX3cNhWyy73UqZdJVnHiiuZ7W1m79v//7P73llju0unqb5/Hr\n6zv05Zf/qj09PaHt8r33+Y5cnAsIy1WSfay2Nn9Vj9PQhoa+D+Ddd/sLr+nTg89bVZXfHy/rB0oq\nZbf1VrL0YSCUvwwKYjvlFBN85UV2DMPw4ebf54WqcUFNkpdfhssu6+slFSWeqbIy/3OGeT46qJoM\noBs35rqdZgeIHXBADyNHPs+f/vQUixY9zRVXLOWDDz4AYOjQJ9iwYQEVFdVs316dCR4TFi+u5fDD\nR0Vql6/H2KJF+KY/DQpQGTvWPNjDh+euc+OkbHXcwxYuDHYNvf56E8HnBEKceqp59YuunD49+Px+\nD262L3M2WT/Qmhkz3g4+UTSscLD0IWk3z2IQFsTW2em9rj+FW3u7CU7Lp5bx6afnf97mZuOpGYUd\nO7z71x07dvDii8+xdu3T/PGPT3PLLc+wdetWAD7ykY/Q3NzM+PHjGT9+PCNGjNjpjekZPOZqV5D7\nr2dfuGRJ/iOXxx7z73xTKTjjDJg0qW9jwwLOamr63qyFC4MfzM2bTfCEXzHsigojoJzryQnLDsAV\nlLFhxoxNwRtHo2DhICLDgZ8B+wEKzFPVH2RtMwF4EFMxDuABVZ1V6LktyZPXn7bErF7t38l2dpr/\nrJeAyBZua9YEn2ft2njtCnJRnzMHtmyJdzwwfcW5BQTVOrqJKOzYYfrXzs5OVqxYwdNPP81TTz3F\n73//e1ozneShhx7KlClTdgqDoUOH5hwnSoqO7MC30L6wvd0ETfhRXx88cgnq6Ldvh49/PLfRrijo\nHNLp3POFTbsHDzaFs/2EQ0cHNDXBbbeZh89PspaIJGYOXcCVqvqciDQCz4rIb1X1L1nbLVXVMxI4\nn8VFWMxMXIL+tA89ZEaWSZ0rX8K0A1VV3sKhp8cUkFm40LR91Cj/nD5e/30/2ttN53/TTebz9u19\n46mOOKJ3XRzq6kyepELu8aJF0bcV6eLee7/H
TTd9m/ZMB3b44YdzwQUXMGHCBMaNG8d+++3Xu0N7\nu7mZeT4Q2eqqwL7wvvuCI/9Ug0cu+UyJR4zwr0bkNVIaNco03qvzT6XgkEPMAzFhgv9xu7rM6Ob6\n6/2vpUQULBxU9T3gvcz7bSLyKrA/kC0cLAmTXaGrthYuugi++c3e5GRuwgSJe/1tt5nv3nwTNmww\nHd4pp4CIea79gknLgSuvhDvu6BVu7gjk2bN72/7ww70RzNlEmSU5QuE738lNh+H0QePGwRe/GH30\n7iAC3/hG4fc2TDPiRrUSeJQvf/nLjB8/nnHjxjF48GDvjb2ihr/yFTOCWLMmssDImWW0t8NCj4f0\n178ObvwnPxksmEaM8L8RXj92ezuceab/D3fZZXDLLX3beOqp/gKsowNOO83MDK680jyIXpSLYQ+S\n9VYCDgDeAfbI+n4CsAl4EVMR7vCAY0wFVgArRowY4W+h380JC9zMdt3MJ9d/Y6N5DXKF7A/31iie\nLo5LY1DlsKYm46GTT0yH41MfxfnFJ+1+4BKUpjtycGJbmy6Y/oymazoinbOuarsumP5M+A8a9vA5\nLlhxA2SCHtLJk4MbH1ScOSigxc9NK8wH2F05yGljUByDO5qxyC6BwApNoj9P4iCmPaSBZ4Fmj3V7\nAOnM+9OIc4pUAAAgAElEQVSA16Mc07qy+hMlOMntkRfkAbhpU/4ZAuI8y0lFXcf5b0XZNm6Ng6Qy\nKkT57dxEjd/YunWr/v7227W1tlY3SF3kegNCl86u+VZ4hx43ajjKCCLsIZ03L/gcfvVUw6oJ+ZWs\nixv81tRkRiJR/HOTrkWb9ceqgGc1Qv8atiTirSQi1cCvgAWq+oDH7GSr6/2jInKniAxW1Q1JnH93\nJIq6wPHuc977bXP99fmniI86C/bTQuSjlopjNA+zEb72WvwaB0mm1K+tNRqJ7u5go2yQ++7JJ/dw\n112P8cc//o6nn36avzz7LO/09FAP1ENWvYE0JqFdblK7BloZ2bkSOjcHF7aIo6uCaHnQw1zQqquD\nDUTnnpurNz31VHMdfvp9MOu8HuCoPsDuNm7YEM2uEdsaH4DHH+tI+Hj0A/iThLeSAD8FXlXVW322\n+RDwvqqqiByDyRq4sdBzl5JCEikWgyjPrtNxqwZ3kCtXxvuvu4ni3pp0cZ04/62g+6Rq1Mannx5P\nQMXtG93U1BivIHd7jzjCPD+rVpn+Ze+94fXX4WMfg5Sah2vRojq6Os4AqnOOuWVLC+d+/hecW/lf\nTB02jCOOPpo9Xnllp/7bXW9gFQczh6toI51znCq6aMY1mvDr0ON2nFFGEEE3taXFeO88+aTxB+7o\nMEae2lrj/nnOOaboxCOPmB91+3YjMHp6zOcg/B7guEUmWlqMN1JUI1Ysa7wPPn+sSiggMsZFoVMP\n4HhAMfaE5zPLacDFwMWZbS4FXgFeAP4EHBvl2OWiVkoikWLSRE1rsGBBtMRv+RSRjzoLLpaKNYo6\nKMp9ijuTj6tVcWswpk/3bq/nc9S4Q5elP63dDQ36La4PKGTfrTfWfEt7nJ2DU5TqMsZoExs1zRYV\nujTNFm1ioy5jjLcaJJ+bGvdHDrupJ55otnFSUzjFo7MzJyb5AHuFhgcda9as0nYIPvfMps8oEUkm\nUkyaMKOo04YwFee770Yv8u7u6JLI7FmMNBVe9yno+uIKqLh9o/s8fmmrfX8fNmo7tYGF7NNs1QVM\nidWYNlLBKayjpmyIkv8jyp+hvT3cwh8lR0qcpb4+/AF2j0Dmz1fdY4/w64xrxMoXnz+WFQ4lImhA\nk0rll0gxSdrazDOSSpnFb7DiN6BxUsFEHYBVV6tedJEZAc+cGb1IfX/nawrKeZaPgMpOqV9ba+7/\njBn+19rY6H2v7rhjo6ZSnT4d/5adnbefYdkRIAV3lnE7dCdZVNCIetCg6KPmqEnu6uvzn+q6j5FP\n4Y6gNpY6+ViRZw42fUYIQarQoJiclpbSuCvX1cGMGSH1bvFWcXqlggkilYKvfx1+8IPeNBVRDMtB\n6tueHuP+XWyCitjnkxbEuZ/33Wdc+9etM+U033/f/EO90B07eOBe5bhPv8fTTz+9c1m9egrwLc99\nWmngDUaSYnsfw3IraRrSQlXLZlPRjBh1R7OpqTFLPikbLrwQDj/c6L47O/saf6tz7SOhiPjfQIeg\nHCnZ1Neb14qK3OtraorfvqA2ZttWim2MjGsXiUsSEqZYy0CeOYAZZJQzcXXnhcQ7FKO4Thw2bYpf\nuyHM9XbZsph1YOjSmVyvx4ICuueee+rZZ5+t55//a//01ZmZg/PFTnXQpGd1wfRntL02YrBF0PLN\nbxauBgmKJRg0yMwwwnyYkyo7l/3jOnaKJNQ8UafBpbI9eJznKOjSBPrfRDrxYi3lIBzC9PVRVJDl\nQnZnF1Scxfn/OfbN+vpgQejnLu6mVMV1snH+P9nnrqvL/b8698ixd/r9t8OyOQd19D2gr911l3Z3\nd6tqyDPmpzJyOts4vvheS11dMqqQsI7dUT05+rdZs3J/8KjGnIaGaEayurrkO+MoMQqlLoqSZeNI\nKs6h4AMUcykH4aAaXh/YtzMoo/oHS5aY/5Mz4Eqng509UimzrZNi3i/VvHtx6+29Rt39YXsI+p9m\nq52d37mhwf8anf/2ggXRbLG+HX1dXZ/R9LKlHX2fsZoObWJTXw8i93LiicbwE9TYSI1KqLPKR1B5\njaSj/ghLl4Z7D3mV0EuCsFlBPxvZSChC2tocIhDkkvzYY9FVkP3F0qUwfnzfNjq6d8mNhQJy7Slh\nQV81Nb16eycup6PDLKmUUY2ec04ytSLiqHKD8rVVVMCjjxpXfi+XcS+6upTbb1/DM8+8y/btTsiO\nF0oN29lBNQ20UkVXX9tAeztccslOf/2xXMS7V89k0cgreWNtDSPfW0HzPWeTavUJB3rySVi2zD/A\nK0x3n127oFDixj6AudnZgS7uP9uSJfDzn3vbC8aOhR//2NxDrx84nYaDDy78urwIi1EYCEVRopCE\nhCnWUi4zhyCKPUgoNOVEW1vwDLyurq/6pKEhujo3exTuzKi9ahs7s5FC71UcVe6yZcGDS7eXUnR1\nd5fCdQpTFNp9t0tV79Dpcqe/q6jf4nj3xPGXjVORbNas4rhYFuLfG/TDB7mFJp2GIh/KZYrsgnLL\nrVSMZSAIh6DnM19vOYckbFoLFgR39iK9LqmOnj1uojh37rL584O3LcTmEEeVGyX/UXap4ChakdrK\nFv3OmFv1nVvu0EGDeny3axrUo+11e8a7kdkX4zwAUTpYr+g6d/bB6dONkamYfvfZD2wUvVuhgS79\nFYUadG4nm2MphZZLSA2GNzSB/jexjrwYy0AQDqrF8cRJyqYV5jpeU9ObfC7qwK+ysq+h2p3UctKk\n4H1PPDH//3LchHthsRtNTapbt3bqH//4Rz333Ae1srI1vN9mk7ZjpkDL0p/WdP2OnG0aGzPXs3Rp\ntBs
adDHz54d3skEdbKk7z+ygsbCHKpXyT5qXzzmLKfyyzxn0B8033W8+2BrS5Ytfidq2NrPkkz8o\nLA+Zk/YmTP/uqILDUtk/8ED0ZHIXXWSuNVvVuny5SW8TxF57haeU8bumOKrclSv9C26BUlnZxSGH\nXMGQIfMzVc5qqah433f7NC1UsSNjN+iAFhjL71g/aDj3zX2bX/+2BjB5ms49N3M9y2v9gyuCcF/M\nmjXhPv1+gRpRk1ol6Y+fncXw8MNNPiS/MngdHXD55Wa7fItXxM2cmARhf9B33ik8d1IUPH7jCn9D\nWCyscEiIoBK1UZJSZhOlI/TLdHrFFWa74cNNkje/Guki8PjjvR1vlD6spsYEwXkVEpo40b/+u8Pp\npwf/l4Oyt44a3klDrdDSkRtcld0/btwIoHhlHwXo7v4Jra1PceGFF+4sbPPOa7VMHL+ZLq2glQbq\naUWp4Av8nHEspZkHcoLN6rpbuDB9Pxf+d9YFOTckn6LR7ouJYuj1q0wUZYRx0EHJpcv1YuxYI4Sc\nUnlexuMtW/IbQfUnUf6gpRBaSaYIziKplN2nAD/AZAP8iarenLVeMutPA9qAC1X1uTjn6I/Mp3GI\n66AQdcTvF9E7bJj/oHDmzOC2Vlaa5dFH4fjjw6/NzfXXm7nz3Xf3Fuc6/XTzGvaMhtVDDhzontTF\nm3IQVR3PA3vm7FslXTQ3V9HW1saf/vQn3v5DC3Cm77muuuqL3HLLJX2+2+9/F/Ju/ddY1HoybzCS\nkbzhKRD64Od9Usif1t3Zh0XBNjX5exyFZTp95RVz7KTS5frhhPEPHw4XX5xbNg/yG0H1J/mUHS0G\nhaQIDiGJlN2VwI+Ak4G1wHIReUj71pA+FTgos/w/4MeZ10gkWQugWETpzJ1yuwC33hp8PWE1CyD/\nvqe72/znJ0/uTRl9q2ey9Vw+9CHYbz/Ytq33u/vvNzOKIO1HbW14PeTAge62dh7lOBYz0ZVComGn\nm+hDHWfyufFd7Pvss4zq7qaZKfyOk2ijIedY9XVw9NG537N6NXVtGzmPX/o3MpvsH9eR9Pn+aaur\ne6d+kJuf3KkHK+JfD9YhbNYxZ44ZJXiRRGedPQJ64w3/hyRfF8/+GjXGKSpSTEaNMilCiiEgCjVa\nAJ8CHnd9vha4NmubucAU1+dVwJCwY48ePbrkwYZ+hLmUBnktNTYaD8WoldscwoLvCg2Odc45f370\nrAX5nLO2NprNMTB7K106m+tU8c4oug20raJC26urtQe0LTBR3SZtX7o8twH55OJ2fhQvl9F8k8N5\nGS/zMbrm614aZuSOgtfDGxR1mY+LZ396KpXD+VWN4TvrT1M2WVmByRhVkvP5C8APs7Z5BDje9fl/\ngTFhxx49enRsl+GkSlG6ifoM+G0XtY8Iup7sPiHfmgJe55w8ORlB47cMGhTtdwj8rbNyDGUvPR7f\nBdYt8BpZxOlMKytN7hS/oI6wfCNRliRGP2H5yuM8jFEJGtH5PWhxr7XcRo2l9JRyn9vjHuyyWVlF\nZCowFWDEiBGxdPnFUD9FcfhQ7Z3Z3nabWbd2rVE7dnbCZZdFO1drq6kGlq2h8JrZJ5WQ0bm3cYNb\n43DlldFm+s3NMH16D17OFpXuKmUeeJmd3RXQcmwImzFl4GbM6N0hlTIpVrPDyb2oqAjW63lFLqdS\n5rgiwSl9HZJQ7Ywda36A2bPj7VeIaiRIP1hXZ169op6L4c5XbPrDU8qhiMZoSMYgvQ5wO3EOy3wX\ndxsAVHUeMA9gzJgxGtXuk3QpSodFi/z/x9u3m/4l237gjvCfPTu6OrCuzqiB3f8bP+HmVSozrD/z\noqHBGJSfeCL+vlEQAenaAQv/21MvvH79epYsWbIzffWWLbXA45hymA3U1OygpqaCxfp5Uq3x01LX\n0eFvQ5g1yxhJd/qeYlxHo+hwd+wId83KpqfHjBbq6syNqasLrm8cVw/vp38PylcelNI6X9190Iiu\nvd0I5EMOKczFc1dJUVEIRTRGAxQ89cAImDeAA4EaTCnQw7O2OR34DWaA90lgWZRjjx49OnKEfLEi\n1i+6KHj27ac5cCdoK1SfHzRLzk6ol6/mIrt4jf/iHxXsee/ru3RB/b/s1LV1NzTo9oYG/W5zs370\nox9VMOmr6+vr9eSTT9Zvf/vb+sQTv9e77+7snakvXZ6MDs1rSaX66giTMuZEWaqrVceM8X+IUimj\n84uirgjSfYb9iZJMaa1amvQR5VBBqr8ZCGVCMS6qrwGrgesy37lrSAvGo2k18BIR7A2qvRHSUXT+\nxShF2dYW3un6ZSt1ns8wNXYhtrqgFNiplOq8ecYQPG2ad46hdNrb5jl5sv9119KmKVoVutUIimBh\n0cQHnnmFNoGeefLJesusWfrajTdq1w03eHdMcUK3C1nykeZJLE5e9KBtgtJch90jL+lfbONpKXIe\nlUNepf7G5x6UlXAo1uJOnxFm98l3IBFkwA7LSxS0uAWS1/9y0KC+OdCCait4CbeodZFD+41NuTcg\nzGtoJjfofC7Qydynk6sf0PnztuvSea9oEx9omq19jL9P+6Sc7kmnzTnDOqxCO+sxPimv/W5YIR4+\nhSzO9YcJCq8OPerDX0rjaSmEUTl4C/U3tthPOPkMJMKerbC8RJWV0WtIJyncog6mJ08OdlPNVvk4\nN2DBrL9G9hrqrq/Xp//5n7WtqsrTxdTLi2jn4pcy1f2DFaLmaWgwNyBKVZ5saZ6Ph08hS0ODSY43\neXK4l1P2A12MaXMSlEIY9ae3ULlgi/2EE2cgEWUmvmBBcM2R+nr/fsc5RlTX2jjCLepgOpUyS5TY\nAffSPmg/bWryVhdlVybrAf1fvF1JA5eaGqNz91rnloZRKoyFGWuWLTM/VFDFouxgjE2b/NvnvoYk\nhcjs2dGEoVeWwd1d/27ZCQml7E4kQVO54NTgmDvXOKLMnWu8lLzcWKN4wjU3+9dId/ISPfywcfio\nMXnXaGjozWjw0kswdChMmwY33GBehwwxLrfZON5HTU3GsUTEvHplR4jqpOAU21H1Xt9AGyPJ9epI\ndbey+IrHaWpS6uu7EXpIs5UmNuUUsxdggl9SqSA6O/29fdzeJs3NvSHhOQ1NmYIvS5aE3zgR/2hg\nMK5nX/9674+TSvX+qF7U18Pbbwdvk0qZB3HpUtOe2lr/bcFEWjvueUFke+ME3aNSRutadi2SkDDF\nWoqZsjvqTDy7aqE7TXW2h497XVh5SqeGgpf9NWiW3NZmtA/52kKCZgHO0g268LDDdPDg4QpTtJLr\n9K6ggjVhBSCqq/tWE4o7Mo4yJfS7cXEN2lEN07Nm5bbNmYF4GY/b2lSnTg0+9/z50WweXrMBq3+3\nZMCWCS2MoPiJ6moz4+jo8K8IqGpmBe64is5Os5x5pgmG85uZtLWZAWU6nRvHECVj6Y4d4Vmc+6Kk\n6KCKHX3yEWXPAhxagM533uE/PvIR9p6UZsTl53HgD36AzPOJ
M/BL++pw6qlmVDx4MGzYAD/7Wbws\no2FlGcH/xsUNFHKmjWHTM6e+apS2Oe0bNsy/fKeIiZx0ppBBaa69ZgNB7Sj3rJWW8iQJCVOspRgz\nB2eAOWNGsLq4oSF44BWm5o2TkqLQKmhhSzrVqfNrvpxjLPbboce5iOxcQYUUs3eOM21a8I1JpZId\n7cY1aDvTxmLo8YOOWVvbN6ahrc203W04ymc2YGcUux3YMqHxyf6f1NX1vvr1FfX1fasrOsLlxBOD\n+5fJk/PIqRRgvS7Eo3OPis3aGiYMHIkYZNwNulFOMeqgos3ODfW7kOpq1c99Llmvk7g3LopLqztw\nLE4Sr6hBL+7OuxBvnHLJP2QpKVY4xCTof1JTE67DT6fN0tgY3tek09EqJLr7hNnT1gSO8OJ7dPZo\nLa3axEZdwphwb6KTTjLGjKDRcphEdAwpQe6YDQ3hHj5Jjm7jxi24O02/Uffdd+c/Go8aip5E5229\nmHZLkhIOu5S3UhBhqucwHX5Li1m2bQtPUFdVZdL1LF4MTY1dpNkG9ADquX1DgzLy598yBoyWFvP3\nbWkxnydOhI6OSE4sblJ08B9cwnsM5QRW+NRDy5BOw5e+ZIo1BOWr2Wsvs633RZh8OeedB0ce6X9D\n29rgC1/o612UTda1F4SfG1g6DY2NwR5Obve3GTPgi1+ESy6B6dMDf6tA3MecPNlf9+/YPgrB5h+y\nFMBuIxyC/iedncEeiXFobOztX8Ye0c67FcOZyzRmMot6vBtQ1bODZlnkfcBMJ2G8Fb2Fi3E13YbQ\nvdPldAnjuJCfBVcx29mAjIEzSAI5GfqiuEyGHWfcuN4O8vOf93fxTKKDBG8f57//Hd5/P9zvua7O\nlNL89383hvTvftc/WV7U9jrG8yOP9K6KBsl03mG/Q6mqlVkGJklMP4q1JKlWCpphR9F0RF361C7I\nOmlOfYHUdqONmPafvjqjHhFdPG6cHnvssVpZ+UmFjQpbFLo0zdaM2ui4aMZmEaPXd/L5ZKtDNm3y\nvxFxcvTEiehLMrq3GMU84ngCxG1vsdU+Nv/Qbgm7iytrUl54QfUPqqtNMNuZZxaW/hqMV+fOVPJZ\n05Wc+gJda2h++DJS79TDAm+/2m2q3LN0Kd3HHMOVV47nU59azsaNJ/Demh5G3nIFzW0/z8wOfh/e\nuIYGuOMOM03KdndcuhROOSVXHeRE+O2cDmVG4ffd17eI9Mc+1ruPo8o56aS+9UTB3OCXXuodoSdV\ni7dYtWTjuMLGHY0Xu9SkV173JFJyW3YPCpEswC3ASuBFYBHQ5LPdW5hsrM8TQ6odeujoRL3wwga9\nGzcam+wJJ4THdUUaPEbwlOlMpfSb06frFp8TdqbTuu3vf8+9GD9Xx0GD/KuTNTUZS3n2yPq3v/Vv\nY3296gcfxLuRTvvCcouoFj66bWsz1xQlT1P2flFmGXE8AfIZjZfC1dTmH9qtiNPHBi2FCoeJQFXm\n/b8B/+az3VvA4LjHr6wcHfk/GPW/7vc/cf6jud6aJi11Hds0zWZtZIumaFO/VNV9tAHt7f4ddWbp\nBr2oslKnHn20tqVSuqOuTnvCOomwqFyvDsdxs8ruhObODe7w4mT9y7fARr4dpLNfUKK6QqOJo7jC\nFtqh287bkiBlIRz6HAgmAQt81uUlHMBbOGTnSCt08BVFrVzPNv2APbSNlM7ni5maBsH949/efFM7\nQ/z+e0B3NDf3NiSsk4jaObuPFTUzqdeSrUeP2unHtSXE7SCj2gLc58pnlhE0s3HnQckn7sFiKQLl\nKBweBv7JZ92bGZXSs8DUkONMBVaYxVs4gEmwOXNmcKRz1Fl+pMFhVqrqHONyTYcOGtSts2c/ptOm\nTdNDDz1Up4BujdIBT54c/ZfPx4hZSARdTU3fY0bt9IttbI16Tc658p1lqIaPPmwUsqWMSEo4hBqk\nReQJ4EMeq65T1Qcz21wHdAELfA5zvKquE5F9gd+KyEpVXeK1obpqSIuMUb92dXUZD8QgotYZj5Ll\ntJUG3qDX2Og2Lq+qOIjlqfX8Zst8ZszYTmNjI8cffzyXDB9O+oknTNcTxKc/Hbw+amP93B8LqTWb\nbRiNakAutrE16jVVVZncTiNH9k2E5YXf/QvLW1SM4uUWSz8TKhxU9TNB60XkQuAM4KSM1PI6xrrM\n63oRWQQcA3gKhySJ6ioe1N85NNCak97aKV6/uQcu+dRneWTvz3NUYyMfOu44Kv/hH4xkeuYZ04EE\n8ZWvmAZccIH3erfL1t/+Ft+7J8oF+vH44307t6idfhKeMkGuamHXlEr1tuE3v4nmcRTkbZRPYr+o\noxOLpRwpZNoBnAL8BdgnYJsGoNH1/g/AKdGO769Wirq4bRN+RMmw4Jfeuge0+8tfNjp9R2VRW2s+\nP/FEdE8XkVyvINVclUVY/iMvPVrQBQa179e/9tajx62qlI+xNewcQdeUSvWmv1aN7nGUj7dRuVZh\ns+y2UA42B+CvwBqMPeF54D8y3w8FHs28Hwm8kFlewaijIh6/NMJBVfXPf+7RPfbo0urqTnU8lMzS\noik26Z98aiGro5f3+r62NjhZXfYyfXpft6ugBE3umsNRdNxR8wQ5RSnmzQvunIvpYRPV6B5VSEWp\nJpevjcDmL7KUGUkJBzHHKk+qqsZoXd2KvLQhDrNnw/XX536vqvzlL3/h6aef3rm8//4WYBINDaMZ\nMuQIDj54L8aPH8Jl+yyl7qtTvVUYtbX+KRDiMnYsvP56rxqmpsb/2I4aasgQ/xoC2ThqGr98/873\nQTr6pqbi69EXLjRl8/x++FmzTK6joGty09Fh7pPX9TjV5M49N79rCjp2Ke6VxZKFiDyrqmMKPlAS\nEqZYy9FHj9YFC1T/5V/ymzW4B27d3d36/PPP6w9+8ANtbm7WwYMHK6CADhs2TM8//3ydN2+erlq1\nSnt6evqK4iAVRlid4aC6xVFnIKVWWfT3aDhMDZRKlVewmfVWspQR7A7pMyoqjC3vvPNMQsxTTjGD\naneGB7/CWmZdF2vW/Iizz36SpUuX8sEHHwDw4Q9/mNNOO43x48czYcIEDjzwQMQrO6hDkHH1E5+A\nJ5/03zfouF4XHJViJk4rdTbPbMPz8OFmRuaX4VQ1vqE3asW2fMj32LZCm6WMKWvh4OaEE2DDmnYW\nzXiON1Z2MuwjKRgzmrXv16CdnXx/jtLV1UPbjloqKjvo7u5k27aTueaaFYwaNYpJkyYxfvx4xo8f\nz4c//OH4DfDrAO69N1g4XHONyWcURVUUJz11MQvHJ5XvKApeOZEqK6Gnx3+f7dvzE1BBNVgLJe6x\ni5ULymJJiLK2OYwZM0ZXrFhhPmT9mbShgW4RfjNxIhMefJD2rioWM4lVjOTD8hZrz+jk4HM/x7hx\n4xg2bFjxGvnBB6aYtFfH3tgI69ebka4jVN57D+65J37cQSplOkW3O2ixOpFS6dHb23MLcTsE2XLS\naZNie6C6iAZdt7V
TWAokKZvDwJg5tLejEycirj+TtLRQBZz+q19RATTSxT/xS7NSgaVN0HwazJ9f\nvCm7I7CyVUfV1WY57zyj/mhu7u3IZs/2rwfgx6BBcM458Ne/wqGHwne+YzqRYlGqbJ5BMQJVVUbN\n5hUjUsxZUymwsRGWAUBZC4dt27Zx4403UvVf/8XXNm+m0WMbX43+5s3G48Wp5PPlL8ODD5qRvJeO\nN67+1ysy1mHHDnPOefNgwYK+6oI4AWlOlbLubuPB09oKy5aZ90EzhyR02cXU0TsE2Tba2mDqVJMa\nfFdLN20rtFkGAklYtYu1ACoi+qMhQ7S70IAHZ6mry/UoycfbJG6+ovp6kwgqanHpVMrEGkRJe+1m\nIHnORPGK2hUzlva3N5hll4bdIc7hoIMO0uXLl9P06KPBfu+F0NRk/pZbtniv89P/zp4NN9zg7yrl\nh1ODWcTMMPyM0Om0cdH62c+8r9tL7z7QdNlBto1Bg+D222HNml3Pk8fGRliKSFI2h7KuIT2osdEI\nhpUr/QvWF0pHh38HHVQTOKg+bxAtLWapqDC+uX60tprrjqN+iKLLLicc20ZTU68KLZ02S08PXHaZ\nEcDTppnOdPnyZM/f3m5UdLNnm9c43mKF4HfdTU0DX2Vm2WUoa5sDL75oOobWVuPeWAyCOoQg/W9Q\nAroodHebUX467e8yeuihxsYQ1aV0IOqys20bw4bB179e/Cyn/e1KWgqbjsVSAOUtHLq7ezuGqHV8\n45JKGdWQl9tkkE+/l0dPfb0xpEZRNbW2wj77GAOrF1VVxitp4UL/9dkeO6WMT0gSd4zAwoXF9+Qp\nlzTbxYy7sFgKpCC1kojcKCLrROT5zHKaz3aniMgqEfmriFxTyDkTx0nt7IWfy6SjjnjsMbjtNhPk\nNmuW8U5asqSvusCPhgY4+OBg9UJtLVxxhWlfba3ZL0j90NwcLGwGgvtnKWY/A039ZrH0A0nMHG5T\n1Tl+K0WkEvgRcDKwFlguIg+p6l/yPmNlpZlVONTWGptEUMqFujqzLpUyr45b5O23w6WX5m7f2Ojd\nAXupI7KD0hx1wWuvwS23eMc1OJ11KuWtXnjpJaN26uoys5raWrPtN74BV1/tLdBKFZ9QTEox+xmI\n6jeLpcQU5K0kIjcCLSHC4VPAjar62cznawFU9bthxx8joiu8VlRXGwFx+ulw+OFwyCEwYQIcdJB3\nR2lr0ZgAAAq7SURBVOxk3jz7bFP4xemEg/YZNMgU1nF3qPl4A0URJtkU6nUUJVNpsck31qIUnjxB\nWV8HevS1ZbenLLKyAjcCbwMvAncBe3psMxn4ievzF4AfRjn+6LBYgKam3sLukyf71wf28h1ftsy/\n+LTfPvn6p8f11R/ofvCFxloUO1YjKMtuPgV/LJYygnKoIQ38GJiNSVgxG/g+8KVChJWITAWmAowO\n27iz03i3VFQEx0C0tMCqVb2fHYNkUBoLL/VCvuqIuIbHgaz2SMLYW2xPnl1B/WaxFJmCa0g7iMh/\nAo94rFoHDHd9Hpb5zu9884B5YNRKgSeNk6Nozhw44wzT8QQZJB289Nul8gYq5Dxh6pxip4lOKm9Q\nsT15rCupxRJMIdMOYIjr/deBez22qQLeAA4EajDlQg+PcvxQtVLcxVEZRKkp7KVeKJU6It/zhKlj\nSpFaw9ZUtlj6FRJSKxUaIf09EXlJRF4EPp0REIjIUBF5NCN8uoBLgceBV4H/UtVXCjxvfjgj17Do\n5vp6b/VCoZGtUSNy8zmPW53T0mK645YW83niRJNaPGh9UtHBQfe2nGMtLBZLH8o6t5Kvt5JDVVW8\n4DgRE49w1VX+HjH19bBuXXBK7Hy8gfyK2lx5pVkfpAKKcp4wD5y4eZryxeYNslj6ld2rnoMXTv6d\nOMKhocF0TrfcYoLLbr3V2yAZVishrj48yEg7c6YRWl7pG+KcJ8yIHTdPU75YY6/FskswcIVDVRU8\n/DCceWZvJ5RKeReHcWhtNVXY2tp6R+7f+IbpnItpkAwzgDsqHsg/fUOYETtunqZCsMZei2XAU95q\npaoqXVFT07fDr683hXScEbajelm1yngkxa2ylnTZSy9PoDjpvfNV8YSpc958Ew480Kp7LJZdnN1D\nrXTkkcY+sGoVbNhgEtUdfHDfUaijelm40MQ7eFFdbdZ5JddLKplbUJbPONXf8lXxqBpV2U03mc/Z\n9aYdY7ZV91gslgiUt3CoqEhG575jh/9+SejbwwK/3nzTPyFeNvmoeNyCqaPD5GGqrc3Nw2TVPRaL\nJSLlLRziEDQ6zzctd1TCAr8efbTvqD1oBhE3e6qXYHKu89ZbjXBwY9NEWyyWCJR1JbhYBKWrzict\ndxyipLtwRu1z5xobxKxZyVQCs+mnLRZLEdh1Zg5hLpRQPH171HQX2aP2q64qXMUzkPMwWSyWsmXX\nEA5uL6HbbjPfrV2b2+EWS98eVDI0aGaShIpnoFZ/s1gsZU15u7KOGaMrVgTGSOdXL6EY9Fc7bESy\nxWJxsXu4soZRLrWAof88gWxEssViKQIDWzgklR46KfrLE8i6qFosloQpSDiIyH3AIZmPTcBmVT3K\nY7u3gG1AN9CVxJQHsMZYN9ZF1WKxJEhBwkFVz3Hei8j3gS0Bm39aVTcUcr4crDHWYrFYikIicQ4i\nIsA/Ar9M4niRCYptSCJ+wU3UWgwWi8WyC5CUzeEE4H1Vfd1nvQJPiEg3MFdNKdDCKZUxNihvUik9\noiwWi6VEhAoHEXkC+JDHqutU9cHM+ykEzxqOV9V1IrIv8FsRWamqS3zONxWYCjBixIiw5hXfGFtO\nHlEWi8VSIkKFg6p+Jmi9iFQBzcDogGOsy7yuF5FFwDGAp3DIzCrmgYlzCGsfUFxjbLl5RFksFksJ\nSMLm8Blgpaqu9VopIg0i0ui8ByYCLydw3tJgPaIsFstuSBLC4VyyVEoiMlREHs183A94RkReAJYB\nv1bVxxI4b2lwPKK8sB5RFotlF2Xgp88oNjY9hcViGUAklT5j10nZXSwcj6gk0mtbLBbLAGFgp88o\nFTY9hcVi2c2wwiEqNj2FxWLZjbBqJYvFYrHkYIWDxWKxWHIoa28lEdkGrOrvdoQwGEg2oWBxsO1M\nFtvOZLHtTI5DVLWx0IOUu81hVWLpvYuEiKwo9zaCbWfS2HYmi21ncohIIv7/Vq1ksVgslhyscLBY\nLBZLDuUuHJJJ7V1cBkIbwbYzaWw7k8W2MzkSaWNZG6QtFovF0j+U+8zBYrFYLP1AvwoHEfkHEXlF\nRHpExNcDQEROEZFVIvJXEbnG9f1eIvJbEXk987pnkdoZeh4ROUREnnctW0Xk8sy6G0VknWvdaf3V\nzsx2b4nIS5m2rIi7fynaKSLDReR3IvKXzDPyNde6ot1Pv2fNtV5E5N8z618UkU9E3TdJIrTz/Ez7\nXhKRP4jIx13rPH//fmrnBBHZ4votZ0bdt8TtvNrVxpdFpFtE9sqsK8n9FJG7
RGS9iHiWPEj82VTV\nfluAjwKHAE8BY3y2qQRWAyOBGuAF4LDMuu8B12TeXwP8W5HaGes8mTb/Dfhw5vONwFUluJ+R2gm8\nBQwu9DqL2U5gCPCJzPtG4DXX716U+xn0rLm2OQ34DSDAJ4E/R923xO08Ftgz8/5Up51Bv38/tXMC\n8Eg++5aynVnbnwk82Q/3cxzwCeBln/WJPpv9OnNQ1VdVNSzI7Rjgr6r6hqp2AvcCZ2fWnQ3ck3l/\nD/C54rQ09nlOAlar6ttFao8fhd6Psrmfqvqeqj6Xeb8NeBXYv0jtcQh61hzOBn6mhj8BTSIyJOK+\nJWunqv5BVT/IfPwTMKxIbQmikHtSVvczi7CyyEVBTWnlTQGbJPpsDgSbw/7AGtfntfR2Evup6nuZ\n93/DFBYqBnHPk1MACbgsM9W7q1jqGqK3U4EnRORZMTW74+5fqnYCICIHAEcDf3Z9XYz7GfSshW0T\nZd+kiHuuizAjSge/3z9porbz2Mxv+RsROTzmvkkQ+VwiUg+cAvzK9XWp7mcYiT6bRY+QFpEngA95\nrLpOVR9M6jyqqiKSt+tVUDvjnEdEaoCzgGtdX/8YmI15iGYD3we+1I/tPF5V14nIvsBvRWRlZlQS\ndf9StRMRSWP+iJer6tbM14ndz10dEfk0Rjgc7/o69PcvIc8BI1S1JWM7+h/goH5qSxTOBH6vqu4R\nfDndz8QounBQ1c8UeIh1wHDX52GZ7wDeF5EhqvpeZvq0Pt+TBLVTROKc51TgOVV933Xsne9F5D+B\nR/qznaq6LvO6XkQWYaadSyiz+yki1RjBsEBVH3AdO7H7mUXQsxa2TXWEfZMiSjsRkSOBnwCnqupG\n5/uA37/k7XQJfFT1URG5U0QGR9m3lO10kaMVKOH9DCPRZ3MgqJWWAweJyIGZUfm5wEOZdQ8BF2Te\nXwAkNhPJIs55cvSRmQ7QYRLg6W2QAKHtFJEGEWl03gMTXe0pm/spIgL8FHhVVW/NWles+xn0rLnb\n/sWMZ8gngS0ZFVmUfZMi9FwiMgJ4APiCqr7m+j7o9++Pdn4o81sjIsdg+qSNUfYtZTsz7RsEjMf1\nvJb4foaR7LNZbAt70IL5Y68FtgPvA49nvh8KPOra7jSMt8pqjDrK+X5v4H+B14EngL2K1E7P83i0\nswHzYA/K2v/nwEvAi5kfZUh/tRPjsfBCZnmlXO8nRg2imXv2fGY5rdj30+tZAy4GLs68F+BHmfUv\n4fKy83tOi3QPw9r5E+AD171bEfb791M7L8204wWM4fzYcryfmc8XAvdm7Vey+4kZdL4H7MD0mxcV\n89m0EdIWi8ViyWEgqJUsFovFUmKscLBYLBZLDlY4WCwWiyUHKxwsFovFkoMVDhaLxWLJwQoHi8Vi\nseRghYPFYrFYcrDCwWKxWCw5/H9W4rBNvPhJswAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f68aab3fb10>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "tr_data = tensor.from_numpy(data)\n",
-    "tr_label = tensor.from_numpy(label.astype(int))\n",
-    "# plot the classification results using the current model parameters\n",
-    "def plot_status(w, b, title='origin'):\n",
-    "    global bd_x, bd_y, data      \n",
-    "    pr = np.add(np.dot(data, w), b)\n",
-    "    lbl = pr[:, 0] < pr[:, 1]\n",
-    "            \n",
-    "    plt.figure(figsize=(6,3));\n",
-    "    plt.plot(bd_x, bd_y, 'k', label='truth line')    \n",
-    "    plt.plot(data[lbl, 0], data[lbl, 1], 'ro', ms=7)\n",
-    "    plt.plot(data[~lbl, 0], data[~lbl, 1], 'bo', ms=7)\n",
-    "    plt.legend(loc='best')\n",
-    "    plt.title(title)\n",
-    "    plt.xlim(-1, 1);\n",
-    "    plt.ylim(data[:, 1].min()-1, data[:, 1].max()+1)\n",
-    "    \n",
-    "# sgd\n",
-    "for i in range(1000):\n",
-    "    act = dense.forward(True, tr_data)\n",
-    "    lvalue = lossfunc.forward(True, act, tr_label)\n",
-    "    dact = lossfunc.backward()\n",
-    "\n",
-    "    dact /= tr_data.shape[0]\n",
-    "    _, dp = dense.backward(True, dact)\n",
-    "\n",
-    "    # update the parameters\n",
-    "    opt.apply(i, dp[0], p[0], 'w')\n",
-    "    opt.apply(i, dp[1], p[1], 'b')\n",
-    "\n",
-    "    if (i%100 == 0):\n",
-    "        print 'training loss =  %f' % lvalue.l1()\n",
-    "        plot_status(tensor.to_numpy(p[0]), tensor.to_numpy(p[1]),title='epoch %d' % i)\n",
-    "\n",
-    "#train(dat, label)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The layer class has forward and backward functions for back-propagation.\n",
-    "* forward() accepts two arguments, the first one indicates the phase (training or evaluation); the second one includes the input tensor(s); It outputs the layer values as a single or a list of tensors.\n",
-    "* backward() accepts two arguments, the first one is not used currently; the second one includes the gradients of the layer values. It outputs a tuple, where the first field includes the gradient tensor(s) of the input(s), and the second field includes a list of gradients for the parameters.\n",
-    "\n",
-    "The optimzier class **apply** function updates the parameter values using the gradients. The first argument is the iteration ID, followed by the gradient tensor and the value tensor. Each parameter tensor has a name associated with it, which is used by Optimizer to keep some internal data (e.g., history gradients) for each parameter.\n",
-    "\n",
-    "The loss class computes the loss value given the predictions and the ground truth in **forward()** function. It computes the gradients of the predictions w.r.t the loss function and outputs the gradient tensor(s) by **backward()** function."
-   ]
-  },
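-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "As a minimal sketch of these conventions (reusing `dense`, `lossfunc`, `opt`, `p`, `tr_data` and `tr_label` from the cells above; the iteration ID 0 is illustrative):"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "act = dense.forward(True, tr_data)              # True = training phase\n",
-    "lvalue = lossfunc.forward(True, act, tr_label)  # per-sample loss values\n",
-    "dact = lossfunc.backward()                      # d loss / d predictions\n",
-    "dx, [dw, db] = dense.backward(True, dact)       # (input grads, [param grads])\n",
-    "opt.apply(0, dw, p[0], 'w')                     # the name 'w' keys internal state\n",
-    "opt.apply(0, db, p[1], 'b')"
-   ]
-  },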
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Observation\n",
-    "\n",
-    "We can see that prediction of the data points are getting correct labels with the training going on."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Next [CNN example](./cnn.ipynb)"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/doc/en/docs/notebook/model.ipynb b/doc/en/docs/notebook/model.ipynb
deleted file mode 100644
index 23a5553..0000000
--- a/doc/en/docs/notebook/model.ipynb
+++ /dev/null
@@ -1,536 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# SINGA Model Classes\n",
-    "\n",
-    "<img src=\"http://singa.apache.org/en/_static/images/singav1-sw.png\" width=\"500px\"/>"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Layer\n",
-    "\n",
-    "Typically, the life cycle of a layer instance includes:\n",
-    " 1. construct layer without input_sample_shapes, goto 2; or,\n",
-    " \n",
-    "    construct layer with input_sample_shapes, goto 3;\n",
-    " 2. call setup to create the parameters and setup other meta fields;\n",
-    " 4. initialize the parameters of the layer\n",
-    " 3. call forward or access layer members\n",
-    " 4. call backward and get parameters for update"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from singa import tensor, device, layer\n",
-    "\n",
-    "#help(layer.Layer)\n",
-    "layer.engine='singacpp'"
-   ]
-  },
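-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "A minimal sketch of the two construction paths from the life cycle above (assuming setup() accepts the input sample shape, as the life cycle describes; the shapes are illustrative):"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# path a: pass input_sample_shape at construction; setup runs implicitly\n",
-    "da = layer.Dense('da', 3, input_sample_shape=(2,))\n",
-    "\n",
-    "# path b: construct first, then call setup explicitly before use\n",
-    "db = layer.Dense('db', 3)\n",
-    "db.setup((2,))\n",
-    "\n",
-    "# after setup, the parameters exist and can be initialized\n",
-    "for t in db.param_values():\n",
-    "    t.set_value(0)"
-   ]
-  },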
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Common layers"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from singa.layer import Dense, Conv2D, MaxPooling2D, Activation, BatchNormalization, Softmax"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Dense Layer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(2, 3) (3,)\n"
-     ]
-    }
-   ],
-   "source": [
-    "dense = Dense('dense', 3, input_sample_shape=(2,))\n",
-    "#dense.param_names()\n",
-    "w, b = dense.param_values()\n",
-    "print w.shape, b.shape"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "w.gaussian(0, 0.1)\n",
-    "b.set_value(0)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[ 0.02440065, -0.03396009,  0.01396658],\n",
-       "       [ 0.00771775,  0.07841966, -0.05931653]], dtype=float32)"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "x  = tensor.Tensor((2,2))\n",
-    "x.uniform(-1, 1)\n",
-    "y = dense.forward(True, x)\n",
-    "tensor.to_numpy(y)\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(2, 2) (2, 3) (3,)\n"
-     ]
-    }
-   ],
-   "source": [
-    "gx, [gw, gb] = dense.backward(True, y)\n",
-    "print gx.shape, gw.shape, gb.shape"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Convolution Layer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(4, 6, 6)\n"
-     ]
-    }
-   ],
-   "source": [
-    "conv = Conv2D('conv', 4, 3, 1, input_sample_shape=(3, 6, 6))\n",
-    "print conv.get_output_sample_shape()"
-   ]
-  },
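-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Why (4, 6, 6): there are 4 kernels, and with kernel size 3, stride 1 and (assuming the default) padding of 1 on each side, every spatial dimension stays (6 + 2*1 - 3)/1 + 1 = 6."
-   ]
-  },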
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Pooling Layer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(4, 3, 3)\n"
-     ]
-    }
-   ],
-   "source": [
-    "pool = MaxPooling2D('pool', 3, 2, input_sample_shape=(4, 6, 6))\n",
-    "print pool.get_output_sample_shape()"
-   ]
-  },
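-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Why (4, 3, 3): pooling keeps the 4 channels, and with window 3 and stride 2 each spatial dimension becomes ceil((6 - 3)/2) + 1 = 3, assuming the rounding-up convention that matches the shape printed above."
-   ]
-  },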
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Branch layers"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "from singa.layer import Split, Merge, Slice, Concat"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[(4, 6, 6), (4, 6, 6)]\n"
-     ]
-    }
-   ],
-   "source": [
-    "split = Split('split', 2, input_sample_shape=(4, 6, 6))\n",
-    "print split.get_output_sample_shape()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(4, 6, 6)\n"
-     ]
-    }
-   ],
-   "source": [
-    "merge = Merge('merge', input_sample_shape=(4, 6, 6))\n",
-    "print merge.get_output_sample_shape()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[(2, 6, 6), (2, 6, 6)]\n"
-     ]
-    }
-   ],
-   "source": [
-    "sli = Slice('slice', 1, [2], input_sample_shape=(4, 6, 6))\n",
-    "print sli.get_output_sample_shape()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(4, 6, 6)\n"
-     ]
-    }
-   ],
-   "source": [
-    "concat = Concat('concat', 1, input_sample_shapes=[(3, 6, 6), (1, 6, 6)])\n",
-    "print concat.get_output_sample_shape()"
-   ]
-  },
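-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# The second argument of Slice/Concat is the axis (1 = channels here), and\n",
-    "# Slice's [2] lists the cut offsets, so (4, 6, 6) -> (2, 6, 6) + (2, 6, 6).\n",
-    "# Assuming multiple slice points work as in Caffe's Slice layer, cutting at\n",
-    "# [1, 3] should give three pieces (illustrative):\n",
-    "sli3 = Slice('slice3', 1, [1, 3], input_sample_shape=(4, 6, 6))\n",
-    "print sli3.get_output_sample_shape()  # expect [(1, 6, 6), (2, 6, 6), (1, 6, 6)]"
-   ]
-  },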
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Metric and Loss"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[[ 0.13973515  0.21827343  0.21068712  0.21905626  0.21224809]\n",
-      " [ 0.2354937   0.1047527   0.18490241  0.23617713  0.23867407]\n",
-      " [ 0.2659435   0.11397494  0.1659178   0.22683497  0.22732878]]\n",
-      "0.0\n"
-     ]
-    }
-   ],
-   "source": [
-    "from singa import metric\n",
-    "import numpy as np\n",
-    "\n",
-    "x = tensor.Tensor((3, 5))\n",
-    "x.uniform(0, 1)  # randomly genearte the prediction activation\n",
-    "x = tensor.softmax(x)  # normalize the prediction into probabilities\n",
-    "print tensor.to_numpy(x)\n",
-    "y = tensor.from_numpy(np.array([0, 1, 3], dtype=np.int))  # set the truth\n",
-    "\n",
-    "f = metric.Accuracy()\n",
-    "acc = f.evaluate(x, y)  # averaged accuracy over all 3 samples in x\n",
-    "print acc"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "1.80309379101\n",
-      "[[-0.78104687  0.18748793  0.16346708  0.24803984  0.18205206]\n",
-      " [ 0.21501946 -0.83683592  0.19003348  0.20714596  0.22463693]\n",
-      " [ 0.20000091  0.23285127  0.26842937 -0.87474263  0.17346108]]\n"
-     ]
-    }
-   ],
-   "source": [
-    "from singa import loss\n",
-    "\n",
-    "x = tensor.Tensor((3, 5))\n",
-    "x.uniform(0, 1)  # randomly genearte the prediction activation\n",
-    "y = tensor.from_numpy(np.array([0, 1, 3], dtype=np.int))  # set the truth\n",
-    "\n",
-    "f = loss.SoftmaxCrossEntropy()\n",
-    "l = f.forward(True, x, y)  # l is tensor with 3 loss values\n",
-    "g = f.backward()  # g is a tensor containing all gradients of x w.r.t l\n",
-    "print l.l1()\n",
-    "print tensor.to_numpy(g)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Optimizer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<singa.tensor.Tensor at 0x7f6a0c7cfe90>"
-      ]
-     },
-     "execution_count": 16,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from singa import optimizer\n",
-    "\n",
-    "sgd = optimizer.SGD(lr=0.01, momentum=0.9, weight_decay=1e-4)\n",
-    "p = tensor.Tensor((3,5))\n",
-    "p.uniform(-1, 1)\n",
-    "g = tensor.Tensor((3,5))\n",
-    "g.gaussian(0, 0.01)\n",
-    "\n",
-    "sgd.apply(1, g, p, 'param')  # use the global lr=0.1 for epoch 1\n",
-    "sgd.apply_with_lr(2, 0.03, g, p, 'param')  # use lr=0.03 for epoch 2"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## FeedForwardNet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "conv1 (32, 32, 32)\n",
-      "relu1 (32, 32, 32)\n",
-      "pool1 (32, 16, 16)\n",
-      "flat (8192,)\n",
-      "dense (10,)\n",
-      "[u'conv1_weight', u'conv1_bias', u'dense_weight', u'dense_bias']\n"
-     ]
-    }
-   ],
-   "source": [
-    "from singa import net as ffnet\n",
-    "layer.engine = 'singacpp'\n",
-    "net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())\n",
-    "net.add(layer.Conv2D('conv1', 32, 5, 1, input_sample_shape=(3,32,32,)))\n",
-    "net.add(layer.Activation('relu1'))\n",
-    "net.add(layer.MaxPooling2D('pool1', 3, 2))\n",
-    "net.add(layer.Flatten('flat'))\n",
-    "net.add(layer.Dense('dense', 10))\n",
-    "\n",
-    "# init parameters\n",
-    "for p in net.param_values():\n",
-    "    if len(p.shape) == 0:\n",
-    "        p.set_value(0)\n",
-    "    else:\n",
-    "        p.gaussian(0, 0.01)\n",
-    "print net.param_names()"
-   ]
-  },
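-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The printed shapes chain together: conv1 keeps the 32x32 spatial size, pool1 halves it to 16x16, so flat outputs 32 * 16 * 16 = 8192 features, which dense maps to the 10 classes."
-   ]
-  },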
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "conv1 (32, 32, 32)\n",
-      "relu1 (32, 32, 32)\n",
-      "pool1 (32, 16, 16)\n",
-      "flat (8192,)\n",
-      "dense (10,)\n"
-     ]
-    }
-   ],
-   "source": [
-    "layer.engine = 'cudnn'\n",
-    "net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())\n",
-    "net.add(layer.Conv2D('conv1', 32, 5, 1, input_sample_shape=(3,32,32,)))\n",
-    "net.add(layer.Activation('relu1'))\n",
-    "net.add(layer.MaxPooling2D('pool1', 3, 2))\n",
-    "net.add(layer.Flatten('flat'))\n",
-    "net.add(layer.Dense('dense', 10))\n",
-    "\n",
-    "# init parameters\n",
-    "for p in net.param_values():\n",
-    "    if len(p.shape) == 0:\n",
-    "        p.set_value(0)\n",
-    "    else:\n",
-    "        p.gaussian(0, 0.01)\n",
-    "        \n",
-    "# move net onto gpu\n",
-    "dev = device.create_cuda_gpu()\n",
-    "net.to_device(dev)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "## Next: [Simple models](./regression.ipynb)"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/rbm.ipynb b/doc/en/docs/notebook/rbm.ipynb
deleted file mode 100755
index c9aadf5..0000000
--- a/doc/en/docs/notebook/rbm.ipynb
+++ /dev/null
@@ -1,346 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Train a RBM model\n",
-    "\n",
-    "This notebook woul train a Restricted Boltzmann Machine (RBM) over the MNIST dataset using PySINGA. The RBM model would learn a feature representation of a digit image like MNIST images.\n",
-    "Please refer to the previous two notebooks for basic usages of Tensor and other moduels of PySINGA.\n",
-    "\n",
-    "* Please intall Pillow to enable image display. 'conda install pillow'\n",
-    "\n",
-    "## Download the training data\n",
-    "\n",
-    "We will use a pre-processed version of the original MNIST dataset."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "import os\n",
-    "import gzip\n",
-    "import argparse\n",
-    "import cPickle\n",
-    "import utils\n",
-    "import urllib\n",
-    "from PIL import Image\n",
-    "\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline\n",
-    "\n",
-    "\n",
-    "def load_train_data():\n",
-    "    # download the data for the first time running\n",
-    "    print 'downloading data'\n",
-    "    urllib.urlretrieve('https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz', 'data.bin')\n",
-    "    print 'finished data downloading'\n",
-    "    f = gzip.open('data.bin', 'rb')\n",
-    "    train_set, valid_set, test_set = cPickle.load(f)\n",
-    "    traindata = train_set[0].astype(np.float32)\n",
-    "    validdata = valid_set[0].astype(np.float32)\n",
-    "    print traindata.shape, validdata.shape\n",
-    "    return traindata, validdata"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Import PySINGA modules"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "from singa import initializer\n",
-    "from singa import optimizer\n",
-    "from singa import device\n",
-    "from singa import tensor\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Setup\n",
-    "\n",
-    "* Load training data\n",
-    "* Create the device. If GPU is available, please set **use_gpu=True**\n",
-    "* Create SGD optimzier with given momentum and weight decay\n",
-    "* Create parameter Tensor instances and initialize the parameters"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Loading data ..................\n",
-      "downloading data\n",
-      "finished data downloading\n",
-      "(50000, 784) (10000, 784)\n"
-     ]
-    }
-   ],
-   "source": [
-    "opt = optimizer.SGD(momentum=0.8, weight_decay=0.0002) \n",
-    "\n",
-    "hdim = 1000\n",
-    "vdim = 784\n",
-    "w = tensor.Tensor((vdim, hdim))\n",
-    "w.gaussian(0.0, 0.1)\n",
-    "vb = tensor.from_numpy(np.zeros(vdim, dtype = np.float32))\n",
-    "hb = tensor.from_numpy(np.zeros(hdim, dtype = np.float32))\n",
-    "\n",
-    "print 'Loading data ..................'\n",
-    "dat,_ = load_train_data()\n",
-    "\n",
-    "use_gpu = False\n",
-    "if use_gpu:\n",
-    "    dev = device.create_cuda_gpu()\n",
-    "else:\n",
-    "    dev = device.get_default_device()\n",
-    "\n",
-    "for t in [w, vb, hb]:\n",
-    "    t.to_device(dev)   "
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Train the RBM model for 10 epochs, for each epoch\n",
-    "* Plot the weight matrix\n",
-    "* Update the model parameters using CD algorithm,\n",
-    "    * Get a mini-batch of training data\n",
-    "    * Do positive phase\n",
-    "    * Do negative phase\n",
-    "    * Compute reconstruction error\n",
-    "    * Compute parameter gradients and update the parameters"
-   ]
-  },
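-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "A minimal sketch of one CD-1 step on a single mini-batch, following the tensor operations used in the SINGA RBM example (op names such as tensor.gt and add_row may differ across versions; the batch size and learning rate are illustrative):"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "batch_size, lr = 100, 0.1  # illustrative values\n",
-    "tdata = tensor.from_numpy(dat[0:batch_size, :])\n",
-    "tdata.to_device(dev)\n",
-    "\n",
-    "# positive phase: visible -> hidden probabilities, then binary samples\n",
-    "poshidprob = tensor.mult(tdata, w)\n",
-    "poshidprob.add_row(hb)\n",
-    "poshidprob = tensor.sigmoid(poshidprob)\n",
-    "posrandom = tensor.Tensor(poshidprob.shape, dev)\n",
-    "posrandom.uniform(0.0, 1.0)\n",
-    "poshidsample = tensor.gt(poshidprob, posrandom)\n",
-    "\n",
-    "# negative phase: reconstruct the visible units, then re-infer the hidden\n",
-    "negdata = tensor.mult(poshidsample, w.T())\n",
-    "negdata.add_row(vb)\n",
-    "negdata = tensor.sigmoid(negdata)\n",
-    "neghidprob = tensor.mult(negdata, w)\n",
-    "neghidprob.add_row(hb)\n",
-    "neghidprob = tensor.sigmoid(neghidprob)\n",
-    "\n",
-    "# reconstruction error, summed over the batch\n",
-    "error = tensor.sum(tensor.square(tdata - negdata))\n",
-    "print 'reconstruction error per image = %f' % (error / batch_size)\n",
-    "\n",
-    "# CD-1 gradients: negative statistics minus positive (descent direction)\n",
-    "gw = tensor.mult(negdata.T(), neghidprob) - tensor.mult(tdata.T(), poshidprob)\n",
-    "gvb = tensor.sum(negdata, 0) - tensor.sum(tdata, 0)\n",
-    "ghb = tensor.sum(neghidprob, 0) - tensor.sum(poshidprob, 0)\n",
-    "\n",
-    "# gradients are summed over the batch, so scale the learning rate down\n",
-    "opt.apply_with_lr(0, lr / batch_size, gw, w, 'w')\n",
-    "opt.apply_with_lr(0, lr / batch_size, gvb, vb, 'vb')\n",
-    "opt.apply_with_lr(0, lr / batch_size, ghb, hb, 'hb')"
-   ]
-  },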
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Epoch 0, Reconstruction error per image = 17.989441\n",
-      "Epoch 1, Reconstruction error per image = 10.955967\n",
-      "Epoch 2, Reconstruction error per image = 9.593508\n",
-      "Epoch 3, Reconstruction error per image = 8.900522\n",
-      "Epoch 4, Reconstruction error per image = 8.485637\n",
-      "Epoch 5, Reconstruction error per image = 8.187351\n",
-      "Epoch 6, Reconstruction error per image = 7.981121\n",
-      "Epoch 7, Reconstruction error per image = 7.825888\n",
-      "Epoch 8, Reconstruction error per image = 7.707948\n",
-      "Epoch 9, Reconstruction error per image = 7.589963\n"
-     ]
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGdUlPfW/v8Z6jAwQ+9laFJEOoKCohGxYY1E1Fii0aiJ\n6eUknuQcU0xOEk9iTHL0xBg10WjsmkgQQRAQpAnSe5FehzIDzFDm/yL/3Ov3vHj+v7x4sv7nWcvr\n3bDuNffMl9n73vva17VFWq2WR3iER3iE36Hz//cHeIRHeIT/LDxKCo/wCI/wX/AoKTzCIzzCf8Gj\npPAIj/AI/wWPksIjPMIj/Bc8SgqP8AiP8F/wpyUFkUi0RCQSVYtEojqRSPTmn3WfR3iER/ifhejP\n0CmIRCJdoAaIBVqBfGCDVqut+B+/2SM8wiP8j+LPqhTCgTqtVtug1Wo1wDlg1Z90r0d4hEf4H4Te\nn/S+jkDL//G6FYj47y42MTHRarVaLC0tEYvFaDQahoaG0NfXx8TEhM7OTkxNTZFKpRgZGdHR0YG5\nuTljY2NoNBo0Gg02NjaoVCoMDAxQqVR0dHRgamqKoaEhenp6jIyM4ODggEqlwsjICJVKxcjICLa2\ntgwPD2NsbEx3dzf6+vqYmpry8OFDrKysmJycxMLCgq6uLgwMDGhvb8fV1VW43t7entbWVuF9RCIR\no6OjKBQK/Pz8UCqVaLVaRCIRAwMDSKVSRkdHMTMzQ6VSMTo6ip6eHt3d3Tg7O2NoaIi+vj79/f2I\nxWLGxsYwMDBAKpWio6PD+Pg4AFqtlr6+PszMzJiamsLAwACFQoFYLMbQ0BCNRsPY2Jjw3Y2MjNDT\n08PAwIDm5mbEYjFGRkbCPcbHx5FKpahUKsbGxjAzM2NycpLx8XFEIhEymQyFQoGuri46OjrC/2Zo\naAilUomTkxMqlQpdXV309PTo6+vDysqKgYEBzMzMGB4eBqCrq4tp06YxNDSEmZkZRkZGKBQK+vv7\nGRwcxMvLC4Dx8XF0dHTQaDTC51er1UxOTmJsbIy+vj7j4+Po6+szNTWFvr4+urq69Pb2YmZmhlqt\nRq1WIxKJ6OrqwsHBAbFYjLGxMX19fWg0GqysrFAoFFhZWWFgYEBPTw9arRZDQ0MMDAwYGxtDIpEI\n56xSqZBIJExMTKBQKFAoFDg4ODA2NoaVlRX6+vp0dHQgk8nQ09NjYmICrVaLSqVCLBbT09ODXC5H\nrVYzNjaGVCqlrq4OGxsbAHR0dDA3N6eyshI7OztUKhVSqZSpqSnUajXGxsbC3xQKBYaGhkilUpRK\nJRMTE0gkEqamptDV1QVgZGQEfX19RCIRurq6tLW19Wq1Wuv/W/D+WUnh/wqRSPQM8AyAra0tH374\nIa6urmRlZXHx4kW2bt2Kvb09Pj4+pKSksGDBAsrKyli5ciWFhYUEBwdz5MgRampqOHjwIMnJyRQU\nFCCVSunq6uL8+fPcvHmTDRs2sG/fPs6fP09kZCRhYWFcvnyZhQsXcvfuXYaGhoiMjOT27dt4e3sz\nY8YM9PT0kEql3L9/H11dXerr65k/fz737t3j8OHDJCYmcubMGXR0dFiwYAFZWVl0dXURHx/PxYsX\n6ezsJC8vj+PHj3P69GkKCgqYNWsWoaGh3Lp1i4SEBDIzM9FoNLz++uuUlJSwcOFCXnjhBdrb27Gw\nsGDp0qWIxWLu37/P7Nmzyc7OxsXFhaKiIh5//HFu3bqFiYkJw8PDyGQyHj58iFQqpaioiKCgILRa\nLXPmzOHmzZs0NzczNTXFhg0b+OWXX/Dy8qKiooLq6mqeeOIJMjMzmTt3LuXl5URFRVFaWkpFRQWO\njo6EhISQlZWFWq3GxcUFqVRKaWkp4eHh7N+/n08++YSWlhb8/f159tlnWblyJbq6uixZsoRz584h\nk8mQyWQ4ODiQn5/PkSNH2LJlC8HBwTQ0NFBTU0NAQABRUVHs3LmTEydOcPr0aWxtbZk7dy4jIyN0\ndnaiUCj47LPPeOaZZ3jyySe5ePEi06dPp7Kykrlz59LT08Ps2bN59913mTNnDr29vYyPj/P5558D\nsGHDBjZt2oSjoyMDAwN8+eWXiMViLCwsKCwsxM3NjaioKAYHB0lMTGTfvn2kpqYyOTnJ7du3WbJk\nCdXV1QQGBhIcHMz4+DhRUVG8+uqrtLe3M2/ePN577z2io6NJSUnhpZdeIigoiNLSUjw8PLh27Rpv\nvvkmFRUVFBYWoq+vT2trK25ubjz11FPIZDJOnTpFQEAAf/vb34iPj8fAwIDIyEiMjIyor6+nqamJ\nqakpXFxcuH//vpDU0tPTeeGFFzAyMmJ0dBSpVMrw8DD29vb09/dz+/Zt7O3t+frrr5v/SGz+We1D\nG+D8f7x2+n//JkCr1X6j1WrDtFptmJmZGc3Nzejp6REREcFbb72FnZ0dg4ODVFRUIJVKKS8v5+WX\nX2Z4eBiVSsWPP/6Iq6sr+/btY968edjZ2aGj89vX8fX1BeDkyZM0NjYyPDzMJ598gqOjI3Z2djz5\n5JMAGBoa8t5776Gnp8f06dOxsbHB0dERsVjM8ePHqa+vp6WlBTMzM9ra2ggODgbg559/Zu3atdja\n2tLR0UFjYyNOTk50dnbi7++PoaEhP/30EzU1Nbi6uuLu7s7OnTsZHx/n+eefp7a2lqioKFpaWrh4\n8aKQ2RMSEujr6yMuLg6VSoVarSYlJYXx8XHMzc2pq6sjNjaWiooKPDw8GBkZQVdXl+rqary9vVGp\nVLi5uWFjY4ONjQ13796ltraW3NxcVCoVV69eJT09neHhYTo7O/noo4+4ffs2c+fO5datWzg7O1Na\nWsrg4CBlZWVMTExgZmaGq6srLi4u6OrqUl5ejkgkwsjIiDfeeIPS0lI0Gg1paWmsWbMGiUSCWCym\nu7sbuVyORCLB29ubvr4+LC0tyc3NZffu3XR1dTF//nymTZuGubk5JiYmAELV8vPPP2NgYIClpSUd\nHR3U1tYSHR2Nrq4ud+/eZdq0aeTm5uLk5ERlZSUODg40Nzfj6+tLXl4ezc3N2NnZ8euvvwLg6urK\nzZs3yc/PR6VS8cYbbxASEsLrr79OVVUVk5OT1NfXk5mZycaNGzE1NSUvL4/h4WHWrVuHWCxGqVSy\naNEiioqKOHPmDABhYWF88sknNDc387e//Y3Q0FDKy8uJi4tDV1eXefPmMWvWLJ577jkyMjIYHBxk\n586d6OjosG/fPvbu3cvQ0BCenp48/fTTlJSUANDa2kpzczPZ2dmcOHGC/Px8rK2tCQkJobm5mbKy\nMlJSUrCysmLz5s20tbVRUlJCWVkZzzzzDN3d3ZSVlVFQUCCc8R/Fn5UU8oFpIpHITSQSGQDrgev/\n3cX9/f0olUo0Gg3j4+O89dZbjI2N4eHhQXBwMIsXLyYoKIitW7eSmppKeno6g4ODDA0NodVqefbZ\nZykrK2PdunWMj48L5ZilpSXH
jx/HwcGBlpYW3NzcuHLlCu7u7rS2tjJ9+nSamprYuXMnxcXFSCQS\nhoeHcXBw4ODBg1haWrJt2zb09fXx9/fno48+AmDevHl0dnaSnZ0NwPr16/Hz86OgoIDGxkacnZ05\nf/48x44d48yZMzg6OnLs2DF8fX05c+YMfn5+SCQSbG1tWbBggXAOhYWFBAUF0djYCICPjw+bNm0i\nPT0dR0dHxsbGmJiYICgoiKqqKq5du4ZarSYwMBCFQoGbmxuPPfYYAwMDNDY2Mn/+fO7fv8/OnTvx\n8PDAwsKC+Ph4KioqCAwMJDc3FwcHBxoaGmhvb+fy5ctUVlYyMjKCu7s7ly9fJjs7m6mpKezt7Zk1\naxYJCQloNBr6+/vp6+sjLS0NHx8f+vv7OXDgADU1NdjY2NDY2IinpycajYbc3FyWLVtGZmYm9vb2\nPPPMM8jlcubOncv+/ft5/PHHWbp0qXAGcXFxzJ49m1OnTnHixAkmJycxNzdn+/bteHl5UVxczMOH\nD3FxcUGhUKDVavniiy/4+uuv6e3tZdu2bUilUh48eEBpaSkAhw4dwtramt7eXnp6ehgYGMDZ2Zm0\ntDQWLFiAWCzGxMSExx9/nK6uLm7duoVcLic9PZ2ysjL6+/sJDg7m4MGDODk5ERoaCoBareaZZ57h\n5s2bODs7c/nyZb7//ns+/vhjHjx4gI6ODidPnuTmzZuo1WrCw8N59tln2bx5M3fv3sXS0hI7Ozu6\nu7sxMjLi2LFjQkzo6+sjkUjw8fFBKpXS29vLgwcPcHR05LnnnmPt2rU8ePAAZ2dnHj58yNWrV3nq\nqadYvXo106ZNw97envHxcWJjY/niiy/+cPD+KUlBq9VOAHuBm0AlcF6r1Zb/d9dLJBImJyfp6OhA\nq9USHBxMeHg4Pj4+HDx4kNLSUsbGxoTMu2jRInJycpg5cyaDg4P4+/uTlZXFzz//jEQiEZLC2NgY\nFRUVeHp6olQq6e3txc/Pj76+PgwMDDA0NCQ9PZ2EhAT27duHj48POjo6pKenC6Xc9evXmZiYoK2t\njcLCQuC3vtjGxoaXXnqJjIwM9PX1efDgAaGhoXh7e6Onp4eFhYWQsQMCAvjwww+5evUqERERWFlZ\n0drayowZM7hw4QI9PT0ANDY2otFoKCsrQ0dHh+zsbLq7u1Gr1bi7u7NgwQLS09M5dOgQbm5unD9/\nnvHxce7du8e8efOIiYmhrq4OsViMu7s7J0+eZOXKleTm5hIdHY2LiwtJSUksX74ctVpNXl4eU1NT\n1NfXMzk5ybZt21i2bBne3t7o6+vzzjvvoFarsbKyYmhoiIqKCtavX49cLmd4eJgdO3bw0UcfoVAo\nMDMz47PPPsPc3BypVMquXbv44YcfmD17NuXl5ezatYtZs2bx4osvEhMTg4WFBZWVlSQlJSESiTh1\n6hQAxsbGJCYm4uHhQUJCAosWLcLd3Z2VK1dy9OhR2traePHFF2lqasLFxQUXFxfi4uKQy+UYGRlh\nZ2dHdXU1Y2NjrF69GgsLCwBee+01ampqhHawtLSU0tJS7OzsWL9+PXfu3BES8qVLlxgcHBRK+amp\nKRwcHKioqMDExITTp0+TnJwMwA8//EBERATPPPMMJ0+eJDg4mFu3buHh4YG5ubnQGvb29rJixQrG\nx8dZvXo17777LgYGBvzyyy+0tLTQ2NhIUVERzc2/Vfj29vZER0djbGxMfn4+/v7+6Onp0dPTg42N\nDSkpKcJDKzExke7ubnbu3EleXh7W1tZkZGTQ09ODo6Mj+fn5HD58+A/H75+mU9BqtYlardZLq9V6\naLXaA/9f16rVakJDQ7lx4wZtbW2Ymppy6tQppFIp+fn5VFVVMTExwbp164iPjycxMZHnnnsOMzMz\n7O3t+eKLL1i7di07duwgJiaGO3fuALB06VJMTU0RiUSUlJTQ3NyMh4cH58+fF0rcwcFBgoKCyM/P\np66ujl9//RVra2umpqYEMqysrAw3Nzfy8/OB37L4wYMHfztAHR2OHz9OTk4O9+/f5/79+zz55JPY\n2dlhZGTEjh07UKvVvP322+jo6GBjY0NpaSl5eXlcvnyZpKQkEhMTATA1NaW5uZklS5ZgZmZGX18f\nMpmMtLQ0kpKSEIvFLFiwAG9vb0JDQzl48CARERFMmzaNixcvkpKSwvz58xkeHqaoqAgnJyf8/PyQ\nyWTs2LGDjo4OnJ2dSU1NRaVScf/+fczMzIiMjKSjo4Py8nIaGhq4e/cuY2NjNDc3Y2FhgVarpbu7\nm9bWVnbu3ImFhQXXr1+nqakJGxsb0tLSiIiIwNHREUNDQ86fP89rr73G6tWrsbW1RSQSERMTg5GR\nEQkJCbS3tyMSibh//z6+vr588sknmJqaAlBbW0t9fT3u7u5MmzYNDw8P9PX1yc7OZunSpTQ2NrJx\n40ZSU1Npbm6mqamJ3Nxcent7EYvF3Llzh4qKClQqFePj49y6dQuAqqoqgXR88skn8fPzIz09nZ6e\nHr777jsWLlxIYmIi+vr6fPvtt6xcuZLe3l6Ki4tJSEjg3Llz+Pv7M3/+fKHnh99a0JUrV2JsbMz4\n+Dj+/v5s376d3t5eent7UavVVFRUEBwcTGdnJzk5OQD885//RC6XY2JiwtKlS1EoFPj4+PDxxx8D\nUF1dTVtbG11dXdjb23Pr1i36+/vZtGkTLS0trFixgurqam7duoWuri5WVlYMDw8THByMXC7H0NCQ\n6upqpqamsLKyoqOj4w/Hru7+/fv/8MV/Fg4dOrT/yy+/pKenB19fX4GhLS0txc/Pj6GhIVxdXenr\n6yMzM5PnnnuOqakpTp8+LUwiFi5cyNmzZ8nLy2PPnj188803lJaWYm9vT11dHZOTk7i4uGBmZkZK\nSgpBQUG4u7sLjPzdu3cJDw9n3rx53Lx5k7lz52JpaUlRURGBgYHIZDLq6+tJTEwkLi6OFStWcPr0\naaytrenq6sLPz49Lly4RFxdHc3MznZ2diMViMjMzBUZ5wYIFlJSU0NnZiUwmw9/fn8LCQnx9fcnJ\nyWHz5s2Ehoby8OFDqqurhcA2NTXF19eXpqYmZs+eTWtrK9999x1xcXH4+PggFovJysoiPj6enJwc\nPDw8mDFjBmq1GldXV8LCwtDV1RXIp6mpKYaHh1Gr1ejp6aFQKFi4cCGTk5P09fUhFovZtWuXwGc4\nOTnh5eWFkZGRcH5r1qyhtraWyspKfH198ff356effkKj0WBhYYFcLmfhwoVkZmYK7PfZs2dZtmwZ\nLi4uzJo1iwcPHqDVavnhhx8oKSmhq6uLt956i6VLl5KXlyf0179XDkNDQyQkJKCrq4ufnx9qtRoP\nDw86OzuprKxkxowZODk54eDgQEBAAPPmzSMrK4vs7GzWrl3LE088gUQi4e9//ztKpZJZs2Zx7do1\nZsyYgb6+Pjt27KC6upqUlBSuXr3K+vXreeWVV5iYmMDNzQ1HR0fkcjlbt25l2bJlnDlzh
pdeeonc\n3FzefvttPvnkE+7du0dlZSWWlpbExMQQERGBq6srg4ODFBYWotVqiY6OJjc3l7y8PJRKJdXV1Xh5\neWFhYUFMTAxHjx7lvffeY3h4GFdXVzQaDdHR0XR1dXH69Gnc3d2prq7GyMgIqVRKc3MzgYGB2Nra\ncuPGDUZHRzExMaGlpQWVSoVMJqO9vZ38/PyO/fv3f/N/i8f/iKTw2Wef7ReJRGzevBmFQkFYWJjA\nph89epQLFy6QlJREQ0MDW7ZsEZ7Yurq6zJkzh6CgIAoLC4mJicHGxobCwkJSU1N5++23kclkPP74\n40L1UFpaSlBQEBKJhK6uLqysrAgLC6OmpoakpCSmTZvG0qVLaWlp4ddffyU6OpqcnBy8vLz4+eef\nKS8v5/nnnycjI4OEhATMzc2ZNWsWFy5cYPbs2aSmpqKvry8QgRYWFmzbto3i4mKMjIxYunQpJiYm\nFBUV0d3dzcGDB8nNzSU3Nxe5XM4nn3zCp59+Snp6On5+fgwMDNDT04NarcbR0ZEDBw6waNEiwsPD\nefjwIffu3UOpVDI8PIypqSkhISHcvXsXlUqFpaUlX3/9NYaGhpSWlhIVFYWlpSVyuZzGxkZiYmKI\njo5mfHycK1euIJFIMDQ0RC6XU1VVhaWlJTKZjMTERAYGBvDw8BBaCDMzM+A3UldfX5+hoSECAgII\nCQnB1dWVnJwcGhsbcXFxwdjYmICAAKHiCA8PJzk5GVdXV4KCgoiOjiYwMJBr167x2GOPcffuXerq\n6oSnm5ubGzU1NbS3t9Pe3o5EIiE0NJTk5GQ8PT0ZHh7G0NCQ8vJyvL29cXd3p7e3l7y8PMzMzEhM\nTGRkZIS9e/fy1VdfsXHjRurr65k5cybu7u4MDAywfv16geScMWMGiYmJTE5OsmTJEiIjI8nPz0eh\nUDAyMoKPjw+mpqb89NNPPP/881RUVLBr1y5aWlqIj4/n73//O25ubkxNTeHn50dFRQWDg4P09PQQ\nFRUlcE9/+ctfuHHjBvPmzaO2tpabN2/i4+PDqVOnWL16NfHx8bz22mt0dnYSEhJCdXU1AQEBuLq6\nUlJSQmhoKKOjo6SkpFBfX4+/vz9arRYLCwvCwsIwMTFh+vTp9PT0UFZWRkNDw/+epPDee+/t9/Hx\nQSKRkJ+fz6VLl3BycsLNzY3IyEh+/PFHFi9ezK1bt4iMjCQzM5Ply5cTGRlJUlISjY2NbNu2jezs\nbK5du0Zvby9lZWX4+/ujVCqFntDIyIi5c+fi6OhIY2Mjo6OjtLe3U1xcjJ+fH9HR0VRXV2Ntbc39\n+/dZvnw5VVVVzJkzh76+PnR1dblz5w4xMTHU1tYyMTFBcnIy3d3dlJSUoKOjw2effcb777/PkSNH\nuHfvHnl5eVy8eBFzc3NiY2NZv349bm5umJubIxaLSU5OZs6cOVy6dIkPP/wQiUSCTCZDq9WiVqsx\nNTVlYmKC3bt3k5+fT1hYGO3t7WRmZtLb24tWq2XhwoX09/cLo7uhoSHa2tro6+tDqVQyODjI2NgY\nhYWFBAQEALBixQpu3LjB+Pg4gYGBuLq6Mjo6KhCgbW1tfP3115iZmSGXy4Xpw/DwMNXV1QL5plQq\nUSgU3Lp1Cx8fH1QqFfb29uTl5eHk5ERhYSGxsbFcv36drVu3YmpqSm9vLwqFAnd3d4aGhoR5+sWL\nF4mMjMTOzo6oqCisra3R0dHBzMyM7u5ubG1tGRoaIiIigsTERORyOZmZmQwPD+Ps7MyqVauwtbUl\nOTmZvLw83N3diYyM5OjRo3z++eeMjIywYsUK1Go1SqWS8vJypFIp06dPR1dXl9TUVCoqKjh8+DC7\nd+/Gzs6OtLQ0BgYGcHNzIzg4mPLycoyNjQXOxtnZGQMDA4qLizE0NEShUBAYGIiLiwtr164lPT2d\niYkJ9PT0MDExITU1Fa1Wy8aNG/nll1+Ijo6mubkZtVqNk5MTXV1dQqV64MABurq68PHxoaKigpCQ\nELRaLf39/ZiZmSESiZBIJBgbG7Np0yb+9a9/YW5uLkyITp48SWpqKnZ2dvj7+3Pnzp0/lBT+IwxR\nVlZWLF68GLFYzOTkJN7e3gwMDKCnp8fy5ctpb29nfHycRYsW8eDBA3bt2sW7777LwMAAmzZtYmho\niD179tDR0SGIfwBcXFwwMjKipqaGU6dOCU+9Q4cOUV5eTkpKCvr6+ixdupTLly/j4eGBWq3miy++\nEAQi7u7ujI6O0tDQwHPPPQf8Ni6ytrZGrVbz1ltv8e2337JhwwZ27dollLivvvoqra2t+Pn5ERER\nwfbt2/H29mb+/PmYmJgQHh6OWCzG19eX2tpaAIqKiliyZAkZGRlYWlqi0Wjw9vZm4cKFvPvuu/j4\n+HDs2DHmzp3L+vXr2bx5M3V1dbS2thIaGkpISAiTk5Po6OgQFhYmVDLBwcHY29ujo6ODkZERv/zy\nC5s2bRJ4k5iYGAoKCpicnESlUnHnzh1CQ0PZunUrnp6eWFpasn79egwNDVGpVJSVlQntRlVVFUFB\nQXz44YfY2dlx8+ZNMjIy+Mtf/sL4+DiOjo4UFxdjZmbG4OAgV69eZe7cuaxcuZLPP/9cIGavXLkC\nQHBwMJcvX2ZkZAS5XE5+fj5xcXGIxWIiIyOZmppCq9Wye/du5syZw8svv0xPTw+VlZVcuXIFhULB\n8PAwK1euxNramhMnTgDw1VdfUVFRwXfffcfhw4dxcHCgq6uLiooKQTj1u0jJwcGB2bNn8/DhQ3R0\ndAgJCSEyMpIrV64wODjI008/zb59+wCwsLBAX1+fN954g9jYWGpra7l//z5VVVUcPnwYqVSKs7Mz\n+fn5uLm5IZVK8fT05PLlywwNDXH79m0yMzOZnJxELBYTFRUFwMWLF3n//fd59dVXCQsLw9vbm4sX\nL2JlZYWenh5eXl6cOnWK3NxcWlpaqKurE1rL3xPqiy++yJYtWxgYGCA2NvYPx+N/RKXw6aef7jc2\nNubBgwdERUXR0dHBzJkzyczMpLOzk6CgIO7evYuBgQHu7u5kZGQQFRVFeno6Z8+excbGBjs7O4yN\njfHx8cHNzY3r16/z/PPP4+npiVwux93dHV9fX/z8/EhNTcXe3p5ly5axdu1adu3axfr16zEyMsLL\ny4u5c+eir69PWVkZWVlZVFRU8Prrr/PDDz+QkpLCjBkzmD9/PidPniQ3N5d169bR3d2NtbU1urq6\nDAwMsHXrViorK4mIiECpVNLU1ER3dzd9fX3cuXOHuLg4urq6GBoaYsuWLXz++efExcXR2NiIvr4+\nMpmMuXPnkpiYyIMHD0hISGB0dJQNGzYIpOPt27fx9/dnbGwMpVKJtbU1xcXFwg+1r6+PrKwslEol\nISEhmJubMzk5SXNzs9BK/P3vf+fpp58mJiYGHR0dpk2bRl9fHxEREVy5coW2tjbmzJlDSUkJ3d3d\nJCcnExwcjJ6eHvPnzyclJQULCwshOdnb26NW
q6mqqsLY2JhXX32V3NxcjIyMEIlELFu2jDt37ggk\npZ+fH99++y1BQUGkpKTg4ODAnDlzcHd35/jx4yxevJgHDx6Ql5fH5OQk8fHx/Pvf/xY0LP/6179Y\nvnw5EomE1NRUdHV1yc/Px9vbG5lMhlQq5fr166SlpfH1118THR3NTz/9xJo1a4iOjsbX15fOzk6S\nk5MFgdyGDRv46quv8PT0pLa2VtBfWFtbk5WVxejoKOHh4Vy7dg1HR0eWL19Oc3Mz9fX19Pb20t3d\nzezZs5mcnCQ7O5vc3FwaGhqQSCS8+OKLnDhxAlNTU0xNTXFycmL9+vUMDQ0RHh7OTz/9RHZ2Nm++\n+SYajYaLFy8KKluRSIStrS1FRUUYGhqyYcMGOjo6mD59Oh4eHtTW1gqircbGRpRKJebm5hgZGZGc\nnExlZeX/nvbhyJEj+yMjI1EqlQLjn5eXh6GhIfX19UIPrFKpyM7OJjAwEGdnZ6ZPn45cLiciIoLO\nzk5BaVdUVER2djZr1qwhJyeHvr4+9PX1efjwIUqlkuDgYKKjo+no6KCoqIht27ZhZmbGpUuXyMnJ\nYXx8nIqKCtra2ggMDEQkEmFgYEB1dTV3797lueeeo6amBvityhkfH8fY2FgQLT322GOCau33KmNk\nZARPT0/ruC6tAAAgAElEQVQ6OzsxNjbm3r172NnZUVNTQ0FBAffu3WP58uW0tLQIhF5aWhpLliwh\nICCAW7duCS3K72PY4eFh7t+/j7OzM1qtFrlcTlNTE+Pj42RmZlJQUICNjQ3h4eFcv34dOzs77t69\nS1BQENevX+e5555DJpNhbGxMXV0dV65cwdDQEIlEQlNTE2NjYwQGBlJeXo5cLic3N5f4+HhB/GRq\nakpLSwsLFizg0qVL9Pb24u7uztKlSxkcHKSqqorBwUFhbp6RkYGTkxPW1tYcP36cpqYmRkZGMDU1\nRaVScffuXdauXcupU6dISkrCy8sLsVhMWFgYRUVFhISEYGpqysDAACqViomJCVxcXJg2bRpKpVKQ\nm6tUKjQaDZaWliQnJ1NeXs6rr76Kvb09EokEExMTNBoNx44dY3x8nJaWFmxtbenv78fBwQGRSER4\neDhz584VgtzT05OGhgY8PT3ZvHkz9+/f5+eff+Zvf/sbZ86cob29nWXLllFdXY2FhQVisRg3NzdB\ndwGwZ88e3n33XXbv3o1Wq8XIyIjW1lZMTEzIyckhMDCQiYkJrl+/jq2trSA1NzQ0pK2tDS8vL0ZH\nR7GxscHFxYWcnBwWLVrEDz/8gKGhIRMTE6xYsYKwsDB6enqYnJzEz88P+G1KlpeX978nKbz//vv7\nExIS8PT0BCA6OlpQvxkbG5OVlUVVVRVbtmzB2tqa1tZWZs6cSUtLCwYGBnzwwQfIZDK2bNnC8PCw\nICWdNm0aTzzxBB4eHrS3txMUFERZWRmpqanMnDmTc+fOCT322bNncXR0ZGRkRJhvj4+PI5PJiIiI\nID8/n6mpKUGA09jYiFqtJiEhATc3NwYGBli8eDG//PILExMTzJw5k4GBAb766ivWrFlDe3s7Xl5e\nWFtbs2DBAszNzWltbUWlUjE1NUVeXh6hoaGEhYWRl5fHY489RmdnJ6Ojoxw/fpz169cL6sUNGzZw\n5swZ+vr68Pf3R6PR0NzcTF5enuDp2L9/Py0tLbS0tNDb24urqyudnZ3o6OgwMjKCRqPB09MTrVZL\nSkoKTk5OzJ49Gx0dHdzc3Kivrxe8Ca2trTg4OKCjo4NYLGZqaoq+vj7CwsKE4JPJZILPpLy8HGtr\na8rLy2lrayM9PR1TU1Ps7e2ZMWMGR44cwcTEhMWLF9Pf34+fnx8rV67kyy+/5JVXXqG7u5unnnoK\nY2NjKioqaG1txdPTk7t379LY2Eh/fz/z5s2jvb2dHTt2cOnSJRobG9m0aRP9/f1s3LiRzs5OysvL\nWbNmDefPn2fGjBnk5eURGxuLnZ0durq6mJqa4uzsjFQqxdLSUhAX5efnk5OTQ1hYGMnJyUxMTGBr\na4u9vT2jo6MkJiaSmZlJRUUF/v7+uLm5ERISwuXLl/H29ubBgwfExsaiVCoJDAwkLS2N0tJSVCoV\nZmZmNDU1YWxsjIuLCwDW1tbk5+dTW1tLS0sL+fn5qNVqpk+fzuTkJJs2baK3txcrKyu6uro4duwY\nnZ2drFy5kh9//JHp06cjEolITU0lJiaGzz//nL179/Lee+9ha2uLRqPh4cOHVFVV/e/hFCwsLLh2\n7RpZWVmkp6eTlpbG4sWLsbe3Z+PGjdjY2BAbG8vp06dJSkrC3NycK1euYGpqypdffsn+/fsJDw9n\nYmKCsbExQVgyMTHB6OgoN27cwMvLC5lMhpeXF7t27QJg9uzZWFlZUVNTI6jmRCIR77//Pjk5OVy4\ncEGoHqZPny6YeqZNm8b8+fOJi4vjxx9/pLW1lTt37tDf3098fDwRERH89a9/FbK7QqFALpfj4OBA\nYmIin332GWlpachkMjZs2CD0e0qlkuXLl+Ps7ExOTg5RUVGYmJgIZJa+vj4jIyPC6DAjI4PAwEAa\nGhpwcXFhx44dKJVKlEolJ06c4IknnmBsbIyysjLB8FNRUUFSUhIzZ87k7bffprCwED8/P5YuXUpP\nTw+lpaV4e3tjYGAA/GaqEYlENDY2IhKJKCgoQCaT4efnR2FhIf39/Wg0Grq6uvDy8qK8vJwHDx4w\nODjIrl278PLywtPTk+zsbNzd3QWBzrPPPkt6ejoODg4CMw/wr3/9i8DAQKampqisrCQyMpLu7m7e\neustAgMDiYqKIiQkhKqqKnp7e3nxxReFFrGpqYnm5maCgoKYNWsWvr6+6On9Zu95/PHHWbhwIZcv\nX+bs2bMsWrSI3Nxc6uvrsba2Ri6X4+rqikKhQK1WExQUxL179/Dz88PW1haFQkF3dzcnTpygsrKS\n5cuXC79fT09PwUQWGhrKnDlzuHPnDlKplMrKSmQyGUFBQcTExGBra4uFhQVXr14V5OgpKSl4eXkR\nEBDAO++8A8Bf//pXrK2t8fDwYOvWrdjY2NDT00NISAhHjx5lbGyM4uJiPDw8hEpxz549jI2NERwc\nzMWLFzl48CABAQHo6+sLCsw/gv+ISuHw4cP7t27dKijSDAwMKCoqwtnZmRMnTgjz5qVLl/Lyyy+j\nVqu5evUqIpEIb29v/P39qa+vZ9asWRQUFAgjv5MnT3LmzBkWLFhAeXk5LS0tdHV10dzcTGJiIgqF\nghkzZtDb20tsbCxubm6YmZlhZWVFQEAATz/9NIGBgcyaNYukpCRmzJjB1atXeemll+ju7mbx4sVM\nTk4yNTVFWFgYhoaG1NbWcvbsWQYHBwkMDGTJkiWo1Wru3bvH+Pg4lpaWBAYGUl9fL4hSJBKJwKbP\nnz8fhUJBa2srAQEBZGZmoqOjQ2BgIGKxGDs7O4aGhvj888+5fv06Q0NDzJ49m59++om+vj7Cw8OR\nSqX
MnTtXUBqOj49TUlLCxo0biY+PF9yYarWayMhIQbDz4osvolQq6e7uRk9PD319fcRiMUFBQYJD\n0MLCgv7+fpYsWUJHRweOjo6Ehoby/vvv8/PPP7Nr1y48PDzYuXMnr732Gt7e3hw5coT4+HgqKytx\ndnamoaEBHx8fQYRz8uRJLl68SFtbG9988w3u7u5CcqusrGTWrFmsWbOGsrIyRkdHiY+P58qVKyxZ\nskRwUDY1NWFiYsKqVavIzs4mIyOD8PBwysrKuH37NqGhoTg6OlJZWUlISAgpKSmsX7+eiIgIRCIR\ne/fuZceOHfzzn/9k//79FBYWMjo6ioODAwqFAg8PD6ZPn05/f7/QBvx+z+bmZsbGxgRy9Nq1a+zd\nuxepVEpHR4eQXK5fv86yZctwdHREo9FQX1/PgwcPqK+vFyTWn332Gffu3ePKlSuCA/Z338bvoqye\nnh6cnJzw9vamqqoKHR0dYez8O3/l5uZGRESEYC5rbW2lqKjof0+lMDg4iLW1Nb/zCoODg0xNTTF9\n+nRmzpzJ6Ogoq1atorCwkB9++IG8vDw++OADRCIRU1NTFBUVCUHu4eGBXC4H4PXXX2fBggX8+9//\nxsDAAKVSKdho/fz8kMvlZGRk0NLSQnFxMTdu3KCiogKRSERSUhIHDhygqKiIsbExbG1t6erqAqCh\noQG5XM4LL7xAf3+/8DQeGRlBpVLh4ODA/v37cXBw4OrVq5SUlODv749arcbGxoaIiAh8fX1xdXXF\nwcGBX375BYBnnnmG1tZWzM3NkcvlpKSkoNFoaG1t5dKlSzQ3N6Orq8u9e/cIDQ0lJycHXV1diouL\naWpqoqGhQbBDHzt2DH9/f/Ly8vDy8iItLY0rV64IbsLq6mpMTExoaGigt7eXlStX8u9//5uYmBgq\nKioIDQ1lcHAQlUollLv5+fkMDAwwMjKCgYEB06ZN48aNG7S3t3P+/HmWLl2Kq6srpqamXLp0ibt3\n7zI5OUlycjKTk5OCJ0QikXDo0CHmzJnDp59+Sn19PTt27ACgvLyczZs3o9FoGBgYYM2aNXz88cdM\nTEywdOlSZDIZeXl5rFmzBhMTEzw8PFi7di179uzhtddeo6SkBJVKha2tLZmZmfy+RMjY2FiwNbe0\ntPDjjz9SU1PDpk2buHfvHjt37iQrK4utW7dy4MABLCwsGB4epr29nRUrVnDmzBkOHTpER0cHp06d\nEioQPT09XnnlFRQKBTt37hTs22+++Sb9/f3U19ezfPly3N3duXr1qjBVs7GxYc6cOQQEBLB9+3aq\nqqq4ffs2Tz31FABbtmxBJpPR2dlJW1sbUVFRxMXFMTg4iKmpKf7+/nR2dlJTUyNI1eVyOXZ2dhQU\nFNDU1CTI6lUqldAm/xH8RyQFKysrYfy0du1aFi9ejIWFBc7Ozujp6eHm5kZzczPu7u5oNBokEglH\njx7l5s2bLFy4kKeffprw8HCKi4u5c+cOvb29ADg4OJCRkcHmzZsxNzdHoVAQGhoqyJIfPnzIwoUL\nUalUZGVlMTAwgFgsxtzcnCeeeII9e/YglUrJysoiJSWFTZs2AQjjo3Xr1uHt7U1MTAwTExMUFBQQ\nGBjIY489xrp161CpVOzduxc/Pz+ioqKE0efp06dpbW3l3Llz9PX18de//hX47QdWVlaGo6Mj5eXl\nwvzez8+P5cuX09bWRlNTExEREdjZ2VFWVoaxsTHDw8P84x//YPny5ejq6mJjY8Pq1aspLi7G398f\nhULBsmXLcHBwwNLSko8++ggDAwPa2tqIiIhgeHiYoaEhQROhVqu5c+cOzs7OdHd3k5qaire3t/Dk\ncnR0JDMzk6GhIV599VXGxsaQyWR4eHjQ09NDdXU1169fZ+HChYIzUU9Pj4ULFzJv3jycnJwIDAzk\niy++4PHHH2f27NnCWNbQ0JDt27dTV1eHmZkZEolESKDp6ekMDAzw/PPPk5qaSkNDA2vWrEFPT4+U\nlBRWrlyJVCqlvr4eqVTK9u3b8fDwAKC+vp6qqio8PT1ZtWoVn376KaWlpaxevRo9PT0iIyNxd3dH\nIpFw5coVnJycAGhpaeHTTz/l2WefJTw8nG3btiGTyTh37hwA4eHhgluzv7+fyclJDhw4QHx8PHl5\neTg6OtLR0YFIJOLIkSN8//33XLhwgfz8fAwNDQVNzRtvvEFLSwsFBQUAwvkaGRkxODgoqHzt7e2p\nqKgQiFVfX19SUlKYOXMmBQUFtLe3s337diYnJ2lvb6ejo4PKykoUCsUfjsf/iKTQ1NTEwMAA/f39\nTE1NkZaWRllZGd999x1yuRxfX19cXFwEA4tarUYmk7Fq1SoSExOJj4+nqalJMCWJxWIANm7ciKWl\npSAqMTc3p6Ojg3/+858EBwdjYWGBq6srhoaG7Nu3j23btuHi4kJ5eTnnz59nYGCA3t5e7t+/z5tv\nvik4zUxNTUlJScHZ2ZkLFy5gYGDA5s2bKS8vZ9myZQDcuXNHMGRt3bqV8+fPk5GRIUiif5+xh4aG\nkpqaCsDAwADZ2dmCE/L3sWVCQgKDg4PExcXh5eVFRkYGQUFB+Pj4cOLECWpra8nMzMTa2pq6ujrS\n0tJQqVScPXtW2I8gk8mwsLDg6NGjiEQiFixYgKmpKR9//DHe3t74+PjQ0dHB7du32bBhAzExMYyO\njhIREYFEIsHBwQEbGxumpqYoKCigq6uLf/zjHxw7doyUlBQ6OjoICwtjdHSU0tJSNmzYIPT4hw8f\nprm5mZSUFOrq6ggJCcHT05OAgABqamoIDw8X5vPFxcUYGBgwMjKCm5sbR48eZdq0aejo6BAUFER/\nfz/Xr19n7dq1aDQacnJySEpKwtnZmTlz5uDr64uhoSFTU1PMmDFDMLGNjY3h6OhISUkJ7733Hnl5\necyZM4eIiAjMzc05cOAAycnJiEQiTp8+TXl5Ob29vWzatIm4uDhKS0tJSUlBqVQyOTmJVCoFfvPB\nlJWVYWlpyaVLl6isrOT48eMolUoqKipwcXHB3NwcY2NjbGxsOHPmDJGRkSxZskTYX2Fubo5arWbJ\nkiV0d3cDsG3bNsRiMV5eXoJuZ8GCBYSEhPDYY4/x8OFDBgcHcXR0ZPfu3SiVSlxcXCgrK8PIyAgL\nCwuBH4mIiMDY2PgPx+N/RFJwdXXl0KFDGBsbc/36dVauXMlLL72Ev78/Q0NDpKWlYW1tzfz58zEw\nMKC1tZUVK1YwNTXF5OQk0dHRSCQSwZBTUfHbKsjvv/+e5uZmlEolPT09FBcXo9FoSEpKEnYvNDc3\n093dzZEjR6iqqhK2Nm3fvl3wL4yNjdHT08P06dMB6Onp4f333+fmzZuEhITw/fff89VXX+Hk5MTu\n3btZt24dycnJ6OvrU1JSwttvv82qVavo6+vjpZdewsLCgmeffZbu7m66u7tZtGgR8FtS+J1x1mq1\njI6O8sorr1BQUEB5eTklJSU4OjqyY8cOpqamCA4OxtnZmZkz
ZzI2NkZfXx8lJSW4uLggk8mIj49n\ny5Yt9PX1oVAosLS0pLCwkPDwcFauXMmuXbuIjY3FzMyM1tZWgoKCyMvLo7W1leTkZM6dO0dBQQGR\nkZFcuHCBnJwcRkdH8fHxISkpibVr1xISEiJslUpMTBRY+5s3b1JbW8uuXbuQSCQEBwejVCqJjY3l\n7NmztLa24uTkxIYNG7C0tCQzMxP4TTbt5OSERCJBoVDwO9fU09Mj7Daoq6ujsLCQ1tZW+vv7uXjx\nIl5eXvj4+HDr1i06Ozvp7+8nPDwca+vfFg0FBQVx8uRJpqam8PLyQq1Ws2rVKgYGBqirq2PdunV8\n9913GBkZ0dfXx8yZMzEyMuKdd97BwMCAn3/+mY0bN5KcnCzoZwBiY2MpLCzE3d0dLy8v+vr6iIqK\nQqvVsnfvXhITE9HT08PW1pYHDx6wZ88eSktLSUxMpLKykurqakJCQmhvb+f48eMkJCQA4OzsjLW1\ntSDbnpiYwMnJSXDVarVaFAoFbW1tjI+PI5fLBXNbU1MTtra2+Pr6olKpaGtrIyUl5Q/H439EUujr\n66O3t5eBgQFB8FJaWiro0DUaDbdv32Z8fBxXV1ecnZ2pq6vjxo0bODo6MjExwaJFiygvL8fDw0Po\n0RMSEnjnnXcYHR0lKytL2IMQFhaGhYUFwcHBlJWVIZFI2L59O0NDQ1RWVrJmzRrS0tLIyMhALBaj\nr6+Pvb099fX1APT29vL4448zNTWFsbExS5YsEbYglZaWcuvWLeGpaW1tjYuLixA4165dE5ZqfPDB\nB4yNjZGXlwcgrOkyMzNDR0eH9957j7NnzzIxMSG4+FJSUhgdHSUgIEB4L/hNZanValm0aBG6urrU\n1tYyOjpKTEwM+/btE7iLsLAwmpqaePXVV6murubhw4cMDw9TX19PXV0dCxcuZO7cubi6urJz505c\nXFy4du2aQOLJ5XIsLS158cUXqaurw9DQEH9/f+7du0dERAR79uyhqamJp556ivb2dkE8deTIEVpa\nWhgcHESr1dLW1kZnZydff/013d3dQoUVFxfH0NCQMA4+dOgQoaGhtLS0oFQquXr1KmNjY4yOjjI1\nNYVUKhW8MjY2NoICceHChRw+fJimpibgt7HfBx98wKpVq1i8eDEzZ86kpqaGV155hXPnzmFtbU10\ndDTm5uZ4enpSVFTE008/ze7duxkbG8PGxobMzEy8vLz46KOPaGhoAEAqlfLyyy9z48YNXFxcMDAw\nEDQHIpGIF154gTt37mBvb09ycjIrVqygubkZuVyOl5cXcrmcM2fOsGrVKuLi4gT9i1qtRldXl3fe\neYcDBw7Q3NxMTk4OMTExQuX7+7i0pqaG77//XjBJGRkZceHCBYaGhnjqqafIzMwUKps/gv+IpGBk\nZISHhwcTExNUV1cjl8vJysrCwcGBr7/+Gh0dHUxNTbGysuLmzZv4+/sjEomYOXMmhYWFzJ49W9jR\nqKenR1FREQDnzp2jtraWX3/9lVWrVnHlyhV8fX1RKBSsXr2a/v5+LC0tWbVqFenp6RQUFLBmzRpa\nWlq4d+8e7u7ueHp6EhMTw7lz53BwcAB+Kxk/+OADcnNzKSkpQVdXl1WrVmFiYkJoaCg//vgjDg4O\nhIeHM2vWLHbu3EltbS3Tp09nw4YNNDY2Eh8fL4hb3N3dAbCzs2PRokVotVq0Wi1bt25Fo9EQGxtL\nb28vUVFRqFQq8vPz6e/vx9/fn9DQUIaGhti5cydRUVFcv36dgoIC8vPzGR4epq2tjeeff57+/n58\nfHwEZ6ZGoyE9PZ3IyEi0Wi22trbCyDYrK0sQjqlUKmJjY2lqamJwcBA7OzsqKiqwtLRk8+bNDA4O\nkp2dzcjICDdu3BC4id/XoNnZ2dHa2srSpUuRSqVUVVWhq6tLS0sLTzzxBFKpFG9vbz799FMAvv32\nW2FUOTIywqpVqygoKCApKYmYmBhhf6eLiwtyuZyKigqcnJyoqqpCo9Hg5+eHRqNBrVZz5MgRoWy+\nefMmv/76K9988w3FxcW88cYb/w977xlW5bmu7Z6D3gZIUYqgUgPSESmKNEFFRVRUjC1Ro84YNcWS\nzKwkTk3iNDGWRGMSNMaKxEKwgQJipUgXEUE6SO8C0hn7h3M8e63j+I5vZe211/HNtfd6/lAEZDDG\n+7z3c9/XdV4cOXKEd955hzfeeIOEhAQ2bdoEQH5+PrW1tfT19dHX14eZmRmrV68WZKv+/n5x8dbX\n19Pf309qaioaGhrk5eWho6NDVlYW586d4+zZs/T396OhocGuXbvo7e1l8+bN1NXVMTIywgcffICS\nkhJ79+7l5MmTorIpLy+ns7OTefPmsWLFCuzs7IiPj+fp06cCc7dv3z7eeOMNEhMT8fLyEtqLtrY2\nALS1tcnJycHd3V0cqf/M+i9BvP9Hl0Qi+T//S/zP+p/1//2VLZPJ3P+9L/o/Bm7918vExIT4+Hji\n4+MZGRmho6MDAwMDiouLmTVrFoWFheTn5/PkyRN++OEH1NXV+eWXXzA1NWXWrFk8e/YMGxsbhoeH\nuXTpEjKZjF9//ZWbN2+ybds20SkuKioiJyeHuro6FBUV8fDwwMDAADs7Ow4fPoyDgwN6eno4OzvT\n29vL06dPUVRUREVFBVtbW3bt2sWlS5fIzc3lo48+4uXLl8JG7eXlhb6+Pn19fVhZWQkTU319Pa6u\nrmRkZKCkpMTw8DDh4eHcunVL3J1v3brFuXPnuHjxImpqalRXV5OdnS38/erq6ly9ehUnJyfs7e05\ne/YsgYGBSCQSqqqquH37Nh988AElJSWEhYVx7Ngx4QF49913CQsL49atW0Ji7erqyowZMzh37hz3\n798nNzeXiRMnMnPmTPLy8qiqquL999/HwMCAjIwM0SdYuXIliYmJNDU1cefOHUxNTQkKCiI3NxdL\nS0sxlr1z5w5aWloEBQXR3d0tDFclJSXIZDJMTU2ZPHkyly5dwtzcnJKSEiZPnkx4eDjr16/H2tqa\n/Px87t69S0BAALq6unzxxResWrWKV69ekZ6eLjwVJiYmrFy5UhCsX716JdShv//+OxKJhKioKFas\nWMGKFSsoLi7m2rVr7Ny5k4sXL+Lr68utW7eIjo5m9+7duLm5kZycjEwmQyqVMnv2bBISEujv76e2\ntpbAwEBUVVWxs7MTlu78/HxiYmIwMjIiPDwciUQiOBQ7d+7Ezc2N9PR0li9fTltbGzdu3BB6BAsL\nC0pKSsTEx9XVlS1bthAbG8vLly/Zu3cve/fupbKyEj09PdTV1Xny5AkTJ04kJiaGkJAQsrOzefTo\nEWvXrsXe3p59+/axZ88e4uPjRQOysrKSXbt2/anr8Z9CvPTxxx//7cmTJ0JmOzw8TGNjI0uWLGH8\n+PEoKCgwevRoPvroI1JTU5k0aRLDw8N4e3vzzTffMH78eJqamrCzs2PlypUMDg5y+fJltm/fjqmp\nKSUlJQwMDFBUVIS
NjQ1BQUFCWDI0NMSTJ08oKChgxowZTJgwgYSEBMFlGBoaEufxyZMnc+3aNaEF\nWLNmDTU1NYwbN04Qjjo6OigoKGD69OmMjIygr69Pe3s7pqamYmoya9Ysbt68KYAqU6ZM4cKFC3h7\nexMTE8OkSZPYvn070dHRDA0N0dDQIMZQcu7D/PnzOXr0KAMDA0ybNo2goCBsbGyor6/H0NAQX19f\nOjs7Wbp0KV988QXBwcGCLiwv3VNSUpg6dSrl5eXMnz+fhw8fMm7cOIyMjASiraqqSszCAwIC0NTU\nJDIykpSUFBYuXMj3338vMG8bN27kzp07zJ49WyDX3dzc+Omnn+jo6GDt2rWkpaWhra1NeXk5UqmU\n4uJioYhMSUnh3XffpbW1FT8/P9zc3Dh69Cj79+8Xx7Lk5GQCAwMZHh4mIiKCjIwMcnJygP+b9Vld\nXY2GhgavXr3CwMCAhw8fEhYWxsqVK1m8eDFbt27l+fPnvPHGG+Tk5GBubs7AwAA2Nja0tbWhra3N\nvXv3yM3Npba2lvDwcNLS0ujp6WF4eJhff/2V3t5ekpOTUVZW5uHDh2zfvh0jIyPOnj1LU1MTjo6O\n7Nq1i5CQEB4/fkxgYKDwxGzZsoXo6GihGYmKihIy8aCgICIjI4XgbuPGjQwNDQmfyf3795kzZw7N\nzc3o6uqyb98+li9fjq+vLwkJCXh4eBAcHCygLvKbVHp6Oo8fP/7vI14yNjZm586dAo4plUqZN28e\nfX19QtY8btw4fvjhBzQ1NcnPz0dLS4uRkRHef/99nJ2dcXJyErp8ead1zJgx1NXV0dDQgJ2dnbho\nL168iK6uLo2NjZSUlNDd3Y2vry8tLS0C2PHbb7/h7e1Nfn6+eEG0trYCsGHDBubOnUt3dzf+/v4Y\nGRlx/fp1zpw5g6WlpRDqjIyM4Ofnh5qaGo8fPxbd6WXLluHi4kJ0dDQTJkwQ51Nvb282bdrErVu3\n+Oyzz1BSUmLevHno6Ojg7OyMu7s748aN4/r161y9epV169YJutONGzf4/vvvsbS05PHjx5w/f57l\ny5cTFRXF4sWLOXz4MGlpaTx48AAvLy/KysoYO3Ysp06dEuizwcFBbt++TUFBgTjbamhoUFhYSGBg\nIO7u7uzbt4/JkycTHBxMcnIyurq6AhtWUVHBo0ePRH9HKpWSkZEhNqaoqChcXV2RyWTU1NTg5OSE\njl8fRr8AACAASURBVI4Onp6egqU4evRovLy8KCgo4Mcff+Srr76iqKiIPXv2cPDgQTw8PFi/fj3d\n3d08fPiQ6dOns2rVKvT19TEzM+PBgwe4ubmRlZVFcnIyvb29wOuewp49e1i3bp2ABCsoKLBo0SK6\nu7v56quvyMnJQSqVoqOjw/vvv8+kSZNYtmwZDx48wN/fHy8vL1xdXZk7dy7Pnj0DwMvLi8jISM6c\nOUN3dzfJycmMHj2aoqIi0R+ysLDA3Nyc3bt3Y2xsTHt7Ox4eHmzdupVXr16xf/9+XFxcyM/PF2Pv\noqIi1NXVxaYzMDDAs2fP6Ojo4Pr166K/sXLlSgoKCqivr8fIyIgjR47w9OlTysrKRDP23r17LF68\n+E9fj/8UlcLx48f/Jh+JOTs7Y2NjQ0pKCklJSaxcuZLo6GiWL1+Ojo4O7e3tlJeX09PTI0Jcbt++\nTXFxMRoaGtTV1WFtbU10dDTm5uYoKCjg6OjIL7/8wsqVK7l06ZLg9s+ZMwcTExO6urro7e3F29ub\n5uZmSktLRammqqrK7NmzUVdXR1dXl0uXLhEWFiYAK4aGhty+fZsNGzagr69PVlYWL1++RFFRkebm\nZvbv3y+YCvHx8ZibmyOTyVBQUMDb25uRkRHMzc05e/YsOTk5gmlobW2NgYEBT58+ZWBggKGhIfLy\n8gR2btu2bURGRuLo6CgQdtbW1pSVlbFo0SKGh4f58MMPCQsLo7q6Gh8fHwHpiI6OZtmyZXR2dvL2\n22+joaHBb7/9Rl9fH/X19dja2gqVo7KyMsHBwbS1tXH+/HlMTU1F8Iyenp6gI2VkZPDq1Ss0NDRI\nTU1l6tSp5OTksGjRIj777DMRIiP3KSgqKvLrr7/i5OTE+PHjqaqq4s6dO7zzzjtkZWWxbds27O3t\nMTAwIDc3FycnJ2pqahgYGKC0tJS4uDg+/PBDlJSUyMzMJD8/n+bmZj766CN+/vlnfHx8eP/992lt\nbSUhIYE5c+Ygk8kwMDCgpKRETAra2tooKiriwoULzJkzBwUFBU6fPg2AqakpysrK/P777zg4OKCu\nrs6VK1eor6/H3Nyc+/fv4+vrS2trK+PHj8fAwABra2seP36Mvr4+TU1NvHr1ir6+PpycnIiLi8PJ\nyYn4+Hjh2wGIioqioqICFxcX7O3tiYmJEfzR5uZmAXRxc3Nj/vz5JCQkYGNjg5aWFo6OjkRGRgqm\nSE5ODra2tuTn56OmpkZhYSELFiwgMTHxT+PY/ikqheHhYebOnYurqytnzpzh008/Zfr06QQEBBAb\nG4uOjg7R0dGoq6vz7bffEhERgba2tpAEDwwMkJ+fL3j/ckWjsbExUqmUzs5OjI2Nyc7OZsmSJQKv\nVVVVxfHjx5FIJIwaNYr3338fJSUlce6NjY0lKChIBJKoq6sDr1Vuz58/x9bWVkiOU1NTuXz5MoOD\ng6xfvx5HR0ekUikbN27E1dWVP/74g7lz5zJ69Gjmz5+PhYWF+L3kaj4HBwcBDPHx8RF/nzFjxtDd\n3Y2tra0w6/j5+fHNN9+Qn59PSEgI6enptLW14eXlxYEDBxgeHubdd98Vd40LFy6QkJBAUlISO3fu\nRElJSWDcy8vL0dTU5K233hLZGlKplKVLl9LS0oKioiKPHz9m5cqVGBsbExISgpeXF0uXLkVZWZmF\nCxcSFRVFc3Mzfn5+zJ8/nwcPHrBp0yaePXvGsWPHxBGspKSEDz74gMHBQbZt28a9e/c4dOiQAKHq\n6upSVFTEkSNHSEpK4vr165SVlXHr1i0mTpwokqpmz57NyZMnOX78ONbW1sTHxxMSEsKFCxewsLBg\nxowZREZGiipsz549aGtrExQURH9/P7/++iu5ubm8ePGCmTNnipyPsrIyli1bRlpaGnZ2dsIxe/bs\nWfLy8njvvfdwcXERIB8XFxeampq4cuWK2MACAwNJTU0VEzM1NTUGBwdZtWqVUD52d3fzL//yL0IT\nI0fS9ff3A69jA+rq6lBQUKC0tJT33nuP9PR0ampq0NXVFXQwqVTKV199hZubmzDLxcXFMXXqVN54\n4w08PDwoLS0Vk5U/s/5pKgV5/Janpyc1NTUEBQXR19cnqoENGzYwevRo9PX1KSsrw8zMDHt7e5SU\nlNDV1eXTTz8lNjYWZ2dnfH19OXjwoKD++Pv709bWRktLC3V1dbS1teHv709+fj6WlpYYGBggk8mw\ntbWltrYWKysr2tramD59Ojo6Oty9e5dp06bx/Plzbty4wZQpU2hoaMDHxwcN
DQ2kUimjRo3C29tb\ngF0jIyPR1dXF2dmZy5cvi5m6fISoqKhIdnY2WlpaREdHU15ejo+PD46OjtjY2NDc3IyNjQ2Ghob8\n/vvv6OjoMHPmTPbv38+iRYuIiIjAyMiIkpISzpw5g5eXlxAK/fDDD7x8+VJEp3l5eeHv7y8aq9bW\n1pw7d47w8HC6uroEOkxHR4e0tDTMzMzw9fXlxYsX5OfnC+PYyMgIZmZmmJmZ0dzcLNSijY2N4oLL\nzs4WQT5yGffWrVvx8/NjeHgYFRUV7OzsuH79uriz+/n5kZWVxZUrVzAyMhLUZrmzT19fH2VlZWpr\na5kyZYpwjaqqqjJr1iwuXLjA3r17uXfvHp6enowZM0aATGxsbIiPj2fOnDnExsZibGyMiYkJDg4O\nDA8Pi35TVlYWhoaGIpRm4cKFJCcn4+npSX9/PzU1Nbz77rvcv39fNJ7l1ns5pau0tBR/f38AHB0d\nOXr0KOnp6UydOhVPT0/q6uoYN24cxsbGIpNC3kNLT0/H2NiY6upqEhMT0dPT47PPPqOqqoqBgQGa\nm5sF3EZOjdLT0+PRo0dMnDiRlpYWobNxdHQU4Taamppoamryyy+/UFxc/N+nUmhsbMTJyYnu7m6O\nHj1KV1cXV69excDAgMePH+Pu7o6VlRVvv/02bW1trFy5klu3bgGvwRV3797l559/RkFBgZcvX3L3\n7l0Aent7WbBgAXFxcXh5eVFZWUljYyP5+fmcP3+eV69eERQURGpqKpqamshkMsHVz8/Pp6WlhczM\nTDQ0NEQHHV7LnM3NzTEzM8PS0hJ1dXUaGxt58OABgYGBREdHc/DgQWG08vHxobe3l8uXL3PkyBFa\nWlqIjY1lwoQJlJWV4eHhAbw+T0+YMAFFRUWKiorIy8sTQNr29nbu37+Pvb09t27dQkFBgY0bN+Lo\n6MjKlSvp7e0lKyuLW7duMXPmTLS1tVFRUeGjjz7i3r17QuCya9cuLly4QGNjI2lpady+fRsdHR1u\n3bpFRUUFeXl5XLlyhStXruDl5UVgYCADAwMMDw+Lsj0mJoampia6urro6uoS3ffnz5+Tl5fHhQsX\nUFJSwtvbmy1btrBmzRouXryInZ0d2dnZqKioMHv2bLS1tXn27BkNDQ3/ploaO3asQKafPHkST09P\nAgICsLW15fnz5xw+fJiFCxeKxqutrS15eXmiMly3bh1Hjhyhp6cHM7PXQWXyvMuCggLef/99mpqa\nOHv2LGPHjhW+lObmZhoaGrh586agIr18+RJHR0e2b9+OoqIig4ODnDhxQugf3n77baFVcHV1pba2\nVgBTb968yZQpU5BKpYSHh/P8+XNxxHJyciIwMFDg+caPH4+ZmZnwKBgZGRETEyMet/y40traSnx8\nvLDyyytNuUZn0aJFIjxGHtrT0NDwH7JO/1NsCvJmlfwu//jxY9rb25FKpdjZ2XHz5k22b9/O5s2b\n6erqEiWjoqIiS5cupaqqiuHhYd555x3hzwdwdnZGU1MTOzs7cnNzhdddjiubO3cu9fX1rFixgrq6\nOsrLy1myZAlVVVW89957ZGZmCjhGbGyseCGYmpoyODjInTt3UFVVFYYYORK+u7ubwMBA1q9fLzBk\nFhYW/P3vf0dZWRlVVVWmTJlCcXExLS0thIW9DuSWN0LLy8s5efIk3d3d3Llzh8LCQrZu3YqBgQF+\nfn6imTpz5kxGjRqFg4MDTU1NfPXVV7S2ttLY2Mj48eNxdnZmcHCQkpISbG1tefPNN9m9ezdaWlpk\nZmZSWVkpKpeBgQGcnJwIDw8XXIknT57Q0tJCdnY2tbWvU/+srKzw8vIiNzeXxMRE0ROQj93kG5S/\nvz8mJiacOXOGMWPGsG3bNqE0VFNT4/79+2zatAllZeV/02icOXMmcXFxjBkzhq6uLqZNm8aYMWPY\nuHEjCgoKokF56dIlIiIiGBkZYdGiRaLSKisr4/Dhw/z444+sW7eOb7/9FgAtLS1SUlKYNGkSixcv\nZt68efz+++/MmDGDly9f8scff1BUVISdnR2HDh3i3r17lJWVoaKigqenJ1lZWVy+fJnTp0+zdOlS\n0Wj8+9//zosXLwTWXn5czczM5OTJkwQHB6OiosKWLVtwd3cnLy+Pa9euMX78eBoaGkhJSSE3N5db\nt24JtB3Apk2b8PT0JDU1VaRgmZiYYGpqirGxMYqKivT09NDR0cGNGzdE2Gxubi52dnaUlZWJ5qK+\nvv5/qNH4T7EpyGfzkyZNQiqVEhwcjJubG8eOHcPHxwepVMqBAwdEMvLmzZupqanht99+w93dnQUL\nFggllzxfD16f906dOkV6erpAi7948UKo8c6dO4eqqiqdnZ20tLQQFBREcnIy1tbWdHV1ERERgYGB\nAd3d3Zw5c0ZsCvHx8djb2+Pt7U1/fz9SqVTYkl1cXEhNTcXJyYkTJ04IKo486FZJSUkkH8sJy8XF\nxcBrvXtOTg73799n165dTJs2jdmzZxMcHMyDBw+EgcbW1hZ1dXXy8vIwNzfn/PnzALi5udHR0SFS\ns6urq9mzZw979uxBQUGBr776CnV1ddra2ggNDRUv+itXrmBsbExDQwMmJiZs3ryZefPmCeqwjY0N\nSkpK+Pr6IpFIMDAwwMTEBGVlZZKSkmhra8PU1BQLCwsuXLiAra2tsMNPmzaNSZMmoaGhwblz50Qm\nw4MHD4iNjaW/v5+oqCg8PV+HkkdGRjIyMoKzs7OwzZeWlrJ9+3YGBgYwMTFheHhYBL86Ojry66+/\nYm5uTnNzM8rKyvj6+iKTyTh48CDr1q0DICUlhVOnTonMz9u3b7Np0yba2tq4fPkynp6eDAwMIJVK\niY6OZty4cVhaWjJr1iyioqIoKytj4sSJzJkzh1GjRgmj1dq1a3FxcWFwcJC7d+8SFhaGpqYmGhoa\nPHnyBJlMhoeHh3ArGhsbM3PmTMF2cHJywtvbm08//ZSlS5dy8uRJ4DXc5vTp0/T29tLc3Cyajm5u\nbqioqBAfH4+BgQGjR4/m888/x8TEhJGREVavXs2dO3fYvXu3IEnn5uZy5MiRP309/lP0FL755pu/\njRkzBlNTU+rr6/H09ERdXR0tLS3Mzc2pqalBT09PlNd1dXVUVFTQ0dGBs7MzQUFBIljEy8uLadOm\nsXfvXuzs7EQ6zkcffURHRwd1dXXo6+tTXV39b7wMurq6mJubM3fuXPbt2yfOlMXFxcycOZN58+aR\nnJxMdnY2p0+fJj4+nqtXr9Ld3Y2fnx/379/H3NwcDQ0NwXMYNWoUampqYtogz4J49eoVnZ2dmJiY\niAvo9u3bbN68md9++40VK1bQ0dEhxoanT59meHiYNWvWcPToUfT19UWm4LVr15g+fbpwFTo6OqKp\nqYmbmxvOzs6Ym5ujrq5OS0sLgYGBjBo1itLSUrq7u/n8889RVVXl+fPnTJo0iTNnzrB3717h2tPU\n1OT69euCTCTvxstzJuR/OzkrMDM
zk4GBASIiIoRX4caNG9y+fVtsYj/++COjR49m5syZKCoq0tnZ\nSW1treBfrlmzBm9vbxISEjhy5Iho9DY2NmJmZsbLly+xtLQUidWlpaXY2tqipaXFjRs38PDw4Kef\nfhIuQSUlJZKTk1FSUuLx48c4Ojpy+fJlIUCLjY0lLCyMkpISPDw8RLNOntMxPDzMtWvX+Pbbb1m7\ndi0bN27k/PnzGBgYkJWVhZ2dHTKZDEVFRS5dusTTp08JDw8nJyeHKVOmCN1KSkoKurq6IuDnxYsX\neHl5cfjwYQwMDGhubqa9vR2JRMKjR4+YO3cuzs7OtLS0oK+vT3NzM7Nnz+bmzZv09PSgqakpNsiM\njAzU1dXp7u7m5MmTSCQS3N3dhSXdzs4Oa2trYmNj//v0FPr6+tDS0iI5ORmpVMqmTZu4evUqurq6\nbN68mcuXL/PgwQPGjh1LXV0dP/30E/39/eJBJyYmUlFRQV9fH6mpqWLUM3bsWNHYiYqK4tWrV6ip\nqdHV1YVEIsHW1pZJkyZhaGgozDQHDx7E3d2da9eu0d7ezrJly6isrOSLL77Az88PeA0C6evrIzQ0\nFEdHR0aNGoVUKsXJyYno6GjGjx+PiooKRUVFHDx4kMzMTHp7exkYGKC1tZUffvgBT09PIiMjUVdX\n56uvvgJep2S7uLhgbm5OcnIy+/btQyaT4e3tTUhICFevXsXZ2RlHR0c0NDTIzs7G2dkZDw8P1q5d\ni6GhIYcPHyYpKYmoqChBIJL7/OW9iqCgICZOnMiZM2coKSlh/vz5ZGdnExYWxk8//URzczNTpkxh\ncHCQuro6ZDIZ9fX1HDt2jKGhIaRSKZMnT6apqYmFCxdSVVXFb7/9hp6eHkZGRhw4cIDu7m4UFRWZ\nMmUKu3btIjs7mxUrVrB9+3ah4ZDTgx4/fiz4nObm5iI4WM4iMDMzo7OzU3A009PTqa6uxsHBQSgh\nh4aGcHBwEGE1tbW1hISECOOSra0tEyZMQEtLi2nTptHW1sYPP/zA2rVryc7OprCwkHnz5tHU1ERs\nbCx6enq4ubmRkZEh0pl27NhBf3+/oHXBa1enpaUlhoaGxMTEMGHCBLq7uwV09v79+1RWVuLq6oqT\nk5PwMRQWFtLR0cGECRMwMjIiIyMDPT09wsPDxTVx8uRJpk+fjqmpKePGjWNgYABVVVUWL16Mi4sL\nFy5cICwsDCcnJ9TV1Xn06BHBwcFYWVnx8uVL0tLSxCYTExPzp6/Hf4pNQSKRsGbNGgFR8fHxQUdH\nh+zsbBwcHDh+/DiTJ09mwoQJvPPOO9jY2DBjxgysrKzw8fFBTU0NfX19fvrpJ0ZGRsT5aWBgAE1N\nTQHehNfNPLnZJjg4WODDu7u7efLkCSYmJhQVFTE4OIiampqAc+jp6VFTUwMg0pLa29upqqri3Llz\n9PT08OLFC3x8fMjLy+Ply5fMmTOHOXPm4OfnR2JiIl988QX+/v6EhIQwevRokWz8ySefAK9hMx0d\nHcTExNDR0UFoaCjp6em0t7fz448/iqi8np4eKioqmDZtGo2NjWRkZHDjxg327dsneI89PT2kpaUx\nMDDAw4cPsba2pqioiBMnTmBlZSXyHOREqc7OTrKystDQ0KC+vp579+4RFhaGmpoabm5uDA0NYWlp\nSUVFBatWrRKJUjdv3mRwcJBZs2bR1dXFixcvWLBgAdOnTycnJ0eAR5WUlCgvLxfuSD09PdLS0oiJ\niaG7u5ulS5cCMDIyQlxcHI2Njezbt4+pU6fS399Pa2srrq6ugsS8b98+SkpKyM3NFSIlV1dXAWWV\nl/TysGELCwtmz55NTEwMUqmUqKgotm7diqKiIkuWLGHZsmV89913NDY2YmVlhZ2dHXFxcfT395OT\nk8OFCxfw8vISEytzc3PgNTVMWVkZFRUVnj9/Tl9fH6dOnSIyMhIDAwMsLCzYuHGjqFrlQcVubm7c\nu3dPBMYYGRmhoKDAtWvXAGhqasLY2Jivv/6ae/fuMXbsWIqLi3nw4AEHDhygtLQUc3NzTp8+zePH\nj8nJyWH16tUUFBQwMDAAIEahixYt+g/xFP4pjg+//PLL34KCghg3bhwvXrwgOTmZiIgIced+8uQJ\n27dv5/fff2fTpk1Mnz4dNTU1YZ1WVVWlvr5epPvm5eVx//59li5dKoCsckquVColLS1NYMPkvgWp\nVIpMJuPChQvs2LGD6upqKisr8fHxQUVFhYaGBqysrLh69SoBAQECEXf27FkRLS+32I4fP57h4WHi\n4+NpamoSc3V7e3tiY2P55JNP+PHHHwWgQ548lZSUhKGhIf39/SxbtkyExRgZGVFZWcmoUaME/GPs\n2LHMmzdP3P3b2tqYN28e8fHxWFpa0tfXx4QJE5g5c6Zwb+bl5aGtrU1eXh4pKSniQvr111/ZuXMn\nLS0tnD59WmDzKysrqa+vZ/bs2SgqKqKpqcnw8DAuLi7Y2NiIEJ/79++LjebOnTuiR/Pee++JBqCr\nqyuPHj3C399fNC+XLFmCoaEhZWVlXLt2jcLCQqytrYWceNKkSSQnJ4uxaVBQEDt37mTKlClkZWWx\nePFibt68iZaWFkuXLuX48eM4OjpiaGjIyZMnMTU1RVtbmxs3btDQ0EBPTw+NjY04Ojqio6PDH3/8\nwfnz53F0dCQzM5Pg4GAsLS1FY1RXVxd3d3eWLFnC0NAQIyMjpKSkIJVKxWTJ0NCQFStWsH79esaP\nH8/ChQvJyMhg+vTpnD59munTp9PQ0ICpqSkdHR34+vqKI1x5eTmFhYXMmTNHaCRUVVW5d+8eTk5O\nrFq1iuLiYhYuXEhRURFaWlp4enqir69PQUEBPT09RERE0NDQQE1NDVZWViKNSz6iraioICAggCtX\nrlBVVfVff3yQSCSVEonkiUQiyZNIJFn/+JyeRCJJlEgkJf94q/vv/Zz29nb+9re/ceTIEQwNDXFy\nckJTU5Pq6mrS0tI4fPgwBw8eJCIigurqah49esSdO3e4evWq/MFibW1NZGQkGzduxNbWFkCUaZ2d\nnUyePJkdO3bQ2NjIm2++iYeHB8+fP0dbW1uEfMij7q9fv87EiRNRU1MjKiqKSZMmsW3bNmbNmgW8\nliPLqUBr1qxBS0uLuXPnCjYgvB6BWVpa4uHhgbGxMcnJyWRkZODi4kJiYiLKysoiu1EOfPnrX/8K\nwNmzZzl8+DB1dXUMDAyQkpKClZUVCgoK7NmzRzD/CwoKuHfvHioqKnh7e5OUlISHh4cIYH3rrbd4\n9eoVo0eP5sSJE6Snp+Pu7o6ysjIRERF8+umn5OTk8PDhQ5SUlDAyMhLS3rKyMjIzM2lpaSExMZGL\nFy+KANeYmBju3LnDnTt3KC8vJygoiPLyctra2tiwYQMrV66ko6OD9957jz/++ENAWpWUlIiNjWVk\nZARvb2++/fZbtLW1cXd3RyKRAFBaWsr69esJDQ0ViLW2tjYUFBQwNTVFV1eXqVOn0tLSQkxM
DF5e\nXqirq1NXV8fEiRMZO3YsN2/exNXVVQS0ACxfvpyRkREmT54sWBJvvfUWc+fOJSsriwkTJrBnzx4a\nGhpYvnw5t27dwtnZmYiICKqqqgRvUiaT0dPTIyZGvb29DA0NoaioSEZGBnPnzmXlypVUVlZiZmaG\nmpoa165dExb+HTt2IJPJSEtL41/+5V/Q19ensrISFxcXSkpKWLNmDfCapyCPCpDJZGRkZPD06VMR\ncOPi4kJdXR2pqakoKiqybNky6urqBMti69atODg4oK2tzW+//SYSrf7M+n/j+BAgk8lc/pUl8xPg\ntkwmswZu/+Pj/+0yNDQkLCyMN954A0VFRZFlWFhYSFFREY8fP+Yvf/kLqqqqgr1vYGBATEyMCCtt\nbW3F2tqa9PR0QV4qLS1FXV2dV69eERsby88//0xERASVlZV8+eWXODg4YGNjI0oxExMTCgoK8PPz\nE558ZWVlVq5cyXfffUdk5OtNNjMzEy8vL+bMmUNTUxPl5eXiCZRHhst7DdHR0fj4+ODn54erq6sQ\nJ8n5EUpKSjg4OACvA3O7u7uZOnUqERERxMTEcOLECebPn8/06dOxtLSku7ubiRMnYmFhwcGDB1m+\nfDmZmZlcv36dzz77jIyMDBH6cu/ePc6cOcPPP/+MsbExK1asELp8JSUlIiMjuXLlCqtXr6akpAQ1\nNTVWrVrFb7/9xsyZM5FKpZSWlqKmpoaenh7bt28nKyuL3bt3s2XLFkFBysjIwNPTk/nz51NYWMjz\n589xcnIS4bJnz57l7NmzTJs2jYkTJ1JfX8+dO3cYM2aMSDbq6+sDEOnaNTU11NfXc+XKFcLCwjAw\nMODQoUOEhYVx8OBBpkyZwurVq/Hw8BBiJWNjY9LS0lBSUqK+vp45c+aIub8cnvPo0SMCAgK4fPmy\nuPDLyso4efIky5YtIzc3lw0bNnDlyhUUFBSYPXs21tbWeHl5YWlpiZWVFY2NjULefffuXWxtbbG2\ntmbUqFF8+OGHImDW399fmJTKy8sZO3YshoaG7N69W8BuPTw8sLKyYurUqTg4OPDo0SMAJk6cyL17\n9zh37hxxcXECjSeHrzQ1NfH555+Tm5uLhYUFeXl5KCgoIJPJSEhIEAImS0tLHB0dhQfkz6z/ip5C\nGHDqH++fAub/e9/Q29vL+fPnhQknOTmZ1NRUgW6Xm2k6OzsxMzPj1atXdHR00NfXh7m5OVu3bqWj\nowMHBwcSEhIEZKWlpYXOzk4CAgLQ1tYWJN+HDx9iZ2fHjRs36OzsZMqUKSLKbPXq1Zw5c4by8nLe\nfPNNFixYwOHDhwkPDxdCmKSkJKKjo7l9+zaamppoa2uzfft2Ybu+evUq9fX1NDU1sX//fr744guq\nq6sJCAjg/v37gqDT1NQkkpngtSwbICEhAWNjYw4cOICmpiaXLl3i559/FmKn9PR0AgMD+e677+js\n7GT58uU0Nzdz8eJFVq5cyZ49e1i6dClXr15FT0+PwMBAent7+eGHH0hPT+fp06cCpR8aGsqYMWOo\nr69HUVGRixcvsmjRIo4dO0Z2djbff/89ioqKBAYG0tjYSG1tLa9evSIpKYn8/HxUVVXZv38/o0aN\n4osvvmDGjBki4aqtrY0tW7bg5+fHrFmzSEhIoKenh+7ubhFVv23bNqHSg9d9oMjISCoqKqitrRWj\nQX19fYGXDw0NxcTEhJycHLq6uqiqqhIqVDs7O7Zv3463tzdNTU04OzsDr0d8o0aN4q233kIqR6os\ntgAAIABJREFUlbJ+/XrMzMy4ceMGS5cuxd3dnY6ODh49eoSDgwNdXV0cPnyYBQsWsHbtWjw9PTl0\n6BA3btzg9OnTotH4/fffs2rVKoaGhnj27Bnl5eVs3bqVzMxMAgMDeeedd9DS0uLOnTv09vZSUVFB\nSEgIDg4ODA0NkZGRwZMnT0hJSSEoKIgLFy4AMHfuXFxcXAgMDGTcuHEsW7aMuLg4UlNTSUpKYnBw\nEIlEwoYNGzhx4gTZ2dlkZWWhqanJ3LlzuXr1Kj09Pejo6DB16lTRyP0z6z+7KciAJIlEki2RSNb/\n43OGMpms/h/vNwCG/6tvlEgk6yUSSZZEIsnq7+/H3d2dZ8+eUVFRweTJk/Hz86Oqqoq3336bY8eO\nsWjRIi5cuEBoaCh6enr09/dz+fJlcnJy0NTURCqV0tTUxEcffcSHH34IvI6dNzMzIzc3lyVLluDr\n68vHH3+Mmpoa06ZNE1VJQUEB69atE+lRixcvFmaZX3/9ldLSUhHnDTB16lQ0NTUxMzPD0NAQe3t7\nPv30U3788UeqqqqYNm0alZWVODs7Mzw8zMaNG8nNzaWrq4vQ0FBkMpkoLydNmsQbb7wBQFdXF6Wl\npYSFhXHp0iUOHTrEjBkzePfdd3Fzc6OwsJCCggI8PDxQUlIiPT2dO3fukJSUhIaGBoODg+Tk5DBj\nxgz6+vqEz6KpqUmkITU1NXHixAmOHTuGmZkZjx49oqOjg8rKSpEU5e/vL/IL09LSiI6ORklJiYCA\nACZNmoSCggL9/f3IZDLhVty1axeBgYH09/eTlJTE3bt3mTt3LgMDA1y/fp2kpCRGjx5NcXExS5Ys\nEfbfFStWiEh1ABsbG3x9fTEyMhJBrIcOHeLQoUOYmprS3NzM4OAgT58+5dGjR6IxraSkxJMnT1BU\nVGTRokU0NzeLBjG8Pj60t7eTn5+PVCpFXV2do0eP8s477yCTySgpKcHAwICtW7dy/fp1Xrx4wZYt\nWwgODsbQ0JDi4mKBTV+8eLFojNbW1jJr1ixsbW0xMDCgra2NlJQU0tLSRAzf+fPn+dvf/sbChQvF\n/zd+/HhaWloYN24c2dnZpKen09/fL9Dx+/fvx8bGRqDbDh06hJubm6gYR0ZGGDVqlDD4bdq0CQMD\nA5qamrh165bQRujo6KCurs6pU6f4s+s/uyn4yGQyFyAEeE8ikfj+63+UvcY6/S+pSjKZLFImk7nL\nZDJ3eQPHwsICIyMjgXc3NDTkzJkzREVFYWhoiIqKCqdOnaK9vZ3Hjx+zadMmEY3+/PlzQkJCOHr0\nqLijy1FecjyWjY0Nq1evBqCkpARdXV0GBwd5/vy5aB7Ju8AjIyM0NzcTFhaGr68vJSUlosx3cnLC\nxsaGBw8ekJaWRmFhIYWFhULQU1lZib+/Pzk5OVy+fJn6+no2bdokxlOtra3U1NTwxhtvUFNTI2b+\njY2N+Pv7C3nqxx9/TE9PD/Hx8WRmZoqUpry8PFJTU7GxscHb25uXL19ib2+Ps7Mzra2tyGQyOjo6\nCA8P5+LFi8LJKY+h++yzzwgJCeH58+cEBwfT39/P8uXL6enpobCwkGfPnhESEoK7uzvm5uZ89913\nYvyZm5uLoaEhLS0taGhoiLQlDw8Poe5UUVHB2NgYd3d3IiMjRZ8lIiICe3t7zM3NCQ8PJzw8XGDj\nd+7cCcCNGzews7PDysoKCwsLCgsL8fX1FT0CeeO
3tbUVdXV1jI2NRURdQEAAADt27CA1NVXkX8Lr\nPk1cXJwYvd6+fZtt27Zx+/Zt4SmQj5znzZvH8PAwT58+pbGxEVVVVTw9PdmxYwcHDhzAzc2NxMRE\nAHGM++STT+jo6GDy5Mnk5OTw008/kZSURGxsLJs3b2bHjh2cPHkSVVVV7O3taWxsFEng06dPR1dX\nlxMnTghj2MDAALNnzyYiIoInT56wY8cOWlpaMDQ0JCcnB2dnZzIyMpgxYwbz589n//79KCgoUFJS\nQnp6OsPDwzQ3NwuFZ09Pz5++qP9Tm4JMJqv9x9sm4A/AA2iUSCTGAP942/Tv/RxVVVXOnDlDU1MT\nvr6+ZGRkoKamxvbt28nLy8PFxYWbN2/i4uLCb7/9xrhx44iIiODAgQM0NDTg5eWFo6MjH330EStX\nrhTjl3v37vHkyRP6+/sZHBzk119/xdbWVoBYCwoKmD17NkZGRty4cQNlZWVKSkowNDSko6NDQDZW\nrVrFxIkTxcw7JSUFTU1NwsLCaGxsRE1NjePHjwuI7KJFi3B2dsbZ2RltbW0UFBRIT09n/Pjx/PHH\nH0LiKo+jkyswFy5cyIkTJ3jnnXdQVVXlyy+/RFFRkdTUVFasWEFXVxdLly5l3LhxIuUqLi4ONzc3\nKioq0NHR4dWrV4SHh3P//n2qqqoIDw8nNjaW7777jsrKSkxNTVFQUCAuLg5fX1+SkpLo7+8X0mg3\nNzciIyOJjY2loKCAS5cucfXqVaZMmSKi0BMSEnj16hWDg4MirdvCwoLq6mqGhoZQV1fHzc2Nb775\nhmnTpiGVSlm0aBHJycnY2NhQW1tLa2sru3bt4vHjx4wZM4bPPvsMeM2qSElJ4a9//avgGsTFxREa\nGsqkSZPE+butrY329nZKSkr4/fffxciusLCQS5cuCeCvvKfg5eVFSEgIW7ZswdTUFA8PD+F3kfM5\nvv/+e3Jzc+ns7GTTpk00NTWRnJwsEqbz8/MZM2YMqqqqopmtr68vjnp/+ctfePHiBbt27eKbb75h\n1qxZrFu3jgMHDqCvr8/q1atpaGjg2LFjqKqqUlFRgZWVFTo6OgQGBhIeHi5cktra2qxZs4b+/n5U\nVFSorKxk/Pjx7N+/X/SRrKysePbsGdeuXSM4OJj09HSUlJSYM2cOg4ODfPzxx0yaNAlbW1sRA/hn\n1v/jTUEikWhKJBKp/H1gBlAAXAXe+seXvQVc+fd+1qhRo7hy5QqKiop88MEHIg5dHnaRm5tLXV2d\nMP/IMd1KSkoiwjwzM5MDBw6QnJxMVFQU8PoJ+/rrr7GwsKC9vZ2AgADS09NZtmwZVlZWFBcX8+LF\nCzo6OnB3d2fevHkiHdna2pqAgAB6enpYuXIl7e3taGhoAPDw4UPGjBlDdXU1vr6+BAcHs3HjRioq\nKqiqquLMmTPExMRw7tw5Nm/eTEVFhSBJy2Pu5fr12tpa3NzcgNebjZqaGomJibz99tu4ubnh6enJ\nggULOH36NHZ2drx48YKHDx+KmLu3336buLg4rK2t2bRpE5X/iLwPDQ0lLy+PvLw83n77bTZs2ICr\nqytNTU1s3LiRmTNn8ve//104Drdt20Zvby/x8fG4uLgQGhqKpaUl06dPx8rKiszMTEpLS7G2tmZk\nZITQ0FBCQ0Opqanh8uXLKCoqUlVVJTwgTU1NVFdXC5elq6srkyZNIjw8nMrKShGY8+DBAzGbB/j2\n229FRL21tTWHDh2ira2N48ePY2lpSUtLC19//TW6urrCfi7PogwMDERZWVlUgHp6ehQVFQGvUf6/\n/PILERERvHz5kuzsbB48eCCgvXIIsLu7O+7u7pw6dUrE3nV1dZGSksKqVavQ09MjJiaGM2fOAHD7\n9m2kUik+Pj709/ejo6PDjh07MDY2xsnJiWPHjuHk5ISPjw+VlZX88ccfqKmpCXHX8ePHaWpqorW1\nFQUFBQFZ8fX15fnz5/T39/P06VOys7O5cOECa9as4Y8//sDT05PExESsrKxQVVXl2bNnIh9DT08P\nS0tL7t69i6WlpQDy/tn1n6kUDIGHEonkMZAB3JDJZDeBvUCwRCIpAYL+8fH/drW3twuKsrGxMVlZ\nWfT19WFqaoqtrS3r1q3D1taWffv2YWxsjKGhIYODg3R1dZGbm8uYMWPQ0NDgwIEDODk5oa2tDSBK\nWUNDQzo7O9m9ezdPnz4VfnY3NzdOnDiBj48PVVVV9Pb2oqenR0FBAcPDw1RXV/PBBx9gaGiImpqa\niPX++uuvOX78OE5OTty6dYvq6mpqa2sZO3YsWlpa2Nvb09rayvr16/n8889ZsmQJJ0+eFIk/FhYW\nIsZLKpUK96W8iz00NERXVxdGRkacP3+evr4+Fi5cyNmzZ7l37x7Ozs4CHvPy5Uv8/PwEkGbDhg10\ndnYSGhqKv78/Q0NDvHjxgoCAAOzt7dHV1UVLS4vY2FjefPNNFi5cyNy5c/nmm294+fIlL168QFVV\nlUOHDmFkZMTAwACxsbHMmTOHc+fOoaWlRX9/P93d3URFRTE8PIyenh61tbWYmJjQ2trK5cuXRcCK\nTCZj3bp1zJw5k6ysLA4fPoyuri7z58+noKBASMzlL9offviBJ0+e4OHhQUZGBrNnz8bGxgZVVVUu\nX77M0aNHRfiPfMNRUVFBR0eH5ORkLCwsWL58OW+88Qa3b99myZIlwGsOqDy89uLFi3h6elJfX8+R\nI0coLS2ltLSUtWvX0t3dTUJCAtbW1hQWFvL5559TVVXFggUL+Pbbb0XFKv99tbW18fb2JjQ0lIyM\nDAYHB1m2bJkYUz98+BAvLy/gderT7Nmz8fX1xdzcnKKiIlasWMGECRMIDQ3l0aNHTJs2DXgdihMc\nHIxUKqWurk6khWVkZDAyMkJeXh4AKioqKCoqoq2tjZeXF6NGjaKnp4f3338fdXV1srKyqKqqQkdH\n509f2P9Dc/6f9T/r/z/rvw/NecKECSxZsgRra2v6+/u5e/cumzZt4saNG5iZmTEwMCCov3PnziU6\nOpr169eTmZlJe3s7WVlZjB49WkA+ZsyYgYODg3DJlZWVkZeXR3NzMxoaGtja2tLX10dxcTGTJ08m\nPz8fe3t7srKyCA4OFtSdsWPHUlBQIGLO/fz80NXV5dy5c0yePJkjR46wbNkyBgcHSUpKYnh4mK6u\nLjZv3szDhw/p7+8XR5aKigoqKytRVlZmyZIlaGtrc+LECQwNDZk1axbe3t7MmzePKVOm4OXlxdDQ\nEOPHjxc25t27d/PXv/4VmUxGXFwcioqKoilra2vL3r17mTp1KlOnTuXu3btcuHCBPXv2UF5ejpKS\nEu3t7SKpSh4gs2TJEuLj4/Hw8CAnJweJREJxcTF/+ctfSEhIQE9Pj6GhIX7++WcCAgKEBffo0aNi\nRGZubi7s0/X19Rw9epSNGzdibGyMtrY2P/74o3AtNjY2YmtrS3FxMfX19VhZWaGmpoaBgYFI/UpJ\nSSE7O5
uqqirmzp3LtWvXGB4e5tixYyQmJgrvhZ6eHtnZ2YwbN46WlhYSEhJwdnbm2bNn9Pb2oqCg\nII5QW7du5dmzZ9y+fZuamhrc3d2pqalBR0eH4OBgkVvZ0dHB0NCQGEWrqKhw5MgRFi5cyIQJEygt\nLRWTseLiYjZs2EB6ejoNDQ0MDw/T0tJCeXk58+bN4/79+3R0dODn50dBQQG+vr5UV1dTVFQkBFDu\n7u48f/4cCwsLIWYrKytjy5Yt7Nu3jwkTJogJ2osXLwgPD+eHH37A3t6eqqoq7O3taWtrw8LCgri4\nOCEC09DQICcnBx0dHTQ0NOjq6iItLY07d+78qevxn2JTkEgkqKio4O7uTn5+PvPnz+fmzZt0dnbi\n5eVFb28vL168ICgoiN9//x0rKytkMhmZmZlYWloKbkJtbS06OjqizHd1deWbb77B2dkZY2NjPD09\nSUpKIikpiUWLFqGmpkZxcbHwu8tVh/v37ycxMZGysjJiYmKwtbVFUVFRILi0tbWpr68XgSRffvml\noE4rKyvT1NREYmIiO3fu5PLly9jZ2dHS0sLbb78tHoucY1hfX09sbCwA+/btE4GjSkpKHDx4EB8f\nH9avX4+/vz9JSUmcOHGCwcFBzMzMcHZ2RklJSWg0rl27hqmpKWZmZgKccvHiRb766isaGxuRSqUC\nSKKnpydQ4YaGhiQnJ7Njxw4mT54sjlRyBd2ePXvIzs7GxcWFs2fPMmHCBL788ks0NDQIDQ3l+++/\n56uvvsLe3h5VVVXS0tLQ1dXl1KlTbN68GVtbW4KDgwkMDCQzMxMjIyNCQ0PJyckhISEBTU1NQkJC\ngNeinRMnTtDV1cWWLVvYtm2baCTfuXOHuLg4oqOjRSNSflRct26d0Px3dXURExODpqameM727duH\njo6OELbZ2tqSmprK8+fPqaysRFdXV/hb9u7dy+nTp9m3bx93796ls7NTxLClpqZy8uRJMS3p7+/H\n0dGRoaEh7t27J0aMAQEBwtIeEhLCw4cPcXZ2Ji4uDisrK2bNmoWSkhJWVlYCpV9RUcHYsWMBRECP\njo4Ourq6IoTH0dGRKVOm0N/fj0QiQVlZmZqaGt566y0iIyOpr69n3rx5ODo6IpPJ+Oabb5g5c6Yg\nbf+Z9U/hffjuu+/+NmvWLKqrq/Hw8CA/Px9NTU0mTJhAUlIS06ZNEyo9+eaho6Mj0pLb29sFBn7q\n1Kl8//333L17Fzc3N/z9/amsrMTJyYlPPvmE5uZmGhsbcXNzY8yYMWIaYGVlxdGjR0UWoPwuZ2Fh\nIeAY1dXVpKamilTjd999l6ioKBYsWEB1dTV2dna8fPkSBQUFGhoaqKqqYuPGjQKv1draSkdHB1On\nTuXp06dMmjQJHR0dGhsbuX//vgjTBcQTLo8ck1+4Bw4cICgoiLFjx6Kurs7YsWNJSEhg8uTJuLi4\nEBUVJRqxoaGhdHV14e3tTXR0NJs2baKkpITKykpkMpm4wz948IDu7m4kEgllZWWMjIxgaGhIXl4e\nMTExyGQybGxsBBovKSkJW1tbpk6dSlFREYsXL+bq1auMHz+e7u5u2tvbGT16NIaGhgwPD2NqaoqD\ngwMBAQFUVFRgb2/P5cuX8fHxQUtLCwMDA9rb2wWP08DAACMjI5YuXYqWlhZXrlxBU1MTGxsbAgMD\nhWHN2dlZODi//PJLmpubGT9+POPGjaO6uprly5eTmJjI48ePMTAwEM3ZBw8esHbtWvr6+igpKWH6\n9OmYmJgwatQoDA0NOXjwIKqqqlRXV9Pd3Y2JiQnLly8nOzubzs5OQkJCyM3NJScnh3Xr1nHz5k2U\nlJRoaWlBVVWVUaNGUVFRQU5ODtnZ2TQ1NeHu7i6CX3t6enjy5AlNTU0cOnSIjz76iNWrV1NdXU1J\nSQmpqakYGBgQERGBnZ0dBw8eJD09neTkZMaOHcvYsWN58uQJfn5+Ikk7Li6ODz74gJcvXzJ69Ghq\nampobm7m+++/R+3/Yu+9orK6+r3t66b3DtJ7kV4sgCh2LEQBe0ej0VRNfHzUFJMYjSaWFI1JjL1i\n7w0FQZQqHaWL9M4tvZfvwH3P8e7vZOfgHd/IM769TjLURMkta665/vP3uy4VFW7dukV2dvbf6j78\nIxaFnTt3fhsQEMDs2bNJS0ujqKiIV69eIScnh4aGhtCVWVhYcPv2bdTV1dHT08PDw4Oqqiqsra3J\nz8/Hw8ND1JplgtT29nYyMzPR1NQkICCA0NBQQT/67bffCAwMZNq0aaSkpDA0NMS7774rqrQqKiqo\nqakhkUjo7+/HxMSE+/fv891339HY2IiCggIjRozgwYMHGBgYoKamRlNTE56enri4uFBfX4+xsbEA\nkwwMDAiwiuwV4c2bN5iYmHDlyhVcXV3p7Ozk8ePHgg48f/58oZeXJeliYmKYPn066urqxMXFMW3a\nNJSUlLCyssLNzQ0LCwuMjY1JT0+nq6tLYOj09PRobm6mvr4eGxsbSktLxRNKT08PZWVl+vr6kEql\nWFtbU19fz6pVq4SebnBwkKamJiZOnEhubi7FxcV88cUXHD16lPHjx/PJJ5/w/vvvc+bMGVauXImh\noSERERFIJBJ6e3vJzc1lzJgxuLq6oqGhgYKCAk+ePBFW58jISMLDw1FXV2f48OFcv36dpqYmTExM\nePLkieAhyIalp06dYt26dWhqagol/KxZs1i3bp1IH8rwePv376e8vFzkQ2JjY/Hy8hIMiP7+fmpq\nalBTU8PAwABfX1+UlZWJjo7m0aNHGBsbo6Ojg7W1Nd3d3bx69Yrc3Fw8PT3R0dFh5MiRwvHY09PD\n7Nmzha6tsrKSuLg4gdiTsSNfvnzJ7t27mT9/PlpaWujo6LBw4UL27NnD4OAgo0eP5tdffxUnQGZm\nZkgkEmpqapgwYQKxsbEYGBiIIWNmZiYODg5YWlqSnp7OggUL0NDQoKKiAkdHR65cufKfw1OQSCTI\nyclx9epVuru70dTUZMKECRgZGbFr1y76+voETfnQoUOoqamho6PDn3/+iZeXF25ubigqKgrh5uvX\nr4G3AaWIiAh6enqor68nNTWVnJwcJk+eTEZGBu+99x4xMTEYGxuLbXtRUZFYcPT19SkpKSEwMJCP\nPvoIa2trAMF0vHHjBrt370ZNTU2c6b98+ZK//vqL/v5+DAwMOH/+PFFRUaioqKCsrCy05DExMTx4\n8ABLS0uRU1i0aBGtra1MmzaNhIQEUlJSePjwIaampkRERLBw4UJev36NtbU1pqamPH/+XJCkVFVV\nBfe/qamJnp4enj9/joGBAePGjUMikbBlyxZevnzJBx98gKurK9ra2nzyySfs2rWLoKAgAgIC6Ozs\nxN7eHm9vb1xcXOjs7KS0tJTa2lqSkpLw8PDgxx9/ZOnSpcjJybF69WoWLFjAq1evCA0N5dixY/z8\n888kJyfz6NEjHBwcGDVqFFlZWRgaGtLX1ycIVc7OzlhaWqKnpye
ELrIkZXFxMSNHjqSxsZHo6Gje\nffddAWnNzMykuroaOzs7IiMjOXXqFE5OTmzZsoVhw4axcuVKGhoaBLIM4PLly+jo6DA0NERhYSHm\n5ubk5uZibW2NgYEBcnJy+Pr6iqZiamoqXV1dzJgxg/DwcOzt7UlOTiYzM5OhoSFhIFdUVCQpKYmV\nK1dSVlZGZ2cnx44d4/Tp09y4cUPIiHx9fYXcxdTUFBcXF1xcXMjIyBCwGSsrK3GEunnzZrKysli0\naBGZmZkYGhpiYGCAnZ0dbm5uJCYmMn78eLFYvXjxguHDh4uC1pIlS0hOTiYjI0N8z/zd6x+xKCgp\nKfHq1SvxNJXlA0pLS3n48CFWVlaCTFxcXIy3tzfFxcU0NjbS19fHnj17RMRWVVVVHPf9+OOPmJqa\nMjQ0xJgxY2hra0NbW5uIiAh0dHTIy8vDzc2NnTt3CpCmrq4uRkZGJCcn09HRwYwZMzh69ChpaWnc\nvPk2crF+/XqMjY0JCwsTVVhvb28sLS357bffxDGRLE+wdOlS9PX1ef36Na9fvxYpSW9vb5H5h7ff\nYA0NDcTHxzN9+nTeeecdgoKCKCkpEQQq2ZD0wIEDwgwt2xEcOXKE4OBg9u/fz4MHD9DT0+P27dvM\nmDFDGJRnzJjBkSNHKC4upr+/n8OHD7Nnzx709fVpaWnBzMyMiooKVq1aRXNzM7GxsSIsM3r0aBIT\nE8nJyWHUqFHY2dkJIlVnZyf379/nvffeo7a2lra2NiHsiY+Pp7u7m3v37vH69WuUlJS4fv06VVVV\neHh4cPLkSQEW1dDQIDU1VZCuZU3Wnp4eYZJubW3l/fffp6ysTJi/y8vLuXnzJo6OjpiamqKpqSkC\nVYAA7Jw8eRJjY2PGjh1LZ2cn77zzDsbGxtTW1vL8+XMcHR0F4l+WbZk2bZoYGspQcTo6OuLvLSgo\nCD8/P3777Tfk5OTYsWMH/v7+Yra1ZcsWxowZI2ZbAwMDZGdnC4hMUlISvb29PHz4UGRhfH19RT6i\nrq6OvLw82trayM/PJzk5GX19fSoqKlBXVycyMpLMzEyOHj2KgoICGhoa9Pf3M2nSJBGplj0o/871\nj1gUlJWVRU+/qalJDPnCwsKEtk1XV5f6+nqSk5NJTExEQ0NDxHhXr17N0NAQvb29mJubk5SUBLwd\nWvn6+grElqWlJbt27eLmzZtYWVmhqKiIvr4+gYGBpKamip67ra0tn332GaGhoSQlJQnEmqWlJQB/\n/PGHiBTLMuxRUVH4+/vz4MEDfH19cXd357333uP+/ftERUWRkJCAi4sLurq6nD59mtbWVlFD7u/v\nB976JLq6urh48SLwlldYV1dHU1MTkyZNwsTEhKSkJDFoSk9P59SpU/T29lJfX09aWhoDAwMEBgby\n8ccfC4rPhx9+SHp6OtOmTSM5ORktLS1GjBiBsrIyc+bMQVFRkbi4OPH7DA4OsnXrVpydnTExMRHF\npCdPnqCvr8+FCxf497//TWxsLCYmJgwNDfH06VPWrl1LfHw8z58/x8HBAWNjY8aNG8fx48dZunQp\na9euRV1dnR07djB8+HDk5OSIjo7G0tJS3GRRUVF0dHQwatQoOjs7uXr1KuvXr+fq1atCjFNcXMzF\nixc5evQoy5Ytw8nJCT09PVxcXFiyZIlIUVZWVlJZWQm8HQ7LdjqygXNAQAB5eXnCEqanp0d8fDzl\n5eX89NNPNDc309PTw4MHD6isrEQikaCqqsrx48dFYzY5Ofm/ZV6cnJx48OAB4eHh5OTkkJ6ezrFj\nxxgYGMDKyoqLFy8yceJE1NTUePbsGUNDQxgaGnLkyBGmTp0qPofo6GhMTEzQ09PD3t6e9PR08Roz\nfvx4gZv77bffqKur45133mHWrFk0NDTw7NkzBgYGeP/99xkaGhLszr97/SMWBTU1NYKCgmhsbMTQ\n0JClS5dSUVEh3kUB0ceX4dVk8g2ZuScvL09wAteufdvNysnJEbiyCxcu8ODBA0JCQggNDUVXV5eY\nmBjU1dVJTU1lzpw55OTkiGm0LJCjq6uLj48PZmZmYiu6detWUlJSaGtrE60/We+/u7sbU1NTkpOT\n6ezspKamRvweHR0dmJubU1VVRXt7OwYGBlhZWVFXVwe8pe0ATJo0idraWnbu3CkcClu2bKG/v5/t\n27fz4sULwsPDWbFiBRs2bMDIyIgLFy4QGBgo6EG5ubl0dXVRVVVFfn4+LS0tFBYWoquri7u7O4OD\ngzQ2NrJ37142btxIfn6+KBCNGTOGrq4uioqKePnypRikhYaGUlFRIUJY48aNw8zMjEPTGRVJAAAg\nAElEQVSHDrF48WIUFBQwMDDg8ePHXLt2DQsLC65du8bHH3+MlZUVZ8+eRU9Pj7Vr19Lb28vg4CBa\nWlpcv36dKVOmAG+boj4+Ptja2jJx4kRu3bpFUlISH3/8MS0tLXzzzTfMmDEDExMTfv31V27evMn8\n+fN5/fo1FhYWjB49mjNnzhAXF8ebN2/w9/cHECG2kJAQYmJiqKur48CBA1y6dAk9PT1Wr16NsrKy\nEOJs3boVPT09Dhw4QHd3N+fPnycyMpKWlhYSEhIID38b2p0+fTpPnz7F19eX/Px8Dh48yL59+/j4\n44+ZPXs2U6ZM4d///jdWVlZ0dXWxdOlSjh07hrW1tfCYPH/+nFOnTqGlpUV1dTWAkB319/czbtw4\nzM3NmTdvHkpKSmKXIZFICAoKwtXVla+//pq2tjZyc3OxtLQkOjqaCRMm8OTJE+rr68Ux9N+5/hGL\nQm9vL0VFRRQUFODq6kphYSFbtmzh7t27Aqfu7u5Odna2gGAODg6SlZUlYBN3794lLy+PyZMnc/fu\nXeAtJjsnJ4fr16/j7OwsDNZ9fX2iXpybmysm3oDgAX777bd0dXUxcuRITp48SVVVlWjyVVRU0N/f\nT1NTk/jaPTw8SExMFH2JsrIypFIpc+bMQUdHRxiGZGqw3t5ezp8/T1xcnNiB2NvbY25uTkBAAHp6\nepSVldHU1ERHR4dgSaiqqlJbWyuy+B0dHRgYGHDx4kVsbGz45ZdfGDt2LNXV1fT399Pe3i7EKVpa\nWrS1tWFqaoq2tjYzZ87Ex8eHO3fuYGVlxbNnz0TOXmbm/vHHH+nt7cXLywtLS0s8PT3JycnBwcFB\nlKji4+P59NNP6erq4uHDh6SnpzNy5EgOHjzIyZMn6erq4vfff2fDhg1oamqSnp4uRDKzZs2isbFR\nHN8VFxdTVVXFhQsXePr0KaNHj6auro47d+4wfPhwNmzYQE9PD7q6uvj7+5OWlsbLly+xs7MTngcZ\nc8PExISEhATxfSaLUxsYGODu7s6sWbNoaWlBU1OTvLw8qqqqSE5O5uLFiygrK5Obm4uVlRUeHh4C\niCKbc8k6Cl5eXpibm/P06VNKSkrYtm0b169fx8bGhrVr15Kfn09KSgo3b95k3Lhx3L17Fx8fH0pK\nSujp6UFfX5+VK1eSkZ
HBvn37hM+kra0NKysrRo8eTVZWFmFhYbi7u1NTU0NzczP5+fm4urqSlpbG\nTz/9hLu7O729vSxatAgZBFleXh4DAwOSk5MFr+LvXP+IReHNmzciH/7nn39iYWFBX18fo0aNIiIi\ngrq6OlJTU5kyZQqTJk0CwMnJCVdXV2JjY0lKSmLLli1UVVURFxdHUFAQAK9evaK0tJQ1a9agpqZG\nb28vbm5u2Nra8tVXX5GRkYGCggLZ2dk8fvyY8PBwXr9+jVQqZebMmZiZmZGfn8/nn3/OoUOHxJl3\nQkICDQ0Ngm68fv16EhMTsbe3p6Ojg9GjRzNp0iSRv8/IyMDS0pJ169aRmJgo/IIfffQRnp6eYrjU\n3t7OxIkTUVFR4c2bNzx79oxr166Jp7vsiGzbtm10dXXh4uLC3bt3qaqq4tKlS7x69YqSkhI0NDQE\nICU4OBhvb2/k5OSIiYlBRUWFq1evcuLECaKiorC2tkZPTw8nJye2b9+OmZmZwM7p6upy4cIFampq\nuHfvnhCbyt5lT548SVRUFKtXr8bExISOjg5Wr17N9evXkUqlnDhxgo6ODuTl5dm2bRuRkZEcP36c\nxsZGrly5wsaNGykqKmLWrFkC8V5aWsrcuXPp7Oxk2LBhxMbGIi8vj5mZGQ4ODjx69IjBwUFu3brF\n8+fPxSumg4MD1dXVgq4sw87LtuMeHh7MmDGD5ORkWltbUVRU5PXr16J9mZubK7Drr1+/5s2bNygq\nKhIZGUlkZCRv3rxhcHCQ9vZ2KioqmDZtGgAXL14UJjGpVEplZSVycnJ0dXXx7rvvoq2tTX5+PjNn\nzuTrr7/Gy8sLDQ0NmpubBR6vpKSEzZs3Y2lpKR4Q8DYD4erqip2dHT09PTx+/JiamhqmTp3K8OHD\nUVBQYMaMGXz33XeEhIRQXFxMfn4++/fvJz8/n/7+fqRSKePGjfvPGzTK5CwSiYRp06aRk5PDsGHD\nsLS0JCQkRNiN09PT6evrY9myZSQnJwt3Y1dXF+3t7Xz00Uf09vYK9ZesNSjTn0VHR6OkpERNTQ1f\nfPEFAwMDzJo1S/Dz//rrLxoaGkSnoLGxEScnJ65fv8769esFun327NmYmJhgZmbGq1ev0NHRYfjw\n4URERBAaGip05IaGhrx48YLy8nKmTZtGbm4u3d3dbNy4EQcHBzIzM8UQEd4mO3V0dAQxZ/Xq1ejo\n6AjKsOyo9eDBg1hYWJCfn8/q1av59ddfOXv2rLAaVVVVUV5eLgCxWVlZxMXFce3aNRITE9m1axeu\nrq5kZWXR1dUleJXXr18nKSkJb29vQkNDycvLIzQ0lHv37nH48GFaW1sZN24c27dvZ3BwkIULF3L1\n6lVWrlyJu7s7jx494vTp0zx//hwPDw8+/fRTbt26hbm5Obdu3cLFxUUMds3NzVm+fLkQ6sp2CmPH\njuXIkSOcPHkSRUVFOjs7hYn8q6++EqbwqVOnoq+vj5+fH05OTnz33XcAQsySnZ2Nvr6+qFMrKiry\n5s0bFi5ciIKCAocOHcLAwABlZWVevHiBhoYGnZ2djBs3joyMDPT09HB1deWTTz7B09OTtLQ07Ozs\nKCwsRFFRkcuXLwNvW53Nzc1CLqSlpUVrayudnZ309PRQVFQk/JFubm6kpKRw79498YApKipCIpFw\n//59IQoGRKfj8ePHHDp0iOLiYvr6+tDR0eHIkSMkJiZy9uxZ4uPjefPmDVVVVTg4OPDDDz+I4XdR\nUZE4+v3/rDr9f+uSSqXk5uaKbeXmzZu5fPky1dXVGBsbM2zYMAICAsjNzSUtLY2rV6/i7u7O5MmT\nBdrMwMCAM2fOiAANIIAnUqmU3t5exowZg42NDcXFxYwZM4aQkBAcHByorKwkMzMTOzs7wsPDiYyM\nxMDAgMbGRlxcXBgxYgQVFRViaPXXX3+RnZ1NRUWFkH729/cjJyeHh4cHs2bNQk1NDXV1dRoaGvjX\nv/4lpK4lJSUkJCRQXl4ujkcfP34MvCVFFRQUkJCQIBaHMWPGkJOTQ2dnJ46OjlRXV6OgoMDt27dF\nkWvXrl18/fXXqKiosG3bNu7duyd8Bbm5ucyaNQsvLy82bdpEYGAgO3bsoKamBnNzc0G3evz4sTgr\nb2lpobW1FRsbG6Kioli2bBlLliyhq6uLmJgYFi1aRGFhIQkJCRw5ckTUyH/55Rd+/PFHgoODMTc3\nZ86cOVy4cIHU1FRKSkqwtbWls7OT8PBwampqMDQ0pKWlBTk5ObEwyo7qbt26RUNDA9bW1jx69Ahv\nb29Onz6NmZkZxsbG4vXg4MGDKCoqMmbMGJydnUVbcOTIkWJGBAg4zZMnT7C1tRXhL319fZYtW8bc\nuXPx8/Pj+++/Z9GiRSgoKGBhYUFUVJSQ+2ZnZ7Nu3TpxGgZvS061tbWinizDtJeXl1NXV4eBgQFT\npkwhPz8fOTk5Fi9eTGtrK6mpqSxevJjc3FyUlJSIjIxEUVGRpqYm4K2FbNiwYSQlJbFz5076+voI\nCgoSeIGWlhaCgoJYunQp69at48qVK4wcOZLBwUGSkpJ48OABzs7OBAUFcfHiRY4fP/6378d/xKKg\nqqpKYGAgEyZM4M2bN6xbt46pU6diaGhIXl4e8vLywv3o7OyMm5sbNjY2dHZ2kpycTGBgIJmZmejo\n6BAfHy9sOPPmzSMjI4OioiKBj5dhuAcHB8nLy+P3339n2bJlLFu2DAsLCxwdHVm7di0KCgp0d3ez\nd+9eKioqaGpq4tChQwCMHz8eU1NTKioq2Lhxo5DGbN++nWfPnmFjY8POnTvR0dEhMDCQ2NhYFixY\nIMjNssVl9uzZQkAKbxuTGhoa7N27FxcXFy5evEh0dDQuLi5MnjyZ2NhYFi9ejJKSkug1mJiYiNcD\niUTCpEmTyM3NRUtLi/r6ej766CMePnzImDFjBMfyyZMnwl587do1HBwchF7ew8OD9PR0zp07h7+/\nPyoqKlhbWxMcHExYWBgdHR2cOXMGNTU1JkyYwOHDhzl9+jT9/f1s3LiRhIQEoqOj0dHRITU1VaRR\nGxsbefDgAR9//LHA3CcmJlJUVERra6s4Rq6srERVVZWUlBTU1dWZO3cuU6dOpbGxkc2bNwuega7u\nWx6ws7MzmpqajBw5kqioKDQ0NEhJSaG7u5v4+Hg+/PBD4O0JV1tbm+gf+Pv7k5+fz9SpU8XN39XV\nJQC8RkZGvHr1iv7+flxdXcXJxIYNGzh79qwA+dTX16Ourk5ubi4mJiYcOnSIFStWUFZWJsxexsbG\n3Lp1i6lTp5KRkcGECRNE98HOzo7m5mZZ4lBg3nJycqitrRVtTUtLS06fPo2qqira2to8e/YMOTk5\n0tLSWL58OX/99Rft7e3cu3ePTZs2oa6ujoKCAtbW1igpKQm3yN+5/hGJxl9//fXblStXEhkZibOz\nM9XV1ZSXl1NfX09hYSEKCgqYmZkxd+5cEhISKCoqwsjISABCPD09iY6OFufH7e3
tAm/l7+8vCjl2\ndnY8ffqU9957T/z4/PnzuLi48PTpU2EUkrEENTU1Wbp0KTExMZSVlXHgwAEOHz6MtbU1zs7O4kTB\n29sbJycnoQF7/Pgxra2t5Ofn4+Xlhb6+Pjo6OlRUVFBVVYW+vj6mpqYCkLp//35B4T1w4AAVFRW8\nefMGd3d3nJ2diYiIoL6+noCAAIaGhlBQUBCkqGPHjqGnp8f9+/fR0NBg1KhRdHd3o6uri4uLC2vW\nrOHTTz8Vw7r+/n48PT2xsLBAVVWV0tJSAUcJCwvj8uXLDAwM4OfnR0pKCgcPHhR+wv379zNlyhRe\nvXpFQ0MD2dnZpKamYmNjQ19fH25ubjx69IiysjLu3LmDhYUFHh4eDB8+nMbGRuzt7Xnz5g2XLl1i\naGiItLQ0xo4di1QqRVNTk0uXLrFp0yZBpzI2NiYvLw81NTXi4+OFnOXy5cuoq6ujra0t8Gp79uxh\nzJgxFBUV0dzczNDQECEhIYwcOZLIyEimT59OTk4Ompqaok8go0zFxcWhqqrKkydP6OjowNraWnzW\nenp6mJqasmvXLoYNG8bEiRPp6+tDS0uL2NhYlixZIjQD169fZ8KECcjLy7N27VrOnj2Ln58fxcXF\nGBkZiRM0BwcHLl26RHBwMI2NjYwfPx5VVVVaW1vFQnvlyhU0NTXp7OwkKSmJwMBAlixZIujPM2fO\nJDU1lZiYGObNm8emTZvw9/dnyZIltLe3o62tzdy5c4mKiqK7u5vp06fz888//+fEnA8fPvytv78/\nbW1tpKenY2VlRVVVFYqKikLcCYgyka2tLRUVFcybNw87Oztu377N3LlzxbR4+vTpHDhwAH9/f+rq\n6oiJiSE7O1tswTIzM3F1dRVUY1kwJz09XVCPb926RWhoKGvWrBEJwJ6eHk6ePMmhQ4doaGggJiaG\nUaNGoaWlxblz57C2tubx48diIRs9ejSRkZGYmJjQ3d3N+PHjsbKyQiKRoKWlJTDz9vb2REZGMmLE\nCD788EMRUx03bhxlZWUoKCgwbtw4TE1NGTZsmADX+vv74+DgwJ9//klnZyddXV20tLQwMDCAmZkZ\nd+/e5V//+hfR0dEiGhwQEMCoUaOoqqoiMTGRd955B39/f+7evUtFRQVLly5FU1OTtrY23N3dmThx\notjRrFq1CgMDA+7fv4+WlpbgTY4ePVo4GFRUVHB0dGTatGnY2NgIsU1oaCjd3d2UlJRQVlZGbGws\ntra25OTkoKCgwJgxYzhy5AhfffUVxcXFmJqaUlxcjJaWFn/++SchISEYGxuLVqRs6z9hwgTOnTuH\nk5MT+vr6BAcHU1hYyLx583j9+jWqqqrcuHFD7HJkn016ejrz5s0jIiKCJUuWCKeosbExlZWV1NbW\ncvXqVWTfl66urixevFhwM+rr60lJScHV1ZUpU6YQERHBsmXL2Lt3LyNGjODQoUN89NFHDBs2DFNT\nU6ysrEhJSRFJzP7+fgHQff78Oerq6uKoPSkpCVVVVdHN0dXVZebMmXzwwQeUlJTQ19eHo6OjGGYa\nGhoyceJEGhsbmTt3Ln19fQQHB3Pt2jUWLFhAbW2tjHPxnxNzlkqlQre2evVqXr16xYwZM8QW38bG\nhrt374qZwaRJk1i8eDHnzp1DTk4OW1tbnj17hlQqJS0tjdraWuDt1tLQ0JBJkybxzjvvUF1djaKi\nIlKplPz8fDo7O5FIJOzcuVNM6mWuABmA85tvvqG1tZWenh6BYysvLxcBqaSkJC5dukRDQwORkZGi\nobZ06VJsbW2ZNWsWQUFBFBQU8OTJEw4fPoyRkRHFxcWUlZWJjgIgGpYysk53d/d/u8FDQkIIDw9H\nXl4eCwsLXrx4QUFBAd9//z3//ve/UVdXp7a2ltmzZws0u7y8PDNnziQzM5Pp06cLIpSenp7wF27f\nvp0JEyYQGBhIcnIyERERmJqaUlpaSk5ODl1dXcK5cfr0aZE+TEpKEl7JzZs3s3XrVuGQ/PXXX1FW\nVmbFihViEPzkyRN0dHTo7+9n06ZNDBs2jDVr1vD+++8THR0NvB3cWVpa0traioKCAq9fv8bd3Z2S\nkhLU1NTYtm0bkyZNEgGk27dv8+677xIaGiqekB0dHVy7do158+Yh44XIy8uL9/Suri6kUiljxoxh\nypQpHD58WGy1VVVVsbGxITs7m6CgIF6+fCm6ITk5OXR3dzNz5kwB221paREi2EePHhEUFISioiLB\nwcEoKysLpWFHRwcLFixg9OjRXLp0CXV1dSZPnoxEImFwcJDq6mqqqqoEaPb7779HWVlZgHJv3brF\nO++8g5ubmzgWtrGxYc+ePQIfYG9vT0pKCo2Njairq7NgwQIxYJVBh//O9Y9YFGQ8OnV1df766y9a\nWlpITEykoqKC5cuXo6mpiYWFhfDwVVdXU1xcjKenJ2pqahgZGWFiYoKpqSnjx48XmvCmpiYyMzPp\n7+/n6tWrQmXu5+eHo6MjcXFxjBs3TvyatrY2YWFhZGVlYW1tjYmJCba2tkilUi5evEh2drb4ekNC\nQhg2bJjI5y9duhRtbW327NlDXV0d7e3tvHjxgri4OFJTU2loaMDMzEwIX2VwV5nHUnZJpVJ0dXXJ\nzs6moaGBgIAAYmJihPZs0aJFDA0NcerUKfz8/ETMVUNDQyDvY2NjgbenOtevXyc+Pp7Lly+Tl5dH\ndXU1Y8eOxdraWqTvVFVVUVdXJzExkfLychYuXEhhYSEBAQE4ODiQnp7OtWvXMDc3R1tbGwcHByFD\ncXR0xNPTk3fffVeQin18fAgNDSUhIQFlZWV++OEHQU/q7e3lxYsXTJ8+nUWLFvHo0SMKCgrEsFUG\ncu3o6KCsrEzIdoqKioTVuampiXv37uHt7Y29vT2XLl0iLy8PJycn4uPjqampISwsjLVr12JrawtA\na2srJSUlJCcni+PsP/74g6CgIGxtbSkrK0NeXp7q6mqkUiklJSVYWVmRkJCAnZ0d0dHRQvv+9OlT\nrly5AoCbmxsDAwOCVTl8+HA8PDwwMTFh6dKlWFpaUlJSQmVlJV988QXR0dEsXLiQefPmceLECdTV\n1cnLy0MqlYpXToDt27fj5OTE0NAQy5Yt49ChQ0RHR5OVlUVNTQ1ubm48efKEYcOGsXPnTvHgWrhw\nIRKJhKdPn3Lp0iWio6MZP348EyZM+Nv34z9iUejo6EAqlaKnp4eWlhYODg6kpqaKTnh1dTV6enrE\nxcXR0dGBmpoaqampWFhYUF5eLo6TZGKXlJQUAOED0NLSYvr06eTn55OdnU18fDydnZ388ssv5OXl\nYWBgwLJly9DQ0OCnn37CwcEBVVVVMjIy6OjoENtRGdJKQ0OD5ORkdHV1iYuLY+3atfzyyy8i2+Dm\n5kZHRwcvXrxAXV2dR48eiS2/tbU1T58+ZezYsQIBLsN66+vrY25uzrJly4iLi8PQ0JCenh6mTZsm\ngl1z5szhypUr7Nq1i8rKSgwNDYmPj6
e9vR1LS0vhiNTS0iIsLIxFixaRmJiIjY0N9fX14pjyr7/+\nYtGiRRgYGAiVWk5ODlpaWtTV1dHQ0CCOtUxNTbG1tSU/P5+MjAyys7NJSEjg5cuXFBcXo6SkxO7d\nu1m5ciV+fn7ExMRQX19PcHAwd+7cYcWKFXz44Yf4+/sLj0dERARHjhyhvb2drKwsZs+eDcCqVat4\n9uwZvr6+2NjY8PLlS06ePElnZyfV1dWMGzeOwcFBXFxciIyMZM6cORgZGWFtbc25c+dQUVHh008/\nRSqVCo4BvF3IZUYuLS0tBgcH8fLywtPTk76+PgYGBujt7UVFRYXm5mZWrVrFq1evmDt3Lnp6ekgk\nEvz8/JCTk0NdXZ33338fACMjI1xcXDh48CCjR4/m9u3blJSUcPz4cT788EMCAgIYMWIEvr6+rF+/\nnvfff5+ioiJOnDiBkpIShoaGREVFUV5eTnV1NQsXLgTeypGfPn2KRCLh0KFDLFu2jEWLFjFhwgSh\ntNfR0cHX15cDBw4gJydHRkYGn376KWpqasJ90t3dzeXLlwW39O9c/4iZwq5du751cnJCQ0MDRUVF\nLCwsyMvLw97enpaWFhwdHTE2Nqa9vV1k4jMyMvD29qa3txcDAwN6e3vR0tLi4cOHfP311+zbt094\nFfr7+yksLERbW1uQiGRR5uzsbKysrCgpKeHzzz/nl19+EVVrDQ0N9PT0mDFjBvHx8RQUFJCeni6c\nl8rKylRUVKChoSHqswMDA6IrYWRkRGNjI5MnT6a3t/e/vSbIXg9OnTqFvr4+cXFxjBw5EmdnZ2pq\nakROwtfXl6ysLLy9vcnPz0cikaCiokJVVRWpqakMDg4SFhZGaWkpGRkZuLq60t7ejqamJtevXxdH\nfkFBQWIBLS8vF9VtHx8fwsPD2bJlC6ampnR0dPDee+9hbm7O2bNnOXDgADdu3BCNSRk7Ul9fHw8P\nDxYtWiREqTKdm2ygOjAwIE4SvLy8xDt9f38/tbW14skWFRWFqakp9+7dQ0tLi5cvX+Li4oKrqyvZ\n2dls2LBBlMVkx8uRkZF8/fXXpKWlYWNjg4WFBTNnzmTHjh34+PigqKjItWvXxPHmtGnTxILp6+uL\ntrY2XV1dXL16VbhAX7x4IUS6b968wcbGhjFjxlBZWSk+x87OTszNzXnx4gWJiYmMHTuWqqoqvvzy\nSzE4vXr1KgsWLMDCwoKSkhISExN58uQJWlpauLq6kpCQQG1tLcuWLaO4uJiZM2eK1+K7d++SlJTE\nN998I4J5W7duFacdSkpKjBgxgt9//x1zc3N6enq4e/cu9vb2NDc3Y2lpKeA3RkZGjBgxghs3bsji\n1P85MwVNTU2xDe7r6yM/P1/w7t+8eSNquyNGjBDv/IGBgbx48YLKykpx1isrDv3ww1tW7Pz58wWk\nNTQ0FHl5eeTl5YG34ZDOzk5hEk5ISGDLli1ERUURFxcnbuYHDx5w+fJl2traBC5cVVUVS0tLVqxY\ngY6ODj4+PnR1dfH8+XO6u7uJiIjg3r177NixAxMTEzIzM1FSUsLHxweJREJraysbN27E399f9Obh\n7VPn8uXLPH78WCjbjxw5QkFBAWVlZWhpaYlSkry8PFOnTuVf//oXhw4dor6+nh07dqCjo0Nra6ug\nBSkrKxMaGoqcnBzOzs74+flRWlrKpUuXOHbsGBcvXuTChQsUFxfj7OxMSEgIBQUFZGZmChaBLHXX\n399PV1cXx44dIyoqChsbG2GGHhwcFCErLy8v7O3tuXHjBmZmZmLeYWNjg7a2NqWlpSJLcPnyZUxM\nTERfZcKECSxfvpy+vj4MDQ0ZPnw4J06c4MSJE8ycOZPOzk6ePHmCn58fysrKZGVl0d3dze7du/nt\nt98YO3asgLbIhL/wVk1oYGBAWVkZ165d4+DBgwKxHxMTQ2FhoQDnVlVVIZFImDp1Ki9fvqS0tFRQ\nulatWkVlZSWJiYnA2/lSb28vX3zxBQkJCXz//fesXLmSsWPH0t3dTVZWFkpKSnh4eFBZWcmff/5J\ndXU1M2bMID09nfj4eDw8PEhOTiY2NlbsFH777TeUlZWxsLDg8uXL2Nra4u7ujr6+PteuXePkyZOY\nmJiIY0stLS26u7uFsb2mpobs7Gyys7OZNWuW2I3+nesfsVM4evTotyEhIcyePVtw9FtbW8WWqqur\nS9BsIiIimD17tgj0yJ64hYWFAmoRHx9PZmYmP/zwA7dv3xbb39TUVHbt2oW1tTW1tbXo6uqio6OD\nh4cHAwMDSKVSrKysCA8P58qVKyJIExERIf6d+Ph4Zs+eTVNTE1lZWQLYUVNTw/jx45GTkyMwMBBn\nZ2cUFBQYHBwU2jOZH9PHxwctLS3MzMxQUVHh4cOHZGZmMmLECKRSKQMDA9y9e5cpU6aIXY5UKqW5\nuVlkEmxsbNDV1RVDxStXrgiOosyU9Pz5c6qrq8nNzSUrKwsdHR2Sk5NZsWIFnp6etLe3o6GhwYQJ\nE2hoaEAikdDV1SWMUQEBARw/fpz29nbU1dVpb2/H39+fvr4+/Pz8aGxsZNasWQLvbmpqymeffYaO\njg7btm1j9OjR3L17F19fX2pra4mMjGTnzp08e/aMlpYWYeiWSqUMDg5y5swZxowZg7KyMs7OzsKV\nYWBgwOjRo9HU1CQ5OZng4GBBs66qqhItwba2NlEWevjwITNnzqS8vJz79+/T3d1NZ2cnZmZmODk5\nibBRQ0MDlpaW9PT00NDQALzduhcXF1NYWMj06dO5ePEiHh4ejB8/nuLiYkF0TklJEW6FyZMnU1ZW\nRmFhIZ6enly9elVYomUGMwsLC+zt7VFQUBA/p6amhoKCAkNDQ6iqqvLixQtiY3w4GrcAACAASURB\nVGP57LPPxL1QW1vLtWvXyMjIQF9fn/7+fvbs2YOnpye7d++mpaWFnp4eHj58yJIlS+jt7aWqqoro\n6GgqKysFMObu3bt/a6fwvzTn/73+9/r/z/WfQ3O2sLAgPDwcf39/njx5wvz58zl//jxaWlrk5eXh\n7u6Ov7+/aBQ6Ojpy+PBhvL29SUhIYPbs2VhbWwsJiUwnvmfPHlJTUzl06BCff/45LS0t2NjYiMGk\ng4MDhYWFQjbb19eHk5MTdXV1fPXVV4SGhvLLL7/w+PFjfv31V/bt28cHH3xATEwMaWlpNDY2Ymdn\nJ+LNsplBRkYG/v7+PHr0CF9fX5ydnXn27BmvX79m48aNVFZWkpeXx/Llyzl8+DABAQFMnjyZb775\nhjdv3uDs7ExoaCgqKir89NNPqKmp4eXlxa1bt+jr62POnDnEx8djamoq3BOhoaE0NjYSHx/PnDlz\niI2NpbW1lYKCAj777DNUVVVJTEykq6uLzMxMAgICCAwMJC8vj8LCQgwMDGhoaGDhwoV0dHTw6NEj\n5OTkiI2NJTQ0VLAFZSKV4cOHM2rUKJHZd3FxISsri0ePHjFr1iyam5sxMjJi3LhxlJeXc/HiRXR
1\nddm0aRP79+8nJCREoMtk5/5jxoxh3759jB49mitXrmBoaIifnx82NjZs3rxZBH+Cg4OJjY1FVVWV\nhIQExo4dS3Z2NoaGhmhoaKCrq4ufnx9ffvklkyZNYvPmzYSHhwvUW3d3N+rq6vT19XH8+HFMTU2Z\nOHGiaFvK/j/2798vkp16enoUFxdjZ2dHWloaeXl5HD16lPPnz3P69GnOnDlDXV0dbW1t1NbWoqCg\nQH9/P/Hx8WKoPHbsWJydnfniiy9ISkrihx9+QE9PT8xEtm/fTnh4OJMmTRImtJMnTzJ79mxKS0sZ\nPnw4ly9fZtasWZiZmXHz5k309fXx8vJi9+7d2NjY4OzsTH9/P/fv38fGxobJkyeTmZnJmjVrBC3q\nf7r+Ea8P+/bt+/b06dOUlpairKxMXV2dIOrEx8ezcOFC6urqxLY5MTERY2NjZsyYgZGREUVFRdTW\n1lJSUkJHRweRkZG8ePECPT09HB0dKSoqYuTIkSJ/XlBQIAZRurq64vjro48+Ql5eHgUFBaqqqrC1\ntSUtLQ1bW1s2bNhARUWFiAxLpVK0tbUZMWIEDQ0NlJeX4+DgwNSpU8nLy+PmzZucOnWK33//HW9v\nb5YuXcqzZ8/o6+sTk3lZYEheXp47d+5gbm6OnZ0dQ0NDGBkZcfHiRYyNjXFzc6OpqYna2losLCyo\nrq7Gx8eH1NRU6urqaG1tRVtbm3v37hEQEMCVK1fYvHkzZWVlzJgxgw8//BBra2txNBYaGkpLSwsv\nX76kr69PzAq0tbWJj49HKpWKrXNvby+NjY1IpVJ6enoEEMbc3FwYr6Kiopg2bZrgRgwfPly87587\ndw5tbW28vLxQUVFh//79dHd3ExcXR3d3N319fVy/fp38/HwSExOxsrJCSUkJIyMj3nnnHfG1enl5\nMTQ0xKZNm/j666/FInDr1i3RVcjKyqKzs5M3b97Q0tJCf38/+vr6YqFSV1dny5YtrFmzRgwVg4OD\naW9vJyMjQ3RhJBIJly5dQk5ODi0tLUpLSykoKEBDQ4PGxkasrKyIiYmhuLgYVVVV0QuRlZzu3r3L\n6NGjUVNTIzc3l/Pnz4swWFVVFb6+vrx584bRo0fT1dWFpaUl+/btw83Nja6uLm7fvk1tbS1WVlZo\naGjw7bff0trayokTJ/jss8+4ceMGhw8fFqzH1tZWjIyM8PPz4+XLl1y9epXQ0FCKiooICwvDwcGB\n/fv38+LFi/+cQePQ0JAIpyQnJ2NmZkZrayt5eXkYGRlRWVlJdXU1RUVFPH36lOrqahwdHTl37hwV\nFRUiCNTV1cW8efP44IMPANi4cSNr1qxhYGCAx48fM2XKFEEBlkqlODo6Mjg4yJQpUwgPD+fcuXOC\ns9/S0iKOsdrb28XUG0BOTg4XFxfk5eWpqalhaGgIfX19kpKS2LZtG2ZmZuzatYuPPvqI7777DkVF\nRVauXMnixYtFdn337t309PTg5ORETc1bSXd7e7t4QsmCViUlJQKUUlZWJj6brq4uzMzMxA2Tnp6O\ns7MzRUVFqKmpMX/+fADS09NZtWoV9+7d4+OPP0ZPT0/QnGWDu8DAQAICAliyZAljxoxBXV0dW1tb\nERVWVFQU2ndlZWVWrlxJfX29+Bqtra3JysoiNjaWR48eiZ2VtbU1YWFhSKVSDA0NMTIyYvv27Tg6\nOrJmzRoUFBTo7e3l008/FZ/B8OHDxXu5TAArYzzo6emxdetWtLS06OrqIjw8nG+++UYwKHfv3o2q\nqqqIcvf394vKsAylvnfvXn7++WdKS0vx8fERR4PTp08nOjqa33//nZ6eHmbMmIGtra0Ay+jr6xMU\nFCT6EzNnzgQQ1Ov169ejoKDAnTt3WLduHRYWFpSVldHf38/mzZsxNTWloaFBQHemTZuGlpYWurq6\nPHv2jOnTpxMSEiIq2du3bxfDR19fXxYuXMiOHTvEArtv3z7Gjh0rdiZtbW3k5OSwYcMGPvnkEwID\nA1mxYgWPHz8mMjKSdevW/e378R+xKCgqKgrYqexD0dXVxcLCgjt37nDixAn++usv/Pz8aG1tpbW1\nlaysLFRVVdHS0mL79u2MGjWKadOmCUgFvG3Gbd26FYlEQnZ2Nvfu3UNOTo6wsDDMzMxYsmQJg4OD\nLF++XAAtSktLqaio4P3338fb25uBgQHmz5/P119/LZ4EnZ2dvHjxguDgYOLj44mLixMDOmdnZ6RS\nqUB9y4Qyrq6uXLx4kfr6eiH+aGlpwdPTU9B2Vq1aha2tLV5eXmRlZdHa2oqlpSWhoaG4uLjw6aef\noqGhIfh8eXl5AkLj5uZGX18fxsbGODg4sG3bNtrb25GTk6Ovr4+QkBC+//57enp6UFZWZuTIkdy+\nfRslJSVSU1NJTk7m6NGjpKeno6qqiq+vL9euXePLL79kxYoVTJw4EXV1dezt7Tl79ixNTU0Cctrc\n3Cy6KjLGoUyy8/z5c1paWkhKSuLVq1e0tLSQn5/P119/DbzVudXX17N+/XoAcnNziY+P5+TJk7i5\nuVFYWIiPjw8VFRU4ODigq6tLd3c3O3bsoLW1FVNTU/r6+ggMDOS7777DwMCADRs28N1332FoaCiI\nQ/b29sTExKCkpIS6ujqjRo3i4MGDGBsb8+DBA4qKimhqakJBQQFjY2O6u7txc3Pj4cOHhIeHo6mp\nyY0bNwRtS5Y8DAsLQ09Pjy1btnDhwgUKCgqIiIhg7969DA0N8fnnn4tj74KCAqKjo2lvb8fY2JiM\njAzk5OTw8vKivr6eGzdukJWVBcBXX30lHn7vvfceCQkJNDc3s2XLFg4cOICxsbGgS8Hbxb+xsZHk\n5GTq6+vR0NAgMjKSO3fu4O3tLQJ9f+f6R7w+/Pjjj99OmTKFBw8eMG/ePAwNDXn+/DlTp07F3d0d\nT09Ppk6dSlZWFh4eHkyePJnm5mYSExNRVFQkKysLR0dHzpw5g7u7O2ZmZvzxxx8MGzaML7/8kkOH\nDhEVFUVBQQF2dnakpqbS19dHZWUlR48eFR6FpqYmvLy86Orq4sqVK/T39/PgwQP8/f0JDQ0lKipK\nsP4nTpxITEwMc+fOFTHS4cOHExgYSFpaGn19fXR2dvL69WuePn2Kj48PXl5eDBs2jISEBHx9fXF1\ndeXYsWOEhYUJFoMMAFtVVcXYsWM5deqU0KLb2tqiq6tLXl4ely9fxs3NjV9++UUUkby8vMjNzWVg\nYEBEa+3s7ARfUfYaIBOdtrW1cfDgQTZt2iSODwcGBpCTk0NFRYX+/n5GjhxJXV0dM2bM4NSpU6JP\n0tbWxueff84ff/wh5LtLliyhuLiYWbNmoa2tja+vrzi+tbW1xdzcnIGBAcrKyggJCRGvLzKM2+HD\nh1m7di2rV68mIiICNzc3YmNjhaRl7ty5yMvLExwcLBbzuLg41NTUBAR23LhxXL9+HXd3d06cOIFE\nIiErKwsHBwdSUlIwMDAQN6GMjTBt2jRiY2MxNjYWpx
JDQ0PcunULHx8fCgsLxeI6duxYMjIymDFj\nBr///rsA/ZaVlSEnJ0dpaanwUxw7dgxDQ0Pa2tpITk7G3d0dHx8fcnJykEqlrFmzhnPnzhEfH4+v\nry/x8fFYWlpy9epVdu3ahZycnGB07tixg3379rFmzRqxw5ABXPT09Bg7dqxodqqrq3P8+HG2bt1K\nbGwsTU1NtLW1kZSU9J9TiPr555+/ra6u5oMPPiArKwtFRUX6+vpob2+nqKhIpO48PDw4f/483t7e\npKWl4ePjw8KFC/nzzz+Rl5fH2dmZzs5OdHR0OHbsGAcPHuTRo0fY2dmRmZmJm5sb9+/fp62tjbCw\nMCorKwkODubYsWNoa2vz3nvvcfz4cb766it+/vlnHB0dWb58OU+fPkUqlVJfXy8AqHfv3sXLy4v7\n9++joKBAV1cXU6dO5fDhw4LQM336dH744QfMzMxEnLmlpYX09HSMjY05cuQIw4cPp62tTbyXP3/+\nnICAAOrq6qisrCQsLIyhoSEqKytpbW3F19dXyERaW1sF5GP+/PkMDAygpaVFeXk51tbW9PX1ER8f\nT0NDA1OmTGHChAn09PTg7OwsQKehoaEiPZifn8/r169Zvnw5Fy9e5OzZszQ3N/Puu+/y888/Y2Nj\nQ05ODpWVlXh7e3PmzBn8/f0ZP348X375Jc7OzowfP566ujp6e3v58ccf8ff356effkJRUZHm5mZS\nU1NRV1cXFeLa2lpCQkJQUlLi559/ZmBggG3btnHgwAFSUlJEsu/Ro0eCynTkyBFevnzJokWLmDRp\nEnv37qW3t5cRI0bw+vVrkbmQl5fHxsaGmJgYEQk3MDCgqKiIxsZG/P39sbKyIjc3F11dXaysrHBw\ncOD69euYmZlhbm5OXl6eoFPl5eWhrq6OkZERaWlpREVFifCSuro6FhYWzJkzh5qaGhoaGtDQ0GBo\naEjU2mfPnk1VVRUqKiq8evUKd3d36urq+PDDD7l9+zY6OjpUV1eTkJCAl5cXBQUFzJ07l+bmZnJz\ncwkLC2Pr1q3s3buXlJQUxo0bJ+rwJSUlojrf3d1Nd3c3L1++JDg4mHHjxmFnZ8fp06f/78wUJBLJ\ncYlEUi+RSF78Hz+nJ5FIHkkkkqL/+qfu//Frn0skkmKJRFIgkUim/Z1FQUFBgR07dpCUlERhYSGp\nqan09PSgoaFBUFAQ06dPp6enh2fPnrFkyRKePn2KjY0NKioqPHnyhG+//VbkwSUSiVCDXbhwgZcv\nXxIdHY1UKqW4uJjBwUEWLFjAhg0b6OrqwsDAgKNHj4rhko6ODt9++y1+fn6oqqpy+/ZtMjMz8fb2\nZvLkyQC8fv1a+BlXrFjBwMAAr1694ubNm7i5uTFq1Cih8frtt98YGhpCW1ubgoICKioqWLlyJba2\ntlhaWmJnZyemwjJwraKiIgYGBgQHB/Pw4UMqKipwdXVl1KhRFBQUYGxszMaNG0lJSUFVVVXcbLLP\ncvXq1SQlJTFv3jwcHR0JDg7m9OnTGBoakpOTI6zYjx8/xtPTk/v377Nnzx6ampqwsLAQOvvZs2cT\nGBjI+vXrmTp1qjgzV1JSor6+HjU1NUHMkvk2uru7aWho4IcffmDt2rX09fWxc+dO5s6dS0dHB56e\nnowaNQp3d3cyMjJoamoiLi6OS5cuAW8HmHPnziUlJYUPPviAxYsXs27dOubNm4e/v7+Yh7x69Yo1\na9YglUqFQ9LW1lb8/cqCOxKJBEAwEerr6zEwMBBYterqampra0UV+vr16wwMDFBfX099fT2amppc\nuXKFS5cu4eTkRHNzM4cPHxZYQCsrK6ysrLh69Sr29vbcvHkTExMTtLW18fT0RCKRMH/+fIaGhti5\nc6dQw7m4uPDJJ5/Q1tbGlStXmDVrFrW1tQI24+npiby8PCdOnBDNWU1NTVEOk1G/jIyMUFRUFPOx\njIwMGhsbCQgIECdZJSUl/61f8z9df2emcBKY/v/6ua1A9NDQkAMQ/V8/RiKRuACLANf/+m9+l0gk\n8v/TH6CsrIySkhIJCQl4enoSFBRES0uLgKZs2LCB7u5uVq5cKXj8enp6gqLb29tLc3OzoD3LCL53\n795l4sSJTJ06lQcPHvw/7L1nWJXnurZ7Dnob9F6kSZGmFEF6U8GK3WlJbNFEY8qMK2ZmzhRNTFw6\nkxgT06wYYy9BsYMiRZqAgID0joBI73XsH87x7Lm+79h7ZX/7z5zHsZ5fw+E4pPi+z/vc931d54WH\nhwcxMTEcOHCADRs2MGXKFM6cOSNOHUZGRsyfP59NmzYhkUjEf66zszMNDQ3U1tYCiOMuwMcff4yq\nqiqrVq1i6dKlmJmZkZSURHZ2Nrt27eLMmTMEBwfj7OxMT08PMpmMwsJCWltbRdah3Kuho6PD8PAw\nt27doquri++++47AwECcnJwEr1EqlVJYWCg2oUePHhEQEMD4+LjAp2dmZgqYi5eXF0FBQRw8eJD6\n+npUVVUxNDSkoaGB2bNnCxvuggULmD9/PrNmzeLgwYMAjI2NsX79eiQSCY2NjTQ1NTF9+nTU1NSI\niIhg/fr1VFZWAi+x9729vSQlJVFcXIyvry/nz5+nsrKSs2fPUl9fT0BAAGNjY8TGxuLv7094eDgW\nFhaMjIwIonVQUBCRkZGipj558iRFRUWiyRsfH4+TkxOOjo6Ul5fT29vLpEmTmDZtmiA1p6enI5FI\nWLdunegDyR2G9vb2QuGqpaXF1KlTWbZsmbB/a2trk5qaSkVFBW5ubigpKeHj4yNOBEpKSuzatYuf\nfvoJQCQxffTRRyQkJGBlZcW5c+eIjY2lsrKS+fPnk5OTI/w369atY3BwECMjIzZu3EhtbS07duxg\n06ZNvP766zg4OAAvAbZ6enp4e3uzd+9eSkpKKC0tRU1NjUmTJjE+Pi6YG/IyyN7eHnNzcxQVFUlP\nT2fz5s0cOnRIlM5/dP23m4JMJksBOv6Xt2OAk/94fRJY9E/vn5PJZMMymawGqAR8/7uv0dPTw+PH\nj+np6WF8fJzm5mZ0dXUF5mvVqlW8+uqrpKenY25ujpeXFyEhIURGRgplopqamohVkzcb9+3bR0lJ\nibgIk5KS0NLS4tixY+Tk5HDmzBlmzZrF5s2b6evrIyMjg/b2dnGU7OnpITMzk46ODhITE0XO48TE\nBIsWLSIhIQEXFxfU1dW5desWr732GgUFBbz33nvU1NRQXV3N0qVLUVdXp7GxUdT0RkZGvHjxAldX\nVzGCg5dPM7liTr5ROTo6YmRkxOrVq5k+fboICZV7/vft20deXh4jIyMkJiYKxNeGDRvo6Ojg2bNn\nfPXVVyLT0NramsLCQl555RWcnJyE3FnuriwpKeHDDz9EUVGR0NBQdu3axcaNG8nOzhbwl6lTp5Kd\nnc2jR4+wtrYmMTGRrKws/uM//gM/Pz+srKxYtmyZUFgGBQWhp6fHzz//zPXr19HV1SU9PV0Y2GQy\nGRMTE8BLO
nJ6ejqJiYlCd3D16lVmz57N0NAQNjY29PT0CA6nnZ0dmzdvpr6+noiICO7evUtgYKD4\neXp7e4GXLA55ienu7i70GnJqc35+Po8ePSIjIwNvb2+cnZ1JSEhgdHQUHR0d8bNLpVIqKipQU1MD\nEFoWec7p8ePHWbJkCV5eXoSGhnL69GkxIkxNTeXLL7/kxo0beHt7M2/ePBwcHDh+/DiRkZFcuHBB\nyKfr6uowNjYW7AstLS00NDTw9fVl+/btIvQmKyuL3Nxcent7qaur49KlSxQUFKCgoMDhw4dpbGxk\n1apVfP311//dbSjW/+n0wUQmkzX/43ULYPKP1xZAwz99rvEf7/2/ruHhYSIjI9m4caOgycybN4+8\nvDwRfZWVlUVcXBw3b94kPT2d5ORkcfxUU1Nj7dq1SKVS7t+/z+nTpwEoKCjAzMyMdevWYWNjg729\nPd988w1nz55lypQphISEiHn/7du3eeuttzh06BD79+8XcVx79+4VQa9yzp2LiwtdXV3Y2tqSnJws\nzE+bN29m/vz5IqympKSE999/n4yMDAoKCigvL0dVVVXYdJ2dndHT08PNzU18v76+vnzyySesWrWK\nsrIyXrx4wYULF0hLSxPcx8LCQp49eyY4EHIEuLa2NkpKSixfvpzDhw+TmJhITU0NAQEBbNmyBQMD\nA+zt7UXTra+vD2NjY+bMmcPw8DANDQ0i+szV1RVbW1tR49vY2PDaa68xadIkMa+fMWMGFRUVhIaG\noqqqytGjR6muriY1NZUffvgBqVSKpaUlly9fprW1FTc3NyYmJpg9ezYDAwOkpaWJRl5QUBAAcXFx\n/OlPf6K4uJijR4+iq6tLS0sLT58+xcDAgP7+fsLDwwkICBBy9Fu3buHr68v4+Dh79uyhvr4ea2tr\nYRYDuHDhAtHR0VRVVfHs2TMyMzNZsGABBw4cwMDAAGdnZyGbnzt3LsHBwUyaNImZM2cK9J+ZmRm2\ntrYMDg6KoJ1jx44hk8mYMWMGNjY2/Pjjj+J6LCkpwdvbG3t7e5KTkwkNDWXnzp2sX7+ekydPkpCQ\ngLGxMampqZibmwvtB8CmTZvo7+/nq6++YsOGDSQkJHDt2jXKysrYu3cvzc3NXLhwQcjOlZWV6e3t\nZdasWXh4ePDtt9/S1NREZGQkJ06cIDr6fz3s/z+v/98jSdlLnfT/Z5myRCLZIpFIciQSSc74+Dgp\nKSlMnjwZExMTtLW1uXDhAs7OzqipqbFixQrMzMyYP38+XV1duLu7s2fPHlRVVfnkk0/461//KkCc\nn3zyCZaWlsDLcZf8xj979qzwJTx8+JDx8XEWLlyIg4MD06ZNIzg4GCUlJfH0PHPmDK2trVRWVuLh\n4cG8efOYN28e8NLqfeHCBZFklJOTg6+vL/39/fz00080NzczNDQk+hM7d+5EX18fXV1durq6MDc3\nR1VVldLSUg4cOCDGWwYGBrS0tFBQUEB9fb0wcE2ePFk0jsLDw4mMjERJSYkLFy4QFBSEn58f3d3d\nuLi48OzZM4qLixkbG2PNmjVs3ryZR48e8cMPP3Dq1Cny8/PJzs7mvffeo7m5mf7+fjIzM7lz5w7b\ntm2jvb2dw4cPC1emqqoqnp6eVFdX8+DBA+7fv09CQgJdXV309PTg4uJCYWEh06ZNIzAwkNu3bwsm\nY2VlJa2trejp6VFQUICLiwvh4eECoqupqSk2RzlkxcHBgYcPHwqsnI+PD2vWrOHZs2cMDQ1hbW1N\nQ0MDZ86cYcmSJaxcuRIHBwdmzpyJt7c3w8PD+Pn50dzczNatW0XATkdHB9nZ2SJVW0FBAX19faRS\nKaWlpRw6dAhHR0cyMjJITEwkLi4Od3d34uPjSUpKwsnJifLycrq6uhgbG0NbWxt4aY5TUVERCsJz\n584xNDTE7t27kclkaGtrMzY2xqRJk8jMzGRoaEiEHP1znsWGDRuwsrIS8QQXL14UOLa0tDQOHTqE\njo4OKioqnDt3jpGREfbs2cPZs2eZN2+esIi3tbVRVFREZmYmtbW13L59Gy8vr/8ST/jfrf/TTaFV\nIpGY/ePmNgOe/+P9JsDqnz5n+Y/3/rclk8kOy2QyH5lM5mNmZkZxcTGdnZ0oKiri6emJsrIy5eXl\n9Pf3I5FISEhIoLGxkZqaGhwdHXn99dfx8PDg6NGj2Nvbk5eXx/j4uBANwcvkqZMnT6KoqMjatWtp\nampi+fLlrF69msbGRhISEvj++++5dOkSnZ2drFmzBjMzM1auXIlUKqWvr4/y8nLBI5Dn/CkoKBAY\nGIiOjg6amppoaWlRV1dHW1ubgIvI2YPnz5+noqJCzOIVFBR4/vy5iJ0LCAgQs2k5V1FOWlZXV8fG\nxoYXL17g4+ODr68vjx8/5syZMxgaGrJp0ybBJti0aRN9fX2YmJiIjMV9+/bR2tqKtrY2n376KTY2\nNpiamqKoqChqb3d3d+Bljsbw8DBWVlZoaGhQXV1NZmYmixYt4sMPP8TAwAA1NTWUlZVRUVFBW1ub\n0dFRnjx5wowZM5gyZYpIpPbz8yMwMJCOjg66u7uJjIxkfHyclpYWurq6KC8vp6amhuLiYrq6uoiO\njmbVqlUAIuZPngJ+7NgxZsyYwejoKK2trfzyyy/CDVheXk5eXp4ok+SY86tXr1JbW0tzczP5+fkA\nTJ8+ndbWVkZHRwkNDcXCwgKpVMorr7wiiNt1dXVoaGigoKAgKEbGxsZERUXh6OiItbU1M2bM+C9I\nerl92cTEhEOHDuHr60tTUxNWVlakpaXx9ttvi+Tyd955h7KyMlHG+Pv78/nnn+Pi4kJVVRWDg4NC\nV2Fpacn06dMJDQ1FTU2Nv/zlL5SUlNDS0kJYWBg6OjrU1dXh7u6Og4MDhYWF7NixA3t7e1atWsWT\nJ08ICAhg9+7dNDQ08PHHH//hm/v/dFO4Bqz7x+t1wNV/ev9PEolEVSKR2AIOQPYf+QcXL15MT08P\ndXV1PH/+XIhc5FBPCwsLWltbCQsL48aNG9TV1WFrayti43t6ejAwMMDIyIiqqirgpSW7u7ubnJwc\njIyMaG5uJjAwkMuXL6Ovr4+/vz+dnZ1cuXIFJSUlzp07h4KCAoqKiiIcNS4uTlwc8maYnZ0dZmZm\nhIaGoqioyMjICCEhIcydO5eamhq8vLwoKCggKSmJhQsXConu3/72NxYuXMjUqVM5cuQIwcHB9Pb2\n0t3dDcDTp08pKCjA1NQUS0tL9PT0OHz4MDU1NYIsXFxcTEREBFpaWmzatAlPT0+uXLnChx9+iIOD\nA7Nnz2blypW4urqybt06Tp06RU9PDxs2bCA6OhoVFRX09PQoLS3Fz8+P/Px8nJyc0NTUpLq6mrNn\nz9Lb24uysrJIYnr27BkLFiygtbUVVVVV/P398fX15ddff2XFihWoq6uTbw5XtgAAIABJREFUn5/P\nrVu38Pb2FuBbiUTC8+fP0dPTQ1tbm5GREaZPny7k4q2trSxYsEDkdw
A8evSI4eFhNm7ciIuLC1FR\nUeIpHB0dzezZs9m3bx/vvvsuqqqqQrFYWFhIZGSk6E2Mj4/j7e2No6Mj8LJB6ejoyOrVq6murubu\n3bs8fPiQnJwcTE1NKS4u5tGjR+jr67Nq1SrRuJ46dSqGhoYUFRXR1tZGXV0d/f39qKurA4imnoaG\nBo6OjiI1zM7OjsWLFxMWFkZubq4QQZ07dw4jIyOcnJzIy8vj5s2bQg7t5OREYmKiuCfKy8spKytD\nR0dHQGfl11xiYiLffPMNx48fJyMjg7CwMNauXcvkyZM5d+4ceXl5hIeHk5iYyMyZM5k1a9Yfvrn/\nyEjyLJABOEkkkkaJRLIJ+E9glkQiqQBm/uPPyGSyYuACUALcBt6UyWTj/93X6Ozs5PHjx6Snp6Os\nrEx6ejoNDQ2Ympoyd+5cvvzyS/r7+5kyZQqFhYUYGxvz3nvvcenSJTEblqcmDw8Pi7osIyODdevW\nkZqayoMHD/Dx8SE6OpoFCxbg7OxMbm4uc+fOxdHRkdHRUfz9/YWFdu3atSQnJxMUFPS/NWkyMzMp\nLCwU2K/h4WEuX75MVVUVUVFRNDU18ezZMyIjIxkeHubgwYM4OztjaGhIc3MzKSkppKamEh8fj7Ky\nshhDzZ07l+7ubgYHB2lra8PQ0FCAZE+cOMGTJ0+YNWsWNTU12NnZERgYyIMHDzh9+jTNzc309vYK\n621zczPXr19neHiY7OxszM3NOXXqlOAC1NTUoKqqytKlS8nPz2d8fBxVVVVee+01QkJCePHiBSoq\nKrx48YLS0lIePnxIbm4uCxYsoK6ujuHhYQICAlBWVubAgQOcPHmSjRs3CkrW+++/j4uLC8ePHxfR\n8XKfSWRkJAsWLCA0NJSioiJ0dXWFbVn+u5BrE1pbW1FTU6OiogJTU1MmJiYICwvj448/prKykvb2\ndn755RdCQkJE3OCjR49QU1Pj5MmTgiOwfv16YmNjBcfSwsKC8vJyzMzMRKiu/JQUFxeHkZERExMT\n5OXlCcp4VFQU2dnZrF+/HiMjIwCioqL461//yvbt28Voedq0aSKGICoqCm9vb1auXMkrr7xCdHQ0\n165d48mTJ4ITqqqqSktLC+fOnRNN546ODiH97unpYd++fcTExODi4iJ8FmvWrGHHjh309fWJNPY/\n//nPREdHY29vj6WlJZMmTSIlJUUkYP+R9UemD6tkMpmZTCZTlslkljKZ7JhMJmuXyWSRMpnMQSaT\nzZTJZB3/9PkvZDKZvUwmc5LJZLf+yDfR1dVFWVkZEomEyMhIrl69yqxZs0SYxqZNm7h48SJKSkpM\nnjyZ/Px8SkpK2LhxI3/729+4d+8epaWlNDY2UldXJ2p0f39/nj9/zvr16xkZGWF4eFjQiOUpwCUl\nJairq6OoqMjo6CjDw8Ns27aN7u5uFi9ejKmpKcbGxjx69Iji4mLg5dFu27ZtItlnxowZaGhoiCnK\nvHnzMDc3x8bGhvr6ejQ0NMjMzBT0nzlz5oiocXl+AbxsNFpYWKCpqUleXh5NTU0sXryY/Px8oWA7\ne/YsGRkZzJkzRzjnTp48SUBAAObm5hgaGorjaXh4OBERERgYGODm5oaKigojIyNERUURHR3Npk2b\nuHv3Lq+88gpGRkbo6+uLv9+yZQuzZs0SJxljY2O6urqoqqqisrKSI0eO8ODBA6qqqoiOjmbevHkc\nOXKEzs5OfHx8SE9Pp7a2ll27drFkyRLKy8vJysriyJEj3L59m4GBAWQyGUNDQxgaGvLVV18BUFxc\nLFSrrq6uBAUFYWZmxp/+9Ceh9TcxMWHq1KnIZDLy8vLQ19fn/PnzKCgosGzZMo4dO0ZISAhmZmYi\nIerSpUsilcrKyorBwUEWLlwo/BKurq4sWrRIxAKMjo7S1taGnp4ePT09fPbZZwQEBGBsbMyxY8fE\ng0euXt29ezelpaVcvHiRI0eO8NVXXwnBmVQqpaCgQFCfxsbGyM/PZ8eOHaipqXHt2jUBUfnLX/4C\nIIjg8hs8JSWF3NxcDh06hLu7O/PmzaOsrAxHR0f09fVFWK8cMCsnhssfcOfOnfsjtyLwL6Jo/Prr\nr3fJk4j19PRwcnISKcHykZ+ysjJTp05FTU0NNzc3FBQUxBFYV1eXqKgotLS0cHR0xNvbm8OHDzN/\n/nyuX7+Ou7s7x44dY2xsjM7OTrS1tRkYGEBJSYmHDx8SGBhIcXExr732GhKJRIyx7ty5IyzHS5cu\npaGhgfT0dNzc3DAyMhJhtmNjY2zatImioiK0tbUJDw8nLy+PgYEBVFRUCA8PZ2hoSEhn1dTUMDY2\nFlSorKwsioqKcHFxwdzcHGNjYxQUFCguLiY0NBQzMzO6u7sF99/BwUEkCxsaGhIUFCTEOEeOHBEZ\nFMHBwWRmZmJra8vly5cxNDTE3t5eXOxvvvkmjo6OjI+Pk5WVhaqqqiAmJSYmcu/ePSZPnoyHhwcS\niYTVq1eTnp5OZGQkVlZWrFixQkBUFRUV+Y//+A8aGhrESUdbW5u+vj7Ri6ioqBAjShMTE3HUfv78\nOREREYIwpK2tLXJBe3p6hPCssrKSffv2cfjwYRwcHAS4dc6cOTQ2NrJkyRLy8/PJyckhIyMDMzMz\ncnNzuX37NkeOHGF0dBQPDw+SkpIwMDBAU1MTFRUVEeb67NkzsrOzUVNTIzc3lzlz5uDt7c358+c5\ncuQINTU1Is35888/p7e3lxkzZgi8vkwmE7/P6OhoLCwsSEtLE4BWeT9lzZo12Nra8ujRI4FZa2pq\nwsvLi5qaGi5evIi7uzuOjo6UlJRQWVmJlZUVzc3NqKurU1hYSF9fnzjBLV68mIyMDJ49eyYYoc3N\nzXR2djI6OkphYSHbt28nNjb238clKYdXurm58eLFC4KDgwkICKC/v5/k5GS+/PJLXFxcqKmpwdLS\nklu3bvH48WMsLCxQUFCgv79f0Jzd3NxEAtOjR494++230dbWFsKlnJwcIiIiUFFRISsrC3Nzc8rK\nyjA2NiYpKQljY2PCw8Opr6+nu7tbWKhlMhk7duwAXiZPLVy4kODgYDw9PZkyZQo///wz/v7+TJ8+\nndOnT2NoaMiuXbtQUVHhm2++4dGjR/T19ZGXl4e2tjZZWVkiu0LeGV69erVIGmpqamLKlCmcO3eO\nK1euIJFIePDgAT///DNSqZT09HQRKgMIL8ikSZNITU1FTU1NKBanTJnCwoULBQlqYmKCoaEhamtr\nCQoKIiEhgb6+Pu7evUtjYyPR0dFYW1sTHh7O06dPycrKYmBggNOnTyORSDA0NGRwcJALFy4we/Zs\nenp6mDVrFr///jt9fX0MDQ3h4uLC8PAw7u7utLe309bWxqpVqwgKCmLGjBkUFRVRXl4uEqrkUNyr\nV6/S09NDc3Mz06ZNQ0VFBbkEfu/evQJl9vDhQ/7yl78IIpeBgQFHjx4lNzeXSZMmsX79ejIzM0Xj\nrry8XMz6R0ZGUFVVJTU1l
ZiYGLy8vMTYU47ru3r1KtbW1hQVFSGVSgXNq66ujsrKSt555x3gpSjK\nxsYGFxcXfH19CQsLw9PTk7y8PJSVlXFwcGDfvn1MmjRJiPHkzdKioiLq6urYvHkzjo6OfPHFF0yf\nPh14mcT+7Nkzrl27xvj4OBKJhPnz53Ps2DECAwNF81O+URsZGWFra4uzszM2NjZ4e3tTX1+Pj48P\nenp6oh/2R9a/xKYACJBIV1cXn332GYcOHcLMzIxvvvkGS0tLzp49S1paGnv37sXNzQ1HR0e2bt1K\neHg4LS0tfP311ygqKpKcnCxY/+Pj4xw9epTvv/+eP//5z3z99decPXuW9vZ2Dh48iEwmw9bWFolE\ngq2tLQoKCjx8+JDq6moCAwN55ZVX6OzsZO7cueTl5Qn89uPHj3nw4AHHjx9naGiI4eFhfH19sbe3\nJyEhgdLSUgEIuXPnDrNmzcLV1ZWcnBz+9re/0dDQQEREBO+++67YpODljW1qakpRURE2NjZkZWXh\n4+ODtbU1urq6KCoq8tFHH3Hv3j1MTU2F0CsgIACApUuX4u3tzdKlSzl9+jSKioqkpKSgpKRERkYG\na9asob29naSkJOEylfcW5LmScp29PLhVX1+frKwsQYieMWMGiYmJpKWlsWLFCnbs2CF6A0pKSkIR\nKBf7AEgkEqHJcHBwoLi4mPr6enEaHB8fF1Hply9fxtLSkgcPHjB37lyOHDnC5s2b2bdvH/Hx8Rw/\nfpynT58SExPDtWvXkEqlIih28eLF+Pr60tLSwoULF4iNjRW5oo2NjRgYGDA4OIixsTEaGhpMmTKF\ny5cv09/fT1paGmFhYejp6WFgYMCxY8fYvXs3dXV1+Pn5cffuXRwcHFi7di3Nzc3COu3n58fw8DDJ\nycm0trZy+/ZtUlNTcXZ2Fvh8+fXV0NCAn58f5ubmWFlZYWlpyY8//sj+/ftpaWnBzs5ORB76+voy\nOjrKjRs3iIqKYmhoiPz8fKZOncr4+DhPnz6ls7OT8vJy+vr6BMIwPT2dsrIyHjx4IHoinZ2dIp7g\nj6x/ifLh5MmTu+rr6/Hy8gJehrhoaWmJ2DQ57EJDQ4OnT5/y2muvUV1dTXBwMLGxsaxbt44ZM2aQ\nmZkp6te4uDg8PDzo7u4WYJKgoCASExPF2CwiIoKoqCh++uknbG1tuXfvHl5eXjQ1NTF37lx+//13\nYYppa2vj0qVLVFdXs3jxYhQUFPj+++959913GR0dpbOzU0wsurq66OvrY968eaKB1tHRQWhoKLGx\nsSxatAh3d3cuX77M2NgY6urq4njv5eWFuro6T58+JSQkBKlUSldXFwMDAygoKFBUVMSbb77JzZs3\nUVJSwtHRUdSsMTEx4rNubm7cuXOH77//nsOHDyOTybhx4wbj4+N4eXmhp6fHmTNnqKuro6WlhdbW\nVoyNjfH09KShoYGCggKePHlCQ0MDRUVFGBkZiamLubk5mzdv5rvvvkNDQ4OVK1eKQBwPDw+h5JRb\neQMDA6moqEAqlfLo0SNxxAbw9vZmdHSUpqYmbty4IdSGqqqqjI6Osn37diwtLYWj8p95m+bm5iQn\nJzM4OCgAp25ubnR2dvLFF1/Q29vLggULiI2NZcaMGSKV6/r165SVlWFra8srr7yCqqoqqqqqjI2N\n8ezZM8zMzIiLi8PJyUn4PeQl3a1bt4iJiaG7u5tTp06xdetWjI2NUVdXx8PDg97eXjw9PcW06v79\n++jr66OiooKBgQEBAQEsW7aMkpISEhMT+eqrrxgfHxf8kOrqakEINzY2prm5GQcHBxQVFfnkk0/Y\nuXMnycnJSKVS7O3tRVTh+fPnCQ0NZdmyZaSnpzNr1iysra3FeNXd3Z3jx4//+7gk//73v+/69NNP\n0dDQ4MiRI+zZs0fw++fOncvt27cxMjJiZGSE4OBgGhsbBRJs1apV/Pbbb+jp6REbG4uLiwvPnz/n\nxo0b+Pv7ExUVhbGxMbW1tYyMjKCvr89XX31FcXExH330EZcuXWLlypXExcVhZmZGdHQ0+fn5TJ48\nGTMzM/Ly8sST3d7eXvgpkpKSRCPu119/paWlRQhS5FZfAwMDIe6JiYmhoKAAqVRKSUkJUqmUlpYW\nysvLsbGxERhuV1dXkpKSUFBQYOHChWzfvh1XV1cmT54sUrM/+OAD4QXQ09NjfHwcGxsbFi5ciI6O\nDl5eXuTn5zNnzhzgJYhUW1sbNTU1hoaGUFZWFuYxV1dXXFxcUFFREfh7+fF6bGwMNzc3qqqq0NbW\nxtbWlu7ubtra2qipqWHLli309/eLC1cmk2FmZoa/v7/ISpRKpeTl5Qm6sZubG729vQL53tzcjIaG\nBhYWFpw6dYqoqChCQ0M5d+4c7e3tQiRkb29Pc3MzM2fO5N69e0RHR/Pw4UNUVVWJj4+nr68PdXV1\n3N3d8fX1xdbWliVLllBWVkZcXByffPIJ5eXlDA8Po6uri76+PsbGxgwMDHDy5EnBZ5CPxOXUI3V1\nddTU1MjLy2PlypV4enpy/PhxWlpaRPDMmjVruH//Pj4+PqSlpfHixQs8PDw4duwYZmZmrFq1it7e\nXtLS0qisrBRTJS0tLVRVVXn27Blqamr09PQwd+5cbt68SVVVlVChyglTX375JVOnTqWkpEQ4Na2s\nrBgeHmbdunVUV1fz66+/snXrVu7cuYOTkxPXr18XfbGMjIx/r01BVVUVfX198dTo6uoSgpHq6mpx\nwwAiqNPLy4vz588jk8k4ePAgx44dIysri19++YUXL14I8MqBAwewtrbGzs6O69evs3v3bhYvXszD\nhw/Jysri8uXLJCQkEBYWRnV1tRCV6Ovrs23bNrKyspg5cybd3d3ExcXx+eefU15ejru7u8jxmzx5\nMvb29vj4+KCuro6bmxtJSUm8ePECQ0NDQXXu6upifHyc3t5e1NTU0NDQwMfHhxMnTjBnzhza29tF\nHZ6fn4+WlhYKCgoi88De3l4Qiy0sLFBWVqapqQlDQ0OsrKwICAjgxIkTAMycOZPDhw8LSXJsbCwx\nMTHU1tairKyMqamp0H5YWFgwf/58amtrSUtLw9raGplMRnx8vHga9vf3i7DYvr4+Pv30U7FZV1RU\nEBYWRmNjIx0dHRQVFREaGsrjx48ZHBwULMekpCTMzc2JjY1l8uTJKCsrMzExITIOFixYwLVr13jt\ntdf4+uuvsbS0pLm5GX19ffLy8tDR0aGiooIVK1YglUpRVlZm0qRJuLq60t/fT2VlJVeuXBHeCjne\nTCqVIpFI0NfXR0FBQWR1tre34+zszEcffYS+vj6BgYFigiMvn1JSUkQz9/79+2zYsIGBgQHi4+NZ\nv349Hh4eIuS4qakJU1NTPvroI1avXo2rqytHjx7F0NAQPT09KioqqK6uJicnh/v37xMaGsqxY8fY\nuHGjcMHeu3ePt956S6RaFRYWsmHDBgoLC8nNzWX69OlcvXqVLVu2kJCQgJeXl1BQyn
mXlpaWgilp\nYGBATk4OT548+R+a8/+s/1n/s/7L+vehOdvb2/PLL7+Qn59Pd3c3c+bMobi4WOQZ9vX1CYDm06dP\nWbhwoQCEvPHGG8yaNYva2lokEglVVVV4eXnh5eXFe++9h5OTE76+vhw8eBAnJycBpTA0NCQ/P5/3\n3nuPn376SWQx6uvr09nZSWlpKba2tsyaNYs7d+5w+fJl/Pz8OHz4sAikOXfuHGvXrkVBQYGWlhZk\nMhlWVlbo6+tjZGREcnIy5eXltLS0sGPHDlH29Pb20traikwm48CBA2RkZODv78/MmTOZNGkSr7/+\nOvv27SMiIgInJye++OIL3n33XQoLCxkZGUEikdDZ2SnISmlpaUyePBktLS3U1NQYHx9HUVFR9Eds\nbW1JTU0VmQFyTJyKigoZGRksWbKEDz74QHgzTExMmJiYwNraGnNzczo7O4mLiyM0NBRzc3Nmz57N\n+Pg46enp9PX1ER4ezsmTJ2lubhaW9IsXL2JgYCByHWpqakTwzunTp5kzZw4jIyMMDAzg7e1Nc3Mz\nO3bs4NVXX0VPTw9zc3M++OAD3nnnHXR0dIiOjqakpISbN2+KzIvAwEBBSR4aGiI+Pp6//vWvbN26\nleXLl5OTk4NMJuPcuXN88803bNu2jbi4OGJjYwkLC8PLy4uqqirs7e2FazU9PZ2RkRFhKU9KSmLu\n3LnIZDKcnJy4du0apqamqKioEBMTw5IlS3jttdfw8PAgNTUVLS0t8vPzKS4uZseOHWhpaVFeXk5D\nQwNdXV1cv36dmTNnEhAQQFxcHBs3biQ9PV30ee7cucPOnTtJSUlBW1ubXbt2YWdnJxrQVlZW+Pn5\n0draip2dHaqqqiL0Nzg4mPXr13P69GmKi4uRSCTU1tYyMDAgCOF/ZP1LTB8GBweFmktXV5fffvsN\nZWVlNm7cyKRJk7C1tcXS0lK4/Pr7++ns7OS7775DXV2d3t5e4dmXSCRs27YNgFdeeYW+vj5xwQ0N\nDTF79mycnZ1pbGzE3d2drVu3EhQURGBgoIg6k8+rly5dyrJly4iOjmbnzp28+uqrAAIFr6ioSGtr\nK83NzSQlJdHY2EhBQQGJiYl8+umn6Onpoaenx/Lly2lqakJbW5uUlJT/MiL68ssvSUhIAF4qOwMC\nAoiPj2fKlCmoqKhw5coV/Pz8UFFREUnMK1asoL+/H6lUip+fH4aGhqirqzN9+nTy8vKYM2cOt2/f\nprGxUVjA/fz88PLyIiYmBl1dXVJSUnj48CEVFRXk5uaKsufNN99k2rRphIeHo6+vT0dHhxh/LVmy\nRABWPv/8c6RSKUFBQQwODrJjxw7S09NJSUnhxIkT1NfXU1RUxHvvvYeOjg4uLi5IpVKSk5Nxc3Mj\nNDSUrq4uXFxcuH37Nunp6cBLQ4+ysjImJibs3r0bS0tLrKysePjwITU1Nejq6pKYmEhTUxNXr17l\n6NGjDAwMUFpaipOTEw0NDezZs0dshnKLs9w0JKcpx8XFkZOTg7q6Ojk5OdjY2JCdnY2DgwOurq5c\nunQJJSUlent7hXiorKxMNFrlcuSwsDA0NTVpamrCyMhIOGHnz59Pfn4+Z86cwdLSktLSUnp7e1m4\ncCESiQQNDQ1sbGwoLi5GS0sLKysrkU4GL0vkq1ev8tlnn4kJz6uvvsqCBQswMDCgp6eH58+fU1VV\nhYODA6Ojo/T09LBgwQLu3r1LRUUF169fFxEDe/bs+cP347/EpiCRSJg6darQBISFhREWFkZtbS0d\nHR1kZWURERGBlZUVU6dOxcDAgOLiYtTV1YXSTs7Gz8rKEuaPiYkJTE1N+fjjj+nq6sLCwoLCwkKG\nh4cZGRnB0dGRpqYmJiYm6OjoYNmyZXh6eqKrq8vRo0fp6+vjz3/+M83NzTQ2NgrJ7OPHjwX05fHj\nx/j5+bFmzRoiIyNZuXIlMpmMN954AzU1NWQymRgvymGk8gBTV1dXgoODBRtAWVkZLS0tBgcHxVM/\nNDQUZ2dnfvjhB27cuCEaY/b29jg6OpKbmyti0Zuampg8eTLFxcXo6Ojg5OTEnTt38PT0JDk5mfPn\nz3PgwAGmTZuGjo4Oa9euJTQ0lPz8fIKCghgeHiY2NlZMKtra2jAwMOCHH37A0tKStLQ0RkZGqK2t\n5dVXXxUNyW+//Zbjx49z6tQpPD09RZ0+NjaGs7MziYmJDA0NCWy5m5sbV65cEapSU1NT9u7dC7yE\n1qipqdHY2Mj777/P5cuXyc7OxtbWVljBDxw4wMKFC1m8eDG7du0S8fb+/v40NDTwySef0NbWhr29\nPfb29gC8/fbbqKqqUl9fz7vvvsvRo0fR09MjNTWVtrY2MZHIzs4mNTWVNWvWCCBwamoq3t7ewvna\n1dWFn58f8HJSlpqaSlVVFWVlZVhbW9Pe3o6xsTEpKSl4e3vzww8/4Ovri52dHbW1tZiYmJCQkEB9\nfT0FBQUUFxeLzVmucNXU1BSKz2+++YaWlhbOnj0rRpNKSkrCpdre3k5qaiqlpaW8/fbb1NXVUVBQ\ngL29PRMTE+jp6fHll1/+4fvxX2JTGB8fJzY2FmNjY0HNLS4uRldXl/7+fvr7+/n666/FmCgnJ4cX\nL17w22+/kZqayr1790SG4fDwMHFxccBLi7OLiwvLly8nMTGRyMhIVq9ejampKS0tLcJUVFdXR0JC\nAosWLRIef2VlZYqKioiJieHkyZNoaGjw888/A+Dl5cXp06eZmJhAKpWioqLCjz/+KPIi/vSnP1FY\nWEhPTw+WlpaC+iRX6xkZGdHa2kpKSgpPnz4VHfbly5dz7tw5pFIpNTU1REZGYmhoSElJCYsWLWLh\nwoUcPXoUDQ0NIdhJTExkZGREjMGUlZUZGRnhs88+IzY2ljfffJPs7Gx0dHSEVbizsxNzc3NmzJgh\nTDynT59mcHAQT09P6uvrWbRoEcbGxjx9+pR169Yxc+ZM9PX16enpwd/fn19//RVfX18OHz6MjY2N\n8IPIG3/yBqOpqSlTp07Fz8+PiooKZDKZ2Fhyc3MBKCsro76+HnhJy6qqquLMmTNERESwfft2tm7d\nyo0bN+js7ERFRYUbN26wePFiwTK4fPkybm5u2NnZcerUKUHGKigoECPhDRs2kJWVhYuLCzdv3mR4\neJijR4/S1dWFmZkZCgoKuLm5YWNjg6KiIvb29kyePFmAdZ48ecKePXv49ttvOXPmjBBb3b59W6gT\n5fAYIyMjKioqxElx3bp1mJmZoaKiIrIapFIpkZGRTJ06FSsrK+Lj49m4cSNLliwBXlKt5Yi1+vp6\nfv/9d5YvXy5Ccr28vPD396ekpERkV5aUlNDa2sqbb76JoaEhOTk5PHr0iMuXLwuNzR9Z/xLTh2+/\n/XbXl19+iZ2dHUVFRYLYc/fuXczMzICXCi85CDU4OBhlZWWMjIzw9fWlqqoKOzs7xsbGMDQ0xM/P\nj9OnTxMeHo6xsTFOTk5kZWURFhbGkSNHU
FNTE+nGDQ0NQheQmZmJpqYmRUVFYpfPzs4mIiKCFy9e\nCIjLW2+9RV9fH2VlZfT29lJZWYm5uTkNDQ3o6uqSk5ODkpIS/f39lJWVCZakjo4OQ0NDODk5icjz\n5cuXixzFLVu2sGLFCgwNDRkbGxMbVHZ2thhNjY2NiTTq5ORk1q1bJwxYZmZmrFmzhqysLGQymUjJ\nGh0dFZJsTU1NLl68KOLX/fz8KCsrExLvsrIyfHx8cHd3Jz09neHhYdzc3Dh27BizZ89mZGSEtLQ0\ngoODkUqleHh4COXh2NgY7e3tJCYmoqCggJeXFzY2NgwPDwMvswzkm+327ds5f/48Xl5eGBgYUFRU\nJNKtgoKC8PDwYM+ePaxZs0YkW7/xxhtMmjSJ+vp6Zs6cydDQEFFRUXR1dYnAYE9PT1paWvj000+5\ncuUK/f39FBcXMzg4KL5HIyMjioqK8PDwoKenh+nTp1NVVSX0ATqLteqtAAAgAElEQVQ6OqSnpwuN\nyPTp0ykvL2fevHlMmjSJoKAgLCwsRLpXZ2cnqqqqTJ8+XZwoa2tr2bp1Kx0dHdy5c0f0FeSO32nT\npqGkpMSDBw9QVlbG19cXGxsbLC0t+e6775g/fz7V1dVCcr1lyxaqq6s5evQo1tbW3Lp1C6lUyuDg\nIPv37xfl6rNnz8jJyWFsbEz03gYGBpg1axaHDh369xlJ/vjjj7vg/4ZgPn78mIyMDAwMDHjx4gVB\nQUGcPn0aFRUVgRi/c+cOysrKWFhYMDExga6uLhcvXmT9+vU8efKE+Ph4XF1dxZhN7ueXX+zKyso4\nOzvj4+Mjnmpy4ImRkRHLly8nPj6e+fPni1m1oaEh58+f58033yQuLo6oqCjBgHjjjTeoqanBxMSE\njIwMIVhRV1enrq5ONKakUimenp48f/4cExMTvv32W7y8vDh16hRLlizh6tWrFBcX09fXR0REBFu2\nbGHx4sW0trYK7oJEIkFBQYHo6GgRj37p0iWcnJw4ceIEUqmU8vJyUlNTRSnV3NyMgoKCqOkPHTrE\nzp072b59Oz/88INITx4aGhImse+//57g4GCGh4cJCQkRDIHR0VGGhoZoaGgQzVP511yyZAnz58+n\nu7ubnp4ePv/8c2bOnImLiwuBgYHY2Njg4OAgFIZpaWnY2dlhZ2fH77//LiLfTU1N+frrr/Hy8qKx\nsZF3332XU6dOUVlZyapVq/Dy8sLJyYmrV6/S29uLv78/UqmU7OxsLC0t+fnnn9m0aRN6enrcvHmT\nq1evoq+vT2xsLEuXLuXx48dYWlqiqalJY2Mjg4ODJCcnExYWJqC3SUlJrFixAjU1Nfr6+lBSUhLX\nwffff8+TJ0+E6e7jjz/mzp07LFy4kLa2Ntra2rCysuL58+csWLAAMzMzNDQ0RBmkpKTEsWPHWLp0\nKfb29hgbG5OYmEhSUhIZGRmivLKyssLd3Z2EhARUVVUZHh6ms7OTjz76SAQUOzg48OTJEz744AMM\nDAxISUlh3bp1qKiocPnyZTw9PRkdHf3D3od/iZGks7OzzM3NjXXr1gm6zOjoKLdv32bVqlV0d3eT\nkZEhCLuWlpYsXryYrKwsent7RYrSqlWrCA4OpqKigpkzZ5Kens79+/eZNGkSKioqQgm3cuVKwR78\n/fffCQkJEdBUeZOvtLSUwMBA4Xx866232LVrFydOnMDPz090cvv6+jA3N0dTUxMFBQXKy8uFOUse\n5yU/PTx8+FBkTG7cuBFNTU0mJiZob28nKCiIJ0+ecP36dZYuXcqPP/5IWFiYaJSlpqYKqbOamhq/\n/PILjo6OhIWFMTY2RkpKCiUlJbz66qsoKiri5uZGR0cHkydP5tKlS2zbto0LFy7Q2NjIlClTaGho\nwMfHB3t7e3777TdCQkL4/fffiYmJQUdHhxMnThASEsLw8DCZmZns2rULBQUFvvrqK0xMTARCbv/+\n/YyNjREWFkZwcDAZGRlIJBL8/PzE5jU6OkpzczMjIyPExMRw7949AXKRyWSsWrWKnJwcVq9ezY4d\nO6iqqiI8PJzc3Fy8vb15+PAh2tramJmZ4ezsTEVFBaWlpSxfvpw7d+6I0khTU5N58+bR2tpKamqq\nCIt58803aW1tJT09HTs7O77//nu2bt0qNtShoSHhg9m1axfLli3Dzs6O7u5uIiIiGB0d5bfffuOd\nd95hYGCAjo4OXFxcsLCw4ODBg4SHh5OWloaWlpYgfH/66adCXdve3k5fXx+pqanY29vj7e2Nvr6+\n4HsMDg4yNDREY2Oj6GMVFRWJaZpcaZuVlSWyJx48eIC7uzs+Pj7cv3+f1tZWnJ2dhQ3d39+frKws\n9PT0RH9qwYIFf2gk+S/RUxgcHGTBggXY2NgglUppamqio6ODzZs3Ex8fz+3bt9HS0uLVV19l0aJF\n9Pf3U15ezvPnz7G3t2f69Ons3r2b7u5urly5Irzura2tIkg1OzubdevWoaury8GDB4WwSB4nJvfU\nv//++2hoaLBs2TL09PTw8PCgpaWFkpISQSnavHkzWlpatLS0kJ6ezvj4OJaWloSEhFBbW0t/fz8Z\nGRlMTExQVFREcnIyv/32GzNnziQkJAQTExOqq6u5cOECw8PDwsB1/vx5FBUV2bJlC9nZ2QwNDdHU\n1ERqaiqAKCP6+vrw9vYWISuamposWbKExYsXU1hYSHV1Nc3NzWITXbNmDR0dHWhpaaGpqUl9fT06\nOjpkZmaipKSElpYWDQ0NqKmp/RfvxenTp3n+/DnOzs6iByKXoBcXF9Pb20tUVBSenp7o6ekxMjIi\nsO5VVVUkJSURFxfHhx9+SEREBJWVlXz22Wfk5eUxOjpKbW0tvr6+PH/+XBCtTUxMhE/A2NgYQ0ND\ngoOD0dTUxNXVlaqqKkZHR9HS0mLv3r1Mnz4dqVQqSskTJ04gk8l4+PAhtra2Ij5v//79qKmpkZ6e\nzpIlSzhz5gwtLS0kJiZSX1+PVCrFzs6Os2fPIpVKMTY2RklJiaNHj4oJxDfffENpaSkVFRWCeD1t\n2jTq6uqEIam1tZWKigra2tqIjY3FyckJGxsbIiIieOedd4Q35Oeff+bbb79lbGyMvLw8Ll26xNy5\nc4WBKycnR6DjADHq3rx5sxCpyWX1ckCNnZ0dKioqgkgWHh7Oixcv0NPTE9fYH1n/EuXDL7/8smvS\npEkMDQ1RUVEh7MzyEFBfX1+ePHmCu7s7vb29aGpq0tbWxoIFC0hKSuLcuXNYW1tjZWXFo0ePyM3N\nJTs7mylTplBbW0tVVRVLly5lypQpSKVSNm7ciI6ODq2trYSHh6OoqMjjx48FgVldXZ3Ozk6ysrJw\ncnKisrKSwMBArl27Rn5+Pm+88QY+Pj7o6+sLFkNubi7GxsZs2rSJ+Ph4TE1NsbW1FaYWf39/urq6\nMDQ0FJRiExMTXrx4gZWVFUePHmXt2rWkpaWxceNGLC0t6ejo4K9//StWVlZkZWUxNjaGubk5urq6\n
[base64-encoded PNG image data omitted: binary payload from the diff, not human-readable]
3d3d189tlnYiWkKJY+evRITJbx8fFkZGTg6OjIwYMHUVJS\n4ujRo3h6etLT0yNqMgo5cnd3N5cuXUIikfDo0SOsra0ZMmQI5eXlxMTEMG7cOMrKyggPD2f+/Pl0\ndHTw22+/YWtry+LFi1FVVSU5OZmioiKioqJISUkRTlPFc7u7u/Po0SPWr1+PXC7nxx9/ZOXKlYSG\nhnL16lXGjh2Lv78/V65cwd7enoiICDG5K7aLnZ2dVFdXY21tzcCBA1mwYAGvX79GJpMJMVZMTMxf\n6j78JRzb/+nR0dGBr6+hSUL5AAAgAElEQVQvUVFRtLS04OnpKVJ7N23aRH9/P11dXZiYmODi4sLd\nu3eF6GXGjBkkJyczfvx4gT0fNWoUSUlJPHr0iKFDh/LkyRO0tbUZM2YMKioq6OnpERkZSUxMDEuX\nLqWnp4eAgAD279/PvHnzhCBnypQplJSUkJOTQ1tbG5aW7/izdXV1REVF0dvbyw8//EBnZycREREM\nGzaMhIQEmpqaGDZsGB0dHcLrXl1djZ+fH6WlpSxcuBAPDw/6+/uFPRbenQ2++OILjh07RkJCAs7O\nznz99ddMnz4dY2NjLl++jIWFBXZ2dowdO5bW1lYeP36Mrq4uCxcuRFVVVbgeS0pK8PLy4tKlS1RX\nVyOXywUroaqqSuQihoSE4OHhQUNDA/7+/lhbW/P69WshilHIfouLi3nz5o3Awo0cOZLTp0+LjsbZ\ns2eRy+Voa2vz5s0b8vPziYmJYefOncTGxlJSUoKBgQGpqalYW1sLxFtOTg7u7u7iTKZY1m/fvp2B\nAwcKAZWLiwu3b99mxowZIovSzs6Oqqoq+vv7CQgIoLu7m0OHDlFbWytUsQoobmFhIc7OzgQEBFBY\nWIijoyPr1q1DT0+PVatWYWNjg6GhIUVFRejr6wvn5cyZM6mvrxehwdbW1oSHhzNv3jwA0SlRAIEe\nPnzIoUOH6OnpIT4+HolEgra2NuPHj0cikWBpaYm6urroQCkIVkpKSty+fVvAgSdPnixWKk5OTpSW\nlqKpqSl8KHp6evT19VFUVMT58+f59NNPMTExISMjAyMjI4YPHy5EWgrH7l8d/xTbB4BHjx6RlpYm\n2pMHDhxg6NCh/PTTT/zxxx+Ul5eLpZPCS5+cnMy9e/dIT08XUAkbGxtRUwgICMDGxoY1a9awYcMG\nCgoK+OCDD+jq6iIkJIRVq1Yxffp03rx5Q01NDXPmzOHJkyfk5uYSGhrKlStXMDY25uXLlxgaGhIV\nFQXAhAkTBFX41KlTmJqaEhgYSFRUFMePH0dHR0ekWmloaODi4iKW3Js2baKmpgYlJSVh2lL8v9ra\n2pw9e5Y7d+4wefJkzp8/z5YtW9DQ0GDAgAEsW7aMnJwcDAwM6Ozs5NatW/T09PDBBx8QFxdHWloa\nSUlJxMXFicRkFRUVvLy8mDBhgoh1V1FRob+/X1Ttjxw5gkwmIz09nW3btlFaWoq7uzvPnz/H0dGR\nqqoqysrKKCsrw9fXl4MHD2JnZ8eAAQPo6ekhPDyc27dvM2XKFHp6ekQgjIGBAfPmzaOnpweZTIaa\nmpr4ooaFhfHTTz9hYmKCioqKeA8cHR3JzMzk+PHj/PTTT2zatAlvb2/09fV57733UFVVJS4uDiUl\nJSoqKsjLyxNq07/97W/Y2trywQcfiDqVIk/i1KlT3Llzh97eXmxsbEhJSSEgIAA3NzdmzJhBdHQ0\n+vr6+Pn5Cfv5/PnzRUu2u7sbHR0dcnJysLCwELUKZWVlgesPDw9HS0sLGxsbQQNbu3at8PJERkZi\nY2PDqFGj6O/vJzs7mxMnTogOjqurq5DS5+bmEhUVhYeHB+Xl5Tx69IisrCy6u7tFUffmzZs0NDTw\n5ZdfEhkZSVlZmVi5FBQUoKamxtKlS/H29sbIyOgvH4sSxdLi/+aQSCT/9/+J/xn/M/7/P9LlcnnA\nf/RH/xTbB1dXV+bPn4+JiQmZmZloaGhQUFCAi4sLjY2N6OjokJKSwpQpUwRvsLOzk6amJlxcXJgy\nZYoI7jQ0NMTJyYmZM2eydOlSsQ9WzMYAPj4+wsZrYWGBqakptbW1pKSksGDBAmJjYzE0NGTt2rXE\nxMQIMOagQYP4+eef+frrr9HX10dTUxO5XE53dzfGxsaC0VBYWMj/Yu89o6o88/7fz6b33kGQ3lSa\noDRBRRQLFmxRYy/RZDTFFCfREHU0MW1MfBJb1MTEgthFrEiRIiC9SJPee2/CPi8crvPMf501T85a\n58XMOs/9yrVx781i7/u+r+v3LZ+RkRESExMJDw/n7NmzAsm+evVqqqqqiIuLEx79Fy9eiNj20qVL\ncXFxoa2tjdbWVlxcXNDS0qK8vBxPT09Brba3t0dDQ4OioiLq6uowMzNj7ty5/PHHH+zatYvy8nKs\nra1pamqiurpaWLjHlvj5+fl4eXmhp6dHbm4uNjY2XLx4kTVr1lBRUUF0dDTa2tp4eHjw+PFjVq9e\nzdmzZ8WW49y5c5iYmHDv3j08PDxob2/H2tqasrIynJ2dBVDlxIkTNDQ0sGLFCiIjI5k2bRo+Pj7E\nxsZy6dIldHR0WLZsGQ8ePODrr79m7dq1InkYGxvL1KlTsbGxQVFRUbg/fXx82Lt3L8XFxWJ74ubm\nJhSdMb/C/Pnz+fHHH/n22295/Pgxd+/eRVdXl4kTJ3Lq1CnGjx8vVCYTExM+//xzQkNDycnJoa2t\njdDQUMH07OrqoqSkBD8/P7y8vDhy5AgnT54Ubc1dXV1MnjyZ0dFRLly4INqobGxs2Lx5Mxs2bCA4\nOJh79+5haGgouKVKSkp4e3szfvx45s2bx4EDB9i7dy/ff/+9gN1oa2uLZKivry8WFhYUFxejqKhI\nU1MTo6Oj5OTk8OrVKwYGBggMDKSmpoYvvviC999/n4aGBm7cuPGneZL/FoPGgwcPhru7u/Py5Uts\nbW2prq7G3d2dwMBAnJyc0NbWxsXFhdu3b9PV1YWsrCw6OjoUFRWxYsUKALEf1NLSwt7enjNnznDg\nwAEuXLgAvJb7xoxJmZmZuLi4iOVdR0cHERER/8Tc8/T0pKSkhMrKSmprawWBJyUlhYCAAAF1lZWV\nJTIyUvzOT58+ZcaMGQwMDGBjY0NiYiJNTU3o6+sLwpCampoo5ti8eTMyMjLk5eUxZcoUpFIpFhYW\nKCoqsmPHDtLS0vD29qazs5Nbt24RGxsrcGdj8e6qqipkZWVJSEhg06ZNREREMH36dJ48eYKCggJT\npkwRM42WlhYePXpEUFAQIyMjwrSTnp7OjBkzSEpKYmBggEmTJqGnp8e4ceOIjY1lZGQETU1NsrOz\n0dbWpqioCHl5eZydnenv72fLli14eXkhIyODnJycYHiO7YPr6uqYMGECDx48oLy8HCMjIxEeUlZW\nprGxkYSEBNasWcP69etxcHCguLgYeXl53N3diY+PJzU1VTAYzczMWLVqlYDqjEmks2fPprW1leH
h\nYZKTk2lsbCQzM5PFixejoqKCvLw8Fy9e5NNPP2XDhg3cv3+fxMREqqqqWL58OSoqKpSVleHu7o6V\nlRWxsbGsXr2aoqIirK2t6ejoID8/n+XLl3PixAlGRkYwMzMjPT2dhoYGkpOTkZGREbi8V69esWfP\nHm7dukVtbS3jx4/H1dWV/v5++vr6cHBwQF5envHjx6OkpER+fj7FxcV88skn9PT0MG3aNK5du0ZT\nUxM2Njbo6uqyc+dO5OTk6O7upr+/n4qKCurr6/Hw8ODhw4eiW6Gvr0/M2fz9/bl48eJ/js1ZVlaW\nKVOmYG5ujoeHB62trYyOjhIdHU1oaKjoJ9y4cSPTpk1DRUUFMzMzpk+fTnFxsYiojjUDjRlLOjs7\nOX78OE5OTnz77bfIysqiq6uLm5sbjo6ODA0N8dNPPyEnJ0d4eDjW1tZcvHgRLS0tiouLOXbsGLNm\nzWL27NmMjIyI/WlZWRlVVVW0traio6PDgQMHCAwM5OHDh3h4eGBoaCjozZ6ensyePRsDAwNCQkLw\n9PREVlZWtELt27dPuO6SkpKorKxEIpEwPDzMo0ePqK2tFXZmHx8fzp07R3l5OdHR0UybNk1wNI2M\njLC2tkZBQUHsH//44w86Ojqora1FTU2NzMxM0tLSaG9vZ2hoiNu3bzNnzhwsLCyEr38s3vv777/T\n2NhIfX29CFwFBATg6OhIV1cX9+7dY9q0afT09LBhwwby8vKoqKigtrYWPz8/5s2bx5tvvkl5eTlS\nqVTk/g8ePMj169cxMDCgtLSUlJQUdHR0BNZsTImJiYlh2bJlhIaGit6AMa3++fPnqKmpCQhsQEAA\njx49Ek1O3t7e4oQek/gOHz4sVo1ycnLMmDGD7du3s2vXLg4dOsS0adP48ssvOXv2rLBxJyQk0N/f\nz6lTp0RuY3BwkOnTpwtJcvr06YICNWPGDOLj46msrOTly5fU1NRgamrKsWPH8PX1FU1IY4PvkJAQ\n3NzcyMjI4NixY6iqqhIWFgYgTGRjF6OtW7dy7949Hj16xKeffkpwcDALFy4Utvquri5CQ0NZtGgR\nMjIy9PX14e/vT3x8PHV1dRQVFf358/HfYaVw4sSJ8BUrVpCUlISjoyOzZ88WkNbc3Fw6OzsFg7G7\nu5t33nmHly9f8sYbb1BeXs7s2bM5f/48srKyom4rLS2NL774QgwOW1tbKSgoYPny5Vy9ehUZGRke\nPXokQBuKioqcOnWKBQsWkJ2dTX5+vrg7mpqaEhcXh5mZGdeuXRPqQWhoKMeOHcPa2pq0tDSGhoZo\nbGwkKiqKNWvWcOfOHbS1tUWfY15eHtnZ2Xh4eHDz5k2xhJwyZQpRUVEi5DQGeCkrKxN9DwUFBWhq\navLw4UPCwsJ4/vw5urq6ODg4oK2tjZWVFVpaWjx8+BBLS0uysrLQ1tamr69PyIaZmZksX75cJCYn\nT55MR0cHNTU1XLhwAQsLC6qrq3FyckJBQYHx48fz3XffERoairKyMteuXcPNzY329naBYDMxMRFD\nOmNjY+bMmcMvv/yCmZkZCQkJNDQ0CF3d2dlZFORIJBKqq6tFd6SDgwOXLl3C1NRUrO5mzJjB0aNH\nCQ4ORl9fn5aWFrq7uzEwMODVq1c8fvyYrq4uIcna2dmRnZ2NtbU1ixcvZt68eRgbG3P16lUOHjxI\nZ2cn/f39vPPOOwIVWF5eTlZWFq6uriQnJzN58mRqa2sJCAhATU0NAwMDfvvtN9ra2nB1dUVRUREv\nLy8SEhJEe3R4eDgZGRkMDQ0RGBjIli1bKCgooL29ndHRUTZt2iRq2hwcHISUfvXqVaysrEhLSwNe\nexNKSkpIS0tjwoQJlJSUYGpqKsBD9fX1olZgzKi0ZMkSli9fzqRJk7hw4YKIyXt5edHf34+CggIx\nMTHY29tz/fr1/5yU5Oeffx4+1nGnra1NXFwcioqKWFhYoK6uLiy9M2fOFJ0KY03O2traPH78mMmT\nJ4sMg1Qq5cGDBxgbG9PY2Mhf/vIX0tLSePfdd8UXbuXKlWLPraKiQlNTE/n5+cjKytLW1sb27duZ\nPn065eXlIk789OlTsrOzWbduHZaWlpSXl+Pr6yum9f39/cydO5fu7m6SkpJYvny56J+sq6ujuLgY\nW1tbnJ2duXfvHgsXLgTA1taWiIiIf4KXZGZmYmxsjIyMDFFRUQwNDTFt2jSSk5Pp7+/H3t4eBQUF\nRkZG8PX1pbi4mPj4eCZOnIimpia+vr4YGRmRlZXFu+++i46ODurq6qJC7cSJE0RERKChoUFnZ6dY\nWj969Ag5OTmcnJxQUlJi9erV9PX1kZWVxcSJE5k4cSK1tbUsWLCAtLQ0fv31V/z9/QkKCuLUqVOi\nNm+sQ3BkZAQ1NTVWrVpFb28vkydPRkZGhtLSUl6+fMmWLVvIyMggPz+fp0+fio5GT09PrK2tGRoa\nEvRpZWVlkSnZtGkTvr6+/PHHH6ioqKCjo0NdXR0LFy7ku+++Y8+ePUgkEiwtLTl37hy2trbMnDkT\nXV1diouLBUrAy8sLU1NTvvjiC5FdePHiBe7u7iJLU1lZyf79+3n48KHwR4zBVoKDg4HX4TtDQ0Me\nP35MRkaGcBpaWFhgampKSUkJN27cwMzMTNCgZWRkSEpKwtPTE2NjY+E+vHv3LuvWrRMhs9OnT2Ng\nYEBYWBhTpkxh06ZNpKSk0NjYSHNzM3fu3EFeXh49PT0mTZpEdXU1xcXFuLm54ebmhomJCffv3yct\nLe0/56Jw7Nix8LGuv7t37zJz5kwePnyIiYkJLS0tVFVVISMjg42NDZ2dnbS2tpKVlcXw8DC9vb30\n9fUJUGl0dDRz5szh/Pnz/PWvf0VGRkZ0IKioqDBu3Djmzp0rcODGxsbo6upibm7O+PHjkUqlaGpq\nIiMjwwcffIC8vDz19fV0dXUxPDxMbm4u/f39WFhYiKVodXU1KioqVFZWMjg4KNqpb9++zatXr7C2\ntiY5OVmk+kpLSxkeHubSpUvMnDkTGxsbTp8+zV/+8heKiopwdHRk3LhxXLt2jU8++QQdHR3i4+N5\n6623MDMzExKlv78/8vLyFBYWIpVK8fDwoKKigoKCAqqqqrC1tWX9+vUMDQ0xPDxMWloampqaPHny\nhJ07d1JXV8eKFSvQ1NRER0dHwH1lZWXp7u5GQUEBgPj4eHx9fSkvLxfDz4KCArKzswVRqrW1lZ6e\nHgGN7e3txdDQkJKSEtFfWVxczOXLl/Hw8EBfX58dO3Zw//59qqqqcHR05Pr16/z2229YWVkxMDAg\nfAm6urrk5eUxefJk0tPTUVZWFnfCsa3HGP3rp59+wsfHh5iYGFGAGxcXx9atW9HR0UFBQQFXV1ee\nPXtGc3Mzly5dErOsMfDKnTt30NXVpbe3l9bWVrF/9/HxQUlJiQMHDmBpacmDBw/47LPPxACzra2N\nrKwslJWVyc7O5vTp06SlpWFubs79+/dpa2tDR0cHVV
VVbt26xdSpUykvL2fHjh0oKyuTlZUlYEcb\nN278J+PSrFmzOHPmDPn5+VhbW5OZmUlhYaHYeqqrq2NqasrPP/+MsbExw8PD6Ovrc/z4cQwMDCgr\nK+P58+f/OTOFoaEh7OzscHV1paamhitXrogKtf7+fgIDAzEyMuLZs2c8f/4cS0tL3nnnHQwMDKir\nqxNR4p6eHlpaWkQIZozTMHnyZIKCgrh27RqxsbFERUXh5+eHmpoaCQkJ6OnpcePGDYyMjHB0dMTY\n2JjExEQWL16Mv78/K1euRElJicmTX6s5CxcuFCf8+fPncXR0FJHa5uZmvL29efnyJVZWVsBr2o+y\nsjJ37txBVVWV2tpaHBwc8PX1xcXFRZS2Tpo0CRsbG9atW0dHRwcLFy4kPj4eJSUl3n//faqrqxke\nHhbYt4KCAoE8d3Bw4MWLF9ja2vLll1/S09PDvn37+Oijj0RRzL59++jo6BDKyfr160lISKCjo4Oh\noSEGBwcJDQ1lypQptLa28vTpU3Jycpg/fz4mJibk5uYKJubVq1fFZ9PU1ERNTQ0dHR3o6+vj4OAg\n7siOjo6EhobS0tLCpEmTmDt3Lrt37yYwMJD4+HhSUlJYuXKlaOA+efKkKDn56KOPiIiIIC4uDuCf\n7n5Xr17l5MmTwm79+eefCw9BcHAwkydPJisrS3RhVFZWkpaWRm5uLl9//TXLly8XvpXAwEAOHjzI\n1q1buXPnDhs2bBBuyTGfx5gzMzw8nG3btpGa+pqEeO3aNerr63F2diYoKIg33nhDFKe+9dZbGBsb\nk5eXx7hx4/Dw8ODZs2eUlZUJoMwnn3zCX//6V0ZGRoiLixPNXO3t7ZiamvLdd9+hpqbGzZs3Wbt2\nLbNnz+aHH36gurqaRYsWER8fj6amJp2dnejo6LB161YsLS1Zu3Yt1tbWzJ8/X5id/uzxb7FS+PXX\nX8Pt7OxoaGhgcHBQLDuDg4N5/vw55ubm1NXViZh0XV0dUqkUY2NjcRVfvXo1Dx8+ZN68eaSmppKa\nmoqOjg6fffYZt27dIiEhAScnJz799FNycnJQUlKip6cHc3Nzbt68ia+vL0lJSaSlpZGSkoKMjAxh\nYWGiJMXc3JwPP/yQ7u5uUc1dWlqKnp6eGCD5+/ujoKDAtWvXCAoKorGxkenTp9PS0sLWrVuJiIjA\n1NSUhoYGenp6sLS05NGjR7z33nscP34cIyMjpFIpy5Yt4+XLl2Kb8Ouvv7Jq1SoaGhq4c+cOLi4u\nDA8PExERwfjx45kwYQIdHR2oqKiQmprKyMgI3d3dzJw5U6xgHj58SFJSkhgKGhgY8ODBA4Gb19PT\nQ1FRkezsbLS0tEhKSmLixIloaWlx8+ZNgZXz9PTkwYMH+Pn5ISsrS2Jiojh5NmzYQEZGBrKysvT1\n9QmEup2dHTdv3sTBwYGkpCQuXrxIbm6uKHhNS0tDTU2NS5cusW7dOkZHR5FIJLx48QJDQ0MuXrzI\ntm3buH37tui6CAwMFJVm1dXVFBUV0dbWRnFxMf39/Tx69AgvLy/MzMy4deuWSNyOldXo6+vT3d1N\nW1sbgYGBYqtRU1MjDGvW1tZiRbdt2zbi4+N55513OHnyJJs3b+bcuXNs2rSJoqIiJBIJMTExvHr1\nijNnzqChoYGtrS3y8vLo6Ohw5coV3nnnHZKSkvDx8RFS882bN1m3bp0A4gwODpKQkICjoyNz585F\nVVUVMzMzEc66cOEC4eHhdHZ2CrftZ599xsmTJ/H29sbExARlZWV6e3uF9T0zM5Nx48Zx//79/5zt\nw/Hjx8PV1dXx9PQUBRXy8vIMDAxQWVlJW1sbtra2eHp6YmJiIkpQa2trWbFiBVlZWejq6lJWVkZx\ncTGBgYFcu3ZNyJAyMjI4OTmRnp7O/fv38fPzY2RkhICAAG7dusXnn3+Ouro61dXVhISEYGhoiKen\nJ+rq6nR0dJCVlUVrayu2trbEx8cLHkBISAgSiQSJRIKJiQnm5uZkZGQwb948CgoKUFVVRUlJiWvX\nrpGamsqCBQuwsrKivLycgYEB7OzsMDU1paKigtu3b+Pm5kZycjLd3d2CeXjlyhU+/vhjIiIiUFBQ\nQEVFhejoaIaHh5k5cyb3798XNtunT58yceJE6urq8PPzw8rKCgsLCywsLFixYgUGBgaYmZnR3NzM\n5MmTxWv5+/tz69Yt+vr6MDMzw8HBgfT0dJYtW0ZSUhItLS0iGNTX14e3t7cocDU3N2fhwoUMDQ1x\n584dTExMSE9Px8bGBj09Pc6dO8eCBQuE3Jueno6vry/Lli3D0tKSwcFBkSC8cOECdnZ2dHV1YWho\nSHNzM+3t7RgZGeHq6kpsbCwODg5Cr798+TL9/f2Ympri5+dHWVkZ3t7egkcRHBzMN998Q3l5OYsX\nL0ZDQ4OlS5cSEBDA4OCgWImOKSZjFuyysjKWLFki7PReXl4YGxtTV1eHm5ubaH/ev38/bW1touRm\nxowZZGdnk5aWRnl5OUuXLuXVq1f09fWxdOlSEhMTBXJwjCM51g+SlJQkUAAXLlzg0KFDJCQkiDar\ngIAA7t27J4aXqqqqTJ48menTp9PQ0CCyD+3t7ejq6pKSkoKGhgYpKSlYWlrS39/PkydP/nMuCl9+\n+WX43r17SU1NxdXVle3btzMwMMDw8DA2Nja8//77mJmZERkZKdh+kZGR4q6hr69PRkYGISEh5Ofn\n8+LFC3Jzc4Uxx9fXV7T2KigokJ2dLSrfcnNzcXZ2JjU1lbq6Otzd3Tl58iR9fX1UVFQQFBTEwMAA\npqamqKqqcu3aNb755hs6Ozuxt7enqakJS0tLMSBNTU3F2dmZq1evivlEaGgoEomE1NRUIiMjCQ0N\nRUZGBltbW65fv46+vj737t1j7dq1uLm5iUHUWPAlOzubt99+WzAElixZQmFhIc3Nzbi6upKfn8+r\nV6+or6+nsrKSkpIStLS0yMjIoLW1leLiYoaGhmhvbyc7Oxtzc3MuXLiAvLy8KFZJSkpCUVGRFy9e\noKysTGBgILKyskLFKC4uxtXVldu3b6Orq8uZM2cE+qyoqEgg05KSkgRxOicnBwUFBfF+v/32G59/\n/jmFhYV4eHiQkZHB1KlT8ff35/Lly6SlpfHDDz+I2vyZM2fy6tUrUR03xnQ0NTXlt99+Y2BgACMj\nI3FR1NPTE63g+vr65Ofn4+npye3bt7GysmJ0dJTR0VFOnjzJ8+fPGRkZwcTEhG3bthEZGUldXR0u\nLi6igGbMviwnJ4dEIqGyspKIiAhWrVrF4cOHSUtLIyIiAmVlZSoqKsjPz2f9+vUYGBjwzjvvCFm1\npaWFtrY2ZGVl2blzJwcOHGDx4sVERkayZs0arl27Jm4oOTk5ZGRksG/fPuLj47G2tiYgIIDa2loR\nnQ4ODubVq1e0t7eL/z8WnNPS0qK0tJSpU6ciJycnqtmMjY3/v6t4l0gk4yQSyROJRFIgkUjyJRLJ\nrn88Hi6RS
Gr/DxL12HP2SCSSUolEUiSRSGb/T+8hKyvLwMAAqampNDU1ceHCBaqqqigqKsLU1JSC\nggIuXLiAh4cHU6ZM4eDBgyxduhRFRUUkEgmDg4MizhoQEEBPTw8Aenp6DA0N0dXVRV9fH3Z2dhgZ\nGfH+++8TExNDTEwMO3bsIDU1ldHRUWxtbfnhhx9wcnJi7ty5oiU3IyNDSKOAwNqN3UETExP59ddf\nkZeXF0vOtrY2Mfgac2c6ODhgaGhIdXU1VlZW3LhxQ0TBARwcHGhvb0deXl4EYHJzczE1NeXHH3+k\nrKyMrKwsTp8+jYqKCgMDA4LIlJGRwfTp05k0aRLLly/Hw8MDa2trfHx8mDJlCqWlpZSVlTFt2jQ0\nNTV5//33BYbsyZMnAoc+Z84cwZSMiIjAzc0NFxcXnJ2dKS0t5ZNPPmHq1KksWLCAHTt2MHv2bP74\n4w9RcFNYWEh4eDjl5eXCmFRVVYW1tTVLly6lqKiIZ8+eoaWlxeDgIBoaGtTX19PS0gK83vubm5sT\nHBxMXFwcM2bMEIyDMbxcXV0dNTU1zJ8/HxcXFzw8PLh9+7ZwCnZ2duLi4sLTp09FpsLMzIwVK1ZQ\nX1/PnDlzCA4OZs+ePdjb2/Py5UsAtm7dSm9vL3v37qWuro5nz54RHh6OlpYWmpqaxMXF4eHhQXZ2\ntghwhYeHc+vWLRQVFXF3d+fx48cYGBhQUVHB6tWrSUtL48qVK9TU1BAfH09aWhqLFi1CIpFgbW1N\nS0sLBw4cQEFBQfBTAbq6uti1a5do2RrrwRxTeAYHB9HW1iYqKgpbW1vc3NwEuEZXV5cbN27g6upK\nWloaUqlUVNL/mTIZwbUAACAASURBVOPPDBpfAR9IpVInYCrwtkQicfrHz77/7yRqgH/8bCXgDMwB\nfpJIJLL/6g1GR0fF4PDcuXOEhYUxMjKCs7Mz5eXlqKqqMm/ePDo6OsjJyUFeXp729na6u7sxNDTE\nyMhI7EPT09OZM2cOAObm5uzZs0cMHi9duoS+vr5IlYWFhQl788KFC1FQUCAoKIglS5bQ1tZGTk4O\nvr6+6Ovr8/PPPwu02dj+W1NTk8ePH1NaWsq8efNobW1l0qRJnD17ljlz5iArK4uVlRVKSkr09fXh\n5eVFQECA+OKtXLmS4OBgkYyD11p+bGws6enpaGtrC+PP2IXPzMyMsLAwEhISaG1tpaioCF1dXT7+\n+GOGhoZ4/PgxL1++JDo6GhUVFRGf9fb2xt/fn+TkZCorK2loaCAsLAxjY2PefPNNxo8fT319vXDV\naWtrY2lpSVdXFx4eHjg4OGBlZUVKSgpdXV2Ym5vz/fffExMTw6ZNmzh58iSpqans3r2bp0+foqKi\nQmBgoOhCbG9vp7W1VVieAwICOHHihACrjDlJx+ZFiYmJ9Pf38/DhQ54/fy5i6UNDQ/T19TFu3Die\nPHnC2bNnWbBgAVu2bMHc3BwdHR00NDSorKxkwYIFwsg1e/Zs0tPThachJCSER48e0dXVxaFDh1i5\nciX79u1DTU2NvXv30tnZiYGBARs2bKClpQUNDQ1mz54tWBpjPQdvvfUWdXV1jB8/nm+//Zauri4m\nTJhAdXU1OTk5AkhcUFBASEgIV69eRUNDg/z8fNatW8fdu3c5dOgQpqamlJeXi6r7gwcPcubMGXx8\nfNi2bRvm5uYiQJWUlERGRgalpaUcPXpUKD+2trakp6ejq6uLiYmJaOcegy7/2ePPoOjrpVJpxj/+\n3Q0UAqb/4ikLgUtSqXRQKpWWA6WA1796j56eHlxdXYHXE3ipVMrHH3/MuHHjhPSSkpJCUVGRcAKO\nWUZVVVW5cOGC2HOqqqqKk9fU1JSOjg4ePnxIeXk5GzZs4NKlS7i7u5OQkEBRURHKysrIycnx17/+\nFR0dHfr7+9HW1iY3NxcvLy/OnTuHp6cn27dvp6mpCXgdw1VQUKCpqYm5c+dy7tw5zMzMBHk4NzcX\nqVSKiYkJra2tDA0NUVlZKXBszc3NBAQEkJaWhr6+vohOd3Z2Ii8vj42NDdu3b0cqlRISEsKxY8c4\nd+4cly9fFsvQL7/8EjMzMyZOnEhxcTGRkZFs2bKFRYsWERAQwMSJE2lqahL9gmPRaRsbG4yNjeno\n6KC6upr169fT0dHB1atX6e3tZWBggLCwMPLz85kwYQIAO3bsECUu+vr6XL9+neTkZJycnEQLdGVl\nJaOjoxQWFjJr1iwyMjJEAc7y5csZGRlh9erVDAwMUFNTQ1NTE1FRUdTX15OQkIBEIgEQCoKNjY1o\noDI0NCQlJUWUoAwNDbFp0yY+/fRTnJyc2LVrl4in9/X14efnJ9yA//Vf/wVATk6O8DKMSZwpKSko\nKSlhYmLC3//+d3bv3s3g4CADAwMEBQUxb948Kioq0NHRITc3l+vXrzNu3Djq6uoICgoSfxuJRCK6\nEZqbmzE2NkZNTQ0vLy+mTZtGUlISmpqaFBQUsH79eoKDg2ltbeX777/Hx8eHvr4+3nvvPYKCgkhM\nTATggw8+QE5Ojhs3boh6+TH5d+PGjfT399PU1EROTg4TJ04kKyuLZcuW4ejoiEQiwdPTk6tXr+Lm\n5iaUuT97/L+SJCUSyXjADXj2j4f+IpFIciQSyRmJRKL9j8dMger/9rQa/vVFBAUFBbZt24anpycf\nffQR7e3t7N27l4GBATZt2kR0dLToNTx9+rSg//T29pKZmSlkKRUVFTw9PbG3txevm5KSwgcffMCa\nNWu4evUqBw4cID4+nu7ubvLz87lx4wb+/v64ubmhpKSErKws9+/fp6OjAzU1NXbv3k1iYiJTp04V\nPXdycnLo6uqiqKhIW1sbW7duJSsrSwBcbGxsmDFjBpqamuzatYv6+npsbGyA1+Qfe3t7urq6aG1t\npbS0FE1NTeC1Yen27dv09fUxNDSEVColJiaGv/3tbzg5OREcHExhYSEXLlxg1qxZDA0NMTo6Kspl\nTp06hZWVFZcvXyYmJoa2tjYaGhrE1H5wcJD29naysrJE59+HH37I4OAgRkZGbNmyhZSUFMGUqK+v\nx8DAgNDQUBISEti4caPoc1y7di319fWYmZmhp6eHlZUVQ0NDbN++HRkZGYFkP3v2LFlZWejp6Yme\nRGVlZd5++21KS0vZsmULmzdv5uOPPwZeU7Ls7OyIjY2lsrKSmJgYPDw8xP46Ojqa1tZWQVOeMGEC\nM2fOpKGhgSdPnpCWlib6MMePH89nn30GIOZMbW1t1NXVic9cW1ubefPmMTAwQF9fH83NzXR1dYmG\nLHV1dcaPH090dDRbt24lLy8PTU1NkpKSgNdbjunTpxMZGckHH3yAmZkZX375JWlpaRw9epSSkhL0\n9fVRUVERUufIyAgyMjKYm5tz584dZs2aRUREBA8fPiQrKwt43RKtqanJpk2b2LVrF93d3aipqeHv\n78/atWuxs7NDR0eH2NhYPD096ezsZOfOnbS3t6Ojo0NlZaVoqZKVlR
U33T9z/OmLgkQiUQOuAu9K\npdIu4GfACnAF6oFv//S7vn69rRKJJF0ikaR3d3cTFxfHpUuXyMnJEWYjQ0NDRkdHmT17NoGBgeTm\n5pKQkIC6ujpJSUm4uLjQ3NzMihUrCAkJYdasWdy4cYOvvvoKABUVFR4+fEhubi7Nzc1oa2uzZ88e\nIVW1tLTwzjvvMDQ0hK6uLtnZ2dTV1SEvL4+JiQnW1tYUFRWhrq4uCMfwGnM35t2Xk5PD2tqahoYG\nWlpa2LZtG4cPHyY7O5vk5GQSExMF+LO2tpbKykq0tLTEMNXd3Z3u7m7gNSo9ICCAWbNmER0dLRx3\nP/zwg/iddXV1MTMz4+jRozg6OqKsrMzevXtxcXFBWVmZt956Cx8fHyQSiZDc3n77bSwtLcUKYHBw\nkKNHj3L37l0BHx0rGdHR0aGxsRFFRUVhrR6b19y4cUN4RWpqakQKdazQxcXFhaioKAEHHhsajwWi\nBgcHOX36NKWlpSxcuBCpVMr3339Pa2srP/74I/D6wnjo0CFWrFiBsbExly5dYnR0lLCwMJSUlBge\nHmbevHncv38fPT09mpqaOHbsGC9evKC1tZXZs2cTHh7OjBkzGBkZ4dy5c8Dri/HKlSv55Zdf2Lx5\nMwkJCXh5edHV1UVUVBSff/454eHhqKioYGNjw6NHjxgaGmJkZIQffvgBZ2dnUlJS6OjoIDIyUiQO\nMzMzqampYdWqVQIxZ2xsjKmpKXp6eqLpS1lZma+++orR0VGioqKQlZUVNey7d++mu7tbtI4DLFu2\nDDMzM3Hih4aGEhMTg56eHh4eHvT397NgwQKWLVvGjRs3UFFRYf78+Zibm5OcnExtbS1JSUk4ODiw\nc+dOMjIy/vS5+acuChKJRJ7XF4Q/pFLpNQCpVNoolUpHpFLpKHCK/3uLUAuM+29PN/vHY/90SKXS\nk1KpdLJUKp2sp6eHuro606ZNo7W1le3btwuSc3Z2NpcvX0ZGRgYHBwc+++wzfvzxRxQVFXnw4AGB\ngYFcuXKF3NxcdHR0WLhwofDaX7hwgblz55KXl4esrCzq6uqYmZnx7NkzvvrqK1atWkVKSgr6+voM\nDg7i4uLCmjVrMDExEUttJycnTExMxGR47BjDzhkZGVFQUEBRURETJkygoqKCwsJCMjMzMTAwwNXV\nlefPn4vIq4WFBVKplJSUFNra2oiMjBSdkmFhYXR2dtLX14eCggLe3t7CJhsYGMjhw4fJzMzkwYMH\nFBUVISMjw+DgIJaWlsTHx5OXl8c333zDp59+yuDgIMbGxixbtox79+5x5MgRpkyZwoIFC6itreWL\nL77AysqK5ORkpk6dSlVVFebm5kJCu3LlimA8tLS0sHDhQi5fviz8/8rKygC8+eabLFiwAEdHRwwN\nDSkrK+PMmTMcPnyYx48fU1hYyLZt27C1tRVlOdnZ2WKib2NjQ3l5OVFRUQDcvn2bSZMm0dLSQmlp\nKYGBgWL109/fz/z586murqawsBBbW1sxLxpzUt64cYPjx48DMHXqVKytrQGEm3TNmjXIysqipqaG\nhoYGv/76q6ii9/LyEtuDkZERjh07xo0bNwSYZgzJt2TJEgYHB4HXMfxnz55x/Phx0tLSePbsGQMD\nA5SUlFBeXi7q7sY4Drm5uTx//py2tjYePHiAjo4Oa9asIS0tjb/97W9igLlkyRKSk5NJTU0lODiY\nP/74Aw0NDX766SdMTEzIzMzk6NGjfPTRRygoKHDkyBFRTLR8+XKam5txcnKiubmZxMRE3Nzc/syp\nDvw59UEC/AIUSqXS7/7b48b/7b8tBvL+8e9bwEqJRKIokUgsAVsg9X96HxsbGxYvXoy8vDxXr14l\nMzOTiRMnkpqaiq6uLoWFhTg6OqKhocGcOXNERVleXh4TJ06ksLAQfX19njx5Iu68Y6GRZcuWiem4\nu7s7y5cv55dffuHJkydUVlbi7+8vugl0dHQoLy9HUVFRAGCam5tJT0/H09MTAAsLC7799ltRAGts\nbIyGhgZPnjxBIpEwMjKCi4sLly5dIjIyEktLS+FzGGMCZGdns379etauXSu+YAMDA3R3dwtoSV1d\nHY8ePSIqKoqLFy9SWVnJ9OnTCQoKorKyEjk5OWGcKSoqoq+vj8TERJYtW8a6detQU1MjMTGRFStW\n0NHRgb+/P6Ojoyxfvpy4uDh8fHxQV1cnMTGRFy9ecPToUUJCQhg3bhze3t5kZGSgpqaGlZUVsrKy\nggC9ceNGcnJy6Orq4vz58ygpKfHVV1/x/vvvC6Tc3/72N+rr63FwcODw4cMMDAygrKz8T/Hosa1E\nXV0dPj4+AHz99dcsWLBAdFE+ePCArKwsvL29qa+vp7e3VxTxuru7I5VKWb9+Pa6urpSVldHZ2cnJ\nkyf55ZdfKC0tFas7eJ1CffbsGerq6sjIyHDp0iUmTJggbN4BAQEcPnyYpqYmQkJCcHd3Z9OmTRga\nGjIyMsKlS5eIj4+no6ODhoYG4DUQ9+2338bc3BxTU1Pc3d1paWnhiy++EIGrdevWCQbD6OgoxsbG\ngnCmoaFBQEAAFRUV/Pjjj+LvEB8fT1BQkKjoG/PoLF68mIGBARGfnjNnDnZ2djx58oRDhw5x6NAh\noqOjxarBxMSEzs5OvL29/6dTUBx/ZqXgC7wJzPg/5McjEokkVyKR5ADTgff+8UfKByKAAuAe8LZU\nKh35V28gLy9PQ0MDP/74I/39/cIz0NHRwVtvvYWFhQU+Pj7Iy8vz8uVLrl27Rm1tLT/88AN9fX30\n9PQgLy9PTEwMqqqqLFmyBHjdpdjY2EhqaioPHjwQg8nTp0+Tnp6OiooKdnZ2dHd3k5mZSX5+PgcP\nHqSkpAQLCwuB4Gpvb0cikVBaWgpARkYGixcvFj39mzdvxtTUlAkTJuDs7IyqqqqoiW9ra+P27dvU\n1NRw5MgR4YkYC76oq6sL4IuLiwvFxcVs27aNI0eOIC8vj6mpqYjT3rhxg4yMDAEZefbsGXPmzCE7\nO1uYcwwMDDAxMUFHR4euri5qa2vJyMggLCyMmpoaioqK0NDQwMjIiLa2Np49e8bMmTMJCAigvLyc\ngwcPYmxsTEVFBV5eXpw+fZqGhgaGh4eRl5dn4sSJtLa28t1331FfX4+xsTEtLS2sX79eBND27t1L\nU1MTv/zyC35+fjg5OQlXp42NDe3t7YSGhqKlpUVqairDw8MCiPPs2TNWrVqFu7s7kyZNIjk5WXhR\nxlij8fHxqKmp0dLSImzWLS0teHh4MHXqVCwtLQkKChKlI4BQq4KDgwkJCaGlpQUTExOsrKyYOHGi\n8Fr4+fnh7e3N8+fPMTIyYteuXZw9e5ZVq1aJ+cfo6CiBgYEAYoD4448/MjQ0RFxcHL6+vty9e5ey\nsjLq6ur4/fffefjwIUuWLMHR0ZGlS5fy+eefExQUxMuXL7l8+TJubm50d3fz5MkT4PXc6sGDB9jY\n2ODn54ePjw+urq709PSwa
NEi1NTUMDExoaqqipcvX+Ls7Ex7eztvv/02BgYGFBUVYWhoyOXLl/n5\n55/FIPfPHP9j85JUKn0K/D+94t1/8Zy/AX/7s79Eb28vWVlZ6OvrExoairW1NVevXhWmDIlEgpyc\nHI8fP8be3p7FixeTnZ3NoUOHGB4extzcXFhqXVxciI2NBSAoKIiEhASUlJSwsbGhuLgYqVRKa2sr\nRkZGYtA3huWaMGGCyMSXlJRw/vx5BgcHxd5zrNZ9y5Yt/P7778jIyLBgwQK++uorJk2axNWrV7ly\n5QqrVq1CU1OT2bNni7t7Y2MjXl5ejIyMUFVVhYqKCiEhIVhaWrJs2TISEhIwNDRk165daGpqCjBp\nUlIS6urq6OnpkZ6eLuYdp06d4uzZs8TGxrJlyxYiIiLQ0tIiMjJSOB/V1NSA1/6HsdnMuXPnOHr0\nKD/99BPOzs5YWFgQFxfHzp07MTExISUlRYSBpFIp/f39hIaGUlBQQE9PDxkZGXh7e/PDDz/Q1dVF\neXk5L1++pLCwkIMHD2JkZISCggJ///vf6enp4e7du2hoaGBiYkJNTQ05OTmCYVFUVMS+fftoamoi\nNzcXeF1Y+vTpU/T19ens7KS4uJiKigqsra25f/8+9fX1vPHGG/T29tLb2ytozMePH0dDQ4OFCxcK\nPmZ2djYbN27k4sWLmJubExERgZKSEp988gmnT5/myJEjDAwMkJeXh5ubG+Hh4bz55pt8+eWXLFy4\nkJ6eHubMmYOMjAxlZWUkJyeTl5eHo6OjGNwpKyujoqLCiRMnSE1NJSgoSNjkx4xvqqqqvPHGG2Rk\nZGBhYUF0dDQbN27k+vXrDA8Pk5eXx9SpU6mtrRWeCUVFRcLDw/nmm2+YNGkSERER4ua0aNEicnJy\nMDY2ZsGCBVRWVtLd3U1XVxcaGhqUlZWxdu1aIiMjmTRpEmvXrhXbsz9z/Fs4Gn/++edwd3d3Wltb\ncXJyIjk5GXNzczQ1NcnPzxdfXD8/P06ePImpqSkaGhr4+PgwceJEvvjiC9atW4ebm5toHs7KymLH\njh00NTWxbt06ysvLhcY8MjIiptxXrlxh7ty5dHV1IS8vT21tLRoaGvT19WFqaiocgQUFBUybNo3z\n58+zcuVKnjx5Iib6gYGB6OnpoaWlxeLFi2lpaRFtQq2trRgaGiKRSHBxceHMmTPMmDFDsBF1dHQw\nMTHh7NmzuLi4ICsrS2ZmpmAbjkWy6+vr6ezs5L333uPWrVsAIil58uRJbGxssLGxYcqUKcjKylJX\nV8eTJ09YunQp7e3taGhocPLkSZEtCA0N5dmzZ5SUlAgjloGBgbBml5eX4+TkRG9vL4ODg5SVlQnp\nOCMjg59++knQoFVUVJg0aRLx8fHcvHkTFRUVHjx4wLx58zA0NBSV9i0tLfzxxx/IysqKqf/p06cZ\nHR3Fzs6OM2fO4O3tjZ2dHeXl5aSnp9PV1UVXVxfwutR1woQJ7N+/n+bmZubOnUt6ejpNTU0CBFRR\nUYGcnBwTJkygublZ5A4sLS356KOPSExMxMzMjIaGBsrLy4mNjaWnp4eSkhLWrFkjQlRaWlpUVVWh\nra3NqlWrBHNi586dPH36VFi49+3bJwpmu7q6CAgIEOyP06dPo6enR11dHXJyckyePJnCwkKGhoaI\njIxk6dKldHZ24urqKmrgxviQIyMj/Nd//Rd+fn7Co9HT00NPTw+qqqpERETQ2tqKo6Oj+Pu/ePGC\nxsZG8TlmZ2ejq6vL8+fPCQkJ4cyZM/85NucjR46Ep6amsnr1alJSUvD392d4eJjOzk4hQ02ZMoW0\ntDR8fHyQSqXMmzePy5cv4+DgQEpKCvb29iQkJPDy5UvCwsI4f/48urq6SCQSDAwMGDduHOHh4fj7\n+4uGJm9vb1GC0t3dTVFRETU1NdTV1TF9+nS++eYbPD090dPTw9TUlOjoaHJycggMDGTz5s04OzuL\noE9eXh4BAQGCS/Dbb7+hq6vL7NmziYqKYty4cVRUVLB+/XqUlZUpKSkRtfJjlWE7duzgwYMHLFiw\ngJKSEqKjo1FXV8fExISmpiZkZWU5efIk7u7uuLi4UF1dLTT8zs5OobE7ODiQkZHB6tWraWxsJDs7\nm8HBQaZNm0ZJSQnDw8MkJiaioKCARCIhISEBa2trzM3Nee+991ixYgX379/H1taWlpYWamtrkUgk\nbNiwgbKyMkZGRlBXV6e8vJwVK1bw9OlT+vr60NXVZcuWLVy6dAlFRUUyMjJIS0sTZPCQkBCGh4eZ\nMWMGra2tREVF4evrS1VVFePGjeP8+fPMmjULDw8PLCwsOHLkCFpaWqxcuZKBgQEhD/r7+1NbW0t6\neroofcnMzKS1tZV169ZRXFyMjIwMo6OjVFVVkZiYiLm5OVZWVkgkEjo7O0XqdeyzGx4e5ptvvhGt\nWZWVlezatQsZGRliYmIoKChg8uTJDA4OsnTpUmRkZDh27JhIu0ZGRtLR0UFmZiYfffQRe/bsYdGi\nRZSWlorYdnp6OoGBgRQVFeHr6wtAQ0ODqJw3MDDAxcWFEydOcOvWLUZGRkSxjJycHMnJySIJrKWl\nRXR0NBKJhIKCAvr6+ggKChJBNDk5OdTV1XFwcBAXxszMzD91UfjfNuf/Pf73+P/P8Z/T5mxlZcWh\nQ4cYGhoS2HdLS0tevHiBq6sr48aN486dOzQ1NbFlyxZu3rxJe3s7q1atorGxERMTE+H8qqmpwcTE\nhA8//FBYX+Xk5JCVlWVwcJB3332XgwcPCkVj7dq1AvQyVh0WFxdHZ2cnFhYWTJs2jbi4OGxtbbl3\n7x5ffvklv/76K3p6enR0dHDhwgVxx3F3d6esrAw9PT2BiMvNzWXDhg0cP34ce3t7ent76e7u5s03\n36Sjo4M7d+4wZ84c/P39hSZfUVGBrKws9fX17Nixg/z8fG7evImzszOurq5IpVLa2toE2Oa7775D\nRUUFNzc3DAwMaGxspKqqiq1bt5KTk0NfXx/6+vp8/PHHbNmyBW1tbaE66Ovro6qqioGBAc7OztjZ\n2VFbW8vx48dRVVXFxsaGoaEhKioqUFNTo7S0lDt37rB7926MjY1RVVWlu7ub1tZW0Zo0PDxMRUUF\nAQEBvHjxQrRxjzVqT5w4EYB79+5hYWHBvXv3GDduHDdu3ODChQvY2NjQ0dHB7du3mTJlCoWFhcyf\nP59Tp07R29uLj48PhYWFuLm5cfnyZf76179SWFhIY2OjqKczMzMjLy+PoqIi9u/fz4kTJ/Dy8uLr\nr79m0qRJDAwMMH78ePbs2cNnn31GWVkZ1dXV+Pv7U1RUxMKFCzl69Cj29vbMnTuX27dvY2JiQm9v\nL3V1dZibm7Nv3z7hZpWXl8fAwIDc3FxiYmI4ceIEBw4cYP/+/Rw9epSuri6mTJlCUFAQd+/eZfbs\n2Xz33Wsxr6GhgWnTpon6N0dHR3bv3s358+eZNm0a77//Pnfu3MHCwoLU1FS8vb3FrG
3mzJlirlNX\nV4eNjQ1xcXFcvHiRS5cu0dzczPPnzzl8+DDjxo37V6ehOP4tSlbGvrSWlpYcPXoUNzc38vPzqaur\nIzs7m5s3b9LU1MSSJUs4c+YMJiYmODg4EB8fz/DwMBcvXkRTUxMvLy/RMARQVFQklq5jstVYCWdz\nczNr1qwRElpfX58I9GzYsIG9e/fy9OlT0ZZz4sQJARUtLS3l999/FwGh+Ph4Ufb6yy+/8Mcff2Bn\nZ8fw8DASiQR/f3+6u7tpampi8+bNGBoa8vz5c0xNTdmzZw9Hjx4FXtO3nz9/zowZM/D392dgYICv\nvvoKIyMj7O3t+fDDDxkaGuLWrVui1ruoqAgrKyuWLl2KjY0NsrKy5OXl4eDggFQqFSWsUqmUu3fv\nkpeXh5KSEioqKvT09ODs7IyPjw/jx4+nqKiIgoICnj17RnBwMFlZWcTHxwtM3/DwMLGxsSxevBgj\nIyOUlJSoqakhKiqKoKAgQU8aGRkR25kxRJtUKqWvr4/e3l4+/PBDEhMTqampYenSpSxevFhIcWfO\nnCE6Oho9PT0hKX722We8evWKxYsX4+7ujqamplB1du7ciZaWFkuXLhVbrbS0NA4ePIiGhoZAuy9Z\nsoRHjx6JQNJvv/3GlStX+Prrr+nv7+ett95i0aJFvHjxgrNnz6KqqsqcOXMEiXt4eFiAYj/88EO8\nvF7bctzc3GhqaiIxMZHKykr09PRwc3Njz549WFpasmnTJvT19TEzM6O4uJhDhw5RWVkp9vkuLi6i\nsPfly5cUFxcDr413b775Jvr6+vT19QmJ2dbWlqioKFasWEFzczN1dXXIysoyefJkli1bhry8PPb2\n9pw/f56MjAzU1dWZN28e8+fP/9Pn47/NTMHLywtVVVWGh4cZP348T548ITg4mMbGRrG3jYmJwdHR\nkd7eXiwsLIiNjUVHR4e33npLWIMnTZqEqakpV69eFdl6NTU1GhsbycjIoKqqCj8/P3R1dWlubqal\npUVEo4eGhpCTk2NoaEhEhcePHy8q2Z89e0Zqairz5s0T7jNLS0tcXFxIS0vj/v37fPPNN7i4uFBe\nXk5ZWRnx8fF8++1rs+fw8DCqqqq8ePECqVTKlStXCA8PZ//+/UL2GnPT9fb2il6GMQ197IP19fVl\nYGAAObnXC72QkBCioqLQ1tbm6dOn9PT0YGJigoeHB5cuXcLCwoLbt28TGBjIhAkTBF49ICAAXV1d\nrK2tKSgo4OnTp/j5+VFbW4uOjg6LFy+mvb2dU6dOERwczNdff82NGzewt7fn5MmTzJ8/n4yMDDQ0\nNNDX1ychIUE0FQ8NDfH06VOcnJxITU1FQUGBr776SuQ1vLy8UFFRISUlhfb2dt544w1+/vlnwsLC\nWL58uehp/te0zwAAIABJREFUjImJoa+vj+7ubioqKsjJyUFbW1vQx5ubm8UcpbW1lZqaGoaHh1m7\ndq3ImZw7dw4fHx/CwsJQU1MjOzubBQsWYGhoKHo4xjCEtra27N69m7S0NJydnTlx4gTl5eWCXB4T\nEyOatnNycjAzM0NdXR1jY2PS09OxsLDAwMCAly9fCgbq9u3buXjxokAJFhcXi9cpKChAUVERR0dH\nzMzMSE1NJTY2FnNzcxQVFXn69KmY0ygpKQkEoLq6Oi4uLmRlZSEnJ0dUVBT5+fno6enx6tUrXr16\nxdSpU9mzZw9vvvkmBgYGf7pk5d9ipaCtrc3ly5dRU1PD3t5eoOPi4+PZsGEDfX19/OUvfyE7O5va\n2lrCwsIoLy9n2rRp7N+/n+LiYtGdcP36dRHDfffdd7l37x6Dg4NUVVUhkUjYsWMHBw4cQElJiays\nLGbPni1sw9XV1SJu+urVK2xtbUlMTEROTg49PT02btwIvNa8DQ0NsbOzIy8vj1mzZtHV1cWRI0do\naGjgypUr9PT0MGvWLPbv3096eroIWt26dQs/Pz9kZGSQl5dn3759REZGAoh4tr6+Pk5OTsTFxQnQ\n65hcVVlZye3bt4WEN3nyZNLS0kTS8eOPP+aDDz5AVVWVpqYmdHV1sbCw4IsvvuDFixckJSWxZMkS\nent7/6/23juqymtb4/699F6lSAdDR5qAYMEaFRW7IbG3eI0tORpTNFFjikk0aryJHqNRY4lGxYIo\nUUEEQaSDNCnSkY4gXYH9/aGsL7nj5h7v+L4bPWPwjLEH2xcGTNfe79xzzTWf56G8vJzMzEwyMjI4\nduwYfn5+nDx5koEDB6KlpcVHH33E3r17CQ8P5/79+0RFRfHdd99RWlrK66+/zsOHD/H39xdVko6O\nDiEhIbi5uTF27FjGjx8v/BRsbW05fPgwe/bs4e7duwDo6OgwceJEEhISROJUUVHhxIkTqKmpER4e\nzuDBgzl9+jQ1NTWCwVpSUsKlS5fQ1NQkPz9f6ESMHz+epqYmPD09OXr0KKGhoSQnJwPPtqibN2+m\nsrKS1NRUZDIZ/v7+xMTEYGFhIQxywsPDqa+vR0tLi6qqKjZv3sy0adO4e/cu3d3dbNu2jXv37olK\nYdq0aWhqauLs7My4cePQ0NAgIiKC4cOHU11dzbp16/juu++YNGkSJSUlaGtrc/v2beTl5ZGXl+fq\n1avMmzePmJgY4ZYNsGXLFpydnQkMDBRS/E1NTWJ+p7y8nP3796OtrY2NjQ3Ozs4iOU+aNIkHDx5w\n8+ZN5s+fz/Xr18XvfRG8Ekmhu7ubwMBAurq6xDTaypUrsba2pq2tDWVlZbZs2cKGDRuYMGECa9eu\nRU5ODnV1dS5dusSBAweYP38+9fX1eHh4CFvzzz//HHd3d27cuEFCQgKenp78/vvvrFmzhuvXr7Ng\nwQLu3buHgYEBEyZMIDMzk7NnzzJlyhQ0NDR48OABjo6ONDU1kZ+fT2trK/Bs+rK5uZk5c+ZgZGQk\njFT++c9/8vvvvzNz5kxycnIwMjIiJiYGExMT9PX1GTduHOvWrSMxMVFw83uFPeEZuaaiooJFixaJ\nacqCggIxORkZGYmpqSkWFhY4ODgIkZb+/fuTm5tLTU0NVVVVREdHU1FRwePHjxk5cqQYoukd1d65\ncyfOzs6YmZlhYmIiPs2srKyIjY2lsbGRvLw8hgwZwmeffcb7779PTk4OWVlZbNy4kadPn5KVlUVP\nTw8FBQUMHz5caFZs27aN9vZ2amtrcXZ2RltbmwcPHhAeHs758+fx9/enq6tLDF4FBwdz+PBhMd7b\nq2DVOySlp6fHN998Q2pqKikpKZSXl7N9+3Y0NDQYNWoUy5cvF8pIly9fZvXq1SQmJjJ69GiMjY3F\nINsPP/zAlClTsLS0pKenB5lMxsaNG/Hw8KCoqIgTJ07g4+Mj5gqSkpIEgerChQviqG/+/PnY2tpy\n/vx5AL755hvxOvceYcbHx7Nq1SqMjIxQUVFh2LBh3Lt3j/r6es6dO4evr6+osJYvX05GRgZr1qzh\n66+/ZtmyZQCEhIRw9+5d5s2bR1xcHF988
YU4HjYwMOD+/fs0NjZy9epVWltb8fPzw8jICEtLSxYs\nWMDo0aOpqqoSBrW9hLwXwSuRFBQUFHBzc6Onp4cPP/wQFRUVtm/fTmlpKZ9++iktLS10d3dTUVHB\n6dOnGT9+PC4uLri5uXHz5k0hSGFnZ0dPTw9PnjwBYPfu3VhaWrJw4ULs7e1xdXUVY8uNjY0kJiYS\nFxdHT08PVVVVODk5oaenx6lTp+jp6SEhIYHIyEhyc3Px8/Pj9u3bANjZ2VFQUEBXVxdPnz4VLlLv\nvfceS5YsQUFBQUiBW1hYYG5uTkxMDAcPHqSlpQUtLS3s7Ozo7OykX79+GBsbA5CZmSmMUfv168eI\nESNwcHCgu7sbSZKYN28ekiSxf/9+UlNT2bdvH66urujo6KCvry96IO7u7sIrIyQkhAkTJlBfX8/q\n1atRVlZm+/bt1NXVcfjwYUJDQ4W1+y+//MJbb71FS0sL69evR1NTEycnJx4/foy+vj4lJSUkJCRQ\nWVlJdnY2GRkZoiqIiYlBVVUVe3t7CgoKyMzMpKSkhMGDB6Ompoa2tjbLly8nMjKS3bt34+joyLhx\n4ygpKaGsrEw4YsfGxuLt7U1JSQlbtmwRKky9PaFevYPp06ejo6MjYouOjsbe3p7z589TUlKCnJyc\ncHYGMDAwEM5gOjo6DB8+nE2bNolS/M6dO+jq6uLr60tmZibl5eWsWLGCZcuWYWVlhZOTE5WVlfj7\n+wsxGng2hfr5559jaWlJQ0MDa9asYdOmTfznf/4n5ubmXLlyBXV1dfLz81FQUCAwMJD+/fvz888/\nExsby7Vr1+jo6KCpqYnZs2dz5MgR4JkZzIEDB4iKisLX15fjx49TVlbG9OnTaW9vZ/r06cJ1rKCg\ngJ9//hktLS1hVWhlZcWSJUsYNGgQ6urqQhD2RfBKJIXOzk6ysrLQ19fH3Nwca2tr1q9fj76+PhMn\nTkRJSQkbGxuCgoIwMzPD09OTmpoazp07R11dHfr6+sjJPfuvxMXFiZvs7t27PHnyhI6ODtLT08nM\nzGTChAmCaThq1CiWLVtGaWkpbm5ulJSUMGbMGF5//XXxO5OSkliwYAEXL17E1dUVeKYUVVZWxi+/\n/MKDBw/EHHxKSgrt7e0UFRUJRp9MJiM+Ph4XFxfefPNNtm7dSlpaGg4ODrS1taGioiIs7p2dnfHx\n8RGTgHJycpw7dw41NTWWL1+OqqoqJ06cYMKECVhbW+Pu7o6ZmRmnT58WHez8/HyampqEgtOAAQO4\nfv06CQkJbNiwgfHjxwsfhbFjx2JgYMDw4cO5fv0648ePp7Ozk/j4eBYsWEBNTQ0ffPABxsbGgnnZ\n0tLC/fv38fLyQk9Pj6+//prt27dTVFREXl4e/fv3Z+bMmVhbWyNJEhcvXuTp06esXr2a3Nxcjh8/\nTnJyMlevXmXZsmWoqqoSHx8vFJs///xzrl27BoAkScydO5fu7m6uXbvGkSNHcHZ2FuIvZ86cQVNT\nUwx+ZWdno6ioyNtvv01paalo1Pau7fDhw4V8XmJiIps2baK2tpaAgABBG7927RpPnjwhPz+fqqoq\nfvrpJ3x8fNDQ0MDS0pJly5ZRUlIitg/d3d2sWLECHx8fbty4waeffsq9e/e4fv06W7duxdrampSU\nFBQUFNiwYQOqqqoMGTJEqGW/9957ggshJycn5hcWLVrE4cOH0dXVZcSIESxfvlwYCrm7uyNJEmvX\nrqWnp4e7d++SlZVFc3Mz77zzDrm5uURHRzN79mw+/vhjGhsb/+/0FP6v0NPTQ79+/bhy5QqRkZEE\nBASgoqLCwIEDheORsrIyO3fuZOTIkURFReHv7y/GmmfMmCHUgPbu3Yu8/DOhp6KiIjo7O/nxxx8x\nNDTk7NmzNDQ0cOPGDUG7bmtrEwyyXieehoYGysrKGD16NOvWrWP+/PlMmjRJjIr2MuFaWlqYMmUK\nQUFByGQytLS0SExMJCkpiVOnTrFv3z4UFRWxsLCguLiYW7duoaWlxbx588jLy6Ozs5Pq6mrB6lux\nYgXR0dFkZGQIXYKBAwdiYmLCBx98QElJCWpqakydOhV1dXU0NDQoLy/HyckJU1NT7t+/T21tLSkp\nKUydOhVNTU3hrylJEj4+PsTExPDgwQNkMhmZmZmEhYXx22+/kZGRQVZWFu7u7hgaGnL58mXq6upY\nu3YtRkZGeHl5YW5uTm5uLmvWrGH48OHk5+eza9cuWlpaOHjwIO3t7fz6668YGxtTXV1NR0eHcMwK\nDg4mISGBNWvWMGDAAMrLy4U83B+ru8bGRiwtLamqqmLlypVkZmbi4+NDYGAgzc3NlJWVYWZmJgbC\nsrOzmT17Nvfv3yc5ORlFRUUhGx8aGkpvI93V1ZXff/8dSZIoLCwkLi6OY8eOYWhoKJLk6NGjRUUw\nb9483n77bXHC1dzcTGVlJeXl5bS0tPDbb7+J90xnZye//vorpqam7N69G0VFRby8vFiwYAHp6emY\nmJgI1e5BgwbR0tJCfX09Bw4cQF5eni+//JKuri6OHj1KcHAwAG+99RaSJHH9+nXi4uIYPnw4EydO\nFEpOcXFxhISE8MYbbzBq1CjOnj0rpO5qa2vx9fVl3bp1ODg40NjYSHp6+gvfj69EUtDS0uLy5cuk\npKSQnp5OTk4On3zyCbq6uoKtKC8vj7e3NwsWLGDQoEHExsaKjm1iYiKamprs37+fsLAwMT/e1dUl\nzF7GjBlDdXW1mHS7dOkSR44c4dGjR0Liqqenh4qKCjGjAJCcnIyfnx9FRUUsWrQIeMYlqKysxNvb\nm+zsbK5du0Z8fDylpaViLz1nzhw0NDRwcHDg8ePHTJs2DW9vbzZu3Cj09ZSUlGhsbGT27NkAbN68\nGXt7exwcHFBRUSE2NpaMjAzmzp3LzZs3GTFiBCtXriQ1NZWQkBDmzp1LaGgovr6+xMXF0b9/f4KC\ngviP//gP0tPTKSsrY9KkSbi5ubFgwQKSkpLw9fXlo48+QklJienTp3Po0CHBsjQxMREJprcMr6qq\nwtbWlvT0dM6fP4+xsTHHjh3DxcWFyspKQWHu168fEydORJIkLl++zNSpU8nPz2f58uWiCRYYGMiI\nESPIzMzkk08+Ye7cuQwaNEgQvuCZ/+W9e/ewsLBg/vz5wLOj5XPnzjFgwADi4+O5desWkydPFvoP\nvexIZ2dnfvnlF+HAtWrVKqF7EBMTQ1VVlVBgGjBgAOHh4cyYMYOKigqqqqrYvXs3Pj4+5OfnCzau\nqqoqMTEx3LlzBx8fH/Ly8jA1NWXw4MHAM2eojIwMfv/9d8HxqKyspLm5mb179woxmz179rBq1Sru\n378vTIabm5vZt28f/v7+uLm58dprr4lj7/LycuTl5Vm8eDHV1dVYWlpy5coV3nvvPSEWq6qqSktL\nCzdv3iQ3N5empiYyMzMxMDDg5MmTFBQU8PjxY4qLi4VpzIvglTiS3L59+9Zvv/2Wx48fU1hYSGZm\n
Jo8fP+bRo0cUFxcze/ZsmpubGThwIPr6+hQWFnL9+nU6OzvJz8/H39+fW7duERgYiLW1tWgObd68\nmeLiYrq7u/H29mbGjBmEhYVx+vRpPvnkE/T19Xn69CkmJibIy8sLdSCZTMa1a9fIzMzExcVFEE3k\n5OQ4efIkJ0+eZOHChZiamuLu7s7du3dZsWKF8FPMz8+nra1NuDfNmTMHmUzGvXv3hIJP//79kclk\n1NTU0L9/f86ePUtnZyeBgYGcO3eOuLg4Jk2aBEBOTg6vvfYaMTExHDt2DC8vL6He02vgamZmhq6u\nLv369WPnzp188sknwmy3lxEJz86/161bJ/bP9fX1jBkzBplMhouLCx9//LEw7S0qKhLKTN9//z1h\nYWHY2tri5+fHpUuXcHd3Z+/evWIeodezo6KigpycHLS0tMjOzhZGp/Ly8oLivnHjRv75z3/i7OzM\nsWPHRJUVFBTElClTaGhowMPDg/T0dOLj41m+fDleXl5Ctaq7u5uuri5UVVWxs7MjISEBOTk5Zs2a\nRWpqKuHh4bi4uJCZmcn169dF/+Ef//gHOTk5lJeX4+/vz5kzZ3j48CF6enp89dVXlJaWoqGhQXx8\nvBhm6hWgLSoqYtSoURQWFqKiosKVK1cwNjZmzZo1tLe3k5ubi5PTM/nSixcvMnr0aDw9PWltbUVX\nV5czZ85QXFyMkpISX3/9NeXl5RgYGNDS0oKhoSE6Ojrs2bOHuro6du7ciZqamlCuWr9+vejjWFpa\n8sYbb2BgYEB+fj4LFiwQPg+jRo1iz549Qjyo11R537591NfX//scScIz5uHo0aOxsbFhwIAB7Nix\nA0tLS1asWEFpaSljx47l6dOntLW1ER8fj5GRERUVFZiZmfHo0SM0NTWxtbUlIiJCyJslJyfj6+tL\nY2MjO3bs4KeffhIuyampqcJ8RUFBAW9vb8aNG4ckSXz33Xfcv3+fGTNmsGLFCmHy2VuBpKamUlZW\nxtWrV1FVVcXd3Z1+/fqRnJzM8uXLhT19aGgoTk5O/PbbbxQWFjJ06FA+/vhj9u7dS3NzM/n5+dTV\n1QmNxrVr13LhwgXh0nT37l0mT57MxYsX6ejoYNSoURw6dIi4uDgGDx4sXnSZTEZeXh7x8fHs3buX\nnTt30t3dTXl5OTdv3qS9vZ2CggKcnJyEsvSTJ084efIkcXFxgjwTGhqKvb093t7eDB8+nPXr1xMZ\nGSmk2f39/Rk5ciR5eXk4Oztz5swZdu3ahYODAw0NDejo6LBv3z6qqqqYNGkSN27cQEtLCwUFBdzd\n3TE2NmbixInIyckxbNgwfv75Z3x8fBg8eLBw9O49qVFXVxeU3yVLlhAcHExtba0wf2lsbBQ29b3l\nei87UkVFhc2bN6Oqqip0BJYtW8bQoUPFqZW3tzeHDh3i4cOHSJLErFmzqK2tZdiwYdTU1PDVV1+x\nbds2+vfvL5SwezU31q1bR09PDwBBQUHMmDEDPT09eikDYWFh2NjYCJJcaWkpMTExGBkZ0dnZiZyc\nHI6Ojnh6erJ27VqWLFnCgwcPePLkCTt37gSgsrKSo0ePCvfonJwcGhsb8fX1xc7OjoMHD3LixAm0\ntLQ4c+YMTU1N7N+/n+bmZvz8/FBVVeXy5ctYWlqSk5PDxo0bX/hefCUqhe+//37r+PHj0dPTEw26\nhIQEXF1diYqKYubMmWRnZwsX55kzZ6KlpcXYsWMJCQkRo9G99nLHjx8nLS2NiRMncvbsWdzc3Bgw\nYACPHz+murqaN998k7CwMNFLUFRUxM/Pj9jYWDo6OnBzc0NPT4/i4mK8vb2Rk5PDzs6OtrY2zp07\nx5w5cwgICMDGxobLly9TUFBAREQEQUFBTJgwgcLCQpKSkli6dCnLly9nyZIlNDU1sWrVKnR0dNDR\n0WHQoEGEh4djaWmJqqqqaJqpqalRWFhITU0N48aNIyEhgXnz5pGQkEBXVxfJycnMnTtX9D9UVFRQ\nUFCgpKQEe3t7Ro4cycWLF7ly5QqhoaF89dVXeHp60tnZSV1dHYGBgURFRfHGG29QWFiIt7c3GRkZ\ntLS0EBQUhIODA4cOHWL79u0EBwczfPhwEhISaG5u5t1336WlpYXi4mJsbW1paGigqakJW1tb9PX1\nuXz5shBdjYiIYPHixejo6KCnp0d3dzdVVVWYmJigqamJubk5paWl3LlzBy0tLZ48ecKVK1fw8fEh\nNDSU1tZWPD09cXZ2JiQkhMDAQIKDg1FVVcXFxQUvLy+MjY25deuW6PG4urpy7tw5DAwMqKqqorGx\nUQx8+fv7Y2dnx7Vr10hLS+PJkyeYmZkxZMgQHB0dKSsro6ioiPDwcObOncvjx485ceIEXV1d6Ovr\nM3nyZKKioujp6RF+mKdPn+ann36iurqaqqoqtm3bRklJCVZWVgwbNoy4uDhBe+/p6WHq1KkMGDCA\nOXPmcPnyZfLy8jhw4AAmJiZivD4uLo7s7GxcXV2xsbHBzMyMsrIyMaHa3d3NjRs3mDZtmhCI7R3c\n+/HHH0lPT2fDhg3s2LGDpUuXkpOTI6T6zp8//+9TKairq6Ovr4+Hhwfd3d3U1NSgoKBAXV0d165d\nIyoqisWLFwv24KFDh8jKykJZWZkBAwbg4+Mj9BhCQ0PF3j8kJIS3335bdOMfPHiAsrIyxcXFODk5\nkZKSQnFxMWpqavz666/8+uuvQnxDQUGB1NRUJkyYQFFREfLy8vj6+gJw+/ZtHj58SHZ2thgCWrZs\nmTA1dXZ2ZsuWLURGRnLlyhX27Nkj3I+zsrIE81BHR0co5sAzZWB9fX0UFRWZPHkyubm5fPbZZyQn\nJ9PR0cHrr79Oenq68Fs8f/48ampq1NbWYmVlhaOjI4cPH8bZ2RkXFxfmzp1Lc3MzYWFhhIeHi8Gs\noKAg9u7di6+vL1VVVWRlZQlPytLSUj788EPmzJlDVlaWMF9NS0vjm2++4c6dO7i6uiIvL8+cOXPE\npGnvp+KwYcPQ1tZmwoQJ3Lhxg/v37/Pzzz8LH4je/fTt27dJS0ujtLSUsLAw8V6wsrLC0tISV1dX\nIiMjsbOzY/r06ejq6uLh4UFZWRl6enqCGfr06VO0tbXR0NCgoqKCL7/8kqqqKiFH3+sy/vDhQ3Jz\nc9HW1qa5uZmQkBCcnJxwcHAQjdEpU6aIT9TExESCg4O5e/eusH8DqKmpYcqUKULZu6amBi8vLwwM\nDIiIiOD8+fMcOnSI1tZWbG1t0dDQ4N69e7i6uhIWFkZTUxOnTp0SmhW9Bjuenp54eHgIrUpNTU3c\n3d0ZPHiw4I54eHhQXV0tjpx73cuamppobm7mrbfe4quvvhKelL06JFVVVaJX8SJ4JSqFrVu3bu3V\nSEhKShKlpqmpKf379xc6hE1NTbS2trJmzRq8vLw4ffo0s2fP5sKFC3zyySdERkYKjnxmZiY3btzg\n6NGjzJkzRxif+Pn5kZ+fj56eHg0NDTg7O5Oenk5JSQlLl
izBxMQEHR0dYmNjRYbvFX399ttvyc7O\nxsTEBF1dXX766Sd27dqFgYEBXV1dyMvLk5ubC8CpU6dYuXIlZmZm3Lt3Dw8PDyElVl5eTnp6Oqqq\nqjg4OLBnzx62b98u3KMlScLR0VGcEixatIiAgABSU1NZunSpkC+3tbWlra0NFxcX0tLShFaEu7u7\neKNu2rSJmzdv4ubmhqmpKUpKSsjLyzN+/Hi0tLTEnEevOrWWlpYw2Pn444/Jz8+nurqaqKgoFixY\nwOuvv86WLVtEg+7hw4eYm5ujqqrK2LFjiY6OZtCgQTQ3N2NnZ4eFhQUNDQ2oqqpSUFBAd3c3YWFh\nKCsrM378eOTl5XnnnXfEdqyXFl5fX89rr71GaGioMANydHREWVmZwsJCLly4wKxZs4TM/JMnT0SP\nRElJScyk6Ovrc+bMGVatWkVsbCwjR45ER0cHPz8/+vXrR1tbG9999x3vv/8+FRUVHD16VIjs9kqz\ntbe3ExwczGeffUZbWxthYWFoaWlx48YNqqurGTp0KGVlZcyePZuzZ8+ybNky6uvrxelLbm4uubm5\nbNmyhTt37pCVlYWPjw/BwcF4e3sjLy+Pjo4ODQ0N4kRo3bp1XLhwAUVFRWbOnIm8vDybNm1CXl4e\nAwMDoRzeuxXs/YDcv38/AQEBjBs3TrikV1RUUF5eTkRExL+PnsLevXu3njhxguPHj6OhocGIESNI\nS0ujqqpKTKgFBARQXFzMtGnTSE5OprW1FRcXF+zt7SkpKUFJSQlzc3MqKyvR09MTWo0ff/wxDx8+\nZNeuXchkMrKzs6mursbJyQllZWWKioqEt2BeXh4lJSVCZcnNzY329nYePXqEh4cHN27c4MGDB+zZ\nswdDQ0NRtvcmA0NDQ1JTUxk6dKgQEbl16xaSJIlZBl9fX8aMGYO2tjY9PT0oKiqiq6vLDz/8wJEj\nR1BQUEBBQUHoJPQq/OzcuZOkpCTc3NzIyspCTU1N2LLdunULDQ0Nenp6KCsro7y8nKKiIiRJws7O\nDkVFRT788EOio6MpLy+nuroae3t7li5diqmpKcOHDycxMRFLS0vy8vKoqKjA29ubsrIysUbz589n\n3759lJeXY29vT1NTE3Z2dsjLy6Oqqkp3dzfR0dFMnDiRrKwsOjo6qK+vF9WGjo4O6urq1NfX4+vr\ny8CBAzE0NBQuzL6+vhw9epTVq1cLY92cnBxmzJjB9evXMTY2Ji4ujkePHqGurs69e/eE6I2VlRXV\n1dXExcVRVFTE4MGDaW5uFqdPR44cYfXq1URERHDw4EHmzZtHSkoK5ubmHDx4kE2bNnHx4kXk5OQo\nKSnBxMQEbW1tcnJyePDgAQYGBhgbG5Oenk5NTQ1ycnJ0dHQQGxuLv78/BQUFwkfjk08+4fvvv2fy\n5MkcOXKEp0+foqCgwLx58/j555959OgRCxcuFPL1r7/+Oq+99hoJCQno6uoKnxJvb2+8vb1pbGwE\nEL2Z3jkYR0dHkpKShKtYr4GOt7c3ubm5JCYmIi8vL/p0zxu2/z7bBzk5Oc6cOcPMmTOxsrLi2rVr\nJCQkoKKiwvfff8+UKVNQVVUVKjO9N/9PP/3Etm3bmDBhAtevXyc9PR07OzsxjvzkyRNu3bpFZ2en\nIP/U1dVhb29PW1sbAwYMoLS0lNbWVvr160dVVRVLly6lpaWFkSNHoqenh56eHgEBAVRVVYkm0IUL\nF5DJZEydOhUvLy9CQkIYOnQohYWFdHd3CwPaL7/8koKCAlRUVGhsbCQ7O5ubN2/i5eVFZGQkcnJy\n1NfXi5HZ8PBwOjo6UFJS4scffxRdZX19fTHfEBoaKhpsPT09dHd3Y2pqyoABAxg9ejSWlpbCji0/\nP5/g4GAhlb57924aGxvx8fHhzJkzjBo1infffZfg4GCmT5/O48ePmTVrFoCwk4+Ojsbf35+NGzcy\nbNhyYrqbAAARDElEQVQwjI2NcXBwYPjw4ezYsYPp06fj4OBAVlYWGzZsQFNTk+LiYoYMGYKamppI\nPkOGDKG8vBwbGxuSkpLQ1tbmwIEDwmLt0qVLAILk9vTpU0aNGkV+fj4+Pj4sWrSIrVu3YmpqipOT\nE0FBQSgpKTFr1iysrKzECcyMGTOErHvv6w8QGhrK7NmzOXjwIEuXLmXYsGGcPn2aQYMGUVhYKFyW\nejkpUVFRZGdnM336dDGD0tPTg5aWFqqqqkKar1+/fixatIjq6mo8PT1JTEzEycmJ0tJS5s6di7Ky\nMt3d3eIUavHixezatYs1a9Ywbtw4KioqUFdXx97enoyMDDGWPWjQIKKioqirq8PT01MM9/VOVz58\n+JBBgwaxd+9eMQQ2ZswYdHV18fb2Zvbs2WRkZHDy5EkOHjwohq1e6H78/3Y7//+H1NRUUb5aWVmx\nY8cO5OXlSUlJYf/+/eTn5+Pi4sLt27exsrLC3t4eOzs7vL29iYuLQ1tbG2dnZ/EiwTO9P21tbXR0\ndAgPDyc5ORk1NTUsLS0pKCjg6NGjeHl5UVpaip6eHoMGDaK0tJQhQ4Zw8+ZNvL296erqIjc3l/b2\ndmG9NXbsWJKTk0lNTcXGxoaAgADOnTvHokWLWLlypbBYCwoKwtXVlZycHGxsbEQDcdasWTg6OqKh\noUFCQoLw+TM2NubRo0fCDyImJkacDEydOpXa2lphsnr16lW8vLxoamrCxMQEPz8/lJWVhcNQSUkJ\n7u7u4tP5u+++Y+HChcjJyREdHY2hoSEBAQGsX7+eKVOm4O3tjY2NDfn5+YwZM4YDBw6wYcMGYSe3\ncOFCxo4di7a2Nrdu3eLEiRPs2bOHffv2sXXrVm7fvs3Fixe5fPkyFhYWgjbe2NhIa2srP/zwA+np\n6RQVFaGkpMSHH36ItrY26urqNDY2snTpUuDZCUxDQwN1dXXs37+fjIwMzM3N6e7uFnqVUVFR+Pj4\nsHPnTtLT0zl+/LgwQfn222+ZO3cu7e3tDBs2TCQ5JSUlKioqsLOzY9KkSSgpKWFmZsa0adMoLS1l\n3LhxaGtri+PD4uJiVq1ahZqaGlevXsXPz09sTSwtLcUJl6urK01NTaxfv16wZH19fUlPT+fkyZPc\nv3+fgoICwerdtWsX9vb2LFmyBCUlJfLy8ggJCWHz5s3U1dWJ3/v1119jZ2cnSHcVFRVUVlbS3t5O\nZ2cnlpaWeHl58f777/Ppp59y5swZFBUV+f3331m3bh2zZ8/GwMBAbA1DQ0Nf+F58JZJCa2srs2fP\nprKyUlBDe1VoFy9eLFynExMTMTY2FkYiBQUFFBYWoqCggJqamkgcve494eHhFBUV8eOPP7Jt2zY8\nPDywsbHB1tZWuAnX1tYKhWeZTEZnZyeqqqpiT+nh4UFtbS2SJImFtbGxQV9fn46ODk6dOsW8efO4\ne/cu+/fvp6ioiOzsbKqqqkhISEBTU5O2tjby8vLw9/cnIiKC8vJyrKysqKio4MCBA0J0pLei8fX1\n5c0338TQ0BBJ
ksjLy8POzo67d+9y5coV9PT0hIaku7s7I0eOJDY2ls8//5zPPvsMOzs7vLy8MDEx\nwdLSEmNjY95//31sbGwwMTHB3NycUaNGcerUKbZs2UJ+fj737t3j9u3bNDU1UVZWhqGhIcePH6en\npwcPDw+xjev1YugdGuodmmlvb8fIyAhHR0dSUlJISEhg9OjRwnrNzs6OMWPG4O/vj7KyMiEhIUye\nPBkbGxsGDx4szEp6FaZVVFTQ0dFBQ0OD3NxcUlJSCA8Px8LCgunTp6Ours6UKVO4f/8+EydOxNLS\nklOnTgl5PkVFRerq6kT5XVNTQ0dHBzExMcyfP5+jR4/S2NhITEwMHh4eHD58mMePH1NXVyd0PWpr\na7lw4QKLFi0S1PP6+noh+gLPms7x8fEYGxszZcoU5s2bR05ODnp6euzcuRMnJydmzJiBra0tNjY2\nWFtbo6+vL95nvYnD3d2dFStWiHtix44dqKioYGhoiJycHF1dXejp6aGoqEhLS4twM+v1CP38889p\nbW2lra2NX3/9lWnTpqGtrS08Kv84IPav8Er0FHbs2LHV0tIST09PUlJSePr0qSjxepmLrq6uNDY2\ncubMGf7xj38IUw0PDw8xb2BpaUlcXBwDBw7k4MGDfP311xgYGGBpaUldXR3q6uooKyvT09NDfn4+\n3t7eFBYWEhQUhLu7O2lpaZSVlYnS78iRIwwaNIjXXnuN9PR0jI2NuXbtGmvXriUjIwNtbW2cnJw4\ncOAAmpqaJCUlUVRURENDA4GBgeINvWDBAoyNjQVfQVNTkx07dnD27Fn09PTw8vLi0KFD2NjYYGRk\nRGVlJYWFhQwePFhYjIeEhLB8+XJUVFSwtLSkpaUFBQUF/Pz8hCdhZ2cnFy9exMHBgbNnz2Jqaoq5\nuTmtra3Y2dkhk8lISEhg8eLFxMbG4u7uTlhYGAUFBYwdO5bKykrk5OSIiIjA39+fkJAQYU/W22fo\nLY39/f354YcfWLFiBUZGRowYMYKqqirq6+uFK3dqair379/HyMiIa9euCfpuXV0dNTU1uLm5kZ+f\nT2BgIDExMdy8eRNNTU3s7e2RJIm0tDTMzMxob29n2rRp1NfXk5+fj6KiIq2trfT09ODp6UlbWxtJ\nSUn4+/uTnJyMv78/ZWVlaGlp0dDQwOnTp9m8eTOxsbHo6enh7u6OqakpPj4+JCUlcefOHXbv3i0q\nqNTUVFpaWhgwYAD6+vocO3YMFRUVurq66O7uZs+ePQwbNoyLFy8SHh7OjRs3MDQ05MmTJ0RHR7No\n0SLq6+uJjo6mp6eH5ORk0tPTaW1tZeXKlQQHB6Ovr8+dO3f48ccfGT58OMuXL6erq4vy8nLCw8PR\n1NTk6dOn3Lx5UzQbW1tbhXFRVlYW2dnZ7Nmzhz179vDLL7+gpKSErq4uampqDBkyRJjvBAQEUFZW\nxs2bN/99egrm5ubk5OSgr6/PokWLGDBgAMXFxZSWlgqbbUmSKCsr44MPPiA0NBRjY2NcXFxITEzE\nx8cHBwcHQkJChKsUPONUJCUlYWFhgbW1NQYGBigqKtLR0YGtrS1ffPEFOjo6lJWVcerUKXx8fAQ1\n2cbGhoULF3L27FkiIiLw9vYW9NNePryCggJWVlaUlpZiaGjIL7/8Iui50dHRqKqqCju5R48eERYW\nRkJCAv3798fd3Z3IyEh++OEH0YTU1dVFXV2dpqYm7O3tefjwIbGxseTm5rJx40bKy8tJTU0lKiqK\n8ePHC0+LCRMm8Ntvv6GqqoqzszOOjo5MnjyZhoYGSktLKS4uprq6WtC0N27cKN7gSkpKQopeUVFR\nKBxFR0eTl5cnBEHq6uq4dOkSQUFBGBoaEhwczPjx4/niiy+Ef0KvM/b8+fOZM2eOsE5zdnbG3t4e\nHx8fJk+ejJaWltiatba2kpSUJDQlepWIS0tLee+999DU1KSzs5OTJ08yffp0McxWXl6OsrIy1dXV\nmJmZicnGESNGYGZmJlzCet2se2nUMpmMiIgIwau5f/8+NjY2VFdXi8bvhQsXiIiIICUlhX79+jF/\n/nyOHz+Ovb0948eP58aNG4KAt2LFCqF9ERISgqenJ/n5+cJox9/fH1tbWxYsWMDUqVO5c+cOT58+\npby8HGtra06ePElpaSkHDx7Ezc1NbHc6OzsZPXq04C6cO3dOsDktLCzw8/NjxowZuLq6imNUBQUF\nDA0NuXv3LpWVlQQEBPDmm2+io6Mj4n0RvBJJobOzEycnJ9ra2ti3bx8ZGRlkZGRQXV2NhYUFVVVV\nhIeHiy65tbU18+bNY8iQIYwaNYr9+/eTmZnJO++8g4aGBseOHQOesezGjh0rjsC6urqoq6tDSUmJ\n4OBgNmzYgLq6OmlpaXh7e9PR0SEGoaZNm0Z7ezvvvvsutra2ZGRkCCZfQkICbm5utLS0kJeXx5gx\nY3jvvfdIS0ujoaEBHx8fBg4cSGpqKrdv3+bEiRPs27eP+fPno6+vz5tvvomzszOPHz9mzZo1gtaq\nq6vL5s2bqa6uxtHRkejoaCRJwszMjOvXrzNt2jRx48fFxQmJ9vj4eFpaWsQxbnx8PPX19cIYpq6u\nTrhT9/T0EB0dTWRkJBkZGTx69AhXV1cKCgrQ0dEhKyuL2tpa3nnnHaytrXnw4AHTpk3D0NCQixcv\nMn/+fO7cuYONjQ0+Pj7MnDlTjDAPGzYMBQUFDAwMyM7OFm7Xd+7cwcPDQ9jLu7u7ExoaysmTJzE0\nNBQCLACzZs2ivr6eYcOGkZaWhkwmQ1tbG09PT5qamhg1ahTW1tZoamoyc+ZMNDU1iYiIYOjQoTQ0\nNDBu3Dhqa2s5f/68GAaCZ8zWwYMHExUVhZycHLt27RI9qNWrVwvpfjc3NzQ0NOjfvz8XL14UdoS9\nXfzjx49z+/ZtwWZUVFSku7ubHTt2cO7cOWxsbGhububs2bNMnz4dExMTAgICiIyMFE3PoqIienp6\nKCwsZNasWeTl5VFbW0tgYCAXLlwAYNeuXdy+fRsLCwv8/f2FkpSfn5+gZXd1dWFhYUFPTw+qqqq4\nurpiZGSEqqoqhYWFVFZW0q9fPzo7Ozlz5swL34+vippzLdAK1L3sWP4L+vHqxQR9cf1v8CrGBC8n\nLkuZTGbwr37olUgKAJIkJb2I/PTfiVcxJuiL63+DVzEmeHXjgldk+9CHPvTh1UFfUuhDH/rwJ7xK\nSeFfHpW8BLyKMUFfXP8bvIoxwasb16vTU+hDH/rwauBVqhT60Ic+vAJ46UlBkqQJkiTlSpJUIEnS\nRy85lmJJkjIkSUqTJCnp+TU9SZJuSJKU//yr7t8Qx2FJkmokScr8w7W/jEOSpI+fr1+uJEnj/8aY\ntkqSVPF8vdIkSZr4N8dkLklSpCRJ2ZIkZUmS9O7z6y97rf4qrpe6Xi8MmUz20h6APPAAsAGUgHTA\n6SXGUwz0+y/XvgU+ev78I+CbvyEOf8ATyPxXcQBOz9dNGbB+vp7yf1NMW
4H3/5uf/bti6g94Pn+u\nCeQ9/9sve63+Kq6Xul4v+njZlYIPUCCTyQplMtkT4DQw9SXH9F8xFfjl+fNfgGn/139QJpNFAw0v\nGMdU4LRMJuuUyWRFQAHP1vXviOmv8HfFVCmTyVKeP28GcgBTXv5a/VVcf4W/Ja4XxctOCqZA2R/+\nXc7/vHj/15AB4ZIkJUuStPz5NSOZTFb5/HkVYPRyQvvLOF72Gq6RJOne8+1Fb5n+t8ckSZIV4AHE\n8wqt1X+JC16R9fqf8LKTwquGYTKZzB0IAFZJkuT/x2/KntV6L/245lWJA9jPs62fO1AJfPcygpAk\nSQMIBt6TyWSP//i9l7lW/01cr8R6/Su87KRQAZj/4d9mz6+9FMhksornX2uACzwr4aolSeoP8Pxr\nzUsK76/ieGlrKJPJqmUyWbdMJusBDvL/lrx/W0ySJCny7MY7KZPJzj+//NLX6r+L61VYrxfBy04K\niYCtJEnWkiQpAW8CIS8jEEmS1CVJ0ux9DowDMp/Hs/D5jy0ELr2M+P6HOEKANyVJUpYkyRqwBRL+\njoB6b7znmM6z9frbYpIkSQJ+BnJkMtmuP3zrpa7VX8X1stfrhfGyOpx/6LxO5Fl39gGw6SXGYcOz\nDnA6kNUbC6APRAD5QDig9zfEcopn5eVTnu0vl/5PcQCbnq9fLhDwN8Z0HMgA7vHsjd3/b45pGM+2\nBveAtOePia/AWv1VXC91vV700TfR2Ic+9OFPeNnbhz70oQ+vGPqSQh/60Ic/oS8p9KEPffgT+pJC\nH/rQhz+hLyn0oQ99+BP6kkIf+tCHP6EvKfShD334E/qSQh/60Ic/4f8B1ITNgrMEkkEAAAAASUVO\nRK5CYII=\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f13853ce210>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGd0k3e2//uRLMu2ZMm923LvHRdsA7YDwTEQgyGGYNJI\nIKQM/0kyyZxMSWbIrMlkTmaYyZAJSSaTQgKBBEILNRSDG7j33nuvkiXZkq37gsuzzrw49+Suc7JO\n7l1839nS0qPnp2e37/7uLTKZTNzDPdzDPdyF+H/7C9zDPdzDjwv3nMI93MM9/AvuOYV7uId7+Bfc\ncwr3cA/38C+45xTu4R7u4V9wzyncwz3cw7/gB3MKIpEoUyQStYhEonaRSPSLH+o693AP9/A/C9EP\noVMQiURmQCuwFugHyoBck8nU+D9+sXu4h3v4H8UPlSkkAu0mk6nTZDItAMeATT/Qte7hHu7hfxCS\nH+hzPYC+//B3P7D8P3uzXC43LS0t4ejoiKWlJQsLC8zOziKRSFAoFIyNjWFtbY1MJkMulzMyMoJS\nqWRhYYH5+XmMRiOOjo5oNBosLS3RarUMDQ2hVCqxsLBAIpGg1+txcnJCq9ViaWmJTqdDr9fj6OjI\n3NwcVlZWTExMYG5ujrW1NQMDAzg4OGA0GrG1tRVeGxoawsvLC5lMxsTEBK6urgwMDAjXF4lEGAwG\npqamCA4ORq1WIxKJEIvFzMzMYGVlxcLCAgqFgrm5OQwGA2ZmZoyNjeHu7o6FhQXm5ubMzMxgbm7O\n/Pw8FhYWyOVyxGIx8/PziMVilpaWmJ6exsbGhsXFRSwsLJiZmUEqlSKVSllYWGBhYQFzc3PhniUS\nCRKJhIGBASwtLbG0tEQqlQpnaG1tjVarZX5+HhsbG4xGI0ajEZFIhLW1NTMzM4jFYszMzACwsLAQ\nztHZ2RmtVotIJEIikTA7O4uNjQ1qtRqFQoFOp8NkMjE9PY27uzt6vR65XI6VlRUzMzPMzs6iVqvx\n8/MDwGg0YmZmhsFgYG5uDktLSwwGA4uLi8hkMszMzFhcXMTc3Byj0YiFhQVisZjJyUmUSqVwTwCj\no6N4eHgIv+309DQGgwFbW1ump6dxdHREKpUyPj7O0tISUqlUeI7kcjmTk5PY2toKzxfA1NQUExMT\nODk5odfrsbW1xcLCgtHRUeG3WlpaAkCn0yGVSpmamsLNzY2FhQUMBgNWVlb09/djb2+PyWTCzMwM\nhUJBV1cXLi4u6HQ6rK2tMZlM6PV6ZDKZ8J3UarVwP3Nzc8K5LC4uIhaLEYlE6HQ6zMzMEIlEmJmZ\nMTg4OG4ymZz+K+P9oZzCfwmRSLQH2APg5OTEvn378PDwoLi4mHPnzvHoo4/i6OiIn58fBQUFrFmz\nhsbGRjIyMqirqyMuLo5//OMftLS0sG/fPvLz86mpqUGpVDIxMcHRo0f55ptveOGFF3j66ac5c+YM\niYmJxMbGcvHiRVasWEFpaSkLCwvExsZSWFhIQEAAAQEBWFhYYGFhQVNTE2KxmJ6eHlJTU6moqODd\nd9/l6NGjXLx4Eb1eT1xcHNXV1ajVatasWcPly5cZHR2ltbWVAwcOcPz4cVpaWggPDyc6OpqbN2+y\nceNGSktLMRgMvPjii9TU1JCdnc1zzz3HxMQEcrmc++67D3NzcxoaGoiPj6e0tBQ3Nzc6OjrIyMgg\nLy8PpVLJzMwMCoWC/v5+bGxsaGxsJDQ0FLFYTEJCAvn5+fT392Mymdi8eTOXL1/G19eX9vZ2urq6\nePDBByktLSUxMZGWlhbi4uJoaWmhtbUVFxcXoqKihHPy8PBALpfT3NyMu7s7n3zyCT/72c8YGBjA\nx8eHv/zlL6xevVq4dlFREZaWlshkMpycnGhtbeX111/n5ZdfJjAwkLGxMTo7OwkPDycyMpLf/OY3\nHDx4kK+//hp3d3eioqLQ6/WMjY2hVqv5+9//zpNPPklubi5nzpwhMDCQjo4OkpOTGRwcJCUlhT/8\n4Q+kpKQwNjbG0tISH330EQA7d+4kJycHZ2dnJiYm+PTTT1EqlVhbW1NdXY23tzfp6enMzMxw4cIF\nXnnlFUpLS9FqteTl5bFixQr6+/sJCwsjOjoak8lEQkICjz76KOPj40RFRfH+++8TERFBZWUljz76\nKAEBAXR0dODl5cWNGzd46qmn6OrqoqWlBZFIxMDAAE5OTmzZsgWZTMa3335LeHg47733Hlu3bsXK\nyoro6GgkEgl9fX0MDQ2xuLiIh4cHdXV1ODk5MTMzQ1FREbt37xaCgYWFhRAEZ2ZmuH37Nk5OThw4\ncKDn+9jmD1U+DABe/+Fvz//7fwJMJtM/TCZTvMlkirezs6O1tRW5XE5MTAy/+MUvcHJyQqPR0Nra\nipmZGZWVlbzyyitoNBpmZ2f5xz/+gb29Pbt372b79u24ubkhFt+5nZCQEACOHz9OY2MjCwsL/Pa3\nv8XLywuVSsXjjz+OlZUVSqWSX/3qVyiVSsLDw3F2diYoKAg3Nze+/fZbOjs7GRsbw9bWlt7eXiIi\nIgC4ceMGSUlJuLm5MTIywujoKK6urkxNTREQEICZmRlvvfUWtbW1ODk54ezszK5du5BKpezatYuW\nlhYSExMZHh7m+PHjSKVSANavX8/w8DAZGRnMz8+ztLREUVERWq0WMzMz2tvbSUlJoaKiAjs7O5qa\nmpiYmKCkpARXV1cmJibw8PDAwcEBhUJBWVkZnZ2dlJeXo1aruXjxIrdv32ZhYYHJyUl++9vfUllZ\nSXp6OgUFBTg6OlJZWUlvby9lZWVoNBrs7e3x8/NDpVJhZmZGS0sLAJaWlmzdupWmpiYWFhZoampi\n7dq1yOVyrK2t0ev1+Pr6YmtrS1RUFObm5nh4eFBTU8O6detQq9VERETg4+ODtbU1Esmd+LSwsIBW\nq+XEiRNYWlri4eHB5OQkg4ODrFmzBhsbGxoaGggNDaWjo4OwsDDGxsaIjIxkZGSEZcuWUV5ezuDg\nIM7Oznz55ZcABAQEcPbsWRobG9FqtbzyyivExsby4osvMjQ0hNFopLm5mevXr7N7925cXV1pbm5G\nr9ezYcMGPD09mZ+fJz09ncbGRk6fPi08a7/5zW8YHx/n2WefZdmyZVy4cIHExESWlpZISEggNTWV\nPXv20N/fj4WFBQ8//DBGo5Gnn36a3NxcdDodgYGBbN68maamJgDGxsZoa2ujqqqK48ePU19fj729\nPeHh4XR2dlJbW0thYSFOTk7s2LGD8fFx6uvrqays5P/8n//D4OAgra2t1NTU4OnpiZ2d3fc23h/K\nKZQBgSKRyFckEkmB7cDZ/+zNU1NTAExPT7O0tMQbb7yBwWDAxcWFuLg4Nm/eLBxsU1OTEMHNzc2x\nsbEhNzeX1tZWtm/fztLSknAAdnZ2vPvuuyiVSvr6+ggMDCQ/Px8vLy/Gx8dJTU1lfHycp556Cp1O\nh6enJ0tLS7i7u/Pmm2/i5ubG
ww8/jJWVFWFhYRw8eBCA5ORkZmdnuX37NhKJhKysLCIiImhubmZy\ncpKgoCAuXbrE4cOHOXHiBEqlkm+//Zbg4GCuX79OamoqdnZ2eHp6kpaWxuLiIgD19fVER0fT1tbG\n4uIi/v7+PPDAA+Tl5REQECCkmMuXL6evr49Tp04xMjKCl5cXMzMz+Pn5kZqaytzcHMPDwyQlJVFX\nV8djjz2Gn58fNjY2ZGVl0dzcTFhYGBUVFdjb21NVVUVhYSEffvgheXl5tLS04ODgwPXr12lsbEQq\nleLm5kZCQgKbNm3CYDCg0WjQ6/VClJ2dneWll16it7cXe3t7JiYmcHZ2xmg0UlVVJURQmUzGG2+8\ngZ2dHatWreKFF14gPT2dzMxMAMFprFq1iq+//pqjR48ikUhwdnZm586dBAcHU1NTg1arJTQ0FKPR\niI2NDR9++CHvvPMOXV1dPPbYYygUCpqammhoaADg008/xdfXl/7+fiYmJpiensbV1ZUbN26QmJiI\nXC7HwsKCrKwsJicnKSgowMvLi4qKCtrb2xkcHCQmJoZPPvkEd3d3oqOjgTtO7NVXX+XGjRt4enry\n3XffcezYMT755BMh0F26dInKykokEgnBwcH86le/Ys2aNdy8eRMbGxscHBwYGBjAysqKt99+G4CZ\nmRlkMhkSiYSAgACsra0Fw3dzc2PXrl1kZ2fT3d2No6MjTU1NnD9/nscff5xNmzYRFRWFh4cHAOnp\n6fzzn//83sb7gzgFk8lkBPYCl4Em4GuTydTwn73/bi00OTmJVColJiaGwMBAgoKCOHDgAJ2dnZib\nm+Pr60tiYiKJiYmcPXuW2NhYRCIRiYmJtLa2kp+fL/AIcKcubWtrIzQ0lKWlJdRqNSqVSqjBurq6\nOHPmDC+//DJPPvkk/v7+iMViioqKOHnyJFFRUZw/f575+XkGBgY4d+4ccMeLu7i48Mwzz9DY2Cg8\ngKtWrSIhIQFbW1siIiLYtGkTs7OzREZG8uKLL1JSUkJcXBxubm5otVoiIyM5efIkXV1dAAwODjI/\nP09XVxdSqZSysjLm5uaQyWSEhYWRlZVFZ2cnhw8fJjg4mPPnz2Nra8vIyAiZmZls2LCB2dlZ5HI5\nHh4eHD16lLVr11JVVcXy5ctxd3fnxo0b3H///QCUlJSg1WqFmvv5559n+/btrF69GpVKxe9//3tE\nIhGOjo4sLi7S3t7O3r17cXNzQ6/Xs3XrVvbs2SPUu++//z5OTk7Y2tryxBNPUFhYiEqlorq6mn37\n9qFQKHj55ZeFzKG5uZnS0lKsrKw4duwYANbW1uTl5eHr68u2bdt44IEH8PX1Zf369XzxxReMjo6y\nZ88eGhoasLa2xtnZmfj4eCwtLZmfn0ehUFBXV8f8/DwbNmzA2toagOeee47Ozk4sLS0ZGxujtbWV\nuro63N3dyczM5ObNm0RGRtLX18exY8fo6enBzc2Nl156CVtbW3x8fOjs7MTR0ZGzZ89y6tQpAC5d\nukRYWBhbt27lwoULREZGcunSJaytrTE3N6eqqorQ0FD0ej1r167FYDAQHR3Ne++9h0Kh4Pr16wwM\nDNDV1UVrayutra3AnZJ6+fLlWFtb09TURHBwMGKxmPHxcZydnamsrMTd3Z2srCyuXr3KyMgIDz30\nEB0dHQQEBFBdXY3RaMTHx4eKigp++ctffm/7/cF0CiaT6YLJZAoymUz+JpPpzf+n996tzW/cuEF/\nfz+WlpacPn0aKysrOjo6GBwcRCQSsXz5cu6//37+8Y9/8Mgjj2BmZoZcLufkyZM8+uijPPHEE6Sn\np1NbWwtASkoKSqUSkUhEc3Mzvb29eHl58cEHHzA8PMzk5CRGoxEXFxcKCwuZmZmhrq4OuVzOzMwM\nY2NjmJubU1paio+PD5WVlcAdp/DRRx9hZmaGWCzm888/p7y8nOvXr1NQUEB6ejr29vYEBgby1FNP\nYWVlxa9//Wva29sZHx/n4sWLnD59ms8//5ySkhLhcxUKBcPDw6xduxaZTMbIyAgKhYKqqipu376N\nTCZj1apVrFixgtjYWK5evUpOTg5xcXFcvXqV0tJSkpKSMBgMdHR04OnpSUhICHK5nFdffZXJyUlc\nXFwoLi5menqapqYmrKysiIqKwszMjJmZGebm5ujp6WFxcZHm5mZcXFywt7dHr9czOjrKE088gb29\nPTdu3KCrqws3NzeqqqoIDAzEzc0Nk8nEhQsXeOmll4iJiSE4OBh7e3u8vb2xs7MjOzsbZ2dnvL29\nGR4ext/fn6+//hp7e3sA2tra6O/vR6VSERYWJpC6xcXFpKSkUFZWxrp16zh27BhNTU00Nzdz+vRp\nenp6mJ+f58qVK1RXVzMzM8PS0hIlJSXAnQzEysqKyclJNm/ejEqloqCggLa2Nj777DPS0tJobGwk\nICCAgwcPkpmZydDQEIWFhSQmJvLBBx/g7e1NXFwce/fuJTc3F7hDtq5fvx5bW1vEYjEBAQHCM9jR\n0YGLiwsAy5Yto7W1laKiIvz9/fnkk0+IiIgQzl+j0aBUKvn000+FcxgZGWFiYgIbGxsuXLjAyMgI\n6enpjI2NkZ2dLTyvZmZmeHt7ExERQUpKCkFBQSwtLdHc3IzJZMLV1ZXp6envbbs/CkWjWCwmKyuL\n5ORk/P392bRpEwqFggsXLpCRkYG1tTULCws0NDRgZWXFQw89hI+PD0ePHqWoqIiwsDACAgL4+uuv\nOXHiBNu2bQPgrbfeEogjrVaLg4MD09PTtLe34+/vT2xsLH5+fiiVSurr6xkbG2Pjxo0MDg4SHx+P\nvb09g4ODpKamolarBS++sLDAk08+yZkzZ1AoFKjVapycnDhz5ozgYFpaWlhYWGB4eJj5+Xk6OzsJ\nDg5mYGCA9vZ2nJycSEtLQ61WC9FMoVCwbt06+vv7qaqqoqGhgcXFRVauXCmUPK6urpibm/Pee+8J\nRnM3EsXHx1NTU0NgYCBr167F1tYWLy8vduzYQXZ2NtPT0wQHB2Npacn09DQuLi4sLCwwNTVFSkoK\nRqORoaEhbGxsePrpp+nq6mJoaAiZTEZCQgKBgYGoVCrKy8vJzMykpaWFoqIikpOTiYyMpLW1VSC4\nfH19ycjIoLOzEy8vL5aWlrh+/Trm5uZkZGQQFxcHQE9PD83NzUKmkJKSwquvvkpzczNffvklHR0d\nnD9/HpPJxODgINu3bycnJ4dHH30UkUiEt7c3Go2Gnp4ewsLCWLVqFREREWzYsIHk5GShU+Li4kJO\nTg4RERE8//zzFBUVsXLlSvLy8oiKiiImJobc3FwkEgkff/wx+/fvJzIykvfff58jR44QFhaGUqnE\n0tKS559/HnNzcwCio6M5deoUb7/9Ntu3b0etVlNZWUliYiI7d+7kiSeeYOXKlSgUCm7evMnMzAwR\nERHU1dXR0tLC7Ows58+fFzoxe/bsARDuz9fXF4VCQVpaGgBffPEF1tbWQnYplUoZGxvD1
dUVJycn\nvvnmGzo6OrC0tGRqaorW1lZGRkYYGhr6/vb437Lm/yGIRCIOHjzIpk2bcHJyIi4uDnd3d6ytramr\nq2PXrl00NDTQ3d3NH//4RwwGA0tLSzg4OBAbGyt0JtavX096ejoXL14EYPv27Xh7e5OdnY1er2di\nYoL8/Hzuv/9+ZmdnhYO9m86eO3eO3t5eNm7ciEaj4dq1a0RFRdHa2orBYODmzZsAqFQqKisryc7O\nJjk5ma1bt9LQ0EBKSgolJSVCBDQ3NycwMJCMjAzc3NyQSqVs2bKFhIQEtFotfX19vPPOO2g0GgCq\nqqr43e9+R2pqKkNDQ0IEWVxcpK/vTof3F7/4Bebm5mRnZ1NfX8/Zs2cFw+3p6SEkJEToHshkMg4e\nPEhFRQUDAwNERUURGBhISkoKzs7OPPnkkzz00EN4e3sLD6hMJiMwMJDS0lICAgIwNzfn7NmzlJaW\nIpPJ6OzsZMeOHXh6eqJQKIiKisLW1pa2tjaSkpJISkoiIiKCubk5ioqKcHBwwNramqioKNzd3amp\nqSEoKIiysjICAgIICQnh+eefZ8uWLcCdLOz06dO0t7dTW1tLbW0twcHB1NbWUlVVxaVLl/D29mbD\nhg3MzMyg0+lQq9VERkbS0tKCu7s7np6eqNVqPv/8c9zd3QG4efMmjo6OfPPNN+zatUvoGGRmZmJv\nb09GRgZisRgLCws8PT0ZGBggPz+f3bt3k56ejl6vp7GxkYaGBoF3AHB0dGRqaopXXnmFjo4OHn/8\ncbq6upDL5chkMiwsLOju7mZkZITJyUmio6Pp6emhsbGRTZs2MT09jZeXF/39/Zw/f14wXqlUyhNP\nPME///lPKisrMTc3RywWk5qair+/PyMjI3h7e6PX6ykpKeHs2bP09/cjEomQSqXExcURHx/PihUr\nmJmZEQji7wOzffv2/TdN+r+PN954Y9/dVKqsrIxTp04RGhpKSkoKmZmZfPTRRwQHBzM8PMyKFSso\nKCggPDyc+Ph4Tp8+TW9vL9nZ2TQ3N3Po0CG0Wi319fUEBQWh0+kwGo14eHjg5uZGdHQ0BoOB8vJy\nxsfH0Wq1DAwMEBcXR2pqKm1tbRiNRqqrq8nOzqa1tZX77ruP2dlZLCwsKCwsJD4+noGBAfR6Pfn5\n+YyOjlJVVYWNjQ1vvPEGR48e5Re/+AV5eXkcP36cDz/8kJiYGNauXctLL72Ep6cnIpEIe3t7iouL\nSUxM5Ntvv+XXv/61oB+421pycXFBLBYL9aK9vT29vb3cvn2biYkJDAYDaWlpSKVS2traaGxsRK1W\nMzExgVqtRqPRoNVqMRqN1NfXEx8fj0wm47777uPmzZvMz8/j4eFBUlIS8/PzWFtbY2dnx/j4OIcO\nHcLZ2VnoPkxNTTE1NUVPTw/V1dXMz8+j0+nQ6XTU1dWhUqlQq9UEBgbS2NjI4uIipaWlBAUFcePG\nDbZt24aTkxNmZmZMTU1hYWHB4OAgGo0GqVTKlStXiImJwcPDg7i4OOzt7RGLxdjZ2dHT0yOk4qmp\nqZSXlyOXy/n666/p7e3F0dGRrKwsnJ2dKS4upqKiAldXV1JTU/n444957bXXMJlMPPDAA4L2ore3\nF6VSSUREBJaWlpw6dYrKykpOnTrFli1biImJ4dy5c5hMJmJjYwkNDaW7uxuTyURISAifffYZzs7O\nQpmrVCqZnp4mOjoaNzc3UlNTOXbsmHCPXl5eNDY2Mjk5SVhYGNevXyc4OJjp6WnEYjEqlQqNRkNx\ncTEJCQkcPHiQ0dFR/Pz8mJqaIjk5GblczsLCAgADA3caeg4ODmRlZfGXv/yF8fFx2tvbsbe35/jx\n45SWlmJvb09YWBj5+flD+/bt+8d/ZY8/ikzBycmJVatWIZFImJ+fJyQkRBDeREVF0dXVhUwmY8WK\nFVRUVJCRkcGf/vQn5ubmyM3NZXBwkH/7t3+jpaUFe3t7IWX09fXF0tKSnp4ezp07h5ubGwUFBXzy\nySdUVlbS0NCAg4MDqampfPfdd4LBffnll4jFYvr7+wkPDxccx1NPPQXcEaPY2Nig0+nYvXu3ULLs\n2LGD6elp0tPT+ctf/iK0zFJTU3nkkUfw9PQkKCgIgBUrVmAymfD396ezsxOAhoYGVq5cSWlpKQ4O\nDhgMBtzd3YmPj+fEiRO4urpy+fJl1q5dy9atW9m2bRtarZbZ2Vl8fHyEzzY3Nyc0NJSsrCzs7OwI\nDw8XnItIJOLy5cv89re/ZWhoiKqqKh566CFqamoEMra8vJyoqCg2bNiAr68vUqmU1NRUJBIJOp2O\n5uZmkpOTMRqNAq/w2GOPYWNjQ2trK01NTezYsQO407K7awRwp8Nyt9f/1Vdf4enpiUQioaCgAID4\n+HjOnj3L3NyckFlkZmZiZWVFQkICYrEYjUbD5s2bCQwM5Mknn2R2dpbu7m7y8vIYHh5mamqKjIwM\nXF1dOXz4MABnzpyhpaWFI0eO8MEHH+Dm5gbcKV9mZmYYHBykp6cHqVSKRCJh1apVTE5OolKpWL16\nNcuXL+fKlSuMj4/z9NNP89ZbbwEIjuuZZ54hKSmJ/v5+Ojo66Orq4k9/+hNTU1PY2dnR0NCAo6Mj\nDg4OODo6CjqVnp4egfeQy+VERUUBkJeXx0svvcSzzz5LbGws4eHhQgvyLpl85swZmpqaGBgYoK+v\nDx8fH0ZGRkhOTsbe3p7c3Fw2bNiARqNh1apV39sefxSZwttvv73vrigmLi4OrVbLfffdR3V1NVNT\nUyiVSpqbmwGwtbUlLy+P5ORkOjo6OHToEB4eHtjZ2SGTyQgICMDf35+zZ8+yc+dOfH198fLywtfX\nl5CQEHx8fDh37hzR0dHk5uaSk5PDb37zG9asWSOwzAkJCUilUlpbWyktLaWrq4tnn32Wb775huvX\nrxMQECBE98rKSoFV9vb2xtzcnPHxcbKystDpdKxatQqpVIpYLKa9vZ3GxkaamppIT09nfHycmZkZ\nHnvsMd577z3WrFlDX18fUqkUOzs7YmNjuXnzJv39/axdu5a5uTmSkpKora1FJBJRWFhIREQEUqkU\ntVqNVCqloqICR0dHIRpXVFSg1WqJiIjAxsYGS0tLWlpaBELyj3/8I9u2bePBBx9ELpdjY2ODXq/H\nz8+P/Px8pqenWbFiBW1tbXR1dZGfny847cjISMrKynBycmJgYEAgRmUyGVNTU0gkEnbu3ElXVxfz\n8/OYm5uzcuVKiouL6ezsxGQyoVKpuHz5Mn5+fty6dQsXFxcSEhLw8fHhyy+/ZM2aNbS0tFBSUoKZ\nmRmbN2/m2LFjTE1N0dvby6FDh8jMzMTDw4MbN26wsLBAdXX1v9zvpUuXOH36NB999BFpaWlcvXqV\n7OxsIiMj8fPzY2RkhCtXrrB8+XIaGxvZs2cPV65cITg4mJ6eHtatW4eNjQ0ymYzy8nJMJhOJiYl8\n9dVXODk5CeKpvr4+QeEZFBTExMQE9fX19Pb2
AuDu7s7jjz9OTU0NCoUCkUiEUqnkkUceQaPREBsb\ny+XLl6murmbv3r0YDAa+++47LCwscHBwQCKRoFQqKS4uxtLSko0bN2IwGPD19cXX15exsTF0Oh0i\nkYjx8XHUajU2NjZIJBKKioqor6//XpnCj8IpHDx4cF9CQoIgXFGpVBQWFtLX10deXp4gjpmcnKSi\nooLAwEASEhKIiIjAzs6OkJAQNBoNLS0tREREUFNTQ3FxMRs3bqSsrExomS0uLtLf309SUhLbtm1D\nLBbT1tZGVlYWS0tLHDt2jIaGBqRSKQMDA0xMTODn54dEIkEsFtPR0cGtW7fYvXs3HR0dSKVSrK2t\nsbe3R6FQYGZmxoEDBwgODqa+vh6JRMLIyAharRaxWIy1tTWdnZ04OztTXV2No6Mj/f39VFdXc/v2\nbR544AGGhoaQSqVYWVlx69YtYmNj8fT05Nq1a1y4cIHh4WGWlpZITEykvb2djo4ObGxsmJqaQqFQ\nMDAwgNFopLa2lvr6ehwcHIiKiuLKlSvCdV1dXblw4QJ79+4VhE4NDQ188cUXyGQypFIpdXV1lJWV\nERMTIwhnbt++zfr161Gr1YyMjGBlZSUIhwoLC+nq6sLX15esrCxmZ2epr69Hp9OxcuVKAgMDGRwc\nxN/fn/7HzwnyAAAgAElEQVT+fr766ivq6+sxmUyCZL2iooLs7GyOHDnClStXCAwMRCKREBcXR3l5\nORERETg4ODA6OsrS0hI6nY7g4GAhQ+rr68PJyUmQjiuVSvLz82loaODnP/85zs7O2NraIpPJsLa2\n5vDhw8zOzlJbW4ulpSW2trakpaVhYWFBZGQkUVFRTE9Po9VqcXd3p7+/n8DAQB588EGqq6s5efIk\nu3bt4uLFi0xNTZGWlkZPT48glbaxsaGkpITIyEj0ej07duzg+PHjbNiwAYVCIUjdLSwsqKqqIiAg\ngPn5efLz8/H09MTa2hqRSISVlRVGoxFPT0/GxsYwMzNDpVIxMjJCdHQ0J06cQCwW4+rqyvbt20lM\nTKS/vx8zMzMCAgIEOyspKfn/TvkwOzuLq6urYACRkZHcd999DA4OYmFhwYkTJ7h165bwmkgkEvQG\n5ubm/PnPf2ZsbIxdu3YhEokEMVRDQwM5OTmsXLkSqVSKn5+fQIBJpVKKiopQKpV0d3dz5MgRXFxc\nmJycFDIBDw8PlEola9asETIVgLq6Orq7uxkdHSUjI4Ply5ejUChISkpCJBLR2dlJYGAgPT097N+/\nH5PJJMiR09LS2LJlC8nJyajVakwmE1qtFrhDssXFxdHZ2YlSqcRkMtHa2srHH38s6DYkEgnbt2+n\npqaGkZERYmNjkclktLW1cfz4cfr7+xkfH+f1118nKCgItVpNdXU1wcHBjIyMIBKJ6O7uxszMjNra\nWubn5zl//jwTExOsXbuWxcVFfH19WVxcJD4+HrgjpJmcnBRUfXfrWoVCQUBAAEtLS/j6+uLu7o5G\no+HkyZNMTEwwOTlJUVERf/zjH6mqqiI4OJiQkBCuXLmCRCIhMzMTvV6Ph4cHmzdvBiAoKIjw8HD+\n7d/+jYCAAJqbmzl37hwREREUFRXx3XffodFoCA8PRyQSkZ2dTX5+PqWlpezcuRNLS0uhDVxSUsLG\njRsBKCwspKCgQBBNyeVyEhMT8ff3JzQ0lLS0NNzd3fHw8KC0tJQzZ84wMjJCU1MTPT09dHd34+Pj\ng1wu5+OPP+by5csA9Pb2Eh4ezrJlyzh9+jR2dna0t7cL2drq1avJz8/n7NmzQoflrq7AxcUFlUqF\nVCrFaDRy4cIFgRCsra1laWkJc3Nztm7diqOjI7a2tuj1ek6cOMHly5dRqVR8+eWXhIeHIxaLqaur\nw8bGhs8++4yMjAwuXrxIR0cHU1NTAv/wffCjcAp2dnbcuHGDhoYG8vLyKCoqYtWqVcTExPDkk09i\nZWXFypUruXjxIqWlpahUKkpLS9FoNHz88cf85Cc/ITo6GqPRyOTkJLdu3QJgcXERnU4n9IbvdgPu\nykw9PDzQaDSCwTs4OGAymfjzn/9MRUUFRUVFFBYWUl9fT0hICPPz8wB4e3sTHx9PdnY2V69epbe3\nl/LychYWFti6dSspKSn89a9/Ra1W4+bmhlqtxtHRERcXFyorK/niiy8oKSlBoVDw0EMPsXr1agDm\n5+dZv349np6eFBcXExsbi4uLCz4+Pjg4OGBra4uTk5NAbJWWlhIaGsrIyAiurq5s3bqVubk51Go1\nx44dY+PGjej1elpaWrCwsGBiYoKBgQFu375NTEwM//7v/05hYSEhISE888wzwucGBQXh7e2Nt7c3\nAEtLS0Knprq6Grlcjo+Pj1CPa7VaRkZGcHR0pKGhgYqKCvr7+8nOzhb0CU1NTYSFhTE9Pc3Q0BA7\nduygoKAAJycn1Go1Y2NjAHz88ccCGdzU1MSyZcvo6+vjz3/+M2FhYYSFhREbG8vQ0BAajYaf/exn\nqFQqXF1daW9vZ3JykuTkZJYvX05QUJAgIc/MzOSBBx6goKCAc+fOERMTQ1VVFQMDA3h6emJra8vi\n4iI9PT1MTk5iY2NDXl6ewHno9Xr6+/v5/PPPqaurE5wYIChhlUolwcHBhIeHU1tbi729vaBOjY6O\nJjo6WhjcOnPmjDC8dvv2bdzc3PD29uaFF14A4Cc/+Ql2dnYEBQXx6quvIpPJ6O7uxtfXl9deew2N\nRkNVVRV+fn64urpiNBpZt24dMzMzREZGkpeXx2uvvUZYWBiWlpbExsZ+b3v8XxuI+o+424tfWFjA\n1tYWuEO0LFu2jE8++YT169dz9epVHn74YZ555hkGBgY4ePAgBoOBZcuWIZFImJqawtHRkcnJSZKS\nkmhsbOTRRx/l2LFjrF+/nvr6empqaoQJxvLycqysrHjggQeQy+XCAJaFhQVarZbk5GQefvhhBgYG\ncHZ25tChQwJZ5ufnR29vL3FxcRgMBubn58nKykKr1TI8PMyhQ4dob29nxYoV7Nmzh5mZGUpKSlCr\n1cTGxmIwGCguLsZgMHD27FlUKhWA0LaytbVldHQUg8FATU0Ner0evV6Pp6cnMpmMixcv8t5771FY\nWCi0J59//nn6+vpISUlhaWmJpKQkJiYmSEtLo76+nvLycnJzcwkODqa0tFTQ26enp+Ps7Mzp06fJ\nyckRpvSCgoJYWFigpaUFX19fent7sbCwwM/Pj9HRUeLi4qiqqsLX1xcfHx8OHDjA0NAQv/rVr1hc\nXOTtt9/mxo0bBAcH8+qrr/LMM8/Q39+PUqnE1dWV2dlZXFxciIiI4OjRoxQWFgKwd+9erKys6Ovr\nw87Ojlu3bhEVFUV8fDwNDQ0sLS3x7LPP8re//Y3Q0FCBdBsdHUUqlQrKwubmZlatWiVE3sLCQlxc\nXAR59P79+3nooYeECdgdO3YImoTXXnuNI0eOCBnJ2NgYCwsLREREEBsby/r163n11VcBqKyspK+v\nT2g
53/1tf/e736FQKOjs7GRiYoLly5fz+eefs3v3buAOwXnx4kVBKero6Mj69et5//33AXjxxRfZ\nsmULjo6OxMbG8umnnwpt6oiICFavXo2Liwv5+flYW1vj4OBAZWUljo6OjI6OEh4eTmBgIAcOHCA0\nNJSenu81CwX8SDKFu4TIypUr0Wq1zM3NMTk5ib+/P1FRUahUKh555BH6+vo4f/48t27dYseOHSws\nLLC4uMjw8DDnzp0T2mG+vr4A7Nu3j6SkJEElNjo6KvSQvb29UalUdHZ20t/fT01NDXl5efT29mJl\nZUV+fj4HDx6ktbWVhYUFYeQWoLu7G1dXV37/+98LfeXm5mYWFxdZWFjAycmJnTt3YmtrS3V1NYOD\ng4SHh2NmZoZUKiU8PByVSoWPjw9WVlbcvn0bgJycHAYHB7G3t8fFxYWysjJmZ2fR6XScPXuWzs5O\npqenqa6uJjIykra2NkQiEZWVlYyPjzM6OiqMlx8+fJjIyEhqa2vx9fWlsLCQS5cuoVarSUpKoqOj\nA7FYTF9fH319fSQmJnLixAkSExOpr6/HxsaGvr4+RkZGBNn13WnQuyO5rq6uFBcXMzY2xv79+8nO\nzsbX1xc7OzveeOMN+vr6MDc358iRI9ja2qJUKrl8+TJLS0scOnSI+Ph4jh8/zuTkJBkZGQA0Njay\ne/du9Ho9s7OzZGVl8e6772IwGFi+fDlmZmbcvn2b+++/H1tbW9asWcP27dt55plneO655+jp6cFg\nMODg4EB5eTk6nQ6405EZHh5GJpPR3t7OlStXKCsr45VXXiE/P5/nn3+e3t5edu7cyYkTJ1CpVJib\nmzM2Nsby5cs5deoU7777LpOTk5w7d04YoZZIJDz++OPodDoyMzMRi8VMT0/z4YcfsrCwgKWlJVlZ\nWfj5+XH16lWGh4eRSCTY29uzZs0awsLCWLduHd3d3RQWFpKdnQ3A5s2bsba2ZmxsjL6+PuLi4sjM\nzESr1QqDUaOjo3R2dgrDcnZ2diiVSkpLS+no6OD06dPEx8czPT0tDPN9H/wonIK9vT0+Pj6UlJSQ\nnZ1NfHw8tra22NjY4O7ujre3NzKZDB8fH2F46JNPPqGuro6MjAyeeuopYmNjKSsr4+bNm4yOjgJ3\nVGy3b98mKysLKysrgVU/efIkHR0dAOTm5rK4uEh9fT2jo6MolUrs7OxIT09n69atWFhYUFxcTElJ\nCTk5OQDC/oLVq1fj7+9PSkoKBoNBIIvi4uJ455130Ol05OTk4OvrS1JSEsnJyXz11VecOHGC8fFx\nTp06hV6vF1qdEomEpqYmvL296evrE9hpHx8f0tPTGR0dZX5+npSUFHx8fGhvb8dkMjE3N8drr73G\n6tWrUSgUODo6kpGRQU1NDSEhIczMzLBmzRrc3NywsrLi/fffx8LCguHhYTw8POjt7WVsbAylUolG\no2FsbIxr165hNBoZGBggLy9PKAPutkmrqqqYm5vj4YcfxtzcHGdnZ3x8fATn1NraKkT3W7duMT09\njZ+fn/B7RkRE8NVXX7Fy5UrCwsIYHh4G7siGc3Nz6enpwc7ODoVCQVhYGG5ublRXVzM0NMTPf/5z\nrl+/ztTUFA8++KCgH3nuuedQKBR0dHRgbW1NTk4O/v7+AHR1ddHc3IynpycbNmzg5ZdfFrpdJpOJ\ntLQ0YmJicHZ25tq1a8jlcqFseu+993jkkUdITEwkOzsbsVjMX//6V+COovFuhB4aGmJ6epoXXniB\n2NhYysvLcXZ2Zn5+HltbW15++WVKSkq4cOECExMTWFlZMT4+TldXF08//TRjY2PU1dUBCJOtUqmU\n6elpQa4eHBxMQ0MD7e3ttLW14efnJ5CwbW1tjI6OsmXLFubn5xkeHmZwcJDu7m5BbPV98KMoH+6S\nY2NjYywuLgqDOiMjI/j5+eHr6ytECA8PD8bGxpDJZGRlZXHz5k0OHjwoLFCJi4sTDnbbtm2UlZVh\nZmbG3Nwctra2aLVafv/731NcXIxcLsfLywuNRsOePXuwtbWlqqqK2tpaysrKWLt2Ld3d3XR2dvLc\nc8/xwQcfAAgCmZUrV/Ltt98SExPDunXr+Pzzzzlw4ABXrlyhqqqKgoICJicn2bNnD/v378fMzIzh\n4WGysrIQiUT4+fnR0NBAUVERcIfQq6qqIiQkhISEBKKjozl+/Di//OUvGR0dJS0tDbFYTHV1NVu3\nbsVoNPL111+jVquRyWQsX76cqqoqhoaGiIiI4NKlSzz22GPs378fhUKBXC7nvffeY35+nrVr19LT\n08OlS5fIysrC09OTkpISpqamSE9Px8LCgrKyMuLj4wUdRH19PQaDgdraWlxcXDhx4gTR0dEolUps\nbW3x9/dncnKSvr4+Hn74YVpaWigoKKC2tpasrCyOHDnC4OCgMCchk8mEKHZX6t3a2irsAfDz8+Po\n0aPExsaiUqkwGAyUlpby0UcfCQNjly9fZnh4GKVSSUhICAEBATQ0NDA/P8/KlSt57rnngDuZgqur\nK7W1tZw6dYrg4GCWLVuGo6MjIyMjvPnmm4SGhrJu3TpefPFFhoeH0Wg0bNy4kYGBAfr7+ykqKiIn\nJ0foOgFoNBomJiYIDAzk5s2buLq6UllZiZeXF3V1daxYsQKxWIzRaCQkJIRf/vKXnDlzhunpaUpK\nSlCpVOj1ehYWFli+fDkVFRUAPPLII/T29gpam8XFRR588EHGx8fRaDTCpHB4eDgZGRlMTU3h6elJ\nd3c3SUlJ2NjY0NPTg42NDVFRUchksu9tjz+KTMHDw4MjR45gY2PDtWvXyMzM5KmnnsLd3Z3u7m6B\nzba2thb0+XeZa0tLS1avXo2lpSVubm5YWloKA1FffPEFbW1twqKOuro6NBoN3333HfPz82i1WsrL\ny2ltbRXIv7sTkQ899BADAwPCtqOJiQl8fHyAO+XDSy+9RGVlJZGRkZw/f56PPvoIe3t73nrrLWG4\nRiaT0dvby7vvvkt6ejpqtZrdu3cjl8t5/fXXGRkZQa1Ws3z5naVUWq0WLy8vTCYT1tbWSKVSfvrT\nn9LT08PExIQQXZYtWybo4t3c3Fi2bBlGo5Hx8XHa2tpwd3fH1taW+++/n7179zI1NcXk5CQymYy6\nujpiYmL46U9/yksvvcSGDRtQqVS0t7cTERFBRUUFarWa0tJSrl69SltbG8nJyZw/f56qqip0Oh2+\nvr5cu3aNdevWCXsW1Go1zc3NdHR0kJCQQGVlJT09PaxYsQJ3d3eho3GX4NPpdLi7u7Nu3ToUCgWN\njXfWd97deXH3PHJzc5HJZAwNDaHT6di8eTMjIyOUlZXR0dHBwMAAFy9eFMrGq1evMjg4yPT0NHFx\ncTg53Vk05O7uzoULFwQFo8lkEoRPExMTZGVlcfr0aQwGA9bW1kJk/tvf/oZcLufatWtCp+P27dv0\n9/cDdyTyDQ0NuLq6olKp0Ol0pKSk4OjoyN69e2lpacHKygoH
BwcaGxt5+eWXaWxspLS0VBA6RURE\nMDY2xrlz51i/fj0ANjY2yOVywdHBncz32rVrjI6OIhKJhPLGZDLh4uIiOIDh4WEcHBzw8/NDp9Mx\nPj5OXl7e97bHH4VTmJmZEcQvAK+//jpNTU1oNBpCQ0OZm5ujrKwMuLMsw9nZmdnZWYqLi7G3t2dx\ncZH77ruPiooK/P39hRmFnJwcXn/9dWZnZykoKCAmJobZ2VnCw8NxcnIiMDBQMKKcnBxMJhNNTU2s\nX7+ewsJCCgsLBZXlXYEO3CkffvrTnzI3NwdAZGQkSqWSwcFBLl26xHfffYednR1jY2PC3oe7/e4r\nV66gVqvx9fXlrbfeEqYVAaH+lcvlGI1G3nnnHb799lthVPvkyZNcvXpVEHSdOXOG3t5eYfzZZDKR\nnJyMRCKhra1NiJZ79+7Fw8ODa9euERYWRm9vL7m5uTQ0NFBfX8/g4CDNzc00NDSQlpZGYmIifn5+\n7NixAx8fH7777jtkMhkZGRl4eXnh4OAgaPwVCgVeXl7U19fj4+PD448/LoxyT01N0dHRwezsLCdO\nnGB6elpYI3Z3p8GhQ4cEAhZg06ZNTE5OIpFIsLS05KOPPiIiIoLBwUGmpqa4ePEi8/PzzM3NsbS0\nJCzm6ezsFNaaRUVFERcXx9tvvy2oRX19ffnpT3/KqlWrSElJISYmhsbGRt59912uX7+Op6cnqamp\n+Pj44OTkREtLC7m5uezatUtYyXf79m3s7Oz49a9/zeDgIHCnY7Vjxw6KiopwcnJCIpHQ3t4uyLRf\neOEF6uvrUalUVFRUkJiYyPj4uCCo8/Dw4OzZszz44IMkJSUJxKhIJEIul/OHP/yB/fv309bWRmVl\npdB+NBqNHDx4kLm5Oerr6zl8+DBtbW1YWlpiZmbGt99+i0ajISsri8LCQoHA/z74UTgFCwsLQkND\nMZlM9Pf34+3tTVlZGXZ2dhw4cECYmw8ICODGjRuEhoai1WpJTEwUJLfOzs4sLi5iNBq5evUqAGfP\nnqWpqYn8/Hw2bNhAXl4e/v7+zMzMsHr1akFplpWVRWlpKZWVlWRlZQnyXw8PD1QqFWlpaZw5c0aI\nOne37FRXV9Pd3Y2lpSWZmZlYWFgQHBzM4cOHsbS0JCgoiLCwMLZv305nZydBQUFkZ2czODjIAw88\nQEhIiDCeDODq6srKlSsxmUwYjUZWr17N5OQkwcHBdHV1CVuH8vLyhHMKDQ1lenqa7du3s3LlSq5f\nv051dbUwAzE6Osq+ffuYnp4mMDAQf39/qqqq0Gq1XLt2jdjYWBYWFvD09MTZ2ZmEhARKS0sB6Ozs\nZG5ujhUrVtDX18fMzAyOjo7C8pB169YxNTVFU1MTBoOBW7duMTg4iF6v5/3332diYgJ7e3uBB4E7\ntb1EImF0dJRVq1ahVCpxdHTk+PHjwJ1lKOXl5Tg4OLCwsMCGDRuoqanh4sWL3HfffRgMBoaHh3Fz\nc8PHx4eOjg6cnJxob2/HYDAQHBwstI4/+OADFAoFANeuXaO4uJgvvviCzs5ODhw4wLFjx9iyZQsO\nDg7cuHGDrKwsNBoNZWVlNDc3MzExgVarxdXVldzcXLRardBWvTsxOzIywtLSEhUVFchkMpqamrC2\ntqaqqorTp09z/PhxdDodVlZWPPfcc+j1enJycujt7UWtVvPmm29iZmbG3/72N06ePImjoyOAkB1m\nZGSwbds2vL29OX/+POXl5Zw8eZKFhQXeeustnJycuHLliuAsbGxsmJ6exmQyIZfLaWlpEVa6fV/8\nICve/99CJBL973+Je7iH//+jwmQyxf9Xb/pREI0uLi58+eWXFBQUIJFIUKvVmJmZ0dbWxqZNm4Rd\ndWVlZezfvx9nZ2f+/ve/ExAQQFJSEoODg7i7uzM3N0dhYSEGg4HPPvuMo0eP8tprr3H48GHMzMyo\nrq6mrq6O4eFhpFKp0OW4uywzMjISBwcHwsPDmZubo7GxUdiEGx0dzR/+8AfOnj1LWVkZb775Jjqd\njieffJIbN24QGxuLvb09RqORgIAAtm3bxs9//nMGBgaIjY0V5hV0Oh3/F3vvGVX1mbZ9/zZFeq+b\n3hGQjoAiRcGGYteoSSyJxhTTZzKZFI2JSYxR79RJjBp7r2BBURClS5HepSO997qfD86+1j1rPeuZ\nvO8871oz672vL2zUzcK99//6X+d5HsfviIqKIjExET09PQwNDUlISOC3337jl19+EXCVkpISli1b\nJmhBSUlJODo6iuO8nEfQ2tpKZmYma9eupaGhgfDwcC5cuICCggKlpaVERUWxaNEi4uPjBU3a2dmZ\nF198ke+//17U5lKplPDwcAoLC2loaGD79u3o6uqSmZkpFJGffvopv//+Oy0tLeTk5GBiYkJ4eDg5\nOTnY2dlRUlLC4OAgycnJaGlpERwczMDAALNnzyYrK4uamhoApFIpvr6+REdHY2lpyZMnT/Dy8mLD\nhg288sorWFpaUlFRQVZWlmiufvLJJ7z55pt0d3eTmZnJtm3bKCsrw8TEhOXLlwtWxuTkJOnp6Uyd\nOpW4uDjGx8c5ceIE27dvZ+nSpTQ0NPDgwQPefvttLl68SEBAgJgIvPnmmzg5OZGeni6MUXPnziUx\nMRFAjG51dHSwsbHBysqKhw8fCku3oaEh8+fPFxQoCwsLvvrqK7y8vMjMzGTlypV0dnYSHx/Pnj17\nSEtLExDdsbExkpOT8fLy4ssvv+TMmTMMDAxw4MABvv76a8rKytDW1kZdXZ3y8nKmTZvG1atXmT9/\nPllZWeTl5bF582acnJzYs2cPX331FXfu3KG7uxt7e3tqa2vZvXv3H7oe/y28Dx999NFn9fX1QsM+\nPj5Ob28vzz//vBhHGhsb8/HHH5Ofn8/06dMFhu3777/H3Nyc3t5enJ2dBRHnypUr/PnPf0YqlVJS\nUsLw8DB1dXU4ODgwb948YdoZHR0lPz+fvLw8wsPDhc9AX19fXMjGxsYCxhkbG4tUKkVJSUl8yORm\nK319ffr6+njy5IkYU2ppaQlh1fj4OOrq6syZM4ekpCSmTZtGTU0NwcHBnD59mhkzZpCYmIijoyNv\nvPEG169fB54xLGtqaujt7aW+vp6kpCTxnNHRUTw9PQkLC8POzo7Gxkb09PSYPn06vb29rF69mu++\n+46QkBAcHR0pKyujpaWFyMhIHj9+jIeHB/X19YSFhYmSSc4yVFJSoqmpiZqaGhQUFATF+MaNGyQn\nJxMZGclvv/2GpaUl8fHxvPzyy4K8rayszOTkJH5+fpw8eZKJiQk2bNggJhdy27L8gmhtbSU9PZ33\n33+fgYEBZs2ahZubG7/88ougRB89elT83ycnJ1m0aBH5+fkUFRWhpKREW1ubeI3U1dUFzSg1NZUX\nX3yRFStWsGXLFrZs2SKgN+Xl5djY2DAyMoKlpSVDQ0Ooq6uTnp5OUVERra2tLFiwgOzsbMbGxhgb\nG+PixYtMTk5y48Y
NRkZGSEpK4r333sPAwIBr167R2dmJm5sbX331FeHh4VRXVxMUFCRe8+3btxMd\nHc3g4KBgUero6KCmpkZgYCAXLlxg/vz5FBcXs2nTJsbHxzEwMBAN1gULFtDT04Ompib79+9nzZo1\nBAcHEx8fj7+/v+CFyIVvVlZW5OXlkZub+5/jfTA2NuaFF15gfHwcFxcXDAwMWLRoEWNjYyQmJqKl\npYWLiwvHjh3D0NCQsrIy9PT0kMlkvPjii3h7e+Pk5CR4+3IbrpGRkaiFXVxcUFdXp6+vj+joaPT0\n9GhoaKCmpoa+vj5CQ0Npa2tjcHCQKVOmcPLkSTw9PSkqKiInJ4fh4WHhqVi3bh2zZs1icnKSgIAA\nDA0NSUxM5Nq1a8KTYWRkJMZMWlpaVFZW4uDgIEaU06ZN48aNG9jZ2QnNhLu7OytWrCAzM5N9+/Yx\nZcoUIiIi0NDQwMnJSXhDvv/+e9LS0gTstKKigrS0NM6cOYOzszOVlZXExsYSFRXF5cuXCQwM5Ndf\nfxWbn6+vLw0NDejp6REdHY2uri5SqVRAaOR4d3gGEamrqyM0NBQ/Pz/OnDmDu7s7YWFhpKamoqOj\nQ1NTE+3t7TQ1NVFYWIixsbGYDFVWVrJo0SLefPNNLl++jJ2dHWNjY9TX1+Pp6Ymuri7e3t4Ctquv\nr4+9vT2ZmZn8+OOPfPnll5SWlrJjxw4OHDggejRtbW08evSI8PBwnn/+eaysrNDT0yMrK0vAXdPT\n00X2Q3JyMt9++y2bN2+mp6cHmUwm/Bc9PT288847QlloaGjIpk2bsLe3Z9myZaSnpzNnzhymT5+O\nl5cXERERVFRUAM90Cr///jvnzp2jv7+fBw8eiEbl/PnzgWcyfmdnZ3bt2oW2tjZtbW24ubnx/vvv\n09PTw/79+3FzcyM3N5eDBw8Cz8Rw8ryN5ORkRkZGKC8vp7e3lzt37ohp2Lp16ygoKODp06dIpVIO\nHz5MUVERxcXFDA8PA5Cens7KlSv/8PX4b3FSOHTo0GdycYuvry/Ozs5kZmZy8+ZNVq9ezZ07d4R9\ntbe3l+LiYjQ1Namvr2dkZIQ7d+5QW1srQlxsbW25ePEilpaWKCkp4ejoyPHjx3nhhRe4efMmzs7O\nTExMCBlpc3MzExMTBAcHC2++XIE3ZcoUFi1aJOzM0dHRrFy5EplMRkJCAkZGRmRkZAhs/OPHj4Fn\nkyNcQGAAACAASURBVITa2lp++ukn7O3t8ff35969e5iZmYnAFDkG3NLSkhMnTlBfX4+BgQHwrFtu\nYGAg5KkymYyysjJmzpzJw4cP2bBhA3Fxcbi4uODs7IyBgQG2trbU1tayYMECxsbG2LdvH7Nnz6aj\no4Pp06fj7u6OTCYjJiaGRYsW0d3dTVRUFBMTE5w7d47x8XHa2tqwtramo6OD+fPno6ioSEREBI2N\njdy6dUvIeeWnn9bWVhQUFCgqKhJ32by8PDw8PCgrKyM0NJQDBw4wNDQkGAnm5uZMTExw9uxZXF1d\nsbKyorGxkcTERLZu3UpBQQF//vOfcXZ2xsjIiNzcXOzt7amvr6ezs5OCggJiYmJ466230NbWpqSk\nhLS0NGpqati2bRtnzpwhNDSU119/na6uLm7fvs2KFSvERVZdXY2lpSVTpkyhpaWF+vp60tLSWLRo\nEcrKyty5cwdFRUXs7OwYHBwUm7eOjo5QJVpZWXH79m18fX1pb2/H3NwcIyMjnJ2dKS4uxtjYWExM\nlJSUsLe358yZM7i4uHDv3j2MjY05fPiweB0aGhrw8PDA3t6e+Ph4DAwMMDU1pbm5GRsbGy5cuIC3\ntzdz587l3r17uLq6Cgfp+fPnGRsb4+233yYjI0OUcioqKlRWVrJ48WLu37/Po0eP/nNOChMTE4SH\nhws56ZdffsmcOXMIDw/nxo0bKCkpER8fj7q6OufPn2f58uVMTk5SUlKCqqoqra2txMXFkZaWRnV1\ntRhDSaVSIViysLAgNzeXJUuWEBwcTHFxsRjlTExMoKioyAcffICioqK4UOPj4wkPD2dgYICuri6R\n+NPc3ExDQwNTp06loqICPz8/KioqePDggcCcrVixAl9fXz7//HOBaV+2bBmurq6sWrVKiLCMjY0F\nhtzOzo7u7m7hYZCLguQWZSsrK5qbm1FVVWXz5s289dZbPHnyBB8fH5qbm1FWVmb27NmcPXsWgBUr\nVlBVVUVdXR3379/n4cOHZGRk8P7776Ojo4Ofnx9ZWVlMTEwwb948du3axZ49exgcHGRycpJt27YJ\nuG1DQwNr1qzB3NwcPz8/fH19WbZsGYqKiqxZs0aUCAsXLiQsLIyMjAzeeecdUdPb2Nhw69Ytqqur\n+eCDD+jq6uL5558nNTWVI0eOCDejnp4eZWVlHD16lPT0dGJjY6msrCQ5ORmpVIqpqSkmJiZs3rxZ\nkK3U1dU5e/YskZGR3L59m2nTphEeHs6RI0eEkev9999HU1OTsLAwFBQUuHDhAtnZ2bS1tREeHs7n\nn3/O06dPycnJYc6cOZSXl+Po6MiSJUvo6+vj+PHj5Ofns2HDBjw9PcWEw83NjY6ODh48eICFhYWg\neqenp6Ojo4O1tTUGBgaoqKjw2muv4efnh4KCAgMDA6JH8sorrxAcHPwPpqVVq1bR0dGBiooKTU1N\nbNmyhUePHvH06VORRvXkyRMMDQ355ptvCA0NpbW1lf7+fi5evIivry9WVlZ4eHhQU1PD5s2b//D1\n+G9xUvjtt98+k+/K8sbhnDlzkEgkyGQybG1tee655zAwMEBRUZGCggJ0dXWxtrZGSUkJXV1ddu/e\nTWxsLD4+PkRERPDjjz9iampKXl4ewcHBdHd38/TpU9ra2mhpacHV1ZWCggIcHR3R0tJCJpPh6OhI\nX18fpqamtLW1ERISgrq6OomJiQQFBVFTU0NcXBxBQUG0trYSGBgo3iA1NTVcXFxISkpi7ty5ooRx\ndHQkJSWFtrY2hoeHmTp1KgMDAygqKpKdnY2WlhZ37tyhqKiIwMBAHB0dsba2pq+vDwcHB7S1tbl6\n9SoqKiq4urpy/vx55syZw7Jly5g6dSq1tbUkJCQQGBjIqlWrUFNT4/z583R1dQlUmo+PDz4+Pvj7\n+6OmpoaDgwOXL19myZIlVFVVYW5uzvr161FXVyc3N1e8D+3t7YJ6bWBgIHo7lpaWdHZ2oqenh6qq\nqoiGkwNMNDU16ejoIDExkba2Nvbu3SuMaxMTE9jY2HD79m3Gx8fZvn07s2bNIi8vj+joaKRSKVFR\nUZSXl+Pj44OioiIGBgYYGxvT19eHr68vbm5uSKVSLCws8PLyIiYmhk8++YS8vDxmz56Njo4OZ8+e\nFRLxmzdvsmzZMhITEzExMcHAwAAzMzNxmlNTUxOj5bGxMVRUVIiIiKCwsBAzMzMGBwfp6upi27Zt\n5OTkoK2tjZ2dHcePH8fZ2VnE+1VXVxMeHo6CggK+vr6cPHmS4uJipk2bRkBA
AKOjozg6OmJmZiby\nJJSUlETYkYaGBjU1NUIP8cEHH1BfX8/ExARtbW14eHgwMTGBpqamwA3IsYO1tbV0dXWRm5uLjY0N\nMplM+Hg0NTU5fvw4ZWVl/zknhdbWVtzd3RkZGeHXX39lcnKS6OhozMzMaGpqwtXVlXnz5vHKK6/Q\n3t7Ohg0bRCjGp59+SkpKCsePHxduybt37wLPFHGLFy8mISFB3E3r6+spLCwkNjYWZWVlkfgj1yA0\nNzdjampKeXk5ra2t5ObmoqysTGRkpJCgamtrY2Vlhbm5OYaGhigrK9PQ0EBWVhb+/v6kpqaydOlS\nCgsLxZ18aGiImzdvivn93bt3sba2FrFp8KyeNjMzE/CXzMxMUSN3dXVRWlqKtbU15eXlwrnp4eEh\n4s5KS0tFB1tdXR1lZWW2bt3K48ePqamp4cKFCxw+fJiTJ09SWlrKlStXSE5OxsLCgoqKCvr6+qit\nrSUzM5OEhASCg4Px8/NjbGwMdXV1nj59Snx8PNevX6erq4uBgQEGBgbIy8vj8uXLlJSUkJmZyc8/\n/yxm+tu2bWPr1q3k5OTg5+dHe3s7ampqhIaGoqmpSU1NDV1dXeKOPnXqVMzMzNDQ0GBoaIjY2Fh8\nfX0JDw/Hy8sLBQUFzp07x8yZM4VSNSQkhKdPn2JnZ4eenh779+/nm2++oaenB2NjY/FZkEqlFBcX\n85e//IXu7m5OnTqFtbU1gYGBos/S3t5OXl4e+vr6WFhYiN/11VdfRU1NDYDz588L5N9LL71ER0cH\no6Oj+Pj48PTpUywsLHBxceHChQu4u7ujrKzMiy++SHFxMQMDAygpKeHp6UloaKiwzRsaGuLg4CAE\nbIaGhkRHRzNnzhzs7Oywt7cnNTWVoaEhkpOTcXJyEnb12tpampqakMlkrFmzRsjGZTIZvb29NDY2\n4u7u/oevx3+LTUFHR4fS0lIBa01OTqa/vx8FBQWkUik//vgjbm5uhIWF0dHRwalTp9DX10cikbB2\n7VqGhoZQVVVly5YtAvQBz452mpqaODg4iPo2IiICAwMDLC0tWb58OWNjY0Jc1NLSwvr16xkeHmb7\n9u08fvyYwsJCRkdHOXXqlNCPm5iYMDExQVJSEhoaGuTn54u7ZnZ2NmlpaURERLBp0ybMzMxQVFTE\n19eXr7/+GmVlZZSUlAgMDKS2tpbu7m7RkNLQ0BDdc7lpKiEhgZ6eHl577TUsLS2JiIhAKpVSWVmJ\nnZ2dCH559OgRO3fupLq6mrq6OiH2GhsbEzyIJUuW8NxzzzE8PEx8fDy1tbXCmTo0NISJiQmBgYGi\nPq6rqxO+/aamJvr6+jA3N8fb25vCwkKBZjMyMsLBwYG4uDjhXpR3++vq6rC3t+eTTz5h3rx5hIWF\nMTk5SVVVFS+99BLa2tp4eHigo6MDwLx587hy5YoInHF2dsbKyopXX32VyclJCgoKmJyc5NixY4Kh\nIT/FzZgxg4GBAX766SfOnTvH5s2b+f7774FnMXfp6em4ubmxYsUKUWYtWbKEiYkJjh8/TnZ2Nm5u\nbuzcuZPS0lJycnLQ19cX7MU7d+5w7do1Zs+eLUrUb7/9ls7OToyMjLCwsMDQ0FBsLKdOnWLhwoUY\nGxvz/vvv4+zsTEZGBrGxsXh4eNDX10dSUpLIDLl//754Hd544w0CAgJISUlhaGgIDQ0N7O3tsbS0\nFE3srq4u+vv7iY2NxcDAAH19faqqqggJCaG1tZXVq1ejoKCAoaHh/6NG47/FpiAHgPj5+aGlpUVI\nSAhubm6cOHECb29vhoeH+e677xgdHcXExISXXnqJhoYGTp06hZubm4h2GxgY4O7du0IV5urqytmz\nZwW52d3dnaqqKrS0tFiwYAEXLlxgZGSErq4uZDIZixYtory8HHNzc/r6+njuuefQ0tJidHSUK1eu\nCBNMXFwczs7OeHl5MTQ0hIqKipAf29vbU1BQQEhICOXl5VhZWTE+Ps7y5cs5ePAgk5OTokkqk8mo\nrKwUHzB5fmFeXh4vvfQS06dPJzg4GHd3dwoLC6mtrWX69OkYGRnR3NzMgwcPGB0d5fr166ipqTFt\n2jSGh4fR0tLi6dOntLS0cODAAT799FMUFBT4+eefRc7j0qVLGR0dFUdXTU1NWlpaUFdXZ8uWLSxY\nsIDOzk7hyJSDQiQSCUZGRoIZmJKSQldXl9ho8/PzCQwMRFNTE01NTUxNTZFKpUgkEq5du4aqqip+\nfn4kJydz9+5dOjo6uHTpkriT/fTTTygrKwtat0wmo7Ozk7feeovGxkaRpq2iosK1a9cEnNXAwICn\nT58K45GZmRkHDhxg27ZtAKSlpbF//35RGiYlJfHzzz8zPj4ums9qamo4OTmRlJSEioqKoDRdvHhR\nEKODgoJQUVERprsXXnhBAHiSkpKYN28eqqqqaGhoUFJSgrKyMgEBAVRWVlJYWIihoSGzZ8/GxMSE\n2tpazM3NmTVrFu+88w7r168X2Zc9PT2cOnWKgYEBWltb0dfXp62tjWnTpjE6Oip0EVZWVvz1r38V\nMJi1a9eSkJDA3r17hbK3uLhYTDX+yPq36Cns3bv3M/koqKmpCS8vL/T19dHW1hZZh1paWpiZmYmO\nrDyLISIigtDQUHJzcxkaGiIsLIyAgAAOHDiAi4sL+vr6NDY2sn37doaGhkSMfVlZGVVVVQwODtLT\n04OlpSU+Pj54enqyb98+5s6dy4MHD6ivr2fWrFksXbqUxMRE8vLyOH78OPHx8dy5c4eBgQFmzpxJ\nSkqK8AXIfe0WFhYidcnQ0BAbGxuxw8v9FHJn4d27d3nppZe4efMm8+fPp6enh/LycnR0dMjMzERB\nQUEgtjQ1NcnLy6O/v59r167h7u6OkZERNjY2ODs7A882RBcXF2xsbFBVVRWUX01NTWpra5FIJHz4\n4Yfo6urS1taGnZ0d58+fZ9euXWIio6WlRXJyMvr6+jg4OGBiYoKioiLd3d2CqdDd3U16erqgUKur\nq/Pcc88xNjYm7q6pqamoq6tz//59jh8/jq6uLgsXLhRAG3lkWlpaGq+//jq+vr7cvXuXEydOsHTp\nUhQUFMjJyUFFRYWRkRFMTU0pKCigqKiIpqYmLC0tmZiYIDs7m9DQUGJiYlBWVhaTkbi4OMzMzCgs\nLMTR0ZG4uDgKCgp49913efToEdOnT6e8vJwZM2aI/sJ/jyDMycnhtddeY8+ePaxbt45bt26hoaFB\ncnKyKPc0NDSIjY2luLiYqKgoHj9+jLe3N52dneTk5BAfH4+hoSEFBQVCaDV16lROnDghToh1dXUC\nk7dkyRLc3Nxob2/H0NCQ4eFh0XiXN5tNTEzEKXFkZIT29nbOnDmDoqIi7u7uArjr6uqKtbU1169f\n/8/pKQwPD6Orq0tWVhYSiYQPPviAixcvisdFRUWi29/Q0MCBAweQyWRERETQ2dlJXl4etbW19Pb2\nkpaWJnZFqVQq/Ptnzpyhs7NTwDL
V1dWZOnUqQUFBgtM3MjLC9u3bMTU1JTY2VqDEGxsb2b17N4GB\ngQAiCWnRokXY2dkhkUgwMDDAx8eHixcvivLi/PnzvP7661y4cEGMpgYHBzl79ixubm5cunQJNTU1\nfv75Z+CZV8Pe3h4TExMePHjAmTNnUFZWJiQkhBkzZpCdnS1SigYGBqisrMTf35/Zs2fz0ksvYW5u\nzuHDh8nMzCQ2NpacnBwyMzPp7u5mYmJC6AbmzZvH7NmzRVzd4sWLRarRvn37qKysZNasWaipqdHX\n18fk5CSFhYWcPHlS0LGmT58uhD3V1dWcO3cOqVSKlpYWR48eJScnR4BvN23axL179wgPD2f16tWi\nI29ra4uVlRUlJSUCjCN3Gk5OTiKVSunv78fQ0JC2tjYRPy/3nMyaNYvQ0FBWrlyJgoKCiGabP38+\nFRUVzJ8/X/QqbG1tsbOzQ19fH3d3d3p7e7l69SrLli2jvb1dWMazs7M5c+YMZmZmTJ8+nbS0NJG7\nsHnzZtEs7O3tBcDe3h6pVIqGhgYnT54U41z56DYuLk4oIQMCAnj11Vfx8/MjNzdXmLjMzMwoKirC\nyMhIaE+GhoY4deoUCxYswMTEBENDQyYnJ1FVVWXt2rX4+PgQFxdHeHg47u7uDA4O8ujRIxYvXszU\nqVMZGhoiNTUVgEePHokc1D+y/i02BXnYiaWlpThuyfMJpFIpX331FYGBgbi7u/PNN9/g7e0tMg1M\nTU2RSCTo6upy4cIFJBKJqJ/k5JvR0VFMTU1RUlJCQ0MDU1NTccrIzc0lMzOTvr4+srOzRTrQ0NCQ\nCFipqqoSQa7w7GhnYWFBd3c3LS0txMfHi3m33NE3MjJCUFCQyAyIiYlh165d2NnZCZTW6dOnsbKy\n4q233gKe4ev7+vq4f/8+w8PDhIaGCnu13GV4/vx5KioqePToEf7+/kgkEoqLi0lISODYsWP4+/sz\nNjbG4OAg+fn5jI6OCjltWVkZDx48wMfHh6lTp6Kqqsrg4CBlZWX09PTQ2tqKqakpCgoKVFdXExUV\nhaKiItOmTROw3MbGRj744ANu3LiBmpoa9+7dY2Jigjlz5jA2NkZnZydhYWHMnj1biLi++OILkbDU\n399Pb2+vAI3KhTjyqD95k/np06d8/fXXeHl5oaioSH9/v3gsk8k4dOgQHR0d3Lt3j1u3bqGmpoal\npSUmJiaMjIzg7u7O+Pi4aCDb2Ngwc+ZMYmNj0dHR4YcffmD+/PlMTEwQEhLCypUriYmJYWRkBBcX\nF1xcXMjNzUUmk5GRkcH169cFVPe/0736+/tRUVFBUVFRcBzOnTvHqVOnBKb//fffZ/r06djb2zM+\nPk5HR4cQKy1fvlzkhE5OTgogbGtrK8bGxnz99dekpqZiZWVFfn4+6enpfP/990JgFhsbS2JiIjk5\nObz88suUlZUxOTmJmpqaaIyuWLFCGMP+0PX4f+Ga/peXuro6bW1tmJubi8aYnJx78eJFfv75Z954\n4w1u377N77//zrvvvouTk5P4kI2MjKCkpCQwXidPngSe9SrkMJOnT59y5coVERqqr69PT08PVVVV\nIilaR0eH6Oho1q9fj6qqKs3NzcLpCAjVXX9/P2NjY4SEhFBcXIyJiQkeHh40NTUhlUpFytH169d5\n8uQJJSUl+Pv7s2LFCu7du8cLL7zAtWvXSEpKIiUlRdCc33vvPZYtW4aNjQ1bt27Fx8cHV1dX7Ozs\nBOarvb2d+Ph4pk2bRkhICM7OzjQ0NIhxWGZmJubm5qIBGRERgaOjoxi3enh4cP36dWJiYsjOzmZ4\neJiDBw+yePFiDA0NuXr1KtbW1tjY2IjXV24zNzc3R01NTdypnJycWLdunajTfX19hQhJDm9paWkh\nICCA6dOn09PTI+b6cXFxeHh4EBoaioWFhQCWJicnMz4+joqKikD7y70RK1eu5LvvvsPDw4Nbt27x\n8ssv09jYSF9fH0uWLCEhIQGZTCbSpgYGBrCwsACehcHExMTw5MkTzM3NGR0d5f79++zcuZPGxkYK\nCwvx9PQUqsXdu3dTW1vLzJkzeeutt3BycqKtrY3q6mo6OzsFhi8jI4Pg4GDOnDlDZWUla9euRVFR\nkfnz53P48GGRT2Jubs7g4CA+Pj64uLjQ3t5Oe3s7ly9fZtasWaJxLe9b1dfXs2XLFrS1tVm8eDFF\nRUVIpVI2b95MVFQUzc3NKCgoiM1djsnbvHkzVVVV7Ny5U4jR5CFHf3T9S5uCRCKpkUgkBRKJJFci\nkWT9/c/0JRLJXYlEUvH3r3r/7Of09vby008/cezYMTQ0NHB0dBRvZkZGBvv27ePixYssW7YMmUxG\neno6cXFxxMfHc/HiRSoqKrCxseHw4cO8/PLLTJ06FXhWVz958oSBgQGcnZ154403GBgYICoqCgcH\nBwoKClBSUsLBwUHYfoOCgrh79y4ODg7IZDLOnTuHm5sbf/rTn1i4cCHwLMXI1taWy5cvs379emGA\n0tbWxtHREUAEn7z44oui6djU1ISbmxuPHz9GW1tbpGbLicN79+5FQUGB2NhYbty4QXd3NwMDA0JP\nYWdnx44dO/D19aWvr4/29nZyc3MF9Sg7OxsHBwckEgn29vasWrVKqAjPnTtHaWkpbm5u9PX1ERQU\nxF//+leuX7/OvXv30NPTw87OjtDQUJFTee3aNYqLi7l48SIXL17E2dkZS0tLEhMTSUlJIS0tjSdP\nnhAeHk5fXx9jY2O88cYbrFy5kurqalauXMmXX36JsbEx3d3djI+Pk5CQQEtLC46Ojhw8eBAdHR2h\n7IRnAJtXX32VxYsXc/36daZPn057ezt2dnbY2dkJ7sH4+LhgSkqlUlpaWjAyMkJbW5vLly+jq6tL\nTk6OgKHITz2+vr58+umn1NbW8txzzxESEkJCQgIA//Vf/0VbWxvbt2/nb3/7Gy4uLgQFBdHd3S1O\nAd3d3XR2drJ48WIABgYGRF5JVlYWa9euZc2aNYJWNTIywvHjx0lJSSEmJoYPP/yQyspKQVuW+3Z8\nfHxobGxk48aNAIyNjTE+Pi7i/JKSkigvLxd6ET8/P0EuV1JSYvXq1TQ0NNDc3MzatWv58MMPcXV1\nRU1NjdOnT/PBBx/84ev6/8ZJYbZMJvP6b5bMD4F4mUzmCMT//fv/4zI0NCQ8PBw3NzcmJycxNTWl\ntbWVmpoa8vPzxTFLW1sbS0tLoavfv38/AQEBJCUliV1Y7oWHZ54AJSUlxsbGuHv3Lr/88gsLFiyg\nqqqKL774QqDMy8rKUFdXx8jIiKamJkJDQwWbQUlJiddee43vv/+ew4cPAwgZ79y5c8WsXN6dz87O\nFvh4BQUFYmJicHV1ZcaMGTg5OeHl5SU+3GpqakyZMkUYwRQUFBgcHMTd3Z0lS5aQmprK7du38ff3\nx9/fHzc3N9TV1bG0tMTa2pq0tDSWLl1KQUGBQK+VlpZiYGBAbm4uKSkpXL9+ndOnT6Ovr8+K
FSvQ\n1NSksrISNTU1du/eTVZWFlu2bBG5FCEhIVy9epXp06fT399PQUEBampqogzIz8/nrbfeYuvWraLf\nkpubi5+fHwsXLqS5uRmJREJISAi2trbMmDGDkpISrl27hrW1Nba2tgKrpqenh5WVlaBjAYIdUVdX\nR0tLC7dv3yYkJARNTU3Onz/PunXrOHPmDH5+fkRFRREaGoqamhonTpzA3Nycq1evoqysTF9fH8uX\nL6enpwd45iWIiIggLy+PgIAAbt++TWpqKi4uLtTV1XH79m1hfJLj55uamtiwYQN2dna4uLhgYmLC\n1KlT6ejoEKVkUlISfn5+mJmZoa6uzsaNG/n555+xtLRk0aJFJCcn4+PjQ0tLCxYWFigoKPDxxx/T\n1tZGTEwMQUFBWFlZ4e/vj62trRC9OTk58eDBAy5duiTAPHLgkIKCAl1dXWzfvp3ExERMTU0FnGVk\nZITExEQxEXF0dMTT01PoH/7I+v+ifFgKHP/74+PAsn/2hKGhIeEUkyfkPHz4EBcXFzo7O/nuu++4\nffs2o6OjTJkyRWjgx8bGMDEx4c033xS8+5SUFCorKwGEay4wMFCMieR1mYODgwhYnTZtmtDOr1+/\nnsuXL1NVVcWKFStYvHgx+/fvZ+7cuQIT9uDBA6Kjo0lJSUFJSQk1NTW+/fZbtLW1cXFx4caNG1RW\nVlJUVMS6dev4/PPPaWtrw9vbm/v375OcnExAQADNzc0UFhYKsZXc95CVlYWVlRXvvfcempqaXLt2\njePHj6Ourk59fb1Anr3yyiuMjY0RGRlJZ2cn0dHRLFq0iKNHjzJ37lxhYfb19WVoaIijR4+Sn59P\nW1sbWlpaWFlZ8dprr2FjY8PAwAATExPk5eUxffp0fvzxR/Ly8vjll1/Q1dUlKCiIgYEB6uvrGR0d\n5cGDB+Tn5zNlyhT27duHrq4u3333nXitLSwsMDc358MPPyQoKAgbGxvS09Pp6upCU1OTwMBAoqKi\n2LFjBxKJBHNzc+CZyOjQoUM8efKEyspKLCwsMDIyEg3jiYkJVq1ahYmJCVVVVfT29oqeT1NTE56e\nnrz55psibVle+w8MDKClpcWqVavQ1NRk06ZN2NjYkJmZyapVq8SkIDU1VbhNf/vtN2bNmsVHH32E\nv78/hw8fJiUlhZs3b4pG45dffsmaNWsYGhqipKSEyspKtmzZIlLIN23ahKmpqSg7iouLCQsLY+bM\nmejp6YkYwcTERAIDA7l06RIAixYtwsPDg+DgYMzMzFi5ciXR0dEkJyeLBuL4+DgrVqzg8OHDZGRk\nkJKSIiLtrl27Rm9vL9ra2nh5eYmp1B9Z/+qmIAPuSSSSbIlE8srf/8xEJpM1/f1xM2Dyv3uiRCJ5\nRSKRZEkkkqzR0VGRkSiX2M6ePZv29nY2btzI3r17Wbx4MQcPHsTX15f+/n4aGxs5fvy40B3o6+tT\nXl7O1q1bRdNKjlrLzc0Vx/g9e/YwZcoUIiMj0dHREerFtWvXUltbS0pKCsuXL0dLS4v+/n6uXr1K\naWkp3d3d5OXlATBjxgzU1dXR0dER/oTVq1eLppD8Tm1tbS00CgUFBYyOjgoacHNzM5qamvj4+PzD\nBdHQ0EBYWBh3797lxo0bzJkzhzfeeINZs2bR3NxMc3Mz8+bNw87OTpwkTp8+LWLUSktLCQgIEBOd\nhQsX0tfXh6amJkZGRgwNDfHWW29x+vRpjI2NqaurY3R0lMePHwvEfUhICN3d3Zibm/PkyRMSjqsG\nXwAAIABJREFUEhLQ0dHBx8dHaCFGRkaYnJxkcHCQ1NRUvvnmGxwdHWlqauLq1atkZGSwfPlympub\nOXr0qMjg1NXVZePGjcyfP5+WlhZWr16Ni4sLfX19AMKBaWpqir29PQcPHmTv3r3s2LGDrq4uCgsL\n6ejoIDU1lQcPHtDf38/MmTMxMTGhoaEBHR0dVq5cSVVVFcXFxZiamgLPIL5ydJmpqSmampqcOXOG\nNWvWYGpqKpgcr7/+OqmpqdTU1LB161aRU1lVVUVnZyfffPMNc+bMEUrE5uZmZs6cib+/P1paWqL5\neenSJfbt20d3dzc//vgjc+bMITg4mG3btonogsbGRgwNDUlNTRUsCnlD8KeffsLR0ZEnT57g4ODA\noUOH8PLyYtasWdjY2DA2NoaRkZFQnv7pT39CKpXS3NxMXFwcAQEB9Pb2MmXKFLS0tDhy5Mgfvqj/\n1U1hlkwm8wIWAm9IJJKQ//6XsmdYp/8tVUkmk/0mk8n85GVHQkKCAJFKpVLs7e2xs7MjOjpajOaU\nlZXZv3+/iGbbvn07nZ2dVFRUUFhYSGRkJEeOHBHH8cjISPLz89HR0eHx48e4u7uzadMmpkyZQmtr\nK+bm5lhYWIgMhCdPnohxorxJEx4ezpw5c6iqqhINRzc3N3Hny87OpqSkhLy8PExMTES019KlS+nr\n6yMzM1NkA1RWVtLb20t3dzdVVVXY2dnR0NAgjs4dHR1ietDS0sLSpUsFVKSoqIi2tjaBgc/IyEBT\nUxNra2sGBgawtbXFw8ND2IL7+/uJiIjg9u3baGlpkZaWJgwzX3/9teAtzJw5k/HxcSFWKigoICkp\nifXr14t8ik8++URQpouLizE3N6e9vR0NDQ3Onz/P3LlzmTZtmkDh9fb2oqys/A84PScnJzZv3oyr\nqyv6+vps3rxZcC3Gx8f58MNnVeadO3ewsbHB3t5eNDt9fHzw9fVldHSUuXPnYmpqKkjcci9BdXU1\nwcHBSCQSPvnkE3Jzc7G1tRVpYdeuXePOnTsUFxezYMECHjx4wMaNGyktLcXMzEykWuvr64sxoJyc\nPGXKFDw8PNi4caOQlsuRdaamphw5coQ//elPDAwM4OHhQXp6Ot9++y0pKSkC+fbFF19w69YtdHR0\ncHV1pb6+Hn19fcbGxggNDWXKlCmcPXtW9CrGxsZYvnw5a9asEVmYAwMDWFlZiczU2tpa1q1bxwsv\nvMD58+dFitTjx4+RSCRC1v/TTz+Jz9gfWf/SpiCTyRr//rUVuAr4Ay0SiUQK8Pevrf/0l1BQEMAQ\necqwnp4er776qqAPP3nyBG9vbx4/foy9vT0rVqzg999/p7u7m4CAABwcHNixYwdr164Vjbv4+Hge\nPXokjFVnz54V3VqpVEpNTY1QUaakpIgAWgsLC/r7+wXUdOvWrbi4uAjlYWJiIgoKCkRFRdHW1oay\nsjLnzp3DxsYGZWVl1q1bJ9SI8sCXyspKDAwMuHTpEsbGxtTX15OVlYWrq6u4O4SFhXHhwgVh8/3x\nxx8ZHR0VMWf19fV4e3ujrKxMZmYmycnJZGZm4u7u/g88RHlid0NDgxBWffLJJ3R0dAheQHFxMV5e\nXmRlZaGpqUlXVxdSqZTQ0FDOnTtHSkoKLS0tJCYmCtKTPKT23r17jI6OMjIywooVK5gyZQoWFhY0\nNzczMjKCoqIiNjY2fP/99+IuHhkZSUpKikCP19T
UsGfPHvLz8zEwMBBUoNWrV5OXl8fbb7+NVCoV\nG9LixYvx8/MTsXGlpaXU1dVRU1PDmTNnsLCwEFOYK1euiJGkHK7r7OzMvHnz2LZtG+bm5gQGBgq1\nakNDA8rKyly9epXHjx+joaHB+vXraW9vJyUlBW9vbx4+fEhhYSFGRkaiPIJn+gdTU1O+/PJL1q9f\nT3NzM5999hm//voroaGhvPPOOxw7dgypVMry5ctpa2vj999/Z3BwUEwUtLW1mTFjBgsXLmRsbExc\nE3KYsIqKikihOnDgAMuXL6e4uBhbW1uqqqp48OABQUFBFBQUiFPwyMgI7777Ll5eXri6ugqvxh9Z\n/683BYlEoiGRSLTkj4F5QCEQA2z8+z/bCET/s59lYGDAlStXhOdAV1eXoaEhGhoasLe3p7S0lOLi\nYpydnYmKikJFRQVPT0+Gh4extbUlNjaWoqIidu3aRXR0NOfPnweeYb137twpjlW+vr4UFxeLiUFp\naSmtra2Mjo7i6+vL+vXrcXR0JDExEXNzc6ZPn87IyAhr1qyhra0NDQ0NAEpKSjA2NqaqqkqQg+V3\nneLiYn744QeOHTvGoUOHWLhwIQUFBQI7Jm+mSqVSFBQUqKurw9PTE3gWXCtHr0VFReHi4oKjoyP+\n/v7cvXsXIyMjWltbKSoqIiwsTLhHy8rKcHBwYM+ePTQ3N9Pe3s6sWbOoqKigoaGBhQsX8v7774vR\n7OrVqwkJCeGbb75BQ0MDiUTC119/TVdXl2A0RERE4ODgQEBAAFKpVBCOTE1NmZycZP78+SxevJin\nT58SExODiooKnZ2duLi4iAumurqaoqIiOjo6cHFxwdbWluDgYEGbWrJkCWlpacTHx6Og8OyjKJ9I\neHl5YWFhwZ49e2hsbBSajvb2dj7++GOhS+jv72d4eJjGxkZ8fX3FOHNkZASJREJhYSEAnZ2dHDt2\njNdff5329naSk5NJS0vD2dmZS5cuYW9vT3JyMh4eHtja2nL9+nVeeuklLC0taWtrE5kPenp6XLly\nhatXrwLPwmK1tbWFsM3AwICPP/4YBwcHwsLCOHToEIGBgYSEhFBRUcH58+eRSCQMDg6ioqLCiRMn\nqK2tpa2tjSlTpgjh3bx580TPJCcnRzQd161bx40bN3B3d+f69evo6enR1dVFXl4eUqmUiYkJMY5+\n+PAhdnZ2qKio/EP69D9b/8pJwQRIlkgkecAj4KZMJrsN7AHmSiSSCiDi79//H1dXVxerVq0SaLOG\nhgZ6e3sxNDREX1+f5cuXo6+vz+7duzE2NhaGkLGxMUpLS5FKpSgrK/Pdd9+JcBKAzMxMVFRUhLnm\nwIEDFBUV8fTpU6Ep+PHHH0W6zvDwMDo6OlRUVCCTyWhubub1119HX18fVVVV0ax59913BTAjLS2N\ntrY2urq6sLKyQldXF3d3d7q7u1mzZg3ffvst8+bN49y5c3R3d6Oqqoq1tTVOTk5MTEygpaUlciqs\nra2FZLevrw89PT3i4uJQUFAgNDSU2NhYsrOzcXJyQiaTYWdnx/DwML6+vmRnZ7No0SKWL1/O0NAQ\nCxcuFLmVPT09orttYmKClpYW165dIyQkhICAANzd3XnllVdoa2sTRCb53W1ycpKHDx8SHh7OL7/8\ngoaGhvj9Lly4ID6ENTU16Onp0dnZyfXr11FWVmbnzp0oKSnx6quvsmbNGjIyMvjb3/6GiooKs2fP\nFv0BXV1d0RDcu3cv5eXl+Pv7U1RUxJo1a3B1daW/v5/Tp0/z9ddfM2/ePEpLSyksLKS5uVmg3h88\neICdnR0vvPACtra2pKSkCCGbkZER33zzDQ4ODsTHxxMaGkpDQwO//fYbJSUllJeXs3LlSnp7e3n4\n8KHgRB44cICGhgYiIyP5+eefiYuLw83NTSRPaWlpiSDkrKwsurq6iIqKEoK8rKwskXItZzV4e3uj\nra1NVlYWCxYswMrKiueee468vDyCgoKAZ0G+s2fPRltbm87OTqFIzcrKQiaTkZ+fz/DwsCBLaWho\nCL3NwMAAf/nLX1BWVubRo0fU19cLo9UfWf9Dc/6f9T/r/z/rP4fmbGVlxYoVK3B2dkYmk5Gamsq6\ndeu4e/cudnZ2jI+PI5VKKSsrY9GiRdy4cYP169dTUFBAS0sLpaWl6OjoMGPGDCorKwkODsbHx4c9\ne/awdetWKisrBcdOXV1dWIpra2vx8vKiqKgIJycnCgoKxCjP0dFRRH9ZWlpSW1vL7NmzxekiMDCQ\nQ4cOsX79esE2nJiYYHh4mDfffJMHDx4IW29ubi6NjY1UV1cjk8lYtmwZurq6HDlyBCsrKyIiIpg7\ndy7PP/88Hh4eeHh4iCwG+Q5/8OBBtmzZwuTkJMnJycJvoa+vj62tLYcPH8bT0xNfX1/S0tJISEjg\nrbfeoqqqSuRN7t69WyRfAWzatImYmBhhV9fU1KSoqIjNmzeTmZkpErl+/fVXQkJCkEqlLF26lF9/\n/ZVFixaRkJCAnZ0dnp6eIqz35MmTvPLKKxgaGqKtrS2eC8/iAeVka7lUXCaTiVj7devWER0dTV5e\nHi0tLYSFhZGQkCCQZSdPnmRsbAxdXV309fVFPJsc+uro6EhNTY2YyYeFhVFWVsauXbsEQ6OtrY0Z\nM2ZQX1+PkpIS8+fPp66uTqRPDwwMsGnTJlavXo1MJuPgwYMsWbIEJycnysrK8PLyYt68eRQXF/Pa\na6+RkJBAbW0tioqK9PT0UFdXR3h4OElJSQwNDREcHEx+fj4zZ84UPZCRkRHGx8dFw9HCwkK8n5WV\nlfz5z39mx44dSKVSqqqqsLCwoLa2lsjISI4ePYqzs7PA5vX392NiYkJGRoaAsMhJ6BoaGkKn8/jx\nYyHS+mfr32JTUFRUFMfukpISIiIiSElJYWxsDKlUyuDgIPX19URERHDx4kXBp8vJycHCwoKpU6ei\npaVFa2srqqqqImHXx8eHb7/9VjAM/fz8SEhIEKIfDQ0NSktLUVJS4qOPPuLmzZukpKTwX//1X1y/\nfp2amhpu3bqFra0tEokEFRUV4BlPobm5GSMjI2Hdlfs2hoaG6O3t5dGjR+zYsUPYcjMyMli7dq3I\nyIyNjRWUKbne/e2336a3t5eysjKmTJnChQsX8PDwYOXKlXh5efHw4UNu3rzJ5OQkBgYGYsrS1NSE\ng4MDycnJmJqaCpnzw4cPiYmJYefOncJsZm1tLUZfhYWFuLi4YGZmxqNHj3jvvffw9PTk7Nmz+Pn5\nUVlZiaamJjt37qSoqAhHR0eOHDmCg4MDx44dQ1VVlcjISA4ePMiHH36Ij48PMTExPH78GF1dXaKj\no9m4cSPu7u48//zzeHl5UVBQgJmZmSAbZ2VlkZOTw4wZMwBwcXHh0qVLDA4O8vnnn7NlyxamT5+O\ns7Mz6enpJCYm8uuvv/L555+jqqqKuro6SkpKwpkJzwx2d+7cQUNDQzTYfvjhB5HIPDk5ydSpU8nI\nyBCbtY
GBgbCc7927lzNnzvDVV1+RlJREb28vxsbGuLi4kJiYyIYNG/jyyy+BZ16NwMBABgcHefjw\nISYmJuTn5zN//nzOnz/PxMQE8+fPJyUlBTc3N+7evYuNjQ1BQUGoqqoydepUTE1NSU5OpqKiQmhh\n5NmXampqmJiYoKenR0VFBdOmTcPLy0sklUskEtrb21m9ejUnTpygo6ODWbNmCWXrr7/+ysyZM3n+\n+ef/8Kbwb2Gd3rdv32dBQUF0dnbi6elJZWWlmBDcv38fb29vDA0NefjwIVpaWvj5+aGnpydsyMPD\nw8hkMgwNDZk5cyaHDh0iPj4eT09PAgMDaW5uxtXVlc8//5zOzk7a2trw8fHBxMSEpKQkbG1tcXJy\n4ujRo3R3dxMSEoKWlpaArnp7ewubrpwDqaCgwLvvvsvx48dZsWIFTU1NTJ06VfgYuru76erqYsOG\nDdy5c0dAQru7uwkKCqKyshJPT0+0tLTo7u4WuQ7y2lqeFZifny8SstLS0vjrX/+Kr68vDg4OqKqq\noqOjQ3Z2Np6env+QdWBpacmcOXMYGhrC3d1dSF1ra2uprKxEUVERFxcXEbPX1dWFRCKhpqZGGMge\nP34s7OGurq4MDw8Ly7CDgwOOjo6Ul5ezYMECkpKSMDU1ZWBgQODbTExMGBgYwNraWjhS5WGsDx8+\nxMfHBzU1NQwNDenu7ubWrVtoaWmho6ODoaEhkZGRwnKtqKiIpaUlfn5+FBYWihMGPPMJ/PDDD7S2\ntmJhYSHs9cuWLSM5OVkQpgMCApg2bRrx8fEiramyspI5c+ZgZmYmRso//PADKioq1NbW0tfXh4GB\nAVu2bCE7O5uenh6RWpWVlSVOW3JWqKamJlpaWlRXV5OVlUVqaiotLS24uLigqakpiOIlJSU0NDRw\n6NAh/vKXv/Diiy/y9OlTqqurSUtLQyqVsmDBAry9vfnb3/5GYWEhjx49wtDQEB0dHQHxra2txc7O\njoyMDDZu3Eh3d7fY3Pr6+tixYwdKSkokJyeTl5f3h6zT/xabwldfffVZSEgIc+fOFcGkVVVVwio6\nODgomIzx8fGoqalhbm6OnZ0dzc3NGBsbU11djYeHB6WlpQwODhIXF8epU6cYHBzk8ePHAtq5cuVK\nwTs4deoUM2fOZMmSJbS3t6Ojo8PatWupq6ujrq4OdXV1tLW1kUgkjI+PY2xszJ07d/joo4/o7u5m\nZGQEf39/4uPj0dPTQyKR0Nraip+fH87OztTV1aGiokJFRYXwOchkMsrLy/H29kZRUZHOzk5MTEyI\njo7Gx8eH4eFh4dbU1tYmMjISFRUVdHV1iYuLY3R0lOLiYry9vUVo7rRp04Tqz9HRESMjI4yMjCgq\nKhJlkjycVp6SbG9vT2NjIxkZGbS3twtkfm9vLw0NDUilUpqamli3bp0wNMmBJyEhIZSVldHY2Mh7\n773H2bNnCQgI4LPPPmPx4sXcuXOHpUuXoqWlRUxMDBKJhP7+fqqrqwkJCcHf318E6MoZCOXl5dy9\ne1coDu3s7AR1ysDAQGg1vL29MTMzQ1VVlXv37vHOO++gpaWFh4cHOTk5hIeH89VXX7Fr1y7i4+MJ\nDAzk2rVr7Nu3j6dPn1JVVSVYml5eXkKVCs/KG319fXR1dZk6dSoqKirEx8eTmJgoNjlnZ2cGBweF\nNmbatGno6enh6+uLkpISvb29DA4OEhkZiZeXF0pKStTW1nL37l2B4jcxMRFTtV27drFixQqMjIzQ\n0dFh9erV7N+/nylTpuDi4iKmF1ZWVhgbG4vG8cyZM0lPT0dXVxcTExOmTZtGVlYW1tbWWFtbU11d\nTWRkJLq6uiJi8D+KpwDP5rK3bt1CJpMJOo+6ujoHDhxgeHiY4eFhHBwcOH36NIGBgWhra3PkyBG8\nvLyEpfbevXuMj48LHfiTJ0+4cuUKMpmM4eFhcnNzKS8vx8/Pj7y8POGsU1BQ4Pr16xw4cEAAQTw8\nPDAwMODJkyeEhoaybds2kYWQlZUlErK/++47ZDIZWVlZnD59msrKSn7//XeGhoaYmJjg9OnTPH78\nGDc3NywsLLCxsRH1cGJiInZ2dkJXMW/ePAFtKSsrE5mWFhYWJCQksHLlSiQSCa6urkilUtrb22lt\nbcXKygpDQ0MsLS25c+eOkICXlJRgaGhIcHAwqqqqfPHFF5SWlrJlyxaBUfvkk0/YvXs3a9asISoq\nSkxZ5I5B+Sx/ZGSE/Px8fH192bFjB0uXLkUi+V/svVd4lWXet32u9JVeV3ohvXeSQIBAQgk1FOnd\nURAdBFGHUXREHCyHwmAFpSgYQXoNJBBCAiEhIb333nvv5d3gWdc3z7vz+B3fu+Ec33vvqPFAIln3\nfV/X//r9zlPC+++/L0CrM2fO5Pnz55w6dYqxsTEqKiqYNm2asFQrKirS1NQkAKxeXl4CLyb/mSkp\nKQlbt5eXF+3t7Tx9+pRly5aJ0FFhYaFIq548eZKff/4ZU1NTDh8+TFBQEB988IGwR8vnC1evXkVH\nRwdFRUWqqqqwsLCgpKRESHwAAgICKC8vZ3JykufPnwvr9/vvv8+MGTNEJFl+ggQvmrjy/khDQwP9\n/f2cPXuWc+fOERcXJ9qVoaGhVFdXc+jQIUJCQpg9ezZ+fn4UFRWhoaHBwMCAYEUAbN++ncrKSiIi\nIkRXRR7YkoODFy5ciL29PePj4+Tm5jJ16lRMTU0xNDRk69at1NfXU11dLV6cf/he/D9wP/9/vpSV\nlamvrxfd9xkzZtDV1UVdXR0///wzjo6OguQsb8OVl5fT3d1NR0cHR48exd/fn/7+flRUVIRS7auv\nvkImkzExMYGvry9dXV0oKChw8eJFNDQ0yM3Nxd7eng8++AAzMzOmTZuGlpYWhoaGxMbG0tjYyOzZ\nszl+/Djp6ek8fPgQgFdffRU9PT0WL15McHAwGhoaODk54eDgwLFjxzA0NBQCmKCgILZt24atra1w\nSoSFheHp6Ym7u7tI58GLG6K7u5v8/HxmzpyJn5+f2GrY2dnR2trK8PAwFRUVREZGYmpqytatW2lt\nbWV0dJRHjx4xffp0Lly4QHJyMlpaWty8eZPVq1ezadMmXnvtNYKCgvj+++/Jzc1ldHSUkydP8uWX\nX6Krq4tEIsHU1JS8vDz27NkjZKvysExISAhJSUnU1NTg7e2NnZ0dM2bM4MmTJ3R1dRETE8PGjRvp\n7OxEUVFRbJmSkpKELk1eJMrOzqazsxMnJydu3Lgh4LUaGhrk5eVRVFSEtra2QJFpaGiwYMECPv/8\nc+rq6li3bh3Z2dmie1JaWkp8fDyWlpbIZDI0NTUxNzenpeVFdq63txcdHR3Onj2LTCYjKCiIwcFB\nli9fLlacaWlpODg4IJFIsLGxoaWlhdmzZ4uH9Zw5c4AXsyozMzPgRfJw/vz5TJs2TWQMDhw4wLRp\n03BycmJiYoJ9+/axdOlSAgICBNouJycHa2truru7SU1NZWRkRKw
4Aby9venu7iYtLY2Ghgby8/MZ\nGBigra2N3NxcxsfHqa6upr+/n4SEBDIyMoS5SiqVMjIygr+/P4qKishkMpqamv7w/fineCioqKgw\nOTmJj48PnZ2dPH78mK6uLjZu3IilpaUYnLW3t5OSkiLcfgsWLODhw4esXLlSgEXkk1h4kWKbOnUq\nQ0NDDAwMoKOjwwcffEBUVBQ2NjYoKCgglUpxcHCgoKAAe3t7ampqxB5ywYIFPH/+XOjW5R2Fn376\nCV9fX3p6ekRWIDMzk7lz54o9tpubG9u2baO4uJj4+HjS09OxtLREQ0ODn376id7eXoyMjLh+/bp4\nmzU3NzM6OkpCQgKDg4NER0dTV1dHSUkJpqamdHZ2ihTc2NgYjY2NpKSkCJr0vxdiNmzYwPz585mY\nmOC1114jMzOT8PBwYUWWo8nDwsJQVFSkoKCA0dFRhoeHMTMz491338XT0xN9fX0aGxuxsLDgwYMH\naGtrc+bMGQ4dOkR6ejoymYzBwUFSU1PZsGEDWVlZIlo8OTmJpaUlp0+fZsuWLWzatAktLS3Onz8v\nYCgJCQmCXAQvbEajo6N4eHgwPj7OTz/9xK5du0hMTKSyspLBwUGampp4+PAhJ0+e5K233mLmzJli\n2fzee+/h6OiIkpIS9fX1gnugpaXFs2fP8Pb2xtfXV5TS6uvrKS0tFSKbhIQEGhsbOXHiBG1tbdTV\n1XHlyhXS0tIYGRlBIpHw448/ClpWeno6/f39+Pv7c/LkSTw9PYmLi+Pdd9+lsrKS2tpa7t27J1KQ\n0dHR+Pj4MDAwwIMHD0SQ7ZdffiEsLExkbJ48eYKhoSGampqi/PX8+XNsbGxwdXVFR0eHoaEhrly5\nQktLC15eXnh4eIhgVkdHB//85z9F9FreFv0j15/ioaCurk5AQADt7e1YWFiwYcMGenp6UFVVFUSg\nrKwsmpubUVBQED4/Of1ZSUmJoqIiZs2aRVZWluik5+fn4+XlhZubG5cvX+b+/fssW7aMDRs2iD4E\nvGhT7tmzh6amJsrLy2loaBASDnnz0dLSUkRm3377bTIyMujv72dgYAAFBQW2bNlCf38/zc3NjI+P\nk5mZKfb82traSKVSYSZqaWkRAywzMzPa29uBF8NJAB8fHwYHB9m4cSMaGho0NDTw+eef09/fz86d\nO+ns7GTJkiW88sor7NixA0dHR06dOoVUKuW7776jra1NhLFqamrE9ypvNfr7+2NoaEhnZydnzpzh\n7bffJiEhgYSEBAwMDFi7di0KCgpkZ2eTkJDAxYsX6evrY968eUKhLt/zmpmZ8fPPPzN37lxxFPbp\np59y5MgRNDQ0OHfuHKtXryYsLExo452dnWlubqanpwd1dXVu3rzJzJkzgRcKQTc3N5F+TElJobCw\nkM2bN6OiosK3335LeHg4/v7+PHjwgOzsbBYsWMDY2JhY1v/8888kJCTQ3t6Op6cn8GI1qqOjw8KF\nC3ny5AlVVVV89913fP3110gkEjZt2oSJiQnbtm1j//797N+/HyUlJbZt20ZOTg6nTp3i3r17tLS0\nEBcXJ+QqoaGhxMfH4+/vT2FhId999x0//PAD+/fvZ8GCBcyYMYN3331XcBPCw8M5ffo0zs7OODs7\nY2trS3FxMSdPnkRTU1M8xDQ0NGhpaWF8fJw5c+YwdepUNm3ahKKiImVlZXh6emJoaIitrS3Kysoc\nPnyY0dFRgXhLSEjA3NycuLg4GhsbBeH8j1x/ioeCPKZaWlqKk5MT9fX17Nmzh5iYGIyMjFBRUcHZ\n2ZnKykrKyspIS0sTjUAzMzPq6urEEWJwcLCg2G7bto28vDyioqKwsLAgJCSEiooKxsfHxQe5vLwc\nOzs7sT0xMTGhoqKCzz77DEVFRezs7Lh69Sp1dXXCClRcXMz4+Djt7e1MTEzQ1NSEk5MTCQkJREdH\nU1JSQn19PQoKCuzcuROZTEZaWhotLS24uLjg7e1Nf38/169fJy0tTcSCLSwsMDY2xs3NDS0tLaqr\nq6mrq0NBQQE1NTXMzc0xNTUVbw55Nbyjo4Nr166hr6/Ptm3bcHd3p7GxkYmJCVpbW9HU1ERVVRUd\nHR0GBweFd3HOnDliGm9hYUF2djYSiYT8/HwqKysZGRnho48+oq+vD3NzcwwMDERU3NzcnLVr19Lf\n38+9e/f4+uuvGR8fp6CggLa2NqG3q6ysxN7enqioKNatW8fQ0BCPHj1iZGQEeHFTNTQ0cPPmizS8\nHLV/584dMjMzcXBwoL6+noyMDAwNDdm1axcaGhpIpVKsrKyIjY0lOzsbPT09hoaG0NePsCeGAAAg\nAElEQVTXR09PT6DY5BzK0dFRYmJikEgkGBoa4u3tzbx58+jo6EBFRYWysjJKSkooKiri/PnzDA4O\n8vz5c3x9ffH09OTgwYNIpVIsLCz48MMPxffv4eGBtbU1CQkJ1NXV8cEHHxAXF4dMJmP37t00NDSQ\nmJhIdHQ0U6dO5e7du/j5+YkbVV1dnWXLlpGbm8sXX3zB/fv3gf8H+efr60thYSGLFi3CwcGBiooK\nRkdH6erqwsLCgpqaGs6fP4+DgwO6urrMmTMHfX19cZQv77nIk49/5PpTPBR6e3txdnZm6tSpnD59\nGkNDQ4aHh3FzcyMqKoq+vj7B9ffy8kJZWRkPDw/Mzc1JTk4mPT2dv//979TW1oppNkB9fT0VFRVs\n2bIFLS0tlJWVsbe3x9nZmZUrV4oabUZGBs+ePWPJkiW0tLTQ1tbGggUL0NDQoKKigl27dvHTTz+J\nM++CggKam5sxMDBgdHSU1157jby8PIF4CwoKIiQkRBxxdXR0YG5uzs6dOwXrQSqV8vLLL4uINbyo\nTvv7+6OhoUFXVxdFRUU8e/YMa2trMckeGxsTBCh1dXWSkpJITExk3759lJWV0dbWhoqKiliZbNiw\nAXd3d8bHx4mPj0dVVZW7d+9y+fJlEhISsLGxQUdHB1NTU/bu3YuJiQmWlpbcvHkTLS0tLl26RF1d\nHbGxscTFxdHf309VVRUGBgaiOLVnzx4MDAxQUFBg3bp1XL9+HQMDA/71r39RV1eHlZUVW7Zs4cGD\nB5w4cYLh4WFu3brFP//5T0pLS1myZIlQpskjxXIPRXZ2NvLUrfzm6+zs5Pr169y9exeZTEZra6uY\nuTg4OKCoqEhPT4+IdMOLIeK8efME00FZWZmqqirRNiwsLBSfhfT0dJqamtDV1SUzM5OsrCy6u7tR\nVFSktbWVtrY25s+fD8Dt27eZOXMmmpqaQrKrrKzM2NgY69evZ2JiQkSd3377bVxcXER8vLy8XDR2\n33nnHaysrMQLQk1NjaGhIdzd3bGwsGBkZIRnz57R3NyMo6Mj2traTE5OEhoayqeffsrrr78uwlG/\n/vqrCC+1trYSEBAghuR/5PpTPBTk6jUFBQWCgoLIz8/H2NgYHx8f1qxZQ3JysjjKU1FRYevWrTQ1\nNREXF0dPTw+dnZ10d3
fz+uuvMzIyIjh32traDA4O0tLSQllZGdHR0UIDt3v3boaGhpg/fz5SqZSW\nlhZOnjzJxMQEoaGhSKVS5NLbW7du8frrr2NpaQnAwoULxdKttLQUBQUFZDIZv/32Gzt37hS056Gh\nIUpKSqiqqiI8PJy8vDxGR0f561//iru7O7m5uVRVVYn/rpzeMzQ0hLW1NREREZibm7NixQrx4fnt\nt98EV1COj4uKiiI6OhpjY2OkUinNzc00NzcTFhaGvr4+hYWF4m2VkpLCO++8g6urK5WVlQK2Ai/M\nR48fP8bV1ZU1a9ZQX1/PkiVLiI6O5ttvv6W7u5vg4GAOHTrE2NgYy5cvJzo6mg0bNjB9+nQyMzMZ\nHx/HwMCAmTNncujQIX755RdRaNLS0mLatGkcO3YMFxcX1q9fj7GxsSBUAUydOpXIyEj+9a9/oaSk\nhJy1MTg4yCuvvMLdu3cpLS0V7sWIiAhsbW355JNPaGhoECKc4uJidHV1CQgIABDHhVu3bkVJSYlv\nvvkGc3NzTExMaGtrw8TEBC0tLcLCwmhsbMTFxYXZs2fz9ttvM2vWLPGZzMvLQ0lJScBQXnnlFXp7\ne9HQ0GDhwoWoqakJNeHw8DDV1dV4eXmJJGV5eTn37t2jtraWtrY2kX24fv06jo6O4rPQ3d2NkpIS\nCQkJREZGkpKSwtjYGDo6OkRGRnLx4kXOnz+PRCLBxcUFDQ0NLCwsOHTokLCWVVZWilSjfJX7R64/\nxUOhq6uL3NxcsrOz0dTUFOzAtrY2jIyMBHMwMTFR8BkdHBzw8/PD1NQUGxsbTE1N+f333wWuDRDV\n4e7ubtFbl0qlFBQUMH36dHbu3Imbmxs1NTUUFhZiaWnJxo0b/9tb39vbG3d3d2pqasQE9+zZs+Tm\n5lJbW0t2djZ1dXUMDg6ira0tjvJUVVVRUVGhrq6OVatW8emnnwpVeVZWFmVlZZSWljJr1iwxGG1r\na6OyspKcnBzBGXB0dKSkpIS6ujrxVtTU1KSwsBB3d3d6enp4++23eeutt5DJZJw5c0YMHO/fv09h\nYSGhoaG4uLiwZ88eZs2axZEjR8jLy8Pc3BwzMzNR49bU1PxvBRxnZ2cSEhJYtWoV69evZ2RkhCdP\nnrB161aqqqpISkri0KFD6OvrExERwWeffcbq1atxc3NDX19feB7Lysro6OjA2dkZPT099u7dS0ND\nA9ra2vT19aGoqCgeTLa2tkyZMoUbN27Q2tqKhYUFqampAonv6OiIs7MzmpqaWFhYEBMTI7aG3t7e\nNDY2YmxsjK2tLQUFBQJYmpeXh6+vL48ePcLe3l7kQMzNzXnjjTdYvXo1gYGBfPXVVyxdulQ4Nx88\neEBHRwceHh7k5+eza9cugZSDFxSu+vp6nJycUFVVFQG6goICSktL0dbWZs6cOcJRunz5crq6ukhP\nT2fWrFlCOS8PuMkHgjKZDD09PeGnUFNTw8fHh5aWFuzs7Ojs7MTe3p5p06YRHBxMfHw8np6eaGtr\nk5CQIJKTs2bN4s6dO2J79keuP8VDQU1NjenTpwsu4OrVq/Hz8xPJsH/PIFhbW2NsbIxMJmNgYIDc\n3FzBD9DV1eXZs2fiaGjlypXk5+dTWlqKm5sbIyMjtLa20tHRgba2Nk1NTcTGxrJx40ZefvllTE1N\nsbKyYtu2bQKWcuTIERobG+no6ODEiRPAC/KStbU15eXl7Nq1i5iYGPLy8vj8889pbGzExMSEH374\nAQ0NDWbMmCFEI/Lodnl5OfDi9CQtLQ1vb28A4XTYt28fdnZ2PHnyhOfPn6OtrY2pqSnPnj0jKCgI\nPT099uzZQ11dHWpqatjb26OlpSWOY+vr69HU1KSrq4tdu3YRGxsrzslNTU2Jjo6mqqqKiYkJbty4\ngZmZGTo6Oqirq+Pn50dqaio3btxgxowZqKioYG5uTkREBMuWLaO7u5uTJ08ilUqZPn06165dIy4u\nDjU1NY4fP05+fj6pqakoKSnR2trK3LlzmTZtGhMTE2RkZLBhwwasrKxwcXHh2bNnVFVV0dfXJ4C3\nra2tSKVS8vLy0NPTIzw8nMDAQDo7O/nb3/6Gra0tExMTArpjZ2eHmZkZXl5e5ObmoqysTFJSEmNj\nY+Tk5CAP56mqqooIubyFWVFRQWhoKDo6OqSkpNDX10dQUBDKysrCxdDf34+dnZ2Ije/du1ccB8ML\nMI6amhqZmZmYmJjw448/snHjRrKysnB3dyc1NRV9fX2io6MJCQmhoaEBb29vsR2Wf45jYmLIzc0V\nR6hlZWVC01deXo6JiQkPHz5ER0cHHR0dsWK5d+8eBw4cEMCh48ePC8O2hoYGtra2KCoqsmvXrj98\nP/5pug9SqZSHDx+KZW1MTAzj4+P09PSIau26deu4ffu26ETExsbS1dUl3IQBAQHo6uqKocqRI0fY\ntGkTu3fvpr+/Hz09PS5duiS040ZGRhw7dgxbW1tycnJECaehoQFlZWXU1dXZvXs3kZGRdHV1iYRc\nZmYmXl5emJqaUl9fz9KlS5HJZDx69AhLS0uOHTtGUlISenp6LF26FAMDAxITExkfH6ehoUH0Jr74\n4gs2bNjAsWPHgBfMvevXr1NeXk5gYKBwXcjrxn5+fhgZGWFra4u6ujqpqanExsbi6OhIamoqjo6O\nBAQECOmMk5MTL7/8Ml9++SXt7e1kZWUJd6a6ujp9fX2C2aiurs7y5cs5deoUk5OTrFq1ijt37nDk\nyBHxQbx69SpvvvkmGRkZjI6Ocvv2berq6vD09KS9vZ2ZM2dy8OBB8XZes2YNISEhGBkZIZVKMTAw\n4Pbt23z//ffMmTOH+vp6NmzYwNjYmBjc2dvbi2Ha0NAQSUlJKCsr09/fT2hoKPfv3+fHH39k/fr1\nWFhY0NzcTGtrq0DXVVZWihOhJUuW4OHhwT/+8Q/09PRIT0/H1NQUNTU19PX1efz4MYcOHRIx7qqq\nKpSUlHBzc8PR0ZHe3l7c3d2RyWQcPHhQxLU7OjrE9yufx4yPj3P8+HGCg4PR0tJi7ty5fPfdd6xY\nsYLo6GghhFVTU0NLS4uCggKGhoYwMzMT9KuWlhbmzp0LvMjCyKliWVlZBAYGsnDhQgoKCnj8+DFb\nt24lKyuLmpoa1qxZw65du9ixYwefffYZbm5uODg4MHPmTO7cuYOysrLoA/2h+/HPEHM+ceLEwYCA\nAIEzl3v2FBQUUFZWxtXVVUg02tra8PLyoq+vj8DAQMzNzUU8Vp5UmzdvHt999x2BgYGiwVhQUMDw\n8DCLFy+mqqoKe3t7CgsLqaqqIi8vT5R1goODCQsL4+bNm8yYMYN9+/axceNGcUz466+/cuTIEZqb\nm0lMTMTDwwNtbW0uXryIgYEBsbGxZGZmoqamRkREBAUFBejp6Yn9vxz9JV8dVVdXi8RiYGCg2Gc3\nNzdjb29PfX29yB7ItXnDw8NC9ebg4MC9e/dEt16uc5PJZCQmJvLGG29w//599PT
0BE59xowZNDQ0\n8OjRIxYtWoSfnx/x8fFUV1czd+5cUcRxdHTE0dFR0JC2b9+OiYkJd+7cQVtbm5CQECQSCf7+/oSG\nhhIVFUVtbS02NjbMmDFDsCZLS0uZM2cOQ0NDVFRUUFxcTEpKCj4+PtTW1iKRSJg2bRpnzpxh3759\nVFZWYmxsLBiDFy9eZP78+VhaWorZiqqqKkpKSvj7+/Pw4UPRX5G/jRcsWEB1dTWqqqpER0ezYsUK\nIaExNTUlIyODtWvXcvr0aTZu3IipqSldXV0iKdrU1ERUVBQzZswQN+/mzZtpbm7GzMyM5uZmUlNT\ncXV1JSwsjMuXL7Nx40aOHTuGn58fP/74I9u3bxdbFWtra54+fUptbS1SqZTx8XGqqqowNDQkKysL\nbW1tjI2NiYqK4vnz52hoaKCrq0tsbCxKSkosXLiQ77//nsrKSkZHR5kyZQoDAwNs3LgRQ0ND7Ozs\naG9vZ/bs2WIYnZCQQHh4uEi+RkVF/efEnDs7O7l27Ro2NjasXbuWhoYG5s2bh4KCAlVVVejr63Pl\nyhXOnDnDtGnTCAkJYc6cOVy+fBkFBQXhbejp6aGoqIi2tjbgBSbb0NCQefPmERoaSkdHB/r6+kxM\nTNDe3k5fXx8TExN89dVXtLe3s337doaGhgQ/PzExkT179jA2NibSiIAIkoyOjvL06VPOnz9PcXEx\nMTExLF26lODgYN566y3c3NwICwsjICCA4uJinj59yqlTpwSSrK6uDnV1dQoLCwEEujsxMRFbW1uU\nlJRQU1NDTU2NlJQUDh8+zJdffsnAwAAjIyM0NzfT39/Pnj172Lt3rxjayQEiKioqSCQSwsPDyc7O\nZv78+Tx8+JCrV69iYGBAZ2cn7e3tfPzxx0ydOlWo6eQrHvn3ODQ0xKpVq2htbeXHH38kICBAZDF6\ne3vFNuvll1+mvLycjo4OLl++jKqqKgsWLBCg18zMTPT09NDQ0ODAgQOYmZmxZcsWXnnlFXGM/N57\n72Fqakpvby8DAwMiqt3R0cHExASRkZGEhobi6OjI48ePSU1NZeXKlSxevBgtLS2sra1RVlbm7t27\nrFixQnzG5GlNmUzGyMiIaDeuWLGCc+fO/bdf6+DgQGlpKWFhYWRlZZGZmYlMJiMnJ4fx8XGWLVsm\n3rxdXV2cO3eOvr4+oqKimD9/PuPj40RERKCrq0tycjJdXV309fWxaNEizM3NiY6ORldXl8DAQFRU\nVBgZGaGpqUng8gCxOpTrDG/evImdnR3W1tZMnTqVhoYGrKysOH/+PH19faJgJz+K1tLSYsGCBUil\nUlpbWwVK8I9cf4qHgra2Nlu2bEFJSYnz58+LElNbWxvbtm0DXkzmN27ciL29PU1NTeTk5ODp6SlQ\n4lOmTBHacDnJqKenR3T3r127xvDwsBDGent709rayqZNm8jMzMTf31/4/p49e4ampiampqZ4eXnR\n2dkpev7wwlOxcOFCjIyMmDNnDj09Paxfvx5DQ0NOnDghkGBJSUncunWLx48fU1VVhaqqKg4ODuKo\nKCgoSKQc5VdHR4eQvvb19eHr60tbWxtqamq4uroyc+ZMRkdHxVbGxMSE9vZ2jI2N0dfXx9zcnIyM\nDCYnJ9HW1ubu3bs8f/6cq1evikBVYGAgzs7O+Pn5ERsbi4qKChMTE4JcvXnzZkpKSggMDMTFxYX8\n/HxiY2ORyWQYGhpibGzM4sWLUVRUFI7GFStWCPvxnDlzWLlypUimnjhxAplMhqqqKpOTkwJKunLl\nSh48eCC07/AieTgxMSGKYfLl9N27d7l48SJeXl50dHSIQd3cuXNpa2tDXV0dmUxGamoqdXV1LFmy\nRBzzAaJ+//jxY9zc3HBxceHixYusXbsWNzc3srOz6e3tpampibq6OvEAe/jwIQ4ODsTGxgoPaHx8\nPGfPvrAYyG3Za9asQVdXFwcHB/z9/dHU1BRsz5qaGqqrq9m3bx8JCQmsXLmSbdu2Cd1dZWWloHfJ\nbdZvv/22IGy98sorfP/99yKT0djYiIODA+np6aipqfHtt9/S19eHkpISERERqKmp8ezZM+7cuSPI\nzvLv849cf4qHgjy6qqenh5aWFjY2NmRnZ+Ps7IyKigrt7e3IZDJxpCcfRDk4ONDW1oa+vj69vb24\nurpSXV0t/mBra2uxsrJCW1ub0NBQysrKePjwoVhu7927VxStli1bhra2NkePHsXCwkIEWuTtvmXL\nlolcura2NhkZGejo6JCcnMyOHTv47bffWL16NQ4ODnh7e9Pc3ExcXBy9vb3Ex8czMTFBd3c3RkZG\nJCUl4ePjw71790SmABCV4blz54ppdH9/P15eXnR1dWFkZERwcDBJSUn87W9/o7u7W5wIyMWvlpaW\nQgMfGhrK2rVrefToEVOmTKG9vZ1Zs2ZhZGTEqVOneO211/D19RUFMDlroLu7m7GxMfT19cUbSQ65\nSU9Pp6SkhPT0dAoKCqiqqkIikfDDDz+wfPlyAgMDKSgooLOzEy8vL27cuEFoaChff/01Tk5OdHR0\n8Oqrr3L9+nUiIyMZHR2lpKREZEtWrFhBZmYmrq6uWFlZkZ6ezoULFwSUVa7Q8/b2ZmRkhKVLl4pm\naExMDKqqqmzcuJGmpiaCg4N5/vw5APr6+igrK6Oqqoq6ujojIyO4uLjg4eEhGq7q6upIpVIaGxvZ\nunUr1dXVREREiCKVp6cn6urqaGpqsnv3buAFW2PKlCmcOHGCwMBA7t+/T3l5OQ8fPuT9999n5syZ\neHl5ERAQwI4dO9ixYwddXV1ERkaipKSEhYUFsbGxAt//0ksvAS9OH54+fYqCggK///47W7duJSIi\nQvgs9PT0MDU1xd/fn/3796Ojo0N+fr4YrBYWFooVUXR0tHCL/JHrTzFTOHz48EH5D0dFRQUrKytK\nSkpwcHCgu7sbd3d34Qzw9/dneHhYRD2Hh4fR0dGhr69PHMfs3buXr7/+GplMhqurKxKJRDTNUlJS\nUFRUJCkpSYRTjIyMSE1N5cCBA3zwwQckJyeLabyBgQHh4eGkpqYKlLu+vr4Ac8jV9vr6+gQEBCCT\nyYRtysjICEVFRTGPKC8vF2m8gIAAhoaGuHTpEurq6qSkpBAUFCSKOPK+hru7uzgubWpqEmWr1tZW\nCgsLkUgkTJ8+nfLyckpKSrCyshJJuUePHlFdXY2mpiZTp06lt7dXEIZ8fX0xMjJi2rRpbNmyRSQF\nAd544w0sLS25cOECn332GdeuXRO0KnmHRENDAw8PDxYuXEhWVhbz5s1j+vTp9Pb20tnZSWNjo3A5\ntLW14ePjQ19fn8DPy4d1S5cuJSkpCRMTE6Kjo9HT06O4uBg7OzscHR2pqqoSq8i6ujoaGxsFCfn1\n11+np6dHDO/kUB0XFxdxJGtnZ0dMTAyzZs0SwSF5y1YOY5ETi0pLSxkeHsbX11cwN4ODg4Xkd3x8\nXCRCMzIySElJwc/PT8Bks7KymD17Nj
ExMWzYsAFvb286OjrIzMzk9u3bODs7izd8Q0MD69evp7Ky\nUoiGwsLCiIqKIi0tjTfffJOnT59SUlLCzp07cXFxQVlZWVCnfvjhB3R0dFBTU+Px48ciRq2jo4OK\nigohISHo6OgIAZG6ujqZmZn/OTyF48ePHwwODhaOxerqambOnEliYqJ4upeUlODt7U17ezstLS1Y\nWlpSUFBAS0uLgFsMDg5iYGDA1atXSUlJ4cCBA7S1tSGRSAgJCaGgoICJiQnRqZDJZCgrK/PkyRPy\n8vJYtmwZRUVFpKeno6WlxaJFi7h9+zaVlZX09PRQWVlJcXExERERKCsrs2XLFlE0Kisro729nYGB\nAe7evUtPTw9XrlwhODiY+vp6VFVVMTc3Z3h4mP7+fi5cuMDOnTuJiYkhJCSEmzdvCninnGMgH2bK\nHQsSiYS6ujrMzMxQUlLCycmJAwcO0NHRgZqaGhs2bBDpz/HxceCFFDcgIEC8XZydnYmKiiIrK4vY\n2FiqqqooKioiNjaWFStWCMx4Wloa9vb2pKSk0NHRwdjYGL29vZSXl/PgwQPU1NSYOXMm586dIzQ0\nlObmZt555x0MDQ3R09NDUVGRtLQ0QkJCCAsLo6GhAT09PXp7e6moqMDGxgYDAwOePXuGqqoqb775\nJl988QWvvvoqDg4OKCgo4OjoiEQi4cGDBzx//pxt27bR3t5OZmYmgYGBeHp68vjxY/T19Tl9+jQF\nBQXCGVJfXy+4Ezdu3GDnzp2iFFVVVcW5c+cYGhrCwsKCyMhIRkZGhFa+vLwcJSUlXnrpJVJTUykr\nK2NiYoK2tjbxtadPn1JfX4+3tzdjY2PcunWLxsZGrl69yv79+/Hy8hK8iu7ubqHHS05OFmrC0tJS\ncnJyWLFihchlLFq0iHPnzqGqqipYHlVVVcyZM0foFCMjI4mMjGTJkiVUVFSgoaGBpqYmDQ0N+Pj4\n0N/fz+DgILW1tQwNDWFiYkJHRwcZGRn/OQ+Fn3766eDy5csJDw9HJpOJ8lFtba3wNKqrq1NeXs7l\ny5cJDg4mLS1N6MlHRkYoKSnB09MTDQ0NUlJSyMrK4pNPPiEqKgozMzNx7Hj48GGsra3p6OjA0NAQ\nIyMj/P39UVJSoq+vDxsbGzZv3szt27dFUOby5csCzf7s2TMx0c3NzcXV1ZXExEQaGxsJDAxkfHwc\nHx8foTiT/1B0dHRobm6mrKwMNzc3NDU1MTExQVVVlfj4ePLy8pg2bZoIr8THx4vVRHV1Nd3d3QwN\nDeHg4MDk5KT4tXIGQEJCAp6enujp6VFSUoKioiJFRUVUVlZSUlJCRkYG+vr6FBQUsHnzZry9vWlp\naUFFRYWgoCBaWlpQVlZmeHiYpqYmSktLCQgIEEM0VVVVBgYGmDZtGmNjYwQEBNDV1UVYWJhI8Zma\nmvLxxx+jq6vLO++8g7u7O/Hx8Xh4eFBXV0dCQgIHDhwgJSVFHBGbmJgIc/ilS5fE8M3e3l7AZnR0\ndHBzc0NHR4eCggIWL14sClkVFRVUV1cLqlNBQYE4Ap49ezb19fU8evSI4eFhhoeHMTExwd3dHR8f\nH1RVVWlqasLGxoaxsTGampoYHx/HxMREhMsWLlzI1atX8fLyIjg4WMBgvL29ef78OW+99Rb+/v6E\nhIQIfoGzszMXLlwAXswc5G1cCwsLrKyskEgkaGtro6WlhVQqZXJykomJCdTV1cnPzycxMZG//OUv\naGlpIZPJ6Onp4d69e2RmZqKiooKioiIffvghvr6+nDhxQpw8JSUlsWLFCpHHSU5OpqmpCU9PT1RV\nVYmNjf1DD4X/S3P+v9f/vf7/c/3n0JwtLS0FoPPx48esWrWKS5cuoa2tTVFRER4eHkybNo0nT55g\nYWGBo6Mjp0+fxtPTk6SkJBYuXMiUKVMwNDSktbWVlpYW1qxZwzfffENxcTEffPABhw8fpq+vD1tb\nW1E2sba2pqGhATs7O+7duyfApp2dnezatYs9e/awf/9+nj59yuXLl9m/fz8bNmzg+vXr5Ofn09zc\njKWlJYqKivj7+4uhVk5ODsHBwcTFxeHr64uLiwuJiYnU1NSwe/duqqurKS0tZfPmzZw5c4apU6cS\nERHBhx9+KBRw8inyN998g7KyMj4+PgJttnz5chISErCzs8PJyYmHDx8SHh5OT08PiYmJrFq1iqSk\nJLq6ukhOTmbPnj1oamoKTkN+fj7Tpk1jxowZ5OXliVxAW1sbq1atYnBwkHv37qGsrCy2Fbq6ugwM\nDAiClb+/P1OnTiU2NhYbGxsxwY+Li2PRokX09vZiZmZGUFAQtbW1XL16FT09Pd58802OHj3K0qVL\n6e3tZXBwEAsLC6HM+/jjj/H09CQ2NlZg1mxtbfn888+FeSo0NFRsO7KysgTBSE9PTzg+fX19OXr0\nKEFBQXz00Ue8+uqrrFu3DqlUyuDgIBoaGkxMTHD+/HlkMhmzZs2irKyMKVOmkJOTw9OnT/noo4+4\ndOkSvr6+4hjZxsaGrKwsysvL+e677zh16hQ3btwQjMi+vj6amppQUlJiYmKCzMxMpk6dSnR0tMD0\nff7556SlpfHee++hqamJjY0NFhYWHDlyhJUrVxIREcGVK1cwMzPj8uXLoknq6OjI7du3mTdvHkZG\nRiQkJIgTjzNnziCTyXB0dGRkZITk5GTx/1VQUMCqVasE6Pd/uv4U24cjR44c/OWXX6ioqEBNTU3w\nBTQ1NUlLS2Pt2rW0tbWJI7C0tDSMjY2ZN28eMpmMqqoqWlpaKC8vp7+/Xxzd2NnZYW5uTnFxMX5+\nftTW1gIvTiUUFBR49uwZCgoK9Pf309DQwPbt28VST753l7fn1q5dS3t7O7du3clzlIMAACAASURB\nVCIoKEgcg8mX0TU1NTg5OREeHk55eTm3bt3i3Llz/Pbbb8yYMYN169aRlpbG2NgYbW1tLFmyhFu3\nblFZWYmKiopwKFpYWIjE5rVr1zA2NsbX15eOjg7Bc2xoaMDPz4/nz58LLoG2tjZRUVHMnj2b33//\nnb1799LY2CjaefK6dUtLi7hpKyoqUFZWRklJCSUlJRGkaWpqoqamhvDwcIaGhujo6BA38ODgIEpK\nSjg6OtLW1kZgYCAPHz5kwYIFgpgtH4qlpaVx4cIF1NXV8fHxQUlJiX/9618MDQ2RnJzM0NAQg4OD\n3Lx5k9zcXFJSUrCzs0NJSQldXV3mzZvHG2+8QU9PjyAz7dy5ky+//JKioiL09fV59OiRcFIWFRUx\nPDxMd3c3XV1dQraTkJDA8uXLBZl68+bNFBcXC9PVwMAAZWVlLFq0CEdHR3R1dXn69KnArtfW1opy\nUVtbG+bm5jx79oyCggK0tLRYunSpMD5JJBISEhJE27WmpoaHDx8yNjYmuJq+vr6Mjo7i5+fH+Pg4\nZmZmHD9+HCcnJ/FAloesVFRUOHHiBP39/dy4cYNXXnmFmJgY7ty5I/oQExMTGBoa4u7uTllZGfH
x\n8YSEhAjcvIWFBb/88gvPnj37zwkvAZSUlKCtrU1KSgrm5ub09vZSVFSEoaEh9fX1dHV1UVVVRW5u\nLl1dXfj4+HD37l26u7upra2lrq6OkZERVq5cyauvvgrAyy+/zIoVK1BTUyMhIYHp06czMTEhjEN6\nenooKysTGhrKli1biI6Oprq6Gnt7e/r6+lBTUxNoK3lgCF6EjKytrZFKpdTU1DA5OYmRkRGZmZm8\n99576OrqcvjwYT788EMOHjyIuro6b7zxBi+99BJKSkpoaWlx9OhRgRpvbW0FXhS4bt26xYkTJ2ht\nbUVRUZHa2lrq6+spKSkRAtyBgQHGx8extLRkbGwMf39/MbEvLi5GR0eHtWvXAi9MQytWrODOnTvs\n27cPAwMDGhoaxJn22NiYkOj+5S9/Yfbs2ejq6mJnZ8fExIQwLKWnpzMwMIBUKhUIODlXQj70ffz4\nMQ8ePCAmJoavv/4aa2trli1bJmrMFhYWfPrppzg4OLBz507GxsYYHx9n7969InAm75Q4OTnxz3/+\nk8jISNTU1GhoaEBTU5OvvvoKAwMDxsfHWb9+Pe+//z5hYWFoamry97//HW1tbcbHx4VDRF5FtrGx\nQV1dnU8++YQffvhBDAl/++031NTUmDVrFrm5uVy/fp3JyUnmz5+Pt7c3c+fOpbGxEQMDA0JCQujv\n76exsVFYpw0NDcnPz+fQoUNiKLp9+3amTJkisHRy/VxdXZ1o1Xp7eyOVSsWLLzQ0lPDwcJHL2LVr\nl5iNubi4sGzZMvbs2SPyKrt37yYoKIj29nbq6upobm4mJyeHDRs2sGbNGvz8/Fi1apVQ1K9fv/4P\n34t/ipXCN998c3B4eBgHBweB7ZbDL06cOEFOTg7R0dGsXr2asrIyBgYG6OzsFNP0jRs3ippsdnY2\nioqKnD17lvnz5/Prr79ibGwsEO1qamq89NJLVFZW4uPjQ2FhIWfOnGHNmjUoKipSUlKCjo4OmzZt\nQkNDg8rKStatW8c777yDnZ0d9+/fFwOslStXcu/ePdLS0lBXV/9vLEUHBwcaGhqYO3cu165dQ1tb\nW7Qf4YWrQT4ofPToEXl5eXz55Zd0d3cjlUpFiMfMzIy1a9eioqIiGIHt7e3U1NSQm5uLubk5+vr6\nGBkZMTAwgImJCfr6+ixbtoyCggIkEgnj4+P4+fnx+++/o6ysjJGREa6urjx48EAk3iorK7l37554\nOFtaWnLjxg3u3LnDP/7xD1RVVZmYmMDGxobbt28DL3gVs2fPpqioCAUFBeHrVFZWFgIT+apPIpGg\nqKiInp4eFy5c4OzZszg5OYnjQ19fX86cOYONjQ1FRUW0trYyZcoUDAwMMDExoaioiDlz5jA2Noam\npqYItamqqmJiYoKenh4///wzRkZGrF69msOHD2NnZ4dUKiUmJoa3336bJ0+eYGdnR0NDA/7+/pw9\nexapVEpiYiJqamo0NjYKPJ/ciSHnZY6MjFBUVERnZycKCgro6Ohw48YNDh48SF9fHyEhIULRJmdb\nGhkZsXjxYh48eEBXVxf37t0jLi4Oa2trdHV1KSwsxMDAQICFSkpKGBgY4OrVqwL8a21tzYoVK3j0\n6JGAFSckJBAREUF0dLR4aSUnJwt3hdxofu3aNTIzM1m0aBHV1dXcuHHjP2elMDw8jLm5Ob/88gt2\ndnYi1y2vsm7atInDhw/T1tZGeHg4q1atwtDQkPT0dLKzs/n9999RUlISghI5nff58+fs37+f5ORk\nLly4gIeHBwEBAeJDnZuby927d7G2tqampoaJiQnmzp3L2NgYX331FcXFxaSlpVFXV8fRo0dFVNTM\nzIz169cTFxfHtm3bcHJyEsAW+TS+u7sbY2NjfvvtNx48eICpqSkvvfSSmFo7OTnh4+PD2bNnWbBg\nAQAjIyOsX7+eVatW0dfXR0BAAHFxcURFRZGcnMzIyAjm5uZMTExw8+ZNpFIpH330EQUFBURGRqKv\nr09WVhYNDQ0kJSXR2dmJTCbD2dmZvLw8urq6KC8v5+7du+zfv5/4+HjeeustdHV1iYyMpKmpicHB\nQcFqlCcXc3JyWLRoEenp6ejp6Yns/scff8z58+dpamqio6ODLVu2oKmpyaJFi5g3bx7r16/H0dER\nW1tbrKyssLGxobu7W+DP6urqKC4upqWlRcBQ3NzcOHDgANXV1RgZGfHgwQP6+vqQSCQYGxvj6urK\nwoULGRgY4LPPPuPYsWOcOXOGO3fuEBQURHh4OPfu3RPHvOnp6QBiuV9WVkZgYCC1tbW0tLTQ2trK\nggULyMzMREdHB2dnZ8bHx5HJZPzwww9iLiD3eMhzC3LMW0xMDEpKSlRWVqKsrMzIyAhaWlpMnz6d\nO3fukJ6ezuTkJPX19cybN4+//e1vYruyevVq4uPj+fXXX5HJZJSUlAjk3/r163FxcaG3t5f09HTe\nf/99UlNTWbduHZs3b0ZZWZl58+ZRXFzMyMgIs2bNws7OjqysLAwMDDh27BgbNmxARUVF8C3/6PWn\nWCkcO3bsYGdnJ6+88oog7YyNjTE0NCRSbGNjY0yZMoXTp08Lo468OXb69GlGR0dxcXGhr68PTU1N\nTp06xZdffinMuyUlJdjZ2REbG8vg4CDTp0+noaGBsLAwgX1btWoV169fZ/v27Zw9exYdHR2WLVtG\nWloaXV1ddHd3k5SUxJw5c4iKihJkKPnR49y5c7l69SpKSkpoamri4+PDBx98gKamJn5+fpiYmDA0\nNER8fDza2trcunULe3t7BgcHiY+PF0GeWbNm0dzcTH19PcuWLWN0dJTi4mK6urqYNm0aQ0ND5OTk\nUFdXx5tvvsno6Chr1qxhYGAAfX19mpubMTU1RSKRiKjx9OnTCQ0NFTVl+Rtq0aJFhISEoK+vT1lZ\nGUVFRbz88svcvHmTU6dO0dXVxZYtW/jpp59EqrSlpYWgoCAuXrwoiNPvv/8+Dg4OBAcHiwDT4cOH\ncXR05PvvvxesgMLCQrE1XLp0qRCyKisrc+TIERQVFfn222957733KC0tFc6Ha9euUVhYSHh4OKdO\nnSI/P19QsY8cOUJ3dzczZsygtraWsrIy9PX1UVdXx9LSksTERKZNm4aGhgb6+vqUl5dTX1+Pu7s7\nNjY21NbW4unpibOzM5aWlly6dIn+/n709fUpLi7GwsICRUVFAb3V09OjoKCA+/fvM336dJqamtDS\n0sLR0VE8sORbQiUlJQwMDEQNfGRkBAMDA7Kzs8VqauPGjTx9+hSpVEpTUxPPnj1j+vTpVFRUsHLl\nSiE7Dg4O5ueff+bDDz+ks7NT2NTlBTqJRIK3tzfDw8M0NzeTl5dHaGgoM2bMwMrKijNnzvyfWSlI\nJJIzEomkRSKR5P3b1/QlEskDiURS+l9/1fu3f/eeRCIpk0gkxRKJZMEfeSjItW3JycmUlpYKxZm9\nvT2bN28mODiY/Px8Ll26JBJaHh4eODo60tDQwL59+6
ioqGDKlCkoKiqKmLO8tRcXF0dHR4fAi0dE\nRPDFF18wOTnJlClT+P7779HU1OTu3btIJBK+++47UVOW67zs7e2FMl7evmtubmbz5s1MTk6SlZXF\nzZs3CQ4OZuXKlYSHh3P8+HE++eQTnJyccHV1pa2tjYaGBt544w1mz56Nvb09U6ZMEQ4BOboMXkSp\nZ8+ezaVLlygsLMTV1RVPT0+qq6uxtrZm//79NDQ0YGBgQEdHB9nZ2SgrK6OhocGWLVt4+vQpS5Ys\nwdbWljlz5nDnzh2MjIyEP1MikVBRUUFISAgPHz7k+PHjjI+P4+LiwvPnz8nLy2Pu3LnMnDmT3bt3\n4+3tjYGBAY2NjUxOTtLS0iIcDZOTk8J6JI+Ff/jhh6xevRo1NTWOHj3K1q1b0dDQICgoiCVLlrBg\nwQJKS0vp6+sjKSmJ8+fPA4gBclFREevWrSMiIoLFixeLh1dZWRmVlZUkJiZy9OhRdHV1mTFjBjk5\nOSgqKjIwMCB09/KiGYCzs7NgaMjnDvCC+iWXC6WlpZGcnExhYSGdnZ3Ai2H36dOnOXXqFObm5tTW\n1nL27Fmhnjc3NxdNXXk0+d+3cs7OzqxatQpVVVX++te/8vz5cyYnJ/Hw8ODAgQN0d3cTFxfHvHnz\n6O3tFTMQV1dXpFIpt2/fJiIiQpyuOTs7iy1mfX09VlZWmJubM3PmTLS1tWlvbxcDZgUFBaZOnUpd\nXd3/K+v0H9k+/AKE/29f+zvwcHJy0gF4+F//jEQicQXWAW7/9Wt+kEgkiv/Tb6CiooKSkhKZmZki\nOquiooKlpSU1NTX8/e9/R1FRkR07dhAQEICVlRVBQUHiCGlycpLW1lbRhJPrvOPi4vD392fWrFkk\nJSXh5ubGkiVL/ltP4eLFi6SmpgoA59KlSwXTUZ66s7OzE719eDFo1NLSQiKR8Le//Q1FRUU2btzI\nkiVLcHJyEtuanTt3igHnlClTqK+vp6ysjOzsbAFCUVZWFg8xLS0turu7uXfvHs3Nzfz4449Mnz4d\nb29vsrOzuXPnjiBHVVVVcevWLfLz8wkICBBpzs8//1xYj5qamnBxcWHevHnClyCVSrGxsaG1tZXF\nixczOTmJvb09q1evFvn606dPC9DrX/7yFwBaWlpobm4W8JuQkBA2b94syEEnT55EQUGBjIwMioqK\n8PLyEoPby5cv09raiqenJx0dHURGRorAj56eHv39/WLQ6OvrS0BAAPv27aO1tZUbN24IzoGqqiqP\nHz/G2dmZ8PBwoqOjBZtiypQpbN++nbt375KdnY1UKmXVqlVCtLNgwQJBTx4eHmb+/PnCxzl79myh\n2evu7iY+Pp6qqiqcnZ2RSCR4eHiIhKJUKmX//v1ERkYCL3idy5cvZ8eOHTx+/BgjIyPOnTtHVFQU\nlZWVuLq6kpWVRVdXF0FBQcyePZvBwUHMzMx47bXX6Ozs5LXXXuP9999n06ZN2NraAghUnqOjI+fP\nnycjI4Oamhohhenr62NkZAR7e3u6urpQVVXFysoKAwMDIWlet24dZ86c4cmTJ0Iq/Eeu//GhMDk5\n+Rjo+N++HAGc/a+/Pwss/7ev/z45OTk8OTlZCZQBAf/T79HT00NeXp7Iynd1dWFtbY2enh6+vr7M\nnz+f7du3k5GRgZWVFR4eHri5uYmknJ6eHpqamly4cIGGhgYh0vzrX/9KSUkJt27dwtPTU+jjDh48\nSGVlJVeuXMHT05N169ahpqZGQUEBCgoKaGtro6enx+joKE+ePKGvr4/U1FScnJyAF/TpFStW8OTJ\nE9zd3VFXVycqKooPPviArKwslixZQnp6Om1tbSxduhRVVVUqKiooLS2lu7sbc3NzBgYG8PPzE4wA\nAC8vL8rLy4mOjiY7Oxtvb2+8vLwwMTFh2bJl+Pn5kZWVJUxLQ0NDfPrpp+Tm5jIyMiK2INnZ2ezY\nsYOOjg4aGhr48ssvSUlJoaWlBWtrawoKCli6dCnm5uZcvnyZc+fOifPskpIS3n33XaysrFixYgVf\nffUV27dvJycnh/r6elE8S0pKIjMzE2tra+Lj48nIyODNN98US/K1a9eydOlSLC0tCQwMRENDg59/\n/pkHDx6goaFBamqqsEQPDQ2hoPDio+jm5kZOTg4xMTFoa2szMTHBrVu3mDNnDpOTk4JoLf9zNzY2\nZu3ateIkJzk5GX9/f8zMzAT1G6Cvrw9lZWWcnZ3x8fFBIpGQk5ODRCKhqamJ+vp6cnNzefz4MWFh\nYTg6OpKeni7YFPIjTgMDA6qqqsTPLDQ0lFOnTpGeno6ysjJXrlxh1qxZ2NraEhoaypUrV7h+/Toe\nHh6UlJQIqOrUqVNZvnw5Pj4+XLx4EU9PT27evCm0A/JujYuLi/gzGhgYQFNTk2+//VbMdlJSUkhN\nTaW3t5e+vj7u/C/23ju6yjL9+/3s9J6d3rPTO2mkkB4gFOmEIqBIERHFhgg6IzOjo46jr4MORYoU\nAaVKBykJkEJ6CJBCSM9OQnrvbWe/fzD7Pr9ZZ63zet5z1ructX7PP2BkYcx+nvu57+u6vp/PtWsU\nFxejpaXFoUOHePbsGQsXLhSpzt9y/e8WGq2USmXTv37fDKhQsXZA/X/5cw3/+tr/4zU6OkpMTAwv\nv/wyhYWF3Lt3j/DwcCoqKigqKmJoaIhHjx5x4cIF9u/fz9GjRzl+/DjHjh0jLy8PhULBhg0bMDQ0\nJC0tTUA1KyoqMDU1ZdGiRTg4OGBubs7hw4dJSkrCz8+PZcuWkZCQwMjICLm5uSxevJj333+fjz/+\nGDU1NdTV1dm+fTsKhYKcnBwuXboEPN/adXV1iQfCycmJ4OBg1q1bJ4aWVMamLVu2kJWVxbVr14T8\no62tDWNjYywsLDA1NRVFKxWa7S9/+QuvvPIKlZWV9Pb2CuSZrq4uAwMD3L9/n5KSEv785z9TXFxM\nYWEhL774IsbGxmhqarJmzRr27dtHRkaGwH9t3rwZIyMjPDw8cHJyoqioSGQyIiIihM5dRVWKi4tj\n0qRJDA4O8tlnn2Fvb8/mzZsFuq2rq4vJkydTVlZGTEwMJiYmXLx4kfLyck6fPs0PP/yAiYkJJiYm\nwj3g4uLC0NAQCxcuZGhoiPz8fIKCgnj27JnYjt+6dYsZM2ZQXl7O0aNH0dbWpq+vT+xy+vr6iI+P\nx9rampUrV5KcnEx5eTlLlizB0dGR119/nYaGBuzt7VFTUxNF3MuXLzN9+nTq6+tpamqiqKiIxMRE\nDhw4gEKhEMNJkydPZu3ataxcuVLsoFQ0JUdHR1xdXYHnR1B4rqNTGchMTU1555130NLSYsWKFaLD\npTJ979ixg71797Ju3Tp27drFxYsXxUi1i4vLv5mcVqxYIdB3M2bM4MqVK+zevVvsAEZGRsjOziYm\nJkZAggcGBpg7dy7Tpk3jk08+oa+vj7CwMM6ePSus3r/l+v/cfVA+n5P+fz2mLJFINkokknyJRJKv\nUCjIysrC3
d0dU1NTLC0t+fXXXzE2NkZbW5tFixbh5ubG4sWLUSqV+Pj48M0336BUKvnyyy/Zvn07\ndnZ2GBoasn37dqGfc3JyIjw8nKCgIC5cuEBvby/R0dHcv3+foaEh4uLiCAgIQCaT4evry/DwMI6O\njly9epXz58/T3t4uoJyqgg08dwhcuHCB8fFxwsPDBZ6tv7+fb775hvLyclEY3bFjB1u2bGFiYgJj\nY2MGBwdxdHRETU2N8vJy9uzZIyrvtra2Iv1YX1+PQqEQLUSVWDYqKoqwsDDRro2IiBA4ND8/P9rb\n2ykpKUEikbB06VI2btxIcXExX3/9NRcvXuTx48ci7t3Q0EBnZydZWVmcP3+eVatWMTY2xuXLl7Gz\ns+PJkydoaWkRHBxMS0sLqampZGZmCtT78PAwPj4+FBYW4ufnx6RJk7h27Rp2dnaizlFVVcXg4CCP\nHz/Gzs4OX19fxsfHiYyMFKRhDw8PYf2SyWRCmjs6Ooqfnx8JCQnU1tbS3t6OjY0NdXV1nD9/nrCw\nMBYsWICvr6+A2+jr6xMREUFLSwvLly8XzMOenh4KCgrEW1dHRwcTExPs7OwoKSlhz549mJqakpGR\nQXZ2NpcvXxbmbZVUqLGxkaGhISYmJkSiVHX0vXv3Lra2tpw9e5aJiQkOHDiARCIRwSY/Pz+Blh8d\nHWVgYICGhgZu376Nm5sbK1aswMHBQUTIb926hYGBASMjIyIqb2JigkKhEJDdN998UxS+i4qK0NDQ\nYGxsjNbWVh4+fCj4EZMmTfo/4pJskUgkNv96uG2A1n99/Rng8F/+nP2/vvZ/u5RK5UGlUhmiVCpD\nVP5ClevRx8eHgYEBSkpK6OnpwcDAgKKiIuEn9PDwYMWKFQQGBvLZZ59ha2tLUVERw8PDfPPNN2za\ntAl43sM+ffo0EomEhQsXMjAwQGBgIHFxcdTX15ORkcHPP/8s+rkffPAB0dHRLF++HA0NDbq6ugQj\nz9HRUTy8KgSZoaEhtra22Nvb09TUxNDQkACkzpo1i4GBAXJycqiqqqKjo4M333wTXV1dBgcH8fHx\nobu7m/DwcEpKSoDnViBXV1cBf7WwsBAE58DAQKKioigtLeWXX37BxMSEt956i+3bt3Pq1Ck++OAD\nuru7MTc3p6GhgYmJCXbu3ElzczM6Ojp8/vnn2NnZYW5ujpqamsC9q8jE3t7eKBQK3NzcUFdX5/Hj\nx9y+fZuFCxfywQcfYGZmhpaWlkitGhoaioc9LCwMFxcX+vr6mDlzphCoqoSrcXFxosah6verglqD\ng4NMnz6dFStWADB58mSGhoYoLCxEqVRy7tw5YeNua2vjypUrDA4OMmXKFIaHh2lpaWFwcJCBgQEm\nJibEAtPS0kJra6sQ1wYHB9PU1IRCoSAmJgZbW1uMjIxE52Pz5s2icKuieak4m9OnT8fHxwcXFxdC\nQ0NRV1fnwoULwPOFfGRkBHNzc44dOybi3paWlhQVFfHJJ58wNjaGmpoaMTExNDY2oqOjg7OzM76+\nvmzbtg0PDw8aGhoYGxujt7f3+YNjby+OyJqamvzlL3/h/v37Qm84NjZGe3u7ABnn5+ezYMECrK2t\nCQkJobCwEF9fX9555x0aGxvZvXv3b364/3cXhSvAmn/9fg1w+b98fYVEItGWSCTOgDuQ+1v+QhVD\nT1WMKysr4/79+8IH2N3dTV5eHqGhody6dYvGxkb8/f2prq4WFFxDQ0OMjY0FNs3IyIjR0VGKi4sx\nMzPj6dOnLFu2jBs3biCVSomMjKS7u5tDhw7R1NTE559/jpqaGg4ODri6utLf38/du3cpKSkR8VP4\nvzDks2bNQiqVoqOjQ0REBAkJCUilUjw8PEhNTeXq1atERETg5+dHX18fX375JRs2bMDBwYG9e/cS\nGRnJ0NCQUHqpmAg2NjbIZDL09fU5fPgw1dXVVFVVMT4+zoMHD5g6dSrGxsaii3H16lU++ugj3Nzc\nmDNnDomJiUyaNIl169Zx6tQphoaGWL9+PbNnz0ZfX1+MIHt7eyOXy/H09EQikVBfX8/Vq1fFFraz\nsxOZTCYedtXAVWhoKGFhYZw6dYqVK1eio6NDRkYG169fZ8qUKeTn59Pe3k5vby9tbW14enoKk1V4\neDgNDQ3cvHmTuro6XnjhBa5evSpguw8fPmR0dJTVq1fj4+PDjBkzSE9Px9HRkdjYWEJDQ/niiy94\n7733mJiYEC+PpKQkfH190dDQEKnBgIAAMQx369YtPD09Wbx4Mc+ePSMlJYX79++Tn5+PkZERLS0t\nVFdXY25uzpIlS2hqakJfXx8PDw+0tbWRy+U0NTXx7NkzwauA5y8IVZ1HBaWB54LYjRs3EhsbS0ZG\nBvv37ychIYFvvvlGgHPq6uq4e/cuRkZGVFZW4u7uTmZmJvDcw9nQ0CAs2E+ePEEqlTJ37lx6e3tJ\nTk7m66+/5ujRo2RlZeHm5sa2bdswNDQkOTmZ6upqpk6dSk5ODvHx8YSHh//mh/u3tCRPAVmAp0Qi\naZBIJK8CfwdmSCSSCiDhX/+MUqksAc4CT4CbwGalUqn4X/03urq6ePjwIffv30ddXZ2MjAyam5tx\nd3dn9uzZfP/999TV1YkV0dbWlj/96U9cuXIFDw8PFi1aJCbzRkZGhI/vwYMHrFy5koyMDO7du4eX\nlxfh4eFMnTpVFClnzJghFFsRERGigLd06VJxLHj33XeRSCTixs3Ly+Pp06c0NjYyb948xsbGSElJ\noaurSzAfhoeHWbFiBebm5ly+fBl/f38cHR2Ry+XcuXOHlJQUTp8+jbq6+r9JZgYHB+np6aGrqwsH\nBwdkMhlz585l//79ZGdnEx8fT01NDb6+voSFhZGWlsaJEyeor69ndHSUmpoa7ty5Q3NzMxcvXmR8\nfJz8/HwcHBw4fvw4urq6NDY2Ultbi46ODosWLaK8vBwdHR00NDRYsGABc+bMwcDAAEdHRwYGBqio\nqCA3N5fs7GxmzZpFfX09IyMjREZGoq2tzbfffsvp06d55ZVXaG1tRV1dnffffx8XFxfOnz9PZWUl\nBQUFgmMRGxsrHnCV60PVAnRyckIikZCRkcHjx48ZGhrC0NCQ0tJSTExM6O/vJzIyki1btojJzt27\nd2NhYUFNTQ27du2itLQUPT09Ll26JKxeK1eu5OTJk2hpaSGXyzExMaG0tBR9fX1qa2vFbkkqlXL5\n8mUsLCzQ1NTkyZMnosA9a9YsHj16xLJlywRLMSYmhk2bNvHaa68hk8kYHx/Hx8cHLy8vzMzMxHEv\nPj6euLg4fH19uXnzptg9amhoCHnyjRs30NbWBhAO1YmJCfr7+3nzzTeZNm0a7u7umJiY4Obmxvr1\n63nttdfo7u6ms7MTV1dX9u7dS1BQEJ6entja2gp+qapI/luu39J9WKlUtHnK5gAAIABJREFUKm2U\nSqWmUqm0VyqVh5VKZYdSqZyuVCrdlUplglKp7Pwvf/4LpVLpqlQqPZ
[… base64-encoded binary payload elided: file content from the merged diff, not human-readable …]
t2Tcjg4fnlaGtri7KyMvPnz+fJkyfiQrK2tsbW1hYbGxtmznweDZmWliawfXv27KGlpUUw\nHhSt7W89v4tLobe3l+rqamxsbFiwYAHGxsZ0dXXh6OhIZmYm1dXVFBcXC4VWf38/z549Q19fX5CZ\npFIp+/bt4969e8JYZWVlhZGREd999x0+Pj5cunSJkZERzp07h7KyMqOjo+jo6LB8+XKhRgwLC+OT\nTz5hdHSU0tJSLCwsBBRDV1cXQMBDo6Ki+OijjzA2NkZbW5uVK1cyPT0tEp26uroEKFShs1dTU8PO\nzo7S0lKqq6sFSEbx/1b4BwoLC5FKpbi4uDAyMsLRo0epqqqitLQUFRUVGhoaBIVnenpapFiHhIRw\n/fp1/vKXv5Cbm4uxsTHq6upYWlpiZWWFmpoak5OThIaG4uDgwM6dOykuLsbAwICKigq+/fZbysrK\n6O7uJjw8nOjoaDQ0NEhISEBHR4fBwUFiY2OJiopi8eLFjI6OMj4+zttvv013dzcNDQ1CBDZz5kwO\nHz5MdnY2mpqaTE1NifdG0SK4ubmJygsgKiqKpqYm3NzcSEhIID4+nidPnmBsbExjYyPT09Ns2LCB\n7u5uPDw86OrqoqysjP7+furq6mhtbWXXrl20traSm5srfq6C3dDV1UVVVRVBQUFIpVI6OzvR09MT\nCdeJiYkiw1NJSYny8nKysrJwcHDAxcUFCwsLgoODheVaYWtubm4WvExNTU0aGxvR19fn1q1bBAcH\nk5qaSnZ2Nl999RUrV66kqKiI2bNni01afX09ZmZm5ObmAs/t+YODg7S0tAjK0rp165g5cyZmZmZY\nWVkJVH9xcbEgiikqXlNTU6RSqYDfKhD6v+X8LiArR44c2fvee+9x8eJFdHR0uHjxolA26ujo8PDh\nQ2pqalBRUWHmzJkYGRnR2dlJZmYm8+fPR0dHhz//+c/C7qxg+EdGRmJsbIyenp7Q6W/dupXJyUkC\nAgJEElVycjKWlpbCYKSmpsbExATJyckUFRWJiLVnz55x69YtoqOjee+99/jss8/w8/PDzs6O+/fv\n09TURE9PDxEREWhoaODo6CgQWwru/9jYGLm5udTV1REcHExAQAA2NjYcOHBAqC8XLVrEl19+yYsv\nvkhRURFubm4iV7O3txddXV3RRzs5OXHq1Cn8/f0pLy+noqJCiGzU1dVFVuGNGzfo7OxkfHwcR0dH\ngbCvqKjA0tKS9vZ2UVEoZMvDw8MCe6eiokJGRgYqKiq0tbUJNP3cuXOxsLDg7NmzxMTEEBkZibq6\nunhYHR0dRVz7hQsXRCqUmZkZ58+fx97enuLiYqysrDh//jwvvfQSqqqqwjl78eJFzM3NycrK4k9/\n+hNFRUU0NTXR19eHn58ftra25OTkkJycTHd3N1FRUcyYMQNPT0/Wr19PSkoK+fn5bN26lYqKCoyN\njXF2diYzMxNVVVW+++47LCwsaG5uRkNDAycnJ6ysrEQLFhgYiFwu5+HDh6xevZqEhAQaGhqoq6uj\nuroaLy8v5s6dy61bt2hsbMTBwYGpqSlhB1ekV7W2tvL++++LGVhraytubm6oqKgwPDxMamoqysrK\nzJo1i8TEROLi4uju7mZ6epquri7U1NTIycmhtrZWZG3U1dVRWlrK0qVLRVUxODjI7Nmzxbp28+bN\nuLu7K2Y5/znkpW+++WZvZ2cnkZGRIsuwtLRU0IM0NTWFbkEmk3H16lWam5uJiYkRgywlJSVee+01\noqOjRbrwjh07ePLkCVu3bmVgYAAtLS1sbGzQ09NDV1eXnp4edu/eTW1tLXp6ejQ2NpKRkUFISIjo\n0728vOjv76e+vp6hoSFSU1P54YcfBOSkoaGBu3fv4u7ujq6urriElixZIlDg/f39dHV18ejRI0H/\nWbt2LXv27BFBuqdPnyYoKIjw8HBOnjzJvn37SE1Nxc/Pj4CAADFDUWQXuLi40N7eLoJ0DAwMMDU1\nJTw8nKysLJqamtDW1qa/vx91dXUkEgnNzc1oa2tTU1PD66+/jkQiQUdHh8LCQqytrVm0aBHd3d1i\n5atQgaqoqDA1NUVLSwumpqYYGBhQVlaGk5MTv/zyC+fPnycyMpKnT5/S3d2NqqoqHh4eLFq0SHAa\n+/r6sLS0xMjIiIKCAqEHGRoa4p133mHnzp3U1tYSFBSEnZ0dqqqq3Llzh48++oi+vj4GBgbo7e3F\nx8eHX375RRjlHj9+THt7O/Pnz+eDDz4gKyuL1tZWbt++TU1NDd9//z379u1DT09PDBfPnj2Lq6sr\n09PT6Ojo0NTUxMKFC1m4cCHBwcHiIe3p6aGsrAwXFxeRhxEWFkZKSgoSiYSKigpUVFSEldvJyYm2\ntjZWrlxJcHAwNjY2vPzyy4yOjuLk5MTIyAhvvPGG+J0rrNF5eXksWbIEmUyGTCYjOzub0NBQEhIS\nsLOzo62tjfT0dPT09FixYgWxsbFC4i6RSCgtLUVLS4sdO3agoqKCvb094+PjTE5O4uPjw/DwMBoa\nGly/fv0/h7w0OTmJTCajoKAAOzs7tLW1CQ0NZe7cuXz++ecCXebi4kJVVRVjY2PCmefu7o6lpSVb\ntmzh0qVLdHV1UVNTA0BfXx8LFiygrq6O7OxsjI2N6e3tpb+/n6tXr3L48GHU1NTw9vYmPj4egN27\nd4tvTy8vL1xdXdmyZQs2NjbCadbW1kZVVRUffvghZ8+e5ccff+T48eOkpqYyPT0t8N2nTp1i586d\nKCsrU1BQQH5+PuPj49jb25OdnS16P4UUNyIignv37tHU1ERSUhLz5s2ju7ub3t5e9PX1qaurE4AX\nRSuhoCwVFRXh6upKaWkpqqqqLF26FHd3dwwNDfHx8aGyslLwHxUxYlKplKGhIezt7ZFKpeTn59PR\n0cE///lPUlNTOX/+vPAZZGZmEhUVhaamJvr6+rzzzjucOXMGBwcHfvnlF44dO8Y777xDTU0NN27c\nwMDAABMTE/Lz8xkZGeHatWu888479Pb2MnfuXFJTU+nr66O/v59Lly4J67RC3KNAubu5uREVFcX0\n9DSqqqq0traio6ODl5cXhw8fFiG5irxNd3d3+vr6mDNnjsiVgOcZIBKJhKqqKiwsLLh//7749lVX\nV2f//v0kJyeL6tHU1JTx8XFWrFhBdnY2//rXv6ipqeH48ePExcUJ0KyCmlVSUoKZmRmOjo5cvnyZ\nDz/8kGfPnvHkyRORubBo0SJ66/3V+QAAIABJREFUe3tpbGyktbWV+Ph4BgYGePvtt0UFqJgRDA8P\n8+WXXxIZGYmBgQGLFy9m5syZfPfddxw4cIDk5GTS0tJoaWnBwcEBdXV1zMzMhFRbMTTt7e0lOTmZ\nlpb/VT/4b+d3cSno6+vj7+9PWVmZ0Io/evQIdXV1cnNzqaioECq8hIQEMfBxdnamoaGBK1eukJSU\nRHx8PHV1dQQGBgJw8eJFjI2NOXbsGCMjIzx8+JCxsTGxT4+Pjyc
mJobw8HASExNpa2sTHMTZs2dz\n7949+vr6OHjwICoqKjg6OgLPDVHa2tpERUWJYaeCye/q6oqmpiZSqZSXX36Z+vp6bGxsmJqa4vLl\nywLz3dHRQXR0NGFhYSLHYXBwkFmzZuHi4oKDg4OoRqRSKeXl5RgZGfHTTz/R2dmJpaWlICUr4uTU\n1dWZOXMmDg4O7Nmzh59//plXXnmFlJQUDAwMmDdvHps3b0YmkwHPITR2dnb4+fkJVLuNjQ0GBgZc\nuXKFWbNmERgYSGNjI5GRkTg6OrJgwQLU1dUpLS1lYGAAqVTKuXPnWL9+PZ9//rnIAzU1NeXx48fC\ncCSVSlmyZAk5OTk0NzfT399PT08PpaWljI+Ps2XLFuB5DLxizfqnP/2JmpoaEhMTqa6uRlNTU4Sb\nKMCkjY2NBAcHixWhwmexdu1aamtrBdVq5cqVeHp6snjxYubPny9+/wcPHiQ4OJhXX30VZWVljI2N\nsbKyEuzPb7/9lh9++AF7e3vhFTEyMuLQoUPA80tBoa599OiRuHz+53zhp59+Ei7LvLw8PvvsM6Gw\nfeGFF8jKymLHjh0kJydjbGwMgLu7O11dXSQmJqKvr8/09LRQsRYWFnLz5k10dXUpLi7G3Nycixcv\nYmpqyrNnz0SbYmJigp2dHQsWLODp06e/+Xn8XbQPBw4c2GtmZoaGhgbu7u7Y2dmhoqKCmpqaeBh0\ndXWxsbFhw4YNREREMGPGDOC5IERR3hYXFws8WkZGhogt6+/vZ8uWLeTm5qKtrc3jx48ZGhqivr4e\nXV1doRTz8PDA2tqaqakpcnJyUFJSEgANDw8PSktLuX//PmZmZqxYsYKHDx8KLL0i2m3u3LnU19eL\nkNSOjg6xPnzvvfcEAdrW1paOjg6ePHlCZGSkiJ4bGhqioaGBhQsXsnTpUlpbWykrKxOg1bq6OgwM\nDNDS0hIfuiVLltDa2sro6CidnZ3U19fj5ubGs2fPkMvlwoKsra0tBDTW1tYMDAwwMjJCXV2d4GBm\nZmYSHByMhoaGCNkNCAigv79fAGYbGhpob2/HwsJCBOgo1sKqqqqMjo4yODiIsbExPT09qKioMGvW\nLNzc3HjxxRe5cOECmzdvxsfHhytXrmBoaEh+fj6PHz9m+/bt2Nvbc+vWLUpKSpg1axZOTk5C3m1t\nbU1aWhqPHj3i3r17BAYGism7k5MToaGhDA8Piy1OU1MT1dXVQhCVmZlJV1eX0LUotAkKsI1iaKmn\np8ezZ8/YtGmTqLgUyPgtW7Zga2vL7du38fb2pqurC3V1dXp6emhqasLIyEhgA69duyZ4D7t37xYR\nBerq6piammJhYYG6ujoXL16krKwMVVVVMjIyiIuLIzs7G319fSIjIxkcHERHR0fAYpYsWcLAwABR\nUVE4OTnR1dVFSEgIS5cuFbi2p0+fsn//fjo6Oli+fDkXL178z2kfRkdHBSR0cnJSBK0oEokUvenU\n1BRyuZzs7GzS09MZGhoiKiqKefPm8corr6Crqyum8PBcsKIoAf/85z+jpqbGvXv32LBhA0uXLsXA\nwIDHjx+TlJREamoqpaWlpKWlcevWLa5du0ZRURHGxsbY29vT398vdr1xcXEkJibyzjvvsHr1auzs\n7Hj8+DFLliwhLS2N1tZWpqen+fHHH5HJZDg7OxMREcHdu3d5//33iY6OFhkJsbGxQuasKAFDQ0MZ\nGhoSD4ZCiFRXVydAHYsWLQKeS6Pv3r1LR0cHNTU1zJs3j5GRERYvXoyvr68Yfil0+ykpKVhYWKCr\nq4u+vj69vb0YGBiQl5dHWloax44dw8bGhg8++EBYvhWboMbGRmxtbQU5eunSpSxatIinT58KJWFl\nZSWzZs2ipKREXDpqamrk5+djbGws4t1zc3N59OgRDg4ObN++nV9++QV4zpTIyMjgo48+wsPDgwcP\nHjAxMUFjYyMWFhakp6fzl7/8hfb2dgYGBqirq+POnTtkZ2dTV1fHmTNnyM/PJyUlhdWrV6Onpwc8\nF/hIJBJCQ0NFG+Xl5UVycjLt7e04OjpSU1PD0qVL6ejo4P79+5SUlFBZWYm7uzu3bt3C0NCQY8eO\nsXnzZrHVKC8vp7m5mampKUJCQnjy5Ana2tp0d3dz/vx5BgYGWLhwIRs2bCA0NJQzZ85w/vx5wsPD\nuXXrFn/961+RSCQkJiYSGRkpxHcODg6Ym5vT2dlJQ0MD5ubmpKeno6+vj4aGBuXl5Xh7e1NeXk5h\nYaGYz9TX12Nra4utrS1mZmbs3LkTb29vXFxcfvPz+Lu4FJSVlTEwMCA5OZlVq1ZRW1uLk5MT09PT\nhIeHk5uby4MHD0Q8uFwuJzY2VigSOzo6uH37ttCaT09PA4gY74qKClasWMGdO3cEo29gYEDw+dzd\n3Wlvb8fFxQUTExOmpqbEh/zJkycC3qLoDauqqnjttddITExkz5493Llzh4iICK5cucI//vEPjhw5\ngpaWFjExMSLc4/DhwyxfvpzW1lb09PTo7e2ltraWvLw8IYNVAE3HxsZwdnamoqKC9vZ2hoaGqK2t\nFWadyMhIMjMzKSgoQCqV4uDgIFiRV69epba2lrq6OoqKirCzs+PTTz/F2dmZjo4OgZxXrBAVkJQ5\nc+ZgaWmJpaUlUqmU0tJSEURra2vL5cuX0dDQQE1NDSMjI5SVlbl+/brgACgpKaGurs7LL7/Mjz/+\nyODgIBYWFvj7+wsOgSIRetu2bYyOjjI6OiqqGUUvXVhYKIRW5ubmaGtrU1FRwa1bt0Tc29dff822\nbdvYuHEjFRUVItZ+wYIFYj40PT1Na2urUP21tbXx7NkzvL296ezsJDAwkMzMTN5++23c3d1ZtWoV\n6urqHD16lKdPn4oL8PLlyxQUFPDJJ58wNTUlEH4KnsKaNWtYtGgR69evRy6XMzQ0RHZ2NllZWcyZ\nM0eEz7S0tGBlZUVtbS0RERGC8Tl//nzOnz9PQEAArq6uQp6u0BlYWFgwOjoquI8KQrmPjw8aGhqU\nlJSQnp6OsbExkZGRWFhYiHAiCwsLDAwMkEgkvxmwAr+T9uHQoUN7FcBPhfGpp6eHu3fvYm5uLrT9\nDQ0NeHt7o6KiIqSlCjLS8ePHmZiYQEdHB1NTU27fvs3ChQtFm1FdXc2SJUuEjLi8vFywGp2dnZFK\npZSVlTE8PExZWZlgLlhYWIjsgPj4eG7duoWbmxtPnjwhNTWV+fPniyHYo0ePeOGFF3jjjTcwMzPj\nyJEjVFdXCx+Hm5sbdXV1nDhxAg0NDTZv3oyHh4f48O3atYuhoSEuXLhAZ2cnhYWF/PWvf+XGjRvo\n6upSU1PDmjVr6O3txc/Pj5SUFB4+fChCYNra2ujs7BReiuDgYKamprC2tsbBwYGioiK0tLREarKu\nri6NjY3MnDmTgoICOjs78fT0pLa2lpGREaytrUUpr8DgjY6O0tfXR1lZGeHh4ZibmyOTyUTU3Z07\nd3BxcRHOvMuXLwuIji
IHoauri6ioKJKTk0UqlZ+fH6dOneJf//qXSIvW0NDA0NAQExMTJiYmGB8f\nRyqVMjU1xdjYGBkZGXz++eeUl5cDz1Og0tPTmTVrFkeOHBH5momJibz88ssAwmOj0Iu4u7ujqakp\nBp+lpaXs378fZWVlAgICWL9+PTU1NYJuZWRkJP6emJiIiooKo6Oj1NXVsWDBArq6uqiurhZGPjU1\nNUJCQvD29mZoaIgZM2agpaWFqqoqnZ2d4stJkVa+ZcsWvv/+e1avXi3aZxMTE+rq6li2bBkJCQmE\nhoYKzUlGRoYQofX09GBhYSH8KEuXLqWurg6JREJ3d/dvXkn+LioFxdqspKREmIw0NDTYtWsXlZWV\nqKurk5mZKQg4Li4ujI6OCuTZzZs36ejowMHBAVVVVQWjASMjI5HEo6urKwxBCg5iSEgIysrK/PDD\nD/T29uLu7o6fnx+xsbH09fWJB7qhoYHQ0FABWHV1dWXFihXs2LGDlJQURkdHOXHiBG+88Qbr1q3j\n2rVrJCUlERgYiJKSEp999hm1tbUkJSXh6elJU1MTysrK3L59m+7ubjEZ/uCDD0S8O8CuXbvEuvDG\njRs4OjqSkJAgemxF4IeGhoaIhVNSUkIqleLv78+jR48oLy9HTU1N0JOUlJTQ0dHBwsKC+vp61q1b\nJ4Zn0dHR6OrqCqrR06dP0dLSEozBgYEBYdSytbWlp6cHIyMjMjMzWbduHb29vWhra7NkyRLmzJkj\n+unk5GQKCgpEm+bq6kp4eDh+fn4cPHhQRLYBwkTV2dnJ1atXyc/PF3JpRX996dIlJBIJixYt4ujR\noyxZskRAW62trVm+fDleXl7U1dWJ97a3t5eSkhJxaVZWVtLc3Cz8E6qqqjg4OODm5kZxcTFr1qwh\nLCyMgoICJBIJp06dYuXKlfj6+oo2F57H3G3atIkZM2bwxRdfEBQURFxcHHFxcYKO1NjYKOhcV69e\nRSqVUl9fT09PDzdu3MDHx4fu7m7U1dU5fPgw8Fzd2t3dzcqVKxkbG0NbW5uxsTExAG1sbOTWrVti\nJV9aWsrMmTMxNjbm2bNnLFmyBLlcTnd3N4WFhaLd/C3nd3Ep6Orqkp6ejkwmw8rKCnd3d6RSKS0t\nLSL8xcnJCXNzcywsLBgYGODEiRNMTEwQHh7O6OgoLi4uzJw5k+bmZrS0tIDnzrjOzk5CQ0Pp6+vj\nyZMnxMbGUlVVxa5duwgJCcHf3x9PT08qKipoamrC2tpafHvPnz+f/fv3C/XYgQMHAMRDq6hCbty4\nQUlJCePj4+Tl5bF582ZsbW2prq4W7IOJiQnmzJnDnTt3WLhwISkpKVhbWzMyMiIusZdeegk7Ozu8\nvLyQy+X8+OOPVFZW0t7ejq+vL76+vsybN4+ysjJSUlIwMTERzr3Ozk6UlJR4+PAhUqmUx48fY2ho\nKAxXil5UwWvw8vLC3d2dpqYmTExMyMrKIjc3l9mzZ2NlZYW9vT3m5uZiou7j40NwcLBo81auXMmz\nZ8/49NNPiY2Nxd/fn6KiIlxcXHj48CG//PIL8+fPJzo6mm3btlFUVER3dzdtbW1kZWWRlpaGjY2N\nCG9JSUkBns9VFDBdmUwmVKD19fVCvOXk5ISzszNqamqsXLmS/v5+goODhUfihRdewM/PD0tLS27e\nvAnA/fv3sbW1FWnhHR0dGBkZiXI7KiqKrVu3snbtWoKCgqivr6e0tJSSkhKMjIxE0Isi61PBySgq\nKuLMmTNiQFtZWSmk3BYWFpw+fRp/f38aGxvFpkdLS4uAgAD09PTYt28fNjY2bNy4kY8//lh8du/d\nu4dMJuP06dP/ZgJTrGwVr8PZ2ZmhoSFeeeUVwf7429/+Rm5uLpcvX8bU1BQ9PT1KS0t/8/P4uzBE\nKUQpfX19XLhwgYMHDwoS8sKFC3F0dMTKyoqff/5ZxGMpkF2dnZ1MTU2xYcMGTp48SWRkJF5eXgDc\nvXuXjRs30tHRgYWFBY8ePWLv3r1ERkaipKTEkSNHBGpboXlQV1cnJycHmUxGU1MTr7/+Oq2trQAi\nDEZDQ4Pu7m4cHByoq6tDX1+fgIAA/uu//otNmzahpaUlGAbu7u60traSnZ1NVVUV7e3tSCQS3nzz\nTZycnLh8+bJgCirQ5/fv3xccQalUyltvvUVHRwdFRUXY2toSFRWFTCYTOZJtbW1MTU1x6NAh1q1b\nR2trK+Pj46Snp7Nw4UK6urowNzcX2HJzc3OWLVvGnTt3hLkqNTWVgoICgoKCSElJQVtbG0dHR154\n4QWys7MFDqy4uJigoCDMzc0pLCxk1apVQiw0NjYmyuhHjx7R09ODvb09ly5dwsXFBX19fTZu3Ii/\nv7/QZqxdu5by8nIh9fb09KS9vR1TU1P8/PxQUlLi0qVLaGpqMjg4SH19PRUVFZw+fRodHR0WLFgg\nmAu7d+/m5MmTvPrqq6SmpuLj48OOHTt49913xfvS3NyMra0tExMTHDlyRAzpHBwcKC0tpbGxkfv3\n7yOTyWhpaSEqKgotLS2cnJxISEjgiy++ID4+XqDtxsbG8PPzw83NjdWrV/Pmm2+ipKRERkYGmZmZ\nQtK8f/9+Jicn+f7776mqqkJfX58ZM2YwNDSElZUViYmJYlgNCNCwg4MDcrkcmUyGhYUFubm52Nvb\nMzo6iqqqKkeOHOHx48cUFhaSm5vL+Pg4crkcCwsLXF1d6ezs5NatW3z00Ue/+Xn8XcwUfvzxx732\n9vYsXbqU6elp+vv7CQwMREdHh4aGBvz8/Oju7harpsDAQBF0MTY2xubNm8nJyWHHjh0YGRnx4Ycf\nUltbyxdffCEekLGxMUJCQggICBB7+ra2NvLy8vDy8qKmpgYfHx9++OEH4WXIzc3FwcEBf39/pFIp\nw8PDJCYmEhMTQ1VVFYODg/z6669iQBgaGipYjr29vZw+fVpwF3V0dGhubhaDzidPnoh1lkQi4eef\nf2bZsmXCWPXs2TMmJydFinRmZqYwKpmamnLv3j0xkDU1NaWjo4Ph4WGRZOTq6kpgYCAzZswQEuDR\n0VEaGhqoqKjg+vXrFBQUMDo6+m8ZkQroyKJFi2hvbxcXhIeHB6qqqkL+XFZWxocffkh7eztNTU0s\nWLAAfX19XnvtNerr69m+fTtqampUVFSI0B1XV1dGR0fp7e1l9+7d/PGPf6S0tFR80B8/fszs2bOp\nq6vj1KlTzJgxA5lMhpubG3fu3BEEqqmpKTZv3iw2VZmZmcJ9+Ne//pWhoSFcXV15+PAhMpmMx48f\ns2PHDszNzUlNTaW4uBhra2v+8pe/IJPJ+Oabb4T6VKFWTEpKQiaTiYGqsrIyT548ESak2tpakpOT\nmT17Nm1tbSQkJLB06VJGRkY4duwY9vb2xMTEYGRkRGFhIW1tbcJF6uvry8jICJOTk5w7dw4nJycy\nMzNFgNDJkydZu3YtS5Ys4fbt2wQHB+Pu7s6VK1ewtbXl5s2buLm54ePjg4qKCuP
j4/T399PU1ISZ\nmRl2dnYsX76choYGsbGbmpriwYMHv2mm8LuoFBRru3v37tHW1oaVlRXHjx8XIS+KIAxA6Oy9vb2Z\nnp5m7ty55OXlERYWJoCdwcHBJCYmkpWVRWBgoDDzGBsbo6WlJRKDk5KSxArKysqKzz77jGXLlqGm\npsaVK1eYOXMmNTU1ZGdnC3oSPK9sUlNTefDgAV988QXj4+M8fPiQ4OBgioqKhH06ODiYp0+fMjw8\nTFtbG97e3rS3txMfH4+bmxsymUzEk8Pz/IujR4/ypz/9iby8PKytrTl69CgxMTEoKytz4cIFPDw8\nsLKywt/fHw0NDSoqKlBXVycuLg41NTWam5vFVN/Z2Znr16/T29uLXC7H2dkZFRUVsTI1MzMjKioK\nZ2dn4e93cnKis7OTp0+fYmhoiLm5OXK5nObmZuHb19bWJj4+nn/+85/09fXxhz/8gVOnTiGXy5FI\nJOTm5qKvr09GRgZvv/02KSkpFBYWYmpqyv3794mMjCQjI4Ps7GwyMzPx9PQUAq6uri48PDzYvXs3\nlpaWQlzm5+fHrVu3WL58OQsXLhTpUg0NDejr6xMSEsL4+DgHDhwQF9zq1av59ttvgeehLba2tnh7\ne1NZWYmTkxN79uxhYmKChQsXMjk5iZWVFQUFBbi5uQn83erVqzl16hQ6OjqUlZVhYmLCnTt3WLx4\nMfDcaKWnp4e2tjZmZmakpaWxd+9ehoaGePLkCXK5nJGREQIDA4mIiEBNTY3x8XH09fUZHh4mOzsb\nFxcX5HI5v/76q2BrLFiwQIB77e3tqa2tRVVVldraWiQSCfB8C9bb28svv/zCunXrBJvC1NSUNWvW\nEB0dLcRY9vb2v/l5/F3MFAAxmZZKpejq6rJr1y6cnZ05efIk165do7S0VGDB9PX1MTY2prCwkLS0\nNMrLy7ly5QoSiQQDAwM0NDSA5z2/hYUFL7/8Mq+99hplZWWCtLxixQreeustXn31VXp6eoS5Jykp\niYyMDObNm8elS5fQ1dUVLcKDBw+A58gwMzMzDh48KHBi/v7+JCQksG/fPmQyGVlZWcyePRsdHR3h\nBLSyshKtgIKopBjuwfN+eu/evaSkpBAaGsqjR4/YunUrenp6ODk5sXHjRqqqqkQgTFFREWpqamzc\nuJGsrCyKioooKSkRw8n79+8jkUhwcnIiPDwcJSUl7OzsUFNTA55Lip8+fcqpU6fQ0tKirq6OAwcO\nCAdidXU1dnZ2dHV10draSltbG+7u7hw+fBhbW1scHBzo6+vj7t27JCQkCBmvQgAEsGzZMrq7u4Hn\n9uUVK1bg6OjIrVu3+Omnn7CxsWFsbOzfcjrLyso4ePAgP/30E3/84x+ZMWMGRkZGxMTEYGBgwIMH\nDxgeHqa0tJSCggImJyexsLDg1VdfxczMjLVr12JkZMS9e/eYO3cuAMePH+f27dtMTU1hb29PTk4O\nM2fOJDAwkBdffFFcBvHx8SQlJbFmzRo2b96Mq6srM2fORC6XC5OTvr4+168/pw9KJBI8PT2ZP38+\n9+7dQ01NDRcXFzw9PSktLeUPf/iD2GCkpaVhZ2dHbGysYD588cUXaGtrI5fLcXR0FCvJiooKQRtv\namoiPz9f5G0q3uf79+/T3NzMG2+8QU5ODiMjI8TFxdHb28uDBw+QSqWsXr0aV1dXodf4LUdJMeT6\nv3mUlJT+77+I/3f+3/n//8mVy+Uz/0//0e+ifVAMaRQ8RqlUytOnT3F2dhbMg4yMDKKjo3F0dKS4\nuJienh6xrVAIk1RUVESpFB8fz6ZNm5BKpTg5OQk/v5KSEr6+vly+fBktLS0sLCwwMzOjvb2d9PR0\n1q1bJ+K2Xn31VZKTk5menubEiRP4+Phw6NAh3n33XXR0dNDQ0GB6epqxsTH09fVRV1dnampKzA5y\nc3PZvXs3Fy5cwMbGhpaWFpYvX05zczPZ2dkYGBiwfPlyysvLee211xgbG+ONN97Ay8tLJFN7eHhg\nbGxMfX09M2fO5OrVq4yNjQkqVWtrKx0dHYLLd+XKFbZu3UpzczM2Njbk5+fT0NCAi4sLOjo6YmVb\nUVFBZGQk+vr6lJWVibXnqlWrqKysJCcnB01NTXx9fXn8+DGrVq3il19+wcjIiNjYWE6cOIGenh45\nOTliB29sbExbWxuOjo7U1tby3nvvcfr0aYaHh5kzZw5JSUkEBAQIafKDBw/Q0tIiKiqK/Px8jhw5\nwrZt25BKpRgYGJCbmysQaKqqqoSGhlJYWEhwcDB///vfaWtrY/v27SKO/v79+9jb22Nvb8/IyAjz\n58/n+PHjfPXVV1y8eJG7d+9iZmaGj48PR48excrKCn19fRYsWIC1tTUff/wxc+bMobKykv7+fmJj\nY+np6REzqYqKCoKCgoiMjGT//v2cOnWKM2fO0N/fz+TkJLNnz0Yul/PTTz8JOpKjoyPbtm1j3bp1\nxMXFCT+IjY0Nly9fxsjISLhgN27cyAcffMCnn37Kvn37mJiYwMHBAUNDQzIyMpBIJISEhGBnZycS\nof7nilXRqgQFBfHs2TMOHDjAtm3bRKKZwh/yfzq/i0Hj559/vjckJIS6ujqBpw4ODhYsQkV5npSU\nRG9vLxKJBFNTU6qrq8Ue18TEhPr6eiESOnLkCJ999hlnz55FLpfz4YcfUlxcTH5+PoWFhfj5+VFQ\nUMBrr73GwMAAFy9eJCQkBGtra+RyObNmzaKqqoqGhgaam5vx8fFhfHycjIwMZs+ejZqampj+JyQk\n0NnZibe3N4WFhYSFhTExMYGTkxO5ubn09/cLv0JXVxeamprIZDLa2tp45513UFFRoaKigpiYGCQS\nCRYWFqiqqrJhwwZKS0vx9fVleHiY27dvixzMZ8+eiRVVZ2cnExMTFBYWsmbNGm7evElYWBgZGRnI\nZDJmz55NX18fvr6+NDc3k5GRwfz585HJZKSkpIiLVgG0VdhvjYyMMDc3JzMzk/HxcXR1dQXHUvGh\nVKzEFi1aJHQSKioq4uLW1NRkYGBAbGtycnJobGxEV1dX6P4VCtPs7Gw2bNjAmjVrcHFxoaKiAolE\ngre3twhvkUqldHd3Y2VlxZIlSwQuLi4ujv7+fiIjI+nr62Nqaorc3Fy6u7vJyclh0aJFaGtrI5FI\nOHv2LO+9955QpaampgrNho6ODnV1dfj7++Pu7k5KSgobNmyguLgYZ2dnBgcHKSkpYe3atRw9epTJ\nyUkcHR0FQPbJkydoamqiqakp1tQfffSRMNw5OTkJSfLY2BgODg5oamri4uKCVCqlsrKSqqoqdu7c\nKfCEV69epbe3F0dHR4yNjfnb3/6GkpKS+BmNjY08e/YMDw8PUlNT0dPTQ0dHh7GxMfbs2YOxsTEz\nZ87k0qVL/zk8hW+//XbvSy89p8XPmDGDnJwcLC0tKS8v5w9/+ANXr17FxMQET09PYVVVILn6+vpQ\nUVEhOzsbQ0NDxsfHsbW15dixYyxcuJ
CXXnqJ4eFhHj9+LBxoM2bMICQkRASAzp49m6VLl6KsrMzJ\nkydxdnamu7ubEydOsGPHDmFGmj17Nj///DNWVlYi1tzCwoJly5aho6Mjvkl8fHwEHdnU1BR3d3f0\n9PSIiYnBxsaG4eFhJBIJw8PDhIWFYWVl9W/9o7Ozs8hPqKysFGnUUqmUhQsXkpmZya+//sqcOXPI\nz8/H1tYWTU1NlJWV8fLyoqurC1tbW44fP46ZmRnDw8MoKSnR09NDdXU13d3deHl58fDhQ1asWCG2\nLSdPnmT+/Pnk5eXx+PGVcoKwAAAgAElEQVRjNDU10dHRwcTERKRPd3Z2IpVKuXz5skhRVmDOJycn\naW1tJSIiAnV1dWJiYkRKsq6uLqWlpezZs4cjR44wZ84cioqKqKmpEQOyoqIiFi1aRH5+Pm1tbQQF\nBeHu7o6amhptbW3IZDKmp6eF89TLywt1dXXs7OyoqKhgdHSU+fPnY2hoiFwu5+bNm8TGxnLt2jVq\namrw8/PDxsaGvLw89u3bR3NzM++//z7z589HIpGwb98+qqqq8PPzw8HBga6uLtra2igoKBDsBg0N\nDfHaExISePPNN8XmKiAgQMiJNTU1qa6uxtPTkxs3bhAdHS28PGVlZRgbGwvlZ3JyMqmpqZiYmBAZ\nGcmvv/7Kjh076OrqEhsQRQT90NAQK1aswM3NDT8/P/70pz+JKmLnzp309/czMDCAkpIStra29Pb2\niuTqpKSk/5xL4fDhw3s3b97Mo0eP8PDwYMGCBejp6fHw4UPKy8vFtF+hLnv11VeZmJggLi6O0dFR\nwsLC+Pnnn1FVVaWjo0NEdn/yySekpaUJHX9DQwPr168nMTERbW1tMjIymDNnjoCS7Nmzh+joaJ4+\nfUpWVhZKSkpiAq8Q1Fy5coXFixfj6upKXFwcp06dwtHRkbKyMrHeS01NZcmSJaSkpIghoYmJCbW1\ntZSUlODl5UVSUhJ6enrcv38fX19fHjx4wMsvv0xPTw99fX1MT09TUVGBqqqqsMtqaGiQmppKdHQ0\nVVVV6Onp4ePjg4mJCcbGxpiZmfHw4UMcHR0FeEOBhKurqyM3N5eVK1eiqamJrq4ufn5+1NTUUFtb\ny969ewWc1d3dXcSYf//996xduxYVFRVu3ryJt7c3AwMDQg1qampKXl4eenp6mJiYEBsby71797Cz\nsyMtLY3Ozk5BlPLx8eHu3buMjIwwOjpKa2srcXFxItzkzp07QrWppKREWFgYJ06cICIiAkNDQ3p6\neujp6RET+oKCAjo6OjA3N6euro6AgAAaGxuxsbHh9ddfJzIyElNTU65cucIHH3wgQnb/+Mc/4ufn\nx8qVK4Xuwc/PT2yxZDIZkZGRYtt0+PBhOjo68PX1FanSKSkpPHr0iICAAD7//HPS0tIEQn7btm2k\npKSIgN23336bOXPm4OjoKPBtOTk5Ivk6Pz9fYPmrq6vJycnBy8tLCMsUitK+vj7MzMy4e/cu7e3t\nmJubs2nTJjZu3IiXlxfHjx/nwoULmJubEx0dLcCvCor1jRs3/nMuhY8//nhve3s74eHhmJmZkZOT\ng1wux9XVFYlEgoqKCpqamqxcuZKhoSFOnnyea6uQ7KalpeHu7o6BgQExMTH09/eTmJiIhYUFnZ2d\n7Ny5k/z8fPbs2cP58+eRyWQsWbIELy8v2tvbGRwcpLq6mocPH9LY2EhPTw+7du0iLi6OyspKCgoK\n8PT0FCKRFStWYGtrS319PcHBwbS1tWFmZsbo6CgeHh40NTWRnp7O4sWLaWlpEWrJxsZG0fMmJycT\nFhYmdv8Klp/CQVlUVISBgQFKSkokJiYyNTUlFJuTk5O4uroKs0tMTAwtLS1kZGSIGYSvry/6+vrk\n5uby2muvoa2tja6uLiMjI4SGhopAHC0tLZ4+fcr09DRTU1NcuXIFU1NT3NzcsLOzE5VWUVERnp6e\nuLm5UV1dTWxsLMXFxSJRKjo6mps3bzI5OYmDg4Pom4eGhjAyMuKVV15BKpUyY8YMBgYGaGlpob29\nnQ0bNlBRUUF9fT05OTkiJ1MR7DM+Pk5aWhpyuRxdXV309PRIS0tjx44dREZGkpiYiIaGhlirKqq5\nTZs2oaqqiqWlJadOncLFxUWs6Kqrq1FVVaW3t5fQ0FAsLCz49NNPBdhndHQUb29v+vv7KSkpobS0\nlHfffZeHDx+yZs0aRkdHSU9PJysriwULFggqlZWVFUlJSTx69Ah9fX2KioqYO3cuPj4+DAwMkJiY\niJWVlbCwSyQS0tPTmTt3LnZ2dlhZWaGsrExycjIbNmwQis6rV69ia2tLfHw8s2fPZtOmTTQ0NAj0\nYFpaGsrKymhraxMQECD4joGBgfj5+WFgYEBiYiJ5eXn/OToFTU1N3N3dkUgkXLp0ibi4OBISEvDy\n8mJsbIzOzk5MTU3R1dWlqamJnJwcURqamJigq6uLpaUl6urqXLp0iXXr1gHPQah1dXUcPXoUV1dX\nuru7RajHiy++yNq1a9HX1xcMQScnJ5KTk5HJZOTl5XHo0CGio6MFDkyhd79586aAdsjlcurr65FK\npUxOTtLV1YWPjw9DQ0N8//33Al9WXFxMcHAwBQUFpKWlMTU1xZdffsmWLVuwtrYGwN/fn8rKStF+\nnDlzhk8//RRPT08ePHjA8uXLiYqKoru7G01NTRF1pzDNrFixgqKiIvLy8qiuriYoKIiPPvoImUyG\njo4Ovb29GBoacuHCBd5//30uXLhAbGyssIXr6ury4osvIpPJkEgkKCkpIZPJKC4uFq2ATCYjLCyM\ntLQ02traePHFF5mamqKyshJbW1usrKwwNDSkoaFBiMQUMXptbW00NDQQHR2Nl5cXLi4u3Lx5k/7+\nfkFz3rx5M15eXjQ3N/PTTz+xdOlS1NXVKSgoICAggK6uLubNm4eGhgbt7e1CTDUyMoKGhgbvvfce\n/v7+XLt2TSRmA7i4uKCkpISWlhaxsbGcO3eO/v5+zp8/j4WFBYaGhtTU1GBra8ulS5fo6OhAQ0OD\n+vp67O3tuXPnDgsWLEAmk/Hll1+KNjYgIEAY9yoqKigsLMTc3JyMjAx+/fVXsrOzGR8fJzExUbhh\nHRwcaG9vJzQ0lKKiIl544QVqa2vJysoSlmwF2yIwMJDt27cTEBDAuXPnUFVVJSYmhoqKCmGVV5jV\nPD09OXv2LE5OThgaGgLw1VdfMWvWLOEW/S3nd6FTUCDCZsyYQW1tLTdu3KCoqIisrCwGBgYICgpi\ncnKS69evU1xczKxZs/jqq68wNzenu7ubWbNmCbLMs2fP6Ox8HmuppaVFVlYWYWFhxMTECEXZnTt3\ncHV1ZWRkhJSUFEZGRgQg1MvLCz09PUHADQ8PZ+vWrWhqagpk2IIFC3j48CHj4+NcvHgRLy8vBgcH\nUVdXp7+/H0dHR1paWjAz+//Ye9OoKM90X/8q5hlkKGYoQAYREBABB1CUGDXOikOiSZziEG0TTTq
J\nSfeOaU3SRpNoEo1p00aNAziLRkURBBFknieRuZCpmOeh6nyw6zl7n/VfvXPWOh+613+/n1woFBa8\nz/s89/27r8taCF8cHBy4c+cOQ0ND4kypRterKT7u7u64u7uzd+9eNDQ0xJSbk5MT77//PvBizLyt\nrU2g5Xp6eigtLcXT01M84Xbv3k1XVxeHDx9mz549DA4OMjQ0xJ49exgaGsLa2hoDAwPWrFkjzEFS\nqRQ7OzuWLFkiosZJSUnk5uYyefJkzMzMKC4u5v79++jo6AgoSWRkpKBr9/T0iPiteoxdzY1Uh24s\nLS25dOkSkydPFpKaGTNmiPDS2bNnGRoawsHBgY0bN3Lr1i1SU1PR1dWlra0NLy8vbG1tSUlJEfHn\nwsJCFi5cSFFREZqamvj4+ODg4CAyDPCCp5CdnU1RUREHDx5k9erV1NXVsXXrVhYsWEB0dDQ7duwQ\nk699fX3U1NSIomtzczMDAwN8+umnvPnmm6SnvzAhXrlyhaamJry9vZk+fTrr1q0Ti/obb7whPBDN\nzc3iiFRWVsb69evR19fns88+Y9++fQwMDJCUlCTqXh0dHdja2nLq1CmMjIxISkpi1qxZBAUF8d13\n31FSUkJYWJioQZmamuLi4sK6desICgpiwYIFODg4sHDhQnR0dISO7vdc/xLHh1OnTn3q5uZGbW0t\no6Oj+Pn5oVKpmD17tqh2q4Uk6rl7XV1dUQgzNDRk+fLlJCYmMm/ePB48eCBafn/605+4c+cOCQkJ\n+Pn5sW3bNnJzcxkaGkJPTw8vLy/Onj1LUFAQeXl5xMXFkZKSwuDgIG+//TYymQw/Pz9kMhm7du2i\nu7ub8PBw7OzsqKurExBYdUuqs7OTrKwsEfp55ZVXxHHl/v37YnKtq6sLFxcXcnJy2LRpE2fOnGHs\n2LHo6+vz0ksvUVNTg7GxMTKZjKysLAHgiIuLw9ramu7ubmJiYpDJZHh4eKCpqYmhoSFZWVmMjIyg\nUCgICAgQKbicnBzRcq2rq8Pc3JzCwkIcHR15+vQpUqkUTU1NMWqblZVFSEgIBgYGxMbGolKpsLOz\nw8vLi3v37glDUX5+Ps3NzcK7kZ6eTlNTE62trfT19dHV1SWKe1KplPz8fA4dOiRafUqlUnQy1AAc\npVIp8Orq7fNrr71GTk4OhoaGODs7CyaBetelUCioqKggIyODjo4OkpOTmTJlCg4ODty4cYMvv/xS\nzDSoqVFdXV1C+acmb1VVVTE0NCSYC42NjVy8eJHNmzeTk5PD5s2bOX36NGvWrCE6Opr169eLxScp\nKYmuri6+//57TExMcHV1RVdXF11dXS5dusS7777L06dP8fX1JSIiAqlUyqVLlwRG0MrKSigL1IEo\nfX194bjo7Ozk7NmzvPvuu3R3dyORSLCyshIj635+ftjb2wtadGNjo5hZsbS05MGDB/8+xwd4AZWY\nMWMGiYmJZGZm4u7uTkFBgZgBmDVrFt7e3piYmHD79m0KCwtRKBS8+eabZGZmUl5ejoaGhnjC//DD\nD8ycOZMHDx5gZmaGt7c3cXFxlJeXExwcjEqlIjg4mBMnTnD8+HGGh4e5desW06ZNIzU1FWtra8Ee\nuHHjBiYmJqxfv56//OUv3Lhxg6GhIbZt24ZSqRRRZRMTE7q7uwVv0tDQkLq6Oq5cucLjx4+JjIxE\nqVSKrbqFhYXwVwJ0dXVx9+5dnJ2dmT59Oo6OjsTExLBmzRqys7MxMTHB3Nycy5cvY21tTUhICDk5\nOSKH8ejRI2xtbSkqKiI8PFzwG9XIuYqKCiwsLHB3d8fBwYH29nbOnTvHjBkz+O233zA0NBTKtEmT\nJjFr1iwOHDggYLfqpKh6u2tqaoqOjg4TJkwgLS2NU6dOoVKpGBoaYsKECZiamnLr1i1mzZrFli1b\naGlpoaurC6VSyZ49e/Dz80MikdDb24uuri7wgnuhr6+Ph4cHIyMjtLe3M3v2bEGytrS0xMbGRiRM\nVSoVXl5e+Pn5UVZWxsqVKwVf8+WXX+aLL74AEIVINf+irq6Ouro6urq6iI6OFn5Le3t76uvr2bx5\ns3iYLFy4EGNjY7S1tUX7cPbs2QAcOXKEtWvXolQqWbRoEffu3cPU1JSenh7mzJmDgYEBY8aM4bvv\nviMnJ4cbN26IYTapVCq+bvU/JLXqoanw8HAhh01ISMDa2poLFy6gUqkoKioSUhulUsnjx4/p6ekh\nLi4OFxcXTExMePLkCUZGRpSUlAjf6e+9/iV2Cl999dWnH3/8MWlpafj6+rJ582b6+vqEyHX37t3Y\n29sL0eq0adN4/PixeGpoa2uLPntVVRWFhYWCRVhQUEBYWBjJycloa2sLgq+awZeZmcnEiRNJSkqi\nrq4OR0dHjh07hoaGBs3NzYSFhaFSqbC3t8fQ0JArV67w0UcfMTg4iLu7O62trRgaGoq2Yn5+PnZ2\ndly8eFFYmJYsWYKFhQWFhYXExsYya9YsQc9RE6USExPZuHEjPj4+ODk5iUEeS0tL5HI5ERERgleg\nnvNXqVQiG6HuMDQ2NlJbWyvQ7c3NzYKArSY4ubm5iaKqujipjkSrz9Wenp6MjIzw+PFjjIyMeP78\nOX5+fty7dw8DAwNu3bolzMtq7bk6QNTY2MicOXMoLS0VpqM7d+6QmJjIpk2bkMvlok7g5eXF1KlT\nSUhIIC8vj4MHD9LQ0EBxcTETJ05keHhY1GXmzp3L6OgoJiYmnD59GoVCgZ+fnzhDW1paCjiqWsii\ntoK7urqiVCoZHh7m5MmTpKeni4lRNTeyq6sLf39/ka1QS2jV6Lri4mLhpti/fz+5ublcvXoVHR0d\n0Vlat24d3t7ebN++naSkJIHNHxoaEmr7Q4cOsX79en777TeWLVvGjRs3sLW1JS8vj5ycHPLz89mz\nZw+pqak4OTmJmRq1zu6VV15BIpFQW1vLzZs3yc7OpqWlheHhYUxNTSkvL8fd3R1LS0v8/f3p7u7G\nwsKCGzdu/L+BrEgkEkeJRJIgkUiKJRJJkUQi2fmPj38qkUjk/4eJWv05H0kkkgqJRFImkUhe/m+/\nCQ0Nent7yc/Pp6WlhStXriCXy4Vlp6ysjLi4ONzc3PDw8ODYsWNERkYilUrR09MTvPyCggLCw8MZ\nHX0hubaysmJwcJD29nYUCgXu7u6Ympqyfft2bt26RWZmJlu3biU5OVmctffv34+fnx/Lly/HwcFB\nkIAKCwtpb28HXpz37O3taW1txdjYmCdPnnD9+nUxfq2hoUFnZydubm5cuHCB+vp6vLy8CAwMxNfX\nV7S3Hjx4QFtbm3BJOjg4oFAo0NbWFi2o/Px89PX1uXz5Mo2NjVRWVpKQkICenh4KheK/kJOmTp3K\n2LFjWbRoEd7e3nh4eODr64uPjw8VFRXU1NQQFhaGoaEhK1eupK2tjdDQULKysggLC8PBwYHw8HBh\nWrp06ZLIXTg4OCCXy9m5cycRERFMmjSJRYsWERISws
OHD8WAV0NDA6+++ird3d0MDAwwZswY2tvb\nMTQ0ZMKECXR0dFBUVAQgho6ampro6OgAEIg89cI3Y8YMEfgyMDBAKpWSlZVFZWUl4eHheHh4YGNj\nI87j9vb2jI6O4u/vT2ZmJsbGxsALtkZUVBRNTU1ERkYyY8YMdu7ciY2NDUVFRRgbG7Nx40axiykr\nK+Pq1at89tlngnJ0//59oZpXk70/+ugjbty4gbGxMZMmTRI38sjICGvWrEEulxMdHU12djbx8fEk\nJyczdepU+vv7sba2pr29nT/+8Y9oaGjg4eGBt7c38AI8tGnTJmpqapgwYQLZ2dkYGhryzTff0NXV\nxfPnzxkZGSEtLQ0bGxt8fX2ZMWMGCoWC0dFRMjIyRHhudHRULOC/5/o9hcYRYLdKpfIGQoG3JRKJ\n9z/+7pv/bKIG+MffrQLGA3OAoxKJRPOfvYCadGtra8uFCxdYvHgxAwMDomNgaGjI1KlTGR0dJS0t\njd7eXkpLS4X70N7eHpVKhZaWFunp6ULS6eTkxIcffiiKad988w2Wlpbcu3ePkydP8v7772NtbU1x\ncbFAi23bto3169djaGhIdXU1M2bMwMXFhWPHjok31szMTJzhk5OThbFHS0tL8AOmT5+OpqYmgYGB\nWFtbo6enx6RJk4iIiKCpqYmDBw+ycOFCpk2bJvruEokEmUzGw4cPycvLQyaT0d7eTllZmchsODk5\nsWDBArq7uykvL+fRo0cYGxuzdetWOjs7SUlJ4enTpzx69EiITNVSkEmTJondQ2trK8uWLcPHx4e3\n3nqLV199lZ6eHmxtbcnIyEBfX59x48YxOjpKQEAA7u7uODs7k5GRIbBfMTExpKWlMX36dC5dukRp\naSm7du1CW1sbOzs7li9fzrhx40SnAl4sBDNnzmTZsmV88803lJSUiIEmQCQ1MzIy6O/vF0BdqVTK\n8+fPaWtro7e3Fy8vL3p6eoSUVW38Vp+pq6qqRIwbYP78+aSlpQkh7dy5c7l37x6tra3s3buXpUuX\n8uGHH6KhocE333yDtrY21tbW7N69Wxif5syZIzBrVlZWAOzYsYP6+npcXFw4cuQI/f39TJ48mdHR\nUZ4+fcqvv/6Kj48PGRkZvPnmm1RWVhIQEEBjYyMrVqzg+vXrHDhwAAcHB6qrq4V9+/Dhw5w6dYqA\ngAC2bdsmOlrnz5/n9OnT3L59m/Lycr755hvs7OwwNzdneHiY+/fvY2RkJDSBxcXFgunxe6/fo6J/\nrlKpsv/x526gBLD/J5+yCLigUqkGVSpVFVABBP+z1+jt7WX8+PH09/fj7+/P6OgoH3/8MTKZjIKC\nAmprawWe28rKCgsLCxobGzEyMkJfX5+zZ88SEBAgbFJ1dXXAC5dkR0eHyH2vW7eOY8eOERQUxMOH\nD0lNTeX58+f09vbyww8/YG5uzsjIiKA+u7u7c/LkSby8vNi+fbsAt9bW1opqeFhYGMePHxdpyoGB\nAXJzc0X+QK0XKy4uprq6GhsbGzQ1NQkODhZHHPUEm7qD4eTkxOLFixkcHGT+/PlcuHCBS5cuCR5j\nb28vq1evxtvbGzc3N5FA3Lt3LxEREYSEhODh4UFLSwuDg4Pi67q4uGBlZSWEsHV1dXzwwQeii9LU\n1ERZWRnz5s2juroaS0tLBgYG+OSTT9DQ0ODgwYOMGTOGq1evUlJSgqOjo9iFPXv2DJVKhVwux8/P\nD7lcTmVlJZ2dnWJxefnll+ns7BRpTXVsvaysTPwuqIuerq6uBAUFiYxKamqqkLw6ODiwbt06VqxY\nwdixY1m1ahU9PT3U19fT19dHcHCwmG5VJwxzcnJEq1TdIs7Ly8PIyAiZTEZ0dDSffPIJenp6aGho\nsGTJErZu3SpAtUVFRdy5cwcXFxdaWlrEg2fnzp2oVCo6Ozu5cOECfX19Iirt6OgoRDwymYzR0VF2\n7drF2rVrMTMz4+jRozg5OQlLVmRkpGhbbt26VRixtbW1GTduHG+88QZKpZL58+dTW1tLXV0dpaWl\nuLi4CB6DelbC09OT69ev4+HhIWoOv/f6v2pJSiQSGRAAPPnHh3ZIJJJ8iUTyd4lEMuYfH7MH6v7T\np9XzzxcRtLW12bZtGzNnzmT37t309fWxf/9+BgYGWLdunejxVlVVceLECcLDw5k1axbd3d3Ex8dj\nZWVFeXk5BgYG+Pv74+fnByDAljt37uTVV1/l/PnzfPbZZ1y7do2SkhJSU1M5deoU06dPFxbp/v5+\nwb5zcnLigw8+IDMzk5CQEFasWPHiTdPQwNTUVAAxt23bJjyFycnJ2NjYEBERgbW1NRs3bkQul6Oj\no4OmpiYdHR34+PgACNaeuoBXUlJCQkICo6OjaGpqoqWlxbNnz/j222+JiIhg0aJFZGVlERMTw8aN\nGzE0NERTU5OzZ89iamrKF198gYeHB9evX+fJkyd0d3fT1NTEw4cPMTU1FQW2tLQ0dHV1qa2tJSoq\nStii1KPZXV1d2NjYCKBLaGgoqampLFu2DKVSiampKYsXL6azs1MwKpydnenp6eG7774T3MOmpiZi\nY2Opr6/H1dWV0dFRent76e3t5dChQ1RUVLBs2TIWL17Mxo0bAZg9e7ZwPTY2NpKZmYmXl5cIBKmr\n8Eqlkp07d+Li4sK0adPUYFJxvr59+zY2Njbs2LEDeLHYlJWV0dvbS1tbG/Pnz8ff3x8zMzPmzZuH\nkZGRcFhKJBJxBLW2tsbW1paEhATeeustSktLMTY2JiUlBYCNGzcyb9484uLi+POf/4xUKmX//v3E\nx8dz6NAhBgYGmD59Oj4+PoIlqa+vT01NjWithoeHc+7cOW7dukV5eTnwgs9pbGzM2rVr2bRpE8PD\nw5iYmDBlyhS2b9+Oi4uLqI/Z2NgIyJB6t9rY2EheXh7Dw8Noamri6+v7u+/z370oSCQSI+Ay8I5K\npeoCjgGugD/wHDj0u1/1xdd7SyKRZEokkszu7m4SEhK4evUq5eXlPHjwABsbG2FTUod2srKyuHPn\njoBTTJo0CU1NTRYvXsy8efOYO3cut2/f5q9//SvwApsWFxcn4J+6urq89tprODs7Y21tjVwu5733\n3kNfXx8rKyuxZXVxccHLyws3Nzfxw5NKpeTl5QEvchVWVlbU1dWho6MjtG1dXV3s27ePc+fOoaen\nR2lpKfHx8QwPDzNmzBgaGxtpa2vDzc2NnTt3IpFI8PDwYGBgAICHDx8yefJkJk+eTFpamoCxXrp0\nibCwMAHKGB4e5q233mLy5MkEBwezZ88e7OzssLGx4dNPP2XSpElIJBK6u7vp7OxkzZo1uLi4sGbN\nGkEB2rFjBz///DMWFhZcu3YNPz8/pFIpNjY2NDY2IpFIuHPnDh0dHZiZmVFeXk5sbCx5eXlkZ2dT\nW1vL7Nmz0dXVpaGhAWdnZ7y9vTl06BC6urrCkxgcHEx7e7vAlO/atYuMjAxkMhkSiYRr166hUCg4\ne/YsAIWFh
fzwww/CxXn69GmcnJyYO3eumBcJCAjgwoULog5x8+ZNGhsbaWxsxMPDg08++YRJkyah\nVCqFT6Kjo4MVK1Zw7tw5Nm3axMOHD3F2dqauro7ffvuNHTt2sG/fPjo6OjAwMCA6OprCwkKqq6vZ\nv38/rq6uPH78GIVCwdWrV3n69CkA2dnZVFZWEhUVxcaNGykrKxPmLFNTU5YvXy6UAYcOHaKkpITv\nvvuOrKwsBgYGkEgkQqdnY2Mjvu68efOws7PDxMSE/Px8kSBVg3n7+/tZuXIldnZ2gvC1atUqIZ7N\nz8/nwYMHODg48Pbbb/9fGaJ+16IgkUi0ebEgnFWpVFcAVCpVk0qlGlWpVErgb/zvI4IccPxPn+7w\nj4/9l0ulUv2kUqmCVCpVkJWVFYaGhoSGhqJQKHj77bcJCQmhrKyM7Oxsrl69yujoKOPGjeOPf/wj\nsbGxWFhYkJ2dTVhYmJCmSKVSoqKihIr+/PnzzJ07l+LiYiFa8fb25t69e/zpT3/ilVdeISUlRdCR\nw8PD2bBhA66urnh7eyOTyRg/frwwGOXm5qrfD6ZNm8bSpUtxcXEhPT2dmpoaIU5pbm7m0aNHQgBa\nWlrKpEmThOpNqVSSnp5OT0+PKF7BC3hLd3e3EIKqI6qOjo64ubnx0UcfkZycTFpaGt3d3WhpadHX\n14ezszMFBQWkpqby9ttv8/333wvvwrx580hISODrr7/G19dXDC9t2rQJR0dH8vPz8fHxETMDo6Oj\n9PT08Ouvv6Kjo4Ovry9tbW3MnDmTmJgYnJ2dMTAwEGPjUVFRYpzX2tqauro6bt68yaVLl4iNjRX1\nljFjxvDw4UMqKz9vMTMAACAASURBVCvFL6h6mKmhoUGAWxMTE3F3dxdejF27dol2skQiEQLhiooK\nXF1daWpqwtzcnICAAACio6P585//jJ6eHsHBwSIpOW/ePPLz81m1ahXa2tqYmZnh6Ogopmijo6OR\nSqVoa2uTnp5OUVERR44c4erVq0JQHBoayoQJE1i4cKFo8fn4+JCamsqxY8fIyMggMTGRnp4e0e6E\nF92ZnJwcDAwMiI+PJzo6mry8PH799VdsbGx4/fXXBcfRw8MDeFEDyc7OJi8vj1mzZon358SJExgb\nG1NeXk5MTAzHjh1DKpXy9ddfExMTg5ubmzCqq+PVDx8+/H+7U5C8yEf+DJSoVKqv/9PHbf/TP1sC\nqIe1bwCrJBKJrkQicQHcgfR/9hoqlQqZTMZLL72Enp4e169fp6SkBE9PT4H+bmlpET/kl156iTlz\n5ohoqbq6bmhoyK1bt0SXoL6+HplMxsqVKwX919fXl9mzZ/PBBx8QFxdHbW0tkyZNwtbWlpqaGnET\nm5qaCrdlU1OTsFED2NnZ8cMPP9De3k5fXx8+Pj7Y2tpSUlKCUqmku7tbeB+uX7+Ou7s7crkcCwsL\nKioqkMlkFBcXExUVxbx580TqTj2w09HRIZgA6t3Gt99+S0pKCoGBgcyaNUvMQMjlcjQ0NCguLqav\nr4/c3FxmzpzJihUr0NPTIz09nSVLloi5gP7+flasWEFhYSGenp6oVCpyc3PJz8/n+++/F2k8Hx8f\nKisr0dTUxM7ODm1tbfbu3UtnZydRUVHiiRgTE4OhoSGXLl3iT3/6E8XFxTx79ox58+ZhYGCAs7Mz\nKSkp9PT0iI7G7NmzWb9+PW1tbeIIpj7yffTRR8yePZuenh7Gjh3LkSNHqKys5KWXXmJ0dBSlUsng\n4CAODg74+PhgaGjIihUrsLa2pqioiLa2Ni5cuMDFixepqakRvz+AQMAZGhpiYGAg+IfGxsaMjo4K\nzsLz58+ZOXMmfn5+bNq0SeRjbty4wcOHD2lvbxe2MJVKxZYtW7C3t8fR0ZGQkBA6Ozv5+OOPiYyM\npLW1laioKHR0dIQlXD145unpyZgxYwTD4csvvxThvJSUFNEOP3z4sOCSLl68GG9vbzZt2oSlpaVw\nmVRVVfHuu+9y6tQp7t69y/Tp03F2dsbGxobe3l4mTZr0393q4vo9O4WpwFpg5v/RfjwgkUgKJBJJ\nPhABvPuPN6kIiAGKgTvA2yqVavSfvYCOjo6wHbe3t/Po0SNycnLo6Ohg6dKlyGQyfH19hRzj1q1b\nNDQ0sH//fhQKBR0dHahUKhITEzE1NRUG6aamJuRyOTk5OSQkJAhq7927d3n69CmampoolUqKior4\n+eefuX79Ort37yYrKwupVMqzZ88ICgoSpiH1ea+wsJDIyEg0NDS4dOkSf/zjH0XRTyaToaGhga6u\nLkqlErlczsmTJykuLubYsWM4ODiQmZmJoaGhsDOrsyLjx4/n6dOnvPPOOxw4cICuri7MzMxYuHAh\ngYGBpKam0tbWRnd3N11dXRQVFeHv709VVRWLFy/G3Nwce3t7se3s7u6mvb2drKwsXnnlFWpqaoT/\nwcfHBxsbG54/f05AQABhYWHU1NTwl7/8hYCAAIaHh8XAUW9vL3p6ehgYGODp6UlraytXrlwRU4Cd\nnZ288sorhISEoFKpRKjrhx9+YNu2bUyZMoXu7m4WLlyIq6srBgYG7N27FxMTE0pLSxkaGuLVV18F\nXmzH33nnHTw8PLC3t6egoAALCwvMzMwwNDTEw8ODjIwMgZR7+eWX6e/vF8yL4OBgxo8fT2RkpJDG\nwIvjQ3d3N6GhocydO5fKykpkMhmTJ08mJCSEhoYGxo0bx5w5c1i0aJFYRPfv38+ZM2eIiopiw4YN\nbNiwQZjLAGGAOnz4sOiahISEkJiYSHNzM8XFxRw9epTU1FS2bNnCnDlz2LVrF8eOHWPJkiVCLxcQ\nEMDg4KBA/unq6pKYmIinp6eIbdva2mJpaclnn33GrFmzGD9+PEqlUigOOjo6iIyMxMjIiObmZmxs\nbLhz5w6nT5/+Hbf5/77+20SjSqV6BPx/TVP89k8+Zz+w//d+E93d3ZSVleHo6Mj8+fNxcXHh119/\nFSOf6sGa2NhYvLy8RIvn008/FQLVuro6mpqaCAgI4Pbt28CLX5hHjx6JKbqSkhJGR0fp6OjAxsZG\nCGbT09OFki4yMpKCggIqKio4efIkfX19BAUF0d7eLnYga9eu5dKlS2hqahIWFsbhw4dxd3fn8uXL\nxMTEiAUjIiKC0tJSXnvtNRQKBRMnTqS3t5f6+nrhNXRzc2PevHmkp6eLfrOZmRl5eXnCuq2joyNy\nALa2tiIpuGHDBtF6u3HjBhYWFty6dUsk73R0dOjp6cHPz4/Ozk4WLFjApUuXOH/+PF9++SUWFhYE\nBATw6NEjgoODcXV1JT09XajjtLS06OnpYeHCheTl5YmMgY+PjyBFqWGu9fX17NixA5lMhoGBAWfP\nnsXExERAbx0cHOjo6EAul6OpqYmnpyc1NTW88cYbNDc3iyd6RESEiKh3dXVRX18vWBbR0dGUlpay\nfPlytLW1GR0dFbzKy5cvMzw8zOzZs6msrBRpvhUrVvDLL79gZ2dHS0
sLJiYm7Nixg59++okvvviC\n7u5u0tPTsbW15YsvvmDJkiUcOnSIqKgoGhsbhVy3qKiIlJQU8vPz8fDwEM6P0dFRzMzMOHXqFJmZ\nmcyZM4eSkhJMTU1ZsGAB7e3thIaG8vrrryOXy8VA2qpVq7h9+zYqlYqcnBxCQ0NF90d97dq1i+PH\nj+Pn58edO3dob2/HwMCAl19+mdraWiQSCT4+PtTU1Ihdoxo6ExUVxY0bN/D29mblypXcvXv3996O\n/xqJxuPHj3/q5+dHc3Mznp6epKWl4eTkhJ6eHnl5eYIT4Ovry08//YSNjQ02NjZMnDgRLy8vPv/8\nc9asWYOHhwfGxsZUVlaSmZnJtm3baGpqEkr48ePHY2Jigra2NqtWrSIgIIDr16+zadMm+vv7MTIy\noqenBwMDAxEuUa/2paWlTJs2jTNnzjBv3jxSU1PFzMPUqVOxtrbGxMSE2bNnCxJSe3s7EomEwMBA\nHBwc8PDwIDo6WlCk1GlIa2trLl68SHBwMLq6uuTl5REZGYmbmxtubm4MDw/T3NzM0NAQUVFRYkBo\n7NixuLq6cvnyZSHKUWv1ampqyM7OZtasWcjlcmxsbPj5559ZtWoVEomE+fPn8+zZMyoqKhg3bhy6\nurqYm5sLw3dDQwPjx4+npaVFwFmGhoZwcXGhpKSEa9euiSeXtrY2zs7OpKenEx8fT29vL0lJSQQF\nBWFgYEBHRwdJSUm0tbUJuKmxsTGBgYFcvnwZLS0tbG1tuX79OlOnTkUmk9HQ0EBBQYHQofX29mJi\nYoKbmxsHDx5EoVAwdepU8vLyhMGrsbGRhoYGtLW1kclkdHV1YWJiwuXLl3F2dubdd98lOTkZOzs7\nGhsbefr0KQkJCXR1ddHU1ERUVJTYuajnbUxMTFi+fDnl5eUUFBTw1ltvkZmZiZmZmehmffvtt+zY\nsYP29nbhHdHX1+f8+fNoaWlRX1+PUqnEw8ODlJQUBgYGiI6OZtGiRXR1dREQEMCcOXN4+vSpYGbo\n6Ohw8uRJAgMDGT9+PE1NTYJgpampycWLF6mrq8PS0hJNTU3x/jc2NmJhYYGhoaGI2hcUFPDSSy9x\n+vTpfx+ewldfffVpUVERUVFR5OfnM2PGDJRKpdCLx8fHExgYSH5+PuHh4WhqajJz5kyuXLmCl5cX\nWVlZuLq68uTJE0pLS1m4cCFnzpzBwsICiUSCra0tjo6O7N27l8jISKysrOjr6yMiIgJ7e3u8vb1p\na2sjLy+P0tJSKioqmDZtGn/961+ZNGkSlpaW2NnZcfv2bfLz8wkNDeWNN97Ay8uLO3fu4OjoKLRd\n/f394pffxcWFJUuWkJycLH5IS5cuRUNDQ3gxv//+e9ra2igsLOSdd94hPj5eDESpWQHGxsY0NDTQ\n1dXFuXPnGDduHJ6enhQXF2NiYkJCQoKYUhwYGMDZ2Zni4mLmz59PTU0N5eXldHV1ERISQn19Pb29\nvTx+/FiMgz948ECMYu/atYvZs2dz7do1xowZI5Klo6OjrFmzhsrKSgYHB9HT06O5uZm5c+eSl5cn\nbqQFCxZw584dNDQ0yM/PJycnBwsLC7y8vIiIiGBgYAAfHx86OjpIS0sT2HsLCwt+++03Zs+eLexO\nalGveqhszJgxuLq6EhoayvPnz8nJycHT05O2tjby8/Npb29n2bJlVFVVoampyejoKI2NjSQnJ+Pg\n4CCizupC7sSJE3F3dxcV/8OHDyOVSnFycqK+vp4tW7aILX1JSQn+/v709/cLR+VPP/2Ep6cn06dP\nJyYmhr6+PvFz3LZtGxEREVRUVPDGG2+IduqMGTMoLy9n5syZ6Orq0tLSQkVFhZDl+Pn58fe//11w\nP9rb2+nt7UWpVJKWloZCoaCpqUkkLPX09KisrGRgYEDE5xUKheArjB07ljFjxhAbG0tubu7vWhT+\nh+b8P9f/XP//uf59aM4uLi588sknaGlp4eHhQWFhIWPGjKG8vFy4DdUhmC1btggclVoU4+zszJUr\nV8TuwsrKivfee4933nlHQFDVPe6NGzdy5MgRYa1etGgRfX19WFlZ0d7eLoSpHR0dODo6ijy7m5sb\nSUlJ/Md//Aeff/65oBjdvHkTX19fnJ2d8fPz4+nTp1hbW1NbWyuYAmoSsqOjo1CwL168mNbWVpKS\nkpg2bRqvv/46X331ldDAqcNFGzZsoKSkhPv37wvHpTpNqaGhgZWVFceOHUNPTw9fX1/Mzc3p6Oig\nvr6etWvXiieNpaUl3377LYsXL8bExARDQ0Oys7NFAU+dglMHX2JiYtDV1UUmkzE8PEx9fT0GBgZU\nV1eTlJTEqlWrMDMzw9jYGKVSSXt7Oz4+PtTV1aGtrU1tbS3BwcGUlpZiYWFBfn4+2tra6OnpCWCr\n2ndZVFQkzuU///wz7u7udHZ2EhcXR0hICNnZ2SxYsIDTp0/T2dnJlClTyM/PZ/LkyVy8eJF3332X\n2tpasfsyNTUVPf/S0lL27dvHX//6V/z8/Dh16hQ+Pj4CRHvw4EHee+89Ibvx9fUV3Mnjx4/j7e1N\nSEgIqampos7R09Mj5mSOHDmClZWVyLoUFhaSkZHBhQsX2LlzJ0ePHmXPnj2itrN+/XqOHTvGW2+9\nxZ49e1AqlWLM3cDAAFtbW6ZOncqmTZu4efMmkyZNYu3atTx48EBE8v39/enq6kKlUjFlyhSqq6tp\namoS8fP09HSOHz/OuXPnRNr0vffeE92z/+76l4CsqA1R9vb2fP3113h5eVFdXU1PT49I+fX09LB+\n/XouX76Mh4cHwcHB5OTkoKWlxaVLl7C0tGTy5MlisApejGM7Ojoybtw4Fi9ejJ+fH9HR0aL3vnDh\nQiorK2lubhbFzGfPnhEVFcWuXbvIysoSHYBff/2VqVOnAi9izup25saNG8nIyMDU1JTKykoSExO5\nd+8eQUFBQnn+5ptv0tvbS3NzM2vWrMHKyorc3FxcXV159913OXnyJPAilFRSUkJISAiBgYEolUqO\nHTuGvb09Li4u7Nu3Dx0dHR48eMDIyAglJSUUFRXh5OTEwoULkclk6OrqUlBQgKOjIxoaGmIcG+D7\n778X51YjIyMGBweZOHEioaGhjB07lubmZpqbm2loaGDRokW0tbVRVFTE5MmT8ff3Z2RkhMzMTGbO\nnImDgwPm5uYoFAru37/PpEmTkMlkZGRkoFQqxVFG3RY2MzMT38+xY8fIzs5GoVDwyiuvEBoaipeX\nFwAXLlwgMTFRtBizsrLYt2+fCKn5+/ujr69PW1sbMTExbN26VbTmjI2NsbOzIzMzk/3796Ovry86\nUVFRUeTm5pKenk5ERAQxMTEkJibyl7/8hZGREd58801mz55NXV0dsbGxWFtbs2jRIiIjI7G2tmZ0\ndFQQozds2CDISz4+PqLDU1NTg7m5OePGjWPz5s3Y29uzatUqHB0dcXd3R6FQ8MEHH/D8+XNiYmKY\nMWMG48aNE6zJhoYG0eEyN
DQU/yd1G1jNzUxKSmLhwoV0dXXR2tqKlpYWXl5eYubC2dmZM2fOUFlZ\nibGxMcHBwWzZsuV334//EjWFAwcOfOrl5YW1tTU9PT14eXkJxmFbWxvjxo1jcHCQR48e4e3tzcjI\nCFZWViQmJmJgYMC2bdvo6ekRc/xqDsHUqVOZNm0aJiYmtLa2UlxcTENDg8jGt7a20t7ezuDgIFKp\nFA0NDfT19VGpVFRVVeHp6SlIyeopzOTkZMLCwrCwsBC+BT8/P4qLi3n06BEffvghfn5+VFRUUFlZ\nyb1793j77bfR1NSkv79faOHVicGDBw/yhz/8gWvXrhEZGYmDg4NIu0VGRgrIzMyZMwkPDwcgNDRU\nGIZ1dHRYvHix6LLk5OSI4FJQUBDx8fFYWlqSlJRESEgInp6e3L9/H11dXWbMmIGVlRVjx46loaGB\nwsJCAgMDhcciPDyczs5OYmJiCA0N5dSpUxw5cgQ3NzdiYmLw9/ensLBQ1COysrJEF2FgYIDS0lLc\n3d2prKxkzJgxbN++Xbgu1f344uJienp6mDdvHidOnGDp0qUsWLCAp0+fEhQUxKNHj8TTuaysjKKi\nIkxMTAgLC6OtrQ2FQsHg4CDe3t50dXXR1tYmblxXV1daWlo4ffo0EydOJDIyEjMzM3Jycpg7dy5S\nqRQLCwtqa2spLi4W3pCPP/5YJE/PnTtHZWUlkZGRgoOpUqlIS0sjNzcXBwcHDAwMsLKyoqioCHt7\ne2xtbWloaMDX1xe5XI4aSuzn50dLSwtlZWXcvHlTDE0ZGBjg6uqKtbU1+fn5pKSk4OjoiEQiobCw\nkJUrV/Lbb7+JxVBTUxMdHR0CAgJ49uwZxsbGJCcnU1ZWhlQqFUwLHx8fjh49yrx585BKpTx8+PDf\np9B44sSJTzs6OggPDxfIrpaWFgG7UGvcz549i5mZGXPnzqWiogJLS0s+/vhjpk+fjqmpKRkZGaSn\np6Ojo8OtW7f44YcfSE5OFj+kgYEBlixZwrfffou3tzeFhYWEh4czMDDAlStXMDAwYOrUqaI37uLi\nQkFBAdbW1nR2djJt2jR++uknfHx8cHR0RFtbm/b2dhYtWkR8fDzvvvsuzc3NxMXFoauri5eXl/BJ\n9vf3iy3t1KlTaW9vp6Ojg0WLFolWl6enp+hoeHt7k5WVxUsvvcTTp09RKpWiAp2WlkZra6sYMZbL\n5aJwFRkZiZeXF52dnRgZGdHW1iYcCXV1dTx79owpU6bQ3NwsXqurq4vLly8za9YsKisrCQ0NRaVS\ncfDgQWJjYykvL8fPz4/du3fzww8/YGZmxpgxY9DS0sLFxYWuri5RaFy3bh19fX24uroKxPj8+fMZ\nHBzk2bNn3L17V4yVj46OMmHCBOLi4qiuriY9PZ1Zs2aRl5eHt7c3d+7cwcfHh6tXr2JjY8PLL7+M\npaUlaWlplJWVie20OscQERHBkydPCA0N5eLFiygUCuRyOXFxcezevZu//e1v9PX1UVFRgZubG35+\nfty6dQsbGxv09fUpKCigu7ubGTNmoK61zZw5U0yuGhsbs2TJEu7fv4+Pjw9xcXHs27ePvr4+xo0b\nJ3ihT548EbmDpUuXcu/ePaZNm0ZJSQl9fX38/PPP6OnpYWlpya1bt3j77bd58OABCxcu5ObNmxQU\nFHDs2DF6e3sxNzfn+fPnhIaGIpfLqaqqEqzNhIQE3Nzc8PT0xMjIiL6+PjGElZycTHt7O+PGjaOh\noYHm5ubfXWj8lzg+KJVKsc27ePEi586dY+3atdja2tLa2kpZWRk7d+5k6tSpjBs3Ttx8enp6REdH\nc/jwYRYuXEhNTQ0eHh4i2HL06FHGjx/PkydPKC8vJzAwkIKCAjZs2EBpaSkrV64UmYXw8HDy8/O5\nevUq06dPx9LSktbWVlxdXent7RX9eEC4G6KiorCwsBDn/ZMnT5KTk8PSpUtRKpUivqxuKy1fvpwP\nP/yQ3NxcHj16hJeXF8XFxQIl/sYbb9Da2sr69espKCigvr6eoqIient76enpITU1FRcXF2QyGf7+\n/vj6+nLv3j1Rge7v70ehUJCeno5CoaCxsZHJkydTU1NDS0sLJSUlFBcX87e//Q0nJye0tLTo7+/n\n0aNHuLi40NbWxuXLlwWWzsHBgbfeeoulS5dSXl5OVVUVW7ZsYWhoiGfPnonq/qRJk2hra6OxsZGv\nvvoKlUqFpqYmfn5+WFpaUlpaysWLFzl9+jRTpkzB1dWVqqoqJkyYQGxsLLt37xbwWjs7OxwcHAgM\nDKSvrw8LCwu+/vprSktLKSoqQqFQcPToUSIiItiwYQNffPEFUVFR2Nraiph3QUGBOOKoh9hOnz7N\nzJkzcXR0REtLi+HhYfbv34+9vT0KhYIHDx4QGhrKyMgIzc3NXLt2TchkTp06RXV1NWZmZrz//vvY\n29tz6dIlAH766ScsLS3Fccjc3Jz8/Hx+/PFHnJ2dMTc3x9fXl5qaGqqrq0lISBBj6ubm5ixZsoTy\n8nIxwRsVFQVAfHw8JSUlREVF8ezZM3799Vf6+vrQ1tbG09OTpqYmmpqaOH36NA0NDYwdOxYLCwuk\nUimffPIJ7u7uVFVVoa+vj4ODA9bW1r/7fvyX2CkcPXr0U3UKLjQ0lMLCQpKSkujv7yc2NhYNDQ1a\nWlowMzPjyZMnhISECPpyTEwMJSUlLFu2DCsrKwwMDNDW1ubKlStcuHABXV1dJk6cyMDAAJMnTxaY\nqry8PEZHRykqKsLZ2ZmqqipsbW2xsLCgo6MDCwsLHj58SFVVFYODgzg5OVFQUMDdu3dZt24d+fn5\nBAYG8vTpU/r7+xkeHmbDhg1iOtLT0xN9fX36+voAiImJITc3F5lMRn19vUg9qvMBauT7wMAAcrmc\n0dFRrK2tGTt2LBkZGTg5OeHs7Ex3dzfHjx/HyMiI5ORkfH19MTU1FQk49bmzsLAQDQ0NSktLCQ8P\np7W1lQULFpCens7mzZupr6/n+vXrlJaWMjg4yDfffINEIiEsLAyJRML+/fuZOnUqzs7OAsCqp6dH\na2srcrmcvLw8DA0N8fb2ZsOGDUgkEuE1GBkZEWZl9Zx/XV0dS5YsoaGhgY0bN9LT08OuXbtQKpW4\nuLhgZGTE5cuXMTU1JSoqCoVCwdy5c8XOwt/fX7QKExMTRXKvtrZWgFJsbW2Jj48nMzMTmUyGVCpl\nZGSE48ePExERweDgIP7+/qJtrWYoAuzdu1cUU9XHifXr17Nz504mTJiAvb09xsbGSCQSpFIpLi4u\nJCQkEBUVxdWrV5k1a5YwjW3fvp3g4GAxUKetrc3jx49FO9bY2JjNmzeTmZnJkydPRGDN3d2dmJgY\nMQD1/vvvc+/ePezs7Ni1axfx8fH4+PjQ0NBAREQE9fX1YnH+7bffmD59Ovr6+kyZMgVjY2OcnZ2R\nSqVIJBIMDAy4e/fuv89OYXBwkLKyMqysrMTQ0vbt27GysmL27NlIpVK8vb2JiorC3d
2d0NBQ+vv7\n+eWXX2hpaRGBG11dXR49eiTw1jk5OSiVSkZGRoR7QQ0j1dfXZ/z48bzyyisCjdbe3k5wcLAIAA0P\nD1NQUMDChQuJi4sTwyrqQEp0dDR1dXW0traio6NDbW2t8AloaGiIpF9OTg4mJiZ4e3uzdetWwc1T\nT9OpV3EfHx+CgoIE2VdTU5MbN26gpaXF559/joWFBb/88gsBAQE4Ozvj7OyMrq4u58+fp7e3l+Dg\nYBobG+nq6hLBKScnJ1JSUsjKyuLzzz9nwoQJ3L17l46ODkJDQ5FKpcLNGBoaikQiITMzk4iICBob\nGzl+/DjW1tbCvNzX10dVVRVjx47FzMyMM2fO8Ic//EGIcNTAUR8fH/r7+7l16xZdXV28/vrrjIyM\nsGXLFoqLi7l16xYzZ85EQ0OD7OxsMjIyAPjss8/E8NPw8DCvvfYao6OjpKSkcPbsWcF1bG1t5ddf\nf0VXV5eLFy/i6upKfn6+iFw/e/aM7u5uPv/8cwCRb1DPoOTm5nLs2DEBffnyyy/JyckhPj6e1tZW\nKisr6ejoYNeuXQQFBQmb0+rVq5HL5YKQpFQqWblyJcHBwaSnp7N+/XoaGxvJycnh6NGjdHZ2kpiY\nSFlZGT/++CMqlUrg1J4/f8727duxs7OjtrYWbW1tUcBcu3YtZ8+excbGhilTpnDo0CG0tbVpbm4m\nJCREFCK1tLSoq6ujrKwMY2NjFi1ahJaWFjk5Ofz5z3/mb3/7G93d3fT39//u+/FfYlFQx1WvX79O\ncnIyM2bMwNDQEJlMxvXr13FycsLMzIxDhw7h6+tLamoqAQEBItW2bds2JBIJnp6eHDlyBA2NF/+t\nuro6hoaGOHPmDFZWVly/fp26ujouX76Mq6srQ0ND6OjoiPOt+lymvqkmTpzI66+/zs6dO5k+fbrI\npRcUFDAwMEBvby+TJ09m8eLFqFQqBgcHiY2N5e9//zvffPMNe/bsobGxUZzB5XI5/v7+rF69muHh\nYfT09Ojo6BBb0a1bt5KWlkZpaSlr167FyMgINzc3pk+fzieffIJCocDDw4OtW7cyduxYgoKC6Orq\nYuLEidjZ2VFZWUljY6OQkKifekZGRiiVSvz8/CgqKqKhoQE9PT0aGhrIzc3lyZMn4vsJCQlBV1eX\npKQkKioqWLduHQEBAQQHB2Nubk55eTlr164lMDCQsrIydu3ahUql4v3332d0dJQLFy4IX4JapHPx\n4kXOnz/PxYsX2bFjB5qamsjlcpqamggODkZHRwelUglAe3s7UqmUhoYGQVAODg5myZIldHd3U11d\njaurK2fOnBF+jgULFlBRUcGtW7doaWkhPj6e1atXExsb+1/mStTFPblcTnp6OidOnBAzJXFxcRgb\nG4vJy8DA5HVTvQAAIABJREFUQCIjI0lOTsbExITOzk4Be+3u7hZRejWY5+rVq5ibm3Pp0iU6OjrE\niPPdu3fR09MjPj6eqKgo5s+fT2trK7W1tbzxxhtoaGjw5z//mebmZs6fP8/9+/cB2LZtG5qamjx6\n9Ijc3Fw8PDwIDQ0lNzdXQFaampo4cOAAH374Ifn5+ZSWlpKVlUViYiIymYyoqCiR7FQHtH7P9S9x\nfDhx4sSnIyMj1NTU0NHRgZGREcePH2f8+PHk5+djZWUlJvf27t3L6tWraWhoQEdHB3d3d3JyctDR\n0SE2Nlbw6G7evElYWBjBwcFUV1cTEhJCUlISNjY2lJSU0NTUJJj5ly9fFscEqVT6XxaIlJQUZDIZ\nnZ2dzJo1ixMnTrBy5UoePnzIyy+/zPPnz6mrq6O8vBw7Ozuqq6tFpLm6uprZs2czNDSEn58fY8aM\nYcGCBUgkEjQ0NNDS0qKmpoZp06Zx9+5dvvvuO/T19dHW1sbR0VFM5H344YecP3+e+fPn4+7uTkVF\nBVlZWaxbt45ffvmFOXPmkJGRgYmJCb6+voKb2N/fT3h4OJaWlgQGBpKYmMjUqVOZO3cuPT09BAUF\niaKtoaEhTk5OjB07loqKCpYvX869e/dE7FjtxrCwsCAzM5Pg4GAeP36MhYUFOjo62NraoqmpyZMn\nT7C1tWXcuHHcu3ePkJAQTE1NhXRHLf0JDAzE1dUVOzs7PDw8KC8vJzU1VRRv1co3TU1N0fZ0cnKi\nu7ub4uJiIiMjBQJ/ZGSE2tpahoeHiY2NZfXq1ZSUlIhE5/nz53Fzc6OlpYX+/n5xZFGpVISFhXH+\n/HkyMjJoaWnh5ZdfJi4uTmQ3Jk6cSHt7uxiCKyoqoq+vDy8vL27fvk1AQACtra1kZ2fj4OAgWAdy\nuZzz588LK9WsWbNYtmwZcrmc6upqOjo6GBoaIiYmBjs7O/z9/dHQ0MDBwUEwOAMDA4XoWG15mjBh\nAo2NjSiVSvG7UldXh1QqJT4+Xiy2paWlguOooaGh7jr9+3Qfvvzyy0+//PJLMUOv3gbW19dTW1vL\n1q1bGR4extXVleHhYVpbW0lOTqa7u5vKykqCgoJIT09nzpw5eHp6ikLRJ598glwuZ3h4GD8/PyIj\nI0lISODOnTu8+uqrODs7iyfU8PAw1tbWosJ/584dioqKmDhxIt3d3QKZdvbsWW7fvk1UVBSWlpZ4\neXlRUFDAmjVrsLa2xtzcnKysLBGvrqysZMWKFeKpnZOTw5o1a9DR0RFPLXNzc+Li4hgYGGDu3Lnc\nuHGD9PR0sb1ua2vD1NSUpKQkrly5IvDn+vr6ZGdnC2SZ2kf4448/8t577wnLkZWV1X9pYR4+fBgD\nAwOmTZsmZjfUtusDBw6QlpZGW1sbSqWSzZs3o1Qq+fHHH0lISMDR0ZHAwEASEhLw9PTk4sWLtLe3\nk5ubS29vLzKZTGj4lEolLS0tLFiwQAyBNTY2Ym9vz3fffceFCxewtrYmISEBAwMDUlJSePXVV1mw\nYAEtLS1MmDCBvLw8SkpKBGPD19eX3t5ecWSUSqXC7dDT08PixYt5+vQpDx48ICgoiMLCQuLi4rCz\ns6O7u5sNGzbw9OlT8X3duHFDyH3/4z/+A21tbbHQS6VSMXylzlwEBwcjl8uFzs/Ozo4333yT4eFh\nIe5pamoSnM7Vq1czMDDA8PAw169fp6ysjNraWo4cOUJnZyc6OjrC36DOwzQ2NvLhhx+iq6vLjRs3\nKC8v59VXXyU9PZ2KigocHByERbu8vFzAbW/fvs2YMWM4f/68MGhLJBIsLS2Jjo5GoVD8+9QUAN55\n5x3mzJmDnZ0drq6u7Nu3Dy8vL/bu3YuOjg5hYWEMDw8jkUgEdry+vh5bW1uBPnd1deXhw4eMGfOC\nDFdQUMCECRMEg/HEiROMjIywfPlyFAoFpqamgg701ltvsWHDBrS1tfnss89ISEgQ3AVLS0vMzc2F\n0uvatWvI5XJSUlIwMTFBJpNhbm5ORUUFO3bsQKFQcODAAY4cOcLAwABHjhwhPT0dPz8/Xn/9dX76\n6SdUKhXPnz+ns7MTExMTADZt2kRsbCxz5
sxhwoQJ5Obmsnz5cq5cuSIgHwcOHKCoqIjAwEBKSkpQ\nKBS0tLSIKb5Dhw7x6aefiiq6WkdfWVnJ2LFjef78Oc7OzvT393P37l1ycnLQ1tYWwlRXV1cWL17M\nqlWreO+99yguLuby5cts3LiRiRMnEhISIjDxSUlJ/OEP/6u9Mw+K8sza/u9hEQSatdmUPTSySMvi\nBuKCiriUieMSl4wamcliZb5JnGSSd2KVSypVGbNWUjPZjIYZNRpxSYxK3MUVRWSTvdlkS7MvLdBA\n9/P9AX1/ydQ441R9rzBVfVV10Tw0cnngOX3u+z7nun6Pv78/jY2NNDY2snv3bsrLy4mOjub27dvC\nkyIhIYGYmBiWL18uKpctW7YQGhpKaGiomPf38vLC3d0dhULBp59+iiRJrFy5klOnTqHT6YT7l0nA\nRqVSodfriYmJwcLCgri4OJRKJTt27MDGxkZ08a1cuZKpU6fi4OCAjY0NYWFh7N+/n3v37jE4OMiG\nDRtwdnYmMjKSgIAA9u3bx44dO0SvwJ07d7C0tKS4uFhIxwEsXLiQzZs34+DgwMDAAFZWVnz//fdM\nmzaNWbNm4erqSmlpKZWVlcTExGBra8uYMWNYuHAhcXFx/PrXv2bp0qW0tLQA8NprrwHQ1tbGiRMn\n6O3tJTQ0VOh+mk7gTBvpsixz4sQJqquref/99/H392ft2rXiVGz8+PG/EIR9FIyKSuHjjz/ekZiY\niFKpxNbWlr6+Pm7fvk1wcDCZmZksXryYwsJC0tPTGRwcJD4+HhsbG2bOnMmPP/7IhAkTRHnn5eXF\n/v37yc7OFue+4eHhjB8/nvb2dhoaGoRAZnh4OAMDAygUCoKDgykoKMBoNOLl5SW8FydOnIitrS0q\nlYq+vj4OHz5McnIy8+fPR6lUkpGRQUlJCXfv3iU5ORm1Wi3svxMSEvjkk0+YNm0aXV1dfPDBB0iS\nhJOTkyjnXVxcsLS0FL0NJnmztrY2ZsyYQWZmJklJSeTm5mJpaUlWVharV68WWgNWVlY4OTmh0WgI\nDg4mOjpadFWeO3eOt956i7CwMNGPsGDBArKzs1mxYgVtbW2iNdloNDJ37lzc3d1Fw9WVK1eE+rFO\np2PFihX09PQIf4ympia6urrw8fHBwcGB9PR0AgMDiY2NpbCwkJUrV+Ln54dCoaC7uxtZlnF1dUWn\n06FUKikvLycvLw8XFxdkWeaHH35g6tSpHD9+nO7ubmJiYoRX45w5czh48CDd3d0EBgaiVqtxd3fn\n2rVrouM1Ojqab7/9Fn9/f+rr6+ns7OTJJ58EEGrdpp4Qg8GAj48PCQkJIklqtVoqKipISkqir6+P\nAwcO8NNPP9Hb2ysmYy0sLDh9+jQREREcO3aMjz/+WLQYv/LKK1RXV+Pi4kJMTIzoTTFJ9iUnJ+Pp\n6cnOnTs5cuQIGo2GPXv24Ofnh7e3N9nZ2eTk5FBcXCyEe7y8vNBqtQQHBwNDEoNarZakpCSCg4NR\nKBS0trai0+m4cOECOp2ODRs28PXXX7Nw4ULq6+sJDQ01iRH/91QK9vb2ODs7ExoaKoRJenp66Ojo\n4MKFC1y6dIlXXnmFhIQENBoNaWlplJSUCO/HmJgYKisrRRm+du1aYEja65lnnqGjo4POzk5qamqw\nsLAQv7jr16+LqcGjR4/y5Zdf0tTUhKm7srCwkPnz54sORJM6UG5uLjU1NeJ76+rqSExM5MyZMzg4\nODB9+nReeOEFdDodn3zyiVAB3rp1KxUVFfT09Ihlw507d9i7dy8Av/nNb4Trz6JFi6ipqeHDDz/k\n7t27WFhYMHv2bNra2oTGQlpaGnZ2drS0tAi3qWPHjhEUFIRKpeKpp54SY8zXr18nLCyMoqIinn76\nadLS0kQpXlxcTF1dHUVFRRgMBtauXcvOnTvRaDQ0Njbi7OxMZmYmu3fvFg7clpaWQuAkPz8fR0dH\n/P39hZtTcnIyNTU1lJaW8u6771JcXIxOp6O5uVkc0ZmcpzMzM8UyLjAwED8/PyZPnizK+uXLl2Nj\nY4NSqSQnJwdbW1uuX79Od3c3LS0ttLW1ifmM9957j9raWgICArhx4wb79+8HhpS7qqursba2Flbw\ns2fPJigoiCeeeAKtVktiYiKrVq3C0dGRgoICvvjiC9LT01Eqleh0OvFGMWfOHKG81NTURFhYGG5u\nbly9elVI0XV3d2NtbY1Op+Pu3bt4enqSm5vLwMCAsIs3dSVevHiRiRMnEhkZyVtvvQUM3fyhoaFM\nmDBBTNj6+vrS2toq3L1MSuUtLS10d3cTGRnJM888g0ajwcvLCwsLCyRJorW1VXTDPgpGRaWwc+fO\nHYGBgdjb23Pz5k0iIiIICgrCx8eH6dOn09fXx/jx47G0tESWZbZs2UJiYiKnT59m0aJFZGZm8oc/\n/IEbN25QU1PDxYsXyc/P5+jRo3z33Xc8+eST1NXVERYWRlRUFPX19YSFhWFvby/cd65fv868efOI\niorCycmJvLw8YSbT29uLm5sbn3/+OTk5OWL9Z9IDdHV1xcbGBgcHBwwGA1ZWVty7d49XX32ViIgI\nIUyi1+uJjIykubmZoqIiZFnG29ubt99+m7/+9a8UFxcLFWcvLy9xBr1t2zaSkpIoKSlh7dq1QjHJ\n5BLk4eHBrVu3xJn0tGnTmDZtGm5ubnz11VdkZWUxYcIEPDw8hKnK1KlT0ev1HDlyhIqKClGa6/V6\nPD09cXNzY/369Wi1WsrKyrh8+TLLly9nxowZ/OUvf2Hu3Lm0trbS2dmJj48PAwMDzJkzh8rKSmJj\nYzEYDEKl2nRDVVdX09raytmzZ7GwsCA2NhYLCwvWr19Pa2srJ06cIDo6mujoaOG1eerUKbKysti+\nfTsBAQHY2tpSXl7OyZMnSUlJwdPTk59++ommpibmzZtHe3s71tbWTJo0Ca1Wi5eXF99++y2bNm0S\nhjlOTk5ERUXh6+tLc3Mzn3zyCZs2bRJ7Ac7OzpSXl5OSkkJFRQWWlpbcvHmTN998k/b2di5duoRC\noeDixYt0dHQQGxtLU1MTS5cu5dq1ayxdulQYtuTm5nL27FmKiop48cUXaWpqQqvVigYoX19foRht\ncrc6f/48zz77rGhdX7p0KTqdju3bt1NdXY3RaMTKyoo9e/YQGhrKwMAABoMBb29vTp8+TVRUFDNn\nzkSv11NVVUVrayt1dXXcuHHjv6dSsLGx4Y033hBWcNHR0dTX11NcXMyyZcvw8vLiqaeewmg0smzZ\nMjQaDbW1tUIw1MbGhqamJsLDw3F3dxf9BJ9++ikbN26kvb2dc+fOcefOHa5cuUJnZyfh4eFERkaK\nX6pKpaK5uZnKykpKS0uxsLAQQzoACoVCSHuZlIbnzZvHyZMn6evrE8Y0JSUl2NjYiPbT+/fvM378\nePLz84UV2dq1a5k+fTpOTk7Y2toKn4qXX36ZkJAQPDw86O/v5/79+8ycOZP8/Hxef/113n33XcrK\n
yigqKsJoNHL//n1gqFvP5Bhl+r9+//331NTUsGTJEiZOnMiGDRuoqqri1q1bQsh2165dtLS0CGUm\nT09Pent7uXv3Ln5+fiJJ1NbW8sILL3D48GHhsWFKmsHBwdja2uLk5MS9e/dITk6msLCQzs5ONBoN\npaWlDA4O8uDBA7q6uoSVm8mRqqenh4sXL4ryOCQkhI6ODkJDQ2lvb2fdunVig+/ixYuUl5ej0+no\n7Oykurqa1NRUXF1dGTNmDAcOHOD48ePCm2HcuHHCzdrd3Z2Wlhbeeecdxo0bR01NjRCo/f3vf8+d\nO3doa2sTG82Ojo7k5eXR29uLhYUFkyZN4sSJE2i1WpycnGhtbQXA0dGRy5cvi1OOt99+m5ycHNEk\nlZOTg5eXF4sWLeLw4cPU1NSgVqvJycmhubkZtVrN9OnTKSoqAhD9BLW1tULyrbm5GYVCQXx8vFDj\nMiU/Nzc3nJyc8PDwoLy8nKioKCorK/nuu+9obGxk6dKlJCcn09bW9sj346hICpIkkZaWxrJly8RR\n3K1btzAYDHz22Wdig8bf3x9JksSNtG/fPnbu3ElcXBynTp2iqKiIsLAwobSr1+u5ffs2/f39REVF\n4e7uTmdnJwEBAej1euzs7CguLqatrY3w8HChFgwQFRWFQqHAzc1NKAi//vrrAFy7dg2j0Sgqi8zM\nTNHrYJoQLCkp4eDBg9y4cQO9Xk9PTw+lpaWcOXOGBQsWcP78eQYGBtDpdOLM++rVq/T19SHLMt98\n8w3r1q2jt7cXOzs7BgcHSUpKEiPiISEhWFtbYzQaxburSa3J5JZVV1fHuXPncHd35/Tp0xw+fJi+\nvj6CgoK4cOECoaGhQv77pZdeIiwsjCeffFJ0Wer1etLT08XeiEkG3rR3sXfvXubMmUNAQIDwv3Bw\ncECSJBG/yMhI1q1bR3R0tHCCys3Npa+vj7179zJ+/Hi0Wq2IweXLl3F2dkav15OQkEB9fT2zZs1i\ny5YtfPzxx0yePJmEhAQ2btyIlZUVq1evRqVSidOj5cuXk5qaCgw1FpkmRM+dO0dSUhJvvvkmf/rT\nn5g8eTKpqalMnDhR+Jb29PRgNBoxGo3k5eXR3t7O+vXrhVFvb28vTk5OODg48ODBAwDc3NxYsWIF\nOp1OtNRHRUXR0tJCYmIiEyZMwNnZWZzyxMfH8/nnn7Np0yYhMOPp6YmPjw8FBQUsXLgQGEqOpind\n8PBwGhoa8PT0FCP6kiQJmbyVK1eKNyIYWoLNnDmT+/fvk56ezuHDh4mIiHjk+3HUJIV794bEoJVK\nJX5+frzzzjvi5njvvfeE0GZpaSkqlYrQ0FDs7e2JiIggIyMDV1dXsU5buXIlAPPnz8fe3h6FQiEs\n0z09PcX3fPHFF8IcxMHBgZCQEBoaGkR33JQpUxgzZgwajYaBgQHq64eU6idPnkxxcTHl5eWo1WoS\nEhLIysoiMTGRV155hRkzZojlj5+fH/X19bi4uJCbm0tbW5s4gx87dqwYmwWEAUtfX5+Yk/f09KSg\noIDExERkWSYlJQVLS0uuXr1KbGysGBlfsmQJSqWSwsJCjEYjTU1NYpS6o6ODv//97yQnJ4vvHRgY\nYNmyZaSlpZGSkkJ8fLzQUhg7diw7d+4kJSUFHx8fWlpaWLt2LTNnzkShUHDz5k3OnDnD1q1bOXTo\nEEePHqWsrIxLly6RnZ3N2LFj6e/vJzIyEjc3NywtLdm9ezeXL18mPz8fOzs79u7di1KpBBCjwYCw\nWW9ububrr79Go9GIZNTZ2Sk23ubPn09qaiqZmZl88803dHZ24uLiwltvvcWqVavo6uoiLi6O1atX\nA0P7Vm1tbfj5+TFv3jyh7RkXF4dGoxEKTvfv38fJyQkLCwteeukloQUxefJkOjs76evrw9PTE0vL\nISfE4OBg2tra+N3vfkdWVhbfffcd/v7+lJaWkp6eTlFRES0tLfj6+nLhwgW2bduGwWDgueeew8nJ\nicbGRk6dOsVnn31Gc3OzODk7cOAAfn5+jBs3TlReJl0MR0dHpk+fTnx8PImJibz33nscOXIEg8HA\nlStX2LJlC7/97W9RKBR0dXWJDfFHxahICj09PSxZskR4BVpYWPDtt9+SmprK5s2b2bx5Mz4+PtTV\n1aFUKnnxxRdpa2ujsrKSiooK7O3tkWWZ6upqwsLCRMvs9evXqa2tZf/+/WzevJnY2FjCwsIICgpi\n5cqVODk5YWlpiaOjI2fPnhW2byZ16c8++4ygoCAh2Hr58mUAAgICsLe3F1p7Tz/9tEgyN2/eJCMj\ng+LiYkpLS4XeY3l5ObNnzxaKzIGBgTQ1NbFr1y5UKhWAkOKOjo5mzZo1uLi4YG1tTVVVFdOnT6e2\ntpasrCwcHByELVlUVBSzZs3i5s2b/PnPf+aPf/wjgYGBQl3ax8cHpVIp+jI8PDwICAhgxowZ/Pjj\nj6xZs4bbt29TUlLCsWPHuHXrFhqNBm9vbz788EMMBgNTp04V3gRVVVXExcXh4uJCeXk5FRUVdHZ2\nCrm0cePGcffuXbKzs3niiSeIjIykpKQER0dHVCqVsAX86KOPmDt3LoGBgURERIg3hY6ODq5duybM\nVGxtbampqeHevXti8MzUyhsfH09lZSXLly8nMjKSQ4cOCVFfe3t7sZyCoU7J7u5ubty4wcqVKzl9\n+rRohAoKCiI1NZXu7m4GBwdxdXXlueeeo7a2lrS0NGbNmkV6ejoRERHodDoqKiqYN28eMKQ+fe/e\nPVxdXVm4cCHz58/n/v37uLu78/bbb4tORAcHB5RKpZjZsbKyYty4cVy/fp3Kykp8fX2FfiYMHU1a\nWlqiUCjo7+8X8ZVlmdbWVlENq9Vq1Go1L730Ev39/XR1dbFr1y4WL16Mvb09Hh4e1NbWCr6PglGx\n0bhr164dppkGk9XV+vXrmTFjBuXl5Zw6dYqIiAjq6upEWZ2Xlyc667y9vRkcHCQgIIDs7GxUKhV7\n9uxh27ZtODs74+/vLyzOTOWtqSL46aefWLJkiZgqM3WcLV68mO+//57Q0FCR+d3c3EhPT2fjxo2U\nlJRga2tLQEAABw4cYMyYMWRnZ1NdXY3BYGD27Nm4u7uj1WpFo4nJXMbe3p7U1FTOnDkjZjCOHj1K\nYGAgdnZ2dHd3U1VVRXR0NA0NDdTW1nL27FnWrFkjNAT1ej0DAwPEx8fz0UcfERMTw+DgICdOnCA0\nNJSTJ0/i5OREaGioGOgymdCYBEcCAwM5duwYGo2GiIgIKisraW5upqCggLi4OC5fvkxERATOzs5o\ntVoiIyNRqVQ0NDQQGhrKkSNHWLp0KR4eHsIAtbGxkY6ODqFslZOTg42NDUePHqWurg5XV1exv6BS\nqaitrSUxMVFMZlpZWQlf0ZKSEnx9fTEajSQnJ2MwGIR+QGNjI2PGjGHSpEkYDAby8/OFU7Vp2eHo\n6EhHRwcHDx7k+eefF0a1kZGReHh4oFKpyM3NJTc3l61bt
5KVlYW1tTUtLS20tLQI4+Ljx49jZ2cn\nbsS//e1vREREcPbsWY4cOcLVq1fFnlNOTg7r168X/E3TqVVVVdjZ2ZGSksLZs2cJDg7mypUrvPba\na0RHR/PGG29gMBiora0lIyODsWPHotfrycvLw8LCggULFtDX1yfs/xoaGqisrOT48eO8+eabYnDQ\n2dkZGxsbpkyZQkZGBs3NzcTHx9PQ0EBmZuZ/T0fjoUOHdhiNRhYsWEBYWBhdXV3Y2tpSVlaGj48P\nLi4ueHl5CWmzo0ePMnHiREyt0SkpKcJEpqysDEmSOHnyJM8++yz5+fmEh4eLcer+/n76+/uxt7fn\n/fffx8/PD1dXV44fP45arRYyXaY9hszMTNrb21Gr1SgUCg4ePIhKpUKWZezs7FCr1Vy6dAlXV1e2\nb98ODBmElJeX4+bmRkFBATY2Nuh0OgoKCoQev8m49YMPPiAiIoIzZ84QHx+Pr68vWq1WVBJXrlwh\nPz+f7du3U1lZKQx34+LiuHnzJq2trfj7+3PmzBnc3Nzw9PQkMjJSyMUZDAbq6upQKBSUlJTg6enJ\nDz/8gKOjIwaDgba2NmJiYoTgp4ODAzExMZSUlIhBr8bGRtE9aTp5uHPnDiEhIaSlpaFWq3F0dKS6\nuhpZlomLiyMxMZHGxkZsbW0ZP348zc3NBAQEsGTJEhoaGpgxYwaSJFFbW0tTUxMnTpxAo9GwatUq\nFAoFWq2W1atXCw0BkzJ1dHQ0P/zwg1DEam1tFUY8kydPFonQ9Htwd3fnq6++4tVXXxXldFVVFbGx\nsWRkZKDRaPD39xduWWPGjGHfvn3Cr9LX1xd/f3++/PJL1q1bR0REBHPnzkWn0/Hjjz/S1NSEWq2m\ntLSUoqIi4b/g4+NDRUUFwcHB2NjYsGrVKtG2X19fT0tLCy4uLuTl5WFpacmUKVNwdHRk8eLF7N69\nmylTphAfH49OpxMCM97e3uTl5YnW87CwMMrKysjMzBRS86Yq0tQybdqH6+7u5ubNm/89pw96vZ5J\nkybR09NDamoq9fX1woEoKCiImpoaTp8+jVqt5s6dO/j6+rJ69Wpmz55NUlISe/bsIS8vj5SUFJyd\nnYUvodFoZMaMGWRkZIimqNraWgYGBjh16hTPP/+8WIer1Wp6enrEGO7LL7+MXq9nw4YN+Pn5UV5e\nzu3bQ0ZXxcXFqFQqent7hcrv5s2buX37Ng8ePGDSpEmEhIRQWloq1t9HjhwR+ohvvPEGTzzxBO3t\n7axdu1bY3CmVSj788EOampqYPHkyOTk5SJJEWFgYFy9e5Fe/+hWDg4MolUquXbtGYWEh1dXVQnPB\n3d1dWNx1dXXh6uqKt7c3HR0dhISEUFVVJd5VTX+cRqORiRMnUldXh4eHhxjbTklJEa5Z8+bNE6Pk\nCxcu5O7du3h7exMTE8PcuXMpLCwUycQ0pltSUiImNk37M6Z/W61Wc/XqVU6fPo2TkxMhISFs27YN\nQLhZzZw5k8LCQqFqFBsbS0tLC1OmTBGzMEuXLsXe3p7z588TFxdHZ2cnycnJNDc3k5aWxo0bN8jM\nzAQQY+G3bt1i7Nix7Nu3TxjTbNq0iUOHDrFixQomTpwopnUzMjLEvs6sWbPw8/Pj0KFD5OXliU5J\nKysrDAYDe/bsIT09HW9vbx48eMCZM2eYO3cuwcHBzJw5k6tXr3Lp0iUGBgYoKyvjwYMHaDQakpKS\nuH//Pu3t7SQlJYkN1+3bt5Obm4uHhwexsbGsWrWKoKAgJk2aJKzjTP6TsiyLBjuFQoG1tTVarZaW\nlhacnZ0ZGBjg3Llzj3w/jhY152bgAdAy0lz+AUpGHycw8/pPMBo5wcjw8pdl2f3fvWhUJAUASZLu\nPIr89OPEaOQEZl7/CUYjJxi9vGCULB/MMMOM0QNzUjDDDDN+gdGUFP7trugIYDRyAjOv/wSjkROM\nXl4c/xD7AAADGklEQVSjZ0/BDDPMGB0YTZWCGWaYMQow4klBkqSFkiSVSpKkkSTpf0aYS7UkSQWS\nJOVKknRn+JqrJEnnJEkqH/7o8hh47JUkqUmSpHs/u/ZQHpIk/Wk4fqWSJCU/Rk47JEmqH45XriRJ\nix8zJ19Jki5JklQkSVKhJEkvD18f6Vg9jNeIxuuRIcvyiD0AS6ACCALGAHlA+AjyqQaU/3DtXeB/\nhp//D7DrMfCYBcQA9/4dDyB8OG42QOBwPC0fE6cdwGv/5LWPi5M3EDP8XAGUDf/skY7Vw3iNaLwe\n9THSlcJUQCPLcqUsy/3AIeCpEeb0j3gK+Nvw878By/63f6Asy1eAfxyAfxiPp4BDsizrZVmuAjQM\nxfVxcHoYHhenRlmW7w4/7waKgfGMfKwexutheCy8HhUjnRTGA7U/+7yOfx28/23IwHlJkrIlSXp+\n+JqnLMuNw89/Ah7df+v/Lx7GY6Rj+H8kScofXl6YyvTHzkmSpAAgGrjFKIrVP/CCURKvf4WRTgqj\nDQmyLEcBi4CXJEn6hbCdPFTrjfhxzWjhAXzG0NIvCmgEPhgJEpIkOQBHgVdkWe76+ddGMlb/hNeo\niNe/w0gnhXrA92ef+wxfGxHIslw//LEJOM5QCaeVJMkbYPhj0wjRexiPEYuhLMtaWZYNsiwbgd38\nv5L3sXGSJMmaoRvvgCzLx4Yvj3is/hmv0RCvR8FIJ4UsQCVJUqAkSWOANcCJkSAiSZK9JEkK03Ng\nAXBvmM/G4ZdtBL4fCX7/gscJYI0kSTaSJAUCKuD24yBkuvGG8SuG4vXYOElDiiR7gGJZlj/82ZdG\nNFYP4zXS8XpkjNQO5892XhcztDtbAWwdQR5BDO0A5wGFJi6AG3ABKAfOA66PgctBhsrLAYbWl7/5\nVzyArcPxKwUWPUZO+4ACIJ+hP2zvx8wpgaGlQT6QO/xYPApi9TBeIxqvR32YOxrNMMOMX2Cklw9m\nmGHGKIM5KZhhhhm/gDkpmGGGGb+AOSmYYYYZv4A5KZhhhhm/gDkpmGGGGb+AOSmYYYYZv4A5KZhh\nhhm/wP8FWfpZrxJ6G0wAAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f13853ce1d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGd0W3W+/vuRLVmWZFu2Zbk3ufcW2+khcZodJ05PgBDI\nhAEObWCAmZMzM6zDANNgKDP0TlYYAiGFJCa9Edtxd2y5996rLMuymnVf5GbfMy/OOtz1H9bh3pXn\nnb12suWt/fuW53m+X5HdbucO7uAO7uA2HP63P8Ad3MEd/LRwJyjcwR3cwT/hTlC4gzu4g3/CnaBw\nB3dwB/+EO0HhDu7gDv4Jd4LCHdzBHfwTfrSgIBKJskUiUbNIJGoTiUQHfqz73MEd3MG/FqIfw6cg\nEokcgRZgLdAHlAP32O32hn/5ze7gDu7gX4ofq1LIBNrsdnuH3W43A18Bm3+ke93BHdzBvxDiH+n/\nDQB6/8vPfcDC/+5ihUJhn5+fR6VS4ezsjNlsRq/XIxaLcXFxYWxsDIVCgVwuR6FQMDIygqurK2az\nGbPZjNVqRaVSMTMzg7OzM7OzswwNDeHm5oaTkxNisZi5uTm8vb0xGAw4OztjNBoxmUx4eXkxMzOD\nTCZjYmJCuOfAwACenp7YbDbc3d0ZGxvDycmJwcFBAgMDkcvlTExM4OPjw8DAACqVCoPBgEgkwmKx\nMDU1RVRUFHq9HpFIhIODAzqdDplMhtlsxsXFhdnZWSwWC46OjoyOjhIQEICTkxMSiQSdTodEIsFs\nNuPk5IRcLsfR0RGTyYSDgwPz8/PodDrc3NyYn59HIpEwPT2Nk5MTTk5OWCwWzGYzYrEYo9GIVCpF\nLBYjFosZGhpCKpUK195+hgqFAqPRiNlsxtXVFZvNhtVqRSQSoVAomJ6extHREQeHW7lEKpX+03M0\nGo2IRCLEYjF6vR5XV1cMBgMKhYK5uTnsdjs6nQ5fX19MJhNyuRyZTMb09DR6vR6dTkd4eDgAVqsV\nR0dHLBaL8J1ZLBZsNpvwLGw2G2KxGJvNhlQqRSQSMTU1haurKyaTCavVCsDw8DABAQGIxWJcXV2Z\nmprCYrGgVCrR6XSoVCqcnJwYHx9nfn4eJycn4W+Ty+VMTk6iVCqFz2G325mammJ8fBwvLy/m5uZw\nd3dHIpEI7+rt78hutzM3N4eTkxNTU1P4+PhgsViwWCzIZDL6+/vx8PDAbrfj6OiIi4sL3d3d+Pj4\nYDQaUSgU2O12TCYTMplM+J1er0cikaBQKJidncVmsyGTybDZbDg4OCASiTAajYjFYkQiEY6OjgwM\nDIzZ7Xb1/3R4f6yg8D9CJBI9DDwM4OXlxfPPP4+fnx9lZWWcP3+ee++9F7VaTUhICDdu3OCuu+6i\nqamJtWvXUltbS1paGp9++imtra387ne/o6CggLq6OlxdXZmYmODw4cMcPnyY5557jgcffJBTp06R\nkZFBcnIyFy5cYNGiRVRUVGA2m0lJSaGoqIjw8HAiIiJwcnJCJpPR2NiIg4MDPT09LF26lOrqat55\n5x3+8Y9/cOHCBUwmE6mpqWi1WvR6PStXruTSpUuMjY3R2trKa6+9xokTJ2htbSU2NpbExEQKCwvJ\nycmhsrISq9XKE088QU1NDdu3b+fxxx9nbGwMFxcXli9fjpOTEw0NDaSmplJZWYmPjw9dXV2sXr2a\ngoIC5HI5BoMBuVzO4OAgrq6utLS0EBkZiUgkYsGCBdy4cYOBgQHsdju5ublcvXqV4OBgOjs76e3t\nZd26ddy8eZPU1FRaW1tJTk6mtbWV9vZ21Go1CQkJVFZWYrFY8PX1RaFQ0NLSgr+/P1988QWPP/44\ng4ODBAUF8f7777Ns2TIcHBxITU2ltLQUqVSKXC5HpVLR0dHBc889x3/+538SEhLC+Pg4vb29pKam\nEhsby+9+9zvef/99jh07hq+vL4mJiZhMJsbGxpienubDDz/knnvuYdeuXeTn5xMREUFHRweLFi1i\ncHCQhQsX8pe//IXFixczPj6O1Wrl4MGDDA8P88ADD7Bt2za8vb0ZHx/n4MGDuLm54eLiglarJTg4\nmOXLl6PX6zl//jxPP/00FRUVGAwGvv/+exYvXkxfXx+xsbEkJSVht9tZtGgRd999N+Pj48TFxfHZ\nZ58RHx9PTU0Nu3fvJjQ0lK6uLgIDAyksLGTPnj309PTQ1tYGwODgIGq1mo0bNyKXyzl79iwxMTF8\n9NFHbN++HZlMRkJCAhKJhL6+PoaGhrDZbPj7+1NfX4+XlxfT09OUlJSwb98+xGIxFosFJycnTCYT\nKpUKnU5HRUUFXl5evP32290/5Gz+WO1DPxD0X34O/L9/J8But39ot9vT7XZ7uoeHB+3t7SgUCpKS\nknj66adRqVRMT0/T2tqKg4MD1dXVHDhwAL1ej8Fg4LPPPsPd3Z29e/dy//334+Pjg0gkAiAyMhKA\nEydO0NjYyNzcHM8//zwhISFoNBr27duHQqFAqVTym9/8BpVKRWxsLB4eHkRFRRESEsKZM2dob29n\ndHQUpVIpvBAABQUFZGZm4uPjw8jICCMjI3h7ezM5OUloaCgAzz//PFqtFqVSibu7Ow888AByuZz9\n+/fT1dVFZmYmw8PDHDt2DIlEAsDatWsZHR0lKyuLubk5rFYrRUVFTE9PY7fb6ejoYNmyZWi1Wpyd\nnbl58yY9PT0UFhaiVCoZGhpCrVbj6emJm5sbN2/epLu7m9raWgwGA5cuXaKqqgqbzcbk5CS//vWv\nqaurY9myZRQUFODi4kJpaSktLS2UlpYyNTWFSqVCo9EQGBiIWCymtbUVuFUlbN68mebmZiwWC62t\nraxYsQKFQoGrqyvz8/OEh4fj4eFBcnIyMpmMgIAA4X5TU1PExsYSEBCAQqFALL6Vn8xmMwaDgZMn\nTyKTyfDz82NiYoLh4WFWrlyJSqWitbWVuLg4ent7SUxMZHJykvj4eIaHh0lNTaW6upr+/n68vb35\n9NNPAQgLCyM/P5/6+nqMRiPPPPMMSUlJPPnkkwwMDGAymaivr+fixYs89NBD+Pv709XVhcViITc3\nl8DAQMxmM3fddReNjY2cOnUKgPDwcA4cOMDExAQ/+9nPSEpK4vDhwyQkJGC1Wlm4cCGrVq3i8ccf\nZ3x8HFdXV/Ly8rBYLOzdu5etW7diMBgIDQ0lNzdXeL5jY2O0t7ej1Wo5ceIEDQ0NKJVKIiIiaGlp\noaamhsLCQtRqNbt27WJsbIza2loqKyv55S9/ycDAAK2trdTV1REQEIC7u/sPPrw/VlAoByJFIpFG\nJBI5AXcDp/67i6empgDQ6XTMz8/zxz/+EZvNho+PD2lpaeTl5bFixQr2799PS0sLzc3NiEQiZDIZ\n7u7u7Ny5k/b2dnbs2MH8/LzwANzd3Xn99ddxd3env7+fqKgoSkpKCAwMZGJigpUrV6LX69m/fz82\nm42goCBEIhGBgYH86U9/wt/f
n507d+Ls7ExMTAwffPABAJmZmeh0OsrLyxGLxeTm5pKcnEx3dzcG\ng4Hw8HDOnj3Lp59+yuHDh1EoFFy5coXIyEiKi4tZtWoV3t7eBAUFsWLFCubn5wFoaGggPj6e9vZ2\nrFYrISEhrF69mhs3bhAdHY1EIkEikZCSkkJvby+nT59mYGAAPz8/ZmdniYyMZPny5RgMBkZGRkhP\nT6epqYkdO3YQEhKCUqlkzZo1NDc3ExsbK1RWVVVVXLt2jU8++YRLly5RWVmJRCKhqKiItrY2nJ2d\n8fPzIy0tjdzcXKxWKwaDAbPZTFNTE4GBgUxPT/Poo48yPDyMh4cH4+PjuLi4YDAYKC4uJiYmhvr6\neiQSCX/4wx9QKpUsWrSIf/u3f2PZsmWsWbMGgLq6OrKzs1myZAlHjx7l2LFjSKVSVCoVd999N8HB\nwVRUVDAzMyMEf3d3dz755BPeeustOjs72bVrFwqFgqamJhobGwH44osvCA0Npb+/n7GxMSYnJ/H1\n9RUCvEwmw8nJidzcXAYHBykoKECtVlNaWkprayvDw8OkpKRw8OBBfH19SUhIAMBisfD8889TUFCA\nn58fly5d4ptvvuGLL76go6MDpVLJ9evXKS8vx8HBgYCAAH7729+SmZlJUVER7u7ueHt709/fj1Qq\n5cUXXwRgenoamUyGWCwmLCwMuVwuVKAhISE89NBD7Nq1i/7+ftzc3KiqquLMmTPs3r2bvLw8kpKS\nCA4ORiQSsXz5cg4ePPiDD++PEhTsdrsVeAI4DzQCR+x2e/1/d/3tXmhychKxWExiYiIajYbQ0FDe\nfvttoVoIDQ1l0aJFZGZmkp+fT3R0NGKxmKSkJDo6OigvL8fV1RWpVArc6ks7OjqIiYnBbrczMzND\nUFAQMzMzmEwmWltbOXLkCM888wz33XcfERERzM/PU1BQwIkTJ4iLiyM/Px+DwcDAwICQHcbGxvD1\n9WX//v00Nzfj7u5Oa2srS5cuJTk5GS8vL1JTU8nOzmZmZoaEhAQeeeQRqquriY+PR61WYzQaSUpK\n4uTJk3R336rqhoaGMJvNdHd34+zsTHV1NRaLBVdXV+Li4sjJyaGlpYXDhw8TGRlJfn4+/v7+TE9P\nk5OTw6ZNmwS+IiAggGPHjrF8+XKh3fL19eXGjRssX74cu91OcXExBoOBiYkJoqOjeeKJJ9i3bx9b\nt24lKSmJP/3pT0gkEry9vXFwcKC9vZ0DBw7g5+fH3NwceXl57NmzB6PRiLOzM59++ilKpRKFQsGO\nHTu4ceMGPj4+aLVa/vCHPyASifj1r3+NRqNhfn6epqYmtFotCoWCkydPAiCXy7l27RqhoaHs3LmT\ntWvXEhAQwNq1a/n6668ZHx9n//791NbWIhaL8fDwIDExEUdHR6amppDL5dTV1TE3N0dOTg6urq4A\n/PznP6ezsxOZTMbo6CgtLS00Njbi6+tLVlYWN27cIDk5mb6+Po4cOUJ7ezteXl788pe/xNPTk8DA\nQDo7O/Hx8eHcuXPk5+cDcPnyZaKjo9m2bRuXLl0iNjaWM2fOYLfbEYlEVFRU4Ovry8zMDOvWrcNq\ntRIZGckHH3yAUqmkqKiIvr4+uru7aW9vp6WlBbjVUqenp6NQKGhsbCQyMhInJycmJibw8vKitrYW\nHx8fVq1axffff49erycvL4+uri4iIyOpq6vDbrcTGRlJTU0Nzz333A8+vz8ap2C3288AZ37ItSaT\niZSUFK5cuUJqaqrQX+3bt4/u7m4mJiZwdHRkwYIFrFq1iri4OHbu3AmAk5MT58+f58EHHyQkJISW\nlhbh8C5atIjq6mpEIhENDQ34+vqSkpLC66+/jpeXF25ubkKvXFRUJDxMqVTKyMiIQOBdv36dFStW\nUFlZCdwKCvn5+Tz44IOIRCIOHjyIXq+no6MDmUwmRHClUsn+/fuRyWQ8/fTTODg4EBMTQ11dHXV1\ndfT19eHo6Ci0HC4uLvT19bFt2zZEIhE9PT24uLhQUFCAv78/GRkZLFmyBIVCQUhICMXFxdx77720\ntLRw/vx5kpOTycjI4Ny5c3R2duLv74+vry/T09O8/PLLbNu2DQ8PDyoqKpDJZFRXV7N06VIiIyMZ\nGhrCYDAAMDo6CkBlZSUhISG4ubkxOjrK+Pg4d999N87Ozpw8eZLY2Fh8fX25du0aaWlpADQ2NnLx\n4kUuXrxIVlYW0dHR3Lx5E4lEgkqlIj4+nu7ubsLDwxkeHiYjI4Ovv/6a5ORkANra2hgeHmb16tXE\nxsYyNjaG3W5Hq9WSlZVFaWkpr732GgaDAW9vb3x8fGhoaKCzsxOLxcLFixeJj48XWsmysjIAamtr\ncXd3Z3JyknvuuYe2tjby8/NxdXXl9OnTrFq1iu7ubqKjo9m7dy8DAwN89913jIyMkJaWxu9//3t2\n795NUlISa9eupbm5mS+//BKpVEp2djZNTU04ODgQERGB2Wzm8uXLODs7k5SUxPz8PHFxcdTX11Ne\nXk5ycjKvvPIK1dXV6HQ6oWpzcXHhH//4BwAdHR0EBARgMBhwdXXl3LlzqFQq0tPTGR0dJTs7m/7+\nfoaGhnB2diYyMpJly5aRkJBASUkJ9fX1NDU1ERAQgFqtFr7TH4KfhKPRwcGBjRs3kpmZSXh4OBs3\nbsTFxYVz586xbNkygZ1tamrC2dmZ9evXExISwunTp6moqCA6Oprg4GC+/fZbvv32W7Zv3w7AX/7y\nF6ampjh79iyzs7PCy93S0kJMTAypqalER0fj6elJQ0MDHR0dbN26Fb1eT2ZmJu7u7gwNDbFy5Uqm\np6cFgshsNrN3717OnDmDUqlkcnISNzc3Tpw4gVqtprCwkLq6OqxWK6Ojo9jtdoaGhkhISMBisdDR\n0YGXlxcrV67EYDDg4uICgKurK2vWrGFgYACtVktdXR0A6enpaDQa9Ho9Pj4+iMViPv30U2JjYwkK\nCiI8PByz2UxSUhINDQ1oNBpWrlyJUqkkMDCQnTt3kpOTg06nIy4uDrlcLjDnAHq9nqioKKanp+nq\n6sLNzY19+/bR0dFBf38/CoWCBQsWEBkZSWBgINXV1WRlZdHW1kZ5eTmLFy8mNTWVjo4OZmdncXd3\nJyIiglWrVjE8PIyfnx9Go5Hr168jEolYs2YNqampSKVSBgYG6Orq4vTp08CtQP7UU0/R3NzMkSNH\naGlp4cyZM1gsFnp6eti+fTv33HMPDzzwAFKplMDAQIxGI11dXSQmJnLXXXexYMECNm/eTEZGhsBV\neHt7s337dmJjY3niiSeEtqG4uJj4+HiSk5PZvn07EomEd999l1deeYWIiAj+9re/cejQIWJiYpDL\n5UgkEp588kmBB0pMTOT06dO8/fbbbNu2jdnZWerq6sjMzOTxxx/nZz/7GatWrcLT05PCwkJGRkYI\nDw+nvr6erq4upqamuHLlCmKxGJlMxgMPPADA7t27AQgODkYul7N48WLsdjsnTpzAxcWFr
q4urFYr\nDg4O9Pb24uLigoeHB99++y2dnZ1CVdHc3Mzo6ChDQ0M//Dz+H53mfxFEIhEffvgheXl5Ao+gVqtx\ncnKisbGR++67D61WS29vLy+++CJisRgHBwehdFy5ciX19fWsWrWKxYsXc+HCBQDuvvtuQkNDycvL\nw2QyMTU1xY0bN8jOzmZkZIT29nbm5uaIiopCJBJx/fp1BgYG2LBhA3q9nqtXrxIbG0tTUxM2m42C\nggIA4WDk5eWRmZnJli1baGlpYeXKlWi1WoGNl0qlREVFsWbNGoKCglCr1Wzbto2lS5ditVoZHBzk\n1VdfFTK0Vqvlr3/9K0uWLKG/v5/Y2Fh0Op1wrdVq5Xe/+x0ODg5s3ryZmzdvcvbsWQYHB1EqlfT3\n9xMcHExLSwutra0oFAo+/vhjampqhKAUFRXFsmXLCAwM5NFHH2X79u0EBwczMDCA0WjEzc2N2NhY\nqquriYiIwNHRkYsXL9LY2IhcLqe7u5tdu3bh5+eHm5sbiYmJuLi40NLSQmpqKunp6URHRzM3N0dZ\nWRlubm5IpVKByNVqtcKhiIyMJCIigvvuu4/s7Gzg/6nC2traqKqqor6+nvDwcKqrq6murubChQuE\nhYWxefNmjEYjBoOB6elpkpOT6enpITg4mMDAQPR6PYcPH8bPzw+AoqIivLy8OHHiBHv37mViYoLE\nxESysrLw8PBg7dq1gpSnUqno7u6mqqqKvXv3kpGRwfz8PI2NjdTU1JCTk8P4+DgAHh4e6HQ6Hnvs\nMXp6etizZw+Dg4N4e3vj7u6OVCqlv7+f4eFhJiYmyMjIYHp6mpqaGrKyspiamsLT05POzk7OnDnD\nyMgIAGKxmPvuu4/PP/+c6upq4Z1fuXIloaGhjIyM4O/vz8zMDMXFxZw8eVKolhwcHEhLSyM1NZWM\njAx0Op2Q0H4IHF944YV/ycH+P8Hvf//7F+Lj45HL5ZSVlfHtt98SFRXFqlWryM3N5W9/+xvBwcFC\nBr9x4wZhYWEsXLiQs2fP0tnZSW5uLs3NzRw6dAij0UhdXR2RkZGChhsaGkpQUBCJiYnodDoKCgro\n7+/HZDIxMTFBSkoKWVlZ1NfXMzMzIxz6trY2VqxYgdFoxMnJiRs3bpCRkcHAwACzs7MUFhbS39/P\nzZs3UavV/OpXv+Kbb77h2Wef5dy5c3z++ee89957LFy4kOzsbH7zm9/g7e2NWCzGzc2N0tJS0tPT\nyc/P58CBA0ilUsGfcJt9F4lEbN68mY6ODlxdXRkbG6OsrIyJiQlmZmZYsmQJ7u7uNDQ0UFNTg8Fg\nYHJyUlBqjEYjNpuNpqYmFi5ciFQqZfHixRQWFmI0GvHy8iIrKwu73Y5CocDZ2ZmRkRG++eYbgoOD\niY6Oxs/Pj+HhYSYnJ+nu7qa+vh6TycTc3BwGg4HGxka8vb3R6/WEh4cLqk9lZSUajYba2lq2bt0q\nyIAGgwGxWExfXx8zMzOIxWLOnz/PggULCAgIIDU1FU9PTxwdHfH09KSjowOVSoVIJGLx4sXU1NQg\nkUj4+uuv6ejowM3NjY0bN+Lt7U1RUREVFRX4+PiwZMkSPv74Yw4cuOW0X7duHfPz8zg6OtLb24tM\nJiMxMRE3NzdOnTpFVVUVp0+fZvv27aSlpfHVV18JzysmJobu7m4cHR2Jjo7m4MGD+Pj4IJPJ6O7u\nxtPTE71eT3x8PAEBAaSkpHDkyBEGBweZnZ1Fo9HQ0tJCX18fAQEBXL9+nYiICCYmJhCJRISFhWE0\nGiktLSUzM5P333+fsbExQkJC0Ov1LFu2DBcXF6xWK3a7ncHBQWw2G15eXmzatIk///nP9Pf309/f\nj0ql4tixY5SVleHp6UlMTAwFBQWDL7zwwof/03n8SVQKXl5eLFmyBAcHByFzm81mpFIpSUlJgtS2\ncOFCKisrWbx4MW+88QZzc3Ns3bqVnp4e/uM//kMg/RwdHQHQaDQ4OzvT29vL2bNnBbb5s88+Q6vV\n0t3dTXBwMGlpaVy5cgVXV1c6Ozs5duwYAL29vcTFxWEymejr62P//v0AQkadnZ3l/vvv5+jRo+zY\nsYOdO3cyNTXFqlWr+PDDD2lvbycpKYnly5ezc+dOVCqV8GIvWrQIBwcHNBoNnZ2dADQ1NbF48WKq\nqqrw8PDAYrHg5+dHcnIyp06dws/Pjxs3brBq1Sq2bdvGli1bsFgsmEwmAgMDiYiIAMDR0ZHIyEiy\ns7Nxd3cnOjoatVqNg4MDVquVS5cu8Ze//IXu7m6KiorYt28fjY2NWCwW9Ho9dXV1JCYmsn79ejQa\nDQALFixALBYzOztLe3s76enpWCwWOjs78fb2ZufOnSgUCrq7u+nq6mLr1q2IxWLi4+MxGo1CNdbc\n3ExERASxsbGcOHECHx8fAEpKSoT7nD17FqPRiL+/P7W1taxduxYXFxcyMzMRiUQYDAbWr19PZGQk\n99xzD7Ozs/T09HD16lX6+/uZmJhg3bp1+Pj48NVXXwGQn59Pc3Mzhw8f5qOPPiIwMBBHR0cGBwcx\nGo0MDg7S2dmJRCJhfn6e1NRUhoeHiYmJIScnhyVLlnD16lUmJibYv38/r7zyCnBL+RCJROzfv58F\nCxYwNDREb28v7e3tvP766wwODiKRSKitrcXT0xO5XI6bmxv19fVYLBZGRka4efMmIpEIV1dX4uPj\nAfj+++954oknePDBB0lLSyM2NlaodqamphCLxZw8eZL6+noGBgYECXZ8fJzMzExcXV3ZsmUL69ev\nFxLHD8VPolJ45ZVXXrhdgiYlJWE2m1m9ejVarZbJyUnkcjmdnZ3Mz8/j6upKSUkJK1asoKOjQ5CI\nlEolLi4uaDQaQZO+77770Gg0BAUFodFoBF38+PHjLF26lH379rFz505+/etfs3LlSnx8fAgLCyMz\nMxNHR0eam5upqKigp6eHRx55hKNHj3Lt2jWioqJYsGABZ86c4ebNm6xatQqRSIRGo8HR0ZGxsTGy\ns7Ox2+2sX78eNzc3JBIJra2tVFVV0dPTw4oVKxgbG0On03Hvvffy7rvvCnyCRCIR9P3CwkIGBwdZ\nvXo1RqOR5ORk6uvrhaolMTERqVTK9PQ0Dg4OVFZWolarEYvFTE1NUV1djdFoJCYmBqVSiVQqpb6+\nnrS0NIxGI2+++Sa5ubls2bIFmUyGXC4HICgoiBs3bjA7O0tmZiZdXV20t7dTUlJCeHg4VqtVIE3V\narVQIstkMpRKpZD97733Xnp6eoSyNiUlhaqqKtrb2zEajQQHB3PlyhVCQ0MpKirC39+ftLQ0QkND\nOXz4MKtXr6azs5OqqiocHBzIzc3lyJEjGAwG+vr6OHr0KKtWrSI4OJjCwkJmZ2fRarVC9nd2dubM\nmTMcO3aMzz77jOXLl1NQUMCWLVuIiYkhJCSE/v5+vv/+e5KSkmhsbOShhx6ipKSE6OhohoeHyc3N\nRalU4uTkhFarFYLGN998g1qtJj09naGhIYaG
hoTqKSgoCJ1OJ1QGEomE8PBwdu3aJXhybrtnd+/e\njV6vJzk5mWvXrqHVann00UexWCxcuXIFuVyOWq1GIpEgl8u5fv06zs7ObNy4kfn5eUJCQoiIiECn\n0zE3N4dEImF0dBS9Xi987pKSEurq6n5QpfCTCArvvPPOCxkZGczNzQkvytWrV2ltbeXixYvMzMxQ\nXl7O6OgodXV1aDQa0tLSSExMRKlUEhoaitFopLe3l+joaGpraykuLmbjxo1UVVVhsVgEO/BtB9x9\n992HWCymvr6ebdu2IRaL+cc//kFNTY1gYR4ZGRFMO46OjnR2dlJSUsL+/fvp6OgQ3HrBwcGo1Wqk\nUil//vOfCQ8Pp6OjA4D+/n7m5uZwcXFBIpHQ1dWFj48PNTU1qFQq+vr60Gq1lJSUkJ2dzdDQEI6O\njkilUsrLywUpsbCwkFOnTtHW1oZEIiEjI4OGhga6u7uRy+WMjo4il8vp7+8X/AMNDQ14enoSHx/P\ntWvXUKlU1NTU4OLiwpUrV/jtb39LaGgoKpWKyspKjhw5Irg5y8vLuXTpEgkJCdTX1wut3Zo1azAY\nDAwPDwv3jY6Opri4mLa2NqKioti0aROzs7MUFxczPT3NokWLCAsLo6+vj5iYGDo6Ojh69Kggm7m5\nuWG1WinpRJcDAAAgAElEQVQvL2fbtm188803XLlyhfDwcJycnEhNTRUIZbVaLfTdtwnS6OhonJ2d\nmZiYQKVSYbFYhPassLAQrVbLs88+i1qtxsPDA1dXV9zc3Pj888+ZmpoS1JGAgADWrVuHTCYjKSmJ\nBQsWMDY2Jljke3t7iYqKIjs7m5s3b/Ltt99y//33c+nSJXQ6HStWrKCvrw+dTodcLsfT05Py8nJi\nY2Mxm81s2bKFs2fPsmbNGgICAlCpVIIVXavVotFoMJvNFBUVERQUhIuLCw4ODshkMkQiESEhIYyM\njGC1WomIiGBmZoawsDCOHz+OzWYjPDycvXv3snjxYuH9vO3lACgrK/v/Tvug1+vx9vYmOTmZwMBA\nEhISWLt2LWNjY8jlcr799lvKy8sJDAwkPj4eu91OaGgos7OzODo68vbbbzM5OcmePXtwcHAQzFCN\njY1s376dZcuWIZVKCQ0NFfpcmUwmRNy2tjY+/fRT1Go1AwMDeHt7k5KSgp+fH0qlkqysLJqbm7k9\nUVpfX09PTw+jo6OsWbOGtLQ0nJ2dSU5ORiqVMjQ0RGBgIPX19fzxj39kampKIJRWrVpFTk4OaWlp\n6HQ6AObm5oBbJFtycjKdnZ2Cf769vZ0vv/ySwMBAoqOj8fb2Zs+ePXR1dTE8PExsbCxubm60tbXx\n7bff0tvby9jYGL/61a+IiIhAr9dTW1tLREQEY2NjzM7O0tnZic1mo6amhuHhYY4ePcro6CiLFi3C\nYrEQHByMg4ODwHjr9XrBm3Hbg2+xWHBxcSEkJAS73Y6/v79AfJ08eVIIhlqtlrfeeouamhri4+OJ\njY3lypUrWK1Wwbnp4+PDhg0bgFtu1OjoaJ5++mlCQ0Opr68nPz+fqKgoiouLuXz5MjqdjoiICEQi\nETk5ORQUFFBUVMSOHTuQyWTcf//9ODk5UVpaysaNGwG4ceOG4PxcuHAhzs7OZGZmEhMTQ3JyMjk5\nOfj7++Pt7c2VK1c4ceKEQDY2NzfT3NyMRqNBJpPx8ccfc+XKFeBWi3nb+nz27FnBBn57nmLx4sUU\nFBRw6tQpjh8/ztzcHBcvXqSrqwuFQiGoC3a7nYsXL9Le3g7cklBvz3bc5mKUSiUA3333HefOnUOp\nVHLkyBHi4uJwdHSktrYWuVzOZ599xqpVqzh//jxdXV3odDoGBwd/8Hn8SQQFd3d3rl+/TkNDA9eu\nXaOiooJly5axZMkSHnzwQRwdHVm6dClXr16lsrKS8PBwqqqqmJqa4uDBg+zdu5fExETm5+eZmJgQ\ntGmbzYbRaKS4uFgo7W8bTWw2G35+fphMJrRaLXa7XZDo3njjDaqrq6msrKS8vJympiZiYmIwmUwA\nAg+xadMmCgsL6enpoaysDLPZzK5du1i4cCFvv/02c3NzBAcHY7FY8PDwwMvLi4aGBo4fP055eTkK\nhUJwa8Itv8batWvx9/enoqKCpKQkAgICCA4OxsfHB7VajZ+fHzKZjKamJqqrq4mLi2NkZASVSsXG\njRuZnZ3FYDBw9OhRNmzYgMlkErLG5OQkU1NTVFVVkZqayquvvsrly5fRaDQ8+eSTqFQqent7CQwM\nJCoqiri4OCQSCTabjd7eXkQikfDiBQcH09PTw/T0NNPT04yNjeHp6YlWq6W8vJy+vj7y8vIICQnB\nxcWFuro64uLimJubY3BwkB07dlBZWSkMpN1m8w8dOkRiYiIWi4WmpiZSUlLo6Ojgb3/7GzExMYSH\nh5ORkcHExAQGg4Hf/va3+Pn5Cfbn8fFxFi9eTGZmJlFRUYJ0uG7dOtauXUthYSFnz54lMTGR1tZW\ndDodUVFReHh4YLVa6ezsZGJiAgcHBy5duoSPjw92ux2z2UxnZyeHDh2ioaGBTZs2Ce+vv78/drsd\nFxcXgoKCiIyMFDiEtrY2zGYzycnJQtKAWxZ8kUiE3W6ntLQUtVpNQEAAjz76KAAPP/ywIO3+53/+\nJxKJhJaWFkJDQzlw4AAGgwGtVktERISgsNxWMxISEigrK+PAgQNER0fj5OREUlLSDz6P/2sDUf8V\nNpuNxYsXMzc3h1wux2azUVhYSEJCAp999hmbNm3i6tWr7Nq1i6eeeoqhoSE++ugjxsbGhEw5OzuL\nUqlkfHycjIwM6urquOeeezh27Bi5ubnU1dUxMTGB1WplZmaGN954A4lEwoYNG1Cr1SxbtgwfHx+B\njMvMzGTXrl0MDAygVqv57LPPhNmHkJAQ+vr6SE5OFibm1q1bx+zsrKC5d3R0sGLFCoE7uHLlCgMD\nA6SlpWEwGLhx4wZWq5UzZ84QHBwM3DINTU5O4uLiwsTEBCaTicbGRmGCUK1Wo1AoOH36NH/961+5\ndu2aMFPw+OOP09fXR0ZGBg4ODixdupTx8XGWLFlCU1MTN2/eZPfu3URFRVFRUcHc3JwwrxAREcG5\nc+fIysrCycmJnp4eoqKigFvVVkREBN3d3UIPPD4+TnJyMrW1tWg0GoKDg/nwww/p6uriN7/5DRaL\nhTfffJPLly8TFhbGCy+8wL59+xgYGEAmk+Ht7Y3RaEStVhMXF8fx48e5efMmcOswODs709fXh4eH\nB4WFhcTFxZGenk5jYyMmk4mnn36aDz74gKioKOF5jY+P4+TkxNatWzl//jwtLS2sWLFCmCUoKirC\n19cXg8FAREQEf/zjH8nLy0OlUjEyMkJeXh7vvfceFy5c4JlnnuHQoUNMTU2RnJzMyMgINpuNxMRE\nGhsbycnJ4fnnnwegpqaGgYEB/P39USqViEQiioqKeOmllwgKCqKpqQmDwUBmZiaffPIJDz/8MHa7\nndb
WVs6dOyfMk7i4uLBr1y4+/vhjAP793/+dvLw8PDw8SEhI4OOPP2ZoaEiQGdetW4e7uzslJSVM\nT0/j5eXFzZs36evrY3x8nMTERGJjY3n33XeJjIykp6fnB5/Hn0SlMDMzg1KpZNmyZRiNRiYnJ5mY\nmCAoKEjIDnv37mV0dJSLFy9SWlpKbm4uRqOR+fl5pqenOX/+PDMzM4SHhwsOwZdeeonMzEw+//xz\nrFYrQ0NDAsPs5eUlDL10d3dTVlbGxYsX6e/vRywWU1xczCeffEJnZ6dgHZ6cnARulYx+fn68+uqr\nArvc0dGB1WrFZDLh4eHBPffcg5ubG01NTQwNDZGcnCxYgGNiYggMDCQkJASZTEZpaSkAW7ZsYXBw\nEJVKhYeHB9XV1UxPT2MymTh37hzt7e2C/JeYmEhbWxs2m426ujr0er0wb+Dq6spXX31FfHw8DQ0N\nhISECH+fwWBg4cKFdHV14eDgwNDQkMAFnDt3jkWLFqHVanFwcBCUhLa2NhwdHWloaBAkTkdHR3x8\nfKisrGRycpKXX36ZnTt3EhUVhUql4tlnn6W3txepVMoHH3yAh4cHUqmUq1evYrfbOXLkCMnJyZw+\nfZrp6Wnuuusu4FYQeuKJJzAajczOzrJhwwbef/99zGazoD6UlpayYsUKvLy8yM3NZc+ePTz00EPs\n379fmAj19PQUphzhlu4/MDCAXC6nvb2dK1euUFhYyDPPPMOFCxd47LHHGBsb44EHHuD48eMEBwcL\nY/rp6emcOHGCd999l8nJSfLz84WMLxaL2bVrF2azmZUrVyKVSjEYDHz++efMzc2hUqnIzc0lLCyM\nq1evMjQ0hFgsRq1Ws379elJSUlizZg19fX1UVFSQk5MDQF5eHgqFgrGxMfr7+1m0aBHZ2dmChBwf\nH49OpxPmgNrb2wV1o6ysjO7ubr777juSk5OZmpoiLi7uB5/Hn0RQ8PDwIDQ0lPLycrZs2UJ6ejrO\nzs7I5XICAgLQaDR4eHgQFBSE1WrFarXyxRdf0NTURG5uLvv27SMuLo6ysjKKiooES6ePjw+lpaWs\nX79eGCsNCAjg6NGj9PT0IJPJ2LNnD2azmbq6OoaGhnB1dcXLy4tly5aRl5cnBIiSkhLBWm21WnFy\ncuKuu+4iMDCQhQsXYjQauXnzJpGRkSxdupRDhw6h0+nIysoiKCiIpUuXsmDBAg4dOsTx48cZGxvj\n5MmTmEwm7r//fuCWlNjS0iIQSjabDTc3N4KCgli0aBHj4+PY7XYyMjIICAigtbUVm83GzMwMv/jF\nL1i+fDlubm6oVCpWr15NbW2t4FRcvny5wGB/9NFHgkpym0Cbnp7Gw8MDg8HA2NgYBQUFwpTq999/\nT0BAAEqlUpBJb09ebtq0CalUip+fHxqNRrBD9/X1kZKSQnNzM8XFxeh0OkJDQ/H39yckJISYmBiO\nHz8ulPm3yUMnJyd27NhBb28vSqUSlUpFYmIigYGB1NbWMjQ0xLPPPsuFCxcE6fH28NZzzz2Hi4uL\nkHm3bNlCWFgYAN3d3bS2tuLv7092djZPP/00Wq2W2NhYbDYb69evJzk5GW9vb65du4aTkxM2m43m\n5mb+/ve/s3v3btLT09m8eTMSiYRXX30VgKSkJKFV6O7uRqfT8eSTT5KWlkZdXR2+vr6YzWY8PDx4\n/PHHqaqq4uzZs8KeidHRUXp6enjggQeYmJgQBriqqqqYmZnBycmJyclJEhISUKlUxMXFUVtbS3Nz\nMy0tLWg0GmGmpqWlhaGhIbZs2SK0aUNDQ/T09Ag82w/BT6J9GBgYYHx8nMnJSex2u7Br4HapFxUV\nRVVVFeXl5Tg6OqLX63F2dmbbtm2UlJTw8ccfo1KpmJ2dJS0tjYaGW1vfduzYQUVFhbC0RK1W4+jo\nyJtvvklpaSmenp5oNBqMRiO/+MUvUKlUFBYWUl1dTVlZGdnZ2XR0dNDW1sbDDz8sTEl6e3tTUlLC\n8uXLOX36NGlpaWRnZ/Pll1/yxhtvUFBQwN69e7l69SpGo5EnnniCv//975hMJnp6eli7dq0gYTY2\nNgoavU6no66ujujoaNLS0oSBqWeeeYbx8XEhuNXW1rJ7925sNhunTp1Cp9Ph5OREeno6NTU1jIyM\nEBcXx+XLl9m9ezfvvPMOrq6uKBQK3nnnHWZnZ9m8eTPt7e1cu3aN9evX4+PjQ3l5OUNDQ6xevRq5\nXE5hYSEpKSmUlpYSFBREW1sbFouFuro6vL29+e6774iJicHV1RVXV1eCg4MZGRmhu7ubnTt30tXV\nxffff09tbS27du3i+PHjDAwMoNFohN0MBoOBhIQEFAoFcGv2Qa1WMzU1hUaj4ciRI2RkZKDRaLDb\n7Vy/fp333nsPqVQqOPlGRkbw8PAgMjKSsLAwamtrMZlMZGVl8dBDDwEgkUjw8fGhrq6OkydPEhkZ\nyeLFi1EqlUxMTPDCCy8QFhbGpk2beOqpp9DpdMzOzrJp0yb6+voYGBigtLQUX19fJBKJMGh1mw+J\niIjg+vXr+Pv7U15ejre3NydOnCAzM1NYhBIfH8+OHTs4deoUQ0NDFBYW4uvrK7S1CxYsQKvVAgjz\nMyEhIYK9esuWLYyMjDA0NCQQ3ykpKeTk5KDX6/H39xf+jUwmY2RkBIVCQUJCAjKZ7Aefx59EpeDn\n58dXX32Fq6sr169fZ+3atezZswe1Wk13dzdfffUVg4ODODo6Cm3A+vXrmZubQywWs2LFCqRSKb6+\nvjg5OVFff2sg88svv6Sjo4O5uTlGR0epra1Fp9Px3XffMTU1xcjICNeuXaOyspJPP/2UgoIC5ubm\n6O/vZ/v27QwPDyMSiZibm2NycpKQkBDgVtZ58sknqaioID4+nrNnz/L555/j4uLCm2++yaJFi+jq\n6sLV1ZXh4WE++OADli5dil6v57777kMul/Pyyy8zMjKCXq8nIyMDgNnZWQICAoBb1ZNMJuOxxx5j\ncHCQyclJQepKTk4WBqnUajUpKSlYrVbGx8fp6OgQVJOVK1dy4MABdDodU1NTyGQy2traWLBgAS+9\n9BLPPfcceXl5BAcH093dTUJCgtAiVFRUcOPGDfr6+li4cCFXrlyhrq4Ok8mERqOhsLCQlStXEhQU\nhFgsxmAw0N7eTnt7O4mJidTV1QmLT4KDgwkODsZsNhMdHU1DQwNzc3P4+/uzZs0aFAoFzc3NwC0L\neWBgICKRiNnZWXbt2oVMJmNwcJCZmRm2bt3K2NgY5eXltLe3Mzw8zLVr14iMjCQkJITLly8zMDDA\n1NQU6enpqNW3Fg35+vpy/vx53NzciI+PZ35+nkceeQSDwYBer2fDhg2cO3cOi8WCSqUiKioKd3d3\nIaBeuXKFvLw8CgsLBSIVICEhgcbGRtRqtTCBe/tvfuKJJxgaGhLkycbGRn71q1+h1Wqprq5mcHBQ\nUC/Gx8e5ePGiMELu7u6OXC4XAt38/Lzg1ry9Iex2pWGz2YSW
MykpiZGRESHh3X73r1+//oPP408i\nKOh0OgYGBhgZGcFut/Piiy/S2NgoZJHbM/lms5nQ0FBhrdqNGzdwd3fHYrGwYsUKwa9/e0Zhy5Yt\n/OY3v2FqaoqCggKSk5OZmZkhPj5e4Ct6e3uJjIxk+/btggS4adMmQcJycHDAbDbj5eXFwMAAcIsY\nfe6555idnQUgJiYGmUzG8PAwZ8+e5dKlS8hkMiYnJ4Whlenpaebm5rh+/Tp6vR6NRsNrr73G9PQ0\nTU1NAMLKMplMhslk4r333uPcuXMMDw/j6elJfn6+UNYrlUry8/Pp7u5menpaWCW2cOFCxGIx7e3t\nmEwmMjMz+fnPf46Pj49gq72tDNTW1lJRUUFfXx91dXVotVpSUlJITU0lKCiI7du3ExoayvXr15HL\n5dx11134+/sLOyz6+vpQKBQEBATQ0NBAUFAQDzzwACaTidzcXPR6PX19fczNzXHkyBHBXOPs7IxO\np0Ov1/P1119jMBhITEwEYOPGjUxNTSGRSJDJZHz++efExsYKTsWLFy9itVqZnZ1FJBLh5uZGcnIy\nvb29uLu7Y7PZSE5OJi0tjRdffFFwi2o0Gh577DFB1UpJSaGiooIPP/yQq1evEhgYKJCut5f+bN26\nlQcffBAAlUpFWVkZ7u7uHDhwQJD43Nzc2LFjB6WlpXh5eSGRSOjt7SUpKQlPT08efPBBamtrCQoK\noqqqisTERKampggJCSEsLAw/Pz8uXLhATk4O6enpAjF6ewXe66+/zltvvSWQxREREUJr/dZbb2Gz\n2Whvb+fo0aN0dXUJnoazZ88yMzNDTk4OpaWlgpz5Q/CTCApSqZTo6Gjsdjt9fX0EBQVRWVmJUqnk\nnXfeQaVSERISQlJSEiUlJcTGxmI0Glm0aBHt7e0sWbIEb29vYafg7Vn3M2fO0NDQQFFRERs2bKCg\noIDw8HAMBgPLly8XyMFNmzZRWlpKWVkZOTk5DA8Po9Vq8fHxISAgQGgTbmediYkJ4cvu7u5GJpMJ\npheNRsMXX3yBRCIhMjKS+Ph4du7cSVtbG7GxsWzZsoXh4WGysrKIioqipaUFb29v4BYHctsbYDab\nWbx4MWNjY4SHhws7FiorK7lw4QKdnZ0EBAQQExODXq8XBq0KCgqora2lqamJmZkZRkdHeeWVV9Dp\ndISFhREWFkZDQ4MwtbhgwQIAQkND8fX1ZeHChdy8eVNYQ2c2m1m4cCEDAwPo9Xo8PT0FR15WVpZA\ndlksFsrKyhgdHWV2dpb3339f2D9oNptZunQpIpGI7u5uoZ1bsmQJrq6uqFQqYdz90KFDVFRU4Onp\nicViYf369dTW1nLx4kXuuusu5ufnBbb/9nPx8vKitbUVi8VCdHQ0RqMRgI8++kiYQL169SrFxcV8\n+eWXdHZ28sEHH3Dy5Em2bduGSqWiuLiYzZs3YzAYKCoqorKykpGREWFEe9euXRgMBmJjY5mcnBT2\nHoyNjTE/P09NTQ3Ozs40NzejUCioq6vj/PnznDlzhvn5eVxcXAS5cdu2bYLJ6bXXXkMkEvHee++R\nn5+Pp6cnAD09PUxMTJCVlcW2bdvw9/fn5MmTlJaWcuzYMSYnJ3nxxRdxdXXl7NmzAnfi6uoqLCu6\n7RJOSEgQpkV/CH6UFe//byESif73P8Qd3MH//1Fpt9vT/6eLfhJEo6+vL4cPH6a4uFjYBHy7lM/N\nzRV21d28eZM//OEPqFQqPvroI2FOob+/X1hIcbvN+PDDDzl48CAvv/wyn3zyCY6OjlRXV9PQ0MDw\n8DASiYS0tDSUSiXx8fG8//77JCQkCLrwzMwMjY2NwibmpKQkXnnlFfLz8ykpKeHVV19lbm6OPXv2\nUFBQQFJSEh4eHv9kN33qqacYHBwkOTlZ2I1gMpnYsGED169fx93dHZVKxfXr13n//fd5++23kUql\njI+P09zczIYNG+ju7kYqlVJSUkJYWBhBQUFcvXqVlJQU4Famqq6uZvPmzQwODrJ8+XJOnTolyFRr\n1qwhNzeXy5cvMz09LSzkePjhh3n11VeprKwUeIgVK1bQ2NjIwMAAjzzyCO7u7lRUVFBTU0NUVBQv\nvfQS7/5f7L13eJVluvb9W+ltpa/03lZ6ISGNhCQk9CYBKSJNYHBQZ+wyDrMHHVFUGHUQQRQUkV6U\nEiRAGimQkA6k994T0vt6/2DW/e45vvd4t9+33+873Me3739CErISwvPcz3Vf13n+zq++orOzk9zc\nXGQyGdHR0RQUFGBnZ0dVVZUQi+nr6xMcHMzIyAizZ88mNzeX5uZmFAoFFhYWBAQEcOPGDTEW9vT0\n5KWXXuKVV17BxsaGyspKCgsLCQ8PR1VVlXfeeYfXX3+d/v5+cnNz2bZtm6iyli9fjrGxMU+ePBFi\nILlcLo4a3377LW+//bZArWVkZLBjxw5+/vlnQkJCyMnJITk5me3bt+Po6EheXp6ggEdFRZGVlQU8\nHUXPnDlTiJSsra1JSkqioKCAmzdvIpPJRJNWaVL78MMPCQgIID8/n+XLl9PT00NycjIffvgh2dnZ\nAqI7Pj5Oenq6EJX9+OOPDA0N8dlnn7F3714qKirQ19dHR0eHyspKvLy8+Pnnn5k3bx4PHjyguLiY\nLVu2IJfL2bdvHx9++CGJiYn09vYKncnevXt/1f34m/A+7N69e09nZ6cogSYmJhgcHGTt2rXY2Ngg\nlUqxsrJi165dVFRUEBgYiLq6OgEBARw5cgRzc3MGBgZwdXVl1apVSCQSLl68yBtvvIGZmRllZWWM\njo7S2NiIk5MTsbGx6Orq0tfXx+TkJI8ePaKwsJDo6GhsbGxISkrC0NCQhw8fIpFIkMlkwtabmJiI\nlZUVqqqqLF68mObmZhwcHHB3dxfe+oaGBkJCQgQ2va+vD5lMJjDc0dHRZGRkCApRaGgoZ86cITQ0\nlKysLBwdHdm+fTs3b94U4ihl76ClpYV79+4REhLC+fPnGRkZwdPTk6ioKBwdHQVbwd/fn/7+fuLj\n4zl48KAgLFVUVNDe3s7cuXMpLCxELpfT1NREeHi4ODIp+QpK7HljYyOqqqr4+PigUCi4efOmYE2e\nPHkSKysrMjIy2LhxI5mZmYSFhQFPey/e3t5cuHCB6elp1q5dKwxULS0t6OnpCQZAT08PDx484K23\n3mJgYIBZs2bh4eHB0aNH+fjjj1m0aBFff/21eP2JiQmWLFlCSUkJFRUV6Ojo0NbWRl9fH42Njejq\n6jI4OIi+vj7p6els2bKFxYsX8/rrr7Nx40bq6+vF8c3e3p6RkRFsbW2FZT07O5uSkhJRwhcWFjI5\nOcnExASXL18GnqoSR0ZGSE9P59VXX8XY2JgrV67Q29uLp6cne/fuJSoqipqaGoKCgli0aBGFhYX8\n/ve/59q1awwNDREVFcWFCxcwNDREV1eX4OBgLl++zPz58ykpKWHTpk0iwqC9vZ3c3FzmzZtHf38/\nurq6fP7556xatYr
IyEhSU1MJDg4mLi6Ovr4+iouLhTmruLiYoqKi/zreByWRdnp6GrlcjomJCQsX\nLmRqaor79+8jlUpxdXXlzJkzGBkZUV5ejpGRERKJhDVr1uDj44OzszM6OjqoqKiQmZkJPLVkNzc3\n8+TJE9EMHBoa4saNGxgZGQnqz5MnT4iIiBDnYQ0NDc6ePYuvry9lZWUUFRUxOjoqvArPPvss4eHh\nTE9PExwcLJ72CQkJ2NjYCJVdf38/M2bMQCqVCgSZEsjh4eHBzZs3cXBwEM0wDw8PFi9eTEFBAQcP\nHkRLS4s5c+agq6uLq6srvr6+WFlZ8eGHH1JQUEB0dDTT09PU1dWRm5vLpUuXcHNzo7a2luTkZObP\nn8/PP//MzJkzOX78uKA5KVmEhoaGJCYmIpVKMTIyoq2tjbS0NKqqqjA3N2dqakr8nqKiovD39+fC\nhQt4eXkRFhZGXl4e+vr6tLe309XVRVtbmzBhjY+PK7MGmDNnDjt27ODq1aviBmxqasLLy0uAWpSN\nMCMjI1xcXMjLy+PIkSO8//77VFZW8qc//YkvvvgCFxcX4uPj6ezsJDs7m7i4ONatW4eVlRX6+vo8\nePAAZ2dnioqKyM3NFdkPWVlZHDp0iLVr19Lf38/U1BQSiYTY2Fj6+/vZuXOn6E/IZDKee+45HBwc\nWLRoEbm5uURGRhIUFIS3t7egTsFT8tKxY8e4cOGC4GvIZDIRR6DMl/Dz82Pfvn3o6+vT2tqKh4eH\nqHwOHDggwDbffPMN8NRGL5FI0NPTIysri9HRUSoqKnjy5Am3b9+mu7sbOzs7nn32WQoLC2lsbMTC\nwoJjx45RUlJCaWmpyOHIzs4mPj7+V9+Pv4lK4ZtvvtkzNDSEo6Mj/v7+uLq68uDBAxISEli8eDGJ\niYnExcWJC/Dx48fiRlOiyxsbGwUcxNHRkXPnzolxmbOzM6dOneK5557j1q1buLm5oVAoiI+PF2PP\nqakpYU+ur68XN4qqqiqLFi1CXV0dQ0NDAeBQKBSkpaUhk8l48OABa9euFaYUiUQiPAeHDh3CwcGB\nGTNmcPv2baGlV7r/4KlN+eTJk3R1dQl/vp2dHaampjQ1NYmAmaqqKoKDg8nLy2PdunWkpqYil8tx\nd3fH0NAQGxsbmpubiYuLY2xsjK+++orw8HB6enqEq3RycpKEhATmzp3L8PAw8+bNA57SjsfHx0Vn\nvCtlSLIAACAASURBVLe3V/y7IyMjaWhoED+/uro6vb29GBsb09XVhaqqKmVlZYyMjKCiokJZWRnO\nzs40NTURGBjIkSNHBIxFJpNhbm7O5OQkFy9exMvLSwBc0tPT2b59O48ePeLVV19FLpdjZGREQUEB\nTk5ONDY20t3dTWlpKTdv3uQPf/gDBgYGlJSUcPfuXaqrq9m2bRtnz55l9uzZ7Nixg76+PhISEli9\nejUqKioCZaYM3uns7KSlpYX8/Hzmzp2LmpoaaWlpAvXW19fHrVu3cHZ2RiqVkpqaKtyz165dIyAg\nQAT5mJqaClCqoaGhAMgYGRkhl8s5fvw47u7uZGVlIZPJOHbsGBMTE5w5c4aWlhYBLE5LS8PExAQL\nCwva29txcHAQCtD58+eTnJyMh4eHuIZOnz7N9PQ0b775JllZWdjZ2VFaWoqWlhYVFRUsWbKElJQU\nHjx48F+nUpieniYmJobAwECuX7/OgQMHmDNnDnPmzCEhIQF4Cp1QVVXl559/Zvny5aipqVFSUoKq\nqiptbW3cvn2b3NxcamtrxZNXJpOhr6/PyMgI1tbWFBcXs2DBAiIiIgRJWFnaKhQKdu/eLSYY1tbW\npKWlMXfuXAYHB/9FEdbW1kZzczNyuZyamhoCAgKora0lOzsbGxsbtm3bxrp16wgJCeG9995j6dKl\nZGZmEh8fj5+fn8DE9fb2YmpqKlRstra24ogQERGBqakpFhYWIkHKxsaG7u5uVFRUeP3119mxYwf1\n9fW4urrS19eHkZERMTExXLlyBTU1NZYsWUJdXR3Nzc3cvXuXu3fvUlBQwOuvv46lpSWRkZGUl5cz\nMjLCypUr+fjjj/nkk08YHh5mcHCQF154gY6ODsbGxqivryc+Ph57e3uCgoLw8fFhyZIlqKqqsmLF\nCo4ePYpCoWDJkiXExMRQUVHBa6+9JijPVlZW3Llzh87OTg4ePMjQ0BCrV68mKyuLEydOCJekkZER\nlZWV/Pjjj2RlZZGUlER9fT15eXmYmZmJTWXDhg1kZmZy4cIFJBIJP/74I0uXLiU9PZ2goCDmzJnD\niRMnqKurA2Dnzp3o6uqKCu/8+fNkZ2fT3d1NVFQUb731lkC7BwQEUF1djVwuZ9myZTQ2NnL06FHK\ny8tZv349QUFBjI+PA0+ru56eHu7evYu1tTVlZWVERESQlZWFiYkJTk5Ogqu5c+dOAgMDBbl8586d\n9Pf387vf/Y7w8HB8fX0FICg+Pl74OVpaWnjhhRfIyckRSk9tbW3q6uowMzPjwIEDzJ07V+gzfvzx\nR2bOnImDgwN+fn7U1dWxZcuWX30//iYqhaNHj+5xdXVlbGyMmTNniuAPiUTC1NQUXl5ePPPMMxgb\nG6NQKCgvL0dHRwcrKyvhm3/vvfe4c+cO/v7+REdHc/DgQaysrHj06BFhYWH09/fT3NwsIJYuLi6U\nlJTg4uIiYshcXV2Ftryjo4M5c+ago6NDUlISYWFh1NfXc/v2bWbNmkVXVxehoaECGaahoYGzszNp\naWnExsaSk5PDxMQErq6uFBcX09DQwODgIG5uboyNjaGurk5eXp54/UePHhESEiKCVyYmJnB2dkZb\nW5srV66grq6Ol5cXP/30E0FBQSxYsAC5XE59fT0ZGRlERUWxbNkytLW1uXTpEn19fQwPDwswi5+f\nH0FBQQL2cfXqVZYsWcKjR48wNzfn+eefR0NDQ2QqzJw5k97eXrq7u2lvb8fAwAAdHR3Mzc2xtLSk\ns7MTQ0NDNDQ0GBwcRFdXl8jISO7du4e6ujoDAwOkpqbS2trK4cOHheNyfHwcR0dHkpKSUCgUbN26\nlfDwcB4+fMiNGzewtbVl4cKFVFdXM2PGDFRVVYWDcHBwkICAAHx9fbGwsMDKygpXV1du3LjBrl27\nKC0tJSIiAn19fS5evIiuri4ODg5cvXqVVatWidLeyMgIS0tLhoaGaGxsxNTUVOgOlBF70dHRojeh\n3CS3b99OSUkJUqkUa2trjh8/jlwuR1VVVbgoo6Oj0dDQIDw8nLNnz1JZWYmjoyN+fn5MT0/j7u6O\nubk5OTk5VFVViYZmYWEhmpqawnFrZGTEO++8Q1NTE1NTU3R1deHr6yv6VEoTlJJ1WV1dTW9vL/n5\n+djb2wvcnI6ODjo6Opw4cYKKior/OpVCZ2cnnp6ejIyM8O2334oSVyaT0d3djaOjI6tWrWLnzp08\nefKE1atXCxvpBx98QEZGBmfOnBGimNTUVOCpQnDhwoVkZGTg5+cnFGQPHz7k
1q1bqKurM2vWLEpL\nSzExMWFqaoq2tjaMjY1FAEhubi4qKiqsWLFCOPmkUim2trZYWlqKcr+1tVXYnfPz85k3bx5FRUUi\nzWhsbIwbN25w5MgRenp6uHXrFra2ttTW1uLu7g48VbFZWFiIEjw7O5vc3FycnJwYGBigqqoKS0tL\nmpqasLS05Ny5c3h5eTFr1ixBqU5PT8fd3V1cbFu2bKGoqIj6+nouXbrEsWPH+PbbbyksLOTMmTOk\np6eL0nxsbIzW1lby8vLIyckhMjJSOEF1dXVpb28XvZMnT54IQVZxcTEJCQk8fvyY/Px8Dh06RH9/\nP66urrz00kts27aNhoYGYc5Rcga0tbWpqamhq6uLxsan0aMeHh5YWVmJSLxffvkFb29vwsLC8Pf3\nx9DQkOvXrxMcHMzk5CS9vb3ExMQwMDCAi4sLhoaGfPnll3zwwQcMDAwIbcnIyAgWFhaUlpayZ88e\nenp6uHjxIg4ODvj7++Pp6SloRWVlZUilUnR0dDA1NWXBggVs375d2LAvXbok5v6bN2+mt7eXiYkJ\nAgIC6O7uxsnJCX9/f86dOyfcpps2beLhw4cMDw+jqamJn58fs2fPJi4uToBmXFxchMZCJpNx5coV\nYmNjcXJywsXFhdzcXKanp8nIyMDJyYnW1lbhuWhra2N6eprnnnsOX19fdHV1hVmwpaVFIPR/zfpN\nbAoGBgaUl5fj7u5OSEgImZmZdHd3CyfewYMHRbnb0dHB+fPn0dXVRVVVlbVr1wov+8aNG7G0tGRw\ncBB4eoHp6uri6OhITU0Nc+fOFTp/Z2dnli1bhkKhYM2aNSJvQMlaePnllykoKODRo0eMj49z/Phx\ngSpTNuEyMjKEQEQqlaKlpUVJSQkpKSnMmzePF154ARcXF5Gt+MEHHzA9Pc3ExARBQUGCR6A81yuf\nAM3NzVy5coXW1laSk5MZGhpi27ZtmJmZERYWhrm5OaWlpdjY2CCTycTE5K233qK0tFQ0zBwdHZmc\nnKS0tFSg85UY8qSkJJFLOTY2xvDwMBYWFsyePRtHR0fRz+jv76ewsFAAWpTZGWVlZeTk5ODg4ICB\ngQEWFhYkJycTGhqKhYUFzz//PJs2bRIK0nfffZdly5YRFBTE6OgodXV1rF+/XoSeKBuNyuOPhYUF\no6OjQlS1c+dOJiYmhMT422+/xcXFRWzsNTU1+Pj4MDw8zN///neuXbvGxo0bOXz4MIAQfnl6egqG\nxQ8//MAzzzzD+Pg433zzDWlpabi5ufHOO+9QWVlJQUEBlpaWhIWF0d7eLuArs2fPFlbkAwcOiHg9\nJU2ps7OTBw8e8M0337B48WLs7OzYvXs3gYGBZGdnk5CQwKxZs5iamuLu3btkZGSQnJxMWlqaSDfb\nuXOnuBeUQBtlpKGxsTFjY2P09vbS39/PzZs3MTIywszMjKamJubNm0dnZ6eYxCkp4r92/SY2BaUT\nLCAgAD09PUJDQ/Hy8uLChQv4+fkxMDDAoUOHGBoawtLSkueff562tjbOnTuHj48P69atEyMoJXYM\nnsqPz58/T35+Pt3d3Xh4eFBdXY2Ojg7R0dGcO3eOvr4+ETgyf/586urqsLOzY2RkRBBvpqenuXr1\nqjDtJCUliWnA8PAw6urqpKWlCQOXsoxVNqSGhoZYsWIFP/zwAxMTE+jq6tLU1IRCofi/9ECqq6sp\nLS1l7dq1BAYGEh4ejlwup7S0lKamJnEDKUeTyqpDR0cHb29vFAoFUqmU1tZWOjs7+cc//sHbb7+N\nmpoaX3/9NVKplMnJSZYvX46KigoKhYKCggLRpNXQ0GDz5s3ExsbS1dVFcHAwtrZPY0F9fX2ZmppC\nJpNhYmKCmpoaubm5DAwMYGpqKjwUQUFBGBoaoqWlhb6+PjKZjMnJSS5cuIC6ujp+fn7k5ORw9+5d\nenp6+Omnn8QT9euvv0ZDQwN3d3f8/f0FSeuVV14RVGRlMtSNGzcwMzPj/Pnz6OnpiSOAm5sbMpmM\nL7/8UsB2Hzx4wHvvvYdMJqO9vZ3MzExOnjzJ+Pg4N27cwN7eHkNDQ7y9vcnOzkZbWxuZTEZwcDA/\n/fQTDQ0NmJiYEBYWhoaGhtCdrF+/XhwJs7KyhE5BU1NTRO6FhoZSWVnJw4cPMTIyIioqCnNzc5qa\nmoQj97XXXmPDhg2cOXMGeCr9P3XqFENDQ7S1tYkMErlcLiooU1NT7O3tefvtt7G2tkZFRYVVq1aR\nmprKxx9/TFJSEvA0jlBp5vs16zfRU/j000/3mJubi1msp6cnJiYmGBsbCwGNvr6+sOe2t7dTUVEh\nJgYRERHk5+czOjpKREQEQUFB/P3vf8fDwwNDQ0NaWlrYuXMnw8PDdHR0oKenJ0rq4eFhYXaKiIjA\n3d2d/fv3Ex0dTWZmJs3NzYSFhbFkyRLu3r1LcXEx3377Lampqdy5c0cANDIzM7Gzs8PCwkIYYMzN\nzWltbaWnp0fE4E1MTNDb2yv8FE5OTvT29nL79m02bNjA7du3iY6Opq+vj+rqaqRSKYWFhaiqqgq0\nl56eHiUlJXR0dJCQkICbmxs2NjY4Ojri7u4uRrtyuRxbW1u0tLTo7OwkICAAfX19GhsbUVFR4c03\n3xScRScnJ86dO8d7770nztgaGhqCJenk5ISJiQmqqqoCH690rN6/f19wGPT09Fi+fDkTExPiOJOd\nnc3ExAQZGRlcu3YNIyMj4uLi0NbWZnh4mPr6empqasjJyeHll18mICCAlJQUzp49y9KlS9HQ0KCw\nsFCkKVtYWIgqrqurS8TBP3z4kDlz5pCSkoKmpiZNTU2oqKjwyy+/4OjoSFlZGU5OTiQnJwtugzKS\nrb6+ntmzZwvTWFpaGgYGBmhpafHw4UM2b97MoUOHWLNmDTdu3EBHR4eUlBTMzMxQVVVFKpVy48YN\nysvLWbZsmbAz9/b2kp2dTXJyMoaGhmIyoewv/fDDD+jr6zM4OEhDQwOqqqo8evSIxYsX4+XlRXd3\nt6gM5syZw7Vr12hqahL9HWUq++DgIL29vZw7dw6FQoGnpyezZs0SiecODg5cv379v05PYWxsDAMD\nA/Lz85FIJOzZs0fw7P7t3/6NpqYment7mTFjBjU1NXz66acoFAqio6MZHByksrKSuro6enp6yM7O\nFknDZmZmWFtbY29vz9mzZ+nv70dfX5+hoSEBNI2KisLDw4OgoCCGh4fZtm0bxsbG3Lp1i6GhIZYv\nX057ezsfffQRwcHBAFRUVDA6Osr8+fMF0MXY2JiAgABOnTqFoaEhY2NjnDhxgu3bt3P58mXRSVZT\nUxN6gmvXrqGtrc2RI0cAuHXrFo6OjhgbG5Oens6lS5dQUVEhJCQEX19fysvLBRlaucGFhYURFRXF\ntm3bcHZ25uTJkxQWFgqlnRJbNzk5iVQqpbGxkcWLFzN37lzy8vLQ1NQkPj6eoqIiQkJC2Ldvn0iG\ntrGxEaavsrIyzp49y/j4OIaGhvj
7+9PV1UVUVBS1tbVcv35d9EN++OEH7t+/L6LzVqxYQWZmJrNn\nz2bJkiUiGEcZ3FJWViboU8rvOTk5iY2NjQgabmho4NKlS3h6elJUVERVVRXh4eHMmjWL+Ph4tLW1\nBXV69uzZPH78mPnz5wvmob29PQ4ODhgZGeHp6cnw8DC3bt0S+Y7K5mpGRgYnT57EwcFBvC+TyXjy\n5AkbN25kfHwcNzc3oVlRThd0dXU5ceIEtra2tLW1YW5ujp6eHleuXKGxsZHQ0FCRfqXMrSgvL8fE\nxARLS0tKS0sxNjYWLsmxsTFOnTrF/PnzsbCwEN4eNTU11q9fj6+vLwkJCcybN4+ZM2cyMjJCfn4+\nS5cuxd3dndHRUaHXyc3NFQlcv2b9JjYFeOpotLa2Rk1NjcDAQCQSCTdu3EBfX59du3YRGBiIXC7n\n73//OzNmzCA+Ph49PT0B/jA2NhammuXLlwP/E4YyNjYm8gWUUwt1dXXmz59PUVGRiF3PycnB3t6e\nzs5OxsbGBOa9uroaIyOjf6EIW1tb09/fT2dnJykpKXR0dNDQ0IC3tzcDAwMi7XfevHkEBARw9uxZ\nPvnkEywsLIiLi8PKyopvv/0WOzs7du7cCSCeGFlZWYyNjREaGkp/fz/j4+MkJibS09PDqVOnKC4u\nJicnR8BoqqqquHv3LqdOncLPz4/x8XFGRkYoLS1lcnKS3NxccazJzMwUmgUNDQ2Gh4eFKKa7uxsH\nBweRBDV//nzU1dVxd3cXadxtbW385S9/4c6dO2hpaZGZmYlCoSA0NFQ0/oKCgoiKikJbWxsXFxeO\nHDnCyMgIFRUVDA0N0dfXx8TEBI8fPxZHCGXU39TUFDdu3KCtrY09e/bg5eWFmpoaY2NjBAQECK7h\nsWPHGBwcFBBTfX19LCwsxFPVw8ODyclJYTazt7dn5syZ3L59G6lUyr59+4iMjBQCtPj4eNLS0gDE\ntVZWVoaqqip5eXlcv34dHx8f2tra/oXuNTQ0hIaGBioqKhQXFzMyMsLp06f54Ycf0NXVJSwsjHff\nfZfQ0NB/mVQoYSlLliwR2Z5TU1PcuXMHeNp8NzMzY9++fWRlZWFjYyMEWZ9//jkVFRWYm5uL3M78\n/Hy2bt1KVVUVKioqSKVSwVBQHoN/7fpNbAq6urp0d3djZWUlyk5ra2u0tLS4dOkSJ0+e5IUXXuD2\n7dt88803vPnmm7i4uIiLWancUiYgKwNANDU1ReBKc3OzoB1nZ2ejr69Pd3c3ZWVlBAYGYmRkhLGx\nMTdv3mTNmjVoamrS1dUlgksA0QQaGhpiYmKCyMhIKisrkclkYrphZmZGYGAgZmZmXLt2jcrKSsrK\nyvD19SU6OpqkpCTWrVvH9evXycnJ4f79+6Lj/PLLL7Nw4ULs7OzYsGED/v7+eHh44OTkhEKhoLe3\nl66uLlJSUnB0dCQ0NBRHR0eampqora0lODiY/Px80b23tLQkOjoaJycnnJ2dGR0dxcvLizt37pCQ\nkMCjR4/o7e3l8OHDzJs3T3S8bWxssLOz4/Hjx7S3t2NoaIidnR1WVlZoaGiwcuVKvL29cXJyIj4+\nHmNjY7q7u/H29qauro779+9z584d0fByd3cXbk6lhPr27dvY2dkRFhaGra0t77zzDvA0FGZychJN\nTU1GR0cpKysT48lnnnmGf/zjH3h6epKZmckLL7xAbW2tmDLdvXuXqakpFAqFqPRsbGwAuHbt2r8E\n746Pj3Pnzh327NkjoCXOzs4EBgbi6+vLvn37qKmpITIykh07dogkp5qaGp48eUJ7ezvwtFcRFRXF\nmTNnqK6u5tlnn0VNTY3Y2FhOnDhBdHS08OaMjIyI6L6enh7a2tq4evUqoaGhTE1Nce/ePeHqbGho\nYOvWrUilUhYtWsTjx4+xtbVl8+bNLFu2jK6uLjQ0NMRIva+vj9HRUTZu3EhTUxN79uxhenpaHD+U\nyea/Zv2nNgWJRFInkUgeSiSSQolEkvvPjxlLJJLbEomk8p9vjf6j1+nv7+ebb77h5MmT6OjoYG9v\nj52dHVKplJycHPbu3cvVq1dZsGAB09PT3L17l+vXr3P37l0uXbpEcXExdnZ2nD17VphC4Gmjsaqq\nSrAbd+zYwdTUFAsWLMDW1pZHjx6hra2Nl5eX6GqHhoaSmpqKk5MT4+PjnDt3Drlczh//+EeRdxgY\nGIijoyMJCQmsXbsWHx8f1q5di5mZGR4eHkxMTKCpqUlUVBQvvfQSS5cupaOjg66uLtzd3cUcWkVF\nRVB1AL744gvgaSMzKSlJoOFramrw8vLC29ubPXv2CDBJV1cXeXl5Iiq9tLQUJycnQXVSWoEtLS05\nf/48paWlyOVycRTbvXs3165dIykpCTMzMxwcHIiIiCA+Pp6SkhLOnTtHdnY2p0+f5uLFi7i4uGBj\nY0NGRgY5OTnk5uZSXV3NnDlzmJiYAOC1115jzZo1NDU1sWLFCvbs2YOOjg6jo6OialFqA44ePYqG\nhoYYewLU1taydetWFixYwNWrV/Hx8aG9vV2ASJVou8HBQX788Ufi4uJwc3Ojs7MTIyMjNDQ0OHXq\nFDo6Oty7d09MCRYtWoSqqioBAQF8+OGHAls2c+ZMbt26xdjYGEeOHKGlpYUXX3yRjz76CBcXFwIC\nApiamqKurk4c2To7O8XEaGBggNHRUfr6+rh//z4bNmxg3bp1IpthYGCAL7/8kl9++YXLly/zxhtv\nkJ2dTWJiIrt378be3p7BwUF8fX1pampiw4YNwFP/z+TkJCEhIYyOjpKamkp1dbXgWYSHhzMyMkJa\nWhrq6ups2rRJYP/XrFnDO++8g4eHBzo6Opw6dYq33377V9/X/ycqhRiFQuH/7yyZu4AkhULhCiT9\n8/3/7TIxMWH27Nl4enqKaHilEefx48eCs6ivry9ShnV0dPjoo4+IjIyksLBQYLSVZzVABKdMTEyQ\nkpLC0aNHiY2Npbq6mr/97W8CClteXo6ampoYJymFU5OTk6iqqvKHP/yBQ4cO8d133wFPCb7e3t7E\nxMQwOTnJwMAAgGASqqqqYmNjg6amJomJiXh6ehIaGioyDZQ3lzIgRhn3pozNc3d3Z+nSpeTn55OU\nlIS3t/e/hN+Ym5tjampKYWEh8+bNo7y8nNTUVOLj46msrMTIyIiHDx9y//59fvnlF86fP4+RkRHL\nli1DQ0OD8vJytLS0+Mtf/iISkQYHB9HT0yMiIkKMFkdGRigrKxMJRR999BElJSXs2LGDzZs3s3Dh\nQtra2nj8+LGImevp6UFLS4vY2FjkcjmxsbH09vaSnJyMvb09zs7OIqxX2ShTPu0BkdfY0NBAe3s7\nSUlJhIeHi6Sn9evX8/PPPzNjxgyWL19OVFQUampqnDx5EltbW3766SdUVVVFpGB/fz/wtA8UFRVF\nYWEhfn5+/PLLL9y7d0+kQCUlJTFnzhzy8vJYtWqVaES/+OKLIm9EJpMhl8vp7u6mp6cHeOqp
CAsL\nQyaToaOjw7p16/jmm2+ws7Nj5cqV3L9/Hz8/P4aHh7G3t2d8fJzdu3fT29tLYmIioaGh2Nvbi6pP\n2Qdwc3MTD72kpCRh+FM2eJ88ecKOHTtIT0/HzMyMqqoqFAoFExMTZGZmCiGei4uLGNX+2vX/xvFh\nOXDin38+ATzzH33B6OgoV65cQV9fn/HxcTIyMsjMzBSThn/84x8kJycjkUhQVVWlvr6evr4+pqen\nMTc356WXXmJgYAAPDw/u3bsn0pm6u7sF7kxTU1PMqjMyMpDL5WRmZjIyMoJcLhcKs1WrVok47xUr\nVrBkyRI+/vhj5syZI1BpGRkZJCQkcO/ePVRVVdHW1ubzzz9HV1cXJycnEhMTqa2tpbi4mMWLF/Ph\nhx/S3d2Nu7s7d+/e5d69e8yYMYOWlhZKS0tFsIjS5FVcXIyDgwM7duxAR0eHn376iR9++AGJREJd\nXZ0QRG3atAltbW3i4uJEWnNcXBxnzpwhKiqK7OxspFIpAQEBjI+Pc/r0aUpKSgT12c3Njddffx1H\nR0fR3CsvL8fHx4evv/6aR48ecejQIczMzMSTqampSfwflZWVoaGhwQcffICxsTGHDx8WDlYLCwuc\nnJx47bXXmDlzJlZWVuTn59PT04OhoSELFy5k8eLFfPLJJ6iqqmJlZQU8PZopKdpKkIyZmRnm5uYi\nX2HFihVYW1vT3t7O0NAQ5eXlSKVS2tra8PHx4Y033iAkJEQQrpSvq8xXNDQ05Pnnn0cul1NeXs7y\n5ctFvyA7Oxs3Nzdqamr44YcfCA4OZu/evQQGBnLq1CkyMjK4ffu2aDS+//77rFq1SjT6amtr2bBh\nAw8fPiQoKIitW7fi6uoq0HtVVVVER0cTGRmJiYkJZWVllJeXk5GRQUhICBcvXgRg8eLF+Pr6EhER\ngZWVFc888wyXL18mNTVVIAamp6dZtGiRmIYpNQ2+vr5cvnyZvr4+9PT0CAgIEAK5X7P+s5uCArgj\nkUjyJBLJ7/75MXOFQqGMo2kDzP9XXyiRSH4nkUhyJRJJrrKJVFJSIqTOkZGR9PX1sWnTJj7//HOW\nLVvGsWPHCAgIEOOb48ePU1lZKTIQq6qq2LRpkxBqKOm9jx8/5rnnniMyMpJPP/0ULS0tli9fjlQq\nxcDAgMrKSlatWkVTUxPZ2dksXboUqVTK6OgoCQkJVFZW0t/fL2bTISEhaGtro6uri7a2NqqqqsTG\nxvLnP/+ZtLQ0tLS0BG9gdHSU6OhoYZRau3YtmpqadHR0oKOjg4+Pj9hshoeHaW1tJSIigpSUFDIy\nMoiLi+PFF18USddtbW3ExcXh4eFBdnY2V65c4dixY4yMjDAwMCCs5aOjoxgYGBAXF8fg4KDIWxgc\nHGTLli2cOXMGmUxGQ0MDvb295OTkkJ6eTlVVFZGRkTQ1NWFhYUFLSwtpaWlIpVKh/BsfH2dsbEyE\n7WRnZ/PZZ59hZWVFfX09ly9fJjs7m0WLFtHQ0MDBgwe5fv06VlZWmJqasnPnTp555hl6e3tZuHCh\niECDp67D6OhozMzMsLe356uvvmL//v28++67YsT578d8IyMjhIWFiSwOU1NTVq9eTX19PWVlZVhY\nWABPvQTKSZWZmRlSqZQLFy6waNEizM3NRZyfkrhcX1/Ppk2bWLBgAZaWlsKM9d577xEREUFkZCSA\n4FBGRkZiampKW1sbSUlJnD59mj179tDY2MjHH3+Mq6srfn5+7Ny5kxkzZuDt7U1DQwOGhobcaDm3\nzwAAIABJREFUu3dPpHQrG4IHDx7E1dWVmpoaMboMCAhg1qxZWFlZiQmakpX5l7/8BVtbW1paWrhy\n5QqzZs1iZGREQGaVE7lfs/6zm0KEQqHwBxYCL0kkktn//pOKpwfF/yVVSaFQHFUoFEEKhSJIGaDh\n4OCApaUl1tbWODs74+bmxu3bt4WgA2D//v1oaGhQX1/Pjh076O3tpbq6msrKSubNm8eJEydwdnYG\nECgvqVQqLMNbtmwRN6WNjQ2urq7U1dUxPj4uxkKTk5NCbx4TE0N0dDQ1NTVit1WebXNycsjLy6Ok\npERwAqRSKVNTUyxbtozh4WHy8/NRV1fn+eefp6Ojg9HRUXp6eqirq8Pe3p6WlhYRG6cUcE1MTNDc\n3ExMTIxQx1VVVfHkyRM0NTWprq6msLAQNTU1LC0tGR0dxd7eHi8vL/r7+5menmZ4eJjIyEiSkpLQ\n1dXlwYMHdHR0MDw8zEcffSRK2oCAAPHEGRgYEBmJK1euRC6XY2Njw5/+9CeKi4txc3OjrKwMKysr\nenp60NXV5aeffiI2NhZXV1eGhoa4e/cufX19qKqqoqenx8GDB9HX1ycgIIBt27bh5eWFoaEhr7zy\nCnPnzkVHR4eJiQleffVVABITE7G3t8fV1RUHBwfKy8sFc1FDQ4NFixaJo5ednZ0gQ1dXVxMZGYlE\nIuHdd9+lqKgIe3t7QcpOSEggNTWV0tJS5s2bR3p6Os899xzV1dWYmpqKkaapqSlz587Fzs5ONO90\ndXVxdnZmzZo1nD17Fm9vbwoLC4Gn6tbvv/+et956i9HRUREKvH//fnJzc7ly5QqLFy9m3759JCUl\nCcdkdXW1iJWPiIgQdn2lMWxycpIVK1awZs0aSktLefvttwU0V4n2a2lpYfPmzWzevJlr166JTAsl\nEbqjo4Pm5mYOHTokrrFfs/5Tm4JCoWj+59sO4CcgGGiXSCSWAP982/Ef/hAqKnz99deMj48TFRUl\n3F+bN2/m4cOHIhotIiKCqqoqIVE+c+YMAwMDBAUF4eLiwscff8zKlSvR0NAAEBF0SuWeUouuq6sr\nOrL+/v5oamqSnp4uIsltbGwYGBgQMd87d+7E3d1dOO7S09NRVVVl/vz5tLe3C/emm5sbBgYGrFu3\njuDgYKKjo3Fzc0NfX5+6ujr09PS4ePGiyFrIz89HLpcLXHh4eLgwKsFTdd/IyAjXrl3DyclJEIrU\n1NTE5KK4uBhfX1/6+/vFOC4iIkLQlENDQ7l37x7vvPMOPT09AqKiFLXk5+djbGzM4OAgpqamhIWF\n8fPPP1NUVERfXx9ZWVncvn0bPz8/UlJSMDExISUlRQTfLFmyBC0tLSwsLOjr62NoaEiQh48cOUJo\naCi2trYsWLCA+/fvo6mpSV1dHbW1tezfv5+SkhKMjIzYv38/gDjPK1V6u3fvpqioiGXLlhEaGsqM\nGTPw8/OjsrKShoYGampqOHPmDBYWFrS2tlJXV8f169fx9fVlenpahMG4uroSHR3Nli1bsLKyIjg4\nGAMDA+F30dbW5tatW6K6iI+PF2NqPz8/cnNzKS8vF0HCygpEqVPYu3cvzz//PO3t7ezatYsjR44Q\nHR3NH//4Ry5cuIC5uTnLly+ns7OTY8eOiVAfpX8mODhYaCbgaQbIqlWrmJqaQl1dnfr6eiwsLNi/\nfz9Lly6lvLwcBwcHYYgLCgri4cOHaGlpsXDhQiY
nJ3nttdfEBEvpvvw16//xpiCRSHQlEolU+Wdg\nHvAIuAps+udf2wRc+Y9ey9jYmGvXrjE6Osobb7yBnp4eQ0NDtLS0CAeYkha8YMECtLS08PHxEXqB\nxMREsZsmJCRw6dIl4GnG37vvvouFhQVtbW34+/tTUVHB2rVrxXlS2TCaNWsW27dvx8PDg7S0NIEM\nU9qKlWG3gLg46urq8Pb2JjAwkPXr11NVVUVRUREHDhzgu+++4/vvv2fevHlUVlZiYWFBZWUlcrmc\nyclJZDIZEolEwEbgaTqSEr02f/585HI5zs7OBAcHk5OTg0wmo6urS8S9ubm5sWrVKgFw+eqrr+jp\n6aG3t5eQkBBqampobm5m3rx57Nq1C3t7e7q6uli9ejUxMTHs379fJB199NFHtLW1kZqaioeHBzEx\nMTg6OhIUFISVlRVlZWXihpmeniYyMpJFixbR3t7OjRs3BDDUyckJa2trJBIJjx494vHjx9TX12Nv\nb4+1tTXh4eFUV1cTFhbG/Pnzyc3NJSMjQ1y0R48eFYg8W1tbvvjiCzo7O7lw4QJWVlZ0d3fz17/+\nFXgaDaDs/re0tIiNQDnOlEgkgpTd29vLmTNnxOaYnZ1NXl4eDg4OQhWq7CfIZDLu3LnD5s2bsbKy\nEhkNS5YswcDAgCtXrgg4cHJysgitVVNTw9ramr/97W84OTkRERHBoUOH8PPzY86cOTx69IgTJ04w\nMTHB6Ogo2traIoago6MDLS0tjh59KjiMi4ujpqaG/v5+8vLySElJ4fTp06xdu5abN2/i4+PD9evX\nRS8lPz9fsDqU6l/lFE1TU1M0s3/N+s9UCuZAhkQiKQJygASFQnET2AfMlUgklUDcP9//366+vj6W\nL19OS0sLDg4OgnRrZGQkZLOGhoZ88sknGBsbY2JiIkr8iooKLCwsUFNT48iRI/j6+opzmZK1Z2Vl\nxfDwMAcPHhRP/7a2NiwsLDh8+DAeHh5UVlYyPj6OVCoVEurW1lZ+//vfCx2/8hf78ssvi1FlTk4O\nXV1dPHnyBHt7e3FBd3d3Ex8fz+eff05cXBwXLlygp6cHdXV1EeA6NTWFVCoVORVKLcDU1BRDQ0MY\nGhqKqiQkJIRbt25RVFQkdAvKxCx/f3+KioqIi4sT0WJz587Fx8eH8fFxQYBydHTExMQEXV1drl69\nSkREBGFhYfj6+rJx40YBHDE2NubHH3/EysqK6elpsrKyiImJEc1UZSqVMgJdU1OT+vp6DAwMGBsb\nExmXe/bsQV1dnRdffJHt27eTm5vLkSNHUFdXJyQkhPLycnp7ezEwMBB6go8++ojKykqCgoJEkKtc\nLqevr49z584JxFlZWRmPHz+mtbWV6elptLW1SU9Px97enjVr1uDo6EhWVpYQsslkMv7t3/4NOzs7\nUlNTRTbHd999x+PHj6mqqmL58uUMDg6SkZGBjY2NgOTU19czd+5cjh07RlJSktCOwFPHrJ+fH7/7\n3e/Izc2lq6tLWNjXr19PcXExMTExGBoa8ujRI+bOnSv8K7m5ucTGxmJnZ8fatWspKipi1qxZABQX\nFxMdHS30NNPT0+jo6JCbmys+Pzw8LGIBdHV1mTlzJgYGBgwMDLBr1y7U1NS4f/8+TU1N/7fES/9N\nc/7v9d/r/z/rvw7N2d7enlWrVomnZ3Z2tnB7OTg4MDk5Kcrv+fPnc/PmTVavXs2jR48ER8DAwIDg\n4GCRAxEUFMSePXt44YUXqKmpETmEWlpaODk5MTExQWNjIz4+PgIfVlJSQmxsLJmZmTg7O2Nvb09x\ncbGYpc+ZMwdnZ2e+/fZbkQu5YsUKBgYGyMzMZHJykvHxcbZu3cq9e/eE+ET5vevr61EoFCxatAh9\nfX1OnTolQkiio6PZtGkT3t7eeHl5idxL5Q5//PhxNm3axPT0NPfu3QOe6jsMDQ1xdHTk+++/x8fH\nh4CAAO7fv096ejo7duygsbERhULB8PAwn376KUuXLqWtrQ2AdevWcePGDcLCwsjKykJXV5fS0lI2\nbtxIbm6uaIQdP36c8PBwLCwsWLx4MceOHSMuLk48mT09PbG1taW+vp7z58+zYcMGTE1N0dfXF18L\n0NzcjK+vLyUlJTx58kSMGO3s7GhsbGT9+vX89NNPPH78mI6ODsLDw8nIyEChUHDp0iUOHTok8jWN\njIwoLi4WR4oHDx4IavHY2BjwtEdTVVXFX//6Vx4/fkxiYiJdXV2EhITQ2NiIhoYGc+fOpaGhQcTH\nDQ0NsXXrVlatWoVCoeDQoUPEx8fj6upKZWUlvr6+zJ07l7KyMrZv305qaqowMimhvXPmzCEjI4PR\n0VHCw8MF6KexsZG6ujomJiaE1LmxsVE4HI2MjKipqeH111/nL3/5C+bm5qKX0NLSQnR0NBcvXsTJ\nyYnu7m7hwFVqVtzd3ZmamkJVVVUEwygzVMvKygTF7D9av4lNQUVFBQMDAzw8PCgvLycmJob79+8z\nPj6OTCZjZGSExsZGoqKiuHz5Mh4eHqipqYmLwtXVFalUSkdHB5qamuKM7u3tzWeffYaLiwsGBgYE\nBgaSnJxMTk4OS5cuRUdHR4iN3n//fX7++WdSU1M5dOgQ58+fp6qqitu3bwudu/L8LZPJ6OzsxNTU\nFHNzcy5fviwoUOPj4wwODlJUVMSbb77J3bt3cXBwIDc3l2XLljE+Pk5XVxdJSUnMnDmT1tZWUlJS\nAPj973/PwMAAlZWVqKurc+nSJby8vFixYoXQOCix5SYmJri6uqJQKGhtbRXCF2WAjY6ODllZWSQm\nJrJr1y4KCgpQVVXF3t6e4eFhpFIpJSUleHh4YGFhwcOHD9m5c6eAg8yYMUPYzHft2kVJSYkwXDk5\nOXH27Fk0NDSIjY3l5MmT/OEPf8DPz48bN24Ii3BiYiJr167F29ub7du3I5fLKSwsxM7Ojvnz5wts\nf1FRkTCbyeVyrl69ysjICJ999hnPPfccAQEBAr2elpbGZ599JkbLygt/xYoVjIyMiI05LS1NjIsB\nEf+Wl5fH1NSUGOk2NjYK/oTyGvrkk084c+YM7733HpmZmQwODmJmZoanpyepqals3ryZDz74AHjq\n1Zg5cybDw8NkZWVhZmZGcXExcXFxXLx4kampKeLi4sjOzsbT05M7d+7g4OBAaGioOJJaWlqSlZVF\ndXW1sKmrq6tjYmKCpqam2GBra2txcXHB3d1djLglEglPnjxh+fLlnDt3jidPnhAQECCu2XPnzuHn\n58eSJUt+9abwm7BOHzhwYE9ERIRI162trUVDQwMzMzPS09Px9/cX4a+6uroEBQVhYmLCxMSE8D5M\nT09jampKSEgI33//PXfu3MHHx4fAwEA6Ojrw8vJi3759AjE2Y8YMEQZqY2ODk5MTJ0+eZHBwUKQ3\nHzlyBGdnZ7y9vSkvL6e9vZ2cnBz6+vpQKBTs2LGD8+fPs2jRIjo7O5HL5YyPj6OioiJ+rmXLlgn0\nWEdHB0
Fw4UI8PT3FxbVx40YKCgqYOXMmjo6OyOVy\nKioqKCkpQS6XCwydSq6sp6dHX18f165do7m5mUOHDjFr1ixCQkIoKiri8uXLaGtrU15eTnV1NRcv\nXuTx48fMnDmTgYEBTE1N+fTTT8nOzhYZGdOmTWPBggW8//77tLS0oKuri5OTE52dncTFxaFUKrG3\nt2fdunVs2bKFpUuXCsyXjY0Nzz77LJOTk3R1ddHe3o6fnx96enqEhYWRkJBAZWUl6urqfPXVV+jo\n6FBRUSFq9uTkZMrLyzl9+rQAuCoUCn777Te8vLwIDQ3F0NBQOCIBEfX3yiuv0N3dTVJSklBDqiA6\nXl5erFixQpQuqh7Jyy+/zI0bNygrK0NXV5f4+HikUikAsbGxwkA3MTFBVFQU5ubm2NvbEx4eLmTG\n/f39pKam0tDQgJubG+rq6nh6euLl5SWAP6+99hqnTp0CoLKykqVLl7J+/XqysrIwNjbm5MmTwrzk\n6upKUVERHR0dhIeHo0IEODo6sn37dsbGxnjhhRf45JNPSExMFH6fpqYmUc5cunSJBw8eCN6DTCYT\nQbIODg6Mjo6ir68vNgilUklOTg5r1qzh559/Fsj337v+46YwNTV1B+j6P15eDvz4rz//CKz4t9d/\nmZqaGp2amnoMVAMh/+lnqAJQVAj1/v5+7OzsMDY2xtvbm3nz5rFt2zZxGvDy8sLDw4Pp06fT1NSE\nqakphoaGnD17lubmZuFB37p1KzU1NSQnJ+Pl5SXUbrt37xYE5GnTphEfH4+2tjb19fVoaGhgYGAg\n0oWys7NF48jNzQ2A0dFR4uPjyc7OxtPTEz09PZKTk/niiy+oqKggNjZW2IpVbkyVslAlrR0bGxN5\nlqpRlmrefuPGDQoKCvDx8SEoKAhra2sWLlxIQEAAZWVlqKmpiUbrBx98QEVFBRMTE2RkZLB06VIe\nPHjA888/T3d3N62trXz55ZdCrmxra8ujR4/EOPfixYucOXOGmJgY6urqePz4MTt27MDb25sVK1Zw\n4MAB1q9fz6NHj2htbWVgYAAPDw/y8/N59OiRyKKsrKxk+/bteHh44OjoyMqVK1m0aBEWFhZiIzl7\n9qzIA1WlbNXW1jI6Oiqe6B4eHpSUlJCSkoJMJmNsbIzr168zd+5c1NTUsLa2xsHBQYz8rK2tSUhI\nQEtLC0dHR3JycgSmr6ioSMBbBgYGUFdXx8vLi4CAACHDnpqaEmTsiooK0tPTiYqKwsXFhYcPH6Kl\npYW1tbXwPpiYmCCXy0XYcHh4OD/++CNFRUVoaGhw6dIlZs+ejaurK+Hh4Vy4cEEoYmtqajhz5gzN\nzc3MnDmTZcuWERwczK+//oqPj8//cOLW1tZiYmKCi4uLwAGomu8nTpxAX18fePLgU2WBDgwMcOPG\nDR49eoSuri4//fQTcrmcxYsXc+LEif90G4r1/7anYDk1NaXaeloBy3/92RZo/Lf/rulfr/3frrGx\nMWbPns2aNWt49OgRN27cICAggMbGRkpLS9HQ0KCgoIDjx4+zb98+Dh06xLFjx/j+++9FA27Lli3o\n6+uLSQU8iY0zMTFh6dKlODg4oKenx/Hjx8nMzCQwMJBNmzaxePFiEV+2YMECNm/ezGuvvUZvby8a\nGhq88sorTExMUFBQIFJ7PT096evrw97enoyMDJydnQkODmbdunX4+fmRlpbGyMgIV65c4U9/+hM5\nOTlcvnyZy5cv09/fL3zzxsbGGBsbCyrO/fv3mTFjBu+99x6bNm2irq6O3t5efv31V3GTDAwMkJqa\nSklJCe+//75Ibd6wYYO4UJ555hkOHTpEZmYmzc3N+Pr68vrrrwvuoCrHsK2tDTU1NYKCgmhqaiIl\nJUXoB2bOnImXlxcdHR188MEHWFpasm3bNqysrDAwMKC3txdfX1+qqqoICwvDxMSEW7duUVZWxrFj\nx/juu+9EvF1BQYFwWw4PD7Ns2TImJiYoLi4mMDBQ3CQAaWlpzJ07l6qqKr7//numpqYYGhqiubkZ\nNTU1urq6iIyMRE9PjxUrVnDjxg1qampYvHgxrq6ubNq0idbWVuzs7FBXVycqKgqAy5cvExMTQ0ND\nA+3t7VRUVLBy5UoOHjxIV1cXenp6Atv38ssvs2nTJhwdHXnqqaeIiIjA0NAQR0dH3N3dUVNTY8mS\nJeL7Tk5O4u/vj56eHps2bUJHR4eEhATBOHB0dEQikfD3v/+dw4cPs2HDBj799FN+/vlnEfKj8oKo\nyrOEhAS6uro4duwYc+bMEcSqrKwsFi9ezMjICIWFhcyePVu4ZQcHB4mNjSUuLo4333yTvr4+goOD\nOXfuHKGhob/75v7/3GicejIX+n8sU5ZIJNskEsk9iURyb2Jigvz8fAH2VNVoOjo6aGtrs3TpUtzd\n3Vm5cqXAu+/fvx81NTW++OIL9uzZg52dHYaGhuzYsUM0rRwcHAgODsbX15dLly4xNDREWFgY6enp\ngkzs6+uLi4sL06ZNY3h4WBzXzp49S1dXF62trbi7uxMRESHSlMbGxrh8+TITExMEBwdTVFSEt7c3\nvb29fPrppzx8+FDEnb/77rvs3LmTiYkJMT5ydnZGQ0OD2tpaDh06JEoRGxsbod9vbGxkampKhMuO\njo6KEJDAwEC0tbX54osvCAsLEyMtDw8P4cmQSCSsXr2azZs3U1ZWxnvvvceVK1coKysjLy+PrVu3\nIpfLUSgU5OXliZJqcnKS5ORkDA0NefDgAVKpFD8/P9rb28nKyuLevXtkZGQINaIqp1KVWZmUlCT8\nDA8ePODRo0f09PRQUlIiICiq35sqqs7NzY2srCzgyXjx0aNHeHh4MDExIWhK1dXVyOVyzM3N+b/Y\ne8+wKs90b/9kwaIKLGBR1gKkdxAQaYIo2FHUJPaWRJNxEhOdmWRSpuyUmZ2eiUkmExOjMRp7wV5A\nBKlKR0BAEKT3tugLFuv94F73sef4f3jzfz9lH8d+vlgOD6Q8z/3c93X9rvNsa2sjJSWFqKgo4uPj\ncXd3Z8WKFcIWHhkZSU9PD6tWrfo30U5xcTGtra2Mjo4yY8YMbGxscHR0pKamhq+++kpMlubl5XH+\n/HkhDKqsrCQkJITu7m4mJibQarWYmZkBT9iPUqmUnJwcbGxsuHjxItPT0xw+fBiJRIK+vj6Wlpb4\n+/uj0WgwNzdHo9EwNDRES0sLKSkpuLq6sn79epRKpbjHdC+BsbExMdxnbW3N5OSkIEvv3LmTiooK\n4uLiqK6uRiqVoq+vz8DAgJjEzMzMJDAwUCgZf8n1/7oodOrp6Sn+6+FWADpRXSvg/N/+ndN//d3/\n59Jqtd9rtdo5Wq12joODAxUVFYJkq0Nj6WQi5ubmPHr0CEtLS2QyGd7e3qxcuZLg4GDeeecd7Ozs\nRCjkyy+/ZOfOncCTH9i5c+fQ09MjMTGR4eFhAgMDmTt3Lm1tbdy9e5fTp09z8uRJcnNz+fvf/87q\n1avZvHmzS
Fbq2AE6ToHuCg0NxcLCAhcXFyHlGB8fFyakFStWMD4+TmlpqUgS7tixA2trazQajSgY\nzZkzh+rqauBJTcHT05OOjg4cHBywtrYW8eM5c+aIyKqOFPTmm2/y6quvcvLkSd544w36+/uFZVir\n1fLll18KAtPnn38uMOZTU1Ncv36d4uJiccbWJQmDgoIwMTGhvLycjIwMFi9ezBtvvIFcLhc3nW4q\nb3x8nPLycmbNmoWTkxODg4MsXbqUpUuXsmTJEjo6OhgdHSU6OhqpVEp7eztTU1PimKLT3sfGxgqO\ngE6Uo8ubXL16laCgIMbHx+nt7SU1NRWJREJ4eDhSqZSBgQGRjDQxMcHX15fs7Gy6urpEwAqe5CDa\n29vRaDTMmzdPZBt0vIRdu3aJwi08cUzk5uYik8lISEggICAALy8vIiIi0NfXF21wBwcHMfR26tQp\nkc+wsbGhvLycv/3tbwLXHhoaKrIynp6e+Pv789JLL+Hp6UlraysajUbo3ZycnPDx8SEgIABDQ0M+\n+OAD7t69S3l5OQqFgtHRUfr6+nBwcMDW1pb8/HyWLl2KnZ0dAQEBVFRU4OXlxUsvvUR7ezvff/9/\njSeI6/91UbgEPPtfv38WuPjf/n6jnp6ekZ6enhvgBeT/kg+4atUqAeF8+PAhpaWlpKen09raSm1t\nLQ8fPiQtLY3Q0FBOnTpFe3s7QUFB1NfXizOXVCoVo9fAv924FhYWVFVVsX37dmGemj17NsPDwxw9\nepTOzk7efvttJBKJ+GH09/dz69YtwTPo7+8HniQJ3dzcBHhFR1FeuHAhnp6eBAYGkpaWxvHjx5k9\nezZ+fn709PSwb98+du7ciYODA998842wBOtuBN3XYm9vj5ubG+bm5hw8eJCmpiYaGhqYnJykrKyM\n+Ph4LCws2LNnD8888ww3b97k97//PX5+fjz11FOsWbOGoKAgtm/fTnJyMhMTE2zZsoX4+HhRec/L\ny8Pd3Z3u7m4RIGpqaiItLY2Ojg4MDAxoa2tj1qxZTE9Ps3TpUtrb2zE2NiYkJITZs2eTnJwsUpo5\nOTmkpKSIRU63CxkcHGTWrFl0dnYyMDBAREQEjY2N3Lhxg4aGBhISEkhLSxNp0bKyMiYnJ9mwYQMB\nAQEsWLCA3NxckY6MiooSu6+RkRGGh4cpLy8nPT0dDw8P9PX1RSRbN60KT9683t7eJCUl0dHRQUZG\nBllZWRQVFSGTyQSVy8bGhg0bNoiciQ7q0tTURG9vr1jodC1miUTyb6SrpqYmpFIpoaGh7N69m6io\nKHJycti3bx8RERG8++679Pb2iuNxdnY2MpmMhoYGgRUExJh8fX09k5OTIt27cuVKBgYGSE9P57PP\nPuPw4cMUFRXh4+PD+++/j5GREVlZWTQ1NREXF0dRURExMTGEhob+4of7l7QkTwB5gI+enl6Lnp7e\nTuAjYLGenl4tsOi//oxWq60ETgMPgBvAbq1Wq/m//R+6GfmcnBzBN9BVqxMTE9m/f7+Ylc/NzUWh\nUPD2229z6dIlfHx8SEpKAhBz/7pKa2lpKWvXriU/P5/MzEy8vLwICQkhNjaW4OBgOjs7iYuLEzeF\njho0MTHB6tWrRTdk79696OnpiRtXN2Pf3t7O0qVLmZycJD8/n6GhITw9PQWU47nnnsPR0ZGbN28S\nEBCAo6Mjjx8/5ubNm9y+fZsTJ06gr6+Po+OTsosuEqxSqRgYGMDV1RUPDw8SEhLYt28fWVlZxMTE\n0NTURGBgIIGBgWRkZHDo0CHRbtXVBtra2rh48aKohygUCs6cOYOJiQldXV08evQIAwMD4uPjefjw\noajSR0VFsXjxYkHQnpqaoqGhgaKiIvLy8khISKC1tVWEy3TzDJcvX2b9+vV0d3czNjbG22+/jZOT\nE9evX6exsZHCwkLRJYqJiSEiIkLAck1MTEQk2NHRUSxaDx48QK1WY2FhQV1dHcbGxgwMDBAaGspL\nL72EWq2mpaWFb775BlNTU+rr6zl48CAPHz7E1NSUGzduIJE8ucXXrVvH6dOnMTAw4PHjx1haWvLw\n4UMkEgk1NTWMj48LXd/ly5exsbFBq9VSVlZGaWkpM2fOZMGCBVRWVrJmzRqxowgPD+e5557j2Wef\nFbUDDw8PvLy8MDc3Jzo6mvDwcCIjI4mKihKp2IaGBuHkNDU1pbu7m1u3bomfw+DgIN3d3Wg0GkZG\nRtixYwfR0dE4OzujVCrx8/PjueeeY+fOnWLWw9nZmR9//BEfHx/8/PxQKBR4e3tTUFAgZkd+yfVL\nug+btFqtQqvVSrVarZNWqz2o1Wp7tVrtQq1W66XVahdptdq+//bv/1Or1XpotVofrVZ7/Zd8EiqV\nShh9oqKiuHLlCgkJCYJetHHjRiGzsLe3Z2pqivHxcRITE3nrrbdETFg3Wy+TyQAICQmhr6+Pp59+\nGnhSCzAzMyM7OxsrKysUCgVVVVViYm5sbIyxsTFWrFiBVCpl0aJFKBQKHB0dqaiooLa2FniC4Prj\nH/+IhYUFGRkZxMXFCUVXa2srS5cuJTAwkPnz5zMxMSFqBbpt65w5c/D392doaAiZTCaAKCUlJdjb\n22NhYUFRURF9fX0kJiby4MEDHB0dcXR05OjRo2RkZJCQkEBHRwcKhYIDBw4IPbqlpSUmJiYEBgYS\nFxdHeHg41tbWBAQEMGPGDAwMDFi9ejXLli3jww8/pLCwkPXr12Nubo65uTnT09OsXbuWLVu2kJiY\nSHd3tyiMjo+P09zczOPHj/n555/JysqisbGR6OhoEhISOH/+vCgaX7hwge7ubt544w3mzZtHbm4u\nGRkZnDp1ilu3bomjolqtxtLSUjg1Hj16hJubGwkJCTg7OxMUFIRMJiM0NJRXXnkFtVqNp6cnISEh\nTE1NkZ6ezsTEBKdPnxZx43fffZfQ0FAB7QW4du0aDg4OvPbaa9ja2goq+DfffCPqGosWLRK+z4mJ\nCUFbbmlp4cCBA4SEhGBiYsLJkydFQbCmpobp6Wk+++wzent7ycnJ4cKFC+zfv5/W1lYRW+/v7yc2\nNpaIiAjhpNi+fTsODg7cuHEDW1tbnJycePHFF8X3QWcFd3Z25uHDh9TW1nLq1CmUSiVBQUFoNBqc\nnZ2Ry+WMjo5ibW0tXgwmJiZCHpSYmEhqauoveRSBX8nswz/+8Y93f/Ob32BkZCQiojpgiO4s7+7u\nzpIlS3BxcSE8PJyxsTEqKiqIiIjA1NSUFStWYGVlhZ+fH7NmzWL//v3CGOzq6sq1a9eYnJwU+f3B\nwUHa29spKCggOjqalpYW1qxZQ2hoqNgVnD17FmNjY0pLS1m8eDHt7e2kpaXh5+eHUqlk3rx53Lp1\nCzMzM7Zs2SK2n6GhofT19YlosZubG8XFxQwMDPDDDz8IUeng4KCA1FZWBsF+DAAAIABJREFUVuLj\n44NSqUQmk6HRaKiuriYuLg5vb2/09PRISkpienqasLAwgoODuXbtGk
5OTmLSr6qqiv379wur0ty5\nc8nIyECpVIqpOh8fH/T09Jg5cyavv/46Xl5egsxsZmYmWmPXrl3jxIkTAnAyOjrK+vXrRTpSl/lQ\nqVSkpKSgp6fH3r17GR4eFkAYV1dXtFotIyMjqNVqIezVeRV0VfPu7m7mzp3LsWPH8PT0xMTERJjG\nBwcHGR4eFrTjbdu2UVZWhoODA3PmzGHVqlUsW7aM8fFxkpKSaG5upqSkhKqqKqytrcXRYt++fUxM\nTODt7c3169exsLDA2toaExMTDAwM8PLyEjJiXct09uzZxMXFkZqayt69e6mrqyMnJ4fIyEj++c9/\n0tXVRXh4OOPj40xNTYm6AUBSUhJOTk5UVFSIhWdwcJCSkhJiYmIICQmhsbEROzs7gXv38vKirq6O\na9euERsbKwbcdMe53t5e7Ozs6OrqwtDQkIKCAsrLywkMDKSyslKAbsLCwoQTVSdMWrduHd99993/\nnNkHXZRV97DGxcUJMlBaWhrffvutAFnq6elx6tQpIV6Vy+WCkqxQKPDx8aGtrQ14sopv3boVExMT\nXF1dSUpKore3l4ULFzI0NER2djYmJiaiiPngwQMkEgleXl4CaTY+Po6TkxMajYYtW7YAT9pFCxcu\nJCIigtmzZ+Pl5UVmZiZRUVHCi9DZ2cmuXbsoLy/n5ZdfFuKUuro6TE1Nyc/Pp76+nlWrViGXywHY\ntm0bVVVVyOVyWltbsba25uDBg5w8eZLR0VHu3LnDsWPHMDMzo6ioCHt7e+zt7ZFKpaLSrOvVGxsb\nk5KSIgpkOslIT08Pzc3NdHd3U15eTmJiIjdv3qSzs5P09HRxoxoZGTF79mweP37MvXv3GBsbIzk5\nGXiCeNMVK+fNm8fQ0BALFizg2rVrtLe3i7djT08PHh4eGBgYoFAo2LlzJ3FxcURERFBXV0d1dbXg\nP+qq+bm5uaKI5uzsLASsGzZsEO1GuVwugK6urq5IJBJcXV05d+4chYWFWFtbk5SUxIMHD4SCvaGh\nQeDNpFIpUqmUyspKsavTTRnqZg9SU1Px8fGhvr5eqAarq6tpaWmhqalJqOjj4uKwt7fH19dX1KIC\nAwMpKytDq9Uik8nYuXOnGCjTYfB0C3FVVZWglO/fv5/Zs2cDCAJWUVGR4HwsW7aMc+fOERcXJ5R/\nCQkJPHr0CDMzM5GgtLGxwcPDg7a2Nnx9fcXx+Jdev4pFAcDY2FhU5HU5BENDQ9Fv/eGHH8jMzGTf\nvn2EhITg7+/PJ598ItpP//rXv5BKpdy7d098TK1WS3JyMsePH2fnzp1cvXqVjz76CHiSsbezsxPj\n1DoXYEFBAa2trQQGBopecUJCAlVVVTQ2NgJPtvlZWVkcPHiQ/v5+GhsbsbKywtLSkszMTEpKSpgx\nYwZubm5cvnyZmJgYgZB78cUXxVbytddeo7CwkHnz5gFPosO6I42TkxP37t3Dy8sLFxcXwT189913\nyczMZHJykvHxcUZGRggLC0MikbBhwwb8/f155pln+PnnnzEyMhLgF112/vHjx1y9epXbt2/T0dHB\nuXPnRJvUy8uLhoYG6urqiIiIwNfXF0NDQ0pLSzE2NhY7iby8PO7du0diYiLvvfce8+fPp6qqSjxQ\nOTk5tLa2IpFIBD05NjZWjE+3tbXR2dmJqampOA7qQkYffvihYDfGxsZy5swZVq1axfHjxykuLubO\nnTsUFxfj7+/P66+/jrGxsQh/xcTE4Ovry/DwMDdu3ODjjz8WgbjOzk6xmOlkNzKZjBMnTvDw4UNO\nnz6No6OjQPD/9a9/5euvv6ajo4M5c+ZQWFiIt7c3a9eupbOzU1inN27cKIamJiYmxEOv61DI5XJc\nXFzw8fGhvb0dd3d3jIyMsLW1RS6Xc/nyZb799lva2toEZRuedLfUajWff/45ixcvxs7OjpGREVat\nWsX09DS9vb0MDw8zMDDA5OQkcrmcyclJ7t+/L8S1Go2Guro6+vv7hVvkl1y/iuPD4cOH39Wpu3Rb\nTK1Wi7u7u0iu6Vov09PTwpsXEBDA8ePH2bhxI3PmzOHu3bskJSXR0NDA6dOniYmJEQTj7u5uQkJC\nRDIxIiKCRYsWMXv2bE6cOIGzs7MoLI6MjDBnzhxu3LghOAn9/f2kp6dTVlbGypUrkUgk7N+/n507\nd6JWqxkZGRHE4fb2dkZGRli+fDnwxGsxNjbGypUrOXfuHKtXryYsLIxz586JOYfLly8jl8uJjIzE\n0NCQhoYGkTocHBykr69PiGVffPFFUlJSsLKyEnTn4uJikpKSGBwcZGpqCg8PD27fvs27777L0aNH\nkUgkpKSkIJFI8PPzw8rKiuTkZJqamlCpVPT09GBjY0NwcDDDw8PU1tby6NEjent7qaqqwt7eHhcX\nF+zs7LCzs2P37t388MMPmJubk5SUxPXr1+nr68Pb25uWlhbRYiwqKsLd3Z2qqiqRIlQoFGg0GjQa\nDd7e3kilUpqamrh06RJ+fn5MTEwIJuS6deuwtbWloqLi30jSVVVVWFpaUlBQQENDA/r6+qSkpODm\n5sbg4CAHDhxgaGiIqKgozp8/T2xsLGZmZoSFhXHnzh2amppwc3PjueeeE+lVjUbD+Pi4GHjy8fHB\nx8eHjo4OrK2tmZiYIDU1leXLl9Pf38/x48fZunWrsFX5+fmhVqsJCQnB2tqaxsZG0tLSBIvS0NCQ\nqKgoNm3aREtLCwUFBezevVu0W+vr62lubqa5uRlXV1dsbGzo6enB2dkZc3NzLl++zNtvv019fb0A\n1HZ3d+Po6EhaWhpxcXHExcVRXl5OSEiIWHBnzpyJp6fnLz4+/CoWhc8///zdv/71r5iamnLs2DFe\nf/11jIyMeP/995k3bx63b98WPxSdWTgzMxMnJydWrFjB6dOnsbGxITk5WXAMLl68yKJFi8Q3p6mp\niZGREczNzTl8+DDNzc1s2LCB9PR0YmJiSE1Nxc7OjoULF3L//n2cnJzE1F1vby8REREolUouXrzI\n/PnzuX37NvHx8ahUKq5evUp7ezuTk5OMjo6iUChob2/H1tYWX19fkZ6rrKzEysqKxsZGLCws6Ovr\no76+HqVSyfXr15HL5bi7u5OTk8PU1BTLly/nzTffxM3NDX9/fyIiIvDx8eGDDz4Qk5k66IqLi4s4\ninh5eVFRUcHChQtRqVTY2NhgbGyMqakpo6Ojou2q+9XPz0/IfA0NDZmensbQ0BCNRoOHhwd1dXXY\n2Nhga2vL0NAQPT09qNVqNm3axODgoFDcTU1NoVQqCQ8PJzc3l4iICKRSKaWlpYSEhKBWq/Hy8mJ4\neFi80XVtXkdHR3766ScWLFjAnDlzuHnzJiqVCkNDQ9RqNXZ2dvT394uotw5RJpFIyMrKEoEkV1dX\ngoKCcHJyIi4ujqamJlJSUnjrrbcEwl03RyGXyxkaGuLs2bN89tlnTE5OMjAwwMjICM7OzkJeMz09\nTUFBAcuXLycoKIhjx47R0
9NDVlYWKpWKpUuXUlhYKOZe+vv7cXFxEeSsNWvW0NvbS0FBAU1NTeLY\np6+vj76+vmj1joyMEBMTw+3bt0VB2MrKCgsLC+7cucMrr7yCQqEQY9qFhYXo6+ujVqtZvHgxvb29\nXLlyhaSkJHGUzMnJwdTUlJKSElJSUv5nLQo6JZbuLKWz4Oh8ira2tsyaNQt9fX1B/PH19eXSpUto\ntVoOHDjAp59+SnFxMceOHaOxsZG4uDiioqL4+eefcXNzw8XFhaysLHbs2CEU8JmZmdy6dUsw/Jua\nmggJCeG7777DxMSE9evXU1paSkxMDCqVinPnzvGXv/yF2tpafH19sbW1JSIigpkzZ6JQKJg1a5Z4\n2LKzswVaXavV4uXlhVqtZmpqSuTYTUxMCAkJ4cSJE6xcuZKuri6CgoKYmJjgwYMHWFhYoNVq6e3t\npbu7GxcXF1Fos7GxQU9Pj7a2NmxsbIR6/NixY6LdeOjQIVQqFbNmzeLMmTOsWLGCzs5OjI2NhUQn\nPj4ehUJBYmIiDQ0NFBcXo1QqmZqaIjU1lS1btmBtbc34+LiIPhsZGfGPf/wDKysr9PT0aG5uJjY2\nlra2NgYGBigpKWH27NmUlpYyNDTE7du38fT0JC8vD1tbW86cOYOrqysGBgbo6emRm5tLZmYmCxcu\nJDs7m8TERM6cOYODgwMDAwPIZDIeP36MjY0Nvb29rF69GgMDAyQSCba2tgJl1t7eTkZGhvBzTExM\ncOvWLWxtbQUcxcDAgOrqaiIjI+nr68PDw0MEtHROCH19fdzc3DA1NSU7O5vQ0FBRh9LlGK5cucLa\ntWvx8vLC29ube/fu0dHRgUwm49tvv2XlypW4ublx7tw5TE1NMTc3FyPRJSUl5ObmEhgYyOXLl1mz\nZo2QGWVlZbFnzx6Renz06BErV66kvr6e+/fv4+npyY0bN0hKSuLu3bv4+flx6NAh1Gq1+P7o0APO\nzs6Cs5GTk/O/NOf/vf73+t/r367/OTRnV1dXvvjiC2praxkeHiY6OpqGhgYBWNFp5Ds7O2lsbGTe\nvHm4u7vz1ltvsXz5cqKiokS0t7W1FX9/f+Lj4/nuu+9wcnJCqVSSmpqKo6OjSKrpUGuLFy8mKytL\nbJt1W+yWlhbs7e3x9vamurqa8vJynJ2deeWVVygpKaG5uZmLFy+ybNkyEbfVarViBsPBwYHs7Gzq\n6uoYGBhgx44ddHZ2UlRUJCAs09PTnDx5UtCXnnnmGZycnNi4cSNfffUVMTEx+Pj4sG/fPl588UUe\nPHggRma7u7uZnp5GqVQKP4JMJsPAwACtVouxsTGBgYGiaKkjC+m2zwqFAolEIirwX375pRjdtrW1\nZWJiQnwtY2NjZGRkEBoaKvyZOmKwWq0mMjKSEydO0NXVJZKIFy5cwNbWFkdHR8ER1BVbT58+LViE\no6OjBAYG0t/fz5/+9Cdee+01kU/56quvhKMxKiqKhoYG7t27J3iLOuO0zn6VnZ3Npk2b+O6774iO\njhbhpM8//5wPPviAXbt2cf78eY4fP05sbCwhISE0NDTg5OREf38/oaGh5OfnMzo6KlB/mZmZzJ07\nV7gurl69ir29Paampqxbt45t27axbds2fH19yc3NxcLCggcPHlBVVcVvf/tbTE1NaW1tpbm5mb6+\nPjIyMoiNjWXOnDlcuXKFTZs2iYlYKysrMjMz2b17N7dv38bKyoq//e1vODs74+DgQE1NDY6OjsLT\noRvtPnHiBKGhoURHR/PSSy/xww8/UF1djUajoampiYmJCXx9fUWd5/92/Sq6D2q1mrS0NPT19TE3\nNxc99dWrV+Ps7Iybm5vI7UdFRSGTyTA0NOT999/HyckJrVZLWloa3d3dSCQSQfyJiopidHSUgYEB\nlixZgkajwd/fH3t7e1QqFS4uLvz888/4+vri5eUlqE3d3d20trYSGhrK0aNH8fX1Zf78+YLTAE+i\nztPT0/T39zM4OEhxcTHDw8N0d3dz//59vvnmG2bMmIFcLmf16tX09vZiaGhIbm4uZmZmwhi9Y8cO\n4fkbGRkhJCSEq1evCvrStWvXCA8PF2drAwMDkeC0tbVlwYIFeHl5ibP8w4cPiY+P5/r163R0dODs\n7MzY2Bh+fn54eXmxZMkSZDIZxcXFVFZW0traSn19vWAb6LTsMTExyOVy1Go1cXFxuLi4sGTJEqan\np+np6eHLL78U0JSRkRH27t1LSUkJRUVFnDp1is7OTiorK3n//fextrYmKioKa2trCgsL8fX1Ze7c\nufT39+Pu7k5GRgaZmZnAk0XCyMgIS0tLEQl3dHSksrKS9vZ2jIyMyM/Pp6WlhfT0dPLz8wXuzNXV\nlZGREZ599lnBWdBongRqExMTuXv3Ls3NzYSHh3P79m3Ky8sxNzentraWgIAA0WGYNWsWmZmZSCQS\ntFots2bNoqamRkzd2tnZiQGuuLg4zM3NaWtrw9bWlvr6eqRSKcuWLaOmpoYrV65ga2tLc3MzQ0ND\nxMfHC6Gvjj2pY1fcuXOHvLw84AlP4dKlS7z99tv8/ve/Z/78+WzevJkVK1ZgZ2fH2NgYbW1tNDY2\niiGzqakpVq5cSWpqqpDWhIeH4+rqyqeffvqLn8dfxaIA4ObmRl9fHwqFgtDQUKKioujp6WF0dJR7\n9+6xe/duzM3Nsbe3x9zcnL6+PqRSqTAvRUZG0tHRIQxIgHjz37hxg9HRUZFk011yuVy0rEZHRwkJ\nCcHV1RUzMzOeeeYZxsfHSUhIYHR0lMHBQfT1n/Bi7t+/j0qlEpyB8PBw1q5dS3R0NIsWLUIqlfLs\ns8+iUCgwNTXFzc2N6upqjhw5gkKhoK2tjWXLluHh4cHcuXPFyPPo6Kj4P3QAlsjISJycnDh8+DDX\nrl0TOnbdjEVdXR1eXl5COe7r6ytuXl3R0svLi7t373L79m1+/vlnAgMDkclkbNiwgfj4eJqamggO\nDkatVgsCUm5urtg1HD16FHt7e4qLi5menqa1tZU1a9aI78n+/fs5ceIEX375Jf7+/gwMDODl5YVU\nKsXb25v79++jp6dHf38/zs7OuLu7c+3aNcbHx8Vb8L333gNgy5YtwhW6efNmgcKztramoKCAtrY2\nXnzxRUJDQ5k3bx579+4V9ZyQkBDa29s5cOCAWPR10JL/+I//EC+IXbt2sW/fPiwtLSkpKRF5mOHh\nYWpqaqisrOTpp5/m+++/Z86cORQUFBAXF4dWqxWtQV1IycvLi/T0dNrb26mrqxP3sbm5OSkpKcya\nNYubN28SGRkpDOa6MJtOoacT9VZUVIg0rm4yVqPR8MUXX9DZ2SkM4Q8ePBALoZOTE52dndy9e5f2\n9nZ2795Nd3e3iGZrtVpsbW3561//+oufxV/FoqDVaklPTxe9/sLCQkGZefToEUVFRSxYsEB0HPLy\n8sjOzubUqVNcu3aN69ev09bWhlKpRK1Wi1V8cnISpVJJWFgYra2tBAcHEx0dja2tLf
BkUdi0aRMD\nAwNcuXKFPXv28ODBA1xcXMRAkG5IxcTEhFu3bgFPYChXr14VpmcjIyN+/PFHpqenGRwcFJDTjo4O\nbGxsyMrKwszMDG9vb6ytrbG0tBRaL51/AJ6Eom7cuIGxsTGPHz8WC0ZZWRkLFiwgPDyczz//XPSh\np6amuHz5Mo2NjQQEBIjPW6cV+/HHH9m2bZt4qCwsLMT8wIwZM1i5cqVIyJ07d+7f3rZPP/00MplM\nZP11BbiRkRECAwO5cOECsbGxHDt2TCw+FRUVGBgYYG1tTUVFBQMDA9jY2KBUKnF0dCQjI4OWlhaG\nhoaorq4mOzsbIyMjGhsbBVg0Pz9fIMreeOMNEhISSEpKIjs7m+7ubrRaLfv372fjxo2cPXuWwcFB\nbt26hY2NDQqFQoy064aT/jtmvba2Fjc3N9LT0xkZGWHfvn1i5Hhqagp7e3scHR0xMzMTRW5fX18s\nLS3Jycnhww8/5OjRo1y7do2pqSngCf/B29uboqIibGxsBPWqtbUVZ2dnIiMj2b17N35+fgwODmJq\nakpAQABz5sxh2bJleHl5ERgYSE5ODtu2bWPhwoXAE6KTr68vGo2Gzs5OLl26xJYtWwgJCcHJyYl5\n8+aRmJhIaWkpSUlJzJ8/n/T0dPr6+ti7dy++vr48ePCAgoICLl26JBT3v+T6VXQfvvjii3d1K35T\nUxMRERFC4gEglUpRKpUkJiZy5MgRzMzMUKvVWFtb4+TkRFNTk3ggLCws8Pf35+zZs6xfvx4zMzMU\nCgVdXV24u7tTVlbG1NQUEomEgYEBEUcuKSkR1uPKykoePXqEh4cHvb29eHp6MjExgYGBAcnJyeza\ntYuxsTEePnzI8PAwDx48wNnZmZ6eHuRyuTBb67bmLS0tFBUVYWdnx/DwMLNnz0atVqNWq1mzZg0j\nIyMkJyfzwgsviIdRo9GQm5tLaGgoBQUF2NnZIZPJaGxsZGhoiMjISGpra4VUtqurC3t7e5YsWcK9\ne/cwNDRk1apVfPvtt0xOTorWrpGREVeuXGF4eBiNRoObmxtdXV1s2rQJY2NjOjo6iIqKwszMTCTv\nAgMDOXv2LNHR0Wg0Gu7evUtERARmZmbCgKWnp8fU1BT9/f1CpOLh4YGNjQ0qlQpjY2MOHz4sqMWr\nV68mJSWFsLAwrK2tefToEXl5eaxbt06kMJ999lk+++wzFixYQGFhIZs2bUIul9PX18eWLVsICgoi\nODiYoaEhsRgqFAq6u7t54YUXyMrKYnp6mpycHPT09JBIJGg0GqysrCgvL8fb21sYuHXj3BqNBplM\nRlZWltiSe3p6Ul1dTVRUFE5OToSFhaFUKkULtbe3F2NjY0JDQ4WcdnBwkN/85jeo1WpSU1O5e/cu\nFRUVaDQaHBwc8Pf3F3UZExMTIiIicHBwQC6X8/XXX7Ny5Uqam5sxMjKisLCQLVu20NzcLDI1ujb9\n+Pi4aFfq2pvl5eVIJBISExMFJyQ+Pp79+/f/z4k563quupZXfX09KSkp4hydlJQktlnz58+np6eH\ne/fu0dvbi56enojlZmRkEB4eLsAaOsuTWq3G3t6euro6TExMaGpqQiKRIJfLhWfCy8uLixcvEhsb\nS1xcHIsXL6aurg6lUomZmRlyuVxsRdVqNWVlZYSEhGBubo6RkRGbN28WEFDdNKWnpydubm50d3ej\np6cntpBhYWEie/DZZ59hYmICPAHYHj58mCtXrlBXV0dQUBA7duzAz8+P5uZmMd/v4uKCqakpW7du\nxdraGldXV+7evUtDQ4NgRt69e5cDBw6we/duAgICaGhowMvLi+LiYuRyOYcPH2b+/PkcPHiQzZs3\nC/jI9PQ0Z86c4d69eyL3UVtby5o1a4RZWU9Pj6amJu7du0dOTg6XL19GIpHQ0NBAYmIin3zyCSEh\nIUxOTnLgwAEsLCwwNzdn165drFixgtWrV9PZ2cny5ctJTU1FX19fTPHduXNHDBl9/PHHBAQEcOvW\nLV5//XWqqqpoaWlh165dbNu2jUWLFpGamkp5eblgT+gWsUOHDrF06VJmzZoFwBtvvMHChQvJzMxE\nqVSKGLm/vz8tLS0igiyVSiksLCQ0NFR4LSQSCRYWFmIH4uDgwI8//giARqOhp6eH559/nsLCQpKS\nknBxccHExISRkREeP35MeHg4dnZ2ODg44OzsTE5ODr29vRw/fpzQ0FCCg4ORyWTk5OSIROOOHTuI\niorC3d2dXbt2cevWLVFMb2lp4U9/+hPR0dFERkbyzDPPkJ+fz549e0hNTeWrr74iMDAQAwMDTp48\nycyZM/9/xZx/FS1JLy8vbXR0NGvWrMHa2lo8yGVlZXh5eVFUVERLSwtGRkZ0dHRgaWlJVFQUd+/e\npbe3V4BElixZQmxsLHV1dUIZ3t7eLrb4utHdgIAArl27RlBQECqVirCwMKGPKywsRKvV0tfXJ27U\n/v5+fHx8OHv2LH/84x+Jj49n8eLFdHV1MT4+jrm5OXK5HDc3NwYGBggMDMTS0lKk9mbMmMGlS5do\namoiPj6evLw8Vq1ahYGBAVNTU7S3t7N27VpKS0vJzMwkISGBn376iaioKBEkKi8vx8zMDEdHR+Ry\nOUeOHEFfX5958+ZhaWnJ1atXKS0tZdWqVYJwpCs03rhxgxdeeIFLly4J9FdXVxcBAQG4u7sLb+Ht\n27dJSkrC09OTn376SRx3iouL2bNnDxMTE3z//fdYWFjg5+eHRqPh+++/p7+/n6ioKCIjI+ns7BRG\n6OnpaYyMjIRWb2BggBUrVpCRkYGFhQUtLS0Cq5adnc1LL73EBx98QHNzM1FRUZSWluLk5ER9fT3G\nxsb4+/szc+ZMMVgUHh5OSkoK8MTUtH37dhQKBS0tLaSmpoq+/fPPP099fT3Z2dm4urpy8uRJNm7c\niJ6entDM1dXV4ejoyIcffkhMTAzh4eFMTU0JFsSxY8d4++23Aejr68PW1pbg4GDR6cjJyUEmk6FS\nqdi+fTvffvst8fHxPH78mKqqKgYHBwXpOSAgAA8PD7q6uoiNjaW/v18g2gcHB9mzZw8lJSUC0nvx\n4kXmzJnD/fv3mTFjBuvXryc5OZmZM2cSGhpKVlYW+fn54qjT3d1NQkKCmOq1sbHB0NCQdevW/aKW\n5K9ipzAxMUFsbCwKhUJMyOm05xcuXKC5uRmlUsnKlSuJjIwUNqKOjg7hQdizZw9arZY7d+6ImoEO\nAOvi4kJ9fT0XL14UkWDd8IuZmRnGxsaMj4+LM7WRkRGBgYGYmZmJEEh7eztubm4ArFmzBqlUKtpg\nFhYWBAYGireOSqUSKO+enh4aGxspKSkhJCSEBQsWEBoaytjYmHAM6lbx5ORkNBoNb731Fvfv32dy\nchKVSiXAIyUlJejr6/Po0SMsLCxEfHZsbIzVq1ezfv16Hj9+TGNjI93d3ahUKjo6OsQgmKmpKWZm\nZgwMDIht6cTEhKBp//cEpk5I29jYiIODA
+3t7eTn5+Ph4YFMJqOqqgqVSiVEs7oo9+bNm5menqaq\nqopTp05x5MgR/uM//kNMon788cfCc9Hd3U1wcLDo2ACiHdne3o6FhQU2Njb4+vpib28vJjwbGxsZ\nGxvjxIkTIrKtmy7VyVFKSkqYOXOmaH1+8cUXGBgYUFBQQEJCAqdPn6a1tZW0tDQaGhqwsrLC3d2d\nr7/+Gg8PDzw9PZHL5fzwww+0t7fj7e3NgQMHqK6upr6+nsOHDwPg7+9Pc3MzDg4OuLi4MDo6Smtr\nK11dXZw9e1YQlsLDw9m+fTsajYaMjAyuXLnCyZMnxWDWtWvXiI+PF4Xv4uJiMZQ3Y8YMHB0dsbe3\n54UXXmBiYgJjY2M6OzsF3k03/q1UKomMjOTnn38mIiIClUqFlZWVGPX+JdevIqdgZGREZ2cnBgYG\naDQaWlpaWLZsGaWlpYKQ1NbWhkwmw8fHB2NjY4aHh1mxYgU5OTkjTYgCAAAgAElEQVRkZWUREBCA\nk5MT+fn5ApXV0NCATCYTQlaFQoG5uTmBgYGiuKTbDeg0bLqbYXx8nNHRURERtrCwEGflGTNmEBgY\niFKpFEzJhw8folAo2LZtGwcPHhQ5AN12NDg4GH19fVQqFREREYLUo+MHwJO8RkZGBjt37hTikX37\n9pGUlCRQ6v39/ZiYmLB48WIuXLjATz/9REhICNHR0Wi1WoKCgrCzsxPassrKSurr6/H19aW5uZmn\nn35aFNump6c5dOgQgYGB1NbWMnv2bMEl1Al4bty4wZYtW6irq+Pu3btMT08za9Ys6urqMDQ0xMjI\nCH9/f86cOSM6IPHx8bz11lu0tbWxYMECFAoFZWVlVFdXs2PHDsLCwjh//jz29vYEBgYSFRUlCmw6\na3JgYCAPHz4kIyMDT09PfHx8MDQ0RC6X09PT82+1AVdXVyYnJ+no6KC/v5/m5mYmJydpbW0lNzcX\ngFdffZX169cTEBBAZ2cnKSkplJWVkZCQgKWlJYmJiUJwGx4ejo+PD3fu3EGr1dLc3CygM9XV1ahU\nKvGQZWRkMHfuXIKCgjh+/DgLFiygp6eH4OBgXF1dSUlJEa7NkZERwsPD6erqYvv27WRmZjI1NUVR\nURFr166loaFBqPDq6+uxsLCgq6uLiooKlEolPT096OnpcejQIbq6usTOcMmSJURGRopEp6GhIW5u\nbmRmZmJiYkJ+fr6YxP0l169ip6CTY/T19WFsbMyiRYsEfFJXKJmcnGRkZAStVsvmzZuJj49naGiI\nuXPnCu+jLrOum5rz9vamqqoKQ0NDOjs7GRoaIi8vj4aGBoKDg5k3bx719fU8fvyYuro6goODmTVr\nFpaWlkLrfffuXSwtLent7RW2IV39Qwc2DQsLY+7cuejr61NVVUVrayvl5eVcvnyZV155hfb2dv74\nxz+KeQLdx8rOzhZyG3iyzVu5ciX79+9Ho9FQUFDAzp07BTw2MTERR0dH6uvree+991Cr1Wi1Wior\nKykrK2Pu3LmMjIxQXFxMXV0dPT09JCUlsWHDBoaHh9m6dSuTk5MkJydTW1uLu7s7f/jDH5BIJHR1\ndTE6OsqpU6eQSqVoNBoBta2qqhJRagsLCzHwMz09LWYeEhMTBeXZ29ubuLg49uzZg6GhIVu2bMHQ\n0JD9+/cTGBhISkoKERERPPfcc3zyySe88sorgpD04MEDFAoFxsbGgsXY2Ngo2p8XL16kuLgYa2tr\nMck6f/58oeC7f/8+dXV1yOVyoqOjycjIAKCqqkrwMgwMDAgMDGTmzJmCmWhvby8Wu7Nnz3L8+HFu\n3bol4sO6o6xWq8XBwUHQnBcvXsz09LTIG3z55ZdUVlaKwnNXVxd9fX28/PLLPPPMM2zduhVfX1+u\nXbsmCtBqtZrbt2+LYBI8yay0t7fz5ptvsnnzZhISEnjttddIS0vDwcGBp556igsXLqDVasnLy+PY\nsWPi/hwdHWXDhg2YmJiwf/9+MVD1S69fxaKgg4GamppSWVnJnTt3sLOz4+WXX0apVCKXy9FoNISG\nhgqU1bfffotMJiMmJoaYmBhkMhmpqamMjY0Jk5OPjw/z58+noaEBAwMDsrKyBPqqoaGB5uZmAcYc\nGxsT9t7x8XEGBwfJz89HX19ftKp0xxJ/f38KCwuxsrIiPDxcTF1evXqVlpYWNm3ahJeXF5999hk+\nPj4MDAywefNm9PX1OX/+PIGBgVRUVAiTk64wef36dVQqFZaWlpiamuLh4SFqIaampoLaq1Qq6e/v\nF9Tjp59+Gj09Pfbv309dXR11dXV4e3ujUqlob2/H09OT3/zmN5w7d46xsTHc3NxwdHSkqqpKUIz2\n7t2Lvb093d3deHl5CdiqTrLS2NjIc889x+rVq+nv7+epp57CxsZGaNPj4uJQq9XMmDGDtLQ0PDw8\nyMzMZMWKFQwPD+Po6IhKpUKlUmFkZIREIuGnn36iurqaiIgIli59IhOLiYlhfHyc+/fvc//+fc6e\nPcv09DR9fX10dnYil8sxNjamra0NS0tLPD09+fnnn5FIJBgaGqLVasUodk5ODgYGTzbDDg4OAv/m\n5OTErFmzRPHYzs6OjIwMzM3NmZiYYMGCBbi5ubFmzRo+++wz8vLy0NPTE+EyFxcXgeWXSqUUFxcz\nPj5OR0cH8IQi3tPTQ0VFBY6Ojmg0GmpqanBwcCArK4uWlhYGBwcZHBwkMDCQP/zhD8TFxVFRUSH4\nD56enmRkZPDOO+9QWVnJ9evXSU9PZ+bMmfzud79j2bJlyOVycZ+4u7uTl5dHWFgYUVFR/PTTTwwO\nDoq05eDg4C9+Hn8Vi4KhoSEeHh5ip+Dm5sbo6CgZGRmcPXsWeAIH+ctf/sL69euFeefMmTNcuXKF\n5uZmbG1tsbKyIjAwUJwrP/roI6RSKTExMajVakHTrampYWxsjObmZkpLS7lx4wYFBQXiDSOVSqmv\nr2dkZARLS0s6Ozs5cuSI2IKFhYWxbds2lEolzc3NSKVSTp48yUsvvcTKlSvFgM7y5csJCwujvLyc\njRs3cv/+fQwNDSkrK8PFxYWamhoWL15MRESE+Li9vb0sWrSIR48eMTk5ibW1NevWrcPb25u0tDQc\nHR1RKBTMnTtXtL7s7e0JDw/HxsaGBQsWEB0djZeXF1u2bGFkZITp6Wlyc3N58803cXd3p7e3l4qK\nChQKBf/6178EB0A3k+/v749Wq8XX15c333yTjo4OgoODKS0t5fbt2xQXF3PkyBHKy8sJDw9HoVBw\n584dJicnxWJ16dIlfv/735OcnIypqSnV1dUUFhZy5swZIV01NjZm9uzZzJgxQ2zzv/zyS/T09Kir\nq8PMzIxDhw5RXl6Ou7u7CD/pFmG1Ws3p06cJCgrCyMiI6upqqqqqRMhtYGBAKPnu3LkjnAv19fU4\nOzsTHh5OSUkJhw4dEji+e/fuoVQqiYqKYsmSJXR3dwNP3twnT56kqqqKf/7zn1y+fBmA48ePk5iY\n
SEZGBsHBwUxOTjJz5kySkpL47W9/S0BAAGNjY0xPTws4q7OzM7NnzyYyMlJ0be7du0dNTY3QxkVE\nRPDRRx8xb948PDw8MDc3R6VSYW9vT2NjI48fP8bd3Z0ZM2aIz9HPz4/09HQePXpEZ2cnYWFhPP/8\n86xevVrsIn7J9auoKUxNTQkst271PHjwIBs3buTll18GnrxBXFxc6OrqIj09ncrKShYvXizUbocP\nHyY4OJi+vj4WLVrE3//+dxFnVSqVGBoaij5/fn4+ERERWFhYUFpaioeHB62trSxatIiuri7y8vKY\nN28eDx8+xMHBQRTzdNRlmUzGsWPH6OjoICwsjBMnTpCWlsbIyAijo6Pih7d7925mzpzJ+vXrRQVY\nF5PVTffZ29tz/PhxAOGFbGxsxNjYmIyMDAwMDARk1NPTk6CgIE6fPo29vT319fUsXboUBwcHbt++\njVQq5caNG8TExGBhYcH169dxc3Pj+PHjeHp68vnnn/Pee+8JtoHuQX/06BH29vbcvHmT559/nrS0\nNJqamnByckIikQi6ku7r0qU0a2trGR0d5dNPPyUmJoZ58+Zx5MgRtm7dilQqJS0tjbCwMBEm02q1\nosf+3nvv8ac//YmsrCy++eYbcdMuXboUZ2dnent70Wg0fPzxx3zxxRdUVVURERFBZWUl09PTwp1R\nXl5Ofn4+HR0dSKVSJBKJwOc5OzuL+lJPTw9KpZJDhw7R29vLs88+S0ZGBgsXLhSx9tTUVCIjI7l5\n8yazZ8/G2toaR0dHTpw4QW5uLp6enuTn54t5DHhSzM7Pz2fmzJnU19ezadMmysrKxEtGR34eGxuj\nsbGRoqIifH19GRsbQyKRYG5uzvnz53F1deWZZ54Rhih7e3uuXbvGyMgI/f39yGQy0eY9f/48mzdv\nFp5NnZwoIiJCuFJ2795NSUkJS5Ys4fPPPxdt719y/Sp2CiqVCm9vb5ycnNi6dauYozcwMGD//v0c\nPHiQ4uJiOjo6+Prrr/n2228Fq1HHuN+4cSN9fX0YGBgIQlJERITI5puamorBKh22fGBggLCwMDo7\nO3nuuefo7Ozkzp07mJmZiUKjXC4Xq64uxSaVSjE1NWVwcBBHR0fKy8tRqVQCKVdfX09jYyNOTk5c\nvHhR3HAeHh48fvyY5uZmMQabnZ3No0ePgCd595kzZ2JgYIC+vj7BwcEEBARQXFzMvHnzaGhoEP6E\noqIiwsPDcXBwYN++fbzzzju0tbUxf/58zM3NBdJchypbsGCBmLMwNTVFJpNRXV2NVqsVb8ihoSER\nj66pqcHV1ZUZM2YQEBDAuXPnmD9/Pu3t7TQ3N1NTU4OtrS1ZWVk8//zzuLu7MzQ0xObNm3FwcCAi\nIoLa2lokEgkLFy7EyMhI5DhMTU1ZvHgxarVatIt1mHmlUolEIkGlUpGYmIi/vz+XL1/G3t6en376\niQ8++EBAeFQqFS0tLSiVSqRSqfAgvPrqq3z//fc0NTUJ6vI777wjFuhPP/2U559/Hj09PW7evEli\nYiKjo6P86U9/4tKlSwJl19nZSUdHBydOnODcuXMkJyfT09PDjRs3UKvVAOzdu1cIaHTkpujoaAwM\nDOjs7EStVuPj44O5uTk1NTWYmpoKErharaahoYHQ0FDc3NzIzc2lvLwcgL/97W88evQIhULBqlWr\niI6OFtqDXbt28eDBA95++23RNlUqlXzxxRe0t7fz0UcfkZ2dTW5uLsnJyTg5OYljyS+5fhWLgqWl\nJa2trbS2tiKTycT2WDcRZ2lpyZo1awQ+28bGhh9++AGVSsXhw4dpaWkhOzubp556Cmtra8H7u3//\nvpgmrKqqwsjIiKCgIKKiorCyskKr1SKRSIiNjaWrq0u0g3x8fETXw9DQEKlUKh5IgCNHjgjUeGVl\nJXK5nMTERGJjY0VISalU8uOPPxIbG0t6ejouLi7U1tYyNjbG/fv30Wq14sbQ5f4lEglqtRo3Nze2\nb9/O8uXLcXJyIigoiGXLlrFkyRI6OzspLy9n7dq1JCYmcurUKUZHR1myZAktLS1IJBJCQ0PRarWM\nj48jk8nE5KeFhYVoly5evBhDQ0NUKhWNjY2YmpqydOlSIYnV19cXmravv/4aZ2dnampq8PT0pLGx\nEY1GIzyXOhdjVVUVf/7znzlx4gRqtVpEz4eGhoTwtrOzkwULFnDz5k2Bql+5ciUWFhbAk47Ro0eP\nWLx4MaampsIdmp6eTl1dHQkJCRgZGaGvr09fX58QDA8NDeHv78/Dhw8F5SorK0s4Kj/55BOCg4PJ\nz89n+/btvPHGGxQVFRESEkJeXh4GBgacP3+e+Ph49PT08Pf3x9nZGRcXF0ZGRnjllVcICQlBT08P\nY2NjUWjUdQc6Ozvp7u6mpaWFO3fuoFAoOHHiBPfv38fNzY2HDx/yj3/8Q/hHZTIZs2fP5t69e5iY\nmDA4OEhUVJSogbS2tmJhYcG9e/e4fPmykOzW19dz5coVDh8+THp6OuPj41RXVzNz5kw2b95MREQE\nGzduFAVWnauzuLj4Fz+Pv5rjQ2VlpRC6SiQSiouLiYuLo66uDjs7O3788UdKS0t5//33hRb9woUL\nLF++XAg1BgYGyM7OZnR0FHiCQTMzMxOuv4GBAczMzDA0NKStrQ2NRiPoPU5OTtja2gpIp6+vL5GR\nkUilUrq7uxkcHBRMfltbWxYuXMg333xDXFycaIMZGBgwZ84c5syZg76+PiUlJWRnZ+Po6EhdXZ3A\nmtfW1gp9/IoVK8Rbp66ujtjYWB4/fszRo0dpbGzExsYGqVRKUVERy5cvJyMjgw0bNjA0NMT27dtx\nd3fHwsKC2tpatm7dSm5uLmVlZXh4eAikfXJyMl5eXpiYmIiH7NVXXyU+Ph5PT0/s7OxISUmhv7+f\nrKwsEdnt7+/H1taW//zP/+SFF15ALpdjbW0tvrY//OEPfPDBB3h6enLy5EnkcjmOjo6YmJjw/PPP\n884775CcnIyVlRW/+93vWLVqFfn5+Vy6dEkIbXRSGF1btry8nHXr1gkPiM5YNTo6SmJiInl5edy6\ndYvg4GAxjm1qakpTUxN9fX0sW7aM7OxssUgEBwcDTwatCgoKqKmpwd3dnaysLOLi4jhy5AhBQUHC\no6DziDo5OdHQ0CDe5MuWLePo0aM4OzsjlUr585//zP9h7z2jo7zSNe2rlHPOsZRzFkqggCSyyGBj\ngwm2282hbUO32/a027Hdto/dtnEGZ7CxSQcBJkhCQgKhgEASEspZKqkUSjln1fxQ154+a501zZw5\n6xvPt2avxZ+iJJVK9e732c9z39cNCKGQubk5AwMDDA8PEx0dTUZGBh9//LGoEmJiYiguLsbExAQb\nGxvefvttMjMzMTMzIyEhAUdHRzIyMsT0QZXlOTU1hZOTExMTE3R1dfHQQw8JiXZFRYXoVTz22GPI\nZDLxNwoMDMTU1BRra+t/Vz0/yPpVVApaWlr09/eLzDs9P
T3x4Tt27BgXLlxgfn4eqVQqhCGlpaVE\nRkZy//590fTLzMzEx8cHV1dXYFE2rK+vT1RUFH5+ftja2qJUKgVodW5ujvb2diQSCR0dHczOzgpF\noba2trjYS0pKsLGxETbcpqYmUfaqDERubm785je/obW1lcLCQhoaGmhra8PJyQkvLy8aGhqoq6sj\nOzsbW1tbxsfHiY+Pp6ioiJqaGmCx45yamkpnZye9vb2Ulpbi6OiImpqawMDn5+dTUVHB8ePH2bBh\nAy0tLQQEBLBmzRpxkZuammJoaCjCbB9++GF6e3sZGhoSz3/hhRfYuHEjsAjNXbVqFfb29nR1dWFu\nbo6fnx9DQ0Pk5+eLu+fo6Kjw8cfHx3P06FHa2tp4++23RRS66rz85z//WWgllEol3377LW+99ZaY\nHllYWFBeXs727dvZtGmTcPE9//zznDx5ks7OTlasWEFNTY2oSjo7OzE0NMTR0REHBweBR6+pqSE5\nOVkkRBsZGbGwsCBQZbDIilT1nXR0dHB1dUUqleLn58fIyAhbtmyhsrKS2tpannjiCZKTk5mensbF\nxYUvv/ySHTt2UFlZKcjfxsbGwKJLMj8/XwjK7OzsGB8fFyawwcFBwXDU0dHByspKpILp6ekJyld2\ndjZqamriiBoXF8fCwgLNzc08++yztLe3C6Vpb28vU1NTyOVyduzYwVtvvUVQUBDGxsZcu3aNoqIi\n0f9ydnYmJCSEgwcPPvD1+KswRB05cuT1ffv24ezsjEwmo6ysjAMHDuDh4SFK+KmpKdasWUNqaiqe\nnp4CORUfH8/o6ChGRkbY2trS2NiIUqnk6tWr7Ny5E4lEwoULF6ipqWFmZgZTU1Pq6+vp7e0lMjJS\nGGkcHBzo6OjA2dlZwEp6enpobW1FV1cXOzs71NXV+frrr0lKShIW5/z8fBFhPzQ0RFpamhhrBgQE\n0Nvbi5OTEx0dHaJLvHnzZm7fvs2OHTsYGRlhcnKStLQ0li9fTl9fH9HR0ZiYmKCjo4OFhQWmpqaE\nhoYil8vFmG1qaoqQkBC2bNlCfX09Xl5e3LlzR4zmOjo6cHFx4fbt2+jr6+Pi4sJnn32Gqakpzc3N\nAk2n0m4oFAra2tqws7MTm5iGhgaenp5YWlrS0dFBX18fjY2NVFRUoKury+XLl7G3tyc5OVlgwV5+\n+WUkEomY+3d2djIzM0NQUBCvvPKKaIYpFAq2bNnCtWvXRMZGRUUFTk5O2NjYUFhYyOzsrOBdqtD4\n9fX1lJaWMjExga6uLtXV1djZ2WFkZERtbS1Hjx4lICAAAwMDTE1NGRwc5MKFC4SFhaGjo8OGDRsE\n3q23t5e8vDwcHR2FFmRoaIipqSnq6upwdHREV1eXhIQEAgIC0NLSwsjIiNHRUZKSkkRTtb+/X9zR\nVZqOpUuXkp6eLgKJVNFyquSnoaEh7OzsmJqaEkchd3d3Dh8+TGNjo9DbPP300yJBrK2tjZmZGdrb\n29m1axdqamqEh4fj4OBAZ2enqAo8PT0pLS3lySefJC8vj7KyMkxMTPjxxx//7zFEzczMMDAwQE1N\nDV5eXjg5OfHiiy+Snp6Ouro6JiYmuLu7U1FRwQcffICvry+WlpZERUXx888/Y2Jiwo0bN7h06RL+\n/v40NzcDizDQsbExPDw8cHd3x8HBgZmZGTFySk1N5fbt29TV1XHnzh26urooLy+nr6+P1tZWOjo6\nBK9PS0tLdJxVIp979+6xsLCAUqlk2bJlTE5OEhUVhZGREY6OjshkMuzs7IS0dvfu3Tg6OtLb2ytC\nZkJCQoSH/s6dO2zatIkPP/yQkpISDh06JKonVV7Dxo0baWhoIDg4mNHRUT777DOSk5Px8vLCw8OD\nnp4exsfHsbe3RyqV4uHhwdTUFIWFhaxevVqUm/8YumNpaUl0dDRbt24VFVJsbCyBgYFkZ2ezdOlS\nrl+/jomJCYcOHRJ23uTkZEpKSsjLyyMhIYG6ujrOnz/PunXrsLGxQalU8u677/LXv/6Va9euceDA\nAVxcXDA2NmZmZobh4WFhk3/uueeAxclOf38/kZGR5OTkYGJiws6dO/H392fp0qUiVPihhx5i9+7d\nhIeHU1paSnl5OWpqaoSGhuLi4kJ7e7swL8GidkChUHD+/HkGBgYEuj42Npbo6GgSEhJ46aWXxGYU\nHx/PunXrMDAwoKWlhUuXLgkqk+o1Aly+fJl9+/ZhY2MjID/6+vro6ekxMzPD4cOHOXXqFP39/SLk\ntb6+XiSiq1iYwcHBmJubi8+CKoj48OHDSKVSWlpaiIiIICgoiOeffx4DAwO6urrw8fFhdnaW1tZW\n5HK56CUFBwfz8ccfY2Njg1wuFxmVD7J+FZuCSj23Y8cOMZ9X2Xdzc3Npa2sjLS2N6Ohobt++TXFx\nMRMTE2KUd/PmTaanpwkICKCyslLMpouLiwW628nJicHBQRwcHBgdHWV8fJzQ0FAcHR1FiKiqj9DT\n00NxcTFSqRQ7OzsR7qE6l4WGhuLt7U1AQACbNm0iMTGR/v5+ysvLiY6OJiQkRLD/NTU1iY6OZmxs\njLy8PPbu3Ut7ezuenp54e3tz6dIlIV5ycXEhLS1NhMpkZGQIvDssHltUuQlWVlZirKihocHly5ep\nrKxES0uLiIgITp06haGhISEhISKvUoWPt7e35+233+b3v/89Tk5OODg44ObmhqWlJb6+vgKN39nZ\nib6+Pvn5+bz88su0tLTw0ksv4eDgQEhICMPDw7zxxhssWbIEXV1dtm7dytmzZ7l27RpHjhzBy8uL\nGzdu8Pbbb7Nlyxbm5+cxNjYmJCQEqVRKRUUFhw8fxsTEROgUVLi4iYkJduzYQWFhIVeuXGHp0qVU\nVFQwPDzMxYsXUSgUfPPNNxgbG5OUlIS/v784/5eWluLp6UlPT49gYBgYGPDnP/9ZELrv3r2LnZ0d\n586do6amhsbGRl599VWam5uFXuRvf/sbhw4dYvny5bi5uYn8yuzsbHEssba25tixY5w5cwYDAwMc\nHBwYGRmhp6dH0K4mJiaIiori97//vTjCNTY2UlVVxdmzZ/Hx8aG/v5/r16+LiENbW1v09fWJiIjg\n2rVrrFu3TlR6qampXL16lT179tDS0sJf//pX3N3dRaNalf+5Z88e7t69S0dHhxDePcj6VWwKc3Nz\naGtrc+bMGZycnLC3t6esrIz4+Hghd46Li2NycpJz584RFRVFaGgoSqUSPz8/CgsLCQoKwsXFhcDA\nQDGb1tPTE6k5lZWV6Onp0dTUhKenJ8XFxTQ0NBAYGEhZWRkdHR3Y2tri5ubG5OQke/bsobu7m97e\nXhE3p9Kl379/H0dHRxwdHTEzM+P69etIpVJ8fHzIz8+nuLiYffv2CdyXasOIiYkhKysLTU1NpFIp\nJ06cQCaTceTIEQCh4Z+cnGTFihUUFRVx9epVbty4QU5ODnFxcWhpaXHw4EFiY2NpamriwIEDQkOg\nSrfu6elh27Zt
dHV1iU3S19eXq1evIpFICAsLY8+ePaxdu5asrCza2tpEg1AVSPLpp58ilUp54okn\n0NfX58qVK/j7++Ps7Iy9vT3Z2dmiOTcwMEBDQwPvvfce6enp6Orq8thjj5GWloZMJqOhoUHQmIuL\ni7lw4QJTU1Po6emRlZWFhYWFqMJUCLOJiQmxCasi0/r7+1myZAkDAwOC0qUyDRUWFgp24ZIlS0R1\nqJrP9/X18fXXX+Pk5MSrr77K/v37SUlJYevWrTg6OmJra0t2drYwdLW3t/Pkk0/y2muv8cknn2Bi\nYsLFixfx9/cnIiKC7777DlicPujq6vLHP/6Ru3fvYmpqyvz8PDk5OVhbW+Pu7o6NjY3IzlAZpzo7\nOxkeHsbDw4P+/n709fWpra2lsbERWEyoevjhh0VQjYaGBocOHSI1NZWBgQHh8D1y5AivvfYaZWVl\njI+PC3GahoYGdnZ27Nmzh40bN+Lj4/PA1+Ovoqfw6aefvh4eHs7rr78uDCiWlpbo6+ujpqaGRCLB\n2NhYwECHhobIzc3l9u3b9Pb2EhoaynfffSe+1sLCgvz8fGGqUSgU2NnZkZWVhYuLC19//TXl5eW4\nurqiUCi4desW1tbWFBQU0N7eLtR+RkZGovGnoaHB+++/T1dXl5j3SyQSzp49S2BgIL29vTQ0NIhy\n/ebNm7i6uooZugr2YWBggEwmE89TYdXS09MJCAjAyMiIP/3pT2RlZeHh4YGrq6uAhJqZmVFdXY2L\ni4uIY6usrGRhYUE4Fvv6+khJScHBwYETJ04wPz+PRCIhJyeH1NRU1q9fL0ZZNjY2lJWV8cgjj9DQ\n0MDo6Ci5ubksWbKE8PBwrl69Sk5ODjk5ObzwwgtoaGiQlZVFYmIiJiYm4iJQKpUCnbd582ZOnTqF\nvr4+DQ0NYnT7zjvv4O7uTktLCzo6OszNzQkH6MDAAG1tbdy/f5/169djb28vvBRnz54V9CkXFxc8\nPT0ZHBzkyJEj2NnZCbR9SUkJv/zyi1AhlpeXi3CdI0eOEK6ukBgAACAASURBVB0dTUREBM3NzUil\nUqE1CA4OFmYmT09Puru7xRGss7MTLy8vZDKZcNIWFRVhbW3N+Pg49+/fJyAggKGhIRobGwUkdXh4\nmNjYWBoaGvj000+xt7fHy8uLH3/8URxLy8vLkUqllJSUYG9vz61bt3j44Yfp7u4mKyuLgIAAcnNz\nBZDF3d2dS5cuER8fz+eff86KFSuYnZ3l5s2bwgj1888/CzJWW1sbZWVlWFlZ8d1336k29v+anoJE\nIvlOIpEoJBJJ5T889rpEIpFLJJKyv/9b+w//9yeJRNIokUjqJBLJqgfZFEZGRqiurmbv3r04ODgQ\nHx/Prl27qK6upq6ujsTERDFKmpqawszMjH/5l38hISGB7OxsmpubherO3NxcdHCrqqp49NFHMTU1\n5dSpU0gkEgYGBvDy8mLp0qXU19cL49P4+Djz8/MimUoVt67qJmdkZPDQQw8BiM61h4cHNjY2wpIc\nHByMXC6nvr4eHR0dbG1tMTY2Fs5NAwMDkVil8t93dXWJ12tiYsLWrVv56quvxJFhdnaWsLAwbty4\nwcmTJ/Hy8mJiYoLU1FTc3NywsrIiISEBpVJJfn4+YWFh3L59m2PHjvHb3/6Wzz//nMnJSaRSKStW\nrODevXuMjo6ir6+Pq6sr8fHx1NbW0tXVRVNTEyEhIfzwww+0t7ezZs0a1qxZQ0pKCteuXcPJyYnc\n3FwBTb19+zZ9fX2Mjo4yPz/P6OgoV65c4fnnnxdWX1tbW6ampoiJiRFxf3V1dSQlJdHc3Cw2MFWG\n4vDwMHZ2dlRUVHDt2jWefvppli5dSm9vL52dnQwODjI3N8fZs2eZmppCV1eXw4cPMzU1xaOPPipI\nU7q6uty7d08oBOPi4hgfH8fBwYGCggL6+/vx8/Ojvr6epqYmOjo6aGhoIC4ujqGhIeFVyMrKor6+\nnpiYGObn51m9ejV+fn5i+qDSrqirq7N27VoBpVVFEb7zzjtMTk5SU1PDs88+i7u7O9u3bxe/76pV\nq0Ra9tmzZ4XYqqysjHXr1iGRSIiOjubw4cOiWsrOzubevXvMz89jZGSEpaUlIyMjBAUFsbCwQGlp\nKSEhIfj4+PD999/z7LPPkpGR8SCXIvBgx4djwOr/4PHDSqUy+O//rgJIJBJfYAfg9/ev+UIikaj/\nsx+gCrCYnp7m3LlztLe3U1RURHNzM2vXrqW4uBhra2tSUlIICgoSdFwTExPOnDlDaGioqCpUd2BY\n9OZfvXqVgoICfH19hfdhfHxcnJ+7u7uZmJggPDwcLy8vpqenycvLE/N0AwMDtLW1cXJyEk6zO3fu\nkJycLAJHHnnkEUxNTTEzM8PQ0JBt27YRGxsr7iwTExOEhobS1NSEgYEBAQEBPPTQQ1RXV7N27VqR\nkuTm5saVK1cICQlh586dAvbS29uLqakpERERaGhoMDk5SVJSEunp6dTV1XH9+nXBXlRh7J966imm\np6d55plnGB8fF2Kv9vZ2Hn/8cU6cOEFxcTFzc3OsW7cOIyMjvLy88PX1JTk5GVNTU5GfqYq4z8zM\n5M033xS2682bN1NZWSmCeu3t7dm8eTNdXV1IJBIKCwtpb28XOYalpaV88MEHQpB27949WlpaWLZs\n2b8jA6mrq2NlZSUk0oaGhoSGhqKurs6ZM2dwdnbmgw8+QE9PDzU1NfH+2NvbExISQm1trZgwqbQl\nqtStsrIyRkZGuHv3LleuXOHHH3/ktddew9zcnMDAQLq7u4Vhy97eHjMzM0JCQjh79iy6urqCjaHa\nFIyNjQkODkYmk6GhoUFJSQmXLl2iuLiYxsZGCgsL2bhxI05OTpw+fZquri5qamq4c+cOenp6VFZW\nUl5ejomJCVKpVPA6V69eTX9/v5iWqZSLH3zwAa2trXh6euLv78/8/DwtLS34+vryySefiH5bUVER\n7u7urFixgpMnTwoC+IOsf7opKJXKXOBBWU4bgVNKpXJaqVS2AI1AxD/7ounpaerr67GwsGDnzp1Y\nWloKV2R1dTUDAwNYW1sjkUjo6uri5Zdfpq2tjdnZWb766isUCgW2trZoaWmhUCiE2mz58uUCSOHl\n5UVFRQW5ubn09vZy7949YHHykZKSIv648/PzREdHs3nzZqRSKRKJRCgfe3p6gEXH3fnz53F1dcXZ\n2Zn333+fxsZGhoeHiYmJEQnTJSUlgsrU29vL7t278fX1paWlhaqqKlavXs0HH3wgehW1tbU4Ozsz\nPz8vEqhUsNSlS5fS0dGBQqGgpqaG3t5eDA0N2bx5M42Njairq/PUU09x6tQpbGxsUFdXF0IaVaqT\nyjhUWVnJiRMn6OvrE6PF1tZWwsPD+dvf/kZ5eTnvvvsuGRkZol/R0dHB9u3bhWx5bm6O6upq3nnn\nnX+XRnTmzBlRru7cuRMDAwOmp6cpKSkRuhE1NTX++Mc/Ctn322+/LTZy1
Z1aTU2NtrY2nJ2duXv3\nLj4+PsTHxxMVFYWnpyd79uzh1q1btLS08OGHHxIYGMjAwIAI442JieE3v/mNCBxWhQ2r7ux2dnZ4\nenry1FNPCaeoXC4XzdW33noLqVSKhYUFoaGhPP3000xNTYkUapUQTWX20tbW5vbt24SEhKCpqUlC\nQoKYCDzxxBOkpaXh5eXFihUrsLGx4emnn0ZPT4+dO3cSGBgoHKajo6Pi9Y2NjSGTySgsLMTf31/0\n1FpbW7GyssLMzIyUlBSkUimZmZl8+eWX/OEPfxC+ne7ubm7cuIGtra2A2DzI+t9pND4jkUju//14\nYfr3x+yB9n94TsffH/ufrqmpKdauXUtsbCxyuZy6ujoUCgXa2tp4e3szNTXFwsICJSUlwrk4ODgo\nwlJU7rPOzk4KCwuFzFlNTU1kLd68eRNdXV0h+42MjERPT0/Mn2UyGd9//z2WlpaC/gSLstv4+Hic\nnJzw9PQEFkdKenp6WFpa0tLSwubNm9HQ0GBgYIDc3Fy0tbVpbW0VvYjt27eLCUN3dzdlZWV88cUX\nAonW2dkJLLrczp8/T3JysvB/qL5PWVmZYCmOj48L6OxPP/2EVCrFxcVFeOeNjIzIzMykpKQEXV1d\nBgcHOXbsGJOTk7i7uzMwMMDnn3/O/v37kUgkODs7i0xFVQ/kpZdeYuPGjUxOTmJiYkJMTAxzc3Nk\nZ2cLYrKK/OPp6YmPjw/Ozs6Eh4eLxpexsTG//e1v2bt3L2vWrGH37t0sW7aM6upqkaqsCnpRNRpl\nMhkLCws0Njaybds2AgICBOxVLpczPz+PpaUlx48fp66ujoqKCo4dO8bo6CiOjo6oq6uzZs0apH8P\niVVBdKampoTb0MTEhBUrVnD37l16e3sJDw/n3r17FBcXMz8/z+TkJN988w0dHR3ExcWxZcsW7Ozs\nWLduHStWrCAgIABvb29g8Qaxfv161qxZQ3t7O+Xl5ezcuRMTExPMzMyEUS0hIYE1a9YQGhqKq6sr\nhoaGBAYGUlRUxMaNG2ltbUUmkwk5cnBwMJGRkdy5c4clS5YI7cu9e/dEvshPP/3EunXrRPPRw8MD\nU1NTLl68yIYNGygtLWVubo64uDix2TzI+s9uCkcAVyAY6AI++F/9BhKJ5CmJRFIskUiKx8bGGB0d\nJS0tjWXLliGVSjE1NcXLy4uAgAChX29ra+PChQscOHBAUI6Li4v/HeB15cqVogpQKQXj4uIwNjZm\ndnaWlJQUhoeHBUo+IyODK1euYGVlhb+/PxMTEygUCry9venv7xfswlWrVpGeng4sVheq5p5cLqen\np4ctW7YQGBhIcnIyK1aswM7OjhdffBFjY2POnTvH+Pi44ChKpVJhAbeysmLv3r3i93vllVfIycmh\nurqakZERbG1t2bZtm0Ci9fb2iiRq1aajCtBV2Wqbm5txcXEhNjaW1tZW0U2vr6/HxcWFvLw8pqam\nyMrKoqmpiczMTJ5//nmUSqVwZD7xxBNUVVUJ3cH8/DwHDx7khRdeEDzMJ554glu3bnHz5k0iIiLI\nzMzE2dkZa2trcTy7f/++gMyEhITw1ltvUVhYSHV1NTExMUxMTODq6ios8vb29piYmODq6srMzIzw\neszNzYkq4NatW6xfv5433ngDb29vPD09aWtrIyAggEceeUQwFVQBOYAI4FUdI/71X/+V4uJiFhYW\nRAL11NQUs7OzgtvQ2dnJt99+KxB7KoSeyjwGi5vC8PAwp0+fRl9fHwcHByoqKsjLyyMrK4vdu3fz\n2GOPAYvHzs8++4yrV6/S2dnJiy++yMDAgKgqVU5KgKysLO7du8fu3btxcnJCX1+f5uZmbty4QWVl\nJUePHhXUpoceekhUs7q6ulhaWnL37l00NTVJSUkRJLAHXf+pTUGpVPYolcp5pVK5AHzN/zgiyAHH\nf3iqw98f+4++x1dKpTJcqVSG/yN/r7q6GlNTU/r6+rh8+TJpaWloaGgQFhaGhoYGQUFBtLW1UVxc\nzFdffUVFRYVILlLlKejp6QGLd141NTVSU1OZnJwUXEcVkELF//vHpmJDQwPq6uqkp6ezZcsW1NXV\n2bx5s0BbwaK0VQUvVZ3FFQoFBQUFXL9+ndTUVNTV1cnJyUFTUxMbGxsBKLW3t0cul9PS0sLMzAyh\noaEioEZDQ4PBwUHMzMzYsGEDubm5jI+PMzMzg6amJoWFhaSkpJCYmMiuXbt4/fXXWbFihWBVOjk5\n8Ze//AUdHR1h8EpLS6O2tpbS0lLGx8dJTU1lfn4ec3NzpFIpzz//PKtXr+bLL7+ktLSUK1eu4Onp\nydatW2lra+PQoUN0d3czMjLCN998Q2dnJ4GBgdja2vLdd9/h5OTEm2++yaVLl3BycuLChQt8/fXX\nDA0NYWtri4+PDzU1Nfzyyy989NFH1NfXs2rVKgICAgRroLa2locfflj1uWB0dBR/f39sbW0pKSnB\nz88Pf39/Dh8+jL+/P9ra2hgYGNDY2EhkZCSXL18mLCxMZBtMTk7y0UcfoampKTabu3fvkpGRIejM\nqvdRhfVTHaWuXr0qwmS9vLzo6enh+PHjvPjii/z8889MTU1hZWUlLt6XX35ZxMWp8iGGh4dFuLG+\nvj5GRkbcv3+fzs5ODAwMxMW6YcMGRkdHeemll6iqqqK/v18cfdetW4ejoyNlZWXo6ekxPj5OUlIS\niYmJvPjii5w5c4YLFy6Iz0tkZCQff/wxlZWVdHd3c/r0adrb28VxWVWJPcj6T20KEonkH7edzYBq\nMvELsEMikWhLJBIXwAO488++3+zsrIgTy8/Px83NDRcXF5FGbGVlRWZmJiYmJuKut2nTJg4cOMD2\n7dsxMTERIzpVwhQsphyphCy+vr4i3uv27dvI5XJWrlwpKDnp6ekiA6K7uxsbGxvhiR8aGsLS0lKo\nzdrb23FxcREI+du3b6OlpYW3tzdFRUU8+uijxMXF0d/fT3p6OiYmJiQlJYmLqqKiQghK3n//fdFo\nTEhIEFRplbRaS0uLixcvcvjwYbS0tMjNzeWrr74iLS0NpVLJ0qVL2bp1K4GBgeTl5ZGRkYFcLqe0\ntJTBwUHCwsKwsbFh9erV7N+/nzVr1vDb3/4Wb29vdHR0qKysxMjIiMDAQFHqz87Ocv/+fdzc3IiP\nj6e+vh4tLS0yMjLw8/Ojt7eXL774QmQeqiob1cRlZGSE4OBgFAoFCwsLAlmvr6/Pzz//TGxsLJaW\nlri7u+Pn50doaKg4Xp09e5bR0VEaGxvR1NRER0cHmUxGTk4Orq6uHDlyhKmpKVpbW4mMjOTUqVOC\nv/DTTz+Rn58v/j4qzBwsipcGBwdFlZSQkAAsTr4KCgqwtrYW8Jv+/n5MTEy4ffs2Tz/9NGZmZjg6\nOgqrfXJysrh4f//739PY2Mjjjz/OyMgIvr6+bNmyhYaGBhITE5FIJOJ9kMlklJeX09TUhFwuJykp\nic7OTvLz81m1ahXNzc1CPn/0
6FFqa2sZGBjg3LlzBAYGiqqrra2N8+fP09bWxsqVK9HS0mLp0qWE\nhISgra3NypUr8fT0FKrHsLAwTE1NedD1ICPJk0Ah4CWRSDokEskTwHsSiaRCIpHcB5YDvwdQKpVV\nwBmgGkgHfqdUKuf/2c+YmZlBR0dHcANVMViq2CwNDQ00NDQYGxsTiLKKigrxoVd1bLu6uujr6+Pm\nzZvA4vRhcnKSvr4+NDU1hdglISGB7du3Y2dnx8DAAImJiYJe7ObmJhxmTU1NvPTSS4LIrFpKpZLQ\n0FDc3NwwMDCgtbWVwcFBvvvuO5qbm1EqlWhoaAi7tMqd6evry8zMDLt27cLU1JTr16/z7LPPik1B\ndbcyMDDAysqKoaEhrKysmJiY4NChQ7S0tNDf34+xsTGfffYZs7OzyGQy5HI5AwMDPPnkk6xcuRJf\nX1/09PSEicbExESQp1WQmZCQEExNTdHU1KSxsRFfX1+Kioro7OzE1dVVgGD7+/spKSkhNzcXDw8P\n5ubm8PHxwcLCgiNHjhAeHo5UKiUoKEiQg1Xv3fT0NOXl5Rw4cABLS0tcXFzw9/cnOjqa7du3i1Hs\nrVu3RJbk8uXLRWRgQUEBMzMzRERECC6En58fXl5eGBgYMDQ0hLW1NT4+PlhbWyOXyyksLKS1tZWg\noCBeffVVoRBU+TqefvppqqurhcfhwoULVFdXExQUxNq1a/H39+f69eu88sorTE5O0tvbKzYwFVz2\n66+/5uLFi8DikcDd3Z333nuPhYUF5ufnBUv03/7t38jNzcXd3R2JREJ6ejqVlZV4enpSWFjIZ599\nhpOTE1KplOHhYQICAnBzcwMWjWFtbW3Mzc1hYGDA8ePHuXjxIqdPn8ba2prMzEz6+/v54YcfmJiY\nYGBgQExcVPQyPT09IiIiqK2t/V/iKfxT67RSqXzkP3j42//J898C3nrgVwAifm1oaIg9e/Zw8+ZN\nwsLC6O7uJiQkhJGREXR1dYmLi6O0tJSGhgZhqy0pKaGvrw8nJyesra25efMmwcHBqtdCXFwceXl5\nQsRkaGgoshOzsrLQ09Ojs7MTf39/4WkvKirCycmJmZkZJBIJJSUltLS0cPDgQd577z0iIiL44IMP\nGB0dxdvbmyVLloiGY0dHB48//jgxMTE89dRTWFlZAYv8xaGhIaKiopiensbCwoKtW7eiUChEJ/vC\nhQvo6uqyfv16urq62LdvH0NDQxgaGrJnzx7eeecdbGxscHV1pbu7m/Pnz7N7926USiW2trZUVVVx\n+/ZtmpqacHd3F5CY8vJyenp6RGdf1Sf48MMPMTMzIywsjOeeew43NzfGxsYIDw+nqqqKyMhI7t27\nJy7I48ePs2rVKqF7aG9vJzU1FV9fX/z9/VFTU0MulzM3NycIzsPDw/T391NXV8fdu3fFnfXatWsU\nFBSI3oOJiQk5OTl0dHRgYWEh6FNqamrcvn1boOcCAgKor68X8NuYmBiGh4f58ssvcXNzE2YrKysr\nYmNjhVvwwIEDnDt3jtraWmJjY7GxseHrr79GS0uLubk5TExMGB4eZu/evYKv0dTUhEQiobKykp07\nd/LTTz8JG7PKpCSVSpmfnxeeFkdHR7Kysujv76e5uRk/Pz8RH6BCq6nyKPPy8hgZGeGpp54S6sW0\ntDQArl27hr+/P1evXkVXV1dobFasWMHU1BSPPfYYfX19JCYmMj4+joeHh9DHqHQny5cvFzkYxcXF\nD3w9/ipkzqoQ0wMHDvDxxx8THR1Nf3+/AG9cunRJ0GxUPL/ly5dz7tw5IViyt7enqKiIrVu3it1W\n9QddsmQJ3d3ddHZ2oq6ujpubG2ZmZmKUNz4+Tm9vrwjmXLduHWZmZjg5OdHW1kZwcDD79u3jyy+/\nBBYRXD4+PkJfYWpqipubGyEhIWRkZIjcwuHhYSwsLBgfH8fd3Z1nnnmG0dFRhoeHWb58OXl5echk\nMpE8NT4+TnR0NOXl5cIkpqWlhb+/P/v27aOnp4e5uTkOHz7M2NgYc3NznD9/nqmpKZqamlizZg0J\nCQno6emhqalJUlISfX192NvbEx0dzQsvvMCbb77JkiVLePTRR0lISGDt2rWMjY2JcVhcXBxhYWG4\nuroikUh45plniImJ4e7duxw6dIje3l4UCgWZmZkolUrWrl0rfmZZWZnAphUVFVFQUMDk5CSrVq0S\nJOqOjg4uXLhAYWEhNTU1KBQKenp6RCW2sLCAubk5/v7+DAwMEBgYKHIyVePX1atXs2PHDhwcHNDX\n1xeZFbOzs/T19aGvr4+jo6PgYwCkp6fj5uYmEp0MDQ1xcHCgra2NxMREfvjhB0JDQ/n2228pKSmh\nsbGRr7/+mjt37mBubk5ERAS9vb3IZDI0NTVFf0k1nraxscHb2xupVEp4eDiurq5oa2ujUChoaGhg\nw4YNWFlZ0dTUxODgIGVlZbz00kvExcVhYWFBdXU1R48eZevWrQAMDQ3R2dlJSEgIkZGRZGdnMzo6\nikQiITs7m4GBAfbv3y84o6q+1OzsLO3t7SQmJpKVlcXPP/+MjY0N+/fvf+Dr8VexKUxOTvK3v/0N\nmUzGvn376OvrY3JykunpacEjUCgUtLa2Ul5ezpYtW2hubsbS0pLw8HAmJibIzs4WSb0qx52WlhZR\nUVEMDg6SmJiIv78/oaGhRERE4O/vT0JCAqmpqaxZs0ag0OLj41FXV2d0dJSWlhays7MFSkt1LlNF\nw61cuRJTU1PRvZ6fn8fKyopHHnmEjz76iKGhIQ4dOsTCwgK2trbY29sTEBBAbW0thoaGBAcHExcX\nh7a2NgDJycno6uri4+ODVCqls7OTr776ivHxcWxtbRkeHubMmTMoFAqKioqwtbUlKChIROLl5ORg\nZmZGbGwsFRUV/PLLLzQ1NdHc3ExDQwNvv/02+/bto76+noSEBFxdXcVZdOvWrWKjuHfvHps2bQIW\nK5yBgQF27drF8ePHaWlpYW5ujtDQUJYvX055eTlHjx4VsFwHBwfs7OxwcXHBzMwMZ2dnurq68PX1\n5cCBA8LBl5GRIZgUQ0NDrF69qI9T3WGvX78upifNzc0CEhMZGcnBgwdJS0vj8uXLIptRQ0MDuVxO\nYGAgBgYGaGho4O3tLZqPXV1dglNhY2ODoaEh0dHRODk5cfLkSe7evSvgLTk5ORw7dozNmzejUChE\neKu7uzujo6PMzMwgly/2z93c3Pjzn/9MbW0t8fHx4nGJRMKePXuEeczS0pKxsTEyMjJoaGjAwsJC\naA2uXbtGX18fmzZtoqOjA1jsLxUWFjI9PS3yHTZu3IiFhYWQUqekpPDJJ59QWVnJn/70J1xdXTE1\nNSU2NhaFQiGaqCdOnBC2/QdZv4pNQU1NjZMnT3Lz5k0UCgVpaWkYGBgQGhqKp6cntra2SKVSQdMp\nLS0VgbNOTk54e3sTERGBjo4O33//PU8//TTwP3DhcrlcaMFVXMK//OUvtLe3C8qPiij90
08/8fnn\nnzM0NCRYjIODgywsLIgwmPz8fDo7OwWwRVWu1dfXi+zEgoICPv30U5GR4OXlxRtvvEFCQgIODg5k\nZWVx/vx5+vv7BRpc5QGQyWRUVFQgl8vZuHGj8EpMT0+zsLBAT08PlZWV3L9/n++//57c3FyCg4Mx\nMDCgvr5exK65uLhQW1uLlZUVOTk5yGQympub8fT0ZMmSJairq3Py5Em++eYbpqensbOzw9TUFBMT\nE8bGxoiIiCA9PZ2FhQXGxsZwd3cX7j0VOeqxxx7DwMAAOzs7oYNQKpX09PSgUChED6SsrIyKigqC\ng4Pp6OjA09MTLy8vfHx8UFdXx8HBAYCgoCD6+vqIi4tjYmICd3d3IiMj+eWXX3jqqadQKBTcuHGD\nrKws+vr6OHr0KDk5ORgZGYksxZGREUpKSpidnRWp06oYu/7+fuzt7dHU1MTNzY3g4GDa2tpwcXER\nQTFPPfWUwPaZm5vzu9/9jnfeeQelUslzzz2HhoaG2MhPnTpFTk4OK1eu5De/+Q3Lli0TCHlHR0dW\nrVrFqlWrCAkJwcbGhvfff5+DBw9ib2/P8ePHmZ6eZsuWLSLHUoUEnJ2d5cUXX0RXV5fAwEBhllJX\nV2fDhg0sLCxw5coVnnvuOeLi4ti2bRtLly4VPNKtW7eSk5ODpaUl27dv59y5cw9+Pf7XXNb/e0s1\nU5ZIJPT09ODv709HRwfZ2dmcOXNGRJ2Zm5uzfPnyf5eYdOfOHXE3Cw4OFlBWWHSwTU9Ps2nTJtzd\n3ZmfnxcqM1W33NDQUMzW4+PjcXFxQV9fn5qaGnx9fQVq3sTEhJUrVwKLF6+fn5/AvM3OztLY2Mhb\nb71FZWUlk5OTbN68WdBzLl++zKuvvsrY2Bgff/wxQ0NDLCwsiN8rLCwMgGXLluHk5ISrqyv6+voM\nDg4Kgc7Zs2f56aefGBsbE25KPT09EhMTUVNTo7W1laqqKrq7u5HJZLi7u6Ojo0NhYSHz8/Okp6fT\n2tqKo6MjJSUlvPLKKzQ2NqJQKIiOjiY2NpabN2+KCLfTp09TWVkpnKcqq/aOHTswMzNDU1MTmUwm\nLn5PT0/BE7C0tCQyMpKOjg6MjIyoqamhpKSEoqIiPvnkE6qqqkTE2cDAAAUFBQKZXlVVRUhICC0t\nLeTm5lJZWYmpqSk6OjqYm5uTmprKu+++S1hYGNu3b0epVLJp0yZWrFiBjo6OQJ4PDw9z4cIF4Q5U\ncS18fX1pamoiPz+fGzduEB4eztatW1mxYgXJyckkJCRgY2PD66+/ztKlS3n33XdxcnIiJiaG0dFR\nvvrqK4aHh0UDUy6XY2try9DQEH/961+5c+cOx44dY2hoiLGxMb755hsRz9fY2ChIz3p6egwNDTEz\nMyOEWh0dHULZ2dPTQ15engDMpKWlcfDgQbZv3y4CeH/55Rfc3NwwNjZGQ0ODTZs2cenSJTG+fv/9\n9ykoKCAlJUV8xh5k/So2hampKZYtW4aJiQmDg4N0dnayceNGpqenGR0dRS6X8/HHHzMyMkJtba2g\nHdfU1IiyPjQ0lPLyctauXStyFLq7u5FKpXz77bd0PVBw5wAAIABJREFUd3cTGRmJVCplZmaGwMBA\nJiYmKCwsZGZmRnAUVOEuNjY23Llzh76+PoEjUwlhQkJC+OKLL5ibm8PQ0JC4uDjU1dVZtmwZYWFh\nDAwMiOBSVQdYNbNXNcdUwBdNTU1xNzt79qzAe2VmZlJZWcmtW7eQy+W0t7cLgIibmxsrV67E29ub\nlpYWoqKiiI+Px8DAQHAe8vPzkUqlvPTSS+zevZs33ngDuVxOSUkJMTExPProo1y9ehUdHR1yc3N5\n7bXXmJ6extLSUnS1i4uL+eKLLygtLaWgoACZTMYbb7whko77+/upqqpi3bp1pKens3btWkGKMjAw\nEAIvFS+hvr5eeFc6OjqEF2X16tWiIWhubs69e/cwNTXloYceYu3atZSVlSGTyTA0NBRjX2NjY4aG\nhpBIJExOTgpOQWdnpwCseHp6iuNDfn4+Hh4ejI6O8vjjj3P//n2efPJJkpOTefjhh0lOTmZgYIC6\nujqmp6extbVFLpezadMmYdjT1NREQ0ND9ABgMRhILpcLE5eamhpOTk5Cp6KaLGlqalJXV4evr6/g\nOKxfvx5XV1fWr18vjHSZmZkAgry1Y8cOrKysuHfvHubm5oKbmZOTg0KhYG5ujsbGRs6dO8eyZctw\nc3NDqVQKGrrqtd6580+VAWL9KqzTH3744eu6urpCC9/S0oKenh5yuZze3l7MzMzo7e3F19dXINnV\n1dWJiYnB2tqaDRs2CCGHtrY209PTpKamsmTJEk6fPi2aXCUlJVhbW1NbW8u2bdsoKiqivLwcOzs7\nDA0N6e3t5dq1a8zMzKClpYWamhrDw8Pcvn1bTEcyMjJ44oknCA8Pp6mpibVr12JjYyOovD/99BPm\n5ub8/PPPWFlZERUVxYYNG7C2tqayslKMR42MjLh79y4uLi6oq6tz9uxZNm/eLAJVpFIp/v7+TE9P\nY2BgQHx8PI6OjoyMjLBs2TJGR0fFdGVhYYHU1FRGR0f5/PPPUSqVREdH09zczOjoqPAELF26FE1N\nTfr6+ti1axe9vb1s2LCB+Ph4jI2N2bFjBxkZGWhoaBATE8PAwABJSUloamrS2dmJjY2NQL0tLCww\nPDyMmZkZLS0t7Ny5k+7ubrKzszl27JgIWz1z5gzu7u6Ehobi7+/P7OwsMTEx6Onp4enpSWVlpeje\n37p1iz/84Q/Y2NhgZWWFhYWF6O2Ym5uLfpDq+LBq1SpWrFghRp/d3d3o6enxzDPPoKenR0pKCgqF\ngsuXL2NgYICTkxOTk5OYmpoyPDyMn58fo6Oj3Lx5k/b2dvT19bl3755o+qmgKWFhYXR1dRETE8PM\nzAyOjo4cO3ZMkLDGx8eZm5tDXV2d2tpaAUFxcXFhenqawcFB6uvrWbp0qYi1W7FiBRKJhLy8PG7d\nuoWRkREeHh64uLhw+fJl1q9fLxKk4+PjiY6OZmFhQVSmxsbG/PLLL8I3o62tjaGhIU8++SQVFRVM\nTU2JWEXVBvNfZp3+/2JpaWlx6NAhdHR0RPBqWVmZcDaqgmfb2to4efIkzs7OlJaWcu7cORQKBdXV\n1SwsLAjqr6qbb2xsjK+vL7GxscCi6CgvL0+IdhQKBW5ubmzatInAwEACAwPR1tYWQRwuLi5ERUUJ\nHNw/lmCzs7MYGRlx6dIlXn75ZZydnQXevaqqCg0NDaysrET3WZUrqNLD29ra4uHhQUNDA9bW1sBi\nyailpcXy5cvx8vIS8M/ExER0dHQoLS0lODhYUImjoqJobm5GTU0NHR0d7ty5wwsvvCDizFURcebm\n5gIcq6+vT1JSEqWlpWzYsIGmpiYUCgU7d+7k+PHjwnjk5ubG6dOnGRgY4MKFC6irqwtdgOpuHxgY\nSEJCAjExMbz22mvo6+tz6dIl4WwdHx9n3759
AjNWUVEhYKhbtmzB3t4eT09PkpKShHhJNbGxtLRk\nYmICY2Nj4uLiePrpp1myZAn9/f0oFAr27dsnnJULCwusWrUKNTU1lEolly5doqCggDNnziCTyQAI\nDw9nfn6eJ598kvLyciIiIjhz5gxDQ0PMzs7S0dHBl19+iaWlJXp6ekRHRwuC9/T0NElJSUxMTFBV\nVSU2ZljEvN26dYuKigqB75+dnSU4OJjh4WGWLl3K2rVrCQgIQF9fX4BwfvjhB65du4a5uTlBQUH8\n/PPPpKen096+aB26f/8+Q0NDyOVyxsbGBEymtLRUfG5XrlyJg4MDDQ0NLCwsEBoaKnoTUVFRrF+/\nnh9//BG5XC6O1A+yfhWVwueff/76pUuXcHFxoaOjg4CAAPr6+sROaWJiwtmzZ7G0tCQuLo4rV66I\nTIG9e/dy//595ufniYqKYm5ujhs3bnDnzh1SUlJwdHTk2rVrAspqZGQkjhBRUVGC7lteXk5dXR3N\nzc1oa2sTHh4uLlIVT9HExITz589z8OBBqqurRez4li1bOHbsGMHBwWhpaZGZmSnYfgEBAchkMjHa\nsre3Z2BggK6uLhISElhYWKCzs5O0tDQ++ugjRkdHmZ6eFopAFZRWX18fiUTCzp07GR4eJikpSdjL\nVYSjlJQUcnNzcXV1JTg4mLCwMCECUoWjzszMsGzZMo4ePSrozaqQ1fn5eVFiqyjMFRUVREZG4uLi\nIhyfqlBUPT097t69K4Jlp6am2LZtG5s3b2ZkZEREz3V3d4ssQ29vbxobG9HQ0OD8+fM4OTmJtOW8\nvDz8/PyYnp7mxx9/ZHR0VJiY9PT0mJqaQkNDQ0ynvL29MTc359y5cxgbGws24tDQENPT0+zdu5fp\n6WnOnj1LaGiomDJ5enpSXV3N5OSkwJ/r6OiwZMkSCgsLWbp0KTExMbS1tbFmzRoGBgYwMDAQxxLV\nqLWqqoqZmRk2b94smBLR0dHMzs4K+Mzq1avp6+tjenqadevWMTAwwNDQEPX19SxfvpyRkRECAgJE\nfqiVlRVpaWm8+OKLNDU14efnR3Z2NkqlEjMzM6RSKV1dXVhYWKCtrc39+/fx8fERQUA3b95ES0uL\nkpISbt26RVRUFPb29lRVVZGTk/NAlcKvYlN44403Xn/vvffQ0tISZg4jIyPGxsZYsmQJubm5JCcn\nC/mokZERdXV1PPvss3z33Xc0NTWJFGQ3NzcaGxspKioiKCgILy8vETtvY2PDkiVL6Orqws/PT5iZ\nVLPnhIQE0tLScHBwEOEpfX194sw4PT0tUn9VY8KBgQHMzMwwMzMjIiICU1NTkpKSRCWQnp6Op6en\nIO6oPjijo6O8/PLL+Pj4oKOjI8xgWlpaGBsb895771FXVyfUcaqUZBXvobKykqGhIczMzPD19RV9\nCXd3dwYHB4mPj0dTU5OKigqCgoJwcHDgzp07ApX/6KOPCnJ2ZmYmvb29JCcnc+zYMSoqKujq6qKg\noAClUom1tTUXL17E1dWVtrY2fHx8qK6uxsLCgvv372NsbMyjjz7K1NQUK1euRKFQEB4eztjYmLjT\neXh4cPHiRaGvyM3NFbZoOzs7hoeHKSoq4sCBA9TX1yORSFBTUxMNZhUKv7u7Gy0tLeLi4jhx4oQA\nqarO40ZGRszPzzM/P8/Vq1cZGBggPz+f2NhYsrOzqampEbj66elp7ty5w8qVK8nOzsbS0lLQk0pK\nShgbG6OlpYWOjg7Onz+PtrY2s7OzREZG8uWXXwp1alVVFaampoIo7uXlJaCs+vr6+Pv709rayl/+\n8hfm5uZISkqiqakJb29vkpOTRXjMV199RWxsLL/88gsODg7cu3dPyPT7+vro7u5GQ0MDHR0dzp07\nx61bt2hvb2dqago7OzuKioq4ePEiFhYWuLi4cObMGVavXk1NTQ1NTU0UFxf/33N8UAkybt26RVNT\nE+Pj4+jr66NUKklLSyMlJUXcLVxdXblx4wbLli2jvb2dhx9+WKjRjI2NGR8fZ/PmzcBiKdrY2IiW\nlhZ+fn7CbKMyDKmrqzM5OUl/fz+1tbWcOXNGgFJUFURoaChJSUl4enoSGRkpXrOZmRkmJiZ4eHiI\nEI8zZ85QVFTE999/T1VVFfb29kRERDAwMMCqVav43e9+x65du7C0tERdXZ0nn3xSYOlhseGqakht\n27YNZ2dnARpNT0/H2NiY06dPi/fHz89P6Dcef/xxBgYGUCqVWFpaijGqasR55coVcSeXy+W8+eab\n3Lp1i5KSEsLDw9m/fz8NDQ1oampiamqKo6MjiYmJYhT6/PPPMzIywsqVKykuLiYmJgaZTCZEXJ99\n9hlaWloimPe7775DQ0OD9vZ2pFIp5eXlBAYGIpPJCAkJERh4Nzc3RkdHxZFPKpUKKXVMTAz9/f1c\nuXJFJC2ZmJjw+OOPI5PJMDAwIDg4WOR8Ojs709TUJCpBFX8BFhWCUqmU/fv3C4Ds2NgYRkZG7N69\nG01NTXJycoQ7VqlUEhMTI35GYmIiV69eJSwsjLKyMv7whz8Ai41RX19fAgIC8PT05Pz581RXV/P2\n229z7tw5pqenaWlpoa2tDRMTE4yNjfnwww+FsjYzM1MoN5944gnhZtTV1UVdXZ1vv/0WqVTK9u3b\naWpqIj09HX19fc6cOUN9fT3btm0T+RZ+fn6sWbMGDw8PMjMzeeyxx0R2p2rU+SDrV7Ep2NrasmzZ\nMiIjI3F3d8fc3Jzo6GgGBwdZtmwZXl5ezM3NcerUKbKzs7GxsWFqaor+/n4++eQTwQjo7u7m0qVL\npKamAghgS0FBAXfv3sXBwYHZ2VkBTm1sbMTc3BylUolEIhEwUUtLS5qbm7GwsBC+hX+cIe/fv5/6\n+nquXr1KUFCQCClViUdUvgpdXV3U1Bbf4rNnz5Kenk5tbS3T09OEhoaKvoOZmRmwmBxVWVnJzMwM\nP/74Iz4+Ptja2or06JycHMrKygSdWQVZWbJkCXV1daSkpFBfX4+dnR2VlZVcuHCBxx57jLm5OXR0\ndEhISBAVkCpLY/Xq1bi6uvLQQw+JKHZLS0sBR1VRqW7cuCHYhqtXr6arqws7Ozva2tro6OjA2tqa\nwcFBTpw4QWtrq4hfVwWvurm5MTc3h1wux8rKio0bN6KpqSk2A5XoR3UmnpmZobu7m9TUVCwtLbGw\nsOCFF16goKCAEydOsLCwwPLly/n4448JCgpi27ZtLCwsEBkZiaOjI7t27aK2tlbY7vfu3cvq1at5\n7733xJ1z8+bNNDQ0kJGRQUhICMbGxlhYWLB8+XICAwOxtLSkp6eH6upqWlpaUCgU6OjoIJfLhS3f\nw8ODgIAAbt++jUKhoLOzU1QLqoAWVcCsnp4eeXl5JCYminF3VVUVMplM0MBUFZ+9vT0+Pj6cO3eO\nnJwc/tt/+2+imfnmm2+SnJzMmjVrALCwsBDcEUdHR3788UfCwsKYmJigtLRUwG0edElU3cv/k0si\nkfy
ffxH/b/2/9f//VaJUKsP/2ZN+FVmSlpaWHDx4EFdXV1xdXSkqKmJmZkYo4WQymWgqGRsb88c/\n/pHDhw9ja2tLbm6uGOG5urpy9epVzMzM+PTTT9m4caPAfz3//PPIZDK++eYbEhISqK+v5+zZszz8\n8MOYmpry7bffCvT4xMQEMpmMkydPsnHjRqysrGhoaEBbW5s//elP7NixAxsbG3bt2sWFCxewsbEh\nJiaGmzdviibW6Ogozs7O5ObmEhkZiUKhICgoiMOHD/Mv//IvNP539t47qOoz7f9/Hdqh9w6HotKb\nICgIUpRgQbGFiD0mMTFxd93d7GaTmN1kjTHFFN2sZpNYotHEGiVWUBREUCnSe++998OB8/3DnHt2\nvzPf5/H3x28mz8zzmTkDHtoI53Pf93Vd7/frXV1NZGQkKSkpmJqa8rvf/Y7jx49jamrKN998w/Ll\ny9HQ0MDExITbt2/z1ltvcfjwYUGgNjMzQyKRCIOUClK6cOFCOjo6qKmpERQlVUlw6dIlYTFXKBRc\nvnyZyMhISktL8fLyYsGCBVy+fJmoqChmzJghQnVcXV25ePGiSO7eunUrR48eFSO7gYEBKioqxLTD\nzc0Ne3t7fv75ZywtLZHL5cjlcqRSKQ0NDcycOZN58+Zx//59cepTcQzff/99wsPDOX/+PIODg1hY\nWBAXF0dlZaUgRKuIWVVVVTx8+JDY2Fj8/f3JyMhAqVSKHIWffvqJV199leDgYBITE2lvbyclJYVl\ny5bR29srZOvXrl3Dw8OD+Ph4hoeHaWtrQ11dnZqaGjw8PNDS0hIUp4KCAsLCwpBKpaxYsYLy8nIh\nihsYGCAgIIDExETB0CwpKUEmk+Hv78+ZM2c4fvw4S5cu5ccff+STTz7hueeeY+/evYK/kJeXx1tv\nvcX7778v3K1yuZxZs2aRkZGBpqYmdnZ2jIyMiCZ3bW0tLS0tzJo1C0dHRwoLC3FwcKCwsFCAie/f\nv88HHzydT/FXUT6o5t3W1tZ89NFHJCcno6WlJV70vr6+bN68mYcPHwpOXUlJCXV1dSxfvhx9fX30\n9fXFPP7+/fvAkyOYtrY2ERERlJaWcu/ePZYsWUJycjLBwcEcOHCA2NhYGhsbee+991iyZAknTpwg\nJSUFS0tLWlpa6OnpEQIZFYBUFSijUhA6OjoyMDDAkiVL8PPzw8/Pj8WLFzM9Pc26devEDDoyMpLo\n6GjCw8Nxc3Pj5MmTuLu7CydlTk4OFRUVBAQEkJaWhru7O5mZmcTHxwuCkpeXF/X19ejq6nLt2jUy\nMjK4fPky3333HcuWLaOurk6QfVtbWykrK6OwsJAvv/ySoKAgmpqaxGTk5ZdfpqKiggMHDojItnfe\neYehoSEaGhoYGxtjfHycpKQkbGxsWLRoEW1tbdy4cYO+vj5cXFyYnJxEoVCQkJCAoaEhdXV1XL16\nFW9vbzw8PGhubqa4uBiZTIaZmRmrVq0iOjqa77//HmNjYw4fPoyRkRHnzp0DnqhQz58/z86dO3n1\n1VcFHObx48fk5OQwODgoRtQ+Pj6Cbr13714RDfDDDz8gkUhISEhAW1tbfF9zc3M8PDxEnJy2tjaz\nZ88WnMSUlBTgiapSU1OTJUuWkJubKyYVfX19WFtbk5eXJ+z5H374IZmZmYJFeefOHfz8/EhOTqas\nrAwfHx9KSkrIysoiPDycV199lRUrVnDkyBFmzpxJRUUFHh4eeHl5cebMGTHqrK2tRS6X8/bbbws7\ntCoeb2BggOnpaYaGhnj06BGlpaX4+PiQl5fHo0ePMDc3F2rImzdvcv78eZGD+jTXr2JRUCkSFQoF\nnp6eBAYGYmxsTGlpKVKplNHRUW7cuCHGg99//z3Lly/n5s2bXLp06T8So4OCgnjjjTcAWLNmDVKp\nlJCQEO7cuUN4eDilpaWCdqQax23btg0XFxdeeeUVNmzYgKOjIxkZGRgaGhIUFMT169fR0NAQkJXi\n4mKkUqnAxRcVFfHuu+9y9epVjhw5gpaWFv/85z+ZO3cuFRUVlJaW4urqytGjR0W+ZFFREUFBQWIk\nB0+ShlU48AULFpCamkp9fT0XLlxAqVQSGRlJeno6c+fOpbW1lfj4ePLz81m1ahUvvPACfX19wuY8\nPDyMlpYWBgYGyOVy1q9fz4kTJ9DX18fKyoo5c+YQGhrK3LlzRZ5Afn4+ycnJzJ49G6lUSltbG97e\n3mhraxMcHExJSQl///vfuXTpEi+//DISiYTs7GzxViaTCZOXCoSiqalJYGAgDx48wMLCAg8PD37+\n+WcWLVqEmpoau3fvZmhoiJCQEAB27tzJmjVreO6558jKyuK9994TwcG1tbXEx8eTmJhIbm4ud+/e\nRVtbm6mpKZYuXSooRKtXrxYhPCqXpIqYpKmpyeTkJDKZTMiHs7OzKSwspLKyksnJSWbMmIG9vT0p\nKSkYGhqiUCgYGxsT/09A9Cqee+45AgIC+Oqrr9i/fz86Ojo8fPiQ0dFRFAoFVlZWrFy5EhcXF2E6\nMzY2Ri6XC/aCyvthbW0tXgtSqRRbW1u2b9/O9u3b+eKLLxgYGEAmkzFv3jy0tbUxNjbGx8cHpVLJ\nl19+iYmJCa2trQwPD+Pj44OdnR3BwcGC7fm0169iUdDV1eXIkSMkJSWhq6vLwMCAkJRmZmZibm6O\nn58fu3fvpqioSKjUVNhvb29vdHR0cHd3p7W1FR8fH+CJtHV4eFjcxKpO76JFiwgICCA/Px+FQsHM\nmTMZHx9n+/btpKWlYWpqipqaGqtXr2bfvn2sWrUKExMTAao4cOAAc+fOZdWqVaxcuVJ0xC0sLDA0\nNEQul2NpacnNmzeRSqWCZmRkZIS2tjba2tpClfnxxx+zfPly4IkN19ramhdffJGRkRHhA4mLiyMz\nM5MZM2YQFBQkGlmlpaXMmjWLhIQEhoeHyc3NxcjICAcHB3p7e+ns7MTOzk5Y0F1dXRkcHGRychIL\nCwsxVlVlH9TW1tLU1MRnn33G9evXMTQ05NGjRxQWFpKbm0tTUxMHDx7k7bff5osvviAqKorp6WlK\nSkoEkGXnzp0sXryYqakppFIpHh4ehIaGCt1FcXEx9vb2jI2NoampSU5ODkqlUnAM9fX1USgUhISE\nCKL3vHnzBNFalR6uMqhlZ2cL16nKFOfg4EB/f79A5METbPzDhw+Ry+W0tbUxMTEhIgMGBgaYnJxk\n8+bNPH78mPLycv72t7+J4B0Vbn5qaop33nkHZ2dnoSTMzMzkp59+EoYkVXP5woULzJo1i9zcXOET\n+eMf/0hXVxfh4eFs3LgRPz8/wSTt6OggIiJCTB+MjIyoqKgQIqvAwECSkpLEAmVhYSEYFrq6uuze\nvVtQzZuamrCxsaG1tRWZTMbJkydFM/tprl/FojA0NIS5uTmenp4YGhpiampKZmYmk5OTbNy4EV1d\nXXR0dMjNzSUvLw91dXXRFb98+TLV1dVUVlaSnp4uItrgSRDs
5OQkpaWlBAYGsmLFChYtWiQSolet\nWiX4DF9++SWFhYXCVGVjY8Pjx49xcHBAoVAI+zU88bqPjo4KVZq7uzteXl5kZGRgY2NDZmYmw8PD\n9PT00NraKrIxvby8uHHjBlKplN27d1NbW8vzzz8vVHcXLlxg9uzZaGhoUFVVhYWFhfDgq+Ct5ubm\non+iimOLjIxkaGiIdevWoaenJ8ZfoaGh1NfXC7Crl5cXvr6+dHZ2oqmpSXJyMvn5+WzcuJElS5Zw\n9epVNDQ02Lx5M/7+/lRXVxMbG8vixYsFwUcqlfL111/z+uuvk5SUhKamJr6+vvj5+QHw1ltvkZ6e\nzvnz51mwYAErV65kcnKSkZERSkpKmJiYwM7OjpkzZ7J48WL09fV59OgRGzduBGDPnj10dXVhZ2dH\nYWEhhYWFKJVKgoOD0dDQ4MyZM7i7u3Pv3j3s7e3p6+tjamoKExMTEhIS8PLyEroQlacFEKeDwMBA\n6urq6OvrY2xsjNbWVlHiKRQKgoKC0NbWxtTUFG1tbZEz2dzcTHNzM0lJSeTm5iKTPUGRKpVKFi1a\nJEJzh4aGhNRaNS1wdXVlenoaU1NTent76ejoENmipqamREZG4ubmJk478CT3ITs7m46ODsbGxjhx\n4gTPP/+8oD6rktGuXbvGqVOneOONN5g9e7ZgRBw9epSBgQFR5qmUkk9z/SoWBZVuoLm5mblz5/La\na68RFxeHrq4up0+f5sUXX8Td3Z3Ozk7mzp3Ls88+S319PampqUIHMHfuXEJCQigqKhKorJSUFJYu\nXYqmpiZWVlY4OTkREBDA7NmzMTc3p7y8nO7ubqqqqggJCWHv3r0EBwdz/vx55s2bx6NHjzAwMCAh\nIYHW1lYRBvP6669TVFSEoaEhUqmUmTNn0t/fj4uLC6GhoWJ2rSIGaWhocPbsWQDh3aivr8fNzU24\nCOHJqDMtLY3c3FyB45LL5airqzM+Pi7EXVNTU4yOjhIZGcnY2Bhr164Vp6Pc3FyRmakymOXk5ODs\n7My9e/d4/PgxHh4eVFZWMjw8zPDwMLW1tSgUCl599VVGRkYwMTGht7eXbdu2ceDAAUEbjouLIyAg\ngLCwMAoLCxkZGaGxsVGEoKpyCK5evYpUKuW1114T4iQ3Nzd27txJSkqKsHiPjY3R1taGkZGRWHCd\nnJwYHBwUEuGpqSmqq6u5deuWyGhU5UauXbuWV155BblcLngFTU1NtLS0YGpqSkhIiKj9Kysr6e7u\nJjU1FQsLC8Fg8PDw4PLlyzQ1NZGdnc309DTR0dGEhIRgaWmJo6OjML3Z2tqirq7Ojh07RFmi2rnP\nnj1LVFQUbW1t9PX1IZPJWLlyJVu2bOHYsWPMnDkTPT09BgYGMDAwYHh4mPz8fA4cOMDt27dRV1cn\nOTlZYOm2bdvGG2+8IXgdO3fupLCwkGXLlglxlKurK3/84x/ZsmULr732Gjdv3uTevXtoaDyZH6iy\nIrq7uwXq/mmuX4Wi8a9//et7Xl5eWFtbY2FhQW5uLkNDQzQ3N9PX10d7ezuZmZlkZmby+uuvI5FI\n2LRpE52dnVhYWHDhwgVcXFwoLS1l+/btNDU1kZGRQWBgIDExMRgbG6Onp8fVq1cpLS3lvffeY2ho\nCC0tLdauXUtdXR1VVVUsXboUCwsL5syZw8WLF0VdvmTJEqGcu3LlCuvWrcPDwwN7e3tCQkK4cuUK\nJiYmREZGigaplpYWgYGBZGRkcO7cOZ599lm0tbUFHMbc3Bw7OzvU1dXp6enh6tWrzJ8/n4qKClGL\nJiYmYmlpyeLFiykuLiYxMVHw/3V0dBgfH+fw4cPiNHP48GERsRcQECDKpYGBATo6OggMDEQulwsP\nw8GDB/H19RUI8+7ubhYsWICVlRUPHjzgxo0bPPPMM4KwJJFIcHd3R09PD2NjYwoKCnjmmWeETXh0\ndFT0RSIjI/n+++8pLi4mMzOT8fFxFAoFo6OjZGVlYWpqyuDgIN7e3ty+fZva2lry8vLE30+1e6uy\nJbZs2SISwiMjI8WNPTk5SVBQEF9++SVvvPEG9fX1BAcHCwv41NQUt27dYteuXejo6GBiYoJcLheL\nQXd3N4aGhpibmwuz2OTkJK6urlhZWZGeno7hMFe5AAAgAElEQVS3tzcPHjxgfHycuro6YYN++PCh\nKPlsbGw4ceIEc+fOZWhoiNmzZzM5OUl6ejrj4+OCJNXd3U13dzeVlZUsX76chIQE5HI5r7zyClZW\nVmzatIlDhw7x2muvkZiYyKpVqygtLUUmk2FnZ8eDBw/o7+9HKpWSkpLCsWPH8PT0pLOzE0NDQ5yd\nnTE0NKS4uJiwsDDu3btHWloaL730EmfOnPmfo2iUSqUsWrSIoaEhvvvuOy5evMjg4CCdnZ0sX76c\nsbEx5HI5rq6u/OMf/+DChQvk5uYKS7GGhgYymUzkGqrqUyMjIzo7O7l27Rpz5syhurqaqakp5s2b\nR0NDA+rq6uTn53P69GnheTh58iS3b9/m2LFjDA8PY2trK5iHTk5OAPzmN7+hoaEBCwsLqqur6e3t\n5fr160xOTgpFZlJSkoDEBAQEkJSUxPT0NFpaWpSVlZGYmEhPTw9paWmC0Xj37l0hNEpLSxOd65yc\nHPT09FAoFERERAhSb0FBgci/aG9vZ9u2bSLpqbS0lLVr19LR0UF7eztBQUFoaGjg4+NDTU0Nurq6\nvPnmm0xOTvLBBx+IBllWVhY9PT38/e9/x9PTU3AEW1paGBsbo6ioiMrKSmG0UfVaKisrCQoKorCw\nEIlEwq1bt7hx4wYRERE8++yzWFpaMjg4yKZNm1BXV8fJyYnw8HABNlVhyAICAoiIiODs2bNoaWlx\n9+5dbG1tqa+vp6ioiOzsbD7++GNSU1M5fPgwTk5OaGlpMW/ePBISEjA2NubQoUMMDw/T3t4uqM0q\n7J7KyHbu3DkKCwt59OgRJiYmNDU1oVAo6O/vp6SkhNHRUXHKunjxonAhenp64uzsLEpUHx8fGhoa\nqKmpERDbgIAALl++zK5duwRa7/Tp02RlZeHl5UVgYCBmZmZkZ2fz+eef8+677/LJJ5/g5eUlEshr\na2vZsmULbW1trFixgqCgINLT0zl16hRlZWX8+OOPon9mamoqFm0VCXz+/Pl0dXUxf/583nzzTYFr\ne5rrV7EoqEZMXl5eLFq0iEOHDqGpqck777yDsbEx4eHhtLe3CyWioaEhampq+Pn5YWpqysKFC2lp\naWFgYIBvvvlGUGZee+01SkpK8Pb25rPPPsPY2JimpiY0NTWRyWSi4x4fHy/ivFT4sPnz5+Ps7ExV\nVRX+/v68+OKLHDp0CHgy116xYgX19fUYGRlhYmJCREQEjY2N9PX1sWvXLmxtbdHW1qa1tVWw9iQS\nCVNTUzx+/Jju7m6qq6uZP38+NTU1wBMPfVxcHGZmZjx8+JCKigpiY2Pp6+v7jywLMzMzNDU1CQkJ\nISQkBH9/fyH3XrN
mDdPT01RUVPDuu++yePFijh8/Tk9PDwsXLqShoQErKyv09fUJCgpi9erV4iQV\nGxvLhg0byM7OJjMzEy0tLWJjY6mvryczM5OJiQmMjY3p6OjAw8MDNzc39uzZQ0NDA/b29sIbsWDB\nAq5evcqHH36IjY0N9vb29Pf3Y2xsTGpqqsj4ULkV/z0m/caNGyI7UgXAaWtrw8HBAQsLC2EM2rp1\n639YrhUKBTKZjM7OTl588UWMjIxwcnISpZmOjg4tLS0UFxcLD8rs2bMxMDBAT09PcCpUjtqrV6/i\n5+fH+vXrBSbe19dX+ElUM39zc3NCQ0NFw1JXV5ff/e536OjoEBsbK/Q1S5cuxdPTk/LyclJTU9HR\n0aG2tpbFixfz6aefoqenx+PHj0UZpa+vz40bNygsLKS0tJTDhw8zb9483n77bebOncvY2BhSqZSO\njg7OnDnDJ598QnR0NHFxccTGxrJ161ZRHt+9e1dwGp7m+lWUD1988cV7CoVCRGqfPn2a0tJS+vr6\neOaZZ6iqquKnn37i7t27IotgaGiIoaEh1NXVmZqaEmac7u5uli1bxrFjx1iwYAG3bt3i2WefxdnZ\nmY6ODpGV4OjoyP79++nr62PTpk0MDQ0RFhaGoaGhGOccO3aM6elpHj16RFdXF1euXGFsbIwNGzbQ\n0dEhjq/l5eWCeuzi4sK6detYt24daWlp3LlzBzMzM+7duyfqZDs7O6RSKbW1tbi7u6NQKLh+/Tov\nvPACZWVlpKSksHHjRrS0tLCxsWHfvn3o6ekRExNDe3s7n3/+OWpqaiQnJ4t+jJaWFmFhYcjlcmpr\na9HT08PHx4dbt25x7949PDw8yMrKwsbGhkuXLhEaGoqvry/q6uqEh4ezfv16li5diomJCY6Ojqxf\nv14YkdTV1YmOjuYf//gHPT095OTkMHPmTMzMzPD398fY2JglS5ZQX1/PypUr6ezspLa2ltHRUZqa\nmpg5cyZ1dXXY2NiQmppKbGysCFbt7+9HW1ub9PR0cnJy8PPzExg4qVQquAMeHh7cuXMHmUzGhg0b\n+OGHH4TLNDAwEAcHBxYvXiySvlWW9bGxMe7evYu9vb1A7y9btkyQilSgXHd3dxobG9HS0mJ4eFiw\nOmNjYwkLC6OrqwsjIyPu3r1LZGQkFy5c4PHjxzz//PNIpVIMDQ0xMTHhgw8+YPfu3dja2mJqakpH\nRwd+fn709fWJv3t9fT0BAQHY2toye/Zs8vLy+Pbbb7G2thbN9PLycjw9PcWo3tnZGTs7O+zt7Skr\nK0NfX5/BwUF2795NVlYWZWVlHD16VGRMqMpLNTU1goODuXr1Ku3t7f9zyge5XI67uzsxMTGMj49j\nbGzMvHnzxC758OFDHBwcCA8PJzAwEF1dXTw8PBgZGRFNl5CQENHYU8XGDQ0NsWzZMk6ePCl2B1Va\nT1dXF++88w6lpaV88cUX3Llzh4KCAiYmJhgfH8fQ0JD9+/fj5ubG9u3bCQwMRLWAqiCx586dE7ue\nm5sbOjo6oplUU1NDXV2dAKPY2Njg7OzMw4cPuXHjBl1dXWI8ODExATyZlsCTTvnWrVspLi7m5s2b\nREZGEhUVRVBQEBkZGQQEBGBnZycyAVTOw9OnT9PT00NLSwuzZ8+mpKSExYsXs2PHDiYmJkQ5EBcX\nR1FREdevX+fbb78VOpEDBw7Q2toqzEt1dXVERkaiqanJ+fPnRRK2t7c3QUFBAuSh0h5MTU1RVFTE\n4OCgwJi5uLigra0tPBq+vr7U1NTQ0NDAnj176OnpEQ1HeIKke/311xkbGyMqKkpkWzz33HMEBwfz\n0ksvsW3bNiwsLARrUV1dncuXL1NeXo6JiQnV1dUC+a8a96qYBhYWFlRWVjIxMcGxY8cE2ero0aMU\nFRVx6dIlmpubkUgkXLlyhTfeeIP169fT2tpKZWUls2fPRkdHh127dgHQ1dUluBrFxcXMmzcPLy8v\nsUhJpVKBh1O5Wi0tLZmensbDw4Nbt25x/Phx1q9fj4GBgTipLFy4kJCQEHp7ezl37hyurq5kZ2cL\n12xPTw/q6up8++23VFRUEBQUJEKRHB0d6e3txcPDg7a2NsbGxtizZ89T34+/ipPC3r1731OlQFlb\nW1NYWEh0dLRIbVZXV0cqlSKTycSqXVBQwPXr1+nu7sbV1RVTU1Nyc3ORSqUoFAru3r3LG2+8QVJS\nEurq6oI6NDk5iYGBAWNjYyIvQUNDAzMzM9TV1SkoKEBdXV0o+nJzc0VOYFhYGKdPnyY+Ph5dXV0e\nPHggHH1GRkb4+PigqanJ4OAgHR0dAjW+a9cu1NTUUFdXp7Ozk97eXgYHB3n++ecFG+DatWuMjo7i\n7OyMXC5n0aJFrFmzBgsLCxYuXCgAL7W1tQwODlJRUSESodXU1MjPz2diYkJ0th0cHJg7dy5NTU08\nfPgQU1NTrl+/jrGxMXV1daipqTE6OkpdXR2mpqbk5+fj4+ODv78/DQ0NlJWV0dvbi7m5OVNTU+jo\n6GBkZMTExASurq5UVFSIlO8PP/wQY2NjsUsFBATwww8/sHTpUoqLi7ly5YpgHObn59PZ2YmGhgYp\nKSlCaKOurs69e/dYt24dNjY2XL16VZCeVA7Z+fPnc+TIEcLDwxkbG8PFxQW5XM6+ffvo6+sjMzMT\nmUzGxMQEiYmJ2NrakpaWxoMHD4iJiUGhUAj786xZs0Q/amJigo0bN+Ls7Ex3dzdLly7l2rVrdHZ2\nsnnzZvbu3cuMGTOwtbWlsrJSsCIuXryIpaUlhoaGwtgVEhKCTCbjxIkTDA4OsmTJEhobG2loaGDj\nxo24urqira1NTk4ON27cYMmSJbi4uNDX18f8+fP5+uuvqaysZNu2bXh5eYlUKGdnZy5evEhTU5Ow\n3ltbW2NnZ4e/vz86OjoMDw8zOTnJ119/zfj4OKGhobzwwgtcvnwZW1tbzp49+z+Hp3D48OH3YmJi\ncHZ2FtFk9+/fZ8WKFUxPT/POO++Ql5dHTEwMJSUlAkyipqZGYGAgIyMjdHd3Mz09LW6a/Px8kWoc\nFxeHn58fJ06cQE1NTQSzXrhwATMzMzH1UKVQqZRrqkZOaGgohoaGtLS0kJqayoEDB1BXV0ehUAiA\nhq+vL8XFxezbtw+ZTCYabatXr2ZqagqJRCIalapkaBcXF+7cuYOWlhY3b97kyy+/5M6dO2zcuJGc\nnBxsbGxQV1ensbGRnJwcysvLmTt3rsB7Dw0N8cwzzxAREcH4+Li4wcbHx5HL5dy7d09MbtauXSte\nWDKZDFtbW1paWnBzc8Pf31+4/1Rp0BMTE6xfvx4dHR3eeustfvOb3zA2NiZEV8PDw4SEhODl5cXs\n2bNpa2sjLy+P0dFRMbH54IMPsLa2JioqitHRUa5du8bAwADFxcWEhISgra0thFWmpqZcuXKFWbNm\nCSWeqakpV69eZe3atUxPT9PX18fOnTuRSCTU1dWRkZFBXFycGE2ryreWlhaM
[... remainder of base64-encoded PNG data for a matplotlib figure output, elided ...]",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1385a46c10>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGdwm/eV9v0DCAJEYSdBUGDvvYpUI2VKIlWpShdZbpIS\nl8R2rMQl67Ibx14nSpzdxN64ykW2ZTt2IktWpzpFipREURR7J0iCvYEFIAACBJ4PennPZt7ZWT+z\n8azfd3TN6AM5twDwxn3O/5zrXNcROZ1ObuM2buM25iH+3/4At3Ebt/HDwu2kcBu3cRt/h9tJ4TZu\n4zb+DreTwm3cxm38HW4nhdu4jdv4O9xOCrdxG7fxd/jekoJIJForEolaRCJRu0gk+qfv631u4zZu\n4x8L0fehUxCJRC5AK1AA9AKVwL1Op7PxH/5mt3Ebt/EPxfdVKWQD7U6ns9PpdM4CfwE2f0/vdRu3\ncRv/QEi+p9fVAvr/9HMvsOi/ulipVDqdTic+Pj64ubkxOzvL9PQ0EokEpVLJ2NgYSqUShUKBQqFg\nZGQElUqFzWZjdnYWu92Or68vRqMRmUyG2WxmcHAQd3d3ZDIZEokEs9mMWq1mZmZGuGZ2dlb4f3K5\nnPHxcVxdXVGpVPT19eHj48Pc3BxeXl6MjY3h6urK4OAgwcHByOVyDAYDarWagYEBfHx8MJlMiMVi\nZmdnmZycJCoqCqPRiEgkQiwWMzU1hZubGzabDaVSidlsxmazIRaLGRkZQavV4urqiqurK1NTU7i6\nujI7O4tUKkWhUAivLRaLcTgcTE5O4uHhwdzcHFKplOnpaVxdXZFKpcK9cXV1xWw2C/dBIpEwNDSE\nVCoV/s3fw//8mVQqFXa7nbm5OUQiEUqlkqmpKcRiMWLxrbNEJpNhsViwWq34+vpiNpsRi8W4uLhg\nNBpRqVSYTCaUSiUWiwWAqakp1Go1VqsVhUKBm5sb09PTTE9PMzk5SWRkJAB2ux0XFxdsNpvwndnt\ndux2OwqFAolEgt1ux9XVVfj7xWIxBoMBDw8PLBYLdrsdgKGhIRYsWCB8txMTE9hsNjw9PZmcnMTX\n1xepVMrY2BgOhwOpVIqbmxszMzMolUrGx8fx8vLCaDTi5uaG0+lkcnKSsbEx/Pz8sFqteHh4CK8h\nl8txcXHB4XDgdDqxWCxIpVImJydRq9XYbDZsNhtubm4MDAzg5eUFgFgsRqVS0d3dTUBAAGazGaVS\nidPpxGq1IpfLMZvNKBQKjEYjrq6uKBQKzGYzc3NzyOVy5ubmEIvFiEQizGYzEolEeP4GBgZGnU6n\n/38XvN9XUvhvIRKJHgEeAfDz8+PFF19Eo9FQVVXF2bNnuffee/H39yc4OJgrV66Qk5NDa2sr+fn5\n1NXVkZGRwSeffEJ7ezvPP/88ZWVlNDQ04O7uzvj4OF9++SUHDhzgl7/8Jbt27eLYsWMsXLiQpKQk\nzp07x6JFi6iqqsJqtZKWlsbly5eJiooiIiICmUyGm5sbTU1NSCQSenp6WLJkCTU1Nbz77rscOHCA\nM2fOMDs7S2pqKnV1dRiNRpYvX86FCxcYGRmho6OD3/72txw9epSOjg5iY2NJTEykoqKC1atXU11d\njd1u57HHHqOuro6ioiIef/xxIQEuWbIEmUxGc3MzaWlp3LhxA7VajV6v54477qC8vBy5XI7RaESp\nVDI4OIhKpaK9vZ2oqChEIhHp6elUVlYyMDCA0+lk3bp1XLp0ieDgYHQ6HX19feTn51NTU0NKSgod\nHR0kJyfT3t5OZ2cnfn5+JCQkUF1djc1mQ6PRIJfL6ejoQK1W8/XXX/PEE08wODhIUFAQH3/8MYsW\nLUIkEpGamkpVVZVwL319fdHpdDz11FO89tprhISEMDY2Rn9/PwEBAcTHx/P888/z3nvvcejQIQIC\nAkhJScFisTA6Osr09DT79u1j+/btFBUVceLECaKiotDpdGRnZzM0NERWVhavv/46ixcvZnR0FIfD\nwaeffsrQ0BAPPPAAW7duxd/fn/HxcQ4cOIC7uzsqlYq6ujqCg4NZunQpJpOJs2fP8uSTT1JVVYXJ\nZKK0tJTs7Gz6+vqIjY0lJSUFh8NBbm4u27dvZ3x8nLi4OA4cOEBycjK1tbUUFRURGhpKd3c3Wq2W\niooK7rnnHnp7e+ns7MThcAjJas2aNbi5uXHu3Dmio6PZt28fRUVFuLm5kZCQgEQiob+/n6GhIRwO\nBwEBATQ1NeHn58f09DTXrl3jgQceQCKRCIfIfKKempqiqqoKX19f3nrrre7vEpvfV/vQBwT/p5+D\n/p/fCXA6ne87nc6FTqdzoZeXFzqdDqVSSVJSEo8//jg+Pj5MTU3R3t6Oi4sLNTU1vPDCC0xOTjIz\nM8P+/fvx8PBgx44d7Nq1C7VajUgkAiA6OhqAw4cP09zcjMVi4YUXXiAkJISoqCh2796NUqnE29ub\nl156CbVaTVxcHEqlkqioKEJCQiguLqazs5Ph4WE8PDzo6+sjPj4egMuXL5OVlYVarWZ0dJSRkRHU\najVjY2MEB9/6s5999lnq6upQKBSoVCp27NiBUqlk586d6PV6srKyGB4e5vDhw7i6ugKwcuVKRkdH\nueOOO5idnWV2dpby8nIMBgMOhwOdTseyZctoaGhALBZz5coVdDodly5dQqFQMDAwgK+vL97e3ri7\nu1NTU0NPTw9NTU2YTCbOnz9PbW0tdrudqakpfvGLX9DU1MTSpUspLS1FJpNRVlZGfX09V65cYXx8\nHI1GQ2RkJMHBwUgkEnQ6HQBubm6sX7+etrY27HY77e3tLFmyBKVSiaenJ2KxmLCwMNzd3UlKSkKh\nUKDVamlqamLRokWMjY0RExODRqNBpVIJ1cfs7Cwmk4njx48jk8nQaDQYDAYGBgZYvnw5vr6+dHZ2\nEh8fT29vL0lJSUxOThIXF8fg4CCpqanU1NTQ39+Pv78/+/btAyAsLIwTJ07Q0NDAzMwMTz31lPCs\nDQwMYLFYqK2t5cyZMzz88MNotVr6+vqYm5tj/fr1BAUFMTs7S15eHk1NTRw7dgyA8PBwnn76aQwG\nA/fffz+JiYns27ePuLg4bDYby5YtY9WqVTz55JNMTk7i5eXFunXrmJubY8eOHRQWFmI0GgkJCWHN\nmjV0dHQAMDo6SmdnJw0NDRw9epSmpibc3d0JCwujra2NmpoaLl++jK+vL9u2bWN0dJTa2lquX7/O\n008/zcDAAG1tbdTX17NgwQKhGvku+L6SQiUQLRKJwkUikRTYDhz5ry6enJwEbpWWc3Nz7N27F6fT\niUajISMjg8LCQnJzc9m5cyft7e20tLQAoFAo8PT0ZMuWLeh0OrZt24bD4cDT0xMAb29v/vCHP+Dl\n5SUEdVVVFVqtlsnJSVauXMnMzAy7d+9GLBYTFBSERCIhMjKSvXv3otVq2bp1K25ubsTFxQkPWEZG\nBpOTk1RVVeHi4kJhYSGpqakM
DQ0xMzNDWFgYxcXF7Nu3jy+++AI3NzfKysqIjo6msrKS/Px8NBoN\noaGh5OTk4HA4AGhubiYhIQGdTofVaiU0NJS8vDwqKyuJjo5GoVDg6upKYmIi3d3dnDhxgt7eXvz8\n/JidnSUmJobc3FxMJhMjIyOkp6fT3t7Opk2bCAkJwdPTk+XLl9PW1kZcXBzNzc0oFAqqq6u5ePEi\nn3zyCefPn6eiooK5uTnKy8vp6OhALpcTGBhIWloaq1evxmazYTKZsNlstLW1odVqmZ6e5sc//jEj\nIyN4eHgwMjKCVCrFYDBQXl5OdHQ0LS0tuLi48Nvf/hZPT0+ysrL48Y9/zOLFi1mxYgUA9fX1rF69\nmkWLFvHNN99w+PBhZDKZ8PAvWLCAyspKpqeniYmJQSKR4Onpyf79+3nrrbfo7u6mqKgIuVxOc3Mz\nzc3NAHz11VcEBwczMDDA6OgoBoOBwMBALl++zMKFC4WWYe3atfT09FBSUoKXlxeXL1+mqamJwcFB\n0tPT+eyzzwgICCAxMREAm83GK6+8QkVFBRqNhgsXLnDo0CG+/PJLurq68PT0pLS0lPLycsRiMT4+\nPrzwwgukpKRw5coVvL290Wg09Pf34+rqyksvvSTEwnwbEh4eLrS3Op2O8PBwHn30Ue655x6Gh4dR\nqVRcvXqV48ePU1RURGFhISkpKYSFhSEWi1m6dCmfffbZdw7e7yUpOJ1OO/AEUAw0AV87nc6G/+p6\nNzc35ubmMBgMuLi4kJycTGhoKFqtlrfeeovW1lZcXFyIiIggNzeX7OxsTpw4QWRkJGKxmISEBDo6\nOqipqRF4BLjVl3Z2dhIbGwuA0WgkODiY6elprFYrzc3NfP755zzzzDPce++9JCQkYLPZuHDhAgcP\nHiQ+Pp7i4mKmpqbo7+/n8OHDAIyPj6NWq3nooYdob2/H09OTjo4OsrOzSUxMxN/fn4ULF7JmzRrM\nZjNJSUns3LmTmpoaYmNj8fHxwWKxkJyczLFjx+jp6QFu9b6zs7Po9XrkcjkNDQ04HA48PDxITEwk\nPz+flpYWvv76a+Li4jh69Cjh4eFYrVbWrVvHpk2bmJubw93dHa1Wy7fffsuSJUtoaGggPT0djUbD\n1atXWbp0KXNzc5SVlWEymTAYDCQnJ/Pkk0/y8MMPc++997Jo0SJef/113Nzc8PPzQywW09nZya9/\n/WsCAwOxWCysW7eOu+66C5PJhEwm49NPP0WlUqFQKNiyZQvXr19Hq9XS3NzMH/7wB5xOJy+++CJh\nYWHMzs7S2toqnIAnTpwAQKlUUlpaSmhoKHfddRerVq0iICCA/Px8Dh8+zNTUFA899BCNjY0C15GY\nmIjT6WR8fBw3NzcaGhqYnZ1lzZo1eHh4ALBz5056enqQyWSMjo7S2tpKc3MzAQEB5OTkUFlZSVpa\nGn19fRw6dAidToeXlxe/+MUvUKvVaLVaOjo6CAgI4Pz585w8eRKAkpISoqOj2bBhA+fPnycyMpIT\nJ05gtVoRiURcvXoVLy8vpqenWb16NXCravnoo49QqVRcuXIFvV5PT08PnZ2dtLe3A+Dr60tGRgZK\npZLm5mZiYmJwc3PDYDDg7e1NQ0MD/v7+LF68mPLycqxWK5s2baK3t5fY2FgaGm6FW0xMDPX19ezZ\ns+c7x+/3xik4nc4TwInvcu3s7CwpKSmUlJSQkpKCQqHg1KlT7Nixg56enr9LFjk5OSQmJlJUVASA\nXC7n0qVLPPbYY4SGhtLc3CwE76JFi7h58yZisZimpiY0Gg3Jycn8/ve/JzAwEC8vL+x2O35+fly+\nfJnIyEgaGhoQiUR/R05dvXqV5cuXU1lZCcDY2BgnT55k165dOJ1OPvnkEyYmJmhsbMTLy4uioiKG\nhoZwd3dHLpejUqn4+c9/jouLCzExMVRVVdHY2EhfXx9yuZywsDAAVCoVra2tbNy4EZFIJPAEpaWl\nBAQEkJGRweLFi5HL5QQEBFBZWcn27dvp6OgQOJOFCxdy5swZOjs7CQwMZMGCBUxNTfGHP/yB9evX\n4+HhQVVVFW5ublRVVbF8+XLCw8MZHR3FaDQKBKZYLKa8vJzg4GCB3B0fH+fOO+9EKpVSWlpKbGws\nCxYsoKysjLS0NMRiMTU1NZw7d46zZ8+ycuVKYmJiaGxsxMXFBX9/fxISEtDr9UK5n5WVxZEjR0hK\nSgKgvb2doaEhVq5cSVxcHKOjowC0traydu1aysrK2Lt3LyaTCU9PTyIiImhsbKS9vR2r1cqZM2dI\nSEgQCM9r164B0NDQIBCL8/ds3759qFQqjh07Rl5eHr29vcTExLBr1y4GBgY4ePAgY2NjZGRk8NJL\nL3HfffeRnp7OqlWraGpq4ssvv0Qmk5Gfny+0ubGxsQIP4ePjQ1paGgCxsbFCeb9w4ULeeOMNoQWI\nj4+npaUFiUTC119/DYBOp0Or1WI0GlEoFBQXF+Pj40Nqairj4+OsXLmSwcFBBgcHkcvlxMfHk5+f\nT1RUFNeuXaO2tpbm5ma0Wi3+/v6MjIx859j9QSgaxWIxhYWFZGZmEhUVxYYNG3B3d+fMmTMsWrQI\nf39/XFxc6OjowM3NjVWrVhEeHs6ZM2eoqqoiOjqa4OBgjh07xrfffsvWrVsBeP311zEYDJw6dYqZ\nmRkUCgWjo6O0tLSQkJBARkYGsbGx+Pv709LSQmtrK0VFRVitVhYvXoy3tzeDg4Pk5eUxNTUl9Huz\ns7Pcd999nD59Gi8vLwwGA+7u7hw5cgQvLy9KS0upqanBbrczNjaGi4sLY2NjpKSkIBaL6e7uxs/P\njxUrVggMN9xKCitWrGBwcJCGhgYaGhpwOp1kZGQQEhKCxWIhMDAQsVjMl19+SXx8PKGhoURGRmK3\n20lMTKSxsZHQ0FByc3Px9PREq9Wybds2CgoKmJycJCkpSWDU/fz8gFulqlarZXR0lI6ODtzd3bn/\n/vvR6/X09fWhVCrJyMggKioKrVZLTU0NOTk5dHV1UV1dzbJly8jMzESn0zEzM4OHhwcxMTGsWLGC\nsbEx/P39sVgsXLt2DYfDQV5eHikpKcjlcoaHh+nt7RVO3kWLFvHEE0/Q3NzM3/72N1paWjh16hSz\ns7N0dXVRVFTEgw8+yK5du/Dw8CAwMBCz2Ux3dzepqank5eWxaNEi7rzzTqEtAPD392fr1q3ExMTw\ns5/9jJKSEjIzM6mqqiIuLo60tDSKioqQyWT8+c9/5ne/+x1arZY33niD/fv3Exsbi0wmQywW8/Of\n/1zggRISEjh16hQffPABmzdvxmq10tjYSHZ2Nk888QQ7d+5kxYoVqNVqrly5wuDgIKGhoTQ2NqLX\n6xkZGeHSpUvArYp5+/btANx55504nU5h0pWVlQXAqVOnUCgUdHV1YbPZEIlE9PT04OrqiqenJ8eO\nHaOjowOZTMbY2Bitra2MjIwwNDT03ePxfxTN/0B88MEHbNy4UeARfHx8EIvFtLe3s337d
m7evEl/\nfz+vvvoqCoUCmUyGt7c3CQkJ5ObmUldXx7Jly8jKyuLMmTMA3H333YSGhrJp0ybMZjNTU1NcvXqV\ndevWodfrBQIuLCwMu93O1atXGRoaYt26dRiNRi5cuEB0dDSNjY04HA4uX74MgFarpba2lg0bNpCd\nnc3GjRvp6ekhPz+ftrY2+vr60Gg0SKVSwsLCyMvLIzw8nKCgIIqKili+fDlwi0x67bXXmJmZAW71\n03/+859ZvHgxfX19REdHC6f30NAQNpuNf/mXf0EikbBx40Zqa2s5f/48/f39eHl5MTAwgEajobW1\nldbWVhQKBfv376ehoYGBgQESEhKIjY0lJyeH4OBgfvrTn7JlyxaCg4OF1sXT05OUlBSam5uJiIhA\nLBZTUlJCd3c37u7uQs8+TxAmJCQgl8tpaWkhKSmJ9PR04uPjcTgcVFZWCu1cTEwM7u7uNDU1ER4e\nTmtrKzExMURFRVFUVMTKlSuBW1XY8ePHaW9vp7Kykrq6OkJDQ6mqqqK6upqSkhJiY2PZunUrVquV\nqakppqamyMjIYHBwkIiICIKCgpiYmOCrr75Co9EAUF5ejp+fH0ePHuXuu+9mbGyM5ORkcnNz8fLy\nIj8/H4nkVuHs7u5Oa2srbW1t3HfffWRmZiIWi2lsbKSmpoaCggIMBgOA0BrMVxfzr+3r64uXlxdS\nqZTBwUGGhoYwGAwsWbIEm81GXV0dy5cvx2w24+7uTnt7O6dOnWJsbAwAV1dXduzYwWeffUZNTQ0u\nLi64uLiQl5cnfF8BAQFMT09z+fJljhw5QkdHB7Ozs0gkEtLS0khNTSUzM5PJyUmhLfkucHn55Zf/\nh+H8P8evf/3rlxMTE1EoFFy9epVvv/2W6OhoCgoKKCws5PXXX0etVmOxWEhPT6eiogKtVkt2djZn\nz56ls7OTtWvX0trayueff47ZbKa+vp7o6GhmZmZwOBxERkYSEhJCUlISo6OjFBcXo9PpsNvtGI1G\n0tPTWbt2LdXV1YyPj3Pz5k02btxIV1cXubm5wqy/oqKC7OxsBgcHMZlMlJWVodfrqaysJDAwkCee\neIJDhw7x1FNPcfLkST799FPefvttFi9ezNq1a3n55ZcJCAhAIpGgUqmorKwkPT2dY8eO8ctf/hJX\nV1dcXFyYnZ1FLpejVquZm5tj06ZNdHZ2CjPqyspKxsfHMRgMZGdno1araWho4MaNG8zMzDA5Ocn0\n9DQWi0WYY7e1tZGdnY1MJiMjI4Nr164xMzODj48P+fn5iMVigdwaGBjg8OHDREREkJCQgJ+fH0ND\nQ4yNjaHX64W+3Wq1Mj09TWtrK97e3szMzBAdHU19fT3T09PU1NQQFhZGc3MzmzZtQqVS4eHhwczM\nDGKxmN7eXoxGIxKJhOLiYrKysliwYAHp6el4eXnh6uqKr6+vwN3Mj1rnOYV5Qk+hULB+/Xr8/Pwo\nKyujqqoKjUbDokWL+OCDD3j22WcBKCgoYG5uDldXV/r6+pBIJMTFxeHt7c2RI0eoqqri2LFjbNu2\njYyMDD777DPc3NxYunQpsbGx9PT0IJVKiY6O5rPPPiMwMBC5XC4QvhaLRahc4+Li+Otf/8rIyAgm\nk4no6Gja29vp6enBz8+PS5cuERkZKegjwsPDmZ2d5erVq2RnZ/P+++8zMjJCaGgoRqOR3NxclEql\noEUYGhrCarXi5+fHpk2b+Nd//Vf0ej3Dw8P4+flx+PBhysvL8fX1JS4ujtLS0oGXX375/f8uHn8Q\nlYKfnx+LFy9GJBJhMpmIjIxkbm4OpVJJamoqo6Ojwhz55s2bZGZm8s4772Cz2di4cSN9fX288MIL\nNDc34+HhIYy3QkNDcXNzo7e3l+LiYjQaDSUlJXz00Ue0trYyPDxMZGQksbGxlJSUIJVK6ejo4Pjx\n44jFYvR6PYmJiVitVvr7+9m5cycAZrNZEOZs376dr7/+mqKiIjZt2sTExAQrV67kiy++oKOjg4SE\nBJYuXcq2bduEU9PhcLB48WJhbDdPNLa0tJCdnU11dTUeHh7Mzs4SGBhIUlISJ06cEHQcubm5bN26\nlU2bNgEwNzeHVqslMjISkUiEi4sLkZGRFBQUCKX8PFlos9k4e/Ysb7zxBh0dHVy6dIlHH32U9vZ2\nZmdnMZvNtLa2kpycTH5+PuHh4czNzZGSkiIIYjo7O8nIyMBut9PV1YVaraawsBClUoler6erq4t1\n69ahVCpJSUnBarUSHR2Nq6srHR0dhIaGEhsby5EjR1Cr1TidTqH3T09Pp7i4GLPZTFBQEA0NDaxa\ntQp3d3cWLVqE0+nEaDRyxx13EBERwT333IPZbKavr4+LFy/S29vL+Pg4BQUF+Pn5CT36qVOnaGlp\n4csvv2T//v0EBwcjFosZGxvD6XQyODhIZ2enIIpKTExkcHCQhIQEYfpVWlqKwWDgoYce4t///d8B\n8PDwQCQS8cADD5CWlsbAwAB9fX3odDrefvtt+vv7mZub4+bNm7i7uyORSJDJZLS2tmKz2RgbG6Ox\nsRGxWIyXlxdxcXEAXLp0iZ/85Cfs3r2b9PR04uLiKCsrE7QHAAcPHqSuro7R0VFBQDc8PEx6ejpy\nuZx169axevVqTCYTixb9l9rB/xd+EJXC73//+5fd3d1pa2sTJgDzZM7Y2BhSqZTe3l5BhVZdXc3y\n5cvR6XR8/vnneHt74+HhgYeHB2FhYYSHh3P8+HF27NhBaGgoQUFBhIeHk5CQQGBgIAcPHiQvL4+d\nO3dyzz33sGfPHnJzc1mwYAExMTEsWrQIh8NBa2sr1dXV9Pb28sgjj/DNN99QUlJCfHw8GRkZnDlz\nhurqavLy8nB1dSUiIgIXFxfGx8fJz88XxpW+vr7IZDJaWlq4cuUKQ0ND5OTkYDAYmJyc5K677uKd\nd96hoKCAgYEBpFIp3t7epKSkcPXqVQYHB1m1ahVWq5XY2FiampqQy+VUVFSQmJiIm5ubMNatqqrC\n398fV1dXJiYmqK+vZ2Zmhri4OFQqFXK5nPr6epKSkpiamuI//uM/yMvLY9u2bcLkQCKREBgYSGVl\nJWazmYyMDPR6Pe3t7VRVVREWFobNZiM2NpbGxkb8/PwYHBxkeHgYuVwuVAwymYxt27bR29vL3Nwc\nDoeDuLg46uvr6ejoYHp6mqCgIEpKSggNDeXy5ctotVqBQ/nqq6/Iz8+nr6+P6upqJBIJa9eu5dCh\nQ8zMzDA0NMSRI0fIyckhNDSUsrIyjEYjtbW1pKam4uHhgUwm4/jx4/zlL3/h008/ZdmyZVy5coVN\nmzYRGxtLSEgIfX19lJWVkZycTEtLCw899BC1tbVER0czNTXF+vXr8fb2xul00tzcjN1uJzU1lYMH\nD6LRaEhPTxf0KvNK0nmCV6fT0dvbi1wuJy4ujs2bN9Pf349cLhcUiVu3bsVoNJKSkkJpaSm1tbU8\n+uij2O12Ll68iFKpRK1WC2rV0tJS5HI5hYWF
iEQigoODiYyMZGpqCovFInA187oImUzGtWvXqK+v\n/06Vwv+aovE/QyqVCkpEnU5HbGwsBw4cYHx8nMHBQTQaDU1NTSxYsEBg8MPDwwkICMDX1xeJREJX\nVxc9PT0sWrSI6upq4BYhWFdXR1BQEIGBgXR3d9PW1sYvf/lL7rjjDoaGhrh48SJvvPEGo6Ojgg5h\nyZIlTE1NCaISk8lEbW0tExMTACQnJ9PU1IRWq8XhcBASEoK7uzsAr776KuvXr+fMmTPIZDJu3LiB\n0+lkZmYGiUSCn58fvr6+fPXVV0RFRTE6Osr+/fuBW4Tr6OgoKpWK6elpjh8/ztKlSxGLxRw/fpya\nmhqkUilxcXFkZWXhdDqpq6sjMTGR6elpAgMDkUqljI6O0tTUhNlsxs/Pj7i4OM6fP098fDwlJSW4\nurryzTff8NFHH5GQkMDY2BhfffUV9fX1LFy4kICAAL799lvOnj3L3XffzaFDh4iPj6e2tpa1a9fS\n29vL4OAgSqVSkA5fvHiR0dFR8vLyKCgooK6ujqNHj+J0Olm6dKnAMQQFBdHT08M333zD8PCwICM3\nmUwABAYG8vXXXwsE68jICDk5OUgkEnx9fXE4HII0fXJykjvuuIPIyEiMRiMJCQl4e3szOTkpkNJX\nrlwBbpGu0MUsAAAgAElEQVR4RUVF+Pj4sGbNGgwGA/v27SMqKoqWlhb8/PxwdXXlJz/5CSaTiZiY\nGKKjoxkfH6elpYXk5GQcDgfr169nxYoVfPHFFwBkZmZy+vRp1Go1d911F9XV1VitVpxOJyEhIVRX\nV5OdnY3JZCIiIoJjx46Rn5+PyWRifHyc7u5uLBaLkFACAwOBW9OW6OhowsLCkMvlWK1WYmJiGB4e\nFghks9lMYmIi7733nqC0/NGPfoRSqeTrr7/GxcWF4OBgxsfH/6/i8QfRPhiNRtRqNampqWi1WpKS\nkli1apXA6n/zzTfcuHEDrVZLTEwMcKs1mO9L33vvPSYmJrj77ruRSCTCqdnS0sK2bdtYtmwZMpmM\nkJAQbDYbtbW1KBQKIUBaWlrYt28fXl5edHR04OHhQVJSEoGBgbi7u7NixQra2tqEzzvPHI+NjZGf\nny+MnRISElAqlUxMTLBgwQKqq6t59dVXGRoawmg04u/vz/LlyykoKCA5ORmDwSBo4+GW/iE5ORmd\nTicIlXQ6HYcOHUKj0Qgn244dO+jr62NgYIDIyEhUKhU6nU4QM42NjbFnzx4iIiIwmUzU19cTGRmJ\nwWBgYmKCzs5O4T50dnYKCTgtLQ2LxYJWq0UqlQqk2NTUlKDanNfgz/sjgoODcTqdwvjTaDRy4sQJ\nhoaGsNvttLS08P7771NfX09CQgLJycmUlJRgNpvJy8vDarWiVquFGX50dDTR0dE8/vjjhIaG0tDQ\nwLFjx4iIiODKlStcuHCBiYkJYYxbUFDApUuXKC0tZdu2bSgUCh588EGkUilXr16lsLAQuKVCvXLl\nCu7u7mRlZSGTyVi4cCFRUVFkZmayadMmAgMD8fPzo7i4mIMHD6LT6aioqKC6upr6+nqioqKQyWTs\n27ePkpISAPr6+oiJiSExMZGzZ88K0nQvLy/c3NzIzMykoqKC48ePc/jwYWZmZjh79ixdXV3I5XJC\nQ0NRqVQAXLhwQVCMNjQ0YLfbEYvFbNq0CQ8PD4FjOXv2LKdOncLFxYVvv/2WpKQkJBIJdXV1yGQy\n9u/fT05ODufOnaOnp4fJyUkGBwe/czz+IJKCp6en4F24dOkSVVVVrFixglWrVrF7925EIhFZWVmU\nlZVRV1dHVFQUDQ0NGAwGPv30U4qKioQ59/j4OFVVVcCtXttisXDlyhUiIiJwdXUlLi6OwsJCZmdn\nCQgIwGq1UlVVxczMDGq1GhcXF/74xz9SX19PdXU1tbW1tLW1ERMTIwRvcHAwaWlpbNiwgWvXrtHV\n1cX169exWq3cddddZGVlsW/fPmw2G1FRUTidTtzd3fH29qazs5Njx45x48YNFAoFhYWF5OTkAGCx\nWFi1ahWBgYFUV1eTlJSEVqtFo9Gg0Wjw9/dHo9GgUChobW2lvr5eOD3mycJ5YvHw4cOsXr0ai8VC\nV1cXUqmUiYkJpqenqa+vJzMzk9dff51Tp04RGhrKnj170Gq1DA0NCXqC9PR0lEolDoeDgYEBRCIR\nDQ0NyOVygoOD0ev1GI1GIWl4eXlRXV1NRUUFPT09bN68mdDQUGQyGY2NjSQlJWGz2RgYGGDTpk3U\n1dXh5+cnnJqA4B+w2Ww0NTWRkpJCW1sb77//vpAUFy9ezMTEBFarlVdffRW1Wo1KpaK5uZmRkREW\nL15MVlYWsbGxwuhw9erVrFixgvLycoqLi0lMTESv12OxWIiPjxc4nM7OTsbHx3E4HBQXF6NWq3E4\nHNhsNjo6Ovjiiy9obm5m3bp1wvMbEBCA0+nEw8MDf39/wsPDqa+vF/weNpuNjIwMMjIyBGPakSNH\nmJubw2azUVlZia+vLxqNht27dwOwa9cuvLy8iIiI4De/+Q1zc3O0trYSFBTEc889h9FopL6+noiI\nCAICAhCLxSxfvpyJiQkSEhKoq6vj2WefJS4uDqlUKsTHd8EPon1wOBwsWrRIcDna7XZBTPTxxx+z\nZcsWSkpKuPPOO3nmmWcYHh7mww8/pK+vj4iICHx9fbHb7Xh4eAhik5qaGu69914OHTpEYWEhNTU1\n9PX14XA4mJmZ4e2338bhcFBYWEhQUJCgnJNIJFgsFhYuXMjdd9/NwMAAarWajz76SCCB5uWyycnJ\nuLi40N/fz8qVK7FYLLS1tfHtt9/S2dnJc889R15eHsPDw4KXIj09ncnJScrLy5mbm6O4uFjwS4yN\njTE2Nia0D9PT07S1tWEwGBgdHcXX1xeFQsGRI0f4t3/7N44fP45CoSA4OJg9e/ag1+vJzMzExcWF\nJUuWMD4+zpIlS2hubqa2tpa7776bqKgobty4ISSLrVu3EhcXx8mTJ1myZAkikQidTkdERAQxMTE0\nNzcTGxuLTqdDJBLh6emJwWAgISFB0EQEBwfz0Ucf0dbWxnPPPYfdbuftt9/m5MmTBAcHs3fvXh54\n4AH6+vpwc3PD09OT2dlZobU5evQo9fX1ADz88MPIZDJ6e3vx8vKipKREaJfmR8jPPPMMH330EdHR\n0YKsenx8HLFYzMaNGzl9+jRtbW3k5uYKo7jLly+j0WiYmpoiJCSEV199la1bt+Lr68vQ0BAbNmzg\nzTffpKSkhD179nDgwAGmp6dZunQpw8PDAKSmptLW1kZBQQGvvPIKcGuMPDQ0hFarxc/PT5hQvfzy\ny0RERFBXV4fVaiUjI4MPP/yQRx55BKfTSUNDA6dPn0YqlQoE57Zt2wQ58osvvkhhYSGenp7ExMTw\n4YcfMjg4SGZmJkuXLmXjxo2CvHlychJ/f38aGhoYHBxkYmKC5ORkUlJSePfddwkLC6O3t/c7x+MP\nolI
wGo14enqSm5uL1WplbGyM0dFRoV2IioriwQcfZHJyknPnzlFVVcWqVaswm804nU5MJhOnT59m\nenqasLAwQkNDAXjttdfIzMzkk08+wWazMTQ0hE6nw9XVVSBvuru76e7upqysjNOnT9Pb24tEIqG8\nvJxPPvmE7u5uweY735bM6xD++Mc/0t/fj0KhoLu7G7vdjsViwdPTk3vuuUdwLY6OjpKWloavr68w\nDdBqtYIwZV4puWHDBgYHB/Hx8RHce/OS7DNnztDa2orRaKS1tZX4+Hg6OjqYm5ujoaEBq9WKwWBA\npVKhUqk4ePDg34mZbty4wZkzZ5iZmSE7O5uenh5EIhHDw8MCX3Px4kWysrJobGxkbm4OvV5PR0cH\nra2tiEQimpubMRqNzMzM4OLiglqt5ubNm0xOTvLyyy9zzz33kJSUREBAAI8//riQBN588008PT0F\nw5XD4eDw4cMkJSVx8uRJpqenWbZsGXDL/7Fnzx7MZjMWi4UNGzawb98+ZmdnWbx4MQDXr18nJydH\nGMXdf//97N69m4ceekhQ7vn4+AguR0CwvSuVSnQ6HRcvXuTixYs888wznD59mkceeYTp6Wkeeugh\nDh8+jFarxcPDg/7+flJTUzl8+DDvvPMO4+PjnDx5UpDSSyQStm7dKny+efv5V199hdlsRqPRsHr1\nakJDQ7l48SL9/f3ArepizZo1LFy4kOXLlzM0NMTNmzfJz88HYOPGjYLIbGBgQBhpm81mfHx8BB6p\nubkZp9OJTqfDx8cHuVzOtWvX0Ov1nDx58u8MY98VP4ik4O3tLQhUtm7dSmZmJjKZTChT56sBrVYL\ngNVq5euvv6a1tZUNGzawa9cuEhISuHbtGlevXhWksWq1msrKSlauXCmUaoGBgfz1r3+lt7cXHx8f\n7rvvPiwWC01NTQwMDKBSqfDy8mL58uVs2LBBkMpeu3aNbdu2AQj+/ZycHAICAsjKysJsNnPjxg1B\nyXfw4EGmpqZYvHgxGo2G3NxcMjIy2L9/P0eOHGFsbIxjx45hsVi47777AASx1ryteL4k1Wq1LFy4\nkPHxcVxcXAQfQ1tbG3Nzc5hMJnbv3s3SpUvx8PDA19eXvLw8oQ+emppiyZIl+Pr64uLiwscff4zT\n6cRgMODj4yNUUBqNRkjK5eXljIyM0NLSwsWLF9FoNIIsXKPRCG7DDRs2IJVKhcnN6Ogo4+PjjIyM\nkJSUREdHB9evXxdO6PlJUExMDEePHhVUrPPB7OrqytatW9Hr9Xh7e+Pn50dKSgpBQUHU19czMjLC\n008/TXFxMQaDgYKCAsRiMaWlpbz00ksolUra2tpwd3dn06ZNREREANDT00N7ezuBgYGsXr2ap556\nirq6OqKjo7HZbGzYsIHk5GRhbD1vQ25sbORPf/oTRUVFZGRksHHjRiQSCXv37gUgMTGRhoYGvL29\n6e7uZmpqiscee4ykpCSh3J+X0j/yyCPcvHmT06dPC6Kl4eFh9Ho9O3bswGAw0NraCsDNmzeFnQkG\ng4HU1FQCAgJITk6moaGBxsZGWlpaCAkJoba2lri4ODo6OhgcHGTz5s1CmzY4OIherxcOtO+CH8RI\n8oUXXng5Pz+f3t5eQkJCqKqqEpjpBQsWkJ2dzezsLENDQ/j4+Ag6/MzMTPr6+njnnXeYnZ3FZrOR\nkpLC+Pg4V65c4ZVXXhHccPMnhkql4rHHHkOpVBIeHs4dd9zBt99+yxNPPCFMJFpaWrh+/ToBAQGC\n0Wr79u387W9/49q1a2zfvp3a2lri4+M5d+4cOTk5hISEcPnyZX71q1/h7u4u7EZQKBTcf//9fPnl\nl1RXVwtVTmBgICtXrsTpdNLS0sKlS5dYt24ddXV1xMfHI5VKyc3Npbm5maeffhpfX18KCgqIiIhg\nenqauLg4fH19hVNhamqK5ORkofLx8vLi0qVL5Ofnc+bMGRQKBWq1muPHjzM4OMjSpUsZGhqio6OD\ntLQ01Go15eXlwr6GqKgoamtr8ff3x2AwsGrVKoaHh3E4HHR3dyORSDhz5gwGg0EQy0gkEsHyO/9g\nnjlzhuLiYqKjo+ns7KStrQ2FQoGPjw9KpRKVSkVYWBgajYaTJ08SHx+Pu7s7JpOJ8PBwiouLSU5O\nFkRXOp2Op59+mvj4eJqbm+nt7aW8vByVSoWbmxtpaWm0tbXh6urKo48+SmBgIOXl5WzcuBEPDw/a\n2to4ffo0cMssFBwcjJubGwcOHKCjowMfHx+WLFki7DBYtWoV3t7emEwmwa06NjZGdnY2e/fuJT4+\nHoPBgJ+fHzdv3mRmZkYwms3v3JgXhQUEBPDkk09isVhwOBx0dXUJkxe5XE5QUBB6vZ7r16/z05/+\nFLPZTGRkJDKZjODgYJYvX46/vz8ymYz29nbm5uZITEwU9jGMjY0J5LPVamVwcBCFQkFISAhubm5c\nuHDh/zviJY1Gw9/+9jeUSiUVFRWsWrWKe++9Fy8vL3p7e/niiy8Eb7vVamVgYICCggKsVisSiYRl\ny5YhlUqFG9bU1ATcsst2dnYyMzPD6OgoDQ0NjIyMcPjwYYaGhtDr9Zw+fZqysjLee+89Lly4gNls\npre3l61btwoVh9lsxmAwEBISAtw6dR577DFBN3/q1Ck+/fRTpFIp77zzDpmZmej1eqHXnV8+Mj09\nzd13341SqWTv3r2MjIwwPT1NZmam8D4LFixALBbj6+uLXC7nRz/6EUNDQ8JmIrlcTnJyMq6uroSG\nhuLj40N6ejp2u53R0VG6u7vRaDS4u7uTk5PDK6+8gtFoZGJiArlcjl6vZ8mSJfz5z3/mueeeo6io\niMjISPr7+4WkMjs7y7Vr16isrGR4eJiFCxdSWlpKc3OzYOm+evUqy5YtEyYV86Km9vZ2YmNjaW5u\nZmxsjMzMTBYsWIBGoxGEaa2trZjNZgICAli5ciVKpVI4IRcsWEBwcDAuLi6YTCbuvPNOZDIZw8PD\nmEwmtmzZwvT0NJWVlbS1tTEyMkJpaamgIjx37hz9/f1MTk6SmZmJv/+tRUMBAQGcPXsWT09P4uPj\nsdvt/OxnP8NisWAymVi7di1nz57FbrezYMECYmNj8fPz491338Xd3Z3z589TWFhIRUUF169fR6+/\ntVgsISGB5uZmoZKdnp4mKyuLyMhIHn30USYnJ4WE3NLSwvPPP091dTV1dXUMDAzQ29tLfHw8ExMT\nnD9/XrCQe3l5oVKpiIqKIjw8HLjl37h69Srj4+NIJBLkcjkjIyNYrVZ8fHzw9/cnLS2N4eFhvL29\nCQsLE5bUzEv0vwt+EElhamqKgYEBRkZGcDgcvPbaazQ3N2M2m0lOTsZkMnH58mVmZmYICgoS1qpV\nVFQIa9lycnIENnb+BmzevJl/+qd/YnJykkuXLgkjt/j4eMLCwoiLi6O/v5+YmBg2b94sbFnauHEj\n169fFzzw82vbBgYGgFvtw/PPPy9UH/O21tHRUY4ePcr58+eF
dW2zs7MATE9PY7PZuHLlClNTU4SH\nh/Pmm28yPT0t7IeYP0Hc3NywWq18+OGHnD17Vlgtd+LECUpLS5mamsLDw4OTJ0/S1dWFwWAQFrEs\nXLhQ0G3YbDZSU1N54IEHUKvVXL58mfDwcPR6PQUFBdTU1FBWVkZnZyd1dXVCOR0bG4tGo2Hjxo2E\nhYVx5coVlEolS5cuFRZ2bN68mb6+PmF5SnNzM0FBQTz44IM4nU42bNggWM4tFgt/+ctfmJqaEuTb\nJpOJmZkZDh48iMlkEvYTFBYWCq5YhULBZ599Rnx8PD09PYyMjHD+/Hnsdrug+1CpVCQnJ9PX1yes\npktJSSEtLY1f/epXdHV1AQg7CHJycli2bBmpqamUlZXx8ccfU1JSIrR4MTExeHp6otPpWL9+Pbt3\n70YikeDv709lZSVeXl4888wzwrPg4eEh2MT9/f2RSqUMDAwQHx+Pt7c327dvp7GxEa1Wy82bN4mP\nj2d6eprQ0FCio6NZsGAB586dY82aNaSlpQmmO5FIhEKh4E9/+hNvv/224LsIDw9HoVDg7e3Nm2++\nidPppLu7m0OHDtHT04NcLkcsFnPy5EmMRiOrV6/m2rVrwo6R74IfRFKYN8w4HA76+voIDg6mqqoK\nd3d33nnnHQICAoiMjCQrK4vq6moSEhKw2+0sWbKE7u5ulixZIngEHA4Hhw4dAuDkyZM0NjZSXl7O\nunXrKCsrIyIigpmZGXJzc4Vx0ebNm6mqqqKiooI1a9YIVcX8CHDp0qWcOHFCcBXOS12bmpqELyI/\nP19oSb788ktBZJWUlERRURE6nY7ExES2bNnCyMiIUKK3tbUJp5larSY7Oxu4VTUsXLiQkZERIiIi\n0Ov1SKVSrl+/Lkh2AwICiImJYWZmhs2bN7Ns2TIqKipoaGgQSMnR0VHeeOMNpqamBLVne3s7FouF\n8vJyMjIykEgkhIeH4+/vT1ZWFnV1dbi6ujIwMIDdbhfWnZlMJua3ZCkUCu644w6mpqZoaWnBZrNR\nVVXFxMQEFouFffv2odPphKS9bNkyJBKJQD6aTCYWL16MSqXCx8eHU6dOAbdGklVVVXh7ezM7Oysk\nrzNnzpCXl4fT6aS/vx+tVkt0dLSwYam9vR273U50dDRmsxmADz/8UHCglpSUcPXqVf7yl7+g0+n4\n+OOPOXnyJFu2bMHb25vKyko2b96M0Wjk/PnzlJeX09/fj9FoJCAggLvuuouZmRliY2OZmJgQKpv5\ntW/19fVIpVLa29tRKBS0tLQIFvK5uTkUCgWPPPIIEomELVu2CFOCN998E4D3339fcN0C6PV6xsfH\nycvLY/Pmzfj7+/PNN99QUVHBN998w8jICL/+9a9RKpXClMfpdKJSqQT+YJ5fSUxMFMxe3wXfy4r3\n/1uIRKL//Q9xG7fx/39UOZ3Ohf/dRT8InUJgYCCff/45165dE3pJkUhEV1cXq1evRqfTUV9fT0ND\nAy+++CI+Pj588sknhIWFkZmZSX9/P4GBgczMzHDt2jXsdjtvvfUWBw4c4He/+x1vvfUWLi4u1NbW\n0tTUxPDwMFKplLS0NDw8PIiPj+eDDz4QZLIJCQmYTCaam5uFTbhJSUm88cYbHDx4kEuXLvGnP/2J\nmZkZtm/fTnl5OcnJyXh6egqOzN27d/PTn/6UwcFBQRYNtyYna9eupbS0FE9PT3x8fLh8+TL79u3j\nzTffRCaTMT4+TmtrK2vWrEGv1wva9bCwMLRaLaWlpaSkpAC3tA21tbUUFhYKBOLx48cRiUR0dnaS\nn5/PunXruHDhgrBNOioqiscff5zf/OY33Lhxg66uLgICAgRic2BggB//+Md4enpy48YNQRG5d+9e\n3njjDUZGRigrK8Pf35/c3Fxu3ryJVqulu7sbs9lMRUUFnp6eLFy4EKvVytKlS6mqqmJwcBCn04la\nrRYW6Go0Grq6uoiPj+fZZ59lz549BAUF0dHRQW1tLdnZ2UgkEp5++mmee+45TCYT169fZ+fOnbS1\ntaFWq9m4caMgb3Y6nVRWVhIbGytwBO+++y7//M//zNq1axkeHqa0tJSHH36Yo0ePkpWVRWVlJZcu\nXWLXrl2EhoZSXV2NVCoV+Kp5s9a8DkSpVArS+VOnTnHz5k2Ki4vx9/cnLy8PpVKJ1WpFq9Wyd+9e\nMjIyuHHjBps2bWJsbIwLFy7w2muvUVlZKSzRtdlsXLp0iYz/w957Bkd5pevaVyuHVs4555ZaKCOB\nMlFEgwljPDADNmMP9mAcx2NmxgFvYxsbB7AxmQHbZIPJQiiigCRQQIFWzkI5Z6m/H0yvOlN1am9/\n33ynyrvOXn+UQOrq7ne9z3qe+77uoCA+++wzTpw4wfDwMF988QUffPABCoUCIyMjdHR0qKqqQiaT\nCYFaQUEBJSUlbNy4ES8vLz7++GN27dolJjSqSvPDDz/8Rdfjr+L40NfXx/79+9HS0kIikSCRSBgZ\nGeG3v/0t3t7ezJo1iyVLlnD+/HmhEly4cCHx8fGcOnWKwcFBGhsbsbGx4Y033mDu3LnAEyn0pk2b\nSE1Npby8nNbWVnx8fPj9739PREQEIyMjAsJSX1+PjY0Nzs7O3Lp1i/7+fhoaGujr60NfX59z584J\nDsKNGzewsrJi/fr1Ahbq7e0t9BElJSX86U9/QqlUYmVlRWtrK/b29kilUszMzHBzc2NkZET44hcs\nWAA8OZaopLJ//vOfKS0tZXR0VDQaVYaqgoIC1NTUOHfuHA8fPsTb2xuZTEZcXBydnZ34+PgQFxeH\nh4cHcXFxfPnll7i5uTFnzhyGh4cpLi7m9ddfJycnB1dXV+G/LygowNjYmMDAQAoLC8XxYWxsjO7u\nbiFAunbtGrdv38bU1JS9e/cyMTHBjRs3mDdvHp2dnSxcuBA7Ozv6+vpwdXXl5MmTPH78mKeffhql\nUin4Cyo/ikoODTBnzhw0NTVZunQpW7Zs4erVq2zYsIEFCxbw9ttvo6Ojw9NPP83Q0BBr165lYmKC\nK1euUFdXR21tLXV1df8iAlMdzTw8PAgLC+Pw4cMsWrRI+DxUvEovLy/U1NTo6enB1tZWEKQuXbok\nPBgzMzM0NjZy+PBhIXP+7rvv+PHHH3nuueeYPXs2Fy9eJDMzE6lUyjvvvENoaCh1dXVERkYSFxfH\nxMQEu3bt4sqVKzx+/BgPDw+KioqEk9Tf3x94ot2pqKjg73//OxMTE2LqlJGRQWRkpKCQ79q1C7lc\nzrZt28jNzUVHR4f333+fnp4eurq6GB0dFU7hX7p+FZuCmZkZK1euZHp6Gg8PD0xNTZk3bx4zMzMU\nFBRgYGCAi4sLFy5cwNDQUHD9JBIJK1euxNfXFycnJ5GNkJeXBzyxZLe0tDAwMICnp6dgEdy4cQNj\nY2NaW1tpaGigt7eX8PBwOjs7BTfh3LlzYtZcWlrK2NiYMEStWrWK8PBwpqamCA4OxszMjMzMTK5e\nvSoIRpqamnR3dwv
SUXNzM87OzvT19fHSSy/h6ekp1IyqZpinpycLFiyguLiY/fv3o6urS2xsLLq6\nuri4uIg5+l//+ldKSkqIjIxkamqKpqYmioqKuHz5Ml5eXjQ0NJCRkUFCQgKXL18WAq7S0lLKysrw\n9/enqakJY2Njbt++jVQqFY8xJSWF2tpa7OzsmJ6exsTEhLa2NmJiYpDL5Vy4cAFfX1+BvDc2Nqaz\ns5OOjg7a2tooLy8XkmEV1i4yMpLnnnuOK1euCDZAU1MTXl5eGBkZ4efnJwxlJiYmuLq6UlhYyJEj\nR/jrX/9KbW0tr7zyCvv27cPFxYWlS5fS1dXFvXv3iI2NZdWqVVhYWGBgYEBBQQHOzs4UFxdTVFTE\n9PQ0ALm5uXz33XesXr1aAIIBYmJiGBwcZOvWrdTX12NqaoqxsTFr1qzBwcFB4PgjIyMJCQnB19eX\n2NhYoZSUyWQcOnSI8+fPMzo6SnZ2NqamplRWVpKYmMjU1BQmJiaEhYXx6aefCkGUj48PO3bsYHBw\nkE8//RQfHx+Ki4s5dOgQgBCMGRgYkJ2dzdjYGI8ePaK/v59bt27R1dWFvb09q1evprCwkNraWiwt\nLTl8+DBlZWVUVFQIeE9eXp7Q2PyS9avQKRw8ePDvqlFXQECAkOLeuHGDhIQEkpOTiYuLE+Qf1RtP\nJRZR+ehVYRxOTk6cPn0aJycnNDQ0cHV15fTp06xZs4a0tDQ8PDxQV1dnxYoVWFhYUFtbi1KpFGPO\n+vp69PX1MTU1RV1dnQULFqClpYWhoSE///wzy5cvZ2ZmRtB8CgsLBT5L5Y1XYc/37duHg4MDcrmc\nlJQUzM3NUSqVaGtrI5fLgSckpx9++IHe3l6MjY2RSCQ4OjpiaWkpFHCqkV9ISAhlZWWsWbOGrKws\n/Pz8hHbfysqKlpYWYbM+dOgQkZGRwuyk8h5cu3aNuLg4RkdHSUxMRCKRcPLkScFTcHJyore3l0WL\nFqGrq8vs2bOpr6/nzp07wsLb2dmJiYkJnZ2daGpqolAoGBkZQalUCgFWe3s7MpmM48ePMzo6ysjI\niBAkTU5Ocv78eXx9fbGysqKzs5OcnByef/55Kioq2LZtGx4eHhgYGFBUVISbmxuNjY10dXVRWVnJ\nnTt3+NOf/oSBgQGPHj0iPT2d6upqNm3axNmzZ5kzZw6bN2+mv7+fK1eusHbt2n8JW7GxsREbd1tb\nG5VfZ2cAACAASURBVIWFhUIIdffuXaRSKdbW1vT19ZGSkoKrqyuGhoakp6cLte1PP/2EXC4XQT5m\nZma4u7tTUFCAkZERjY2NjI+PY2Njg7+/P998841gdJqZmXHkyBExmWlra0Mmk+Hq6kpmZqagPD9+\n/BhnZ2fOnDmDXC5nwYIFZGRk4OPjg46ODjKZjFOnTgGI/BNbW1sqKyvR0dERzM+0tDTy8/P/++gU\npqenmTt3rgBs7N+/n7i4OGJiYgTlNysrC4lEws2bN1mxYoXo8KooQXfu3KG4uJiGhgYBLbG0tMTA\nwECwDR8+fEhsbCxhYWGUl5dTWlrKpUuXkEgkTE9P89577wmakKWlpQhuGRkZoa+vT+RKqO6KHh4e\n1NXVERAQIO7Wjo6OPP/88/zmN78hLCyMd955h2XLlpGXl8fKlSsJDAxk3bp1uLi40N/fj6mpqehk\n29vbi/FsdHQ0NjY24kiiOhuq7nLvvPMOzz//PE1NTdjb29Pf3y+ox1euXEFLS4tFixaJ0JesrCzB\njtyxYwfOzs7Mnz+fxsZGRkZGWLt2LZ988gkffvghQ0NDdHd3s3HjRpqbmxkZGaGuro7ly5fj6upK\ncHAwfn5+LFy4UKDhvvnmG9TU1Fi+fDkJCQk0NjbyyiuvIJVKOXDgABYWFiQnJ9PZ2cnx48cZGxtj\n7dq1ZGVlcfz4cWEwMjExoaqqih9++IHc3FzS09NpbW2lpKQEMzMzzMzMxNEtKyuLM2fOMDo6yvHj\nx1m6dCmFhYVERkYK0E1Dw5P8k61bt6Kvry+EcKdPnxb0qqioKHbs2EF7ezt37twR6kBfX1+SkpJQ\nKBTs378fhUIhXtfJyUngCZC1u7ub9PR0bGxsqKysJDo6mpycHCwsLHBzc8PKyoqpqSlefPFFAgMD\nmZiYoLOzk+eee46RkRH+8Ic/EB4ejq+vrzBwrVq1ip6eHrS1tQXgJzs7m9raWgwNDUWKma2tLV98\n8YWwtHd3d3P06FGCg4NxcXFh1qxZ1NbWCkDQL1m/ikrhu++++7uXlxfj4+MEBwfz+PFjcX6fnp5G\nLpcL0MX4+Di1tbVoaWlhb28PIM5v6enpgrv39ddfY2dnR3l5OeHh4fT39wu7c0dHB87Ozjx69Ego\nxrS1tXF3d2d0dBQzMzM6OjpEhZCamkpERAQNDQ3cunWLqKgoenp6iIiIwNTUFKVSKR5PVlYWsbGx\nFBQUMDo6iq+vL1VVVQIqonJbqlgL2traZGRkUFZWRnh4OC4uLtjZ2aFUKnF2dhaOOhU27OrVq/j7\n+5OQkIC7uzsNDQ3k5eURHx9PUlISenp6XLp0id7eXoaHhxkfH8ff3x9/f3+CgoLQ1NTE2dmZK1eu\nsHjxYu7fv4+ZmRkbNmwQku6JiQlCQkLo7+8XQSxGRkYi/8Ha2lroAnR0dETsXnR0NHl5eaipqTEy\nMkJmZiZtbW0cPXpUaDkmJydxcXHhzp07wBP0ekREBGVlZSQnJ+Ps7MyCBQuoq6tj1qxZwmPh4OBA\nf38/s2bNwt/fHwsLCywsLHBycuL27dvs2LGDqqoqIiMjkUqlXLx4UViTL126xJo1a0RlZ2xsLGze\nTU1NWFlZCbDp8PAwGhoaxMTE0NbWJgA2Y2Nj/P73v0ehUCCVSrGysuLQoUMie2J6epq6ujpiY2PR\n09MjMjKS8+fPC6mzr68v6urqeHt7Y2lpSUFBgXgfq8KONDU1haLRxMSEN954g5aWFqampujs7EQu\nlwsiWV9fHyYmJpSXl+Ph4YFCoaC7u5vi4mKcnJyEUU8VtXjixAkUCsV/n0qhq6sLLy8vkfw0OTnJ\n9evXMTc3p7+/H3t7ezZu3MjLL78sZvL19fU4Ozuzd+9esrOzOX/+vOgZZGZmAk9K7gULFpCTk0NA\nQAAdHR20tLTw8OFDbt++jbq6OiEhIZSXlwvnnqosrqur4/HjxwKSsnbtWoqLiwFE99nCwgJjY2PU\n1NRob2+nqKhIkIUSExMpKyujurpapAXdunWLw4cP09vbS3JyMnZ2duJsDU9UbKqkq7KyMnJzc8nP\nz8fZ2Znh4WFBgW5ra8PKyoqzZ8/i4eFBeHg4HR0dKBQK0tPThU1cQ0OD3/72t5SUlAiwycGDBzl4\n8CBFRUV8//33IqRGBYZta2vjwYMHFBcXExcXh1wuR01NDX19fbq6usjMzOT69esCQjM6OkpJSQm3\nbt2itLSUoqIivv32W0ZGRnB3d+fll19m69at9PT04Ovry8DAABoaGoSHh6Oj
o0NNTQ1dXV20tDwJ\nEPPy8hLcw4GBASF9njVrFoGBgSJ3ITQ0FHV1dUZGRoiLi2N6ehoXFxekUinffvstf/vb3xgaGsLU\n1BR4IgyzsrKisrKSDz/8kO7ubs6dO4ezs7NoFHd1dTE+Pk5DQwP6+vqoq6tja2vLihUr2LJli5j1\nX7x4EXV1dQA2btwoRGoqSKqK3vXjjz8K0vbGjRsFgEVXV5fAwEDmzp1LfHw8MzMz6Onp4ebmJjQW\n5ubmXL58mfj4eFxdXfHy8hIA1+zsbFxcXKivr8fAwID6+nphbV+3bp2ISZienhYCMtW06pesX8Wm\noKLZenl5ERwcTG5urrCrmpmZ8dVXX6Gvr09UVBStra1cvHgRTU1NJBIJK1asQFtbGwMDA9avX4+V\nlRVDQ0MAeHt7o6enh7OzMw0NDSxatIiFCxdiZWWFt7c3SUlJSCQS1q9fL8wkSUlJKJVKXnzxRfLz\n8ykuLmZ8fJxvvvlGCGEsLS2ZmpoiJycHQ0NDIdJRiUVu375NUlISmzZtEqKs4OBgdu7cydjYmADQ\nNjY2Mjg4KJxxenp6DA4O0tbWxpUrV2hubiYtLY3R0VF+97vfYW5uTnBwsJDM2tnZYW9vj5OTE5cv\nX+bll1/m4cOHomHm5OQkOvuurq4kJSUJgc6NGzeoqakhPj6eqakphoeHsbW1JT4+Hnt7e0xMTGhs\nbKSvr4+SkhJ6enqENFnFOHjw4AEODg7CRJaRkUFYWBgODg6sXr2a559/XlR6b7/9NmvWrCEwMFD0\nbdavX4+1tTWzZ88WiruYmBiuXLki0O12dnaYmprywgsvMDExQX5+PmVlZRw6dEho+qOioqiqqsLH\nx4fx8XH+4z/+g1u3brFhwwaOHDkCPCEv3b9/XxwJoqOjOXHiBMuXL2d0dJQDBw6QkpKCs7Mzr7zy\nihiD29vbExUVRVdXF2lpaVy4cIE5c+aITezzzz9nYGBA8BCMjY1pb2/n7t277N+/nyVLluDm5sa7\n775LeHg4OTk5XL58mZiYGCQSCVlZWWRkZHDnzh2ysrLE8/Diiy8SHh4uLPaGhob4+vri4uKCoaGh\nkK4PDg4KYZ21tTUdHR0sWbKEnp4eVq1ahUQiwcLC4v9Vo/FXsSloa2vT19dHYGAgUqmUoKAgfH19\n+fnnnwkICKCvr48jR44wODiInZ0d69evp6enhzNnziCTyVi7di36+voMDg6SlZUl7g6enp5cvHiR\nBw8e0Nvbi4eHh4hBi4yM5OzZs/T29opxU0JCgjBljY2NsWzZMkxMTJBIJFy7dg1dXV3giTrO3d0d\nf39/BgcHkUgkZGZmCsWbQqEgMjKSvr4+0SdYsWIFP/74o0hhbm1tRalUUl9fT21tLfDk7lBbW4tC\noRA04YiICDw9PQU6PigoCAMDAxobG4UjVDVBkMvlqKurI5VKaW9vp6uri3379rFjxw7U1dU5dOgQ\nxsbGzMzMsGLFCiGJLSoqEs1DDQ0NNmzYQGxsLG1tbQQFBQnIaUBAANPT01hYWAgEf3FxMYODg5ib\nm+Pg4EBrayuzZs3C0tISLS0t9PX1MTMzY2xsjNOnT6OhoYGnpyf3798nIyOD3t5eQY2GJypETU1N\n3N3dkcvlgqT1yiuv0NnZKbwEFhYWovF56dIldHV1efz4MRKJREywDhw4wG9/+1sA7t+/z86dOwWV\nOjs7mzNnzjA+Ps7169eFjyQwMJCioiLhV5DL5Vy8eJGmpiYBGNbS0hL8h/Xr1+Ph4cH4+Dh5eXlC\n2aqjo0N1dbUIC66pqREVaVxcHFZWVoIZERkZyY4dO3j22Wf58ccfgSfS/1OnTjE8PExra6uYWri6\nutLb28u1a9fEserVV1/FysoKQLBHPvnkE+7cucPMzAzl5eUcOHDgF1+Pv4qewieffPJ3W1tb4YD0\n8PDA3Nwcc3NzDA0NKSgowNDQUATBqjrQqgs5IiKCoqIixsbGiIiIICgoiM8//xw/Pz+MjIxoa2sT\nTZ2Ojg4MDAy4d+8ejY2NIjbN2dmZuXPn4ubmxt69e5kzZw55eXn/4mXPysqiqKiIb775hszMTFJT\nUxkaGiI4OJi8vDwRdafiCVpbW1NbW0t3dzdeXl4iX0LlATAzM8PZ2VlwIp555hnS0tKIiooSnEFD\nQ0OKi4tRU1Nj1qxZ5OXlCcqQytDl4eGBo6OjKDNnZmZwd3cXLkCVcUYulyOVSmlubkZdXZ3t27ej\npaXF48ePcXV15cyZM7z33nuiZ6CmpoZCocDU1BQXFxcxjent7aW7uxs1NTUGBwfJyckhIiJCjIoX\nLlzI1NQUFRUV5OTkUFhYSGdnJ9nZ2dy+fRtjY2MSEhLQ09NjZGSEpqYm6urquH//Ptu2bUMul5Oe\nns7Zs2eFNfvhw4doaGgwPj6OtbU1Dx48oLy8/F+i5ysqKoiPj+fu3btoamqKyc3Vq1cFi1HVz6io\nqOCFF16gqqoKBwcHmpubiYmJEVQrldZAS0uL8vJyNmzYwIEDB1izZg03btxAV1eX5ORkLCwskEgk\nSKVSrl27RlVVFUuXLqW4uBg/Pz96enrIzs4mJSUFIyMjId8fHBzEw8ODkydPYmpqyvDwMI2NjWho\naFBWVsbixYuRyWRiIjUxMUF8fDxXrlyhoaFBBOGovCmDg4P09fVx/vx54IkuQ7UZqUKDrl69+t+n\npzA+Pi5GT/Ak2eny5csMDw/zwQcfiEZPeHg4CoWCjz/+GKVSSWxsLIODg9TV1VFfX09XVxf5+fmc\nPHkSeFLm29jYYG9vz9mzZxkcHMTIyIiRkREsLCyE4Mfb21uImVQYrDt37jA8PMySJUvo6Ohgz549\nhIQ8UYgqFArGxsaIj4/H2dlZcA8CAgI4ceIE+vr6jIyMcOTIEbZt2yYgpVpaWhgbG3P58mVcXV25\ndesW+vr6osRNSUkRQbCZmZlcvnwZpVIpEN+1tbUYGBigpqYm8hZmz55NdHQ0W7Zswdvbmx9//JGS\nkhLS09PFrL6vr08g4dra2khKSmLevHncu3cPfX191qxZQ2lpKSEhIezatYuioiKioqLw9vZmfHxc\ncB7Onj0rAmP8/f3p7u4WSVE3btzA3Nyc6elpTp06RVZWFtXV1WhqahIXF0d+fj5z584lMTERJycn\nLCwscHR0xMbGhoqKCtE0tre3Z3R0lJmZGZycnNDS0sLOzk78fX9/f4qLi6mqqiIoKEjg81WBtU1N\nTURFRVFeXk5CQoKowpydnXF2dha5nJOTk6SlpZGYmIimpibDw8MEBgZy584djh07hqurKyEhIWRn\nZ2NsbEx/fz+bNm1iYmICb29v4S9Q4dAMDQ05duwY9vb2tLe3Y2VlhZGRERcvXqStrY25c+cSERHB\n66+/TkxMDKWlpVRWVgp/TWVlJUZGRiQkJIhr4tSpU8ybNw8bGxusrKyYnJxEqVTy7LPPIpPJuHTp\nEosXLyY8PFyI0pYtW4a7uztjY2P
cvXtXaH2uXr36i6/HX8WmIJFISEpKwsbGBnV1dVEGX79+HalU\nyksvvURQUBCurq58/fXXzJo1S5S/UqlUoL1u3ryJUqlk8eLFwJNEYG1tbSYnJwVrT9VBl0qlzJ8/\nn6KiIgoKCgTb0cPDg97eXqamplBXV6eyspKamhqMjIwECGRoaEgwGjo7O0lPTxewDD8/PxE2EhUV\nxcKFC5k1axbff/89X3zxBSYmJuLC2LdvH/b29mzZsgVAnBXz8vKYnp4mJCRESL5TU1Pp7u4WXIZ7\n9+4RHByMoaEh1dXV3L17l5MnT+Lj4yP0BpWVlcKo5OLiQmVlJXl5eQQFBREQEICmpiaDg4NUV1fT\n19cnxp6GhoY0NzeTkJCApqYmHh4eAOIN//7775OWloaOjo6YNoSFhQn7tlwuJz4+HmNjY9zd3Tly\n5AiTk5MiTXtwcJDp6WkePXrEvXv36O3tFdmg09PT3Lhxg/b2dt5++22hKVEJxVTru+++Y2pqivPn\nz3Pr1i2h0zA2NhbqwOnpaWFic3R0JDg4mNTUVPT19Xn33XcJCQkRz/NTTz1FXl4e2trazJkzB1dX\nV2pqatDQ0OD+/ftcu3ZNsBOcnJwEOHZoaEgocYuLixkZGeHUqVMcO3ZMgHh27txJWFiY6HGpQpFL\nS0uZN2+ekFUrlUpSUlIA6OzsxMLCgt27d5OZmSni+oqKiti7dy+PHj3CxMSE5ORkrl+/TkFBAZs3\nb6a2thZNTU3BqwB46qmnRNDuL1m/ik1BT0+Pnp4ebGxsmJycpLa2FisrKzQ1Nbl48SLnzp3j6aef\nJiUlhf379/P666+Lc3Z/f7+wMK9atYrR0VHOnTsHIJBZKra/KuFZVcJ1dnZSXl4uVIkmJiakpaWx\natUqNDU16erqEmExgGgCDQ0NCW1FbW0tJiYm+Pv7i6mAKvbu559/FvFjMpmMiIgI0tLSWLt2Ldeu\nXaOwsJD8/HwBhP3DH/7A/PnzcXR0ZM2aNQQEBCCTyXBzcxPYuY6ODtLS0nBwcCA4OBg7OzsaGxup\nqalh1qxZFBcXY2tri56eHtbW1iI0xdXVlYmJCfz8/MjIyODGjRtUVVXR3d3N119/TUJCAhYWFly/\nfh07OztsbGzE1EJPTw8bGxuBkF++fDk+Pj64uLiwbNkykafp6elJXV0dhYWFpKSksHDhQrq7u3F1\ndcXNzY3BwUHMzMzEsUeVzu3o6Mjbb78NPFHfTU1NoampKZLBq6urCQkJYenSpezbtw9vb2+Kiop4\n9tlnqampYXx8nPnz55OTk8PMzAzT09MkJyeLlHGAK1eucOvWLRHcqgLAvP/++2JyY29vT0BAAN7e\n3nz00UcoFApiYmLYsmUL7u7u9PT0UFNTw/DwsKAjFxQUEBsby48//khNTQ2rVq1CXV2duLg4jh07\nRnx8PG1tbdja2orcDnd3d/r7+2ltbeXatWsEBQUxMzMjUADwhNmxefNmpFIpixcvpqysDGdnZzZt\n2sTy5cvp7OxEKpXi6uoqgnPHx8dZv349zc3N/O1vf2N6epqenh5MTU2FavaXrH/LECWRSOqBQWAa\nmFIqlSESicQUOA04A/XAGqVS2fuf/Z6BgQH+8Y9/YG5uTkxMDPb29jg6OjIwMMDdu3cxMzPjyy+/\n5JlnnhGUIpUpR6FQiLP8999/z4EDB4Qu3cvLi2vXrqGhoYGzszNBQUFkZmaKkrGsrAxjY2O8vLz+\nJaEoIyOD4OBgamtrOXPmDJs3bxaafICgoCD6+vpITk5mzZo1TE5O4uDgQFpaGubm5gwMDKCpqUlM\nTIx4U3Z2dooIsKysLGGRVeHSAfbt2yc2DlNTU4E36+/vFwlNKkWjypehIkDZ2dlRWlqKq6srMzMz\nODs7i8Aca2trzp8/T0VFBQsWLKCtrY2AgACSkpJYv349qampWFtbo6GhgYaGhki0zsrKIjU1FXhC\nyd65cyc6OjrcvHkTPT09ocyMjY1lcnISdXV10dS8fv06y5YtY3x8nG3btjE2NsbExIQwXNnZ2fHV\nV1+xY8cO/Pz8KCkpAaChoYE//OEPtLe3c+nSJeLi4rh79y4ymQxvb2/s7OyYPXs2ZWVlHDt2jGXL\nluHr60tHR4dIBzt27Biampr/8jwvWrSIlpYW5HI5u3fv5u2332blypVUV1dz69YtjIyMOHLkCNHR\n0WzdupWbN2+Kyk8VF+/p6Ulrays6OjokJiayc+dOBgcHGR0dFbCX06dPc/jwYbKysvD19aWrq4vj\nx48jk8mYmprixIkTxMXFcfXqVd59910uXLggeBI3btzgrbfe4siRI0xNTQnb+tjYGKmpqYJAVVdX\nR0xMDOnp6aSlpaGlpcVzzz2HQqHAzMyMtWvX8sYbbxAVFUVnZyenTp3ijTfeYMWKFb/ouv7/o1KI\nUyqVgf+LJfMtIEWpVHoAKf/8+j9dZmZmREREiHm+jY0N/f39tLS0UFlZSXZ2NuHh4YJXODg4iFQq\n5f333yc2NpZHjx4xMTGBr6+v0AYA1NTUoKmpydTUFOnp6Rw7dkwQfj/++GP8/PyE8EMV29Xf3090\ndLTIc1RXV+fVV1/l4MGDgrRbWlpKQEAAc+fORalUMjg4iFKpRC6Xixm3k5MT+vr6ZGRk4OfnR3h4\nOO7u7ri5ueHp6SlgJRoaGri5uT15Mf4p+vHy8iIpKUkEyHp4eCCXy/H398fc3FycV0tLS0lISKCq\nqoqsrCySkpJE5mJZWRl5eXncvHmTc+fOYWxsLJiTjx49QkdHh7feeovq6mq2bNnC8PAwUqmUiIgI\nsrKymD17NhMTE9TU1AixzkcffUR5eTmbN2/mmWeeITExkY6ODsrKyvDz82PRokX09/ejr69PfHw8\nfn5+zJ8/n9HRUTIzM0UzVKlUkpaWhr29vZAvq3T6np6eNDY20tjYSHt7uxCO6ejoiGbslStXCAwM\nZOXKlcL8durUKZydnfnpp5+QSCRMTU2xcuVKBgcHAQTduaioCD8/P27evMm9e/dwd3eno6OD1NRU\nITp76qmnUCgU1NfX8/LLL2NtbY1MJsPc3BwvLy+6urpEwGx2djZRUVEYGxujq6vLunXrOHz4sJiS\nFRYWIpfLmZycxNXVlaGhId555x16e3u5c+cOERERuLi4EBkZKSTOAO7u7mRkZHD+/HlSUlIYHBwU\nKlhtbW1GRkbYtm0bWVlZmJubCziLqoeg4oa4uLggk8nE8/tL1v+J48Ny4Pg/Pz8O/Jfb09jYGDdu\n3MDAwICJiQmys7PJzc3FxsaGlpYW9u3bR05ODtra2qirqwsmoUQiwc7OjhdeeIGRkRE8PT3Jzc0V\ngRrd3d0ikVhXV1dASjIzM/H29iY3N5ehoSHc3d158OABVVVVLFu2jKtXr9LQ0MDKlStJSkrigw8+\nIDo6WoBjc3JyuH79Ojk5OUJP//XXX6Orq4ujoyMpKSk0NTVRWlrK3Llz+fTTT4Uj7u7du+Tl
5REY\nGEhzc7PQ7cMTia+amtq/lIo6OjqcP3+eY8eOMTU1JUaWbm5uPPPMMxgZGRETE0NXVxdXr14lOjqa\nc+fOMWfOHJH6HBgYyNTUFKdPn6a8vJyenh6kUine3t68/vrruLq6Mjo6KtSivr6+HDx4kNLSUvbv\n3y9m9WNjY7S2torXqKqqCh0dHd59911MTU359ttvCQwMRENDA0tLS2HRDgoKwtraWvxtCwsLli9f\nTlJSEnv37kVDQ0M0GkdHRzl27Bj19fU0NDRgZ2cnksBUjsfly5fj7OwsAnTLy8vR09MTXou33nqL\n0NBQQfcGxCh4+fLlmJqasm7dOjw9PampqSEpKQkfHx9aWlooLCwUpOwffviBoKAgPvnkExETl5WV\nxZ07d0Se47vvvsvq1asZGxujsLCQ+vp6nnnmGcrKyggMDGTr1q3IZDIGBwcZGhqivr6ehIQE4uPj\nsbS0pKKigsrKSu7evUtYWJiYHixZsgS5XE5UVBT29vasWLGCM2fOCACMSqeTmJjIoUOHuH37NtnZ\n2UxNTeHr68u5c+eEw1fVqP6l69/dFJTAbYlEUiiRSJ7/5/eslEpl2z8/bwes/nf/USKRPC+RSAok\nEkmBSoqrUCiYmJggMDCQ4OBggdz+8ssvWb58OUeOHCEoKIiBgQEaGxs5evQoZWVlouytqanhN7/5\njQhera2tFeaQ9evXM3fuXD777DN0dHREo1JbW5uGhgZBw8nLy2Px4sWCGHTr1i0hUS4vLweeRIWp\ncgC1tbVRKpWEhYWxfft2MjIy0NDQEFLVyclJgoODefToEWpqajz77LNoa2vT3d2Njo4Ofn5+IipM\nNTJVHSFUc++tW7cSEhJCa2srra2txMTE4OfnR15eHufOnWP//v0MDAzQ399PVVUVcrmc8fFxjIyM\niI+PZ3BwUCRY9/f38+yzz3L69GksLS3FHfn+/fvk5+dTU1PD3Llzqa+vx87Ojo6ODrKzszE0NCQw\nMBBvb2/hD1Fh0QoKCti3bx+2trZUV1dz8eJFcnNzWbhwIQ0NDXz55ZdcvHgRExMTzMzM2L59O+vW\nrWN4eJj4+HhcXFzERebr6ysApY6Ojuzfv5/PPvuMnTt3UlxczM2bN8VjunnzJsPDw0RFReHj40Nz\nczMWFhasW7eOpqYmFAoFlpaWACxbtozh4WHRrzI0NOTChQskJCRgbW1NXV0dlpaW/PGPf6S4uJj6\n+no2bNhAUlIStra2tLa20tXVxTvvvENERASzZ88G4PHjx4SGhhIfHy9s8rdv3+bEiRO8+eabPHr0\niF27dmFubo6HhwcvvvgiUVFR+Pr6UlNTg4GBAbm5uVRUVIjJDsDXX3+Nu7s79fX1eHt7c/LkSUJC\nQoiKisLCwoL+/n6MjIy4d+8ecXFxvPfee7i6utLS0sKZM2eIjo5mcnISHR0dpFKpmHD9kvXvbgpz\nlEplILAI+KNEIon+X3+ofIJ1+t9SlZRK5XdKpTLkn30I8vLycHBwwMbGRqC2PD09SUtL44cffmBy\ncpKZmRl2796Nnp4eLS0tbNq0ScSg1dfXk5iYyPfffy9AlwkJCZSXl2NgYEBZWRkymYyNGzeio6ND\nW1sbDg4O+Pj40NTUxOjoKKWlpULuPD09LbIRVReJKrLOx8cHe3t78vPzKSgooLS0lIqKChEzp2IU\njo2NUVBQgIaGBuvXr2doaIjJyUn6+vpoaGjAycmJ1tZWxsfHgSemp4CAAKampmhpaSEyMpL2mMYD\nlQAAIABJREFU9nays7OpqakRYTn19fWUlpaiVCqF49DBwQFfX1+GhoZEdmVkZCSpqaki8l5lDd+1\na5dgX/r5+SGRSEQ6cWFhIadOnWL58uW4u7tjb2/P66+/TklJCZ6enigUCmxsbOjt7UVfX59Lly6R\nmJgozFoqk5HKD7Jv3z6MjIwICwvj97//Pb6+vhgaGrJjxw4SExPR09NjcnKSl156CYDk5GTBL3R2\ndkahUCCTyZg1axaGhoYsW7YMFxcXDAwMRLCqCsc2d+5c1NTUePPNN0W1pcrUuHHjBhkZGSgUChIS\nEsjOzmbNmjXU19djYmKCjo4OYWFhWFpaiumQCi2nIk6vXr1aWOpV4iVLS0tOnDjB66+/zuTkpIjF\n27NnDyUlJSQnJ7N48WL27NlDdna2qKAUCgW6urpMT08zZ84cdHR0+OGHH4QxbHJykqeeeoq1a9dS\nWVnJm2++KY4gqrzVjo4OtmzZwubNm0lOThauSFWWqko+vn//ftHM/iXr39oUlEplyz8/dgAXgTDg\nsUQisQH458eO//JBqKmxd+9epqamhITUxsaGDRs2UFZWJjq/MTExtLa24u7uzuLFi0X6sGpc+cUX\nX7Bs2TK0tLQAyMzMpKioCHV1dWZmZjh//jzu7u4CLV5TU4NMJhOKxMHBQZH2MzAwQFlZGS0tLbz6\n6qt4eHgIx112djYaGhrEx8fz+PFj0Vjz8fHB1NSUtWvXEhERwdy5cwUVWEVQunjxIhYWFmIKorIH\nA4SHh3P16lUWLlyIUqnk0KFDjI6OcuXKFezs7KitrcXT0xM1NTUxuSgvLycgIIDh4WFMTU0ZHx8X\nBqPHjx8TFhbGvXv3eO211+jr68Pa2hpnZ2fB7ispKcHa2prR0VGMjIwIDg7m2rVrVFZWCmFScnIy\nAQEBpKWlYWZmRnp6OlNTU4yPj7N48WJ0dHREg3VgYICZmRksLS05evQoYWFhODk5sXjxYoqKitDW\n1qampoaamho+//xzQRT6/PPPAVi5ciXFxcXs3LkTOzs73n77bUpKSli+fDmRkZGEhYUJ519jYyMK\nhYLTp09jYmIi4uRv3rwpGnuqyZSHhwfR0dFs2LABOzs7QkNDhZnt8ePHSKVSUlNTqaqqEtDa3t5e\n0RNQxQeqUqBURxkXFxcsLS356KOP2LBhA+3t7aIHlZiYyPbt27l27Rq2trYsX76cjo4ODh06xODg\nIBUVFaJqCQ4OZsGCBUxNTQGgrq7OqlWrmJ6eRl1dnfr6eiwtLdm9ezeLFi1CoVAI0VV2djZyuVxk\nSS5evJiZmRm2b99OYGAgPj4+qKn98kv9//OmIJFI9CUSiYHqc2A+8BC4DGz85z/bCFz6r36XiYkJ\n169fZ2xsjD//+c/C2NTa2oqHhwf19fXiDhUfH4+WlhZ+fn4MDAxgZWVFSkoKCoWCl156iZs3b3L5\n8mXgCTr+1VdfxdLSksePH4vYrzVr1gh7bH9/P1paWsTGxrJt2zZkMhmZmZlYWFgQEBDA6OgoSUlJ\ndHd3C5mzQqHA3NychoYGfHx8CAkJYe3atdTV1fHgwQP27NnD4cOHOXXqFPPnzxcvqKoXoFIzAgK4\nofq92tra5Ofnk5CQgI+PDx4eHkRGRvLw4UMsLS3p7u6mvr6eyMhIfH19WbVqFc3Nzbi6unLkyBH6\n+vro7+8XxB8VX+Evf/kLjo6O9Pf3s379ehISEvj
ss8/Q0tJienqa3bt309raSk5OjgCJuLi4EBQU\nhK2trRitqvQekZGRLFq0iI6ODq5fv46RkZEIfFFJbgsKCqioqKC6uhozMzNMTU2JiIigtraWsLAw\nIaDKy8sTBqMjR45gbGyMt7c3Dg4OfPvtt/T19XHx4kVRobz//vsivEbVV3j8+DG+vr6oqamhra3N\n2NgYampqIhi4p6eH06dP884779DT0yN8LU5OTqSkpODj40N+fr4A2GZmZrJp0yZsbGxoa2sjIyOD\nhQsXYmhoyOXLl7l+/TrwJBTWxMSE0NBQdHR0cHJyYteuXTg6OhIaGsqnn36Kq6srsbGxFBUVcfDg\nQdG/UaVD19fX09nZiba2toCsJCQkUFdXR19fHwUFBaSkpPCPf/yDdevWkZycjEwm4+rVq+jq6tLc\n3Ex+fr44KpmYmGBjY0NqaiouLi5oaWkJrckvWf9OpWAFZEkkkmLgHnBVqVTeAD4C5kkkkiog8Z9f\n/6erv7+fxYsX09bWhpOTE01NTfT19YmO7vLly8XdxNjYWJB+VSATVTDs0aNHkclkYtarEoWozDXf\nfvstCoVC4ORV9lcvLy8ePXokSsWamhpmZmZob2/nueeeE+WlakrwwgsvcO7cOTw8PCgsLKS7u1vE\n1quYj93d3SxdulRoAC5cuEBfXx/q6urY2dnh7u7OzMwMUqlU8BtV3D9V6pORkRF3795FTU2NoKAg\nkpOTKSsrw8XFBaVSiZ2dHVNTU8jlcsrKyoiJiRGhsgkJCfj6+orU6FmzZuHk5ISJiQl6enr8/PPP\nREVFMXv2bORyOevXr6ejo0Po8b///ntsbGxQKpXk5OQQExPDxx9/jL6+vnh8Fy9eZHp6Gh0dHRob\nGwWjUoWE/+CDD9DS0mLr1q1s376doqIivvvuO+FOVZGEpFKpaDS+9957VFVVERISQkVFBYsWLRKR\n8GfPnuWDDz4gMjKSyspKysrKaG1tZXp6WkTS2dvbs2rVKiH/VQnZLC0tefvtt3FwcCAjI4PQ0FBq\namo4fvw4JSUlVFdXs2TJEoaHh8nOzsbGxoa6ujq++eYb0Rw8duwYd+7cwcfHR7wXpFIpMpmMF198\nkYKCAjo7O1m5ciVSqZSNGzdSVlZGYmIipqamlJSUsHDhQkJCQjA2NhYaB3t7exEwFBkZCTyZcMXE\nxIjsEJVlurCwEIlEQmlpKcPDwwwPD4ufhYaGYmhoyMDAAG+99RYaGhrk5uYKm/svXf9Dc/6f9T/r\n/57134fm7OTkxJo1a4Q0NT8/n5UrV5KRkYGTkxPT09NYWlpSXV3NvHnzuHXrFqtXr6asrIyuri6q\nqqowNDQkNDSU2tpaEUX+zjvv8Lvf/Y6amhpKS0vp6OhAR0cHZ2dnJicnaW5uRiaTCWtxZWWlMNS4\nurri5ORESUkJjo6O4m7h7e3N8ePHCQgIEIad4eFhcnJymJ6eZmJigmeffZb8/Hymp6cJCgqioqJC\nZPoplUoWLFiAVCrl7Nmz2NraMnv2bGJiYti0aRO+vr5Cr6EKOQU4fvw4zzzzDEqlkry8PCQSiaia\nnJycOHnyJH5+fgQGBnLv3j2ys7PZvHmzMAUNDw+zd+9eFi9eLNR4a9eu5ebNm4SFhZGbmytoVs88\n8wyFhYVIpVIhuAkPD8fa2ppFixZx9OhR0axzdHTE19cXW1tbmpqaOHfuHL/5zW8wMzPDwMCAkydP\niiyL9vZ2kag0MDAgKhHV2Xj9+vUCTNLR0SEel1Kp5Oeff+aTTz5hamoKIyMjTExMKC0txdramp6e\nHh48eCCoxarGbVhYGLW1tfz1r3+lvLxckJ/CwsIEvk9FiTI2Nhb28M2bNwvZ9VdffcXTTz+Nm5sb\n1dXV+Pv7k5iYSGVlJVu2bCE9PZ3m5mZhDlPF7mVnZzM+Pk54eDgVFRWEhobS2toqQnomJyfx9vYW\n9CTV66nSRuzcuRMLCwuam5uxtLSkra2NOXPmcPnyZYHLs7W1ZXh4GHNzc9EIVvUgGhoa0NXVRUND\ng8nJSaqrq7l06b88yQO/kk1BTU0NQ0NDvL29hbQ0Ly+P8fFxzM3NRZRbdHQ0Fy9exMfHB01NTUpL\nS7GxscHd3R2pVCrOZaq0IV9fX7744gtcXFwEcvzOnTsUFBSIBlllZSUaGhrs2rWL8+fPk5qayjff\nfMP333+PQqHgzp07ODk5Ce87PLE4q+SjFhYWXL58GTs7OyF4GhkZ4eHDh2zfvl2cUwsKCli0aBFT\nU1P09PSQlpZGcHCw8N7Dkxj2gYEBIbq6fPkyPj4+LF26FDc3NzIzM7lz5w7T09OYmZnh4eEhAnQc\nHR3Jzs7GysoKW1tbdHV1ycnJITU1lVdffVW8cR0dHRkZGRFOS29vbzG2ff7555HL5Zw7dw65XE5t\nbS26urq8+uqrVFZW4ubmxqlTp3Bzc+PcuXNoamoSHx/PyZMn+eMf/4hMJuPGjRuUlZVhamrKrVu3\nWLdunSivXV1duX//Pk5OTsyfP19AWR4+fCh8DZ6enty4cUMwDp566ikCAgJwcHDgwYMHZGZm8vHH\nH/PFF1+gra2Nnp4e6urqLF26lJGRESYnJ5mYmBBkYxUYZf/+/RgaGnL//n1mZmbw8vIiPz+fpqYm\nGhsbhSO3o6OD3bt388MPP/Duu++Sk5PD8PAwlpaW+Pr6kp6ezpYtW1C5i1Ub/+joKLm5uVhYWFBa\nWkpcXBwXLlxgenqa2NhY8vPzRfaoo6MjYWFhaGlp4eLigrW1Nffu3RMSbHiSZm1qaoq2tjampqYC\npuLi4oK7uzsVFRWoqamhpqbG0NAQS5cuFaHGMpkMR0dHJBIJP/30kxCR/dJN4Vdhnd6zZ8/f58yZ\nQ09PD35+ftTX16OpqYmFhQV3795FLpdjbGzM3bt30dPTIzQ0FHNzc2FDVgFDzczMCAsL48SJE9y+\nfVuMsjo7O/Hz82P37t309/eLjEN7e3vu37+PlZUVjo6OnDhxgpGREaKjozE2NubgwYO4urri4+OD\nQqGgs7OTgoICJiYmUCqVbNq0iUuXLjF//nxh+VbJfQcGBpiamiIxMZGMjAxBdRoaGhJuPl9fXxFx\nn5ycLCyuMzMzSCQS1NXVKSsrQ0dHR8SWbdu2DX9/f9zc3ERQ68OHDwkMDMTNzY2MjAyROTB37lzG\nx8eFmOWll16isbGRuro6JBIJMpkMFxcX8vPzaWtrY3JyksbGRiYmJrC0tKSkpITbt28zNjaGp6en\naI6p8grc3Nyora0lNjaW3NxcQcrq7+/HwsICKysruru7xeg3JCSE0dFRAgICyMvLw9/fH319fYyN\njenr6+Pq1avi4jQxMSE6OhpdXV3h9rOyskIul1NdXY2GhoZIRWpqauLAgQO0tbVhb2+Pubk5nZ2d\nIhOhqKgIe3t7QkND8fHxIT09nc2bNzMxMUFdXR0LFizA2dkZGxsbNDQ0+Oyzz1BTU6OtrY2+vj4M\nDQ3Zsm
ULJSUlDA4OsnDhQkpKSsjLy+N3v/udsFJ3dHSIoNumpiZKSkpEHqe7u7vQxQwNDVFeXk5L\nSwunTp3i3XffZe3atbS1tdHQ0EBmZiZOTk7ExMQgk8k4ffo0VVVVPHz4UGRMNjU1CTao6rVavHix\nGBV3d3czOjrK1q1bkUgkFBUV8eDBg19knf5VbAoffvjh36Ojo4mNjaWkpISmpiZqa2uRSCTCGGNk\nZIS9vT0pKSkiot7W1paOjg5MTU1pbm7G19dXhJfevHmTgwcPMjY2RmlpqYg5W7NmDba2tkilUv7x\nj3/g5+fHunXr6OnpwcTEhKeffprW1lY6OjowNjYWLrupqSnMzMxIS0vjrbfeEhe9v78/OTk5gsLc\n0dGBp6enMGEplUpKSkoEtUhPT4+6ujpR7fT19WFubs6ZM2cIDQ1lbGyMkpISbG1tMTAwEI0ylX9/\namqKhoYGofd/+PAhTk5O2NjY4ODggIuLiwCcVlRUMDMzQ3NzM729vejo6DAyMkJPTw8uLi50d3eL\nyHNVx/7x48fU1dVhYWFBfX29wNkHBwejVCrp6+sjPDycqqoq2traeP7557l06RIymYyPP/6YmJgY\n8vPzmT9/PhoaGoKf0N/fT3t7O3PnziUgIEA4+MrLy8Wk5ObNm6xevVokJefm5tLb24uBgYFQC86a\nNUvoFFTpzCYmJjg6OgoU+7Fjx3jttde4e/cus2bN4qeffuLTTz+lubmZuro6PD09uXv3LoGBgeTn\n54sJTG1tLRYWFuKIqaGhwZUrV0hNTcXMzAx7e3v8/PwYGhri0aNHlJSUEBQUhLGxMXK5HC0tLfr7\n+wUlWxXX1tDQQFZWFo8ePaKoqEg83qqqKl577TWWLFmCubk5RkZGLFu2jD179mBmZoajoyNnz55F\nJpNhaWmJmZkZMzMzTExM4OPjI2Awtra2eHp6UlJSIghMjx8/Jj4+HlNTUzHiv3bt2i/aFH4VxwfV\nXfH27dvo6emhqamJr68vnZ2d7NmzhzfffBNLS0vBvVNlGR49epSlS5eira1NVVUVqampeHl5CQ99\ndXU1169fR1dXF6VSKcZTPj4+nD59mqCgINLT01m7di0///yzGDNNTU1haWnJ6Ogojx49YseOHWhr\na5OWlgY8CXtxcnIiMzNTJCnn5+fT3t4udOhLlixhYmKCS5cu0draKjz/EomEpqYmHj58yOTkJHPn\nzhU6+ujoaEpKSgQ3QiqVUlhYSFRUFDdv3mTNmjWMjIxgaWkpsg26urqIiIjA0tISOzs7jh49irW1\ntejB+Pr64uHhwaNHj/jss8+IiIjg2WefFYzF5557TkxxZmZmOH36NFZWVoSHh6Orq4uhoSEaGhqo\nqanR2tpKeHg427dv5y9/+QuXLl3io48+YsWKFeJxK5VKDh8+TENDA5WVlcTFxaGrq8sPP/xAZGQk\nVVVVmJmZIZPJmJiYoKKiAgsLC6EBUVdXR0tLi5aWFoFDLy8vZ+nSpSiVSoqLizEwMBBwlQMHDtDS\n0sKKFSvYs2cPTk5OQmRmZGQkzGYXLlxg9uzZtLe3i0Squro6/P39kUqlDA0NiSpEdQQIDw9n7969\nAr9+6dIlcXNSkaI0NTUpKiri/PnzrF69mpGRES5cuEB3d7c4EpeUlBAXF8f169f5y1/+gkQiwd7e\nHi0tLRobG7G3txcOSpVqdsmSJbS0tBATEyOs76rq2cnJic7OTuLi4oTNv7q6GrlcjlKpRE9PD29v\nb+rr69HV1aWvr09MS37J+lVYpzU0NGhtbRXz7eDgYPr6+mhra+Orr77Cz88PKysrES6ip6dHfX09\nfX19dHV1ceDAAXx8fAT0UpWys2/fPszMzBgfH0cul9Pb28vY2BinTp1CX19flPC7d+8W5baKMlRY\nWEhdXR1BQUF88803wjUIsGbNGiEhDgsLQ19fHxsbG2EBtrOzE3kEMpmMdevW4ebmRn9/PwqFgqCg\nIDw8PHBzc8PMzAwdHR0AtLS0RDKQTCbDycmJ0NBQGhsbkclkjI2N0dnZiUKh4MyZM1hZWbFu3Tom\nJycZGxsjJyeH0NBQfv75Z/Lz89HX1+fq1ats3bqVtWvXsmnTJmQyGd999x0lJSV0d3dz4MABPvjg\nA3R1dVFXV8fMzIzc3Fxee+01BgYGKC8vZ+HChbS3txMeHk5+fj719fXIZDKcnZ2FOKqnp4fU1FTm\nzZsnGBBKpZLa2lru3buHRCLh/v37tLW1MTAwQFVVlUhfTk5OFheyKpa+oaEBAwMDhoaGsLS0xNzc\nnHnz5vHWW28xODjIypUrqa2tZWpqiunpaUpKSnjw4AEWFhYCh2Zubk5XVxfwxO5uaGjIyZMnsbKy\nYvbs2YyNjbF06VK8vb3p6OigtLQUFxcXYcprbW0lMjKShIQEJicnSUxMRF1dXbwf4UlPIT4+nlmz\nZnHixAmmp6d54YUXhKBOqVSyceNGFi1aJL6nrq5OXV2dUIHev3+fsbExMjIyhJDNx8eHoaEhSkpK\naG9vR/H/sPeeYVGf69r3b2gzDL0OvUuVDgJKERUlFqyxx0TTTI/btdOWMdnJ2lnJSlnJippmjCZR\nY4kVEFREUJogKEov0svQh17n+WDmftd+3uN4d/bzfnizj3ffX9QRZYaZ//2/7us6z99ZXc3U1BT9\n/f2CGNbW1sbAwAD5+fmUlpaSnp7OxMSE4DtofCimpqbi5/B71h9iU9Bgrn18fBgYGCAnJ4fx8XEe\ne+wxAgMDcXJyEgy+K1eukJyczMjICAkJCeTl5TF//nwmJiYYHh5GoVAImaeHhwdBQUFirq6lpcXb\nb79Namoq7u7uGBoaoq+vj4ODA83Nzfj7+9Pf34+TkxM7duwgJiaG+/fvY2JigouLCzY2NgCcOHEC\nX19fRkdHcXFxYWJigsbGRhISElAqlSxatAhPT09WrVpFU1MTt27dor6+HgMDAyQSCUePHhWNyqtX\nr4q4eqVSyfT0NDdu3GB0dJRr164JyIuGmtTd3Y2VlRXDw8OC+Kynp0djYyN5eXkAhIWFsW7dOpHS\n/Nhjj1FRUUFcXJxwPWqEPuHh4aIKmJiYQEdHBxsbG3bu3CnyMdvb2zEwMOD69evI5XK+/PJLPvvs\nM8EcHBgYoKioiBUrVlBXV8fdu3dRqVRIpVIcHR05e/Ysa9asYfPmzVhZWZGamoqFhYVQZv4zEKSk\npISpqSm8vb2RSCR8/PHH7Nixgzt37ojJQldXF/n5+bz11lvs2rVLmIb09PT45JNPBFi3tbVVTF8M\nDQ0pKCggMDCQwMBAsrOziYiIoK+vT7g0p6amOHXqFOXl5fzjH/8QdKRDhw6RkpLC4OAgU1NTfPvt\nt3z33Xfi+WqoTZ9++im+vr7k5uby0Ucf0dDQQENDA9nZ2Whra2Nubk5GRgaOjo5UV1eTnp7O1NQU\nVlZWnDx5knnz5olp0+3bt0V2pJmZGa2trVRUVAhMgEZId+3aNXp
7ewV+b3BwkNzcXJqamjhw4AAD\nAwN4eXkJZefvWX+I44O+vj7h4eEiLj0pKYmDBw8il8tpbm5GIpHQ3d0tzlUdHR2kpqby2muvIZPJ\nBPxCE2W2YcMGvvrqK6qqqti1axd1dXWcPHmS3Nxcli5diq2trRCP+Pr60tvby+OPP87p06dRKpXY\n2tri7e2NSqUS8fIaTBjAk08+SUlJiQiRMTY2ZuvWraJyMTY2pq6uDpVKRUVFBWFhYUKhqTkr29vb\nY2JigkKhEMeHf7ZgSyQStm3bJt7klJQUZs+ezZo1aygpKRFn1unpaXp6enj11VcJCAhg//79uLu7\nU1NTI5yD9+/fx8/PT0TMBwcH4+DgQG1tLRcvXuStt95i7969GBkZ4ezszPPPP09dXR0VFRVkZmYi\nk8kICgoiKiqKixcv4urqilQqxcXFBUtLS3755Rd27tyJnp4eKSkpnDt3Dm9vb1auXMmBAweYO3cu\ncXFxgiepAdhoVmpqKrt27QIQbEtbW1vc3Ny4f/8+XV1dLFiwAHhoFIqKisLBwYG8vDxCQ0OJjY2l\npKREGIq+//57ZDIZPT09wh2oq6uLsbExixYtIicnh+npab7++mt6e3tZunQp27dvp7y8nKioKOzs\n7AgICKCkpESIpxobGxkeHmZqaor09HSBbI+Li+PGjRv8y7/8C8XFxZw5c4bTp09z4cIFTExMBKhW\nT0+PiYkJlixZwsmTJwkKCkImk+Hl5UVpaSlffvklw8PD4viguQkaGhoyb9480bDt6enhwYMHxMfH\n09DQICT0Bw4c4E9/+pOYYGiIWGlpaURERPx/bp3+L6+xsTGam5upq6vDw8ODnp4etm3bxpUrVzAw\nMEAqleLu7i7KqKKiIlQqlUCra5J9Hjx4QFRUlLAiP/roo6KDbm1tTXR0NG1tbWhra3PixAnmzJmD\nUqnE3d0dW1tbDAwMUCgU1NXV8c0336Cjo4OTkxPnz5+npaVFzL81oag9PT1MT0/T1dWFk5MTOTk5\nXL58WeDN9PX1efzxxzE3N6esrEwEoYSHhzM9Pc3169cpLS0VpaiNjQ0KhQIPDw+R4djY2IhMJkNX\nV1ewFKytrVEqlSJItaGhgeTkZIyMjFi2bJnIcdBsGJqut6mpKVNTUzg6OgqmoYeHBzk5OchkMhF0\nWlRURGVlJcPDw7zxxhuiCWtiYkJgYCBVVVUoFAqSkpKYmJjg6NGj/PDDD4yOjlJXV0dnZycBAQHc\nu3ePgYEBPDw8uHnzJvHx8bS1tZGZmUlnZyejo6PMnz+fnp4e0tPTgYfJzp2dnaJKcnBwoLGxUbgY\nn3nmGWxtbcXkJT09nZqaGuzt7dHV1RUW68nJSdFshYcGo4yMDLS0tLCwsCAoKIiIiAgRttLY2Eht\nbS1VVVUcO3aMjo4O8Zzj4uL47LPPkEql2Nvb88Ybb4hehcYcl5OTQ0dHB2+++SbFxcXo6uqye/du\n2tvbyc/PF/4ETYNT4xPRaCUqKirYt2+fgNoMDQ1hY2ODr68vra2tLFq0SOSOakbftra21NXVkZKS\nIprnGoWkhYWFcFLW19eL5/t71h9iUxgeHsbDw4OQkBCOHj2KsbExIyMj2NnZce3aNUZHR0Ueore3\nN9ra2syaNQszMzNyc3O5ffs2zz//PO3t7RQXFwupaGdnJw0NDaxbtw4DAwPRgPH19RWiFXt7e8rK\nyigsLGTRokUMDAzQ399PaGgocrmcpqYmNm3axOHDh4WppKamhs7OThFJt3btWqqqqpDL5SiVSlGi\nWllZidm3o6Mj69evFxASS0tLVq9eLaLn4CFLIDAwULz+6upq7t+/j4+Pj2j8SSQSli5dilwuRyKR\nUFhYSGZmJtu3b6epqYne3l60tbXFWCwpKQkfHx+mp6fJyclBT0+P9PR00tLSKCoqEsGszs7OvPHG\nG/j5+WFqasrly5eRy+Wkp6fT2dnJ1atXuX79OkNDQ0IKfeHCBW7fvs3777+PpaUlWlpaJCYmcuTI\nEezt7YVkOSIiguXLlwvQzczMDBkZGfz973+nvr5ejN4079mCBQsYHh7GwsKCmpoa1Go1arUaBwcH\nMRrOyMgQ1KHm5mbhi9D0aTTaArlcDjw8Ui1YsEAwIXV0dKitrRUIueLiYkHOzszMFIGt1dXVouqD\nh87D7u5uUbmkpaUxd+5c5HI5/f39wiEqkUh4/PHH0dHRobCwkJCQEN555x2srKywtrZmcnJSELM7\nOzv5y1/+gqWlJXZ2dgCisvDz88Pa2pqhoSFKSkro7OwUAUSTk5PEx8fz+uuv88QTT9CKCUVtAAAg\nAElEQVTb20tHRwcZGRm0trZiZGTEwMAAfn5+Yor2e9YfYlPQdLglEgmhoaEUFxdjY2PD/Pnz2bJl\nCzdu3EClUtHS0oKlpSVPPvkko6OjZGVl0dXVRXd3N/39/Tz11FNMT08L74OhoaFozmnyFOVyOWlp\nabzwwgtIpVJWr17NyMgI9+7d4+uvvwYgICBAGHycnZ3JzMzkySefFMKSmJgYLCwscHZ25sGDByJB\n6eTJk2zbto22tjaGh4cZGBigtraW3t5e4uPjqampYWJigs2bN4uStLW1Vfy/CoUCmUzGxMSEEPjY\n2try6KOPolAoGBkZ4fjx4+zdu5eioiJGRkaYN28eGRkZZGVlYW9vj4GBAUqlkq6uLtE41dCrrl+/\nzv3793nllVfw8vISnW9nZ2cGBwc5dOgQJ06cICoqii1bttDT08P8+fNJS0sT59OIiAhee+01ZmZm\nSExMJDc3lw0bNhAVFUVjYyOurq54eXkxd+5c9u7dyxdffCEyFgwNDVm+fDmfffYZMTExbN26VQit\nLl++DDxMcf7111/Zs2cP+vr6jI+PExwcTGdnJ5s3b+bTTz+lqalJ3MEfeeQRXF1d+eijj2htbaWv\nrw9zc3Nqa2tFEhM8PD6oVCq2bt2Kjo4OH330kRhrd3Z2olAosLW1ZcmSJUxPTxMfH8/atWt54YUX\niImJobS0FBMTE4qLi9HR0eHs2bMAbN26laGhIYyMjFiyZAlSqVTkU0gkEtra2ggPD0epVOLr60t7\nezuXL1+msbERpVJJQUEBSqVSJGprNoXh4WG0tbW5efMmycnJ5OXlCQr5xYsXOXLkCKdPnxYOS11d\nXUxMTPj4448ZGxtjcnJSHD80G8jvXX+InsLAwACVlZV0dHSgq6vLnj172LNnDyEhIaK0trCw4NKl\nS/T19TE9PS0895OTkzg4OGBtbc2ZM2cICgoSZ/+Ojg6WLl0qmnkRERGCzhsYGMjmzZvx8fGhvb1d\n5AmsXbuWoqIiIiMjqaqqIjY2lomJCeFkg4fjrbGxMWQyGRUVFYSGhjI5OYlCoSA0NJShoSFmZmbQ\n0dERUeya0Nzbt2+jUCiEA27OnDnk5uYCD5184+PjAiar4SRouvG2trZ0dHQglUqpr68XY6fnnntO\n0JR+/fVXTE1NiYiIID
MzU/AD7t69S0REBO3t7Xz22WfY2toKNZ3GcmxgYIC+vj4DAwNoaWkRFBRE\ndnY2jzzyCGZmZkxNTVFQUMCRI0eYN28ew8PD7Nq1i97eXpYtW0ZgYKBIxNJEpZ89e1YoTDVhMm+/\n/TZOTk6CPK0haAE4OjoKEpeLiwuOjo4UFRXh5OTEe++9x6uvvoqPjw9SqRSFQkFpaSkBAQEsXLgQ\nPT29/6ATqa6uFg3M+/fvExYWRlZWFq6urjg5OeHs7IylpSXh4eHY29vT3d3NE088wfz589HX18fE\nxITLly/j5+dHaGgoV65c4amnnkJXV1eMvfPy8ujv72f+/PkMDQ2hq6uLs7MzBw8eZGhoiJCQEObN\nm0dTUxNOTk4kJCTw1VdfiXGoxv2amZmJtbW16LWYmppiYGDAgwcPWLFihcghraqqwsHBgZ6eHvz8\n/PDz88PY2JiffvqJTZs24ejoSHl5OZ2dnSxZsoTw8HBu3rxJZWXl774e/xCVgkwmIzw8nMDAQEZG\nRli6dCne3t4ixMTY2Jg7d+4IAZGJiYlIHaqrq2Pu3Lkinv727dscOfKQBpeUlCQgp5pwzo6ODtrb\n2/H09GRkZIT8/Hy2bdvGjh07sLe3x8vLiy1btmBqasro6CgHDhxAqVTS19fH4cOHgYcjUzs7O5qa\nmti4cSM3btygqamJ3bt3i/P3iRMnMDc3Jzw8nK6uLjw8PCgsLESlUtHQ0IBarRaNUc0I1cjICJlM\nxgsvvICdnR03btwQDSNdXV0KCwvx9/fH3NycnTt30tTUxPT0tIii19HRwdfXV4xtVSoV27dvJzMz\nk5iYGFpaWjA3NxfcgOHhYVJSUjA2NsbJyYlZs2YRHR3NjRs3RCiNTCbDxsaGZcuWkZiYiEql4uOP\nP0YmkxEWFkZKSgqFhYVYWFjw66+/UlVVRXl5OXp6eujq6pKUlERcXJyYpWvQ7xpBUltbG4ODg2KO\nriFSVVZWYmFhwYIFCwgMDGRwcJCXX36ZkJAQAYHVoPvNzc1xcXERSPusrCwmJyeprq7miy++AB6S\nvTWVm6b5q1QqhRNR06fy9vbG0NBQ+Al6e3txcnJCS0uLzMxMdu3axdGjRwUtq6+vD6lUKqTdR44c\nYefOnRQVFYlmtlwuJzs7m+DgYAYGBvDx8aGhoYGwsDDMzMwYGxvj2rVr3Lt3T2wKmrRvS0tLlEol\nNjY23L59GwMDA4yMjOjo6GBqaorU1FQOHz7MunXrGB4e5rXXXuOxxx5DLpdjbGyMq6srWlpa/6XY\nuD9EpaA5A+fk5AgEVXp6OuPj44yNjWFlZYWHhwdr164lNTWV8fFxYmNjuXnzprirtbW1ERISgpmZ\nmfh/9+3bx/r163n22WeZmppCX1+fCxcu8Le//Y2BgQH09fXZv38/CoVC6BLmzZvH4OAgw8PDSKVS\nHn/8cc6ePcu9e/c4c+YMiYmJ4pxvZmYm0qnNzMy4efMmdnZ2nDlzhszMTBEJFhkZSWFhoSAz19bW\nYmdnx759+1i5ciU//vgj8LAZlp6eLjaxmJgYRkZGBDA2NDQUfX194uLisLOzIyMjg9zcXMzNzcnN\nzcXb25uQkBB0dHQYGxvDzc2NXbt2sXfvXnp6eigvL2f27Nls2rQJmUxGd3c3d+7cQalUMjExwfLl\nyzl37hytra1ERESQmprKxx9/jJGREQqFQhyjSktLmZiY4PLlyyJ5amJigvnz57N7927q6+sxNzdn\n06ZNzJs3D0tLS6ysrNDX1+fcuXN88MEHxMbGIpFIWL9+PdPT06K6c3Fxobm5WZiGNIpLDRjl0qVL\nvPvuu0L7oUHzaQjcmiOZTCZj/vz5+Pn5sWfPHszNzUXCtoGBATY2NhQUFJCSkkJfXx+BgYG0traK\n8FsNeWnRokVYW1vz9ttvExgYKHJBNOW4kZERNjY2TE1N8cMPPxAWFoauri7h4eEcOnSItWvXcv36\ndYyNjZmenhbxAAUFBYyOjjJr1iwiIyOFGnbevHkArFu3jr6+Pmpra6muriYxMVEAaK5du0ZCQgL3\n799nYGCANWvW8P7775OUlMTp06exsLDA29ubuXPnCp6jJqbg96w/RKUglUpFZFZ+fj7W1tb09/cz\nOTmJRCLB2dmZyclJSktLGR8fx8/Pj5GREeLi4li0aBF5eXmsWbOGwcFBrKys2Lx5M/Bws8nPz+f7\n77/n5MmTNDY2sm7dOlpbWzE3Nyc7O5vW1lbu3r1LcHAwEomEvr4+5s6dS1lZGVFRUbzzzjuEhISw\nePFiMTrUnDuzsrKESvH8+fNIJBJOnTolmlaOjo6CND06Okp8fDwJCQmYmpqira3Nxo0bUSqVojFa\nV1fH888/T0hIiECWq1QqPD09SUhIwMfHR4BIGxoaCA8PZ9u2beKuoVQqqaqqQltbG319fbKysti7\ndy+ZmZmMjY0RFhZGVFSUgJeWlZURHR1NVFQUDQ0NXL58mYULFxIXF4ezszP+/v689tprfPDBBzQ0\nNPDiiy9iZ2cnYtOjo6MJCgoiODiY0NBQ8vLy6OnpEY3ROXPmkJmZyfHjx3FyckJPT4/R0VFsbW3J\nysrCysqK27dvo1Qq/8PxAf6vrI7Ozk5SUlKEa1VDxtb0G3x9fSkqKsLFxUUQnpydnQUOTzP3NzIy\nwtDQEKlUipmZGbdu3WLjxo1UV1ezfPly4uLicHJywtHRkcrKSlEtWVhYoFKpWLFiBXv27MHR0ZGA\ngABxlKyvr8fFxYWsrCzWrl3L+fPn8fDw4NSpU+J46ubmRmRkJB0dHdy+fZvR0VEWL17MpUuXmJiY\nIDk5WcQQnDp1CoBz587R3d3NrVu3RILakSNHBJfSzs4OZ2dnXnjhBby8vAQL4tFHH8XR0ZFly5ZR\nXFxMcHAwJiYmQrvze9YfYlPQmGGcnJwEsioxMRGpVEpHRwdGRkakpqZy9uxZ4uLiiI+PJzw8nDNn\nzqCtrY2bmxu3b99GpVJRXV1Nb28vAO7u7pibmxMdHc2cOXPo7u5GoVCI/Mjh4WHkcjn//u//zvT0\nNNu3b2dmZoampib6+vrIyclh8+bNqNVq0XUHaG1tpbi4WIzvTp06xZ07d7h69SpLliwhPj6el19+\nmYCAAGHCqa6upqCggHPnzolk4ra2NuRyuZBfa2trMzg4SFFREc7Ozujr62NkZIREIuHWrVscOHCA\nQ4cOMT4+Tl9fH319fajVal588UXeeustnJycMDQ0JDo6ms7OTkH8XbhwIffv32fBggVkZGSQmpqK\nubk5ExMTdHd38+mnnxISEkJUVBT3798XysCqqip6enqAh8Gl3d3d/PDDDyK8pKysjNHRUYaHh9m3\nbx8rVqygq6uLvr4+MjIykMlkxMfHo1armZqaEjZ0Nzc39u7dK7iHW7duFWjzTz/9VIizOjo6BKm5\ns7MTlUrF+fPnWb58uQjybWlpYenSpaIi0UwfNIAViUQCPJTS29nZibJfrVYTFhb
GqlWruHTpkpCJ\nSyQSvLy8ePDggahG8/LysLS0pKGhgYmJCRITE0WfZGBggBMnTjA8PExGRgaxsbHMzMwQFxeHiYkJ\nd+/eZXR0lLGxMeLj45FKpcIPEhwcjJ6eHmNjY7S2ttLT0yOmBKdOnUJPTw9nZ2dGRkZITk5GS0sL\nfX19fH19GRgYwMXFhZycHDGdCw0NZWxsDKlUiqWlJXPnzkVPT4++vj6Rkv171h9iUzAyMmLDhg1o\naWlx+vRpRkdHKS4uZmJigp07dwrJqiY9qbGxkZSUFGbNmiWaRk5OTpibmzNnzhzKysqAh2KgsrIy\nlEolp06doqOjg8bGRpKSkkhISEBfX59HH32Ua9eu4eDgQFxcHAsWLBB3QrlcLqjSly9fFjNvTfSb\nra0tkZGRjI+Pk5SUhJ2dHSdOnBDNs8LCQnJzcykuLhYiLM2YrKuri4CAACHLhYfM/r6+PnF3GhkZ\nITQ0VGxyLi4uxMXFMTExwbVr1wgNDUWhUDA2NiYyJZydnSkvLxfZkVevXqW4uJhLly5RUVFBb28v\n3t7e4ix+5MgR2tra0NXV5fvvv+fWrVts2bJF8B19fX1FAI+lpaWwiy9YsEBYsV1cXEhISOAvf/kL\n9vb2PPLII6xevVocmS5evCgCg/X09DA2NmblypUsWbJEjP/u3bsHIGCmY2Nj3Lx5k8jISLS1tTl2\n7BiHDh3CxcWF8fFxYUn39/enpaUFIyMjoUvo7e1l8eLFfPDBB0KFqrHf5+fn4+fnh5eXF0eOHGHp\n0qVYW1uTmpqKUqlkbGyM9vZ2Ud6npqbi6upKdnY2vb29/PLLL2RnZ3P8+HHgoT1frVazevVq9PX1\ncXFxYdasWQwPD/P000/j6elJY2MjHR0d7N27l2vXrrFixQq2bt1Keno6enp6NDQ00NPTIz6vAI89\n9hju7u7IZDI2bNjAwYMHuXnzJl1dXcIFe/v2bQYGBti/fz8DAwOo1Wrmzp2LtrY2xcXFpKSkkJGR\nwezZs/Hw8Pjd1+MfYlMYHR0VrkQDAwOcnJwEQEPTHNTX16ekpISRkRH09PSoqakRwFILCwtGRkbw\n9vamublZdFrb2tqws7PD0tKS6OhoqqqqOH/+vMCI7dy5ExMTE+bNm8eWLVuwtLRk37592NjYCOKz\nRmO+cOFCgd+Wy+Xcu3cPAwMD7ty5w5o1a7h69SorV67E3d1dZA5mZWUxOjpKRUUFU1NTIuqrqKgI\nHx8fkY6sibczMTHByMiI2NhYMjMz0dbWFrkUIyMjIsT23r17vPLKKwwNDQlZ+OTkpHDfGRgYYGBg\nQExMDKtWrSIvLw9nZ2d6e3uJiorC1taWEydO8OyzzxIbG4uPjw8eHh7k5eXR2toqJiuaUB7NlKK2\ntlaE7ZSWlorAFC0tLU6cOMHKlSuJjY2lvr6e3t5enJ2dSUtLw9/fn2PHjuHk5ERXVxfLly/n4sWL\nHD9+nLGxMR48eEBkZCQAixYtory8HC8vL5GfeP36dVFZubi4CNu3kZERK1asEM7JwsJCtLW1SUhI\noL29neDgYLHZmJmZid6VkZERurq6BAYGEhAQgI2NDVpaWsJ30NjYyIYNG+jq6iIpKQlra2v09PTw\n9PREV1cXAwMDdu7cCYCVlZXgY4aGhpKRkUFdXR15eXm89tprhIWF4enpiY+PD0lJSTz55JOMjY1x\n+vRp9PT0RENZqVTS0dHBypUrxfO9ffs2MpmMGzdu8Oijj5KUlMSCBQsYGxvD1NQUGxsbAgMDefzx\nxwUf48CBAyIQeHx8XOD08vPzf/f1+IexTmuYCXp6eqIa0OQO+Pj44OnpiZWVFZGRkYyOjopm2OTk\npDDOGBoakp+fzzPPPMOXX36JpaWlMNo8ePCAqakprl+/zuDgIMXFxcjlckpLS5HJZOTl5fHGG2/w\nyiuvUFBQgFwuR19fH3Nzc5YuXUpxcbG4KNzc3GhpacHMzIyuri5MTEywsrIiODhYKCMNDAywtrYW\nHeCenh46OjrE3wUFBTE5OUlycjL6+vpcv36dqKgoHB0dxXl1enoaLy8vYbft6enBwcEBAwMDOjs7\nuX//PtPT0wQGBvLgwQMePHgggm+lUik5OTm0tbWhr69PcHAwg4ODWFtbMzg4SFBQEA4ODixevJit\nW7fyyiuv0NPTw+TkJG+++SYKhYLk5GRee+01kpOThcdjaGgIExMTUcYuXLiQ2tpaYmNjRapUb2+v\nmCpMTU0xOjoqZOy6urr09vYKn4eGefDPOZYa/L2joyPt7e2sXr1ajHc1F09FRQWPPfYYEokEmUyG\nXC7Hw8ODb775Bnd3d6ampkQqVXp6OgsWLMDb2xuFQiEYiZqkKl9fX0ZGRqitrWVqakpUf25ubkRH\nR6NUKsVrGR4eFsAXTXJZb28vu3fvpqysjJiYGK5fv87WrVuZM2cOQ0NDFBYWcubMGUJDQ3F3dxek\n7XXr1tHS0kJ8fDyOjo7ExsaSnp5Ofn4+Tz/9NHfu3KGxsVGAauRyuYggOHToEFKpVAiuLC0tGR8f\nx9DQEB0dHUJDQzE0NMTHx4dbt24hk8m4ffv2fx+ewldfffVudHQ0paWlGBkZ0dTURFhYGIWFhejr\n6wsdeFhYGIODg/T394tMgO7ubrq6upDJZIyOjmJqairEHi+//LIQkcTExNDc3Cz85yqVCnNzc9GM\nrKqqYtmyZUJFaGhoyLJly7h8+TINDQ0MDg7S0NBAXV0d69atE+O2qqoqoqOjaW5uRqlUMj4+TkZG\nBv39/Vy+fJmwsDB6e3sFQFZzvk5JSWHdunXk5OQQGRnJyZMnWb9+PRUVFbS3t7Ny5UpaWlrIy8uj\nu7tbhNN0dHSID7SXlxcff/wx/f39SKVS0Wxtbm4WoBZzc3NCQ0NRqVSYmJjg5uZGamoqxcXFQppb\nVlZGcnIy69atE5OJsrIynJ2dKSoqoru7WxjOmpqayMrKQiqVMmfOHE6dOsWcOXNob29nz549mJqa\nCmLQ3bt3iY6OZsmSJbS0tIgxaU1NjfBNaOC6Tz75JJ988gnbt28XpCsNzv7GjRvU1dWxcuVK+vv7\nuXfvHuHh4QQHB1NQUICBgQHHjh2joaFBcA/a2tqora3Fz8+Pc+fO8dxzzwlTVGNjIz/++CODg4Mo\nFAqOHTvG1NSUqE5qa2tRq9UkJSVRUlJCVVUV09PTKJVK1qxZQ2FhIfn5+TQ2Noq07bS0NGHYe+WV\nV5g9ezY9PT0UFRXR19eHo6MjNTU1lJSUCCRcfX09FRUVLFu2jNTUVLq7u0lISODIkSNibCuRSGhp\naSEqKgpTU1M6Ozs5evQoycnJLF68mJaWFnR0dAR5zMvLi5GREcHG0NDLfsvk+O+zKXz77bfvakoj\nKysrHBwcGBkZoaOjg76+PtFgaWho4MKFC8yZM4e7d+8yMDCAr68vw8PD1NXV4evri4GBAcXFxRQV\nFfHnP/+ZK1euoFAocHNzo7y8nH/7t3
/D1taW3t5e8QEODAwUOY4ODg5s3ryZ9PR0XFxc8PT05MKF\nC2KkU1RUxKpVq+jr66O6uhoPDw+Ki4vp7OwkJCQEQAShqtVqxsbGROdb48ibNWsWcrkcKysrpFIp\n+fn53Lp1i7lz5wrhkwYQMjY2RkdHB+Pj40gkEjw8PETTTC6XExQURENDA3l5eXh7e2NqaipyBWtr\na4WnpLS0FFNTU6qqqli/fj3+/v50d3cjkUgIDg6mu7sbuVwuQnAqKirw9fXl559/ZnBwUOQXhoeH\nMzExQWBgICqVipiYGPT09IRh7ZNPPkEul/Pss8/i7u5Ofn4+bm5uNDU1kZOTw8svv8ydO3cYHR3F\nzMwMKysrhoaGGB8f5+TJk0RFRaGnp4eLi4vwhRgZGYl05draWhYtWiRYBa2trbS0tBAaGopSqaSy\nslLoDiIiIujs7CQjI4OZmRnGx8dRKBTMnj2bgIAAdHV16ejoENMtzRRHoVBQW1srmKBnz57F39+f\nqKgoampquH79OgEBAeTn57Nr1y6CgoJEcE9TUxPe3t6cO3cObW1tceSQy+U4OjqKz4WRkRHGxsZI\npVIkEgkzMzPI5XIqKytFpaGhUg0NDQk1Kjyc1r388ssEBwdz5swZcUy4f/8+CxcuFNVaWVmZ0Mjo\n6emRkZHxuzaF/6E5/8/6n/X/n/Xfi+b84osvEhoaSk5ODsuXL+f8+fMYGRlRU1ODr68voaGhwnLs\n5ubG0aNHRZ7ikiVLcHR0xMzMjO7ubnp7e0lKSmL//v3U1taya9cuPv/8c0ZHR3FyckIqlYoObkdH\nBy4uLmRmZgoiTn9/P0888QR79+7l+eefp7CwkJSUFJ577jk2bdrE6dOnqayspKurCwcHByEJ1tip\ny8rKiIiIICcnB39/f7y9vcnLy6O5uZmnn36apqYm6urq2LhxIz///DNBQUGsXbuWd999l6GhIVxd\nXVmxYgUymYz9+/ejq6tLSEgIFy5cQCKRsGLFCm7evImrqyuenp5cu3aNxMRE0XRcs2YNeXl5DAwM\ncPPmTZ544gmRITE2NiZyFSIjI6moqKC1tVXAaJcvX87IyAiXL18Wce5LlizB2NiY0dFRJiYmRMaE\nv78/ubm5ODo64uHhQWVlJXl5ecTHxwvNSHBwMO3t7aSnp2NsbMz27dv5/vvviY+PZ3h4mLGxMWxs\nbITke+/evXh5eZGTk4OhoSFeXl64ubmxf/9+8f5ERUVRXFyMVCqloqICPz8/Hjx4gKmpKVKpFCMj\nI2bPns2hQ4cICgrivffe45lnnmHNmjVC3yCXy5mZmeHUqVNYWVkRGxvLgwcPxPRG02M6ffo0AQEB\nGBsbi17HvXv3ePDgAV9++SUfffQRaWlpfPjhh2Js2tXVha6uLmq1msrKSvz8/Lh58yYBAQE4Oztz\n4MABSktLeemll5DL5SgUClxdXTlw4AAJCQk8++yzpKWlCSjw/PnzaW9vx8XFhStXrjB//nzMzMwo\nKCgQNO9jx44Jkd/ExAQFBQVYWVkxd+5cKioqWL58uYhT/M/WH+L48Nlnn7373Xff0dTUhL6+vhjB\nGRgYcO/ePREOKpPJcHV1pbq6Gnt7e6Kjo7GxsaG+vp6enh6ampoYGxsjMzOTwsJCvL29hb00ICBA\nzGrb2trEyFBbW5vJyUm6urrYuHEjOjo6GBgY0NraipWVlUhFWr58Of39/Zw/f56oqCh6e3vR19cn\nJCSE/v5+mpub8fDwYPHixdTX15OZmcm+ffvE169evZqSkhImJyfp6+sjMTGRS5cu0djYKJyLrq6u\nODo6ClvsuXPnsLGxEd9DE2DT1dUlLgylUimcl2lpacTGxnLq1Cleeukl2traCAoKEg68pqYmenp6\nWLhwoZiNGxsbo62tjZGREba2tlRVVdHe3k5nZyexsbGMjo4yMDAgSvyxsTF0dHTw8/MTSsDc3Fxi\nYmIwNTXFz88PNzc3EViSnJyMgYEBvr6+SKVSfvjhB8bGxiguLmZ0dJTJyUmuXr1KdXU12dnZeHh4\nCHNPXFwcb7/9NiqVSuDPtmzZwv79+6mpqcHU1JRbt24RGxuLkZGR6LhrUHNTU1MYGhqSnZ1NUlIS\nBgYGfPTRR2zYsIGamhpmZmaIj48XrIvFixcza9YsTE1NycvLQyKRCF5BU1MTcrmcnp4ebG1tKSws\npKysDENDQxYuXMjg4CAymQy1Wi28NVKplNbWVq5du8bExATe3t50d3cLz0dAQAASiQSFQsHPP/+M\ns7Mz4+PjXL9+HUNDQ+zt7dHT0+Onn35CpVIJOvb169fJyMhALpfj7+/PzMyM0GjU19dz48YN0eeK\njo4Wo/KsrKzfdXz4Q4wkNdguDSnY1tYWlUpFVVUV5ubmjIyMMD09TXNzs3Adent7U1hYiEwmo6+v\nj46ODiYmJli6dCnbtm0DYNOmTSxduhSpVCpQZVNTU3h6eqJSqcS0IzY2lo0bN5KZmUlLS4sQjGgm\nEBMTE8J5pnm+jo6OyGQysdFocgj+/Oc/Y2RkxOuvv86HH37Irl27kEqlvP7666xcuVJ0yr/66itm\nZmaYNWuW0Lu3t7eTnJzMt99+S09PD3p6erS1tdHV1UVNTQ2NjY0YGxszODjIzMyMmD1HRUVRW1sr\nNkxjY2O2bNkCQEVFBYmJiWRkZPDss89ibm4uUPhJSUnCEThv3jw2b95MVFSUwIuPj4+LEVlpaakY\nVW7YsIHu7m4ePHjAzMwMtra21NTUUFBQQHZ2NtnZ2fzwww/Y29uzYMECRkZGsLGxwdHRkddffx13\nd3c2bdqEWq1mZmaG7du3ixuBhp+gyQb9xz/+gUwmo6urC7lczjfffIO5uTlTU1Ok/bYAACAASURB\nVFOsXbuWP/3pTyxcuBBjY2NefPFFjI2NUavVAoWm0Sk4OTkhl8t54403OHjwIG1tbWJUqqurK+Ls\nrl27hra2NosXLyY4OJiEhARGRkZQKBTExMSI8Xls7MMsZVNTU+rr6/n888/R0tIiNzeXLVu24OHh\nQV9fHzMzM6xduxZnZ2fa29upra1lbGxM6Bv09PSEejYhIYH4+HgAnnjiCRFt5+rqSmJiIjt27EBP\nTw8vLy927NhBSEgIfX19tLW10dzcTHl5OatWrSIpKYnZs2ezbNky7ty5I/pgv3f9ISqFffv2vQsP\nFYgamevo6ChSqZTvvvuOtLQ0fv31V5YtW0ZdXR19fX2iVHNzcxPSzvDwcCorK5mZmeHQoUMkJiby\n66+/isZRR0cHxsbGLFmyhKamJvz8/KitreWnn34SCsqKigqkUinr169HX1+ftrY2Vq9ezQcffICT\nkxOXLl0SsJDly5eTlpZGYWEhurq6VFdX4+rqyuDgIE5OTnR3dxMTE0NycjISiURAYQYGBmhubgYe\nXtA5OTmUlpaKSYKhoSFmZmaCbrR69WpkMhnR0dGoVCpaW1spLy8XWn47OztkMhmDg4NYWlqKykZ
j\nEJqZmWH27NlcuHBBwFo0r2V8fJzGxkYqKir49ddfuXfvHpaWllhYWHDhwgWuXbvG22+/LUad9vb2\nXL58GX19fcFt1DgGe3p6mDNnjkDc1dXVCV+Fvr6+0AgcPnyY06dP4+TkhKenJ2q1mtmzZ/P999/j\n5uZGXV0dvb29Qp1oYWEhHKt6enpYWFiwatUqJiYmmJmZwdHREXNzc3788Uesra1ZtmwZn3/+uRjf\nXrlyhV27dpGbmysCY3x9fTly5AhaWloCY1dbW4tMJsPBwYH6+nrs7Oyoqalh7ty5winb19eHRCLB\nyMiI8+fP8+qrrzIyMkJUVBSlpaW0t7cLPJ2+vj6rV68WE5zz589TUlIiLNKaqL1Zs2YJotLAwACX\nLl1CR0eH6elpHB0deeSRR8jOzsbQ0JCjR49y69YtEhISyMzMFLkot27dwtraWvhxrK2tOX36NAUF\nBSxbtoz29nZOnDjx36dSGB8fx87OjqNHj+Li4oKLiwszMzOEhITwr//6r2zevJm//e1vGBsbC9af\nu7s7VVVV3L9/n+TkZCYnJzl+/Dh6enqYm5sDcOfOHV588UWKi4v5+uuvCQgIwMPDgwsXLqCjo0ND\nQ4PIkmhqamJmZkZ88DTHmTt37tDa2sqf//xnGhoagIfcg3Xr1nH9+nU2bdqEu7s7lZWV/yEAV5Pc\nc/z4cZEavW7dOvz9/cXu7+3tzbFjx1i4cKH4OWzYsIG1a9cyNDSEj48Ply5dIj09nezsbMECNDQ0\nJCUlBXiYvVhcXMzPP/+MiYkJ9+/fFz59DVtAU0FoxoGpqam88847pKam8u6772JgYMDPP/8sMgst\nLS0xMzMTH0hN6pFGsKWvr8/MzAyvvPIKFy9epLOzk/7+ftauXSvUjjExMaxbtw4PDw8UCgWGhoaY\nm5vT19fHxMQEixYtorOzk5aWFoaGhoSq09PTk5deekkc365duyYqI2NjY9zd3Zk/fz4DAwMcOXKE\nH3/8kYMHD3Lu3DliY2OJiYkhIyODsLAw0tLShOb/1q1bVFVVUVtbS1BQEE1NTXR0dKBSqViyZImI\nAdCIo8zNzTl8+DDj4+OUlpaK56qZaGigMBr+osZMNT09jVwux93dnStXrnD79m0hXFu5ciUvvfSS\nqHpXrlxJSUkJp0+fRiaTUVVVJehea9euxdPTk7GxMSorK9m1axcVFRWsXr2aLVu2iAq3rKwMiURC\nVFQUCoVCZEMcOnSIlStXoqurS35+vgD5/J71h9gUtLW1yczMZM2aNeTk5NDS0iLi2js6OggLC0Oh\nUGBvb8++ffuoqqoiMzOTiIgIEhMTSU1NJS0tTZRsGh7dkiVLyMjIIDIykosXLzJr1ixu3bqFSqUS\nKcFPPPEESqWS6upqIiMjuXfvHvPnz6eyspKamho2btwoIt81FB+VSsWhQ4ewsLAQsmapVEpAQAA/\n/vgjQ0NDIoDm8OHDTE9PY2RkJMhNQ0NDDA8P88svv2BjYyPw5hMTE1y9ehUfHx8MDQ0pKSlh27Zt\n4vxfWlqKg4MD3t7eAp7y7rvvoqOjw2uvvSYERWNjY0LGnJWVRUlJCXPmzGHHjh34+fkRGRnJnDlz\niI2N5a233iIiIoJnn32W6elpioqKcHV1JT8/n++++46TJ09iZWXFwYMHcXJyIisri/b2dlxdXfn2\n22/x9PRk0aJF/PTTT+Tn5+Po6CgCd7777juRUlRYWEhhYSEFBQWYmJhw7949Fi5ciI6ODiEhISKF\nW6Mv+WfjWlZWFsePH+eNN95gZmaGL774gnPnzhEXF8eyZcs4ePAgFy5cQE9Pj/r6eqFbCQsLE14H\nTQCwWq2mrKxMVABz586lp6eHsLAwQkNDcXJy4sKFCzQ2NuLg4EBubi5aWlqC8KRJ49Jg0zQx8RKJ\nBDc3N3FE07A+JycncXd3JzY2ltWrV+Po6Iifnx/d3d1UVlYyNjZGTEwM5eXlGBgYUFVVBTy0Tre1\ntbF06VKcnJwoLy9n9erVXLp0ieXLl6OtrY2VlRX29vaC5NTX14ebmxvT09MCBLNo0SKWLFkijju/\nZ/2nm4JEIjkkkUiUEonk/j89Zi6RSK5IJJKa3341+6e/e1MikdRKJJIqiUSy5Pc8CR0dHXbt2kVB\nQQG1tbUUFhbS09ODp6cnO3fuxM/Pj+TkZIFiv3TpEp6envj6+qJSqdi1axfl5eWC06f5wV69epW2\ntjZu3LhBd3c3LS0tzMzMkJCQwPvvv09vby+enp78/e9/FzyA4eFhvv76a3x9fXF2dqakpIS6ujpc\nXV3x9fUVb5i2tjbd3d0CsX7nzh1SUlKYO3cu69atY8mSJRw5coQ333yTqKgofHx86Ojo4MGDB+zc\nuVOYatzd3XFycgIe9hTs7OyYmppCKpUSHh7O8ePHqaqqIjw8HF9fX+rq6vD09OT999+np6cHc3Nz\nOjs7qaysFOX55s2buXHjhnAMRkdHk5WVhUKhoLu7m4CAACEkWrJkCXl5eRw8eJCZmRn8/PxIS0sj\nOzubuXPnEh0dze7du7GxsRHTHXiIJdPW1kZXV5eZmRna2tooKSlBqVRSWlrKhx9+yKJFi7CwsOCd\nd95h/fr1mJqaEhoaSkJCghBn9fX1UVJSwoULFwAwNzcXk4D169ezdu1aVq1axfLly1m0aBHNzc3c\nvXuXjIwMDh06hJOTE4mJiTQ2NtLf349KpSIyMpLm5maqq6vFRuzp6SnQeBoakSbtWiqVCh3LnTt3\nuH37Np2dnRgYGAgQrCbxvKKigp9//pmIiAjgYdWoUCjIysrC0tJSHOlUKhX29vaC6q2rq8uTTz7J\njRs3hB7m008/FYE+8+bNY3JyUpCXZs2ahZGREVlZWcTGxmJjY4OFhQU+Pj4MDg4KjJuzszNGRkYE\nBQUJV21DQwNxcXHI5XJiY2NFBsXvXb+nUjgMJP5vj70BZKjV6llAxm9/RiKR+AIbAb/f/s0BiUSi\n/Z99Aw3ivaysDB8fHxYsWICZmRkuLi7CSGJkZMTu3btZsGABISEhxMTE0NnZiYODAzo6OoyOjqKl\npYWVlRVhYQ9Hsfn5+cyePZuIiAiKi4vx8/PjkUceIS0tjaSkJFxcXDhx4gS3bt0SJKdNmzaxc+dO\nkdmnGfNoRp2/vU6BfHvnnXeQyWSsX7+eBQsW4OvrS3l5Ofn5+WzcuJH79+8TGBiIq6srbW1t1NTU\nCMqUqakpurq6wgRjYGCASqUiOTlZRKFFREQQGRkpVIf6+vpUVVVRUVHB2bNnaW1tFdMJS0tLPvnk\nE/Lz89HW1qazs5NZs2axaNEi9uzZQ0dHh0g/6u3tZdGiRUxPT6NQKNi8eTNPP/00jzzyCOfPn8fQ\n0BBnZ2c2b97M5OQkvb29gvYjl8uJiIhg9erVNDQ0oKOjw4cffoiRkREPHjwQVdKVK1e4f/8+v/zy\ni9iAlUolFy9exMPDQ0hxNa5VeIhjCwkJ4bnnnmNgYI
D09HSampowMzPDwsKC27dv4+fnx9q1azl8\n+DCtra0oFArc3d3ZvXs3165do6amRqDfNFmSCQkJeHl54ezsjFqtJjY2VpCjo6OjcXBwICYmhv7+\nfjIyMmhubsbNzQ09PT18fHyYPXu2kIy/+uqr/PLLLwAi+GfDhg3k5eVhYmLCL7/8ws2bN8UY8c6d\nO3R2dhIdHU1ISAiDg4OiqlCr1Tz11FN8/vnnPProo8JC3tLSIt6DtLQ0SktLaWpqQiaTYWxsLBCA\ndnZ2giju4eGBsbExU1NTFBYWsm7dOk6dOkVubi7t7e2/41J/uP7TTUGtVmcDvf/bwyuBI7/9/giw\n6p8e/0WtVo+r1eoHQC0w5z/7HiqVisrKSsbHx5menkalUuHg4ICZmRleXl7ExMSwc+dOUdL5+vri\n5eWFl5cXbW1tmJubY25uzpkzZ2hvbxc23Mcff5yGhgauXbuGp6cnNTU1WFpa8q//+q/09vaSmpoq\n9P8atp5mzm1kZCTceBqvhGYspgGSFBQU4OXlhVwuJzMzk6+//pq6ujri4uLE+TAmJgaApqYmGhoa\nhKpufHycoKAgFAqFMET5+/tTX1/PlStXhGkqNDQUW1tb4uPjCQgIECDTzs5OpFIp7777Lg2/JRnf\nvHmT5cuXU1JSwpNPPikasl988QVFRUX09vZiY2NDeXk5kZGRmJubc+HCBS5cuMDixYtpbm6mpaWF\nV155hZCQEFavXs23337Lhg0bqKysRKlUMjw8jKenJyUlJVRUVODo6MitW7dobGxkx44deHh44OTk\nxPLly1m8eLG4u8nlchFSo/GcODg4iJ+JxuLs6elJWVmZyJgYHx/n8uXLzJ8/H21tbaytrXFzcyMs\nLAxDQ0Ps7OxYs2aNGKnm5eUREBCAtbU19+7dExOjoaEhtLW18fX1JSAgAJlMRnFxMWq1Wryu+vp6\nsrKyBFvh7t27QhY/NDTE5OQk5ubmtLe3o6enB0BkZCQ//vgj9+7dQ0tLi5SUFObMmYOXlxdRUVH8\n+uuvnDt3jlmzZlFfXy9iBMLCwli6dClhYWFCMXnlyhXhxG1oaMDU1FTg1ZqamlCpVOjo6HDmzBnk\ncjlTU1Pk5uYK96RKpeLKlSsif/TYsWO0t7eTmJgoNrHfs/5PewoKtVqt2Xo6AMVvv7cHmv/p61p+\ne+z/cU1OThIZGcnatWuprKwkJSUFf39/2traxFm+qKiIgwcP8uWXX/Ltt99y5MgRDh8+LAxOTz31\nFCYmJly9elU04RoaGjA3NycxMRFHR0d0dXU5evQoeXl5BAYG8tRTTwlMlaaZtnHjRjHj19XV5Zln\nnmFqaoq7d++SlpYGIEaadnZ23Lx5E2dnZ0JDQ9myZQv+/v5kZ2czNDTE2bNn2blzJ7du3SI1NZUL\nFy6gVCrp7e3FwsJCyKx9fHwABBTj7bff5vHHH6exsRGVSsWJEydIT08XEWAZGRncvXuX9957j8LC\nQkpLS3niiSdEdPzWrVv55ptvhLfB19eXN998E11dXcEmrK+vFxLy4OBg6uvrSUtLo6CgAJVKRVBQ\nEB4eHjQ2NvLee+9haWnJ9u3bRdNQIzGvr68nIiICCwsLbty4QXl5Od988w2HDx/GwcFBNDn7+vqE\nfH3ZsmWo1WoqKiqEuCk4OBh42LibN28eNTU1HDp0SBiqNNqSvr4+4uLi0NXVZdmyZaSnp4uQWHd3\nd7Zt2yZEZTo6OsTFxQGQnJzMggULaG5upru7m9raWlatWsWBAwdoa2tDKpUKPcuf/vQnduzYIfge\nGgCuBlmnra3NI488AjzMrFCr1fj7+yOTydi8eTOGhoasXbuWnp4eoqKicHV1RSaT8fHHH/Pjjz+y\nZcsW9uzZww8//ICxsTEdHR3C+q8xw61cuZLe3l4OHz5MZGQkZ86c4a9//SuZmZnMnz+f0dFRIZIb\nHBwUfaqEhARWrFjB7t276e/vJygoiPPnz4vjzu9Z/68bjeqHOun/skxZIpE8I5FIiiQSSdHk5KSg\n51haWmJvby+yCKRSKUuXLsXLy4t169aho6ODh4cHBw4cQFtbm88//5x33nlHVBYvv/yysDjb29sT\nHBzM7NmzSU1NZXR0lMjISDIzM8X4zN/fHw8PD1xcXBgZGcHd3Z3U1FROnz5Nb28vnZ2duLu7i+Yc\nPNzEUlNTmZqaEvZcHx8fenp6+Otf/yo63iEhIfz5z3/mxRdfZHR0VNz5XFxckEqlNDc3891334mj\niK2traAnaYAu2tra6OjoiNGhv7+/mED89a9/JSYmBkdHRzo6OnB1daW7u5uamhrB5Xvssceorq7m\nzTff5PLly1RWVlJcXMyOHTtobW1FqVRy69YtLl68yMqVK1Gr1WLkqAkw1Zh78vLyKC4uFmRhDcdB\nE5g7a9YsMjIyBOOhpKSEu3fvCliKoaEhjo6OTE9PExoaioGBAdra2nh4eAhrr729PVVVVQLKO2vW\nLEJCQqiurqapqQkLCwva29vJzs4mMjKSmJgYnJycWLZsGfPnz8fU1JTw8HB6enpYvny5gMRo4tna\n2tqEH0Uzmq2urubzzz9nZmaGrKwscnNzOXfunMZZSEVFBUFBQfT29jI1NQUg3jNdXV1hqjMxMSE5\nORm1Ws3PP/8sUO+mpqZ4e3ujpaWFmZmZgPYolUquXr2Ki4sLjz76KLa2tuLoqxlBjo6OUlhYyO7d\nuwUYR4O727JlC7W1tcTFxQl6tSZ68N69e/T09HDjxg18fX2xsrL63dfm/+mm0CmRSGx/u7htAeVv\nj7cCjv/0dQ6/PfZ/W2q1+lu1Wh2mVqvDbG1tKSsrY3h4WAh6BgYGBKrL0NBQ4N2trKxwcXER4pK9\ne/diZmZGfX094+Pj7N+/n+3btwMPgbDnz58HEBmHPj4+hIeH09nZSX5+PidOnODo0aPc/F/svWdw\nnFW6rn0pZ7VyauWcJRQty1ayLeeccAIbzABjMgNMmYGBGQYYDmEGGDwmGOOAExhn2ZYtS5ZlS7Jy\nsHJWK7bULamVWlL3+eHd65z963C++k4Vu2q/v2yKktr9vmu9z3qe+77uO3f4+OOP2bp1K9u3b0ci\nkaBQKISZyMPDQzwIOhORtbU1vr6+uLq6CipPcnIyfX19AgdWWVlJR0cHw8PD7Nq1CxcXF2GUGRsb\nIzY2VvAfLCws8PPzo7+/XzT2dKEeiYmJLFy4kPr6es6ePYujoyNvv/02Tz75JKdOneIPf/gDcrlc\nJHBrtVo+//xzQcj+/PPPcXZ2FmKw7OxsKisr8fb2ZmZmBl9fX8zMzMQ5Xyf1TU9P57XXXsPBwUEs\nAGNjYywsLJiamqKuro7g4GAhVV66dCmrV69m6dKl9PT0MDExQUJCAsbGxvT396PRaJDJZHR2dtLT\n04NGoxG5EIAwgdXX16PVarl27RoRERFMT0+jVCrJzc3FyMiIhIQEbGxsUCgUQm1pYWEhEqV1mHud\nhiIyMpK+v
j4AYVG3t7dnw4YNSKVSnnzySeRyOQ4ODmg0Guzs7CguLsbGxoaUlBQBZklISMDIyEg0\nRl1cXEQy1U8//YSrqyvNzc1YW1tTU1PD3/72N+GSjYyMZGBgAHNzc3EEfuKJJ4QVX2fNBnBzcyMg\nIECkk3/22WcUFxdTXl6OjY0NExMTKBQKUW0WFhYKQ6Guaerl5cVTTz1FX18fhw4d+tWL+//rpnAB\nePw//vw4cP5/+++P6unpmejp6fkAAUDxr/mBq1atQi6XI5PJaGpqoqSkhJycHGQyGQ0NDZSXl3P5\n8mUx9uvt7SUkJISWlhYRgqLrKOveshYWFkxPT4sZdFNTE/v27SM/Px9ra2uioqKYnJzk1KlT9Pb2\n8vLLL6Onp0dISAhhYWH09/dz/fp1KisrsbOzQ6lUAg+9Gt7e3gLYqkvJTklJITo6WpwPf/jhBx55\n5BGR8nPw4EH27NmDo6MjX3zxBdHR0UxPT4sHobW1lcbGRpycnPDz88PKyopDhw6JpKiZmRnq6upI\nT0/H0tKS5557jj179nDz5k2ef/55oqKi2Lx5M2vXriU8PJxdu3Zx4cIFpqam2Lp1KwsWLMDGxgY9\nPT1x7FEqlWKq0tHRQU5ODnK5XIz3YmNjMTY2Zvny5QIvrwu7uXTpEqtWrcLY2Jjbt2+TnZ1NTEwM\nra2t4p5MTk6KI8Lo6CgJCQm0tbWRlZVFS0sLCxYs4Pbt2+INXFlZyczMDJs3byY0NJSFCxcKn8eK\nFStISkrirbfe4vnnnxc/v7a2Vkik4WFVMDk5SWhoqOjm5+Tk4O/vz7JlyxgcHCQvL09UBbqNt6ur\nCwcHB7Zu3Yqenp44LuiSwpVKpeg/6LIc9fX1qa6uFvoEmUyGkZERcXFxvPTSSwIr/+GHHxIREcEr\nr7xCZ2cn0dHRdHR0CPCuTiavqzK1Wi09PT20tbWJ+25jY8Pq1atRqVTk5+fz5ZdfcvLkSSoqKggJ\nCeHvf/+70CXIZDIWLFhAeXk5SUlJREZG/urF/WtGkieAe0CQnp5et56e3pPAh8ASPT29JmDxf/wd\nrVZbC5wGHgBXgX1arXbu//Q7dB75e/fuoa+vT2FhIUqlkqCgINasWcO///1vGhoasLe3F6O1119/\nnQsXLhAYGMjy5ctRq9UMDQ0JqzFAdXU169ato6SkhLt37woq0rx584iMjBQkIjMzM4aGhkhNTaWi\nooKJiQlWr15NbW0tQUFBPP/888IjAQ9DRZubm+nt7WXRokWi5zA1NYWHhwc9PT2Mjo6yd+9evLy8\nuH37NuHh4Tg6OtLW1saVK1fIycnhxIkTGBgYiAc3MzOT8fFxRkdHGRkZwc/PT9B+P/roI3JyckhM\nTKSrq4vw8HACAwO5desWBw8epK+vD319feRyOTk5OfT29nLhwgVmZ2cFxer8+fOYmZkhl8tpbGwU\nmPnm5mYRxhMREUFKSgo2NjYEBgaKBaE7NqSkpCCTyZiZmSEmJgYjIyO+//57srOzWb9+PXK5HIVC\nwbvvvouzszO5ubnIZDJKSkrQaDSi6omLi8Pf35+GhgZMTU3FhqvjJBYVFQlFps4yra+vz9DQEKGh\noTzxxBNoNBq6u7v517/+hampKc3NzZw4cYKWlhbMzc25fv26mD5s2rSJn376CUNDQ9ra2oTZDh5K\nuHXydzs7O7KyskRaue4I5OrqSmpqKo2NjaxZs0boKmJjY9m5c6fAp+mYoT4+PpiZmREXF0diYiKx\nsbHExcXh6elJTk4O7e3tTExMYGZmhqWlJb29vdy8eVM0MHXGKo1Gw8TEBLt27SIxMRGpVCrgsbrf\nq/N6SKVSfvzxR3x9fYV9Pzg4mNLSUgICAv7Pu8F/XL9m+rBNq9W6arVaI61W667Var/TarVDWq12\nkVarDdBqtYu1Wu3w//b//02r1fpptdogrVab9Ws+xOjoqEhWjo+P5/Lly6SnpwvJ7qZNm8jOzhac\nfyMjI0Ht+eMf/0h+fj4VFRW0t7eL8RUgFr5Ovz89PY2ZmRl37tzByclJdOKtrKxE6TY5OSli2XTz\nYalUyoMHD2hubgYeIrjefPNNbGxsyM/PJyUlRUAuent7yczMJDo6mvT0dGZmZoQDMSUlBXd3d4EK\nVyqVWFlZodFogIebjbOzs0giUqlUrFy5UgSAeHh4cOTIEfLz80lLSxMRYgcPHiQmJgYXFxdBLA4L\nCyMlJYWYmBgkEglhYWFCjbh+/XoyMzP55JNPqKqqYs2aNUKGrNFo2LJlCzt27GDlypUiJs3e3h61\nWk1PTw+dnZ2cPn1aOD/j4uJITU0VfZvExESOHDmCQqHgueeeY/78+RQWFpKfn8/Zs2fJy8sTaH4d\nLUiHsdfRkdPS0oTQx9rampCQEF555RVBcE5MTERPT48bN24wNTXFqVOnUKlUzJs3j/379/PII48I\naC9AVlYWLi4u4ig0MDBAREQE3377rVjIixcvZnJyEh8fH8FqdHR0pKOjg0OHDgkGw+nTpxkYeHhi\nbmxsRKPR8Mknn6BSqSgtLeXixYv8+9//prW1VeQ5TkxMkJaWxrx581Cr1dTV1bFt2zakUilXr14V\ncvUnnngCeDjqNDAwIDExUcTl6TQSTk5OBAcHC2+HLqPE0dGR2dlZent7MTMzw9bWlpGRETIzM8nO\nzv41SxH4jXgfPvvss3d+97vfCdxUVFSUYPBJpVLs7e0JCwtjxYoVeHt7k5SUxOTkJNXV1cTExGBm\nZsbKlSuxsbERM+UDBw6wdu1a8vLy8PT0FA+Pzj6tUqloa2ujoqKCpKQkBgcHWbduneD266AfZmZm\ngoSsy+nTlaXz588nJycHc3Nztm/fLqYdOkJUVVUVIyMjeHh4cO/ePYaHh/nuu++wsLBAKpUyMjLC\n9PQ0lZWVoipxdXVFIpEwPT0tjgpBQUGYmpqyfPlyZmdniY+PF1ZqT09PUWE8ePCAAwcOCAWcLmzX\nxcWF7OxssVkYGxvj6+vLH/7wB/z9/dHX1xeOv9DQUC5fvsylS5c4evQodnZ2BAYGolKpWLduHeXl\n5cTGxhIaGkpmZiajo6Pk5OSgr6/Pvn37GB8fZ3x8XEBddW86HSLO29ubsLAwvL29BQlaRyI6fvw4\nAQEBwldRVVWFUqkUTbnh4WE2btxITU2NcI+uW7dOLObly5fT3d1NWVkZDQ0N2NraUltby61bt/js\ns89Qq9UEBASQlZWFtbU1zs7OGBsbY2ZmRlBQECUlJVy6dAmJREJFRQVRUVEsWrSImzdvsm/fPurr\n6ykoKCA+Pp4DBw7Q19dHTEwM09PTaDQajI2NiYmJEfZ2X19famtr+eKLL4D/VREnJSURExODTCbD\nycmJubk5enp68Pf3p6mpievXr5Oamoq3tzdNTU2i8lUoFDg7OzMzMyPgYIIYRAAAIABJREFUrHV1\ndYSHh9PY2ChQ+bGxsSgUCpE41tjYyPr16zl48OB/He+Dvb09t2/fxt
XVFaVSyYIFCwgLC2NoaIib\nN2/y3XffIZFIKCwsFB6H8vJyAgMDxY3V4bsDAwPFl9jQ0MC2bdswNzcXs/OJiQkWLVqEXC6noKAA\nExMTMRPWKeB8fX0FkFQXSzc3N8fWrVsBWL9+PWlpaSQkJBAdHS38/3Fxcbi4uJCTk0NPT49gMTz3\n3HNkZWXR3d1NY2MjpqamFBcX09zczKpVqwTWe9euXdTX1+Pg4EBPTw8SiYQDBw5w4sQJRkdHycvL\n4/jx42JE6+rqiqurK0ZGRvT09JCbm4uHhweFhYWYmJiQnZ1NSEgIfn5+JCcnC+Vha2sr/f39PHjw\ngA0bNnD16lUGBgbIy8ujo6MDW1tbzMzMSEhIoKOjg+LiYpEmBYgIuezsbBGeM3/+fLKzs+np6UEu\nl2NiYsLAwAC+vr5iM9i9ezfJycnExcXR3t5OY2OjoCXrJOQ6irJCoRAK1aGhIdavXy9yMnRkpatX\nr+Lj4yOi2i5evEhJSQl2dnYsX76choYG0a9pa2vDwsICPT09jIyMMDQ0pKamhszMTEJCQkQva3Z2\nFj09PW7evCnCf01MTKipqaG5uRmZTEZHR4fIFpk/fz6Ojo4EBQWJ415wcLAgatvb27N7925xHKms\nrOTevXsCeV9TU8PKlSvx8PDgm2++EfQuXWJ4ZWUlhoaGIlv00qVLolFqZ2fHwoULxWfUNSd1wr/+\n/n78/f2xsbERx7Nfc/0mNgV4OCnQTR2++eYbjh07hqGhIW+88QZ9fX0cOHCA7Oxs0aALCQnhiy++\nYN68ecjlcg4ePIiRkRHFxcVCCANw8eJFTp8+zeOPP052djZvv/02+vr6FBUV4erqyvz589FoNEK9\nVlRURHd3N6Ghoaxbt46RkRHS0tJoaGgQzsbKykru3LnD4cOHUSgUdHR0IJFIsLa2Jjc3l6KiIsF+\nuH79OomJiSLa7oknnkCpVLJw4UJee+01SktLRRjM4OAgrq6u1NfX4+Hhwf379/H398fDw0PYvP/y\nl7+Ql5fH+Pg4KpUKlUpFbGysCJcJDQ1lw4YNHD9+HGNjYyorK9HX16empoYtW7bQ2trKxYsXyc7O\npqOjg++++47p6Wm8vLzw8/Ojs7OTzs5OYmJiCAkJEW8kIyMjzMzMBAi0tLSUJUuW8OGHH5KcnExD\nQ4NAyRUVFQlOpG6sPH/+fJycnHB3d2dgYAC5XI6ZmZmIjVer1QC888472NnZie/l3LlzrFy5kjNn\nzoi+U2lpKX5+fjz77LOYm5tjbm6Ovr4+CQkJoqrJzs7mb3/7G1NTUwAMDAxga2srjnO6+3Xq1Cka\nGxs5deoULi4ueHt7Y2ZmxquvvspXX31Fb28vMTExVFRUEBwczKZNmxgcHBSp07t372Zubo7y8nKR\nA5KdnY1arebs2bM4OzsTGBhIWFgYfX19omdiY2ODjY0NV65c4euvv6avr0/wIgGio6NRq9V88MEH\nZGRkIJVKmZqaYs2aNajVavr6+lCpVCiVSqanp7G1tWVubk4g2IqLi1Gr1bS1tYmkqV97/SaOD4cP\nH35nbGyMiIgI9PX18fLyQq1W4+PjI0Zkrq6uODk5YWpqKporoaGhnDhxgi1bthATE0NxcTGrVq2i\nra2NU6dOibeYo6MjcrmcqKgoEVaalJTE0qVLiY6O5uTJkyKSPiQkhKmpKWJjY8nJycHW1pYHDx6g\nUCi4ffs2FRUVrFixAn19fb7++mt2797N1NQUKpVKQDX6+vpECpDOZjszMyPQ5itWrCA+Pp7z58+j\n1WoxMTHh8uXLODg4kJiYKLIA0tLSRHjN6Ogo1tbWtLS08OSTT3LlyhUcHBzEqLS8vJzVq1czMjLC\n7Owsvr6+3Lp1iz/96U+cOHECfX19bt68iYGBgWA5XrhwAZlMxtjYmBBUhYeHMzExIejQw8PDNDU1\n4ezsjJeXF46Ojjg4OPDMM8/www8/YGlpyfLly7lx4wZKpRJ/f3/6+vrYsGEDxsbGlJWV4enpSUtL\ni+jUOzo6Mjc3h0ajwc/PDyMjI2QyGefPnycsLIzp6WmMjIyYmppi3bp1wv03NjYm8Ox1dXXY2tpS\nXl5OY2MjgJj5j42NcfjwYTHtOHv2LAsWLBAjV132p5eXF3v27EEikQgoi65ZfOvWLYKCgoR5yd7e\nnsnJSbKzswXl6scff+TRRx/F1tYWU1NTgoKCmJubIzIyEhsbG9ra2rh69ao4quji5LZv305vby+V\nlZU89dRTTE9PMzw8TEtLC93d3chkMnx9fbGzs2NoaAgPDw8sLCy4fv06+/fvF+N53fTIxcWF3Nxc\nFi5cKF4+ERER4miiE5H9+9///q8Dbv3000/f0UWP//jjj7z00kuYmJjw/vvvk5ycLBanThosl8vJ\ny8vDzc2NZcuWcfbsWezt7fnll1/w9vZGLpcLh5gucEVn0TU3N+fHH38UD25eXh4JCQnk5OTg4OBA\neno69fX1SKVSTExMaG5uRi6XC6fmhQsXSE1NJTc3l9TUVFQqFdevX6e3t5eZmRkmJydFkrMOMa9T\nzz148ACJREJPTw9WVlYMDw/T3t6Om5ubwG/5+vpSUFDA3NwcS5cuZf/+/Xh7exMaGkp8fDyBgYF8\n8MEHzM3NYWlpib29vXAArlu3DgcHB/z9/ampqWHx4sWMjIyIh9bMzExkNupIyYGBgQQHB2NhYSHy\nNXTnY10F1draiq2tLXZ2doyPjwsU/ObNmxkbG0Mul+Pp6cns7Cxubm7ExMRQWlpKbGysKNPDw8OF\nHkInldblVsDDef8PP/xAWloaMTEx3Lx5UyDhda5PpVJJVFQU9fX1pKenMzAwIHgIk5OTWFtb4+Xl\nRXh4OG5uboLgff36dd544w1aW1tFJIBEIsHBwQGVSsXPP//MJ598wszMjBhnuru7C7zb3NycYBiE\nh4dz4sQJ5HI5+fn5jI6OsnjxYsrKyggKCqKmpobh4WHc3Nw4d+4cUqmUNWvWMDQ0xP3798XY083N\nDT09PQwMDIRsWsdluH37NhYWFqxatUpQnQsKCnj22Wdxdnamq6sLAwMDSktLgYey+9TUVJRKJVeu\nXGHFihXcuXMHT09P7t27h7m5uU6R+19rU7Czs8PGxoaZmRnkcjkqlQpvb288PT1FaRUZGYmhoaFA\naQUEBHDx4kU0Gg3ff/89H3zwARUVFZw6dYrW1lZSUlJISEjg1KlTIkXq3r177Nq1i/j4eIqLi7l9\n+zY5OTkUFRURFxdHV1cXYWFhfPvtt5iamrJhwwaqq6uZN28eY2NjnD17lv3799PS0kJQUBD29vbE\nx8fj7u6Om5sb4eHhQpxUUFAg0OparRZ/f39mZ2eZnZ0VoTZmZmZERUVx8uRJVq1axeDgIOHh4SKS\n3srKCq1Wi1wuZ2BgAC8vL6qqqnBxccHOzg6tVotMJsPe3l6EsB4/fhxDQ0PS09M5fPgwY2NjhIaG\ncu7cOZYuXSqsxTohU2pqK
i4uLixbtoy2tjYqKyuRSqXMzMxw8+ZNtm7diq2tLWq1mqCgIORyOaam\npnz55ZciS7Orq4t58+YJPUJlZaUIrhkbGyM/P18Etug2cE9PTwwMDNDX16e4uJjc3FwWLVrEvXv3\nBEXZ2dlZdPB1CHelUsnatWsxMTERJjjdxKCvr4/bt29TV1cnhEU3btzAyckJPT09kc7d0NBAQkIC\nQ0NDeHt7i6lEfHw8EokEQ0ND/Pz8MDMzIz8/n8jISFQqFQUFBWzevFn0WNatW4evry8BAQGUlpbS\n29uLra0t33//PatWrcLT05OzZ89iZmaGubk5XV1dokooLCwkODhY2KGDg4Pp7OwU9vGxsTFkMhkt\nLS0sX75cwHB8fX25ceMGK1as4P79+wQGBnLs2DHUajV2dnYMDw/j6OiITCbDw8MDa2trnZbjv2nO\n/3399/Xf13+6/uvQnL29vfnkk09oaWlhfHycefPm0d7eTk9PD6GhoajVaiwsLBgYGKCzs5OFCxfi\n7e3Nm2++SWZmJgkJCf8p/FVnv/7mm2+QSqW4urqSk5ODm5sbU1NTODg4YGFhQVdXFxkZGdy7dw9j\nY2NmZ2cxNzcXCVROTk74+/vT2NhIbW0tUqmU3//+95SXl9Pd3c358+dZtmwZRkZGKJVKwW6USCQ4\nOztz584dWlpaUCqV7Nmzh76+PsrKypiammJ4eBitVsvJkyc5evQoS5YsEXPrzZs389VXX5GcnExA\nQABffvklTz75JLW1tWg0GiFS0mg0uLm5UV5eLubVOoqwbvxYW1uLm5sbVVVVIlBG5yjU09PjwYMH\nLFmyhK+++gpzc3OBdJuamsLd3R1ra2umpqa4ffs2UVFRODs7M3/+fGZnZ0UzKyEhgdOnTzM4OMim\nTZsIDQ3lwoULODo64ubmJjiC8fHxVFRU8PPPPwvox/j4OBEREQwPD/PHP/6R1157DUtLSxwdHfnu\nu+9YuXIlFhYWxMfH09HRQUlJicCsBQcHU1RURFRUFOPj4xQWFrJx40a+//574uPjheDp448/5sMP\nP+R3v/sdv/zyi8iXiI6Opq2tDXd3dwGhvX//PpOTk+jp6REQEEB+fj5JSUnMzMwQGRlJVlaWyP/c\nuHEju3fvZvv27QQGBlJYWIiVlRX19fXU19ezd+9eoXLs7e0VXoR58+YRGxtLVlYWmzdvpqKigrCw\nMGxsbLhz5w779u3j1q1b2NjY8P777+Pu7o6LiwuNjY24ubkRFxeHXC7Hz89PRPZFR0czb948Xnjh\nBb766isRYNPd3c3U1BRBQUG/mtP4m5g+qNVqcnNzRdJNdnY2FhYWrF27Fg8PDzw8PHB0dEQikTB/\n/nxsbGwwNTXlnXfeEUzHW7duMTg4iL6+Pp9//jkACQkJgka8ePFiNBoNQUFBODs7Mzo6ioeHBydO\nnCAgIABfX1+Gh4dRqVT09/cjk8mIiorixIkTwr4dHx8vPrOPjw9arRaFQsHIyIhIF1YoFFRUVPCv\nf/1LxM6tXr2aoaEhgUw3NzcX6LMnn3ySgoIC4KG46pFHHuHq1asEBQVhYWFBVlaWkBrPzs5ibGzM\nmjVrMDIyws3NjfT0dIKDg/H09CQxMZGmpiZSUlK4fv06/f39SKVSJicnCQ4Oxs/Pj8WLF4s5fF1d\nndho58+fj0QiEQi15ORk7O3tmZmZITk5GQ8PDxYtWsTc3BzDw8N8+eWXWFtbExsby+TkJM899xzl\n5eWUlZUJcU9VVRXvvfcetra2YnRXUVEhVJpKpRJfX18hNwaYmJjAxMQEiUTCtm3b/pPIrLe3FyMj\nI0pLS5HJZNy+fZvKykoA0ZAbGxtj69atDAwMiIYmwPLlyyksLKSrq4vY2Fhyc3PFRqmLEaisrCQk\nJISoqCjy8/MFoCUiIoKmpiahQ3FychKfd8GCBVhaWtLT04ODgwNtbW0YGhqyZMkSGhoayMrKwsHB\nge7ubsbGxli4cCETExPCX9HY2IiFhQWurq7k5+dTXPzQFSCTybh48SKvv/46L774ooALr1ixAkdH\nRyYmJuju7qa9vV34VzQaDStWrODmzZs0NTVx8+ZNYmNj8fLy4tNPP/3V6/E3sSnAw0U2PDyMq6sr\nUVFRxMfHMzg4yNjYGAUFBezbtw8LCwscHBzEotIlGA8ODhIfH09fXx/19fVCT6BL3blx4wYTExM4\nOjoyNDQkRpZ2dnai8abT6Pv6+mJpacmaNWuYmpoiIyODyclJRkdHMTB4yIuprq5mdHSU7du3U1dX\nR3x8PBs2bCAxMZH09HTMzMzYvXu3mCXr5LxHjhzB1dWV7u5ulixZgo+PD4mJiSLYdHx8XPwOHXo9\nISEBNzc3Dh8+TFZWFiqVirNnz+Lt7U14eDhNTU34+vpiYGCAQqEgJCSEtrY2JBIJXl5e3Lt3D39/\nf4qLiwXWLDQ0FIlEIsAw/f39hIWFMTk5yU8//QQ81AuMj4/j5OTEyZMncXJyoqKiQvQw1qxZw8jI\nCAYGBnz99decPHmSTz/9lJCQEEZGRkQqkb+/P7W1tWi1WnF/ddF1OgKRs7Mzf/rTnwDYsmULarWa\n7u5utmzZQmlpKQ8ePMDGxoaysjL6+vrYs2cPUVFRJCcns2/fPnx8fIRStK+vT0TC6V4oAO+++654\nQezdu5ePP/5YbI5yuVzY3evr66mqqmL16tV8++23PPLII1RUVAguhrOzM5OTk0RFRQEPYcO3b9+m\nv7+flpYWEeSrA8aGhISQm5tLbGwssbGxTExM4OvrS01NDePj44JQ3tnZKf6d8NC3o1armZub45//\n/Cf9/f2cPXsWlUpFXV0dMzMz9PT04OLiwsDAgIDDPvPMM8jlcioqKnB3d0ej0eDo6Mj+/ft/9Vr8\nTWwKWq2WvLw8bG1tkUgklJeXI5PJhCmnpKSElJQU7t+/LxgGOu/ApUuXuHz5MjKZDFdXV9RqtdjF\nZ2ZmcHFxITo6mr6+PiIiIsQbS09PD0dHR7Zt28bY2BgXL17kxRdfpL6+Xvjfe3p68PDwoKysDFNT\nU8Hl01mx1Wo11tbWGBsbc+TIEQBGRkZITU2lrKyM7u5ubGxsuHv3rgg2sbOzQyKRMDg4SGVlpYCm\nAKxbt46rV69ibm5Oe3s78+bNEwGiqampxMTE8Pe//52uri5u376NQqHgzJkzNDY2EhISgpubG0ZG\nRpiamvLyyy9z5MgRtm3bRm1tLba2tiKmbGRkBHNzczZs2ICNjQ0ymYyzZ88yODiIn58fc3NzbNq0\nCSsrK6qqqli+fDmxsbFIJBLGx8cJCwvj4sWLzJ8/nxMnTuDl5UVBQQHV1dUYGxtjY2Mj1Ii6aDgn\nJydu3LhBe3s7SqWSuro6bt++jampKZ2dnYJTWVJSglKppLy8nLfeeosFCxawYsUKioqKGBoaYm5u\nji+//JLt27dz9uxZ5HI5169fx9TUFFtbWy5evMjo6Kiwpo+OjgKwefNmmpubxah2dH
SUTz/9lNra\nWgwNDYWE3s3NDRsbG/z8/IR3wNramrt37/Lee+9x+PBhrly5IqTpOiNWWVkZdnZ2Ii9C1+RbsGAB\nTz/9NMHBwfT396Onp0dQUBCJiYmkpaXh6upKaGgo9+/fZ8eOHaSlpQGIe6rRaOjv7+fSpUvs3LmT\nuLg4fHx8SEtLY/Xq1UL8lJGRQXZ2NgqFghdffJGwsDAaGhooKSnh4sWLgu71a67fxKagr6/Pxo0b\nCQkJobu7m2XLlokkaJVKRUBAAD4+Pqxfv57vvvsOtVqNVqvF1dWV8PBw1Gq1iBoPCQkhOTkZeOip\n0NPTIywsDHjIgqyoqBDa8IaGBjo6OqiurqapqYm7d+9SUFDAiRMnOHPmDPAQ1BIbG4uRkZGAgMLD\nPkhJSQkqlYqjR48SHh5OeXm5QIiNj4/T3t7O3bt3KS4u5tKlS6KUjYmJwdzcHBcXFxYvXix+rru7\nO2+88Qbx8fH4+flx7NgxUcLDQ26fTgobFhZGaWkpe/fuxc7OTtz07du3MzAwwNDQEH/4wx84fPgw\nXV1dzJ8/n/b2diYnJ/n0008pKSnB2dmZ1tZWNBoN27ZtY8mSJUxOThIeHi5GiTo8++XLl0V/5uLF\niyxatIiJiQm2b9/OmjVrSE1NFV7/kpISQc/SJYd3d3cLTFx1dTWrVq1CoVAI74SubLa2tiY5OZnV\nq1fz5ptvcuXKFWHWevrpp1mwYAEODg58+OGH4uik82VotVrWr1+Pq6srW7ZsEQY5gHPnztHX1yc8\nC7p4d4lEIrB/t27doqamBmNjYy5evMjU1BSlpaU4ODigVqvZunUrSUlJLFu2DG9vb+Chjb6+vh4L\nCwuRFqbRaOjr6+Ppp59mbGyMrKwszp49S1VVFWNjYwwODuLr6yvgxBMTE6Snp+Pq6iqAO2ZmZtTX\n19Pd3c3IyAhr1qyhpaWF9957j7GxMc6dOyc4kjoWRkpKCjU1NVy9ehVbW1ueeOIJUlNTBZLvV6/H\n/8v1+//kMjY2prS0FLVaLWg+165dE87EtWvX0t7eTlNTE8nJyYyOjopySU9PDxcXF2HfjY2NFWAN\nHb9vdnYWBwcHmpqaMDQ0pKOjAyMjIyHIgYdz+59++onk5GRSUlLIyMigtbUVFxcXMb7T8fPUajVV\nVVVERUWJt68ue8La2pq5uTmMjIwICAjAz8+PgYEBwQ2IiYkRx5SAgAA+++wzgWMzMDDg2LFjXL58\nmZaWFsLCwti7dy8BAQF0dHRQUFAgIDRWVlbs2rULe3t7fHx8KC4uprW1lbfffhtDQ0OKi4s5dOgQ\nTz/9NKGhoXR0dBAQEEBZWRk2Njb88MMPJCUlcfr0aTZt2kR/f78QPh0/fpy7d+9y6dIlgRFbv369\nSFbW19enu7ub4uJi7t27x5UrVzAwMKCjo4OVK1fywQcfEBMTg1ar5fjx41haWmJubs6OHTtYunQp\nK1asYHBwkJUrV3Lr1i309fWFi6+goICWlhZRNgcEBJCXl8dzzz1HW1sb/f39PPXUU+zYsYP09HRu\n3bpFfX09dnZ2ODo6YmRkRHR0NKdPnyY9PV0ssldffZX09HTu3LmDm5sbfX19ODo6EhYWRk9Pj/DS\nmJqaUlFRQUxMDL29vSQlJQkBmo6FqNNUwP+SI+/cuZOysjJWr14tbO86EVhERAQ2NjZIpVK8vLwo\nLS1FoVDw888/ExMTQ2RkJBKJhKKiIoFNe/zxx0lKSsLT05M9e/YIBerMzAydnZ289tprIvpvzZo1\nlJWV8frrr3PlyhX+8Y9/EBwcjKGhIT///DOenp6Cgflrrt/ESDIgIECbnJzM+vXrsbW1xcXFRfjk\nvb29KSsro7OzE2NjYxQKBdbW1sTHx1NQUIBCoaC3txdzc3MWLVpEcnIyLS0tbNy4kfv37zMwMIBE\nIhFiHF3T7dKlS0RERDAxMSFkwlNTU1RUVDA9Pc3Q0BAhISFotVpGRkYIDAzk7NmzvPzyyyxevJi0\ntDT6+voYHx/H1tZWsAPHxsaEYlCr1TI3N4eFhQVnz56lp6eH9PR0CgoKWL16NYaGhmi1Wnp7e9m4\ncSPV1dXcuXOHtLQ0QQzWaDQYGRlRU1ODlZUV7u7uAg4KD4EhDg4OZGVlUVFRQWZmppia6PQdN27c\nYPfu3Vy5coWenh68vLwYHBwkJCQET09Prly5IiY0K1euxN/fn+PHjxMYGIijoyMlJSX8/ve/Z2pq\nikOHDmFtbU1gYCAajYZvv/2WoaEh4uPjmTdvHsPDw5iamgoXn6WlJcPDw4KvsHz5cvLy8rC2tqa7\nuxs7OztWrVpFQUEBTz31FO+//z49PT3ExsZSXV2Nq6urAJYGBgbi7u6OSqWisrKS6OhocnNzhf7g\n8ccfF+Ke69evMzs7i729PU8++SRtbW3cvXtXwHofffRR4dIcGRmhvb0dV1dXPvzwQxITE8V3HxIS\nQm9vL8eOHePNN99ET08PpVKJvb09kZGRfP311yQkJFBUVISVlRXj4+Ps3LmTb775hqSkJPr6+qir\nq0OpVHL//n0cHBwICQkRasT58+cjl8tRq9WMjIwwMjLC888/T0VFBaamptTU1HDu3DmSk5NF7sam\nTZu4dOkSbm5uREVFkZeXR0lJCeHh4UgkEnp7e0lJSRFHMp14bePGjb9qJPmbqBSmp6eZP38+zs7O\nmJmZ0dvby9TUFNHR0UKK6+zszMqVK4mKimJubo7Ozk4GBgZwc3MjJCSEffv2oaenR35+vjAYqVQq\n3Nzc8PDwoKWlhcuXL2NhYUFlZaWIQdNFw01PTyOTyUTVER4ejqWlJU5OTvT29goyL8CaNWswMTFh\nbGyMO3fuYGNjQ2hoKAEBAXR3d6NUKoXKbnh4GJlMRk1NDdHR0aI3oFarycvLE6RkgPPnz6PRaHj7\n7bepra1lbm4OlUpFTU2NwMgDNDU1YWxsLEaLOv7Dli1bhHdhaGiI0dFR+vv7Wb58OQqFQnj3x8bG\nMDMzo7i4mMnJSSwsLDAxMcHBwYHg4GDq6+uxtLTk3r17NDc34+TkRF9fn8iEsLa2Fl6H5ORkIiIi\nRADtpk2bhNfixx9/5NChQ7zzzjsiqv2DDz4Q4BAdbn5wcJCqqirg4fFBZ222srLC1tYWf39/XFxc\nCAoKwsjIiK6uLqampvjll18ECl3HjmxoaEBPT4/Kykrc3d3F6POf//wnBgYGFBcXk5GRwalTp+ju\n7hadent7ezEaDwgIEGa7w4cPC6DPoUOHRHyfrocUHBxMV1cXzs7OYgqgoz7pgK1hYWHExMSwadMm\nJiYmKCgoICcnR9C5Hzx4QFZWlsjuBMTYGx4uap3M/4knnmBmZgZ9fX0GBgZEY9LIyIjAwEA8PDyY\nN28eJ0+eJCEhAZVKJVKjfu31m9Ap6Bx19fX1zM7O0t3dTWZmphgZ6Uo5HRnYzMyM0dFRVqxYQWFh\noYCYSKVSSkpKBCpLR8CVy+XEx8fj5uaGt
bU1wcHBaDQatFqtCAipq6ujuLiY0NBQ7OzsmJmZYXx8\nXBiRrKysxLnXysqKhIQEXF1dsbe3x8vLi46ODqRSKVu3buXw4cNCB6BbIFFRUUIDMW/ePGGKaWpq\nEmnAPj4+5ObmsmfPHjo6Oujq6uKrr75ixYoVNDc3C3Cpubk5K1eu5PLly/z4449ERESQlJQEQFxc\nHA4ODhQVFYmA3o6ODgIDA+nu7mbt2rV4enqSm5vLzMwMR44cITo6mrq6OqKiokSTMDg4GEtLS65d\nu8aOHTtoaWmhuLiYubk5wsPDaW5uxsjICBMTE0JCQjh79qzQayQnJ/Paa68hl8tJTk7G0dGRyspK\nGhoaWLduHQsWLBBvuuDgYFJTUwVg1dfXl5mZGYKDg2lpaeHu3bt4eXkRFBSEoaEh9vb2KBQK5ubm\nsLe3p7m5WdiJ+/v7GR4epqOjg7m5Ofr6+sQ927dvHzt27CA4OJilWXdnAAAgAElEQVTBwUGys7Op\nqqoiNTUVKysrMjIyKCoqEhBef39/Dh06hFqtRiaTIZFI8PPz48GDB2JsDXDnzh0SEhIICAjg9OnT\npKWlMTg4SGhoKFKplOzsbJydnWloaECpVBIbG8vo6Cg7duygsLCQ2dlZSktLWb9+/X9KcWptbcXG\nxob+/n6qq6tFQrqBgQHffvst3d3dNDQ0sHbtWpYuXUpcXBx6enoMDg4KNeadO3cwMzOjpKREvCh/\nzfWbqBSmpqYEdNTIyIj09HRGR0d58OABQUFBzM7OYmBgwMTEBAYGBuzatYslS5YwPT1NamoqO3fu\nxNzcHJVKJYJfARHnZmRkJDBahYWFIv5Ll8zb1tZGR0cH8+bNE+c/JycnkZRkZWWFXC4XDcGSkhLR\nV7CwsCAhIUEs9KqqKtrb26murubMmTM8/fTTtLe38+qrrzIzMyNYg87OzqKc1QEwdKX0wYMHBbt/\n586ddHd3I5VKWbp0KVKplPb2dv76178K7Hh1dTW1tbXEx8cLtmVbWxtyuZyVK1eyYcMGxsbGRHDN\nuXPnaGhoIDg4mNdffx1jY2PR9/jll18E/Sc0NBRvb2/q6+uFfFeHsbO3t0er1YqIv2XLlolqISgo\niIyMDF5++WUsLS3Ztm0bhoaGfPnllwK/lpSUxO7du/nss8945plnxLNQX1+Pi4sLZmZm6OvrExER\nQUdHhwhA0eUx6jwemZmZLFy4kJCQEE6ePEllZSUtLS04ODiQkJAgNCC6OPaoqCj09fUJCQlBKpXS\n398v+gRhYWE0NTVx5swZjh07xvnz5zl06BBTU1NUV1fj7++PRqPBycmJFStWAJCeno5Wq6WhoQEL\nCwv+8Y9/UF9fL87xHR0dDAwMsHfvXh599FF2795NQEAA58+fp729nZKSEmZmZsjPz8fNzU1QrcfH\nx+np6eFPf/qTmEq8+uqr3Lx5E3d3d7Zt28aZM2eYnp7mzp07Im1Mo9EwPT3Nxo0bRfyhp6fn/+9h\nMP/PLwMDA0xMTLCwsBCjKhcXF/bu3Yujo6MoeR955BEuXLhAS0sLn3/+OZaWluIsa2Njw82bN5mc\nnBTThsDAQJKTk2lvbxeNSF2+Q3t7OzKZTESfj4+P09HRIWCjCoWCwsJC9PX1sbe3x93dXSC4QkJC\nKCkpwdbWlpiYGLy9vYmLi+PatWsMDAywY8cO/Pz8+Oijj8TcWpcCffr0aVGib926lbt37wrbcFZW\nlnACmpmZ4ePjg4mJCRqNBisrKxYsWEBDQwMODg709/dTWVmJr68v69evB+Cbb76htbWVlpYW/Pz8\nGBsbo6enB19fX5544gnOnTvHxMQE3t7eSKVSoXGIiori+eefF3p5nX7CzMyM4OBg9u3bh0wm47HH\nHmPVqlUolUqBJMvJycHCwoKUlBSRbJWdnY2Hhwd5eXksXbpUgG1GRkYYHh7GwMCAubk5jhw5QmNj\nI3FxcSxd+jBMbN68eUxMTFBbW0ttba2o+pRKpTCZGRkZ0d/fj0QiwdfXlzNnzmBsbIyxsTFarVZY\nsXV4P3houNIF9Xh4eBARESH8Ivb29hQUFGBtbc34+DgZGRkEBwezbds2PvnkEwoLC9FqtUKf4O3t\nTXBwMPBwolVeXs7k5KQwxenMW/X19SKspaWlBUdHRwoLC+np6WF8fFxg8l966SXhbtRFHvr7+5Ob\nm8ubb75JXV0dV69eJScnB6lUygsvvMCqVauQSqUit9Lb25u7d+8SERFBQkICP/zwA0qlkkcffZSg\noKD/K57Cb+L4oCMBNTU1YWpqKpRp1dXVlJaWsmHDBhQKBW+88QaPPvqosJaePn1aIMlCQkLE2f72\n7dsA/I//8T/YtGkTCQkJwmHX09ODQqEgNDRU3ICqqirKy8vx9PQkPj6eyclJmpubGRkZEeKo69ev\nC6xbTEwMfn5+ZGVl0dPTg76+PmfOnOF3v/sd4+PjDAwMMDs7y9q1a4mMjOTq1au89dZb3L9/Hxsb\nG2pra/Hx8aGhoYHFixdTU/MwkS8uLo7BwUEWL14sMF+2trZs27ZNmHF0b1HdQzQ+Po6rqyseHh40\nNDQQGxtLb28v1tbWZGRkCCpyYWEhr7zyCj09Pdy/f18QgA4ePEhgYCCDg4NIpVKhQ9DX1yc4OJiM\njAzRxKqqqqKurg6tVkt7ezumpqZCv6BTAA4ODtLQ0EBVVRUvvPACJ0+eJCYmhurqanFM020ShoaG\nhIeHY2FhIbQlBw4cICMjg7a2NszMzPjXv/7FM888Q2ZmJiMjI4JHoMPIX7x4keDgYMzNzWltbcXU\n1JTIyEhmZ2cZGRnB3d0deDiCzMnJ4dVXX+Xq1auEhITg4+PDzz//TE1NDc8++yzj4+OUlJSwZ88e\nPD098fb25ujRo+jr66NSqTh9+jR+fn5cv36dJUuWAA83+Z07d3L06FHi4uKoqqrCy8tLvJDu379P\nSUkJU1NTNDY24uXlxdjYGNbW1qhUKvT19WlpaaGiogJHR0dxjIqPj2fx4sUoFAqKi4uZmppCoVAQ\nERFBV1cXs7OzJCQkiPAkHVIuPz8fX19fBgcHyczMFJuETuX7a67fxKYwOztLQUGBiPHWBbNu3LiR\nZ555RnwB3t7eDA4OcvPmTRobG1myZAmenp44Oztz9OhRIiMjUSgUZGRk8Ne//pX4+Hhyc3OFl93O\nzo6Ojg6qqqqEMKi6uho/Pz/6+vrIzMxELpdz//59kpOTefDgAS4uLiLRSUfxkUgkIn0nISGBo0eP\ncuPGDeRyuZA+u7q68tprryGVSnn88ceFXt7KygonJydRgTg6Ogqwhq2trahijI2NycvLw8DAgNHR\nURFzn5GRwblz53Bzc6O7u5s1a9bg5uZGdnY2xsbGXLt2jaSkJKytrcnKysLT05OTJ0/i5+fH559/\nzv79+6mvr8fc3JzIyEhqampobGxEKpVy5coV9uzZw507d4QnwNzcnHnz5qFQKFAoFDg6OmJubo6P\n
jw8tLS1MTEzw8ccfk5ycLMRMjz32GNbW1sJdmJ+fz9zcHKampnh6epKdnc17773H/v37KSws5MCB\nA+JotmjRIqRSKUqlEo1Gwz/+8Q/effddOjs7iYyMFCnLugqvpqaG8vJy5HK5IELX1NSwcOFC3N3d\nBS1qcHAQZ2dnvv/+exQKBY8//jh5eXlkZmYyPDxMd3c3165dIy4uTkymdBbnEydOCPBvSUkJGzZs\nEKQo3Ubi6elJd3c3u3btoqamhtLSUhobG5mcnBTZlS0tLZSWlhIUFCRiDs3Nzblw4QJeXl7CXg/g\n5OTE1atXmZiYYHh4GIlEIqhMv/zyCxs2bGDz5s0sW7aM4OBgVCoV8+fPx9bWFn19fZ5++mnq6+tZ\nsmQJH330kaBP/5rrN3F8GB0dJSAgAKlUKhSGxsbGGBkZ8c0333Do0CFKSkro7e3l888/5+DBg7i4\nuBAfH094eDjV1dVs2rRJyG47OzuBh6TdmJgYJicnsbKyQq1WY25uLiSqo6OjREZG0t/fz44dOxgc\nHCQ/Px9TU1OmpqaYnp7GwcGBwcFBHnnkEfHQGRkZiYBWqVRKTU0NIyMj3Lt3D3d3d4Fqd3Z25sqV\nK3R3d5OdnY27uzstLS3IZDLa2tqoqqrizp07Ipugp6cHT09PTExMMDY2Jjo6mqioKMrKykhKSqKr\nqwtjY2M6Ozu5f/++aCp++umnvPPOO3R3dwstvi4qz8DAAB8fH1JSUnj88ceFv97e3p6Ojg4MDAyY\nnZ2lqKiIkZERnJ2dcXd3p7a2Fnd3dywtLQkODubcuXMsWLBARLLr6NoFBQXs3r0bHx8fVCoVW7Zs\nwd7enujoaFpaWoSFW6fvNzExwcjIiMWLFzMzM4OtrS3GxsYEBgYCD2XEenp6jI2NsWTJEsLCwrh2\n7RqWlpYcPnyYDz/8UPQxlEqliKzX19cXvZqnnnqK48eP09XVJSC+b731Fp6enmzevJn333+fJ598\nEq1WS05ODqtWrUKtVvP6669z9epVzMzMyM7OZmBggN7eXk6dOsW5c+f45ZdfGBwc5OrVq+LI98wz\nz2BtbU1cXBxSqRSpVEpCQgJ6enp0d3cLLqQO6aavry9I4DMzM3R1dREdHY2fnx+FhYWiavzb3/5G\nS0sLLi4urF69moSEBKqrq8nLy2PPnj00Njby2muviXQqFxcXvv32W/r6+vjoo4+4c+cOt2/f5uLF\ni7i6uooX2q+5fhObgg480tPTI2b+iYmJFBcXi9COTZs2CU6Bvb093333HaOjoxw9epTOzk4x+7ex\nscHCwgJ46FHQYbJ1b8eoqChh/tE1MHWzYl0YTWBgoJh66B7ijo4OgQs/duwYCoWCmJgYamtrcXZ2\nZtWqVSQlJWFoaEh8fDxSqZTDhw/zyCOPkJeXJ87wuuQk3Qh0enqaP//5z8BDZadarcbb25udO3ey\nZs0aPD09iYqKIjMzk8WLF9Pf309tbS2bN29m5cqVnDx5EqVSyaJFi5DJHubuREZGotVqmZqaQiKR\nYGpqyvT0NBKJhNHRUe7du0dqairGxsYolUo6OjowNzdn8eLFmJub09LSgp6eHubm5sjlcg4cOICH\nhweNjY34+vrS2dmJRqMRBOn+/n6Ghoaor6/nz3/+MydPnkStVtPf34+Tk5Mob3VR7ikpKdy6dQuZ\nTEZERASrVq0SjbDOzk7a2tpIS0vDwsJCZC/oqpeFCxcKjoJCocDb21voDUJCQmhubmZycpIlS5b8\np57Cxx9/LFyQe/fu5ZVXXqGkpITIyEhxdDl//jxpaWkYGhoSGRmJh4eHAMw+++yz4ns1NTUVjUa5\nXM6DBw/o7+9ncHCQvr4+7t27h4uLC+fOnROhLM3NzXzxxRdismNjY0NkZKSYEimVSuLj48Uz1tfX\nJyZely9fpr+/X2SXnDt3ju+++47r168LL4SLiwvr1q0jJiaGjRs3cuPGDWZnZ2lqakKtVotx9q+5\nfjPHhwcPHjAzM0NRURHwcEEnJyfT2tqKRCLh0KFDNDY28uc//1koxs6dOyduvoWFBSMjI9y9e1fM\neq2srESZFx0dzejoqBgx6hJ5dDu+zpij0WioqKggIiKCxMREETmvowDBQ8R7eno6Bw4cID09HaVS\nydDQEKampri7uxMdHQ0gFH+urq60tbWJTae+vp6enh4aGhrEWwqgubmZBQsW0NbWxvHjx+nu7hbY\n7vLycpYsWUJhYSGPP/44KpWKzZs34+3tjY2NjYCJ3r17l6qqKjHa05WnOteljhf4wgsvkJGRIRKu\nrl69yvDwMCUlJRgZGTF//nxBbfrwww/Zs2cPDg4O2NnZYWBgQGVlJW+88Qbvvvsufn5+/PTTTzg5\nOYlF+thjj/H2229z8eJFJBIJL774Ilu3buXWrVtcuXKF9PR0bG1tuXr1Ku3t7cK49ODBA9avX09d\nXR33798nLCwMJycn1Go1K1eupKioiNzcXJECrYuf6+3tpb+/n0WLFlFYWIhKpRJkb4Bt27ZRWlpK\nQ0OD8GqkpqZy7NgxQkNDiYqKYseOHbz00kuoVCpcXV1paWmht7eXqKgoli9fzvfff4+HhwcmJia8\n9dZbwMNNITU1VcBRR0ZGiIuLIycnh48//hh/f3/Mzc2Ji4ujqKhIpJz95S9/4dq1azg4OJCSkiKO\ngLojQkxMjMAGuLu7Mz09TX9/P+vWraOhoQFvb2/Rp2lpaWHnzp0C9mtnZydCdp2cnITa9Ndev4lK\nQUfs1T0AFhYW2NvbY2RkxJEjR7h06ZJIa5bJZAwNDVFWVkZCQgINDQ089thjuLu7k5OTQ3BwsBAZ\nGRoaYmFhQVxcnLBM6+vri5LTwMCAwcFBDAwMkMlkqNVqioqKyMjIwMTEBIVCgUwmE00gnXehpaUF\niUSCiYkJ7u7ujI+P4+/vz2OPPUZra6uQ6uoMVaGhobS1tVFbWyuwa5OTkyxcuJDi4mIRG+fv788v\nv/xCX18f/f39VFVV4ebmhrGxMTU1NdTX13Pnzh3Kysr49ttvWbNmDV1dXURERLB06VJSUlIIDAzE\n1tYWS0tLVCoVCoWCjRs3MjAwIJR7kZGRvPHGGyKq3cLCgszMTFxcXIQQJyIiQhyJ0tLS0NPTQ6VS\nCR9/amoqn3/+OZ2dnfz973/HysoKHx8ffH19mZqaYv/+/ZSWloqq7ejRo/z5z38mNzdXdOKrqqrY\nuHEjq1atEi6+F154gTNnziCTyUhLS+PBgweo1WrCwsLo7e3FysoKDw8PwRjQaDQ0NzeTlpaGiYkJ\nrq6uQpKsq7wAKir+J3tvHhXlff99v4Z1YNj3RYZddhBEAQVEUBQVjUaNSUxibGo20zRpm9g0ada2\nSWyTpm1MahKtWzQaNe4CKiCyb7LJvi/Dvg77wNx/2Ov7tOf8zl3v3/Occ+d+zn2d4x8OMAMz1/W9\nvp/P5/1+ve8KTYaRkRFeXl54enoSEBDA8PCwuNjq6+vZs
2cPSUlJYuR64MABtm/fzr1794R+QMor\n9fT0JDc3l+bmZsGBUKvVwlw1ODgo2KKSy3diYkLg40JDQ/nmm2/IyMhAR0dHlKjR0dHMz8/T2NjI\nL37xC1pbW8XiK/Vburu72bZtGx988AGhoaGYmppy69YtCgsLmZ6eRkdHBzc3NxYtWsRLL730wNfj\njwLH9sUXX7yza9culEol7e3tVFRU8Pzzz4twUX19fTQaDevWrRMcxt7eXuzs7FixYgUTExMoFAoc\nHBxobGwE4MqVK+zcuROZTMbFixepq6sTrsa6ujoGBwfFdk1iE0jONisrK/T19enu7qa1tVXgs3V0\ndPjqq69ISEgQtXhubi4dHR1CWHThwgXa2tqYmZkhMDCQ0dFRkfLc2dmJVqtl69at5OXlsWPHDsbG\nxpicnOTatWusWrWK/v5+oqKiRBydFIgTHBwsUF2AkGdv3rxZoOGKiorQ0dFBLpfT3t6Oq6sr+fn5\nGBsb4+bmxsGDBzEzM6O5uRlnZ2cRypKTk0NfXx/t7e04ODgIfoAkgrGxsRGQkMbGRiorK5HL5Vy9\nepUFCxYQHx9PQUEBvr6+vPHGG8zNzeHg4CDi4qanp/H19eWtt94iODgYU1NT+vr6SE5OJjU1ldnZ\nWc6ePUt5eTnu7u7Y2dmJ+b1kDW5pacHQ0JC6ujpRfhkYGFBdXS1gMDU1NRw9ehQ/Pz8UCgVWVlYM\nDQ1x8eJFFi9ejFwuF3BbS0tLBgcHRbTAxMQE7u7uDA0NoaurS319PQsWLEAul7NixQqCg4MxMDDA\n3NycsbExVq5cyZEjR9i5cyeDg4MiCqC6uprBwUEiIyPJyMggJCSEoaEhKisrhUJVCthxcXFhenoa\nuJ/p4OnpKQAprq6uGBoa8uKLL+Lg4IBMJqOlpUWUnZJMW8K99/T0YGNjI+AwpaWl/OQnPyE3N5fy\n8nLMzc05evTo/zm5DzMzMwwODlJTU4O3tzcuLi78+te/FtHv5ubmeHl5UVFRwUcffURQUJDAs58+\nfVqMxC5fvkxAQIBo3EkNloULF+Lp6Sms1VKT8ty5c2RlZVFTU0N+fj4qlYry8nLhje/s7BRMP319\nfREwOzk5yZkzZ6isrGR+fp65uTmioqL+jXfo4eEhTDe5ubnEx8eze/duPD09hd++vLycRYsWiWZY\nQUEBDz30EH/5y18oLS3lpZdeIiAgQESUd3R0sGXLFtra2ggPD2d8fJyvv/6a1atX4+PjI0aLUoSY\nu7s7np6ezMzMkJeXx4oVK5ienhYJxf7+/vj5+WFjY4PkPTEwMKCvr09kb9y+fZtly5aRmZmJubk5\ne/fuZX5+nt7eXuLj40Wc3IoVK6itreX8+fNs2rQJOzs7jI2N+fjjj/nggw9IT0/nhRdewMfHBwsL\nC6anp1Gr1ZSWltLa2sorr7wCIIC2YWFh5ObmCtiKn58fERERBAYG8tZbb7F9+3Z2795NdHQ01dXV\nVFdXixBXV1dXsatwcHAAwMXFRUiPBwYG0NXVJT8/n2XLlhEdHU1ERAS/+c1vyM7OZmxsTNC+JRv7\n5cuXKS4uZnJyEgsLCyFNv379Ok8++aRgdejq6opG9Pj4OPv37+fEiRN0dXVx+PBh5ubmhD5k0aJF\nODg4MDc3R0hICFZWVoKtYWVlxfz8PJ999hlubm60trYSHh5OcHAwr776KiYmJvT09ODn58fMzAz1\n9fW0t7cLv83ixYs5cOCA4HcUFhY+8PX4o1gUJOnsI488QnNzMzMzM4SHhwtoZltbGykpKURGRlJU\nVERRURETExOcPHkSrVZLVlYWExMTBAYGUllZiVKpBO5vGWdnZ9FoNLi4uDA6OopSqWRyclJg3KUA\nD0moExUVJUI4XF1dcXBwwM/Pj4qKCpH7IGUiBAYGsmnTJuLi4hgaGhKAV8moY2BggFwuJzo6momJ\nCXJzc3nqqafo6OjA29sbHx8fLl++LKzR7u7uXL9+nfDwcOzs7MjMzGRiYuLfpiqS3FVyBAYGBqKv\nr8/Vq1epqKhAT0+PpUuXcvbsWRQKBaGhoejo6IiuuIGBAY6OjvzpT3/i17/+Na6urixYsEDg2wMC\nAgSkVqVSoVAoyMvL47XXXqO1tZW3334bJycnFi1axNjYGG+99Rbh4eEYGxvz0EMPcf78eW7evMmh\nQ4dwd3cnOzubP/3pT2zduhW4720ICgrC1dWViooKPv74YywsLMQOaG5uDlNTUyYnJ9m4cSPFxcWk\npqYSFRVFVVUVo6OjovF26NAhjIyMiIqKwtvbW5RG0pi5t7eXjIwM4P5is2/fPtFIlbBuFy5coK6u\njubmZn7961/T1tZGaGgoQ0NDfPLJJ/ziF79gxYoVeHh4sGfPHqanp8nIyBBlia2tLceOHePcuXOY\nmJjg7OwsPCe+vr74+/szOztLTEwML730Emq1WjRTq6urOXfuHL6+vgwMDJCRkSG0MFIE4JIlS7hx\n4waJiYkClHvmzBmxE25ububjjz/G29tblEuzs7O0tLSwc+dOCgsLRW/qQY8fxaIgKeG+//574cEv\nKysjJiaG4eFhysvLWb58OePj45w+fVrEpmm1Wnx9fcnOzhZKs6CgIC5dugSAsbExzs7O+Pj4cO/e\nPYyMjGhqasLLy4vi4mIaGhrw9fWloqKCjo4OQQWanp7mscceo6enh/7+foFul8Aa5eXlIujTysqK\nW7duoVQq8fX15c6dO+Tn5/P4449z9+5djI2N6enp4e7du0RGRnLr1i309fVxc3PjxIkTtLW18eWX\nXwIIQ83k5KRwU168eJGbN29y8+ZNoqOj0dPT49lnn2XZsmU0NDSwZ88eRkdHRQ7GwMAAPT09bN68\nmZ6eHrFI+vr6kpKSgkwmIywsjMcee4y1a9eSkZEh/nbppD558iQHDhxAqVTy1FNPYWxszLVr1/D3\n90epVOLk5CRyEfbs2cPAwAC1tbX88Y9/JDU1FSMjIx577DEuXrxIQ0MDNTU1mJqa0tTURGFhIRcu\nXGBychIjIyNu3ryJjY2N2IU1NDSQlpbG5OQkbW1tojcwODjI6Ogo4eHhDA4O0tvbi7W1Nc3NzaKM\nk/D/krhMojHDfVzb119/jVKp5I033mDPnj2sX7+ehx9+WEy4MjIy2LFjB3Nzc7S0tLBr1y7efPNN\nDhw4IAAugYGBLFmyRBiipFCbV155Rahc5+fnycjIEJRpOzs7ampqsLOzw9bWlgULFqBSqRgbG8PL\ny0vsHGtqasQu98aNG2zdupW6ujq+//579PX1efnllzl//jyDg4MEBAQwMjLCl19+yb59+6isrBQl\nkLRjcXJyElMsSYH5IMePoqfwt7/97Z2IiAjef/99ZDKZCPY0MTFBT08PHR0dLC0tuXLlCvb29gwP\nD3Pnzh3BVAgNDeX48eNotVo6Ojqws7MjKysLf39/amtr6enpwcHBgdTUVJRKJQcPHqSoqAhXV1e6\nu7vJyMjAysqK
7Oxs2traKC8vF667s2fP0tnZiY6ODvv37xc4MOnOdu7cOQICAujr6xNkn6mpKbKy\nsnB1dRXCqbm5OXJycjAyMqKjowN3d3emp6dFs+v69euEhYVhamrKL3/5SzIyMgSPQaIkWVpais6z\nJIGWFIZqtZro6GiGhoZISkrC0dGRU6dOoVar0Wq13Lx5k9OnT7Nu3Tpu3brFxMQE1tbWFBUV8fDD\nD1NbW8vQ0BDp6emEhIQQHBzM5cuXuXHjBjdv3uSVV15BJpNx/fp14uLiMDU1pbi4GHNzc+bm5ujr\n62NiYoL169dz/vx5kZkhqfA++eQT3NzchDBrdnaWkZERKioqGBgYoLW1lbKyMtatW4eDgwOrV69G\noVBw4cIFjI2NqaiowMXFBS8vL4aGhjh8+DD29vaCU1BaWkpKSgpxcXH09vZSWVmJubk5S5Ys4eDB\ng0RGRhIeHi5EWSkpKUxPTxMUFIS/vz+nTp0SdnOVSoVarRb5jm1tbRgaGjI5OSkmCJJ9Ozw8XOSS\nqlQqNBoNY2NjREdH09LSwj/+8Q+cnZ2FN6Ojo4Pe3l7q6upwc3MTbs78/HyRPpWSkkJAQACZmZnY\n2tqycOFC3N3duXz5MsuWLePvf/87cXFxzM7Okp6ezvj4OB0dHZw4cYLR0VF8fHxoaWmhpKQEGxsb\nDh06hFwu59q1a//f9BRkMtkhmUzWK5PJKv/lsXdkMlmnTCa7+89/6/7la7+WyWQNMpmsViaTrXmQ\nRUEyPz3xxBMsWLCA6OhowT+sq6tj5cqVQtE3MzODjY0Ne/bsISYmhoyMDFpaWoiKimJsbOzfSEVV\nVVU89thjWFhY8N133yGTyRgYGBAS1Lq6OsEe+NedgJubm5hGSH2J1NRUtm/fLr7u4uKCp6enqJ0V\nCgVBQUGoVCoaGhowNDQUCdJScKlCoWB8fBxra2vy8/MxNzenp6dHdJzNzc3ZtGkT//jHP0TNOjs7\ny6JFi7hz5w7ff/89np6eTExMcPHiRdzc3LCxsSE6OhqtVhodFEUAACAASURBVEtubi5hYWEUFhby\n7bff8swzz/DFF18wOTkpsgjv3r3L2NgYxsbGeHh4EBsbS319PT09PbS0tBAaGsqJEyfo7Oxk7dq1\nrFu3jvXr15OWloabmxtZWVl0dHSQnZ1NSUkJAwMDqNVq0URLTU1l7969ODo6ivHb5OQk4eHhBAQE\n4OPjI6YFra2tJCYmCi4AwNjYGPb29lRVVZGRkcFPf/pTIiIi6O/vp7u7m8HBQTQaDYcOHWJyclLk\nT0xNTQmSs7TzrKysFESqmJgYxsfHcXJyIi8vj4GBAXx9famvr6epqYmOjg4B8RkZGRHnRnp6Ok1N\nTURFRTE3N8fq1avx8/MTugpJV6Crq8uaNWtYuHAhkZGRaLVaPDw8eOutt5iYmKCmpobnn38eT09P\nNm3aRGBgIDKZjNWrVzMxMUFERAQXLlwQ/pqysjLWrl0rmol//etfGRgYYHBwkOvXr1NeXs7s7KwI\ntRkdHSUoKIj5+XmRueHj48PRo0d54YUX/pdSpx+kfPgHsPa/ePxTrVa76J//rgLIZDJ/YAcQ8M+f\nOSCTyXT/0wtI8ViSg6+jo4Pi4mJaWlpYv349paWlODk5sXHjRkJCQrCxsaGzsxNLS0u+/fZbQkND\nUSgU6Ojo4OXlJRBcZmZmXLlyhdzcXPz8/ASrURoZ+fv709PTw8TEBGFhYfj6+jI+Pk5mZiYFBQVc\nunRJRLu7uLiIJlBRURHx8fEC+759+3YsLS3FKHDz5s1ER0cLW6zEhmhtbUWhUIi8x9raWhITE4VZ\nxcPDg9TUVEJCQnjkkUfQarUMDAzQ39+PhYWFwMLNzMwQFxdHWloa9fX1ZGZmCgCNvb09ExMT7Nq1\ni+npaV588UXUarUQe7W1tbF7926OHz9OcXGxIACbmJjg5eWFn58fK1aswMTEhKqqKgEfsbCwEPLk\n9PR08XO1tbXk5eUhl8txdHQkKSmJrq4u5ufnKSgooLOzU2yJS0tL+fOf/4yTk5PgWrS2thIREfFv\nhh09PT1sbGwICQkhLy8PExMTESkopS59/vnnIkNSkgHb29vj7+9PXV0dTU1NgqANkJOTw9zcHOXl\n5YyOjlJSUkJKSgrffvstH3zwgXi9vr4+DA0N0dXVxdnZGQsLCwIDAzl37pwIcykrKxMjSalH0tHR\ngY6ODqWlpVy/fp2ysjJaWlrIz89n7dq12NjYcPbsWVQqFY2NjZSVlSGXy6mpqaGiogILCwtcXFzE\njUmSX5ubm1NbW4tGoyEgIIA///nPNDQ04O3tLTweLS0t+Pj48Nlnn1FQUIBaraa4uBhvb28SEhI4\nffo069evf4BL/f7xHxcFrVZ7Gxh8wOfbBJzSarXTWq22GWgAlv6nH5I64lZWVuzYsUNk4IWEhFBZ\nWUlPT4+w6ra1tfHmm28KVd3XX3+NSqUScJTe3l6SkpIAiIuLY2ZmRlB7KioqSE9Pp7e3l9LSUubm\n5piYmBDY7KCgILRaLbGxsWzbtg1XV1cBVdXV1RUeent7ey5duiR2DJ999hmNjY2Mjo6yZMkSUlJS\n6OzsJC8vT4BMe3p62LFjBwEBAbS1tVFVVUV8fDx//etfxWvU1dWhVCqZm5ujqalJqDfHx8eJiIig\nt7eX3t5eKioq6O3tRaFQkJycTHNzM/r6+vzkJz/hzJkz2Nvbo6enJ7rofn5+9PX1ERQUJGg+J06c\nEMlL169fp7m5mfDwcD799FMqKir44x//yOXLl0Wjs729nW3btqGjo0N8fDwajYaamhp+85vfiJRs\nR0dHfvjhB44ePYqxsbFIltJqtZSXl6Onp4eFhQV6enq8+eabODs709TUxKeffioEZ1FRUYyMjAiZ\nsFQaSJh9SXOyY8cOCgoKUKlUfPjhh0KuLl3IkjW7pKREOo+5dOmS4D3a29vj5eXFrl278PT0JCgo\niM7OTrq6ujAxMeH999/Hy8sLZ2dnIiMj2bt3r6BMy+VyAYQ1Njamrq4OAwMDCgsLCQkJEWPM9vZ2\n2traePbZZ0lLS8Pf35+EhATs7e155plnMDMzY/v27YSEhIg8jrGxMeD+5ExKiJJEXFNTU3z33Xfi\nvbaxsSE5OVnEDh46dIh9+/axcuVKIbzKysrC0dGRioqKB7yE/981Gl+SyWTl/ywvLP/5mDPQ/i/f\n0/HPx/6nx/T0NGvXriU6OprOzk5qamro7u7G0NBQmEckvsDY2BiWlpYMDw8TEREh+IXu7u50d3cL\n2SggehGTk5NkZmZiZGSEpaUlhoaGREREYG5uTmhoKEZGRrS1tXH48GGBkFepVMzPz9PU1MSKFStw\ndXUVHMGGhgbkcrnwD0hotaGhIXJycpDL5bS2tjI4OCg++Lm5OQHBKCsr46uvvhJb2O7ubgB8fHy4\ncOGCyKjU19enqakJlUpFbW0tW7dupbOzU4iShoaGOHXqlHD0SeE3CoWCW
7duUVpailwuZ2RkhEOH\nDv3bLP7zzz/n2WefZX5+XvRpTp8+Le7ib775Jo888gjj4+NYWVmJdO6MjAwuX76MRqOhr68PMzMz\nvL29BdotJCSEffv2UV1djbm5OU8//TSPPPIIq1ev5sknnxSjS2kqYm5uTnh4uPjMOjo6mJ+fp7m5\nmU2bNhEQEIBCoaC2thaVSoVMJsPOzo6TJ09SVVVFWVkZx44dY2RkBE9PTwwNDUlMTESpVJKVlYWX\nl5c4x9zc3DA1NcXMzIyVK1dSVFTE4OAgoaGhlJeXC23ExMQEX3/9tSgbkpOTWbBgAUlJScTFxREY\nGIiPjw9wf0qwZs0aEhMT6ejooLKykh07dmBhYYGVlRU3b97EwMCANWvWkJCQIOTTRkZGeHp6UlhY\nKCLh2tvbhRw5ODhYTNsWL15MV1eXAMlKnIajR4+SmJjIDz/8IHoJFhYWnDt3jqSkJMrKypidnSU6\nOhq1Wv3AF/Z/d1H4AvAAFgEq4E//q08gk8n2yGSyIplMVjQ2Nsbo6CgpKSnExMTg7u6OhYUFPj4+\nYtuo1Wppbm7m6tWr7Nmzh0WLFlFYWEhxcTH6+voi1XnNmjUC7VVdXQ3A8uXLMTMzY3p6mjVr1jA6\nOoqpqSmlpaWkp6dz7do1rK2tCQkJEXmE0kjL2tqamzdvsnbtWlJTU4H7db7U3Ovq6qKvr49NmzYR\nEhLCqlWrSExMxM3NjZ///OcYGRlx9uxZRkZGuHLlCsXFxSiVSh5++GHRFN25cydwX/e/b98+cnJy\nxPjNysqKjRs3CgFRT0+PSKKWLiBra2va2tqwsbERuHRXV1dBcJaSgiRRTHp6ukhQrqurIyMjgzfe\neEOEsDQ0NPDss89SXl7OhQsXWLduHfPz87z66qu89tprGBoasnLlSh5//HFycnLIzc0Vac6Sa9Xa\n2hofHx9Bfnr55ZdZsmQJv/nNb8jKyqK2tpaoqCjh9bh8+TJw/yIzMzP7N7TZvXv30NXVxcXFBUtL\nS7Kysli9ejX79u3Dx8dH3BBCQ0N57LHHkMlkgmsgyYaVSiXr168nJycHjUbDn/70J4qKigRC3cPD\ng5mZGRHzbmJiQltbGwcPHqSlpYXR0VHR2JMo43DfzahWqzlz5owgdFdVVZGdnU1KSgpPPPEETz/9\nNAYGBhQUFPDFF19w+fJl2tvbee+99+jv70elUlFcXIxarUalUgFw8+ZNSktLeeyxx8QiIpWKVVVV\noqdy7949cdPR19dHoVDg7OxMSUkJBgYGrF27lm+++UboNR7k+G8tClqttker1c5ptdp54Cv+nxKh\nE3D5l29d8M/H/qvnOKjVasO1Wm24vb09vr6+IsbM1NSUgYEBrly5wrVr15DL5YSHh2NoaCgwXcXF\nxRw4cIDy8nKBr7awsGBoaEiQl/z8/NDR0eH8+fNCO97X14ebmxuFhYXY2tri5eUlLpwFCxZQU1OD\nTCbjypUrbN++HUNDQzZv3izQVnBfT2Bubi6MU56envT391NQUEBaWhpnz55FR0eH27dvi62qtbU1\nmzdvFspJSY8RHBxMf38/cF/uPTIygoWFBWvXruX27dsiTUhXV5e7d+8Ku+zjjz/Ob3/7W9avXy96\nCUqlkt/+9rcYGRnR29vL1NQUV69epba2ltLSUiYmJjh37hxTU1OYmZnh5OTEr371K5KSkjhw4AA5\nOTmcOnUKNzc3Vq1aRUNDAy+//LIYn3311VeoVCqCg4NxdHTk5MmTKJVK9u3bx5UrV4SH4vjx44Lz\nING5L1y4wF/+8hcqKytJSEgQHoypqSkaGhrEfB7uqzUDAgJwcHCgoqICX19fvLy8OHDgAAsXLhRy\nYZVKRXh4ODdv3iQoKEjYqjUajSjLzp49C0BxcTFpaWksXXr/VE1KSmLlypVMTk6SlJSESqVCq9WS\nlpZGRESEMLb19vZy6NAhXn75ZY4cOcLY2BhWVlbCifv73/9e9D1MTEzIysoS4URLliwRO9OsrCx6\ne3tFtsn69evZuHEjarWa119/nZKSEnp7e0lISABg3bp1wq2qUCiYmpoiMTGRpKQk3n77bb777jtS\nUlIYHR3F3t6eiIgIPvzwQ5GedebMGVpbW8nMzBSl5oMe/61FQSaTOf7LfzcD0mTiIrBDJpMZymQy\nd8AbKPhPz6fRaCgqKsLX15e8vDy8vLxYsGABs7Oz5OXlYWtry61btwSksru7mw0bNvDiiy/yyCOP\nYGNjg6enJ3p6eiL8Be43lyT/up+fH6ampnR1dZGbm0tXVxcJCQkolUpUKhWXL1+mq6tLRKk5OjpS\nVFREfX29uGNLry8RkaWosMLCQpGGVFRUxPbt24mNjWV4eJgbN25gamrK8uXLOXLkCF1dXVRWVoqk\n6L/+9a8Cvx0TEyMcfyYmJjg5OSGXy7l48SJ/+9vfxPb9iy++EFqMyMhIkpOTCQoKIi8vj1u3btHV\n1UVZWRlDQ0MsWbIEBwcHEhMTee6559i4cSMvv/wyixYtEqQrS0tLli5dyt69e3n22WeZnZ2ltrYW\nHx8f4uLiRIbltWvX8PPzo7+/n8OHD/PMM89w+/Ztcec3NjYW8BNfX1+h8UhLS6OlpUUs0FIIiqur\nK76+vgQHB4uJ0cWLF1Gr1TQ2NqKvry/Ma5mZmbi6unLkyBFR24eEhHD69Gns7OwYGhri22+/JTs7\nW0i89fT0xIIrGeby8/PRaDTCzzEwMEBeXh4ODg5CMi3J1u/cuSOIVC4uLgwNDREeHs7q1atZu/Z+\n7/3ZZ5+lqamJJ598ktHRUQICAkhOTqazs5MVK1YwPz8vGp4qlYqqqioaGxvp6ekhOjoalUpFdnY2\nq1evFhHzAF9//bWQ41+4cIGgoCDu3btHR0cH7e3tnD9/XiSrS9AdX19fYUv38fERcvLFixcL1eyD\nHA8ykjwJ5AI+MpmsQyaT/QT4WCaTVchksnJgJfAKgFarrQJOA/eA68CLWq127j+9hhQ4IsVu9ff3\nk5+fz/T0NBqNRsS8qdVqPDw8kMvl1NbWMjIywuLFi9FqtYJuK0WAwf0R39TUFP39/WLBaGtrIz4+\nnq1btwqYx6pVqzAxMRFpPxKDsLGxkTfffFPkLUqHVqslNDQULy8vTE1NaWtrY2hoiOPHj6NSqdDT\n00NPTw87Ozvq6+sZGhrCxMREbFEff/xxrKysuH37Ns8//7zovA8MDAjisq2tLaOjo9jY2AjrbktL\nC21tbejp6fHVV18xNzdHZ2enmKjs2rWLlStX4u3tjVwup6OjAy8vLywsLBgfHycnJ4fW1lZKSkoI\nDw/HxMQErVZLRUUF3t7eFBUVCXyblHegUqkoLCwkMzMTT09PNBoNvr6+WFpacujQIUJDQ3F1dRUG\nKisrKxQKBa2trczMzHDv3j12796No6Mjnp6eIqxny5YtaDQa5ufnycnJEe7YmJgYFi9eTH9/P7m5\nuczMzBAWFoaBgQGurq74+fnh
6emJvr4+o6OjokHs4OAg8iUbGxsJDAzkD3/4A8nJyQC0t7fj6OjI\nc889R01NDePj44yPj3Pu3DmKi4sJDQ1lw4YNIubtgw8+QK1WMzw8LPiN8fHxaLVajh07Jsqd4uJi\nPDw8+PTTT0XDWMp1vHTpEoWFhXh6eiKTyUhNTRVW6vz8fA4dOoSjoyPu7u6MjY3h5+eHh4cHAK++\n+qp4D42MjDhy5Ajnz5/n1KlTWFlZkZmZyfDwMCdPnhQu3a6uLkZGRsR7ZWJiwtKlS0Vw84Me/9E6\nrdVqH/0vHv7mf/L9vwN+98C/AYh569jYGLt27SI7O5vIyEhUKhWOjo4i60GynzY0NJCVlYWfnx+F\nhYUMDQ3h4uIipMHSzHtubo7o6Giys7Opq6sTiTrt7e2EhoaSkpIimAH+/v6CRFxQUICTk5PgLZSW\nltLU1MRLL73E/v37Wbx4MZ999ploboWEhNDe3k5ycjJNTU089dRTREREsGfPHhwdHdHT0+Pq1asM\nDw8L95udnR2bN28WeZlw/y6pUChISkqit7eXXbt2MTg4iIWFBXv37uXtt98WSsru7m4uXLjA9u3b\nMTY2xt/fn4aGBsrKyqiurhbaCxsbG8rKyuju7mbZsmXCIBQfHy/CXyMiIti7dy8+Pj7MzMyIbnhE\nRAT37t1DX18fd3d3jh49KlKkXFxc6Ozs5MqVK3h5eeHv7y+k2NPT09jZ2YnG8MjICNXV1bi4uPCz\nn/2MhoYGQWq2tbXFx8cHc3NzMbWxtLSks7NTsCxKSkrQ0dERCkDJdOTo6CgEW8eOHcPV1RWFQsHQ\n0BDW1tZCBQuIxOn6+nri4uJYsGABn3/+ucj7UCgUjI2NiZAZiS4+Pj5OZWUlO3fu5Pjx4zg5OTE2\nNoaTkxMArq6uzM/P4+LiIkJuMzIy6O7upq6ujtDQUGZmZujv7ycyMhJvb2/R2C0uLkZHR4c9e/aw\nbds2mpubRd8qLS0NX19fUlNTMTAwEKEzCQkJTE5Osm3bNnp6eli6dCkTExNERUXR3NwsEqu6u7tZ\nsWIF1tbWdHZ2iinMgxw/Cpmzra0tJ0+eZM+ePfztb39jyZIlYkYrk8m4du0aS5YsEY2U4uJioqOj\nReCmBGYpKChgy5YtouOso6MjAmB7e3sFhMTd3R1ra2uqq6uZm5tjcnJSCEOkSDNLS0tcXFxobm5m\n0aJFPPXUU3z11VfAfYGNNC4cHBwUKU0hISGkpqaydetW2tvbmZiYEMISb29vXnzxRaamphgdHSUu\nLo6CggLa29sFS0ASsdy7d4/p6WkGBwfR19cnICCAHTt2iDyMTz/9lLGxMWZnZ7l69SoajUZMSZYt\nW4axsTH6+vrExcXR398vxmpvvPEGv/vd74TMecOGDWzduhWZTIa1tbXAz4eHh+Pm5oauri4vvvii\nyPH8+c9/LmTUmZmZAMTHx4vXrKiooLa2lrCwMIqLi8nOzmZ6eppVq1YJEnV3dzeXLl0iJyeHyspK\n+vv7/+2zmZ+fF/kTEtg0ICAApVJJUlISbW1trFq1iu3bt6NUKkX/SNKQjIyMYGJigqurK1ZWVoJM\nnZaWhoeHBxqNhv7+fhHz1t3dTUJCAseOHcPf35/jx49TVlZGTU0Nhw8fpri4GFtbW5YsWUJPTw9t\nbW3o6+uL/pJE13JwcMDX15cFCxbg7++Ph4cHCoWCzs5OGhoaSEpKws7OjsbGRnp7e6murua1114j\nJiYGa2trmpqaOHToEBs3bhR/R3d3N8HBwSxdupSsrCwRQJyZmYlarRZA3crKSrq7u9FoNExMTKBS\nqUhISCAjI0OMqH/6058+8PX4o1gUJiYm+N3vfkd7eztPPvkkAwMDjI2NMTU1JZSDErtQksK2tbUJ\ne+74+DgZGRmCMPTaa68B94GwERERDA4OEhcXR3BwMIsWLSI6OppFixYRGxvLhQsXWL9+vejqL1++\nHJlMxuTkJE1NTaSnp6NQKKirqxM9BVNTU2ZnZ0lISMDS0pK2tjZcXV2Zm5vD2dmZxx9/nE8++YSe\nnh5++ctfMj09LRgA/xq2IiUnSydufHw8crkcLy8v0es4cuQIU1NTYm797bff0tnZSUFBAUqlkuDg\nYOzt7WloaBDBNDExMVRVVYn4OYnw/P777/Pkk09SXV1NbGwsAQEBrF69msjISLZt28bOnTuJj4+n\nuLiYrVu3YmBgwPXr1xkaGmLXrl0cOXJERLoFBwezfPlyqqqqOHLkiGBCuLi44O7ujpubGw4ODixY\nsIDe3l78/Px45plnmJiYwM/Pj7S0NBQKBXK5XHA14b6FuLm5WbgzJfOQl5cX5ubmRERE8MYbb3D9\n+nVSUlIARGddcg0aGxujp6fHwoULxQRKpVLR3d3NwMDAv5mNPDw8uHbtGmVlZWRmZuLv709qaiqH\nDx9my5YtjI6OEhMTQ2ZmJl5eXqjVaqampsQi5ubmxquvvkp9fT3Lli0TY1MdHR127NiBs7MzoaGh\nODo6Mjw8THp6OtXV1ZiZmWFhYYGFhQUZGRn09fWxYcMGMX2QWBuzs7OUlZWxYcMGNmzYgIODA2q1\nmrGxMRITE/n8888pKSnhrbfeYuHChTg5ObF8+XJ6e3sZGxtjzZo1nDp1ir6+vge+Hn8Ui4Kuri6n\nT58mKyuL7u5urly5gomJiXAxKpVKPDw8BE2nvLxcsOs8PDwICAhg6dKlyOVyjhw5InIEli9fzuTk\nJN3d3YJrWF9fz/z8PO+++y6dnZ08+uijlJSUoNFo8PT05MSJE/zlL38RKHJDQ0P6+/uZm5sjLy8P\ngLy8PKFjUKlU1NfX09XVRU1NDZ9//jnm5uZkZ2fzzTffEBQUhLOzM15eXrz//vusWLECGxsbbty4\nwblz5xgcHBSiKCnIpK2tTYzyEhMT6e7uFnXnzMwMvb29lJWVUVhYyMmTJ8nPzyc4OFiMrTo6Oli6\ndCkeHh7U1NRgb2/PzZs3aW9vp6WlhYCAAKKjowE4fPgw+/fvR6PR4Orqip2dnQhsjYyMFAnL0vvj\n5OSEkZER/f391NbWsm3bNuRyuYDburu7o9Fo6OnpESBeMzMzITmWiEm+vr4EBQXh7e0t+j1wP9F7\nYGCA5cuXMzExgaurK2FhYVy8eJGf/exndHV1kZWVJbboBw4cID09HXNzc5577jlu374ttuYajUaI\njHx8fJibm2N4ePjftv4BAQGizg8MDKSvr4/du3eLLA8TExNeeuklPvroI3R1dXn11VeF+xXg7Nmz\n3L59W+RcREVF4ezsTEBAAG5ubiQkJBAfHy+UuG+//TavvPIKNjY2HD16FLVazZYtWygvL0dfX1+o\ncWdnZ/nVr36FiYkJwcHBtLS0CGt0UlIS09PTXLlyhddee42VK1eydetWli1bhlqtxtDQkE2bNnHr\n1i3s7OzYsmULP/zwwwNfjz+KRUGj0eDo6Ch8+pL8OCsri/Pnz6NQKBgdHcXOzo74+HjWr
l1LfX09\nALm5uVy/fp2RkREWLVpEYGCgkIr29vYyMzPDpk2bREnR0dFBd3c35eXlGBsbY2JiIig+q1atwt3d\nXcSP+fj4oFaruX37NpaWliQmJgL372ZSzqTE7aurq+PDDz+krq6O6elpNm3aJAQ/169f57333hP+\n+tHRUebm5jA2NmZkZEQEgEiBom5ubhgZGTEyMiJSnKWAkuHhYZydnZmfn8fMzExc3JLoSzIXubu7\nY2BgIFKIUlNTaWlpYcGCBRQXF/POO+9QU1NDX18fcXFxREVFCVhpSUkJhw4dori4GFdXV+H7NzQ0\n5NFHHxVUrI6ODvr7+xkYGMDHx4eBgQH09PSwtbVl8eLFdHd3Y2ZmRn19PcXFxeTm5nLw4EEaGhpE\n1Pvw8DBFRUXirl9dXS2AMjk5OdTV1WFlZYVcLhfp1++99x6LFy8WEXUPPfQQCQkJGBoailwPyWIt\nCc4cHBwoKSlh4cKFNDY2cvv2bQoKCggNDRX29xUrVhATE4Obmxu//e1viYmJYf/+/SJYZmxsjEOH\nDoncC7g/ibK3t2dkZIQ33niD0tJSTp06xcjICOPj4+J9nJqaorm5mXv37lFWVibAv1qtls7OTjZt\n2kRnZ6dYFHp7e8nNzaWnp4fW1lbOnTvH888/z5YtW0QD8ubNm3h7ewvkmpQxqaenx507d3j//ffJ\nz89n3bp14hx7kONHwWicmppi2bJlYtwyPz/Phg0buHr1qqjB7969y4YNG9BqtSL44969e/T19WFs\nbExYWBjZ2dkkJSWJsU53dzdBQUEcOnSIyMhIIiMjRcc7PDyc6elpcnNzUSqVLFu2TIxvJPRYUVER\nxsbGYrQUHn4/mzMkJISvv/6aX/7yl5iYmBAdHc3g4CBRUVEEBwdTW1uLpaUlIyMjKBQKgoODRa6E\nRPzVaDS0t7cLbQLcd1w+8cQTnDt3jpmZGdrb25HJZEJxGRISglKpFMnTEgx1+/bt6Ovrk52djaur\nq2BdSn2E1atX8+6774qTKSEhgcWLF/PDDz8QGxtLaWkp9+7dE9kZ3d3dfPfddxgbG3Py5EnR3NPR\n0eHtt9/mhRdeQKlUcuTIEaqrq0lMTCQjI4O4uDjMzMwwMzMT5ChJdyFRoSS6VF1dHZs3b+aHH35g\n5cqVYnRoZWVFeXk5FhYWJCQkCPBNa2srpqamArMeEhKCWq1GT09PpG5JdOf5+Xm8vLzw8vKirq4O\ngPz8fLy8vBgfH+e5557j9ddf58UXXwQQjdOqqioxznR1daW5uZn169fT19fH3bt3xRTB2tpalEtS\nAG1UVBQVFRWEhYXh4uKCjo4OWVlZDA8Po6Ojg6GhIS0tLaxatUpg/Xfv3o2Ojg7BwcGi+Solppub\nmws6dl1dHVVVVVhaWjI2NoaNjQ137twRoTeSeSwyMhIrKyvm5uYwMDBAoVCgq6tLdXU1RUVFD3w9\n/iis05988sk7pqameHh40NraKshAXV1douE4MDBAUFAQixYtQqVSYWBgQGxsLC4uLmzcuFH4zKW+\nwtmzZwkPD+fUqVNs2LCBBQsWUFJSgq2tLTU1NWzZwwFFHwAAIABJREFUsoXbt2+L/EBzc3P6+/u5\ncuUKs7Oz6Onpoaury9jYGHl5eVhYWDAyMkJqaiq7d+8mNDSUlpYW1qxZg4ODg0gJPnnyJAqFgpMn\nT2JnZ8fy5cvZuHEjdnZ2VFdX09/fj0wmw9zcnJKSElxdXdHR0eH7779n69atzM/PMzo6ioeHB4GB\ngWg0GszNzYmNjUWpVDIxMSEcf3BfpwBw+fJlhoaGOHjwoHi8tbVVoL+ksFWJHPz000/T39/Pww8/\nTHR0tPAqpKSkiMSnnp4eYmNjxeJjb28vRp2AkJy3tbWJScqtW7c4c+YMSqWS4eFhLl26hIeHh6Am\nzc7OClGPVPNLvZisrCx+9rOfYWdnh4ODA1ZWVqjVapqamrC2tmbJkiVCrJSenk5sbKzwYZSVlYnM\nx+eeew5jY2PWrFlDb28vV69exdzcHBcXF0FOGhgYIDAwELVaza1bt2hubkYul1NVVUVpaSnffPMN\nBgYGuLm5ERkZSU9PD0uWLBEitxMnTtDa2sqWLVuYmJgQN5u6ujouXLggbPoymYzBwUFqa2sJDw8X\nzl9p15mdnc2dO3f+LelJ6nOZm5vT29tLbGysgAgHBwcL9aJUEujq6mJsbIypqSm7d+8WXMvx8XGh\n5jU0NOTq1av/5+DYDA0N2bt3L3K5HK1Wi5mZGWVlZfj7+xMaGopcLmd+fp76+npOnTolZJznzp2j\nt7eXqqoqNBoNcrmc8fFx0c03NzcnICCA5cuXC9aCpDK8e/eugFU8/PDDBAcHExQUhL6+PnZ2dly7\ndk3Ep0nJx4sXLxa/8+zsLCYmJly6dIl33nkHe3t74H7KU21tLTo6OlhZWQm0mwQGsbCwICsrC3t7\nezw9PWlsbBQ/29vbi76+PjExMSJ23cXFhdjYWExNTSkvLyckJITS0lIaGhoIDw8XDS89PT0KCgp4\n5ZVXGB0d5bXXXhMRcTY2NjQ2NlJfX4+JiQkJCQncvXuXjRs3igyCJ598kpMnT+Ln50d8fDze3t6c\nPXuW/v5+oQqUchWkoBR/f3+io6MJCwtj//79GBsbk5aWRlxcnOAoPvroo1hYWIh05ampKTw9PYWR\nx9PTk5iYGGEfHxkZwdbWFisrKyYnJzE3NycmJoa9e/eKqdTIyAg7d+5kbGxMGJwSExORy+Xo6OiQ\nmppKbm4u58+fp6urC4DQ0FDm5uZ4+umnqaioEHSqnp4eERl46NAhFAqFyAednp5GLpczOzsrciqk\nMaUU6Ovi4kJBQQFVVVVMTU2JtHBfX18RNb9q1Sp8fX0xNzfHz8+PkZERjh49SlpaGubm5vj7+3Pu\n3DnS0tIE3UvKEpHUpB0dHWi1WiorK+nt7cXX15fk5GRhKpueniY0NFS4LyMiIli/fj3Hjx8XcvgH\nPX4UO4XPP//8nZSUFKESDAwMFGYiaXx06tQprK2tiY2N5dq1a8IR9+ijj1JVVcXs7CyRkZHMzc2R\nmZkpaimlUklKSooAm0g7Eo1Gw7JlywgLC0Mul4txmnTHkCLcDA0NkcvllJWVYWFhwQ8//MDevXup\nra1lfHwclUrF+vXrxQVlaGhIRkYGJiYmtLS0EBgYSGdnJ8XFxSLIdGhoSNyFpWbltWvX+POf/8z4\n+DjT09MEBAQwMDDA+Pi4GDFKHe2xsTFiY2OprKykvr6elJQU2tvbWbt2LdnZ2cL1J+2qcnJy8PLy\nQqvVMjMzQ0xMDH//+98FHVulUmFiYsLMzAwLFy4UVCStVktpaSmRkZEolUosLCywtramtLQUjUaD\nkZERpaWlmJubMzIygkajITk5mc2bN4umqMSqlL6+cOFCUTZJ0mhJGJaVlUVQUBBTU1OcOXNGLGqm\npqYoFAomJiYEIFej0eDt7Y2ZmRnXr1/HxMQE
hUKBsbExg4ODzMzMsHPnTiYnJzl37hzh4eEkJibS\n2dkpgoenpqYwNDSks7MTExMT4aeJiYlh6dKltLW1sXbtWkZGRjA2NqajowNTU1MKCwuFfkEmk7F+\n/Xrh7AwPD0ej0WBlZYVWqxUp1PPz86xZs4aBgQH6+vqorq4mJiYGtVpNQEAAFhYWyOVybG1tuXLl\nCr/4xS9oaWnBz89PjH8le7VKpcLGxga5XM69e/dwd3eno6OD2NhYbt++jZ6eHnfv3uXOnTssXrwY\nJycnqqurycjIeKCdwo9iUXjvvffe+fjjj9HX18fW1paSkhJRU4WFhXH79m1WrVrFjh07hE343r17\n7N27l8OHD9PU1ISlpSVzc3Pi7puXlydAEwYGBmg0GqER7+npwd/fn46ODjHyksqRq1ev4uLiIoJU\n+vv76ezsxNHRkampKW7duoWTk5PQ9g8PD2NtbY29vb3YFq9cuVIo+G7evImXlxfNzc1CPhsZGcn4\n+Djvvvsuvr6+GBoacu3aNeLi4kTs/aeffipiz7VaLX5+fmL7e/v2bWpqaujv78fMzEwEiOrq6uLt\n7c3Q0BDLly8XJ4cEM5VcgO7u7uzYsUMAPW/dusXAwAArV64Uc/r29nZu377N7OyssIp7eXnR3t6O\nn58fdXV1WFpaCpn09u3bmZmZITExkYGBAUJCQsTkZ2JigoULF3L9+nWhr8jOzmbBggUUFRWJRl1+\nfj579uyhoaEBQNi/JROUJEGXy+UsX76cY8eOiQWsvr6e9PR0TExMmJubY25ujtTUVEZGRsjOzhZA\nHsnbMjo6KkhKcXFxpKenY21tTUdHh+gnSTzF9vZ2YbvWaDQsXrxYGJKkEa8EXXV0dMTHx4fw8HBc\nXFwEfKerq4t3331XsDBaW1vx8fEhISFBhMccOXKEZcuWcf78eZRKJWVlZZSVleHt7Y1KpRLnj7Gx\nMRcuXCAjI4P29nYmJycFvemHH37A2toaDw8Pzp49y6pVq8TNrqio6P+c8kEmk5Genk52djbNzc2o\n1WohDklLS2PDhg0iZ8HZ2ZmcnBxiY2Pp7Oxk69atmJmZiaCX8fFxYa4ZHh6moaEBAwMDAgIC0Gq1\nYtUfGBgQeoT+/n6qqqo4ffq0+HCDgoJEdFtCQgI+Pj6ifgeEscXDw0MYcs6ePUthYSHHjh2jpqYG\nV1dXIdmNj4/nhRde4LHHHhOZE7t27cLOzk5ASKanp9HT06OhoYGHHnoId3d3FAoFjY2NZGRkYGZm\nxqlTpwQ5KSgoiMHBQdra2ti1axfDw8NotVpsbW0ZGRlhfn5ejLIkNsL27duFQ+/GjRvk5OQQGBgo\nosgkO7OTkxNxcXH4+/uj0Wh4/fXXGRsbY9WqVRQWFrJkyRK6urrw9vbGwsKCgwcPoqenR1NTEyMj\nIyKYVfKJVFVV4ePjIzIh4X5TUYqbk0o+pVIpdhTh4eH09/dz48YNQd6ytLTkqaeeErubkJAQNBoN\n09PTYist+Waio6NFczg9PR1XV1eeeeYZNBoNFhYWqNVqFAoFzzzzDDo6OkKLIIFyly1bhkKhECVo\nSkqKsFlLOQpWVlb4+PgIwdL58+eprKzkww8/5OTJk2Lq0NjYKPIiDhw4gImJCSqVioyMDAwNDZHJ\nZDz++OPY2dkBiJGnlM61detWWlpaSElJQU9Pj6NHj1JRUcG6desEsDUwMJB169bh5eVFamoqO3bs\nQKlUCgDwgx4/ikXBwcGBqKgoli5diqenJ9bW1kRFRYmO/sKFC5mfn+fMmTMipl5S/H355Zd4enoy\nMDCASqXiypUrogGzc+dObG1tycnJoaioCGdn538LyWhpacHe3h5dXV309PSYnJwUDrzm5mZsbGzE\nIqKvry/e2GeeeYaGhgZSU1MJCgoiNjaWvr4+oaLbtGkTjo6OGBsbo6uri46ODhcvXuTGjRvU1taK\nSHNdXV1sbW2FWaWxsVEkZX377bdi3LRjxw6Baa+rq0OhUODu7o6ZmRkbN25kyZIlNDQ0sHbtWurq\n6nBycuLevXtcuXKFJ554grm5OQwNDYmLi2NiYgILCwsMDQ2Zmppi48aNLFy4kEcffVS48aytrRkd\nHWVoaIjx8XGhqHN1dSUlJYU1a9YI7qUUBWdraytMSS0tLcJU5eTkRHNzMy4uLmg0GkGiXrduncjb\nAIRop6ysTNClVCoVFy9eFNOMt99+m4KCAk6dOsXU1BTLly/nyy+/JDAwkOTkZLRaLWFhYSiVStG1\nl47HH3+cVatW8emnnwoy06ZNm2hpaeHSpUuEhYWJHV9sbCzBwcHY2NjQ3d1NbW0tLS0t9PX1IZfL\nxWOA0MkUFBQwMDDAwMAA9vb2AuFuaWmJQqEQHobc3FxiYmLw8/NDrVZz7949WltbBaZP0lVIDtNz\n586RmZnJW2+9hUajQU9Pjz/84Q8kJCSQnJyMvr4+9vb2+Pn5MTs7i4uLCydPnmTx4sVMTk5SUlKC\nnp6eAOM+yCGTTBz/Ow+ZTPa//5f4v8f/Pf7/fxRrtdrw//RNPwqdgq2tLS+99BJubm54eXmRl5fH\n7OwsZmZmlJeX09bWho+Pj7Avv/baa+zfvx8nJycxC3Z1dcXNzY3r169jZWXF559/zpYtWzAyMkJH\nR4dXXnmF9vZ2jhw5QnR0NPX19Zw/f56HH34YCwsLjh49yunTp5mamkKtVtPR0cHp06dJTk4W3XtD\nQ0PeeOMNse3ftm2b0L2Hh4eTm5uLh4cHtbW1Ykucl5dHWFgY/f39BAQE8NVXX4l4uejoaG7fvo25\nuTkvvvgiX3/9NRYWFvzjH/8gKSlJkKMyMjL41a9+xZdffom7uzt9fX1YWVkJCpGnpyelpaVCviwl\nKjk6OqKvry8waJcvX8bd3R09PT3m5ua4fPky0dHR1NbWirCVq1evEhsbi7u7O83NzcD9aLSLFy9i\nZGTEzMwMjz76KCdOnBD+AQl0Ojk5iZubm5icXL58GVtbWwEvkcvltLW14e7uztKlS8nOzsbBwYGp\nqSmSk5NxdXXlgw8+ICYmhrNnz6JWq7G2tmb9+vXU19eLUsrIyIiAgAAaGhooLCxkzZo1hISEkJ+f\nz/z8PD4+PgwODnLx4kV++tOfEhUVxaVLl+jp6eHWrVusXbuW4eFhIVu/evUqvr6+PPzww6jVanp6\netDR0aG5uRkfHx/09fXJyMjAy8uL8vJyoqOjMTQ0ZN26dWKk2tbWJoRoFy9eZOXKlaSlpYn07tDQ\nUE6fPs1XX31FcnIyx44d449//CNbt27lww8/FIE0d+/eZd++fbz77rtYWloKuIuHhwe5ubkit2Ni\nYoKuri42btxIa2uraKC6uLhQWVkpWAx2dna4u7uTm5vL73//+we6Hn8U5YMU4+Xo6MhHH31EWlqa\n6Lbb29sTGBjIE088QUFBgehMV1dX09TUxPr16wXgYnBwEKVSSXZ2NnC/LJHCWKqrq8nOzmbVqlXc\nunW
LiIgIPv74Y9auXUtnZydvvfUWDz30EMePHxdobZVKxeDgIDo6OkxNTQkVpbOzM+Pj49TV1dHX\n1yeCZuLj4wkMDCQwMJCVK1ei1WrZtGmTsE1HR0cTExNDZGQknp6efPfdd6JEACgpKaGhoUHQm/38\n/MjPz+ehhx6ipaUFrVYrGI/GxsakpqZSWFjIpUuX+Pbbb1mzZg2tra2MjY0RGBgoMG5VVVUcPHiQ\nsLAwurq6UCgUItegsbGRjz76iMrKSlxdXXn99ddRq9W0trYyMTHB9PQ0N2/exN7enri4OLq7u0lL\nS2NoaAhvb280Gg0ajYbt27djZmZGS0sLV69eJSAgAF9fX9rb28VJamlpSXJyMvHx8Rw/flz0IszM\nzPj+++8B6Ovr4+zZs+zZs4ef/OQnlJeXMz4+TllZGaWlpYyNjYkYwcDAQLy9vZmcnGT//v0i9OTM\nmTPIZDK2bt2KoaEh/4O9946K8lz3/j9D70MbpEmR3kEpAiKIKIIaDRE7xsQkmrZ3enb2Tk92TmIS\nY2KyYxLdSUyiscRewIIKgqj0Ir2PdBhgaEOb9w8y92/vd61zju8fv7Vy1jrPWrMchzIwzHM/131d\n3+/nCzP5DBqjlSYyzsDAgMDAQDZs2EBAQABXr14FEM7QJUuWUFRUJCYVAwMD2NraUlRUJCYCO3fu\nFAHHmkZ0YGAgly9fprKykoCAAKqqqrh9+zYLFizgmWeeYcWKFXz//fe4u7tTV1cnehJHjhwhOjoa\ngMbGRlQqFW+++abIlrC3txccx+npaZRKJfn5+dy9exc/Pz+Ki4u5c+cO1tbW3Lx5k/b2djIyMjhx\n4oTIQb2f4w+xKFhYWDB37lwmJibw9vYmNDQUqVTK3bt30dPTE1mLhoaGGBoa8ssvv5CcnMzFixc5\nefIk9vb2WFlZoaenR2hoKC+//DIAq1evFqaoa9eusWDBAmpqaggODqa3t5euri46OzvZsmUL7u7u\nbNu2jXXr1uHk5MTNmzcxMTERFmvN1Ragurpa0JPd3NwoLy/n008/JT09nR9//BEtLS327dtHSEgI\nDQ0N1NTU4OHhwS+//IK3tzcKhYKqqiqCg4NxcHCgqakJmEmeGh0dFaiya9eu0dzczMmTJ5menmbB\nggVcu3aN4OBg5HI5K1euJD8/nxUrVpCWliYaZM7OzgwPD4uou/HxcdasWcOvv/6KkZER1tbWBAcH\nExkZSWhoKOnp6URHR1NSUiKaprq6usJgZGhoSHh4OJWVlbz++uucO3eOxx57DIA7d+4gkUhE4lJQ\nUBALFy5EpVKJqU5YWJiA5fj4+HD27FlhgHrllVcYGhoSTdzt27fzwAMP8Oijj5Kfn89f//pXETzT\n0tJCSkoKZ8+epbi4WHA31Wo1S5Ysob+/Xyw8mvg5jdlMYyPXTKJmz55NXV0d6enp5OfnU1paKvJG\nXV1dcXBw+LdphmYcqlEGahgfa9asYe7cuXzzzTfs3r0bQ0NDbt26xfDwMJOTk8Kj4+3tzZo1a9i8\neTPW1tZMT0+jVqsZGxtDR0eH2bNnY2trK6ozPT097O3t2bZtG48++iiff/45g4ODODg4MG/ePPT1\n9TE3NycgIICpqSm++uorLC0tBVvD398fe3t7IiIiWLt2rdBr3M/xh1gUjIyM2L9/v3DODQwMCMx2\nTk6OwGy/+uqr3L59m+bmZs6ePSvs1H5+foIp0NHRgZ+fHzBDXtI0cwwNDbl+/bqQLXt7e5Ofn8/Y\n2JhIhdq6dSvZ2dlIpVK0tLRYvny5qCY0kw2A9957j3nz5pGUlERycjIWFhakpqZiZWUlRqOzZs0i\nKysLXV1dNm7cyPDwsOgyGxgYiCvVnj17hLpt1qxZ2NjYsGXLFoaHh+ns7MTDw4OkpCQB69BATebO\nnUt9fT2urq7i8+/cuYOxsbFQE/b09IjkJ5jZBiiVSiYnJ5HJZKKpWF1dTUNDA42NjbS0tLBnzx6u\nXLkiOJYaQOq9e/f4xz/+wfPPP8/u3buJi4tjamqKu3fvAjMV3zPPPCMSm01NTZk7dy7R0dH4+/vT\n2dlJRUUFbm5uTE1NYWhoSGFhIdPT02zduhWYISRNTk4SGhrK9PQ0QUFBhIaGMj4+TnR0NL6+vtjY\n2DAxMcGNGzfIysqiuLgYPT09wsPDBR5vaGiI7u5uYWLTiLtUKpUIbUlOTiY4OJjBwUEmJyfZvHkz\nZWVlVFZW8te//pXy8nK8vLywtbUVGLe//vWvAoQCCJFUSkoKERERQmh36NAhZs+eLeAxdXV1PPvs\ns7S3txMVFcXatWvx8/MjIiKCOXPmCOWind0M1Mzc3JyamhpCQ0M5evQoc+fOJSMjg6mpKZGHqkG8\n6+vr8/LLLwti2L1795g1a5bgkRw8eBBLS8v7Ph//EIuCRs+tQaZZWVmRk5MjrnBSqRQzMzMKCwsF\ncGNkZITg4GCOHz9OXV0dNTU13Lhxg97eXgFuHR0dZWpqSkhMk5OTWbx4MQYGBoyMjLBy5UoMDAzI\ny8tj7969FBcXM2/ePNHBLigowMHBgYmJCRQKBXFxccDMm390dJQjR46Ivaafnx8FBQVYWVmRl5cn\nVGhdXV1MTk4SHh6On58fV69eRVdXl+eee46GhgbWr18vVvFTp04RHByMjo4OtbW1WFlZYWNjI/wN\njo6O2NnZCRu2Jops0aJFgugklUppaWlBKpUyf/58Wltbxffw8fHB19dXkKgyMjIoLy9n7dq1LFmy\nhPT0dKamplizZg0BAQE0NjaSkJDA4sWLCQ0NpaamBgMDAw4cOMDzzz/PxYsX0dXVFWrQ6elpXn/9\ndTIzMzl8+DAREREsW7aM6elpsTirVCrs7Oxwd3dnyZIlGBoacvv2bTZsmGH5fPTRR8LeXFFRQXl5\nOVpaWoKwdfjwYVxcXCgoKBBwV4lEgp2dHWvXrhWjwZaWFrG4AMK+HhYWRnNzM729vYLbmZqaKiqq\nefPmYWJigo2NDbq6upSWllJdXS1oy5cuXaKwsBBHR0dghsIVHx+PVCplYmKCwcFBjI2Nef7559HS\n0kJPT0+YsjT8jY6ODgYHB8nJycHCwoKYmBihUNRQuJYuXUp+fr5gbf78888iO7KtrQ2FQkFwcDAX\nL17k0KFDvPLKK/j7+4tqY//+/QwMDDA4OCj0Jfd7/CEWBQMDAwHamD9/Pk899RQrVqxAX1+fX375\nhccff1y8mWNjY4Xw5tq1ayJbMDw8nIiICCoqKgS/8Nq1ayxdulREybu4uBAcHExQUBA2NjbU1dWh\nVCppaGggODiYnTt3Eh4eTnp6OsHBweTn52NiYsLWrVuF6g3gnXfeoaKiAjMzM/T09ISt283NjfDw\ncKysrHBzcxMUYh0dHc6dO4dEIhGg1tbWVry9vTEzMxOjs8cee4zs7GyKiop4/vnnuXfvHhMTE+jq\n6jI6Ooq2tjZWVlZMT08zPDzMwoULGRkZYfXq1fj6+qJWqyku
[base64-encoded binary payload omitted: ~18 lines of MIME-wrapped base64 with no human-readable content]
7t3L9PQ0Go2G9PR0pqamyM7O5pFHHpHVikII/H4/gUCAlpYW2tvbefLJJ9HpdExMTHDo0CG8Xq80\nUo12Pa6rq5M2a4FAAKPRSF9fH9u3byctLU2WIEfXGaJzeK1Wy+zsLGNjY3IrcGRkhOzsbNnpKSkp\nibq6OrmlaDQaMZlM1NfXU1VVJcuy5+bmuHDhAsFgkKKiItxuN319fbIEPj09XT7KbzQaeeqpp6RV\n/cjICDMzM+j1enp6euSUV6fTceDAAT788ENsNhvl5eUEg0GCwSC9vb0YjUa6urpwOBxYLBbMZjPd\n3d20trYyPT2NVqslLS2NiYkJNBqNLLGOVpKuNykkxPQhuhuwsLDA6dOnGR0dlYtRZrOZ/v5+mpqa\nyMnJweVyYTKZqKysxOFw8Oijj3Ly5En6+vp4+umneeCBB6QvoaIo7Ny5k2+++Ub2NfR6vYRCIS5f\nvsxzzz2HyWSSXgahUIiMjAz6+/t54403CIVC1NTUsHnzZrxer9wOcrvdWCwWQqEQPT09lJWVcfTo\nUVwuF+FwGLvdjtVqZWBgAKfTycWLF/nkk0+orKxEr9fz1ltvkZOTw8zMDNXV1bKXgF6v54MPPmBq\naorS0lK6u7vRaDSUl5dz/fp1Kioq5ILStWvXcLvd8qaJDi/T0tLo6uoiGAySlpZGRkYGs7Oz5Ofn\nS8enyPxSluvm5eXJcmufz4eiKBw5ckQm4n379pGRkUFrayv79u2jq6sLg8EgG4sMDAyg0WgoKCgg\nNTWVjRs3MjQ0hN/vZ2pqiv7+fkpKSggEAiiKQnFxMS6Xi6+++opNmzZhNpt5/vnnAaiqqmJmZob9\n+/fT1dUlpyTRLUa73Y7RaGTTpk0cOnSIlJQULl68iM1mY25ujscee4zJyUkaGhr49ttvpRGsEAK7\n3U5HRwcajYa6ujq2bNki2+I3NDRI051oRWVrayvj4+NkZWXJ2pXz58/T29srnaA2bNjADz/8wNmz\nZ+W2cDAYxOl0Yrfbyc/PZ8eOHVy/fp2Wlhbu3r3L7du3WVhYwOv14nA4GB8fZ3Z2FofDIfs0vPba\na/T29mIwGCguLubw4cOyJufs2bOyatdisUgz4egoNCkpCZ/Px9TUFDqdjqWlJZxO57rvx0Tp5jwJ\nzAHfxVvLjzCQeJpA1fVrSERNEB9dOYqi/OaXvpQQSQFACHFtPe2nY0kiagJV168hETVB4uqCBJk+\nqKioJA5qUlBRUVlFIiWFX1wVjQOJqAlUXb+GRNQEiasrcdYUVFRUEoNEGimoqKgkAHFPCkKIx4UQ\n/UKIQSHE63HWclMI0SmEcAkhrkWu6YUQ/yuEcEde02Og40MhhF8I0XXPtTV1CCFqI/HrF0I8FkNN\nx4UQY5F4uYQQFTHWZBZCfCmE6BFCdAshXolcj3es1tIV13itm+jTZPE4gA2AB8gDNMANoDiOem4C\nhh9dewd4PXL+OvDfMdDxO8AGdP2SDqA4EreNgCUSzw0x0nQcOPYT342VpmzAFjnXAgOR3453rNbS\nFdd4rfeI90hhJzCoKMqQoihLwGmgKs6afkwV8NfI+V+BJ/7dP6goSgswtU4dVcBpRVFCiqJ4gUFW\n4hoLTWsRK03jiqK0Rc6/B3qBLcQ/VmvpWouY6Fov8U4KW4CRe96P8vPB+3ejAJeEENeFEC9GrhkV\nRRmPnE8A6/ff+teylo54x/A/hRAdkelFdJgec01CiFygDGglgWL1I12QIPH6OeKdFBINh6IovwUO\nAn8QQvzu3g+VlbFe3LdrEkUH8GdWpn6/BcaB/4mHCCHEJuAs8F+Kosze+1k8Y/UTuhIiXr9EvJPC\nGGC+570pci0uKIoyFnn1Aw2sDOF8QohsgMirP07y1tIRtxgqiuJTFCWsKMpd4H3+f8gbM01CiBRW\nbryTiqL8LXI57rH6KV2JEK/1EO+k8E9gmxDCIoTQAEeAv8dDiBDifiGENnoO/AfQFdHz+8jXfg98\nEg99P6Pj78ARIcRGIYQF2Ab8IxaCojdehGpW4hUzTUIIAfwF6FUU5U/3fBTXWK2lK97xWjfxWuG8\nZ+W1gpXVWQ/wxzjqyGNlBfgG0B3VAmQAlwE3cAnQx0DLKVaGl8uszC+f/zkdwB8j8esHDsZQ08dA\nJ9DByh87O8aaHKxMDToAV+SoSIBYraUrrvFa76FWNKqoqKwi3tMHFRWVBENNCioqKqtQk4KKisoq\n1KSgoqKyCjUpqKiorEJNCioqKqtQk4KKisoq1KSgoqKyiv8DR1dBFfNSOH8AAAAASUVORK5CYII=\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f13dc43b310>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvHdwW/eV9v8BiQ6CYAHBCrH3LkqkSFWLVO+SJduxVeIk\nHsfKOsnKTjJ2ipK147IbO3YSxyWRbMuSuyxLVqEKRUoUVdhJsTewV7ACJDreP/TTnc0fO+t9J571\n7x09M5ghORfA5b33nO85z/Ocr8jtdnMP93AP93AXHv/bJ3AP93AP3y7cSwr3cA/38A+4lxTu4R7u\n4R9wLyncwz3cwz/gXlK4h3u4h3/AvaRwD/dwD/+AbywpiESitSKRqEUkErWLRKJffFPfcw/3cA//\nXIi+CZ+CSCTyBFqBVUAfUA485Ha7G//pX3YP93AP/1R8U5VCNtDudrs73W63DfgI2PINfdc93MM9\n/BMh/oY+NxTo/U+/9wE5/9XBKpXK7Xa78fPzQyaTYbfbMZlMeHp6olKpGB8fR6lUCq/R0VG8vLyw\n2+3YbDYcDgf+/v6YTCZkMhlzc3MMDQ3h7e2NVCpFLBZjsVgICAhgdnYWmUyGxWLBarXi7++P2WxG\nLpczMTGBWCzGy8uLgYEBfH19cTqd+Pj4YDQakUgkDA0NodfrUSqVjI+Po9PpGBwcFD5HJBJht9uZ\nnp4mKioKs9kMgIeHBzMzM8jlcmw2G15eXszNzWG32/Hw8GBkZITQ0FAkEgkSiYTp6WkkEgl2ux2J\nRIJSqcTDwwObzYZIJMLlcjEzM4NarcbpdCKRSDCZTML77Xa78F6LxYJMJsPT0xOxWMzIyAhSqVR4\nWa1WnE4nSqWSubk5HA4HKpUKp9OJw+HAw8MDpVLJ9PQ0np6eeHjcWUvuvtdqteLn54fFYkEkEuHp\n6YnJZEKtVmM2m1GpVFgsFtxuNzMzM+h0OqxWKwqFArlczszMDGazmYmJCWJiYnC73TidTjw8PLDb\n7czOziKXy7Hb7cJ5enp64nQ6EYvFOJ1OpFIpHh4eTE5OolarsVqt2O12RCIRQ0NDhISECPd2amoK\nu92ORqNhamoKf39/pFIpRqMRl8uFVCoVniOlUsnExAQajQaz2YxMJgNgamoKo9GIVqvFZrOhVquR\nSCRMTEygUCjw8PDA5XLhdruxWq3CPQ0ICBDujVwuZ2hoCB8fH9xuNx4eHnh5edHd3Y1Op8NisaBU\nKnG73dhsNuRyufC3u/daqVQyOzuL0+lEoVAI100kEmGxWPD09BTuyeDg4Jjb7Q7474L3m0oK/y1E\nItFjwGMAWq2WZ599lqCgIKqqqigqKuI73/kOWq0WvV7PzZs3Wbx4Ma2treTn53P79m3mz5/Pe++9\nR2dnJz/72c+4du0ajY2NqNVqxsfHOXbsGEeOHOHZZ59lz549nD17lszMTFJTUykqKmLBggVUVVVh\ns9lIT0/nxo0bREZGEhUVhUwmQy6X09LSgoeHB729veTk5FBfX88bb7zBBx98QFFREXa7ndTUVBob\nG5mZmWHJkiUUFxczNjaGwWDgd7/7HV999RUGg4GYmBiSk5O5ceMGq1atoqamBrvdzg9+8AMaGhrY\ntm0b+/fvFxJgTk4OMpmMlpYW0tLSqK6uRqfTMTAwwJIlS7hx4wYymQyz2YxCoRASZUdHB9HR0YhE\nItLT06murmZwcBC3283atWu5evUqer2e7u5u+vv7KSgooLa2lrS0NNra2khJSaGjo4POzk4CAgJI\nSkqiqqoKh8NBUFAQcrmczs5OdDodJ06c4KGHHmJ4eJiwsDCOHDnCwoULEYlEpKSkUF1djVQqRS6X\no9Vq6erqYv/+/bz00kvo9XqMRiMjIyP4+/uTkJDAz3/+c9566y1OnDhBYGAgqampWK1WRkdHMZlM\n/O1vf+OBBx5g27ZtnD17lpiYGAwGA9nZ2QwPD5OVlcUrr7xCTk4OY2NjOJ1Ojh49ytDQELt372bL\nli3odDqMRiNHjx7F29sblUrF7du30ev15ObmMjs7S1FREfv376empobp6WlKS0tZuHAh/f39xMXF\nkZqaitvtZtmyZXznO99hYmKCuLg4PvnkEyIjI6mvr2fz5s3o9Xp6e3sJCQmhoqKCbdu2MTAwgMFg\nwOl0Mjo6SkREBCtXrkQul1NSUkJ0dDRvvvkm27dvRy6Xk5iYiFgsZmBggJGREVwuF4GBgTQ1NaHV\najGZTFRUVPDwww8Li8bdRc/Pz4+ZmRmqq6vx9/fnjTfe6P46sflNtQ/9gP4//R72//1NgNvtftvt\ndi9wu90LfHx8MBgMqFQqkpOTefzxx4V/qLOzEw8PD2pqanjmmWeYmJhgdnaWd999Fx8fHx588EEe\ne+wxdDodIpEIgJiYGABOnTpFS0sLFouFp59+moiICKKjo9m3bx8qlQp/f3+eeeYZgoODiYmJQSqV\nEhMTQ2RkJBcvXqS9vZ3R0VG8vb0ZHBwkISEBgOvXr5OVlUVAQABjY2OMjo6i0+mYmJggLCwMgCef\nfJK6ujrkcjkymYwHHngApVLJnj176O/vZ+HChYyOjnLq1CnE4ju5ecWKFRiNRpYuXYrNZsNqtXL9\n+nXGx8dxOp0YDAby8vJobm4GoKysjI6ODq5evYpMJmNgYAAfHx+8vb3x8vKitraW3t5eWltbmZub\no7i4mNu3b+NyuZienubJJ5+kpaWFvLw8iouLkUgkXL16lerqakpLSxkdHSU4OJj4+Hj0ej2enp50\ndXUBIJPJKCgooLOzE4fDQUdHB9nZ2SiVStRqNWKxGL1ej0qlIiUlBaVSSUhICK2trWRlZTE2NkZs\nbCwBAQF4eXnh6ekJgM1mw2w2c+bMGWQyGYGBgUxMTNDf38+SJUvw8/PDYDCQkJBAf38/SUlJQlAO\nDQ2RmppKTU0NAwMDBAQE8OabbwIQHh7O2bNnaWhowGKx8OMf/5jk5GT279/P4OAgFouFmpoazp07\nx/e//330ej0DAwM4nU7WrVuHXq/HZrOxYsUKmpubOX36tPC5//Iv/8LExAS7du0iISGB119/nZiY\nGOx2O8uWLaOgoIAnnngCk8mEn58f+fn5OJ1Odu3axbp165iZmSE0NJSVK1diMBgAGBsbo6uri8bG\nRk6fPk1zczMqlYp58+bR2tpKfX09ZWVl+Pr6snnzZkZHR6mrq6OiooKnn36aoaEhOjo6aGhoIDg4\nGG9v768dvN9UUigHYkUiUaRIJJICDwIn/6uDp6amAJiZmcHtdvPyyy/jdrsJCgoiIyODTZs2sWzZ\nMvbs2UN3dzctLS243W4UCgVeXl5s3LiRrq4utm7ditvtxsfHBwCNRsMLL7yAj48Pw8PDJCYmUldX\nR0hICDMzM9x3333Mzc2xe/du5HI58+bNQyKREBUVxfPPP09oaCibNm1CKpUSFxfHoUOHAMjKyhIy\nsFgsZsOGDaSnpzM2NobZbCYs
LIxz587x7rvv8vHHH6NQKLh58ybR0dFUVFRQUFBAcHAwERER5Obm\n4nK5AGhpaSE+Ph6DwYDFYmHevHksX76cyspKoqOjUalUiMVi4uLi6Ojo4PTp03R0dODj44Pdbicu\nLo6lS5disVgYGxtj/vz5dHZ2snHjRvR6PRqNhiVLltDa2kpCQgLt7e1IpVKqqqq4fPkyH3zwARcu\nXKC0tBSLxUJpaSldXV0olUqCg4PJzMxkzZo1OBwOZmdnsdvtdHR0CNdz3759jIyMoFarGRoaQiKR\nMDY2xpUrV4iJiaGtrQ2RSMRLL72EWq1m/vz57Nu3j0WLFrF8+XIAGhoaWLVqFQsXLuTLL7/k5MmT\nyGQy/Pz82LJlC0FBQZSXl2M2m4mNjUUqleLj48P777/PG2+8QU9PDzt27EAmk9Hc3Cwk0E8++QS9\nXs/Q0BCjo6OMj48THBxMWVkZCxYswNPTE6VSyZo1a+jq6uLKlSuo1WquXr1KfX09AwMDZGZm8sEH\nHxAYGEhSUhIAdrudl156iYqKCgIDAykuLuaLL77g448/pqenB7VaTUlJCSUlJQCoVCp+85vfkJSU\nRGVlJb6+voSEhDA4OIhEIuGpp54SYkGhUODp6UlkZCRKpZLJyUl6enqIjo7mscceY9euXUxMTKBW\nq7l+/Tpnzpxh69atrFu3jrS0NKKiovD09CQvL49jx4597eD9RpKC2+12AD8CCoEm4BO3293wXx0v\nl8txOp1MTEwAkJqail6vJzAwkDfeeIOWlhY8PT2Fhz43N5dz584RGRmJVColNjaWtrY2Ghsb8fLy\nQiqVAuBwODAYDMTFxQFgNpsJDQ3FZDJhsVi4ffs2R48e5Re/+AXbtm0jOTlZKB9PnDhBYmIiFy9e\nZHp6msHBQT799FMAjEYjAQEB7N69m66uLry9vens7CQzM5PExER0Oh3Z2dksX76cubk5EhMTefDB\nB6mvrycmJgZfX1/m5uZITU3l3Llz9PX1ATAyMoLdbqevrw+lUikkP41GQ1paGvn5+bS0tPD555+T\nnJzMV199RXJyMm63m/Xr17Np0yYAvLy8CA0N5dSpU2RnZ9PQ0EBGRgY6nY5bt26Rm5uLw+GguLgY\nk8nE5OQk8+fP54knnmD//v3s3buX/Px8Xn/9dRQKBX5+fnh4eNDe3s6//du/ERgYiMViYe3atWzd\nulXotY8ePYqXlxdyuZzNmzdTXV2NXq+nq6uLV199FafTyW9/+1vmzZuHzWajvb2d1tZWvLy8OHfu\nHABKpZLS0lLCw8PZsWMHK1asQKvVUlBQwOnTpzGZTDzyyCM0Nt4RshQKBYmJiTidToxGI3K5nIaG\nBux2O6tXrxZWyL1799LT04NMJmN0dFT4bp1OR15eHtXV1WRkZDA8PMzJkyfp7OxEo9Fw4MABQkJC\nCA4OpqOjg6CgIIqLi4XzLSsrIzo6mtWrV1NSUkJERATnz58XuJ+bN2+iUqmYmZlh1apViEQiwsLC\nOHbsGHK5nPLycnp7e+nt7aWzs5OOjg4A/P39yczMRKVS0dzcTFxcHAqFQkgC9fX1+Pr6kpGRQVlZ\nGTabjU2bNjEwMEBCQoJwfRISEmhoaODJJ5/82vH7jXEKbrf7DHDm6xxrs9lITU3l6tWrpKSkoFAo\nOH/+PPfffz99fX2Mj49js9mIjo4mNzeXzMxMtm/fjkgkQi6Xc+vWLfbv309kZCTNzc0cP34cgOzs\nbOrq6vDw8KChoUHoU//93/8dvV6Pj48PTqcTb29vbt68SVRUFM3NzTgcDsbHx9Hr9UgkEsrLy1m2\nbBkVFRXAnaRw/vx59u7di9Pp5PDhw0xOTlJbW4ufnx/btm1jbGwMHx8ffH198fHx4amnnkIsFhMZ\nGUllZSW3b99meHgYlUpFZGQkcGcVGRwcZP369Xh4eGAwGPDy8uLKlSv4+fmxcOFCgWvQarVUVlay\na9cuuru7+fLLL1mwYAE5OTlcunSJrq4ugoKCCAsLY2ZmhldeeYU1a9agVqupqqpCKpVSWVlJfn4+\nERERjI+PMzc3B9xZpUQiEVeuXCEsLIyAgAAsFguTk5Ns374diUTC9evXiY+PJyQkhOvXr5OWloZI\nJKK6uporV65QXFzM8uXLiY2Npbm5GZlMRkBAAMnJyfT29pKUlMTAwADZ2dkUFhaSmJgIQEdHB8PD\nw6xcuZL4+HjGxsYQi8UYDAaBE3nhhRcwm814eXkJAdDe3s7s7CyFhYUkJSUhFouRSCTCPWtsbMTb\n25uJiQkefPBBurq6OHToEHK5nNOnT3PfffcxMDBAVFQUjz76KAMDA3z66acYjUYyMzN55pln2Lt3\nL/Pnz2flypU0Njby4YcfIpVKWbFiBV1dXXh6epKYmIjJZKKsrIzQ0FAWLlyI2+0mMTGR+vp6Kisr\nycrKYu/evdTW1lJWVkZCQoKQDL744gsADAYDISEhTE9Po1AoKCwsxM/Pj8TERCYnJ1m5ciWjo6MM\nDw/j5eVFamoqq1evJiIiguvXr9PQ0EBTUxN6vR6tVsvo6OjXjt1vhaPRw8ODDRs2MH/+fGJjY9mw\nYQNqtZqioiLmz59PUFAQXl5e9Pb2olAoWLp0KZGRkRQVFVFVVUVUVJRQsn/55Zds3boVgNdee43x\n8XHOnz/P7OyskGk7OjpISEggLS2NuLg4goKCaG1tpaGhgR07diASicjJyUGj0TA8PMyyZcuYmpoS\n+mmbzcZDDz3EpUuX8PHxYWJiApVKxenTp/Hx8aG0tJSamhpcLhcjIyOIxWKmpqZITU1FpVLR09ND\nQEAAy5Ytw2KxoFAogDsr/PLlyxkcHKSpqUnI9hkZGURERGCz2QgKCkIsFnP8+HGSkpKIjIwkOjoa\nl8tFSkoKzc3NhIeHk5eXh0ajITg4mK1bt5Kfn8/U1BTp6el4eXkxPj5OYGAgcKd9CwgIYHR0lI6O\nDjQaDQ8//DADAwP09vYKpX5sbCxhYWHU19eTm5uLwWCgtraWJUuWkJWVRXd3NyaTCS8vL2JjY1m+\nfDnT09MCk15dXY3D4WDFihUkJyfj5eWF0WhkYGCACxcuAJCTk8P+/ftpaWnhiy++oLm5mXPnzmGx\nWDAYDOzcuZN9+/bx6KOP4ufnJ3y2wWBgwYIFrFq1isWLF/PAAw+QlZUlVI0BAQFs3bqVuLg4fvKT\nn1BUVERaWhq1tbUkJCSQkZHB9u3bUalU/OlPf+LFF19Eq9Xy6quvcvjwYeLj45FIJAAcOHBA+Dk+\nPp6LFy9y5MgRNm3ahM1mo6WlhaysLB5//HEefvhhli1bRmBgILdu3WJkZIR58+bR0NDA0NAQExMT\nlJWV4XQ6kclk7NixA4Dt27fjcrnQ6/XI5XIyMzNxu92UlJSgUqkwGAxCEjcYDIhEIry8vDh9+jTd\n3d3IZDKMRiNtbW2Mjo4yNDT09ePx/z6U/3kQiUQcPnyYjRs3EhwcTFZWFhqNBoD29nbuv
/9+ampq\nGB0d5bnnnkOtVuPl5SVkzsWLF9PY2EhOTg5ZWVlcunQJgG3bthEeHs7GjRuZm5vDbDZTXl5OQUGB\n0G5MTU0REhKC3W6npqaGkZERCgoKmJub4+rVq0L14HK5uHHjBgChoaHU19ezfv16cnJy2LRpEyMj\nI6xevZqenh76+/uF4NXr9SxbtoyIiAgiIyPZsmULK1asQCwWMzk5ycGDB7FYLMCd1eytt95i0aJF\n9PX1ER0dzczMDACjo6PYbDZ+97vfIZVKWbt2LfX19ZSUlNDf34+fnx+Dg4NotVpaWlpobW1FqVRy\n5MgRGhsbGRwcJCkpiYSEBBYvXkx4eDj79+9nw4YN6PV64fOVSiWZmZl0dnYKKkZJSQnDw8N4e3vT\n09PDtm3bCAwMRK1Wk5ycjEwmE3iKrKwsEhISEIlE1NTUoFKpkMvlxMXFoVQqaW5uZt68eXR0dBAX\nF0dUVBSbN29m2bJlwB2C7cyZM7S2tnL9+nVqa2sJDQ2lurqa2tparl27RnJyMvfff79AmM7MzLBg\nwQJGR0eJiopCr9czOTnJZ599JiS+srIytFotp06dYtu2bRiNRtLT08nLy0OtVrNy5UqB8L17ngaD\ngYceeoiMjAwkEgkNDQ1UV1ezcuVKJicngTu8lclkElSY7du3C4qKRqPB09MTo9HI6Ogok5OT5OXl\nAdDU1MSiRYuwWq3I5XLa2tooLCwUWmhPT08efPBBjh49Sk1NDZ6enkgkEpYvXy5wEFqtlpmZGcrK\nyjh58iTt7e3YbDbEYjEZGRmkp6eTkZHB9PS0sKB9HXgePHjw/zqY/1n43e9+d/Bu23Dz5k2+/PJL\nYmJiWL16NZs2beLFF1/E399fWA2vX79OQEAAixYtoqSkhI6ODiHQjx07xuzsLPX19cTHx2M2m3G7\n3cTExDBv3jyBpT5//rxAfN2VJdesWUNVVRUjIyPU1dWxceNGuru7ycvLw2azIZFIKCsrY9GiRYyM\njGA2mykrK6Onp4fKykqCg4N59NFHOXXqFE888QRnzpzh6NGjvPHGG2RnZ7NmzRqef/55goODBY25\nsrKS9PR0Tp06xdNPP/0PLLxCoUCn0+FwONi0aRNdXV0oFAqsVitVVVVMTEwwNjZGRkYGQUFBNDU1\nUV5eztzcHDMzM8zMzGCz2Zibm8PpdNLZ2cnChQuRSqWkp6cLx2o0GtasWYOHh4fgT+jv7+fkyZPE\nxMSQmpoqJJ2xsTF6e3tpamrCZrNhs9mYmZmhvb0db29v5ubmiI2N5fbt20xNTQlyX3NzM5s3b8bL\nywuNRoPVagWgv7+f6elpxGIxZ8+eZeHChYSEhJCZmYlGo0Emk6HT6WhpaUGtVgOQlpZGS0sLdrud\nY8eOYTAYkEqlrF+/Hn9/f65du8bNmzcJDAwkOzubd955RyDwVq9ejcvlwtPTk/7+fkQiEYmJifj6\n+vLVV19RVVXFyZMn2bFjB9nZ2Rw6dAiFQsGSJUuIjo4WqtXo6GiOHDlCWFgYcrmcwcFBAgMDsVqt\nREVFER4eTmRkJJ9//jljY2PMzs4SHR1NR0cHXV1d+Pr6UlpaKrRuNpuNyMhIHA4HN27cEM57bGwM\nvV7P3Nwcy5YtQ6lUCr4Uo9HI7Ows/v7+bN68meeee47u7m4mJibQarV88cUXlJaWotVqiY2N5dq1\na4MHDx58+7+Lx29FpeDv7092djYikQiTyST02F5eXmRkZDA5OUlUVBR5eXnU19eTmprK22+/jc1m\nY+3atQwODvKrX/2K5uZm1Gq1EFh6vR6FQsHg4CAXLlxAp9Nx9epVwd9gNpuJi4sjMjKS0tJSxGIx\nXV1dXL58GZFIRE9PD0lJSdhsNgYGBti9ezeAYGoxmUxs27aNTz/9lE2bNrF27Vqh3/viiy/o7Owk\nPj6e7OxstmzZgkqlQiQS4XA4WLBgARKJhIiICHp77/i87sp1dXV1eHt7Y7VaCQwMJCEhgcLCQnQ6\nHTU1NWRnZ7Np0ybWrVuHSCQSyKuIiAjBqHJX//b29hakP7iTbC5fviwQuEVFRTz55JN0dnZisVgw\nm810dnaSmppKQUGBYCZKTExEJBIxNzcnkKoOh4Pu7m60Wi3r169HrVbT399PT08Pq1atwtvbm9TU\nVGw2G3FxcYjFYjo6OggLCyM2NpavvvqKgIAAnE4n5eXlAGRmZnLhwgXm5uYIDw+nsbFR+D+ys7OB\nO4TxokWLiIqKYteuXdjtdoaGhrh8+TI9PT2Mjo6yZs0a/P39+eSTTwAoLCyktbWVjz76iPfee4/w\n8HChrfPw8GB4eJiuri6kUil2u52oqCgGBgZITU1l27ZtLF++nBs3bjA9Pc2ePXt47bXXAFCr1YhE\nIh566CFSU1MZGRlheHiY7u5u/va3vzE0NCQkcZVKBYBEIqGrqwu73c7k5CRtbW2IxWJ8fX0FUvzq\n1as8/vjjfPe73yUrK4v4+HhKS0vx9fXFbDbjcDj49NNPqa2txWg0MjQ0hEajYXR0lNTUVCQSCatW\nrWLVqlWYzWbh2n0dfCsqhZdffvmgRqOhra2NhIQEHA4H+fn5tLW1MTY2hlQqpb+/H5vNhre3N/X1\n9axYsYKOjg6OHTuGn58fSqUSb29v5s2bR0REBKdPn+aRRx5Br9cTGhpKZGQkiYmJBAQEcPz4cfLz\n83nkkUfYuXMnP/3pT8nJySEkJEQIYqfTSUtLC7W1tQwMDPC9732PEydOUFJSQnJyMhkZGVy6dImq\nqiqWLVuGVColKioKsVjMzMwMK1asQCKRsHnzZoKCgpBIJEJJPD4+Tm5uLkajkcnJSXbs2MFf//pX\nVq1axfDwMBKJBB8fH9LS0igvL2dkZIT77rsPq9VKZGQkLS0tKJVKysvLSUxMRCaTMTU1hdPppKqq\nioCAAKRSKVNTUzQ1NTE3N0dcXBxeXl54eXlRV1cnEFZ/+ctfyM3N5f7770elUgnqgU6no6KiQqii\nBgYGaG1tpbq6mnnz5gkS6F0TzdDQEMPDw8jlcvz8/LBarUilUrZs2UJvb6/gUoyOjqa1tZWOjg7G\nx8cJDQ3l6tWrRERECMaqzMxM5s2bx8cffyxck8rKSjw9PSkoKODEiRNYrVZGRkY4e/Ysubm5gslt\nenqa2tpa0tPT0Wg0KBQKTp06xbFjx/jggw9YvHgx5eXlbN68WfBf9Pb2cuPGDZKTk2lpaWH37t20\ntrYSGxuL2Wxm06ZNAind1taGy+UiOTmZzz//nNDQUNLS0jAajRiNRux2O1arlaCgIEwmE319ffT3\n96NQKEhKSmLjxo2MjY0JzlOFQsGWLVswmUwkJycLLdP3v/99nE4nly9fxsvLS6guRSIR165dQ6FQ\nsHHjRtxuN2FhYcTExGA2mwXubHh4mKmpKXx9fZHL5VRUVNDQ0PC1KoX/NUfjf4ZUKhXIr87OThIT\nE3n//fcZGRlhbGwMnU5HQ0MDoaGhyGQyoqOj
BWY3KCgIt9tNe3s7fX19LFy4kJqaGuDOqlhfX094\neDiBgYH09PTQ2NjIL37xC4HQu3z5Mq+99hoTExO89957uFwucnNzMZvNmEwmgoODhXbkrp8iJSWF\nlpYWgoODcblcREVF4ePjg0Qi4fnnn2ft2rVcunQJmUxGeXk5Hh4e+Pn5IRaLCQoKIiAggE8//ZSo\nqCjGx8f54IMPAIQVS6FQMDU1JTzwDoeDCxcuUFtbK7RQOTk5lJWVcfv2beLi4jCbzQQFBSGVShkb\nGxNMWwEBAcTFxVFcXEx8fDyXL18G7rDcH3zwAYsWLWJoaIhjx47R2tpKRkYGISEhnDx5ksLCQnbu\n3Mnx48eJj4+ntraWgoICBgYGBAflXevw5cuXmZiYYNmyZeTn51NfX88XX3yBzWYT+uiKigqCg4Pp\n6+vjxIkTDA0N4XK58PHxwWQyARAUFMTnn3+O0+kkMzMTo9HIkiVL8PDwwN/fH0CwtE9MTLBkyRIi\nIiKYnZ1lcHAQjUbD7OwsHR0dyOVygQdSKBRs374df39/Vq9ezeTkJG+99RYxMTG0trai1WpRKBQ8\n8cQTTE9PEx8fT0JCAqOjozQ2NpKSkoLL5WLdunWsWLGCo0ePAndI4KKiInQ6HTt37qSmpkbgiEJC\nQqitrWVh1cHFAAAgAElEQVT+/PnMzc2h1+spLCwUpGqj0UhfXx9Wq5WxsTHBNQrQ1tZGdHQ0UVFR\nKJVKHA6HYNhSKpWEhoZit9tJTk7m0KFDjIyMkJqayt69e1Gr1Xz00Ud4enoSFhYmPLdfF9+K9sFk\nMqHT6UhJSSEsLIyUlBTy8/MxmUx4e3tz/Phxamtr0ev1JCQkIBaLiYiIwGQy4XK5+Pvf/87MzAz3\n33+/UBLCHTPQ1q1bycnJQaFQCG7DxsZG5HK50DI0NzfzzjvvoFaraWtrE7TvwMBAVCoVy5cvFySj\nu+/v7e1lYmKCgoICUlJScDgcxMXF4evry+zsLKGhody4cYPf//739Pf3YzabCQgIYMmSJaxcuVJw\n4rlcLuEhmpiYIDExEYPBIMwF9PT0UFhYSGBgIPHx8URGRvLQQw8xMjLC4OAg4eHhaDQaent7OX/+\nPP39/RiNRn70ox8RGRnJ7OwsDQ0NREZGMjk5idFopL29HavVSm1tLbdv3+bdd99lenqatLQ0LBaL\n4ETMy8vDarUyMzPD2NgYWq0Wu92OQqHAZrOhUqnQ6/U4nU6CgoIIDAxkdnaW8+fPMzY2hkgkor29\nnXfffZfW1laSkpLIyMjgypUrzMzMsHTpUqxWK1qtlvz8fABiY2OJiorihz/8IfPmzaO+vp6TJ08S\nERHBrVu3uHLlClNTU8ybNw+A/Px8rl+/TklJCVu3bkWtVvPII48gkUi4efMmGzZsAKC0tJQbN27g\n5eVFVlYWcrmcBQsWEB0dzYIFC9i+fTs6nQ4/Pz9OnjzJJ598QkdHB2VlZZSXl1NTUyO4Xt9++22u\nXLkCwODgIDExMSQmJnL58mWhRfL29kYul5OSkkJVVRXnzp3jq6++wmw2c+nSJXp6elAoFML9uysB\n33U0NjQ04HK58PDwYNOmTajVavz9/VGpVJSUlFBYWIjb7ebs2bOkpKQglUq5ffs2MpmM9957j7y8\nPIGEnp6e/v+f+qDRaCgtLaWxsVGw2d53332sXr2avXv34nK5yMjIEI6Jjo6moaEBo9HIhx9+yPr1\n60lJSUEkEjExMUFVVRUATqcTi8VCZWUlERERghtw1apVOBwOgcS7desWExMT6HQ6xGIxf/7zn2lq\naqKhoYHGxkY6OjqIiYkRyLGwsDAyMjLYsGEDNTU1GAwGysvLsVqtbN++nczMTA4dOiSQWCKRCLVa\njUajoauri7Nnz1JbW4tCoWD9+vXk5uYCYLFYWLlyJcHBwdTV1QkSoL+/v/AKCgoS2PGmpiZiYmIY\nGRlBo9GwfPlybDYbFouFkydPUlBQIMh1crmcqakpzGYzzc3NZGVl8fLLL3PmzBnmzZvHU089RXh4\nOOPj42g0GlJTU1mwYIEwcDU0NISnp6eQUMPCwujv78dkMjE9PS3YwcvLy7l27RpdXV1s3ryZ8PBw\nPD09qa2tFYxWg4ODbNiwgaamJgICApibmxNY96NHj5KSkoLdbqepqUko6d99910SExPR6/UsWrSI\nqakpbDYbL730ElqtFpVKRVNTE8PDw2RnZ7Nw4UJhbgBgzZo1LF++nOvXr3P+/HkSExMZGRnB7XYL\n8qjFYqGrq0tw1p49exadTicMNnV2dvLRRx/R0tLC2rVrhef3rsJxl0QNCwujsbERf39/hoeHcTqd\n5OTkMH/+fGQyGRKJhDNnzuBwOLDZbNy8eRNfX190Oh179uwBYN++fXh7exMREcGLL76I1WqlqamJ\nsLAwnnrqKWZmZqirqyMyMlJ4bnNzc5menhau2YEDB0hISEAikZCcnPy14/Fb0T64XC5ycnKYmppC\nIpEI8l94eDjvv/8+999/P6WlpezYsYOnnnqKkZERDh8+TF9fH2FhYcJNUavVjI2NkZmZSXV1Nbt2\n7eLkyZNs3LiR2tpaDAaDsDIfOnQIu93Ohg0biIyMZO3ateh0OmGqMCMjg/vvv5+hoSECAgI4fPiw\nQALp9XrBNu3p6Ul3d7fQ8zc1NXHixAk6Ozt59tlnWbFiBUNDQ5w9e5bo6GgyMzOZmpqirKwMl8vF\nhQsXhArmbl/q5eUlBNt/JpLUajUqlYpTp07xyiuvcPz4cZRKJXq9nqeffpqenh7S09ORSqXk5uYy\nPj5OTk4Ora2t1NbWsnPnTuLi4oQSt7e3l40bN5KYmMjJkyfJyckR+ua7w2EtLS2kpqbS0dGBy+XC\n19dXqGjueiJCQkJ49913aWpq4mc/+xkul4u33nqLkydPEhISwquvvsquXbuE3trLywubzYavry/x\n8fGcOXOGpqYmAL7//e8jlUrp6+tDo9EIbc9TTz1FU1MTY2NjPPvss7z33ntERUUxPDws3He3282q\nVasoLCyks7OTJUuW0N7eDtypFIKDgwUJ+uDBg+zcuZOAgABGRkZYu3Ytr7/+OtevX+fAgQMcOXKE\nubk5VqxYIVQ96enptLW1UVBQwHPPPQfcWdHvTrgGBASgUCi4ceMGzz77LNHR0dTX12O1WklMTOS9\n995jz549eHp60tDQwPnz51EoFMJzuXXrVj788EMAfv3rX7N+/Xq8vb2JiYkRSMvMzExWrFghENd3\nF7SAgABaWloYGxtjenqalJQU0tPTeeeddwgLCxNcs18H34pK4W6bsHTpUhwOh8AlBAcHExsbS2xs\nLLt372Z2dpbLly9z69Ytli9fjsViweVyYbVaBTtyREQE4eHhALz88sssWLCA999/H4vFwsjICAaD\nAbFYjFQqxc/Pj76+Prq7u7l69SpFRUWCEnD9+nWOHDlCb28vVqsVlUoltCX9/f3odDr+/Oc/Mzg4\niLe
3N/39/TgcDiwWCxqNRvDfd3Z2Mj4+zoIFCwgKCsLHx4e4uDhCQ0MJCwtDoVAIlc3atWsZGhoS\niNPm5mZMJhN2u50rV67Q2dkpsP935x/sdjuNjY04nU6mp6cFD8cXX3whBO68efOora3l4sWLzM7O\nsmDBAvr6+hCJRBiNRsHtefXqVebPn09zczNzc3MYDAahIoE76ojJZGJubg6xWExAQAB1dXWYzWZ+\n9atf8dBDD5Genk5QUBA/+MEPGBgYQC6X89JLLwlj7GVlZTgcDsGifeHCBUFNAGhububAgQPMzc1h\ntVrZsGEDhw4dwmq1ChOYlZWV5OXlodPp2LZtG4888gj79u1j9+7dTExM4OHhga+vL5WVlcLoukQi\nYXBwUBhNLi4u5uLFixw4cIAzZ87w+OOPYzabefjhh4UpTR8fH8Hfcfz4cf76178yPj7O2bNnhRFq\nqVTKxo0bhWrA29sbi8XCiRMnsFgshIWFsWzZMvR6PSUlJQwNDeFwOAgODmbdunUsXLiQxYsXMzk5\nye3bt1mxYgUAGzduFMx2Q0ND5OXlsXbtWmFMPSkpidnZWZqbm3E6nXR3d+Pv749MJuPmzZv09/cL\n7s7p6WlhmO/r4FuRFHx9fQkPD6eqqootW7Ywf/58PD09USgUhIaGEh0djVarJTAwUBgP/fzzz2lr\na2PDhg3s2bOH+Ph4KioquHnzJmNjY8AdF9vdBHJXU7878nvX/LFz506sViutra309fWhUCjQaDQs\nXrxYkPwqKiqoqKgQnJJOpxO5XE5ubi5arZbMzExmZmaoqKggPj6elStXcu7cOcxmM5mZmQQFBbF4\n8WIyMjI4cuQIZ86cwWg0cubMGaxWKw888ABwx7DS2dlJWFgYk5OTuFwuvL29BXv2+Pg4YrGY5ORk\nAgICaG9vx+VyMTs7y+7du1m0aBEajQZ/f3+WLl1KQ0ODYIDKycnB19cXkUjEu+++i81mY3JyEl9f\nX4aHh/H09BT4gfHxcSoqKhgZGaGpqYkrV66g0+nQaDTY7XYCAwNpbGxkbm6ONWvWIJFIBOXGaDQy\nMTHB+Pg48fHxdHV1UVNTg8lkIiwsjPDwcKKjo4mLi+PcuXOkp6cTGRkp3DOpVMrWrVvp6+vDz8+P\ngIAAYRamqamJ8fFxfvazn3Hu3DlB/hWJRFy9epWDBw+iUqloa2vDy8uLTZs2CfJ2X18f7e3tBAUF\nsWrVKn76058K5bfNZmPjxo2kpaURFBRESUkJYrEYq9VKTU0Nf/jDH9i8eTOZmZls3rwZiUTCCy+8\nANxxNDY1NaHRaOju7sZsNvO9732PhIQEOjs7CQ0Nxe12ExgYyL59+6irq+PixYtYrVa8vb0ZGxuj\nr6+PnTt3MjU1RWdnJwDV1dXMzs4KezSkpaURGhpKeno6t2/fpqmpiZaWFkJCQqivrycuLo729naG\nh4fZvHkzDodDUIT6+vr+R2Tjt6J9uGuKMZvNOJ1Oamtr/2EMOjk5mZqaGurq6hCJRJjNZjw9Pdm8\neTM1NTV89NFHaDQapqenycjIECbjNm/eTG1tLXK5HA8PD4KDg1Gr1fz1r3/l5s2baLVa4uPjsdvt\nHDhwAH9/fwoLC2lubqaqqooNGzbQ1dVFR0cHe/bs4fDhw8CdZHN3j4fCwkIyMzNZvXo1n332GS++\n+CK3bt3i4YcfprS0FLfbzQ9/+EPefvttjEYjLS0t5Obm4uHhQXh4OK2trYI/f3p6mvb2dmJjY0lP\nTyclJYXz589z4MABjEYjOp0OqVRKQ0MDDz30EE6nk0uXLgmybUZGBrW1tXR3dwu69q5du/jb3/4m\ntB6vvvoqU1NTbN26lc7OTkpLS8nPz0elUnHhwgUaGhrYsmULGo2Gs2fPEhsbK1QSvr6+2Gw2mpqa\n8Pf3p6ioSGDHpVKpsOFMd3c3W7duZWpqimvXrtHe3s6DDz7IpUuX6O7uZt68eQQHB6PRaLBYLCQl\nJQkafnt7u8AzREZG8uWXXwozFCKRiAsXLvDCCy/g7e3NlStX+OyzzzAajYLGf3c/A4vFwrp169i3\nbx9wJ+HeVbFOnTpFbGwsubm5qNVqLBYLv/3tbwkKCmLHjh386Ec/wm63MzIywqZNmzAYDIyNjVFV\nVUVwcDAymUwwUpnNZiYnJ4mMjOTy5cuEhIRw69YtfHx8uHjxIomJiXh7ezMyMkJsbCzbt28XCOHi\n4mJ0Oh0ymQybzUZGRoZgbd++fTsDAwOEh4cL927r1q0MDQ1hMBhoaWlBJBKRlZUlDKX19vbS399P\nZGQkFosFo9GIWCwWZOuvi29FpRAYGMjnn3+OUqmkoqKC++67jx07dqDRaOjv7+fDDz+kv78fq9XK\n3Nwcg4ODFBQUYLVaEYvFZGdnI5FIhAvc0tICwKeffkpXVxezs7OMj4/T0NDA4OAgx48fZ2BgQCD9\niouLeeuttygqKsJkMtHb28uWLVsYHx8H7piVJicn0evvbBHR29vLD37wA6qqqoiLi6OwsJAPPvgA\nsVjM4cOHSU9PF3rdiYkJ3n//fWHcetu2bSiVSv7whz8Im4dkZmYCd4jG4OBgPDw80Gq1KJVKvvvd\n7zI2NobJZMJkMqFUKklOTkYqlRIeHo6vry/p6enY7XZGR0fp6+sjMDAQLy8v8vLyeP755zGZTExN\nTaFSqRgYGGDp0qUcOnSIX/ziFzzwwAMCWZmRkSFsyFJeXk5lZSXj4+NkZWVRVlZGS0sLNpuN8PBw\nKisrycnJITQ0FKlUKpB0d/mItrY2YZgoODiYgIAAJicnCQsLE4xSgYGBrFixQljd4Q6JGxYWJiT/\n7du3I5PJGBoaYnJyks2bN2OxWKiqqhK+4/r168TExBASEsLly5cZGBhgamqK+fPno9VqhWfs0qVL\naDQa4uPjsdls/PSnPxUcn6tWraK4uBi73U5ERAQJCQkEBgby1ltvoVKpuHTpEhs2bOD69etUVlYK\nbebdEXQ/Pz9hgOkuyblv3z5mZ2dRq9XodDpaW1v5zW9+Q1VVFfX19QwNDdHf3098fDzT09OUlJSw\ndOlS4A75rlKpiIqKEkxpOp2O8vJyJiYmhM1rRkdHmZ2dxdfXF39/fzIyMoRhvPDwcGw2m3CNvi6+\nFUlhZmaG4eFhRkdHsdvtvPDCC4KPOy0tDZPJRGlpKSaTiaCgIGEI5saNGyiVSux2O4sXL6ahoYGo\nqCjKysoA2LRpE0899RRTU1OUlpaSkZGBzWYjISGBiIgIEhMTGR4eJiEhgY0bNyKVShkaGhLGfm/c\nuIFIJBL6uMHBQeDOSPavfvUroV+NiYkRRnK//PJLiouLkUqlTExMCFud3a2CysvLmZ6eJjIykrff\nfhuTySQEhNVqxeVyCVbm999/n6KiIqEXPnv2LFevXhUC/Ny5cxgMBsbHx5mcnMTtdpOVlYWHhwfd\n3d2C8eiRRx5Bq9Vy7do1IiMj6e/vZ8WKFVRVVVFcXExrayt1
dXXU1tYKFl2tVsvGjRsJDw+nvLwc\nlUpFTk4OwcHB+Pj4sH79egYGBlAoFISEhNDS0oJer2fv3r14enqybt06zGYzIyMjzM3N8eGHHzI1\nNYXD4UCpVApGmxMnTmA2mwV2fN26dUxMTAjt49GjR0lISKC7u5uxsTEhcKempoTt+pKSkhgcHBT2\nlUhLSyMjI4Nf/vKXdHff2WwoMjKSxx57jMWLF7N06VLS0tIoKSnhvffeo7i4mKCgIPLy8oiJiUGt\nVtPd3U1BQQHf/e53USgUBAYGUllZiUaj4V//9V8ZGBgA7igO69evp6qqCq1Wi1QqZWRkhOjoaHx9\nfdmxYwfNzc2EhoYK7ZzZbCYiIoKYmBiCg4MpKSmhoKCAtLS0f5hRUCqV/PnPf+bNN9+kqamJuro6\nIiIiUKlU6HQ6/vSnPyESiejr6+PkyZMMDg6iUqnw8PDg3LlzmEwmCgoKqKio+FZssvI/wt0dj1wu\nF4ODg+j1eiorK1GpVLzzzjuEhoYSHx/PokWLuH37NikpKTidTnJzc+np6SE3N1eQF51O5z9YW5ua\nmrhx4warV6/mxo0bgtElLy8Pg8EgzBVUV1dTVlZGfn4+RqOR5uZmtFotQUFB5OTkcO7cOWHVmZyc\nZPfu3bS0tNDd3Y1CoWDVqlWo1WoiIiL49NNPEYvFJCQkkJKSws6dO+nu7iY1NZUtW7YIhpzw8HCh\nXIY729JlZWXhdruZm5sjPT1dGPLp7e1FLBZTUVHB2bNnaWlpwd/fn5iYGCwWCxs3biQvL4+bN2/S\n3NxMe3s7ZrOZ0dFR/vKXv2AymYSdp7q6urDZbNy4cUOYJIyKihL2gWhoaEAikTA8PIzL5SI7O1uY\n9fD19aWzsxOlUsmyZcuYnp4W5hCqqqowmUw4HA4OHTpEW1sbSqUSm83G4sWLkclkguvRYrGQm5sr\nDLbdnZI8duwYVVVV+Pv7Y7PZyM/Pp7q6mosXL7J8+XIhCO56VoaGhggMDKSzsxO73U5sbKywH+Th\nw4eFtqSkpISbN2/y8ccf09XVxZEjRygsLBRapcrKSrZt28bs7CwXLlyguLiY/v5+ZmdnCQwMZOfO\nnczOzpKQkMDU1JSgaoyPj+NyuWhqakIqlQryb3t7O8XFxVy+fFlI9I8++qjgYBwZGWFqaoq33noL\ngL///e8UFRUJGwT19/cLZrC7jsrPPvuMa9eucfz4cQYHB/n1r3+NSqXi7NmzwlCfSqUS+CilUkl7\ne7swSv518Y1s8f4/hUgk+t8/iXu4h//3Uel2uxf8dwd9K4jG4OBgjh49SkVFBZ6ensKuyHdLOIPB\nQGNjI42Njfz85z/Hz8+PI0eOEB4eTmZmJsPDw4KbrqKiAofDweuvv86HH37IH/7wB/74xz/i6enJ\n7du3aW5uZmRkBIlEQkZGBt7e3sTHx3P48GESEhLw8/MjISEBs9lMa2srcIekSkpK4k9/+hMff/wx\nxcXFvPLKK8zNzfHAAw8Ivvm7/viYmBgeffRRnnjiCYaHh0lOThZkPZvNxurVqykrK8Pb2xtfX1/K\nyso4dOgQr7/+OjKZjPHxcdrb2ykoKKCvrw+pVEpFRQXh4eGEhoZy7do1UlJSgDsrVX19PevWrWN4\neJhFixZx7tw5RCIRBoOB/Px8Vq9eLbgI5XI50dHR/OQnP+HgwYPU1NTQ3d1NYGAgeXl5tLa2Mjw8\nzN69e/H29qampoaGhgZiYmL4j//4D15++WXGxsYoKipCq9WyePFiampqCA0Npbe3l7m5Oa5fv463\ntzcLFy7EYrGQk5NDbW2t0H7pdDqSkpIoKioiODhYIEZ/+ctf8pOf/ITQ0FA6OztpaGggKysLsVjM\nj3/8Y5555hlmZ2epqqrikUceoaOjA61Wy7p16/Dz8xMY9srKSmJiYiguLsbhcPDmm29y8OBB1q5d\ny/DwMKWlpXzve9/j9OnTLFiwgIqKCq5evcq+ffsIDw+npqYGiUSCWCwmLy9PGNbq6+sjMzNT2Nkq\nMDCQ06dPU1dXR2FhIVqtVuBILBYLoaGhvPTSS8yfP19Q1oxGI8XFxfz2t7+loqKCsLAwuru7Bdl5\n/vz5/PGPf+T999/HbDbz2muv8dxzz9Ha2oq3tzcKhYK2tjaSkpI4efIkq1evpry8nLq6Ovbt20d8\nfDwvvfQSzz//POfPn2diYkKoNH//+99/rXj8VrQPU1NTvP322/+HvfeOjrrO+75fk957nfTeC+kk\nIQ0IvQjSRFxAVGwr6q7KtWvZddVrV91VFAFREBQpEqoQSiCE9JBCem+TnpCeTMokmfuP7Hyfe8+5\nz3X5PPs85+ye5/r9k8kAc4aZ37d9Pu/36y0MH6pz/I4dO/Dy8hKcxnPnziGRSPDw8GDlypUkJSVx\n7tw5hoeHkclk2NjY8PrrrxMbGwuAo6MjO3bs4P79+9TU1AhU1c6dO4mOjmZycpLBwUFu3ryJTCYT\n3MS0tDRGR0dpaWlhZGQEfX19Lly4IDz/qampWFtbs3XrVrq7uwkMDMTb21tIb8vLy3n11VeB+QJX\nT0+PkLNaWloKo429vb3gMMC8zPnu3btoa2vzm9/8hsrKSiYmJujr62N0dJTa2lry8/MpKipCIpFw\n/vx5oXz09/cnPj6e/v5+vL29SUhIwM3Njfj4eA4dOoSLiwvR0dGMj49TVlbGK6+8Io5T6urqBAQE\nUFxcjJGRkej29PT0oFQqUSgUDAwM8MQTT9DV1cXNmze5e/cuZmZmHDhwQHgzlixZQm9vL0uXLkUq\nlTI0NISzszNnz56lt7dXgEO0tbWpr68XE4lKMAUQFRUlahJPPfUUN27cYNu2baxZs4Y33ngDbW1t\nHnvsMSFrn5mZIS0tjc7OTlpbW2lubmZgYECAb1RHMw8PD0JDQzlx4gTLly8XE059fT0+Pj54enqi\npqbG4OAgdnZ2lJWVcefOHa5cuUJISIigQ7e3t/Ptt9+SmZkJwNGjRzlz5gx79uxh4cKFXLp0iays\nLPT19fnd735HSEgIDQ0NREdHEx8fj0Kh4L333iM1NZW+vj68vb0pKysTTlLVZD82NkZVVRXvvfee\nsGOPjY1x//59YmJimJ2dJSQkhA8//JDg4GBefvll8vPz0dXV5YMPPmBwcFAUIXV0dP79xEsqP/jc\n3Bzu7u6YmZmRlJSEUqnk4cOHGBoa4uDgwOXLl9HX16eyslJYVteuXYufn5/IYlBTUyM/P1+8rqoS\n7e7ujq6uLmNjY9y+fRsDAwPa29tFoU4F6ZicnERLS4uUlBQCAgKora0VqjTVSrRx40ah/gsNDcXc\n3JysrCyuXbuGra2tQF91dXUJtp5MJsPJyYnh4WFefvllPDw8hJpRVQzz8PBgyZIllJWV8fXXX6On\np0dCQgI6Ojo4Ozvj7++PlZUV+/fvp7KyksjISGZmZujs7KSiooLr16/j6emJTCYjMzOThIQEfv75\nZxYsWMCpU6eEbNv
X15f29nZMTExIT09HX18fbW1t2trauHPnDk1NTUilUpF50dXVRVxcHAEBAVy5\ncgVvb28iIyMpKyvDzMxMYMHa29vFd6NQKIB5OExERAS7d+/m5s2bODs7MzIygkwmw8XFBWNjY7y9\nvUWLz8zMDFdXVx4+fMgPP/zAm2++SWNjIy+++CJHjhzB2dmZFStW8OjRI/Ly8oiNjWXt2rVC8FVc\nXIyDgwNlZWWUlZWJ95GXl8c333zDhg0bGBsbQ3VsjouLY2xsjD179tDa2oqpqSlGRkY8/vjj2Nvb\nk5yczMOHDwXAx9vbm8TERFFT8Pf35+jRo1y4cEHskkxNTamtrWXJkiUCyR4bG8vnn3+OkZERPT09\neHl58eqrrzIyMsJf/vIXvLy8KCsrE3Dguro61NTUBJR1cnKSmpoahoaGuHHjBr29vUilUjZs2EBB\nQQH19fVYWlry7bffChSbqhCen5/PY4899ovH47+Edfrrr79+T6FQ4OjoSEBAAG5ubsJEEhcXx61b\nt4iPj8fAwIDa2lqqq6sxMDCgpaWF4eFhURRSgSdUtluVldnV1ZWffvqJTZs2kZmZKYwta9euxdra\nWqxSy5YtY2pqSrARLS0tkUgkLF26FE1NTYyMjLh8+TKrV69mbm6O7OxsLCwsKCwsFKug6stUYc+/\n+uornJycCAwMJC0tDVNTU7FaBgQEIJFIsLOz48yZM4yNjQlzjLOzM1ZWVkI7PzExQWNjI+Hh4dTW\n1rJhwwZyc3Px9/fH398fIyMjzM3N6ejoICkpiampKU6cOEFUVBRDQ0MEBATg5+fH9PS0+FxVXguJ\nRMLp06dRKBTMzMzg4ODAyMgIy5cvR1dXl4iICJqamsjIyMDS0lKE4hgbG4tgGRVGfnp6msbGRhwc\nHOjt7cXLy4tTp04hl8tFFoGqiKgC0KqyGAoKCtizZw/V1dWCuamvr09ZWZkAnPT19VFXV0dWVhYv\nvfQSBgYG1NTUcP/+fRoaGnjyySe5ePEiUVFR7Ny5k+HhYa5du8a2bdv+IWzF2tpadIi6urooKioi\nMTERiURCQUGBEI319/eTnp6Om5ub0Eb09fVhb2/PhQsXCAwMpK+vDzs7OzGhFRQUiA7G7OwsTk5O\nBAQE8OWXX+Lh4UFpaSnm5uYcO3YMuVzOmTNn6O7uxs/PD1dXVzIzMzE1NcXGxobu7m6cnZ05c+YM\nQY1wxLsAACAASURBVEFBJCcnk5WVha+vL9ra2vj5+fH9998D8Pbbb5ORkYGNjQ01NTXo6upSW1vL\n6tWruXfvHoWFhb/IOv0vMSl88cUX723fvh1fX1/u379Penq6cDzm5uairq7O3NwcxsbG/PTTT8Jr\nXlRUhLm5OaWlpZSXl2NgYIBEImFkZITU1FR27NghfPyqG97Lyws/Pz8yMzNRU1MjPT0ddXV1lEol\n165dw9/fX0hGKyoq2LhxI3K5nNbWVvT09Lh06RJJSUn09/djbm7Oo0ePhFkqLy8Pb29vtmzZgp+f\nHwMDAyQmJrJo0SJu3brFqlWrBCpdR0eHoaEhbG1tqa2t5fbt2yxatAg1NTW6u7t54okn0NLSwtjY\nmO7ubtra2nB0dBQpSKdPn+a1114jOztbwD1cXV0JDw/nwoULom+uwpmrjD5VVVW88MILmJub4+fn\nR0VFBYODg4SHh7N7924WLVpEcXEx4+PjHDlyBH9/f5RKJWVlZSxatAilUomXlxdTU1PExMRQVlbG\nxo0b2bp1K5WVlSQlJWFvb09tbS379u1jbGyMXbt20dPTw7lz57Czs+P69etIpVLi4+O5desWDx48\nYOfOnZw5c0bwE1WDX9WKnJqaEmlHKql4d3e3aJd++eWXvPHGGzQ0NODg4MCyZcs4deoUg4ODZGVl\nkZKSQmNjIwsXLqS0tFTUGzQ1NQkNDWXlypXieObu7o5MJiMyMpKIiAh0dXWpqKjAxsaGVatWoaOj\nQ39/P5cvX2bHjh3ifSQmJnL79m0iIiJIT08X2glLS0vMzMxwdHTExcWF0tJSsSg1Njaybds2tLS0\nBCo/IyODt956i9raWmxsbMSR7MqVK6J24ufnJ7DziYmJmJubo6mpSVpaGhkZGaxbt07wM1RHrW++\n+ebfZ1L4+uuv31PBVYKDg+nr62PRokUigi00NJTk5GQMDQ2Ry+VCOGJvb8/c3Bz6+vrs37+fzMxM\nAgICiI6O5uDBg0IaGxERIeoOg4ODwsDS0NCAq6uriE9TwVFVwNYVK1agpaXF3bt3CQ8Pp7W1lZs3\nbxIdHS3MRmZmZiiVSjQ0NAQwJD4+npKSEiYnJ/H396elpYW6ujqRVTA5OYmOjg4PHz5EW1ubzMxM\nqqqqiIqKwtnZGVtbWyE7lkgkXLlyBU1NTby8vITDLykpCRcXF1pbWyksLCQ5OZnk5GT09fX5+eef\nGRoaYmxsjKmpKfz9/QUYRkVlun37NkuXLuXBgwcC1KpUKsnPz2dubo6QkBBGR0cZGRmhs7MTAwMD\nkf9gZWVFS0uLKHzJ5XL09PSIj4+noKBAIO5ycnLo6Ojg5MmTuLu7i+g3Dw8P7ty5g5qaGtu3byci\nIkJQoFxcXERxOTg4GDU1NaytrbGzs2N4eBh/f38CAwOFBNre3p579+7x/PPP09zcTGRkJFpaWly7\ndg1tbW0cHR25cuUKW7duJScnBzMzM4yNjcVuSCX2evTokWA+amlpifqMSmY8OTnJ7t27RTvWysqK\no0eP4uHhISLsmpubiYuLw9TUlNjYWC5dukRDQ4PIiVCpC21sbCgoKBB4PW1tbcrKytDQ0BBoPxMT\nE9566y3hqenr6yM4OJi5uTkMDAwYGhrCyMiImpoaPDw8qK2tFT4WVXBPV1cXenp66OrqcvLkSerr\n6/99cGyqtKDx8XFhXkpNTRUpUdbW1jzzzDPs27eP6elpVq5cSXt7Oy4uLhw+fJjc3FwuXbqEvr4+\nY2NjZGdnAwilWl5eHoGBgUJnXlFRIXYICxYsoLy8HENDQ6anp+np6cHc3ByZTEZPTw8lJSUoFAq2\nb99OeXk5gOAIWFpaCsBsb28vDx8+xNvbm5qaGpYsWUJVVRXNzc14eXkxNzfHrVu3OHbsGENDQ6Sl\npSGVSpHJZMJ9aWRkhIWFBRKJhLKyMnJycigoKMDJyQm5XI5MJsPExITu7m6srKy4ePEiLi4uRERE\n0N3dTUNDA5mZmTg6OqKuro66urp43+3t7Vy+fJnjx49z9OhRCgsLOXfuHFlZWQKlPjs7S09PDxUV\nFVRVVZGcnExQUBCampqCvJydnc3t27cZHR0V5qjS0lLu3r1LZWUl5eXlHD9+nImJCdzd3Xn99dd5\n6aWXmJiYEF0dlQpVS0uL1tZWBgYGhBjI3d1dxNONjIxw48YNPD09CQwMJDAwEHt7e/Ly8oiKikJT\nU5OpqSmSkpIE2k5PT4/jx4+LToWpqam4F6ytramtreXPf/4z/f39XLhwAUdH
R4KCgvDy8mJgYACl\nUklHR4fIyXR0dGTTpk08++yzYvBfuXJFIP927tzJ0NCQWLxUlKuFCxfy448/4u7uzuzsLDt37qSg\noICpqSn09PQICQkhPj6epKQkZmZm0NbWxsnJSdjzLS0tuXLlipj8fXx8qKysREdHh+zsbJycnGhq\nasLQ0JC2tja6urrQ0NBg69atBAUFiSyV4eFhOjs7CQoK+sXj8V9iUjA0NKS5uRlPT08WLFhAbm4u\n3d3dIu3p4MGDqKurs3DhQqHeUhWKVq1aJUxMW7duFa1JmDer6Onp4ezsTEdHB6tXr2b16tVIpVIC\nAgKE4Wn79u00NzfT09MjOgF79+4Vlf6pqSkOHjyInp4egAgJzcvLw8TEhPb2dvT19TE2NqahoYEb\nN26watUqdu7ciYeHB3NzcyxYsID9+/czMDCAXC4nODhYGGiWLFkCILiPXV1dXL9+HZlMRkZGBtPT\n0zz11FPCfKW6ue3s7IQC8ezZszz33HOUlJTQ0tIiTGYzMzPU1dXh5OTE8uXLWbVqFSMjI9y8eVPY\ngGdmZpiYmMDe3p7FixdjaWmJkZGRgICWl5cLCKyNjQ1+fn40NTWJtC3VanTv3j1CQkKwt7dn7dq1\nPPfcc6ipqYnchCeffFLkQzY3N7N582ahJFSJdhISErh+/TpWVlaMjIxgY2ODoaEhL7/8MhMTExQX\nF1NcXCxEbSpjWkNDA56enoJ4ffv2bUFDhvnAoeLiYnx8fFi1ahWxsbGcPHmSjRs3iqOSaqJ++eWX\naWlpobKyEnt7e2JjY4WaMiUlhZiYGDGJ/e1vf2N0dBQzMzPxXjs7O7l//z4HDhxgzZo1uLu78+67\n7xITE0NOTg4XL15k8eLFaGhokJWVRXp6Onfv3iU3N1csMi+88IKgawECz6dSNKp2cSMjI1y9ehVr\na2tsbGzo6+tj3bp1DA8Ps2HDBmBeFKd6/Euuf4lJQVtbWzjBDA0NRdLSzZs3CQwMFP4BVebe5s2b\nGRsbIyUlhcDAQLZu3SrShHNzc8UN5unpyaVLl3j48CH9/f1Ck6+jo0NoaCgpKSki5HRmZobExER6\nenpwcHBAoVCwevVqTE1NUVdXJzU1VeQz3L9/Hzc3N/z9/UVwSk5OjjguNDU1ERUVxdjYmAhSXb9+\nPefPn2d4eBg9PT3a29tRKpXCcAXz3ZKmpiaamppYt24doaGhREZG4ubmRkNDAx0dHQQHBwvuf2Fh\nIX19fdy9excjIyNCQkLEqt7d3U1/fz9Hjhzh17/+Nerq6pw4cUJ8NmvXrhWQ28rKStTU1AQ3YMuW\nLcTFxQlwqaOjIxoaGgQEBKBQKLCwsMDExAQ1NTXKy8tFtoNUKmVwcJCgoCCkUilaWlro6+tjZmbG\n2NgYp0+fRk1NDRcXF8rKysjMzGRwcFCQlQCOHTuGpqYm7u7uAq0+NjbGyy+/zNDQkJB429raCvfm\ntWvXhDVaTU0NT09PzM3NOXHiBFu3bgXg4cOH/P73v8fCwoKuri6ys7O5ePGi2JU6Ojpibm5OeHg4\nZWVlGBoaIpVK8ff35/Lly7S3twuCuLa2NpWV84FnW7duxc3NTdSUFi9ejJ6eHtra2jQ3N2NgYEBU\nVJQ4QpqampKYmIiVlRUdHR2YmJgQExPDq6++ylNPPcXZs2eBeXPcDz/8gFwup6OjAyMjI9rb2wX9\nWQW9tbW15ZVXXsHS0pK5uTnWrVtHVlYWH3/8MXfu3EGpVFJTUyOUk7/k+peoKXz88cfv2dvbY2Zm\nRn9/v5DcqlaswsJCDA0NRQqyCkg6OztLQkICERERlJSUMDExQXh4OMHBwXz22WcEBgaKcNjdu3cj\nl8vp7e3FwMCA/Px8WltbBWrMxcWFRYsW4eTkxMGDB4mMjBTQ1PDwcCE4Kioq4quvviIzM5P09HRG\nR0cJDg6moKAAR0dH7O3tBU/QysqK6upq+vr68Pf3F8aWkZERFAoF5ubmODs7MzQ0RHp6Otu3bycz\nM1MAZ1SRdGVlZUJLoPosampqaG1tJS0tDVdXVxEK4+3tLVq7bm5uODk5oaOjw6NHjwgMDERfX5+O\njg7U1dV58cUX0dDQoLu7G1dXV1JSUvjggw9wdHSkq6tLhNqamZnh7OyMkZERmpqaDAwM0NvbKyLn\nc3JyiIyMpL6+HkNDQxITE0UoSkFBASUlJchkMvLz80lPT8fMzIzExEQMDAyYmJhAJpPR2trKw4cP\neeGFFwgKCuL+/ftcvHiR5ORkdHV1qaqqEqRlFdW6vr6eoaEhIRZqaGggMTGRnJwcdHR06OzsRKlU\ncv36dTw8PKirqxMhQjU1Nezdu1dEwXV1dZGQkCAq9tnZ2RgYGAjg7vbt2zl69Chbtmzh5s2b6Ojo\ncOvWLSwsLAQaPzU1lcbGRtasWUNNTQ3e3t4MDAyQmZnJnTt3MDIyoqCgQBxzPTw8BHhYVStTU1Oj\nqqpK0MQGBweFZT0pKYmff/6Z5uZmjI2NkUqlODk5kZOTI3YNKSkpALi5uREZGUljYyM+Pj44OTlx\n7dq1f5+awtTUlEhJViqVfPbZZ1y7do2RkRE++ugj5HI5c3NzxMTE0NjYKAJoExISRGegubmZvr4+\nHjx4IOg1qplU1T4aHx/H2NiYiYkJrKysCA4OFozFmJgY5HI5u3fvFqGgk5OTrFy5kv7+fj777DNC\nQ0OB+R7y1NQUiYmJODk5MTs7i56eHoGBgSKKbGJiguPHj/Pqq68KSKmOjg6WlpZcu3YN579nDurr\n6wtLdnp6ugiCzc7O5urVq8zMzIgkq5aWFqHPUGHXFi5cSExMDHv27MHPz4/z589TUVHB/fv3qaio\noLS0lJGREZRKJYaGhvT09LBy5UqWLFlCbm4u+vr6bNq0iYqKCkJDQ/nDH/7AgwcPCAkJwd/fn4mJ\nCcF5uHz5soDI+Pr60t/fT1RUFK2trdy6dQtzc3OmpqY4ffo09+/f5+HDh8jlcsLDwykqKiI2Npal\nS5dib2+PhYUFdnZ2ItPBzs4OmA/aUX3fKtuwnZ0ddXV1/PTTT/j5+VFeXk5tbS3+/v6Eh4ezbt06\njIyMWLBgAW1tbcTGxlJZWSni3ACRw6AKsFEoFMI2rqenJ2hFt2/f5tixY7i5uREaGirIycPDw+ze\nvRuFQiEWJkDg0IyMjDh+/Dh2dnZ0d3djYWGBqakp58+fp7e3l/j4eCIjI9m/fz9Lly6lsrKSuro6\nzM3NRQdKFUqjGhOnTp1i6dKl/2ACVCgU7NixAz8/Py5fvizCl1WitPXr1+Pq6srk5CQ5OTkoFAqK\nior4+eeff/F4/JeYFNTU1FixYgXW1tZiRVRXV+fGjRvo6+uzd+9egoKCxCoeFBTE+vXrRcz79PQ0\nlpaWpKeno1QqWbZsGTCfCKytrS1Wl5mZGeHqU0WCV1RU8ODBAwYGBoQVemRkRCRB19TUiCi13t5e\nYN7VaWNjg1wuF6nKvb29tLW1Ce2
BlpYWUVFRrFixguDgYH788Ue++OILjI2NSU5OxtXVlYMHD2Jv\nb8+ePXuA+ULj+Pg4BQUFzM3NERoailwuR0NDg/v37zM4OMiZM2coLCwUA9fExITm5mays7M5deoU\nHh4eKBQKJiYm/sGopMrZLCwsJCQkhMDAQLE1b25uZmhoiIGBAVxdXYXARiWcUpnVpFIpPT09fPTR\nR2RlZaGtrS2k6eHh4czNzYldUUJCAtbW1ri7u3Py5EkUCgVNTU1MT08jl8tFtV4VaqM686oQdT09\nPezfvx83Nzc0NDRQKpWEhc3L9tXV1Tl06BAA58+fJy0tDWNjY6ysrDA1NWVychI3Nzdx1AGEJP7e\nvXsi+TkwMBCFQkF4eDgbN27kwYMHGBgYkJCQgKOjI62trSKVOzU1FS8vL2GhV8FbxsfH0dLSQk1N\njdLSUuEIVTE6Y2NjeeeddwgJCREiLS0tLXx8fCgtLWXx4sUUFxejqamJUqkUtO3e3l4sLS35z//8\nT7KysrC3txeCrAMHDoiMk9u3b3P16lUKCgp4+umnRXaFlZUVenp6SCQSNmzY8O/nklRhp2xsbFAo\nFCJgRE1NjStXrnD58mXWrVvHnTt3+OKLL/jtb3+Ll5cXDQ0NjIyMCDz4mjVrmJiY4NKlS8B8cQnm\nmYodHR2kpqaKlo+enh59fX2UlZUREhKCqakpZmZm5OXlsW7dOtTU1EQdwtPTU/THAWGDTkhIEK25\n4OBguru7sbW1JSwsTAidqquraWhowNvbm/DwcO7du8eWLVu4fv06JSUlFBYWCprzs88+S1JSEg4O\nDmzcuBF/f38h5lLZr3t7e4XVV1V0bG5uFgEuZWVl2Nraoqenh42NDYsWLRKe/NnZWfz8/CgoKCAt\nLU1Igr/66ivi4uKwsLDg1q1bODg4YGVlRUVFBTKZDC0tLWxsbET2wJo1a4T9fOXKlRgbGwuNfWNj\nI+Xl5WRlZbF8+XJGR0dxdHTEzc2NiYkJTExMaGtrE0IuVcbD7373OwAKCgqYmZkRjIaqqipqamoI\nCQlh5cqVHDlyBDc3N2pra3niiSeor69nenqapUuXkp+fL3Qpt27dYnR0FKlUCsC1a9dIS0sTwa2z\ns7PcvHmTjz76iL6+PqHi9PPzw9vbm08++YTKykoSExN5+umncXV1ZXBwUIQIqejIhYWFxMfHc/r0\naZqamnjsscdQU1MjJiaG7777jsWLF9PV1SWQ7Cpa9ejoKG1tbaSmphIYGMjs7KzYucE8s+Ppp5/G\n0NCQlStXUlFRgYuLCzt37mTNmjX09/djZGQkVKGDg4NMT0+zefNm2tvbefvtt1EqlQwODmJubi5U\ns7/k+qcMURKJpAUYBWaBGaVSGSaRSMyAs4Az0AJsViqVg//V64yOjnLq1CmBQJdKpTg6OiKXy7l3\n7x6WlpZ8+eWXbN68merqatLT0+nr6yM3N1dsPaVSKefPn+fzzz8XunQPDw9u3LiBuro6jo6OLFiw\nQGRJwvwuQMW7U4Wf+vj4kJWVRUhICM3NzZw7d45du3axcOFCEVYaGhrK8PAwd+7cYfPmzUxPT2Nv\nb09GRgaGhob09fWJ4BJ7e3tmZ2cZGhoS+oj09HSMjY3FsUPVhjp06BARERFkZmZiZmaGm5sbfX19\nAusGEBYWJhDzIyMjIthFFRvn4uKCUqnE2dmZmJgYZDIZVlZWXL58WZxVOzo68PX15bHHHmPDhg2k\np6dz+PBhdHV10dTUxNzcnOLiYrKysrhz5w4TExMMDw/z+9//Hk1NTU6ePIlEIkGpVIrUZdVA/s1v\nfoOWlhbXr19n/fr1zM7OsnfvXqFobGhooK2tDVtbWz755BPeeOMNvLy8ROFOJpPxzDPP0N3dzYUL\nF0hMTBQCLS8vL2Flr66uJjc3l8cff5ygoCD6+/sxMzNDIpFw7NgxNDQ0yMnJEdX8FStW0NbWRlBQ\nEJ988gn79+9nw4YNtLa2cvv2bQwNDTl58iTR0dHs3r1b0JFUVunOzk76+/vFJKna5o+NjQn+Z3p6\nOikpKXzzzTdkZmYK4Mnx48fx9fUVKLyEhAQuX77MH/7wBy5fvszU1BR+fn7cuHGDN998k++++06I\n7cLDw5HL5dy5c4fQ0FDCw8NpamoiKSmJjIwM7t69i76+Pi+99BLV1dWYmpqyefNmfvvb3xIZGcmj\nR4/44YcfeOONNwRO8L+7/t/YKSQqlcrg/82S+RZwR6lUegB3/v77f3mZmZkRGRmJl5cXMzMzSKVS\nxsbGRCqRapCqPBAqQOm7775LUlKScJl5eXmJ7T5AU1MTGhoazM7OkpmZyalTp4iKiqK+vp7PPvsM\nf39/PD09Rfy8sbEx4+PjxMXFoaGhgUKhQF1dnf3793Ps2DF+/PFHYL6SHRAQQExMjIi6AwgKChIg\nVxWsIzc3l6CgICIiInB1dRV5lnZ2dqKz4ebmNv9lqKkhl8vx9PRk5cqVVFVVkZ6ejqurKwEBAQQE\nBGBpaYmVlRWGhoaUl5eTmJhIc3MzeXl5LFu2jObmZkxMTKiqquLBgwekpaVx8eJFjI2NWblyJUql\nktraWrS0tHjttdeQyWT86le/Ynx8HF1dXcLDw3nw4AFRUVEoFAoaGxsxNDTE2tqajz/+mJqaGnbu\n3MmWLVtISEigr6+P8vJy/P39WbZsGePj4+jr65OUlIS3tzfJyckoFAoePHgg+Iyzs7OkpaXh4OCA\nq6sr7e3tIkHZw8ODtrY22tra6O7uJjMzk4ULF6KpqUlGRgbbt28nNTWVoKAgHn/8cQH7/fHHH3F2\ndubq1avAPAhn7dq14rupq6sjNjZWaElUSkpXV1d6e3u5f/8+8fHxFBUVsXHjRlH83LdvH9bW1sJ3\n4u3tTX9/v6gp5ObmsmjRIpGstXHjRo4fP45UKmXbtm2UlJQIjYCbmxvDw8O8/fbbjI6OiqKyo6Mj\nCxcuFPGFqs8hMzOTlJQU0tPTRSdraGgIXV1dJicnefHFF8nNzcXc3FwkT6vUp05OTiJ6z9/fX7Tp\nf8n1/8XxYR1w4u+PTwD/7fQ0OTlJWloa+vr6TE1NkZ+fT15eHhYWFrS1tXHo0CEKCgrQ1dVFQ0OD\nzs5OxsfH0dTUxMHBgWeffVYEm+bn54tAjf7+ftEd0NHREZCSrKwsPD09BQXJ1dWV0tJSGhoaWL16\nNampqbS0tLBu3TpWrVrFu+++S1xcHLa2tsD8FvfGjRvk5OSITIeDBw+iq6uLo6MjGRkZtLe3U15e\nTkREBJ9++ind3d24u7uTm5tLQUEBQUFBtLe3U1tbS0ZGBjDfi1a1CJ2dnXnyySfR1tYmJSWFEydO\nMD09LVpbTk5ObN26VbS0+vr6SE1NJTY2lpSUFKKjoykqKsLAwIDAwEBmZmY4f/48VVVVDAwMiILb\na6+9hqenpyhiqWzM3333HZWVlXz55Zc4ODgQGxuLQqGgq6tLAFqamprQ0dHh3XffxdjYmK+/
/ho/\nPz8kEonAtz/77LMEBweLguLAwADW1tZs2LCBVatWcfDgQbS0tEShUS6X88MPP9DS0kJHRwe2trbY\n2dlhYWEhEqLWrFkjYDkTExNUVFSgo6MjsPvvvPMOYWFhjI2NCefq+Pi4gLmam5uzefNmPD09aW5u\nJjk5WUxGhYWFBAQEiMLmggUL+PTTTwkMDOTSpUtkZ2eTnp4uJoV3332XjRs3Mj09TVFRES0tLWza\ntImqqioCAwN57rnnCAwMFChBmUzGokWLWLx4sdjd1dXVkZOTQ0REhOgeqECy0dHR2Nvbs27dOs6e\nPcvt27fJyclBS0tLpFAfPnyYmzdvkpWVxezsLJ6enpw/f144RRcsWICXl9cvHsD/7KSgBNIkEkmR\nRCJ59u/PWSuVyq6/P+4GrP9P/1AikTwrkUgKJRJJoYqLr0Kw+fv7ExwczOTkJL/61a/44osvWLt2\nLd999x0hISFCy3/s2LF/gLw2NDSwZcsWVq9eDUBzczNSqZT6+nqeeOIJFi1axF//+le0tLTYsGED\nGhoaqKuri3j1np4e8vPzhaR6bm6OO3fu0NLSwujoqADChoSEoKGhgZ6eHpqamqJDsHfvXrKzs8Xk\nY2RkxOzsLP7+/lRXV6OmpsauXbvQ09Ojv78fPT09fHx8xGSjaplGRUVx//59ioqKSE5O5plnniEk\nJISOjg7a2tqIiYnBz8+PvLw8Tp8+LWCsw8PD1NXVERAQwOTkJEZGRiQkJDA2Noauri4WFhYMDw+z\nbds2Lly4IM6anZ2dlJSUCOHT0qVLRatORXZWhcWq+IbT09NC9FRaWsrhw4exsbGhtraWK1euUFhY\nyKpVq2hra+Pzzz/np59+Enmfr732Gk8++SQzMzPEx8fj5OTEyMgIAD4+PkRHRwsJ8+HDh/nb3/7G\nH//4Rx48eMDVq1fp6OgQysrx8XFiY2Px9/eno6MDS0tLtmzZQldXFw0NDVhZWQGIXUNzczPW1tYY\nGxuTkpJCXFwcUqlUPP/iiy9SVlZGY2MjW7duZdWqVUilUjo7OxkYGOA//uM/iIyMFEj6np4eQkND\nWbJkCba2tnR1dXHv3j2OHz/Ob37zGyoqKvjoo4/Q1dXFwcGB559/noSEBKF8VeVE1NTUoFAoRN3q\niy++EB4MHx8ffvzxRyIiIoiJicHU1FTkg5SUlLBkyRL++Mc/4uHhQXt7O6dPnyYhIUHQlwwMDESH\n65dc/+ykEKtUKoOBFcCLEokk7n//Q+W87PD/SFVSKpVfK5XKMKVSGaampkZRURH29vbY2dlhZ2eH\nh4cHnp6e5Obm8uOPP4qMBxXJV2UaUhV/ZDIZSUlJnD17VghhkpKSqKqqwsDAQKQNPfXUUyI6XOXK\n7O7uFi0dFfNxbm6OR48ekZCQQExMDC0tLXh4eADzSkmpVMqDBw948OABpaWlglFoYGAgOiATExPk\n5+cLjf/09DRzc3MMDAwIqnFXV5eoKajaYjMzM7S1tREWFkZHRwc5OTkCQKsKSqmsrGR2dhZzc3Pm\n5uawt7cXMmIVzm3hwoVkZGRgYGBAYWGhIGb/6U9/wsfHh8nJSZFgpUonLiws5NChQ6xYsQJ3d3cc\nHBzYt28fFRUVeHh4UF9fj62trdAHXLlyhcTERNzc3ER4jQpRJpFIOHDgAIaGhkRGRrJnzx5BN37j\njTdISkpCX18fhULB3r17gf+rLevh4YGzszMNDQ2CqaGy2Ht6emJoaCiCVa2srGhubiY6Ohp1yodm\nQAAAIABJREFUdXXBonBwcBCZGqqVVKXizM7OZvPmzbS0tIjI+7CwMGxtbUlKShKFxcnJSQwNDXF0\ndOSxxx4jJSUFHx8fKioqgPm29w8//MDrr78uCrn37t3jk08+oby8nMzMTJKTk/nss8948OAB1tbW\neHp6UltbK462MTEx6OjocPbsWdE5m5mZYcOGDWzZsoW6ujr2798v9Ceq3I++vj727t3Lnj17yMzM\nFADcoqIiYJ7P0dHRwVdffSWK2b/k+qcmBaVS2fH3n73ARSAC6JFIJLYAf//Z+9++CTU1/vKXvzA7\nO0tUVBQ9PT1IpVK2bNlCaWkpzn/PjUxISGBgYAB3d3eWLVvGlStXmJqaIiQkBHd3dw4dOsSqVavQ\n1NQE5lOBKioqUFdXZ2ZmhkuXLuHh4YG+vj5aWlqiKzAzMyNafn19fcKtVllZSUdHB2+++SYeHh6i\ngpuXl4eGhgYJCQn09PSgpqbGrVu38PX1xcTEhE2bNhEbG0tsbKxIulZZuy9duoSlpSXt7e0UFRXh\n4eGBgYEBMF/AvHHjBsnJyYIxKJfLuXjxIhYWFjQ2NuLl5YVEIqG0tJTCwkLq6+sJCgpiYmJCpD2r\nCnE9PT2iRqByLFpbW+Pm5iYSvquqqpBKpUxNTWFsbCzStJuampDL5eTm5pKeno6fnx9ZWVmYmZmR\nmZkpgm9WrlyJnp4eZmZmjI+PMzo6yuzsrBgsCxcuxM3NjTVr1lBeXo6Ojg719fU0NjZy4MABQRQ6\ncOAAML+iV1RU8P7772Nvb88bb7xBZWUla9asYdGiRcTGxhIaGkp7ezttbW1UV1dz7tw5DA0N6e3t\npbGxkbS0NMHxVJ2lPTw8iI2NZfv27aJDZG5ujkQiEeRtlf3azs6O5cuXiwjC4OBgqquraWxsxNLS\nUqRyA7i6umJhYcHHH38sIDT79u3j5MmTrFixgldeeYV79+5ha2vL2rVr6e/v55tvvmFkZIS6ujqB\nulftNmZnZ4H5tuvGjRuZmZlBIpHQ1NSEmZkZf/7zn1m1apV4n+3t7eTn54sWp6amJqtWrUKpVPLK\nK68QHByMj48Pamq/fKj/P54UJBKJvkQiMVQ9BpKBCuAK8Ku//7VfAZf/u9cyMTHh5s2bTExM8Pbb\nb6OjoyM8AF5eXrS2tlJVVYWlpSVxcXFoamqK5BsLCwvS09Opra1l7969pKWlcf36dWA+wfjXv/61\n4BL4+flRX1/P448/LoJcR0ZG0NPTY/Hixbz66qsEBgaSnZ2Nubk5AQEBjI+Ps3z5crHdh/milaWl\nJa2trXh7exMREcHWrVtpbW2lpKSEjz/+mKNHj/Ljjz+SnJwstrUqRZ1CoRBGnc7OTnx8fID544+q\n95+YmIivry9eXl4kJCRQX1+PjY0Njx49QiaTERUVRUBAABs2bBCe+5MnT4pjREhICC0tLXR2dpKU\nlMR7772HnZ0dU1NTbNu2jSVLlvD555+L1eqTTz6ho6NDxNurtvXBwcHY2trS0NAgBoVSqSQyMpIV\nK1bQ29vLzZs3MTExYWBgAKlUirm5ObOzs2RnZ1NdXU1NTQ36+voYGhqycOFCWlpaCAsLIzExkcLC\nQgoKCgRYVCXFVuVoHj9+XOj7bWxsGBoa4sMPPxShNOPj40xMTAh2g7q6Otra2kxOTqKurv4PgNXz\n58/zzjvviMFeVVWFs7MzmZmZ+Pr6CuSdmZkZubm
57Ny5E1tbW3p7e8WKb2hoyNWrV0lNTQXg3r17\nmJmZER4ejoGBAe7u7nzwwQfY2NgQGBjIRx99hJWVFQkJCUINq+pYGBgYcP78eZqamujt7UVHR0dA\nVhYvXkxjYyODg4OiYHzy5Ek2b97M7du38fPz4/r162hra9PS0kJBQYGouVhYWCCVSrl79y7Ozs5o\naWmJXe4vuf6ZnYI1kCWRSEqBAuCaUqm8AfwnsFQikdQDS/7++395DQ8Ps3z5crGlb2trY3h4GGNj\nY3R0dFi/fj2mpqZ88cUXGBkZYWJiInDoKk6furo6J0+exMfHR6y8Dx8+FEKOiYkJvv32W+rr60Us\nnY2NjbD11tTUCEa/Knmpq6uLXbt2YWJigo6ODq6urgA8//zznD9/Hg8PD4qLi0Uug6OjI0ZGRvj4\n+NDf38+aNWs4ePAgS5Ys4eLFiyLSTCqVCvec6mgD82o+GxsbscIZGxuTk5ODmpqaWMGrq6txdXVF\nqVRiZ2fHzMwMAQEB1NTUiALW5OQkixcvxsfHR+DQg4ODcXR0xNDQED09Pa5fv050dDSRkZEEBATw\n+OOP09PTQ1tbG8bGxpw5c0b0+PPz81m0aBHvv/++2O7L5XIuX77M3NwcmpqayGQyMdEVFRVhaGjI\nBx98gJaWFk8//TRvvPEGFRUVfPPNN0gkEhYsWEBdXR1DQ0NCtQjzGYoNDQ0sWLCA6upqFi9ejLu7\nO48ePeKnn37i/fffJywsjNraWiorK+ns7GRubg4tLS2ys7Oxs7Pjsccew9HRkYKCArEdt7S05M03\n38Te3l50s+rr6zl58iQPHz6ksbFRYOnz8vKwtbVFJpNx+PBh6uvriY+P58SJE9y9excvLy9xL+jr\n6+Pv789LL70kZPEbN27EyMiIZ555hsrKSpKTkwWfQxUVZ2ZmRnFxMfHx8Tg4OLBlyxbKy8tF2HBZ\nWRkJCQkYGxszMDDAzMwM+vr6FBUVCd7o+Pi4oGcbGBgQGRmJsbExw8PDvPXWWyJ5u729/f+WeOl/\naM7/c/3P9f+f69+H5qxqr6lsxgUFBWzYsIGMjAzhLbC2thaE41u3brFx40aqqqro7+8X59KwsDCa\nm5uJiooiNDSUt956i127dtHQ0CDSgXV0dERyTldXFz4+PtTX1+Ps7ExdXZ0w1Li4uODk5ERZWRkO\nDg6ikOnv788PP/yAn58fFy5cYPny5UKarCI8bd26leLiYmZnZ0WMnUoGDbBkyRL09fW5ePEitra2\nREREEB8fz65du/D29sbb2xuFQoGdnZ1gF37//fds27YNpVIpQCYmJiaYmJjg7OzMqVOn8PHxEeas\nvLw8du3aJSzocrmcAwcOCKIxwObNm7l16xZhYWHk5+ejp6dHXV0dTzzxhGhnzs7O8v333xMREYG1\ntTXLly/nxIkTJCcnk5ubK/IXpFIpHR0dpKSksG3bNszMzDAwMBBVc0CEtdbW1jIyMoKtra3Y8XR2\ndrJp0yaREdrX10dYWBgFBQXC1PThhx8yMzMjKNgVFRVYW1szODgoak/t7e1MT08DiPvh7bffprq6\nmrS0NHp7e4mIiBCU7MWLFyOTyYQqcGJigmeffVYwDT///HORolVfX09AQACLFy+mrq6OXbt2kZGR\nQUdHB2pqaoyOjtLe3s6iRYvIy8tjamqKiIgIampqCA0NpaurC5lMJjo3Xl5eou2qpqYm7Oovv/wy\nb7/9NhYWFuLoqQqZvXbtGo6OjgwODmJra4tcLhe7EJUcXdVR09XVFfW0pqYmLly48IvG47+EzFlN\nTU3EeWlqahIXF0deXh6Tk5NYWFhgaGiITCYjJiZGgDG0tLQE9MTNzQ03NzcePXqEtrY2vr6+wHx7\n68CBAyIAdPny5SiVStHpcHNzo66uDolEwp///Ge2bt1KbW0tR44cwc7OjpqaGu7du0dGRgYtLS1C\nKWlmZsbg4CCmpqZYWlqSkZGBiYmJEBbJ5XIBKenq6sLFxYX29naWLFnC4sWLBUE6ODgYQER67d69\nG39/f8EiPHz4MFevXmVubk6w+/70pz+RmppKfn4+TU1NFBYWkpOTg729Pbm5uXR1dSGVSkWr68SJ\nE1hYWDA4OIhEIsHBwQFDQ0NsbW2pqanBx8cHe3t76urqCAsLEwNTdYP29fWxb98+zM3NcXNz4/Tp\n08JR2dfXJ56Ty+X4+vqip6cnwKHvv/8+kZGR+Pr6cuTIEUpKSvjhhx8EWWt6epoHDx5w8+ZNoRZV\nVdf7+vr49ttvcXV1Zd26dTzzzDOUlJRw+PBhTE1N+fLLL8nNzeXWrVuidasyWFlbW9PW1kZFRYWo\nuh86dIju7m6Ki4vJz8/H0NCQ0tJSTpw4wcmTJzl9+rR4vY8++oimpiaeeuop8vPzxX24fv16ZDIZ\nzz33nJAjz87OsmDBAjw9PZmZmcHS0pLKykri4uKQy+XMzMwQFxdHTU0Ntra2QlinAv5GR0cTFRUl\nUr1URzYNDQ1MTU1xcXHBwcEBd3d3WltbcXJywt3dHSMjIzQ0NITgbc2aNVRXV1NYWMjIyAgODg4i\nFWx4eFgoMH/J9S9hnf7000/fW7RoEYODg/j6+tLa2iqizrOzswkKCsLU1FQ41sLDw7GwsBDx6+Pj\n48zNzWFhYUFYWBjff/89t2/fxt/fnwULFtDb24uvry+ffPIJo6OjDA4OEhISgp2dHSUlJcKpdvz4\ncaampoiPj8fExITjx4/j/PdcwYaGBvr6+igqKmJ2dhalUsn27dv5+eefWbx4MY8ePRKQDw0NDSFu\niYuLIzMzk4mJCfr7+xkfHxdR8KpBNDw8LDBrzs7Oop2nrq4uLMPm5uYUFRWxd+9efHx8cHNzE+lK\nVVVVBAcH4+bmRlZWFtPT00ilUmJiYpiamsLX15eUlBReeOEF2tvbaW5uRk1NjcDAQFxcXEQ24uTk\nJG1tbUxNTWFlZUVpaSl37txhcnIST09PQQ0qLCzE0dERV1dXmpubSUhIEMq6oaEhRkdHsbW1xdra\nmr6+PhwcHPDx8SEkJASFQsGCBQsoKCjA398fPT09EQ589epV4Tg0NTUVrbqcnBzR0QgICKCxsRFN\nTU2B45PJZHz99dd0dnYKB2ZfXx9LliyhqKiIkpISHBwcCA8Px9vbm8zMTHbt2iUgvStXrsTNzQ1b\nW1skEgmffvopAN3d3UIA9PTTT1NRUcHo6CjLli2jtLRU7MZu374tYgMNDAzQ0tKio6ODiooKioqK\n6O3tFdGCqiJ6TU0N7e3tnDlzhg8++IBNmzYJFmdGRgbOzs7ExsYK52tjYyMVFRUYGxujp6cndrnt\n7e1YWFhQXl7O0qVLRUF8YGCAqakpnn76aWHWKi4u/vdhNH744YfvJSQkEB8fT3l5OTKZTFheNTU1\nUSgUGBsbY2dnJ/DsDg4OAmppZmZGe3u7GLwqcMaRI0fEqq2np0diYiKbNm3Czs4ObW1tvv/+ezw8\nPNixYw
djY2NYWlqyceNGOjs7efToEcbGxtja2jI3Nyccdyqo5vDwMHNzc8JgpBKdqFZPBwcHwSQo\nKSlhZGQEqVQqVmAvLy80NDQYHBzEwsKCc+fOifCU8vJypFIp+vr6rFy5UnwOaWlpAunu4+NDX18f\nFRUV2NnZYWtri4ODA87OziJstLa2lrm5ORFBpuIpDg4O4uLiwsDAgLA3a2hoiPDelpYW0QJdv369\nwJvDfGReZGQkTU1NdHV1sWfPHi5fvoyfnx+ffvopixYt4uHDh4IslJaWhrW1NQMDA3R3d4sbXU9P\nj9nZWerq6oTPJDU1lY0bN6Knp4e9vT0FBQX09/djaGhIUVER7e3thISECCdncXExL7zwApaWlkil\nUoqKioiOjub06dPs27dPSMwvXbokuistLS14enqKxaaoqAgtLS2mp6dpamrC2tpaoNE0NTU5f/48\nd+7cwcLCAgcHBwHWqauro7S0lNDQUExMTAgICEBTU5Ph4WFByfb19UVdXZ22tjby8/Opq6ujvLwc\nIyMjHBwcaGho4JVXXmHFihUC7bdq1Sr++te/CvHWxYsX8fX1/QdFp0KhwN3dnYcPH6Kjo4OjoyPe\n3t6UlpZiYWGBpaUlvb29JCQkYG5uTnt7OzY2NqSmpv6iSeFfoqYgkUhQU1MjLS1NqAS9vb3p6enh\nk08+4c0338TCwoLIyEjOnTtHW1sburq6HD9+nHXr1gkQRnp6Or6+vmKLVldXx61bt0QrUfW8u7s7\n586dw9/fn9zcXHbs2MHPP//MtWvXmJ2dZWZmBgsLC8bHx6moqGDfvn1oaWlx//59YD7sxdHRkczM\nTORyOW5ubhQWFgoffUNDAytXrmR8fFwEf6rkterq6rS0tFBeXs7c3BzR0dEMDQ0BEBsbS0VFBRER\nEdTV1aGvr09hYSHR0dHcvHlThJ9YWFjg7OxMRUUF/f39hIaGYmNjg52dHcePH8fCwkJsVX18fARg\n5PPPPycsLIwnn3xS3LxPP/20wNHPzMxw5swZ7O3tiY6OxsDAQGg6NDQ0ePToEQsXLmTfvn28/fbb\nXLlyhb/85S+sWbNGDBB9fX2OHDlCa2srNTU1LF26FIlEwtmzZ4mKiqK6uhozMzN8fHyYmpqisrIS\nS0tLZDIZMN+f19LSoqurCzc3N4qKiqiqqmLNmjVIJBIhCx4bG8PExISjR4/S2dnJ6tWrOXDgAM7O\nziJIWGU6A7h48SLR0dF0d3fT3NyMt7c3MpmMwMBA9PT0GBoaEoRk1QQTGxvLoUOHGBgYwNPTkytX\nrmBsbIyTk5PoPmhra1NaWsqlS5dYv349k5OTXLx4kYGBAUxNTfHz86Ouro7Fixdz/fp1Xn/9dTQ1\nNbG1tUVHR4eOjg7s7OyYnp4W3h2A5cuX09nZSXR0NCUlJTg7O6OpqYm1tTUODg4MDAwQHx/P6Ogo\nCoWCmpoaAYfV1dUVuwgjIyMRaPxLr3+JmoKGhgZdXV1YW88rokNDQxkcHKS7u5sDBw4QEBCAtbW1\nEJqogjzHxsbo6+vj2LFjeHt7MzExgVKpJCAgAJg/R5qbmzM5OcmCBQsYGBhgbGyMU6dOCalxQEAA\nf/3rX7G3tyc+Ph4LCws8PT1Fmyo4OJgjR46IszvAhg0bMDExISEhgbCwMEH3dXR05PXXX8fR0RED\nAwPq6urw9vZm48aNQvFXW1sr7NCqnri2tjYwf4Optpa+vr7Y29uzYMECWltbCQoKQqlU0tXVRV1d\nHZcuXRIegrm5OeRyOdnZ2YSFhXHjxg0KCwsFDej5559n8+bN7NixA19fX44ePUpZWZlQu73zzjvi\nfGpsbExGRgavvfYag4OD1NTUsGzZMh49ekRYWBjFxcW0tLTg6+uLk5MT4eHhYkW/d++eAH7AvCpP\nhU3X1tamoqKCrq4u+vr6aG1tRS6X4+TkRHp6uoDXqlR5bW1tGBkZMTk5iY2NDTY2NiQnJ/PKK68w\nNTXFY489Rmtrq/BsqIAyZmZm6Ovro6Ojg5mZGY8ePQLm3YyGhoacOnUKKysroqOjmZ6eZvXq1Xh7\ne9PX10dVVRX29vaMj49jY2ODTCYjNjZWuECXLl2Krq4uvr6+2NjYiP9jQkICQUFBnDp1CoVCwTPP\nPCP4H7Ozs2zevJmEhAT8/f1xcXFBTU0NmUyGm5sbY2NjFBcXI5fLuX//vmine3t7MzY2Rnl5ufjO\nZ2ZmGBgYQCaTCcr24OAgeXl5lJWViURuLS0t1NXVBc7OyMiI/v7+Xzwe/yUmBRWkwsvLi8HBQbKz\ns1EoFPzqV78iNDQUBwcHdHR06O7u5tatW1y5coXR0VESExPJzc0lJiaG6elpxsfHsbKyoqSkBJhX\nsQUGBqJUKoXS7ve//70AZhgaGqKrqyuUYf7+/kJvsHv3bhYuXEhVVRXGxsa4urqKSSslJQUvLy8m\nJiZwdnZGoVAIkdDg4CAJCQl4eHiwZs0aurq6KC0tRSaToa2tzdzcHGfPnhVW37t374pqeW9vL3Nz\nc2IHkp6eTk1NDS0tLejr69Pd3c3Y2Bg2NjaMjIzw8OFDMeCampooKChAXV2d0NBQNmzYQFxcHHNz\nc+zYsYOamhri4uL+F3vvHVXlmbZ9/zYdNr2XTe8dpEhXsBFQsOuoMXWMcZxkUibJTJKJMymTzEyq\nmRRTJrZEjYkdURERECkK0juCtE3ZsOmwKfv7w+xrzfN+a32T9/3W+lae9T33P5I7KJu97/u6r/M8\nj+N30NbWJnwMurq6gus4MDAgLih7e3t27dpFaGgoxsbG9Pb2oq+vz7Vr1zAwMODDDz/kww8/pK6u\nDlNTU4aHh4XX4e7du1RXVwsWpUwm49y5c6xbt44dO3bg6OjIlStXMDc3R1dXl8rKSszNzUXj7s6d\nOywsLAiGxV//+lceffRRqqqq6OzsZHZ2FoVCQUlJCc888wzPPPMMCQkJYpLxwQcfYGZmxszMDJ2d\nnXR3dwMIBJ+GCp2Xl0dUVBSjo6P09vYyPj4uzFi1tbXs379f7FIOHDjAqVOnGBkZQaVS8cUXX/DF\nF1+I1zsxMUFwcDDvvPMO/v7+FBcX88EHH9DZ2UlbWxtFRUVoa2tjbm4uWBg1NTVcunQJlUqFpaUl\nP/74I3FxcUJPUF5eLuTXGjhwfX09Li4u2NvbY2hoiJ6eHgUFBSiVSjw9PXF3d2dkZETIuT///HOU\nSiXe3t7CLfpzjl9E+aBJIRofH8fLy4vg4GC+/PJL0VABRPiKo6Mjvb29XL58mRdffBFDQ0P6+vpo\nbW0lJiaGqqoqNm/ezD//+U8aGhr43e9+x927d/n+++8pKCggLS0NR0dHjI2NKS4uxt/fH7VazbZt\n2/jhhx/o7e0VjIWpqSmMjIxEsKtGMvvQQw9RWVkpiEAakrRSqWRgYABzc3Pu3bvHxMQEDQ0NREdH\nC4aeu7s7JSUlor+gGanB/afZwsKCoCI98sgjKJVKTp48yaVLl/D
z8yMzM5OKigrx5NGwGvbu3Utg\nYKBIpGpubkYqldLf309tbS0BAQFUV1ejp6dHeHg4zs7ONDc3k5OTwyuvvMLLL7+MVCrFy8uLZ555\nhrt379LS0kJ+fj5SqZRFixYRGxvLuXPncHV1RU9PT+x0Tpw4wRNPPIGJiQlnz57l1KlT+Pn5CRdk\nREQEiYmJXL9+HXd3d9FLmZubY25ujvPnz4vsTU3T18HBAVdXVyHX1sT0ffTRR0RFReHg4EBJSQkx\nMTEkJCRw+/Zt4YT84osvRH2v2YHo6elhYmLCsmXLKCoqQq1Wi5tm1apV/PrXv6a6upqYmBjc3d2F\nPDw1NVXYu2dnZ1GpVOTk5LBjxw5u3bpFUlISN27c4KmnnqKiooJTp05x/PhxsrOz0dPTQ19fX0zV\n5ubmhJAtMDAQExMTvL29qaur44MPPhD9L8090d/fj1QqJSEhgdHRUZycnBgYGKCzs5Pk5GTa29vF\ntfbFF1/w9NNPC1hMVVUVc3NzXLhwgejoaGFN/znHL2KnoFnV29ra8PT0ZHh4mB07dnD58mV0dXXR\n1tbG1dWV/v5+GhoaKC0tZWRkhPb2dgwNDent7eXatWt0dHQQHR0tav9NmzZx584dkZCckJBAX18f\nOjo6nDx5ksjISEZGRvD29haaAHt7e9rb2/nqq6/Q0dHB2dmZs2fP0tXVJYxLnZ2dIqBjfn6e/v5+\nZDIZJSUlXL16lebmZkZGRjAwMGD79u2YmZlRV1fH/Pw8zs7Oommn8WZodiB2dnbCm2BhYUFPTw+d\nnZ0YGRmhra2NpaWlGLkpFAoUCgWDg4O0tbVx8eJFjI2NWblyJV5eXmLXoVAoBLZOk4rt5OSEubk5\nEREReHl5UVxcLJ7a09PTlJaWik77Sy+9hEKhwMzMDFNTU0JCQkRy0Zo1a1CpVBw6dIhDhw4JPX9/\nfz9BQUHU1dUxNjaGn58fxcXFLF68mM7OTnJzc+nu7mZ8fJzk5GSUSiWXL18GoLu7m/7+fnJzc2lp\naRGp1D09Pdjb27N7925hPNPI4+/evYuLiwtGRkZYW1tja2srGsONjY3A/bTvvLw8AdUNDw8XOgbN\nFKK1tZXm5maOHDnCvXv3yMvLY9myZSxfvpyPPvpIpDi98MILIqNSY44rKipCLpfzwgsvUFlZCcDv\nf/97ent7BQpAI6UOCQlhYmKC0dFR9PX1Wbp0qRiF5+XlAYgSJiAgALlczrJly3B2dhbchNnZWSE/\nz87OxtHRUTAePTw8hLJRKpVy9+5d8Xp/zvGLWBTGx8fx9PQUQagmJibCvJObm8vU1BTDw8MEBwcL\nDbe7uzumpqaCsPzkk0/S09NDeXk5cXFxwP2RUkdHB2vXrhURWr6+vgQFBZGSkkJ3dzf29vbU1dVx\n+/Ztli9fLliFISEhGBgY0NnZyebNmzl48KAIAGlpaaG/vx8TExOmp6fJzMykqakJfX19+vr6RCKT\ntbU1pqamDAwM4OrqKjIGNDuEjIwMvL29Bf9hcnKSkJAQzM3NmZqaorm5mbq6OoKCgoiMjBRilPT0\ndIyNjVlYWODWrVtcvXqVHTt2iCmDtra2eO1r1qzB39+fubk5ioqK0NXVJTs7mytXrogkImNjY9zc\n3PjjH/8oCNhXr17FwMBACH4uX74s6NUaNPm5c+e4c+cOb7/9tsDnpaamCoDpvn37aGxsJDY2luXL\nl1NSUiLex7y8PD766CORqqTpA/X394ucSwsLC1pbW5mfnxcjSE0+R25urlj8NbZnzVTF0dGRqakp\nbGxsRFkSGRkpEqw0mo3GxkbBVNBIs6uqqrhy5YpY+FpaWkRmqVqtZnBwkMHBQTH3v3LlCrGxsWK0\nrGkaSyQSHn30UfT09Lh16xbh4eG88847mJmZYW1tzdzcHL29vaJP9te//hVzc3OhU9BMRAICArCx\nsWF0dJSKigph3lKr1ahUKpKTk3nhhRdE81jj09B839jYGAEBAWJy8XOOX8SiYGZmJgwxGmSag4MD\ny5cvZ+fOnRQWFjI+Po5cLsfOzo7HH38clUrFjRs36Ovro7+/H6VSyWOPPSZi5OB+HalSqejv76en\np4ebN29ibGxMdnY2e/fuxcjIiA0bNjA8PMzt27f55z//CdwXPZmYmDA6OiqmDI8++qj4wOLi4sSI\nqrW1FS0tLYyMjDh16hQPPvgg/f39jI+Po1QqaWlpEX2G1tZWpqen2bJlC56ennR1ddHV1SX+XU1H\nWqVS4erqKjz6GzZswNnZGZVKxYkTJ3jttdcEyzApKYm8vDwKCwtxcXERJcPAwAAJCQn5cvxkAAAg\nAElEQVRYWFgIdNn169epr6/nqaeeEunUzs7OyGQyRkZG+Oqrrzh06BBJSUls3bqV/v5+EhMTuXjx\nIv/85z8ZHR0lOjqa559/nvn5eVJTUykpKWHz5s3ExMRw7949vLy88PX1JTY2ltdee43333+f69ev\nc+bMGQwNDcnIyODtt99m2bJlPPjggzg5OSGVSrly5QoAAQEBnD59mt///vcYGxszMzNDcHAw3d3d\nbN++nbfeeot79+6JJ/jKlStxdXXlH//4h0DUWVhYCHNZSEgIcL+ZPTY2xvbt29HR0eHtt9/GxcUF\nOzs7uru7RaN4zZo16OnpkZqaypYtW9i1axcxMTFUVlYK74GOjo7ggG7dupXx8XFMTU1ZtWqVwOlP\nTk6ira1Nd3c30dHR9Pf34+npSX9/v2B0DAwMUFxcjFwu5/333xcLGtx/UGpra3Pjxg3Onz9PQUGB\nCMe5dOkS33zzDd9//z0WFhYsWrRIYObfffddlEol09PTDAwMYGpqKnYWP/f4RfQUlEolDQ0Ngjz0\n5z//mddee43w8HDs7e1Ffl5WVpagBru4uODr6yvAoLa2tvz4448sWrRIqNjkcjnp6enk5OSgUqmI\niIhAR0eHiooKvL292bRpEz4+PvT39zM/P4+joyMZGRlUVlbi4+NDc3OzUN7dvXtX0JzPnj0rIuub\nmpro7u4WROnw8HAmJyeZmZlBR0eH/v5+wsPD+fzzz4mPj6e8vBw7Ozva29tRKpVERUVRXFwM3I/P\nU6lU1NfXExERwczMDE5OTjQ3N4vQWw2Oq7W1VYzVnnjiCQHyPHPmjOAX5OXl0dzcTHJyMlVVVURF\nRdHb28t7772Hk5OTEOzI5XLBHtTkFOro6BAaGkpBQQGpqalYWFgwNzdHaWkphw4dYsmSJUxMTIiQ\nFg21WiO1HR4exs7OjqysLPz8/NDV1cXf35+FhQVee+01oULVWIP/HfE+PT3NlStXhN6jrKwMR0dH\n8vLy+M1vfkNgYCC6urrY2NjQ2NhISEiIsB1rckM0OgCNTLy2tpaIiAjy8/NxdXVFJpPh7e2Ng4MD\noaGhyGQyBgcHeeyxx4iPj8fQ0BBzc3MhgouOjub8+fM8+uijGBgY0NbWBtw3iymVSpKSkpiZmRGG\nt4MHDzI+Ps6iRYtE2e
Ti4sKSJUv46quvuHfvHn5+ftTU1GBjY0NeXp5I3wawsLDAyMiIjo4OUlNT\nGR8fx8PDg0uXLuHg4IBCoSAwMBBfX1+kUiklJSVs2bKF3NxcWltb6ejoYOXKlURFRYnG4889fhE7\nBQMDAyIjIwkNDWVycpJVq1bh6emJoaEhAwMDWFlZUVtbi0qlwsLCAnNzc6ytrZmenubu3bvExsZS\nW1srgmMOHrxPg1uzZg21tbUC2Dk/P09vby/d3d2CTlRaWsrOnTt56KGHkMlkBAUFsX37dsEH+Pzz\nzxkcHGR4eJgjR44A93czjo6OdHZ2snHjRhGkumfPHhHecfLkSZE4NDo6ipubmxAxdXR0oFariY6O\nprKyUsiyTUxMMDAwYPfu3djZ2VFQUEBJSQlwX05bVlZGQECAcOC1t7cL6KcmqMXX11d4PMbGxnj4\n4Ye5fv06CQkJdHd3Y2FhwfXr12lpaWFiYkL0Ijw8PAgJCWHZsmVcv36dGzduCMS7g4MDq1evZtWq\nVYyNjfG3v/0NfX19IiIiuHjxIrdu3cLW1pazZ8/S3NxMU1OTWGBWr17N0qVLRUS9JiLO0NCQyspK\n+vr6BBIP7oNBDAwMaGxsxNrampSUFAIDAxkdHWX37t1ERkYyMTEh8GaawCCZTEZzczPT09Ncv36d\nmZkZmpqaxO5Pk03Z2toqQodHR0dJSkrC3NxcfDY+Pj6YmZlhYWFBZWUlSqUSV1dXtLS0yM3N5Zln\nnuHo0aOClqV5vRop/aFDh3j66ae5c+cOUqlUYARv3LghplsaHICGIj4zM0NeXh41NTVihNrV1SV8\nDQqFAgcHB2pqakTDVOOcvHz5MidOnGDdunVMTU3x3HPPsW3bNrGoubu7o62tTWZm5s++H38ROwVN\nDVxUVISnpyft7e1cunSJ6elpIdn19PRk/fr1ZGdnMzU1xZIlS7h586awI8vlchYtWiSIvnBfp6AJ\nB52fn8fQ0JBz587xzjvvMD4+jpGRkdDSFxcX097ezuLFi5mYmPgvjcLz589TW1vL8ePHhcbc19cX\nMzMzBgYGWLZsGZaWlhQWFjI6OsqpU6coKCjA1taWyMhIEYYyPDzM5OQkTU1NODo68tlnn5GWliaA\nsHNzcwK9HhYWRlJSElNTUxw5cgS1Wk1ERIQgCTs7O3PlyhVu3ryJiYkJRUVF+Pv7ExERgba2NiqV\nCjc3N5599lleeeUVFAqFkENrLpqenh5u375NX18f09PTpKamcv78eZH4nJ2dzd/+9jeMjY2xtbUV\n8uCqqipmZ2fJyclhYGBALLiJiYk888wz3Lt3DxsbGzZt2kRMTIzwhOjp6XH+/HneeOMNEhISMDAw\nYNOmTczPz4vuuEwmo6enB5VKxfz8PHfu3GF4eFjQqbOysnj11VfZtm0b7u7ujI+P097eTn5+PiEh\nIbS3t9PV1YW2tjaJiYkEBQXxyiuvYG5uTnl5ORYWFhgbG+Pg4EBubi5HjhwRsveenh4kEgmxsbEC\nEZeWloalpSV/+tOfiIiIwMfHRwTKwv2sDltbW+bn5zl06BARERHo6uoSFhbGoUOHWLt2rZjg6Ojo\nMD09zfT0NPn5+ezduxd/f38WL16MlpYWAwMDREVFAfe1MMPDw7S0tNDS0sKaNWvYuHEj5eXl5Obm\nkpycTE1NDZOTk6xdu5Y333yT1NRUsrOzMTY2Fli70tJS8bD4uccvYqegr69PZ2cn2traFBcXY2dn\nJ954tVqNg4MDKpWKqqoqpqamCAgIQKVSERcXx4oVKygtLWXdunVCqrxt2zbg/mJTUlIi6q+uri42\nbNiAXC7HxMSEvLw8WlpaRAy8oaGhEOnU19cTGxvLW2+9JZ6gmtHhsmXLmJubo6CgQNRsGorw6dOn\nqaiowMTEBFtbW5FLMDExQWJiIkuXLhVl0oYNG0TKEtynTz/xxBNEREQgl8uRSqUolUp8fX1JSUnB\n19cXFxcXofePjY0VY8vZ2VnkcjlNTU1C1VZQUMArr7xCXl4eMzMzREZGEhMTQ0ZGBlZWVqI8iouL\no62tjatXr5KcnExSUpKgAD///PO8/vrrdHZ2smfPHhwcHMQ4NiEhgdDQUEJDQwkJCaG4uFh4QDQE\n64KCAsFm0NfXZ2ZmBjs7O/Lz87G2thbNM40YyNnZWfzZ29uLXC4nJycHT09PXFxc8Pb2ZvHixQKb\n5+PjQ0VFBa6urri5ufHAAw/g7u5OWloa8/PzAvFuZmYmiMsWFhaUlpYKMI5mF+Tm5oazszN1dXUU\nFhZSUFCAubk5IyMjpKWl8ac//QlPT09CQ0MZGBgQn5mbmxuFhYVkZmZy4cIFvL29OX/+PFu3bsXf\n3x8PDw9iYmIEC3N2dpaVK1eSk5PD9PQ0WVlZ6Ojo4OrqKpyM586dY2BggNLSUgFk+fbbb8nOzmZy\nchJ7e3tcXFx47LHH8PLyIi0tjZmZGTIyMpDJZKSlpVFVVUVYWBhmZmbcuXPnZ9+Pv4hFYWRkhIsX\nL+Lm5kZmZiYDAwMiTbq/vx8zMzMuXrzI2bNnSUpKYtmyZYSHh3Pq1Cl0dHTw8PCgvLycsbExmpub\nRV3m4eEh8PEREREMDAxga2vLyMgIQ0NDjI6OYmZmxhtvvIG2tjY7d+4EEJCXsrIyNm3axMLCgkDO\nw31aUmVlpUhf+vHHH7lz5w7Xrl1j+fLlLF++nL179xIaGiqeLg0NDZSUlHDu3DnMzc3p7++nr68P\nAwMDIb/W1tYWnERNurCG+VheXs6BAwc4fPgw09PTDA0NiU73nj17eOWVV4QnIDY2lr6+PnR1ddHS\n0iIlJYWamhqWLl1KTk4O2dnZWFpaMjs7i1Kp5L333iMiIoK4uDgaGhqoqanBwsKC6upqFAoFWlpa\nZGZmolAoOHjwIIsWLWJhYUG4EMfHx/nkk08EwmxkZIRr166hr69PcnKykI7X1NSIG/u1117Dw8OD\ndevWsW3bNqEW3b9/v5CYa1iUmqnO0NAQ586dIz09HR8fH1pbWxkaGiI1NZW4uDjs7OxEDmlFRQWp\nqaniGpNIJDg6OuLk5IS2tjZqtZqoqCjWrVsnvA0aQ5Svry+dnZ0sXbqUwsJCioqKBFJPpVKxcuVK\ngoKCgPtczZMnTzI+Pk5eXh4JCQlIJBJRltTX1zM3NyeMdpoyxMLCgtDQUDFl0IBhNVOCkydPoqen\nh7OzMxMTE5w/f565uTl0dHSE2tHNzU2ECQ0PDxMVFSVS0GxtbYmJiRGaEI2I6+ccv4hFwdjYmI0b\nNwL334zJyUkqKiqYn59nz549YvuakpKCu7s77e3tnD59WgSrurq6CiFNVFSUIBmNj49TV1dHb28v\nx44do7Ozk/b2djZs2EB6ejpmZmasWbOG7OxsgXpbsWIFdXV1gvQcGhrKxMQEV69eFbp0jcTZzs6O\n6OhoVCoVaWlpODk5cfLkSSG31sS7VVdXI5fLkUgkWFtbix2Jxuii8WbMz88Lg9T
IyAgTExNER0eL\nHYqnpycpKSnMz8+Tm5tLWFgYNjY2aGjYmtSmpqYm4H6PIicnh4qKCrKzs2lsbBRhuppG1qeffkpT\nUxO6urocOHCAmzdvsn37dhQKBdHR0fj5+dHY2Eh+fj5WVlbCLp6cnIyWlpbIaFy2bBlvvPEGjo6O\nPPDAA6xfv57y8nIxzrS1tcXa2lrIjzMyMkhJSeHatWu0t7cLEKqBgQHz8/OiNxATE4Oenh6HDx8W\nrlWN5drJyQkPDw+am5sxNDTExsZGhM6uXLmSd955R+xAJiYmhDEpMDAQHx8fvvrqK5YtW4a5uTln\nzpwR2g5N0NDQ0BBnz57F3d2d/Px8FAoFR48eJT8/X+SVagJ9MzIyhJHKzc2N4eFhnnzySTFy7u/v\n56233uLatWusXr2abdu2cfnyZXR0dGhvb0ehUDA6Oiqu3e3bt+Pl5YVUKmXDhg0cOHCAwsJCxsbG\nhItV8/7u37+fkZER5ufnhQamvLyc8+fPc/XqVQIDA0W2yM85fhGLgiZhx9zcXCgIKysrsbGxYX5+\nnp6eHnR0dCgvLxejmoaGBgEstba2ZnJyEl9fXyEHBYTgxdbWlvj4eOrr6zl16hSnTp1CS0uL3bt3\nY2try7Jly9i5cyf29vZ8+umn2Nraoq+vT29vr4gCT0lJERJUTQqykZER1dXVZGRkcP36dTIyMsRI\nbmJigsLCQmZmZkQJ0d/fj5GREeXl5fj6+grRkIYlYGpqilQqZcmSJUJoMzExgYeHB5OTk0JnUVtb\ny29/+1smJiYYHh7m5s2bzM3NYW5ujqurK0ZGRkilUuLj48nMzOTmzZu4uroyNDRETEwMTk5OnDhx\ngl27dpGYmEhISAienp6irzIzMyOad5OTk3h4eAihjCYGr6amhqamJtrb29HS0uKHH35gw4YNItRV\noVDg7OxMdnY2Pj4+nDhxQmRRpqWlcf78eY4dO8bMzIwA4wAkJyfT0NAgREHV1dUUFxfT29tLX1+f\naJz5+/tjbW1NZmYmixcvxsvLS6DKlixZQm9vL2FhYWKxsbS0REdHB319fVFGaEofmUyGtra2MFC1\ntLSwfv16hoaGyMjIEM5Jd3d3dHV1kUql7Np1P9HAxsYGd3d3vvnmG8LDw8nJyaGpqYlbt27xzDPP\nEB4ejpeXFz4+PqxcuZJHHnmE6elpfvzxR/T09HBwcKCoqIiBgQH6+/tFPIGmB2JgYEBJSQkbNmxg\n3bp1rFixgtnZWUxNTQUHcseOHRgbG9PY2MiBAwfQ0tKipaWFmZkZ1Go1JSUllJaW/uz78RfRaJye\nnqa7uxtra2vc3d2xtLSkvr4eQ0ND7ty5w6JFi3B3d6e7uxsvLy86OztFDLfGP25qair4A7/5zW94\n//33ReyXhohjZWXFhQsXBAh2w4YNYgt848YNDhw4wKeffkpeXp6wQqtUKvbs2cO5c+eE5DovL4++\nvj58fHwYGhpiYGCAmJgYYYRSq9XMzs6SnJyMgYEB5ubm+Pn50dXVhY2NjYCdwP2dkYZtqAGwNjc3\nC6iGi4uLCEzVPHG8vb2pqqpieHgYc3NzlixZQktLCz09Pdja2qKrq4uuri4XLlzA3NwcDw8PZDIZ\nQ0NDyGQywRBwdnbmzTffxNLSkvT0dKampujq6iI4OBh3d3fOnDnDvn37eOWVV3BychJjRs02Njk5\nGX9/f+7evcuOHTvw8/NDX1+fmzdv0tbWRkFBAYaGhqhUKhwcHITJqbe3F6VSiUQiYcmSJaIOB6io\nqKCnpwdPT08CAwPp7OzkpZdeorKyUvg7NCKdP/zhD2Lh1zRg33//fTIyMtDR0aG6uprAwEDgfoka\nExPD4OAgLi4u6OrqMj09zffff09CQgLNzc00NzeLHMiRkRHxGlpaWrC2thafy8zMjEDHa4xdb775\nphgLnjhxgt27d+Pq6opcLqe0tJSysjLS0tJwcXGhpqYGiUTCli1b6Ojo4IUXXhAhLqdOnQIQClmF\nQsHOnTuxtLRkYWEBAwMDBgYGOHDgAE5OTlhZWVFUVCTgPm5ubmhra7N06VJGRkaEm1Yzmv05xy+C\np/Dpp5/uS0xMpLq6WlCWIiMjRQqwVCoVeK7x8XFGRkZwd3enpaUFhULBwMAAurq6IsD0/PnzFBUV\nsXfvXnHxaSTOZmZmuLm5MTIyIhp+ZWVlNDU1sWbNGqEiNDExYfXq1UJoMjExIYJcN23aJFDazc3N\nxMfH09nZSX9/vxgvDQ0Nce3aNRYtWsTY2JgwGsH9ZOXs7GzWrVsnkoFOnDghkoXkcjlr1qzh3r17\nlJSU0NfXJzInNYIUTe7lBx98wMjICHp6eqxdu1YsgPPz80gkEiwtLYmIiBD9E3d3dy5cuEBFRQWl\npaX09/dTXV3N6dOn2bRpE9u3b8fY2JiGhgZcXV25ffs2AwMDoll67949kTitSTSKiIigp6eHffv2\nYW5ujpWVFfr6+lRVVREfH88DDzxAd3e3GJM2NjYK4VBFRQX6+vo88sgj/P3vf+fhhx/G1dUViUQi\nfAuFhYW0t7eTnp6OUqmkurqaqKgoIiIiKCkpwdDQkOPHj9PZ2YmlpaUIb9FYx0+fPs2TTz4pTFEd\nHR0cPHiQsbExbG1tOXHiBPPz8/j6+gp9w9zcHOnp6dy5c4empiZmZ2fp7+9n3bp13L59m9LSUu7e\nvUt0dDTz8/NcuXJFhP/u3buXoKAghoeHqaioYGhoCGdnZ9ra2rhz5w4KhYKoqCju3r1LY2MjDzzw\nANnZ2SgUCpYtW8Y333wjcHYSiYTe3l4BZe3r6+Po0aNcuHCBlStXil20sbExg4OD+Pr6Mjk5iUql\noq+vj5mZGaysrBgdHaWsrOy/D2TlwIED+zIzM0lJScHa2honJycmJibo6+tDqVSira2NoaEhHR0d\nnDlzhqioKGpqaoThZXJyktbWVkEyqqiooKysjD/84Q9cvXoVW1tbPD09qaurY9++fUISq2EchoSE\niCafTCZj69atXLlyReQ+njt3Tlyg5eXlZGZminGRJnJuYGBA4NV8fHyEMm16ehpDQ0Mhg+3s7MTb\n21skNmm2h8XFxcTFxYkwl6KiIsLDw5mamqKvr4/Z2Vl0dHTw8vJCS0sLZ2dnjIyMCA0N5d69exQX\nF+Pr64u5uTltbW1iC9nZ2SmSoM3MzGhqamLTpk2EhIQwODiIWq0mPDycwcFBsctRKpXU1tbi7+/P\noUOHGBsbQ19fn8nJSaKiolCpVISFhTE2NkZCQgK6uroMDg5iY2PDe++9h76+Po8//jhubm4UFxfj\n5uZGe3s7N27cYM+ePVRVVTE9PS0QduPj46hUKo4dO0ZcXBy6urq4urpSXV2Nra2tYBiYmZnR2toq\n+gC+vr709PTQ3d0tJjaNjY0YGxtz+/ZtoqKi6O/vJycnB7VazczMjPATaFyiGoL47OyseJ9tbW1p\nbW2lra1NGJhCQkKIiYmhtbWVa9euER
QURHFxsYgFiImJobe3l87OTvz8/Dh//rz4vHR1dUUOqsbN\naWJigrGxMYaGhqjVahYWFjA0NKSxsZFr166xbds2jIyMsLCwYGxsjPz8fGpra8Vu4amnniI8PJzT\np08Ll21dXZ2weQ8NDVFfXy+gP3p6ely9evVnLQr/Q3P+n+N/jv//HP99aM5ubm7s3buXRYsWUVRU\nRHp6OmfPnsXExITm5mYCAgKIiIgQ9GA3Nze+/fZbAgMDKSkpYdWqVchkMgHVGBoaYs2aNezfv5+2\ntjaeeuop9u/fz9TUFM7OzmIM5OjoiFwux8XFRaT9Ojo6Mjo6yo4dO3jjjTf49a9/ze3bt7l06RKP\nPfYYO3bs4MSJEzQ0NDAwMIBMJkNLS4vQ0FDu3LmDRCKhrq6OqKgobt68SXBwMH5+fhQVFdHZ2cmu\nXbtob2+nra2NrVu3cuTIEUJDQ9m8eTN//vOfxahpzZo16Ovr8+mnn6Knp0dYWBjnz58HID09naKi\nItzc3PDx8eHatWusXLmS0dFRioqKWLt2LcXFxYyMjHDjxg127tyJiYmJgOG2tLQQERFBTEwMDQ0N\n9PT0YGVlxfDwsCBGXblyBV1dXW7cuCFCUDRisrq6OlJSUvD396e0tFSIy5qbmykpKSEpKYnx8XGs\nrKwIDQ0VWgMzMzN27NjBwYMHBdh0enpa6FKio6N59dVX8fX1paioCGNjY3x8fHB3d+ezzz4TY8XF\nixcLFJkGSPPv8W/GxsYEBQXxzTffEBoayl/+8hd2795NRkYGUqmUyclJpFIparWaH374AWtra5KS\nkmhvb8fV1ZW6ujpKSkr4/e9/zw8//EBwcDAmJiZ0dHTg7OxMbW0t7e3t7N+/n9dff53Lly/z5ptv\nIpfLGRkZYXh4WNjqm5ub8ff3p6ioiKCgIFxcXPj888+prq5mz549GBkZYWNjg5ubGwcOHCAlJYWn\nnnqKixcvYm1tTVZWFomJicjlclxdXbl69SqJiYlYWFhQVlaGhYUFLi4uHD9+HBsbGzw8PMR0xtra\nmpiYGFGi/NwJxC+ifHjvvff2ffHFF3R1dWFgYCBGcEZGRtTU1LBmzRqxDffw8KC1tRWZTEZCQgJO\nTk60tLSIfMbp6WmuXbtGaWkp/v7+2Nvb09raSmBgoHCl9fb2oqWlxa1bt9DR0RG5kZs2bRLBsb29\nvVhZWYkmU1paGsPDw5w9e5bFixczNDSEoaEhYWFhKJVKurq68PT0ZNWqVbS1tVFYWMj+/fs5d+4c\nsbGxrFu3jvLycmZnZxkeHhbqs46ODvT09Lh8+bLQ7M/OzmJvb8/Zs2dxcHAgPDxcNNc0tum4uDiq\nqqro6+sTFtlLly6RmJjIyZMn2bt3L729vYSEhPD2229jZWVFZ2cnCoWClJQUMRs3NzdHR0cHMzMz\nZDIZra2twr6ckJDA5OQko6OjjI+PMzMzw/T0NDo6OgQGBqJUKgkKCqKkpIT4+HgRhOPu7o5araau\nru6/KOwMDAw4cuQI09PTVFZWMjU1xdzcnLBJ5+Xl4e3tLWhBSUlJ7Nu377/gxLZu3SoCWszMzLh9\n+zbx8fGYmZmJKc/Y2BhjY2MiJCU/P5/Vq1djZGTE3//+dzZv3iwCfzQBvHK5nOXLl+Pt7Y2ZmZkg\nbGuSz7u7uwUQ1d7entu3b1NTU4NUKiUlJYXR0VH09PSQSCTcuXOH4OBg9PX16e7u5urVq8zMzODn\n5yfEXRqjl5aWFra2thw7dkxEDxQUFAg+qJ6eHt999x0jIyPk5OSwadMmCgoKyM/Px8jIiMDAQBYW\nFrCyshLj+qKiImJiYuju7iYuLg57e3tOnjxJXl7ezyoffhEjSbivDDM0NKSsrEyQhZqbm4XNFO7r\nwZuamhgYGMDb25uKigqMjIwYGxsT9eADDzzAgw8+CNzPNVi1ahV6enqUlZUREREhoJdKpRIzMzMh\nh924cSN5eXl0d3fj4uLC+Pg4UqkUQ0NDka78704zDQ2qp6dHgFZqa2t5+eWXMTU15bnnnuPtt9/m\nqaeeQk9Pj5deeol169ZhZGSEoaEhn332mQgM1ajjent7ycrK4ssvv2R4eBh9fX3kcjkKhUKYogwN\nDRkdHUWtVuPp6YlEIiEuLo67d+/i7u4ubpaHHrqf3FdfX8+KFSu4du0av/71r4WW3sDAgIyMDCFP\njo6OZuPGjURERKBWqzEyMmJycpLMzEwkEonoA+jr67Np0yaGhoZob29HrVaLhVeDrLtx4waHDh3C\n3t6exMREpqamsLOzQyaT8dxzz+Hh4SF0KfPz8+zYsUM8CGQyGffu3cPd3Z2PP/6Y9957TzgPDQ0N\n+fLLLzE3N2d2dpZ169bx7LPPsnz5ckxNTdmzZw+mpqao1WqRoqVhVTg7OyOVSnn++ef517/+RU9P\nD0FBQRw9ehQdHR0WL15MR0cH+fn5aGtrk5KSQmhoKCtWrEClUmFnZ0d8fLyIqEtISAAQPZyPP/4Y\nLS0tSktL2bZtG97e3gwPD4vX6e7ujlwuF05Zf39/5ufn0dHRoaGhgcWLF5OcnMySJUsAePDBBykv\nLxc80OXLl/Pggw+iq6uLt7c327dvJyQkBKVSKbgbjY2NrFmzhgceeIDAwEBWrVpFVVUV5eXlZGRk\n/Ox78RexU9i/f/8+iUSCl5cXMpkMQLgMP//8c86cOcOJEydIS0sTasOenh76+vpwdXVl48aNuLi4\nEBMTQ3NzM3Nzc3z99desWrVKsAzb2trE9CElJYWuri4xTjt69CgrV67EwMCA2tpatLW12bhxI0ZG\nRsjlcjIzM/nHP/6Bk5MTly5dYunSpfT09LBmzRqysrIoKSlBT0+PxsZG3N3dmTUd8HUAACAASURB\nVJiYwMXFhcHBQRITE8nKymJ2dlYAUxQKBZ2dnQLcWlRURHV1Ne+88w4jIyOYmJgIdqOrqyuZmZkY\nGhoSGxsrfvfq6mpu3bqFpaUlTk5OSCQSlEolVlZW2NnZkZaWRkNDA3NzcywsLBAUFCSaX5qswQsX\nLoh49oaGBr7//ntBkra1teXcuXPk5uby6quv4ubmxtjYGI6OjuTk5AjiVWRkJB0dHYIAFRUVJQjD\nmvyGmZkZDAwMMDAwELHop06dwsnJSTRwAwIC+PLLL/H09KStrU0ECVtYWGBpaUljYyNLliwRar3M\nzExRzri4uGBpacnhw4ext7cnNTWV/fv3CyJ2Tk6OoDt7eHhw7949fH19OXz4MBKJhJKSEubn56mv\nr0dfX18E22ium9jYWObm5ujo6BAqUmNjY86ePcuTTz7J1NQUMTEx1NbW0tvbK/B0enp6bNiwgfLy\nchQKBWfPnqW6ulq4NO/du4epqSleXl4MDw9z7949AZzRoPtkMhmrVq2isLAQExMTTpw4QWlpKcuX\nLxdS8ampKWFK0/A77O3t+eGHHyguLiY9PR25XM7x48f/++wUZmZmcHR05OjRo7i7u+Ph4YGWlhY
R\nERE8++yz7Nixg3fffRdbW1vWr1/Pr371K3x9fWlra6O+vp4LFy4wMzPD0aNH0dXVxdLSErifx/fk\nk09SVVXFxx9/TEhIiGAGGhgY0NXVRVlZGbq6uuKpp7HNHjx4kO7ubqqrq+nq6uKFF14QCU+2trZs\n3LhRYLk8PT2pra3F0dGRlJQUdHR0mJycxNrammPHjnH27FlcXFzYvHkzoaGh1NXV4e7ujp+fH999\n950AdqhUKjZv3syGDRsYGRnBy8uL8+fPk52dTV5eHhMTE0RFRWFqasrFixeZm5vjzTffpLy8nGPH\njmFhYUFdXR39/f2UlZWhVCpFoEhzczNjY2O0tLRw7tw5/vznP5Odnc0bb7yBVCrlyJEjDA0NidLF\nxsYGFxcX0tLS6OnpIT4+nurqagFFXVhYYM+ePWRlZdHb28vo6CgZGRnY2NiwdOlSYmNjWbt2Ld7e\n3gKmqgmhnZ2dJTExkb6+PuRyORMTE4KB4e3tze7du0X5lpeXx/j4OLOzsxgZGeHp6UliYiJKpZLv\nvvuO48eP89VXX3H69GmSkpKIjY0lLy+PsLAwLl26JHidmrFzS0sLISEh3Lt3j97eXhEgXFVVJQRK\nmiCWw4cPMzU1RXV1tcDDR0VFMTAwIGTON2/eRFtbW4wGNcY7V1dXcnJyuHXrFvPz8/T19bFu3Tqe\nfPJJ1Go1nZ2dpKenU1NTw6lTp8RDRTNJWLt2rcgRaWpq4umnn6a5uZnVq1ezZcsWoadobm5GT0+P\nxYsXY2VlRX19PSYmJnzzzTcigV0jSvu5xy9iUdDW1ub69etibt/V1YWFhYV4uicmJuLi4oKDgwMf\nffQR1dXVXLlyhbi4OFJTU7l48SLnzp3Dw8ODoaEh4bhbvnw5165dIyoqiqysLDw9PUXOQVhYGHp6\nemzZsoWhoSHa2tpEcGliYiINDQ00NDSwdu1aysvLqa2tFXLk0dFRvv76a6ysrDh27JioXYODgzl8\n+DBjY2MirOPQoUMAQg0pkUgYGxtjamqKEydOCGow3Of5X7t2DT8/P0xMTLhz5w47duxgfHxcJB5p\nnq7m5uZ0dHTwpz/9CV1dXV588UVRu2vGahYWFhQWFlJZWUlUVBQPP/wwAQEBxMbGEhkZSUJCAi++\n+CLx8fHs2rULiURCeXk59vb2FBcX8/XXX3P8+HHMzMz46quvcHZ2Jj8/n97eXtzc3Pj666/x9vYm\nOTmZb7/9VjQdR0ZGGBgYEO7O7OxsMSa+desWJiYmNDQ0sGzZMsGM1CzkxcXFPPfcc6xfv15YvfPz\n8zlx4gQvvfQS09PT7N+/n9OnT5OQkCBITxqFoAZeorF2ayzOExMTODs7s7CwQF1dHS0tLcTHxxMf\nH8/g4CDR0dFCgHbmzBna29txdHQUN72uri5dXV309PTg5OQksGmaiDa477V5/PHHsbW1FQncmvPL\nly8nIyMDFxcX/P39xUh7cnKS6Oho6urqMDQ0FNyD7u5uent7SU1NFczN1atXc/nyZdLT09HX18fS\n0hJ7e3usra1ZWFgQeoiFhQW0tLQ4d+4cycnJrFixgvj4+J99P/7HRUEikXwtkUj6JRJJzb+ds5RI\nJFckEknzT39a/Nv/+4NEImmRSCSNEolk1c95ETo6Ojz99NOUlpbS3NxMaWkpcrkcf39/fvOb3+Dt\n7S1ixG1tbQXEVEOp/d3vfkdtbS0ODg7o6OgILl9ubi69vb3cuHGDgYEBkW2QlJTEX/7yFxQKBb6+\nvrz33ntYW1tz8uRJJiYmOHDgAP7+/ri4uFBVVUVLSwuenp74+fmJD0wikaBQKNi6dSuzs7NUVFRw\n4cIF4uLi2Lx5M2lpaRw+fJiXX36ZxMRE/P396enpoaGhgSeeeILVq1cLvJyLiwuAIEupVCp0dXWF\nqKmlpYWYmBgCAgJEZsHbb7/N4OAgpqamdHd309jYiFQqxdTUlC1btnDjxg2hoIuLi6OwsFBcrMHB\nwRgbG4un5M2bNzl48CALCwsEBASQlZXF1atXWbx4MUuXLuWll14Smg6FQoFEImFoaAgdHR3xdNSg\n8ORyORUVFbz77rskJSVhbW3Nyy+/zIYNG7C0tCQsLIwVK1aQmZlJV1cX/f39VFRUiGh3S0tLMQnY\nsmULGzduZP369axZs0ZkIdy+fZsrV65w5MgR3NzcRGK5QqFgZGSEqKgoenp6hEIR7md9mJiYMDAw\ngImJCTo6OkilUhwcHNDX1xceiqqqKsrKyuju7hYJ51999RXvvvsupqamVFdXc/ToURYvXgzc3zXa\n2tqSn5+PpaUl1dXVWFhYoFQqcXBwwNPTk7Vr1wLw2GOPcePGDaamprC3t+fDDz+kv7+fxsZGYmJi\nmJ+fF4uYp6cnJiYmFBYWEh8fj42NDVZWVvj7+zM6OsrCwgJyuRx7e3sMDAwIDAzEzMxMCMwSEhIw\nNjZm6dKlDA8P/2+lTv+cncI3QOr/cu4l4KparfYGrv7030gkkgBgKxD409/5RCKRaP+nH6AJG9EI\nZpKTk7GxsRH5kH/605+wtLTkD3/4A8uWLSMmJkbo2x0dHdHV1UWlUqGtrY2tra0whdy6dYuAgAAi\nIyO5c+cOYWFhrFq1iry8PDIyMnB1deWHH36grKwMmUyGl5cXW7duZe/evbi7u+Pr64uNjY2IQh8a\nGuKn31PIRl977TUMDAwE2z8gIIC6ujry8vJYu3YtNTU1IrpcQzhqbm6mp6cHCwsLdHV1qa2tBe5P\nW0ZHRzlz5gydnZ188cUXREdHC/diVlYWBgYGNDU1UVNTw4kTJxgcHCQsLIzh4WEsLS354IMPKCkp\nEYwJT09Pli1bxksvvSTgK5qAUo3QxcrKiq1bt7Jr1y5SU1PJyspCKpXi4uLCpk2bmJubE85Sf39/\nDA0NiYiIEKpLPT09/vKXv2BmZkZHRwcjIyMEBgaSm5tLVVUVx44dE0Skvr4+Ll68iKurK+Hh4aKh\nqXlvAwICCAsL44knnmB8fJycnBw6OjqwtLTExsaGiooK/Pz82LhxI59++qmQOXt4ePDiiy+Sm5vL\n3bt3kUqlpKenC8zfihUr8PHxwcXFRTRXNeRozRQrNjaW4eFhrl+/Tmdnpwhg8fPzIyAggJ6eHoyM\njNi7dy/Hjx8H7jfIH3jgATZu3EhxcTHGxsYcP36coqIihoaGcHV15datW/T19REfH09wcLCABWv8\nE4888gifffYZ69evF4tCT08PxsbGyGQyrly5IhD3UqlUXDfGxsaCWGVmZiYWvrm5OSoqKli7di0/\n/vijUMX+3OM/LgpqtTofGPpfTmcCB3/6+iCw9t/OH1Or1TNqtfou0AJE/6efMTY2Rn19PTMzM8zN\nzTE6OopMJhOy3ISEBHbv3k13dzeurq4EBATg7e1NYGCgSGWysbHh1KlT9PX1cePGDQC2bdsmOsqe\nnp60tLRgZ2fHc889x9DQEJcuXcLW1pYlS5YIi6mhoa
Gof2dmZgSRR5PSA/dr//T0dMrKyvD29sbI\nyIjc3Fy+/vpr2traSEpKoqWlBbVaTWxsLHDfjt3Z2YlKpcLKyoqpqSlCQ0MFfAQgODiYtrY2cnNz\nuXXrFj4+PkRGRuLo6CiAIS0tLaJGNTY25pVXXqG7u1uAWdPS0qisrOThhx9GqVQKF11FRQVKpRI7\nOzuampqIiorC3NycrKwsLly4wKpVq7h37x49PT389re/Fbbif/3rX2zYsEFMfSYnJ4X3orGxEZlM\nxu3bt+nu7mbnzp14eXnh6upKamoqSUlJmJqaivfo8uXLQl9QW1uLk5MTHR0d/2Wq4+PjQ11dHdev\nX8fQ0JDp6WlycnJYsmQJ2traWFtb4+fnR0xMDCYmJshkMhHOY2trK1yQNjY21NTUiMnVxMSEGKWG\nh4ejp6dHeXk5arUahULB9PQ07e3twpkpk8morKwUYbwTExPMzs5iYWFBb2+v+Myio6P59ttvqaur\nQ1tbm4sXLxIdHU1wcDCRkZGcPHmSc+fO4eXlRXt7Oz/88ANDQ0NERUWRlpZGZGQkp0+fJigoiNzc\nXLHL1TQhnZycqK2tpbW1ldHRUVEWaKZiN2/epLCwkL6+PoaHh8nNzaW2thYDAwO+//57+vr6WL58\nOd9///1/ug3F8X8qXrJTq9W9P30tB+x++toJKP637+v66dz/46FSqYT3u7q6mqqqKl5//XXkcjkt\nLS0YGxtTVlbGp59+KkhBGoOMl5cXy5cvZ9euXeTn55OdnS3qMg3XcPny5ULC+9133xEXF0dYWBix\nsbFipVUoFCQlJbFhwwYCAgJISUlh0aJFPPLII2RnZwvkOdy/cMfGxrC3t6egoIDHH38cAHt7ewID\nAyksLGRiYoJTp06Rn5/P+vXr6ejo4OTJkwQFBWFhYSEWPU3EF9yXUC9atIi1a9eKxCDNfHpsbIxF\nixYxMjJCQUEBHR0d3L17l71791JVVcVHH33EyZMnGRwcZOvWrXz88ceYmpoyPj5OfHw8L7/8Mq+/\n/jrOzs64uLhw9uxZrK2tkUgkhIWFCVT4/Pw8ixcvFqaouro6Dh8+zOOPP86KFSsEwHR0dJTw8HDy\n8/OJioqipaVFBNOcPHkSc3Nzdu3aJTiHIyMj2NvbU1VVRVpaGr29vTQ2NhIeHo5cLic8PByAgoIC\nYmNjaW5u5vr16zg4OIgxIMDQ0BBbt26lq6uL9PR00tLSiI2NZeXKlRQWFuLt7S0cmvX19SQmJvLu\nu+9y4cIFduzYQW5urpDQZ2Zm8sknnxAcHIyjo6OQyickJGBlZcWNGzdITk5GLpfT0NCAtbU1Xl5e\ntLS0kJqayocffsjFixdxcnIiICAApVLJ5s2bmZycJCMjg4aGBhISEkQfRdMs7+zs5PnnnycsLEwY\nxFxcXNDS0hL2/NWrV3Pt2jUOHTrE+vXrefXVV5mdncXFxYV//OMfTE1Nid/vu+++E2FHS5cuRalU\nkpqayvPPP09kZCTnz58nMvI/ChnF8f+60ai+r5P+35YpSySSXRKJ5JZEIrk1OzsrwCKaYE3NBaav\nr09aWhq+vr5s3rwZHR0d3N3d+eKLL9DT0+Ojjz5i3759ODs7Y2lpyW9/+1vhcHR0dCQ0NJTAwEAu\nXbrE1NQUERERXL9+nf7+fqKjowkKCsLX1xeZTMbY2BgeHh5cvHiRM2fOMDw8TF9fH25ubkRGRoqy\nRKVSkZWVxfz8PIsWLRIlgkKh4I033hBhIxEREfzxj39kz549wj+gUqnw8PAQwpYvv/xSRIU5ODjQ\n398vYLASiUSIq6anp0WIbnBwMObm5uzbt4+lS5dib29PT0+P+PsawrQmlam5uZnnn3+eK1euUF9f\nT0VFBY888gg9PT3I5XJKSkrIysoiMzNTZHrq6emJMNSAgAAUCgWlpaWUl5dz8+ZNoUZ0d3cXgbke\nHh4UFhZibW3N7Owsd+7coaKigu7ubhoaGjA0NMTR0VHEt0ulUrS1tfHw8KCsrEx8Zk1NTXh5eTE7\nO4uHhwfBwcEiKcvCwoK+vj6xGEVHR+Pg4EBqaipLly7FwsKCiIgIhoaGSE9PF/oHzW6vt7eXmZkZ\nsbNwcXGhubmZDz74gOnpafLy8rh58yanT59GX1+fsrIyamtrxbZfYwvQTEs0EW0lJSViKrSwsMB3\n333H/Pw8WlpaWFpaCgeplZUVEolELHQ5OTm4ubmxfv36/1L6FhQUIJVKmZ6e5vbt2zz99NNYWFgI\nLqmlpSW/+tWvaGtrIyEhgY6ODoyMjNDX1xcZqAqFgqKiIvz8/P4/Qbz3SSQSh59ubgeg/6fz3YDz\nv32f7Kdz/7dDrVYfUKvVkWq1OtLBwYHa2lomJyeZn5/H29ubkZERSktLRR3V09ODnZ0dVlZWODs7\nk5ycTFRUFK+88gpmZmYi8OKzzz4Twh19fX0hDV66dCkTExMEBQWxaNEiBgcHKS4u5vjx43zzzTci\nh+Chhx7iwQcfxNzcnOHhYe7evUttbS0ymUxcCJqnq6mpqSAia35+bGwsg4ODrF27lqmpKaqqqmhv\nb2d4eJjt27cjk8nQ19cX9JxFixaJLaORkREeHh5CuagBiGpGkQkJCdTV1Qlp7ltvvcXOnTv58ccf\nefbZZ5HL5SJERq1Ws3//fnp6etDV1eWTTz4R79/Y2BhXr16lqqoKZ2dn5ubmcHNzw8TEhOjoaMzN\nzQUpKjk5mRdffBFbW1u0tbXR1tZGX18fqVTKzMyMyE6wtbVFqVSyYsUK1q1bR2pqKt3d3eK1ayy/\narUauVwuVJMSiYSoqChWrbrfkw4LC2N6eprGxkbUajW5ubkEBQUxNTUlItF0dHSIiooS8miNoUoq\nleLt7c3NmzcF+l9DXQ4ODqanpwctLS0WL16Mq6srNjY2rF+/Hjc3Nx577DGUSiX29vbMzc0JtaSF\nhYXAzgUGBhIVFSVYk3B/d6hSqTA3N+fkyZMCwSeVSqmvr+ftt99mYGBAlC6Dg4NIpVKCg4Px8fHh\noYcewt3dnZ6eHhYWFkQOp4ODg+Aw6Ojo8Mknn1BeXk5lZSWmpqZMTk6iUCgwNjbG0tKSkpISAf5x\nc3Ojvr4emUzGQw89RF9fH4cPH/7ZN/f/6aJwFnjop68fAs782/mtEolE//9i772jq6zTve9P2k7d\n6clO7zu9ZyeEEEoCgQChShEFxEFFfS2jM2MZj8uRcc7oeHD0qCgOPDgoINJLgNBSSEJ6JW2nkd52\n2k7Z6fv9g2f/1pm/Hp+13vUuz1rn/k/FTdbOfV/377qu7/fz1dPT8wbkwC+iO6xfv57+/n46Oztp\nbm4WaUu64M8HDx5w+fJlIiIiOHz4MJ2dnfj5+Ymh3cjIiMh80KGndDeubgfd2trK7373O6EZj4iI\nYG5ujqtXr9Lf388rr7wiAmpDQ0Pp7u7m9u3blJeXY2try+joKPDYq+Hj48Pq1auFTTggIIDExETi\n4+PFjvz77
78nOjqa8PBw2tra+F//63+xd+9e7Ozs+PzzzwkPD2d6elrE0bW2tqJUKsVwUyqVcuzY\nMVQqlYCZtra2snLlSqRSKS+++CIvvfQSd+7c4cCBA8TFxfHUU0+xYcMGwsLC2L17N+np6UxNTQmI\nqrW1Nfr6+iInYnJyUsTPNTc3k52dzdDQEBKJhLq6OmJjY7GwsGDt2rX09fVhbGwsvp8bN26QmpqK\nkZEROTk5ZGRkEBUVRUdHB62trUKTEBkZSUdHB+Pj48TGxtLc3Ex6ejqNjY0sXryYgoIC0ftXVlYy\nNzfHtm3bCAkJISEhgby8PORyOWlpaSxevJh/+7d/47XXXhNW9bq6OnJzc/Hx8WFhYQG1Ws3ExASB\ngYFicKcLsV29ejVDQ0NkZWWRmZkpiFIjIyN0dHRgb2/Pzp07xYlUl2nZ3t4uhq2Tk5OYmpoCj18Q\n1dXVzM3NYWZmRnd3NwYGBkRFRfHGG28QGhrK7du3+eijjwgMDOTFF1+krq6OsLAw2traKCgowNbW\nlvb2dry8vP6FpdjT0yNmLvX19SJbYnx8nPz8fI4ePcqlS5d4+PAhoaGhHDp0CAMDA7EFSkhIoLKy\nkkWLFgldxS+5fslK8jTwAAjQ09Pr1NPT2w98DKTo6ek1Aqv+9z+j1WprgJ+BWuAm8P9otdr5/9Pf\nMTIywsOHDykoKEBfX5/CwkKGhoYICAhg8+bNfPfddzQ0NGBnZ8e9e/dwdHTk7bffJj09nYCAAFJT\nU5mYmEClUqHRaOjt7QUes/43bNhARUUFxcXF+Pj4IJfLUSgUBAYGChOOkZGRGMgUFxejVqtJTU2l\noaEBPz8/Xn75ZfT09MSNW15eLjQUycnJzM7OCi2/q6srXV1djIyM8MILL+Dr60tubi7h4eHY2NjQ\n2trK5cuXuXPnDidPnsTAwEDcuCkpKUxMTDA2NsbIyAh+fn7I5XKio6P561//yt27d1EoFPT09BAQ\nEICvry9ZWVkcPnwYlUqFRCIRN3xXVxdXr15lfn5eRNOlp6djZmaGSqUS7MDY2FhaWlowMjJCT08P\nuVzO4sWLsbW1JSQkBAMDAzo7OykrK6OgoIClS5fS3d3N3NwckZGRGBoa8sMPP3Dv3j02bNjAwMAA\nfX19fPTRR8hkMhF2UlJSIrYYcXFxxMTE4OPjI1BwarUaQMx4ioqKaGhoYH5+Hmtra+FVGBgYIDAw\nkL1796Kvr09nZyfffPMNRkZGtLS0cOHCBVpaWjA3N+fu3bti+7B161bOnz+PgYEBra2tWFpa0tjY\nyMLCgihEAQEB2NjYkJGRIdLKy8rKqKiowNHRkWXLltHa2kpaWprQVURFRfH000+zc+dO/P39MTY2\nFgBYY2NjoqOjxQwrMjISd3d3srKyaG9vF7JxqVRKT08P2dnZYoCpVqsFHk6j0bBr1y4UCgVOTk5i\nc7Nz50527tzJ2NgY4+PjuLi4cO7cOWH5d3V1JSgoiPLycvz8/P7P1eB/X79k+7BLq9U6a7VaI61W\n66bVao9ptdpBrVa7UqvVyrVa7SqtVjv0X/78X7Rara9Wqw3QarU3fskPMTY2RnNzM1ZWVsTExHDt\n2jWSk5PFDnjz5s3cvn2byclJbG1txc41OTmZd999l5ycHBGR3t7eLkhGISEhDA0NsW7dOhGzpWPw\nOzs74+joSHV1NRYWFgJAqtFoSE1NRSqVEh8fj5OTE+7u7jQ0NIijqIODA++99x7W1tZij2xlZcXQ\n0BB9fX2sXLmS6OhoVq1aJSbWw8PDrFy5UmRL+Pj4MDw8jFQqZWFhAXicYOzk5ISVlRVVVVXMzMyw\nYcMG6uvrcXd3x9PTkx9++IHc3FyWLVsmVoxHjhxBoVAgk8kwNzdHIpEQEhLC0qVLiYqKwsrKiuDg\nYKFj2Lp1K0lJSXz++edUVVWxdu1aTE1NMTY2BmDnzp3s2rWL9evXi5g0HehVxww4d+4chYWFdHV1\nERUVxYoVK7h3755gLvzjH/9gaGiI3/zmN8TFxVFYWMj9+/e5cuUK9+/fZ3R0VKDoTU1NBea+tbUV\nLy8vli9fLgZ4OinwH/7wB6ampoiIiGDJkiUYGBhw9+5dxsfHuXTpEmNjY8TFxfHWW28RGRkpjv4A\nGRkZyGQy3n77bWxtbent7SUkJEQcq/38/EhJSRFzEh1fwdbWlra2Nr7//nvxtj179qzwqzQ3N7Ow\nsMAXX3yBRqOhoqKC69evc+zYMRoaGlCr1VhbW4tw2YSEBPT09Kivr2fLli24ublx48YNrK2tcXJy\nEr6d1tZW0SY5OzvT0NBAXV0dP/74I/b29sjlcrRarci90ClodT+3qakpNjY2qNVqVq1axb17937J\nowj8SrwPn3322Z9efPFFzM3NsbOzIzo6WsSg6xKIFQoF69atw9/fn6VLlzI1NUV1dTWRkZGYmJiQ\nlpaGtbU1QUFBhIWF8c0337BhwwZyc3NFddZoNExOTuLp6cnU1BTNzc08fPhQeAo2b96MQqHA2NiY\n2dlZfvrpJyQSCbW1tWIKfe/ePYKCgnB1dWXJkiXcvXtXpE4/evQIOzs74uLimJqaoqKiQqxXs7Ky\nGBgYEINFNzc31Gq1QNfX1tYSGBiIk5MTlpaWYrqclJREQECACI/VaDQoFAoUCoWAiq5Zswa1Wk1N\nTQ1HjhwRmPLFixdz//59ZDIZd+/eRSKREBYWhomJCYGBgbzxxhsC2lJfX4+ZmRnBwcFcvXqVa9eu\ncezYMezs7MT8Y8OGDZSXlxMTE0NwcDArV65ErVaTmZmJgYEBL774IlNTU0xMTGBkZCTe+lNTU4yN\njWFubo6np6cAzE5NTTE5OYlKpSImJoaTJ0/i7++PiYmJ2DDpsjL+qyW+pqYGmUyGQqFg7dq1JCUl\nodFoSElJoauri/LychobG7G2tqa2tpbMzEwOHTrEzMwMfn5+XL9+HalUiqurqwCg6JiZly5dws7O\njrKyMsLCwli9ejX37t3jhRdeoLa2lvz8fGJiYvjuu+/o7e0lIiKC6elp9PT0hIoSEMPxhoYGvvrq\nK2ZmZkQSWlxcnNi66DZAPT09wvdx9+5dli9fjoeHh8gthcfBM05OThgbG6Onp0d5eTlNTU3COq4L\nOo6OjmZoaIjh4WHBCN20aRNHjhz57+N9sLe3Jzs7WwyOlixZQkBAACqVitu3b3PixAnMzMwoLCxk\nfHycEydOUFJSgo+Pj1B0ubq64uzsjL+/v2gfmpub2bFjB6ampri5uZGamsrMzAwrVqwQSkeJRPIv\nx0l9fX28vb1FpdZqtSJrYceOHcBjXfry5cvFL1c33NLJanNyckRiVF5eHi+++CLXrl0T6UQmJiYU\nFBTQ0NBAWlqamAw//fTTNDQ0YGtrS09PDxYWFnz11VecPn2akZERcnJyDSPhQgAAIABJREFUOHPm\nDKamppSUlIiCqSMIZWdn4+bmRkFBAcbGxty5cwd/f398fHxYvHgxCw
sLIt27r68PpVLJrl27uHXr\nFv39/Tx48IC2tjYsLS0xMTERmLnCwkLUajXXr18HEOnV9+7dY/HixYyPjxMXF8fdu3fp7OxkcHAQ\nIyMjEfhqYmKCXC5n7969LF68mJiYGB49eoRSqSQjI0PAZuFx2zA5OcnIyIgQpg0ODrJx40Z27twp\n4LclJSXcunVLaCC8vLy4efOmCM1NTU2lsbFRDO4ePXokBsXGxsYYGhry8OFDVq5ciVwuF9+LTgGZ\nlZUltAUmJibU1tbS0tJCV1cXHR0d7Nq1C4BFixZhb28vuA+enp7I5XLq6+vRarXY2dmxe/dukYRd\nVVVFaWkp8/Pz4mWQmpqKm5ub4D8AInrv4cOHGBkZ4eHhwerVq7l9+zYxMTG4urpiZWXF4sWL6ejo\nQE9PD5lMhp+fnwD49vX14evri5WVlTBy/ZLrV1EUAHHjDA8Pc/ToUX766Sf09fV56623aG1t5auv\nvuL69et89dVXREZGEhQUxDfffMPixYsZHBzk22+/xcjIiJKSEvGZWq2W9PR0Ll26xJ49e8jJyeGd\nd94RwxgnJyfxsOiMMIWFhWL1t2HDBtRqNUuXLqWhoUEYoiorK8nNzeX48eMMDg7S0tKChYUFUqmU\nzMxM8vPzMTAwwMPDg1u3bgmgSVNTE88//zxqtZrly5fz9ttvU1JSIgROAwMD4qjo5uZGWVkZfn5+\nuLu7Y21tjZGRER988IHQL+h6yejoaAwNDXnyyScJCQlhy5YtnD59GolEQlVVFfr6+tTU1LBt2zaU\nSiXnz5/n6tWrKJVKvvzySxFo6+3tTXt7O11dXURHRxMUFISenh6lpaUYGRlhampKYGAgxcXFlJWV\nkZyczKFDh0hISECpVDI2NsbCwgJFRUUCaGpkZCSsyQ4ODri6ujIwMMDw8LCIjp+dnRVGoPfffx97\ne3uRHn7lyhXWrFnDhQsXqKmpEf4Jd3d39u/fj5mZGWZmZhgYGBAZGYmvr69QQh48eFDkivb392Nj\nY8Ps7KxoQc3MzDh37hxKpZKffvpJgIPNzMx4+eWXOXz4MN3d3YSFhVFdXU1wcDBPPPEEKpWKFStW\nAHDgwAHm5+cpKysTbkVditn58+dxdHQkJCSEsLAwenp6RDaqVCpFKpVy48YNjh49Sl9fHw4ODvz8\n88/A423J7OwsBw8eZMWKFbi6uqLRaNi4cSPT09NiszMyMsLExATW1tZotVrq6upQqVQUFxeLwfTI\nyIjIFvkl16+ifTh+/PifdFNwfX19PDw80Gg0+Pj4sGjRIqHscnBwwMrKij179jAxMUFQUBBnzpxh\n+/btREdHC2Luo0eP+Omnn1iyZAljY2PY2dkxNDRESEgIpaWluLi4sGTJEtatW0dERAQ///yzeBjl\ncrkIo83JyRHOQ51oqKKignXr1qGnp8d3333HM888w8TEBJOTkyL+vbe3l9nZWdasWYOenh7W1tbo\n6emxadMmrl27xrp164iNjeXatWtotVpMTEy4fv06jo6OxMXFIZFIaGtrIzk5GZlMhkajYWJiQvS3\n+/bt49q1azg6OuLu7i7MUxs2bGB0dFTs97Ozs3nnnXf46aefRPy7oaGhiLxLT0+nu7ub8fFxhoaG\nsLGxISQkhMnJSVpbW8UqVacE9fDwwN7eHnt7e1544QVOnjyJubk5q1evJjMzk9HRUXx8fMRK1sTE\nhPLycmET1lGp7Ozs0Gq1zM/P4+Pjg5GREV1dXULZNzU1JWjLGzduxMnJierqatRqNVKpFD09Perq\n6rC2tqaqqkpYxDMzM/H09GR8fFwY0xQKBRcvXiQxMRFTU1MUCgW5ubl0dnbi6enJvn37RO89OzvL\n1NQUrq6u3Lt3j8DAQAFg1VmU7969S0pKinBpbtu2DRsbG0xNTfH392dhYYHIyEisra1pbW3l1q1b\nWFhYIJPJMDQ0JCYmhieffJLu7m7q6urYv38/s7OzDA0N0dTURGdnJ729vcjlcuEq1SVz5+Tk8Mc/\n/pHe3l7hYh0dHRWJW4mJicTFxQkalaOjIwsLC7i6uuLl5fWL24dfRVH47LPP/vTee+9hamrK6dOn\nee211zA2Nubjjz8WfbulpaUYMqlUKrKysnB2dmb16tVcuHABOzs7Ll26hJeXF4ODg1y4cIGVK1cS\nFhaGvb09XV1d4sHVeQY2btxIXl4eUVFRZGVlYWtrS1JSEo2Njbi4uCCRSATVKTIyEplMxtWrV1m2\nbBnZ2dksW7aMsbEx7t27JwqBRqPB0dGRnp4eHBwckMvlNDc3s3HjRmpra7GysqKnpwepVMrw8DCP\nHj3C2dmZjIwMkSGQn5/PwsICKSkpvP/++3h7exMUFER0dDR+fn588sknzM3NYWNjg52dHePj47i7\nu7N9+3ZsbW3x8fERyLTR0VHBZjA1NRXrz4CAAAwMDAgICCAgIAAzMzMGBgYwMTFBq9UikUgErETn\nPbC1tWV8fJzh4WHm5+fZunUrY2NjqFQqoXdwdXUlIiKCyspKIiMjMTIyoq6ujsDAQObm5vD09GRy\nchI/Pz+R2QmP9/3ff/89K1asIDIykszMTNRqtSgOOh1EeHg4DQ0NrFixgqGhIRE1ODU1JQCvoaGh\novB3dnZy69Yt/vCHP9Dc3CwyLSwtLUUS1YULF/jss8+YnZ1lYmKCubk5XFxcsLa2FuE0xcXFJCcn\nExoaypkzZxgcHBTx8ElJSZSXlwuJ9uDgII6Ojly6dElEuA0PD1NUVCScnzoGhoGBgZBN67gMeXl5\nWFpasnbtWqytrZFKpTx48IDnn38eR0dH2trahKNVF5yjw9JnZGSwevVqHjx4IESAunySGzdu/Pcq\nCra2tlhZWTE7O8vg4CCTk5N4eHjg4eEhsgajoqIwNDSktbVViJzS09OZn5/n+PHj/OUvf6GiooKf\nf/6Z5uZmli1bhkKh4OzZs7i7u+Pq6kpxcTG7du0iKiqKoqIisrOzycrKoqSkhOjoaJEVcfz4cYyN\njYWpadGiRYyNjXHx4kXeffddmpub8ff3x97enpiYGNzd3XFxcSEsLAwDAwP8/f3Jz88XbzetVouf\nnx9zc3PMzc2Jo7WpqSlhYWH8/PPPpKWl0d/fT2hoqNi/W1hYiIj1/v5+PD09hYVad2Ts6urC1tZW\nDGR1J4Ply5cLGnNwcDBXrlxh9erVgmJkY2ODRqNh2bJlosA+evSIqqoqXF1dhZV727ZtWFtbMzU1\nJZBi5ubmfPvtt1hZWaGnp0d3dzdxcXH09vaiVqupqqoiLCyMuro6wY708vKitLQUW1tbrl69iru7\nO4aGhujp6VFcXExmZibJyckUFBSwatUqLl++jEwmY3R0FEtLS1GcdOwGMzMzAY3x8fFhampKeF/q\n6uqEsOju3bvIZDL09PQEbUupVKJQKBgYGMDDw4Pf/e53ODg4EBMTg1QqRSKR4Ovri4mJCbm5uYSG\nhjI2NkZ+fj7btm1jYmKC9
PR0NmzYgJeXF76+vpSWltLT04O1tTUnTpwgLS0NV1dXLl++LARfuiiA\n6upqCgsLCQgI4ObNm6xbtw65XE5XVxeFhYW88sorjI+PC47DmjVrBF3J29ubrKwsUlNTKSsrw9/f\nn59++km0Rrr4OZ3N29LSUucn+R+a8/9c/3P9z/Uv138vmvOnn35KS0sLExMTgpenA5XoqDv9/f10\ndXWxZMkSvLy8eO+991i9ejWxsbF0dnaysLAg/p+VK1dy9OhRXFxccHZ2JjMzExcXF3G815GXkpKS\nKCgoQCKRCFWaRqOhu7tbKAuVSiW1tbW4urry0ksvUV5eTmdnJ1evXmXNmjUYGhqKIZu7uztWVlY4\nOjqSn59PU1MTIyMjQm5aVlYmAkEXFhY4d+4cx48fFww+V1dXtm/fzuHDh0lMTMTX15fDhw+zf/9+\nHj58yMLCAoaGhkIy7OLiQnl5udA36DYmxsbGBAcHC7yajpo0Pz8velx9fX3q6upYtWoVR44cEdZt\nOzs7QbvWtW15eXmEhoYik8mIj49nbm5OhJvGxcWJ3f327dtF7oEuw2NwcJCenh5iY2OpqKjg4sWL\nLF26lIWFBaGoHB4e5u233+att97CwsICe3t7/vnPf5KamoqZmRkxMTG0tbVRXl6Oq6srZmZm+Pv7\nU1JSQnh4OJOTkxQWFrJlyxYRCa/zgHz66ad8+umnPPfcc1y+fJlz586xaNEiIiMjaW1txdXVlZGR\nESIiIiguLhbDSX9/f2HQmp2dJSQkhFu3buHg4ICZmRmbN29m37597Nixg4CAAIqKijA3N0epVNLQ\n0MD+/fsFx7Onp4ehoSFyc3NZtGgRUVFR3Lp1iyeeeILKykrBQ8jLy+OVV14hOzsba2tr/vrXv+Lm\n5oaTkxNNTU3iNDg4OIiXlxf6+vqcPXuW8PBwFi9ezOuvv87XX38tth86kVRAQMAv5jT+KrYP09PT\nZGdni6SbO3fuYGFhwaZNm3BzcxP9nYWFBUuWLMHW1hZTU1MOHjwo0oQyMzNFQvKXX34JgEKhEJp5\nXTBrQEAA9vb2qNVq3N3dOXPmDHK5XFCb1Gq10OaHh4dz5swZ/P39SUxM/BenmY5YPDw8zNjYGJWV\nlUxNTTEyMkJZWRmHDx/GyMgIOzs70tLSRP+bm5uLqakpg4ODLCwssG/fPpG4PD09TXR0NBkZGQQE\nBAhCc3R0NBKJRKC+NmzYILgIOtS6p6cncXFxNDc3s3TpUm7fvs3AwAAuLi4iZ9PX15ekpCQsLS2p\nqqpCqVQKCXViYiJSqZRNmzaJ6HQ7OztmZ2dJSEjAzc1NJEgPDQ3x9ddfY2FhQUxMDBqNhpdffpmK\nigpKS0s5e/asgK18+OGH2NnZsWzZMuGS9PX1JTY2lpGRERHeqvsOJicnkUgkWFpasn37dhwcHJDJ\nZCIo2NDQUETL5eXl8fDhQ2F/dnNzY3R0lCeeeIKBgQFBJAJYu3YtxcXFdHR0CHenLpGsqamJkJAQ\nHj58KIJicnNzxXpSZ1nv6OjAzs5OFHyAhIQEoUi0s7Ojra0NfX19kpKSqKurEwnfuoGujpC9sLCA\njY0NDQ0NmJmZIZPJyM3NFduz7u5u0tPTeeutt3j11VdJTEwUIGJ7e3vGx8dFVIC7u7tIBEtLS+PO\nnTs0NTVx584dFAoF3t7efP7557/4efxVFAU9PT28vLwYGhpCJpMREREhWHi6dJxXX31V9MESiQSV\nSiVAE/39/URHRwuLq05PoKMS3717F41Gg4ODA8PDw+KXreupATQajQgDlUqlrF+/nunpaZKSkpia\nmhJedoDq6mrGxsZ48sknqa+vR6FQsHnzZmJjY1m+fDnm5ubs27cPLy8vnJyckMvlKJVKTp48iZOT\nEx0dHSQnJ+Ph4UFcXJxwSU5NTWFgYPAvvW9cXBwuLi4cP36cjIwM1Go158+fx8PDg4CAAJRKJZ6e\nnsBjuXhISAjt7e1YWVnh4eFBYWEhfn5+FBcXC51DUFAQ1tbWbN++nVWrVqFSqZDL5UxMTHDu3Dng\nMdNQ952dOXMGOzs7KisrWVhYENBatVqNgYEBR48e5eeff+Y//uM/8Pf3FxARY2Nj/Pz8qK2tFRJl\nR0dHvL29uXHjBhqNBjc3NxwdHXnvvfcAeOKJJ5iZmaGrq4snnniC8vJy6urqsLS0pKKigr6+Pvbs\n2UN4eDgJCQm8/PLLguEQHh5Of38/J0+eRK1W4+rqKpBoBw8eZHJyktHRUfbv38/f/vY3pFIp5eXl\nqFQqcnJymJiYoKGhgcrKStauXcuRI0eIjIykqqqKxMRE9PX1cXJyEjg/QDhDdaRmHcDGzMyMO3fu\nEBgYSF5eHtHR0SLxy9vbm7q6Oqampujp6aGlpYWOjg7hbwBETODc3Bxffvkl/f39XLhwgYmJCerr\n65menhaAGd0KUqVS8dxzzzE0NERFRQUuLi4C//7uu+/+4ufxV1EUtFqtWP/Z2NhQWVkpJrKtra0U\nFBSwZMkSqqqqkMlkZGdnC+/AtWvXuHr1qviCZmdnRRXXQUgjIyPp7+8X8V46x5+joyNPPfUUExMT\nXLlyhVdeeYWGhgZ8fHyEM1N3PDcxMSE7OxtAmIFmZmawsLDAyMhIyHTVajWJiYkClmlqakp+fj5S\nqRR/f390A9XBwUGqqqpobGwUdtyNGzdy48YNTExMRE6hhYUF5eXlLF++nIiICP7yl7/Q2tpKZmYm\nvb29/Pjjj9TU1IioOiMjI0xMTPjtb3/Ljz/+yM6dO6mtrcXGxgZLS0skEgmjo6MYGxuzY8cOzMzM\naG9v58qVKwwMDODn54eBgQHbt2/HzMyMqqoq1qxZQ3R0NFZWVkxOThIcHMy1a9dYvHgxp0+fxt3d\nndzcXGpqajA2NhYPsG7zYWNjg5WVFRkZGWJvXlNTQ2ZmJmZmZnR0dNDW1gY8ZkqMjo5SVVXFhx9+\nSHx8PGvXrqWkpIShoSHm5ub4z//8T55++mkB1blx44aIYrty5QoqlQo9PT06OzsFA2P79u0i5i8r\nK4vh4WEOHTokdBzz8/PY2dnh5OSEnZ0dfn5+ODs7ExAQgKWlJXl5eXzwwQf84x//EPZogLy8PHx9\nfamsrBTHf3j8pnd3d2f58uW88MILBAYG0tfXx9zcHH5+fiQmJrJkyRLs7OyQy+WUlpaya9culi1b\nBoBSqRQx9729vdy4cYO9e/cSFxeHt7c3SUlJbNq0iYcPH5KamkpKSgq3bt1iZGSE1157jZCQEJRK\nJaWlpVy7do3a2tpf/Dz+KrYPf//73//0yiuv4OnpSWdnJzExMZiampKTk8Ps7KzI4UtNTeXs2bPC\n5OLg4ICLiwuPHj3Cz88PQ0NDEUhy/vx5du7cibm5OS4uLqIHq66uFnTgoaEhRkdHqa2tpby8XJCY\nHj58SEtLi+g1fX190Wq1InL9wIEDaDQaGhoaGB8fp76+Hg8PD0ZGRrC2tiYnJweNRs
PIyAgNDQ00\nNzcLU83c3Bzx8fEiR2Ljxo1MTU1x/vx5Xn75ZdLS0rC0tBQioNDQUCoqKnB2dsbGxob29nYWFhaI\ni4ujra2NrVu3Mj09LUChS5cupaioCFNTU1JTU/nuu+9EC5CTk4O+vj43b95kaGiImZkZnJycGBgY\nYOfOndjY2NDb20tcXJzQNcDjInjx4kViY2PRarWUlJSgUCgwNzcnPDwcuVyOnp4es7OzjI2NUVJS\nwsLCglDTjY2NYWtry/nz5wVTUncTx8bG4uDgQFNTE3l5eWzbtk2oA3fs2MGxY8cEdGXjxo3CSv7U\nU08RFRVFUFAQg4ODmJqaioKrVqt59tlnxVH8/v37wvA1OzuLlZUVDx8+FGlKOkiMUqlkbm4OmUxG\nVlYWIyMjmJiY4O3tjVKpJDo6Gg8PD2FMOn78OElJSSKjIzQ0lMbGRuHp+M1vfsPs7Cz37t2jsLBQ\nmLp0ehB4jAy0tLQkNjYWJycnbG1t+frrr9m0aRMdHR2YmJjw4MEDdu3ahUql4vz58zg4OJCVlYWD\ngwMjIyMUFxfz3HPPodVqUalUwna+evVq/Pz8mJqaYvny5Rw+fPi/j8xZh8aanZ0VWQw3b94UzsRN\nmzbR1tZGc3Mz8fHxjI2NCZY+gEwmQyKRiGOaSqUCENHsc3Nz2NraolQq0dfXp7u7G4lEgrOzs5hJ\nBAQEiFjyFStWkJSURFtbm/hsa2troeWfmZmhurqa8PBwpFIpJiYmPPHEE7i5uf3L8S8oKEjMKmZn\nZ1EoFERFRRERESEckF9++aVwxhkYGHDy5Elu3rwpEpMPHDiAj48Pra2t5ObmYmNjI0JX9+zZI6LC\nioqKaGlp4YMPPsDY2JjCwkL++c9/8txzzxEcHEx7ezt+fn5UVlZiYWHBDz/8QFxcHNeuXWPr1q2o\nVCpGR0eZm5vj1KlT5OXlcf36dQICAnj06BFbt24Vcmh9fX3a29spLi6moKCAmzdvIpFI6OzsZOPG\njXz00UcoFAoWFhY4e/as2Pdv27aNVatWsWrVKgYHB9mwYYMoVLr8h4KCAlpaWpifn+err77Cz8+P\n3NxcDhw4QFtbGyqViv379/P0008LvUhzczO2trbY2dlhYWGBQqHg8uXLLF26VHzum2++SXJyMnl5\nebi5uYl5iw7pNzU1RV1dnVCBRkdHMzg4SHx8PHp6esIqbWVlhUwmE0Yq3bp4165dVFZWsnHjRvz8\n/IR/pampiaCgIJHApYMBDw8Pc+HCBSIiIggNDcXa2pqysjLBfty7dy9LlizBzc2Nffv2cefOHVpb\nW5mamqK9vZ033ngDhULB4sWLSUtLo7KyknfffZf09HTRxhkaGnLp0iU8PDzEs/JLrl/FStLPz0+7\nZMkStm7dio2NDS4uLoyPj1NXV4e7uztFRUV0dnYikUgYGxtDKpWiUCjIy8tjZGREkHeTkpJYsmQJ\nTU1NbN++neLiYhHdLpFImJ6eZn5+nsDAQK5cuUJISAgzMzNER0ejr68v2AsajYbBwUGCgoLQarWM\njY0hl8u5ePEir7/+OmvWrCExMVFITR0dHUUeoE5pqdMQzM/PY2Jiwrlz5xgYGCA5OZns7Gw2bNiA\nkZER+vr69PT0sGXLFurq6sjPz2fp0qWcOnWK+Ph4sW3QpUZ7enqKz9MxIB0cHLh+/ToVFRWsWrUK\nR0dH5HI5/f392NrakpmZyZ49e8jIyKCzs1PcJDrilA4pdvfuXdauXYufnx9nz57F398fBwcHSkpK\neOGFF5icnOTEiRNIpVL8/PxYWFjg6NGjDAwMCES6TnAUHBwsYCU6kZZKpWLt2rVkZmZiZWVFV1cX\nDg4OpKWlkZeXx/79+/n3f/93enp6BNHK0dGRzs5OzMzM8PPzw9XVlfHxcWprawX6Tl9fHzMzM3bv\n3o2zszPt7e1kZGQwPz+Pvb09zz33HI8ePRIJ2GfPnhUUr+npaZEG7ujoyF//+ldiY2NJSEgQKVw9\nPT2cOnWK9957D319faFwDAkJ4ZtvvmHRokUUFhYilUqZnJxkz549HD16FIVCwdDQEDU1NQwNDVFa\nWoqdnZ0AyE5MTLB48WL6+vqYmZlhfHyc0dFRXnvtNaqqqjA1NaWyspKLFy+yatUqysrKMDc3Z/v2\n7Vy7dk24SHNycigpKSEiIgJbW1s6OjqEcAsQistNmzb9opXkr+KkMDMzw+LFi3F0dMTU1JSuri5m\nZmYICwsTABRd6lFQUJDQdOv+fWBgIC+99BL6+vrcv38fBwcHAMbHx3F2dhYZibdu3cLc3Jzq6moq\nKiqYnp7G1NQUU1NTZmdnaW9vx9bWFgMDA4KDg8Xqrre3VwR8wmN+ng57lZeXJ6zJfn5+dHZ2MjIy\nQlFRERMTEwwNDdHT04NSqSQqKkoIqubn58nNzWVmZkZU8StXrjA/P8+HH35IbW0t8/PzTE5OilVk\nRUUFc3NzNDc3i1bJ0dFRMAGffPJJOjo6RNLQ6OgofX19rF69muHhYUxMTITN1sTEhKKiIsbHx7Gw\nsBAO1aCgIBoaGsQsRKlUYm9vT29vL2VlZXh7e2NpaYlSqRT8x/+KjN+yZQsGBgZUVlZy6tQpjh07\nxsGDB1m6dCkDAwN8/PHHPHr0iOnpaUGi1s1XAKRSKfb29vT39yOVSrGxscHHxwcXFxf8/f2RSCT0\n9fUxNTVFeno6vr6+IgPB3NxcmNqqqqpEOwXw5ZdfYmhoSElJCcnJyZw5c4bOzk7u3buHUqnE1tYW\nT09PPvnkE3HC050I+vv7CQkJ4fvvv6exsZHOzk6R5xEcHExHRwdOTk54eXmJTcjAwACXLl1CLpcL\n2pdOhl5YWEh+fj53797FzMxMZG4mJiaKwXdFRYVA8uk2Ho6OjjzzzDPMzs4KgpWdnR36+voC9OPu\n7k5sbCxnz54lNjaWsbExrK2thdPyl1y/Cp2CRCIR/HtdvFpKSgpNTU2Ym5sTFRVFX18fjo6O4gYc\nGRkRROWsrCzCw8NxdXWlpKSEK1euAI+JuGNjYwwODqJQKHB2dsbS0hJ/f3/x5bu7u6PVagXkJTQ0\nFFtbW+bn5xkdHWVhYUEkGesMUVKpVODAdEOp9vZ2ZDIZO3bs4NSpUzg4OKCnp4eenh5KpZKIiAis\nrKxEAdTX18fGxobGxkZBifb09CQnJ4dnn32WtrY22tvbOXr0KCkpKTQ3N6PVahkaGsLMzIy0tDRu\n3rzJhQsXCAoKIj4+HgMDA5EUVFBQIIalHR0dyOVycbx3c3MjJyeHqakpTp8+TUxMjKD3WFlZUV1d\njb+/vyAw62zhpaWlzM3NiRWdkZERxsbGYoYjkUgYGRkhISGBN998k+HhYRISErCxsaGsrAylUina\nh2vXruHu7o6/vz8rV64UYSVeXl7Mzc3h7+9PS0sLhYWFAhqim/7rZkI6f4GtrS16enqoVCqB0NNt\nO3Tsx5dffpm9e/cSEBBAb28vd+/eFeBTM
zMzEhMTBeo/OjoauVzO999/z/T0NJ2dnVhaWgqQrUaj\nEcj0+/fvo1Ao8Pf35+LFi6xYsYKBgQEx+L19+zaOjo40NzeLmL2pqSmeeuopiouLWVhYoLS0lI0b\nN9La2iqeidbWVmxsbOjv76e2thYvLy+B1jt69CjNzc20t7fzxBNPkJqaikKhwMDAQDApfX19yc/P\nx9zcnPLy8v9fGI3/n17T09N4e3szMDCAvr6+iDKvr68nICAArVaLqakpGo0GiUTCvn37SE1NZX5+\nnuTkZPbt2yeCZnXBr/D4i1EqlULsMzExwYMHD+ju7mbp0qUkJibS3d1Na2srnZ2dLF26lPDwcJEx\n4O7uTkFBAaampqhUKpycnAAoLS1ldnaWmpoarKysRDaDvr4+FRUVNDU1UVtby88//8yLL75Ia2sr\nv//97wXEZXx8XOy6PTw8BADD3t6etLQ0jhw5wvT0NEVFRTz55JNsKtnxAAAgAElEQVS0t7djb2/P\n6tWrcXV1paWlhT//+c/i8yorK2lsbCQqKoqhoSGKi4tpbW0Vx/XNmzczPj7Ozp07mZmZ4cqVK9TV\n1RESEsLbb7+NsbEx/f39GBsbk56ejomJCQYGBiIQR4e8s7KywtLSktDQUOzs7FhYWBAFbd26dVha\nWjI9PY2/vz8pKSm88cYbSKVStm3bhr6+Pp9//jmpqank5eWRmJjIs88+yxdffCFo2ACNjY3IZDLh\nfAwKCuLRo0eMjY0JvHlZWRl2dnZ4enqyYsUKEhMT8fPz48yZM5SWltLS0oK9vT0KhUKkRzc0NLB+\n/XoiIiLQ19dHLpcLj4puThMcHExjYyNnzpzh1KlTXLp0iaNHjzIxMUF1dTW+vr7Mz89ja2vL+vXr\nAUhKSgKgvr4eExMTPvvsM+rq6vD29mZ4eJjm5mZ6e3t55pln2L17N88++yze3t7CnVlUVCRQ7TrQ\nMDw+5fb09PD++++ze/duUWjv37+Pq6sre/fu5dSpU8JSf/bsWRwcHJiammJ2dpbNmzdjaGjI0aNH\ncXd3F2vvX3L9KoqCvr4+EokEc3NzGhoayM3NxdnZmf3794vh0dDQENHR0Vy6dImGhgYOHTqEiYkJ\nERERxMXFYWlpyd27d5mYmCA4OBh4TNOJj4+nvb0dIyMj7t+/L04lra2tdHd3i9zK8fFx2tra6O7u\nFsd+3WrTzs4OFxcXUW0DAwMpKSnBxsaG8PBw3N3diYyM5N69e6hUKnbu3Imbmxuffvqp2CM/9dRT\nzM/Pc+rUKcGW3L59O0VFRUxPTwNw48YN1Gq1OM7rqM+6ifmSJUv+hbdQWlqKm5sbGzduFP19S0uL\nQN/r4uu8vLx45plnuHLlChqNBg8PD9zc3GhpacHLy4vw8HBeeuklbGxsxK49OjpaJA+9/PLL9Pb2\nsmfPHtatW4darWbDhg3Y29uTmZmJubk5y5YtY25uDkNDQzIyMnByciI7O1vg6nQBtX19fSwsLDAz\nM8OJEydoaGggJiaGlJQUAGJjY9FoNNTW1lJTUyMYDqOjo/T392Nvb4+BgQH9/f1Ci3HhwgVMTEyE\niWtubo7Z2VmB9wPEQ19TU4O7u7so/s7OzgLSqtuUJCUlERISwp49ezh06BAFBQVotVo0Gg2RkZF4\neXkREBAAPE4306H4enp60Gg0WFtbMzIyQmNjI87OzqLls7e3F7QqjUbD8PAwgYGBvP766yxatEic\nQnT3bmZmJn/84x+pqanh1q1b3LlzB5lMxquvvsqmTZvw8PDAwsICf39/3N3dKSwsJCIiAoVCwYkT\nJ1Cr1ezcuRO5XP5/xVP41bQPurAWY2NjQSWqrKyktLSU7du3Mzo6yptvvsmOHTvo6enBx8eHn376\nSRhQgoODsbKyIiQkhJycHAD+/ve/s3XrVmJiYhgdHSUiIoLu7m5GR0cJCgpiampK9OqVlZW4uLgQ\nHR3NxMSE2KdLpVL6+/u5d+8e69atAx5z+XQoeN0x8uLFi/zmN79hcnJSUIo3b94sQj7ef/99CgoK\nkEqlImBWqVSSnJxMdXU18FiBqVKpSElJoba2VoiYdNF0paWlODk5IZFISEhIEAE6bm5uArSqUCjo\n7u7G0tKSFStWUFNTg1arpaioiNdee42enh6Ki4upqqpi0aJFHDt2TDywjo6OjI+PExoaikQiITg4\nmJSUFMrKyggJCREBMAsLC7S0tGBqair0C3l5eeII39jYSEVFBa+++irnz59HoVBQWVkp/P7j4+Ni\n66PDxOne6EePHmXFihUCbnLo0CH+8Ic/sGbNGkZHRwWsRCaTMT09ze3bt5HL5UilUtra2pBIJISH\nhzMzMyNSsuGxniAzM5Pf/va33Lp1i4CAALy8vDh37hwVFRW8/vrrTExMUFpayoEDB0Q+xg8//CBk\n7GfPnsXX15d79+6JUOCzZ8/y9NNP8+OPPxIdHS2iCnQtXWlpKaWlpYyPj4uCNDExgYWFBePj4xga\nGvLo0SMqKiqQyWRCp6BQKFi5ciUjIyMUFhYK4ZWlpaVILI+MjGRmZoaGhgYMDQ1xd3cnPz9fMCtS\nU1MJDQ0lISHh/0rR+KsoCvPz8+Tn59Pb28vw8DChoaGcOHGCzZs388ILLzA7O0tERAQ+Pj4MDg6S\nmZlJU1MTKSkpeHp6IpPJOHHiBGFhYQwODpKcnMxHH31ETEwMWVlZ2Nvbi2GU7hcQHx8vsgH9/PwY\nGBhg3bp1qFQqSkpKWLp0qRhWOTg4CFstPF5L6QZVixYt4vvvv+fu3bv09PRgYGAgJLd//OMfcXJy\n4tlnn8XBwQFzc3PMzMyEeEq3Rjt9+jTweEqsUql49OgREomErKwsFhYWcHZ2RqlU4uHhQXx8vJg8\n9/b2snnzZlxdXbl9+zZGRkbcvHmTxYsXY2Fhwc2bN3F1deXs2bP4+Pjw9ddf89Zbb4mAncjISOrr\n62lsbMTNzY3r16/z7LPPCs+GzokZFxcnhmf29vYiVbm5uZnJyUkOHTrEkiVLiI+P58yZMzzzzDNC\nthsUFER2drbQm+hwaH/5y194++23KS0t5dtvvxWt2YoVK3B2dmZkZAStVsuRI0d455136OvrE65L\nneVbIpFQU1ODUqlkdHRUgFDr6upYtGgRrq6u3Lx5E0CoKb///nvUajV79uwhNzeXlJQURkdH6e7u\n5ubNm8TFxXH58mVCQkKEd+P06dM8ePAAX19fSkpK2Lx5syBFTU5OUlpaioeHB93d3ezbt0+oIhsa\nGtBoNJiZmYlUrLKyMgIDA9FoNGJAePXqVTw9PYUjFx5rcDIyMpienmZwcBAzMzMUCgWGhoakp6ez\nfv16du/eTWpqKv7+/kxMTAgxlFar5fnnn6etrY1Vq1bxt7/9TaxUf8n1q2gf1Go1crkcFxcXnnrq\nKcbHxzEyMsLIyIhjx47x3XffUVpaSnd3N1988QWHDx/G3t6e2NhYQkNDqa6uZuvWrajVagwNDYU6\n
Tict1a3GZmZmMDMzIzw8HK1Wi1qtJjg4WKQq6TzyJiYmTE9PMzs7K6TRERERzM8/BlMbGRmJ9air\nqyu1tbWo1WqBSGtqaqKmpgZbW1tu375NZ2ensO82NTXR3d1Nc3Mz1dXV5OfnCypOV1cX7u7uSCQS\nJBKJ+PmLi4uJi4ujo6MDY2Nj2tvbKSsrEzSjQ4cO8f7779PW1kZCQoKQduuGc56enixdupQ9e/YI\nVJujo6MYXM3Pz1NYWCgQaDKZjMrKSpF14e/vz+XLl0lISKCvr4+uri6USiV2dnY8ePCAZ555RsBN\ntmzZgo2NDaGhoeJtv2LFCqRSKY6OjuJBSE5OFjmWEokEuVwOIP7MxMSE8HXcu3cPExMTfvjhBz75\n5BOBxxseHqanpwd7e3txHzk4OLBv3z5RtHUQ3z/+8Y+COXHw4EEOHDjAwsIC2dnZrF+/npmZGX73\nu9+RkZGBqakpGRkZDA0N0dnZyYULF7hy5QoXL15kcHCQjIwMQYp6/vnnsbCwICoqSugQYmJiRNz8\n9PS0KGC6LVJrayu9vb1iiKlL4yooKBDKw48//pjW1laxdYuLi6O6upqcnBx2795NW1sbv//974HH\n4jJXV1dOnDjB4OAghw4dIjc3lzt37ggYj+6F9kuuX0VRsLKyoru7m+7ubjHkUygUlJWV4ebmhkwm\nY+fOnUgkEoKCgnBwcODYsWOo1WpOnDjBo0ePyM/PZ9OmTVhbWwsW38OHD1GpVMzNzVFfX4+5uTmR\nkZEsXboUqVTK7Oys8BcMDAygVqvRarXI5XIhgjE1NcXQ0JD29nahpDx58iSjo6NER0dTX1+Pk5MT\n69evJy4uDmNjY5Fa9OOPPxIQEEBeXh4ymYyGhgampqYoLy9namqKjo4OpqameP/994HH4qXZ2Vm8\nvb3Zs2cPW7duxdfXF4VCQXJyMklJSfT09Ai02vr16zl16hQqlYrk5GQRKBIaGopWq2Vqakpgx2Zm\nZgRhKC8vj4SEBExNTRkaGuLRo0dYWFiwcuVKzM3NaWtrw8DAAKlUytDQEN999x1ubm40Njbi4+ND\ne3u7SMeSyWQix7ChoYGDBw9y+vRpJicnxdt5bGwMPT09WltbaW9vJyEhgfv37wvw6fr167GysgIe\nb4za2trE78jDwwNvb29yc3NpaWkhPj5egGAGBwfx9PREX1+f2dlZAgICaGlpQaPRsGrVKoqKisRM\n4fPPPyciIoKioiKee+45XnvtNR48eEB4eDhFRUXMz8+Tnp7OihUrMDY2FhRlDw8PpqamePHFFwkL\nC2Nubg4jIyMRXqNSqaivr6e/v5/+/n76+vooKioSSP2Ghgbc3d1pbGzk22+/FfJlKysr8fNIpVJB\nidLdY729vUilUoqKisjIyEClUmFiYkJDQwPnzp3jm2++4caNG0KRa2trS1paGuHh4WzevFnI8Bsa\nGpiZmaG8vPwXP4+/ivZhbm6O2tpa5ubmxFBHRyPW3bBHjx6lra2Nf/u3f8PPzw9TU1OuXLlCUlIS\nRUVFQt5aUFAgqqJUKsXMzAw9PT2io6OFDNnQ0FCEq+hssh4eHri6uoovMCoqivj4ePT19VGpVEKU\nA4+PdsuXL+fIkSOsWrVKcPKMjY1xd3cnIiICrVbL/fv3KS4uxsHBgfb2diYmJsT6U5c+vWHDBvHW\naWpqIjExkZaWFn788Ufh59BoNJSXl5OSkkJFRQUHDhxgeHiYtLQ0PDw8sLW1pbOzk127dpGfn091\ndbWQ8Jqbm5Oenk5gYCDz8/OEhIQQGhrK66+/zsqVK0WGQEZGBgMDA1RWVooN0MTEBFZWVnz88cc8\n88wz2NvbY2tri76+PpWVlbzzzjt88MEHyOVyzp07J6LXFxYW2Lt3L3/605+4evUqUqmUN998k927\nd5ORkcGNGzcErj89PZ3W1lbc3R8HizU0NIgcxtLSUoKDg3FwcGB+fp7169dTVFTEgwcPxGoxLCwM\nKysrBgYG6O7uZsWKFRQUFDA5OYm/v7/Asu/cuVOsRb28vMjPz2fVqlWicEdHR7Nt2zZef/11If9W\nKpXCM7Nu3ToxyTcxMeGDDz4AHhcFXZCuThsSHR1NVlYWf/7zn/Hz8xPW7/z8fGQyGTY2Nhw8eFC0\nY8uWLRPrS53RSifhnpubw9nZmZmZGfr7+9m4cSNKpRJ3d3cqKyuBx4Dip556iv7+fiwsLLCxsSEy\nMhJnZ2ccHBwwMDAQp+dfcv0qTgo6Ym9ISAiOjo6Ym5vj6OiIkZERJ0+e5MqVK0xOTopItOHhYSoq\nKli0aBEtLS3s27dPrPYCAgLw9vYGHr95zc3NiYmJQS6XI5PJBP7K0dERY2NjhoeHMTAwELi28vJy\nVq5cKf5bd3c3FRUV4saEx/HjlpaWGBsbiwBUX19f9uzZIzT8zc3NDAwM4ObmRlBQEB0dHdTW1nL1\n6lURNLJkyRIRegKPJ84XLlygr69PnAh0tGqlUklTUxM5OTk8ePCAb7/9ls2bN9PT00NYWBgpKSks\nW7YMf39/bGxskEqlAp22ZcsW+vv7GRkZob29nYiICN555x3Wr1+PmZkZlpaWpKSkIJPJBB4uIiIC\ntVrNgwcPWL58OXp6eiJwx8fHh2XLlvHFF1/Q0dHBJ598gqWlJV5eXnh6ejI9Pc17771HSUmJWA/r\nFIG5ubk0NjaKzI2tW7eyfv164eJ7+eWXuXjxorBz19TUCB5AX1+f4HXqTpB6enoiT9HIyAgnJyek\nUikzMzMYGhqKglteXs6PP/5IZGSkUEf6+fkREhLCyMgIGzZsEP6HF154gbVr14rW6+uvv2bbtm0i\nz9La2lqcbHx9fSkoKKC1tRVzc3NBhrK3t0cmk6FSqcS9ZmZmJhLHJyYmsLGxITo6muPHj5OZmfkv\ngUOJiYnMz8/T2NjIW2+9RXt7O3K5nMuXLwuEfl9fH1u3buXDDz8kOjoac3NzsrOzKS0tFY5bb29v\nIiIieOWVV37x8/irMEQdPnz4T3v37sXd3Z3Ozk4ePnzISy+9hL+/P9HR0cJOvG7dOi5evCiGbE5O\nTixfvpzJyUnMzc1Fz67Varl+/Tq7d+9GT0+Pq1ev0tjYyPz8PObm5tTX1zM6OkpMTIzo33XKRV1Q\nrbGxMT09PbS3tyORSHBycsLAwIB//OMfrFy5EkNDQ+bm5sSKCR5z+c+ePUtnZyezs7MiA9HT05PG\nxkba29uZm5tjx44dFBUVsWPHDqE1uH79OikpKQwNDREfH4+trS2GhobIZDKBeWtraxP8xomJCWJi\nYtiyZQvNzc0EBgZSVlYmsh47Ozvx8vKioKAAMzMzPDw8OHbsGFKpVDz4Ordgbm6u6J9lMhmRkZG0\ntLSgp6eHj48PdnZ29PX1MTQ0REtLi3BD6uTRSUlJlJSUEBAQwFtvvcXMzIxgNfb394ui+f777xMU\nFIRUKkWlUpGWlsbt27eZmZnhwoULVFZW4u3tjaOjo/DCO
Dg4MDAwINbKulmMRqPB0NCQ+vp6MRBt\naGjg1KlTBAYGYv7/svfm0VGW+b7vp0LmeZ7nyjwHMidAIMwBggyKAoozW6W1t9q6bdtN290ObW9t\np25pWkFBRkEgECAkhEwkIWQkI5nnylSZK6mkkjp/0O9zu9fd6zT73Lvucd913rVcLmOIMan3ref5\nPd/v52NigpWVFaOjo1y8eFE4IFNTU5mcnBSquJs3b+LgcF+a7uHhwdjYGKampmL4qq+vT3JyMqGh\noejq6mJlZcXk5CQrVqzgyJEj7Nq1i6GhIebn53F1dRWvrdjYWAoKCoiMjGRiYoKamhrm5ubE6dTw\n8DDu7u6o1WoxH/Hy8hI2NC8vL4yMjNi3b5947XV0dIi69cMPP4xWq2X58uW4ubkxMDAg5LW+vr5U\nVVXxzDPPUFJSQnV1NRYWFnz77bf/fQpRkijj3r17+Pr64ubmxptvvsmVK1fQaDRYWloil8u5e/cu\n77//PhERETg7OxMfHy8KN3l5eVy6dImgoCCRDHNycmJqakpAVOzs7FCr1Tg5ObF48WLOnTtHTk4O\ntbW1FBcX09vbKzx8jY2NArCqq6uLvr6+CIBI+G4piqzRaIiJiWFiYoJVq1YJOejg4CDW1tYUFRWx\nYsUKnn76aQICAsQLr7a2lvDwcGHJLi0tZdOmTXz++edUV1fz0ksvERgYiLm5OQqFgs7OTnbs2CF4\niNPT03zzzTesXbuWgIAA0XeYnJzExcUFLy8v5HI5s7OzlJSUkJSUJJgBUizW19cXGxsbli5dytat\nW9HT02NgYIDY2FiCgoIoKCgQPkcLCwteeOEFFhYWGBgYYOXKlVRUVFBSUsLy5ctpbGzkwoULbN26\nFRsbG2xsbPjoo494//33yc3NZd++fQQGBgqs++TkJOXl5XR2dvLyyy8DCKBteHg4d+7cwdbWlsce\ne4zg4GBiY2MJDAzkzTffZOfOnTzzzDMkJyfT1tZGS0uLGCK7urrS39+Pqakp9vb2wP3k6uDgIOnp\n6YJalZ+fT2xsLMuWLSM8PJxf/vKX5OXloVQqiYmJISUlBWNjY9ra2rh8+TIVFRWoVCohHwa4evWq\n8IMqlUp0dXVZtGgRxsbGqFQqfv/733PkyBE6Ojo4evQos7OzNDc34+XlRVhYGA4ODiwsLBAeHo6N\njQ1mZmbA/WzM/Pw8n332GR4eHrS1tREREUF4eDivvPIKJiYmor8iiX67urqYn59Hq9USGxvLV199\nhZOTEz09PSLZ+SDXT+KhMD8/T0hICA8//DCtra2ipGRgYEBBQQFdXV1kZmYSGxtLRUUFZWVlqFQq\nTp48KX65UmiptrYWd3d3ANEV0Gg0Ivvg4eEhPhYVFUVQUJDozAcFBZGUlMTIyIgwTTs4OODv709N\nTY2IOUdGRhIQEEBwcDBpaWksXbqU0dFR6urqiIuLIzIykvz8fIyNjTExMSEpKUkkFB9//HF6enqE\nUTgjI0MsGT09PQVpycHBgVu3bomlpoRBd3JyEohvPT09goOD0dPTIyMjg5qaGuFYOHfuHMbGxoSH\nhyOTyXB2dmbJkiUYGhri6OjIp59+yjvvvIOnpyeurq44OTkJZXpUVBQTExMoFAqMjY25ffs2P//5\nz+no6ODdd9/FxcWFiIgIxsfHefvtt0XQKS0tjYsXL3Lz5k2OHDmCh4cHxcXFfPLJJ6KAJGVK3N3d\nuXv3Lh9++CGWlpaUlJQA9+dLksZv/fr1lJeXC4JQTU0No6OjAk3/3XffCSuTu7s7ra2tBAUFiUTh\n4OAg+fn5wP2HzRtvvCFucin4denSJYFW/8UvfoFCoSAkJITR0VH++Mc/8tprr7F8+XK8vLx49tln\nUavV5OTkiG2Jvb292OKampri6OjI+Pg4/f39+Pv7ExISwsLCAsnJybz44otMTU3R3d1NZ2cndXV1\n/Pjjj/j5+QnQi5SUdHR0xMTEhKioKLKyskhJSeHLL7/ExMSE06dPc/nyZR577DE6Ojr4+OOPhQh3\nbm6Oubk52traRJRaKns96PWTeChoNBr09fX54YcfcHV1xcXFherqapYuXcrIyAjV1dXCRHTixAki\nIyNZvHgxWq1WcPSkY52QkBChCTc2NsbZ2Rl/f3/q6uowMjKitbUVLy8vSktLxTRduuHt7e3x9vZm\nbm6ORx55hP7+foaGhgS6XQJr3L17FxcXF9zc3LCysuLmzZu4urri6+tLfn4+RUVFbN++nbKyMvT1\n9QWaLCYmRrgXPDw8OHHiBF1dXRw6dAi4j3jTaDSo1WqWLVtGXl4eFy9eFGk2if7z7LPPEh8fT0tL\ni5DLjI+PExgYKHyWaWlpDAwMiIekv78/169fB+4f1e7cuZN169ZRUFBAX1+f6IV4eXlx+vRpDh06\nhJubG3v27BFHdIGBgbi5uQnmpb+/P88//zzDw8M0NDTwhz/8gWvXrmFoaMiuXbs4f/489+7do76+\nHjMzM2ETv3jxIlNTUxgaGnLjxg1sbW3FiVFbW5tQ/HV2djI3N4dMJmNkZITJyUmWLFnC8PAwCoUC\nc3NzWlpaRHoxLCyMkZERIiIi8Pb2xtvbW8w0hoeH+eabb3B1deWNN97g2WefJTU1la1bt+Lq6oqF\nhQX5+fns2LGD+fl52traePzxx3nrrbf405/+hJWVFZcuXSIkJITo6GhRiBoeHsbQ0JD9+/cLUrVW\nqxXKAEl539DQINq0Li4u9PX1MTU1hY+PDyMjI4Lt2N7eDsD169fZtm0bzc3N/Pjjj+jp6bF//34u\nXLggHCYTExMcPHiQ1157jfr6eqanp/H09GRoaAgdHR1xxJ+WliYSmA9y/SRmCl9++eWB+Ph43n//\nfVEldnBwwNTUFH19fXR1dbG2thaSUAkZLjEVIiMjxapBYijk5+fj5+dHQ0ODOMvOzMzExcWFr776\nipKSEtzc3Ojp6eHGjRtYWVmRn59Pe3s7FRUVImx05swZITJ5//33GRgYYO/eveKU5McffyQwMJDB\nwUFaWlqEp7KwsBA3Nzfs7OzEwEvqUXR1dYmBnJeXF/7+/ly7do2YmBgsLCx45ZVXxPcvl8sxMjJi\ny5YtWFpa0tTUhJubG56enhgZGdHQ0AAgarhjY2OsWbMGZ2dnTp8+zejoKAsLC9y4cYOzZ8+yevVq\nbty4weTkJFZWVpSWlrJlyxYaGhpEMCw0NFTQlbKzs7lx4wY/+9nPAMjMzBQ8x7KyMszNzZmfn2dw\ncBCVSsW6desE0lxyZgQHB/P555/j5uZGR0cHenp6zM3NMTY2Rk1NDcPDw3R2dlJZWcnatWuxs7Nj\nxYoVGBkZcfnyZYyMjATWXi6XMzIywrFjx4QcyMnJicrKSrKyskSWora2FnNzcyIjI/nrX/9KXFwc\nixcvpq2tTUz6Z2ZmCA4Oxt/fn1OnTuHp6SmGvBMTEyIG39XVJfwTxcXF2NjYMDExIUJwEuPg7y3R\nSUlJdHV1cerU
KVFxPnv2LH19fSKg5uHhQV1dHe7u7pSXl7N161ZGRka4cuUK/v7+5OTkiJaoh4cH\n6enpxMbGcvDgQeLj44WcZmJigvb2do4dO8bw8DC+vr60traKKP5f//pXKX7+/85MQSaTfSOTyQZk\nMlnN333sgEwm65HJZJV/+2vD3/27f5PJZM0ymaxRJpOtfZCHwvj4OHV1dezatQsXFxeSkpLYuXMn\n9+7do6mpiZUrV9Lc3Iynpydzc3OiI5+UlERubi7t7e3ExsaKAdLc3BxwX0W/a9cuLC0tOXPmDDKZ\njOHhYQFilco3vr6+InMO99+xe3t7mZmZwdHRkampKa5cuSLYjx4eHri6uuLt7Y29vb1Ql4WEhNDf\n309LS4sICFlYWFBeXs7Vq1cxMTFhamoKGxsbSktLsbCwoL+/X5xqWFhYkJqaytGjR1EqlcKUHRoa\nyq1btzh37hxeXl5iMOnm5oatrS1xcXEiyrx48WLKy8s5efIkTzzxBF999ZXoO6SkpFBdXS1kr9Ip\ngnRS0tHRIR6wvb29rF27lnXr1rF+/XqysrLw8PCgoKCA3t5eioqKqKqqQqlUMjk5yfz8PFNTU2Rn\nZ7Nv3z7s7e2xsrLC2toalUpFeHi4mHtITo7Ozk5SUlJwcnIiNDQUQJTF6uvryc/P5/HHHxerA2lA\np9Fo+POf/ywKcgcPHhQ2qe+//x6NRoOBgQF1dXXU19cDkJiYyNTUFE5OTty+fZuhoSH8/f1pbm4W\nhbjW1lbxYG1qasLOzo7c3FwRCpPM0RI0Be6fnElUrlWrVuHj4yMIVV5eXvziF79ApVLR2NjIM888\ng1wuZ9OmTQQFBSGTyUhJSWFqaoqoqCguXbok+jV3795lzZo1yGQy4uLi+OKLL1AqlSiVStLT06mt\nrRWhPBsbGxHEW1hYoKqqipCQEPz8/Dh27BjPP/882dnZD3IrAg+2fTgCrPtPPv6JVquN+NtfGQAy\nmSwI2AkE/+3P/Ekmky36Z/8Ba2trhoeHmZ2dJT09ne7ubomAGGkAACAASURBVCorK2lvb2fjxo1U\nVFTg7OzMli1biIyMxM7ODoVCgbW1NceOHSMyMhITExMxLZeyB+bm5ly6dImioiICAwMJCwujv7+f\nyclJ7OzsCAwMpL+/n4mJCRYvXoy/vz/j4+NkZWVRUlLC5cuXMTc3x8jICE9PT/FCKC8vJzk5mbq6\nOpydndm+fTvW1taCOJ2WlkZiYqL4BUkzkq6uLkxNTQkMDCQtLY3GxkZWrVolLEleXl5kZWURHh7O\n9u3bBV5LqVRiaWlJZGQk+vr6zM3NsWzZMhH3LigoELVlSW+2Z88eZmdneeGFF5iamuLw4cNMTEzQ\n2dnJ3r17+f777ykrK2NhYUFg1KVVS3x8PIaGhlRVVYlUnYWFBVlZWRw4cEDEr9esWUNzczOlpaUY\nGhpiZ2fHqlWr6OvrY35+njt37tDX10dbW5uwJH/66ac4OjpiZGREdXU1XV1dREdHi58B3C/I2djY\nEBoaKnBlQUFB6OjocPnyZRwdHTl48CBGRkbo6OigVCqxsLAQRi5pGS6JbgGKiorQaDRCP1dZWUlm\nZianTp3i/fffx87OjoiICEZGRsTqVBKp+Pv7c/78eYyNjenu7qa6uloQtqTWaG9vr+A4ZGdnc/fu\nXbq6uigtLSUlJQULCwt+/PFHFAoFbW1t1NbWYmRkRFNTE3V1dVhaWuLq6irmS6tWrRK/98bGRjQa\nDQEBAXz66ae0tLTg6+tLeHg4CwsLdHZ2EhgYyGeffUZRUZH4//Pz82PFihWcO3eO9evXP8Ct/ref\n/z/7BK1WmwcoH/DrpQEntVqtWqvVtgHNQMw/+0NqtZrW1lasra15+OGHBUItLCyM6upqYUDSarW0\ntbXxy1/+kq6uLrRaLYcOHaKnp0dg06QiCMDy5cuZmZkR8tqamhqysrKEf0EyNW3evBknJydCQkKQ\nyWSsXLmSRx99FDc3NwG0kPINcD+8dPnyZbFi+OKLL2hpaREPl+vXr9PZ2Ul+fj4LCwuYm5vT3d3N\n9u3bCQoKoqOjg/r6epKTk/nTn/4kwK1NTU24u7szPz8vuJNSSUnCgykUCu7evUt/fz/GxsasX79e\nHNc98cQTnDt3TmxZJIeGZHUKDg7GyMiIuro6jh49yszMDFZWVly/fp3W1laWLFnCZ599Rl1dHf/x\nH//BlStXhEa+u7ubHTt2sGjRIpKTk9FoNNy7d4/XX38dGxsbMZe4ePEiR48excTEhK1btwpoTUND\nAwYGBiLWfODAAezt7Wlububzzz8XD/Lo6GgmJyeFdcrJyYmKigp8fX2Ji4sjPDycoKAgtm/fTkVF\nBUqlkvfee4+goCAR9rKwsCAuLo49e/aIgA9ARkaGMFLZ29sjl8vZvXs3crmckJAQuru7RTT6N7/5\njTB2L1++nJdeeon5+Xn6+vpETR/AyMiI5uZm9PX1KSsrIzQ0VES7pQfAvn37yMrKIjQ0lGXLlmFr\na8vjjz+OhYUFW7duFTe3lC0BBDeip6eHiooKgoKCUKvVnDx5EoVCgbOzM7a2tmzevBl7e3syMjI4\ncuQI77zzjnhYSys6R0dHamtrH/AW/n82aNwvk8mq/7a9sPrbx1yArr/7nO6/fex/eqnVatasWUNi\nYqLI1Uu5fAmIIk3vx8fHxflzTEwMfn5+dHZ24u3tzeDgoBByAKJ0pFKpuHnz5j+8KCWvgeRUaGlp\n4fDhw2KW0Nvby9zcHK2trSQnJ/9DXba1tRVDQ0PB+U9NTWXRokWibm1oaCgAL7a2tuzYsUMsMQcH\nB6mtrRXGaimUA/flI+np6SQmJor+R3NzMwqFgvb2dtLS0lAoFCKUpFQqOXv2LG5ubnh5eXH48GFc\nXV0xNjYmJyeHyspKjIyMGBsb49ChQyIzMTIywsGDB3nqqacEAlypVIpBr7m5Oe+88w67du0SRmMJ\nT5afn09GRoaYI5ibm+Pj40NAQADu7u6EhYXx2muv0dzcLDTsaWlpJCcn8/jjj7Nq1SpBoZLYl5GR\nkaJgJK0yOjo62LhxI4GBgRgbG4t0ob6+PjY2Npw6dUq0aL/99luxfDYyMmLNmjW4urpSWFiIt7e3\neI15eHgIO/iyZcsoLy9nfHxcoN/u3LkjZh2HDh0SajlJFb9+/XqSk5MJCgoS7Ed7e3tSUlJYuXIl\n3d3d1NfX8/DDD2Nubo6lpaWolqempoq8g4eHB/r6+ri6ulJWVsaaNWvo6OgQGR24b52OjY2lvLxc\nQIZmZma4ffs2g4OD3Lt3j6NHj7J69WrS09MZGxvD398fS0tLzp07x7p168QqNT4+/v+T7sOfAW8g\nAugD/uO/+gVkMtlzMpnsjkwmuzM5Ocn4+DjXr18nKSkJd3d3zMzM8Pf3JywsDICFhQWam5u5du0a\nzz77LKGhoZSXl1NeXo6enh5qtZr169ezdu1aUUWWyiWJiYkCACIt183Mz
KiqqiIvL4+rV6+K5erM\nzAwKhQJPT0+RM8jKymLDhg1kZmYCiJmFBMKQ4qehoaGCLOTt7c3+/fvFqcrIyAgZGRmUl5fj4uJC\nWlqasFA9+uijAHR1dfHaa69x584d6uvrRVV2w4YNWFtbo1Ao6O7uxtbWFpVKRU9PD3Nzc1hZWdHR\n0SH+3tnZKRqV7e3tzM/P093dLRT3OTk5TE1NkZWVRWNjI4WFhbz99tsCfyaBQSsrK0lPT2fDhg1o\nNBp+8Ytf8Nprr2FgYMCyZct45JFHKC4uFrOMW7du4eHhITDpvr6+1NbW0traygsvvEB0dDSvvvoq\neXl53Lt3j7i4OOB+KvDKlSvA/ZvM3Nwcd3d3ZmdnGRoaor6+Hj09PTw8PLC0tKSoqIjVq1fz2muv\n4evri4eHB4ODg0RGRrJnzx4WLVoknBWLFy8G7ucU1q9fT3FxscDESw8BSZA7MzODWq0WwJm2tjb+\n8pe/iFWg5KUsLCwU7+h2dnZCUmtkZIStrS0NDQ3cvn1bYNmfeuopgb/7+uuvuXLlCj09PXz44YcM\nDQ2hUCgoKytjfHxcYNNycnKoqKhg586duLi4oK+vT2NjI7m5udTV1XH48GHGx8dpbGxkx44dzM7O\niiyNo6MjVVVVGBkZsXr1ag4fPiwCWg9y/S89FLRabb9Wq53XarULwCH+ry1CD+D2d5/q+reP/Wdf\n4y9arTZKq9VG2dvbExAQgEwmo7a2FlNTU3ETXb58WdRGDQwMBHehrKyMTz75hKqqKlauXImOjo5Q\nd0vHUIGBgchkMn744QfUajUKhYLBwUE8PT1FJ8HLy4vW1lZ0dHSErVmr1XLp0iUee+wxTE1Neeih\nh4TZCRCcQh8fHzGwUyqVlJeXc/36dS5cuICOjg4FBQXA/aWgk5MTDz30kAiTtLa2olarRd0b7g+t\nJiYmRDmpoKDgHwag0jB28+bNPPbYY7z99tts2LBBDOc8PT156623MDY2ZmhoCLVazbVr17h37x5V\nVVWoVCouXLiASqUSVqJXX32VtWvX8tVXX5Gbm8v333+Pi4sLcXFxNDY2CgbD9PQ0Bw8eZGBggNDQ\nUBwcHMTK4l//9V+5cuUK9vb2ZGZmcvLkSRGgCg0Npb+/nwsXLvDZZ59x9+5dVqxYga+vL6GhoahU\nKpqamsT5PNyvIwcHB+Pk5ERtba1gH/z5z3/Gy8tLrNIGBweJiori1q1bhISECM6jVqvlq6++QqPR\ncPbsWeD+HCg7O1s0GFevXk1ycjKzs7OsXr1atBYzMzOJi4sTq4n+/n6+/vprXnjhBb7++mtGR0fF\ngxPuMztKS0sxMTHBzMyM/Px8urq6CA4OJioqSjAUs7OzGRkZwcrKCktLS9avX8/WrVuZnJzk1Vdf\npbS0lIGBAZKSkoD7RispISmd8KSmprJlyxZ++9vfcvr0aXJzcwVENiEhgffee0+E8M6cOUNHRwd5\neXkib/Kg1//SQ0Emkzn93T8+BEgnExeBnTKZzEAmk3kBvsDtf/b1NBqNsOeWlpbi6+srTDy3b9/G\n3t6evLw8sT+VIrIvvvgiO3fuxNHRER8fH4Ftl5BWxcXFolQjxWsVCgUFBQUMDAywatUqPD096e7u\n5sKFC2IAJDkUpCyDtGWRkoe9vb24u7sLVVhFRQX6+vr4+PhQUVHB1q1bWbZsmYjRmpqaEh0dzdGj\nR+np6aG2tlb82T/96U+CipOQkIBMJhOSGRcXFwwNDUlPT+fQoUNCmnPw4EGRxYiJiWHdunUEBQVR\nUlJCfn4+vb29VFVVMTo6ypIlS7C3t2f16tXs27ePtLQ0Xn75ZaKiojA3Nxckp6SkJPbv38/evXuZ\nm5ujqamJgIAAkpOTqaysZGZmhoyMDHx8fBgeHub777/niSeeoKioiMLCwn/Qn2k0GuRyOUNDQ8zM\nzHDjxg0BBklPTyc5ORlnZ2eRnwgODhYDtoyMDKampgQDUiqvSauQU6dOMTc3R09PD0FBQfzwww8i\nYXjmzBkRdpPak9ID19TUlLGxMeGkkPocAwMDFBcX4+Liwr1793BxcaGtrQ2NRkNeXh4/+9nPsLe3\nF+an6OhoVq9eLVqSTz75pAgKjYyMEBQUJOzhSUlJAsxrbGyMQqGgrq6Oe/fu0dfXR1RUFH19fRQW\nFpKcnExPT49oSX7zzTc0NTUxMjJCeno6AQEBNDQ00NnZSU9PDz/++CMtLS0kJCRgbm5ObGwsnp6e\n6OrqsmLFin+Ik0dFRWFtbf3A9/eDHEmeAIoAf5lM1i2TyZ4Gfi+Tye7KZLJqYAXwcwCtVlsLnAbq\ngKvAi1qtdv6f/TdmZ2cxNDSktbUVDw8PhoeHKS0tFclDQOT9pUx4a2urGOz9/X59ZGTkH1JsEjVY\nemB0dnaydu1atm3bhrOzM6Ojo6xduxZjY2Py8vIICAggNDQUW1tb2tra+NWvfsXAwMD/rWUWERGB\nXC4XJJyRkRFOnDjB4OCgeDFLk2OlUomJiQnu7u7Mzc3x2GOPYW1tTUFBAc8995x4KIyMjGBkZISp\nqSk2NjbilEStVvP000/T1tbGvXv3UKvVHD58GK1WS39/v+gX7N69m6SkJHx9fTE0NKSrqwu5XC4E\nKsXFxXR0dFBdXU10dDRGRkbMzMxQXV2Np6cnlZWVDA0N4eHhQXx8PDKZjK6uLsrLy8nJyRFQVX9/\nfywsLDh69KjA0YWEhDA2NoatrS2Ghob09PSI+K101CylOBMTE9myZYvAppWUlIgYbnx8vGBNSicG\nERERwp0ZEBCAXC4XNCSpq+Ho6Eh3dzd5eXk0NzcTEhLCxx9/LKbunZ2dODk58dxzz3Hv3j1mZ2cZ\nGxvjzJkzFBcXEx4eTlpaGkFBQeTm5vLee+8xNjbG6OiosIylpKSwsLDA8ePHBSauvLwcT09Pvvzy\nS2QyGbq6uiwsLDA/P8+1a9eorq4mMDAQfX19srKyqKmpEbmE48ePixXe1NSUWBEBvPzyy3R2dqJW\nq9HX1+fw4cOcPn2aEydOYGZmxq1btxgbG+PkyZMMDg4yNjaGUqkUoTCJyxgTE0NbW9t/aabwT6vT\nWq320f/kw1//Tz7/d8DvHvg7AEHmValUPPHEE5SUlJCQkIBCocDV1RWlUom5uTkxMTEUFRXR2tpK\nTk6OWFmMjo7i6uqKnZ0d+fn5Yg6h1WpJTEzk1q1bNDY2imVUd3c34eHhXLlyRTAFJI5dYmIid+7c\nwdHRkfn5efT09KiqqqKlpYX9+/fz8ccfExERwRdffCEEqdKR1MaNG6mvrxdn688//zyurq7o6uqS\nkZHB2NgYK1asYGFhAQcHBx566CHhywS4dOkSpqamrFmzBqVSya5duxgeHsba2loYmd3d3UV5Kz09\nnS1btqCvry+kLfX19dTU1AgzkI2NDdXV1fT19REbGytiwgkJCRw4cABbW1vi4+PZt28fAQEBzM3N\nERkZyfz8PBERETQ3N6Orq4tc
Luf7779n1apVqFQqXF1d6e3t5dq1a3h6eoojw66uLsFuKC4uZmJi\ngsnJSfEuvH//flpaWmhpaaGoqAhra2vxcL1y5QoKhUK0YaWymqR2Gx8fx93dndraWkZHR3FxcSEm\nJkbcHJLuT6lUYm1tTUJCgrgZnnnmGS5evEhLSwsrV67ExcWFP/7xjyJYZmRkxPj4OI899hirV68W\nASilUklNTQ2PP/44x44dE3wIJ6f7i2U3NzcWFhZwdXUVr8P8/Hz6+voEf1KtVguSc0BAANbW1iiV\nSjH7ev7559m2bRudnZ3k5OQAkJWVha+vL9nZ2UJmPDQ0REpKCmq1mu3btwuHqlqtJjw8XKxwJL7D\nsmXLsLKyEicYD3r9JGLOdnZ2HD9+nCeffJKvvvqKxYsXMzY2hoWFBTo6OmRmZgqAibGxMZWVlcTH\nx3Pu3DlRqba1teX27dts2bIFuVwuvnZDQwPR0dH09/eLfaCHh4eo7k5PTzM7O8v4+DhDQ0NUV1ez\nceNGrK2tcXNzo6WlhfDwcB5//HH+8pf7YbDJyUl8fX3FL9fa2hpPT0+Cg4PJzs5m69atInchGa7l\ncjkvvvgiarVatOzKysro6enB1dUVuL+XjomJEfLRkZER9PT08Pf3Jy0tTeztP/vsMyYnJ5mZmeH6\n9essLCzQ3t5OQkIC0dHRmJiYoK+vT1JSEsPDw2JG8Pbbb/P+++8TFhbG7t27eeihh9i5cyfGxsa4\nuLigVquJjY1l8eLFwnHx7LPPii3E/v37RYhIciYuX76c2NhYgRtrbGwkLCxMUKU0Gg0rV678hzbf\npUuXyM/Pp7KyEqVSKczXcH9FaGtri5+fH+Pj4/j5+QlpjbT3X716NY888ghubm4YGBgIG7dMJkOl\nUmFubi7arpJ9KysrS8TIh4aGMDU1xd3dncHBQZYtW8Z3331HYGAgp0+fprq6mpqaGo4ePUplZSW2\ntrYsWbJElNJ0dXXF3MrR0ZHr168L76SLi4sYfhoZGdHe3k5TUxMpKSlCJ9/f309zczOvvPKKQKh1\ndXVx9OhRsbKRQLUhISFibjI9PS10ftPT07z00ksMDg5SX18vTsskWO/KlSvJy8vjhx9+wMHBgSef\nfPKB78efxENhenqa3/72t/T09LBr1y5GRkYYGxtjdnYWV1dXvLy8GB4epr29ndraWtasWUNXVxfO\nzs7C5Hvz5k2BUZO6+YaGhsTExKBUKklOTiYsLIyQkBCSk5NZsmQJcXFxXLhwgU2bNjE8PIyDg4MI\nHalUKpqbm8nOzsbIyIjGxkYxUzA1NWVubo7k5GSsrKzo7u7+h3eMXbt28fvf/56enh5ef/11pqam\n8PT0xNnZGblcTn19PSYmJoSHh5OYmCheuMnJyejq6uLt7Y2rqysKhYITJ06IZufk5CRHjhwRyjYv\nLy/BoGhqauLWrVvi+LCmpobMzExaWloE4fndd99l165d1NXVkZSURHR0NKmpqSQkJLBlyxZ2797N\nqlWrqKioYPv27ZiZmZGRkcHo6ChPPvkkx44do6Wlhfn5eYKCgoiPj6e2tpYTJ04IUa6HhwdyuRx3\nd3ecnZ1xc3NjdHSU0NBQnnzySaanp/H19eXmzZuYmJhgZGTEyMiIAJZK3obCwkLi4uLo7e2lo6MD\nT09PrKysiI2N5de//jXXrl3jxo0bLFq0CCcnJxYWFujr68PPzw9DQ0P09PQE4h+gv78fhULB8PAw\njo6OmJqaisBafn4+tbW15OXl4ePjw6VLlzhy5AgPPfQQKpWKpUuXin+nUqmYmZkRDzE3NzdefPFF\nmpqaiI6OFlkWXV1dHnnkEYFtd3Z2FgDguro6wbGQ5LWDg4OsX78ehUIBIFaskqIwNTWVjRs3Cn7l\n+Pg4KSkpfPHFF9y+fZt///d/Fz/3hIQEBgYGGB8fZ9WqVZw+fZrBwcEHvh9/Eg8FHR0dTp8+LRBd\nFy5cEL80b29vPD098fb2ZmpqCm9vb6qqqnBwcGDz5s14e3sTEhIi3q2OHj3Kc889ByDy4QqFgq6u\nLiH51Gg0vPPOOwwODvLEE09QWlrK1NQUcrmc48eP88knnzA8PCz2iFKmvbi4GIDbt2+jUCjEoKqp\nqUkMED/99FMsLCzIy8vj2LFjhIWF4eXlha+vL7/73e9ITk7GwsKCzMxMzp49K9554f4NIQVoamtr\naWtrY/ny5SiVSry8vJiYmBAK8vLycm7dusUPP/xAaWkpISEhGBgYCEdDdHQ0np6eNDY24uDgQE5O\nDl1dXbS1tREWFkZycjIymYxDhw5x4MABoUiTaD2Sku769esiXCWZmoyMjBgeHqapqUnUre3s7Jid\nnRVzE4mEND4+LgJTdXV1IpcREhIi5jKSARzunxiNjo4SGxuLSqUSXIaMjAxef/11uru7KSgo4ObN\nm3R2dnLw4EFu3ryJhYUFzz33HAUFBYyPjwtxzfj4OHA/A7KwsMDk5KTYGrq7u4vhsIeHB+Hh4YyM\njPDEE0+QmJgokHU///nP+eSTT9DX1+fVV1/FwMBArBQuXLggBoW//OUviYuLw8XFhcDAQFxdXcUq\nKTQ0FGtra9566y1eeeUVkcaVuJbSsauE+9doNLz++utYWloSERFBd3c3g4ODyGQy1q1bx+TkJBcv\nXuT1119nxYoVPPTQQyxdulTMFDZu3Eh2djY2NjZs2bJFDKYf5PrJ4NicnZ3RaDQoFArBRCgpKUGj\n0WBiYiKO3cLDwzE2NqalpYXg4GDy8/MpKytjyZIlAmUltRkHBweZnZ0lLS2N4eFhFi1aJBRwFRUV\nhISECE0Z3I+W9vf3iyVfdHQ0CoWCe/fukZSUxNq1a7lz546AaMhkMkJDQ+nt7eXevXv84Q9/QC6X\nY2trS1pammhZXr9+nb6+PvT19fnwww/p7e0VhN+xsTExA4mOjmZwcBADAwO0Wi2jo6N0d3czMDBA\nV1cXFy9eJCQkRFitrKysCAwMZH5+XoSabG1t6ezsZMWKFfT29lJSUkJAQAAZGRns378fNzc3ysvL\nOXfuHG+//TYKhYKVK1cSHR1NZmYm69atE1zA5ORkgWsbHBxkbm6OjRs3kp6eTl9fHz09PcLKFBgY\nyK1bt0R5LSwsjOPHj2NiYkJrayuVlZVMTk6Sn59Pa2urwN+NjY1RUVEh4shNTU2sXr1aJFlnZmYI\nCAhAX18fPT09MjMzefvtt1EqlQQEBHD+/Hk2b96MSqUS8WTJJlZWVkZCQgJwf5mfnZ3NqlWr6Ozs\nFMKe0NBQ7OzsiI6Oxt/fH41Gg5GRkfAybtu2jZUrV7J48WLGx8c5fPgwk5OT7N27lw8++IC+vj6W\nLFnCxMQEr776Knfv3uXu3bvC3P3NN9+QlpZGbGysGFZLwlqJXdnX18eGDRsoLS0Vx96SMU0KbJ08\neZLS0lLkcjltbW3Mzs6Sn59PamqqSH8+/PDD+Pn58dxzz5Gbm8uvfvUr7ty5w8aNG9HT0
+P06dMP\ndD/+JB4KarWahIQEurq6RHw5NTWVq1evolKpGB0dpaysjNTUVFGCkQovCoUCAwMDIiIiKCwsFBgt\nuL9kDA4O5vDhw8TFxREfHy8iy3FxcWg0GgoLC3F3dycxMZGhoSEWL14s0GPSL2n58uVotVqiou67\nOUNCQvj22295+eWXMTc3F+j52NhYgoODaW5uFrAQQ0NDAgMDmZ6exsvLCzc3N1xdXUWgSE9PT7yb\nXbx4kUcffZT09HQRTpL8Ch0dHQQFBQnGgySK6erqYvPmzejr61NUVISTkxMzMzOUlpYSGxvLm2++\nyapVq/jNb35DR0eHsGrFxMTw448/smzZMhoaGvjggw/w8/PDzMyMzs5Ozp8/j6mpKadOnRKMgoWF\nBd5991327duHq6srx48fFyq4goICli5diqmpKRYWFtja2qKvry/QZGVlZejo6Aj4bEtLC2lpaaSn\np7N8+XJxdGhlZUVtbS2WlpYkJSXR398vmJYmJib4+vpSXl5OWFgYMzMzonE5PDws9tMymQy5XI63\ntzetra0AlJSU4O3tzfT0NFu3buXf/u3fePHFF5HJZExPT+Pi4kJDQwMdHR24u7vj7u5OZ2cnmzdv\nZmBggLKyMuRyuXgYSwg9f39/McStra0lKioKFxcXZDIZJSUlYi4kncisXLkSKysrzpw5w6OPPoq+\nvj4BAQECFKNU3m8UWFhYoFKp2L59O62trTQ2NmJhYSHcF1IIa3Z2lpqaGgoLC4UycGFhAT09PUxN\nTcV9UlZW9sD340+iOv3xxx8fMDU1xdvbW7xLGxsb09vby/DwsLjBwsLCiIyMpK+vD0NDQ1asWIGH\nhwcPPfQQ586dY3h4GAMDA9RqNWfPniUiIoKTJ0+yadMm3NzcqKiowNbWlvr6erZt20ZOTg65ubl4\ne3tjbm7O0NAQGRkZqNVqdHR0WLRokVDN2djYMDY2RlZWFk8++STh4eF0dHSIll9NTQ0GBgacPHkS\nAwMDTp06JQCvmzdvFkk3aW8nqcfd3d1ZtGgRZ8+eZefOnQBMTEwgl8sJDQ0VmjIp6bmwsCCWiTo6\nOsKYfe3aNYaHhzl8+DAAsbGxdHZ2Mjg4yPDwME5OTiQnJ2NiYsLY2Bh79+5laGiI7du3Ex8fj4GB\nAdu3b+fatWtYWVkJOrQUb5bUddJRp0wmE/LSrq4uUlNTGR0dJTMzk/T0dNzd3RkaGiIzMxM3NzeS\nkpIIDg5mbm6OmJgYbGxs8Pf3p7Gxkfn5eZydncnPz+eFF17A3t4eJycnrKysmJ6eprW1FUtLS3Fc\nmZ2dTW5uLjExMSxfvhyNRkNNTQ29vb2YmZnxzDPPYGxszOrVqxkYGODKlSvY2Njg4uKCSqXCwsJC\nhLBmZmbIzs4WuYiGhgZKSkr47rvvkMlkeHp6CjN0VFQUCwsLuLi4cPr0aVpbW9m+fTsqlUrATZqa\nmkhPTxetUj09PZRKJQ0NDYSGhtLc3ExTUxOrVq0CoKCggMLCQoyMjPDy8sLd3Z3z58+zbt06rK2t\nGRgYYOnSpYJiHRkZiVarZWFhgXPnzrFo0SIx+DQ3BwSHXgAAIABJREFUN2fv3r3U19czPz+PWq0W\naV59fX2uXr363wfHpq+vz4svvoiBgQFwv3lWWVlJQECAaEDOz89TV1fHqVOnsLGxoaysjHPnzjE4\nOCj4d4aGhqhUKkEGtrCwICQkRAwPOzs7yc7ORqPRcPv2bZRKJUuWLGH79u3CsSBxETMyMvD29hYd\nCR8fHxGZhfvHqCYmJly6dIl3331XVF6XLFlCU1OTsDv19/fT1taGUqlkcHAQS0tLCgsLsbOzQy6X\n09raKvbTCoWCRYsWkZCQgK+vLy4uLri7uxMbG4uFhQUNDQ0EBQWJ9mJoaCgKhQKtVotMJuPOnTu8\n9NJLTExM8Ktf/YqZmRk8PDywtramra2NpqYmTE1NWbVqFTU1NWzatInGxkZ6e3vZvXs3P/zwA/7+\n/iQnJ+Pn58f58+cZGhri7NmzaDQaxsbGcHBwIDg4WBiq4+PjCQsL47PPPkNfX5/c3FwSEhIE9Wrb\ntm1YWVmhp6cnjFbe3t6kpqbi5OSEp6cniYmJoj4+MTGBjY0N1tbWqNVqzMzMSExM5Gc/+xlRUVGM\nj48zMTHBww8/LOQvenp6rFmzRpy65OTkUFJSwvnz58VAMCwsjPn5efbu3UtNTY0YMnd3dzMyMkJT\nUxNHjhzBwMAACwsLlixZIuTCCwsLrF27loWFBRobG8UpEYCrqysVFRXU1dUxMzMjVlT+/v4olUri\n4+NZsWIFfn5+QpCsUqk4duwY169fx9zcnODgYDIyMsjJyRHfb319PaOjo4Lt0N3dLUjg/f39BAQE\niKxNR0eHMHtJCvvY2FjWr1/P8ePH6e7uFj/fB7l+EiuFL7/88kB2draw7ISEhIhsgvTuf+LECWxt\nbVm6dCnXrl1jYGBAyD3q6upE8UOj0ZCbm0txcTHr1q3Dzc2Na9eu0dPTg7e3N6ampsjlcmQyGYmJ\niURFRWFsbCxMQ21tbejp6REdHS1Y+wYGBqIum56ezksvvURjYyNTU1P09fWxZs0aTp06JUJDhYWF\nwp/g7+9Pb28vFRUVAi47MjIiEm/SPODKlSt88sknTE9Pi3302NgY09PTGBgYIJPJ0NHRYdu2bahU\nKuLi4gThOTMzk+7ublJSUigpKcHHx4egoCBCQ0Pp6emhuLhYoNdnZ2dJSEjg0KFD2Nvb09nZSX9/\nP2ZmZkxNTYmBnORWkEQ07u7ugo9QVVWFRqPB0NBQ/FzGx8eZn59n8+bNYo8/OzsrQDajo6PMz8/j\n7e1Nb28vurq6ZGZmikCZVLYKCwtjenqa8+fPi4eamZkZJiYmqFQq5ufnBQvRy8sLExMTcfIknWYo\nlUphdpa+VnR0tKh1e3t7CweHoaGhkMyGhIRQXl7O0qVLiY6OpqOjQwz1pBWSubk5ZWVlQjqrp6fH\nunXrGBsbY9GiRSxevBiNRiPmVElJSWJLsHLlSoaHh+nv76e6upqkpCRUKpVIHxobG2Nra0t6ejov\nv/wy3d3d4nREJpOJo1ZJQW9sbEx9fT0ODg709PSwfPly8vLy0NHR4e7duxQUFBAREYGLi4vUm3ig\nlcJP4qHw7rvvHvjggw/Q09PD3t6eyspKIcgIDw+noKCAlJQU9uzZQ0dHBzKZjPr6el588UW+/fZb\noSOX4rVSMCY0NFTYeST/YFxcHIODgwQGBgqwRktLC/r6+ixdupSLFy+KMo4UE+3r68PR0ZHZ2VmB\nXlOpVILHZ2tri6OjI9HR0VhZWbF06VLhQJC2J21tbSLGKk3Wf/e73xEQECDIyKtWrRKi3c8//5ya\nmhrGxsbEO4+LiwtTU1Pk5+fT1NTEwMAARkZG+Pn5MTY2hp6eHj4+PoyOjhIfH4+enp7Yf3t4eFBe\nXs7s7Cw+Pj488sgjdHd3c/v2bW7dusX4+DjL
ly/nzJkzlJeXCyyaWq3G0dGR9PR0/Pz86Orqwt/f\nn6amJrG3trW1ZevWrczPz5OSkoJSqRTilL6+Pubm5vDz8+P69euMj4+LFKMUJbezsxPODim5qdVq\n0dPTY3JyEnNzc27evEltbS1DQ0OYmZkRExPDsWPHmJ6eFpTn3NxcjI2NWVhYYGFhgezsbCG/kfB2\nDQ0NIgilUqkoLi4mISGBvLw8EZoaHx/n9u3bTE1N0dPTQ2dnJxkZGULWExkZybfffsvk5CRPPPGE\niIqbm5vj7OxMYGAgS5YswdnZGWNjY7Gi+/Wvf83MzAxLly6lu7ubwMBAkpOTKSsrw8HBgRMnThAb\nG8vZs2dxd3enqqqKu3fv4uvrKyQ5cB8zePHiRXJycgTh2d3dnZKSEi5cuCACYefOnWPlypXcu3eP\n1tZWysvL//tsH2QyGTdv3uTWrVu0trYyPj6OoaEhMpmMnJwcNm3ahJWVFfr6+tjb21NUVMTSpUvp\n6+tj69atmJubMzY2hqWlJVNTU2zevBmA0dFR0XUPCgpCq9UyMTGBmZmZiBZPT08zMDDA3bt3OXny\nJNbW1piZmRESEoKhoSERERFi+SctGbVaLZaWlpibm+Ph4UFOTg7BwcGcP3+e0tJSjh8/TmNjIx4e\nHixevJjBwUGWL1/OCy+8wKOPPoqDgwO6urrs2bMHOzs7MQybnZ1l0aJFtLa2snnzZuRyuUCyFxYW\nYmZmxqlTpxgdHcXQ0JCwsDBGR0fp6elhz549YmApBaakCnJXVxeXL19GpVLxyCOP0NnZybvvvktm\nZiYFBQX4+vry6KOPiv29qakpDg4OYg6g0Wh44403xNm4dNrT19eHXC7HwsKCb775Bh0dHVpbWwUu\nbXZ2VqRS6+rq8Pb2pqOjg9DQUBYWFrC0tBS6OReX+w17d3d3NBoN3t7ehIeHMzQ0RG5uLi0tLXh5\neWFlZcWePXtEAEmyYUlZjo6ODpydnZmbmxMQXYDc3Fzc3d15+umnBSF8YmICIyMj/uVf/oWFhQUK\nCwuRy+Xi9ZeYmIiJiQlBQUHExMSQlZVFREQEtbW1vPDCC8B9QJCfnx8BAQF4enoKVP0HH3zA8ePH\nUavVtLW10djYKOrUhw4dwtzcXPQepJzKjh07sLOzAxBHnp9//jlOTk7s2LGDzs5Orl69ikwm49tv\nv6Wqqoo1a9ago6PD7OwswcHBrFu3DrlcTmZmJjt27MDd3Z38/HzBq3iQ6yfxUHB0dCQhIYGYmBjk\ncjk2NjbEx8czMjJCbGys8AyePn2aoqIiXFxcmJubE0ozuVzO8PAwfX19XL58mYsXLwKwe/dubG1t\nKSoqoqysTIhbtFotnp6etLe3Y29vL+K0MzMz4oiyvb1dvIOZmJigp6eHSqUC4KmnnqKlpUWAMxIT\nExkcHMTNzQ0bGxs2bdqEs7MzJiYm6OjooKury5UrV7hx44bYk4aEhLBo0SLs7OyE77C5uZn6+no0\nGo2I7dra2grGf2VlpahI+/j4YG5uzsaNG4mKiqKjo4PVq1fT3NyMo6Mj9fX1XL16lT179gg82bJl\ny4Qq3cDAAJVKRVpaGn5+fjz66KNiJmNtbY1UZ5+amiIuLk4UkjIzM1mzZg2Dg4Ni2drd3Y2dnR1K\npZITJ06II7N79+6JDomzs7NAw9va2rJ27Vp0dHRwcnJCJpOJ0E51dbXwWyoUCi5evIixsTHW1ta8\n99573Llzh1OnTgmAzJEjR0QJCSA8PBwXFxe2bdsmHJ0AO3fuJCUlhc8//1xsE6X6+oULF0Sj0cHB\ngaVLl4r+S29vL/X19XR0dIhKtXRMDfcbs4GBgZSXl6NUKoXP0sPDg4iICKytrcXqxcjISET4fX19\nmZycpK6ujo6ODoHpkx7sjo6OBAYGcu7cOfLz8zlw4ICI3X/00UesXLmSTZs2YWhoiLOzM8HBwcD9\nh+rp06dFqK+iogJdXV3BGHmQSyYFU/53XjKZ7H//N/F/rv9z/f//KtNqtVH/7JN+EjkFOzs79u/f\nLyKyJSUlzM3NYW5uLjh+/v7+WFtbY2FhwZtvvsmHH36Ik5MTBQUFjI6OCmXZtWvXsLa25ssvv2Tb\ntm2C4ycNbr777jsBbT1//jxbt27F0tKSo0ePikjx5OQk3d3dnD17ltTUVGxsbGhtbUVfX5+33nqL\nZ599Fnt7e9LS0sjMzMTBwYHIyEhu376Np6cnzc3NTE1NiT1zREQEw8PDBAQEcPToUR555BHa29uJ\ni4sTkpV/+Zd/4a9//av4XtasWYOuri4WFhbk5uby6quv8pe//EX4DKytrZHJZNjZ2eHt7U1lZSUN\nDQ0sXbpUtDodHR3R0dFBX18fCwsLrly5goeHB4sWLWJhYUFo65uamvD39yc6Oppr166RkJCAp6en\nYCtK0V9DQ0NmZ2fZuXMnJ06cwMTEBBcXF8bHx2lqakKlUuHp6Ymfn59wKkhJR6kJ29nZiZeXF9HR\n0dy6dQtHR0dmZmbYuHEjnp6evPfeeyQkJHD+/HlxErF+/Xqam5sxNjYWuY2goCBBLF69ejWhoaHc\nvn1bYP+VSiWXLl3iqaeeIjExUZStbty4wbp16wTbAO7XtQMCAgTfoL+/X2zjAgIC0NPT4+bNm8K8\nlJiYiIGBAevWrRMru/b2dkFxkhyn165do76+HldXVwHE/frrr4UY+KOPPmL79u188MEHJCYmEhcX\nR0VFBW+99RbvvvsulpaWYgbj7e1NcXGxaPuqVCpRwuvs7BTkaVdXV+EsqaurE8yQoqIi3nvvvQe6\nH38S2wdpHuDg4MBHH31EVlYWenp6yGQyHBwcCAkJYffu3dy5cwe1Ws2JEyeor6+nra2NDRs2YGpq\nKtpxbm5u3Lp1C0B4GBMTE2loaODWrVukpKRw8+ZNYmJi+P3vf8/69evp7e3l7bffZseOHXz//ffk\n5eVhZ2cnchI6OjpMT0/T3NwMIHoIzc3NonA0MTHBsmXLCAwMJCAgQMAyUlNTMTU1xcvLi/j4eOLi\n4oiOjsbLy4tz584hl8vFPrKyspLm5mbCwsIoLCzEz8+P27dvk5aWJnwAkpfSxMSE7Oxs7ty5Q0ZG\nBmfOnBFpvYmJCYKCglAoFCK598033xAeHk5fXx8mJiZ0dnaye/du2tvb+c1vfiNQ46+++ioqlUpo\n1NVqNTdu3MDe3p5ly5YxMDBAVlYWIyMj+Pj4CEPWjh07MDc3p6Ojg4yMDCHZ6erqEnh2S0tLUlNT\nWbFiBcePH/+H/bUEQxkcHOT8+fM89dRT7N27l9raWiYnJ7l79y7V1dVCjKPRaAgMDMTHx4fp6Wk+\n+eQTent7sbW15ezZs8hkMrZu3SqOuSWtWmBgIFZWViiVSgwMDAgNDWXnzp2EhoaSk5Mj5MZ6enqi\nByKdVIyMjODg4EBlZaWo53/wwQcUFRVhbm4u2BFhYWGCahUWFkZDQwOlpaWC9bhp0ya++eYbfHx8\nxAM
zz00ENotVoiIyMJCgpi9+7dIhk6PDyc/v5+UaVIdmmJ\n31FSUkJqaioajYYnn3wSvV5Pdna2QMQZGhpib28vErs3bdqEXq9nyZIl5ObmUlxcjJeXFyUlJVy6\ndImBgQEMDAxISUmhsrKSo0ePMjU1RWxsLObm5uL5vnjxIrOzsyQkJAgRVlhYGAqFgqmpKRobG/9T\n0tSyZcsICAhgaGiIAwcOcPLkSRISEigvL+eLL77gwIEDKJVK+vv7sbCwkLwX/3PaBwk73t/fz/Dw\nMEZGRmRlZZGVlcW2bdt48MEH8fT0ZHx8HDc3N95//326u7tpbm6mt7dX7Lil8E4pQ6CoqIjW1lZO\nnDjBli1bCA8Px9fXFy8vLx588EGsrKwwNDRELpdTWlqKTCZjZmZGHI4DBw7g6ekpSsW8vHvBV+7u\n7sLnfuDAAUHv/cc//sG5c+e4dOkShYWFAqhpZWVFU1MTSUlJ1NbWMjk5ibe3N2NjY7zzzjuCJaDV\naomIiCA8PJz169djY2Mj1lVKpVIoEM3NzSkvL8fR0ZHo6GgxgN21axdPP/00Xl5eBAYGish5W1tb\nUlNT8fl3gGl0dDSpqancunWLiIgIrl+/TkVFBUePHuXSpUu0tbXh7OzMb3/7W9H7P/jggzQ0NIjA\nUwsLCzo7O2lpaUGj0TA7O4upqakw/1RUVAjvRn9/v1jX+fr6YmZmxosvvsiCBQvw9fXFx8dHtFBD\nQ0MUFBSIRCRzc3NUKhVlZWVkZWXh4uLC6tWrmZ6eFsE0S5YsEYY0Cc+nUChQq9WibB4eHkaj0XD3\n7l3S0tLIzc3FxMSEwcFB3NzcyMzMRKPRYGZmhqWlJStXrqSzs5OTJ0+iVCrJy8vD19eXyclJMYAG\nqKqqoqGhATs7O1JSUkhKSmJwcBB7e3veeust7O3t8fDwYHx8HDs7O2JiYpg3b55IGSsvL6e9vR1H\nR0dSU1PFmXj22WeFY1ZSeEo5JB0dHQwMDDA7O8v8+fOJiIjgtddew8zMjJ6eHl544QVR4Tg7O9Pd\n3U18fPx9n8dfRKXw97///Z3Q0FCio6OpqKhgamqKjIwMwsPDaWpq4tq1awQEBNDR0cH+/ftZsmQJ\n1dXVlJaWijWgTCbDw8ODiooKvL292bNnD6+99ppAdms0GhEnLykPXVxc6O3tJT09HT8/PzHBbm5u\nFtWEv78/7u7uNDU1YWtrS2ZmJg8//LDowSV2w+joKKWlpWIAGB8fL7YVc+fOxdzcnICAAJGDefz4\ncRGOIpXiwcHByOVyxsbGUKlUKJVKOjo6hMc+IyODwMBAbG1txQwjKiqKb775Bn9/f6ampsjKysLP\nz4+cnBwUCoUw6kipTaWlpaSnpwtC1Pnz58UOvKmpib6+PjHLKC8vFyldvb29zJkzB29vb3p7ewkO\nDiYnJ0esMf38/ESM3uDgIGZmZqjVapqbmxkbG+Py5csMDw9jbW0tEqOkMJ2oqCgaGxtFxoc0T2hr\naxObjuTkZLEOtbe3p7OzE71eT3x8PFNTU1RXVxMbG0t1dbVIzraysmJwcJCjR4/y7LPP0tDQgL29\nPQEBAVhZWeHm5iYUtL/+9a+FwEoKm3FwcGB4eFgkhxkYGIhW19/fn/Pnz7N3717y8/OxsLBAr9fT\n0NDAQw89hEKhEA7UgoICwZZ4+OGHuX79OkqlktLSUrZt20ZYWBh//etfxeYtJycHa2trdDqdCESK\njY3FwMAANzc3JiYmUKvVNDQ0cPv2bX7zm99w9+5dpqensbCwwMzMjMjISEpLS1Gr1cybN096LPdV\nKfwibgoHDx58x8TEhCVLlhAUFCTMPM3Nzfj5+eHh4YGTkxM9PT2sWLGCK1euEBYWhl6vF4g2U1NT\nLl68KFyIJ06cYPPmzdTU1Iie09jYmJmZGXQ6HYaGhnz55Ze4ubnh4uJCZmYmvr6+TE9PCyozQElJ\nCUNDQ/8JXe7r64ter8fMzExoHtzc3PjjH/+IVqtFo9EIKlRRUZFgF0hltqurK5OTk7zwwgvs2bOH\n4OBgsrOzWbx4sWg9PD09Ra5laWkpO3fupK+vj7t379LW1iZ89kNDQ9ja2gqdg4ODg3isWq0WrVZL\nb28vcrmclpYWbGxsuHDhggCbTkxMiFCbsbExrK2tCQ0NFdJZIyMj1Or/p73zC23rOgP478NpHTL5\nj6oGS7JL5joGIQJRbUcxTilJIN4aHNImCtQv6cNgBMayPjSQUQglbxtsr3sojSljzASyOSYhgXXM\nDIf8c01qqbH+uJZgS2zHnpJYibHTJKcPuvfUCpGjwKp7H84PhK6vhP3zh/X5nHPP/b7/6R2Yg4OD\nbNu2jWw2q3fP2X0i5+fnWV5eJhqN0t3dzf3796mtraWxsZF8Pq/XF+bm5vR2Z7uM2KVLlxgbG+Pg\nwYPU1dXphifT09Ok02nGx8fp7e1l+/btDA4O6u3G9+7do6mpSW9kswu4KKV0F+9Tp05x9OhRPB4P\nhUKB2dlZIpGIvqvUXv1PJpNAsfqTvfPS7/cTDAY5e/YsfX19bN68ma6uLpaWljh//jxLS0uEQiEy\nmYxupnP37l0aGhrIZrO0trbS0tJCf3+//qDmcjkWFxfx+XxMTk7qqUJ9fT07d+5kYGCAzs5O3QCn\nUCjo751IJPQ6RTAYJJFIcPnyZfL5PB6Ph8bGRt3Bq6mpSS+WP3jwoOKk4Irpw6NHj9iyZQvLy8uc\nPn2a27dvk81mKRQKtLS0kEwmuXjxIsFgkHg8TnNzsy7RvmvXLn39vL+/X99JaBONRkuKWkxPT+te\ninbfxnQ6TSgU4vHjx7rO3okTJ1hZWSEWixEMBslms3paMjU1pYeSyWSSSCTCkSNHiMfjPH36VN9p\naW+4OnfuHENDQ/T19eH1ejl58iSbNm3SZc/sHZher5eBgQHy+TzhcJhUKsX69euJRqNMTEzQ29ur\nL4+Oj4+XVJ9eWVnRVY3sMnINDQ34fD4KhYIeaYkIuVyOTCaji7rY//3tW42VUhw6dIiNGzfqJil2\nB6aenh69K7K9vZ3Ozk4ymYwuCWavzeRyORYWFlhYWCCdThMOh/WiaSgUIh6Pc+XKFb0b9PDhwwDs\n27ePxcVFduzYwc2bN/UlVLsSdXd3N4FAAK/XSywWo6amRi+GPnz4kD179jA/P8/w8DDXrl3TC5gi\nQkdHB4lEgnXr1unmuBs2bODAgQMMDQ2xe/duvZhaX1/P1atXmZ2dxe/309XVRSAQ4MKFC6RSKV3K\n3b5p7cyZM4yOjuL3+ykUCoyMjLB161ba2tro6Ojg+vXrjIyM8OTJE2ZmZnSH7p6eHmZmZvTvbPdn\nOHbsGMlkEp/PRzgcJhaL0dbWRiQSYXh4GK/Xq9v6eTwefD4f7e3tukGtfRXFbjk4Ojpa8efRLdWc\n54GHwILTLs/wOu5zAuP1MrjRCZzx2qSU2viiN7kiKQCIyFgl5aeriRudwHi9DG50Avd6gUumDwaD\n
wT2YpGAwGEpwU1J44aqoA7jRCYzXy+BGJ3Cvl3vWFAwGgztw00jBYDC4AMeTgoj8XERSIjIlIscd\ndsmJSFxEbojImHXuNRH5h4hkrGdvFTxOicgdEUmsOlfWQ0R+a8UvJSI/q6LTpyJyy4rXDRHZW2Wn\nN0TkXyJyU0S+EZHfWOedjlU5L0fjVTH2zi8nHkAN8C3wJvAq8DUQdtAnB7z+zLnfA8et4+PA76rg\n8Q7QASRe5AGErbjVAq1WPGuq5PQp8PFz3lstpwDQYR3XAWnrZzsdq3Jejsar0ofTI4UoMKWUmlZK\nPQIGgf0OOz3LfuAL6/gL4L013vt/QSn1byBfocd+YFAptaKUygJTFONaDadyVMtpRik1bh0XgEmg\nGedjVc6rHFXxqhSnk0Iz8J9VX/+XtYP3Y6OAL0XkKxH5pXWuSSllN+KbBZqcUSvr4XQMfy0iE9b0\nwh6mV91JRH4KvAVcxUWxesYLXBKvtXA6KbiNt5VSEeBd4Fci8s7qF1VxrOf45Rq3eAB/ojj1iwAz\nwB+ckBARD3AG+Egptbj6NSdj9RwvV8TrRTidFG4Bb6z6usU65whKqVvW8x3g7xSHcHMiEgCwnu84\npFfOw7EYKqXmlFJPlFJPgc/4YchbNScReYXiB+8vSqm/Wacdj9XzvNwQr0pwOilcB9pFpFVEXgU+\nAIadEBGRn4hInX0M9AIJy+dD620fAmed8FvDYxj4QERqRaQVaAeuVUPI/uBZvE8xXlVzEhEBPgcm\nlVJ/XPWSo7Eq5+V0vCrGqRXOVSuveymuzn4LfOKgx5sUV4C/Br6xXQAf8E8gA3wJvFYFl79SHF5+\nR3F++Yu1PIBPrPilgHer6PRnIA5MUPzDDlTZ6W2KU4MJ4Ib12OuCWJXzcjRelT7MjkaDwVCC09MH\ng8HgMkxSMBgMJZikYDAYSjBJwWAwlGCSgsFgKMEkBYPBUIJJCgaDoQSTFAwGQwnfA6PrX6xov7Hl\nAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1385a46e90>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGd0W/eV7v0DQYCo7CAJiATBBpJiF5u6ZFKiJEqi5CbJ\ntmwrieMUJ5OxPeM4s8a5ij0eZ6SMx4kTO8t2LFuWZLnL6p0UKapQ7L2DvQFgAxsAgng/6PK8kw/z\nju+a8Rrfd+n5BvAQ5wDn/Pd/72c/zxa53W7u4R7u4R4W4fE/fQH3cA/38N3CvaBwD/dwD3+Fe0Hh\nHu7hHv4K94LCPdzDPfwV7gWFe7iHe/gr3AsK93AP9/BX+NaCgkgk2iwSiVpEIlG7SCR68ds6zz3c\nwz3890L0begURCKRGGgFNgJ9wB3gEbfb3fjffrJ7uId7+G/Ft5UpZAHtbre70+12O4DjwI5v6Vz3\ncA/38N8Iz2/pc5cAvf/udR+Q/R8drFQq3QB+fn7IZDIcDgdTU1OIxWJUKhVWqxWlUolcLkehUGA2\nm1Gr1TidTux2Oy6XC39/f6anp/Hy8mJ2dpbBwUF8fHyQSqWIxWLsdjsBAQHMzs7i5eXF3Nyc8N70\n9DQymYyxsTE8PT1RKpUMDQ3h5+eHy+XCx8cHq9WKVCplcHAQvV6PXC5nbGwMjUbD0NCQcH4PDw8c\nDgc2m43IyEimpqYQiUR4eHgwOTmJTCbD6XSiVCqZnZ1lfn4eDw8PhoeHWbJkCRKJBE9PT6ampvD0\n9MTpdCKRSJDL5cJne3h4sLCwgM1mQ6VSsbCwgEQiYWpqColEgkQiwel0Cv9rt9uRSqV4enoiFosx\nm81IJBKkUilSqRSHw8H8/LxwTU6nE5VKxfz8PC6XC5FIhEKhwGaz4eHhgYfH3b1k8Xd0OBz4+fkx\nNzeHSCRCLBYzPT2NSqViZmYGhULB3NwcADabDY1Gg91uR6FQ4OXlxdTUFFNTU4yNjREdHY3b7cbl\ncuHh4cH8/Lxwf5xOJy6XC4VCgVgsxuVy4enpicvlQiqV4uHhwfj4OGq1Grvdzvz8PABDQ0PodDo8\nPT1RqVRMTEzgdDrx8fFhYmKCgIAApFIpVquVhYUFpFKp8BwpFArGxsbw8fERni+AiYkJrFYrGo0G\nh8OBWq3G09OT8fFxZDKZcI8A7HY7EomEyclJAgMDmZ+fx+l0IpPJGBoawsfHBwAPDw+USiXd3d0E\nBQUxNzeHQqHA7XbjcDiQyWTCezabDYlEgkKhYHZ2FpfLhVwuF343kUjE3NwcYrFYuCeDg4MWt9ut\n+c8W77cVFP5TiESip4GnAQIDA/n1r39NSEgI5eXlFBUVkZOTg0ajISwsjFu3brFq1SpaW1vJzc2l\nsbGR1NRUDh8+TGdnJ3//93/PjRs3aGxsRK1WMzo6ypEjR/jwww/Zv38/jz76KJcuXSIlJYWEhAQK\nCwtJT0+nqqoKh8NBSkoKZWVlhIeHExERgZeXFzKZjNbWVjw8POjr6yM7O5u6ujr++Mc/cvToUQoL\nC3E6nSQlJdHQ0MDU1BSrV6+mqKgIi8VCT08Pv/71rzl79izd3d1ERUWRkJDA7du3yc3NpaamBqfT\nyQ9+8AMaGxvZsWMHzzzzDKOjoyiVSjIyMvDy8qK1tZWkpCSqq6sJCgpicHCQlStXUlZWhkQiYWZm\nBrlcjsViQalUYjKZiIiIQCQSkZycTF1dHUNDQywsLLBp0yauX79OWFgY3d3dDA4OkpOTQ319PQkJ\nCbS3t7N06VI6Ozvp6urC39+fhIQEqqqqmJ+fJzg4GLlcTkdHBxqNhpMnT7J7925GRkZYsmQJx44d\nIz09HZFIREJCAjU1NUgkEmQyGYGBgXR1dfGTn/yEgwcPEhYWxujoKENDQwQGBmI0GnnhhRd45513\nOHHiBCEhISQkJOBwODCbzdhsNg4dOsTDDz/M/fffz/nz54mOjqarq4vMzEzMZjNpaWm8/vrrZGdn\nY7FYcLlcHDt2jKGhIfbu3cvOnTsJCgrCarVy9OhRvL29USqVNDQ0EBoayooVK5iZmaGwsJBnnnmG\nmpoaxsfHKS0tJSMjg/7+foxGI0lJSbjdbtauXctjjz3G2NgYMTExfPHFF8TExFBfX09+fj6hoaH0\n9fWh1WqpqqqioKCAwcFBuru7mZ+fx2q1YrPZWLduHXK5nJKSEiIiInjrrbfYuXMncrmcuLg4PD09\nGRwcZGRkhIWFBYKDg2lubhY2tPLych599FFh81sM1v7+/thsNqqrq/H39+ftt9/u/iZr89sqH/qB\nsH/3OvR/vyfA7Xa/43a7M9xud4afnx8mkwm5XE5iYiI/+tGPhC/U2dmJWCymurqaX/3qV1gsFmw2\nGx988AE+Pj7s2bOHH//4x2g0msXPJTo6GoAzZ87Q2tqK3W7n2WefxWAwEB0dzb59+1AoFAQEBPDi\niy+i1WqJiIjA09OTqKgoIiMjKSwspK2tTchKBgYGMBqNANy6dYv09HQ0Gg0WiwWLxUJQUBCjo6Po\ndDrcbjc/+clPqKurE3bohx9+GIVCwd69exkaGiIzMxOLxcLp06eRSCQArF27ltHRUVasWIHD4WBu\nbo5bt25hNptxuVx0dXUJwdHlclFaWkpbWxvFxcVIpVIGBgbw9vbG29sblUpFXV0dvb29tLe3Mzc3\nR3FxMY2NjSwsLDA1NcXPf/5z2tvbWblyJYWFhYhEIoqLiykvL6e4uBiLxYJOpyM2NpawsDA8PT0x\nmUyIRCK8vLzYsGEDJpOJ+fl5Ojs7ycjIQKFQ4O3tjUQiQafTCQ+2XC4nJCSE1tZWUlNTGRkZISIi\nAo1Gg1KpRCwWA+BwOJienubMmTPI5XKCgoIYHx+nv7+fFStW4O/vT3d3N0ajkb6+PuLi4hgdHSUy\nMpKBgQESExOpqalhYGCAoKAg3nrrLQAMBgPnzp2jvr6eubk5fvGLX5CQkMAzzzzDwMAAMzMzVFRU\ncPbsWX74wx+i1+sZHBzE5XKxefNmwsLCcDgcrF+/nubmZs6cOQOAXq/npz/9KePj4zz44IMYjUYO\nHjxIVFQUTqeT++67jw0bNvDTn/6UqakpAgMDycnJwe12s2vXLrZs2YLNZkOr1bJu3Tq6u++u29HR\nUbq6umhqauLcuXM0NzejVCoJCwujtbWVmpoaSktL8fX1Zfv27ZjNZqqrq7lz5w4vvPACw8PDtLe3\n09DQgFarxdvb+xsv3m8rKNwBYkQiUYRIJJICe4CT/9HB4+PjwN3UcmFhgYMHDwKg1WpJTU1l27Zt\nrFu3jn379tHf309bWxtutxuFQoFSqWTLli10dnayc+dOACEd8/X15ZVXXsHHx4fh4WHi4uKoq6tD\nq9UyMzNDbm4udrudvXv3olKpWLJkCV5eXkRFRfHKK68QFhbGli1bkEqlxMTEcPjwYQCWLVvG5OQk\nVVVVSCQStm/fTlpaGmNjY8zM
zKDT6bhw4QIfffQRn332GUqlkjt37hAREUFVVRW5ubnodDoiIyNZ\nsWIFi2RvS0sLRqORnp4eZmdnCQsLY82aNdTU1BAeHo5KpUIsFhMREUFHRwdnzpyhpaUFX19fnE4n\nRqORNWvWMDc3h9VqJSUlha6uLrZs2UJoaCje3t6sWrWK9vZ24uPjaWtrw9PTk7KyMoqKivjkk0+4\nevUq169fx2azUVxcTHd3NwqFAq1WS0pKChs3bmR+fp6ZmRkcDgcdHR1otVomJyd58sknGRkZQa1W\nMzQ0hKenJ2azmZKSEqKjo+no6MDtdnPw4EFUKhVpaWk8+eSTZGdns3btWgAaGhrYuHEjmZmZnDhx\nglOnTiGRSPD19WXbtm0EBQVx584dbDYbMTExSKVS/Pz8OHLkCH/+85/p7e3lgQceQCqV0tTURGtr\nKwCffvopYWFhDA4OYjabhQB+69YtMjMzEYvFKJVKNm3aRHt7O9euXUOhUFBYWEh1dTV9fX2kpaVx\n9OhRgoODiY+PB8DpdPKv//qvVFZWEhQURHFxMSdOnODTTz+lr68PtVpNSUkJV69eFYLp//pf/4vY\n2FgqKirw9/cnNDRU+L2effZZYS3I5XLEYjEGgwGlUsnY2Bh9fX1ER0fz4x//mD179mCz2VCr1dy4\ncYNz585RUFDAli1bSExMJCoqColEQnZ2Nh9//PE3XrzfSlBwu93zwM+AC0AT8Knb7W74j45frIXG\nxsYASEhIIDQ0FH9/f9566y1aWlrw8PAgOjqa1atXs3z5ci5evIjBYEAulxMdHU1rayvNzc2o1Wqh\n7pufn6erqwuj0YiHhwfT09MsWbIEm83GzMwMNTU1HDlyhH/8x39k27ZtJCYmYrPZuHLlCl9//TWx\nsbEUFRUxPj7O4OAgx44dA+5GcY1Gw+OPP05XVxdKpZL29nZSUlKIjY0lJCSElStXsmLFCqampoiN\njeXBBx+ktraWiIgIfHx8mJ2dJTExkfPnz9PT0wOA2WzG6XTS39+PSqWio6MDuBvkli1bRk5ODi0t\nLZw8eZKlS5dy+vRp0tPTEYvF5OfnU1BQINTNOp2Oc+fOkZmZSWNjIykpKcKCysrKwm63U1hYyPT0\nNDabjfT0dJ5++ml++tOf8v3vf58tW7bwpz/9CaVSiUajQSwW09HRwYEDBwgODsZut7Np0yZ27twp\n1NrHjh1DoVAgkUgoKCigvr4evV5PT08Pr7/+Ok6nk1dffRW9Xo/D4cBkMtHR0YFarebSpUsAKBQK\nrl+/Tnh4OA8++CDr1q3D39+fvLw8Lly4wMzMDI8++qiwMcjlcmJjY3E6nVgsFuRyOU1NTczPz5OX\nl4darQbgySefpKenB5lMxsjICO3t7bS1taHRaFi+fDl1dXUsW7aMkZERTp8+TXd3N35+frzwwguE\nhYWh0+no6OggJCSE4uJiLly4AMDt27eJiIhgw4YNFBcXEx4ezqVLlwQO5caNG3h5eTE9Pc2GDRsQ\ni8XodDo++eQTvLy8uH37Nj09PfT29mIymejs7AQgICCA1NRUlEolTU1NxMTEoFKpGBsbQ6lUUldX\nh6+vL/Hx8dy8eRO328327dsZGBggNjZWWDOxsbE0NTXx85///Buv32+NU3C73WeBs9/kWLvdTlJS\nEsXFxSQmJiKXy7l48SIPPPAAQ0NDjI6OMjc3J+ysmZmZ3H///bjdbiQSCZWVlfziF78gIiKClpYW\nPv/8cwAyMjKora3Fw8OD+vp6goODSUlJ4fXXX8dgMODn5weAt7c3d+7cwWAw0N7ezvT0NFNTU+j1\neqRSKVVVVaxZs4aKigoALBYLly5d4vHHH8fpdHLo0CEmJiaE2m3nzp2MjY2hUqnQaDT4+/vzq1/9\nCrFYjF6vp7y8nNraWoHMNBgMACiVSjo7O9m4cSMSiYTu7m5UKhXXr1/H19eXzMxMMjMzkUqlqNVq\nqqureeihh+jp6eHEiRNkZGSwYsUKrFYrJpOJ4OBg9Ho9NpuN3//+9+Tm5qJQKKisrEQikVBTU0NO\nTg4Gg4HR0VGcTidzc3NMTk7i4eFBUVGRkGHMzs4yPj7O9u3bkUgk3Lp1C6PRiFar5fbt2yQkJABQ\nWVnJ9evXKSkpYc2aNURHR9PW1oZMJkOj0ZCSkkJPTw8JCQn09/eTkZHB5cuXiYuLA6Cjo4Ph4WFy\nc3OJjY3FarXi5eVFb28v+fn5FBcX8+qrrzI5OYmXlxcpKSlCxjQzM8O5c+eIi4tDJpMJzwZAY2Mj\n3t7ejI2N8cgjj2AymTh8+DAymYxTp06xfv16BgYGiIiI4KmnnmJwcJDjx49jtVpZtmwZL7zwAj/8\n4Q9JT08nNzeX+vp6Pv74YyQSCevWraOrqwuJREJcXByTk5OUlZURExNDdnY28/PzGI1G6urquHPn\nDunp6TzxxBPU1NRw48YNjEYj3d3duFwuTp68m1B3dXUJGZhCoeDixYv4+fkRFxfHxMQE69evx2q1\nYjabUalUBAYGkpubS1hYGGVlZTQ0NNDQ0IDBYECj0TAyMvKN1+53QtHo4eFBfn4+GRkZxMbGsnXr\nVnx8fCgqKhKyBm9vbwYGBpDJZCxfvpzIyEhKSkqorKxEr9ej1Wq5cOECX331FTt23O1+vvnmm4yO\njnL58mVmZmbw8vJibGwMk8lEbGwsiYmJGI1GAgMDaWlpoa6ujp07d+Ll5UV2djZqtZrh4WFWr17N\n5OQkJpMJuJuB7Nmzh6KiIvz8/JiYmEAmk3Hu3Dm8vb0pKSmhpqYGkUjEyMgIUqmUyclJkpOT8fX1\npbe3l4CAANatW8fc3BxyuRwAlUrF6tWrGRkZobm5maamJgBSUlIIDw/H6XQSEhKCp6cnZ86cIT4+\nnoiICKKiogBITEykubkZvV5PdnY23t7ehISEUFBQwH333YfNZiM1NVXYcfz9/VlYWGB0dBR/f3+G\nh4fp7u4WuBqz2UxPTw9qtZq0tDSio6MJCwujrq6O7Oxsenp6qK+vZ+XKlaSnp9Pb28v09DQKhYLI\nyEhWrVol1NFzc3NCPb9q1SoSEhJQKpWMjo4yMDDA1atXAcjOzuaZZ56hubmZr776ioaGBs6fP8/c\n3Bw9PT3s3r2b733vezz11FMEBgYSEBAg/C0rK4u8vDzWr1/PI488QkZGBlKpFACNRsPOnTuJiYnh\nF7/4BVeuXCE5OZnGxkbi4+NJS0vjgQceQK1W84c//IHf/va3qNVqDhw4wHvvvUdsbCweHh643W6e\nf/55gQcyGo0UFhZy/Phxtm7ditPpFHiTp556ij179rBmzRpCQkKoqKjAYrEQFhZGc3MzZrOZiYkJ\nbt26xfz8PDKZTHh2d+7cycLCAkuWLEEqlZKcnIxIJKK0tBSVSkVnZ6fQ7erq6sLlcuHt7c3Fixfp\n7u4WumOLvNjw8PA3X4//1QX93wGRSMSHH37I1q1bCQkJISMjA7VazcLCAiaTiZ07d1JbW
4vFYuG1\n114TyDQ/Pz+MRiOrV6+mqamJ7OxsMjIyKCwsBGDHjh2Eh4eTn5/PzMwMs7OzlJeXC2RRTU0NY2Nj\nQjpcW1vLyMgIOTk5zMzMcOvWLQwGA01NTSwsLFBWVgaATqejvr6eLVu2sHz5crZv347NZmPjxo30\n9/czMDBAYGAgcJeIWr16NaGhoej1erZt20ZOTg4ymQybzcZLL70kpJrNzc28//77ZGVl0d/fL7Q0\n3W43FotFSL+lUimbN2+msbGRGzdu0N/fT2BgIENDQ/j6+tLS0kJbWxsKhYJjx47R0tLC0NAQ8fHx\nxMXFsWrVKvR6Pc888wybNm0iPDwcq9UqPJiZmZl0dXURHh6O2+2muLgYq9WKr68vPT093H///QQH\nB6NSqVi6dClyuZzW1laMRiPp6enExcUhFotpaGgQ2sjR0dHIZDLa2trQ6/VCWRcdHc327dtZvXo1\ncDcLO3v2LK2trZSWllJdXY1Wq6WyspLq6mpu3bpFYmIiu3fvRiQSMT09zcTEBJmZmYyOjhIdHY1e\nr2d8fJzPP/+c4OBg4G4aHxgYyKlTp9ixYwcWi4WkpCSys7NRqVSsW7cOiUTCwsICMpmMxsZGBgcH\n2bNnD6mpqXh5eVFfX8+dO3dYt26dwIN5e3szPT3Nww8/zMjICDt27GBoaIiAgACBAxodHcVsNmO1\nWlm1ahVeXl40NTWRlZXF/Py80GW6cOGC8Lmenp7s3r2b48ePU1NTg1gsRiKRsHbtWuFeL5LxN27c\n4PTp03R2duJwOPD09CQlJYXk5GSSk5OZmJgQNrRvAvH+/fv/66v6v4hXXnllf2JiIjKZjPLycr76\n6iuio6PZsmUL+fn5vPzyy/j5+eHp6YnRaKSsrIyAgACysrIoLS2lo6OD3NxcOjo6OHbsGNPT09TW\n1hIXF8fMzAwAMTEx6PV6oqKi6Ovr4/z587S0tCASiXC73SQnJ5Ofn095eTlDQ0PU1dWxdetWent7\nWb58udDzv3HjBqtWrWJkZISpqSlu3LhBT08PZWVlaLVaHn/8cc6ePcvTTz/N2bNnOXLkCG+//TbL\nly8nLy+P1157jdDQUKHHXFlZSWpqKl9//TXPPfecwMLPzc0hk8kICQlhbm6Obdu20dPTg0QiweVy\nUV1dzfj4OMPDwyQmJqLVamloaKCsrIzZ2VmBK3A6nUIf22QykZWVhaenJ8nJyVRVVWG321GpVGza\ntEnQI8zOztLX18fZs2eJjo4mLS0NPz8/BgcHhXZrU1MTTqcTh8PBxMQEnZ2dgj4gOjqapqYmRkdH\naWpqQqfT0dLSwtatW1GpVAKn4uHhQX9/PzabDbFYzNmzZ8nKykKn05GWloaPjw8ymYzg4GCamppQ\nKpUsLCyQlJREW1sbDoeDw4cPYzKZ8PT0ZOvWrQQEBFBSUsKtW7cICgoiMzOTd999l7/7u78DIC8v\nT9A3DAwM4HQ6iYqKQqPRcObMGSoqKjh16hT3338/K1as4E9/+hMymYycnBzCw8MFvsdgMHDkyBH0\nej0ymYzh4WF0Oh12u53w8HDCw8MJCwvjyy+/xGq1Mjc3h9FopL29nY6ODnx9fYUWpNVqFf5vYWGB\nmzdvCtdtsVgIDQ3Fbrezfv16ZDIZABKJhNHRUaanpwkMDGTbtm385je/wWQyYbPZCAgI4KuvvqK4\nuJigoCCio6MpLS0d3L9//zv/2Xr8TmQK/v7+ZGVl4eHhwdjYGAaDAbFYjFqtJjU1lenpaWGHa2xs\nJCEhgffffx+Xy0VeXh4jIyO89NJLNDY2/lV7KywsTBCIXL16FY1GQ0lJCR999BE9PT04nU7i4+PR\n6XTcvHkTuFvLXb9+HQ8PD3p7e4mPj8fpdDI4OMhjjz0GwMzMDEqlEpvNxvbt2/nkk0/Iz89nw4YN\nQr136tQpTCYTRqORZcuWsX37dhQKBQsLCzgcDtLT0/H09MRgMAhEY0dHB2lpadTV1aFSqXA4HAQF\nBWE0Grly5QoajYaGhgaWLVvG1q1bycvLQywW4+HhgU6nQ6/X4+HhIXAX9913H2q1mpiYGKFlOzc3\nR1FREX/+859paGjg8uXLPPfcc3R3d2O324VUPCkpiZycHIxGI/Pz88TGxgrf3WQykZqaitPppLu7\nm8DAQDZt2oS3tzeDg4P09fWxfv16fHx8SE5OFmpquVxOV1cXoaGhGI1Gzpw5Q0BAAE6nkzt37gCQ\nlpYmEHXh4eE0NTUJn5WVlYXb7WZycpLMzEwiIiLYvXs3brebkZERrl69SldXF2azmby8PAIDA/ns\ns88AuHDhAq2trRw/fpyPPvpI4IsWa/bh4WG6urrw8vLC4XBgMBjo7+8nJSWFXbt2sXbtWqHrsXfv\nXt58800AgcjctWsX8fHxmM1mLBYLvb29HD58mKGhIex2O+Xl5cjlckFI1NnZid1uF8pZqVRKQECA\n0E6/fv06P/zhD9m3bx/Lli0jOjqa69evC5mJ0+nk888/p7KyUtB7qNVqzGYzS5cuRSwWk5OTQ25u\nLtPT02RmZn7j9fidyBQOHDiw39fXl7a2NmJjY3G73WzYsIGOjg7MZjNeXl4MDAwwPz+Pj48PDQ0N\nrF27FpPJxPHjx/H19UUmk6FWqwkPDycyMpJTp07x2GOPERoaypIlSzAYDMTFxREYGMiJEyfYsGED\njzzyCA899BDPP/88mZmZhIaGEhsbS1ZWFk6nk+bmZurr6xkYGODJJ5/k1KlTFBUVkZSURGpqKlev\nXqWyspLVq1fj5eVFREQEUqkUm83GmjVrkMvlFBQUCMGpra2N27dvMzY2xvLlyxkdHWV8fJwHHniA\nt956iw0bNmA2m/H09MTX15fU1FQqKioYGhoiJycHu92OTqejvb0dpVJJRUUFsbGxSKVSpqammJ+f\np7KyEo1Gg0QiYXx8nLa2NmZnZ4mNjUUul+Pt7U1tbS2xsbFMTEzwxz/+kWXLlrFr1y7kcjlqtRqF\nQkFgYCDV1dU4nU6Sk5MZHBykra2Nqqoq9Ho9TqeTmJgYWlpa8Pf3x2w2MzQ0hEQiwd/fX1As5ufn\nC0Fvfn6e8PBwOjo6MJlMWCwWtFqt0G0oLi4mLCyMtLQ0wsPD+fTTT8nLy8NqtVJRUYGHhwcbNmzg\n5MmT2O12gfDNzMwkPDycmzdvCoRvWloa3t7eApF47Ngxjhw5wqpVqygvL2fnzp0CRzIwMCCQpc3N\nzTz55JNCeWO32ykoKMDPzw+Hw0F7ezsLCwssXbqUL7/8krCwMJKSkhgbG2N8fByHwyEEc5vNxsDA\nAL29vSiVShITE8nLy2N8fByxWIyXlxdyuZzt27czPT3N0qVLuX37NtXV1fzgBz/A5XJRXFwstMsX\neYwbN24gl8vJz8/H7Xaj0+kwGo1CiaxSqRgcHGRsbIyAgADkcjnl5eU0NDR8o0zhf0zR+O/h5eUl\nyJlNJhPJycm8//77DAwMYLPZ8Pf3p7GxkdDQ
UORyOQaDgaCgINLT0wkODsblctHS0sLAwACZmZlU\nV1cDd4UwDQ0NhIeHExISQk9PDw0NDbz44ousXbuWwcFBiouLefPNN5mYmODYsWM4nU6ys7Ox2+0C\nSTY7O0tDQwOTk5PA/0vohYSECLuon58fCoWC1157jZycHIqKipDJZFRVVQF3JdweHh4EBQUREhLC\nZ599JqSOR44cAe4SriMjI4Ki8vz588K1XLlyRVBBpqamCvVoQ0MDMTExTE9PExwcLMh129vbsdvt\nhISEEBMTQ2FhIUajkaKiIubn5/n66685evQoa9eupa+vj2PHjmEymUhMTESn03H69GnOnTvHAw88\nIJRzixqLgYEBzGazkJWp1WoKCwsZHx9n9erVAjv/xRdfMDMzw6pVqwC4c+cOkZGRVFVV8fXXX9Pf\n38/8/Dy+vr5MTU0BEBISwhdffIHL5WLZsmWMjo4KnY2AgAA8PDwEJd/o6CjZ2dmEh4djt9sFJn52\ndpb29na8vLy4desWADKZjPvvvx+NRsPmzZuZnJzkww8/JC4ujubmZkGt+bOf/Yzx8XGMRiOxsbEM\nDg7S0NBAQkICLpeLLVu2sG7dOuGeJScnc+3aNTQaDbt27aK6upqZmRlUKhXBwcHU1NSQlpaGw+Eg\nODiYwsLYeL3GAAAgAElEQVRCVq5cid1uF0hWp9OJ1WrF5XIJHEh7ezuRkZFERkYKGWZsbCz9/f1I\npVK0Wq3wLH744YeCLmXfvn2o1WqOHTuGWCwmNDRUeG6/Kb4T5cPU1BQajYakpCTCwsKIj49nw4YN\ngqb8iy++oKqqitDQUEH2aTAYmJ6exuVy8f7772Oz2XjwwQfx9PRkYmICgNbWVgoKCsjOzsbLy4uw\nsDBEIhHNzc3I5XJu3LgB3BUNvfvuu8hkMlpaWgRlo0ajQaFQsGbNGjo7OwWRUWNjI/39/YyPj7Nx\n40bi4uKYnZ0lKiqKwMBAnE4ner2e69ev88orr9DT04Pdbic4OJg1a9awdu1a4uLiGBsbw+12Y7fb\ngbsirri4OKGfLpPJ6O3tFUqH+Ph4oqOj2bVrF1arlaGhIbRaLT4+PvT393P58mX6+/uxWq386Ec/\nwmAwMDU1RVNTEwaDQeAgFlWeNTU11NTU8MEHHzA1NcXSpUuZnZ0lNDQUX19fVq5cyezsLJOTkwwP\nD+Pv74/D4UAul+NwOFAqlYSGhrKwsEBISAgajYa5uTkKCwuxWq14eHjQ0dHB4cOHBcFUamoqxcXF\ngnJz0X+Sk5MDQHR0NJGRkfz4xz8mLCyMmpoavv76a8LDwykvL+f69etMTEwQGhoKQG5uLrdv36aw\nsJCCggK8vb158sknkUgk3L59m23btgFQWlrK7du3USqVpKWlIZVKhRJkxYoVPPjggwQHB+Pr68uX\nX37Jxx9/THt7O4WFhdy8eZOKigpBLPXOO+9QUlIC3PVVREZGEhcXR3FxMWKxGJPJhI+PDwqFgqVL\nl1JbW8v58+c5c+YMNpuNq1evCtmDXq/H19cXgJKSEkHR2NDQwMLCAiKRiO3btwttRz8/P0pLSzl3\n7hwOh4MrV66QmJiIVCqlpqYGqVTKRx99RHZ2NiUlJcLG+n9d98Hb25vS0lLq6+spLi6murqa9evX\ns2XLFp544gkcDgcJCQlcv36dxsZGIW21Wq0cP36cvLw8kpOTEYvFjI2NCbvzwsICdrud6upqDAYD\nEomE2NhY1q5di91uJzAwEJfLxfXr1zGbzQQHB+Pp6ck777xDR0cHbW1ttLa20tXVRVRUFA6HA4DQ\n0FCSk5PZtm0bdXV1dHV1cefOHebm5tixYwepqakcOnQIsVhMYmKioJZbZO8vXrxIXV0dMpmMzZs3\ns2LFCuBuvb9+/XqBNIyOjiY0NJSAgAACAwPx9/cXAlVzczMtLS1ER0cLKsLVq1fjcrlwOBycPXtW\nKDlMJhMymYzJyUmmpqbo6OggPT2dAwcOcPr0aZYsWcLzzz9PVFQU4+PjqFQqEhMTycjIwNvbG5fL\nhdlsRiwW09TUhFwuZ8mSJQwMDAhGppGREby9vbl9+zbXrl3DZDJRUFAgkGdVVVVCW21gYIAtW7bQ\n0tJCUFAQs7OzjI6OAnDs2DESExNxOp00NjaSmJhIU1MTH330kcD/LF++nMnJSVwuF7/73e8ICAhA\noVDQ0NDA4OAgGRkZZGVlCbU1wKZNm1i3bh23bt3i8uXLxMfHMz4+jqenJ0uXLkWtVjM3N0dXVxez\ns7OIRCLOnz+PTqcD7srnTSYTH3/8MS0tLWzevFl4foOCgoC7OpNF4VhLSwsBAQFCcFy5ciVZWVl4\neXkhlUo5f/48drsdh8NBWVkZvr6+BAYG8uijjwJ3xVZqtRqDwcCBAweYnZ2lsbGRkJAQnn32Waam\npqirqxN0CJ6enqxYsYLx8XHi4+Pp7Ozk+eefJy4uDolEwtKlS7/xevxOlA8LCwtkZWUxPj4usOu3\nb99Gp9Nx6NAhHnnkEUpLS3nggQd44YUXGBkZ4YMPPhDMJlqtFpFIhEqlwmKxkJqaSnl5OQ8++CDn\nzp1jy5Yt1NbWCgrB2dlZjh49KtSL8fHx7Nixg4CAACQSCXNzcyQmJlJQUIDZbEaj0fDhhx8K3oew\nsDCGh4cxGo14enrS2trKhg0bmJ+fp7a2lhMnTtDR0cFLL73E+vXrMZvNnD59moiICJKSkpienqa0\ntBSAy5cvo9frgbtKyUVH6NTUFOPj49TU1GC1Wunr60MulxMYGMjp06d58803+fjjj5HJZOj1ev7h\nH/5BIAglEglZWVlCet3W1kZtbS0PP/wwRqOR2tpapqen6e/vZ+vWrcTFxXHixAkyMzOZnZ2lpaWF\nmJgYDAYDra2tyGQy2tvbcTqdBAQEMDo6KqTdERERBAcH8+GHH9LY2Mgvf/lLFhYW+Mtf/sLnn3+O\nVqulurqa3Nxc4TvIZDLsdrvQUl4kAQF+8IMf4OXlRV9fHz4+Ply5coW4uDieffZZWltbGRwcZP/+\n/Rw9epSIiAj6+vpQKpVIpVKcTie5ubmcP38ek8nEmjVrhHt+/fp1tFot4+PjaLVaXnrpJXbv3i0I\nezZu3Mgbb7xBZWUlzz//PB999BFzc3Pk5eUxNjaGVColPj6e9vZ2cnNzefXVVwFoamrCbDYTGhpK\nUFAQcrmcsrIyXnzxRSIjI6mvr8fhcBAdHc3hw4fZu3cvIpFI0F8sZoMul4sdO3YIxOj+/fvZvHkz\n3t7eREZG8t577zE4OCgoWx966CEUCgXl5eUCh9HR0cHo6ChTU1MkJiaSmprKe++9x5IlS+jv/yvr\n0f8nvhNE46uvvrp/+/btJCUl0dzcjEgkQi6XExoaKuzgSUlJTE1N4XK56OzsJC4uTlDmabVarl27\nRkpKipDiL9a4Gzdu5OTJkyyariwWCz4+PoIbcdGB1tfXx8DAAENDQ8jlcnp7e2lubhaIs0WxSFFR\nEat
XrxZ4gUVybnx8nODgYCorK7FYLKxcuRKdTifcpOTkZPz8/AgODkaj0WA2m9Hr9UxMTNDU1ERN\nTQ2//OUvhd1jZmaG0dFR7HY7MzMzDA4OAnc7NYvkYUREBL6+vjQ2NlJTU4OHhwdLlixBLpdz+/Zt\ntm7dyoULF9Dr9Vy6dAmXy0ViYiIGg4GysjIGBwdRq9VMTEzg5+dHS0sLK1asoKSkBJ1Oh9Vqpb6+\nntHRUTw9PWlpaRFs0GFhYbjdbmpqatDpdOTm5iKVSlm/fr2g1/fx8UGj0ZCfn8/U1BS+vr6Ul5dT\nVlZGTU0Nq1evpqamhsnJSdatW8eVK1fIzs7m4MGDpKSk0N/fz5o1a/jd737H8uXLiYqKwmKxoFKp\niImJYWFhgfj4eAoKCoiMjCQ7O5v6+nqhzby42K5cucKePXuYmJjA4XAwNDTEpUuX8PLy4oMPPmBy\ncpKVK1eiVCrJzc3l7NmzqFQqwaqckJDA4cOHqampwW6309PTw+joKHV1dcTGxnL//fczMDDAfffd\nh1gs5vTp04JQbGZmBq1WS3BwMIcOHWLZsmXI5XJmZ2fJzc0Vyr+6ujokEgnp6emcPHmShx56SNCy\nTE5OYjQa0ev1QuclNjaWoaEhrl27JjiDtVotEomEwsJCgoOD6enpQafTYTabiYqK4vLly//3tCT9\n/PzQ6/VUVVWxY8cOMjIyhMBgMBiIiooiICAArVYr+NRPnjxJR0cH+fn5PP744xiNRu7cuUNZWRlm\nsxm4S0zduXNHcB0ulgznzp1jeHiY4OBgdu7cid1uF4hKuVyOUqlk+fLlbNiwAZFIRGVlJRUVFRQU\nFAAIIp/ly5cTEBBASkoK4+PjgrQ1JyeHy5cv43A4SE5OJjg4mFWrVpGcnMzHH3/MhQsXsFqtnDt3\nDrvdzu7du4G7Ii6TycSSJUuYnJxkYWEBtVpNcHCw4AaUSqXExcURFBREW1sbLpeLubk59uzZQ2Zm\nJj4+PgQEBLBq1SqhBLHZbGRmZgpOucVd0Gaz4e3tjdlsFjIOt9stlGBDQ0M0NjZSXFyMv78/Pj4+\ngoW6sbGR2dlZNm3aJDgily5disViwWq1MjY2RlRUFCaTifr6emw2GzqdjvDwcKKiooiNjeXixYsk\nJSURERGBxWIBQCqVsnPnTkH1GRQURHJyMnq9ntbWViYmJvjlL3/JmTNnGBsbE7iIoqIi/umf/knw\noajVarZv3y5IyHt7e+no6CA4OJiNGzfy7LPPUlNTg8FgwOFwUFBQQEpKitANkUgkOBwOqqqqOHDg\nAFu3biUjI4OCggKkUin//M//DNzVvzQ3N+Pj40NXVxfT09Ps27eP2NhYenp6WLJkCXCXQN2zZw/1\n9fVcuXKF+fl5vL29sVqt9Pf389BDD/2VyKi6uprZ2VnEYjFWq5WkpCT0ej3JycnU1NRQV1dHU1MT\nGo2G+vp6jEYjnZ2dDA8Ps337dlwuF0NDQ5jNZvr6+gRR1DfBd6J8WHSuLTrvqqurBQVXREQECQkJ\n1NfXU1dXJ5hLRCIRW7Zsoaamhs8++wyVSoW/vz8pKSm0tLQAsG3bNhoaGoR0fMmSJXh7e/PHP/6R\nGzduEBAQIPThX3jhBQICArh48SKtra3U1tYK7bTOzk4ee+wxPvzwQ+Du/IeysjJWrVrF+fPnSUlJ\nIS8vjy+//JJXX32V8vJy9uzZIxhVnn76ad577z1BFJWZmYnBYCAsLIz29nbKy8uBu4SryWQiOjqa\npKQkli5dytWrV/mbv/kbxsbGCAoKEpR1Dz/8MPPz81y7do3h4WFkMhkpKSnU1tbS29uL0Wjkxo0b\n7Nq1i0OHDqFSqfD29ubgwYNMTEywY8cOgoODKS0tJScnBy8vL65evSr4KXx9fYWSp7m5GY1Gg6+v\nL/Pz8zQ3N+Pv78+1a9eIiIhAJpMhFosFpV1XVxcFBQWC2m5RnlxcXIzJZEKv17NkyRJ8fX1xOBws\nXboUhUIB3GXdAwMDmZmZISIigpMnT7J+/XpBJXn+/HlefvllfHx8uHr1KsePH2dychJ/f3+MRiNR\nUVE0NDQwNzfHli1b2LdvH3BX7BMUFERDQwOnTp0iJiaGlStXCsN6Xn75ZTQaDQ8//DDPPPMMLpeL\n2tpatm3bRldXF6Ojo1RVVREcHIyXl5cQYKenpxkfHyciIoKioiJ0Oh1lZWWoVCquXLmC0WjE29ub\noaEhEhMT2bFjB5cuXaK3t1fY0RcHC6WkpNDc3AwgZB/h4eH4+PigVCrZsWMHg4ODdHZ20tbWhkgk\nEnxAi/qSRSWs3W7HarUiFosFL8g3xXciUwgJCeHLL79EoVAIJOPOnTtRqVT09/dz/Phx+vr6mJ6e\nZnp6msHBQcH27OnpSUZGhnDTFyWjAF9++aWQ9o+Pj9PY2MjAwACfffYZfX19mEwmzp49S2FhIe+8\n8w5Xr15ldHSUvr4+tm7dKrg2Z2Zm/orx7u3t5fvf/z4VFRUYjUYuX77MkSNHEIlEHDt2TOhbL6bm\nR44cYdmyZUxNTbFjxw4UCgWvv/46FouFyclJUlNTgbtE46K3YXHOwBNPPCFYshcnAS2qPxfLh+Tk\nZJxOJ2azmf7+foKCglAqlaxYsYKDBw8yNTXF5OQkcrmckZER1q5dyyeffMKLL77I3r17iY6OxmKx\nkJKSgtVqRSQSUVFRQXV1NWNjY6SlpXHr1i1aW1uZm5tDr9dTXV1NZmYmOp1OGOphMplobm4mLCyM\njo4OxsfHWbZsmWAKGxsbY8mSJQKZFxQUxNq1a1EoFLS3twN3SdzQ0FBEIhFTU1Pcf//9yGQy+vr6\nsFgs5OfnC3oMk8nExMQEt2/fJjIyEp1OR2FhIf39/UxMTLBs2TJBbh4cHMyVK1fw8fER9AfPPfcc\ndrud2dlZNm/eTElJiaBwTEhIQKvV8s477wiGpPz8fKET0dt7d7BYbGwsnZ2d+Pn5CQam9PR0UlJS\n+N73vofT6cTPz4+QkBCam5v5zW9+Q0VFBY2NjYyMjNDX14fRaMRms3H9+nWhfbs4EyMyMlIQ8wUF\nBVFRUcHo6CgSiQQvLy9GRkaYmZkRSrXU1FRBkh4WFiYEh0Vx3jfBdyIoTE5OMjIywsjICA6HgwMH\nDmAymQThjM1mo6ioiMnJSTQaDUFBQdjtdm7evIlCocDpdLJ69WoaGhqIjIwUSLytW7fy7LPPMjEx\nQWlpKSkpKTidTmJjYzEYDMTHx2OxWIiLi2Pz5s14enpisViErsKdO3cQiUQCKTY0NASAy+Vi//79\ngoQ6KioKqVSK2Wzmiy++oKioCLFYzMTEhDAWbGZmBrfbTWVlJTabDYPBwLvvvsvU1BRtbW3AXbfo\novZ+bm5OmPDU39+PXC7nzJkzXLt2jbGxMRQKBRcuXKC7u5uxsTEmJiZwu92kpqYKaszFrs1jjz1G\nYGAgN2/eJDw8nIGBAVatWkVFRQUXL16kubmZ6upqamtrBZuw
t7c3mzZtwmAwUFVVhUqlIisrC61W\ni6+vL5s2bRLKrUVBVVhYGE888QRSqZQtW7YwPT3NyMiIQOyOj48zPz+PQqEQgtzp06eZmZkR2PHN\nmzcL4p5F70ZsbCzd3d1YLBZh4S6O61MqlcTFxTE8PCxkHsnJyaSlpfGP//iPQovPYDDw9NNPs3r1\natauXUtycjKFhYUcPnyYwsJCAgMDyc7OJjo6GrVaTU9PD2vXrmXfvn0olUq0Wi3l5eX4+Pjw3HPP\nMTAwACBIxKurqwkMDEQqlWKxWIT5FwUFBTQ1NaHVamlqaiIiIoLp6Wlh4I9Wq6WkpITc3FySkpLo\n6uoC7paSMpmMP/3pT7zzzjs0NTVRV1eHXq8XrufNN99ELBbT39/P6dOnBVWjh4cH58+fZ3p6mtzc\nXCoqKr4TQ1b+jyCVSomKisLtdjM0NERoaCiVlZUolUr+8pe/oNfriY+PZ/Xq1TQ3N5OcnMzCwgKr\nVq2it7eXFStWEBQUJMwU/PTTTwG4ePEiTU1NlJWVkZubK9ijZ2ZmWLlyJV1dXczPz7Nt2zbBbHPf\nffcxOjpKa2sr/v7+wsNy8eJFYdcZHx9n7969tLa20t3djUwmY+PGjfj4+GAwGDhx4gQSiYT4+HiS\nkpLYvXs3vb29ghNv0Rij1+uF0WZwtyxJS0vD7XYzMzNDUlISFouFiIgIenp6EIvFVFZWcu7cORob\nG/Hx8SEiIgKn00l+fj4rV66kvLyc1tZWwQJutVp5++23mZqaIjw8nOjoaEE3sThBanEmhUajITMz\nk4aGBiQSicDNZGRkYDabmZ6e5t9PyVqzZg2Tk5O0tLQIZd/s7CwLCwscOnSIlpYWodOwqPA0m80o\nFArsdjsrVqxAoVDg5+fH5cuXAfj444+prKwkICAAh8NBTk4OVVVVXL16lfXr1+Ph4cHAwAB6vZ6E\nhARGRkYICQmho6MDh8NBTEwMc3NzwjUolUoAiouLuX37Np988gkmk4mjR49y6dIlCgoKUKvVlJeX\n88ADDzAzM8OFCxe4dOkS/f39zM7OEhwczMMPP8zs7KxgXV7MbEZHR1lYWKClpQWpVEpPTw8KhYLO\nzk6uX7/OtWvXhED/ve99D5VKxc6dO7FarUxMTPCXv/wFt9vNoUOHuHbtmjAgaFEHs2bNGvLz81Gr\n1Xz66aeUlpZy8uRJ+vr6eOmll4TNQavVChqS8fFxFhYWUCgUdHR0EB8fj6fnN2cKvpUR7/+nEIlE\n//MXcQ/38P9/VLjd7oz/7KDvBNGo0+k4evQo5eXlwiRgsVhMV1cXGzZsoKuri8bGRlpbW3nuuefw\n8/Pj6NGj6PV60tLShE7C4py9+fl53njjDY4fP84f/vAHDh48KFh5m5ubsVgsSCQSkpOT8fb2xmg0\ncvjwYWJjY/H19SU2NpaZmRlhN/Dw8CA+Pp633nqLo0ePcvXqVd544w1mZmbYtWuXYOf18fHB5XIR\nHR3NU089xU9+8hOGhoZYunSpQH46HA7y8vIoLS3Fx8cHX19fbt68yaFDh/j973+PVCplbGyMjo4O\ncnJy6O/vF4aFhIeHo9PpuHHjhiD9HR0dpbGxUTCGZWVlcfHiRUQiET09PWzYsIGNGzdSUlIiTJOO\niori+eef56WXXqKmpoaenh6CgoJYuXIlra2tDA8P8/jjj+Pj40NNTQ0NDQ1ERUXxb//2b7z22mtY\nrVYuXLhAQECA0AbUarXCznr79m28vb1JT09nbm6OjIwM6uvrhfIrMDCQ+Ph4rl27RnBwML29vcTE\nxLB//37+9m//Fp1OJ8wnTE1NxdPTk5/97Gf8+te/ZmZmhurqah555BE6OzsJDAwkLy8Pf39/gWGv\nqqoiKiqK4uJiXC4Xb7/9Ni+//LLwG5WWlrJv3z7OnTtHeno6lZWV3Lhxg71796LX64WBs4ujzMrL\ny4WSbHEakk6nQ6vVcvLkSerr64VMct26dUImpNPpOHDggHCOgoICrFYrRUVF/OY3v6G8vJzQ0FC6\nu7txOp0UFxezbNkyfv/733P48GGmpqZ44403ePXVV2lra8Pb21uwqSckJPD111+Tl5cnDO1Z7Hr8\ny7/8C6+++irnz59nYmKCyMhIuru7+e1vf/uN1uN3onyYmJjg3XffxcvLC7FYLEyl3bt3L0ajkdTU\nVHbu3MmxY8eEeYlbt24lJyeHzz//nImJCXp6egS11yJZo9freeSRRygpKRFajkuXLuXx/4e994yO\n8jzXto/RSKPeexmNeu+dIgkQohcDpjnGYGwnNjguO3HZDnYce8eJ49ixDcYBG2xsOgbTQSDRVVCX\nUC+ojHrvMyoz8/4gc6+dtd61t9e3v28tZ337/oMkW2IYPc/9XPd1nedxbtkiJLZDQ0MCTOHq6oqX\nlxe3bt1idHSU5uZmhoeHMTc35+zZs8Lzf+XKFZycnNi4cSNdXV1ERkYSFBSEQqHAwMCAyspKwdpz\ncXGht7dXkJ70NtaJiQnc3d3p7u4mPT0dgMHBQW7duoWxsTH/9m//RnV1teAtjo+PU1NTQ35+PqWl\npUgkEn744QdKS0vFhCY5OZmBgQGh2vTy8iI5OZkvv/xSyHknJiYoLy9n586d5OXloVAokEgkhIWF\nUVRUhKWlpZDm/uceyvDwsPj3ZmRkCMDM3//+d6anp7l58yYLFiwQPAoXFxeGh4dRKBScPn36n0Zl\nxsbGNDQ0CFGNRqMRIqPExESkUimLFi0SFO4NGzawfv16Xn31VWQymZhsPPbYY2IC09PTQ3t7Oy0t\nLQwMDAhsmf7I5+fnR0xMDN9//z2LFy+mpqaGmJgYYcLz9fUViHgPDw8ePHjAzZs3uXTpEjExMYIO\n3d7ezqFDh0Tf6sCBAxw7dozt27eTmJjIuXPnyM7OxszMjLfeeovIyEjq6+uZNWsWqampTE9P8+67\n73L58mV6enoEN1Tf69Jv9np5+nvvvcfU1JRga9y5c4e5c+ei1WqJi4vjT3/6E5GRkfz617/m/v37\nmJmZ8cEHHzA4OEh/fz8TExMC6vtT189iU7Czs2PlypXMzMzg4+ODra0t8+fPR6fTUV5ejoWFBS4u\nLly8eBGZTEZlZaVoqKxatYqQkBBhlpJIJAKGYmdnR2dnJyMjI/j6+mJqasro6Cg3b97E3NyctrY2\nmpub6e/vJzY2lr6+PqampjAyMuLHH38kLCyM+vp6KioqmJycFJ6KdevWkZCQIH4x9vb23Llzh4sX\nL4ociJmZGQHZNDU1paWlRYiVXn75Zfz8/MjMzEQulwsXob+/vzATff3115ibmzNv3jyMjY1xd3cn\nJCQER0dHfvvb31JVVUVsbCwzMzN0d3dTVVXF1atXCQgIQKlUcu/ePVJTU7ly5QoxMTEcOXKEqqoq\nQRrq6OjAxsaGW7duYW5ujpGREa2trWRlZfHw4UOhsrSxsaGjo4O5c+cSERHBxYsXCQ4OJiEhgcrK\nSuzs7Ojv76e7u5u
2tjYqKyuxsLBgenoanU5Hb28vMTExPP3009y8eRNvb29GRkZQKpXI5XKsra0J\nDAwUFmQbGxu8vLwoKyvj6NGj/OY3v6GpqYnnnnuOAwcOoFAoSEtLo6+vj/z8fGbNmsXSpUsFe6Go\nqAi5XE5FRQUVFRVMT08Dj1iK3377LY899pgABOsx7ePj42zfvp2WlhZsbGywsLAQIJmFCxdSVlZG\nfHw8MTExBAYGkpqaKliKYWFhfPXVV5w5cwa1Wk1ubi42NjbU1NSQlpaGTqfDxcWFBQsWsHv3bqyt\nrenp6SEwMJBXXnmF4eFhPvzwQwIDAykvL+fgwYMAgrFobm5OTk4OKpWK6upqBgcHuXr1Kl1dXbi4\nuLBmzRry8vKora3FwcGBAwcOUFFRQVVVlTCZFRQUsGbNmp98P/4sFI379+9/V6PRiOaR3kmXkZHB\n7NmzycjIEGwA/RFAH5qhf7q2t7cjlUoZGhpCLpdz/Phx/P39hbru9OnTrFu3jpycHEG5XblypcCO\nSyQSFi9eLPTv5ubmODs7I5FIWLhwITKZDCsrK86ePcvy5cvRarWC5qN/02dmZmhsbBQhLo2NjXz5\n5ZfI5XIiIiK4du2a8BIYGxsTFhaGRCLBzc2NkydPMj4+jrW1NRKJBC8vL1xcXMQGo1KpBCSlublZ\nXAzh4eGEhoZiZWWFnZ0dSqWSBQsWMDU1xeHDh0lKSmJwcJCIiAhCQkKYmpri6tWrwv+xYMECDAwM\nOHHihAhPcXd3Z2hoiEWLFmFubk5CQoJAyTs4OCCVSoUMWY+bq6+vZ3x8XLxODw8P+vv78fb25sSJ\nE0xMTDA+Po6NjY0wVp09e1ZsdAMDAxQUFPDMM89QW1vLCy+8gEKhwMzMjAcPHuDn54dSqaSnp4f6\n+noKCgrYsWMHZmZm1NXVcffuXerr6/nFL37BuXPniIuL46mnnmJkZITLly8LFoZ+svCf7eWdnZ0U\nFRWRmpqKgYEBpaWlWFpaiirv7t27+Pj4CDBKb28v7u7unDlzhoiICCFztrOzw8fHh5ycHKysrAQO\nPyAggLCwMD777DP8/f3FZvrNN98wPj7OiRMnxDHT29ub7OxsMcbs7u7G29ub48ePCz2M/riqh9Ye\nOXIEnU7H+++/z61bt3BychKmv9raWlauXMmtW7coLCz8SYrGn8WmsHv37nd/8YtfEBISwr1797h1\n67gF8dUAACAASURBVBbr16/H0NCQ7OxsZDKZ8DacOXOG7du3C/qQra2tEDbpqwf9hbBlyxYAkbCj\nh33o/x4DAwNu374t0oauXr1KaGioIPlWV1ezZs0aVCoVzc3NmJmZ8eOPP7JgwQIGBgawt7cXEtLJ\nyUny8/MJCwtjw4YNhIWF0dvbS2pqKqmpqVy9epVly5bh7u5OVFQUZmZmDA8P4+LiQn19PZmZmSQn\nJyOVSunu7uaJJ57AxMQEGxsbIcPWH096e3s5ceIEr7zyCnl5eaILr5f6njlzBgcHB5ydnenv76e/\nv5/m5mZGR0eprq5mx44dODg4EBkZSWVlJf39/SQkJLBt2zbmzJlDWVkZo6Oj7Nu3j+DgYLRaLeXl\n5cydOxcDAwOCgoIYHx8nKSmJiooKHnvsMTZu3EhNTQ3z58/H3d2duro6duzYwfj4OE899RQDAwOc\nPn0aFxcXsrKycHFxITU1lczMTAoKCtiyZQsnT57k5Zdf5ubNm/T19VFfX49SqWRwcJCpqSkhYbey\nssLPz4/Ozk4KCwvF6O7111+npaUFuVzO4sWLOX78OIODg9y7d4+TJ0/y8OFDEhISKCsr486dO0xP\nTyOTyYiNjWXx4sU0NjaSm5uLp6engMomJSUhk8moqKjAxcWFZcuWYWZmRn9/P2fPnuXJJ5+ku7tb\nYNquXbtGQkKCsKq7ublhb2+PtbU1np6eeHh4UFpaKkhRzc3N/OIXv0Amk+Hi4sL4+Di3b9/mjTfe\noK6uTjwYFixYwPnz54XNPCwsjPb2dqKjo0lNTcXe3h5DQ0OuX79OVlaW4E1aWloyMjLC448/ztdf\nf/2vsyns37//3aCgIDQaDREREfT19Ym+wMzMDPHx8cyfPx8LCwth5NFqtcjlcrRaLaamprz++uvk\n5uYSFhZGUlISX3zxheAr6s1Wra2tDA0N0dfXh4uLCw8fPsTHxwdDQ0Mh852cnBQKtGXLlmFsbExW\nVhZxcXG0tbVx5coVkpKSGBoaIjExEVtbW3Q6HYaGhjg7O3P79m1SUlIoLS1FrVYTGRmJUqmkurqa\niYkJ/P39mZycxNTUlNLSUmQyGffu3aO6upqkpCQUCoXQsHt4eKDT6Th//jwGBgYEBARw48YNfH19\nSUlJQaFQ0NTURElJCUuXLiU9PR1zc3MuXbrE8PAwY2NjTE9PExYWRkhICBERERgYGKBQKMjKymLB\nggXk5+djaWnJxo0b0Wq15OXlIZVKiYqKYmxsjImJCdrb27G0tMTExAQ3NzccHBx4+PChaHypVCpM\nTExITU2lsLAQnU6HVquloKCAjo4OTpw4IZSPGo0Gf39/7t69i0QiYcOGDcTFxVFXV8etW7fw9vYm\nLS2N1tZWIiMjgUfCI32ilN5+rRfruLm5kZ2dzTPPPINSqRQZDlevXsXIyAi5XM6FCxfYtGkTeXl5\n2NnZYWVlJY5y7e3tuLi4MDg4yMzMDIODgxgbG5OSkiJclHpc2rZt22hqahIQmq+++kpUo/qwntTU\nVBwcHEhOTubcuXM0Njbi5OREcHAwMpmMkJAQXF1dKSgoEA8aExMTysvLkUgkKJVKiouLsbW15c03\n3xTMib6+PmJiYpBIJAJ4a2VlRX19PQEBAVRXVzM0NERlZSVyuRwDAwO6urowMzPD1NSU7777jvr6\n+n8d70N/fz/+/v6Mjo7y/fffi1mxra2t4M298MILvPzyy6J739nZibe3N/v37ycvL48LFy5gZmbG\n+Pi44CSo1WrS09PJz88Xm017ezsPHjwQ3veIiAjKy8uF0aevrw97e3va29vp7e2lrKyMqakpnnrq\nKcrLy4FHJaiHh4fwt0skEvr6+qioqCAoKIi6ujrS09OpqqoSBB+dTkdWVhbffvstQ0NDXL9+HVdX\nV9ra2vD39wcePQH1jbHS0lLBANAz+tra2rCwsKCnpwcnJyfBGYiLi6Ozs5PGxkbu3bsnFIFSqZTN\nmzfz4MED2traOH/+PN9++y379u2joKCA06dPc+/ePfz9/cVsu6enh4qKCurr61m4cKGAlpqbmwtl\nXGZmJqOjo4yNjaFSqSgrK+P27dviHH/s2DEmJyfx9fXl1VdfZceOHeJzfd5lVFQUUqkUpVJJf3+/\naIT5+fnh4uIisj0zMjLw9/cXmg9vb2+KioqEgUmj0TB//nzBdpDJZHz33Xe8+eab4riivxacnJyo\nra3l008/ZWBggB9//FEc7fz8/BgYGEAikYhglomJCXx9fdm0aRPPPfecuPkv
XrwoLNlbt24V2ZRx\ncXFMTk4SGBjInDlzOHr0KH5+fmg0Gp555hnu378vOBRxcXGkpaWxcOFCkTWpfyjBownN+fPnSUtL\nE43kuro6TE1Nyc7OFklR5ubmKJVKYW3fvHkzkZGRGBsbMzMzw8jICJ2dnYSHh//k+/FnsSlYWlrS\n3NxMYGAg0dHR5Ofni660tbU1e/fuRavVkpycjFKp5PLly6JRtGTJElFmb9iwAWdnZ1QqFfCocWdm\nZiaQW6tXr2bFihW4ubkRHh7OokWLANiyZQutra1CAqzT6Xj22WfJyckhPz8ftVrNnj17hD7f0dGR\n6elp7t+/j62trUDP29jY0NjYKOzaW7duFfFhsbGx/OY3v6GrqwuVSkVMTIww0CxcuBBANEK7urq4\nevUqLS0t3L17F41Gw5YtW3B0dBQGq/r6euRyuVBnHjp0iO3bt4snkF7mOjMzQ0NDA56enixZsoQV\nK1YwNjYmhF0LFy4UcFd3d3fS0tKwtrbG2NhYdPIrKioE0Eaf8djU1ERlZaW4gWUyGXfv3iU2Nha5\nXM7y5ct5/vnnMTMzIy4ujn//939ny5YthIWFMTExQXNzM2vXrhVmMTs7OwCSk5O5evUq9vb2jI6O\n4uTkhJmZGTt37mR8fJySkhIKCws5cOCAQI3Fx8fT0NBAQEAAGo2Gd955h8zMTDZt2sSJEyeAR3Sv\nsrIygoODWbJkCbNnz+bbb79l7dq1jI6Osn//fq5fv46TkxM7d+4U1Z0+pau/v5+bN29y+vRpZs2a\nJTaxTz/9VDy49PLy1tZWbt68ySeffMLKlSvx8/Nj165dJCcnk52dzQ8//MD8+fOBR0aumzdvcuPG\nDfLy8oR4aefOnSQmJpKTk4NEIsHe3p6wsDAUCgWmpqaMjIwwMjLC0NAQ586dw9nZGRcXF/r7+1m7\ndi1jY2OsW7cOeLTB6D/+KetnsSnoZ/Ph4eFYWVkREREhSuXIyEgGBgY4cuQIg4ODeHh48PjjjzM+\nPs6PP/5IREQEmzdvxsLCgtHRUXJzc0XIS0BAAOfOnRN4eP3uamJiQnR0tCDt6uXVqamp9Pf34+np\niUajYdmyZYKxcOXKFZHPcOfOHXx9fQkPD2dkZASdTkd+fr6Q2DY2NpKUlIRKpcLT05PBwUFWr17N\n2bNnGRwcxNTUVEwc9ClJ8MjV2dTUJAAlMTExJCYm4uPjw8OHD2lvbycsLAwzMzOampooKiqiu7tb\njAfj4uIEq7K7u5uBgQG+/vprXnzxRYyMjDh8+DBWVlZIJBKWL1+Og4MDBgYGwq7e39+PTqdj48aN\npKSk0NXVJUatMpmMsLAwpqamsLOzw8bGBgMDA6qrq0WStL4MDwkJwc3NDZlMhrm5OXZ2dgwPD3Ps\n2DHRRK2oqCAnJ4eBgQEuXbqEXP4oevS7777DyMgIX19fIiMjRYjuSy+9xNjYGMPDw1haWorIN0dH\nRzIyMpBKpXR1dYmeh729PUePHmX9+vUAlJeX8+abb+Lg4EBHRwc5OTmcP38elUol7OV6QrheLaoH\nzF64cIG2tjacnJxISkrC1NSUqqoqADZu3Iivr6/I/VywYAHm5uYYGxvT2tqKhYUFSUlJtLa2Ul9f\nj52dHQsWLBBhwdbW1syZM4dXX32VrVu3CjXuyMgIhw8fZnx8XFSI7e3teHl5iQxS/Xu+c+dOHB0d\n0Wq1rFy5ktzcXP7617+SlZWFVqulurqa/fv/21ODWD+LnsJHH330rlwuF6QaLy8vnJ2dcXJywsrK\nisLCQuGC1Hsh9NFg8+bNIz4+nuLiYlQqFbGxsURERPDpp58SFRUlcg23bt3KxMQE/f39IntPqVQy\nNjYmdATJycl4eHiwf/9+8TN7enqIi4tj4cKF5OXlUVBQwJ49e8jOzhbMhsjISAoLC/Hy8sLDwwND\nQ0McHBxwcHAQWRIxMTF4eXmh0+kEjdfW1hZvb2+Gh4e5efOmgMkkJCQIG621tTXl5eVIpVKCg4NF\nZ7yuro7GxkZu3ryJj48Pvr6+eHt7o+/N6INyvby8MDExoa+vj7CwMCwsLGhra8PY2Jhf/vKXolzW\ny7M//PBDfHx86O3tFUcWOzs7PD09sbCwELi7jo4OEcWXnZ1NYmKiwLzr8ywbGxspLi6muLiY+vp6\n8vPzyc3NxdramtTUVCwsLFCr1bS1taFUKikrK+P5558nPDyce/fucf78eRYuXIi5uTl1dXUCpKJ/\nXxsaGsQmq1arefjwIfPmzSM3NxdTU1M6OjrQ6XRcvnxZ4NUVCgU3b96kpqaGX/7yl4JGrechmJmZ\nUV1dTV5enojAq6+vZ/PmzRw8eJCNGzeSmZmJTCYjIyNDbKzm5uZcuXJFbOh1dXX4+/szODjInTt3\nuH79OtbW1hQUFGBsbMzo6Ci+vr4cO3ZMuEKVSiVSqZSqqiqRBzk0NISlpSUajYaFCxdy4cIFGhsb\nsbKywsPDA4VCQXZ2tuB2/Pjjj+h0Ory8vEhMTKSxsZGQkBA8PT25dOnSv05PQc9iLC8vR6vVsmfP\nHhGM8ec//5nJyUkkEgnz5s2joaGBP/3pT2g0GlJTU8Wb2dTURFdXFwUFBRw/fhxANKLc3d05d+4c\nk5OTWFtbMzExgbOzMzExMSxcuJCIiAhSU1NRqVT86le/ElFtarWaZcuW0d/fz+7du4mOjgYesR9V\nKhWpqal4e3uLZmdoaCgHDhxAKpUyNjbGd999xxtvvMEPP/xAR0eHOMroLck3btzAwsKCb775BoDb\nt2/j7u4uNq1Lly6hUqkICQkRqjQLCwvgUbCpRqNh1qxZJCUlsX37diIjIzl79ixVVVXcu3ePyspK\nysvLGR4eFmwGveErLS1N3KCbNm2irq6OuLg43nvvPXJycgS5R5/P0NTUJCjKlpaWgu+QkJCAUqnk\n+vXr2NnZMTY2xsmTJ7l9+za5ubki2Le0tJSUlBRSU1Px8PAQTzkHBwfq6uoEiNTd3R2VSiUubBsb\nG9zc3KiurubUqVMEBwdTVVVFfX09ISEhxMXFsXr1akG/bm1tZfbs2ZSXl5OcnCwMRvosBr04a2Zm\nRnhdLCwsBKMyMzOTgwcP4uXlRVxcnJh+DQ4Osn37dhELoIehent74+TkhLW1NQcPHsTNzU0EtdjZ\n2fHDDz8wMDBAeno6s2bN4q233mLJkiXU1NQIm7iLiwu1tbVCl6K/J44cOcLChQtxcXHBycmJiYkJ\n1Go1Tz31FKGhoZw5c4bVq1czf/581Go1VVVVrFmzBi8vL9RqNdnZ2UxNTVFcXCxSsn/K+llsCnqN\ngKOjIwYGBoSGhiKVSrl27Rrm5uY899xzREVF4enpyRdffEFERASPPfYYJiYmgtPv7OzMnTt30Gq1\nolcwPT2NsbEx09PTAiqqFwI5OTkxb948KisrKSwspK+vj9LSUkFG1q+amhqxM+tBIProcJVKRV9f\nH7dv3xZjw/DwcGQyGSYmJsTFxbF
kyRLi4uI4cuQIe/bswdLSkiVLluDj48OePXuQy+U888wzwKPe\nysTExD9lIKjVaszMzMjNzWVgYICTJ09SUFBAQUGBsAbrsyoOHz6Ml5eXyISsq6tjZmZGMCpra2sp\nLi4mOjqa8PBwDA0NGRsbo7m5maGhIcEF0Fdsc+fOxcTEBB8fH2ZmZvDw8KCnp4ePPvqI3NxcjI2N\nKSoqQiqVEhsbK1iOISEhpKam4ubmhr+/P8ePH0en09HS0sLMzAxqtVpkRjx48IDBwUGRGK7RaMjM\nzKSnp4c33nhDOFABkfsA8Nlnn2FoaMgPP/zAjRs3sLa2FiCYyclJvL29mZmZEb0KuVxOVFQUt2/f\nxtzcnF27dhEUFMTU1JRgEpSWlmJlZcXChQuRy+W0tLRgbGwsNDMBAQGMjIyIyAB4pDzUv76ysjJU\nKhXHjx/nwIEDSCQS5syZw1tvvUV4eDj+/v5iihMcHExZWRmpqakUFxdjaGiITqfj1q1bAPT09ODo\n6Mif//xn0TwuKSnhwYMHfPbZZ0Krc+3aNX788Ufy8vKEAEtPe7awsEAikbB27dp/PZekPvfOxcUF\njUZDW1sb9vb2Yhx3+fJlli5dys2bN/n00095/fXXCQwMpLm5mZGREUZHR9HpdCxfvhy1Ws25c+cA\nxC/Lw8ODzs5Orl27RnNzM4WFhchkMrq6uigvLyc2NhY7OztsbW0pKipi2bJlwKOpiEKhICAgAIlE\nIt7YiYkJtFot8+fPp7m5GUtLS2JjY0VKkJ5ydPbsWSorK2loaCAoKIiYmBhu3LjBpk2byMjIoLS0\nlMLCQtFxfvbZZ5k3bx4eHh6sWrWK0NBQoqKi8PX1FYQlvX1YTyRycHAQfYiQkBAePHiAq6srpqam\nODs7M3fuXLy8vPDy8mJmZoaQkBAKCwvJzMwUjcQvv/ySOXPmYGtry/Xr18X5uqqqiqamJqRSKY6O\njjg7O4uEa33i1uLFi7GxsWFoaAgvLy9aWlqorq7m/v37LFq0CJVKhVwuF4QjPaEoKysLW1tbIiMj\n8fT05Pe//z0AhYWFaDQaZDIZarWayspKod5csmQJBw8exMfHh+bmZtavX09DQwNTU1PCIiyRSJiZ\nmSEzM5Ph4WEBXr1y5QpZWVlCzq7VasnIyOAvf/kLvb29NDU14eDggL+/P0FBQfztb3+jvLycefPm\nsW3bNoHjb2xsZHx8XNCRi4qKSElJ4fjx4zQ2NvLYY4+JzeDgwYPMnz+f7u5uPDw8mJ6exsfHB29v\nb7EZX716lbCwMLRaLffv3xeVYGtrK8888wyWlpYsX76ciooKfH192bZtGytWrGBwcBAbGxu8vb1F\nnunU1BRr166lra2NXbt2odVqGRoawt7eXljIf8r6HxmiJBJJMzAKaIAZnU4XJ5FI7IATgBfQDGzQ\n6XSD/9XPGR0dFWerOXPm4ObmJgAR+mSnL774gjVr1lBbW0tWVhb9/f3cv3+fyspKka585swZPv74\nY+7duwc8Gm9du3YNqVSKu7s7y5cvp7S0lLi4OKampmhsbBSpPMXFxWi1Wnx9fcnJySE6OpqmpiZO\nnjzJtm3bSExMFGEcMTExQi69YcMGpqam8PDw4Pbt25iZmYmGV3p6Op6enszMzDA8PExnZ6cQ7+il\nwPpRKMD+/fuJjY0lOzsbe3t7vL296e7uZnJykujoaDQaDbGxsdTU1Ii4tQcPHhAcHIyrqys1NTX4\n+PiI0nvWrFl0dnbi5OTEhQsXqKysZMWKFbS3txMUFMTGjRtZsWIFt27d4vPPP8fMzAxjY2NsbGwo\nKyvj7t27XLt2TWy8b7/9NoaGhhw+fFjImI2NjYUW38TEhFdeeQUTExOuXbvGxo0bxThOrVaL/o0+\nEfuTTz4RFGl9mK4eYNPd3c2pU6eYP38+2dnZhIaGEhgYiIuLC/Hx8dTU1HDz5k3Wr19PdHQ0AwMD\nYpx78OBBDAwMyMnJESPJxYsX097eLvpNr732GuvXr6e1tVVkdBw7dozExESeffZZrly5QmdnJ4GB\ngQKN39vbS3Nzs2BR6q/dyclJOjo6yMzM5Ny5c3z99dfcuXOH2NhYent7+fbbbwkODsbAwICvv/6a\nBQsWcObMGX7/+99z7tw5cSTJyMjg9ddf59tvv2VmZkZodCYmJsjMzCQxMZHY2FiamppIS0sTkwtb\nW1teeuklYQnYsGEDr732GnFxcfT39/Pdd9/x2muv/WSp8/8blcJ8nU4X9Z8smW8CWTqdzh/I+sfn\n/+XSx8bpR0qurq5MTEzQ0dFBXV0dd+7cERhuDw8PxsbGsLW15e233yYtLY3Ozk6mp6fx8/MTDTiA\nlpYWDA0N0Wg0ZGdnc+LECRISEqirq+OLL74gJCSEwMBAUWabm5sL77/+2CGVSnnnnXf49ttvRa+i\nrKyM8PBwZs+ejVQqFZWKnlykpxTrI+4jIyOJj48X4af+/v7iXC2VSvH29n70yzAwYGJigoCAAJYt\nW0ZlZSW3bt1CoVAQFhZGZGQkzs7OODg4YG5uTmVlJfPmzaO1tZWCggLhKLWysqKqqoqCggKysrI4\ne/Ys1tbWLF++HI1GI5p2L774Ip2dnTz11FNMTExgZmZGTEwMJSUlxMfHi43TysoKJycnPv74Y5Gg\ntHbtWlJTU+nt7aWqqoqQkBDS09NFQtH8+fMJCAggPT0diURCaWkpHh4eourJysrC1dUVb29vOjo6\nRLWkz/rUj4izs7PF+3z37l02bdrE9evXCQ8P5/HHHyc5OZnp6WmOHz+OQqHgypUrwCMV66pVq4T+\nv76+ntmzZ1NWVkZAQADXr1+nsLBQbLw5OTmkpKQIrkJ3dzetra28/vrrIpNEL0LSsxDgkaciJSUF\nCwsLjI2NWbNmDd9//z2urq48+eSTlJeXEx4ejoGBAf7+/oyMjPD2228zMTFBTk4Os2fPRi6XM2vW\nLCFxhkcPtLt373L69Glu3rwpcIJDQ0OYm5szNTXFCy+8IARZet6GHsMvl8uZmJjA09OT8PBwAQT6\nKev/i+PDauDQPz4+BDz2332DWq3mxo0bIqC0qKiI+/fvY21tTUtLC3v37qWkpESgvPWzfmNjYxQK\nBc8//zyTk5MEBARw//59USoNDAwwOjpKZGQkJiYmSCQSCgoKuHv3Lv7+/hQXF4uyt6KigocPH7Jk\nyRIyMjJoamoSuoZdu3aRkpIimmH5+flkZGSIGbKVlRV79+7F1NRUhMC0t7dTWVlJVFQUn3zyCR0d\nHfj4+JCXl0dhYSERERG0tLRQW1srgkVsbGwwNDQU4S1PPvkkMpmMU6dOcejQIcbHx0Uehbu7O+vW\nrcPW1pZZs2bR3d3N5cuXmT17NufOnWPWrFniPYuMjESj0fDDDz9QVVVFX18fVlZWhIWF8Zvf/Eao\nLPW+Dz2OvKKigr1796JQKEhNTWVqaorOzk6Rjfjw4UNMTEz43e9+h42NDV9//TXBwcEiFt3P
z49t\n27YRGhoqjjmDg4O4ubmxdu1ali1bxv79+5HJZKLMV6lUHD16VPAG9VWjg4MDNjY2SCQSli1bhq+v\nr/CElJeXCyp3UFAQf/jDH4iJiWFkZEQYu/T5nytWrMDe3p7169fj7+9PU1MTKSkp+Pj4oFQqKS0t\nJTQ0lIaGBs6dO0doaCiff/45ISEhXLp0iZycHEEBA3jnnXdYt26duG5bWlpYv3491dXVhIaG8sIL\nLxATE4NWqxWTljlz5oh8j5qaGmpqasjNzSUhIYEzZ84Aj/iiERERzJo1Cw8PD1avXs2JEye4fv26\nmIyYmZkxd+5c9uzZw/nz54WmxcfHh5MnTwqnaFRUFEFBQT/5Bv6fbgo6IFMikRRJJJJf/uNrzjqd\nrvMfH3cBzv+3b5RIJL+USCSFEomkUK8Ca2xsFDd3eHg4U1NTPP3003z55ZesXr2aQ4cOCRtrXV0d\n33zzDSUlJczMzDA9PU1dXR2PP/646Ak8fPhQoMI2b95Mamoqn3/+OTKZjDVr1mBgYIBGo0GpVLJq\n1Sp6enq4f/8+aWlpWFhYiMZPS0uLIAwBQm5qamqKoaEhMzMzBAUFsX37dnF00acRabVaAgICqKmp\nQSqV8uyzz/4TgSgoKEhsNhMTE/T19ZGQkMDt27cpKSkhPT2dZ555hqioKDo6OmhrayMpKYnw8HDy\n8/M5cuQIf/nLXxgcHGR0dJT6+npCQ0NRq9VYWVmRmprK6OgoxsbG2NvbMzw8zObNmzl79iz29vY0\nNjaiVCqprKykoqKCtrY2li5dSkNDgxDDFBcXY2NjQ3h4OIGBgUxNTYnph1qtpqKigoMHD+Ls7ExN\nTQ1Xr16luLiYlStX0tnZye7duzl9+jRGRkaYm5uLmTzA3LlzheQYICgoiFmzZmFvb4+Hhwf79u3j\n888/509/+hP5+fmcO3cOpVJJTk4OGRkZqFQq4eDs7e39J0t7U1OTCGpZsWIF4+PjNDc34+zsjLW1\nNadPnxYErObmZpycnNixYwe1tbU0NDTw+OOPs3z5cjFRGBwc5M033yQ+Pp6EhAQAuru7iYmJYcmS\nJbi5udHR0cGNGzc4cOAAr7zyCg8ePOCjjz7CwMAADw8Pnn/+eRYvXixyK42MjMjPz6empobp6Wlx\n3NmzZ48wgYWGhnLs2DHhxdCPW/VM0/T0dP7jP/6DoKAgOjs7OX78uDje6P0PevflT1n/001hrk6n\niwKWAjslEknKf/6Puket4v8rVUmn0+3X6XRxOp0uzsDAgJKSEtzc3PDw8MDT0xM/Pz8RO3/kyBEm\nJiaQSCR88MEH2NjY0NPTw4YNGxgYGKCpqYm2tjbS0tI4deoUCoUCgPnz51NdXY2FhQU1NTWEhITw\nxBNPYGxsTGdnJwqFQuQV6hN3TExMhFtwYGCA1NRUkpKSxBMUEEYXPVK+pKSEhoYGvLy8RMd3yZIl\nqFQqcnJykEqlbNmyBY1Gg1arpa+vj7a2Njw9Pens7BQ9hZGREUJDQ5menqa1tZWoqChaWlrIzs6m\npaVFTE/a2tqoqKhAo9FgZ2eHRCIR4bhjY2PodDpUKhUJCQncuXNHhNH29/czMjLC+++//09p0vpx\nr0qloqioiL/97W8sWrQIX19fFAoFL7/8Mg8ePMDf35/6+npcXFwYGhrC1NSUixcvkpycjEKhEPoR\nPaJMo9HwxRdfYG1tTXx8PNu3bycoKAhzc3N+97vfCXny9PQ0zz77LPBoLOvh4SEamY2NjXh5VEcQ\nywAAIABJREFUeYnyffXq1QQEBGBpaYlcLsfX1xcnJydaW1sFi+HVV18VAFl9Wti1a9fIzs6moaGB\nBQsWkJuby7p162htbcXS0hIjIyPi4uJwc3Nj3rx5KBQKBgcHUavVmJubI5fLWbVqFWfOnBFjUXg0\n9j5y5IjgZ4SEhHDnzh0++eQTKioqyM3NJS0tTVS7bm5u+Pr6iu/XYwVNTU1FoC488vysXbuWjRs3\nUl9fz5tvPjqF+/v78/DhQ/z9/ent7WXHjh388pe/FGCburo68vPzkUgkDA0NoVQq2bt3r7jGfsr6\nH20KOp2u/R9/9gA/AglAt0QicQX4x589/+2LMDDggw8+QKPRkJiYSE9PDx4eHiKw09PTE7Vazfz5\n8xkbG8PPz4/09HQuXrzI9PS0iOret28fS5cuFQ3B7OxsqqurRV/h/Pnz+Pn5iWDUuro6kdZ7584d\nenp6hFlqeHiYiooKlEolb7/9Nv7+/kKFmJeXh6GhIcnJyXR1dSGVSkWmn42NDevWrSMlJYXk5GRC\nQ0Nxdnamra0NqVQqJKlKpZKioiIBCoVHFci1a9eE7PnQoUNMTk5y5swZLCwsaGhowN/fH0NDQzFK\nbWhoICIiArVaja2tLZOTk6IR19vbS1xcHIWFhbz00kuMj4/j4uKCt7c3DQ0N+Pn5UVNTI1KkLS0t\niYiIENWRXqV369YtgoODuXfvHvb29mRnZ4t4uiVLlgjOop56PTMzg62tLadOnWL27Nn4+fmxfPly\nqqurMTExobGxkYaGBvbs2SOIQl9++SXwCLZbVVXFhx9+iFwu57e//S21tbWsWrWK1NRUkpOTSUhI\noLOzk46ODioqKjh58iTm5ub09vaK/MewsDA0Go04S/v7+zNnzhw2b96Mq6srsbGxODo6IpFIRDDt\n7du3qa+vx9XVlUWLFjEwMCASrGtra3n48CGOjo6YmJgIrqa3tzcODg58/PHHPPHEE7S3t7Njxw6O\nHDnCypUrefHFF8nNzcXNzY3ly5fT29vLgQMHGBkZoaGhAVdXV6ysrIiOjiYtLQ2NRgOAVCpl3bp1\nzMzMoNPpaG5uxtbWlg8//JDVq1fT1NQkKpPCwkICAwMpKSnBwMBApFG/8sorREdHExQUhIHBT7/V\n/x9vChKJxFwikVjqPwYWARXAeWDrP/63rcC5/+5n2djYcP36dSYmJvj9738votX1MtuWlhYqKiqw\ns7Nj7ty5IsJLrzm/efMmtbW1PPvss9y4cUM0m5ydnYVNWC+iaWhoYN26dYSGhqJUKhkZGcHCwoL0\n9HRee+01YmJiyMvLw8HBgdDQUEHEHRgYEN6H+vp6HB0daW1tJSgoiISEBDZu3EhLSwuFhYV8+OGH\n7Nu3j6NHjwrqsaOjI3V1dSgUClEm6nQ6cQ6GR5kHMpmMkpISUlNTCQsLIzAwkLS0NJRKJa6urvT1\n9dHa2kpcXByRkZGsWbOGnp4eFAoFR48eFZr46OhoWlpahFLvvffew9XVlZmZGZ588knS09PZvXu3\niOn7+OOPaW1tJT8/n+DgYFJTU1EoFISHh+Ps7MzDhw9FOa7VaomNjSU9PZ3e3l4yMzOxsbGhv78f\nJycnbGxs0Gg03L1795/yOszMzEhMTKSlpYW4uDhSUlIoKiqioKBAgEX1Umx9TPzhw4cZHR3l0qVL\nODk5MTw8zEcffYRarcbOzo7R0VHUajU9PT34+fm
JiHe1Wi3CdeBR1XfmzBneffddBgcHKSsro66u\nDm9vb4G3KysrE+PY+/fv89RTT+Hq6kpPTw/37t0Tx8qLFy+SkZEBPKps9I1yKysrAgMD+eMf/4i9\nvT3BwcH88Y9/xMzMjDlz5lBYWMiXX37JyMiIqEBOnz5NU1MTPT09mJiYiDI/LS2NxsZGBgcHKSgo\nICMjg2+++Yb169dz7do1goODuXr1KoaGhtTX1wsfDjzKtpTL5WRlZaFQKDA2Nhamu5+y/ieVgjNw\nTyKRlAH5wCWdTncV+DOQLpFI6oGF//j8v1zDw8MsWrRIkJz1FmdLS0tkMhlr167FwcGBPXv2YG5u\njpWVFVNTU2i1WsHpk0qlHD58mKCgIDHrLS8vRyaTCTXYoUOHaGxspL+/n76+PpycnDh27Bh+fn7U\n1taKZlRDQwMzMzOiM29ra4uxsbGYEjz//PP88MMPYpTZ29vL2NgYnp6eWFlZCXT8ypUr2bt3L4sW\nLeLcuXMMDg5iYGCAq6srvr6+aLVazM3NRQCIu7s7zs7OaDQaVCqVCGw1MDAgIiKCGzduUFtbi4+P\nj4CzaLVawsLCqK2tZc6cOULdNn/+fIKCgoRTTi/+0otnrly5wuzZs4mLiyM4OJi1a9fS09ODUqnE\nxsaGEydOiESugoIC5s6dyx/+8AfMzMxEg+/ixYtoNBqMjIxQKpXY2toilUopKyvD0tKSd999F5lM\nxvbt23n77bepqKgQ6s3w8HAaGhr+ycsAsGvXLh4+fEhERATV1dWkpqbi6+tLb28vp06d4v333xeB\nP1VVVXR3d4vXkJOTg5ubG6tXr8bDw4OioiLS0tIABLHKw8OD7OxsIiIiqKur4/vvv6e0tJSHDx+y\naNEiJiYmuH//Pi4uLiiVSvbt20d9fT1z587lyJEj3Lp1i8DAQHEtmJubExoayq9//WsKCgro6elh\n3bp12NjY8MILL/DgwQOWLFmCnZ2d+FhP6yotLRXS+k2bNvHgwQMRNqzXSOgjDjUaDRYWFkIsVllZ\nyfj4uIhStLS0JDExEWtra5GipU/ebmtrE9XoT1n/S3P+3/W/6/8/61+H5qxQKNi0aZMIDdXjzW7f\nvo1CoUCj0eDs7CwSf69fv87atWuprq6mv7+furo6rKysiIuLo6mpicTEROLi4njrrbfYsmULDQ0N\nIh3YxMQEuVzO9PS0KN31TcL6+nqSk5O5f/8+Xl5eKBQKHjx4gIeHB0qlktTUVCIjI/nuu+8ICwvj\n7Nmz4ulSWFgopiDr16+ntLRUQGPq6upEGhAgnHTnzp3DxcWFhIQEUlNT2b59O4GBgQQGBjI9PS18\nEDqdjqNHj4rMyby8PAwMDAQN2svLi2PHjhEUFERUVBT5+fncv3+frVu30tPTI3Ikdu/ezeLFi8XX\nNmzYwLVr14iLi6OgoEDgu5544gmRu6HRaDh8+DAJCQk4OTmxZMkSDh06xLJly8jJyUEulxMcHIyz\nszMdHR2cPXuWDRs2YGtri4WFBcePHyc+Ph541KkPDAwUFYKLiwuAOBuvW7eO06dPU19fT19fH7Gx\nsRQUFKDT6cjIyOC9995jenpa/Lv1wrXBwUHKy8vx8vKira2NqakpAGJjY2lubmbXrl1UVVWRlZUl\niNdtbW3IZDLS0tIEm3FwcFD4X/Sy608//ZRNmzbh5+dHfX094eHhpKWlUVNTwzPPPMOtW7eEOWx0\ndJT29nbmzp0rLPd6gExMTAydnZ0ipEdPAWtvbxcVmZWVFa2trbz44ovs2rULe3t7cfTs7u4mMTGR\nK1euCOetXs+jp4Tpx7SGhoYolUpMTU2RSqXMzMzQ1NQkxp3/3fpZyJz1F3hgYCBGRkYkJyeTl5cn\nzo16rt6cOXM4c+YMcrlcILLgkeBFX2IaGxuLtKGAgAD27t1LbW0tlpaWLFq0CK1WS0lJCR4eHnh7\ne4uIub/+9a88/vjj1NfXC2NLdXU1d+7c4e7du7S0tAh4i729vZCZOjg4cOfOHaytrXFwcMDR0VHE\nuS9atEhwCtva2liwYAELFiwQkJXo6GgkEgl5eXkAbNu2jZCQEJRKJX19fezbt48LFy6g0+nw9PTk\n7t27vP/++1y9epX8/HxaWlooLi4mLy8Pd3d38vLy6OjoEMnT+fn5HDp0SLxeAwMD5HI5FhYWYkau\nd9DV1dURGxvL448/zunTp7G0tKSlpYXe3l5efvll7OzshKvPx8eH06dP09fXJ742MTFBcHAwJiYm\nVFVVUVtbyx//+EcSExMJDg7mq6++oqioiCNHjtDb28ucOXOYmpoSKVV6Sbre+NXX18ehQ4fw9vZm\n5cqVbNu2jZKSEr766itsbW358ssvycvL4/r16xQXF5Oeno6rq6tIEGtra6Oqqkp03f/+97/T1dVF\ncXGxGBeXlZVx6NAhDh8+zIkTJ8TP++CDD2hsbGTr1q1Chu7g4CCCb3/1q1+Jclyr1RIZGYmfnx9a\nrRZHR0eqqqqYM2cOKpUKjUYjQoz0oTXwqKlsZ2fHrFmzSExMxMDAgNbWVjGelkql2NraCuetj48P\nra2tKBQKfHx8sLa2xsjICKlUilqtZsWKFdTU1FBSUsLo6KiAAOXk5DAyMiL4DT9l/Sys0x9//PG7\nycnJDA4OEhwcLJSIjo6O5OTkEBkZia2tLdnZ2ZiYmBAfH4+DgwMajYaRkRHGx8fRarU4ODgQHx/P\n999/L1Rv4eHhwqTz6aefCk9+dHQ0bm5ulJeXi5v7wIEDTE1NkZycLLIl5HK5SPTt6+sTYbA6nY7N\nmzdz+fJl5s2bR39/PwEBAUxNTSGVSoW4Zc6cOdy7d09Ey09MTBAbG0tHRwdBQUGYmpoyPDwsmkde\nXl5otVokEolgHRgZGeHo6EhRURHPPfecCFK1sLDA0dFR5CPooaFqtRp3d3dmz54tJLRnzpzhhRde\nEBmaBgYGREVF4e3tLXIZVSoV7e3tTE1NCXtyVlYWk5OTQuBkZmZGYWGhuDj1CLK8vDxsbW0ZGhoS\n6jt9k04ulxMSEiLSr6KioigoKCA0NBQLCwusra0ZHR0VlZOlpaUQZZmYmJCTk4NWq8Xe3p7w8HAa\nGxuFsU2j0dDa2sq+fftoa2vjP1vw58+fT0lJCcXFxcjlcuLj4wkKCuLevXts376dyclJWlpaWLFi\nBb6+vsIT8eGHH6LVaunt7WVgYABTU1OeffZZqqqqGBkZYdGiRZSVlZGXl8e2bdvIysoS9nQzMzNk\nMpkQr5WWltLT04Ovry/GxsYiILm2tpb29nZOnTrFhx9+KFSUSqWS27dv4+Xlxdy5cwkJCeHMmTM0\nNjZSWVkpwmb1EuzW1lbs7OyoqKhg3rx5oiE+NDSEWq3m6aefBh71KEpKSv51GI0ffPDBu/PmzSMl\nJYWKigpaW1tF11gmk4kRl5ubG9euXcPS0hJPT0+cnZ3p6urC1tb2n44CarWaK1eusHfvXoHGNjMz\nIy0tjX
Xr1glc2eHDh/H29mbr1q2o1WpcXV1Zt24dXV1d9Pf3Y2VlJXbu6elpkbT8xhtvMDIy8k8G\nI73opLe3Fx8fH+RyOV1dXWi1WoqKioSSz8bGRqDfDQ0NGRwcxM7OjpMnTxIfHy/EQG5ublhYWLB0\n6VJ0Oh0SiYTMzEyBTAsJCaG/v5/KykqcnJxwcXFBLpejUCiwtbXFzs6Ouro6kVWg1xWoVCoGBwfx\n9vZmaGiI0tJSJiYmMDAwYGxsjM7OTpqamrC3t6e+vp5Vq1YRHx8vjgB6NmVLSwtdXV08/fTTnD9/\nnuDgYD755BOR6akXz9y4cQMPDw8GBgbo6uoiISFBpEzrBWeRkZE0Nzdz+fJl1q1bh5mZGe7u7sK9\namFhQXFxMZ2dncTGxuLn54eVlRVlZWW88MILuLm54ezsTGFhIUlJSZw6dYqXX36Z/Px8wsPDOXv2\nLB999BEdHR2C8KVvNhYXF2NiYiLCf/Qp0AqFAhMTEw4fPkxWVhYODg54eHgQHh7O6OgodXV1ImTX\n2tqasLAwjIyMGB4eRq1WM2/ePIKDgwVyrqioiLq6Oqqrq7GyssLNzY3GxkZ27tzJokWLcHBwwMrK\niqVLl/K3v/0NJycn3N3dOX/+PEFBQTg6Oorpgt5YVVpairGxMb6+voSEhFBRUYG9vT12dnb09PQw\nb948HBwcaG9vx9nZmatXr/6kTeFn0VPQPxX1O66RkREBAQF0d3fzySef8Nprr4kq4OTJk7S1tQkY\n5apVq5DJZDQ0NHDjxg3CwsIEk7+hoYHMzEwsLCyQSqU0NjaiR8mfOHGCkJAQQRK+ePEiV65cYWZm\nRtCFZDIZ5eXlvPLKKxgZGQk5ckVFBXK5nNzcXMbHx/H19aWoqIienh7s7OxobGxk0aJFjI+Pc+HC\nBTo6Oli6dCnOzs4YGBiIEaueh6BX882ZM4fKykri4+MFfy8/P5/Zs2eTlZXFunXr0Ol0ODo64uXl\nJdKuIiIicHV1xc3NjYMHDwqOhD5HUC86+vzzz4mJiWHLli0MDw+jUqlEMnJzczNTU1McPXoUHx8f\nUlNTsbGxwdTUFFNTU4yNjRkaGmLu3Lm88sor7Nq1iwsXLvDxxx+zbNkyysrKiI2NxdnZmb1799La\n2kp1dTWLFi1Co9Fw5MgREhISsLCwwMHBgaCgIOGC1F+48Khs1kvZvby8KC4upqqqipUrV2JkZERt\nba2A4FpaWvLVV1/R09PDkiVLSE9Px9vbm6SkJJHErG+knz17ltmzZ9PV1SXel7a2NpFANTg4SGho\nKIODg8jlcoqLi5k7dy6HDh2iv78fPz8/Lly4IPJC9dMHmUzGgwcPuHDhAitXrkStVnPp0iWGhoaw\ns7MjNDSU5uZm5s+fz7Vr19i5cydGRka4ublhampKZ2cnrq6uIgxGf5xdvHgxnZ2dJCUlUVJSIkaL\nrq6uQkeTnJzM5OQk09PTVFZWiowRmUxGaGgoHR0dTE1NMTo6KuTeP2X9LHoKRkZGdHV1CUlqdHQ0\ng4ODdHV18dlnnxEeHi6aUt3d3RgaGtLa2sr4+Dh9fX188803BAQEiHDTsLAwAL766ivs7e1RqVRE\nRkbS19fH0NAQx44dE0CQsLAwdu/ejbu7OykpKcL0UlFRIYRB+/fvp6SkRJz9H3vsMaytrUlOTiY2\nNhYTExMcHBxwd3fn5ZdfFjCPmpoafH19WbVqFT4+PgwPD1NbW0twcLCoJvSbDzziCOqTgfz8/HBz\ncyM6OpqOjg5x8ba1tVFXV8f58+dxcnLiscceE+fKnJwc4uPjycjIoKioSNCAdu7cyfr163nyyScJ\nDg7m66+/pry8HKVSye7du3nrrbfExmxpaUlmZiYvvfSSaOIuXryYvr4+oqOjKS4uprm5meDgYAGN\nzc/Pp6enh9u3b5OYmChMSFqtlsrKSu7fv4+JiQnV1dW0t7cLs5EeV3f79m18fX0BxEhYP0abnJzE\n1dVV8CN37NiBRqNh9erVtLW1oVKpUKlUVFRUUF5ejq2trSAkW1tbi5DcsbExLCwsOHLkCM7OziQl\nJTE1NcXy5csJCQmht7eX2tpaHB0dmZiYwMnJiebmZuFT0Ol0LF68WIyc9dfjzMwMKSkphIeHc+LE\nCaanp9myZYsIKNLpdKxYsYK5c+cSFBSEt7c3RkZGAtg7NjZGSUkJExMT3Lt3T4zTAwIChMq2s7OT\n+vp6pqam6Ovro6Ojg+HhYVHR5uXlUVFRwd27d5mcnMTY2BiJREJUVBRGRkZYWlrS39//k+/Hn82m\nIJVKCQgIYHh4mNzcXDQaDU8//TQJCQl4eXlhaGhIT08P165d4/z582KnzMnJEb/g8fFxnJychLTV\n19dXbBAjIyNMTU3xzjvvcP36dXx9fTE3N8fU1FTw8kJDQ4WzbOvWrSQlJVFXVyc6/PpN68cffyQw\nMBC1Wo1CoRApTWlpaQwPD5OSkoKvry8rVqygp6eHyspKlEolBgYGTE9Pc+rUKXp7e4XwSt8t7+3t\nRavVcvfuXVQqFbdu3aKyspKmpiZMTEz4P+y9aViUV7r++ytmirFknotinkRABVEccUBFcdZoYhIT\nTWLSbTpJd6addNLpTtLpTNuMRvN3R6NJjLMoRhRBkBlEBpF5nimKuSiGOh/Iu073Oef6b88+X7Kv\ns9cXseAqiqp3rXet57nv393R0SE6MUNDQxQUFAh1ZU1NDbm5uUKum5SURFxcHHq9nl27dnH//n3i\n4uKoq6tDoVAwa9YsTExMiIiIwNTUlN7eXsbHx7GwsMDBwUH4LaS7mZGREWlpaZiamvLRRx/x2Wef\nce/ePXHBFRYWsmrVKhoaGigvL6e/vx+5XI67uzuXLl1i/fr1PPTQQ3h6egqxk5T2JZ2TYfrsq9fr\n8fPzw9jYmL/97W889thjwpcxPj5Ob28vBQUF7N+/n+eff56FCxfi6urK+Pg4n3zyCRYWFmi1WmGq\ngunIeOk4ERoaSnp6OnPnzmVwcFDoTAYGBvjuu+8oLS3l4MGD5OTkcPfuXb788kthMBodHeXIkSMc\nOXIEgNLSUoaHhwkNDeUvf/kLfn5+5OXl8eWXX9LS0iKCa6TXkJ6ejkKhoKSkhKtXr6LVarGxseHC\nhQtER0eLAqaE3ZMmdWtrK/fu3cPLy0sAYk1NTcnKyhKmPm9vbwYHB8nMzKSyspJDhw6h0WhQqVT/\nAg76z8Zv4vhgbm5OdHS0yEUICwvj22+/RS6X09zcjEwmQ61Wo1Ao8PT0pK2tjbS0NF544QXkcjmt\nra3U1dUxb9487t69y9atW/n888+pqqriueeeo6GhgVOnTpGZmcnq1atxdXVFLpeTl5eHj48PhoaG\nbNu2jTNnztDe3o6VlZUw/kh5EG5ubqKSvWvXLkpLS4UKTyJJq9Vqent7BQdROnvOnTuX4eFhBgcH\nUSqV5OXl4erqirW1NU5OTiIYdXh4WBTiLC0t2bNnDxqNhh9++IE
rV64QGBjIunXrKC4uZsGCBYSF\nhYli69NPP01QUBBfffUV7u7u1NTUYGFhISLlJNKPqakps2fPxt3dncrKSm7dusWbb77Jyy+/jFwu\nJygoiBdffJGmpiZqamrIzMzExsaGWbNmERsby6VLl3B3d8fY2BgvLy/s7Ow4ffo0Tz75JHZ2dpw5\nc4aLFy/i5+fH6tWr+fzzz4mMjGT+/Pnk5uaKQBuNRoNOp2NsbIzk5GQOHDgATNvoJTKWlNvR2dlJ\nWFgYo6OjfPrpp0RFReHs7ExhYSExMTFiiy3Ztg8fPiyKvZKSz8TEBCsrK5YtW0ZOTg5TU1N88803\n9PT0sGrVKvbu3UtJSQkxMTH4+/sTExPD7du3Wb16NUqlkra2NqamphgbGyM1NZVdu3aRl5fH/Pnz\nyc7OZv/+/RQXF3P+/HlOnjzJtWvXhKgoMDAQQ0ND9Ho98fHxXLhwgdDQUBQKBSqViqqqKj788ENG\nRkYoLy8Xc6KrqwsLCwsWLlzIyMgIrq6udHZ20tvby+LFi4VNvqmpiW+//Zbf/e53wkFbWVmJVqvl\nwoULolb1oOM3sVOQqsBSOItarWbbtm1cu3YNQ0NDEYzS29tLRUUFubm5qNVqgctqb2/n5s2b1NfX\nM2fOHDIyMgDYsGEDd+/eJT09HTs7O2JjY+nt7cXU1JSzZ88SHh6OVqsVfAMbGxucnZ1pbGzk2LFj\nGBoaijtdU1OTeGNbWlqYmJigu7tbIMhcXV0pKCggLS2N2tpaBgcHkcvl7NixAysrKyoqKtDr9eJI\nIJPJyMrKoqKiQuxAHB0dcXJywtvbG1tbWzo6OmhsbMTa2hpDQ0NsbGyws7PDwcFBJD9JMWopKSmC\nY+Dj4yO0CGq1WmDrFAoFU1NTuLi4YGtrS0REBCqVitzcXPR6vUiGys3N5e7du4yMjPCnP/2J7u5u\nrK2thd1aSi5as2YNOp2Ob7/9luPHj6NWq6mpqaG3t1eoLEdGRggMDKSwsJCIiAiampoE9am/v58l\nS5ag0Wi4fv06AG1tbYJQLXkDGhoaRFTb008/jVKpxMLCAktLS4HC9/b2xsrKSjAPJQRfdXU1MM08\nvHnzJoaGhsyYMYOIiAgB0hkdHRXX3/379zl69KjwUMTHx7Ny5Uo+++wzjI2NcXFx4Y9//KPIqPT3\n98fFxYXs7Gy6urp44YUXKC0tRafT8frrr9PR0UFubi6FhYX4+/tTUFBAaGgog4ODaDQazMzMRMvy\n22+/FXUryacSHBxMV1cXy5Ytw8PDg4aGBmQymeCO3L9/n6tXr+Li4oKFhQVLly4Vhdj+/n7Mzc1p\naGgQr/dBxm9iURgaGkKlUjFr1iy+//57rKysGBoaws7Ojps3bzI8PEx/fz/BwcHi7KlUKrGysiI7\nO5vCwkL27t1Le3s7RUVFQira2dlJY2MjiYmJWFlZYWlpSWBgICEhISxYsIC2tjbs7e2pqKiguLhY\nbP/7+/tFz72trY2kpCSOHz8uAkBqa2vp6urC0tKS0dFR1qxZQ01Njfj5wMBAgoODsbOzE2xHpVLJ\n2rVrGR0dZcaMGbi6urJu3Tp8fHwE/2FkZISwsDBsbW3R6XTCax8SEsKcOXNEwXTNmjVYWloyOTlJ\nSUkJN27cYMeOHbS1taHRaDAwMBC5DWvXriUgIIDJyUmys7MxNjbm2rVrpKWlUVRUhKWlJRYWFgQE\nBPDGG28QGRmJXC7n5s2bGBsbc/PmTbq7u7l69SrXr19nYGCA1tZWbGxsuHz5Mnfu3OHDDz/EyWna\nIb9y5Uq++OIL3N3dhWhowYIFLFy4kLy8PJEcnpGRwWeffUZjYyMLFy4UactdXV0sWLAArVaLra0t\ndXV1As8mSeBbWlpIT08nMzNTSN0VCgVqtVrUakZHR3FwcBDHktmzZ7No0SLy8vLQaDTIZDJRu6mr\nqyM/Px9ra2vu3btHSkoK9+7dE4I5iWE5OTkpTHNS3//69etER0eLGEAJ+W9kZMTjjz8uvCxhYWF8\n8sknWFpaCuNaR0cHer2ezs5O/vGPf4ggZUCQq4OCgrC3t0ej0VBcXExXVxdyuRy9Xs/ExATx8fG8\n9NJL7Nq1i6GhIXp6esjNzRXX5+DgIAEBAdjZ2T3wfPxNLArW1tZiKx4ZGSm21ytXrmT37t1kZmYK\nlJezszOPP/44ExMT5Obm0t7eTnt7O/39/ezZswe9Xi+KNZaWluh0Ojo7O2lrayM/Px8LCwtSUlJ4\n/vnnsbS0ZPPmzXR3d5Obm8vBgwcFks3S0pKBgQHRZdi9e7f4wObNmydaVHV1dRgaGmIKWhvOAAAg\nAElEQVRubs758+fZtWsXarVaGJNqamrQaDTExcVRX1+PVqtl+/btqFQqWlpahKINEMEq4+PjeHp6\nCkHO5s2bRXrQzz//zNtvv01BQQFTU1MsWLCAmzdvcvv2bbFQdnd309PTw8KFC1EoFFRWVnL79m3S\n09O5d+8e+/fvF65PpVKJh4eHEEsdPnyY+Ph4tm7dSmdnpzgyHDx4kKGhIebOncsLL7zA1NQUy5cv\np6CggG3btjF79mxaW1vx9fUlODiY6Oho3nzzTT766CNu3LjBpUuXkMvlrFq1ir/+9a/is5WOUTdu\n3AAgKCiI8+fP88ILL2Btbc3o6ChBQUG0trby8MMP89Zbb9Ha2srixYtFwpKHhwcff/wx9fX1DA4O\nYmtrS1VVFcbGxqKmZGRkxODgIDt37sTQ0JC//vWvoq0tZTqoVCrWrVuHjY0N69at4+GHH+bxxx9n\n9uzZ3LlzRwjCTExMuHDhAgBbt25laGgIa2tr4Rjt7e1ldHQUIyMjWltbRaK5l5cXarWajIwMWltb\nBVKwra2N999/H6VSKTwgQ0NDGBgYkJmZyeXLl0lPT0etVuPq6kpaWhpHjx7l1KlT2NjYEBERwdTU\nFKampnz44Yd0dHSg1Wrp7e0V+RxS3epBxm9iURgYGKCyspLy8nKRnish1qWtoJmZGZcuXSIvL08Y\nX1Qqlfgw7e3tOXv2rECEw/ROISYmhsnJScbHx5k5cyYAxcXFeHt7k5SUhI+PD729vTQ1NQmOo0aj\nwc3NTfAN/P39afg1sh7g0qVLVFVV0dHRQU1NDa2trYyMjGBvb09ERAT+/v4YGBhgZGREd3e3qJFI\njsLa2lrq6+tpbGwkKiqKkpISAHp6emhoaKCyslIUJSVITHNzs4iMNzc3p7q6moCAAPR6PXv27OHZ\nZ5/F2dmZ5ORkiouLBSBGkm77+vqyZ88eIiIi+OijjygoKMDX1xcXFxe6u7tpb2/H2NhYQEClSL3M\nzExWrlzJ9u3bmZiYoKCggBdffJGWlhaKiop45plncHBwYPXq1bz88svExsaiVCpxcXHB0dFRxKZL\n2387Ozveeecd6urqkMvlTE5OCnMXTEN23d3duXbtmiAndXR04ODgQGZmJrNnzyYkJAQTExOhpfD3\n9yc+Pl4wCS0sLPD09B
SfDUB5eTkRERGkp6fj4eEh5NmSg/SRRx4hLCyMDz/8kJiYGORyOU5OTqSm\npjIwMEBMTAzl5eXs2bMHZ2dnMcmkzovUMpR+d2lpKbW1tVhaWjJ37lwmJibw8vIiNjaWgYEBwUQo\nKysTn5WxsbEQvdna2opIuKVLlyKTyQQ81tHREY1Gg729Pb6+vri5uQnnqY+PD/X19WRmZuLi4sKc\nOXPIzs4mOzv7gefjb2JRkIpfM2fOZHR0lPj4eLy9vTExMaG7u1vEpEnhr1KcuYQPi46OpqKiAisr\nK/Lz8zl27Bjwf3rzm5ubBZKstbWV1tZWwsPDGRsbEzqFnTt34urqysyZM9m1axf29vYMDw9z5MgR\n0co8ceIEAOHh4Tg7O9PS0sLGjRu5ffs2LS0tPP7444LReP78eezt7Zk9ezaDg4NCPanRaGhoaBD2\n47KyMoKCgoDpHZOJiQl79+7F0dGRW7duiaLY2NgYRUVFBAYGYmNjw+OPPy7Ow2FhYSKoxdfXV8i9\nBwYG2L17N+np6cTGxtLa2ioEWNXV1YyMjJCSkoKFhQW+vr7MnTuXFStWkJGRQU5ODsuWLROhsomJ\niaxYsYKBgQHee+89TE1NiYyMJCUlhYKCAgGHrampERZwW1tb1q5dy5IlS5iYmKChoYGYmBjxWsvK\nyujp6WFwcBCVSgVMW5xNTEyoqqrCwcGBpUuXipyFPXv2MGfOHBF5LyHcLS0tcXV1pa6uDq1Wy82b\nN9FqtVRXV/PVV18BYGZmJnZuFRUVzJ07F61WS2xsLLa2tty5c4eBgQHB1rSysqKsrEwsTBIz4w9/\n+APff/+92N1pNBpMTU2prKzE2tqa48eP89JLL4mdRWFhIWZmZuTl5REQEMDo6Ci+vr7U19eLo6JW\nq+XWrVsiARwQNxopXUuS3RsaGmJhYcHg4CAymUwwOBMTE9HpdPz+979n586dgnHh7e2NoaEha9eu\nfeD5+JvoPkge+JycHLy9vWloaODq1auMjIwwPj4uMhM3btzIL7/8wvDwMIsXLyY3N5fe3l4ByoiI\niMDOzk4AJQ4dOsSmTZt44oknxDkvJSWF999/n+HhYeRyOYcPH8ba2prbt2/T1NQkKrUDAwPI5XK2\nb9/OlStXqKio4LvvvmP9+vVUVlbi5+eHra0t3d3dLFu2jBkzZpCRkcHg4CCXL18mIyMDe3t7IiMj\nRdqUFMhaVVWFk5MT33zzDatWrRJ5h+Pj46SlpdHQ0EB4eDgLFy5Eq9Vy/PhxpqamiIiIwNjYmMWL\nF6NSqbh8+TIFBQWYm5uTmZlJSEgIUVFR4rmUSiUvvvgir732Gmq1mnv37hEREcHu3btFzaG5uZmu\nri6Gh4dZvnw5V65coba2lvDwcFJTU3nvvffEXTMzM5NHH32Uu3fvMj4+zo0bN+ju7iY8PJypqSkW\nLlzI73//exobG3Fzc2Pr1q3ExMSgUChwcHDAyMiIy5cv87e//U0ExG7dupXJyUnRMnNzcxOim8nJ\nSe7cuUNPTw8BAQEsWbKEK1eu8Oqrr/LQQw+hUqkYHBykvr6enJwcgoODRbyegYGB6NC8/vrr2Nra\nUlRUJLw0bm5uXL58mS+//BKNRsOcOXNoa2vDyMiIefPm4eXlxdDQEKtWrUKhUPDnP/+Z6Oho/P39\nGRwcFIU7a2tr7OzsmJqa4vjx40RERGBkZERoaCg//PADa9asISMjAxMTEwwNDdHpdIyMjHDz5k2e\neOIJQkJCmD17NsbGxgKKA9NF8r6+Pqqrq6mrqyMpKYnt27eTn59PWloacXFxoqCZmJjI3//+d5Yu\nXcrNmzcxNTUlKCiI+fPnc+fOHYyNjQkICHjg+fib2SlIZKL8/HwcHR0ZGBhgcnISvV4vQmNLS0sZ\nGhoiKChIhHjEx8dTXFxMUlISw8PDODg4sGPHDgDBAjh27BinT5+ms7OTpKQkEdGdkZFBdXU1VVVV\nLFiwACsrK7q6uggLC+PevXvMnz+fTz75hNDQUBYvXixah0uWLBGEaAm/dunSJWQyGRcvXhQFPImB\nqNPpGBoaYt68eSxatEiYWTZu3Ehvby/R0dHAdK7k3r17iYiIoKOjQ+jpAwMDiY+PF9t9vV5PU1MT\nCxcu5LHHHmNoaAidTkdHRwdVVVUYGRlhampKZmYmr732Gunp6SJSLzo6WsBL6+rqiIuLIyYmhtra\nWtLT01myZAlxcXF4enqK9uQ777xDS0sLzzzzDC4uLgKUGhsbS3h4OOHh4YSEhJCdnU13d7co5kZG\nRnLr1i1OnTqFo6OjCO5xdHQUi2ZpaSkdHR2iXuPh4SHwclIt6MaNG6hUKjw9PVEqlWJ7r9PpRAy7\nlC2xcuVKvL29hTxcCmyVCs1mZmbY2dmRl5fH9u3baWtrIzExkbVr16JSqXB1daW0tJSsrCyysrJE\n8vmaNWt48803CQgIEDxImOaAKpVKbt++TWJiIikpKfj6+pKamsrmzZsJDAzEx8eHuXPnihTzqakp\n4uPjycjIQKfTiSxMT09Pzp07B0wfUbu6usjLyxOtVqk1PTIyImTtjz76KP7+/qxYsYLx8XESEhJw\nc3Nj1apVlJWVERoaipWVFXfu3Hng+fibWBT6+/u5cuUKSqWSxMREent7Wb16NXK5nO7ubmxsbLh6\n9argAS5ZsoTw8HAuXryIiYkJ3t7ewh1WXV2NWq0Gph13tra2REVFMWvWLCEY6uvrE5VihULBX/7y\nF+RyObt27UImkwnFWEFBAWvXrhVYr/b2aR6tFCKj0+m4c+cO586dE3Hs8fHxLF++nP379zNz5kwi\nIiLw8/Pj3r175OXlcfnyZUEp6uzsFHgymN4xDQ8PU1RUJIqG1tbWTE1NUVRUxP/6X/+LEydOMDo6\nilqtpr+/H5lMxtNPP81bb70l/t558+bR09ODsbExMpmMJUuWUFFRweLFi7l+/Tq//PIL9vb2Io/i\n4MGDREVFMX/+fGEzl7wFPT09yGQy1q1bh1qt5tixY6KwVV5eLnZVX331FfHx8QwPDzM8PMytW7cw\nMzNj0aJF6PV6dDodpaWlwhz15z//GZVKRWJiIlu2bBFn3s8//xw7OzuGh4epr6/n3r17mJiY0NXV\nRVdXF5cvX2bNmjUEBATQ2NjI4OAgK1asIDY2FmdnZxFlX1ZWxsqVK8U1ZmBgIMjQhoaGTE1NMWfO\nHDZt2iRa1k5OTuj1eoKDg2ltbWXRokXcvn2brKwsbG1t6ezsRKfTsXz5ctEtGRgY4OzZs4yMjHDr\n1i0WLFiAsbGxCNeR/Cc6nU7kY6SlpYlcVGNjY5EbIflgYFogJ3VcBgYGRIQgTMNth4eHUalUlJSU\nCAze3LlzRYShk5OTYFZKmSMPOn4Ti4LUBZiamuL06dOMjIwIVeL+/fuxsrLCwcGBuLg4cbw4deqU\nSD6ScgJnzJjB7NmzRbDI4OAglZWVtLW1cfLkSZqammhqamLLli2sXr0ahULBihU
ruHz5sugXJyQk\nUFtbK2K8goODGR4eFsg3QASkSsEkEqvQ3d2dU6dO0d/fj16v586dOxQXF1NeXk5X1zSqUqFQCJpz\nUFAQDg4OAvM2OTmJRqPBwcFBuD+jo6Pp6+tjcnJSsCml3ISwsDDs7OzQ6XSEhIQQGhoq+IswfXe8\nfv26iD27f/8+3d3dwj5tYmLCJ598QnFxMSYmJnzxxRdkZGSwc+dO+vr6iIyMxN/fn/v375ORkYFC\nocDW1hYHBwcWL16MgYGBuHsvWbKEd999FxcXF1atWsW6desoKSmhs7OT1NRUERhsZmaGQqEQtYab\nN28KmjRMn/0lSrQkmzYzM+Po0aMcPXoU5a+xeIWFhTg7O+Pp6Sk4nM7OztTX1zMwMMDKlSv58MMP\nxQ5kZGSElpYWcnNzRfH4yJEjLFy4ELlcLqzgBgYGdHd3k5OTQ2dnJ6dPn8bd3Z1bt27R09PD8ePH\nSU9PF0e+wMBA9Ho9q1evxsjISMCHe3p6eO655/D396epqQm1Ws0HH3zAzZs3WbNmDdu3b+eXX35B\nJpMJ0phUcAcEw8HS0pKNGzfy1VdfkZ2dzejoKGNjY9jZ2XH37l3a2tr47LPP6OvrY3x8nPDwcPR6\nPUVFRVy4cIHU1FSRRfqg4zexKEiMPcmA4+XlRUlJCTNmzGBycpKOjg5gOlJMatVIduGxsTGhV5eg\nFdKiIG1LJa17WVkZZ86c4fz586Kg5+bmRnx8PHv27MHFxYXDhw+jUCiEKUer1dLW1saiRYtEbJy5\nuTmVlZWYm5tz9+5d1qxZQ2ZmJomJifj5+REUFMTo6Ci5ubmMj49TX1/P2NgYnZ2dmJubU1xcLDIq\njIyMhPfB2toauVzOokWLSE9PF6nOSqWS0dFRzM3NReDNM888I+zYt2/fFinWXl5eyOVyLCwsiI2N\nZf369eTk5Ag6cUxMDG5ubpw+fZonnniCBQsWEB4ejo+PDwUFBdTW1jIxMYGdnR1eXl7odDpxbJF2\nETU1NcIb0tjYiKGhIRcuXGDbtm0sXbqUhoYG1Go1bm5upKam4uPjI7waXV1dYiH+8ccfGR0dpaGh\nQSDTFy1aRFVVFQEBAbi6ulJWVkZxcbFos3p7e2NkZCTi7teuXcu8efMIDAykqKhIRLa1tbURFhYm\nFhsJFWdmZoaVlRUWFhbMnDmT8PBwlEolJiYm2NjYoNPpqKqqIjExEY1GQ2JiIq6urgLOIxX6nnzy\nSQDxPh07dozw8HCRaF1YWMizzz7LzJkzUalUqFQqFi9ezO7du9HpdJw7dw5TU1OcnZ3Jycmhq6uL\n7u5uEU8gFT/lcjmFhYVs3LiRDRs2kJCQgE6nw8LCAicnJ8LCwti6dSuWlpZUVVXx9ddfA9PHmrGx\nMfR6PXl5ecLy/yDjN1Fo1Gq1tLa2Ym9vj0qlEiQZY2NjCgoKmD17Np6enjQ3Nwuxj6urK0qlUij2\nbGxsRIz3M888w8cffyw850NDQ7S0tGBtbU1ycjJ1dXWUlZWxfv16ysvLGRoaIj09nSNHjnDw4EEB\nTYFpJdxTTz1FcnKyWJzS09Pp6urC398fjUYjaD4eHh5CLDM+Ps6CBQswNzdHoVAQEBAg/kaYbr0B\nnDlzRlhiOzo68PLyEqRlCaqh0+kEfKSrqwuVSiUq1ba2tixcuJC6ujohxjIyMhJqP2tra7y9vXF3\ndxcOwIaGBhISEvDy8uKDDz7Azs6ONWvWiAg0yR584cIF3nzzTf7t3/5N2J+dnJyYmJjAxMSERYsW\nERgYSEtLCzt37iQgIEDUMurq6sjOzkYulzM1NYWjoyNtbW3Y2NjQ0dEh6jNxcXFcuXJFyJHv3LlD\nR0cHKpWKoKAg2tvbeemllygpKaG+vp78/Hz6+vro7u7m5ZdfprOzExcXF3FU+fTTT1m7dq24FiQo\nrtRW7OnpwdPTU2RV/vzzz8TGxlJVVSUcigsXLmRwcJDAwECCgoIEB7SmpgYfHx/RCQKEsevtt98m\nNzeXrVu3curUKfbt24eXlxednZ3k5eVRWFhIQkICnp6eVFZWYmRkxJYtW2hubuYPf/gDk5OT+Pn5\nCf2Dm5sb+fn5qNVqduzYIejRZmZmtLS0cOTIEWGTLywsxN7entHRUVQqFYaGhoSFhdHf34+Xlxfl\n5eXiunyQ8ZvgKXzxxRd/jouLo6ysDEtLS5qamoiKiqKwsBArKyvkcjlqtVrk6g0NDeHj4yMktd3d\n3RgZGQkVXHJyMllZWTzzzDPClhwXF4darcbOzg4fHx8RPWdkZERhYSE1NTUkJiYKqaulpSWrV68W\nuPORkREaGxupra1ly5YtGBkZidCU2NhYWlpa6OrqQqfTkZGRQXd3Nzdv3iQyMpLh4WFMTExwdnbG\nwMCAqakprl27Ju7is2fP5qeffhLJQu3t7axdu1bQldvb2zExMWF0dFR4K3Q6HYGBgXz66acMDAxg\nbGzMunXrhOJQQoXPmDFDZF9Ktt/k5GQKCwspKCigu7ubu3fvcvr0aTZu3MiuXbtEfoCXl5cA046N\njTEyMkJTUxOZmZmYm5sze/Zszp49K4Jq3n77bXG8MDc3p6SkhHnz5pGQkEBra6sAjFRUVODh4YGT\nkxN37tzBxMSERx55hA8++IBHH30UDw8PYBrLrtfryczMpKmpiVWrVtHX10d5eTlRUVFERUWRl5eH\nmZkZP/74I62trdjZ2eHi4kJbWxu1tbUEBgZy7tw5nnrqKaysrMjNzaWhoYHvvvuOgYEBHBwcOHXq\nFJOTkwQGBuLh4SGKw5IlXApqkQrVhYWF5OXlCfSflJTd09NDRkYGTz31lJiUJSUlYtdUU1MjUrYl\nibXkQr127RpqtZrFixcLWpaFhQUymUxYqKUF9fjx41y5coXly5eLTotkTJPcwmNjY3R1daHVarG3\nt2dwcJD8/Pz/PpCVQ4cO/XndunUsWbIEe3t7XF1dGRkZobOzE41Gg5GREXK5nKamJmHwkHrIUnS8\n5JGXy+UUFRWRn5/Pyy+/TFpaGg4ODnh7e3Pv3j1ee+01wfWzsbHBxsaGsLAwDA0NGR0dxdXVlc2b\nNws4iK+vL1euXBFBMIWFhaxfvx6NRkNNTQ0qlYrS0lJ6e3uZOXMmU1NT+Pn5CTGOVqsV3gNpwvr5\n+YkquNTDzsnJYd68ecIUdfv2bWbNmsXo6ChdXV1MTU1hYmKCr68vMpkMT09PEQnW3NxMTk4O/v7+\nQhosk8moq6ujublZ7IxsbW25f/8+W7ZsYebMmfT09ADTVnUpSk4mkzE4OEhZWRkBAQF89913DA4O\nijtrVFQUOp1OLDSxsbEYGxuLgNePP/4YU1NTkdScm5uLp6enwNnt3buX8vJyxsbGsLW1xdHRkaGh\nIcbHxzl58qRA+EsCIEdHR6ytrfH09MTGxoa6ujoWL16Mra0t/v7+dHR00NbWJjo20oJeXFxMVFQU\nXV1dpKamIpPJhBguKCiIsLAwjIyM6O
joEMzOzs5OJiYmcHR0pK6ujrq6OpYuXcrZs2eZOXMmMTEx\n1NTUkJ6eTmhoKNnZ2Rw4cICwsDDmzp1LR0cHLS0tBAYGcuXKFYyMjITeRgqUcXFxEUYpqRsCiIDe\n6upqIVuXy+XY2NgwNDQkdAyTk5OYm5vz7LPPEhUVRXJyMmNjY8hkMuGEHR8fp6+vj8rKSoED/NXl\n+kCLwv/QnP9n/M/4/8/470NzViqVPPfcc0RGRpKdnc3q1au5cOECVlZWVFdXExwcLL7n4eGBt7c3\nJ06cICQkhNzcXFauXIm7uzsKhUK4BxMTE/n000+pr69n//79As3m5uaGmZmZwK9JaVRSS8zV1ZWB\ngQEeeugh3n33XR577DGKi4u5du0ajz76KLt37+aHH37g/v379PT04ObmJnIZSkpKMDAwoKKigjlz\n5pCTk0NoaCiBgYHcvn2b5uZmnnzySRoaGqirq2P79u0cP36cmTNnsn37dt566y2Gh4fx8vJi7dq1\nmJqa8tVXX2Fqakp4eDiXLl0CppWaktfB39+ftLQ0VqxYIVgUSUlJ5OTkMDAwQGZmpjgS5OTkMDY2\nRm1tLZGRkURHR3P//n3a29uxt7dHrVaTkJDA0NAQqampGBsbk5WVxfLly7GyskKr1aLT6bh37x7L\nli0jKCiI/Px83NzcUCqV1NbWkpeXR1xcnDC0hYaG0tnZSVpaGlZWVuzcuZPjx4+zYMEChoeHGRsb\nEyEvc+bM4Y033sDX11fAVf39/VEqlRw6dAiZTIaLiwtz586lpKQEMzMzAZ9taGjA1tYWU1NTLC0t\nCQoK4tixY8ycOZO3336bp556inXr1iGXy0W+B8Dp06ext7dn4cKFNDY24uXlxb1798jNzeWFF17g\n9OnThIWFCcmxu7s7FRUVNDQ0cPDgQd544w2uXr3Ku+++K1rZfX19IqWsqqqKoKAgsrOzCQ0Nxd3d\nnW+++Yby8nKeeuop5HI59vb2eHp6cvjwYZYsWcKLL77I5cuXsbOzIyUlhfnz59PZ2YmnpydpaWks\nWLAAhUJBQUEBCoUCDw8PTp06hb29PUqlUsjR7ezsiI6OFkeUBw2E+U0cHz766KM/Hzp0iJaWFszM\nzOjr6wNALpdTVlYmJJwWFhaoVCrq6urw9PRkwYIFuLm5UV1dTV9fnyj63Lx5UyQdSelGwcHBtLe3\nY2BgQEdHBzKZjKKiIoyMjDAwMECtVrNx40aMjIywsLAQ7Me6ujrs7OxYuXIl/f39XLx4kZiYGAHI\nDA8PR6PR0Nraio+PDytXrqSuro7bt2/z6aefkpycTExMDBs2bKC4uJjx8XE0Gg0rV64Utl8TExMB\nfnF3d2d8fBxnZ2cuXrworNYDAwN0dnbi4OAgsGhlZWV0dHQIm/Yvv/xCXFwcp0+f5plnnqG9vZ3Q\n0FD+/ve/Y29vT3NzM729vSxdupTx8XHxN0q2bKkI2dzcTHd3t5jcAwMDQiAlGX1CQkLQaDSEhoaS\nn5/PvHnzREKSUqlEr9dTUVHB1atXsba2FpDaEydOoNVqKSkpQavVMjExIezmaWlpAq5ibW1NXFwc\n77zzDv39/SiVSgC2bdvG119/zf3797G2tqa4uFhoAiQp/ODgoAhJsbS0JCMjg7Vr1yKXy/noo4/Y\nvHkzNTU16PV6Fi5cKNyFy5YtEzF+EixWyqmUAl37+vpwdHSkqKiIsrIyzMzMWLJkCYODg0IXcvfu\nXcFsbGtr49q1a2i1Wvz9/ent7cXPz084Yg0MDLC3t+fnn3/Gy8uLsbExbt++jUKhwNnZGVNTU378\n8UcGBgZIS0tj48aN5OTkcOvWLRFQPDk5KcjPjY2NZGdnEx0dTVtbGzExMTg7O3P27Flu3rz5QMeH\n30RLEqbVfObm5uTn5+Pk5CR06g4ODkxNTQHTld779+/T2dmJt7e3oNNIRRVJ0fXwww8DsHnzZlas\nWIGJiQkFBQVEREQI6KWUQGVoaEhsbCxJSUmkp6fT2tqKu7s7g4ODWFhYiMBZrVYrpK0Scl2ySsN0\nO7GiooJXX30Va2trnn/+ed59912efvppTExMeOWVV9iwYYOgPX399ddMTU3h6+srzvYdHR2kpKRw\n5MgR+vr6MDc3p7OzE7VaTVVVleBH9PX1MTU1hbe3NwYGBsTGxtLQ0IBKpRLZjBLF9/79+yxfvpy0\ntDSeeOIJ7OzsRMcmMTGR8fFx5s2bR2RkJJs2bSI8PJzx8XHhKly/fj0GBgaUlpYyOjqKmZkZmzZt\nQq1W09TUJBSntbW1FBQUkJ2dze3btzl27BhOTk7Mnz9f4M3c3Nw4cOAAKpWKDRs2ANNnaUkXAdNV\n9+bmZry9vfnyyy/54IMPMDc3p6enB3Nzc44cOYK1tTUTExMkJSXxhz/8gfj4eKytrdm7dy/W1tbo\n9XpUKpV4bTCtlJTL5Tz//PMcPXqU9vZ2goODOXHiBEZGRsyZM0fUPQwMDFi8eDEREREsX74cvV6P\nu7s78+fPR6vV0tPTw4IFC4DpVqe0a5AUtDt27MDPz4++vj50Oh1JSUmoVCq6u7upr69nZGSEkJAQ\ndDodBgYGAsQjZWUC7Nixgzt37lBWVoanpydLly7loYcewtjYGJVKxfbt20WXSKplVFdXC49KUFAQ\n8fHxoqW7Zs2aB56Lv4mdwmefffZnAwMDVCqVaNWNj49jaGjIoUOH+Pnnn/nxxx9JSEigs7OT/v5+\nWltbaW5uxs3NjY0bNwr5a319PTqdjiNHjrB8+XIuXryIg4OD0PhLwqPW1lax7Tx58iRLly7F3Nxc\nuNY2b94sJuX69ev56KOPcHNz4+rVqyxatEh0CC5fvkxOTg7GxsYikEOr1eLq6my6eJEAACAASURB\nVCounpSUFCGekVSaElpMIveUlpby3nvvodFosLa2FiInLy8vEhMTBZ1Ko9HQ1tZGcXExhYWFKBQK\n3N3dRZq1QqHAxcWF1atXc//+fcbHx5mamiIkJITk5GSMjIyYMWMGLi4uXLp0if7+fhoaGqioqODk\nyZPcvXsXlUqFi4sLFy9e5Nq1a7zxxhtC8+/i4sL169extLSko6ODqKgompqamJqaEqRnExMT3Nzc\nBBZfp9NhYmKCubk5FhYWHD58mAsXLuDq6iq2tIGBgRw+fFi4/Pr6+vD19UWhUGBjY0NVVRWLFy8W\nJOQ1a9aI44zEujx27BguLi4CiiKBR1JTU/n9738vvDVNTU34+/tz/Phx9Ho9OTk5TExMUFJSIoA+\nkhFPSrPW6XRCZGRgYICFhQUXLlxg7969jI6OEhMTQ0VFhSBnFxYWYmRkxNatWykpKaG3t5cLFy5Q\nVlYmWsTNzc1YWVnh6+uLRqOhpaWF/v5+rl+/jrm5OVNTU3h4eLBs2TKysrKwsrLi9OnT5ObmsmTJ\nEtLT04VGp6ioCHt7e9GFcHJy4vTp0+Tk5LBmzRra29v56aef/vvsFKTz/cmTJ1Eqlfj4+GBsbEx
U\nVBQHDhxg586dfPzxxyiVStavX8+WLVsIDAykoaGBqqoqLl++jFar5cSJEyJEA6apy08++SQVFRXC\nw+Ds7MyFCxcwMzMTUBa9Xk9dXR16vZ7o6GgsLS357rvv6OjoEC7LF154QSQ8OTk5sWnTJlJTU9m5\nc6fQDbi7u7Nw4UJgmpxjZ2fHTz/9RHJyMkqlku3btxMeHs69e/dEH/7EiRMsXboUmNZEbN26lY0b\nNwru3vnz50lJSeHmzZuMjo4yf/58bG1tBd/v3XffpbCwUMBopSSsgoIC+vr6xLaypqaGwcFB7t+/\nz/nz53nrrbdITk7mvffew9LSkhMnTqDRaBgfH8fJyQl7e3uxILW3txMdHc3du3cFFHVycpKnnnqK\nq1ev0t7ezuDgIImJiTg6OrJo0SIWLFhAUlISAQEB2NjYYG5ujqWlJWq1WuxOJMGOVqsVDAw/Pz/2\n7dtHe3v7v0B2dDodpqamwn6s0Wg4deoUp0+f5ttvv+XcuXMsXryYmJgYMjIyCAsL4+rVq0IZW1BQ\nQFVVFbW1tYSGhtLU1CRadgkJCZSWlmJubo6Pjw8mJiZikRkaGqKyslLciWfPnk1XV5eQOWdnZ2No\naEh7ezuGhoZMTk6KTIpr165RUFAg6Fxbtmxh3759TExM0NzczJo1a8TnYWJiQmVlpbBkS0K4sbEx\nampq+N3vfkddXR0JCQls2rRJpJ5LGRhSjmRFRQUWFhYcP35cJLBLeZIPOn4Ti4KhoSHp6emsX79e\n/AE2NjbU1NTQ1dUlrNQKhYJPPvmE0tJSrly5QlxcHGvWrCElJYUzZ86gVCrp7e0V2LT4+HjS09OJ\niooiJSVFeCTGxsaE43DDhg1oNBrBNpAIvtXV1VRUVLB27Vru3LlDRUWFaB8NDAxw5MgRMemnpqaw\nsbFh5syZHD9+HI1GI3Iovv/+ewChhpTyFUZGRjh16hTOzs4i4n58fFzEvtva2lJcXMyuXbsYHByk\nsbGRyspKXF1d8fX1xcbGhvb2dl5//XXMzMx47bXXBFlqfHwcR0dHFAoFmZmZ3L17lzlz5vDoo48S\nEhIijgsLFy7kj3/8I3Fxcezbtw9jY2Ox+8jKyuLw4cOcOHFC3N1dXFxIT0+nvb0db29v/uM//gNf\nX18WLlzIiRMn/sXM1t3dzQ8//MDExASpqamUlZVRVFREYWEh1tbW1NTUEB8fj1wuZ+bMmYIMlJub\ny0svvcSGDRvEdZCRkcGPP/7IH//4R4aHh/n00085f/48sbGxLF++nKNHj/LTTz9hbGxMfX298JRI\nLEeYXqQ9PDyYmpri3r17VFVVMW/ePKQQoujoaObNmydMSTU1NTg7O4sCtBTF1t7ejqurK+np6eLa\nlT4/lUrF3r17cXV1RaPRCAWkt7c3K1asIDExEaVSSXBwsGAqDA4OEhkZSXl5ueBkALS3t9PZ2cmq\nVauEoG3VqlVcv36dxMRETE1NsbW1xcnJCTs7OyYnJ4XFWgoTkubI0qVLiYmJeeD5+J8uCjKZ7FuZ\nTNYlk8nK/umxGTKZ7JpMJqv+9V/FP33vFZlMViOTye7LZLKV/8/P+q/D2NhYhHdUVlaSl5dHc3Mz\noaGhPPfcc3h7e/Pjjz/y+uuv4+joyC+//EJISIig1x44cIDS0lKcnJwwNjYWb2xaWhodHR1kZ2fT\n0dFBdXU1U1NTxMbG8tZbbwlH38cff4yTkxMnT55kYGCAb7/9Fl9fX5RKJaWlpULJJtlPpezA3t5e\ntm3bxvj4OMXFxVy8eJG4uDgeeughEhMT+emnn3jllVdYtGgRwcHBIsps3759JCYmEh4eTlBQkBDr\nSJj7sbExDA0NiY6OFulACxYswN/fn/r6ekJCQvjkk08EcqupqYmqqipsbGxQKBRs3bpVdHE8PDyI\njY0lKytLVPlnzpwpUHKrVq0iKytLLF6zZs0iOTmZtLQ04UJ9/fXXxXNLKLPe3l4MDQ0xMjJicnKS\nlpYWCgoKaG1tJT8/n48++oi4uDgcHR157bXXSEpKEmzEVatWkZSUREtLCy0tLRQXF5OSkgJMn9Hj\n4uJobGxk+/btbNu2jU2bNrF27VrWrl1LZ2eniJr7/vvv8fHxYfXq1QKTNjAwwJw5cwQWXbLR+/j4\nYGVlJfQYpqamwhIuQWjr6+spKysT15+dnR12dnZ8++23vP/++9ja2lJSUiIyLGCaq+no6Eh6ejq2\ntraUlpZibW1NX18fzs7OKJVKkpKSmJyc5JFHHiEjIwOtVouDgwP//u//Tnd3N7W1tURHRwt+Jkwv\nMBJuMDY2Fnt7e+zs7EQ2hU6no7W1FQcHB0xNTQkICBCdlcbGRubNm4elpaVADEo3pQcZD9KSPAp8\nBnz3T4+9DFzX6/XvyWSyl3/9/59kMlkwsB0IAVyBVJlM5q/X6yf/d7/A2NgYIyMjKioqSEhIEIEu\nKpWK+vp6/vGPfxAUFMQrr7wiCjMxMTHcuXOHyMhIurq6mJiYwNDQUMBPYVpotG3bNtzd3SktLWXX\nrl0iik7iHJ45c4bQ0FD8/f2xs7MjICCAGTNmkJ+fj6enJ0NDQ9jY2NDb2yvclzKZTAS/Sm6/zZs3\nY29vT1BQEBUVFVRWVpKQkCDYCz4+PiQnJ9Pc3CzI1JKiUtLnW1hYoFarOXv2LKampty+fZvFixej\nVCpJT09Ho9H8C+78hx9+4Pr164SHhwvZ9auvvsrzzz+PTCajo6MDX19fli1bxuzZs6mursbU1BQP\nDw8GBgaIj49Hp9OhUCjYtm0b0dHRtLe38/7774tjx+bNm7l9+zb9/f2o1WoCAwNpa2sjMjJS5GCG\nhYXx5ptvCrycZNC6ceMG9vb29PT08NBDD6FUKkUS1GOPPYaHh4coYErvbUhICAYGBmzdupWGhgYK\nCwvRarU4OTkJBaS/vz/R0dE8/fTTlJSU4OTkhEql4pVXXmHevHlEREQIRapUHI6Pj+fWrVsMDQ3R\n0dHB/PnzgemjoLW1Nebm5ri4uIj8BCMjIzGhpONse3s7vr6+7N+/n59++gmY9hg89thj2Nvbk5+f\nj5+fHydPnqS5uZnIyEjc3d1FUJAUA6fRaPD392ffvn2kpKTw2GOP8fzzz/PKK6/Q3NwMTN94LCws\ncHZ2JjU1VUT5SS1MyesiRe7Z2NgQGBjIyMgIMC0XX79+vbBiS7jDBxn/6U5Br9dnAOr/y8Prgf/4\n9ev/AJL+6fEf9Hr9mF6vrwdqgLn/2e+Q3IxS2k1/fz+enp7Y2tri6elJbGwsTz/9NJ2dncJ66+fn\nR2hoKF1dXdjZ2eHs7My5c+fo7OwkKysLmG5ftbS0kJWVhVKppL6+HhcXFw4cOMDg4CCpqaki5FN6\nHZKZyNraGq1WS2lpKf39/QLhBggJbGFhIb6+vsjlcm7cuMHRo0dpaGggLi6OhoYGDA0NBTRDIj5N\nTk4yY8YMQUyyt7cXhqjQ0FDq6+u5fv06+fn5+Pr6Eh
UVhaurKzExMQQHB1NTU8PExIRoJ7766qtC\niZebm8vq1au5e/cujzzyiPBlHDx4kKKiIjQaDY6OjtTU1BAZGYmNjQ0pKSlcvnyZ+Ph4WltbaWtr\n43e/+x0xMTEkJSVx9OhRNm7cSFVVFT09PQwPD+Pn5yd2UO7u7hQXF9PZ2cnDDz8s2qorV64kLi4O\nuVyOUqnE3NycGzduUFpaipmZGeXl5bi6utLU1CQk2QC+vr5UVFSQkZGBubk5IyMjXLt2jUWLFgkS\nc0hIiMhI8PDwYPPmzeKOXVBQIGCnkgIQpo8PEvwkPDwcU1NTiouLkclkwmHY0tLCrVu3mDNnDh4e\nHpSUlGBlZSXSncfGxlAoFLS1tYnPbO7cufz444+Ul5cjk8lISUkhOjqasLAwZs+ezZkzZ7h06ZLI\n3Tx79ix9fX3MnTuXhIQEIiMjOXv2LMHBwf/ixG1ubhYg19LSUqqqqhgYGMDAwIArV65gZmaGTqcj\nLy+PzMxM2tvbUavVpKenU1ZWhoWFBefOnaOrq4ulS5dy+vTp/2waivFfrSk46fV6yaDdATj9+rUb\n0PxPP9fy62P/26HT6Zg7dy4bN27k/v37nDlzhsDAQDo6OigrK0OhUJCfn8+XX37Jl19+yaFDhzh8\n+DDHjh3jl19+oaenh3379qFQKLhy5YoQ+TQ2NqJQKFi2bBnu7u5MTk7yww8/iELU3r172bFjh+AY\nxMbGsmHDBvbu3UtVVRVmZmbs3r2byclJysvLBYbcz8+PgYEBHB0dycrKEpoJKRkoKyuLwcFBfv75\nZ5588klyc3O5cuUKp0+fpqamhp6eHuzs7LC1tUWhUAjTTlFREZGRkfzbv/0bDz/8sKh2nzx5kpSU\nFMzMzNBoNNy8eZOSkhL+9re/cfv2be7evcvevXvFJNqyZQuHDx8Wx6bAwEBef/11IY+WgLPSXSUi\nIoLq6mouX74saFahoaGoVCqKiop45ZVXmDFjBrt27cLZ2VlAbf39/WlsbGTOnDk4ODiQl5fH3bt3\nOXjwIMePH8fHx4ewsDC6uroYHBzE0dGR0dFREhISMDQ0pKqqSsiTw8PDAcjKymLevHlUV1fzzTff\nCB1/T08PU1NTqNVq4uLiMDAwYO3atVy9epX6+nqWLFmCt7c3O3bsoK+vD09PT1GMA0hOTmbx4sUi\n0bu2tpbExES+/PJLqqurMTExoa6ujsDAQF577TWefPJJHB0dWbVqFYsWLcLJyQk3NzcRFCuxGq5c\nuYJerycoKAgjIyM2b97MjBkzWLt2rQi59fPzw9rams8++4yffvqJHTt2cODAAb7++muBHJQyNCQc\n2+rVq+nr6+P48eNi4XjzzTe5cuUKMTExaLVa7t27R0xMjCB6TUxMEBsbS0JCAs8++ywajYaQkBAu\nX74siFwPMv4/Fxr10zrp/9cyZZlMtlcmkxXIZLICyR/v5eWFg4MD7u7uFBQUYGZmhpmZGQkJCQQG\nBrJ161ZBqDly5AhmZmZ8/vnnvPnmm3h6emJvb8/+/ftF2Kurqyvh4eEEBweTmpqKVqtl1qxZpKen\n09nZSWRkJMHBwfj7++Ps7MzQ0BC+vr788ssvYkWXdidRUVFERkYCCFqOhEiTcvx6enp4++23yczM\nxMDAgLlz5/Lqq6+yb98++vv7MTIyEjoJc3NzWltbOXLkiKi8SwrL6upqWltbReaFFJryz/h4W1tb\n3njjDeLj41EoFDQ3N+Pg4EBXVxcNDQ0YGBiwfv16du7cSVVVFS+88AKpqalUVFRQWlrKnj17RMZC\nbm4u165dY926dRgYGHDt2jUMDAwoKyvDxsaG4OBg1Go1BQUFFBUVkZOTw+joKFqtFpVKRXNzM35+\nfiL1WtreFhUVUVxcLIJlzMzMcHZ2FvHtEqtRpVJRWFgo3oP79+/j4+ODTqfD29ubkJAQysvLqa2t\nFbCTrKws5syZQ2RkJE5OTqxcuZIlS5ZgZ2dHVFQUfX19rFmzRugfpHi29vZ2tFotCoVCTPSqqio+\n/vhj4ZbNyckRHYHc3FxBMJLi8GQymVBESulmeXl5WFlZiUXixIkTTExMIJPJmDFjBsHBwZiZmeHo\n6IiBgYFY6NLS0vD29mbTpk04OjoSEREBTC+OUtJVcXEx+/fvx9raWtQMZsyYwfbt20XWSUNDg8jC\nGBgYEH6c7OxscSR+0PFfXRQ6ZTKZy69vkAvQ9evjrYDHP/2c+6+P/d+GXq8/pNfrZ+v1+tkuLi7C\nJDM5OSl64llZWdTV1WFubi6KcJLSa968ecyZM4fXXnsNKysrGhsbmZqa4tChQ+zevRuYxrwlJyej\n1+tZsGCBUJFFREQIDsHJkyc5evQoqampfPHFFzz22GPs3LlTKAfr6+tF1V+6EKScPmtra3x9fYWB\nZmJigujoaNRqNUlJSQIhJ/XdH3roIby8vLCwsCAwMJDR0VEiIyOFZVculwuRi5OTk4hpHxkZYc6c\nOYLLd/r0aZycnHj//ffZvn07Fy5c4MCBA7S0tGBraysIxp9//rlwWH7xxRc4OjoyY8aMf9ltSNVq\nyXAUGxsrQnILCgpYunQpf/rTn3B2dsbQ0FAEwMrlcrRarcjLlLIJVq5cyebNm0lISKClpYWhoSFm\nz56NmZkZarUavV5PV1eXQJwbGRkRGRnJihUrAARQt6qqCplMRkZGBqGhoYyOjoprQorGc3d3p7+/\nX6RNWVpaolKpyM7OprOzk87OThE2HBoaKuLvYmJiRPzaxo0bUalUPProo/T39+Pk5MT4+LjgGNja\n2jJ//nxmzZpFaGgoc+fOxcTEhOTkZGD6xqPT6bCxsRHYuaqqKqysrCgvL+edd94R7cqgoCC6u7ux\nsLAgLCyMwMBAdu3ahVKppKWlhcnJSUFXcnZ2RqVS4efnh5GREd988w137twRR5rh4WF6e3uRy+UC\nLxcXF4eDg4NInXJ1deXhhx+ms7NTQIcfZPxXF4ULwO5fv94NnP+nx7fLZDJTmUzmDfgBeQ/yhKtX\nr6a9vZ2Wlhbq6+vJysrixo0baDQaysvLSU9P59y5c0RGRvL555/T0tKCp6cnVVVVdHd3ix67FCP3\nf7D3XtFRnWf/9qXeNWqjURlpVEeod5AQCEmA6MV0DBjbkJC4xLHjN3GLYy8njmMnxE4cm2CDKza9\ni14kUAVVBOoNaTTqfaRRG813wLuf9c/Rx/q+/4Hftd59Ziy00GjvZz/Pfd+/64JHhbuJiQnu3buH\npaUlra2tvPHGG5SXl+Pq6ipW5cuXL9PX18eePXuYmZkhKiqKiIgI2trauHLlCqWlpaLyDqBSqQgI\nCBAKcQl+kpKSQmpqKtHR0Vy+fJmvvvqKmJgYYmNjqaur46uvvuLJJ5/ExcWFvXv3EhERIRyY8Giq\ns76+HrlcjlqtxsHBgQMHDoiBpampKbRaLUuWLBGgj5deeokbN26wZ88eUlJS2LlzJytXriQiIoJt\n27aRlZXF+
Pg4GzduZM6cOTg7O2NiYsKNGzcExSgiIoLp6WkaGhq4desWw8PDmJubU1paSlJSEi4u\nLixZsoSuri4sLS2JiIggPDycq1evkpmZiYWFBTk5OVy4cIHo6Gg6Ojqoq6tjaGiImZkZYmJiaGlp\nYWRkhPj4eGpra8nKyhKTfHfv3mV6ehpAQGHXr19PaGgoSUlJ5OXloVareeKJJ5g3bx7vvvsuv/71\nr+no6BD/bqkGI2n0JAmK1JLMzs4mODiYRYsWMTAwwO3bt7ly5QrXr18XyPS2tjbc3NzYsmULtra2\nBAUFMWvWLExNTWlra2N4eJiBgQEmJiawsbEBHr0gKisrmZ6extbWls7OTszMzIiNjeW3v/2t2L6/\n8cYbBAQEsHPnTioqKoiMjKSpqYnCwkLkcjkajQa//7ZsS5dWq6WtrY2pqSmR/ly4cCF6vZ7i4mIO\nHTrEhQsXqKurIyoqio8//hhzc3MqKyvp6ekhOTmZiooKZs+eLeYqHud6nJbkj0ABEGJiYqIxMTHZ\nBXwALDYxMakHFv33f2M0Gh8AR4Eq4BLw/P9b5wEQD35RUREmJiYUFRUxMDBASEgIa9as4csvv6Su\nrg5XV1euX7+Os7Mzr732GpcuXSI0NJTly5czNDRET08Per2erq4u4BHrf8WKFdy7d4/S0lICAwNF\n8S4kJETcpBKXcenSpeTn5wvunwQ7+eUvf4mZmZm4cSXghySVnZyc5P79++j1epHl7+/vZ8+ePajV\nagoKCoiOjsbR0ZHm5maOHz/OlStX+P777zE1NRVtqMWLF6PT6RgZGWFgYAC1Wk1wcDDh4eH86U9/\n4sqVK8TFxYk6gUqlIjs7m08++USMRQ8ODopx7XPnzmEwGLh//z4KhYKsrCzs7Ozo7u4WrsG4uDia\nm5vF3L5KpWL27Nm4uLgQHR2NhYUFbW1tlJSUkJ+fT2pqKp2dnUxPTxMVFYW5uTmHDh3ixo0bQqjb\n1tbGX/7yF1H46+7upqSkhImJCcESTEhIwN/fn8bGRsERhEdjziYmJty9e5eGhgYMBgNOTk40NDQw\nPj5OZ2cnwcHBQuqi0WjYv38/pqamNDQ0cO7cOVpaWrC1teXmzZui6r5u3TpOnjyJmZkZzc3NODg4\n0NzcLPiXBoOBsLAwnJ2duXTpEs7OzoyOjlJSUkJJSQkuLi6kpqai0WhYtmyZ2I5HR0ezdetWNmzY\nQGhoKDY2Nvj4+KBUKjEzMyMmJoZ58+YRFxdHREQEfn5+3Lp1i9bWVsbGxkSsWqvVkpOTg5WVFfCo\n6N3b2yt2D5s2bSI+Pl6EnmJjY3niiSdYu3Ytg4ODYtr09OnTeHp6EhgYiLe3N6GhoZSXl//fxbEZ\njcatRqPR02g0WhiNRqXRaDxgNBr7jEbjQqPRGGw0GhcZjcb+/+Pr/2Q0GgONRmOI0Wi8+Dj/iJGR\nERobG3F2diYuLo5z586Rnp4u2lDr1q0TchBHR0c8PDwwMTEhLS2Nt956i+zsbEpLS4VgRaophIeH\n09/fT2ZmpoCHSqZeT09P5HK5qNTq9XoR+snMzEQmk5GQkICnpyc+Pj7U1tbS0tICgJubG2+99RYu\nLi7k5eWRmpqKk5MT/f39YqGIj49n0aJFTE1N4ejoiE6nIzMzE19fXyIiIoQtyN7eXmQ7ysvL8fDw\nwNHRkcrKSmZmZli9ejU1NTX4+PgI7Fd+fj4pKSn09PRgamrK/v37SUhIwMPDAzs7OywtLQkLC2P+\n/PnExMSIuoC9vT0uLi5s3LiR1NRUPv74Y+7du0dmZiZWVlYi2bd582a2bt0q+v8S8NZgMIi316lT\np7hz5w5arZbo6GgxdqvX60lISOCzzz6jv7+fHTt2iMRoXl4e58+fJy8vj6GhIVEvsbGx4fDhw8Cj\n3ZJKpSI1NRVPT0/CwsIEPeq1115jbGyM6OhowXG4du0ag4ODnDt3Tiw4r776KjExMTg7O4uMwuXL\nl1EoFLz22mvIZDLa29sJDw/nyJEjwnK9cOFCpqam8Pf3Fzo3Z2dnWltb+eabbwgLC8NgMHDixIn/\noDnPzMzwySefiOPihQsX+PLLL6mpqUGn0wka1ooVK5gzZw4mJibU1NSwdu1alEolFy9eFB7T7du3\ni89B6l55enpSU1MjRtGdnZ0JDAzEaDTi7Ows7l+5XC6wf9bW1ri4uDA8PExGRoYYtnqc6yeRfdi7\nd+87EhlHLpcze/ZsoUH39PQULTmJ4jt//nzGxsaoqqoiPDxchHucnZ0JCwsjPDyczz//nFWrVgmb\nlKSe0+l0+Pr6ClejVMEdGxtj9erVxMbGCnXbDz/8gIWFBTU1NaSnp9PZ2cnNmzeZNWsW3t7eJCcn\nc+3aNZydndmyZQstLS24urqSmJgo4KKSeu7atWt0dnaKQI+vry9DQ0Ni5l5Ch0mLwujoKNXV1WRk\nZBASEoKTkxOZmZlC3ZaQkMCZM2fw9/dn+fLlDA4OUllZyb///W+USqX4uXJyckSv28LCgri4OGxs\nbAgLC+OVV17B399fAFmsra0JDQ3l1KlTnD17lv3796NQKESRbdmyZVRUVIi3XlpaGjqdTtiN9uzZ\ng16vF6QpLy8vzMzMGB8fZ2BgQPzcs2bNws/Pj7GxMXE2jo+P57vvviMkJERkTiRqkV6vF1v35cuX\n8+DBA8HdXLJkCampqej1ejIyMmhvb6ekpEQUJaurq7l58yZ//etfmZycJDAwkIsXL+Lg4IBKpcLc\n3Fz0+AsLCwUeTxLBLl26lFu3brFr1y7u379PXl4esbGxfPnll3R0dBAZGcnk5CQmJibY2NiQmJgo\nfJ/h4eHU1NTw2WefibSpdGSKi4sTqVdAhPwaGxvJzs4mLS1NdIkkWbBkhZKmP0tLS3n48CHh4eG0\ntbXR1dWFUqkkPj6e/v5++vv7mZycpKGhgZUrV7J///7/OdkHNzc3cnJycHd3Z2BggOTkZIKDg+nr\n6+Py5cv8+OOPWFlZcffuXYaGhvj222+5e/cufv/t3rOxscHb2xuFQkFwcLBgKTY2NrJ+/Xrs7OxQ\nKpUsXbqUmZkZ0tPTaWtrIz8/X4zGOjg40NTUhKmpKf7+/jQ3N4tetK+vLwaDgQ0bNgCwdu1aFixY\nwJw5c4iJiSEoKIjCwkLi4uLEaGxbWxsvvPACubm5PPfcc5w7d466ujru37+PlZUVBQUF4ngjcRu3\nbdtGbW0tLi4udHR0YG1tzT/+8Q+OHDnCwMAAt27d4vjx4yL16enpiVKpxNzcHK1Wy61bt/Dy8qKw\nsBArKyuuXbtGSEgIAQEBJCUlCRmrJMhtaGjgqaee4vr163R0dFBUVERbWxsymUwYrKXPaXBwkMuX\nLwOPoKLT09NkZ2cze/ZsdDod8fHx5OTkoNFoBBV5cHAQX19fbGxsCA8PZAGSvQAAIABJREFUZ8eO\nHeIze/jwIXV1dVy9ehUfHx8xQn737l30ej1DQ0N4enoKqtOKFSvYvH
mzsIaXlpZy7do1goODsbW1\nFQuvVBxcsmQJDQ0N/1GvsbOzE85FieGYlpZGYGAgWq2Whw8fYmJigqmpKbdu3SIgIIDW1lasrKyo\nqqqiubmZ9vZ2NBoNmzZtAiAxMRFXV1fUajX+/v54e3sTEBBATU0N09PTKBQKnnzySby8vJiYmOD+\n/fuUl5cLy3dlZSVLly7F29ubb775RrRmDQaDQM9ZWlqiUqlYtGgReXl5REZGIpfLhZJOqqEpFAox\nAq9Sqeju7sbf3x+ZTCaOZ49z/SQWBXgEpAwMDGRgYIADBw5w/PhxjEYjr776KjU1Nfzzn//k3Llz\n7Nu3TxS69u3bR3JyMv39/ezbt0+AXk1MTMT3vXjxIufOnWPbtm3k5eXxm9/8RhTRFAoFiYmJGI1G\n8dYoLi5Gq9WiVqtZvnw5IyMjzJs3j9raWjFtVlFRwe3bt/n666/p6+ujqalJtINu3Lghtmq+vr5c\nuXKFyMhI0Tb62c9+xvDwMAsWLOCNN96gpKREzKX39PSIlpxSqaS8vJygoCCUSiWOjo5YWFjw9ttv\nc/v2beF9GB4eFjmOLVu2EBERwdq1a/nxxx+xtLSksrJSGJbXr19PdXU1R48e5fTp09TU1PDJJ58w\nPT2Nv78//v7+tLW1ifSjpLO7c+eOkOjOmjWL4uJiysrKSE9P55NPPiEpKYn6+noGBweZmZnh7t27\ntLa2otPpRK0iISEBNzc3IbPt6+vDxsYGFxcXpqamRBDozTffxMXFRdjDpcGqU6dOcf/+fYqLiykp\nKcHT05Nt27aJz93S0pLIyEgCAgIYHR3l2rVrvPPOOyIH09PTg7Ozsxgek8lkWFpacvLkSWpqavj+\n++9xcnISi8zu3bv597//LY4ZNTU1REREsH79enp7e0lLSwPgl7/8JTMzM5SUlDA2NkZxcTHnz59H\np9Nx7Ngx5HK5EOZIMBspNGVvb8+VK1c4cOAA3d3duLu7c/z4ceBRt8RgMPD222+TmpoqFpVly5YJ\n0PHo6KgQGTs4OABQXV1Nb28vd+/eZXJykocPHzI4OCi6MI9z/SSOD1999dU7Y2NjREREYGJigo+P\nD6OjowQGBpKSkoJMJsPLyws3Nzfc3NzYuXOnyKQfOXKEjRs3EhcXJ4i5LS0tHD58mLlz5zI6OoqL\niwv9/f2EhYVRVlaGQqEgNTWVVatWERMTw5EjR/D09BRq8qmpKeLi4gTsorq6muHhYfLy8igvL2fZ\nsmXiLL9z507haLC1tUWj0dDZ2cnMzAxLlizB1NQUJycnzM3NWbNmDRcvXmTp0qXMnj2bCxcuYDQa\nsbKy4sKFCygUChISErCyshJiUYVCwcTEhJiXl8jJZ8+eRaFQoFKpcHR0pLy8nNWrVzM0NCTOxTk5\nOfzud7/jyJEjInRmbm7OrFmzcHR05Pz583R2dqLT6YSIJCwsTASwJAV7U1MTCoVCzIK4urqye/du\nDh8+jK2tLYsXLyYnJ4ehoSH8/f3p7+9nzZo12NraUlZWJlRw8Oimlba/EvfA3Nycjo4OMXI+MTGB\nhYUF4+PjrFy5UliphoaGsLe3Z3p6mqqqKpycnLh//75oZ+fk5KBSqRgdHeWHH34QheRTp06RkpKC\njY0N8fHx5OXlodFoUKlUPPPMM0KqOz4+zsTEhIiHSzao4eFhQUu+ceMGixYtYnBwkMOHD7Nu3Toh\ngw0JCWF6elrUM1paWoSrU8ooxMXFsWXLFrq7u6mtreXpp5/GYDAIZoYEAJZ4m4ODg6Idnp+fz5tv\nvkl3dzdubm74+/szNDSEXC4nLy+PlJQUMc4+a9YswSLx8vJCpVI99vHhJ7Eo7N279x0p7ffjjz/y\nq1/9CisrK/7yl7+QnJzM1atXxeBGVFQUfX195OTkoFAoWLx4MSdPnsTNzY3Tp0/j7+9Pb28vJ0+e\nZOHChYSFheHq6opWqxXV3uPHjzM8PMyqVavIy8sjOjqaW7du4eLiQnp6Oo2NjXh4eGBubk5jYyP9\n/f1ER0cjl8vJyspi/vz55OTkCBT4zZs36erqYmpqipGRERQKhQirSPmNNWvWCPNSV1cX9vb2DAwM\n0NLSgoeHB1euXEGhUODv709BQQEGg4HFixfzhz/8gYCAAMLCwoiJicHf358PP/yQqakp5HI5bm5u\n6HQ6fHx82LRpEy4uLgQEBFBdXU16ejqDg4M4OztjZWWFtbU1Y2NjGI1G1Gq1WCCkME1vby9WVlaY\nmJiIoqO/v78YlnF2dkan04l5g7Vr16LT6ejr60OpVDI9PY1SqSQqKop79+4RExODhYUFtbW1hISE\nYDAY8PX1ZWxsjICAAGE2gkdb36+//pq0tDRiYmLIzs5mZGREULoVCgVDQ0NERkYKtoJkxy4qKhIk\nb5VKRUREBJ6enoKyfeXKFf7rv/6LpqYmJicncXR0xNHREblczujoKKdPn2bv3r1MT08zOjoqHiRn\nZ2dsbW0F3iwtLY2wsDCOHTtGX18ft2/fZmBggIyMDEpLS1Gr1dTU1Ahq+OnTp/Hx8WHZsmUMDQ2R\nn58vJL8S2FfKVFhaWjI2NkZycjIFBQVCVCSTyXBwcODOnTvs2rVLMB4kcpgEAEpKSmJwcJBr166x\ncOFCCgsL8fb25s6dO4JgdvHixf9Zi4KrqysymYypqSl6e3sZGxvD19cXX19fenp68PDwIC4uDgsL\nC5qamoQx6cKFCxgMBr755hv++Mc/UlFRwbFjx2hoaGD+/PnEx8dz8uRJlEolXl5elJSUsGnTJqKj\noyksLCQnJ4ecnBxKSkqIi4ujvb0dtVrNN998g6WlJatXr+b+/fskJiai0+k4ffo0r732Go2NjSJE\nlZCQgI+Pj7BWm5ubExoaKsadpQ5DYGAgMzMzQkNnaWmJtbU1kZGRHD16lJUrV9LT00N4eDgTExNU\nV1djb2+PwWCgt7eX7u5ufH19qaqqwtvbG5lMxszMDBqNRoBTEhISOHz4MObm5ixYsIDvvvuOkZER\nwsLCOHfuHJmZmf+xddfr9aLSv3jxYh4+fEhlZSVKpZLJyUmys7NZv349MpmMiYkJQkJC6Ovrw8HB\ngS+++EIQoLVarWANjIyM8ODBA7HtHhkZoaCgAJVKRXl5OU5OTly4cAGlUompqam4wa9fv05GRgZF\nRUUsXLiQc+fOiSi2o6OjGFvX6XSsWbMGOzs7LCwsxOIrEbjy8vKora0Vg0jXrl0THStHR0dMTU2p\nr68nLi6Onp4elEolv/nNb5DL5cTHxwviVlBQkJD3SjuogoIC1q1bx+joKFlZWaxYsQJfX18CAwMp\nLS1Fq9Xi5OTEoUOHWLFiBZ6enpw9exYLCwtBJO/t7eXBgwdi2lDaPQYHB9Pe3k5xcTHPPfcco6Oj\ndHR00NLSInDuEnr/9u3bZGZmCrHQ0aNHxdGov79f1KW8vLxwdHSkurqanJyc/6U5/+/1v9f/Xv9x\n/c+iOX/00Uc0NTUxOjpKY
mKiAFpIhmkJY6bVaklJSUGlUvH73/+eRYsWkZCQQFtbG0ajUcBNFi9e\nzBdffIGXlxeenp7cvHkTLy8vIcewsbFBo9GQnp5OQUEBVlZWYipNr9eLopCkaauursbLy4vnnnuO\nsrIytFotWVlZLF68GHNzc4aHhwHEG1wul1NQUEBjYyPDw8Ns376dzs5OysvLmZiYEFvwEydO8MUX\nX7Bo0SKeeeYZlEol69evZ9++fcybN4+AgAA+++wzdu3aRVVVFQaDATMzM3p6ejAajXh7e4s6iWSz\nluoUs2bNoqKiQkSC7ezsBMxUoVBgampKbW0tGRkZ7N+/Hzs7O4aHh3FxcWFychJPT0+RFs3Pzyci\nIgJ3d3fmzJnDxMSEKGYlJiaK3v26desIDQ3l/PnzwuExMDBAZ2cnCQkJVFRUcPr0aVJSUjAajYyN\njYk48euvv85vf/tb7OzscHNz49ChQyxevBgbGxvi4uJobW2lvLxcdJyCgoIoLy8XY9B3795l9erV\nHDp0SMhWTE1N+fDDD/nb3/7Grl27OHv2LMePH2f27NniayR7VlRUlIhqw6PEZl5eHklJSUxPTxMW\nFsbly5dxd3fH1taW1atX88wzz7BhwwZCQkK4c+cOdnZ21NXVUV9fz9NPP42NjQ1arZaOjg4GBgbI\ny8sjISGB2NhYrl27xhNPPEFlZSWhoaHIZDLy8/N58cUXyc7ORiaT8Ze//AWlUik4mNJusK+vD5VK\nhYmJiajFJCUl8eqrr/Lxxx9TW1srdpESNHbNmjWP9Tz+JLoPUpHIzMwMe3t7rl+/jkwmY82aNSiV\nStzd3YUpat68ebi4uGBnZ8e7776Ln58fpqam5OTk0NfXh6mpKf/6178ASEhIEO2tjIwMZmZmUKvV\nyOVyhoaGUCqVHDlyhJCQEFEgkwy9Go2GqKgojh49ilqtFkUc6VKpVBiNRvr7+8V2eXx8XPD59u3b\nJ3iIktnIzMyM/Px8rKys6O/vF+CNwsJC8TnExcVx9epV1Go1dnZ2XL58mfj4eCwtLTEYDNjZ2bFq\n1Sox55+eni4EonPmzKGpqYl58+Zx9epV+vr6xEIYEhJCUFCQ4BxWVlZSX18vjiULFizAzs6OZcuW\n4e/vT3JyMnK5nOnpaZKTk1EqlaSlpYlW2eeff46DgwOxsbGMjY2xZ88e7t27R1lZGcePH6e7u5vS\n0lLee+89XFxcWLBggVicAgICRC/dz8+P3NxcQTjS6/VYWlpib2/PE088gZubGwqFQpizzMzMhFqu\nsLCQqqoqAPr7+/Hy8mJoaIg1a9b8x8AVwLJlyyguLkaj0RATE8Pt27e5d++ecDCGhYVRXV1NeHg4\n4eHhwuVpNBqJjIykvr4ejUaDm5ubSMcCAmai1WpxcXGhtbUVU1NTUlNTqa6u5tKlS2IrL8lzxsfH\nmZmZEXIeKSiWn58vgmHt7e1cvHiRV199leeff5558+axYcMGlixZgqurKyMjIzQ1NVFfX4+3tzcG\ngwFzc3OWL1/OjRs3aGho4MaNG8TFxeHn58c///nPx34efxKLgomJifBCuru7Ex0dLc57Q0ND5OTk\niOKjdCbs6uoSOPbu7m6ioqKEIWjjxo3AI1Kwra0t169fZ2xsTCwGZmZmYhpsfHwco9HI+Pg4sbGx\nqNVqZDIZy5cvZ2JigrS0NIENlyg+VVVVjIyMsHHjRmpra0lISGDVqlUkJCSQmpqKTCbjqaeews/P\nD6VSyaxZs2hoaODw4cO4u7uj0WhITU3Fx8eH2bNni3aSRFyCR2ZrKWnp4eHBwYMHuXjxIgMDA5w6\ndQqlUklgYCCNjY2oVCrBoYiMjBTeTKVSSVFREQEBARQXF5OTk8PRo0cJDQ3F2dmZjRs3snTpUvr6\n+ggICBBxb6PRSHFxsXAyHDt2DBcXFyoqKpiZmaG9vV24J83NzTl48CDHjx/nz3/+swizBQUFYWVl\nRWBgINXV1czMzAj2hb+/v2BMenl54e7uzuuvvw48mgGR8PNPPPEEFRUVorZSUVFBT08P27ZtIyoq\nSnA2goOD8ff3JzIyUmDgRkZG8PLyEgW9d955h7GxMQYGBti1axcffPABDg4OlJWV0dvbS15eHmNj\nY9TU1FBWVsbSpUv5/PPPiYmJoaKignnz5omRdL1eL3Iz/v7+5Ofn09XVRXNzMz4+PoKWff36dYKD\ng8nPzycuLo7Y2FgmJibw9/cXPMaenh5aWloEbk9ymEq5nenpaf71r3/R1dXF6dOnBTNSr9ej0Wjw\n8PCgt7eXkpIS+vr62LVrF/39/ZSXl+Pp6cnMzAyurq789re/fezn8SexKBiNRlH9d3FxobKyku7u\nbqysrGhpaeHOnTvMnTuX+/fvI5fLyc7O5sqVK/z444+cO3eO06dP09HRgYeHB1NTU+KtI/kTYmNj\n6evrIzo6muTkZIHg8vDwYNu2bUxMTHD27FleeOEFkfqzt7dHo9Hg6elJaWkp1tbW3Lp1C4DQ0FAu\nX74sknlmZmZiXHZ4eJg5c+ZQUFBAQ0MD5ubmFBQUYGdnR3BwsOAoDA0NCVCJNOa8evVqLl68iI2N\nDS0tLSQmJgoFWnp6OrGxsbz33nvU1dVx5coVWltb2b9/PyUlJcyaNQtPT09RvHz55Zc5dOgQmzZt\norq6GicnJ9GbHxwcxMLCQrAkmpqaOHv2LH19fcyaNQtra2s2btyItbU1paWlLF68mNjYWBwdHdHr\n9SLkk5SUxJEjR/Dx8SEvL4+qqipRUCsrK0On0+Hi4iJ2eZcuXRKJ0aqqKm7cuCHauNIIeXl5ufhs\n/vjHPzJ79myWLl1KWVkZfX19TE1N8emnn7Jt2zZOnjwpMh56vR57e3txLxgMBtra2hgZGQEejW7X\n19cTFBREdnY2AwMDfPjhhyKAJJGMJFhLcHCwGIZzdHQkLy+P1157jc8++4ysrCzxO8vPz8ff3597\n9+7h4OBAbm4u8CjM5O3tTXp6Oj/72c8ICQlBq9UyMTGBn58fqampzJ49W+z4ysrK2LJlixjLrq+v\nJyQkhJmZGTo7O7ly5Qo7d+5kzpw5BAUFsWjRItavX09NTQ1Lly4VHpG+vj5efPFFIiIiRNI1KytL\nmNgf5/pJ1BRMTU1Zt24drq6uaDQaFi1ahJmZmXgI4+LicHV1JSMjg0OHDmFtbY25uTnu7u5YW1tz\n+/Zt3N3dRYtNLpfz6aefMjIygpubG+Hh4dTV1WFubk5FRQVmZmYi3SbBPiS7UU5ODnl5echkMubM\nmcPDhw+JjY1Fr9eLxB0g5KtSTzwkJIQHDx4QERFBaWmpaLc9ePBAjPJGRkYK8u7Dhw9xd3cnLS1N\n/MKUSiWvvvoqXV1d1NXV8eOPP4rchqWlJSEhIYSGhtLZ2UlqaioPHjzg5z//uZgOjI+PZ82aNZw8\neRJra2t+/etf8/HHH+Ps7MzcuXP5/vvvcXBwEH9mZ2cnglGbN2+mtrZWsCGkIJWpqS
m2trYcOXJE\nnEkvXLhAeno6Y2NjglHp7OzMxMQEbW1tQpISFhaGmZkZvb29YjrP2dmZ5uZmli9fzgcffEBbWxsJ\nCQni4bS3t2fOnDmMj4+TkJDAu+++S1hYGLa2tvz85z+ns7OTsrIyPvjgAxFwGxgYwGg0Ymtry9q1\naykoKGDLli2cOHFCtFZPnjyJl5cX9vb2qFQqcnNz2bBhA21tbajVasrLy6mvrycwMJBZs2aRlZWF\nTqfj3r17xMbGYjAY2LZtG6ampqhUqv/gEzQ0NGBra4u3tzcVFRUADA0N8Ytf/EISu9La2opGo8HU\n1JTu7m6RviwuLsbW1lZkfSQSuY2NDXV1dVhYWNDb28vGjRtpaWnh/PnzxMfHc/PmTRYsWICVlRWX\nLl3irbfewtLSkqqqKsbHx3F1dWXnzp3Y2dlx584dgZ97rOfx/+uD/H/zsrCwEEk1Cd0utRqNRiOr\nV6+mqamJhoYGMVZbXl5OT08PMzMzKBQKLC0thZRVotdotVrc3d2Znp7GycmJmpoaUYyU0mxSbSAo\nKIgff/yR+fPns2DBAjIyMtBoNGJX4ezsjLf3I4iUlIoMCwtDJpNhbW0twi3S9s/e3l7Qi3p7e9Hr\n9SJGHR4eTmBgIMHBwXz++efixjU1NeWHH37g0qVLwpi8Z88eAgIChNhUGh6SyWQ888wzeHh4CJFr\nU1MTf/jDH7C0tOTOnTt8++23PPvss4SFhQmxbVlZGba2tnz//fckJiYKuEp/f7/gEnz33Xfk5uZy\n5coVwsLCaG1tZcOGDVy9epWenh5MTEwEqPXOnTtcvXpV2JBWr17Ne++9R2JiIgaDgZMnT+Lg4ICV\nlRWrVq0iIyOD9PR0hoaGWL16NXl5eZiZmQn/Q1FRkUgvfv7556LYt2vXLjQaDf39/ezevZsnn3yS\nuXPnkpubi0ajQS6Xi0nF5ORkLl68yLx588T3/c1vfsPChQvJz89HqVTS29srZhokq3ZjY6OYApVq\nHsnJyWJhlAbR3N3d+eabRzRCg8FAX18fmzZtorKyUqDZZTIZer2epqYm1Go19vb2eHt7i5Zyb28v\nZ86cITIykoiICJycnCgtLeXYsWMAbN++nblz5womwo0bN2hpaRGQFUmzOHfuXJYsWUJFRQXvvvsu\nFy5cYO/evYLDcO7cOVQqlYDNPM71k2hJBgUFGVNSUli3bh0uLi54e3szNDREXV0d3t7eAvtubW0t\netbSZJqkbJPJZKSlpZGSkkJDQwMbN26kqKiI3t5eZDKZGIIBCAkJ4fTp04SFhQl6khTckWzWg4OD\nhIWFiSOBWq3m1KlTvPTSS6xcuZKkpCQ0Gg2jo6Mizenn54der0etVgtTkfSWP3HihFCNZ2dns2LF\nCszNzTE3NxfCmbq6OgoLC5k/fz6HDh0iOTkZo9EoDE1OTk74+flhZWXFyZMnMRgMJCUl4e7uzvnz\n5ykvLycjIwMvLy/UajVdXV04ODiQk5PD9u3buXr1Kq2trahUKqEv8/LyIisrC5VKxbVr11i2bBkB\nAQGcOXOG4OBg5HI5paWl7Nq1i9HRUQ4dOoSdnR3+/v4YDAYOHjxIV1cXc+bMISkpSaQfJZ2Zq6sr\nvb29tLa20tfXR2ZmJjdv3sTR0ZGOjg7c3d1ZuXIleXl57N69m/fff1/g2R48eIC7uztarRZ7e3v8\n/f1RKBSMjo7S0NDArFmzKCgoEMNW27Ztw9vbm9bWVi5fvozBYEAul/Ozn/2M1tZWioqKUKlUHDt2\njM2bN2NpaSnCVlqtFrlczjvvvMPs2bOZN28eRqMRf39/Ojo6+OGHH3jrrbcwNzdnYGAAV1dXQkND\n+fzzz0lISODu3buiDrB9+3YOHDhAbGwsOp2O+/fv09vbS3l5OTKZjODgYFQqFTqdjpSUFDo6Opia\nmhKhqV/96leUl5cLg/rZs2dZunQpJSUl2NjYsHHjRpH6DAkJIS8vj5KSEmJiYnByckKj0TB79mwx\nRerk5CTtoh6rJfmT2ClMTk6Km9va2pq2tjamp6cJDQ3l3LlzgkS0ZMkSAgMD0ev1NDQ00NPTg7u7\nuyDjmpmZkZubK5Jno6OjeHl5CVvR1atXsbW15cGDB5SWloootZ2dHVNTU7S0tIi2XmhoKA4ODigU\nCrq7u+ns7MTX1xd4VMm2tLREp9ORn5+Pk5OT4BtotVqGh4e5e/cuOp2OwcFBenp6aGpqIjY2VmTr\nZ2ZmKCgoYGpqSpCMJf7Bu+++K4pzer2eqqoqZmZmqKioEFQiKd0nTeWtXLmSJ598Eo1GI+bdh4aG\n6O7uJjMzk4GBAeEKkPRvd+7cQafTiXqDq6srs2bNoqamBktLSwoLC0VAS9q2+/n54ejoSG1treBa\nSgJWnU7HqlWrsLCw4N69exw+fJgDBw7w5z//mdTUVPr7+/nggw/EG6+np0fs7CorKwFwcHAQC4mD\ng4OgSnt4eBAUFISlpaVwe1y+fFlwC1xdXbG3t6epqQkTExPRipXO6P/4xz9EtiUjI4PDhw+j0WjI\nzs6mqqpK7AQ//PBDIiMj8ff3x8PDg++++46enh4iIyP5+uuvaWhoQKPR8N133wGPzFZS7UliFgwM\nDNDb28u5c+cIDAwkPDxc0KUGBwfFDktCrj148IDLly+TkpIiyEsVFRVCJSCN98vlcnbs2MHU1BQT\nExNCmCPRsIKCglCpVMTFxXHy5EkSExMZHR3FyclJRL0f5/pJ1BQkeGVtbS2Tk5O0tbWxePFiqqqq\nsLW1FWARhUJBVFQUjo6ODA4OijZTbm4uUVFRYmLx7NmzwCMiriQPTUhIEDhvqUIu8R6NRiOVlZXi\n+OHs7MzMzAxDQ0MYDAbRIpMCUfb29sTHx+Pu7o5cLickJIS2tjYUCgVr1qzh+PHjODk5id3Hw4cP\n/+P7pqSkYGpqirOzM42NjWKxCQgI4ObNmzzzzDO0tbWh0Wj48ssvWbx4MXV1dWKy0cHBgRUrVnDt\n2jXOnDlDaGgoiYmJWFlZkZKSgpubG/n5+aKId/z4cQIDA9FoNKxevRpvb29ycnIYHx/nyJEjJCYm\nin6/dMySWqJXrlxhy5YttLW1UVpaKkhNTU1NWFpailrHmTNnsLCwYHR0lDlz5vDqq68yODhIcnKy\nGNOtrq5m/vz5LFq0iIsXL+Lj44NarWbp0qXMmTMHQHRSgoODaW5u5s6dO+LrTExMUCgUwuvp4OBA\nW1sbTk5OwkUh1S8AEQwCeO6559ixYwezZs2io6ODq1evCiGMvb09SUlJlJSU4OHhIZKvhw4dEpRn\nKWp97949JiYmRBI3Ly+P+Ph41Go1Z8+eJTU1lYGBAQIDA8X4uoTr02q1xMbGMj09zZYtWygvLxdh\nqlWrVtHc3CyeiZaWFlxcXOju7haqOY1Gg6WlJQcOHKC+vp62tjbWr18vCsHSvIyJiQmBgYGiwC2R\nxh73+knsFKSKbHf3I9RjSkoKOp2O5uZmQ
kJCMBqNQl7i4ODA7t27Wb58OaampmRmZvLUU08J0rFS\nqRQxXIlVZ2FhQXd3N2NjYxQUFNDR0UFaWhrz588XEeKOjg7S09OJjIzEyckJNzc3lEolhYWFYtGS\nCo1lZWVMT09TU1ODs7MzCQkJJCcnMzMzQ3l5OTU1NdTW1nLkyBFeeOEF6urqePnll+nr62NkZITR\n0VHkcjmFhYX4+PiQnZ0NPBKhrFq1in379jE2NkZ+fj6bN2+mubkZZ2dnli5dio+PD/X19cLGPD4+\nTnl5OS0tLURERNDf3y/qC729vSxdupRVq1ah0+nYvHkzExMTnDlzhurqaiIiInjttdewsrKit7cX\ne3t7Ll++jK2trSja+vr60tDQQHt7u9hRRERE4OrqKrbXZmZmLFu2DHt7eyYmJlCr1WRmZvLKK6/g\n5OTEhg0bMDMz4+9//zurV68WOYLdu3fzySefsHPnTqRjbH19vRgMtPWeAAAgAElEQVQOMjMzIyQk\nhIcPH4qWcFZWFqWlpbi6uuLj40NKSgrz588nMDCQo0ePUlxcTHNzM25ubsTHx1NUVAQ8UsKvXLmS\n6OhoTE1Nhaeys7MTFxcXgcBraGjgyJEjHD58mGPHjrF//35GRkaorKwUBVgXFxchbF2wYIEwbFta\nWrJ3716qqqpEWKmhoQGtVsu2bdt46qmnePbZZ/Hz8+PYsWPU1tZSXFws8PxeXl5ERkYCj0CzHR0d\nvPfee+zYsYPk5GRefvllCgoKRJ3hu+++o7u7mxs3bggV/ejoKFNTU6xevRoLCwsOHjyIUqkUcODH\nuX4Si4Kpqal4G9fV1Qkwys6dO7G3t8fOzk6AOE6cOEF1dTUfffQRpqamhIWFkZiYiJOTEzdv3mR0\ndFQg04OCgkhKShK25tzcXCwsLATQs729Xai9+vv7aWxsRKPRMDY2JnrXRqMRV1dXvLy8xGqrVqsp\nLS3FycmJyMhIQY3Ozc2lv7+fDRs24OXlxUcffYSzszMDAwNs27ZNgFsCAwNpampi3bp1Yh4AHsW8\npRisnZ0dgYGBmJubMzY2hoODA0lJSdTW1gp6c3FxMQqFghUrVmAwGDhw4ABNTU00NTURHBwsbiw/\nPz927NjB2bNnGR8fR6VSCe28n58fUVFR/OIXvxC7BOnnsbCwIDo6ml/+8pd0d3ezfft2gb5bvnw5\nzs7OZGdnY29vL87gZmZmXLlyRTAyUlJSRGpzeHgYjUbDxMQEOp2Ob7/9VjgoFi5cCDwaOJPw5dLw\nDzyic3V3d6NQKDA3NxemJ5VKxZkzZ7C2tsbKygqj0YjBYBAPmjRb4u7uTnh4OFVVVfj4+BAVFYWb\nmxuenp5iBkIiFS1cuJDIyEieffZZ/va3vwktvTSf4O/vL2xhFhYWVFZWMj4+TkdHBzqdTtS+pAlE\neGRMd3V15e7du2i1WiHAUavVvPTSSyQkJFBVVSWODwEBAWRnZ/Nf//VfVFVVcfXqVa5fv46bmxsv\nvPACa9euFQlZtVotwk9RUVHExcWJzMvGjRsJCQkRfNHHeh7//z7Q/zcuS0tLAgICxNCHUqlkeHiY\nGzducOnSJSwtLRkZGeGll15iw4YNwmX4ww8/cOLECZFqlMlkhIaGCjry3//+d0ELnpiYIDo6ms7O\nTh48eMDIyIgAjF66dIny8nJqa2tFyKi5uVnQcnt6esQADzzyJGzcuBEPDw+xu8nKymLHjh2sWLEC\nHx8fIiIiWLduHXPnzqWyspKtW7eKabfq6mq8vb3FmLU0KZmYmEhfX584LoyPjyOTydi+fTsJCQmU\nlZXh7u6On58fc+fOFUJTX19f4WNctGgR8+bNIyQkhM2bN6PX6wXj4MUXX8THx4eBgQEqKyuxtbXl\n4MGDgqso2Yak1mlkZCQvvvgifX19hIaGUllZye3bt6moqODw4cNUVVURGxuLXC4Xb+SOjg4aGhq4\ndu0aO3bs4ObNmzg7O1NZWUlhYSFnz54V7AGj0SgSmtLf/+qrrwAE3OTDDz8UwZ6hoSFcXFxwcnJC\nqVQyPj7OzZs38fPzw9nZGY1GI34OKT0qDS/l5uZy8OBBtm7dKhbD2NhYcnNz2bdvH+Pj44yNjVFS\nUoK/vz/R0dGkpaUxMjKClZUVw8PDHD16lNraWvbt28fFi49IgydOnGDRokUUFhaiVqsZHR0VNKzd\nu3cTGRnJ+Pg4/f393Lt3Dx8fHwIDA5k7d674HWo0Gu7fv09zc7PYjSYkJPDOO++QmpqKSqUS8yX2\n9va0t7eL3ZBE+7ayshKAn8bGRnp7e4mLi+Opp55ix44donP2ONdPIiX58ccfvyPZgjo6OoiOjub0\n6dPExsaSkZEhUFcZGRmMjo5y8+ZNmpubWbBggbBUf/fdd0RGRjIzM0NERATffvstL774IuXl5fT3\n94uZfo1GQ2lpKRkZGbi6utLS0iIciytWrECn01FcXExcXBy9vb34+fmJuoOlpSUHDx7klVde4dy5\nc0IekpeXx+nTp9Hr9bS0tIg+/YYNG0hLS2PVqlWCzqxWq1EoFNjY2Agyz+nTp7l16xZ79uwRnEej\n0SjAo9JorzQ+e+fOHczNzRkaGmLPnj34+fmRl5fH8PAwZWVlwkOYnZ2NnZ0dt2/fFjYoSUve0dFB\nZmYmXV1dApWWk5PD+vXrKSws5NKlS+JnkcAvku5OLpfj6+srdjh/+9vfcHR0JDg4mBs3brB8+XI8\nPDxEmrOwsBCNRsPk5CQBAQGUl5eLr9NqteTl5eHk5CTm/iWgiIODA9euXePpp59Gr9cTGhoqBpPi\n4+Px9PSkoqJCPARSh2poaEhg+e7fv09BQQGJiYmYmJgI3+XTTz9NU1MTvr6+uLi4YGNjw/nz5wkJ\nCaGoqIiRkRGmp6eZnp7mjTfeEOPqg4ODIqdw6tQpYmJigEeQIJ1OR2pqqjCKSa1l6bjS0NDApUuX\nsLGxQa/XYzAYCAgI4KuvvhIeh6amJo4ePcqbb77J9evXhbzGysqK6Oho7OzsxKTka6+9hrm5uahJ\nZWZmYm1tjY2NDREREXR2dpKUlISnpye2trYcOnTofw6OTWr5eXp6snXrVnQ6ncBiff311+zfv5+7\nd+/S3t7Oxx9/zKeffoq9vT2JiYmidbV27VqBJpcswPHx8URHRzM1NSUIPzY2NkRFRQlLr1qtpru7\nm/Xr19Pd3c3t27extLQUI6ZyuZz+/n7xd+DRltHa2loQdKuqqsRbxsvLi4aGBiEavXnzJhqNhps3\nb+Li4kJDQwOdnZ1CZpqfny8KTNJxRpLgSG2+O3fuEB8fL276xsZGSkpKhHj0r3/9K2+99RZNTU0k\nJydjY2PD9evXUavVmJqa/ofBqqioCCsrKzw9Penp6cHS0pKZmRnu3LlDf38/Pj4+uLu7U1ZWhoeH\nh1jIzpw5Q1JSEl1dXWg0GkHXLigoYNu2bQKMs2rVKpycnIRQ197eXoBt3dzcMBqNmJubk56eLj5f\n
CwsLMU8gl8sxMTFBr9eTnp6OWq3m1q1bWFhYcOjQIfbu3YtKpWJmZkZo8aSBn5GREVxdXdm+fTsn\nTpwQHgyA119/HaVSyaZNm3jvvfeEEv7WrVusWLGCiYkJXn75Za5fv46trS0XL16kt7dXQGqzsrI4\nefIkfX19ZGVlCVLU7t27RQZEqVSiUqmIj4/HaDTy8OFDxsfHCQ4OFpq6qakpHj58SE9PD5OTk2i1\nWtHtkIqxAB9++CFNTU3I5XIB5ampqeHWrVts2bKF1tZWXnnlFYxGI1FRUXh4eHD06FF6e3v55JNP\nuH37NlevXiUrKws3NzdhA3uc6yexKDg6OqLVauns7MTV1RW5XC4q4t7e3nh5ebFlyxYBHFUoFHz1\n1VcMDw/z7bff0tjYSEFBAatXr0Ymkwkm//379+nv72dmZoaamhrs7e1JSEgQoaCpqSksLCyYPXs2\nAwMDwgAUHByMnZ0dCQkJ2NjYCMy5hAv/8ccfGRoaEg4Db29vli1bRkJCghCVyOVyfvjhB/HLliQh\ner2e0tJSdDodra2tTExMiLl/MzMzYZDavn27OA8mJSWxcOFC0tLSaG9vp6amhq1bt7JixQoOHTqE\nVqsV/09Clc/MzDA+Po6jo6MAhUhn5tu3b5OUlISNjQ19fX08fPgQOzs7EYqSFlXJnvzll1+iVCqp\nr68XRihpvkOhUIiKf11dHe+//z5HjhxhdHSU/v5+0TI1Go00NjbS2NjI3Llzyc/Pp7u7m8jISJYt\nWybyH9IRICUlRYBeVSoV+fn5NDY2/kc4TEoKmpqaMj09LYqS0oLyf6L5Pv74Y2JjY7l79y7PPvss\nL7zwAnl5eURFRVFcXIzBYBCTmnZ2diQlJYlho4mJCfbs2fMfVKilS5cCjzoctbW1dHZ20tXVRXd3\nN3fv3sXd3Z1r167R1NSEj48PdXV1HDx4kJaWFjH0Fh0dTVFRkWjnxsXFiXusq6sLmUzG3bt3uXr1\nqtgt1NTUcOzYMT777DPOnz/P0NAQNTU1AuwbERHBypUruXjxopD1TE1NiUnLx7l+Ei1Jg8FAVVUV\nk5OTFBUVMTMzQ3V1tRgHtrW15YsvvqC9vZ3f//73Imxz/vx55s+fT0lJCQ4ODuh0OoqKisTD7eDg\ngI2NDWZmZvj5+TEwMICDg4MAnU5NTYnCjq+vLz4+PoyNjQmnY0pKCiYmJvT29jI0NCQKjXK5nHnz\n5nHgwAEyMzMZGRlBr9dja2uLUqkkMjKS6elpcnNzKS8vx8XFhfb2dvR6PUajkfv379PS0kJjYyMr\nV64Ub536+nrmz59PU1MTNTU1dHZ24uHhIY4FmZmZVFZW8vzzz9Pf38/ChQvx8vISpKetW7eSn59P\nZWWl0K65urpy4cIFMYgVFRVFZGQkL730EosXL0apVBITE8PFixdFvQUgPT2diYkJ7Ozs+Oijj9i+\nfbuYpjQ1NaW0tJQ333yTN998E7VaLcaI1Wo1U1NT7Nq1izfffFO4Jl5++WV+/vOfc+rUKXHDurq6\nimOKdPavr69nxYoV1NbWikyHVMtZs2YNRUVFQvzS1dVFcHAwc+bMYWBggPb2dubPny+0dmq1mrCw\nMOBR9qG0tJSamhr8/PzIz89n6dKlgiAdFxfHpk2beO655zA1NcXV1ZXa2loRqV62bBlffPEFvr6+\n2Nra8vbbbwOPFoWUlBQcHBzE0FtMTAy3bt3irbfeIiAgAGtra/Fnnp6eODk58d577wmsWlpaGh4e\nHly9elWAW2NiYsQAmEKhYHJykq6uLpYvXy4WGmm2o6mpiS1btohhNXt7e9Gid3FxwczMjIcPHz72\n8/iT2ClIxN6IiAjkcjm2trZ4eHhgY2PD0aNHOX/+PKOjo8hkMlpbWxkaGqKiooI5c+bQ3t4uOAQ3\nb94kODgYPz8/ABHFjouLE31jaUFQKBRYWVmJ1KRWq2VoaIh79+6xcOFCrK2tGRgYQKvVUlFRgVwu\nF8eH5uZmHB0dsbS0xN3dnbGxMYKDg9m6dSuNjY0UFhbS1tYmMGWhoaFotVoePHjAuXPnRE4gOTmZ\n4uJi6uvrgUc7lFOnTtHZ2YlWqxUSFxsbG9FVuHXrFrm5ufzzn/9kzZo19Pb2EhkZyeLFi0lNTRXe\nQMk1MTg4yJo1a+ju7qa/v5/29naio6N54403WL58OU5OToLcLJfLBV8gLi6OkZERCgsLRUJwdHSU\n3t5e/P39mTdvHnv37kWj0fDXv/4VR0dH/Pz88PHxYWJigt/97neUlJSIt/qJEyd45ZVXyM/Pp66u\nTlih16xZw5IlS/jd734HwJ49ezhz5gxarZa5c+fy4MEDJicnCQoKoqenB0dHR7y8vPD19cXDwwML\nCwtaWlpITk7GzMxM6PYmJycxMzMTC25FRYWQtdrZ2aFWqwkJCSEyMpL+/n5WrlwpDF3PPfcca9eu\nxcrKCl9fX/7xj3+I8JGEppPG2QMCArhz546A9zo5OTE8PCzCVX19fWIoz87OTtCudDodzs7OxMfH\n880334iotiQcmjt3LgaDgfr6en7/+9+j0WhQq9VcvnwZvV4vxspXrVrF22+/LQJrEkd0YmICc3Nz\ngoKCiIyM5Lnnnnvs5/EnsVOQzphyuZyGhgZqa2t5+eWXsbOzw9XVldzcXMbHx1m4cCGnTp0iOjqa\nsbExpqamWLx4MWNjY/j4+ODm5kZFRYVoQ01PT2Nqasr333+Pq6urQLJJ4In58+eL/IRMJuPhw4cs\nWrQICwsLLCwsBJ9RenCkG8zW1pbKykq8vLw4duyYaPdMTU1x8uRJ7O3tCQ8PJywsTPzyp6enRW4g\nNTWVH374QViOent7gUcTmJJVaGhoCGtra0xNTUXbs6GhQUwzOjo6EhAQwPvvv09BQQGRkZECEmpt\nbU19fT3BwcHk5uYSERFBSEgI+/fvJzY2llOnTuHt7c2NGzcwGAxCndbd3S3sQlIYKjk5mdHRURF0\nGh8fx8TEhKCgIC5evEhCQgJbt24V1Or3338fPz8/5HI5qamplJWVMTo6iqOjIy+//DLPP/88crkc\nrVbLE088IWza7777LvAobCXFjSXgaWdnJx0dHTg4ONDe3k5TUxNxcXHI5XK6urqYNWsWg4ODjI+P\n88knn7BkyRKUSiVOTk6iO9TU1ERQUBDp6ekCyyZ5QqV5lIiICIGra2lpITw8nJGREd544w1hhDYz\nM2NkZIS1a9fy97//nbGxMWxsbBgeHkalUlFVVUVPTw/z5s0jKyuLBQsWCGmwdGTV6/V4eXnh4+ND\nX18fHh4eNDc3Ex0dzf79j+qAt27dwtfXl1deeYXw8HAmJyepra3F0dGRtrY2NmzYgKOjI5GRkchk\nMtGulQTEUl7l2rVrAuLyuNdPYqcwOTkpzqTBwcEolUpef/114UGUyWQinvqnP/2JuLg4IYg5fvw4\nMpmM27dvk5WVRWhoqCjcSSZpCaLi4uIiGAHR0dFC3yZNM
7a3twuqUk1NjZi5l+zPkmBWr9dz6tQp\namtrMRgMoho+ODgotuSSt8LBwYHCwkIWLFjAs88+S2hoKCMjI8hkMmpqaoiKihLFsKKiItasWcOn\nn37KvXv3eP755wkNDcXOzo7Ozk5aW1vZvHkzXV1dQkf+7bffsmzZMkJCQkTRVGIJSJ2ZiYkJioqK\nSE5OZnh4WIwxBwUFERgYiKurK+np6TzxxBMCJCr5B3Nzc5k7dy4FBQU4OjoKpHl39//D3nsHR3mm\n6d6/RqGVc845IwmEJKJASIhgMgYbMBiPbcY2DuPZYsefvXPWaZym8Don8NjGQzQ5SCAUQCAkECgi\nJKGcc1YrtEJ/f2je+8xWbZ1h9ztVx/vVeauoMu1WC7X6fZ7nvu/r+l0dxMfHU1RURF5eHgsWLKCi\nooIzZ86wceNGzM3NcXFx4d/+7d/46KOPuHbtGs8++ywzZ87EwcGByclJBgcHKSwspKGhgZdeegmY\nVov29fURHh4uIqXt27cTGhrKnDlzCAgIYO/evWzdupXnnnuOxMREWltbqa+vl2Ozq6srHR0dsqgA\nwvo8e/asQHbT0tKEgRESEsIbb7xBRkYGHR0dzJkzR/oLdXV1nD9/noKCAoaHh7G0tJSNIC0tje3b\nt2Nra0tfX59Mi0xMTBgYGODDDz/kwIEDVFVVcejQIcbGxqipqRH5s4ODg+SXKjZzAFtbW6ampvji\niy/w8PCgrq6O8PBwIiIieOmllzA1NaWnp4fAwEDRdSghyyqVitjYWA4cOICLiwstLS3cuXPnoe/H\nX8WiMDk5ycyZM9myZQu1tbVotVoiIyNRq9XcvHmTxsZG0tLSiI6OpqioiDt37qDRaDh69Cg6nY4b\nN24wODhIcHAw9+7dw83NDZg+Mk5MTDA+Po67uztDQ0N4eXmh0+mYmpqSYJLg4GDs7OyYOXOmjJTK\nysoEg6XYohWZc2RkJIGBgYSEhLBmzRrmz59PX18fFRUVREdHExkZSXZ2NhYWFpibm7Nw4UKJZt+x\nYwdtbW34+Pjg5+fHpUuX5Mjo7e1NSkoKERERODs7k5eXx+joKKOjo6jVatrb20UbYW9vL/FwBgYG\npKSkUFJSIqnKp0+flkmLSqXCxcWFqKgojIyMcHR05IsvvuDtt9/Gx8cHNzc37OzssLa2JigoSEqH\nlpYWTExMyMvL45VXXqGhoYF33nlHALX9/f28/vrrREREYGxszJo1a7hw4QI3btzg0KFDuLu7k5ub\ny2effSYGJGUy4ebmxr1793jvvfewtLQUncLExIREsCclJVFUVER6ejqzZs2SxnFycjJNTU0cOnQI\nfX19IiMjcXNzo66ujuDgYMrLy/H09BQBGkwvNq+99homJibU1tZy9+5dPDw8SE5Opq6ujubmZv7w\nhz/Q2dlJcHAw/f39fPLJJ/z+979n8eLF+Pr6snv3bsbHx8nMzJRTo62tLUeOHOHChQuYmpoKaLat\nrU3KE4ClS5fy/PPPMzIyQnNzM42NjZSXl3PmzBn8/f3p6uri+vXrrFy5EpgWW5mamhIVFUVGRgbx\n8fF8/fXXmJqacvz4cZKTk9m+fTv19fV89tlnIgMfGxtjfHychoYGHn/8cfLy8mhqasLBweGh78df\nxaIwMTGBgYEBp06dkmlDcXExCxYsoLe3V/oHCrtAITPBtGrx2rVrhIWFCX0nOTkZmPaku7i4EBQU\nRGlpqdTmitVYIePeu3eP+vp6YemPj4+zefNm2tvb6erqkiOfIsUtLS3F1dUVV1dXrK2tycrKws3N\nTWy+N2/eZO3ateTl5UntV1BQwOzZs8nKyhJP/vHjx2lsbBTBjo+PDxMTE4yNjbFgwQIyMjI4c+YM\nycnJpKamiif+mWeeITY2lsrKSp555hlJWQ4JCRGdg9JvUBR8AQEBXLlyBZVKxaxZs3jsscdYvny5\nxLa7uLhIGMqpU6c4cOAAHh4e7NixA2NjY1JTUwkKCsLd3R1nZ2eysrIIDAzk+eefp6enh/Lycvbt\n20dqaipGRkZs27aN06dPSzSfubk5VVVV3L59m3PnzqHRaFCr1WRkZGBra4uJiQkwrfnPyspiZGSE\nxsZGufn6+/sZHh5m9uzZdHd309bWhrGxsQTP5ubmMnPmTHp7e4mIiMDHxwdvb2+RvHd3d/P999/j\n4eHBH/7wB3bv3s0jjzzCpk2bcHd3x8zMjKysLLZs2SJJ1k8++SRvvPEGX331FZaWlly4cIHQ0FCi\no6PFENXT04OxsTEvvPAChYWF2NnZMTU1xbVr17C2tsbd3R17e3vKy8sFye/i4kJ7ezsajQYfHx/6\n+vowNTWlsrJSGoIZGRls3LiRqqoqzpw5g4GBAXv27OHs2bP09PTIwrV//35effVVKioqGB0dxcvL\ni66uLlQqFc7Ozmzbtk10Mg97/SrES19++eWbCxYs4KOPPkJPT4/29nacnZ2FFGRoaIitrS2pqak4\nOTlJA0wRJs2ePZvjx4+jUqlobW3F0dGRrKws/Pz8uH//vvDzLl++jJOTE998841oyJubm4UJef36\ndWpqarh79y62trYYGxvzyy+/0NDQwNTUFO+//z5dXV3s3LlTpiRnz54lKChIPBRKoy07OxtXV1fs\n7Oyk4aVoBBobG+V5np6eBAQEcOnSJWJjY7GwsGDPnj3cvHkTPz8/vLy8MDExYc2aNXJjKY02ExMT\nAXQODw8THR3N4OAg8fHxODk5cfLkSbq7u9HpdGRkZHDy5EmSkpLIyMgQd+Tt27dZv3495eXldHd3\nk5GRQVhYmMBXMzIyyMjIkON9amoqixYtwtzcnKKiIqysrJiamhLn4sqVK0lOTkatVlNdXS0W9G++\n+QYPDw8aGxsFkqvkZSqTg4KCApYvXy79CGNjY+k5lJaWSr+jr6+PY8eOyXHbxcWFoqIiMjMzmTt3\nLh0dHZSVlWFqakpkZCR/+ctfiI2NZfbs2dTU1ODi4kJqaiojIyOEhIQQEBDA0aNH8fb2liZvf38/\nTU1NeHt709DQIOE0ubm52NraMjQ0REFBAYsWLWJsbIzm5mbpDWm1WhYtWkRbW5ug82bOnCmlS09P\nD42NjXh7e/PgwQO8vLwoLCxk48aNDAwMcPHiRQICArh69arY5d3d3blw4QJz5sxh//79zJs3T8Jp\nlMCew4cP09XVJTL6u3fvYmFhwV/+8hdFfv6/R7ykUqn+olKpOlQq1b2/e+xNlUrVrFKpCv/2Z9Xf\n/b//R6VSValUqgqVSrX8YRaFwcFBysrK2LZtG66urixYsIAtW7ZQUVFBVVUVS5cupbq6Gm9vbyYm\nJrCzs+OZZ55hwYIFXL9+nbq6OoGv2NraMj4+Dkzv6Nu3b8fS0pITJ04wY8YMenp68PPzY+HChVRX\nVwtyS2EtKA6z1tZWCSHRaDRcvnyZxx57DJh28rm6ukpDzdjYGFNTU0JDQ+no6KC6uhq1Wo2DgwOW\nlpYUFBSQmpqK
qakpGo0GW1tb7ty5g4WFBR0dHVI+mJubs2LFCo4cOSK05/HxcUJDQ8nNzeXMmTN4\neHgwOjrKpUuXcHNzw8bGhpiYGHQ6HXfv3iU8PJyioiJOnjzJ9u3bxVzl4eFBQkICRUVFDA0Nibci\nLi6O2tpaYR7MmTOHY8eO0dLSwvLly1mxYgWrVq0iLS1NiEVNTU3k5uZSXFxMT0+P6BA0Gg0ZGRk8\n/fTT2NvbY2VlJZHuYWFhBAQECFdy0aJF1NfXEx8fj7OzM2FhYcC0EUjZWW/evMn27dsFp9fR0SEn\nt48//li68Pv372dsbIxVq1Zx9OhRJiYmMDQ0pLy8nPLycmDaZKfRaHB2dub27dt0d3cTGBhIVVWV\nZERWVVUJE6Kqqgp7e3uuXbtGY2Mj8+bNY3x8nISEBIKCgqT2V3QFM2bMIDExEX9/f5Gte3p68uqr\nrwr7cdeuXfj5+bF69WqCg4PR09MTle6cOXNITk6W8eu9e/dISEhApVIxd+5cvv76a3p6eujt7eXM\nmTOUlpYyPj6OpaUlNjY2DA4OEhQUJBb7kJAQfH19OXLkCM888wwZGRkPcytO/ywP8ZwfgRX/weP/\nptPpIv/2J/lvN1QI8DgQ+rev+UqlUun9o29gY2NDd3c3Wq2WCxcu0NzcTHFxMQ0NDaxevVqixzZs\n2CCWZSWx9+DBg8yaNUvIOD4+PnKDW1pacvHiRXJzcwkODmbmzJm0tbWh0WhwcHAgKCiI9vZ2+vv7\niYiIwN/fn56eHi5dusStW7dISUnBwsICY2NjPDw85INQUFBAXFwcFRUVODs7s2nTJmEvmpqasnr1\navHx37t3j8nJSWbNmiUKv6CgINauXcuDBw9ISEiQlCRvb2+uXr1KeHg469atkx24t7cXKysrIiIi\nZJdduHAh165do7q6mps3bwr8Q4k327p1K1qtVkJFfvjhBwYHB2loaODJJ5/k8OHD3Llzh6mpKZHH\nenp64uvrS0xMjCjwlIXT0tKS9PR03nrrLa5evcrU1BQJCfa2CHMAACAASURBVAmirlTGs/Hx8bS2\ntjIxMUF+fr40AfX09CgoKOCzzz7D0dERIyMjSkpKBMf29wGoM2bMkAi7/Px8zM3NBV926dIlHB0d\n2b9/P0ZGRujp6Yn+xMbGBm9vbyoqKqivr6e3t1eoVrm5uYKYGxwcpLi4mCtXrvDLL7/w0UcfYW9v\nz+zZs+Vr9PX1cXV1xczMDD8/P86ePSu+g5KSEhlJmpubExYWRktLCyqViuLiYjIzMyktLZUE7CVL\nlmBqasrZs2dpbW2lsbGR+/fvo1arqaysFPGRu7u7jL2XLl0q8vOKigomJiYICAjg888/p6qqCn9/\nf7FhNzU1ERgYyCeffCJy96KiIgIDA1myZAlnz55l+fKH2p+n3/9/9ASdTpcF9Dzk660Djup0ujGd\nTlcLVAEx/+iLxsbGxD++efNmSfYJDQ2V7rSlpSU6nY6qqireeOMNyXk4cOAADQ0N4p7r6OgQtdmi\nRYsYHR2VtJ979+6RlpZGe3u7QFYGBwdZu3YtHh4eREREoKenR0JCglB8tFotU1NT4q6E6ZTslJQU\nPD09cXNz48svv6SqqorBwUFmzZpFeno6tbW1ZGRkMD4+jpmZGQ0NDaxfv56wsDDq6+uFLfDNN99I\nr0IpP6ampmhoaMDX1xcHBweGhoYIDw+nr6+PtrY2AdsaGRmRlJREc3MzarWaHTt2cPbsWSlZNBoN\nc+fOJSgoiK6uLsLCwjA2NqasrIyff/5ZoufS09OpqakhMjKSzz77jIqKCvbt20dycjJarRaNRkNz\nczOPPvooM2bMEIlyVVUVv//977G2tpYm6MWLFzl8+DCmpqasW7cOOzs71Go1VVVVGBkZSabiu+++\ni62tLQ8ePODLL7+UhXz27Nkic1cYGkVFRYKwDw8PJyQkhEcffZSSkhIGBwd55513BDmnxAHMnTuX\nJ554QgQ+Op2O5ORkWSQUqvS2bdtkEtDQ0CAEpvfee4+goCD8/f2Jj4/nxRdfZHJykvb2dkxNTQUI\nq7AUlUV05syZmJmZsXjxYrq6uqiurua5554jLS2NiIgIFi5ciJWVFTt37sTGxoYNGzaIf8Lc3FxS\nspXJWWtrK4WFhZJ/cvjwYTo6OiT6YP369VIaHzp0iLfffpsVK1ZQW1tLU1MTOTk5gsh/2Ov/S6Px\nJZVKVfy38sL6b4+5Ao1/95ymvz32v7xGR0dJTExk/vz5IuNtaWkRrJcS/Jmbm0tvb6+MhGJiYvD3\n96exsVFYiHl5edK0UnIXRkZGuHbtGmq1GhsbGwwMDJg3bx5OTk5ER0fLUfPAgQPY29tjaWlJc3Mz\n4+PjVFVVsXjxYry9vUWfX1tbi5GREdbW1jQ0NLBq1Sr09PTo7u6WYJmGhgZGRkZwdnZm8+bNqFQq\n9PX16ezspKysjIMHD9Lb2yvYNJi2ZF+8eFGaqvr6+lRWVtLR0SFYdcWeqxwlFSWhp6cnBw8exNXV\nFSMjI65du0ZxcbEYhPbv3y9lRG9vL9999x1PPfUUOp1OgkxPnjyJu7s7NjY2vPXWW/zmN79haGgI\nc3Nz5s+fD0y7DVNSUgT4YmFhgZ+fH8HBwXh4eBAWFsarr75KXV0ddnZ2bNu2jVWrVrFgwQJ27tzJ\n8uXLqaysJDIyEhMTE2xsbIiIiJDfWXt7uyyKjzzyCIGBgdJQ7OzsRK1WY2try/HjxyksLCQ3N5cf\nf/xRGowWFhYkJSXh6uoqpGVA+jdmZmaYm5sTFxdHfn4+AwMDREREUFJSImyDnp4evv76a8rKyoiK\nimL58uW4u7uzYsUKFi9eLH0ImM7AVLiTTU1NlJeXs3HjRiwsLLC0tCQzMxMzMzPWrVvHwoULCQsL\nw9PTEwMDA+zs7CgsLCQxMZG6ujoaGxtlEQsJCSE2NpaCggIiIyOFI6nwQMrKyiQs59KlSwwODhIY\nGIiVlZX0ju7fv8/ExIR8nh72+q8uCl8DPkAk0Ars+8++gEql2q1Sqe6oVKo7Q0NDDA4Okp6ezqJF\ni/Dw8MDMzIzg4GAZqU1OTkrAxe7duwkLC6OwsJDCwkLUajVjY2NSA9+7N93+UFbHefPmSdLR0qVL\nxYJaXFxMVlYWly9fxs7OTrTtbW1tuLu709HRgY2NDVeuXOGRRx4hPT0dmBYpKTV0a2srHR0drF69\nmrCwMJYuXcry5csJCAjg5ZdfRk9Pj5MnT9LX10dycjL5+fk4OzuzZs0aSaF6/PHHgWlS1CuvvEJB\nQQHl5eWSspyUlISVlRVtbW00NzdjbW3NyMgI7e3tTE5OYmVlRWNjI+bm5jJe8/DwICYmhrq6OiYn\nJ+UD6+bmRkZGBgMDA1y5ckVMWW+88QbW1tay0O3du5f8/HzOnz/PypUrmZyc5LXXXmPv3r0YGRlJ\nOMmtW7fIz8+XbE5PT09hT/j6+lJaWkp1dTW//e1viYqK4sUXXyQzM5PKy
kpiY2NFdZeamgpMS8jN\nzc3Fc9DV1UVZWRlqtVr4ATdv3iQ+Pp6XXnpJgnAGBgaYPXs2O3bswNDQUHo3ypRKualzc3OZmJjg\n888/5+7du4yPj9PX1ye9Go1GQ0dHB3p6etTV1fHtt99SXV3N4OAgWq2W9PR0rl+/LlJ6BWxy+vRp\njIyMsLGxEXjKuXPn2LVrF7t37xb83ffff09KSgrNzc18/PHHtLe309zcTH5+Pv39/YJNy8rKorCw\nkM2bN+Pi4oK+vj7379/n2rVrVFRUcPDgQbq7u3nw4AEbNmxAo9EIXl85XRkbG7Ns2TJ++uknHB0d\nH/re/C8tCjqdrl2n003qdLopYD//s0RoBtz/7qluf3vsP3qN73Q63RydTjdHUWIp2HVFwJKcnMz5\n8+fFnGRoaIiHh4e86R9++CGFhYUsWbIEPT09rK2tJTwVEBDGyZMn5Wbv7u7Gy8uLO3fu4ODggK+v\nL3V1dYIVKykpYXx8nAsXLrB9+3YsLCzYuHEjN27ckPGWl5cXlpaWomtXxkpFRUVkZGRw9uxZ4UVO\nTU3h4OCAi4sLmzZtwtnZmZaWFhmlzZw5U7rWBgYGDA0NYWZmxpIlS8jNzZVj9dTUFNXV1ezYsYP1\n69ezdetW/vmf/5mVK1ei1Wqlnv7DH/6AkZERXV1daLVaLl++zIMHD4TvePbsWXp7e6UH8Pvf/54V\nK1bw5ZdfkpmZyY8//iiQlbKyMv7pn/6J1tZWRkZG+Oabb+js7CQsLAxHR0dRRu7Zs4eUlBRsbW1J\nT0/n6NGj9Pf34+rqKj/f+fPn+eqrrygtLWXp0qX4+voyc+ZMNBoNlZWV/67mVbIlnJ2dKS8vl0yL\nr7/+Gk9PT/ngDw4OEh4ezq1btwgODiY/Px+dTodKpZLm44kTJ4DpPlBGRgaRkZFMTk6SmJhIfHw8\nU1NTLFu2jJaWFjQaDWlpacybN0/6I52dnXz77bc8//zzfP/996JwVTQrn3/+OXfu3JFm87Vr1wRy\nExsbKzdjWloaQ0NDODs7Y2try7Jly3j00UcZHh4WolJbW5uMnVesWIGrqytVVVUiblu/fj1btmzh\n/fff5/Dhw9y8eZOuri7s7OxYuHAhf/rTn8jOzqalpYUzZ85QU1NDZmam0Msf9vovLQoqlcr57/66\nAVAmE+eAx1UqlVqlUnkD/sDtf/R6irDHz8+PO3fuSLTbwMAAt2/fxt7enuvXr2Nra4uBgQG9vb2s\nWbOGF154gcceewwXFxf8/f1FlxAeHg4gu1hISIiAWBX/vqLI8/T0pK6ujrNnz1JVVUVZWRl9fX24\nurqSl5dHZWUl/f39WFlZyS+ntbUVd3d32tvb5QioKAQLCwtZs2YNCxcupK+vj6ysLIyNjYmMjOTn\nn3+msbGR0tJSwYB9/fXX0mSbO3cuKpVK+hBOTk4YGRlx8eJFDh48yNTUFFlZWbLbwDSYJSEhgZCQ\nEO7evUt2djbt7e2UlJSIk9PJyYmkpCSef/55Nm7cyN69e5k7dy5WVlZUVVVhZ2dHQkICr7zyCk8/\n/TTDw8NUV1fj7+9PXFwceXl5DAwMkJycjK+vL93d3Rw5ckRGs7m5uVRVVUkG5OTkJF5eXjKmVOzj\n4+PjJCcns3jxYtzc3HB3d8ff35/g4GBpsF2+fFkWCqXh19LSQl5eHl5eXpw5c4aJiQna29sJDAzk\n5MmTsomcPHmS69evC7Faab7CtHipv79f8iXi4uKA6RiAnJwcPDw8qKmpwcHBgaqqKik5X3rpJZyc\nnHBzc5OSddmyZbKI7dy5k7q6OrZu3UpPTw+hoaE88sgjgocfGhqiu7tbJk337t2jvLyc9vZ2IiMj\n5fMYFxdHW1ubTDN++OEHqqqq6O3t5fz58/j7+1NWVkZDQwPNzc2cPn2aBw8eMH/+fCwtLUXlq6SN\nK5/3np4eoqOjxV7+MNfDjCSPADlAoEqlalKpVE8DH6lUqhKVSlUMxAOvAuh0ulLgOHAfuATs0el0\nk//oeyicg9raWjw9Penp6SEvL4+JiQmmpqYEdjE8PIyPj4/Ye4eHh8W7rlKpJGZOSekxNzdHq9XS\n1dWFvr4+zs7ONDU1kZSUxIYNG3B2dqavr4+kpCQMDQ3JysoiJCSEyMhIHBwcqKur449//CNdXV3U\n19dLQxAgIiICX19f6T/09vZy9OhR+vr6xJlpZmYmcBIzMzNcXFwYHx9n27Zt2NjYkJOTwzPPPCOS\n2d7eXoyNjTExMcHW1haNRiNimCeffFIalIODg/z000/odDo6Ojro7OxkZGSELVu2MG/ePHx8fDA2\nNqaxsRFfX1+srKzQaDTcunWLuro67t+/z9y5c4VInZ+fj7u7O8XFxXR3d0uWpGI2UqAonp6e0gW3\ntLTkr3/9K+Hh4bi7u4uYxsnJCbVaTVtbm/Rktm3bhoeHB35+fpLLuWbNGiYmJtBqteTl5VFQUAAg\nKtPe3l5ycnKYmJggPDxcQlyCgoLE8NbX14ezszOzZ8/GycmJ5uZmrl27RmVlJWFhYXz++ecsW7YM\nmC7NnJycePbZZ6msrJTewZEjR7hx4wYRERGsXbuW4OBgsrKy+OCDD+jt7RVmRnR0NImJiUxNTXHs\n2DEuXrwIQH5+Pl5eXnz11VeCmlepVExMTJCWliYnHQMDAy5fvkxRURGenp4UFBRw/Phx7O3t8fHx\nYWRk5N+Z+V5++WWx1hsYGPDjjz9y/PhxDh06JCrTwcFBjh8/TmdnJ4ODgwwNDTE6OoqhoSEuLi7S\ncK2rq/tP9RT+oSFKp9Nt/Q8e/v5/8fw/AX966H8B04uCUvPv2rWLO3fuEBcXR3t7O97e3nR2dmJl\nZUVsbCzZ2dnU1taSlpaGn58fubm5UhPa2dkJ2flv/xbx7ldWVtLa2oqJiQmtra1oNBqSk5PFy67U\npQrV18HBQWhLJSUlVFdX8+KLL/Lpp58SERHBV199RW9vL15eXoSFhdHa2sqqVasoKytjx44dzJ49\nm927d+Ph4SEiHMXurGQSbty4kc7OTqlPL126hJmZGQkJCfT19bFt2za6u7uxs7Pjrbfe4qWXXhIP\nR3t7OykpKdLk9Pf3p6GhgerqaoqLi+WDZmdnR3FxMc3NzcTExDA5OcnZs2eZPXs2b7/9Nra2tkRH\nR/P0008TEhIiPg7FZl1TU4OBgQG+vr4cPnyYxMREhoeHcXV1pa2tjStXruDu7i5y67q6OoHaZGdn\nMzg4yOjoKJWVlbi4uLBnzx5xfCp4fF9fXxn3tre3C1DV0NAQtVot5qy+vj5cXFwoLS1lYGAAJycn\nkWQrxGpzc3N6e3uxtrYmNjZWrPG/+c1vOH/+PHV1dSQlJeHh4cG+fftQq9UyoRocHOSxxx5j1apV\nsqu3trZSUlLCrl27xFinfG+Y9lRMTk7i7u4uocU3b96kqamJ+/fvExUVJTj2OXPmEBoaiqWlpahN\ndTodv/3tb9m4cSONjY1cu3YN
QD7fGRkZeHt7MzU1RU9PD4sXL0ar1YpiNTQ0lLGxMeLi4oQlUVVV\nRUdHBwsXLsTS0pLGxsb/FE/hVyFztrOz49ixY+zatYvvvvuOyMhICX1RqVRkZmYSGxsru2hpaSkx\nMTGcOnVKouVtbGzEUOTr6wtMC5HKy8uZM2cOHR0dEk2u0IWKi4sFuzU8PEx3dzelpaWsWbNGaME1\nNTXMnDmTJ554QhxsQ0ND+Pv7i77C1tYWLy8vQkJCSE9PZ/369QwNDaHT6bCzs6O/vx9vb2/27NnD\n5OQkGo2G+Ph4MV8pXg1FxKLwGXt6eqQRp6DThoaG+OKLLxgaGmJ4eFhORfX19cydO1c0G4aGhsyf\nP5+uri4xj/3xj3/kww8/JDw8nB07drBx40a2bt2KnZ0dPj4+aLVa5syZQ0REBO7u7kxMTPD000+T\nkJBASUkJL774oqRU37p1C51OR1xcHNHR0eIcLS8vJyQkhJKSErKzs5mcnCQuLo758+fj5uZGZ2cn\nycnJ3Lhxg4KCAvr6+ujq6qK1tRVAAlEV8Ky/vz+BgYG4ubkRHx9PR0cHCQkJbN68GU9PT9RqNZOT\nkxgYGDBjxgwhfru7u2NtbY2hoSEA6enpImHv6urC1NQUHx8fent7iYuL48cffxSJ97179yguLubY\nsWOSXzpr1iyhTunr60uN7uDgQEZGhpSwLi4ueHt7ixK1trZWBHguLi5UV1fT2toqJrAFCxZgY2ND\nS0sLR44ckXH6wMAAHR0dYgTLzc2VU0NGRgajo6Ps3r2b7u5uScRWEtbb29tZunQp2dnZnD59Gnt7\ne3bu3PnQ9+OvQub85z//+c1PP/2UiYkJgoKCxMlnYGAgpB/lF3j79m1mzpwpN0V8fLzYes3MzHB3\nd+df//VfpWcQHh5OS0sL4eHhgiSPj4+XMc3p06fZuXMnpaWlqFQqYUK2trZSUVEhM2GlMZiXl8eO\nHTvo6+sjIiKC3t5e9PX1CQ8PZ2hoiNLSUl544QXi4+O5f/8+H3/8sbg0/f39Bb4yZ84cLCws8PX1\nFZXa66+/jrGxsWjmm5qauHz5MgEBATKGTE9Pp6OjQ34Whcx88+ZNRkZGcHd3lyCXtrY2mpqaaG1t\nZWBggPXr18vMOiQkhMcff5z4+HgpbZRF5caNGyQmJtLf3091dTUjIyNERUWRlpZGT0+PQEiioqIo\nKiri1q1bGBoaCtQlJCSE4eFhDA0Nxajj6elJUlKSMBYPHTokdnaFLaGYwdRqNXl5eaxatYqioiLx\ndbi4uGBtbc2nn36KVqultLQUf39/KdO6u7uJjIxkYmICV1dXZsyYQU1NDVevXiU4OBidTifsC09P\nTzo7O+np6WFoaIjm5mbJ+zx9+jTp6enSCFy+fDmNjY2ygCkakNzcXPbu3UtYWBi3bt0iMTFRxqbD\nw8NEREQIW9LV1VVwakqQz6xZs+jp6SElJUW8M8PDwxw9epS9e/dy/vx5bG1tBTkXGhqKq6srw8PD\nzJgxg7Vr1/LHP/6Rvr4+0tLSBNbi7e0t8v6kpCR++uknwsPDOXPmzEPJnH8Vi8Jnn332plarlVj4\nlJQU7OzsiI6OxtnZWQJV6+rq0NfX5+7du0RERBAVFYW3tzeGhobY2dkxOjrKlStX2LJlCykpKezd\nu1fci1NTU6SlpWFgYEBISAj79+/HxMSExYsXk5+fL7X05cuXycjIwNramuHhYYl/UxKJFIfj0NAQ\nHh4elJSUMDIygqOjowS+6uvrc/nyZc6dO0dAQACLFi3C39+fP//5z+zZs4eEhAQaGxvJycnBzc2N\n+vp6rly5wqpVqxgaGpIFr76+XmC0lpaW3L9/X8aSw8PDtLe3U1VVhbGxMW5ubiLjbm1tFcLP1atX\n8fb2Jjk5GWtrayoqKggNDSU2NhYXFxdOnjzJgQMHiIqKIiwsTPwg9vb2+Pv78/3338tc3cTEBLVa\nDUBbWxuTk5PiZAwJCaGjowM/Pz/09fUpLS2lvb0dGxsbCUMxMDDA29ub69evo9PpCAoKwtvbm+Hh\nYcLCwjh58iQ7d+6UmbtarUalUuHl5cXly5fFR3Dw4EHGx8fRaDQUFhYyODiIiYkJGzZsIDk5GX19\nfbq7uzEyMqKhoUECZ0dGRuQ0ZmxszODgIJ2dneTl5REVFUV4eDjj4+MielMWsW+//ZaKigq8vLzY\ntWuXqBGvX78u71dMTAwff/wxe/bswcjICDMzM6ysrCRcRuFhrF+/njVr1tDc3ExOTg4GBgaEhoZS\nVlYmMfcnTpxg06ZNJCQkSBK4YvJzcXEhKSmJiooKmpubWblypXhC4uPjqa6ulsSyd999l61bt2Jt\nba2kgv/3AbdOTEzg5OTExMQEbW1thIaG0tPTw507d/5d3e/s7ExCQgIJCQlUVVWh1Wq5evUq586d\nEwddWFiYdLIVOOa6deskcqyzs5OOjg5u376NiYmJxJQ5OjqSmJgoicK1tbWyQys26KSkJGDasRcY\nGIhKpWLmzJmSXL1v3z6ampqYmJhg3bp1REdHMzExQWZmJn/6058YGxvjgw8+QKPRMDU1Jeh6pQcS\nFRWFi4sLbm5u6Ovri6ItLy+PkydPcvDgQbq6umQXVKS5Y2NjdHR0UFNTw9DQEPX19Xh4eMiJYXJy\nkosXL9LQ0IC7uzt3797l/fffp6ioiPb2dpYvX86sWbO4fPky7e3t5Obm8sUXX3Dnzh1Bobu4uGBs\nbMxjjz0mU6CWlhZ6enpkJ+/r65PRcHh4OO3t7ZKsdefOHbKzszly5Ijg11xcXESSqwTiKBmRra2t\n3Lp1i+rqarGJz5gxgytXrvDaa68xZ84cNm7ciIGBAatXryYuLg5DQ0NRtWo0GikZYFohqDT5Ghsb\nycrKoqCggLCwMFauXMn8+fOZN2+ecCT+x//4HyxYsIB9+/ZhZWXFrFmz6O3t5YcffqC3t5dVq6bt\nPorqUqPR8Morr1BeXs6FCxcYHBxkeHiYH374gVu3bqHVamlubpY8C7VaLQK17u5uli9fTltbm7hC\nOzs7uX37Np2dnbS3t/PXv/6Vxx9/nFWrVtHa2irBRgqtzNDQkK1bt5KSkoJKpeLatWu8/vrrFBQU\nsGLFCvmMPcz1qyAvjY2NiZpRsY6uWrWK1NRUhoaGpGxYvXo1Q0NDqNVqqWHb2towMDBg9uzZXL9+\nnRUrVgh5qaOjg5CQEA4ePEhsbKyo8oyMjMTdlp2djZOTE3FxcXR1dREZGYlOp0Or1XL79m2MjY1Z\ntGgRKpWKqKgoAMLCwvj55595+eWXMTc3x9/fn7a2NmJjYwkKCqK2thYzMzM6OzsxNjYmICCAkZER\nvL29hQU5OTlJc3MzBgYGDAwMAHDhwgW2bNlCcnIyQ0NDtLe3U19fj6GhIXV1dQQFBeHk5CQsRCU2\nbfny5RgaGnLr1i3s7Ozw9/fn7t27REdH89prr5GYmMg777xDTU0NgMznjx8/TkJCApWVl
Xz00Uf4\n+vpiYmJCXV0d586dw8zMjKNHjxIREcGDBw8YGxvjzTff5LnnnsPNzY1jx47x4MEDlixZws2bNyWC\nTUGRqdVqBgcHsbOzIz8/Hz09PZYsWUJTUxOVlZXCX1i0aJEkhSuBNJaWlsTExNDe3s69e/dobm6W\n91o5KSpTK61WKw3b1tZW9PT0xGFaV1cHwO3bt/H29pZN4rXXXmPPnj3o6+uj0WhwdXWlrKyM2tpa\nvLy8mDlzJg0NDaxdu1aUskpClHLigmmEXmtrK9HR0ZSWlgp6XomZV6zVxsbGUtIqBr0tW7ZgbGzM\njBkzaGxspKqqSnJFLS0tGR0dZdOmTdTX1/PgwQMsLS3p7++XnA2tVsvY2Bj379/n9u3bREdHY2Nj\nw+TkJHp6elhYWGBqakp5eblMdx7m+lWUDx9//PGbFhYWeHt7yxtgampKU1OTqA+7u7uJiIggMjJS\nxBjx8fH4+vqyYcMGTpw4IfWcVqvlxIkTREZGcvToUdauXStjICsrK8rKyiRaPS0tTaLClfpOcVnO\nmDGD4eFhcnNzsbOzY2BggPT0dHbt2kV4eDj19fXSQCovL8fIyIhjx46hp6fH8ePHRQK7Zs0abGxs\nePDggSjWrK2tKSwslIDUkydPsm3bNlQqFRqNBj8/P6Ey29nZsWDBAtzd3SX7QRE1hYeHo6enR3p6\nOj09Pfz888/odDqio6Npbm6mo6NDkF8JCQmYm5szODjIb37zG7q6uti4caM0cTdt2sSVK1dwcHAQ\nQU9sbCxTU1PU1NRIPkJTUxMqlYrBwUFJOU5KSmJgYICUlBQuXLggvv60tDRcXV1ZvHgxYWFhjI+P\nM3v2bDGkKaWds7Mz169f57nnnhPmgJWVFaOjo9TW1soiERkZSXp6OtnZ2URERLBgwQImJiakhraw\nsOCpp57CxMSEpUuX0tnZyaVLl+Q1h4aGsLS0FJ3A6OgoqampVFdXy4nvxo0bHD58mKmpKTw9PZk7\nd+6/2zDc3Nz45ZdfqKmpYfPmzYyMjDA+Ps7o6ChVVVWcP3+ejIwM3N3dMTY2pqenR/o4SuNRwcJl\nZ2eTnZ2NWq2WTePUqVOsWLFCPCWLFi1i/vz5whtVqN/nzp0TKpixsTEWFhZS3ihj0fnz5wus+PLl\ny/99yge1Ws0LL7yAsbExOp1O7MZBQUFERkZibm6OSqXi/v37/PLLL1hZWXHnzh1Onz5NZ2enwD2V\n8ZK7+7So0tLSktDQUObPn8/ExAT19fVkZGSIhlwRdmzatImYmBiioqLQ09PD3t6e5ORkvL29mTt3\nrsySZ82aBSACIxMTEy5evMi7776LhYUFExMTREVFyZTD3Nyc1tZWamtr6e3tpaOjQ0JPFKBLbW2t\nUHEUeW1MTAy+vr64ubnh5ubGnDlzsLKyorq6WqLPGhoaCAwMlKYjwJ07d9i9ezdDQ0O89957jIyM\n4OnpKaE3Dx48wMzMjGXLlsmU5cGDB7S0tAgUJTAwUPT9586do6uri2PHjqHVaunv78fR0ZGwsDBs\nbGwICgoiNjaWkJAQvvrqK/T19eXEoORtrlu3DnNzabusrAAAIABJREFUc/T09CgvL0er1eLr6yuB\nMYomQvkZlJOFtbW1WIPnz5/PSy+9JPb4kZER1q9fj1arlT7HsmXLMDU1Fb3J7du3OX/+vEw1lN7M\nrl27KC0tZd68eZw7d476+nq5aX/++WfxtCjyaGV6oYiVFEx/TMy0iNfNzY3CwkLKysoYHx+XtHBF\n5Tpv3jzi4uIkBCg0NJTR0VGOHDlCWloa5ubmBAcHc+nSJa5evSrx8WVlZTJJUOL2ABE+BQcHs3nz\nZtzc3GhoaGBgYIDw8HBKSkowNzcnJiaGFStWSLr21NTUQ9+Pv4pFwcDAgF27dslYJTIyUgJZenp6\nMDQ0JD09nfLycgGGdnZ2YmpqKtZja2trEhMTMTU15eTJk/K64eHhfPfdd9y4cYO5c+eKRVqn07Fp\n0yYee+wxxsfHKSwsFLXb6OgoTzzxhIiCPD09SUtLk6Oog4MDnZ2d9Pb2yvH99OnTWFpaCptfp9NR\nWFgIQF5eHjk5OVhbW2NiYoKlpSUZGRksWLAAV1dXwaqvWLGCyclJenp6cHV1RafTYWJiImTigIAA\nli9fjr+/P6tXr6ampoaKigq+/fZbioqKSEpKIisri4CAADZu3EhAQADDw8Okp6fj4eGBkZGRfGCV\nhKuqqioKCgooKirCwsICa2trjIyMxJORn59PVFQUPj4+0v2/desWjY2NGBoakpmZCUzb3xsbG/nX\nf/1Xdu7cKQo7BbxaV1fH2NgYVlZW5Ofn097ezunTp0UkpuQz1NfXc+/ePb755huuX7+OSqXCxsaG\nqakpJicnMTMzkxOUvb09Wq1WGA9eXl54e3uLgW7NmjWSKzo2NsbSpUtlZFpdXc3ExASDg4PodDqc\nnZ2Jj4/n3r17Ehuv0+lISkoSNFpfXx9+fn7k5ORImXvixAlWr14t2RpJSUkEBgbi6OgoQTxKD2nh\nwoVypFdcsErC9rJly/Dy8sLOzg6Yjo3r6urC39+fc+fOUVxcLBve1NQUXl5ewqRwdHSkr68POzs7\nbt++TXNzM6mpqXz33XfCxPx7a/o/un4V5cPbb7/95gcffIChoSH29vYUFhYKR18Jbk1ISODJJ5+k\npaWFyclJmZv/9NNPYruemJjAx8dHGAMK2MPAwACtVouDgwPz58+nra2NoKAg8SDU1tZiaGjIvHnz\nOHPmDJ6enmi1WiwsLAT95ezsjFar5dq1a7i7u8vEQQG7uLq6yo6+cOFC6QhnZ2fj7e1NdXU1zc3N\nGBoaEhMTw+joKB9++KGkB6WkpEhvwMTEhC+//JLi4mL6+/uZmpqSGbhGo+HGjRuSKm1gYICfnx/9\n/f2o1Wr575iYGAwMDMjPzyc8PBxPT08KCwslsWjLli0CS7l9+zb9/f0sXryYU6dOcffuXaqrq0lP\nTxehkhKp1tDQQHBwsGjyHzx4gIODAxs2bECn0xEfH09vby9hYWGiuJyamiIwMJC0tDT6+/ulX6MA\nT5TSLDc3l6eeeoq6ujp0Op0040xNTcnIyKC0tJSuri6srKyIiori8OHDMp6rrq7mxo0bGBsbiwo2\nMzOTgYEBsrOzWbx4MVlZWVRUVKBSqQRhl5ubS0xMDFlZWdja2tLW1sbg4CC3bt1ieHiYlpYW6urq\nSElJEQS7IlkfHBzkqaeeoqKiAltbW8zNzXF2diY0NFRUliYmJoSGhtLe3s4777zD6Ogo8+bNo6Wl\nhaCgIOLi4igsLMTBwYETJ04QHR3NL7/8goeHB8XFxZSUlODv709tba0sZKamppw6dYqMjAwaGhok\nwDc3N5dTp05hbW2Nr68vp0+fZvHixTx48IC6ujry8/P/+5QPM2bM4OrVq2RnZ1NTU8PAwABqtZoZ\nM2aQlZXFunXrsLGxQa1Wy/F7yZIltLa2ik11
[base64-encoded binary payload (likely PNG image data embedded in the patch) omitted]
SYqYuKiggJ\nCREsRSOSzdjKMW4LKysrcXJywsHBgRMnTrBr1y4RDjGaoJeWlmhsbOT3v/89CQkJVFVVERERgUQi\nEUDR5uZmDAYDLi4uvPbaa3z99ddMTk6ytLSEubk5Tk5OdHR0sLS0xO7du0lJSaGzs5NHHnkEd3d3\nent70el0dHd3Mz8/z/Xr10VrMzk5mbi4OORyOYcOHRJuSxsbG6RSKa6ursTFxdHV1cX58+dpaWmh\nu7tbDNV4enri5OTEo48+iomJCTU1NVy6dImcnBxSU1M5cuQIGRkZTE1NMTs7K1iWycnJTE5OEhQU\nhE6nw9TUVIhxnJyc6OnpYWBgQDwcRi9Dbm4uWq2Wo0ePsnz5cuG78PX1JT8/n1u3bvHSSy+Jl0Bb\nWxuXL1/m6aefxt3dnfn5ecbGxkT9x8bGhujoaEZGRnBzc+P48eM8/vjjwoHg7u7OypUr8ff3p66u\njr/+9a88/PDD9PX1ceLECdzc3BgaGmLz5s10d3cjkUior6/nxRdfZG5uTtCxL126JIq4Y2Nj5Obm\nUldXx4YNG9BqtQwMDHDp0iVx32zevBlTU1Mxpn7lyhVxn7u7u2NpaUlFRQUFBQXs2LFDYOI3bNiA\nUqnk17/+NRUVFVhaWmJubs6ePXvEAr2wsICTkxPXrl0jNDSUTZs2IZFIGBwcRKPRMDAwQElJyb/P\nTsHCwkK0WSwtLQVFuauri4SEBPz9/dm6dSsGg4HY2FhB9AkICMDLywtTU1OhmHN1dRUR1H379vHA\nAw/8y9uyuroavV5PVFSUcP4ZjUcmJiYMDQ0JaKm7u7to69jZ2TE7OwvcPZ96enqSk5NDSUmJ4BJI\npVJaWlqE/cfDw4OWlhZkMhktLS0izHPvvfcSEhKCjY0NlpaWdHV1AXdZ/z4+PsjlcjQaDV1dXcTG\nxtLc3Mzu3bv585//LPL/Rg/h7OwsBw8exMLCQiQai4uLKSwsFMDVgIAAtm7dKtKMxsCP0cVopESb\nm5uzuLhIaWkpvr6+LC4uMjMzg1qt5r777qOwsJAzZ84QEhJCZ2cndnZ2YrHx8PBgfHycqKgo8eYf\nGxujvb1ddFqGh4fRaDQEBQWJo8Xi4iK1tbW4ubkBEBgYKNqnOp2OzZs3i87O7du3mZiYQKvVolar\n6ezsJD8/H7lcjpmZGfn5+Vy5coXAwEDUajUuLi7i48rlcsbHxzl06BAuLi6o1WqUSiUFBQU8/fTT\nDA0NMTo6Kh6ixcVFlEqlgMuGhYVRXl6ORqMRKVe42zkrLy+nq6uLY8eO8fOf/5yuri5SU1PFoJTR\nplVRUSEWKmP7PSoqiuDgYJqbm1lYWBAk7cHBQVasWIGVlRWzs7NYW1sTHByMVCqlo6MDjUYjYMXO\nzs7Exsai/94wOgAAEQNJREFU1WoJDg4W9/vw8DCZmZmkpaWJTsQPuX4Ui4KJiQnnz58nNzcXf39/\nrl+/TmVlJWNjYxw8eJC4uDjc3NwEgry/v5+amhqOHj3KO++8g7e3N99//z319fU4OzuLh3dubo47\nd+6wtLTEsmXLsLOzY3JyEj8/P/R6PTMzMzQ1NTE8PIy/vz8WFhYiHuvj4yMGZlJTUxkYGOC5554D\n7qLYTUxMWLFiBREREdTU1Ij++fz8vACQHD9+XLxNZmdnaWxsJC8vj/Xr11NQUCCw80ZUljGJNz8/\nz4ULF8jOzhbbba1Wy4oVK7h8+TJzc3PCKwgQGxtLQkICGRkZYpDG6JQoLy9HLpdz5coVCgoK0Gg0\nzM3NcevWLWJiYvjpT38KwIsvvkhSUhKrV68WOQ2jzzIqKoozZ84QGhqKk5MT3t7eBAYGcvHiRSIj\nI8XPfOPGjVhaWmJtbY2Xlxd6vR5nZ2dSUlIIDg5GJpPh7u4uyFTGKLWxYwMIIOz8/DwpKSmMj4+T\nlpbGm2++yT/+8Q8yMjKIjIxk586dmJqasnbtWgICAjA3N0ehULBu3ToRElpcXOTq1avA3aNkSkoK\nzz77LHv37sXX15cTJ07g6uqKXC5HIpFga2uLra0ts7Oz1NfXMzs7y5o1awSjcnJykvn5eQEAgruL\nTW5u7r8wESMjI9HpdMTExJCVlUVwcDAeHh7ExsYSGBjIV199xeuvv46Hh4cIgnl4eFBfX09mZiYA\nfn5+NDQ0CGjO6OgoQUFBQpKzsLDA2rVrmZycJDc3l4WFBaRSKVKpVECF+vv7uXbtGpcuXRJhqx9y\n/SiODx9++OFuJycnEWRxcHBgx44dKJVK9uzZw9LSEh4eHvj5+dHX10dcXByhoaE0NTWhUChQqVRY\nWFgQGBgo0N6ffPIJL730EnZ2djg7O1NYWCh8hmlpaeTn53P9+nWWlpawt7cXD4KtrS2WlpZiGEuj\n0TAyMoKJiQmzs7McOXKEHTt2oFKpMDExEYo6jUbDpk2byM3NxcvLSyTnlpaWqKmpwcXFhfLycpyc\nnPDw8MDJyQkLCwtBPWpsbGTLli3odDpMTExEZ8Ta2hqNRkNUVJSYt1hYWKCzsxMbGxusrKxYsWIF\nq1atwtXVVXQUDAaDwLObmppSUlJCcnKyENgYeZRGVNf69esxNzdnbGyMrq4uTp06xcmTJwWLMCQk\nRHQkWltb0el0bNiwgcrKSiorK+nv78fa2hqVSsXc3Bzu7u5iIbe1teXo0aN0dnYyNzeHpaUltbW1\nODg4IJFIxLDT559/zt69e+nq6kIikXDu3DkMBoMILw0ODjI0NISNjQ0bN27k7NmzQpXX09ODTCbj\nu+++Y9u2bej1eoGD++yzz8jNzWV+fp5ly5YxPDwsjOJZWVn09fVha2uLUqmkpqZG/N3HHnuMpaUl\nbty4QXx8PM3NzcK0NT4+zs2bN3n44YdZWlpi1apV3Lhxg+rqary8vKiqqiI/P1/g3devX09zczMn\nT55kdnaWhIQELC0tRW3KyANJTU3ls88+QyKREBkZKfwY7e3t9Pf3C4r16tWrCQ4ORqPRcPDgQU6e\nPElGRgaNjY3s2bOHffv2ERQUJHig3d3dNDU1/fscH7RaLWvWrGF0dJSJiQnMzc0pLi7mxo0bPPfc\nc2zduhUPDw/0ej2enp789a9/FbPnxnyBURYSGBgo5hmqqqro7u7m1KlTPPzww4SHh+Pr64uPjw/b\ntm1DJpOxtLSElZUVDQ0NmJqain77yMgIBw8exNvbm4mJCSQSCWVld8VYnp6eYs79wIEDZGdnc+rU\nKd59910uXLjAxYsXKSsrExpwmUxGZ2cnK1asoL29HYPBIBBwu3fvFlw+47EmMjKSLVu2IJfLxb8N\nCgpiaGiInp4eLC0tuXPnDk5OTixfvlz4Bj7//HOeeOIJUTvx9vbGzc0NR0dHcnJy8PunTzIxMZF1\n69ZRUVFBWFgY169fp6mpiePHj5Ofn093dzcKhYJdu3aJuZStW7eiVCoZGBgQadDBwUG6urrEDIix\nrtLT00NDQwPe3t74+/uL0XQbGxu8vb2xs
rLi+eefF0dDX19fETIy5gGMYShra2v6+/upq6ujoKAA\nJycn1q1bh16vJywsjO7ubtLS0ggLC+PMmTMEBwdz+/Zt4T4wbvONxw5jpqSkpASpVMrk5KQA3k5P\nT4udzurVq+nv7+f06dP4+/tTVlaGn58fc3Nz9PX1kZKSAkBjYyNtbW04OzuTm5vLypUrBejnd7/7\nHQ4ODvj4+KDVanFyciIxMVFwOWQymYhVu7q6ihQr3EXdG+Pzxt2dUTykUqkYHx8XLdfo6Gh+9atf\nif+TZ555hjVr1gjh7tDQkPh6f8j1o9gp/PnPf94dFRUlWnAGg4FNmzYRERFBZ2cnxcXFAsJqnI1v\namqitraW2NhY3N3dRaDFCAb94osveO211wQ8VavV4u7uLpJj4+PjODk5MTIyInToU1NT9PT0oFQq\nSU1NpaCgAH9/fzw8PFAqlYIjuGPHDrq6ugT0Yv/+/SLi29/fj16vZ8WKFURGRopippWVFaGhocIK\ndOrUKYqLizE3NyckJIS8vDwiIiKE+EWlUhEcHIxKpaKpqYmbN2+ydetWQkJCcHBwYHJyEmtra2Ji\nYti7d6+4YY0tWKMqXqFQoNfr8fDwEByA9evX093dja2tLfn5+bS1teHp6SkW2Z6eHpYvXy5AMEbV\nW0REBH5+fmIgp7i4mNTUVNzc3AT6fXR0lLGxMRGtNnZ8CgsLGR8fx97entnZWfR6PQqFgpGREaKj\no+ns7OT06dNIpVJCQ0PFQFdAQABmZmZkZmai1WrFdKZKpWJpaYnU1FTm5+dpbm4mKSmJ5uZm0tLS\nRGhnfHyco0eP8uyzzwoqlvHo5ebmRnl5OW1tbbzwwgvU1dVhYmIiaiBOTk6Mj49TVFSEpaUlZmZm\nSCQSLly4gL+/P3l5eXz77beUlZWJo1x7ezsPPvggdnZ2aDQaiouLheHc1dWVHTt2UFxcLFR4P/vZ\nz4iOjuadd95hbm6OgYEBrl27JnZ7jY2NmJmZkZiYiIWFBZ6enmi1WkZHR2lra6OiooLnn3+e5uZm\n5ufnkUqlWFlZERUVxZ07d9BoNMTFxTEyMkJpaekP2in8KBaFgwcP7raysiIzM5Pg4GAxzKNUKsUb\nzwg1yc7O5sqVK0RHR7O4uEh/fz87d+7E0tKSixcvolQqMTU15eTJkzz88MO0tLQQGBiIqakpFhYW\nLC4uotfrMTExYc+ePXh4eODu7s758+fx9/dnfn5eQECWlpaora1lYmJCBI+OHj0qIqPGH/6NGzfw\n8vJi9+7dzMzMiDaUtbU15eXlgjhk3GYbH9Tnn3+eL774gtDQUK5evSrU6iMjI3h5edHX18fNmzep\nra3l5ZdfRq1WU1NTQ09PD3FxcZSXlzMxMYG9vT1FRUXI5XKcnZ1FEdNYNxkZGcHa2pqenh4xFm2M\nHM/OzhIfH8/CwgLT09PY29sTEREhZiRMTU3F4FZzc7PoEKhUKnx8fLh69SqBgYHY2NgI4ElycjIp\nKSlMTExgaWmJvb09arVasBWMC4zRDjUxMcGtW7coLy9n+/btYrpx/fr1dHR00Nrayu3bt8nJySE5\nOZnDhw8zPDwsFh5XV1daW1sJDw/H3t6esLAw4K7N3MXFhW+++YZdu3Yhk8mYmppiaGiIuLg4Kisr\n6evrEwXJlpYWAL7//nsmJiZYWloS0JRz586xYcMGAgMDSUhIQKvVcu7cOWZmZggLC6O1tZWGhgaB\n9bO3t0epVIqQ1s6dO4mLi+P27dt0dnYyOTmJs7MzTU1N4lhja2tLVlYW33zzDfHx8cTExDA9Pc3k\n5CSNjY24uLhQX19PWlqa6KLU1dVRUlLC6Ogo1tbWIqNhHJv38PDAxMTEWHz89zk+GAwGIiMj0ev1\nHDt2TNiPZ2Zm8PDwoLm5mUuXLuHu7k5DQwM+Pj6sX7+e1NRUVq1axZEjR2hpaREr9OnTp8XHTkhI\nEJOGGo0GpVKJwWDg2rVrYqvd1tZGWFgYc3NzgiC0e/duDAYD27dvx93dna6uLqGNa29vF8VKI57s\nZz/7GfX19SwuLpKYmEhoaCiNjY0UFBRw5swZTp06xcaNG3F0dOQPf/gDPj4+TE5OsnnzZmFllsvl\n7N+/n7GxMcLCwmhvb0cqlZKSkkJDQwPZ2dlIJBIcHR25c+cOra2ttLW10dbWhsFgwNHREQcHBzGc\nY29vj5OTE9PT0wIeY2pqSnd3N62trQLq4u3tzcjIiLihFhcXue+++3B2dhYYekdHR6qqqkhJSaGl\npQUnJycCAwOJj4+ntbUVMzMzMZZsdE8aw2LGB9ZYqAsNDaWhoYHy8nKsra3x8PDgkUceARA+xBUr\nVtDU1IS1tTVOTk5ERUUxMDBAWlqaaJk++OCDmJqaimKoXq9nzZo1jIyMcPbsWVHvgLuqv9jYWBoa\nGjAzM+PkyZN4eHgglUrZunUrp0+fJjMzk9DQUBwdHbG3t6e0tJTBwUHc3d1JSEhAoVCQn59PS0sL\ny5YtAxDtwJMnT3L9+nUxDFVQUEBUVBQBAQFiAS8sLMRgMIjYc2dnp0ibTk5O8n/aO2PQuqowjv/+\nFNtAfQRrQwm1tGl4Qzo9Ozg1HdVmid26dRBcRJTgEOnSVUFXh6JQpLRLKe1qxeCmVknT1JI21YKG\npFEyKA5G9HO45z3fDbnJzZD7neH7weWed+7lvR9/8j7OOe+9k/HxcWZmZgCYmprq5Tw2NtbbRKfT\n6fQ+cRkYGGB0dJTBwUGGhoZot9u9bfZWVlZYW1uj1Wqxvr7e+21JHXLZzflX4E/gN2+XDRwkPycI\nr52QoxP4eB01s6HtbsqiKABIulNn++kmydEJwmsn5OgE+XpBJtOHIAjyIYpCEAQlcioK266KOpCj\nE4TXTsjRCfL1ymdNIQiCPMhppBAEQQa4FwVJr0pakLQoadrZ5Ymke5JmJd1JfQckfS7pUTo/14DH\np5JWJc339VV6SHov5bcg6ZUGnS5KWkp5zUqaaNjpiKQvJf0g6b6kt1O/d1ZVXq551cbM3A5gD/AY\nOA7sBe4CJxx9ngAHN/R9AEyn9jTwfgMep4GTwPx2HsCJlNs+YCTluachp4vAu5vc25TTMHAytVvA\nw/Ta3llVebnmVffwHim8BCya2Y9mtg5cAyadnTYyCVxO7cvAa7v9gmb2FbBW02MSuGZmf5nZT8Ai\nRa5NOFXRlNOymX2f2n8AD4DD+GdV5VVFI1518S4Kh4Gf+x7/wtbh7TYG3Jb0naQ3Ut8hM1tO7RXg\nkI9apYd3hm9JmkvTi+4wvXEnSceAF4GvySirDV6QSV5b4V0UcuOUmXWAM8Cbkk73X7RirOf+cU0u\nHsDHFFO/DrAMfOghIelZ4Drwjpn93n/NM6tNvLLIazu8i8IScKTv8QupzwUzW0rnVeAGxRDuqaRh\ngHReddKr8nDL0Myemtk/ZvYvcIn/h7yNOUl6huKNd8XMuv/cwD2rzbxyyKsO3kXhW6AtaUTSXuAc\n
cMtDRNJ+Sa1uG3gZmE8+59Nt54Gbmz/DrlPlcQs4J2mfpBGgDXzThFD3jZc4S5FXY04qdiT5BHhg\nZh/1XXLNqsrLO6/aeK1w9q28TlCszj4GLjh6HKdYAb4L3O+6AM8DXwCPgNvAgQZcrlIML/+mmF++\nvpUHcCHltwCcadDpM+AeMEfxhz3csNMpiqnBHDCbjokMsqrycs2r7hHfaAyCoIT39CEIgsyIohAE\nQYkoCkEQlIiiEARBiSgKQRCUiKIQBEGJKApBEJSIohAEQYn/AEP6xWeXNmKQAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1385a46e10>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvHlU3HWe/vsqqopaqWIrdoolLAHCHrJAIgkxe8iiJhpj\nt47atrbdYzutPb382rYd2/ZoH7u1d3tcsxnjEo1mhyRAEgKEfYdAsRRrAQUFtS/3j1y+Z/qPueM9\nM57x3pPnPzgF9YX6fN7L8zzvt8jn83Ebt3Ebt7EIv//tB7iN27iNbxZuB4XbuI3b+AfcDgq3cRu3\n8Q+4HRRu4zZu4x9wOyjcxm3cxj/gdlC4jdu4jX/A1xYURCLRFpFI1CUSiXpFItFPvq73uY3buI3/\nWYi+Dp+CSCQSA93ARmAYqAX2+3y+9v/xN7uN27iN/1F8XZXCCqDX5/P1+Xw+J/ABsOtreq/buI3b\n+B+E5Gv6vdHA0H/4ehhY+Z+9WKVS+UQiEUFBQchkMpxOJ/Pz80gkElQqFdPT0yiVShQKBUqlEpPJ\nhFqtxuVy4XA48Hg8BAcHs7CwgEwmw2azMTo6ilarxd/fH7FYjMPhICQkBJvNhkwmw26343Q6hZ+T\ny+WYzWYkEglKpZLx8XECAwPxeDxotVqmp6eRSqWMjo6i1+tRKBTMzMyg0+kYHx8nKCgIq9WKSCTC\n5XIxNzdHQkICCwsLiEQi/Pz8sFgsyOVynE4narUam82Gy+XCz8+P8fFxoqOjkUqlSCQS4e93uVxI\npVIUCgV+fn44nU5EIhE+nw+LxYJarcbr9SKVSpmfn0cqlSKVSnG5XLhcLvz9/bHb7fj7+yORSJBI\nJExMTODv749UKsXf3x+n04nH40GpVGKz2XC73ahUKjweD263G5FIhEqlYm5uDrFYjJ/frVyy+Lud\nTidBQUHY7XZEIpHw/Gq1GqvVilKpxOFw4PP5mJ+fJzQ0FIfDgVKpRCaTMT8/z8LCAtPT0yQlJQHg\ndrsRi8W4XC6sVisymQyXyyU8p0Qiwe12I5FI8Hg8yGQyRCIRZrOZgIAAHA4HbrcbgLGxMaKiopBI\nJKjVamZnZ3G5XGi1WmZnZwkJCcHf35+pqSm8Xi/+/v7COVIqlczMzKDVaoXzBTA7O8vU1BQ6nQ6n\n00lAQAASiQSz2YxcLsfPzw+v1wuAw+FAKpVisVgICQnB7XbjcrmQy+WMj4+j0WgA8PPzQ6VSMTAw\nQFhYGHa7HaVSic/nw+l0IpfLhe8tno/Fz8zj8aBQKPB4PPj5+SESibDb7YjFYkQiEWKxmNHRUZPP\n59P9V5f36woK/yVEItFjwGMAoaGhPPfcc0RERHDjxg0uXbrE+vXr0el0xMbGcv36dVavXk1PTw8l\nJSV0dHSQnZ3NwYMHMRgM/OhHP+Lq1at0dHSgVquZmZnh4MGDvPfee7z44ovce++9lJWVkZmZydKl\nS6msrCQvL4+GhgZcLheZmZnU1dWh1+uJj49HLpcjk8no6enBz88Po9FIQUEBra2t/OEPf+Dw4cNc\nvnwZp9NJZmYm7e3tWCwWioqKqKiowGQyMTw8zM9+9jPOnDnD4OAgCQkJZGRkUFNTQ0lJCU1NTXg8\nHh588EE6OzvZuXMnTzzxBGazGaVSSX5+vvAMmZmZNDQ0EBYWxsTEBKtWraK2thaxWIzNZkOhUDA1\nNYVKpcJgMBAfHw9AZmYmHR0djI6O4vV62bx5M1VVVej1egwGA+Pj45SUlNDS0kJaWhq9vb1kZGRw\n8+ZNBgYGCA4OJj09ncbGRlwuFxERESgUCm7evElYWBhffPEF99xzDxMTE0RHR/PBBx+Qm5uLSCRi\n2bJlNDU1IZFIkMvlhIaGMjAwwGOPPcZrr71GTEwM09PTTExMEBwcTEpKCs8++yx///vfOXHiBBER\nEWRkZOBwODCZTFgsFt555x327t3L7t27OXv2LElJSRgMBgoKCpicnCQ3N5fXXnuNlStXYjKZ8Hg8\nHDlyhLGxMb71rW+xc+dOdDod09PTHD16lICAAFQqFW1tbej1elasWIHdbufy5cs88cQTNDc3MzMz\nw5UrV8jPz8doNJKSksKyZcvw+XwUFxdz4MABzGYzSUlJnDhxgtTUVFpbW9m2bRvR0dEYjUYiIyNp\nampi+/btjI6OMjg4iNvtZmpqioWFBdauXYtcLufKlSvEx8fzpz/9id27d6NQKEhNTUUikTA6Osrk\n5CRer5ewsDC6uroIDg7GarXS0NDAfffdh0gkwuFwCEkvODiY+fl5GhsbCQ4O5i9/+cvAV7mbX1f7\nYARi/8PXMf/39wT4fL43fT7fcp/PtzwoKAiDwYBCoSAjI4PHHntM+IP6+/sRi8U0Njby05/+lImJ\nCSwWC++99x5arZa9e/fy5JNPotPpWORHlixZAsCZM2fo6enB4XDwgx/8gPj4eFJSUvj2t7+NXC4n\nODiYZ555hujoaPR6PQDx8fEkJCRw+fJluru7mZiYQK1WMzo6SnJyMgDXr18nLy8PnU6HyWRiamqK\nsLAwzGYzERER+Hw+Hn30UVpbW5FKpfj5+XHXXXehUCi4//77mZiYYOXKlUxOTnLmzBmkUikAd9xx\nBzMzM6xevRqn04nNZqOmpoaJiQk8Hg8DAwMUFhbS09ODy+XiypUrdHV1UVFRgUQiwWg0EhAQQEBA\nAGq1mtbWVgYHB+nt7cVut1NRUUF7ezter5f5+XmefPJJent7KSws5OLFi4hEIi5dusT169e5ePGi\nUL2kp6cTHx+PRCKhv78fkUiEv78/69evp7+/H7fbTV9fH/n5+ahUKrRaLRKJhIiICORyOampqSiV\nSiIiIujp6SErK4uJiQni4+MJCQlBpVIhFosBcLlcLCwscOrUKRQKBeHh4ZjNZoaGhli1apUQXJKT\nkxkaGiI1NZWZmRkSEhIwGo1CMBoZGSEsLIw///nPAMTFxXH27Fna29ux2+384Ac/ICMjg+9973uM\njY0xPz9PXV0dX3zxBY888gh6vZ6xsTGcTiebN28mNjYWl8tFcXExXV1dnDp1CoDY2Fgef/xxzGYz\nu3fvJjk5md/85jckJCTgcrkoKSnhzjvv5PHHH2dhYYGwsDDWr18PwL59+9iyZQtzc3OEh4ezZs0a\nhoZuFdjT09MYDAY6Ozs5c+YMXV1dKJVKYmJi6OnpoampiatXr6LRaNi2bRuTk5M0NjZSU1PDv/7r\nvzIxMcHNmzdpbW0lIiJCqEa+Cr6uoFALJItEogSRSOQP3Ad8/p+9eHZ2ViiHvV4vv/3tb/Hz8yMy\nMpLc3FxKS0tZt24dDz30EGNjY/T29iISiVCr1Wg0GjZt2kRfXx+7du3C5/Oh1WoB0Gg0PPfcc2i1\nWiYnJ0lNTaW9vZ2IiAgcDgcbN27E4/Fw//33o9Vq0ev1KJVKkpKSeOGFF9Dr9WzZsgWpVEpSUhJH\njhwBIDc3l7m5ORobG/H396e0
tJS8vDxmZmZYWFggPDycs2fPcujQIT755BPUajUNDQ0kJCTQ2NhI\nSUkJUVFRJCYmsmLFCqHM7O7uFg671WpFr9dTWFhIU1MTer2egIAARCIRcXFx3Lx5k1OnTtHe3o5a\nrcbtdpOcnExRURF2u53p6WmysrIYGBhgy5YtxMTEEBAQQFFRETdv3iQ9PZ3e3l7EYjHV1dVcunSJ\no0ePcunSJSorK5mdneXSpUsYjUYUCgVhYWFkZ2ezceNG3G43VqsVp9NJf38/kZGRWCwWHnjgASYm\nJlCpVIyPj+Pn58fk5CSVlZUkJibS39+Pz+fjt7/9LSqViszMTL71rW+xcuVK1q5dC0BrayubNm1i\n+fLlnDhxgpMnTyIWiwkMDGTbtm2EhIRQW1uLxWIhNTUVuVyORqPh8OHD/O1vf8NoNHLXXXchk8no\n6Oigu7sbgOPHjxMTEyNk3JmZGaKioqipqSE/Px+xWIxarWbLli10d3dz+fJlZDIZFy5coK6ujqGh\nIXJzczly5Ajh4eGkpaUBt9qc3/3udzQ1NaHT6bh8+TInTpzgww8/xGg0olaruXTpEufPnwdALBbz\nq1/9iuTkZBoaGggKCiI2NpaJiQkkEgnf//73AbBYLCgUCsRiMXFxcahUKsxmMyMjIyQlJfH4449z\n3333YbPZUKvVVFVVcfr0aXbu3MmWLVtYtmwZS5Yswd/fn1WrVvHBBx985cv7tQQFn8/nBr4PnAU6\ngA99Pl/bf/Z6uVyOx+NhZmYGgIyMDCIjIwkMDOSNN96gs7MTPz8/UlJSuOOOOygqKuLs2bPo9Xrk\ncjkJCQl0dnbS3d1NQECA0Pe53W4GBwdJSkrCz88Pm81GREQE8/PzQtl18OBBnn/+ebZs2UJ6ejpm\ns5kLFy7w2WefkZKSQkVFBbOzs4yOjvLuu+8CCFzCAw88wMDAgFBSZ2VlkZqaSmRkJGvXrqWgoIC5\nuTlSUlLYtWsXzc3NxMbGotVqsVqtpKenc+7cOYaHhwEwmUy4XC7hMPX39wMQFBREfn4+69evp7Oz\nk5MnT5Kens5nn33GqlWrUCgUbN++nd27d+Pv749KpSIqKoqzZ8+yfPlyOjo6yMrKIiwsjNraWvLz\n87HZbJSVlQn9/PLly/nud7/L97//fR577DF27tzJ3//+d9RqNTqdDolEws2bN3nllVcIDw/H6XSy\nceNGdu7cyfz8PDKZjA8++ACFQoFUKmXHjh10dHSg1+sxGo384Q9/wOl08vLLL6PX63E6nQwPD9Pf\n349Go6GsrAwAlUpFZWUl8fHx3H333axbt47g4GC2bNlCeXk5NpuN++67j97eXoFLSEtLw+VyMTk5\niVwup6OjQ3i+gIAAAB544AEGBweRy+VMTk5y8+ZNent7CQ0NFVrD/Px8JicnOX36NAaDAa1Wy09/\n+lMSEhIIDw+nt7eXyMhIqqqqOHfuHAA1NTUkJCSwfv16oTU7f/48drsdn8/HtWvXkMlkWK1W7rzz\nTsRiMWFhYRw/fhyJREJNTQ0DAwMMDQ1hMBgwGAwAhISEkJ2djUqloru7m5SUFAICAgR+rbW1lYCA\nAJKSkqiurgZgx44dQovT1dWFWCxm6dKldHR08OSTT37l+/u1cQo+n+8UcOqrvNbhcJCZmUllZSUZ\nGRnI5XIuXLjA7t27mZmZYXJyEp1Oh16vZ9WqVaxatYo9e/bg8/mQSCS0tLTw9NNPs2TJEjo7Ozl+\n/DgA+fn5tLS0IBaLaWlpITw8nJycHH73u9+RmJhIYGCgUHHU1dURHx9Pf38/s7Oz2O124uPjkclk\nVFVVUVRURENDAwBTU1OUlZVx4MABHA4Hb7/9NrOzs4SFhREaGsr27duZm5tDqVQSHh5OYGAgzz33\nHFKplOjoaJqamrhx4wYTExPodDri4uKAWxeiv7+f9evXI5VKMRqNaDQarly5gkajoaCggFWrVuHv\n749Go6GlpYW77rqL0dFRPv30U5YvX86qVauYmZmhr6+P8PBw4uLisFgs/PGPf2TdunXI5XJu3LiB\nRCKhubmZkpIS9Hq9QLLNzs4yNzeHn58fFy9eJDo6moSEBOx2O2azmZ07dwqHOTk5mcjISK5fvy5k\nzoaGBqqrq7l69SqFhYUsWbKEmzdvCsElKyuLwcFB0tLSMBqN5ObmUlZWRmpqKgC9vb2Mj4+zYcMG\noTWQyWSMjIywbds2Ll++zIsvvojFYsHf35+8vDxOnjxJe3s7VquVL7/8ktTUVBQKBTKZTPjMOjs7\n0Wg0zMzMsH//fvr7+zly5AgymYyTJ09SXFzMyMgI0dHRPPzww4yNjXH06FHMZjP5+fk888wzPP74\n4yxfvpwNGzbQ1NTEkSNHkEqlrF27lsHBQfz9/UlPT8dqtVJTU0N6ejpxcXE4nU7S0tJoaWmhrq6O\n/Px8HnzwQVpaWrh69SopKSkMDw/jcrn44osvADAYDERGRjI7O4tcLufcuXMEBQWRnJzM7Owsa9as\nYW5ujunpabRaLRERERQXFxMdHU1dXR0tLS20tbURHx9PaGgok5OTX/nufiMcjX5+fmzbto38/HyW\nLl3K9u3b0Wq1VFRUkJqaSkxMDKGhoYyOjiKTyVi+fDmJiYlcuXKFpqYmYmJiiIyM5Ny5c3z66afs\n2nVL/fzrX//K9PQ0ZWVlAoNtNpsZHBwUCKPk5GTCwsLo7e2lubmZXbt2oVKpKCgoQKVSMTY2xurV\nq7FYLEIUd7lc3HvvvVRWVhIcHMzc3BwKhYLTp0+jUCiorKykoaEBPz8/TCYTcrmcubk50tPTCQoK\nYmBgAJ1Ox9q1a3E4HCgUCgCUSiWFhYWYTCa6u7vp7OwEbhGGsbGxuN1uwsLCkEgknDlzhrS0NJYs\nWUJiYiIikYj09HS6u7uJiYmhoKAAjUZDeHg4paWlFBcXY7FYyM3NRa1WYzabCQwMxOfzMT09TVBQ\nEJOTk4yMjBAYGMi+ffuYmppiaGgItVpNbm4uycnJxMTE0NLSQkFBAYODg7S1tbFmzRqWL1+O0WjE\nbrcjk8lISEgQ/m+Lqk97ezs2m43CwkLS09NRKpVCSVxeXg7AypUrefLJJ+ns7OTEiRO0tLRw9uxZ\nrFYrg4OD7N+/n4cffpiHH34YnU5HcHAwdrsdg8HA6tWr2bx5Mxs2bODAgQPk5+fj7+8PgE6nY9eu\nXSQnJ/PDH/6QsrIyMjIy6OzsJC0tjdzcXPbs2UNAQABvvPEGr7zyCgqFgpdffpm//e1vpKSk4PP5\ncLvdPPvsswIPlJyczOXLlzl+/Dhbt27F7XbT2dlJZmYm3/nOd9i3bx933HEHkZGR1NfXYzab0ev1\ndHR0YDKZMJvN1NTU4HQ6kclklJaWArB79248Ho+gSC1btgyRSERdXR1qtRqDwSCoQQaDAafTiUaj\n4fz58/T39wuqyc2bNzGZTIyPj3/1+/jfv9L/fYhEIg4ePMi2bduIiIigoKAAtVqNx+PBY
DCwe/du\nmpqaMJvN/OY3vyEwMJDAwECCg4NZsmQJRUVFdHd3U1BQwPLly7l48SIA27dvJz4+nq1bt2K1WrHb\n7dTX11NcXExraysNDQ1MT08THBwsHNrJyUmKi4ux2+3U1NSg1+vp6urC4/Fw48YNACIjI2lra2Pz\n5s2sXLmS0tJSoTwcHR3FaDQSEhKCx+MhJiaGNWvWCGTmtm3b2LBhgyAl/fSnP8VutwO3OIX333+f\ngoICRkZGiIuLE7L29PQ0LpeLl156CZlMxoYNG+jo6ODatWsYjUZCQ0MZHx8nICCA9vZ2urq6kMvl\nHD16lO7ubsbGxkhLSyMtLY2ioiLi4uL43ve+x8aNG4mPj8dkMmG325FKpaxevVp4f7fbTWVlpZCR\nhoaG2LNnD+Hh4Wg0GtLT0/H396e7u5slS5aQk5NDamoqfn5+dHR0CAx/YmKioKbExMRgMBhISkoi\nOTmZHTt2UFRUBNyqwk6dOkVXVxeXL1+moaGB8PBwbty4QVNTEzU1NWRlZbF//35EIhELCwtYLBZW\nr17N7OwsqampxMXFYTab+eSTT4iIiACgurqakJAQTp48KRBzmZmZrFy5EqVSydq1a4WL7u/vT1NT\nE5OTk+zbt4+cnBxUKhUtLS3U1NQIWRpu8VYLCwvs2bOHqakpSktLGR0dJSgoCKVSKSQGo9HI1NQU\nhYWFKJVKOjs7ycvLw+PxIJVK6erq4vTp05jNZuAW97Bv3z6OHTtGU1MTYrEYf39/ioqKhASp1WqZ\nm5ujqqqKkydP0tfXJ8jQWVlZLFu2jIyMDGZnZ4VW9KtA/Pzzz//3b/V/E//2b//2/LJly5DL5dTW\n1vLpp5+SnJzM1q1b2bRpE88//zyBgYFCBqqrqxPK6erqanp7e1m3bh0Gg4HDhw9jtVppbm4mLS0N\nq9UK3IrocXFxxMXFYTAYOHv2LB0dHYKOm52dzfbt26mtrWV0dJSWlha2b9/O8PAwBQUFgiZ+5coV\n1qxZw+TkJAsLC1y7do2BgQFqa2uJjIxk//79nD17lkcffZTTp09z+PBh/vrXv7Jy5Uo2bdrEb37z\nG2JjY5HJZCgUChoaGsjOzubEiRM8/fTTgr5tt9tRKBRERkZis9nYvn07AwMDQuZblMsmJiZYunQp\nUVFRdHR0cP36dex2Ozabjfn5eVwul6BjGwwGVq1ahVgsJiMjg8bGRiHDLBKqCoVCYPsvXLhAYmIi\nubm5BAUFMTY2xuTkJIODg3R2duJ0OnE4HMKhW/QHJCUl0dPTw8TEBB0dHURGRtLd3c22bdtQq9Vo\ntVrB0zAyMiJkvFOnTrFixQqioqLIzc1Fo9GgVCqFICyXy3G73aSlpdHX14fb7eb999/HYDAgEonY\nvn07wcHBVFRUUF1dTXh4OPn5+bz55pv86Ec/QiQSsXHjRnw+H35+foyMjAhtok6n48yZMzQ0NPDF\nF1+wZ88eVq9ezZ///Gfkcjnr1q0jLi6OkZERNBoNcXFxHDp0iLi4OIGniIyMxOl0EhsbS2JiIhER\nEXz00UfMzMwILcQilxEYGMjVq1dJSEhgenoam82GXq8XeIiCggLeeustTCYT0dHROJ1OiouLhapS\nIpEwOzsr+D62bt3KCy+8QG9vL1arlZCQED799FMuX75MeHg4SUlJXLlyZfT5559/87+6j9+ISiE4\nOJgVK1YIGTEuLg6JRIJGoyEvLw+n08myZctYvXo13d3dpKam8u677+L1etmwYQPT09M899xztLS0\nCNEZIDo6WjCIXLp0iZCQEC5fvsyRI0cYGhrC5/MJpOb169fx+XwMDg5y/fp1xGIxg4ODZGRk4Ha7\nGR8fZ//+/QCCqcVsNrNt2zaOHz/Opk2bWLduHWazmZKSEs6dO4fBYCA5OZnc3Fx27NiBQqHA6/Xi\ndDrJy8tDKpUSFxfH4OAggEBWtrW1oVKpcDgchIaGkpiYyMWLFwkPD6ejo4OcnBy2bdsmEFcikUio\nRBbNQzExMRQXFxMQEEBycrIg2VqtVioqKvj3f/93Ojs7OX/+PM888wzDw8MC6Tg2NkZ2djZ33HEH\nqampeDweQY61Wq309/eTnZ2Ny+ViaGiI0NBQNm3aRGBgoFAprV69WiDLvF4vS5cuRalUMjAwQHR0\nNCkpKZw+fZqQkBCcTid1dXUAAsdgt9tJTEyks7OT4uJiAgMDKSgoAGBubo68vDwSEhLYu3cvXq8X\nk8lEeXk5/f39TExMsHnzZoKDgwV+6dy5c3R3d3Ps2DHef/994uLikMlkzM3NERAQwOTkJAaDAZVK\nhd1uJzo6mvHxcTIzM7n33ntZt24dN27cYH5+ngceeIA//vGPAKjVagDuuece0tLSmJycZGpqisHB\nQY4ePYrJZMJqtVJXV4dUKsXr9SISiejr68PpdDI9Pc3AwAByuRydTieYt65cucKjjz7Kgw8+SH5+\nPklJSVRVVQmmN7fbzfHjx7lx44bAu6lUKiYnJ0lPT0csFrNu3TpKSkoEIvmr4htRKbzyyivPBwUF\n0dvbS3JyshDR+/v7GR8fF8wbXq8XjUZDZ2cnd9xxBwMDAxw/fhyNRoNMJkOj0aDX60lMTOTkyZMc\nOHCA6OhoYmJiiI2NJTU1leDgYD7//HM2bdrE/v372bNnD8888ww5OTnExsaSkpLC8uXLcbvddHd3\n09bWxujoKA888ABffPEFly5dIisri5ycHMrLy2loaKCoqEioYvz9/VlYWBDKxF27dhEbG4tSqaSv\nr4+rV69isVhYuXIlMzMzzM7Osnv3bv785z+zceNGJicn8fPzIygoiJycHOrr65mYmGDdunWCK7O/\nvx+VSkV9fT3JycnI5XKhKmhoaCA0NBR/f3/MZjO9vb3YbDbBK6DVamlpaSEpKYnZ2Vn+9Kc/sWzZ\nMu69916USiUajQaFQkFgYCAtLS24XC6ysrIYHx+nq6uLhoYGgTxLTk6mq6tLILLGx8cRi8UEBQXh\ndrtRKBRs2bKFwcFBwekZExPDwMAA/f39TE5OEhERwdWrV4mLi6OiogK9Xk9OTg7x8fF8+OGHbNmy\nhZmZGWpra/Hz82PDhg2cPHkSt9uNyWSirKyM5cuXExcXx7Vr1zCbzTQ2NpKXl0dAQAByuZzPP/+c\nQ4cOcfjwYQoLC7lx4wZ79uxhyZIlxMTEYDQauXHjBsuWLaOjo4MDBw4wMjJCYmIiHo+HnTt3CkYh\ng8GAz+dj6dKlfPLJJ+j1ejIzM5mZmWFubg6Hw4HT6SQsLAyr1crIyAjDw8MEBASQk5NDSUkJ8/Pz\niEQipFIpcrmcHTt2sLCwQFpaGjU1NTQ0NPDII48IrZtarSY2NhaJRILX66W6uhqZTMbWrVvxer1E\nRkaSkpKCzWZjYWEBrVbLyMiIQNArlUrq6+tpa2v7SpXC/5qj8T9CJpOhUqmYmprCYDCQm5vLW2+9\nxdDQEHa7XTigsbGxqNVq4uPjCQsLIyYmhoiI
CDweD21tbYyMjFBQUEBjYyMATqeT9vZ24uPjCQ8P\nZ2hoiK6uLn784x9TXFzM6OgoFRUVvPHGG8zNzfHhhx/icrmEoDA3N0dgYCB2u11wLcItybSrq4uI\niAjcbjfp6ekEBgai0Wh45ZVXuOOOO6isrEQmk1FXV8eihXsxo4eFhfHxxx+j1+sxmUwcPXoUQND1\nF+2y58+fZ8WKFSwsLHDx4kVaWlpwOBzk5OSwYsUKXC4XnZ2dJCYmYrPZCAsLw9/fH5PJRF9fHw6H\ng8jISJKTk7l06RJJSUlcvHgRu93OiRMnOHbsGOvXr2dgYIDDhw8zMDBAeno6ERERnDp1ii+++IK7\n776bEydOkJCQwI0bNygpKRG0/kXTUUBAAOXl5czNzbF69WruvPNOmpqaOHHiBPPz8xQVFSESiait\nrSUmJobm5mZOnjzJ8PAwbrcbrVbL/Pw8ABEREXz88cd4vV5yc3MxmUxkZGTg9XoJCQlBJBKh0+mw\nWq2YTCaWL19OTEyMUC0sZvqenh5kMhnXr18Hbsneu3fvRqfTsWXLFiwWC2+//TZpaWl0dXURGRmJ\nXC7niSeeYGZmhqVLl5Kens7Y2BhtbW2kp6cDsHXrVtauXcuhQ4cABNUsNDSUvXv30tjYiN1uR61W\nExISQnNigte2AAAgAElEQVRzM9nZ2djtdrRaLZcuXWLVqlXYbDbm5uYwGo14PB6mp6fxeDyEhYUB\nt1SYxMREEhMTBavz0qVLGR4eRiwWExERgUQiISMjgyNHjjA9PU12djbf/va30Wq1HD16FD8/P2Ji\nYoT/7VfFN6J9mJ+fR6fTCSz70qVL2bBhAx6Ph4CAAD755BPBwLNo+4yNjcVqteJ2u3n33XexWCzc\nddddSKVSZmdnAejp6aG0tJQVK1agUCiIjY1FJBLR3d2NQqHg6tWrwC256q233kIikQieCL1eL3ji\nCwsL/4Go6ejoYHh4mNnZWe68806Sk5Ox2WzExcURGBiI2+0mNjaWy5cv8+tf/5qBgQGcTifh4eEU\nFRVRVFREUlISZrNZ4A8AzGYzKSkpDA0NoVAoUCgUDA8PC63P0qVLSU5OZu/evUxNTTExMUFYWJjg\nuCwvL8doNDI9Pc13vvMd4uPjmZ+fp729nbi4OMFv0dnZKVQV9fX1vPfeeywsLJCeno7D4SAuLo7g\n4GCKioqYn59nZmaGsbExgoODcTqdKBQK3G634LBzu91EREQISsClS5cEnqC/v5+jR4/S19dHeno6\neXl5VFVVYTKZWLFihVD9rFu3DoCkpCQSExP57ne/S2xsLI2NjXzyySfExcVx48YNoRqIjIwU2sf6\n+nrKysooLS0lKCiIhx56CIlEQnV1NTt27ADg6tWrXL9+HaVSSW5uLlKplIKCAvR6PatXr+aee+5B\np9Oh1Wr56KOP+OCDD+ju7qasrIzKykquX79OcnIyEomEN998k8rKSgDGx8eJj48X7PN+fn709/ej\n1WpRqVSkpKTQ3t7O+fPnOXfuHBaLhQsXLjA+Po5KpRLOGUBVVZXQSnZ0dODxeBCJROzYsQO1Wk14\neDhhYWFUV1dz9uxZ5ufnqaqqIiMjA5lMRnNzMxKJhMOHD1NQUEBVVRWjo6NYLJb/76kPGo2Gq1ev\n0tLSQkVFBc3Nzaxbt44dO3Zw4MABrFYrqampVFZW0tXVRUpKCr29vZhMJj766CPWrVtHVlYWEomE\nqakpoVLwer04HA6am5sFniIlJYXVq1cL/brX66WqqoqxsTHCw8ORSCS8/fbbDAwM0NfXR39/P4OD\ngyQmJuJwOIBbXEVmZiY7duygs7OT/v5+ampqsNvt7N69m8zMTA4ePIi/vz85OTmIxWJUKhUajUYg\n8Nrb25HJZGzevJlVq1YBYLfbBfmqvb2dhIQEYmJiCAkJQafTERQUJJSDXV1d9PT0kJCQIGTIlStX\nCsM2X375pdByGAwGQRZdWFjAYDCQn5/Pq6++ysmTJ4mIiOCZZ54hOTkZi8WCSqUiOzublStXolar\n8fl8TE5OIhaL6ejoEAjQ0dFRIWhMTEyg0Wiorq6moqKC/v5+du7cSXx8PE6nk4aGBpYtW4ZYLMZo\nNLJ582Z6enoIDw/HbrcLxrUjR46QmZkpVHkZGRm0t7dz5MgR0tLSiIyMFJQGr9fL66+/TkhICAqF\ngra2NoxGI/n5+axcuZLMzEwkklvF8MaNGykuLub69euUl5eTlpbGwsICSqWS9PR0FAoFNpuNgYEB\nbDYbPp+PU6dOERERgVgsFqS/Dz/8kO7ubjZv3iyc30W+RqVSIZfLiYiIoLe3F51Ox9zcHD6fjzVr\n1lBQUCD4J06dOiW4Qqurq9FoNISEhHDfffcBt8xWAQEBxMXF8dprrzE/P09bWxsRERE89dRTzM3N\n0draSmxsLKGhoUilUlauXInFYiEtLQ2DwcDTTz9NWloaUqlUqHS+Cr4R7YPX6xWGWKRSKU6nk+vX\nrxMaGso777zD/v37qa6uZvv27TzzzDOMj49z6NAhhoaGCA4OJjIyEj8/P9RqNSaTiezsbGpqatiz\nZw9nz55l69atNDU10d3djUgkwmaz8fHHH2Oz2di5cyeZmZns2bOHoKAgFAoFCwsLpKSksH37dkwm\nEzqdjoMHDwpkW2xsLOPj40LmaG9v584778Tr9dLQ0MBnn31GT08Pv/zlLykuLmZ8fJzPP/+c+Ph4\nwdyyWKWUl5cTG3trTGRmZobp6WlUKhVWq5WZmRmam5sxmUwMDQ3h7+8vcCJ//OMfOXjwIEqlkvj4\neH7xi18QFhbGsmXLkMlkrFy5kunpaVasWEFPTw/Nzc3s3btXsHpbLBaMRiOlpaWkpqby2WefkZub\nS3Z2Nq2trcK8w+KF6+3txel0EhoayvT0NKmpqXR1dQluv4MHD9LS0sJPfvITfD4f7777LkeOHCEi\nIoLGxkbWr1/PyMgIcrkcf39/bDabYMY5f/48vb29ADzyyCPIZDKGhoYEp2NqaipPPfUUvb29DA0N\n8eKLL3Ls2DHi4uKEqmpxinL9+vWcPn2awcFBioqK6OvrA24Rd1FRUcJ8yv/5P/+Hffv2ERYWxuTk\nJJs2beK1116jsbGRZ555hsOHD+N2u9mxY4cw+bioHpSUlPDSSy8B0NXVxeTkJLGxsURERKBWq6mp\nqeHHP/4xiYmJNDU14XA40Ov1vPfee9x3331IpVLa29v58ssvUalUDA8P43Q62blzJx9//DEAL7zw\nAps3bxYCw1tvvcXw8LBgnrr33nsFI5rT6USn09Hf34/ZbGZhYYGMjAzy8vJ46623iIyMxGj8h9Gj\n/0d8IyqF+fl5NBoNa9euxePxMDExweTkJFFRUSxdupSUlBQeeOABnE4nFRUV1NTUCFlxkcAqLy9n\ndnaWuLg4Ybjp9ddfJzs7m6NHjzI3NyfIaYt6dGBgICMjI/T19XHp0iUuXbrEwMAAbrebmpoaPvro\nI8G
QsyjVAYyMjBAeHs7f/vY3RkdHCQsLY2xsTBjl1mg07Nq1C6lUyuDgIGazmRUrVhAREUFQUBCJ\niYlERkYSFRWFQqGgvr4eQPA5BAcHCxdxYWEBt9tNdXU1BoMBt9vN0NAQiYmJGAwGHA4HHR0d+Hw+\nbDYbGo0GtVrNl19+SVpaGp2dncTGxtLa2sr58+dxOBxkZWUxNjYG3ApELS0tBAcHU1tbS1ZWFjdv\n3mR2dpaBgQHa2tpobW3F6/XS3d3NwsICNpsNiUSCTqejpaUFu93Oz372M/bv309OTg7R0dEcOHCA\nsbExpFIpv/zlL1Gr1fj5+QmH+Ny5c6Snp3Px4kXm5+cFZaGrq4tnn31WGCsvLS3lnXfewe12k5eX\nh1gspqGhgRUrVhAdHc3evXv59re/zUMPPcSBAwcEX4dGo6Gurk7op6VSKSMjIwQEBDAwMMDFixc5\nd+4cP/7xjzl16hSPPfYYdrudvXv38umnn6LVatFqtQwPD7NkyRI++eQTwQx3+vRpwUovkUjYunUr\nHo9HkG4XnYmLrdiKFSuIjY2loqICo9EocD07d+6ksLCQwsJC5ufn6erqEmZAtm7dKpy5sbEx1qxZ\nw/bt2wGE6VWv1ytIw4sq0GLbZDQauXDhAmlpaVgsFlJSUr7yffxGBIXFoZCmpiZ27dolyCf+/v4k\nJiaSlJREaGgoUVFRiEQiAGEkeevWrRw4cIDk5GRqa2upra3FZDIBt0ay6+rqKCgowGaz4XA40Ol0\nXLhwgampKSIiItixYwcOh4Pu7m6GhoaQy+UolUpWrFhBSUkJAI2NjTQ0NAhuM7fbLWTjwMBAsrKy\nmJ6e5tq1ayQlJbF+/XoqKytxOBykpaURFhbGypUrycnJ4dixY5SVlTE1NcWZM2dwOBzs27cPuGXi\nWpTsFsvOgIAAQkJCSElJYXZ2FqlUSkpKCiEhIdy8eROPx4PNZmPv3r0UFBQIpq7CwkLa2tpISkrC\nYrGwfPlyAgIC8Hq9HDp0iPn5eebn5wkICMBkMqFUKgVn5OzsLM3NzQwPD9Pa2srly5cJCgpCo9Hg\ndrsFadRms7Fx40YkEonQUi1KcmazmYSEBAwGA+3t7czPzxMdHU1sbCzJycmkpqZSXl5ORkaGYJ5a\n/Mx37tzJ8PCw0Dbl5uYSFxdHX18fCwsL/OQnPxGMPuvXr8fn83HhwgVefvllYQ4lICCA0tJSYYx8\naGhIGPneuHEj//Iv/yLMotjtdnbt2kVWVhZRUVFUVVUhkUhwOp3U19fz6quvsnnzZvLz8yktLcXf\n358XX3wRuMWBdHd3o9VqGRwcxOFwsH//flJSUhgcHCQqKgqv10t4eDh33303bW1tlJeX4/V60Wq1\nzMzMMDw8zF133YXFYhG4q+bmZqxWq9ASZ2VlER8fT2ZmJk1NTTQ3N9Pa2opWq6WtrU0YOJuYmGDH\njh34fD7BVzI8PCwktK+Cb0T7MDo6islkwmazYbfbaW1tRSKRoFAoiImJITMzk7a2Ntra2oRlIgAl\nJSU0NjZy/PhxAgICCAoKIisrS5iM27Jli7BjYfFQBgUF8fvf/55r164RHBxMamoqLpeLZ599luDg\nYM6cOUNvby+tra1s3boVo9FIX18f9913HwcPHgRuBZva2loKCws5d+4cWVlZbNy4kRMnTvDCCy8I\n8+3V1dV4PB4eeeQR3n77bcEUlZeXR0xMDDExMdy8eVPQ6BcWFhgYGGDJkiVkZmaSkpJCZWUlTz31\nFDMzM4SHhyOXy2ltbeWee+7B5XJRWVnJ+Pg4crmczMxMWltbmZubEwZl9u3bx7vvvitwGi+99JIw\n5rsoBy4Gv4sXL6JUKtm7dy8hISF89tlnxMfHC7P7i1JjZ2cnwcHBwuCSv78/a9asISQkBKPRyNDQ\nENu2bRNGv41GI3fffTdVVVX09/ej1+uJiYkRsmpaWhpKpRJAGFKyWq0sWbKEL774guLiYpYuXYpU\nKuXzzz/nueeeIzAwkLKyMqEKXPwsk5OTaWtrw263s3XrVh566CHgVqUQFhZGS0sLJ0+eJCkpiaKi\nImFJza9+9StCQ0PZt28f3//+9/F6vYKBbWBggOnpaZqbmwkPDxfkb7jl25idnSUhIYGysjIiIyOp\nq6tDqVRSXl5OSkoKWq2WsbExsrKy2LVrF2VlZRgMBsrLywkLC0Mulwt7Pbq6ugDYtWuXsNBHq9Wi\nUCgoLS1lZGSE/v5+YcK1sLCQffv2CdXC8PAwCQkJOJ1OpqamEIlELF26VDA9fRV8IyqFiIgIPv30\nU5RKJS0tLRQXF1NaWopKpcJoNHLs2DGhDLdarYyNjVFSUoLdbkcikQijrzqdDplMJgSFzz77TMgu\nc3NzdHR0MDQ0xPHjxxkeHqavr48vv/ySsrIy3nzzTcrLy4XIvXnzZiG62mw2zGYzMTExABiNRh56\n6CEaGhpISkqirKxMGKs+duwYaWlpzM7OotFosFqtHDlyhNzcXCwWC9u2bUOlUvH6668zNTWFxWIh\nJydHeJ/w8HCkUimhoaEEBATwwAMPCPq3w+FApVKRkZGBQqEgKSmJwMBAMjMzcTgcTE1NYTQa0el0\nwvzGq6++isViYW5uTpB9161bx4kTJ/jJT37Ct771LUEJycnJwWw2I5PJaGxspLGxkdnZWfLy8qit\nraW7uxu73Y5er6epqYn8/HyioqKEjUB9fX10d3cTGRmJwWBgdnaWnJwcQkNDCQoKYnp6mvDwcAYH\nBwUJde3atahUKqH3XwyWi5uq9uzZg0wmY3h4mLGxMbZt2wZAU1MT/f39WCwWamtrWbJkCZGRkZSV\nlWE0GpmdnSU/P5/Q0FAAwsPDKS8vF3gMh8PBj370I2FvxdatW7ly5Qoul4uUlBQyMjKIjo7mzTff\nRCaTcf78ebZs2UJ1dTX19fXC3oOUlBT6+/sJDAwUBpjy8vLIzc3l4YcfxuPxEBgYSFRUFN3d3bzw\nwgtUV1fT3t7O+Pi4MNU4Pz/PlStXKCwsBBDcnAkJCcIui7CwMOrr6zGZTIjFYqRSKWNjY4IBS6fT\nkZOTI2yKiomJEYLD4iTlV8E3Iigs9vsTExO4XC5effVVoX/Ozs5mbm6OS5cuMTs7S2hoKDqdDofD\nQU1NDUqlEpfLxZo1a2hvbxcGpQA2b97MP//zPzM3N8e1a9fIzc3F6/WSkpKCXq8nLS2NqakpMjIy\n2Lx5M/7+/kxPT7N9+3Y6OjqEXt/hcBAcHCz04S6XixdeeIGFhQUAEhMTkUgkjI+Pc/z4cS5fvoxY\nLBYu86LVWCQS0dTUhMViIT4+nrfffpv5+XmBZFtciyaXy3E4HBw7doxLly4JE3iff/455eXlTE1N\noVQqOXfuHAMDA8zMzDA/P4/H4yEnJwc/Pz9h6i4jI4P7778fnU7HtWvXhOUhq1atoq6ujtOnT9PW\n1kZjY6PgBQkLC0OpVLJp0ybi4+NpbGxErVazYsUKIiMj0Wq1
bNy4kdHRUeRyOVFRUdy8eZPY2Fju\nv/9+ZDIZW7ZswWq1Mj4+zsLCgjBxuMjS22w2bDYbX375JVarVZiy3LJlC2azGbFYjFKp5PDhwyxd\nulQojSsrK3G5XJjNZmF9WUpKCmNjY2i1WpxOJzk5OeTm5vLzn/+cgYFby4bi4uJ49NFHKSoq4o47\n7iAzM5MLFy5w6NAhKioq0Ol0LF++nKSkJCEZFRYW8uCDDxIQEEBUVBT19fVoNBp++MMfMjo6Ctxy\nNG7cuJGmpiZCQ0OFLVh6vR61Wi0oVDqdjra2NqFdSUhIEKZMr1y5wvr161m2bJkwdAe3vBV/+ctf\neOutt+jq6hI2RKnVaqKjo3njjTeEc/fll18yMTGBVqvFz89PkCxLSkq4ceOGMEL+VfCNCAqL3IHP\n52N0dJSYmBgaGxtRKpW888476PV60tPTKS4upqenh+zsbAAKCwsZHh5m9erVhIWF4Xa78Xg8wkKJ\n8vJyurq6qK2tZd26dcLKNavVyqpVqxgYGMDj8QjqxLVr1yguLmZmZkbwp4eEhLB8+XIuXLgg6Mlm\ns5n777+f7u5uBgcHkclkbNy4USART548iVQqJS0tjWXLlnHfffdhNBrJy8vjrrvuYnp6mlWrVgnt\ng053a21eSEgIOTk5+Hw+weFmMplITExkaGgIiURCQ0ODsEFo0cjldrvZunUrRUVF1NfX09PTI1RI\nU1NTvPnmm1gsFuLi4khOThbY7urqamEaNDU1VbgYi22ayWRCJBJRUFCAyWRiYWGBwMBAwQ68Zs0a\nLBYLPT09OJ1OmpqacLlcALz77rt0dHQIq8GKioqE/ZpKpRKn08nq1atRKpVCKwBw9OhR6uvrCQkJ\nweFwUFJSQn19PeXl5ZSUlCCVShkeHiYuLk7Y4BQdHU1/fz8Oh4OUlBSsVis+n4933nkHlUoFIBDU\nx48fx2AwcOzYMS5cuEBpaSkKhYLa2lr27t2LzWbjzJkznD59GqPRiNPpJDIykn379gnO0NnZWXp6\neoBbG5IWSVh/f3+GhoZQKpUYDAaqqqqoqKjA6/WiUCj4p3/6J7RaLbt372Z6epq5uTnef/99vF4v\n7733HhUVFcKCoJGREWZnZykqKmLz5s2oVCo++OADrly5wsmTJxkaGuIXv/gFKpWK8+fPC4uDFieB\nfT6fwK+kpaUJ0uxXwdey4v3/LUQi0f/+Q9zGbfz/Hzd8Pt9/OQTxjSAao6KiOHLkiLD8Y2FhAT8/\nPwYGBigpKWFgYICOjg56enp46qmnCAoK4ujRo8TGxpKTk8P4+Lhggqmrq8PtdvPaa6/x0Ucf8ac/\n/YmXXnoJkUhEV1cXXV1dwjbjZcuWodVqSUpK4tChQ6SmphIUFERSUhI2m01Y+yYWi0lNTeWvf/0r\n7733HmVlZbz22mvYbDbuvfdeqqurycjIELY/JyUl8eijj/LEE08wNjZGeno6nZ2diEQinE4nmzZt\nEhanBAUFce3aNd59911ef/11pFIpZrNZ0MONRiNSqZSGhgb0ej2RkZFUV1cLZpTp6Wk6OjrYuHEj\nExMTFBQUcP78eUQiEUNDQ2zcuJENGzZw5coV5ubmkMvlJCUl8eyzz/Lzn/+cpqYmBgYGCA8PF5bj\nLi461Wg0NDc3CyrG66+/zq9//WtMJhOnTp0iJCSEgoICwVSzKLfV1NSg0WjIyckRhr9aW1uZmJjA\n5/Oh0+lISUmhqqqKsLAwhoeHhRV4Tz/9tMBJdHZ2kp2djUQi4cknn+SXv/wlNpuN5uZm9u3bR19f\nH6GhoUKVtrjWr6mpiYSEBKqqqvB4PPzlL3/hhRde4M4772RycpKrV6/y4IMPcubMGfLy8qivr6e6\nupr777+f2NhYmpubha3aK1euFEbmR0ZGyM7ORqlUEhUVRXR0NJ9//jmtra2cOXMGnU7HunXrUCgU\nOJ1OoqKieOWVV4QlwaWlpUxNTXH58mWef/55bty4QVRUlLDItaKigry8PF5//XXBZfr73/+eX//6\n13R1dQmEY09PDxkZGXz22Wds2rSJuro6mpubefDBB0lNTeWVV17h17/+NWfOnMFsNpOYmMjg4CAv\nv/zyV7qP34j2YW5ujrfeegt/f39hHbrL5eLAgQOkpqaSk5PD7t27OXToEEqlktTUVHbs2EFJSQmf\nfPIJc3NzDA4OEhb2f7H33tFVl+ne92fv9GQnO72XnWSH9EoKJYVQIiXSFUQFRI+iYhnHUZ81nkGd\nGUePAzNzFEUERRQkCJGeEAgphDRSSO+99953sp8/4r7XOWs96xyf97zvWp71nt8/CRpCsvfvvn/3\ndV3f7+drzWuvvSaaNZo5dlZWFvX19XR2duLp6cnTTz/NsmXLRG16+/Zt2trasLW1xcXFhXv37olJ\ngIaufOXKFeH5T0pKwtbWlt27d4uusre3Ny4uLkilUiorK3njjTeARfaCpgQwNzfHxsYGDw8PJicn\ncXR0pKenh3Xr1gGLmoHMzEx0dXV5/fXXqaqqEg3E8fFxqqqqePDggVBsXrx4URiUfHx8iIyMZGho\niCVLlhAZGYlCoSAyMpITJ04I6Mnk5CQlJSUcPHhQ9Bg09OXi4mJkMhk+Pj6UlZXR29uLRCIRRKbH\nHnuMrq4uUlJSxFH3q6++Ym5ujoyMDCHUioqKwtramrGxMZycnLh8+TK9vb2sX7+e+fl5dHV1aWpq\nwt7eXhjdNI3GsLAwtLS0WLduHbt37+bu3bvs3LmTXbt28frrr6OrqyvIVlu3bhWmoaGhITo7O8Wk\nYGRkBJlMJhqN7u7uhISEcPbsWdatWyfcpnV1dXh6euLq6opUKmV0dBQnJycqKyvJyMggKSmJ4OBg\n4U3o6Ojgu+++E+KzkydP8sMPP/Dcc8+xbNkyrly5QnZ2NgYGBrzzzjv4+flRV1fH8uXLWbVqFSqV\nisOHD5OUlERvby8+Pj6Ul5czNzeHp6cnvr6+AOL9fv/995mZmcHd3Z3x8XHu3btHZGQkarWa0NBQ\nPvroIwIDA3n55ZfJz8/HyMiIv/zlLwwODgqHpp6e3n8/8ZKZmRmbNm1iYWEBd3d3zMzMiI6ORq1W\nU1ZWhpGRkUCKa2lpUVJSgpGREVKplPj4eLy8vIRNWmO8gUWRR1dXF6Ojo7i5uQk3YXp6OoaGhrS2\nttLU1ER/fz/BwcEMDAwwOzuLlpYWiYmJAndeXl4uuAGwaJMNDw9nfn6ekJAQYcm+fv26AGBMT0/T\n1tYmRnYtLS04OzszMjLCq6++ilKp5Pbt2zg5OQm9u1KpZPXq1VRUVPDNN98gk8mIiYlBT09PUJWt\nrKx44403qKmpISgoCJVKRW9vL9XV1aSkpIieQU5ODlFRUSQnJwvgaGVlJVVVVXh5edHR0YGpqSnp\n6enitWxqauLOnTuiaaiZpXd2drJy5UoCAwO5ceMGXl5egv1oamoqUO0dHR1UVlZibGzM3NwcCwsL\nYsa+b98+srKycHN
zY2RkRMzwTUxM8PDwEBZkuVyOi4sLpaWlXLhwgVdffZXGxkaeffZZvvnmGxwd\nHVm1ahX9/f2iJxIXF4dMJkNPT4+ioiLs7e0pKysTiw0WWYrffvutYEouLCwI+fHExAT79++npaVF\nQGEeffRRrK2tWbNmDaWlpYSFhREcHIynpyfR0dFiE/Pz8+Orr74iMTGR6elpcnJykMvlVFdXs2bN\nGmDx4RQXF8dnn3327yDCr7/+OiMjI3z88cd4enpSWlrK119/DSDUtzKZjJycHCYnJ6mqqmJgYICk\npCRBq966dSvZ2dliTHzq1CnKy8upqqoSjfC8vDy2bt36i9fjr8I6feLEifdgUT7s4+ODq6srxcXF\nwiWYkpLCqlWrMDY2prS0lJqaGtHwGhoaIiMjg46ODqRSKcPDwzg5OXH+/Hk8PT3R0tLC1dWVxMRE\ntm3bRl5eHu7u7ujp6REfH4+dnZ3Id4iLixNoLw3cQyKRsGbNGnR0dJDL5SQmJgrLanZ2NlZWVkJS\nPTs7S2Njo7C41tfX88UXX+Di4oK/vz/JycmCvKyvr4+fnx+wWD5duHCByclJTExMkEgkuLq6Ymtr\nKyYyGo7BsmXLaGtrY8uWLeTl5eHv74+/v7/QabS1tREbG8vs7Cznzp1jxYoVDA4O4u/vj7e3NzMz\nM9y6dYuoqChmZ2dZvXo1WlpaJCQkCAqQvb09IyMjYrEtXbqU2tpa7t27JyCubW1tyOVy+vv7BVFp\nYmJCeCs04zkXFxcSExMFq0EulwuGwrVr10SDU2OPfvbZZ6mtreXgwYM4OTlhYGBAZWUlSqWStrY2\n+vr6qK+vp6ysjIMHDwpL+r1796irq2P37t1cv36dkJAQ9u7dy8jICDdv3mTPnj0i1Ka1tRVLS0v0\n9PSESayoqIjIyEikUqkQBWkk0NnZ2bi5uSGXy4WZy97ensTERAICAujr68PR0RFzc3MUCgVZWVmY\nmJiIe8HPzw8/Pz+OHDmCUqmkuroaU1NTvv76a8bGxjh//jzd3d34+vri6urK/fv3MTMzw9bWlp6e\nHlxdXTl//jwBAQE88sgj5Ofn4+fnh4GBAUuWLOG7775DIpHw4YcfkpaWhoWFhUDCV1VV8eijj5KR\nkUFBQcEvsk7/KjaFTz/99L09e/bg5eVFTk4OaWlpPPbYY2hpaZGVlYWurq7wNly+fJkDBw4gkUgo\nK41i4kUAACAASURBVCsTfADNGymRSBgbG+PGjRvs27cPQIBNNAwALy8v7t+/L76/RCJBpVJx584d\nvLy8RBBKbW0tW7duZWJigtbWVgwNDbl06RKxsbEMDg5iaWlJb2+vEIto3qzHH38cHx8fOjs7iYmJ\nITo6mqSkJOLj40UfxMDAgJGREUEJvnPnDlFRUcI+vWfPHgwMDDAzM6O9vZ3Ozk5cXV2RSCT09vZy\n8eJFXnnlFR48eICHhwe9vb24u7sTERFBYmIilpaW2NraMjAwIDIERkdHqa6u5sUXX8TGxoaQkBCq\nqqpEDsXevXuJiIgQ6Lvjx4/j5eXFwsICJSUlREZGClPZ6OgoYWFhVFZWEh8fz2OPPUZ9fT0xMTFi\nRPlP//RPAv3e39/P5cuXsbS0FJtpdHQ06enp5Ofns2fPHi5dusSrr75Kenq6WPwazcHc3BwqlQq1\nWo2JiQkKhYLOzk6KioqQSqV88cUXvPHGG3R1deHk5MS6deu4cOECQ0NDZGVlkZCQQGNjI+Hh4RQX\nF3P//n1mZ2eFziUuLo6mpiZyc3Oxs7Ojt7eXkJAQli9fjlQqFX2T9evXY2RkJH6fp556iu7ubvLy\n8oiJiSElJYWIiAju3r0rRo6WlpYYGRmhUChwcXGhoKBAlEEtLS089dRT6OrqYmtry+TkJBkZGbz9\n9tvU1tZia2tLZ2cnsbGx/PTTT5iamjIxMYGfnx9dXV0EBQURFRWFmZkZ2trapKSkkJqayvbt25HL\n5ZiYmIjS7+TJk/99NoUTJ0685+vry/z8PP7+/oJlp1msYWFhrFq1CkNDQ0EGmp2dxcXFRYx7fve7\n35GTkyO4e5999plAv4eGhjI8PExLS8u/i/tqaWlBoVCgo6ODnp4ezs7OzM3NYWxsTFdXFxs3bkRP\nT4+0tDSCgoJoa2vj5s2bLFu2jJGRESIiIjAzMxNUaWtrazIyMoiMjOThw4eoVCqCgoLo7OykqqqK\n8fFx3N3dmZ6exsjISORGZGVlUVVVxfLly3FxccHOzg49PT3s7OyYn5/n2rVrSKVS3NzcyMzMxNXV\nlcjISFF6lJaWCnSdkZGRaDBpwCt+fn54e3sTEBCAVCrFxcWFjIwMoqOjyc3NxcjIiMcff1x4PjSU\n5LGxMaampujs7EQmk2FgYICtrS2WlpZCSqxxFxoZGREVFSVKNw1ktKuri8TERAGg0ZSI9+/fFyE5\nS5cupaGhgYyMDNzc3IiNjaWtrY2AgABgUdzm7OzM4OAgXl5eAn1uZWWFg4MD+fn57Nu3j7a2NpYu\nXYpEIiElJUUQqK5fv86uXbvIy8vDzMxMZHwMDQ3R1dWFnZ0dw8PDqFQqQY+OjIxkfHxcMBZnZ2fZ\nt28fzT+HFllYWHDy5ElhitPg7mJiYrCxsSEqKorr16/T0NCApaWlgNx4enpia2vLgwcPaGlpwdDQ\nECMjI0pLS0VzuKioCDMzM9555x06OzsFUEZDJzMyMmJoaAgjIyPq6+vx9PSkqqqK4eFhqqurhfir\nu7tbWPBPnz5NfX39fx8c28DAAEqlUsxtx8fHSUlJwdTUlPHxcczMzHjttdf4zW9+g0qlIjY2lt7e\nXpydnTl58iS5ublcv35dZBfm5OQAiMCXgoICsdl0dXVRXl7O/fv3kUgkQkuucUdqNozOzk4GBgYo\nKytjamqKZ555hoqKxegKIyMjcVzUwFP6+/upqqrC09OT5uZm1q9fT0VFBY2NjYIEnJaWxunTpxke\nHubWrVvY2dnR1tYm3JfGxsbCzl1QUEBWVhY5OTk4ODgwPT1Nd3e3eEppZMh2dnaEhITQ1dVFQ0MD\nWVlZAlYqlUrZvXs3ZWVltLe3c+3aNU6fPs3x48fJy8vj8uXLZGZmitd+fn6erq4uqqqqqK+v55FH\nHiEoKAh9fX3x7+bm5pKamsrY2JiA4ZaWlpKZmSlq2R9//JGZmRnc3Nz4zW9+w0svvYRKpRJ8Bx0d\nHbFBafgPnZ2dwKIQTCMlHhwcJCUlBTc3Nzw8PPDz88PV1ZWSkhJWrlwpvBgxMTGYmJjg5OSErq4u\n586d46233mJiYkLIkTW+l5qaGo4dO8bw8DDXrl3D0dGRgIAA3NzcBANiYGAAqVTK0NAQSqWSxx9/\nnAMHDqClpcXs7Cw3btwQiVaaEkXz8NKg66Kjozl37hxKpZL5+XkOHjwoTicmJiaEhoaydu1a4uLi\nhMHMwcFB2PMtLS25du0aq1evxtXVlcDAQIHKz8zMFNxLY2NjQVnS0tJi9+7d
BAQEiDzR0dFRMTX5\npdevYlMwNjamqakJT09PgoODyc/PF4o8ExMTTpw4IVgDLS0tJCUlMTc3h1qt5pFHHhECmJ07dwoM\nFiBQZRqr8/bt29m0aRP29vYEBgaKRtDTTz8tNoHly5cLv8K9e/fIzs5mamqKf/3Xf0VfXx8Aa2tr\n5ubmyM/Px8LCgs7OTnHUb2pqEsTgvXv34uHhwezsLEuXLuW1114TgpiQkBCampoEBRrAwMBAADFu\n3bpFS0sL2dnZLCws8PTTT2NjY4Ovr6+wyWryKRUKBSdPnmTv3r3k5eXR3NyMqampwMLX19fj4uLC\n+vXrefTRRxkfHxdKxtWrV4uehebYra+vj7a2tpAqV1RUiNfUwcEBX19fWlpaqKysxNraGj09PbS1\ntcnKymLp0qUoFAri4uJ48cUXkcvlhIWF8c4777Bv3z58fX0Frl3TzFuxYgXm5uYAREVFkZKSgrm5\nuVCwGhgY8NJLLzE8PExJSQl5eXmcPn0aU1NTDAwMCAsLo6GhAQ8PD9RqNe+++y6pqans3r1bWJH1\n9PQoKyvDx8eHuLg4li1bxqlTp9ixYwcjIyOcPHmS5ORkzMzMeP7552lvbxdPXQ17MzU1lcuXL7Ns\n2TKhaPzHP/7B2NgYZmZmwsfQ0tLCnTt3+OSTT9i8eTNKpZK3336b6OhosrKyOH/+PKtXr0atVpOe\nnk5qaiqpqank5eUJ8dLLL79MREQE2dnZAs/n6+uLo6MjhoaGjI2NMTIywuDgoKBWa6zhO3fuZHJy\nkp07d6JWq7G0tGTHjh2/eD3+KjYFDU/Q398fuVxOQEAASqWS9PR0AgMD6e/vJyEhgYGBARwdHdm5\ncydTU1NcuXKFwMBAnnjiCYyNjRkdHRVHRFjcFK5evSqYBPb29gLT5efnx5UrV+jv76evr4+ZmRki\nIyMFl39hYYENGzYI3mFKSoow7WRmZuLm5oa/v7+gJ2nY/ZodPSIiQvgEBgcH2bx5M9euXaOvr08o\n3wBhbgEEf7G5uZnNmzcTGhpKREQErq6uNDY20traio+Pj/AKFBQU0N3dTWZmplBeajiLPT09DA4O\ncurUKV5++WV0dHQ4e/asaGTGx8djaWmJlpYWNTU1IrF5YWGBxx57jOjoaHp7e1EqlSh+Dt319fVl\namoKc3NzIaetrq5meHgYKysrbG1tGRkZET4ETVqVubk5Q0NDnD9/HolEgpOTExUVFeTm5jI0NERS\nUpJgSnz//ffo6Ojg5uZGYGCgYC8cOnSIqakphoaGMDExwd7ensLCQqytrUlJSUEikdDd3S0MQBYW\nFiQkJIjFUFpayptvvom5uTkdHR1kZ2dz/fp1JicnSU5OFtkiy5Yto7a2FlNTU5ydnXF3d+fGjRt0\ndHQIt6umgQewa9cu3N3dhUI0NjYWExMT9PX1RTM2IiKC9vZ26uvrsbCwYO3atcJub2JiQnR0NG+8\n8Qb79u3jwoULwGKU4nfffcfk5CQtLS2Cu+Di4kJfXx9Xr17F2toaKysrXnzxRSwtLZmbm2PDhg3k\n5+dz9OhR7ty5g1qtprq6mi+//PIXr8dfhXhpYmKC/v5+wSAMCwvDysoKCwsLUXNpQKcBAQHU1NQI\nbLlCocDZ2ZmzZ8/S0dHBqlWrcHV1BRCim4aGBg4cOMD4+Di6urrI5XKuXbsmbiK1Ws2SJUsICgpC\nW1ubv/3tb+zZs4eSkhIhiXZ1dRWxcc888wy3bt2iqakJhUJBcHAw5eXleHh4YGtrS3BwMKampszN\nzXH79m0mJiZYtmwZBw8e5Nq1a0KsYmdnh0KhoLe3F1gcx2maYSMjI2RlZeHg4EBRUZFwi2pOJ319\nfQItHxsbS3h4OBYWFri4uIib2tjYmMDAQCQSCQMDA2zYsEHEpqvVarZt2yYALCqVih9++IGEhAQO\nHTqEVCrF0NBQCIycnZ0FfVpTxmh6PsXFxSxbtozOzk6RVKShKw8NDYkItYqKClpaWli2bBkvvPCC\nKNm6u7vFJqlpxmVlZVFYWChG1e3t7RgaGook5sLCQnGqMjExEYvyhRdeIDk5GUtLS2QyGQMDAwBU\nVlbS2dnJ2rVrBUH58OHD1NXVoVQqqa+v59FHH2V+fp66ujoaGhpQKBSir7Jnzx7++te/8tZbb3H1\n6lXB5EhJSRGp2jdu3GBgYIA333yTjIwMNm/eTFNTE2lpaSJlTPOgk0qlBAQE8Ne//pWoqCjh/4mK\niuLixYu0trayfft2GhoaxEb25JNPcuXKFUHy0pxOk5KSmJ+fx8TEhJs3b6JQKHjyySfx9vYW/55m\n5PtLrl/FSWFmZgZjY2PKysqYn5/n+PHj3L59m7GxMT755BMxwlu1ahX19fX85S9/ESaoiYkJOjo6\naGhooLOzk4KCAhISEgAEg8HJyYmrV68yOzuLmZkZ09PTIhNgzZo1BAcHExsby/T0NC+++CIymYzs\n7Gzm5ubYuHEjg4ODfPbZZ6Iu02xK0dHRuLm5Cb6Ct7c3J06cQCqVMj4+zrlz5/jnf/5nEhISxE3t\n6urKjRs3UCgUpKenY2xszDfffAPAvXv3cHBwwNjYmOzsbG7evMnExAReXl64urqKLMSFhQUxPly2\nbBmhoaHs3buXoKAgrl27RnV1Nffv36eqqkoYsADRoIqPj2fdunVkZWVhbm7Onj17qKurIzQ0lMOH\nD5OZmSkakxp1aXNzM9evXxdQUk9PTwYHB0UyVEZGBlZWVoyNjQlT2N27d2lubhaw1pUrVxIZGSme\nynZ2doILoemDODg4CByam5sblpaWODk5UVVVRUJCgshGrK+vFxu5hrasMRQtX76csrIyIiMjhcHI\n2dlZmIl8fHxQq9ViQ9X0rry9vbl79y7ffvstCoWC0NBQsrOzkUgkDA4OcuDAAebm5vDx8RFhMBqI\nsKmpKadPn8be3p6enh7R0ExMTGR4eJh169YRGRnJ4cOHiY+Pp66ujvr6eqytrbGxsREjRA2rcnp6\nmrNnz7J27VocHBywt7cXgJt9+/bh5+cn0tDWrl0rYDvbtm0TWZ2aHkZRURE3btz4xevxV7EpSCQS\n4uLixHFW451PSUnByMiIAwcOiHrqiy++ICAggK1btwq0l0ZSev/+fRYWFoRCUKVSoaurK5pMmkgz\nGxsbHBwcWLVqFTU1NRQUFNDb20tJSQmenp4iqGRhYYGqqioaGhqQy+XiqTM+Pi4MKJpU5c7OTrq6\nuggODha7eGhoKOvXryciIoJz585x7NgxZDIZGzduRKlU8umnn+Lk5MSzzz4LLPZWJiYmePDggQio\n0dilc3NzGRwc5NKlSxQWFoqkJGtra9ra2rh//z5nz54VkelTU1PU1dWJJ7nmBFFYWEhISIjI7Bwf\nH6elpUXYqzULcXh4mJUrV6Kvr4+Liwtzc3MiHfno0aPk5eWhq6tLcXExWlpa+Pv7Mz8/T39/P35+\nfqxatQqFQiFKOFgEnahUKqanp4U
6sLKykqGhIRH1t7CwwN27d+nr6xNIM81Ievny5ajVaiQSCZ98\n8gm6urokJiaKzdXMzEyc0DQ/s6mpKbCogQkMDCQzMxNDQ0PefvttlEqlOJlu27aN0tJSTE1NxUJs\naWlBT0+PkpISUlJSUCqV4r3X5H9OTEygq6sr5NVTU1P88MMPfPnll+Jn/l//638JvJ1cLkcmk+Hn\n5yc2rsLCQrS0tESPARB49o8++kg8LB4+fEhFRQWffvqpMJvdvn2bxMRE8vPzee6550RgkIODgzgd\n7Nix41cRRf9/dRkYGAiIiEqloqurS/gIrly5QkpKCqtXryYzM5MjR47w9ttv4+3tTXt7O2NjY4yN\njaFWq9mwYQPT09PiJtQgszRy4pSUFBobG8WN3NHRwcOHDwkODhaThIcPHxIXFyeeDhpnISCaQBoX\n3urVq4VjcOnSpfT29mJra0t4eDjGxsZcvnyZ0tJSamtr8fb2JigoiLS0NJ544glSUlJ4+PAhBQUF\nouP8zDPPEBMTg6OjI5s2bcLX15fg4GCUSqVYTP39/dy7dw8LCwv8/f1Fc7OxsRGlUkl5eTm2trYi\nPj4yMhJXV1fRJ/H29qa4uJi0tDThxDt+/Djh4eGYmJiQkpKCQqHA3NxcbIhSqRRzc3ORY7l+/XqU\nSiXOzs6sXbsWuVzOyMgIzs7OtLa2UldXR2FhIWvXrmV+fh57e3uxSDXiofT0dORyudjsP/jgAwAK\nCwvFKWhqaoqKigoqKysJCwtj/fr14ine1dXFjh07qK2tFWzGhw8foqWlxfz8PHfu3GF4eBgHBwcA\nbt26RVpaGq2trYIEnZyczNGjR+nv76epqQkLCwvc3d2Fz+Phw4esWrWKp59+GhcXFwYHB0UCk6bk\nKywsJDo6mh9++IG6ujq2bNkilJKnTp0S0m8Nht7FxQUnJydGR0epq6sTWLr5+XkxHobFDfTZZ59F\nJpOxadMmKioq8PDwYO/evWzcuFGAZRQKBWZmZmJsunnzZtra2vj973+PWq1meHgYCwsLYSH/Jdd/\nqacgkUiagTFgHlCp1epQiURiDiQACqAZeFytVg/9R99nfHyc8+fPY2lpSWRkJHZ2duImvnPnDpaW\nlnz55Zc8+uijQs47ODhIQUEBJSUlWFtbo1AouHz5Mv/yL/8idOlubm7cvn0bqVSKvb09jzzyCFVV\nVYSFhTE9PU1TUxNWVla4ublRVFSESqXC2dmZ3NxcgoODaWxsJCEhgf379xMWFibqyJCQEMbHx8nI\nyODxxx9nZmYGR0dHMjMzRbKUlpaWSHSenZ1lbGxM8ByTkpIEB0KTKQmLOvqQkBBycnJEf0DDfgwJ\nCREfa2tr6ezsZGxsTEBWbW1tqa2tFSMwhULBsmXLBAb+xo0bVFRUiLhypVLJ008/zfr160lLS+Po\n0aMYGhoKqlBpaSlZWVkkJyczODjI2NgYhw8fRiqVcvbsWWZmZlCr1YK6pFar0dPT49ChQ+jr65Oe\nns6TTz7JwsIC+/btY25ujomJCdELMTc358iRI7zxxhsoFAoBxmlra2P//v10d3fz448/ioh3TRiw\nRnRVW1tLSkoKu3btIiQkhJGREWFB//rrr1Gr1WRnZ4upxrp16+jo6MDPz49PP/2U119/nSeeeELw\nGnV1dUlISCA4OJhXXnmFK1euCDny6OgoNTU1guepo6NDTEwMAGNjY0xPT9PZ2UlKSgpXr17lm2++\nISMjg7CwMHp7e/n444/x9fVFS0uLkydPsmbNGi5dusQf/vAHbty4gVqtxtvbWzAjv/32WyHWCgsL\nY2JigpSUFFasWCHuyzVr1nD37l2hYHzjjTcoKSkR5Kw333yTkJAQBgYGOHPmDL/73e/Ytm3bL1rX\n/2+cFGLVanXQv7FkvgOkqtVqDyD15z//h5eZmRkREREsWbJEPFlmZmbo7Oykvr6ee/fu4eHhgZGR\nkch7sLCw4Pe//z1xcXH09/czNzeHh4cH9fX1NDQ0ANDS0oKOjg7z8/NkZWVx6dIlcUMdP34cX19f\nvLy8qK+vF4IiTa9CV1eXubk5pFIp7733HmfOnBGd4ZKSEgICAli2bJnI9FOr1QQGBooxmiaNqri4\nmJCQEPH7aVKoNKEwUqlUNEalUimTk5MiR7O6upr09HSRQhQUFCTqcENDQyorK4mJiaGtrU0EtbS0\ntGBiYkJVVRUFBQVijCaXy0XTrra2Fh0dHZ5//nmRfqWJwgsODqa0tJTQ0FBmZmZobGzExMQEW1tb\n/va3v1FbW8vevXvZsmULkZGRQp+hyeqYmprC2NiYVatW4e7uTmxsrFAEOjg4iE0rPT0dGxsbnJyc\n6OrqEqclDTuira2N3t5ecnJyiIyMRCKRkJOTw65du7h79y7+/v7s3LmTyMhIZmZm+OGHH3B2dubW\nrVsimk3jc4BFzNuKFSsoKyvD3d2dtLQ0CgsLcXV1paenh4KCAqKiogQnVHN6eOedd7CwsMDPzw9b\nW1u8vLwYHBwUVK7c3FyR8airq8v27ds5c+YMVlZW7N27l7KyMvz9/YXTdmRkhMOHD4u4+hUrVuDg\n4MDy5ctRKBQCEOTh4cG9e/e4ePEiGRkZjI2NCem4sbExKpWKl156SYwx29vbhaNXkzyuMd35+/uL\nkfIvuf6/KB+2AN/+/Pm3wH/qxJiZmSEtLQ2ZTMbMzIwI/TA0NKSpqYnjx4/z8OFDTExM0NXVpbe3\nVxCWFQoFL7zwgtgU8vPzxVFJA7IIDAwUGoOCggLu3buHUqmkpKSEoaEhnJycqK6uprGxkfXr15Oa\nmipyCx599FHeffddoqKisLe3BxYNJsnJyUKVZ2pqyueff46BgQFubm7k5OTQ3t5OSUkJvr6+HD16\nlM7OThQKBXl5eSJUtrm5mdraWjIzM4FFurS2tja1tbUoFAp27dqFtrY258+fF4E3mk3P3t6e7du3\nY25uTlhYGD09PSQlJREREcGVK1eIiIjg4cOHGBkZERAQwMLCApcuXaK8vJyBgQGBcfvtb3+Lt7c3\n09PTwsTl5ubG999/T1lZGZ9//jlubm6sWrWKubk5cXIpLCwUeRJvvfUWZmZmnD59Gg8PDxEZp6Fw\ne3l5CV/GyMgIDg4OQjNy+vRp9PT0xGs7NTXFhQsXaGtro6OjQzSKNUEtmkhBd3d35ufnmZiYEFbn\n7u5uPDw8+NOf/sTSpUuFSxMWSz4jIyM2bdqElZUV27Ztw8PDg5aWFlasWIGzszNtbW2Ulpbi4+ND\nfX09N27cwNPTky+++AJvb29u3bpFXl6eWKQAhw8fZvv27eK+bWlp4fHHHxeZFS+//DJhYWFiUtPR\n0UF4eDixsbHY2tpSVVVFVVUVubm5RERECF1FfHw8AQEBYtPYvHkzCQkJpKSkkJ+fj0wmQyaTCfXu\n5cuXuXfvHvPz87i5uXH+/HkGBweRyWQEBQXh5eX1ixfwf3VTUAN3JBJJoUQief7n/2ajVqu7
fv68\nG7D5P/1FiUTyvEQiKZBIJAXT09N4eHjQ2NjI1NQU7u7uIth1//79HDt2jG3btvHdd98REhIiXIHf\nfvstRUVFzM3NMT09TXV1Ndu3b2fDhg0Awp7b0NDAnj17iImJEQirHTt2oFarmZ6eFvkHGpadhhsI\nixOB1tZWxsbGqK6uBhBSWgMDA7S1tVGpVKLe0+DCHjx4IBBYSqWSmpoatLW1ee6559DX1xcBLp6e\nnmJBTE5OMjAwwNKlS8nMzKSkpIS4uDieffZZgoKC6Orqor29XQSd5OXl8e233/Lxxx+LI77Ga69B\nza9atYrx8XH09PQwMzNjdHSU3bt3c/XqVSwtLWlsbBSbU3V1NZ2dnaI7bmdnx+DgIA8fPsTc3FzA\nZGdnZ8Xxdnp6mqqqKs6cOYO1tTX19fXcvn2bwsJCNmzYQG9vL1999ZW42fX19Tl06BBPP/00arWa\nFStW4OTkJByoS5YsITw8HHNzc5ycnPjqq6/49NNP+fjjj8nNzeXSpUuisZqSkiL0JYGBgfT19WFj\nY8OuXbvo6emhqalJWKc3bNgg7PDW1taYm5tz8eJFli9fLnQgFhYWvPDCC4LdsWXLFuLj47G2tqar\nq4uRkRHeeustwsLCBJK+p6eHkJAQNm3aJERyqampfPPNN7zyyiuUlpby17/+lfn5eaysrHj++efZ\nuHEj/v7+VFdXo62tTV5eHtXV1ahUKqGx+fTTT1EqlbS3t+Pv78+PP/5IaGioSA3XyK1LS0uJi4vj\ngw8+wNfXl56eHn744QdxQtMg/zXuy19y/Vc3hUi1Wh0EbABelkgk0f/2f6oXsU7/R6qSWq0+oVar\nQ9VqdahUKqW0tBRHR0cxOlqyZAne3t4UFhZy7tw50Uz805/+JBorW7Zsoa+vT4wj16xZw8WLF0Vn\nODY2lpqaGoyNjamtrcXLy4snnnhCJFG7uLgQFhbG0NAQExMTlJWVoaOjg0qlQktLi+HhYWJiYggP\nD6e5uVkkAi9ZsgQ7Ozvy8/PJzc2luLiYxsZGXF1dMTY2RkdHh7i4OCYnJ8nKykIqlfLUU0+J372/\nv5/29nacnZ2FzRoWuRI+Pj7Mzc3R2tpKQECACKVtbm5mbm4OfX192tvbKSsrEzeRlpYWjo6OeHh4\nMDExITIgQkNDycjIwMjIiMLCQnHs/eCDD1iyZAkqlQqlUolUKiU6OpqpqSkKCgr45JNPWLt2Le7u\n7ri6uvLqq69SVlYmyjMbGxuGh4cxMDAgKSmJlStX4uzszNjYGDU1NQwNDbGwsIBKpeLEiRPIZDLC\nwsLYt28fSqUSY2NjPvjgA+FnUalUPPPMM8BidJqjoyNLlizB0dGRuro6nJ2dCQwMxM7Oji1btuDt\n7Y2xsbEoxTQTGM2Cef3116mpqcHBwYHS0lIAUlNTyc7Opr6+nlWrVpGdnc22bdtobW1FJpOho6Mj\nQLQaafHo6CjT09MiHm/Tpk1cvnxZBOrAYjrUuXPneOONN5BIJCxZsoTMzEz+/ve/U1FRQUFBAbGx\nsXzxxReUlJTg4OCAm5sbZWVlqNVqFhYWiIyMxNDQkAsXLhAXFwcsTs62b9/Orl27aGxs5J133kEi\nkYj3YMmSJfT29vLyyy/zwgsv8PDhQ+RyOTU1NULmPzIyQltbG59//rm4x37J9V/aFNRqdcfPH3uB\nn4BwoEcikdgB/Pyx9z/9IaRS3n//febm5ggLCxPKxe3bt1NSUoK9vb3oMM/MzKBUKlmzZg036FHG\nfQAAIABJREFUb94UTAMPDw+++uor1q9fLxqC2dnZ1NTUoKWlJQI63NzcMDIyYn5+nurqatzd3Zma\nmiI9PZ3u7m76+/uxtbVleHiY8vJy2traeO+994R1FxbrSC0tLaKiouju7kYqlZKamkpgYCDm5uZs\n376dmJgYoqKi8Pf3x97envb2dqRSKVevXsXW1pa2tjYKCgrEIoHFGPbbt2+zZs0a1Go133//PbOz\ns/z444/o6+uLRCaJREJlZSWFhYU0NTXh7+/P9PS0yHJcunQp1dXV9PX1ERYWRkFBAYcOHWJychJb\nW1vR2HN3d6eurg6FQiGant7e3sKKPjMzQ05ODunp6Xh7e5OVlYWZmZkY/c7OzrJu3ToMDAyQy+VM\nT08zNjbGzMwMZmZmXL16lRUrVuDh4cHGjRupr6/H0NCQhoYG6uvrOX78uDBWnTix6NPZuHEjVVVV\nfPLJJzg7Owt2RHx8PDExMaxevZqIiAj6+vro7u6mpKSECxcuYGhoSF9fH7W1taSnp+Pr64tarRa1\ntFKpZMWKFezevRs7OzuWLl2KjY0NEolE5DpqYDyajWFwcJDi4mIBY9GcPHR1dUVT09XVFXNzc44e\nPcquXbtob2/npZdeIiEhgW3btvHSSy9RWFgoNpX+/n6+/vprRkdHqa+vx87ODhMTE0JCQli9ejXz\n8/MAaGlpsWPHDsEd1eRTfvzxx2zfvp3Gxkbs7e3p7u4W6eNFRUVIJBJBvH799dcJDg7G29sbqfSX\nL/X/x5uCRCIxkkgkxprPgTigHLgK7Pv5y/YBV/6z72VqasqdO3eYnJzk/fffF3HuPT09IlSjrKwM\nMzMzVq5cia6uLl5eXoyPjwvASXV1NQcOHCAtLY3k5GRgEev9/PPPC4uzl5cXjY2N7NixA19fXzo6\nOhgbG8PY2JhHHnmEt99+m9DQUB48eICFhQU+Pj6Mj4+zatUqBgcHBTu/rq4Oa2trWltb8fLyYtmy\nZezevZuWlhZyc3P56KOPOH78OOfOneORRx6hs7NTmHGcnJyYnZ1FLpcLUK2npycADQ0N6Orq8vDh\nQ6KiovDz88PHx0eQk+3t7RkcHBTxYUFBQWzbto2+vj6cnZ354YcfGBsbY3x8nODgYNra2oTt9s9/\n/jN2dnbAooknLi6OY8eOoa2tzdzcHEePHqW1tZXi4mIByVX8HD5iY2NDQ0MDzc3NWFlZiUTo1atX\n09/fT1paGiYmJvT39wsJ9Pz8PBkZGZSWllJSUoKWlhZ6enqEh4fT0tJCSEgIkZGRFBUViTk9LIJb\nNYg8DRdjcnKSW7duYWNjw+joKEeOHGFqagpTU1PR/ddY2HV1dQUsViKRCPHS4OAgP/30E3/84x9F\nHJ8m2VljmNPQrE1NTXnw4AFPPvkkDg4ODAwMkJOTQ2xsLDKZjKSkJG7fvg1Aeno6lpaWouTx8fHh\n/fffx9jYGKVSyR//+EcRUlxQUMBnn30mogpkMhmJiYk0NzfT3d2Nvr6+OOavWbOGhoYGhoaGyM/P\n59atW5w+fZqdO3dy584dfHx8uHXrFtra2uJ0oOm52NnZ4eLiQmpqKs7Ozujq6oqx+i+5/isnBRsg\nSyKRlAD5wA21Wp0MfASsk0gkdcDan//8H16jo6PExcXR3d2No6Mjra2tDA0NCZrOzp07sbS0FM08\nmUzG7OyswHhZWFigra3NuXPn8PT0FKKNkpISsat
PTk5y5swZmpqaRGajtbU1CQkJuLm5UVtby/j4\nODKZjPr6euEYfPLJJ4XxRjMlOHjwIBcvXkSpVFJUVERfXx/j4+MiuMPb25u+vj42b97M559/zvr1\n67l69SrDw8PiTXN3d2dhYQEjIyMRAOLg4ICNjQ3z8/NMT09jYmJCXl4eUqkUPz8/0tLSqKurE0lO\ndnZ2qNVqgfxauXIlMTExTE9PExMTw5IlS4RTLiAgACcnJxGCmpyczIoVKwgNDcXb25utW7eKWD25\nXE5CQgL29vZIpVIePHjAypUr+cMf/iBOWRrqsUZToCE56ejoUFFRgYmJCYcPH0ZPT499+/bx/vvv\nU1VVJQJ1fHx8RDydTCYTfZV33nmHxsZG/Pz8qK6uJioqCjc3N7q7u7l48SIffPABfn5+1NTUiOyE\nhYUFdHR0yMnJwc7Ojvj4eBwdHSkqKhJBNxpilYODA9nZ2UIu//3331NUVERTUxNr165lcnKSBw8e\nCObkyZMnqa2tZfny5Zw7d46MjAyWLFmC4ufkKZlMhq+vr2Bb9PT0sGvXLszNzXn55ZdFzW9mZiYs\n7qGhoVhZWYnN39HRUbhZly9fDix6NTTuz4GBAVQqlSgDNfmlExMTjI2NCYnz8uXLhWbk7bffFv2K\njo6O/yvE+//QnP/n+p/r/z/Xfx+as4uLC7t27UKpVApN+pYtW8jKysLZ2RmVSoW1tTUNDQ2sXbuW\n27dvs23bNqqqqujv76e+vl7UZc3NzURERBAeHs67777Lk08+SWNjo/haPT09IQXu7u7G09OThoYG\nnJ2dqa+vJyoqivz8fNHw1MzX29vbiYqKIjAwkO+//x5vb2+uXr3KunXrmJycpLCwUMTA79ixg5KS\nEhYWFsRTvK+vT8AzNQ2269evY2NjQ1hYGDExMRw4cABPT08RZfdvparnzp1j165d4vWRSCSYmpoK\nCpEGPxcUFER+fj75+fns3buXnp4eYHGycezYMUF9VqvVPPbYY9y+fVuUTAYGBtTU1PDEE09QVFSE\nTCZjfn6es2fPEhYWhrW1NY888ghnzpwhPj6enJwcnJyc8PLywsbGhq6uLn766Scef/xxYci6cOGC\nyAbVlIP19fWMjIyIcsbW1pbu7m62bt3KxYsXqa+vp7+/n5CQEAoKClCr1dy+fZvDhw+jUqkwMTHB\n1NRUWLeHh4cpLS3FxcVFWNNhcUrU3NzMu+++S0VFBampqfT29hIeHk57ezu6urqsXr1aJFxrzFsv\nvPAC27ZtQ61W8/e//50nnngCNzc3Ghoa8PPzY/Xq1dTU1PDcc8+RlpZGV1eXSLTq7OwUpYKmv1Nb\nW0twcLAwfmkmNx4eHsJEJpVKMTY2pq2tjUOHDvHP//zPgjGqKX8jIiJITk7GycmJoaEh7OzsmJyc\nxMzMjKqqKtzc3FhYWEBLS0ukcWtpaaFSqWhqahIToP/s+lXInDWjEw1TMSoqiry8PCYnJzE3N0cm\nk9HW1kZkZCSXLl3C0dERXV1dAT1xc3PD1dWVgYEB9PT0BBF3yZIlfPnll9TU1GBiYsKaNWtYWFjg\n4cOHODg4iIab5s3fvn07dXV1fPvtt9jb21NVVcW9e/e4f/8+ra2tInrLzMyM4eFhTE1NsbS0JCsr\nC7lcLtKrNL6DuLg4RkdHcXd3p729nZiYGOHNv3v3rnAw5uXlAbB//358fHxob2+nv7+fEydOcP36\ndRYWFnB2dubevXv86U9/4ubNm4KbUFxcTG5uLvb29uTm5tLZ2SnSrB88eMB3332HhYUFQ0NDwrYs\nk8mwtbWlpqZG8Bhqa2tZunQpjz32GImJiZiYmNDa2kpvby+vvvoq5ubmuLu7i3IrMTFRUKrPnz/P\nxMQEnp6e6OvrU1lZSX19PR9++CHh4eF4enpy4sQJ8vPzOXv2LAMDA6xcuVLoHe7cuYOuri6wSF1u\naWmhv7+fM2fO4OrqSnx8PE8//TTFxcWcOHECMzMzvvzyS/Ly8rhz5w5FRUWsXbtWoM+srKxob2+n\nsrJSiKKOHz9Od3c3xcXF5OXlCdrRd999x/fff09CQgL5+fncuXOHP//5z9TV1bFv3z6KioqEd2br\n1q20tbUJ0xwsejUCAgJwd3dHpVJhaWlJZWUlK1asYHJyUojhampqsLW1pbGxEbVaTVBQEObm5kRE\nRBAWFoZUKhVEcVhsNJqZmaFQKHB0dMTV1ZWWlhacnJxwdXVFLpejo6ODlpYW09PTPProo9TW1lJc\nXMzY2JgwneXm5jI6OiqMVr/k+lXg2I4cOfJeVFQUQ0ND+Pj40NzcLKLOs7OzCQoKwtTUlOzsbPT1\n9cVTa35+XhCAFhYWRA7B999/z+3btwkICBDEJU9PTz7//HPGx8cZGxsjICAAOzs7ysvLkclkmJqa\ncurUKfEmmpub8+OPPwoVXlNTEwMDAzx48AAtLS0WFhbYtWsXSUlJxMTEMDAwIIAqWlpajIyMIJVK\niYiIICcnh/HxcYaGhpiZmSE4OJienh6xiEZHR7l16xY+Pj64uLgwPz8vUPfV1dXo6upiaWlJYWEh\nBw4cwMPDA3d3d5EfWFVVRUhICK6uruTk5DAzM4O9vT3Lly9ndnYWb29vfvrpJw4ePEh7eztNTU2C\nTaj4ORauoaFBOE5nZ2extLSkpKSE1NRUZmZmRP6ioaGhUAIqFAqam5uJjo4mPz8fU1NThoaGmJyc\nFBkVGkKWn58fwcHBwOKUpbCwEB8fH4yNjTE1NWVkZITLly9ja2srzE3Lly9HT0+P7Oxs5ufnsbS0\nxN/fn4aGBiF4UqlUNDc3c/z4cZqbm3FycsLc3JzBwUFiY2MpLi6mqKgIZ2dnQkND8fT0JCsri2ee\neYaZmRkBe1EqldjY2DA7O8tHH33E/Pw8AwMDDAwMoK+vz3PPPSeQeuvWraO0tJTc3Fz279/P3bt3\nBSlKo2zUIPhKSkro7+9H8TPVW09Pj4mJCWpqaujo6ODSpUscOXKErVu30tPTQ3t7OxkZGbi6urJy\n5Uq8vb25fPkyDQ0NVFVVIZfLMTIyEvdPc3MzZmZmlJeXExkZycDAgOB/zszMsG/fPsEzLSoq+u/D\naPzwww/f0wBOy8rKaGtro7m5GYlEImTKFhYW2NnZkZycjImJCS4uLlhZWdHd3Y2pqSnd3d14e3tT\nV1fHzMwMycnJHDt2jKmpKaqrqwUuffv27QJffu7cOZydndm/fz9zc3NCJdjX18fQ0JDIENSEuJiZ\nmZGens5bb73F2NgYKpUKLy8viouLBatRcwM4OjrS19fH/Pw8Dx48EMRfU1NTOjo6cHV1FcEvZmZm\nXLhwgfDwcKanp6moqMDR0REjIyM2bNjA/Pw8arWalJQUYNFB5+Pjw8DAAJWVlVhZWQnJsLOzM2Zm\nZgI8u7CwQFtbm9AVTE1NMTIygkKhYGhoSDj7YNGD0tHRQUtLCxYWFtTW1hIfH094eLgQ64yMjBAW\nFkZLSw
u9vb3s37+fq1ev4unpyT/+8Q9WrlxJdXU10dHRLCwskJaWJl6Lnp4ewsLC8PLyEnmZdXV1\nBAQE0NrayvXr19mxYweGhoY4ODhQUFBAX18fRkZGFBcX09PTw9KlS/Hw8EAul1NaWsqLL74oJOMF\nBQUCXPvaa6+Rn5+Pv78/ly9f5uOPP6azs5Pm5mY8PT3Jzs7G39+f4uJisVDr6+uxtbVFT08PhUKB\nTCbj22+/5fbt24IH6e/vz9jYGLW1tZSUlBAWFoZcLsfHxwcdHR2RHxoVFYWXl5cw3hUVFYmFrbmv\nGhsbOXjwIGvWrMHS0hK5XM769ev529/+Jpy8165dw8vLS4T0wqKGwcXFheLiYvT19fHw8BDTE83X\n9fT0EBsbK05NNjY2JCUl/aJN4VfRU9BotjU7rra2Nh4eHvT09PD3v/+d3/72t+IUcPHiRTo6OtDX\n1+fs2bNs3rwZHR0dGhsbSU1Nxd/fXzD56+vrxbhMS0uLxsZGATf56aef8PLyorCwkL1795KcnExK\nSgoLCwtMTU0JUdDDhw955ZVXiI6OFrr0yspKHB0dyc3NZXJyEjc3NwoLC4WPvqGhgXXr1jE2Nsb1\n69fp6uoiLi5OdPObm5spLy9nYWGBZcuWCW/+ihUrqKioICwsjNraWgwMDIQ+PjU1lccffxyJRIKV\nlRWKnyPiBwcHCQgIwN7eHjs7O06dOoWFhQWOjo40Njbi5eWFh4cHDQ0NfPbZZwQFBYkk66mpKbEh\ntrS0MDs7KwJdo6OjMTMzQ19fH319fYGKi4qK4rXXXuMPf/gD169f58iRIyKLMyQkBBcXF5566imB\nM1u/fr3YgCMiIkRAi7e3t3BBmpub097eDiwem3V1denu7sbZ2VmkLG/duhUdHR0RtDo1NYVcLufU\nqVP09/cL3qFGkj06OipcrbCYQK7BqDU0NIjsi6CgIOGI9fHx+Xfvp6enJ9999x1DQ0OCgyGXy3F1\ndcXNzQ1YpIaVl5dz8+ZNNm7cyPT0NElJScKd6OvrS3t7O2vXriUlJYXnn38ePT09bG1tkclk9PT0\nCECvp6enyKjUjKEjIiKE9V1zOrK2tmZ8fJzo6Gjm5uaYnZ2lrKxMnDz19PTw9/enq6uLubk5MRn7\npdevoqegra1Nd3e3kKSGhIQwNDREd3c3R48exd/fHysrK9RqtRALtbW1CdfdmTNnRNSbSqUSeQqn\nT5/GwsKCyclJ/Pz86O/vZ2hoiIsXL2JhYcHU1BT+/v589dVXODo6Eh0dja2tLX5+ftTW1tLQ0IC/\nvz9ff/01paWlovbfvHkzcrmcyMhIli5dir6+Pubm5jg4OHDo0CFcXV2RyWRUVVWhUCiIj4/H3d2d\nkZERAXd1cXER8FeN2ErzxKqsrBS5D4GBgQLlraOjQ2trKzU1NVy/fh0rKys2bdok6sqcnBzCwsJI\nTU2lqKgIQ0NDkpOTOXToEI899hh79uzB09OTU6dOUVJSQkNDA//4xz94++23Rbkik8m4efMmr7zy\nimjibtiwgf7+fgIDAykqKhJPW82RPC8vj56eHkH50YStLCwsUFpaSk5Ojng9WltbaWtro7W1lZmZ\nGZydnbl//75Qi2rETR0dHcjlcubm5nB0dMTW1pbY2FhefPFFJBIJW7ZsEfLz0dFRSkpKKC0txcTE\nBAMDA/T09JDL5cLiPD4+jomJCefOncPa2lokhG3cuBE/Pz96e3upra1FLpczOTkpOJialGqAuLg4\ncSqwtrYGFp/akZGR+Pr68uOPPzI3N8cTTzwhqM0LCwvExcURFhYmRpna2tp0dXXh5ubGxMQExcXF\njI+Pk5WVJeT1mnj6srIyOjs7xQm4r69PSK67urro6+sjJyeH8vJyMjMzmZ6eRltbG4lEIu6Zf0ug\n+iXXr2JT0NXV/Xd5AhpY6YEDB4Q2XUdHh56eHpKTk8XMf+XKlWRnZwtHn0axp4lV04BNYfHYOzU1\nxXvvvUdaWhqurq4C76Xp8np6ejI1NSUi1TVAUE25orkRrl69KmpsTSnS19dHbGwsExMTREZG4ubm\nRnx8PENDQ1RXV9PW1oZarUalUpGYmMjg4CDm5uakpaWJbnl/f78Q/UxPT5OZmUlFRYUQNfX09KBW\nq7GxsWF8fJwHDx4IsGd1dTW5ubno6uoSGhrKli1biIqKYmFhgSeffJKamhoiIyMFN2Dp0qUYGBgQ\nHByMgYEB/f39qFQq0YR85plnCAkJQU9Pj87OTiQSCampqejo6HDkyBGOHTsmjsIDAwMUFhayfv16\n2tvbKS8vF6h0Ozs7kpKS2LhxI7t378bFxUWwFHR1dampqcHIyEjwLzXyX6VSiY6ODh9++CHPPPMM\nVVVVAneuKXteeOEFfvvb3xIdHS2ITZ9++in6+vpMT0/T0tIiTiAymYz8/Hx8fX3x9fUlLS2N8PBw\n4TeZmJhgZGSEM2fO8PDhQz777DOysrIoKSnh888/5+zZswwODjIzM8OpU6cELau8vJzJyUl8fX15\n//33USqVFBYWcvLkSTo6OqipqRFZlPr6+mRmZmJsbExRURG3b99mamoKExMTrl+/LjgcsKixkclk\naGtrI5PJ6OjooKqqSjQQNSe47OxsRkdHcXFxwdnZmdHRUe7fv095eTknTpwQzExNWtQvuX4V5YOB\ngQERERFMTU2J+ujrr79GT09PNMX6+/tF+GtbWxvp6em8+eabAtrR1NQknIE7d+7k2LFjNDY28uKL\nL9Lc3ExiYiJZWVls2LABR0dH9PT0KCgoEBvOtm3buHLliiAzayy+urq6ODk58b/Ze6/wKK80bfcu\nqUqxFEs554wiiggQoIRAZDAGG4wBN7a7bbe7ne22u5vp6bbHaey2DY5tm2iyEVGAEiggSyWQhHJO\npaxSLoX6D3Ct3bP3vv7hn33iufasE0QBhVRV3/re9b7Pcz+Ojo5CP75161YqKyvFHd7CwoJNmzYx\nMDBAf3+/yI2YmJigoaGB6OhoJiYmGB0dxcPDg6KiIuzt7YUlWWcG0vkWdIzHPXv2MDw8LNxxAQEB\nrFy5kjt37hAXFyfcj2NjY/zqV7/C39+fAwcO4OzsTENDA3K5nL6+Pu7du0dgYCBKpRIjIyPi4uJw\ndnamqqqK4uJi3nzzTV566SVMTU0JCQnhhRdeoKOjg5aWFvLz87GwsCAiIoLExETOnz+Ps7MzBgYG\nuLm5YW1tzalTp9i7dy8ODg6cOHGC8+fP4+Pjw8qVK/n73/9OVFQUixcvprS0FDc3N6RSqTh7j4+P\nc+nSJZ555hngftSfi4uLOCLV1NTQ0dFBSEgIk5OTfPjhh4SGhorN38TEhLi4OMHTNDY25rPPPkMq\nlTI2NiaUfAYGBpiZmZGSkkJxcbGIqlepVKSnp/PEE09w584dYmNjRS5nfn4+GRkZuLu7o1KpBOI9\nOzub7du3U1JSQlxcHCUlJezbt4+KigrOnTvHoUOHyM7OFpDbwMB
AJBIJEomE5cuXc+HCBUJCQrC2\ntsbT05O6ujreeecdJicnqaysBO5vIH19fQLRptFohDx+ZGSEJUuW0NzcjJmZGS0tLdy8eZPf/OY3\nwkGrg8GcOnWK6Oho0Td6kPWLqBR0UW1NTU14eXkxNDTEli1byM7ORk9PD4lEgrOzM0NDQ1RWVlJS\nUsLAwABtbW1CTZebm0tLSwsxMTHk5+cD9+2nVVVVArYRFxfHwMAAMpmMH3/8kZCQEDQaDd7e3kLe\namtrS0tLC0ePHkVPTw9nZ2cuXbpEe3u72BR0d6y+vj7m5+fp7e3F3t6e0tJScnJyhDpSp8Y0NTUV\nDkt7e3siIiLQ09OjuLiYe/fuCR29rmHo4eGBpaUlKpWKtrY2zMzMkEqlYuyp664PDg6iUqmor6/n\n0qVLmJqaCm5kX18fWq1WdM8NDQ2xtrZmfn5eMAXDwsJwd3fn9u3baDQaysvLGRkZoaioCKVSyejo\nKK+88gp9fX3I5XLMzc0JDg4WyUWrVq1Co9Hw5ZdfcujQIZHqpEOy1dfXi+lHWVkZoaGhtLe3c+3a\nNVp+jvxbtmwZQ0ND3LhxQ7y2KpWK3NxcGhsbsbe3p62tTVCU9u3bh4eHByYmJsjlcq5evUpbWxte\nXl6YmZlhZ2eHs7MzGo0GhUIhSNkzMzPk5OQIilRkZKSYZIyNjYkbS21tLV999RXV1dVkZ2ezYsUK\nsbnpAn9eeOEFZmdngftlvpOTE8XFxfT29vLb3/5WqA3/9Kc/0d3dTXFxsfC5lJWVERQUxPj4OEND\nQxgZGbFo0SJqa2v5xz/+QUFBAXBfV+Lo6EhISAgDAwMsX75cqH11NmxHR0dqa2u5evWqmNosW7YM\nPz8/obswMjISZroHXb+ITWF8fFykAh8+fBhTU1PB0s/Ly2N6epqxsTHBuQNEd7iwsJCysjL27t0r\nzCFxcXEA4qJKS0vDzMwMCwsLAgICCAwMFPRhnfCjsrKS5ORkxsfHGRsbw9/fH2NjY7q7u8nMzOTI\nkSNIpfcLq8bGRtEVn5ycJCMjg8bGRoyMjOjq6iIgIEBAVuRyOb29vXh4eJCens709DQ2NjbCIOPt\n7S0CZqempliwYAEKhYLp6Wlqa2upra0lNDSUqKgoTE1NkclkrFq1CjMzM+bm5qiqqiInJ0ckYOtG\nobpQ21WrVuHv78/c3ByFhYXIZDKuXr1Kbm6u6DvoKoQ//vGPREZGYmRkRG5uLnp6ehQUFDAwMMDl\ny5fJzs4W4SKWlpZcuHABpVLJe++9h52dHfPz86xYsYJPPvkEV1dXXn31Ve7evcvixYuJjY2ltLSU\nw4cPI5PJKCgo4JNPPqGtrY3FixcTFBQE3J+sLFq0iKmpKSwsLGhubhZSahcXFzo7O2lvbyc/P5+C\nggKmpqZoamoS2hHdqFQH59Wd0SMjI1m6dCm3b99mZGRE8Dd14+affvoJuVxObW2toFQ5ODjQ0NBA\nS0uLSJDq7e2lv79fzP1v3LhBdHQ0JiYmqNVq1Go1s7OzyGQydu3ahVQqRalUEh4ezkcffSQIzBMT\nE+I42Nvby7vvvouJiYnQKejCXHS4et2Rqb+/X2g65ubmSE5O5oUXXmD79u2MjY0xMDAgpPdmZmaM\njo6K53jQ9Ys4Ppibm6OnpydK5+LiYpYsWYK9vT2jo6N8/fXXRERE0N/fj7e3N48//jjnz5+npKRE\nAFNHRkZ4/PHHuXXrlhCWmJmZMT09LYIz7t69S0pKCtnZ2Rw5coSdO3eyefNm/va3v1FQUEBbWxue\nnp44OztjYmIi3JrFxcXs2LFD3M1iY2OprKzExcVFlObGxsZcuHCBN954Q8AtJiYm6O7uZmxsjNTU\nVGHOeeihh6isrKSzs5POzk4CAwMBRLCKRqPB1dUVJycnrl27xvr16zl37hwtLS2cOHGCwcFBMX6M\nj4/n/fffp6Ojgz179mBubk5fXx9GRkYisl2n5lQqlTg5OYmL8fbt26SkpGBra0tfXx+ffvopUqmU\n119/XVRvqamp7Nq1i5mZGV577TWh6tRtojp3oG6UnJqair29Pe7u7rzxxhvY2Nig1Wrx9PTE3Nyc\nlJQUIURatGgRpqamaDQa8doGBARw9uxZfvvb3wonoa+vL52dnezcuZOxsTGSk5NJSkoS7MPu7m7e\nf/99DAwMxIiwrq6OsLAw0VOSyWSMjo6ybds28vLy2L9/P08++aSYvHh7e+Pi4oK/vz+tra2kpKQw\nPT0tAL1lZWWYm5uTn5+Pl5eX4IBu2rRJaF3S0tLQarUitt7Q0JDu7m4WLlworPpqtZr1Xt/OAAAg\nAElEQVTGxkY0Gg0jIyMip+Qvf/kLd+/eFdeELoz35s2bZGdnizgCHfZP5+vx9/cnICCAnp4e9PX1\nee+994iKikKj0TA8PIyPjw8SieS/X6UwMjJCbW2tQLzv379fINYdHBzQaDSi5C8qKqKgoEBkJugY\nizY2Npw+fRonJydR5qtUKhEZPzMzQ2BgoKDuurm5sXr1ajw8PBgZGaGjowMHBwfS0tIYGxvDycmJ\n0dFRgoKC8PPzo6WlRXRwL1y4QENDAz09PSIEdWJiAktLSyIjI/Hz8wNAq9XS1dWFv78/3377Lb29\nvVy/fp2Ghgaam5tpb28nMjJSfBj6+/tpbW2lrq5OuBcdHBxobGykqalJ5DkaGxuLiwVg586d/PrX\nv8bBwYGLFy+iVCqZn58nNzdXSLd9fHzYvXs3CxYs4L333qOkpAQ/Pz8cHBwYGhpCpVKJI8rw8DAy\nmYwFCxaQl5dHamoq27dvZ3Z2ltLSUl544QXa29tRKpXs27cPOzs7Vq1axauvvioyMnSZk1evXhUb\nt4ODgyAUNzc3Y2hoiM57owOsOjk54eLiIqoSDw8P4b68efMmMTExBAYGIpPJUCgUtLS04OfnR3Jy\nMjExMQwMDCCXy3F1dRWcDbifARIREUFeXp4AyQYHB5Oamsr+/fvZsWMHCxYs4J133iEqKgoTExPs\n7Oy4fv06o6OjLFq0iHv37rFr1y7s7OxEc1gXyKMbGRoZGeHs7ExFRYVA2UVGRqLVanF1dSUmJga1\nWi1yTHU9hJycHGQymcDHWVpaIpfLaWtrEzxIV1dX+vv7sbOzQ61Wo1Ao8PLyQqFQ0NbWRnR0ND4+\nPrS1tZGfn4+joyPR0dEUFRUJNe6DrF/EpmBkZMTChQsJCwtjamqKFStW4PFz8Gtvb6+gJ+mcg7qz\nv0ajES9GdXU1ZmZmlJaWCideeno6dXV1YqQ3Pz9PZ2cnXV1dhIeHMzU1RVlZGVu3bmXr1q24uLgQ\nHh7Oww8/jI2NDSMjI3zzzTf09fUxMjLC0aNHAQgLC8Pe3p7Ozk7Wr19PYWEhnZ2dPProowJ1lpWV\nha2trQBv2traUl1dLYJu5+bmCAsLo6qqSqCyzM3NMTQ0ZM+ePeIiKCkpEeyC8vJy/Pz8sLCwYMeO\nHTQ1NTExMSHCPnTJSn
19fRgaGjI6OsqOHTvIzc0lISGBrq4urK2tycvLo6mpSQBBjYyMCAgIIDEx\nkYyMDPLy8iguLiY5OVnMxjMzM4Vs+1//9V8xMjIiPDycS5cuUVZWhp2dHRcuXKCxsVEcpaytrcnM\nzGTZsmUCHBMdHS3QePfu3WNwcJCxsTEx9x8eHsbAwIC6ujrs7OxYvnw5QUFBjIyM8Nhjj7Fw4UIm\nJibE+FmnpdB5Y6anp8nJyWFqaoqGhgbBadDFztfX1wt47/z8PAkJCVhZWVFRUSGOjTrVYGVlpUgM\ng/uglueff57Dhw8L38bw8DBGRkYi1/HQoUO88sorAtjz008/YWRkRGlpKb6+vkxNTeHl5UVDQwNB\nQUEiyObWrVvcu3dP3Hh0Nxpra2shW66rq0MikYh/o6+vT05OjtBIzM3N8eyzz/LII49gamqKlZUV\n3t7e6OnpCRrZg6xfxPFB57UvKSkR0tkLFy6IN97T0xN/f382bNggQmKSkpIoKSmhv79fJAOHhYWh\nUCiEN//LL79kw4YNPPbYY8B9j8XVq1fZv3+/6FR/++23GBsbU1hYSHt7O9HR0aL0MjU1Ff71mpoa\nvvrqK9asWUNNTQ0+Pj5YWFjQ39/P8uXLUSgU5OXlMTo6ypUrV7h58yYKhYLw8HCio6MpKytjampK\n9AocHBz46quvSE1N5cSJE8D9ZtiNGzdoaGgQEeMLFy7k0KFDgmEglUpZsWIFXl5eZGVlUVpaiqGh\nIbm5uYSGhgrzkS774MUXX+Tll19mYGBAXAy7d+8WWZHt7e309/czPDxMUlISV65cobGxkfDwcK5d\nu8Zf//pXgYu/desWjz32GHfv3hXfa39/P6GhoczOzrJ48WKee+45Wltb8fLyYu3atcTHx2NlZYW9\nvT1SqZQLFy7wl7/8hfj4eORyOdu2bWN2dlbcIZ2dnenq6kKj0TA3N4dSqaS7u5vg4GBWrFjBxYsX\neeWVV9i+fTteXl6MjIxQV1dHaWmpMLd1dnaip6fHokWLCA0N5fXXXxf4fl1Ii6urK2fOnOHdd99l\neHhYcC719PSIi4vDw8OD8fFxUlNTsba2Zv/+/cTHxwv9gK4c/+dQ4MOHD4uUsYCAAE6cOCE2WYlE\ngoGBgbCd37hxg8cff5zQ0FAiIiKQyWT09/cTFRUFwLp16xgaGqKuro7W1lY2bdrEtm3bKCkpIScn\nh9jYWO7cuYNGo2H9+vV88MEHLFmyhPz8fKRSKf7+/kIhLJPJBLPjQdYvolIwNDSkvb0dfX19SkpK\nsLe3Z3x8XFB57ezsBC5NrVYLTkBkZCQrVqzgzp07rF27lsnJSezs7Ni6dStwf7P56aefOHToEGfO\nnKGvr49169bR29uLsbEx+fn54kVfunSpYPH5+PhQXV3NokWLOHjwIIGBgSxevFgQfJOSkpifn6ew\nsBAzMzP09PQEqvvKlSv89NNPGBsbY2lpSUtLi2iUxsTEkJiYiJWVFVKplHXr1omUJbjPlNy7dy/h\n4eGoVCpRKQUEBLB8+XK8vLxwcHBgbm6O9vZ2kpKSePzxx5menmZ+fh6VSiWOHkZGRty8eZNXXnlF\niFoiIyOJiYkhIyMDa2trmpubSUxMJDo6mqamJvLz80lKSiIxMRFnZ2cCAwP5/e9/z/79++nq6uLp\np5/G0dGRO3fuIJVKxUUXGhpKcHAwhYWFAovu7e0tyvXjx4+LfomOVZifn4+dnR2VlZX09PQIDYiL\ni4s4O6tUKjF+9vT0xNXVFVdXVxISEjAyMmJmZkZsBC4uLnh4eJCSkoKnpyfp6enA/5XVYWZmJvQQ\nVlZWFBcXs3nzZqGWXL9+PT4+Pjg7O3P37l2R+G1jY8PExAQrV67krbfeEh6O/v5+8Z65u7tTWFjI\nqlWrRGhMXl4e69evx9/fHx8fH+Lj4xkYGBBK1uTkZAoKCtBoNFy5cgWpVIqbmxtnz95nEmVlZdHb\n28vt27cZHx9HLpdz4sQJLly4wNjYmOjb7Nq1C29vb1asWIFGoyEtLQ1HR0fS09O5d+8eQUFBWFhY\nUFFR8cDX4y9iUxgZGeHy5cu4u7uzZs0aBgcHWbVqFaampqLLn52dzYULF1i8eDEpKSmEhoZy/vx5\njIyMhKlHrVZTV1fH4OAgcH9CYWFhQWRkJKGhofT19Ym7+8jICGNjY1hbW/OHP/wBc3NzNm/eLOLR\nR0ZGKC8vJyUlhbm5OSYmJujp6QGgp6eHO3fuMDMzg1Kp5OzZs1RUVFBYWMjSpUtJT0/nqaeeIjw8\nnPDwcHx9fbl37x63b9/mwoULWFhYMDQ0RG9vL0ZGRjQ3NwP3m2Hj4+MolUo8PDwwNzcXOO/y8nK+\n/fZbjhw5wuTkJAMDA6jVavT09HjiiSd466238Pb2Fs67/v5+oWxbtmwZVVVVJCUlcfXqVa5cuSIU\nomNjY0JLsGjRIpqbm6mqqsLY2Jjy8nIGBgYELn1wcJDvvvuOiIgI5ufnqaysZGpqipGREQ4cOMCy\nZcuYmJhgbGxMmNeWLFmCRCJhenqaO3fu4OjoSHh4uPh+V69ezaZNm4Ra9NNPP0WhUDA2NkZjYyM1\nNTWC1Nzd3U1WVhYZGRn4+/vT1tbG5OQkaWlpJCQk4ODgIC7s2tpa0tLSxGdMIpEIMrSBgYGQmG/Y\nsIGbN28Kd6XO7t7T00NiYiK3bt0SLtiBgQE0Gg3Lly8XzeHR0VHOnDnDxMQEN2/eFPEAuiRtneRe\no9EQHx8vqgRdmI9MJkOj0dDV1cXQ0JDIqThz5oxIelKr1WRlZaFWq5mfnycgIIDJyUm8vLyEE3Rk\nZITY2Fjm5+cxMTHBycmJmJgYwRrVpWQ/yPpFbApyuZwNGzYwOzvLyZMnmZiYQKlUIpVKefrpp7Gw\nsEChUBAbG4u3t7fQEbi5ueHxc8Csh4eH8EfoNAFjY2NCTXj48GFaWlro7Oxky5YtrFy5EgsLC5Yu\nXcqPP/4IQFxcHGlpaSLMZXZ2Fn9/f8bGxsjLyxOBJRYWFmI6EhUVxezsLKmpqTg6OnLy5EkGBweZ\nnZ2lsrKSiooKKisrBcPAyspKTDb8/f1F1Drcl8yq1WrRSJqYmCAhIUGkQfv6+pKens7c3BzXrl0j\nKCgIhULBzMwMCxYsIDQ0VJCJ4f7dUSd5vnLlCrW1tUJrb2xsjJ6eHu+88w5FRUXIZDI+++wzcnNz\nhTciLCwMT09PYSG3tLQU/ZwlS5agr68vfADLli3jr3/9K87OzqxevZrMzEyqqqro6+vj+vXr2NnZ\n4eTkhImJCZaWlqxatYqlS5dy48YNOjo6hA1eV01MTU1x48YNYmNjxTHv+++/x8vLi+npacrLy7Gx\nscHGxoaysjLm5+cFC3N0dJT09HQ++OAD7O3vw8QnJyfp6OigtLSU4OBgfH19OXjwIPHx8ejr63P8\n+HH6+vqQyWSC6t3T08Px48ext7cnLy8PlUrFd999R25ursgA0TWV09PT0dPTE5H
w3d3dPP/886JJ\nPTQ0xPvvv09BQQEZGRls3LiRq1evotVqhWFNB74F2LJli+B3rlu3js8++4xbt24xPT0tvDkVFRU0\nNzfz4YcfiuyTkJAQ0RA+ffo0V65cITAwUFDDHmT9IjaFqakp+vr6sLa2xtjYGDc3N5HzMDs7S09P\nD1qtlp9++kmo/6qrq4mIiBAd+omJCfz9/YUcFO7f0R0cHATIpKqqih9++IGsrCxMTEzYu3cv7u7u\npKWl8fjjj+Pk5MQ333yDpaXlf8iXUKlULF68WOTx6SCqRkZG3Llzh5SUFAoLC8nIyMDPz08cb0pK\nSgTrX6PR0N3djaGhIeXl5Xh7ewsbti7ezszMDCMjI5YuXUpOTg4SiYTx8XER7KGriurq6ti3bx+T\nk5NC+65rurm5uWFqaoqpqSkJCQmsWbOG4uJi3N3dGR4eJi4uDhcXF06dOsVjjz3GokWLiIiIwNvb\nm9LSUmpqapibm8PBwUFAO3Q6fp3Lr6GhQfyqU5xeuHCB7du3s2zZMpqamhgYGMDe3p6rV6/i5ubG\n+fPnsba2pqenR6j6jh07xsTEhADjACxdupS6ujoRmFNdXc3du3dRqVQMDAzg5eUlzuzu7u6sX7+e\nJUuWEBoaKqY4sbGxog+hoy7rjmwGBgaYm5tjYWEhzvO+vr4YGhpiaWnJ1NQU1dXVIpotMzMTFxcX\nQXSG+wrcPXv2APdDjHWp52FhYeTk5FBTU4NSqeTJJ58kKCgILy8vPDw8SExM5NFHH2VmZoYff/xR\nGKOKioro6+ujr69PNAQtLS2pqKjA1NQUpVLJunXr2LRpkxCM6ZqrISEhbNiwAblcTl1dHQcPHkQi\nkdDc3CzSvm/fvi2k1g+yfhGNxqmpKTo6OrCxscHLywtLS0vu3buHVCrl9u3bLFy4EHd3d1pbW4XY\nRAebGBwcxMjICAsLC/T19amuruapp57igw8+oLe3Fz8/P8EJMDY25vz58zQ2NqJUKlmzZg3V1dWo\n1WquX7/Ot99+y7vvvsvNmzdFMOns7Cy7d+/m0qVLgmKUn59PX18fvr6+jIyM0NfXJ75HuVwurNYx\nMTGC1aCj7CgUCrRarRjBnT17VlhidaMtHcK7o6MDDw8PYZXVNQV9fHyoq6sTtnFd2d/d3Y21tTVS\nqRQzMzNhM/f09MTFxUXoG1paWkhPT8fDw4P33nsPGxsb1qxZw9DQkOA5jo+Pc/bsWd58803+8Ic/\niH+vy/uUSqUsWbKEgIAAVCoV27Ztw9fXFwMDA/Ly8mhsbBQ0J61Wi42NDT09PSgUCjo7O1Gr1Ugk\nEhITE7l48aIYr1ZUVAizkJ+fH319fbz00kvCiHX79m2h5vz9739Pf38/zs7OGBkZkZCQwEcffURG\nRoYgXusmO2q1mpiYGAYHB3Fzc8PQ0JCJiQlOnDjBwoULqa+vF3fpJUuWMD4+LhLEGhsbsbGxob6+\nHm9vb1GpALS1tTE1NcVbb71FUVERGzdu5OTJk+zdu1ewJUtKSkQOhru7O/X19chkMlavXk1HRwe/\n/e1vmZubw8fHR1StLi4u4mfdtm0bNjY2SCQSjIyMaGho4Ouvv8bj58xPpVIpjH9eXl7o6ekRFhaG\nWq3Gzc1N6CgedP0iNgW5XI6BgQE3btzA29ub9vZ2UlJSuHr1Kk5OTgwPD6PRaIiLi6Ovr4+ZmRnS\n09NRKpVCbiqTyQSk5fPPPwfu2091I55Vq1YBoFAoUCgUjI+P09HRgVwuJz8/n6GhIR577DGKi4tp\nbGzE1dWVjIwMsrKyxOhPlwpkaWmJnp4eS5cu5ezZs8KCffPmTdHUkcvlKJVKduzYIfIcdHfrubk5\nvvzyS5577jlycnJEZ9jS0lKkTm3evJny8nKOHDlCd3c3ZmZmSCQSbt26hbu7uwC27t+/n/DwcDw9\nPdm4caMYNerp6WFhYYGVlRWRkZE0NzeLSPPjx4+L5pUu47CsrIwNGzYQExNDZ2enyJ04f/48UqlU\nIN/VajUnT55kyZIlrFu3jiNHjpCZmUlTUxN//vOfSU1NxcfHB2NjY8rKyv5D8IlOQNTY2Ii7u7t4\nz01NTVm7di179uwhMDAQX19f5ufn8fLyQiaTkZ2dTX9/PxkZGdy5cwelUklsbCyWlpZcu3YNf39/\nsrOzsbOzE5XEwMCAUEUCQkegIxHppjXe3t7iiKBL4q6trRUXbWlpKQ0NDczMzAhUXU5ODqWlpQCC\naKXL4rx69Sq/+c1v8PLyorOzk8rKStRqNQsWLECpVFJRUSE+y7W1tSI789ChQ8KH8fbbb5ObmysS\nqnNycnjssccwMjKira1N2Pxfeukl0VjW09NjYGCAwMBAurq66O/vZ2BgQBwpdPEED7J+EZCVAwcO\nvLV27VqSkpKwsbHB2dmZ8fFxent7RTPNxMSEtrY2zp07x8KFC6mqqkKtVuPj48P4+DiNjY0EBQVh\nampKWVkZpaWlvPTSS+Tm5mJjY4Onpye1tbW8+OKLgutnYWEh9Pw6+7GTkxPr16/nxo0buLq64u3t\nzZUrV/D29halmG5c1NjYiKenp3AF6gxKXl5eYo49PT2NgYGBuCC6urrw9vbG0NAQhUIhjFmFhYUk\nJCQIU1RhYSFhYWFMTk6KfoSRkRE+Pj7o6+uLY0J4eDgdHR0UFRUJ+EhzczN6eno0NTXR3t5OU1MT\nlZWVWFpaUltby+bNmwkLC2No6H7ub0REBIODg6LaGh8fp6qqCj8/PxFXZ2BgICYYGo2GqKgoxsfH\niYuLExuyra0t7733HjKZjB07duDp6UlRURGurq40Nzdz8+ZNdu/eLZpjVlZW2NraMj4+zuzsLIcO\nHSIxMVF04isrK7Gzs8Pc3BwXFxfMzc1pbm4mKSkJa2trEYjS1dVFZGQkPT091NbWig05KiqKvr4+\nsrOzxXthb29PUFAQCxYsQCqVolKpBLOzt7eX2dlZYZtuampi+fLlnD59mgULFhAbG0tDQwO5ubmE\nhIRQVFTEs88+S0hICDExMfT09NDZ2UlAQICYKOiIS3K5HDc3N0Hrlsvl4rgI97Fuuirg+vXrbN26\nFVNTUywsLBgbGyM/P5/q6mrm5uYwNjbmmWeeISoqiosXL6LRaNDT06Ouro7FixcLJ2lNTc1/OHJd\nv379gSAr/0Nz/p/1P+v/P+u/D83Zw8ODZ555hoiICNGwO3fuHGZmZjQ0NBAYGCgi2nW9hMOHDxMc\nHExJSQlpaWk4OztjZWXFwMAAQ0NDrFq1ivfee4+WlhaefvppPv30UyYmJnB2dsbQ0FDo2nt7e3Fx\ncaGwsBA9PT0cHR1Rq9U8/PDD/O1vf2PHjh0olUquX7/OI488wu7duzl+/LjwE+jm6uHh4ZSXl6Ov\nr09VVRUxMTEUFhYSGhpKYGAgN2/epLW1lSeeeILm5maam5vZunUr33//PWFhYWzZsoU//elPjI+P\n4+7uzqpVqzA0NOTgwYMYGBgQFhYmtB
AZGRkUFRXh7u6Or68vOTk5pKSkoFarKSwsZO3atRQXF6NW\nq8nPz2f79u2Ym5tTVFTE9PQ0jY2NREREEB8fT21tLd3d3dja2jI4OEh6ejqjo6NkZ2cjk8m4desW\nycnJmJubMzk5yczMDNXV1aSkpBAQEEBpaSlOTk54eHiIPkJiYqII6gkODqa3t5ecnBzMzMx4+OGH\nRUUwPj7O9PQ0dnZ2AvP25ptv4uvrS1FREXK5HF9fXzw8PEQDzdHRkZiYGCoqKjA0NKSuro7AwEBa\nW1uxtLTE0NAQuVxOYGAg3333HQsWLOCPf/wj+/btIzMzUyDp5HI5Wq2WU6dOoVAoSEpKorW1FTc3\nN2praykuLub555/n5MmThISECIu+i4sL1dXVtLa28tFHH/Hqq69y5coV/vznP9PV1SVYnLqxZ0ND\nAwEBARQVFRESEoKTkxNffvkl9+7d44knnsDIyEgY5L7++muWLVvGyy+/TFZWFgqFQkTQ6yoanTrV\nysqKsrIyLC0tcXV15eTJkygUCtzd3ZmdnaWsrAwbGxsiIyNpaGggNTVVTEr+s/WLOD689957bx08\neJCOjg6MjIxEWWtiYkJVVRWrV68WABAvLy9aWlrw9PQUIpv6+noGBwdF0+fGjRuUlJQQEBCAnZ2d\nwJLpGoUqlQqJREJ5ebkAvAwPD7Nu3ToBteju7sbKykpEhaWkpDAyMkJWVhaxsbEMDQ1hbGxMeHi4\n8E7oGAINDQ0UFRXx0UcfkZWVRXx8POvXr0epVDIzM8PQ0BArV67k4sWLtLS0COeibsaum6hkZWXh\n5OREREQEo6Ojgk6lVqtJTEykpqYGlUrF2NgYJiYmXLlyhUWLFnHq1CmefPJJ0YF/5513BPJscHBQ\niK96enoEds7CwgIPDw9aW1tpa2ujv79fXNxqtZqxsTGmp6cF2Sc4OFj0SnRsRB2R28PDQ7gQs7Oz\nMTc3JyAgAGNjY44ePcr09LRgQ87OzpKTk0NjYyPXr1/Hz88PmUyGubk5iYmJ/Mu//IuAiEgkErZs\n2cKBAweoqanBzMwMpVIpBGF1dXWi76E7ksjlcvLy8li9ejUmJiZ88MEHbNq0SYyXdT+jzp7s4+OD\nXC7n1q1bIlqgra2Nrq4uTExMGBoawt7envLycsHUWLZsGaOjo8LLUVVVJZiNXV1dXL58menpaQIC\nAgTgd3x8nJCQEKRSKTY2Npw6dQoPDw8R1adQKHBwcMDAwIATJ04wOjpKTk4O69at4/bt2xQUFGBi\nYoK/vz/z8/NYWFiI5Ozi4mIWLlxIV1cXsbGx2Nvbc/bsWW7cuPFAx4dfxEgS7ivDjI2NKS0txcHB\ngdHRUZqamoTSTSKR0N7eLrruLi4uIlRTo9GIFJ309HQeffRRADZu3EhqaiqGhobCzz87O4uXl5cw\nzkgkEuLj41mzZg05OTl0dHQIM5SpqSmGhobMzMyID7Buubq6Cqu0VqvFzMyMe/fu8eKLL2JhYcGz\nzz7L/v37+dWvfoVUKuWVV15h/fr1QlX32WefCe2BTh2nUqlEPNjw8DAmJib09vYyNDREbW2t2EAG\nBgbQarXiQomLixMbZWNjIxYWFmJkVltbS3JyMrm5uezZsweFQiH0+pmZmWJKEhoaypo1a0QuJdxv\noq1duxZdAPD09LRgRAwNDdHS0iJIUE1NTfz0008UFhZy69Ytvv/+e+zs7IiPj2diYgI7OztcXFx4\n9tln8fT0ZN26dUgkEubn53n44YeFWtTZ2Zn29na8vLz47LPPePvttzE0NBSZHV999RXm5ubMzMyw\nbt06fv/737N8+XLMzMzYu3evGBvr5vI6nYKLiwvGxsY899xzfPPNN6hUKgIDAzly5Ah6enosXLiQ\n9vZ2ioqKkEgkYlSbkpKCnp4e7u7uwtLd19dHQkICcL9x3drayieffIKenh5lZWVs2rQJX19fBgcH\n0Wg0bNiwAW9vb3p7e2ltbWVycpKgoCARbdfQ0EBMTAyLFy8Wz/vQQw9RUVFBdXU1rq6uLF26lIce\negiZTIaHhwdbtmwhODhYWLA7OztpamoiIyOD5ORkAgMDSUpKoqqqioqKiv8j78MvolL4+OOP39LT\n08PLy0uU47qgzQMHDvDDDz9w9OhRUlNT6evrY3h4mJ6eHpqamnBwcGD9+vV4enoSFxdHa2urQGYl\nJydz/vx57Ozs6OzsFIrGxMREenp6CAgIoK2tjWPHjrF8+XLkcjl3795lfn6eTZs2iYty7dq1vP/+\n+zg5OXH16lWSkpLo6uoiMzOTrKwsbt68KaK8/Pz8BKZNp2+4dOkSw8PDdHR0YGpqSm9vL21tbWg0\nGqGau3v3Ln/9618ZGRkRiHNjY2NcXFzIyMjA1NSUmJgYhoaG6OrqEhmMFhYWQtDT29uLhYUFjo6O\nrFy5ktraWhGvFxwcTFZWFvr6+lhbW+Pg4MC5c+cYGhqira2NqqoqDh06hFKpFOAQXWbim2++SVBQ\nEKOjozg4OHDt2jXMzMzo6+sjIiJCGLzGxsaIjY1FKpXi5OREa2urmAIYGhpibGyMiYkJn3/+OT/+\n+COOjo6ipPX39+eLL77Ax8dHiH28vb2xsrISx8gVK1ZgaWmJs7MzmZmZTE5OCnyetbU1hw4dwsXF\nhZSUFD7++GMcHBwwMTEhOzubZ555RnhrWltb8fX15dChQ4IzMTs7KwRzzs7ODA8PiyohNjZWoORH\nRkbQ19fHxMSEc+fOsXv3biYnJ4mNjRWUqJGREUpLS5FKpWzZsoXKykoGBgY4d+4c1dXVuLu74+jo\nSFdXF3K5XPA7Ozs7GR4eFhOZ+fl5XFxcWLZsmeBcnjlzhpKSEhYvXkx+fr5o1Ox06PUAACAASURB\nVOqOC3K5nMHBQWxtbTlz5gxFRUVkZGTohFj/fSqF6elpHB0dOXr0KJ6envj4+GBoaMjChQt55pln\n2LZtGx988AG+vr6sXbuWhx56iICAAFpaWmhoaBDmqaNHj4oQDbhvl9Xx+t9//31CQkJQKBScO3cO\nY2Nj+vr6qKioEKjx2dlZoqOjhdtNpVIJReTzzz8vEp5sbW3ZtGkTV65c4dFHH8XHx4fKykpcXV1Z\nsmQJWq2W0dFRbGxs+OGHHzh//jze3t5s376d8PBw7t27h6+vL0FBQXz//fckJyeL12Hjxo1s2LCB\noaEhnJ2dhSrtxo0bTExMsHjxYqysrLh8+TITExO8/fbbFBcXc/z4caysrKitraW/v5/S0lJGRkZE\noEhDQwOjo6PU1NRw9uxZ3nrrLc6fP8+//du/YWpqyuHDhxkdHWV+fh5HR0fs7Oxwc3MjMzOTnp4e\noqOjqaioEFzLubk59u7dy+XLlwUzYuXKldja2rJs2TKWLFnC2rVr8fX1xdzcHJlMJpScuupEpVLR\n39+PRqMRbEIfHx/27NkjNBe6n3tqagqZTCYYEkNDQ5w6dYozZ87w1VdfcfbsWZYvX87ChQspKCg
g\nJCSEK1euCF5naWkp9fX1NDY2EhwcLIJupqenycjIoLKyEkNDQ7y8vDA0NMTGxoYjR44I9HtLSws1\nNTWCjaCDA5eUlKCvr093d7e4mRkaGuLs7My1a9dEclhvby+bN29mz549Qsi2cuVK6uvrOXfuHFKp\nlJqaGlGNZmRk4OPjg0ajobGxkaeffpr29nbS0tLYsGEDMpmMRYsW0dTUhKGhIbGxsZiZmVFTU4OR\nkRHHjh0jJSUFAwMDSkpKxGf3QdYvYlPQ09MjLy9PqO86OzuRy+XU19czMDAgCEWWlpZ88MEHKJVK\nsrKyWLJkCRkZGVy5coXjx4+LfAEdj2758uXk5eURFRXF5cuXhVJSo9EQGRmJgYEB69atY3x8nNbW\nVsLDw2lvbychIYGGhgaqqqpYuXIlSqWSe/fuifGRWq3miy++wMbGhqNHjzI3N4eVlRVhYWF8++23\n9Pf3C5//oUOHkEgkKBQKQUweGxtjYmKCY8eO4ejoKOArs7Oz5OfnExAQIFx927dvF6V6fX09jo6O\nIiGor6+P1157DSMjI15++WUsLCwEZcnBwQFLS0vy8/NFPsHOnTsJDg4mLi6OqKgokpKSeOGFF1iy\nZAm/+tWvkMlklJeXC+3G559/LlykBw8exN7entzcXLq7u/Hw8BAU7cWLF3P06FFKSkqwtbVlZGQE\nlUrFyZMnmZ2d5caNG9TW1lJWVkZ5eTnm5uY0NTWRkpKCqakpCxYsEJr/27dv89JLL7Fu3To6Ojow\nNzcnLy+PY8eO8cILL6BWq/n44485d+4c8fHxrFixgn/84x8cP34cfX19mpubUalUGBoaEhERIY4P\nExMTuLi4iF5HfX098fHxJCYmMjw8TGxsLAkJCbi7u3P27Fnq6uqwsbGhsLAQuM941KV4Ozo6kpOT\nA9w33engsN7e3uzduxcXFxfUajUuLi5ifJyamsrq1avx8vISaeZtbW1MTEwQERHBvXv3BCcD7gvZ\nent7SUtLw8PDQwTg5uTksHLlStF3sbOzExR0tVotQod1UXsJCQkkJSURExPz4Nfjf/YXJBLJVxKJ\npFcikVT+02PWEonkqkQiqf/5V6t/+rNXJBJJg0QiqZVIJGn/78/6H5dMJhPhHToqcUtLCyEhIfz6\n17/G2dmZ7777jpdffhkbGxuys7MJCQkR9tbnnnsOpVKJra0tBgYG4oXNycmhu7ubwsJCgcmWSCTE\nxsbyhz/8QWQbvv/++zg7O3Ps2DFGRkb4+uuv8fT0FLNyXWS5TmTU1dUlxCJbt24VFt/Tp0+TlJTE\nzp072bhxIydOnOC1114TTACdBPupp54iMzOTqKgoEdum+yAoFApxV0xISOD8+fO0tLQIUEprayth\nYWH8+7//u3B7tra20tjYiEKhwMrKis2bN1NUVER6ejru7u7iiKLLCwgNDUUulzM9PU1qaipFRUXi\nbK0TLF2/fp3o6GhWrlzJG2+8gbm5OdbW1kJmPjg4iFQqRSqVCtdmWVkZbW1t3Lx5kw8++ECYlF57\n7TUyMzOxtrYmPDyctLQ01q9fL4JnysvLRdCNpaUlS5YsobW1la1bt/Lwww+zZcsW1q5dy+rVq+nr\n66OoqIjLly9z7NgxYarq7e2lu7sbtVpNVFQUKpWKxsZGYaPXJWr19/cLDLyJiQm2trbo6+sLxagO\nZtvS0oKtrS3W1tZ88cUX7N+/X/gNDh8+LC4yW1tbbG1tyc3NFfmWOmCug4MD7u7ubNiwgbm5OR57\n7DHy8/OZmZnBysqKjz76CJVKRUtLCwsXLkSr1Qocm5eXl5gYxcbGYm1tLaY5g4ODzMzM0NnZiUKh\nQCaT4enpiZGRERMTEwIBYGZmxvLly1Gr1aLX8iDrQUaS3wAfA9/+02MvA9e0Wu1fJRLJyz///iWJ\nRBIEbAWCAScgWyKR+Gm12rn/3X8gk8nEmVzH4a+vrxeS5rfffpvg4GDRaGloaBChqNHR0YLtL5PJ\nhCAGoLy8nM2bNzM+Pk5lZSXbtm3D1taWkpISVq1ahVwu5/Tp04SEhODl5SVi5BUKhRi1TU5OCoy5\nzn0pkUgwMzNDq9Xy1ltvCTWhjtyri0lPS0ujpqYGb29vwT9ob29naGgICwsLEd1+584d4P60ZXBw\nkNOnTwvrc1JSkshGGBoaYseOHVRXV9Pe3s53331HXl4eCxYsQKVS4evrKxpzWq0WlUqFt7c3y5cv\nJyoqSsStubi4MDIyIuy2crmcLVu2EBcXR0dHB++++y5yuVx8oG/dusXIyAiDg4P4+/vT3d1NREQE\nEomEoqIiFixYwBtvvEFnZ6coo4ODg7l+/TrW1tYMDAzw0EMP4eHhQVtbG5cvX2bnzp2CUKRzfQKi\na79x40ba2tooKChgYmICe3t77O3tKSsrw9/fn/j4ePbt28edO3dwcHDAz8+PP/zhD8TFxREZGYlc\nLmflypWCkJScnExeXh5jY2OoVCoSEhKYnZ3F3t5eZG84ODhQXV3NzZs3MTQ0JDU1VaRp6aY1Pj4+\n7Nu3jx9++AG43yB/9NFHsba2pqSkBH9/f44ePUp7ezsLFy7EycmJkpISenp6iI+Px9/fn8HBQfz8\n/HBxceHixYs8+uijvPDCC7z44oui6tBRxe3t7bl+/ToajQZbW1tBhJqfn8fGxgYzMzNB/QJElVxV\nVcWqVav48ccfRdLag67/tFLQarV5wOD/7eG1wD9+/vofwLp/evyoVqud1mq1zUAD8J/WLbqz7szM\nDDMzMwwPDwvbs4ODAwkJCTz55JNCF6BLPQoNDaW3txcbGxuR+tTb28utW7eA+/y89vZ2CgsLcXNz\no7W1FScnJ5555hnGxsa4ceMGlpaWYtdXq9XI5XJMTEwwMzNjamqKO3fuoFarUSqVuLu7A/dtsKtW\nreL27dtC0pudnc0//vEPmpubSUhIoLm5GZlMRnR0NPr6+gJJNjs7i4WFhRhJKRQKAeIMDg6mublZ\njFS9vLyIiIjAycmJ6OhogoKCaGhoEGdShULBiy++KOLpSkpKSE9P586dOzzyyCMMDw/T29vLxx9/\njFKpFKrDpqYmwsPDBQ358uXLJCcn09XVRW9vL8899xyLFy9m7dq1fPvtt6xfv56GhgaRj6DroejS\nunVA0e3btwsydmpqqrAR6zr/169fp7KyEiMjI6qqqnB0dKSjo0M0lQHx3Pn5+RgbGzM+Ps7Vq1dZ\ntmwZUqkUhUJBaGgoCQkJmJmZ4e7uzqZNm7Czs8Pa2prS0lIRs1ZdXc38/Dxwn3moQ8yFhoZiYGBA\nRUUFUqmU0dFRtFotHR0d5OfnExUVJaZbcrlcIP50Ksyenh7xnkVFRXHixAmqq6uRSCRcvHiRmJgY\nFixYQGRkJKdPn+bixYt4e3vT3NzM6dOnxXElLS2NhQsXcubMGYKCgsjNzRWjUl1T2sHBgTt37lBT\nU8PIyAharZZLly6J2D1d9oeuSZmfny+s7z/++CN9fX
0kJSVx+vTp/+wyFOu/2lOw12q1OoN2D2D/\n89fOwD+LrDt+fux/u3SNp3Xr1lFTU8MPP/wg7ki6WLHi4mL+/ve/8/nnn/P555/zxRdfcOjQIS5d\nukRPTw9PPfUUCoWC8+fPc/78eeC+WcXa2pqkpCRcXV3RaDQcO3aMnJwcgoODeeKJJ9i+fTv6+vrM\nzMywaNEi1q5dy+7du7l37x5yuZwdO3YwNzdHdXW1OEf6+fkxMjKCvb09+fn5uLm5kZSUxK5duwgM\nDKSwsJDx8XF++OEHdu3aRVFREVlZWRw5coSamhr6+vpQKBRYW1tjZWUlvPlKpZLIyEhee+01Hn30\nUdra2gQG7tKlS0JPkZOTw927d3nnnXdEOtC+ffswNTVlbGyMDRs28NVXX1FYWEhPTw/+/v68/vrr\nwP3RnLOzM83NzczOzqLVagkNDaW+vp4LFy5QWFiISqUS7r6CggJ+97vfYWVlxfbt20XcmVqtxtfX\nl5aWFqKiorCxsaG0tJTy8nI+/PBDDh06JEArQ0NDjI6OYm9vL/gHUqmUxsZGIiMjUalUhIWFAXDz\n5k3i4+OF429iYoK5uTmB0x8YGCAxMRG4j/C/dOkSLT+H3Hp5eYnxpru7u0gwh/vQkqVLl9Le3i5Q\n9GvWrOHzzz+nqqoKmUxGy8/JV6+++iq7du3CysqKtLQ0kRym6+foqgiAy5cvo9VqCQgIQCaTsWnT\nJmxsbMjMzGRgYIDk5GQCAgKwtLTkwIEDnDhxgq1bt/LMM89w8OBBkWni6uqKjY2NGE+npaUxNDQk\nxG2nT5/mzTff5OLFi8TFxTE5OUltba1It9YJy3T2/6effpqRkRGCgoK4dOmSCPd9kPX/udGova+T\n/j+WKUskkickEkmpRCIp1SmwdClMLi4uAjNmbGxMeno6AQEBbN68Ga1Wi5OTEwcPHsTU1JQDBw7w\n5ptv4ubmho2NjeAvADg6OhIaGkpISAjXrl0TPMO8vDx6e3uJiIggMDBQAEzHx8fx9PTk2rVrnDlz\nRgBNXVxciIyMFB9cjUbDpUuXBCKturpa6A3+/Oc/k52djVarJT4+ntdff529e/cyPDyMVCoVOgkz\nMzM6Ozv54osvhIPN3t6e3t5eAYOVyWTIZDImJibEPNrPz08ccV577TVWrlyJmZkZ7e3tmJubo1Kp\naG1tRV9fn7Vr17J9+3bq6+t5/vnnuXbtmsDZ7969W1QvJSUlXL9+nTVr1iCVSsUHvbq6GoVCQWBg\nIENDQ+KiLy4uZnJykqmpKTw9Pens7BS0pdu3b6NQKJibm6O0tJSysjJaW1tpbm7GwMBANP50XEmp\nVIqnp6ew9uqyDLy9vdFoNHh6ehIQEEBlZSV1dXWYm5vT09NDQUEBUVFRREZGYmdnR1paGsuXL8fW\n1lbEDq5atUoI4UZHRykvL6e7uxuNRoOlpaVgH9TW1vL+++8zPDws+JQXLlzAwMCA27dvU1VVRUhI\nCBMTE+jr64t4PbjfgNQRw0xNTbly5Qp6enocO3aM6elp0WQOCgoSpb9UKhXampycHDw8PNi4cSMK\nhUJcvIWFhYJ0fffuXZ588klBCO/o6MDKyoqNGzfS0tJCWFgYLS0tmJqaYm5ujlqtFmPQoqIi/Pz8\nRCP3QdZ/VeaskkgkjlqttlsikTgCvT8/3gm4/tPfc/n5sf/H0mq1B4GDAIGBgdrKykphy/X392d8\nfJxbt24xMDBARESE6LzruAJxcXHExsYKma/OBfb555+zc+dOvvjiCwwNDbl48SKZmZnEx8czMjLC\nokWLRAR5Xl6eUL/19/fT29vLvn37cHFxYXh4mOHhYbq6uoTJRZchAPdzBGpra3F1dWViYoLGxkbm\n5uaIi4ujuLiYvXv3cvv2berq6nB3d2dwcJBHHnlEZCZ4eHgImrPOsmtqaoqXlxf37t0jJCSE4eFh\ncYyJjo4mODiYiooKzp49S0REBO+++y5r167l/PnzVFdXk56ejoWFBZ2dnUgkEj755BMyMzORyWR8\n+umnQu8wMDBAbm4uSqUSR0dH6uvrcXFxQaFQkJiYSEFBAc3NzUxOTpKcnIy7uztKpVJcEAYGBpiY\nmDA1NUVtbS1Lly7F2tqazs5O0tPTMTY2Znh4GKVSydzcnMhFGB4eRiKRCIOX7rkiIyPFSDIsLIxz\n586JpnBBQYGQuOtI3Lq+ga4/oxt5WlhYYGZmJqodHTEL7h/Nuru7kclkxMbGolKpsLe3Z926dVy6\ndInw8HCysrKwt7dnZmZGsDKSk5MJCQmhs7MTAwMD4SLVVaOOjo4ikfzkyZMsW7aMmpoajI2Nqamp\noaysjKVLlxIeHk5AQADt7e0YGxsTFhaGm5sbixYtwszMjI6ODrRarfh+ddOjvr4+9PX1+eabb7h7\n96547XWKTWNjYwwMDPjhhx/YvXs3ra2tzM7Oiutl9erV3L17l6ysrAe+uP+rlcI5YOfPX+8Ezv7T\n41slEomhRCLxBHyBkgd5woyMDLq6ukQMfWFhocB8V1ZWcu3aNU6ePElERAQffvgh7e3tODo6UlNT\nw9DQkMj5MzY2FjNZExMTpqenUSqVQnL6xhtvcOfOHWxsbIiOjsbAwIDs7GwGBwd5/PHHBWU5JCSE\n1tZWLl26RGlpKZaWlqLzrtNSpKamYmdnh4mJCV5eXiQmJrJ48WIWLFjApUuXOHDgAJGRkSxcuJB7\n9+7x9ddfs2XLFiwtLXn77bcJCgpCo9GInL/m5mbq6+uxtbXFx8cHIyMjvv76ayYmJoSDT6VSkZyc\njFwuZ+fOnbzwwgtcu3aNXbt2sWTJEh5//HFWr15NcHAw27Zt48KFC0xNTbFp0yYWLlyItbU1EomE\n7OxsPD09mZ+fJywsjNnZWWpra0UzTiqVUlxcTHx8PLa2tqSlpaFSqTAwMCAkJITg4GCys7NJSUlB\nJpNx7do1fvzxRxGIW1VVJWCsoaGhNDY2MjIyIjbBc+fOUVNTQ3R0NKWlpWI+f/fuXWZnZ9mwYQOB\ngYHExMRQUFBAcHAwW7ZsYenSpfzlL3/h+eefp7u7G61WS1NTk0hm1uUpjIyM4OPjIyqTvLw8fH19\nWbFiBcPDw9y8eZOLFy9y+fJlbG1thbjMxsaGLVu2YG5ujpeXF4GBgejp6dHR0YFarRY2fl32pUQi\n4e7du0Lt2d3djZ6eHuHh4bz44ov4+Phw6tQpfve73+Hp6cnDDz9MaWkpQUFBNDY2iqlQd3c3Li4u\ngqWo1Wrp7u6mo6ODmZkZampqMDExYfny5UxNTXH37l1Onz5NdnY2DQ0NhIWFCVVldXU1fX19xMTE\ncPfuXSIjI8UR9UHWg4wkjwCFgL9EIumQSCS7gb8CKRKJpB5I/vn3aLXaKuA4UA1cAp7+X+ydZ3hU\n57mu71HXjHrvbdR7Q0KIIoFoAkSxwRQT4tjBBZc4ib2N4+3EZcdJ7NixvV2TYAO2qaYXCWShhnqX\nUEe9l1GZU
Zdmzg/O+vbJr+Mf5zqX93Xt9ReQ0Gitb33f+z7vff/fOg/wAJMttYJkMhnFxcWMjo4S\nGBgozsetra3Y29uTlZWFlZUVr7zyCrdu3SI0NJTNmzeLotrMzIyYcWhoaGDz5s3U1dVRVVWFUqlE\nqVQSGRlJQEAAarWayMhIQUfaunUr+fn5TE5OsmHDBlpbW0W1WU9PTxTEKisraW9vZ2hoiJSUFPFL\nmpqawtHRkb6+PlQqFUeOHMHf35+ioiIiIyNRKBR0dnZy5swZMjIyOH78uGAHwoMKuUajQa1WMzY2\nRkBAAD4+Pvj5+fHmm2+Snp4uiqv+/v64u7tz584d/va3v4ki6cTEBLm5uXR3d3Pt2jWWlpaQdmEZ\nGRnCQNzY2MjMzAxRUVEiPg0PYsZRUVHY2NgQGxsroLplZWXcvXuX1atXC8V5eHg4+vr6nDp1iuzs\nbLZs2cLg4CDt7e2899572NnZUV5ezvDwMJWVleKBjY+PZ9myZfj4+Ii2obTgSp9FeXk59+/fR6vV\nilCWRqMREei9e/eir69PZ2cnf//730VG4ebNm6JIl5OTI6xeO3fu5OLFi+jr69PW1ibs1ktLS1RU\nVLC0tERwcDDW1tZiXmNqaorS0lLKysqwtLRk1apV9PX1sWnTJrEdDw8PZ+/evezcuZPQ0FDMzc1F\n3UZfX5/w8HCSkpKIjY0lMDAQpVLJ3bt3RUbBxMQEuVxOd3e36HrAA5SgJByWFnXp9+Lt7U1ERASp\nqals2rRJ1GycnJy4du0atra2+Pr6CrlNbW3t/1scm06n26fT6Zx1Op2hTqdz0+l0/9TpdKM6nW6d\nTqfz0+l0KTqdTvV//P3/0Ol0Sp1OF6DT6W7+mP+EBN6QzlSXL18mKSlJoNTS0tK4deuWwK67u7sL\n8s/rr79OVlYWZWVltLe3i2k5eNDeUqlU/5IYlCb/JDFJbW0tcrlcQFTm5ubYuHEjNjY2REVF4ezs\njIeHBy0tLSJkZG9vz9GjR7G1tSU/P581a9ZgZWWFSqViaGiIdevWERMTQ0pKCgsLC8jlctRqtZCV\nhoWF4eHhwejoqNC/wQPqkJOTE5aWltTV1Ym6QH19PR4eHvj4+HDq1CkhNpW6Dl9++SWxsbGiCCi9\nzRMTE4mMjMTKykqcae3t7XnooYdYuXIlH374IdXV1axduxZjY2P09fWRyWTs3buXRx99VMRjpcLo\n0tKS0LZdvnyZkpIS+vv7CQ0NJSkpSbQPY2Ji+Pjjj5mYmGDfvn0sW7aMkpIS8vPzRTFzcnJSQEtN\nTU05c+YM8F905JUrV+Lk5ERQUBDm5uZ4eHjwyiuviIVs1apVYpc3MTHBjRs3mJiYYMWKFfzmN78R\nD1BiYiIA6enpODg4cPToUaysrOjv7ycsLIxz584J5FxKSgqLi4vi/hocHMTGxobOzk6+/vprgoKC\nBEdUaoO3tbWh1Wr59NNPmZ+fp6GhgRs3bnDs2DHu3bvHzMwMdnZ2GBoakpaWRlxcHHp6erS2tgrU\nW3p6Oubm5tjb2wsSeUdHB/r6+sTExODk5ERjYyP19fWcPn0aKysrscuztLRELpeLOsnc3JxQy1lb\nW6PRaEhOThaOyh9z/SRmH95///0/PPXUU6L4IxmVpd6xh4eHEJUEBgayevVqpqenaWxsJDg4GBMT\nE9LS0kROIDQ0lE8//ZTU1FQKCwtxdnbm7t27aDQaNBqN6DvX19fT1NREXFwc8/PzpKWlER4eLvDh\n3377LQYGBjQ1NZGcnMzg4CDZ2dkEBQXh5uZGQkICmZmZWFtbs2/fPjo7O7GzsyM+Pp6FhQVKS0uZ\nnp7G3d2d27dvi8KipLaX/ITS4EtwcDCOjo6Ym5uj0WhoaGggKSlJuABTUlLEiLHUypKU9yqVitra\nWr788ksx3hsfH09ubi6Ojo5kZmair69PbGwsZmZmhIaG8uKLL+Ll5SVuUklDdunSJS5evMhnn32G\no6MjERERTE9Pi3ZndHQ0ISEhrFmzhsnJSXJzczE0NOSpp55idnaW6elpjI2NcXBwEKZmKTQkvb08\nPT2Znp5mamqK0dFRoqOjOXnypHB4SrsLlUrF7OwsarUatVrNxo0bxeK5fPly1q9fT2JiIlNTUyQl\nJdHd3U15eTmtra1YWlrS2NhIVlYW7733HgsLC3h5eZGeno6ZmRm+vr4YGBiIRbOgoIDvv/8eGxsb\nSktLCQ0NJTU1lYKCAg4dOkRNTQ35+flERUVx7Ngx+vr6CA0NZWFhAZlMhqmpKbGxsejr65OamkpI\nSAhNTU189tlnjI2NodFoaG1tJSYmRgBgbG1t0dPTY2hoCKVSSVtbG7m5uSQlJeHq6kpbW5uowYyN\njWFtbY27uzs6nY6Kigq6u7sJDQ2lv79fYABiY2MFsk6qL2zevJm///3v/31mH+zs7MjNzcXW1haV\nSsXy5cvx9/dnbGyM69evc/bsWYyMjCgvL2d0dJTjx4+LboWLiwtmZma4urri4OCAn5+fQLG3t7fz\n0EMPoVAocHNzIyUlBZlMRlJSkiABGRgY0N3djbm5Oe3t7ejr64vxbCMjI2QyGZ6eniwtLfHQQw8B\nD0QdycnJxMfHC4R7aWmpiNUWFBTQ3d3Nc889x507dzh8+DAXLlygoaFBFIvy8/Opra1l27ZtIqa6\nb98+mpubsbGxEUWxjz/+mNOnT6NSqcjJyeH777/HyMhIhKs8PDxEDiI3NxdnZ2eKioowNjYWqDIf\nHx/i4+PR6XS0tLTQ3NzM8PAwLS0tHDp0iMzMTIaGhqioqKC3txdzc3MsLCxISUmht7eX/Px8VCoV\nGRkZwIPU4eLiIrm5uSxfvhy1Wk10dDS5ubli7Fomk4ngj1wuJywsjJ/97Gdiwe/o6KCpqYnMzEzc\n3NxEhLysrIyZmRnGx8dxdnYWVKdt27bx8MMPo1KpBOrthx9+wN/fXwStsrKyqKiowNzcnI0bN9LW\n1ibCPB0dHYIXKe2K7t27x+rVq/Hw8BDnd5lMhr6+Pnfv3sXT01OYzRsbG+ns7KSvr4+enh52794N\nPMgpSBQoT09P4WNoampiaWlJeEjc3d2Zm5vj3r171NXVidH9mpoaNmzYgKurKydOnCAsLAx4II8d\nHx+noaEBY2NjPDw8WL9+vchh2NraIpfLxQzJ4uIiNjY2IgkpKeY8PT2xtLQUx7Mfc/0kFgV4QEhW\nKpWMjo7yz3/+U2zrjh49Sl1dHR9++CGXL1/miy++IDQ0lICAAD7//HOhl//ss8+Epuv/vNLT07l+\n/TqPPvooxcXFvPDCC+LvOTo6Eh0dzdLSEh4eHujp6VFWVkZfXx/+/v5s3rwZjUbDihUraG5uFnbo\n6upqcnJyOH78OCMjI7S3t6NQKDA3Nyc3N5fc3FwAsTUMCgpi2bJldHV1YrKOjwAAIABJREFU8cQT\nTzA5OUlycjKvv/46paWlJCQkAA+My1JLzt3dnbq6Onx8fPDw8MDS0h
JDQ0N+97vfkZeXJ0aqJyYm\niIiIwMjIiD179hAWFsb27ds5ffo0RkZG1NbWIpPJaGhoYNeuXdTV1fHdd99x5swZMSim0+nw8fER\n7cWhoSFiY2MJCQlBq9UKJ4OpqakAq1RVVbFmzRo++ugjli9fTktLi7A5l5aW0tXVJfiFWq2W2NhY\nbG1tcXV1ZWRkRDzcNjY2IrQG8Oqrr2JjY0NFRQUrVqzgxo0brF27lgsXLlBXVydQe1JBUKFQiNZm\nSEgI3t7eTE9Pk5mZyeuvvy4WBYkWLs2pWFhYoK+vz8WLF2loaOD48eNidkShUHDo0CGOHTtGb28v\nwcHBNDc3ExoaysMPP8zo6KjwOz7//PMsLS1RXl7O9PQ0ZWVlwtFw/vx5HBwciIyMJCoqSiy4CwsL\n6OvrY25uTmZmJseOHWNoaAh7e3sRMgoJCWFpaYlXX32VlStX4uLiwtzcHFu2bBFR5unpaSYnJxkb\nGxNSoqamJkZHRykvL2dubo729nbGx8eFW+THXD+J48NXX331h9nZWcFKlBiNfn5+rFy5Uoz62tvb\n4+TkxKFDh5ibmyMkJIRz587x0EMPERUVRUVFBZs2baKzs5PTp0+TkJDA1NSUUHkHBwdTUVGBra0t\na9euZfv27URFRXHmzBlxbpPOalFRURQVFWFtbU1jY6OgGlVVVbF582ZkMhlffPEFjz32GOPj40xN\nTYlCohT1TU1NRU9PTxCBdu7cyc2bN9m8eTPx8fHcuHFDsBdv3LiBk5PTvxT3kpOThWB3dnZWkKIe\neeQRrly5gqOjo0h+1tTUkJaWxsTEBAsLC3h7e5Obm8tLL73E2bNn0dPTIy8vD0NDQxGmkSxEU1NT\nqFQqEaRSq9V0dHSIUeH29nZxjLOzs8PGxoYnnniCM2fOIJfLWb9+vYDfent7Mz4+zvbt2zEzM6Oi\nogIXFxeB6W9sbPyXnrmXlxcGBgYMDAxw4cIFwsLCmJubw8DAgNnZWbZu3YqLiwtVVVWipiS9ca2s\nrKirq6Ouro7p6Wlyc3Px8PBgenqaU6dOoVariY2N5cKFCyQmJmJqakpMTIxIAHp5efHYY4+J7oNE\ngnJyciIjI4OAgAAiIiJQq9XY29szMzNDVlYWa9euZXx8nNOnT7N9+3asrKxQKBT4+/uLe0eyg928\neVPkE0xMTIiOjmbv3r0MDg7S2toqwnEqlYrm5mZ6enqEZUt6w7u4uGBqakpJSYlQAEq7gomJCezs\n7Lh79y6JiYmCtCQ5RZaWlnB1dcXDw4Mvv/zyRx0ffhKLwvvvv/+H3/3ud5iamnLq1Cmef/55jI2N\n+fOf/0xCQgIZGRmYm5uLaLDUZ3dwcGDdunUCqXXp0iW8vLwYHR3lwoULrF27Vmy1JGGnoaEhly5d\nYmpqStQcQkNDyc/Px9ramuTkZPEQSJVqlUpFWFgY9vb23Lhxg9WrV5Odnc3q1avRaDRkZWUxODjI\n/Pw8Go0GBwcHuru7hRW4o6ODnTt30tDQIAJGkiWqvb0dZ2dnMjIycHJywsvLi8LCQrRaLevWreOt\nt97C29ubkJAQIiIi8PLy4t1332V+fh5nZ2fs7e3RaDS4urqyf/9+rK2thTkoKSmJsbExrKysMDEx\nEQMzOp0Of39/9PX1CQwMFOf4kZERTExMRFZeJpPh4+NDZ2cn1tbW2NjYoNFoUKlU6OnpsW3bNkEt\ncnNzY3FxETc3N8LCwqipqSEyMhIjIyOamprw9/cXO7KZmRmUSiXT09NiW+vg4MDXX39NUlISkZGR\nIkMiLQ6SIEeai0lKSkKtVovg0OzsLFZWVsIm7ezszIoVK+ju7ubWrVv89re/pb29XYxpW1paChbB\npUuX+OCDD1haWhIKPhcXF2F+XlhYoKysjDVr1hAUFMT333/P6OgoeXl5jI+Ps2bNGqqqqvDz8xNv\nahsbGy5duoSnpyebN29mcnKS/Px8ent7sbGxwcXFBZ1Oh6GhoTgqTk9Pi5yLnZ0dKSkpguhcVlYm\njNednZ0AYuJ3ZmaGZcuWMTExIRatoqIiXFxcKC8vx9jYmIaGBm7evPnfa1Gws7PDysqKhYUFkbH3\n9PTEw8ODkZERnJyciImJwcjIiI6ODrRaLUqlkhs3bqDVavn66695++23qamp4ezZs7S2trJy5UpR\nkHN1dcXJyYmKigp2795NWFgYxcXFZGdnk5OTQ0VFBVFRUfT39+Pn58eJEycwNDRk27Zt1NfXC4X4\nlStXOHr0KPfv38ff319YqTw9PXF1dSUsLAwjIyMCAwNFcVOhULC4uCiI0FKoxtDQEFNTU8LDwzl7\n9qyYAgwJCWFubk70phcXFxkZGRGcvsbGRkE3lnT31tbWODk5ER0dzZkzZ9DX12f16tWcPHkSjUZD\nUFAQV65cISUlRWzdpYnMVatW4eLiwvr16+ns7KSurg43NzcWFhbIzs5m165dorIdEBAgqFX//Oc/\nsbCwQCaT0d/fT2xsLMPDw2g0Gu7du0dISAgtLS2o1WrBlJQkrzdu3BBtO5lMJmoE69ato7i4mLVr\n13L16lUcHBxQq9VYWFjQ2dkpFqYdO3Zgbm6OoaEh9vb2wug8NDREYWEhTU1NODg4sLCwQGZmJo6O\njgJ7LxVWIyMjGR4extXVlV//+tc4ODgQHR2NQqHAxMQEPz8/TE1Nyc/PFy3soqIidu3axdTUFDdu\n3GDz5s2iM1RVVUVvby9WVlZ89913pKam4uTkxOXLlwVPoquri+HhYRoaGgSBOyMjgw0bNuDn50dv\nby8VFRU8/fTTaDQa+vv76erqErMpra2teHp6kp+fT0pKioDiXLhwQbRvpUVpYGAAZ2dnLCwsaGxs\nJDs7+39ozv9z/c/1P9e/XP99aM6enp68++673L9/n6mpKeLi4ujp6aGvr0+k/iQ0Wn9/P6tWrcLD\nw4PXXnuNlJQUYmNj6erqEimwoKAg1q9fz5dffomzszMuLi5kZ2fj4uLC9PQ09vb2mJiY0N3dzbp1\n6ygoKBAsRikFKRGOfXx8aGlpoaGhARcXF44cOUJVVRV9fX3cuHGDlJQUDA0NmZycBB6Eb6StaWFh\nIffv30etVnPgwAEGBgaoqqpibm6OsbExtFotFy5c4Msvv2TdunU8/vjjuLm5sWvXLr744gsSExPx\n8fHhs88+4xe/+IXg/hsYGDA8PCy+X1VVFQ4ODlhaWmJgYCDqFAEBAdTU1ODi4kJtbS0KhUI4Bxwd\nHdHX16exsZG1a9fy97//HblczuTkJDY2NszPz+Pi4iJi1oWFhaJlGh8fL4I9c3Nz4tyuUqnYsWMH\ngYGB3LhxA1tbW1xcXER+IzY2VsS0ExIS0Ol0zMzMiNmK1157jZdffhmFQoGtrS1nzpwhOTkZuVxO\nREQEPT09oh0pl8tRKpVUV1cTFhYminxbt27l9OnTYh5AT0+Pv/zlL7z//vs8/vjjXL16lXPnzonO\nUXt7O25ubsLbUV5eLtiJv
r6+5Ofnk5CQwMLCAsHBwWRkZODo6IipqSnbtm3jscceY9euXQQGBlJa\nWopCoaC5uVl0dqSE7eDgICqVioKCAmJiYoiMjCQrK4udO3dSU1NDUFAQlpaWFBYWiq6VpaUlf/3r\nX3F1dRUcTMlfOj4+LlqTly9fJjg4mPj4eI4ePcq7775Lc3MzWq2Wnp4e5ubmBLXsx1w/ie7D/Pw8\n2dnZQnf2ww8/YGlpyc6dO3F1dcXW1hZTU1NMTEyECEShUIjztr6+Pjk5OYyMjKCnp8fHH38MwLJl\ny4QVee3atSwtLREQECDOp+7u7pw5c4agoCBRtJmcnBRx69DQUM6fP09AQACJiYnExMQACGiq1DtW\nq9VCcDI+Pk55eTmff/65QMNJqTM9PT3u3r2LkZERo6OjLC0t8bOf/UxU9+fm5oiKiuL27duiCp6R\nkSGOTVJYJS0tDUtLS5RKJcnJyQQHB6NUKomPj6ejo4PExEQyMzMZHR3F1dWVmZkZkaZLSkrC0tKS\n2tpaQbYaHR0lOTkZhUIh1GYJCQmiUJWQkICLi4ugQI+NjfH5559jZmZGVFQUMzMzgm1QVVXFhQsX\nGBwcpKysjDfffFNMqjo7O1NfXy9GwsfGxvD09OTu3buUlDxIw8/MzGBkZIS5uTlpaWkCYtLQ0EB/\nfz/6+vrU1NQwODhISUkJDQ0N4v/k4uIi/I8jIyMicAWwefNmysrK6O7uFjWLmpoa4WCUMgVShPvO\nnTvo6T14PEJCQrh//z69vb3CTn337l3ggZTYwsJC0L+7urrQ09Nj5cqV1NfXk56eLlrMGo2GhIQE\n4XiU5DwmJiY4OTlRUFBARUUFAP39/WRkZPCrX/2KJ598khUrVrBz507Wr1+Pra0tExMTNDc309zc\njLOzM1qtFmNjYzZv3izo2NnZ2URFReHp6cnnn3/+o5/Hn8SiIJPJ8Pb2FlLSyMhIoqOjGRoaYmJi\ngjt37vDcc8+JM7i0I5Bw7ENDQwI00tTUxJ49e4AHvV65XE5WVhZTU1NiMdDX1xfnr9nZWbRaLfPz\n80RFRQkU2tatW5mbmyMpKUlgwyWKT0NDA2q1mocffpimpiaio6PZsmULMTExrF69GisrKw4dOoS3\ntzfe3t4EBATQ2trK6dOncXBwoKenh5UrV4qgiTRxNzc3J76HhYUFenp6xMXF4ejoyLFjx0hPT2d4\neJgLFy6IMV5JcSf19sPCwujr6xNx26KiInx8fCgpKSE3N5ezZ8+K4qtk31apVCJM9d133yGTySgr\nK2NhYQF7e3vOnTuHjY0NNTU1LC0t0dfXJ9q1+vr6HD9+nPPnz/P222/j5+eHWq0WFixfX1/Rsx8c\nHMTa2hpPT08yMzOZnZ0VxdKXX34ZgLS0NBYWFhgYGGD79u1UV1cLnHtNTQ3Dw8McOHCA8PBwli9f\nzjPPPCMW9bCwMIaGhjhz5oxAk0mmrjfeeIPp6WnGxsZ4/PHH+dOf/oSZmRmVlZWMjIyINKaEjdu0\naROffPIJERER1NTUkJiYiJ6engDvREdHAw/mYAoKChgYGKCjowM3NzdGR0cxMTHhhx9+wM/Pj+Li\nYiIiIoQQ2cfHh8bGRhHq6uzspLOzk6amJkFIklKKS0tLfPHFFwwMDHD16lVhUp+enqa3txcHBwfG\nxsbEz3Ho0CEmJiaorq4WNiobGxteeOGFH/08/iQWBa1WS05OjmAM1NbWMjw8jLGxMR0dHRQVFZGY\nmEh9fb2Yf0hPT+ebb77hypUrnD9/nv7+fjHhJnH1FhcXcXJyElq0yMhIEhIScHZ2xsjICGdnZw4c\nOMDCwgJXrlzhmWee4d69eyiVSszMzOjt7RW0H1NTU5E/CAoKIiMjg4WFBRQKBQYGBoLEo1arWbZs\nGXl5eTQ3NyOTycQYrFSYtLKyEoNe9+/fR6rrbN++nYyMDORyOZ2dnSxbtgyFQkFlZSXJyclERkby\n1ltv0dTUxK1bt2hpaeGTTz6htLSUwMBAnJ2dMTExESjz7777jj179og2oIWFBUZGRmLXIi2eTU1N\nXL16VSwqZmZm7N27V4Sk1q9fT1RUFObm5mK7n56eTnx8POfPn8fV1ZX8/HwhBTY1NaWqqorp6Wls\nbW0xMzPD2NiY9PR0MRglDbkpFAqBZYMHGRDps3nnnXdYtmyZ8IZKyPQPP/yQAwcOcOnSJXp7e7l8\n+TITExOYmppy8eJFuru7WVxcpLu7WwxlPfLII4LmlZ2djUql4s9//rPItSwtLWFlZYWNjY0oXDo6\nOuLr64ulpSV3797lpZde4uOPPxYzJQAFBQV4e3uLuHxeXh5arZb+/n7c3d1JSUnhiSeeIDg4mL6+\nPmZnZ/H09BSQWRMTE1GA3bt3r2BFtLa24ufnx9LSEgMDA0JGtHz5cnx9fVm7dq2A32zYsIFNmzZx\n+/ZtxsbGePrppwWwp6qqivT0dAFv+THXT6KmoK+vz0MPPYSdnZ2Qy8pkMvEBx8XFiZivRGyWXIwm\nJiaiPWloaEhQUBB2dnZ88sknTE5OYm9vT0hICM3NzWLrCQ9ugurqaoF3a2lpoaysjJycHNGejIuL\nE/PqMzMz4q0jHR/KysqYmpri9OnT+Pv7i4p7eXm5gLM2NjaKNl5ISAimpqbExcXR3d2Ng4MDa9as\nEaPTLi4uvPjiiwwNDdHS0sLp06dFCtPExITAwEDxNly5ciXNzc0cPnyY3t5empqaiI+PJzU1lUuX\nLmFkZMQLL7zAhx9+iLW1NStWrOCbb75BoVDwwQcfYGVlJWQ7MzMz7N+/n5aWFmpqavDy8hJz/JLH\n88yZM6SlpQFw8+ZNkpKSmJqaYs+ePYJINDc3R19fH7W1tZiamhIcHIyenh7Dw8NMTk7S3NyMpaUl\n3d3dpKam8u6779LT08OyZcuExVmhUBAbGytqFW+//TYhISFYWVlx+PBh+vr6qKqq4p133sHPzw+l\nUsnw8DAymQwLCwt27dpFcXEx+/bt4+LFi2Ig6vvvvxc1End3d/Lz89m9eze9vb0EBQVRUVFBc3Mz\nPj4+hISEcPPmTSYmJqivryciIgKdTsfBgwfFrlYihstkMlpaWoSqvqamBn19fSYnJ3nqqafEcVIS\nysCDIFVQUBBarZaysjLBUpTGpeFBmK+lpUXUj3bt2iWmdqWW75o1azAwMODWrVscPXoUmUwmdiD2\n9vbEx8eLduby5ct/9PP4k9gpGBoaikm1oKAg2trauHLlCvPz88hkMnbu3CloxrGxsUxPT1NZWcnQ\n0JDY4hoZGVFQUEBkZKTg/fX19eHg4CAQaA0NDWKoRxqs8vDwQKvVEhgYyOnTp1mzZg3JyckkJSUx\nMDAgLD1SbxlgYWGBuro6goODsbCwwNjYmLS0NNEmlG5QiV6kUqkE4EUay5amH7/44gsxoainp8ep\nU6fIyMgQOvojR47g7e1Na2sr+fn52NjYYGNjg7m5OY899piIOhcXF9Pa2sobb7wh4C
AnT57kF7/4\nhYDG+vv7U1lZiampKd9++y3Lli0jJyeHrVu3Cn7E7OwsJ0+eJC8vj9u3bxMREUFXVxcPP/wwt2/f\nFhHm3t5eysvLKS4uJisrCyMjIwYHB0lLS+PNN98kOjqahYUFLl++LHYoEsUoMTERtVrN9u3bKSgo\nQE9PT6joy8rK6OzsRKvV8sUXX6BUKiksLOTQoUP09vaiUql44okn2L9/PwkJCeTl5YlttMS9TEhI\nECRjpVIJwG9+8xtSUlK4e/cu7u7uQrwaGhrKwMAAs7OzdHZ2YmhoyL1794iNjWViYoLly5ejp6eH\nqakpenp62NjY4ODgwIkTD5ClUvBo9+7d3Lt3j7S0NPz8/LC2tmZmZoa2tjbR2nRxccHV1ZWGhgbh\ngQgLCyM0NBRLS0uqqqo4f/48AAcOHBDg271795KdnU1HRwczMzN0d3dz5MgRof5bt24dtbW1/PGP\nf+T69et89NFHKJVKDA0NuX79Ou7u7gI282Oun0RL0tfXV5eQkMDu3buxsbHB3d0dlUpFa2srLi4u\nFBUV0dXVhbGxMRMTE1hZWQnwxvj4OF1dXcIHmJiYSEtLC3v27KGoqIjh4WEBSJU4AQEBAVy8eJHg\n4GB0Oh1RUVEYGBgwMzNDXV2dGF0ODg5Gq9Wi0WhEL/iFF15g+/btxMbG0tnZKUzVUrpQSmJKZ0M9\nPT309fW5cOECk5OTrFq1iqysLFJTUzEwMMDY2JiBgQHS0tJobm6mtLSUxMREvvvuO7G66+npUVdX\nh62trZjgu3LlCouLi8TGxuLo6Mi1a9eoqakRgzSSkVmKXh84cIAffviBzs5OvLy8UKlU+Pn54eDg\nwNWrV/H19SUjI4NNmzbh4+PDtWvXxJ+XlZXx+OOPo9Fo+O6778QsydLSEsePH2dgYID4+Hji4uKY\nmJjAyMgIX19f9PT0sLW1ZXR0lM7OToaHh9mwYYPwSkpHvq1bt1JQUMAvf/lL/vjHPzIwMEB4eDj1\n9fXY2dkxMDAg5hscHR2Znp6mra0Nf39/iouL/2W6083Nje7ubtLT01laWsLR0ZHDhw/T1dUlZDDn\nzp1j7969mJiYiPCV1Nt/4403iIuLY/Xq1Wi1Wjw9Penp6eHMmTO89tprGBoaolKpsLW1JTAwkE8/\n/ZTY2FjKyspEHeDRRx/lq6++IjQ0VAy8DQwMUF1djZWVFUqlEi8vL9RqtRjHnp+fF7Hl559/nsrK\nSuRyOeXl5Vy/fp3NmzdTXl6OqakpDz/8MJmZmaI7VlhYSEVFBZGRkdja2tLd3U1UVJSY5JQmKXfs\n2PGjWpI/iZ3C/Pw8CQkJIgra2dnJwsICfn5+XLp0SRhvNm3aJJJwLS0tAtoaEBDAk08+ib6+vrDm\nwANYp7Q6t7a2kpmZKbbM5eXlotKtUChYWFigra1NqOQCAgKwsLAQiLSBgQE8PDyAB/w8Y2Njpqen\nKSgowMrKCn9/f1xdXenv72dqaorKykrUajXj4+OMjo7S0dFBVFSUiKLqdDqKi4uZn58XlOhr166x\nuLjIW2+9RWNjIzqdjrm5OZqampDJZOKcLtUqrKyscHR0ZG5ujq1bt7Jv3z5xPpe+9+DgIBs2bBDS\nUysrK2FsKiwsRKPRYG9vLxDugYGB1NfXY2hoSHFxsUCy9ff3U1VVhZeXFxYWFrS0tAgVfWhoKHK5\nHI1GQ2pqqnjbnjp1imPHjvGnP/2J1atXMzk5yZ///Gc6OjpQq9UMDQ0RERGBSqWiru6BQcDMzEws\nJObm5oJS5ejoiFKpFDWRubk5MjMzcXV1RafTiXkGqQ0pUZ4lDdt//ud/YmBgQGlpKevWrePUqVN0\ndnYKJ4WNjQ2urq688847hIaG4u7ujr29Pd988w2jo6OEhYXx1Vdf0draSl9fHydPngQgMDCQ3t5e\nnJ2dUSqV6OnpMTExwdDQENeuXcPT05Pg4GCio6NFcKyyspKqqipKS0sxNzentraWjIwMVqxYIWY1\namtrBbDF3t5e1Nv27NkjNIaSW1RfXx8TExN8fX1xd3cnMjKSK1euEBMTw/T0tHCE/NjrJ1FTMDIy\nYmhoiPr6eubn50Vdob6+HrlcTmxsLP39/Tg5OREREYGFhQUTExNs3LiRqqoqcnJyiIiIELHOy5cf\ngKCkQpNKpRJVfHNzc5RKJWNjY8I4JJPJqK6uJjc3V9CJACYmJlAoFGLhkAaipFacvb09jo6OBAUF\n0dPTg729PWlpaVy8eBFzc3MMDAwEUy88PFx83cTERPT19bG1taW1tVUsNj4+PmRnZ3Po0CF6enro\n6enh+PHjJCUl0drayuLionhYtm7dSnZ2NtevXycgIICYmBjkcjkrV67Ezs6OgoICAe/4/vvvUSqV\n9PT0sH37dlxdXcnOzhZCmoSEBCoqKggODsbKyoqWlhb8/PyQy+VkZGSwd+9eenp6qKioYHFxkeDg\nYEGrNjQ0xN/fn8uXL6Ovr8/MzAxxcXG89NJLqNVq4uLikMvlFBUVUVdXR3x8vJDrenh4EBAQQFpa\nmmj3enh4iPRnZ2cn5eXluLu74+fnh0wmE7Mgc3NzKBQKenp6MDc3x8jICJVKJVD8MpkMlUolConP\nPPMMBw8eFEDgjIwMmpubWb58OQqFgpiYGDEkFxERga+vL6dPn2Z2dlZ8j4iICKqrq5mfnxeTuAUF\nBURHRxMQEMC1a9dYvXo1KpUKb29vHBwcuH37Nvb29nR3d4s3uFarZf/+/VRXV6PT6SgrK2Pbtm3/\nUnTu6OjA2tqakZERGhoa8Pb2pqurC7lczpdffims5zt27BCFYCkqLZPJUCqVlJSUCBXi/w9G4//T\na25uDm9vbwYGBrCwsBBS0o6ODgIDA5HJZKJ9aG1tzYYNG6irq6OpqYktW7YI2evY2Bhubm60tbUB\niFZcbGwsQ0NDTE9Pc/fuXVasWMG6detQKBTU19czOTnJwMAA69evx83NDbVaLdqdN2/exM7OTkww\nwgPyktRqs7W1FaDQmZkZKioqqKurw8bGhvb2dm7dusXOnTt57733+Mc//sHExARzc3N4enpy/fp1\nIiMjBe9PaoV++eWXrFmzhoqKCnbt2iWU41FRUbi6ulJYWMhXX31FQEAA09PTlJeX4+DgQEhICDk5\nObS1tdHe3o6joyObN29maGiI0tJSHnnkESYmJrh06RL9/f2sXLmSpKQk8vLyGB0dJSQkhFu3bmFm\nZoaJiQnBwcE0NTXR1tbG3NwclpaWLCwsEBYWxr1799DpdMLNmJqaKqjYISEhbNy4EXd3dxoaGoiM\njATgr3/9KzqdjpycHNauXUtQUBAfffQRBw4coKGhAYD79+9jb2+PQqFAX18fPz8/GhsbcXZ2xsDA\ngBs3brCwsCAi4z4+PtjY2DA+Ps65c+cwMzMTb9eYmBguXboEQHNzM1u3bsXU1BSVSoVSqRRzKAEB\nAdja2oqai9TG/fbbbykoKOBXv/oVHR0dP
PnkkwLiExUVxT//+U8x/1JfX4+BgQHvvfceO3bswMfH\nh4GBAZqbm5mdnWXPnj2MjIwgl8u5ffs2p0+fRqfToa+vL/D8zz77rFgUpqamGBwc5J133uE//uM/\n8Pf3Z/369ZSWluLk5ERcXBy+vr709/dTW1tLc3MzTz31lBij3rp1K6WlpRw/fpxnnnlGZC5+zPWT\nOD5IAE8pSFJUVISrqys///nPkcvlmJiYMDAwQExMDOfOnePevXu88847wAMx6bJly7CwsCArKwuN\nRkNwcDAAfn5+xMfH09nZiZGRkeAn9Pf309bWJtpG0pbv/v379PT0oNFoGBoaIjc3F61WK4qM0mob\nGBhIRUWFkMe4uLgQHh4uhC27du3C0dGR999/H2tra0Egmp+f59tvv8XT05OOjg527NhBZWUlc3Nz\nwANcuJTzVygUeHt7Y2RkhFqtxszMjLi4OBobG7GwsKC3t5fS0lIVP9sZAAAgAElEQVRxrNJqtRw7\ndoy2tjZR3NJoNAwMDODp6cmBAwe4evUqMzMzeHh44OHhIShHERERHD58GDMzM+rr63FychKdg5iY\nGJ555hmGh4c5ePAgqampTExMsHnzZqytrcnLy0OhUAjCkUwmIz09HQsLC7Kzs4mLixMPrzR9OTU1\nxcTEBCdPnuT+/ftERESQnJwMIDoPjY2NQmUPD/Bkw8PDODo6YmBgIAa93N3duXr1qjCEw4MW9+Li\nIiUlJchkMuABLSs4OJiGhgbc3d2JiIjAzs5OpBMbGhrENO26desIDw/n8OHDvPfee2JAbWZmhsjI\nSLy9vYUY19DQkNraWmZmZujv70elUmFoaCis6c7Ozujp6YkBufLycnp7e5mdnWVsbAxfX19+/etf\ns2zZMpE/gAcvtNzcXF588UUaGxvJzMzkzp07WFtb8+STT7Jt2zahD/T19cXZ2ZmSkhLCwsKIiYnh\nm2++YXJykp07d+Ln5yes3j/m+knsFIyMjFAqlbS2tmJiYiKSaaWlpZSXl7N//34mJyd59tln2b17\nN4ODg3h7e3P8+HExwRcSEoKFhQVBQUHk5eUB8MEHH7Br1y6ioqKYnJwUUdnJyUnCw8NFEEc6O0sD\nRZOTk3R2dgqa8tDQEHl5eULnHR4ejre3Nzdv3mR0dBStVsvt27c5cOAAs7OzjIyMYGpqykMPPYRS\nqaSoqIh///d/p7i4GGNjY1paWvD09KSlpYXVq1eLxSYmJoaRkRFRTZZQ4o8++iiLi4vinCyXy1m+\nfLnopkiV5vr6emJiYoTNeNWqVdTW1oot6nPPPUdvby9lZWVUV1cTGxvL8ePHhRPD2tpafE6mpqaE\nhYWxZcsWampqCAkJoa6uTnzP5uZmTExMiIyMxNLSktLSUjEY1d7eTnl5uRDQxsXFUVdXx9LSkpCa\nuLu7s7S0hK+vL3K5XCQav/76axITE+nq6sLIyIi3336bt99+GycnJyHMnZmZEaPM+fn5eHp6Ym1t\nLc7gYWFhLCwsoNFoxO4uPz+f7Oxsnn/+eTIzM/Hz8xPdnpKSEo4ePSpqQStXrsTV1RU3NzeOHz+O\nkZERk5OTnD17Fj8/P3744QeB+Ltw4QJ79+7l22+/JSIigrt37+Ll5UVERARxcXGUlZVRW1vLyMiI\nGL6anZ3F0tJSFL57enq4d+8eTk5OwlMRFRVFcnKyQOur1WrR1u7r60Or1RIcHCw6HEZGRjg4OFBc\nXIyjoyMjIyOC/rRp0ybeeOONH/08/iSmJD/44IM/SOpySUl26dIloqKiWLt2LfAA8CGBTbOysuju\n7hYzEL6+vpw4cYLw8HC0Wi2hoaGcOHGCZ599VoRexsfHRWW2oqKClJQU7OzsaGtrE0nH1NRUNBoN\n5eXl4sjh4+ODp6cnWq0WIyMjvvrqK37zm99w9epV2traxDTkxYsXmZ2dpbu7m9raWkxMTNi9ezdJ\nSUls2bIFf39/rK2t8fX1Ffovicxz+fJlcnJyeOqpp4RrQqvVkpWVhZ6eHsbGxlRWViKTycSxQiaT\niV64h4eHULtVVVXh4+MjwKUKhYK8vDysra25efMmqampDA0N0dPTw8aNG8XNam5uTk5ODjt37qSo\nqIj09HTa2trEzVpfX8/o6CjW1tY4ODiIm/t///6wsLDA19eXO3fusGnTJpydncW8SHFxsTBB+fr6\ncu/ePbKysli/fj2Dg4MUFhaK3P+zzz4rsOkWFhbcvn2b/fv3Mz8/T2BgoLBsSUq2iooK2traGB4e\npqurC1NTU9RqNQEBAWIhKywsFBawsrIyent7+fnPf05XVxdubm4iNn/9+nX8/PwoLCxErVYLWc6r\nr76KtbW12CUFBQVhbW3NhQsXxNFIEvFI+Y3Kykpu3rxJa2srenp6BAYGCuGOhGhfWlrCy8uLY8eO\nYWNjw759+7h//z7nzp3j1VdfJSsri97eXkZGRjA2NiYiIgJzc3Pu3LmDt7c3f/jDH4QGEP6rAG5i\nYkJQUBBDQ0PExcXh4OAgtaH/++DYJNuQi4sLBw4cEFsomUzGiRMn+PTTTykqKqKnp4d3332XDz/8\nEBMTE+Li4oiOjqa+vp4dO3aI+XspHRcTE0N4eDiLi4vY2toyPz+PiYkJERERYkRbat3t2LFD7AiM\njY3FXL29vb0YlpEUZFIrUaPR4OTkRENDg8hOuLi4cP/+faEck2bo79y5g5mZGffv3xdS0draWgoL\nC0UNpK+vT9RHTExMSEhIYPny5YIG3dvbi7GxMc3NzVRUVLBq1Srs7Ox4//33ef3112lrayMuLg5j\nY2OysrLE21DyCxw8eJDS0lKhcpOKrTqdjpKSEkZGRgRIpaSkBGdnZ8zNzfHz8+PKlSssX76coaEh\nent7aWlpwdbWlqKiIvbt2yfAOJs2bRLMQ6lGtHLlSqytrbG2tkar1Qoknk6nE1BTX19f4ME2XyaT\nMTMzI/iUhYWFGBgYcOrUKf72t7/h7u6OVqtlYmKCkZERESTSaDTi4bp8+TI9PT0iDPTKK6/g7u7O\n7t27efPNN3n66aeZn58XOY35+XleeOEF7ty5g0Kh4MaNG6hUKjo6Orhy5Qo3b97k+++/Z3h4mOvX\nrwtH5RNPPIGZmZnwOHh6ehIVFcXi4iJdXV3Mz8/j7++PgYGBiI5LyLrZ2VkGBgYIDQ3F19dXCJbh\nQf2ls7NT4PXj4uJobW0lLy+Phx56iL6+Pp5//nl0Oh1hYWE4Oztz+fJlVCoVn376Kfn5+WRmZpKe\nno6tra14pn7M9ZNYFCwtLenv7xd+RDs7O2JiYqisrBQ05YMHD4oP38nJiWPHjqHRaDhx4gQtLS0U\nFhayY8cOLCwsBJO/trZWDB41NjZiZmZGfHw8ycnJWFhYsLCwgKGhoeivq9VqUblVKBRCYiJt8aS5\nBMlOvWzZMlpbW3FzcyM1NZXY2FgMDAyERu3UqVO4ublRXl6OnZ2daOOVlZUxMTFBR0cHc3NzvPLK\n
K8CD2sr8/Dw+Pj4cPHiQ3bt3ExwcLAIqUk+7paWF/fv3k5qayjfffENXV5f4s6WlJQIDAwUwROpR\nSwvj2NiYOOtLgNSOjg5x3JAq+jqdTpB/vvrqK1xdXWlpacHHx4fu7m7hx3BycmJkZITx8XGam5t5\n7733OHfuHFNTU4yPj2NnZ4dGo2FpaYnW1laam5uJj4+nqKhItCQ3bdokZDASLXrFihUCcOvm5kZx\ncTHt7e1ERUVhZGTE4uIiKpVKgGelYbeuri6mpqb+ZUcF8OGHHxIZGUlZWRlPPPEETz/9NHl5eYSH\nh1NWVsb8/DwZGRkkJydjbW1NYmIizs7OIt15+PBhQkJCmJmZQV9fX2jjhoaGaG5upr+/X7g4y8rK\ncHBwIDs7m/b2dlxdXWlsbOT48eOCfi0VK4uLizE3N0etVou8jPR1zc3NKSsrIysrS1i+GxoaOH/+\nPJ9++ilXr15ldHRUwHuSkpIIDg5m8+bNpKenMz09TUNDAwsLCyLJ+2Oun0RNYXFxUbQji4uL0Wq1\nNDY2EhcXJ0JLn332GUNDQ/z+978nICBABHhWrlxJWVkZjo6OaDQaSktLUavVAGKBMDEx+RekuoT/\nkjBnWq0Wb29vQRiurKwkOjqa1atXAzA6OsrExIQ4+9vZ2bFixQq+/vprNmzYgFqtZnZ2FjMzM9zc\n3IiIiBCTn5WVlcL0MzMzg4GBAZWVlXR0dAisvfTWkcAw7e3t1NfXMzAwgIuLC2NjY1RUVLBx40Ya\nGhp47rnnGB4eZvXq1Tg6OuLi4kJfXx979+6loKCAe/fu4evry9zcHHZ2dly7do3Q0FDgv3ZPzz//\nPJs2bcLFxYXIyEjS09Pp6+ujqakJnU7Hxo0bWVpawtTUlL/85S8cPHgQBwcHrK2tBe3o97//PUeP\nHhXBLonUPDs7y5NPPsnLL7/MjRs3MDU15aWXXuLIkSMisSlFcSUXpHT2l8jD0m4oMDAQGxsbWlpa\nSE1NpbS0lKqqKlF59/b2JjY2lsnJSXp7e0lMTKS4uJjZ2Vmh2IMHsw8VFRU0NjaK49a2bds4efIk\nKpWKxMRE0tLSePLJJ5HL5VhYWIjBt/DwcLZs2cLnn3+Op6cnCoWC119/HUD8WzMzM5FJiYyMJCcn\nh5dffhmlUolcLic6Opq8vDwcHR2xtLTkzTffFC9BCbt369YtwsPDgQd1q4CAAHG/SUNi69evp6en\nR4zDS3Sw3bt3i52ZiYmJyFpYWVmhr68vDGo/5vpJ7BSklFhYWJg4/zg7OyOXyzl16hTXrl1Do9EI\ngcf4+DjV1dXEx8fT39/PL3/5Szw8PMjOzsbPzw8vLy/gwUyFQqEgMjISpVKJi4sLBgYGYtDJxMSE\niYkJYY+amJigtraWtWvXitZVX1+fMEpJQzDt7e1YWlpiZGQkkF7+/v7s3r2b1tZWCgsL6enpYXx8\nHBcXFwICAujp6aGuro4LFy6IOYHly5dTVlZGa2sr8MC4fOnSJQYHB+nr6xPBIVNTU3p6eujs7CQ/\nP5+cnBw++eQT0tLSGBsbIzQ0lPXr17Nq1SoCAgKwsrIS+DqVSsXOnTsZGhoSP09kZCSvvfYamzdv\nxs7ODjMzM5KTk4WR2s3NjWXLlgnS0KpVq5DJZOLreXl5kZCQIGYX/vrXv2JpaYmXlxeurq4sLi7y\n29/+Vpi5dDodFy9e5Nlnn6WkpESEherr69m2bRsbNmzgpZdeAuCXv/wlV69epa+vj4SEBGpra5mf\nn0epVKJSqbCwsMDZ2VkwBgwNDenq6hJdDgcHB8zMzITHUVpwq6ur+fbbb4WmPiAggICAAIKCghgd\nHWX9+vW0t7fT2trKU089xc6dO5HL5bi7u/PRRx+xc+dOWlpahLRY8pVKbe+2tjYUCgU2Njao1Woc\nHBwE+9He3l6c9SXalUajwdbWlri4OE6cOEF2dva/CIcSEhJYWlqiqamJN998k76+PgICAsjLy2Nu\nbg65XM7w8DCbNm3i1VdfJTo6WtRlqqurBRxWqVQKmfKPvX4SOwVDQ0MhB21tbaWxsZHf/va3KBQK\nHBwcyMnJYW5uTlB9IyIixJZ006ZNzM7OChdiZWWl6MkuLi6ip6fHyZMnsbOzw8PDQ4wuS5ZpKekl\n4b7WrVuHkZERxsbGtLa2ig6EmZmZuMHkcjm1tbU4OTlx/vx5wRmcn5/n7NmzWFpaEhwcTHBwMNPT\n09jZ2Ql9vE6nY9WqVZw6dYr9+/dTWFgoTMOSwDQyMpKxsTFMTU0xNDQkMjISJycngWeXhpSUSiV/\n/OMfKSwsJCwsjNu3byOXyzE1NaWlpYWAgAByc3MJCwsjKCiIL774goiICC5duoSzszM//PADWq2W\nW7du4ejoKPyKEpxFX19fwG+7u7upqakR7VNvb29u3bolQKSZmZmEh4fzzjvv4OXlhb29vXio1Wo1\nJiYm/PrXv+app54S0WVpKtTY2Ji33noLeDBsJRX75HI5dnZ2DA4OMjg4KCZXOzs7iYiIwMbGhsHB\nQYKCgpicnGR2dpaPP/6YDRs24ObmhqWl5b9IW3x9fUlOThYzDG1tbTQ1NQlMX2BgIFu2bBH047Cw\nMCYnJwVhenh4WKjrd+7cyfvvv8/09LQobnp4eIi5hhUrVnDz5k3WrFnD6OgoTU1NmJubs7i4yMzM\nDG5ubnh4eIi0bnt7OxEREfzjH/8AHnRLPDw8eOGFFwgODmZhYYGmpiYBbdm2bZuAz0ghJ0dHR/z9\n/cVLaNeuXeTk5FBfX09gYOCPfh5/EouCFPVVqVSChfdv//ZvbNy4ETs7O3EMqK6u5k9/+hOtra2o\n1WqWL1/O2bNnefTRR7l9+zb9/f2sX7+e9PR04IGks7OzU0SWJYuvg4MD4eHhnDt3jrm5Oezt7cWM\ngiRFkeoL0gMtpffgAQhE4u5J/MXo6Gja29vZtGmTmLyTCMlFRUWsWbOGuLg4bt26xdTUlEgOhoeH\ni61dSUkJ+/bt4y9/+QtRUVHimKBSqejv76ejo4M9e/bwySefEB8fz9zcHN9++y1HjhzBzMwMc3Nz\nKioqsLGxwc3NDXd3d5RKJXNzcxQWFhIfH8/Q0BDDw8N4enri6+vLwsIC09PTpKSkMDMzQ05ODn19\nfSxbtoyZmRlOnDjBkSNHeOGFF9i1axe7du3i008/ZXh4mDVr1nD37l3UajWJiYk0NjZy8eJFke/w\n8vLiF7/4BWNjYxw+fJif//znhIaGotFo6OvrQ61WU11djbOzM0eOHOGbb74RMxGSTHfv3r2kpqaS\nkZEhoCkbN27EyckJpVIpwCRSUVFKtpaWluLi4iJmUDw8PKipqeHy5csCBJubm0tUVBQBAQH4+vry\n6quv0tnZia+vL9HR0Tg5OZGZmUlTUxP5+fmYmpqKeoA0YJSZmcmvfvUrcnNzhVlcJpMJoe7bb7+N\nnZ2dqMk8+eSTtLe3C0/l5OQkra2twmglSYxtbGxYXFzks
88+Izk5mTt37hAbG4u5uTlhYWFigM3f\n35/Z2VnRJdJqtUKie+LECeLj44WU6MdeP4njw9LSEmFhYTzyyCO0t7ezsLAgXAaSdy8zM5O4uDiq\nqqooKSlhamqKU6dOsbS0xN27d5mYmCA4OFhARwFhPZYow9PT03h7e6Onp4dMJhNg1/DwcJydnYmI\niGDNmjWo1WqampqEYCYgIIC6ujrx8EouyuDgYLZt20ZiYqIg4cTFxREREUFeXh5WVlZYW1uzatUq\nMRhz8OBBBgYG8PHxQalUkpGRIbaM3t7epKenExERgbOzMxUVFczMzLC4uIhcLmd0dBQHBweGh4fF\nljQwMBBjY2MyMjKEaCYuLo5Lly6JN4nkq4yOjsbExAR7e3s++eQT3nrrLeEctLa2Ft4DaSfW398v\nMgQvvvgi3d3dvP3227i6uhISEsLk5CRHjx4lPDwcIyMjtm7dys2bNyksLOT06dO4u7tTUlLCxx9/\nzN69e4XnITAwEFdXV+7du8ebb76JpaWlyCksLi6KXVlKSgq1tbVkZWURHh4u9Orp6el0d3fz3Xff\noaenJ+jNHR0d+Pv709LSImpIBQUF8L/Ye+/gKs9z3fu3VNZS7723pS6EegM1QCCaaTEYQsCO4xjb\nxE71jnMSe+yE2Int7cQOtoNxwzTRixAIJCGDhApCHfXee11akpak9f1B3ufkzOw5Ye/vmzne35xn\nRuNhWVpa2O/7vM9939f1u3gkTX/11VcxMjKira2N+/fv4+HhQVZWlhiF/+pXv2J4eBhfX1/Gx8f5\n4IMP+NnPfkZiYiLe3t4899xzzM3NkZubK06N1tbWosQ1MTER4+3+/n78/PxYvnw5enp6rFmzhgMH\nDjA3N0dPTw89PT3U1dVx8eJF/Pz8GB4e/l+0MPb29piYmBAeHk5eXh4rV67k73//OwqFgoyMDG7c\nuMFTTz1FZ2cnH3/8Mb6+vshkMubm5tBoNHR1dbFjxw7KyspEcO7jru/EprCwsIBcLufcuXO4uLjg\n5OREZWUlK1asYHR0lIqKCmJiYpienuabb74hNDRUkG+USiV5eXkEBwfj6elJSEiIkA0bGRmJmr6m\npgYDAwNaW1vx8PCgsLCQhw8fCg98e3s71tbWeHh4sLCwwI4dOxgYGGB4eBiNRiMQWgC1tbU4OTnh\n4uKChYUF+fn5ODs74+3tzZ07dygsLGTDhg2UlJSI9Kby8nLCwsK4c+cOOjo6uLu7k5GRQWdnJ19+\n+SWA+N1SvyEnJ4fLly+TmZnJzZs3hWrw6aefJiYmhpaWFhEuMzk5SVBQEKOjowwMDLBx40aGh4ep\nra3Fzc1NiG4AwsPD2blzJ2vXrqWoqIjh4WGcnJyE8+/ChQscOXIEV1dX9u7di7GxMdevX8ff3x83\nNzcRw6dUKnnppZfE8fj9998nOzsbhULBnj17OH/+PLW1tdTX12NsbExLSwvFxcVcunSJ6elp5HI5\nt2/fxsrKSkyM2tvbuXPnjtB8SFoIqTwICwtjeHiYvr4+MZ5Vq9Xcu3ePoKAgJiYmCAkJEdQrKXlq\nZGSEzz//HDc3N1599VWee+45Nm7cyI4dO3Bzc8PY2Jhvv/2WnTt3srCwQEtLC3v37uW1117j8OHD\nmJubc/XqVYKCgoiKihKGqNHRUQwMDDhw4AAVFRVYW1sLaJC5uTkuLi7Y2trS0NCAra2tUMf29/ej\nVqvx8vJifHwcIyMjmpqaxDg9Ly+PLVu20NLSwtWrV9HX1+fHP/4xV69eFeHDk5OTfP7557z00ks0\nNTUJ+byEJXRwcGDnzp1s2LBBWNMfZ30nxEuHDx9+IzExkXfffVdAOSQAqjSzt7Gx4datWzg4OIgp\nQ1VVFWNjY0RERHDu3DkABgYGcHR0JD8/H09PTx4+fEh3dzdWVlYCuvnxxx9z9+5dnJ2d6erqIicn\nR4h3WltbKSsrEwCX06dP09HRweLiIocOHWJkZIS9e/dSXFzM4uIily5dws/Pj8HBQTGenJ2dpbCw\nEEdHR6ytrUXDq7i4GLlcTldXF66uroLCo1QquX79uhjDvfDCC9y7dw8fHx88PT0xNDRkw4YNQufg\n5OSEq6srRkZGAtCiVquJjIwUAhpHR0fOnz8vkotzc3M5e/YsaWlp5ObmMjU1haWlJcXFxWzatIn6\n+noGBwfJzc0VCVxXr14lNzeXnJwcXnrpJdF/SEhIwMzMjJqaGiwsLASrcn5+nnXr1nHjxg2xAU9O\nThIQEMDRo0dxc3Ojp6cHQ0NDlpaWBIRG4mKWl5eTlpaGra0t8fHxGBgYcPPmTeRyObW1tQJBNzEx\nwZkzZ7C0tMTU1BRHR0eqq6u5ffs2UVFRDA4OUldXJ0bYX3zxBTExMYSFhdHS0oKjoyPZ2dnMzMwQ\nEBCAUqnk5MmTwn8jNYm7u7uFEUkyt0kBQVNTU1RUVJCSksLCwgK9vb2CaKXRaFixYgVDQ0NcvXoV\nV1dXli1bxtWrVwVisKenBy8vL5qbmwW5SULHS1b2vLw8zMzMcHNzw9XVlczMTMLCwjhy5AgxMTGo\n1WpycnIYHx+npaWF48ePMzQ0JNK8JYDL559/jo6ODjdv3vz/Rrwkk8k+l8lkgzKZrOafXntDJpP1\nyGSyin98rf+nf/drmUzWLJPJGmQy2drH2RQmJyepra1l165dODs7k5CQwJNPPkljYyMtLS2sWrWK\n1tZWkd5kZ2fHM888Q1xcHHfv3qWjo0PcEFItBo+e6Hv27MHCwoKzZ88ik8kYGRnBx8eHlStX0tTU\nhL29PUqlkvn5eXR1ddHR0cHb25ve3l7m5uZwdHREpVJx48YNdu7cCSAyLN3d3bG1tcXQ0BAjIyMC\nAwMZHByktbVVBKyam5tTWVkpmoASouz+/fuYmZkxNDQkPq+ZmRlpaWlkZGQIrfr8/DyBgYGUlpZy\n5coVsZncuHFDPN0jIiJYWlriwYMHhIaGUl1dzfnz59m9ezeffvqp8DusWrWKiooKkUXh6elJYmIi\nHR0djIyM0NXVRWRkJBkZGfT19bFu3TrWrVvH+vXrycnJwd3dXUBNiouLqa6uFgAZrVaLSqUiLy+P\nZ555BinHw9zcnJmZGYKCglAqlXh5edHa2sqKFSvo7OwU6eJBQUHAIwGSpOkoLi7mySefFPZqaYKi\n0Wj405/+hFqtRl9fn6NHj6JWq1m3bh1nz55lcXERhUJBfX29EANJIbSSR2BkZESwM9va2uju7qa5\nuZnY2FhR59va2pKfny90EwsLC6xatYqAgADRq5B0Bbq6uqxatQpvb2/h+PTw8ODgwYOo1Wrq6+vZ\nu3cv3t7epKenExgYiJ6eHikpKahUKiIiIrhx44YQYtXW1pKamoqOjg4xMTF88sknQpl77tw5McI3\nNzcXm5Sfnx9LS0uCDu3t7c2pU6f44Q9/SG5u7uPcisDjlQ9fAuv+g9f/XavVLv/H1zUAmUwWCOwC\ngv7xM4dlMpnu
v/oFVlZWgr937do1oQXv6upi06ZNlJeX4+zszLZt24iIiBB1tb29PV9//bUI8NDR\n0cHLy0t40s3NzcnMzKS4uFigzPr7+5menhaW5/7+fsEm9Pb2ZnBwkEuXLlFUVMS1a9cwMzPD0NAQ\nd3d3IbApLy8nMTFRkHS3bduGjY2NeHJt3LiRFStWiBDTxcVFwsPDBVDV39+fzZs309TUREpKiphe\neHh48O233xIcHMzGjRvRarVCGGRubi4SsZeWlkhISODOnTviSC6ZhCRvwM6dO5mfn+fAgQOoVCq+\n+OILpqam6O7uZt++fZw4cYKysjKWlpZYvXo1crkcd3d3cVFLfMbm5maxueXk5PDGG29w+/ZttFot\nqamptLW1UV5eLsazycnJ9Pb2srCwwIMHD+jr66OjowNdXV0ePHjAX//6V9EPqaqqoru7m4iICIHI\nB8R0ICAggIqKCkxNTfH19UVHR4fs7GxsbGw4evQoCoUCmUzG+Pg4ZmZmWFhY4ObmRkNDA52dnYyP\nj4vmcFFREYuLiwKiU1lZSXZ2NqdPn+add97B3t6eyMhI0SzU09PD2dkZY2NjvL29uXDhAiYmJgI3\nJ20Kpqamgr8ok8moqanh22+/paGhgb6+PsrLy0lISMDAwIDLly/T398vxs1yuVxMQMzNzXF2dhb9\nJcn3YGZmRkNDAxqNBh8fHz766CNaWlpQKpVERkYKklhAQAB/+ctfuHPnjgC3+vn5kZiYyKVLl4TY\n6nHWv9wUtFrtt8DoY77fE8AprVY7p9Vq24BmIPpf/dDc3Jyo6Xfs2CE88xI7r62tTfxPaGho4Ne/\n/jU9PT0AfPrpp7S2tuLg4ICenh7Dw8OsW/doD0tMTBRAEaVSSXV1NTdv3qS/v5/y8nI0Go3Agnl6\negq1XHp6Ovv27RPk3oWFBfT19RkYGAAeiUkkHoCzszMff/wxTU1NwkyUm5srbNNzc3OYmprS0dHB\n5s2bCQwMpKOjg7q6OlasWMHf//530auQyo+lpSW6urrw9Pj495gAACAASURBVPTE3t5e9AsmJibo\n7++nurqawcFBFAoFq1evFvX17t27uXr1KtbW1ujq6jIzM0NMTIxoZAUFBWFgYEB9fT3Hjh1jcXER\nW1tb8vLyhFvx448/pqGhgXfffVfYlFUqlUhalp5uCwsLNDc388orr2BhYcHAwAB2dnZkZmZy6tQp\njIyM2Lhxo/B5tLS0YGxsLPQnhw4dwsrKioaGBg4fPix6B+Hh4UxPTyOTycR7VldX4+XlRVRUFMHB\nwYSEhLB161aRIfnmm28Ko5CtrS2mpqbExMSwe/duamtrpeuYa9euiU3C2toab29v9u7di1KpJCAg\ngPb2dnp6enBwcOCdd94hKCiIwMBA0tLSePnll1laWmJgYEAoEOERS1GaWJWXlxMcHIyZmRkrV64U\n+ZAHDhzg1q1bhIaGEhcXh4mJCXv27MHa2ppNmzYRHh6Ojo4OZmZmAjRrZ2eHSqWiv7+fqqoq/Pz8\nWFhY4MSJEyJy3tbWlq1bt2Jubs7ly5c5ceIEhw4dYv369bS3t9PV1UVxcTEODg6izHyc9f+m0XhQ\nJpNV/aO8sPzHa87AP0unuv/x2v92zc3NsWrVKuLi4kRXtqurC7lcTkBAALOzs8zNzVFUVCQssyMj\nI0RHR+Pn50dvb69AxJeUlGBoaPjoL6ejI56c+fn5GBgYYGlpib6+PvHx8Tg5ORETEyMchp988gl2\ndnZYWVnR1dWFRqOhqamJpKQkvLy8hF22ra0NQ0NDwflPT09HV1eXkZER7t27h76+vpiiODs7s2PH\nDnR0dNDT02N0dJSGhgaOHTvG2NgYSqVSaCWk3kJUVBQzMzPo6OiI2HjpOC+N8kZHRxkbG+PSpUs4\nOjri7u7O8ePHcXJyQqFQcOfOHQFQnZyc5MiRI8zMzODm5sb4+DifffYZ+/btY2lpSUBrLly4IC62\nQ4cO8fzzzzM1NYWpqakgGBUWFpKVlcXi4iLDw8OYmpri4+NDQEAAbm5uBAcH85Of/ITOzk7s7e15\n6qmnSEtLIz4+XlivGxsbCQ8Px9TUFGtra5YtWyYajf39/SwuLtLV1cW6devw9fUVmpGRkRGMjIyw\ntrbm7NmzlJWVUVRUxJdffsnIyAgRERGCt+Ho6Mi9e/fw9PQU15i7uzsmJiaYmJiQlJREWVmZUCzW\n1NQIb8Lg4CAffvgh1dXVhIaGsmrVKlxdXUlPTxdSYkltaGdnR3JyMsnJyQKg+8QTT2BmZoaZmRl3\n797FwsKCrVu3kpCQQFBQkAjVNTMzo7KykuTkZDo6OsQJGR5lTcTExFBRUcGyZcsEbUoKx3348CEn\nTpwgJSVFjLkl4dqZM2dYvXo1dXV1LCwsEB0djUqleuwb+7+qU/gYeAvQ/uOf7wHP/GfeQCaTPQc8\nB//TYZabm8vKlSvp6upCpVIRGBgoRikajYb29nZyc3P53e9+h5GREVVVVcJOPDs7K1xiEtqrvr4e\nZ2dn4uLiaGhoQK1Wk5qaSnZ2tvh5CSASGxtLSEiIsD6vXr1agFJzcnJ49tlnkZqyGo0GPT09VCoV\nfX19DA4OsmHDBuGfHxoaorm5WYR+ZmRkMDw8TGZmJnp6ejg4OKBUKgVOKzExkU8++YTu7m5efPFF\nkXVgbGyMsbExycnJaDQaent76e7uxszMjJmZGQYGBkQIaktLi8gqlBiKxsbGlJaWiqSg+vp6kpKS\nuHLlCu7u7uTk5DAwMIBarea1114jOzsbfX19ampqOH36NGlpady4cYPXXnuNvLw8XnvtNX7zm9/w\n7//+7yQkJODg4EBJSQnl5eW8/PLL3LhxA29vb6ytrcVG/fDhQwYGBvjhD3+Ik5OTyGsEiImJYWho\nCG9vbzIyMoBHN9nIyIgAukiTDR8fH9zd3enq6qKwsJDU1FRhPNLT02N6epqwsDDCw8Opq6tjaGhI\nhO0CAtL73nvvsXLlSj744AMcHBxwcXFBrVYLOreUDWFrayvMUPv37xfOzezsbKEyBYS34+LFixga\nGmJubk5TUxNLS0tcvHiRvXv3ip5TcXExra2tIoksKyuLhIQEAcGVQn4B7ty5g6enJ9u3b0ehUNDa\n2ipORoaGhlRVVZGWlkZrayubN2/miy++QC6XY2hoiIODA9XV1bi5ubF69Wq+/PJLYTh7nPVfOilo\ntdoBrVa7qNVql4Aj/M8SoQdw/advdfnHa//Re/xdq9VGarXaSHt7e6G4qqqqwtTUlImJCa5du8bl\ny5cxNzcXEe1ubm48fPiQkpIS3nzzTcrKykhOThbMQoluAwhq09mzZ0UUnBR8IqG3vL29aWtrw8TE\nhKCgICoqKlhYWODKlSvs3bsXKysrtm3bxt27d8V4S4p/9/LywsDAAE9PT5FVkJubK9BkhYWFLC4u\nihj3733vezg7O9PT00NzczOzs7OEhISIC0Eul4tI+8TEREpLS8U8fHFxk
c7OTvbt28e2bdt46qmn\n+MUvfiEAK1ZWViiVSn72s5+hUChEQ+7mzZs0NjYKjJjEvNTV1cXGxoaf/vSnrFu3jsOHD5OTk8PX\nX3+Ns7OzYDX+9Kc/pa+vj7m5OT7++GNBaLKzsxMni+eff56srCwsLS3Jy8vjzJkzTE1NicDd0dFR\nrl27xieffEJNTY1oyAUFBTE9PU1zczNr1qwR14ZarSYoKEgce/39/bG1teWTTz7BxcVFnBakBmZZ\nWRl+fn6UlJSg1WqRyWR8/vnnqNVqQUd+8OABubm5hIWFMT8/z5o1a4Qtf/Xq1XR3dzM4OMjNmzdZ\nuXIlLi4uwtr94Ycf8swzz3DkyBFxOpLQfH/7299ELohkU5dQ8bGxsTg6OqLVaoXfw9nZGVtbW1at\nWsXOnTtRq9W8/PLLFBQU0NfXR1xcHICggLW2tmJhYYGOjg7bt29nz549vPPOOxw7dkyAiR0cHEhK\nSuL3v/89+fn5IgujtbWVnJwc+vr6xLX7OOu/tCnIZDLHf/rjVkCaTFwGdslkMoVMJvMElEDJv3o/\nqSnl4+NDWVkZvr6+WFlZMTIyQmlpKdbW1ty9e1eMCScmJnjiiSd46aWX2LVrl9AiSLoEyVRSVFRE\neXk5QUFBBAQEYGpqSm9vL4WFhQwODpKcnIy7uzstLS1cunSJuro6amtrmZycxMPDg5KSEpqamgRB\nWrLh9vX14eLiwsDAANbW1lRWVqJQKPD09KSqqoqNGzeSkJAgkocMDAwICgri+PHj9PT0UFtbS19f\nHxYWFnzyySei0RgVFYVMJkOj0WBqairQ9VlZWRw/fpyFhQUKCgr48ssvuXHjBvCIVJScnExAQABl\nZWWUlJSIvoPkvLO3tyctLY0XXniBJ598kldffZUVK1aIp5q1tTVr167l5ZdfZu/evahUKhGckpiY\nSElJCWNjY2RmZuLp6cno6CinT58Wo9mioiKam5tFkvXS0pLAqM/MzJCbm0t3d7eYmiQlJeHi4oKL\niwtKpVLUy4AYFTY1NaGvr4+enh69vb08ePAADw8PMjMzBavS29ubs2fPimvi4sWL5OfnC2K15KkB\nxIPm/v37yGQy4efo7OwUYJTu7m4sLS1paGhApVIJKIs0aRofHyc+Pp709HTRt/r+979Pe3s7O3fu\nZGRkhICAANavX8/4+Ljwj4yOjmJhYcHg4CANDQ3U19cLt2R/fz8FBQVihCn1PL7++muam5vFf3cf\nHx/q6+tFmXHhwgWampqIjY3FwsJCOHMBVqxYISYkkpv3P8NofJyR5EngHuAnk8m6ZTLZD4E/yWSy\naplMVgWkAD8F0Gq1tUAG8BC4Dryo1WoX/9XvmJ+fx9DQUODBJNrM0tKSQGtJ8e0eHh7C3jszM0Nk\nZCRLS0vo6OgwMjLC9PQ0d+/eBR6N+Obm5hgZGUEul+Pk5ER3dzdpaWls2bIFR0dHJiYmSEtLQ19f\nX1hpw8LCsLGxobOzk9/+9rcMDQ3R0dEhGoIAoaGheHl5Cdv32NgYGRkZTE1NCbu1sbEx9fX1jIyM\nYGxsjL29PfPz8+zevRsrKyuKi4t55plnxKYwMTGBgYEBJiYmWFpaMj09LUZUe/bsobOzk+rqasbG\nxjh+/DjwSJQjefO3b99OZGQk3t7eIkDX29sbS0tLVCoVJSUldHR0UF9fT0xMDDKZjKmpKcrLy3Fw\ncKC2tpbx8XE8PT1JSEhALpfT1tZGZWUlt27dwt3dXVC2zc3NOXHiBCEhIbi6uhIQEMD4+DjOzs4o\nFAoGBweZm5ujubmZXbt24ebmhq+vL0qlkri4ODZu3Mji4iKzs7OCBAUQHR1NSEgIY2NjFBUVsbCw\nQFBQECYmJnh4eODn5yegNxLBKTIyEkdHR3p7e8nLy6OpqYng4GAOHz4sTiCdnZ04ODjw4x//mObm\nZrRaLUNDQ3z11Vfcvn2b0NBQtm/fTnBwMLdv3+btt99mYGCA6elpXFxciI6OZtWqVSwsLHDq1Cky\nMzOBR6pZd3d3Pv30U+CRj0fSKkifxc/PD4VCQXZ2Ng8ePBCS6wsXLmBrayuk6N7e3ri7uwNw8OBB\nOjs7mZubQ09Pj6NHj3LixAmOHTsmoDszMzOcOXOGvr4+xsfHUavVIlNSCr6JjY0VCLzHXf+yp6DV\nap/6D14++r/5/j8Af3jsT8CjGl26gffv3095eTmrVq0ScmApuyEmJoaCggLR2ff29qagoIDJyUkB\nB7l79y6hoaHSZyE+Pp7CwkLheTcyMhJjyMzMTIyNjZmbm8PDw4Px8XFiYmKorKzE2tpapDc/fPiQ\n5uZmXnzxRT788EPRpR8dHcXNzU3kN65fv56HDx+yd+9ewsLC+NGPfoSrqyuGhobcunWLsbEx1qxZ\ng66uLq6uruzYsYPh4WHRyZagqcnJyUxMTLB7925GRkbo6enh7bff5sCBA2IjGhgYIDs7m7S0NLRa\nLT4+PnR3d1NYWEhFRQXe3t7Mzs5iY2NDVVUVPT09InD30qVLBAcH89Zbbwmdw1dffUVgYCALCwtE\nRUWho6PDsmXL6O7uFri8kydPsnr1amZmZnBycmJwcJCcnBycnZ3F3F0Ci1haWgqC0dzcHI2Njbi6\nuvLCCy8IjmRRUZGga0vjXilvcmBgAH19fQwMDKirqxObvr29PTU1NWKsHBoaikql4sKFC/j4+GBm\nZsbY2Bjm5ubExMQIzb+UOC01MD08PHj77bcxMDBApVIJv8vOnTvZsmULAwMDVFZW0t3dTVVVFfv3\n7+ebb77BysqKiYkJHBwcAARWzsXFhcnJSdzd3SksLKS3t5f6+npCQ0OZnZ2lt7eXiIgIcWKVck2W\nlpZ47rnn2L59O11dXQIleOvWLTw9PYUIT6vVMjo6yooVK1hYWOCJJ54QWoulpSUiIyNFL258fJyh\noSEhMpPSuR93fSdkzjY2NmRkZLBv3z4+++wzQkNDmZycxNTUFF1dXfLy8oiJiRENm8bGRiIiIjh/\n/rxgBkjqvM2bN4uOMzxqNkpotZaWFnR1dXFxccHe3p6KigrGxsbEKWR0dJT6+no2bdqEra0tbm5u\ntLS0EBwczO7duzly5AjwSGAjpQCNjIxgY2ODp6cn/v7+5ObmsmXLFmZnZ8XIb2pqCg8PDw4cOMDS\n0hIqlYqUlBQqKyvp6ekRXg2VSkVYWBhNTU3Mzs6K2t/Ly4vU1FSGhoaYmpri8OHDTE1NoVKpuHfv\nHjo6OiJ7MjQ0FGNjY2GKGR4extnZmdjYWH73u9/x5z//maCgIH7wgx+wfft29u3bh7OzszBHRURE\nEBQUJMJefvCDHwjc/osvvsjQ0BCDg4Oifpf8I4aGhkLSHBAQQE1NjWBjrFy5ktjYWFxcXBgeHub6\n9esUFBRw//59xsbGhGwZEMG/3t7eTE9P4+Pjg5+fHy4uLiQlJTE6OkpKSgrbt2/H1dUVuVzO4uIi\nOjo66OjooFarRTScpaUlcrkc
gJycHDw9PZmfnxdTDKVSyeTkJMnJyXzxxRfCul5dXU1FRQWnTp2i\nurpakKH7+/vp6OgQeZnwqDGal5eHs7MzSqUSBwcHPDw8cHd3F5mgLS0tJCYm4ujoSHNzswDJPP/8\n8yQkJIhU6jNnzgg9gZSLERAQQGRkJCUlJSwsLGBoaMjt27cFs2JsbEzoMtRqNWNjY4yMjJCUlERh\nYSEXL17ExsaGvXv3Pvb9+J3YFNRqNa+//jq9vb2iNhsaGkKj0Qj4ycjICJ2dnaKDLgFIpBi5vLw8\n9PX1mZ+f53/8j/8BgEKhIDo6mvHxcZKSkggLC0OpVAr2QFhYGFevXmXjxo1MTk4K4o7kdWhsbBQy\n26amJtFTMDExQaPRkJSUJMaXUqScp6cn3//+93nzzTdpa2vjN7/5DcPDw3h4eODg4ICTkxM1NTWC\nuRcbGyvqSEnwJG1aQ0NDXLx4kfn5eRwdHZmenhbE5gcPHojjuK2trRAxmZubEx0dTV1dnSD/tLa2\n0t7ezuuvv86uXbuora0lNjaWpKQkNm3axIoVK9i6dStPP/00a9asobKykq1bt2JhYcHVq1eZmJjg\nhz/8ISdOnKC1tZXFxUV8fX2Jioqirq6Oc+fO0draKqjN/v7+uLq64uzsjLu7OyqViuDgYJ566ilh\nSsvPz8fU1BRjY2MBKoFHJVRnZydFRUVER0fT09Mj0pzt7e2Jj4/nvffeIy8vj6KiIkxMTHB0dESj\n0TAwMICHhwcKhQK5XC7kvvBo1Nnf38/o6ChOTk4YGxuzbNkygoODqaqqoqGhgYKCApycnDhz5gxH\njx5ly5YtqNVqEhIS+PbbbwW4Rq1Wi1xIJycnfvSjH9HS0kJYWJgYqerr6/PEE0+IcBY3Nzc6OzvJ\nzc2lrq5O0MulqcTo6Chr164V4+nY2FgqKirQaDRUVlayYcMG0tPTcXZ2ZmhoiOnpaRITE/n4448p\nLCzknXfewd/fHz8/P/EwGB0dJSkpScjdH3d9J7wPH3744RsajUZEb1+7dg1bW1siIyNxcHBgYGCA\nlStX0tnZKeS8ISEhREZG4uHhIbwRUmrQk08+ybVr1/jlL38pBCRLS0tcv34dAwMDQkJCOHLkCGZm\nZqKRJpUqN27cICcnR4z9TExMWFpaYnZ2luzsbAYGBvDx8UGlUuHq6kpNTQ1zc3PY2dlRW1vLs88+\ni66uLtevX+f69ev4+PiQlJSEv78/7777Li+++CJJSUl0d3dz7949nJ2d6ezs5ObNm2zcuJGZmRnx\n1dnZSWBgIDKZDFNTU+HU7OnpQa1WC9ajkZERjo6OyGQyxsbGRHCOpaUlubm5gjxtaWlJfX09QUFB\nJCQk4OzszNmzZ/nkk08IDw8XJ4T+/n7s7Ozw8fHhyJEj4qknnUAkYdHi4iJr1qyhvLxcSLwlF+rD\nhw8ZHBwUoNe2tjYUCoVAl+vp6eHr64ubm5uYNpw/f569e/cKya60WXp5eZGdnc2VK1dwdHTk5MmT\nzM/PMzU1xYMHDwSvYcuWLVy/fh09PT3RR+ru7hYcSbVajUqlEpOssbExhoaGKC4uFkQqjUZDXFwc\nGo2GtLQ0+vv7OXz4sPAoPP300yJBSyJJ9/T0EBkZyUcffcSBAwdQKBQYGRlhbm4ubPoODg40Nzez\ndetWNm7cKKTiMpmMkJAQ6uvrxWg9IyODbdu2sXr1atG8zMzMRKvV4ujoSEpKCvX19fT09JCeni6m\naCkpKbS0tGBmZoafnx9/+MMf+N73voe1tbVkAPzvA26VIuPn5uYYGBggJCREkImzsrIwMTFhdnYW\nR0dH1qxZQ0pKCs3NzWg0GnJzcwVJOSwsjODgYAFYHRwcFPWXt7e3oCn19/dz7949DA0NsbCwwNra\nGicnJ1avXo2np6dIv1YqlUxNTVFYWIi5ubk42o2Pj6NUKtHT0yMkJAQ9PT0aGxt5//33hcR38+bN\nImT122+/5Y9//CPz8/P88Y9/FKWFnp4ek5OThISEAAiYipOTE7q6ukxNTdHf309ZWRnnzp3jq6++\nYnh4WGQJ2Nrasnz5ctRqNSMjI7S1taFSqejo6MDNzQ19fX1KSkpYWlriypUrdHR04O7uTmVlJX/6\n05+oqqpiYGCA9evXCyTb0NAQd+/e5a9//SulpaV4e3sTHh6Os7MzhoaG7Ny5EysrK0GwGh0dZXp6\nmqCgIMbHx0XyVVBQkAjy7e7upry8nMLCQk6fPk1XVxf29vY4OjqKUW5+fj7wKAzGz8+P/v5+SktL\naW9vx9bWVozUbt26xc9+9jMiIyPZunUrhoaGpKenk5SUhEKhQEdHh4GBATE9kNK3HBwcRFOwu7ub\nvLw8ga5fu3YtCQkJxMbGEhMTQ0REBK+//jorV67kvffew9LSksjISMbGxjh69CgjIyNs3LgRQORh\nqtVqDhw4QGNjI1lZWUxOTgoeRVFREfPz86LP0NDQIHoYhoaGjIyMCGWqBLEZHh6mtLSUkZERBgcH\nOXbsGDt37mT9+vViqlNUVCQ4mXp6ejz11FNkZWUBkJ+fz6uvvkplZSXp6eniGnuc9Z2ArMzOzhIf\nHy+QYzKZjLVr15KdnS0yEYuLi1m/fr3AoEnQld7eXnR1dYUtee3atYK8NDAwQHBwMMeOHSMmJoYV\nK1ago6ODsbExSUlJAj4iCYiGh4cJCQlhcXGR+fl5ioqKMDIyYuXKlejo6AjRTVBQECdOnOCll17C\nzMwMf39/urq6hMKyvb0dY2NjhoaGMDQ0xNvbm6mpKby8vPDw8MDV9ZGUo7+/X2QKAFy/fp3t27dz\n/fp1EUjT09ODvr4+HR0d+Pr6YmNjg6urq1Cv9fT0kJKSgkKhoLS0FEtLS5RKJeXl5URGRvLLX/6S\n1atX8/vf/562tjZkMpmYz587d46kpCTa2tr4y1/+gru7OwqFgoaGBm7cuIGZmRkZGRlERETQ2Ngo\nyrTnn39enDIaGxtJSUmhqKhISHglZJqRkRFTU1PY2NiIqPnk5GQ6OztpaWlh/fr1ZGZmEh8fL0aH\nFhYWwgsQHh7OwMAANTU19PT0CLL0/fv3RfyaQqEQKkRJFqynp4dSqRRiJ3gEsPH09ESj0bBx40Z+\n/vOf89Of/lSI0FxcXKivr6elpUUE5HR3d7N582YGBweFVVyj0YixJSBEaFKadEREhDi1lZeXMzo6\nKkRoo6OjJCcnY2pqyoULF9i2bZswpvX29tLW1iYS083MzJifn2fbtm10dXXR1NSEqakpY2NjIt9B\nKnMfPnzI/fv3iYqKwtraWgi6JGxcXV3df6rR+J0oH95///03LCws8PLyorOzk4aGBoyMjOju7mZi\nYkIARpYvX86yZcvo6+vDxMSE1NRUfH192b59O2fOnGFkZAR9fX0WFhY4c+YMoaGhnDx5kieeeAI3\nNzfKy8sxMzOjvr6eHTt2cP36dbKysggMDMTIyIixsTGuX7+ORqNhaWlJXDD37t3D1taWy
pOTBUIvKyuLhoYGtm/fLr4OzBCfXF1dhXz48OHDGAwGnnzySdra2igu\nLiYgIICGhgYAGhsbGRsbY/Xq1Zw5c0bIus+dO0dFRQVLly5l586dODk5iVNIfHw8cXFx4tRTUFBA\nQkICAwMDwjK9ubmZxx9/XDzlTSarpp/HxYsX09HRwcTEBHv27GF4eJje3l7KyspEEdJkA2hjY8PZ\ns2eBGTk2wIEDBxgfH+fBBx+kv7+fyspK3NzcSE1NpaWlRXgymOZ5VCoVFy9e5JVXXhFwHjs7O2Fi\ncztxV2wKAL/+9a955ZVXxBF669atZGVlERUVhbOzM87OzgKyWldXh7e3NzU1NYSEhNDX1yc4DWVl\nZYI2VFNTw6pVqzh9+rQAwSoUCtasWcPIyAj+/v4oFApiY2NFG+63v/0tP/vZzxgfH+fdd9/l/fff\n58UXX8TR0VG0dT7++GMOHTokBCwmrDjASy+9hKenJ0eOHMHb25ukpCQOHjxITEwMkZGRKBQKPvzw\nQxYsWCCYhCa02f3338+lS5dYsmQJjY2NaLVatm/fzkcffYSvry/z588nLS2N48ePs2LFCtRqtXha\nt7S0CGPT119/nfHxcXp6eujq6iI5OVlU3CsrK3F2dmZiYoILFy7g4OBAfHw8xcXFeHl54evrS2Rk\nJPHx8UxMTJCdnU1XVxerVq2isbGRiIgI1Gq1sIW/7777uHnzJnl5eQwPD6NWq3F2dmbRokXk5eWx\natUqrKysBLTH9CIOCAjAx8cHLy8vpqam8PT0BGYcrU3alP/8z//E29ube+65h5ycHFauXCncqIaG\nhhgYGCA9PZ3JyUnhZWnyOXzllVcEZxMgPT2dlpYWFAoFRqMRpVLJ559/Ltb5lVdewd/fX+DX7r//\nfrRaLf39/bS2tlJVVYWfnx+VlZVkZGRw8uRJYOZJ/uabb7JlyxZhtXbkyBESExPZuHEjo6OjZGZm\nYmFhQXJyMjdv3kSj0Yg2bWJiIkqlkqamJvz9/XnkkUe4dOkS/f395OfnI5PJ8Pb2pqWlheDgYJYt\nW4anpyfHjx+nq6sLT09PTp8+jVKp5Be/+AXFxcU4OjoKF/Tk5GQ6OjrYuHGjkGX/o7gragrvv//+\nG8uWLcPZ2Vl4A5qgrZcuXWLdunXU1taSk5MjHHPMzMyYN28excXF+Pj4UFFRQXp6Om5ubhw+fJgr\nV66wbt06Lly4IApqOp1OgEpyc3OJjo4W4piQkBA6OztFJ8DR0ZGhoSExfhoWFsb4+DhHjx4lLS2N\nhQsXiqN8aWkp7e3thIWFoVKpsLOzQ5IkvLy8OH78OJ6engwNDXHw4EHa29sF3j0vL098bk5Ojugk\naDQahoeHiY2Npbq6Gh8fH+HpmJOTI8ChZWVl2NnZYWtrS11dHW5ubsydO5fCwkLy8vK4ceMGjzzy\niMh9bGyMpUuXUlFRQUxMDAaDgaioKHp7ewWUxMXFheLiYtauXStmF0x1iLS0NPR6Pd3d3QQHB9Pf\n38/Y2Bh2dnYYjUauXbtGVFQUc+bMYWxsjOTkZLy8vLC0tGRoaIjp6WkBiTVRuE1TdzKZjMOHD5OU\nlERmZiZ6vZ7Y2FjmzZvHtWvXiI+P59NPP0WtVgvVqelIfe3aNQwGA0lJSXz55ZdibmJgYIDHHnuM\niYkJli5diouLCzdu3BBU8fj4eFJSUvDz82NiYoKRkRH6+/uJjY1Fp9Pxhz/8QRToFi5cSEFBAdPT\n0xQUFODr68vRo0d5++236e7uZnBwkMcff5z29nb0ej0RERFMT0+jUqlQKpVERUWxdOlS+vv7+eCD\nD4Q798mTJ4mMjMTd3Z3Kykqqq6spLy8nJiYGV1dXlEolg4ODBAcHY25ujru7O6OjoyxatEh04AYG\nBoRtnImLcvDgQebPny+uSFZWVmRnZ//ztCRNs/em/nVXVxc9PT0MDg5SWFjIlStX+OUvf0lKSgrF\nxcUcPHiQqqoqVCoVKpWK6Ohoent7cXJyori4mPXr1wMz7a1169YxMjLC4OAgLS0tDAwM0NHRIUZy\nu7u70Wq1HDx4kD/+8Y+Mj4+TmppKYGAgGo2G+fPn09nZiSRJAuddVVVFR0eHmMYbHh7Gy8uLffv2\nYWdnx7Jly1i/fj3j4+N8//vf5+rVq/j7+3PPPfdQW1sr/AtNRa9Dhw4BMxVnZ2dnzM3NycjIoK+v\njz/+8Y80NTVhb29PSEgI3d3d9Pf3Y21tTWFhIZIkodPpsLKyIiAggKysLNzd3fHx8WHhwoXo9Xqu\nXbvGzZs3xQSiSQpsUgP29PQwMTFBR0cHU1NTxMbG8pvf/IaioiJ0Oh22trYUFxfzxRdfCPs2MzMz\nEhMTmZqaorGxEQcHB4KDgwWqbs6cOeh0Opqamti3bx+tra3irj01NUVJSQl6vR6NRkN5ebkwpvHx\n8cHT01NMMwYEBLB+/XrRVisqKmJsbIyioiLs7e0ZHBwUMxoNDQ385Cc/Ed2RgoIC4bZs+r6b+J6N\njY2kpqYSHBwsbOji4uLEpOO1a9c4fvw4RUVFSJJET08PSqWSyclJIiIiBLRFq9USFBSEg4MDxcXF\nnD9/njNnzogrxuDgILW1tcLsVZIk9u7di7+/Pz4+PiQmJlJUVERERASRkZG8+OKLwMyMSUBAAGFh\nYcLTwVSzsbKyoqOjg/b2dk6ePClYnhYWFsTHxwvnL3Nzc2QyGTqdjsTEv2uo/q24K04Kv/jFL96I\niIhALpdz/fp1YmJiiIiIwNXVlc2bN4s7s5OTE15eXrz44ossW7aMixcvkpaWRmdnJ88884zQ/Wdm\nZpKXl8fu3bvJzs4mNTWVrq4u/Pz8CAwMpK+vj3nz5om+ular5fLly0JTIUkSTU1NBAYGiieFo6Mj\nBw8epLCwEFdXVxwdHfnyyy959tlnxRPe2dkZpVKJ0WikqqqKt956S3zTw8LCkCSJtLQ0RkZGqKqq\nYnJyEhcXF15++WU+++wzhoeHMRqNmJub4+zsjFqtRiaT8a//+q/Mnz+fxsZGtm3bJiYpR0ZGaG5u\nFnwEV1dXpqenSUpKIjY2FqVSyf79+6mpqcHHxwe5XI6NjQ3u7u7MnTuXhoYGzp49S01NDU1NTWi1\nWjG8Y+qlW1lZUVJSQmZmJitWrGDhwoUcOnSI1NRUhoaGmJqaEkatSUlJDA8PExQUxMjIiCBCm+oa\nra2taDQaCgoKkMlkxMbGIpfLycjIYHR0lC+++IKEhASio6OZmprC19eXzMxMrl69yvPPP4+rqys+\nPj7U19eTnZ3N448/jqenJ0ajUegoRkdHhfmuVqvF3d2dw4cPs337dmprawkICMDLy4ulS5cSGBhI\nUVER7777Lg8//DCtra0cPXpU2NStX79emNRWV1fzzDPPMDU1RUFBAdbW1mRlZTE5OSm6Y6tXr6a0\ntJR77rlHmMmcOnWKzz//XHQqzMzMMDc3F6e+yMhI3Nzc8PDwEBtHdnY2W7dupbKyEj
s7O9auXYta\nreb111+noKBAdMc++eQT0SafmprC2dmZS5cuERISwsaNGzE3N0ej0YhOyNWrV/95TgpWVlY89dRT\nFBUVIZfLiYiIoLm5mebmZmJjYwkMDGTz5s1MTk4SExODRqMRlXxvb29kMhm9vb2CkWgaQf3888+5\n//77GRkZ4fr169y8eZPi4mLhVhQdHc3k5CRz585lzpw5Ap5SXl6OJElitzUajWImACAtLQ1PT0+W\nL1/O9evXxdShSXk4PDyMi4sLKpWK6upqbGxshFGLk5MTq1evJiQkRFjCmXwETRbtJuPNxsZGoqKi\nqK6u5s033+Sdd94R8/8KhYLR0VHGx8c5cOAAlpaWDAwMMDo6yuXLl4WqNCUlhcDAQDZs2EBLSwtX\nrlxBrVYjSZJgMZp0CzKZDKPRKI7HRqNREKTvvfdesrOzOXnyJKGhoeL04uXlhY2NjcDHRUREUFVV\nxfT0NFqtFrVazfj4ODqdju7ubuFElJKSIvDwZWVluLm5ATNGqENDQ4SFhTEyMiKujqGhoQJEOzIy\ngk6no7GxkfPnzws4blZWFpcuXRJtaxPfExBF4EOHDuHm5sbg4CB1dXXk5OSwY8cOent76enpES+i\niYkJ1Go1ZmZmyGQyQkJCKCwsRKfTIZfLhQuXaRy/qamJQ4cO8dxzz9HQ0CDIVnV1dcTGxrJ8+XJx\n8lKpVBQWFjI0NMScOXMEN9VgMIgCpsloyMbGhomJCRQKBUFBQcLjor+/H09PT7y8vHB1dWXevHlM\nTk4SFBTE8PAwxcXFaLValixZInK53bgrNgVJkjh79iwZGRn4+/tz+fJlCgsLxbE+Pj4eDw8P0dZr\nbW3lxo0bHD58mLffflvc3cvKylAqlWJhJycnxRSiySbMBCQdGxsTJp89PT34+flhYWFBZGQkDg4O\neHt7i7ZPcnIynZ2d7Ny5E5hxHJbJZKSkpBAZGUlJSQlubm7Y29sLPPzg4CDHjh0T7c6JiQlqamo4\ne/Ys69ev59y5c4yNjQlHZ0DM15usz9PS0tDpdExNTTE6OsqCBQs4d+4cU1NTwh4cZuSzCQkJLFq0\nCH9/fyRJEtLrwsJCHB0duXDhAt988w1arZaRkRGuXr1KbGwsO3bswMLCgueee46UlBTS0tKwtLTE\nzMwMvV5PZmYm4eHhnDp1ivDwcFxcXPDy8iIwMJCsrCwiIiJQKpWism9hYYFcLsfLy4uxsTEh5jLl\n6+npSXt7O6Ojo2RlZWFnZ0dfXx/FxcUAgvo1NTUlMPcLFizg5z//OR9++CFLliwhPDycrVu3YmFh\nQXp6Ov7+/lhaWgqIzsGDB5mensZgMAhNRX5+PsnJyezYsYO9e/fi7e3NsWPHBGYOZk6DJnl0VVWV\nmBGxt7fHRoHVhwAADzBJREFU3t5eiNBMBkAws9msWrWKiYkJPD09hTnw+Pg4MTExpKenEx4ejq+v\nr3jAffbZZ/z0pz/Fw8ND8EE9PT0pLy8XkJmAgAAqKirQ6/V4enqi0+kIDw/H09NTSL9XrFjByMgI\nGRkZGAwGLCwsUCgUqFQq3Nzc6OjoICcnh6ysLPGgvJ24K64P77///hsuLi5ERkZiNBpxcnLigQce\noKGhgV27dmFmZiYq452dncTFxREWFkZVVRVubm60t7djaWlJUFCQKJ7t2rWLF154AXt7e5ydnblw\n4QIGgwH/WxDRs2fPCiaEk5OTwJCbhEomFkJ/f7+w7BodHeXQoUNs2bJF2LctW7YMmLFoW7t2LRkZ\nGXh7e6PT6QRDsbS0VNQ7TEIqU+3AVBGuqKgQ03IymUw8FaytrRkcHGTOnDlMTk6Kr9fU1IStrS0K\nhYKFCxcKbN3FixeFTNnf35+WlhbMzMy4fv26uFdqtVrGx8dJSUkhPz+flJQU7rnnHiwsLIRf4smT\nJzl69CjR0dHIZDLCwsIETdlEOV69ejUlJSUUFBSIeQWTM5NKpcLd3V2wC44dO0ZzczNTU1NYWFhQ\nWloqiqw6nY709HQ+/vhjdu/eLUbBv/rqKyYnJ8XTVKPR0NHRgb29PevWrSMzM5PJyUmKi4tpbm7G\nysqKffv2sW7dOiYmJgS786OPPmLVqlVMTk4SHR2NVqsVRHHT9dPa2pqGhgZKSkpYs2YNBoOBRx99\nVFgCxsXFUV1djZmZGS4uLvT395Obm8vWrVuZnp4mPT2dy5cvU1RUhJeXF8XFxZw9e1bUHtatW0dN\nTQ3Hjh1jYmKCuLg4zM3NaW9vp729nczMTAwGAwsXLuTjjz8Wg10KhUIoTltbW/Hz88PV1ZUlS5aI\nq/D+/fs5evQoixcvpqqqij/96U989tlnBAYGotVqhS6ksrLyn+f6YLL60mq16PV6LCwsuHr1Ktev\nX2fnzp2sW7cODw8PxsfH8fLy4r333qOnp4eGhga0Wq1oQba1tREYGChOB0VFRTQ1NXHixAm2bt1K\nWFiYkApv3LgRhUKBwWDAzMyMqqoqMVAyPT2NTqfj4MGD3zJsLSgoAGYYlTY2NgwNDbFv3z6WLl3K\n8ePHeeeddzh37hxZWVnk5eUJfqLpm5KQkEBjY6NgO46OjvLTn/5UtDPHx8eFs/L69etxcnLC3t4e\ntVqNn58fXV1dtLW1YW5uzo0bN3B0dCQuLo6EhAQqKyv59NNP2b59Oz4+PoL74ObmhpOTE8uXLxeY\n9pSUFNasWUNJSQm+vr7k5uZSXV3N0aNHOXPmDG1tbahUKp5//nnMzMyIiooSlu0dHR1CGdnV1SX6\n/zBD5DKd5qqrq/Hy8sLf3188VRUKBV5eXigUCnbu3El8fDz+t6Cqra2tAKIVZ/p8uVwuipFZWVk4\nODiQkZEhWsotLS2kpKQQEhLCyZMnCQoKoqysTMyVmL53/f39jIyMUFZWRkZGBnl5edja2goupolQ\nbnrapqen09nZyYkTJ4Qrsq+vr0ABmDZYE5vExcWFNWvWkJqaKsxOfv7zn4tT5+DgIE5OTiQmJgoG\niI2NDeXl5cJ/MiPj/7CYt2/fztTUlJB2m9q2U1NTtLe3iynUhIQEYmJi+PGPfyy0Hjt27CAjI0NM\n0nZ3d//zFRp/85vfvDFnzhxiY2MpLy8XjrUmJyOTp35bWxv79+8XBpulpaXExsbi4eEhThMVFRX4\n+PjwySef8Morr2BnZ4eXlxejo6O4u7vj4OAgHJqcnZ3RarUCh27SKjQ0NJCUlMT58+cFRMakzThz\n5gxbtmyhubkZCwsLPD092bdvHzqdTvSgTUpJE+E5Li5OwF0CAwORy+WcOHGCK1euCLXn6dOniY6O\nRqFQiNkD0//ZZGu/YcMGQYDW6/XY2NgQExPD3r17RVvN1II1oeJNJrQeHh5i4Gn16tW0trZibW3N\nN998Q11dHR4eHqjVajQaDa2trWKW39fXF2tra7q7u4Ufg0ajISoqikuXLpGSkiIMVU2mMlqtFktL\nSyE8GxgYIDs7m4GBAezt7UV71DQZGh0dT
WNjI8ePH8fGxoawsDBGR0fp6uoiODgYmUzG4sWL0ev1\nQrvQ2dnJ9PQ0ixYtEkwJk3v2woUL6ezsxM7Ojv7+fg4dOsT3vvc9GhoaUCqV4hTm7u7O9evXqa2t\n5ZlnnqGsrAyZTCZgts7Ozuh0Oi5duiROkjKZjMzMTPz9/fn666/ZvXs3169fx87ODplMRkNDA1u2\nbMHe3h69Xs/58+fJzc3l5s2bqFQq7r//fi5evCg8OJ555hnmzp3Lr371K6ampujs7BTj2OPj41RV\nVWFmZkZCQgJyuRxPT09GRkbo7e2ltraW4uJidu7cSX19PTCzkVpaWhIdHU15eTl9fX3ExcWh1WrJ\ny8u7rZPCXbEp7N+//w1ra2tSU1MJCQkRDryNjY0EBQXh6+uLm5sbOp2OtLQ0Lly4QExMjBA6bdu2\nDblcLqbfzMzMOHr0KNu2baOmpkao0ORyuTDulMlkfPTRR6hUKjw9PYX4xLQ7m/rMpuKWqTV06NAh\ngoKCMBqNWFlZERUVxdWrV/Hy8uKNN95gaGhIEKUUCgUFBQUYDAYhjOno6BCS4qeffppPP/2U0NBQ\nLly4IAqY3d3deHl5idHi0tJSXnjhBTEt2dLSQmxsLAUFBQwMDIh5DpOq0WQTPzY2xtDQED09PVhb\nW9PS0oKNjQ2ZmZliinJiYoKEhAQMBgPDw8M4ODgQERFBe3u7cFPu6+sTRdQjR44QFxdHV1cXvr6+\n5OTkiBeZTqcTfMrExEQGBgYEydo0ubd48WK6u7sFUVyr1TI0NMS1a9e4fv06mzdvFnMGq1ator6+\nntraWm7evElGRgaJiYkcOHCArq4urKys0Ov1uLi4UFdXJzbM0NBQYEas5OLiwmeffcZzzz2Hra2t\naIPGxcWRn59PS0sLbm5uqFQqampqMBqNHD9+XEjqTaa/p0+fZs2aNQQGBhIfH8/w8DCnT58WzIWa\nmhoqKipQKpX09/fj4OBAfX29MNl99NFHmTdvnhiJHxoaEowT0xXJxsaGtLQ0du/eLUDBQ0NDDA4O\nCmZHeXk5CxYsIDAwEA8PD0pKSrh27ZoYd7azs0Or1aJQKAQ1TSaTodfrb3tTuCuuDxMTE8JK6siR\nI3R2dtLc3Mzw8DCenp5UVVWRlZWFm5ubeHqtWbOGBQsWsGzZMg4dOkRNTY3YoU3TZkajkYSEBK5f\nvy60DA0NDUxMTHDx4kUee+wx/Pz8RHV7enoaFxcX6uvreeutt5icnGTTpk14eHjQ3NwssHFqtRp/\nf3/Gx8eprq4mPj6eZ555hoqKCqGCCw0NpbKykm+++YYvv/yS48ePs2bNGpRKJb/61a/w8fGhv7+f\n9evXf6tCfuDAAXQ6HaGhoWJjSU5OpqqqirS0NMzNzcUPU319vXgbHx9HqVTi6OhIZWWleIE7OTmJ\nNmFHRwcymYzm5mbq6upoa2tjcnISLy8vtFqtoGQbjUY2b94smAILFixAqVQKP4S6ujqcnZ0JDAxk\n3rx51NXVYWZmho+PD3Z2dlhaWtLS0iIq+nV1dYSGhjI4OMjU1JSoBxUVFWFjY4NKpeLBBx8EYO3a\ntQwODor/s8liLTIyks7OTlJTU/Hx8cHd3Z2HHnoISZLIysoSPz/p6emCEVJQUCBqNpIkERsbS0VF\nhUAHmujMGzdu5Pjx4yxevJjQ0FBBAs/Ly6OzsxMPDw8SEhLw8PAgKyuLmpoaQVwyuUYfO3aMy5cv\no1Kp6O/vJysri/DwcEGNysvL45tvvhEWdePj4zQ2NpKSkoJGo2FwcJDFixeTk5MDwAsvvEBNTQ1K\npZLw8HA2bdpEYGAgsbGxZGZmolQqkcvlwpfTzc1NdLRgxrCnr68POzs7JiYmBM36duJucXPuAYYB\n7T/63O84XLj7coLZvP4ncTfmBHcmLz+j0ej6jz7prtgUACRJKrwd++nvMu7GnGA2r/9J3I05wd2b\nF9wl14fZmI3ZuHtidlOYjdmYjW/F3bQp/MOq6B2IuzEnmM3rfxJ3Y05w9+Z199QUZmM2ZuPuiLvp\npDAbszEbd0Hc8U1BkqRVkiTVSJJUL0nSq3c4lyZJksokSSqRJKnw1seUkiR9I0lS3a1fnb6DPD6T\nJKlbkqTyv/rY38xDkqSf3Fq/GkmSMv77f/V/Jac3JElqv7VeJZIkrfmOc/KRJOmCJEmVkiRVSJL0\nwq2P3+m1+lt53dH1uu0wGo137A0wA9RAIGAJ3AQi72A+TYDLf/nYO8Crt95/Ffj37yCPVGAeUP6P\n8gAib62bFRBwaz3NvqOc3gB+/N987neVkwcw79b7dkDtra99p9fqb+V1R9frdt/u9EkhEag3Go0N\nRqNxAjgIbLjDOf3X2ADsufX+HmDj//YXNBqNlwDdbeaxAThoNBrHjUZjI1DPzLp+Fzn9rfiucuo0\nGo3Ft94fAqoAL+78Wv2tvP5WfCd53W7c6U3BC2j9q9+38fcX7387jMA5SZKKJEn6/q2PuRuNRhOI\nrwtwvzOp/c087vQaPidJUumt64XpmP6d5yRJkj8QB1znLlqr/5IX3CXr9ffiTm8Kd1ssMhqNscBq\n4FlJklL/+g+NM2e9O96uuVvyAP7EzNUvFugE3r0TSUiSZAscBV40Go2Df/1nd3Kt/pu87or1+kdx\npzeFdsDnr37vfetjdySMRmP7rV+7gS+ZOcJpJEnyALj1a/cdSu9v5XHH1tBoNGqMRqPBaDROA5/w\nf46831lOkiRZMPPC+4vRaDx268N3fK3+u7zuhvW6nbjTm0IBECJJUoAkSZbAVuDknUhEkiQbSZLs\nTO8DK4HyW/k8euvTHgVO3In8/k4eJ4GtkiRZSZIUAIQA+d9FQqYX3q24l5n1+s5ykiRJAv4MVBmN\nxt/+1R/d0bX6W3nd6fW67bhTFc6/qryuYaY6qwZev4N5BDJTAb4JVJhyAZyBbKAOOAcov4NcDjBz\nvJxk5n755N/LA3j91vrVAKu/w5w+B8qAUmZ+sD2+45wWMXM1KAVKbr2tuQvW6m/ldUfX63bfZica\nZ2M2ZuNbcaevD7MxG7Nxl8XspjAbszEb34rZTWE2ZmM2vhWzm8JszMZsfCtmN4XZmI3Z+FbMbgqz\nMRuz8a2Y3RRmYzZm41sxuynMxmzMxrfi/wP4kGDUUTp1JwAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1383e38310>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGlwlOeV/v3rRa3e1OpWq7WrtaN9FwJLbBYIxI7BYIMd\nPLE943Ecezxje8ZO6l+xTZZK7Ey8TRLHjuM4gBeCMWB2AQIJEBJoF2pJCO1Lt/ZW7y2p3w/89dTk\nw9T4rXdc43qL6wsl6unu5+m+73Ofc13XOSKfz8c93MM93MMCxP/bN3AP93AP3y3cCwr3cA/38De4\nFxTu4R7u4W9wLyjcwz3cw9/gXlC4h3u4h7/BvaBwD/dwD3+Dby0oiESiMpFI1C4SiW6LRKKXv63P\nuYd7uIf/WYi+DZ+CSCSSAB1AKTAA1AK7fT7frf/xD7uHe7iH/1F8W5lCIXDb5/Pd8fl8HuAzYOu3\n9Fn3cA/38D8I6bf0vpFA/3/6ewBY8l9drFKpfCKRCJ1Oh7+/P16vF5vNhkQiQaVSMTExgVKpRKFQ\noFQqGRsbQ61W4/V68Xg8zM7OEhQUhN1ux9/fH6fTyfDwMFqtFj8/P6RSKS6XC71ej9PpxN/fH5fL\nhcfjEV4nl8uZmppCKpWiVCqxWCwEBgYyNzeHRqNhcnISPz8/hoeHMRqNKBQKpqamCA4Oxmw2o9Pp\ncDgciEQivF4vMzMzxMXFYbfbEYlEiEQiZmZmkMvleL1eVCoVTqcTr9eLWCzGbDYTGRkp3K/NZkMq\nleL1evHz80MulyORSPB4PIhEIubn57HZbKjVaubn55FKpdjtduH1s7OzeL1eZDIZLpcLf39/JBIJ\nUqkUi8WCTCZDJpPh5+eHx+Nhbm4OpVKJ0+lkdnYWlUrF3Nwcs7OziEQiVCoVVqsViUSCWHz3LFl4\nb4/Hg06nw+VyIRaLkUgkwr05HA6USiVutxufz4fNZiM4OBi3241CoUAul2Oz2bDb7UxMTJCYmAjA\n7OwsEokEr9eLw+EQ1sXCfS48o5+fH3Nzc8hkMkQiEVNTUwQEBOB2u5mdnQVgZGSEiIgI/Pz8UKlU\nTE9PMzs7i0ajwWq1EhQUhEwmY3x8nPn5eWQymbCOlEolk5OTBAYG4nA4kMlkAExPTzM+Po7BYMDr\n9aJWq5FKpUxPT+Pv749YLGZ+fh4Aj8eDVCplZmYGvV4v/DZyuRyz2UxgYCA+nw+xWIxKpaK3t5eQ\nkBBcLhdKpRKfz4fH40Eul+NyuVAoFNjtdmGtOp1O5ubmUCgUzM3NIRaLEYlEuFwuJBIJIpEIsVjM\nyMjImM/nM/x3m/fbCgr/LUQi0T8A/wAQHBzMT37yE0JDQ6mrq+Py5cusWrUKvV5PdHQ0169f5777\n7qOzs5OSkhLa2trIzs5m//799PT08C//8i9cu3aNtrY2AgICmJiY4JNPPuHjjz/mF7/4BQ8++CAV\nFRWkp6eTmprK5cuXyc3NpaGhAa/XS2ZmJjdv3iQ6OprY2Fj8/f1RKBS0t7cjFosZGhqioKCA1tZW\n3nnnHQ4ePMjly5fxeDxkZGTQ1tbGzMwMxcXFXL58mbGxMQYHB3n55Zc5ffo0fX19xMfHk5GRQU1N\nDffffz+NjY3Mzc2xd+9eTCYTmzdv5plnnmFychKlUklubi7+/v7cvn2bjIwMGhoaCAkJYWxsjMWL\nF3Pjxg3EYjEulwu5XC68rqenh9jYWADS09Npb29neHgYn8/H2rVruXLlCtHR0fT29mI2mykpKaGl\npYWUlBRu375NWloaXV1d9Pb2EhQURHp6OvX19czOzhIaGopCoeDOnTsYDAZOnTrFAw88wNjYGOHh\n4Rw6dIjs7GxEIhHp6ek0NTUhlUqRy+Xo9Xr6+vp48skneeutt4iKimJiYoLR0VF0Oh1JSUm89NJL\nfPDBB3z11VeEhYWRkZGB2+1mfHwcq9XKRx99xM6dO3nggQc4c+YMiYmJ9PT0sHjxYkZHR8nNzeXf\n//3fKSwsZGxsjLm5OQ4ePMjIyAh79+5ly5YtGAwGJiYm+PTTTwkICECtVtPa2kp0dDSFhYV4PB4q\nKip4+umnaW5uZmJigqqqKgoKChgcHCQpKYmMjAx8Ph8rVqzg0UcfZWpqioSEBI4fP05qaiqtra2U\nlZURERHB0NAQYWFhtLS0UFZWxsjICP39/Xg8HiYnJ3E4HBQVFSGXy6mursZoNPLee++xbds25HI5\nycnJSKVShoeHhWcKCQmhvb0dvV6Pw+GgoaGBXbt2IRKJcLvdwqEXFBSEzWajsbERnU7H73//+95v\nsje/rfJhEIj+T39H/d//E+Dz+f7g8/kKfD5fgU6no6enB4VCQVpaGk888YRw8vb09CCVSmlsbORH\nP/oRZrOZmZkZ/vznP6PRaHjwwQd57rnnMBgM+Hw+fD4fCQkJAJw5c4aOjg7cbjc/+MEPiIuLIzEx\nkb179yKXywkKCuKFF14gKiqKqKgo5ufniYuLIz4+nkuXLtHR0YHFYkGlUjE8PExSUhIANTU15Obm\nYjAYGB8fZ3x8nJCQEKampggNDWV+fp7vf//73Lp1C5lMhlgsFn7k3bt3MzY2xpIlSxgbG+P06dP4\n+fkBUFxczOTkJEuWLMHj8eBwOKipqWFkZITZ2Vm6u7tZunQpd+7cwePxcOXKFUwmE5WVlYhEIgYH\nB1GpVMJiv3XrFv39/XR1deFyubh8+TK3bt2ldWw2G8888wxdXV0sW7aMCxcuAHDx4kWuXLnC+fPn\nGRkZITo6mszMTGJjY/Hz86Onpwe4myWsXLmSnp4evF4v3d3d5OTkoFKpCAwMRCqVEhISgkwmIzEx\nEaVSSWhoKLdv3yYzMxOz2UxMTAx6vR6VSoVEIgEQMoNTp04hl8sJDQ1lYmKC3t5eli5dSnBwML29\nvSQmJtLX18eiRYuYnJwkLi6OgYEB0tPTaWxsZHh4mNDQUH73u98BEBMTw5kzZ7h16xYul4tnn32W\n9PR0nn76aYaHh5mZmaGmpoavvvqKJ598EqPRyOjoKC6Xi7Vr12I0GvF6vaxcuZL29nZOnjx5d2FH\nRfHUU08xPT3N1q1bSUhIYN++fcTGxjI7O8uaNWtYs2YN//AP/4DT6SQ8PJxVq1YhkUh46KGHWLdu\nHTMzM4SEhHDffffR3383wV54ZpPJxJkzZ2hvb0ehUBAREUFnZyctLS1cuXKFgIAA1q9fz9jYGA0N\nDdTU1PDyyy8zOjpKV1cXra2thIWFodFovvHm/baCQi2QJBKJ4kQikQx4GDj2X108PT0N3F2oPp+P\n3/zmN4jFYsLCwsjJyWHLli2sWrWKxx9/HIvFQmdnJz6fD7VajVqtpqSkhNu3b7NlyxYAtFotAAEB\nAfyf//N/CAwMZGxsjOTkZEwmE6GhoXg8HtauXcvc3By7d+8WshKlUkliYiKvvvoqMTExrF27Fj8/\nPxITE/nss88AyMnJwWaz0dDQ
gL+/P1u2bCEvL4/p6WnsdjshISGcOXOGAwcOcOTIEQICAqivrycm\nJob6+nruv/9+IiIiiI+Pp7CwUEgzOzs7SUxMpL+/H4fDQUxMDEVFRTQ3NxMVFSU8V0REBLdv3+bk\nyZM0NzejVCqZn58nKSmJ4uJiXC4XExMTZGZm0tPTQ1lZGVFRUWg0GpYtW0ZXVxfp6el0dXUhFou5\ncuUKFRUVHDx4kIqKCiorK5mcnKSiooKhoSEUCgWhoaFkZ2ezZs0aZmdncTqdeDweent7CQsLY2Zm\nhkceeQSz2YxSqcRsNiORSBgdHaWqqoq4uDh6enqYnZ3l17/+NUqlkoyMDPbs2UNhYSHLli0DoKWl\nhdLSUgoKCjh69CjHjx9HLBYTGBjIhg0b0Ov11NTUMDU1RVJSEnK5nICAAA4cOMD777/P0NAQDz74\nIFKplFu3btHe3g7AoUOHiIqKYmRkhNHRUSYnJ4mIiKC2tpb8/HxEIhFqtZr169fT3t5OZWUlEomE\n8vJyamtr6enpITs7m4MHDxIeHk5qaioAc3NzvP322zQ3N6PX66msrOTIkSN88cUXDA4OolAoOH/+\nPKdOnWJubo75+Xlee+014uPjqaurQ6fTER0dzejoKFKplGeeeQZAKDWlUikxMTGo1Wqmp6cxm80k\nJiby93//9zz88MNC6VJVVcWZM2fYuHEjZWVlpKWlkZCQgEwmY8mSJXzxxRffePN+K0HB5/PNAj8E\nzgBtwBc+n6/1v7peLpczOzvL5OQkPp+P1NRUwsLCCAwM5Le//S1tbW2IxWKSk5NZsWIFy5cv5/z5\n80RERKBQKIiLi6OtrY3Ozk7UarVQ983NzdHX10dSUhISiQSn00loaCgOhwO73c7NmzfZv38/r732\nGiUlJaSmpjI2NkZ5eTnHjh0jKSmJqqoqpqenGR4e5sMPPwRgcnISvV7Po48+Sm9vL3K5XEjzk5OT\nMRqNrFy5ktzcXKanp0lMTGTz5s00NDQIm9PpdJKens65c+cYHLybRI2NjeH1ehkeHiYgIIDe3rvZ\nXlBQEIWFhaxatYr29nZOnDhBamoqX331FcuWLUOlUrFx40YhG1GpVERERHDu3DkKCgq4desW2dnZ\nGAwGamtryc3NxWazcfbsWWw2Gw6Hg8LCQv7xH/+RZ555hqeffprt27fz4YcfolarCQ4Oxs/Pj87O\nTt544w1CQ0Nxu92sWbOGjRs3YrfbkclkfP755ygUCmQyGZs3b8ZkMhETE4PFYuG3v/0tLpeLN998\nE6PRyOzsLENDQ/T09KDRaLh48SIAarWayspKYmNj2bFjBytXriQoKIhNmzZRUVGB0+nk4Ycfpqen\nB5/Ph0wmIzU1VSgz5HI5bW1tzM3NsXbtWgICAgD43ve+R19fH/7+/oyOjnLnzh26u7sJDg6moKAA\nk8nE4sWLGR8f58yZM3R3d6PVavnxj39MUlKSkOWEhYVx5coVzp49C0BtbS0xMTGsWLGCqqoqoqKi\nKC8vx+VyAXDt2jX8/PxwOp2sWbMGiUSCwWDgyJEjSCQSrl+/Tl9fH/39/fT09AiZWFBQENnZ2SiV\nSjo6OkhKSkKj0TAxMYFcLqe1tRWVSkV0dDTV1dUAbNy4EbPZTHJyMp2dnUilUlJSUjCZTDz99NPf\neP9+a5yCz+c7CZz8Jte63W4yMzOpqqoiLS0NuVxOeXk5W7duxWq1Mjw8jE6nIywsjCVLlrB8+XIe\neOABRCIRfn5+tLS08OKLLxIfH09HRweHDh0CIC8vj9bWVsRiMU1NTYSFhZGdnc2bb75JcnIyWq0W\niUSCUqmkrq6O2NhY+vr6mJiYwOPxEBcXh7+/P1euXKG4uJiGhgbgbmp38eJFdu/ejdPp5MMPP8Rq\ntdLU1ERwcDBlZWXYbDaUSiXh4eEEBQXx2muvIZPJiIyMpKWlhevXr2M2mwkLCxM4AJVKRU9PDytX\nrsTPz4+hoSECAwO5evUqAQEBFBQUsHjxYoEwa21tZdu2bVgsFo4cOUJubi5FRUVMTk7S3d1NSEgI\nMTEx2Gw23nnnHVauXIlMJqO2thaZTEZzczOlpaWEhIQwMTGBSCTCZrNhs9kQi8VcvHiRyMhIEhIS\ncDgcWK1Wtm7dilgspra2lqSkJMLDw6mtrSU5ORmfz0dzczM3btygurqapUuXkpCQQHd3NwEBARgM\nBnJzc+nr6yM5OZmBgQEyMzO5cOECycnJwN1syWw2s2bNGlJSUpiYmEChUDA0NERZWRmXL1/m9ddf\nZ2ZmBrFYzNKlSzl69CjNzc24XC6OHTtGSkoKSqUSf39/GhsbAWhraxMI44cffpje3l4+/fRT/Pz8\nOH78OCtXrmRgYICQkBDeeecdRkZGOHDgAJOTk+Tm5vLiiy/ywx/+kLy8PORyOQ0NDRw4cACpVMry\n5cvp6+tDLpeTkZGB0+mkrq4Ou91OXFwcbrebgIAAmpqauHnzJnl5eTz22GM0Nzdz7do1kpKSGBoa\nwuVyCWVJX18f4eHhAnF57tw5tFotCQkJ2Gw2iouLsVqtWK1WtFotERERLFu2jPDwcOrr62loaBDW\nsF6vZ3R09Bvv3e+Eo1EikbB+/Xry8vJISUlh8+bN6HQ6Ie2Mj48nNDSU0dFR5HI52dnZJCQkcP36\ndRoaGoiIiCAsLIxz587x5ZdfsnnzZgA++OADJiYmuHDhAjabDZlMhtVqZWhoiISEBFJTU0lISCAs\nLIzbt2/T0NDA5s2bCQwMZPHixYIKcd9992G1WoUo7vV62blzJ1euXEGv1wup3kIdXFVVRV1dHRKJ\nhLGxMRQKBTabjdTUVBb4k5CQEJYvX47X60WpVAKgUChYunQp4+PjdHZ2YjKZ8Pl8pKenExERIZB9\nC2ltcnIyiYmJxMfHI5FIyMjIoLOzk8jISPLz89FoNISFhbF582ZWrlyJzWYjLy+PwMBApqam0Ol0\nzM3NMT4+TmBgIBaLRVBtdu3axcTEBAMDA6jVavLy8khKSiIqKorW1laBeGtra6OoqIj8/HyGhoZw\nOp34+fkRGxvLkiVLBHbf5XLR2tqKw+FgyZIlpKamolAomJ6eZmhoiIqKCgCWLl3KM888g8lk4siR\nIzQ1NXH69GlsNhv9/f3s2bOHJ554gu9///uEhoai1WqFMqa4uJj169ezdu1a9u7dS35+vpA1GgwG\ntm7dSmJiIv/8z//MuXPnhO8rJSWF3Nxctm/fjkaj4a233uJXv/oVcrmcX/ziF/zhD39g0aJFgmrw\n4osvCu+blJTE5cuX+eqrrygrK2Nubo729nbS09N58skn2bVrFytWrCAiIoKGhgYmJycxGo2YTCYm\nJyeZmpqipqZGIIw3btwIwJYtW5ibmxMUqbS0NMRiMQ0NDajVanp6epiamkIsFtPd3Y3D4UCn03Hx\n4kW6u7tRKpVMTU1x584dxsfHMZvN33g/fieCAsD+/fvZsGEDERER5Ofno1Ao8Hq99Pb2s
mnTJlpb\nW5mammLfvn0EBQWh1+sxGAzEx8dTXFyMyWSioKCA/Px8YYGVlZVhNBpZt24dDocDt9tNXV0dy5cv\np7m5mfr6eqamptBqtTidTtra2hgdHWXZsmV4PB5u3LhBVFQUJpOJ+fl56urqgLs1/a1bt1i3bh1L\nlixh8+bNAkdhsVgYGBgQJLqIiAiKioqIiIggOjqa9evXU1JSgkqlwuPx8PLLL+N0OgG4ffs2n376\nKXl5eQwNDWE0GgUZcGJiAq/Xyy9/+UvkcjklJSW0t7dTU1PDwMAABoMBi8VCQEAAt27doqOjA7lc\nzqeffiqcvqmpqaSmplJcXExMTAxPP/00q1evJjY2lvHxcRwOB35+fhQXF2OxWIiJicHlclFRUSF8\nT319fWzbto3Q0FDUajUpKSmCShIfH092drZQrplMJlQqFWq1mri4OKHMWlA/4uPjSUlJYePGjdx3\n333A3RLq5MmTmEwmLl68SF1dHSEhIdTV1dHc3MzNmzfJycnhe9/7Hn5+flitVkH5WQi8MTExTE9P\nc+TIEUJDQwGorq5Gr9dz/PhxysrKsFgsZGRkUFhYiEKhYNmyZQIp7O/vT11dHRaLhQcffJCsrCzU\najWNjY1cv36doqIipqamgLu8lcPhYPPmzUxMTLB+/XoGBwfRarX4+/sjEokYHx9neHiYyclJiouL\nUavVdHR0kJOTI2S7JpOJkydPCu8rkUjYuXMnn332GfX19UgkEmQyGcXFxeh0OiF4z8zMcPXqVU6c\nOMHt27fxeDzIZDKys7NJS0sjNTWV6elp+vr6vvFe/F+TJP8z7HY709PT3Lp1C5PJRHt7Ozk5ORQW\nFiKRSPjBD35Aeno6gYGBGAwGmpqaUCqVQh3ncrnYs2cPPT09HD16FLlcDsDQ0BBisViQMoODgwG4\ndOkSZ8+eRSwWk5mZiVgsZvny5aSkpHDt2jXm5+fp7Oxk06ZNXL9+nQceeACn00l8fDwAAwMDjI2N\ncePGDTo6OnA6ndy6dYslS5awY8cO3nvvPZ566ik++ugjjh49yrvvvsvjjz9Oeno6P//5zykpKSEx\nMRG73c6xY8fIyckBID8/H39/f1pbW9FoNEgkEsLCwhgcHGTNmjU0NTUJr1uom0dGRigpKSEzM5O2\ntjaB4xCJRExPT6NUKrlz5w5SqZTq6moee+wx5ubm2LhxI5cvX0an06HVann++eeFwNvZ2cn4+DiV\nlZUUFRWRkZGBXC5nZGQEj8cjBCKtVit4B4aHh4mOjmZ+fp7ExEQqKytpbGxkdnYWo9FIe3s769ev\nZ3R0VPCgzM/PYzKZsNvtgvpgsViIjY2lsLCQ3t5e5ufnCQ8Pp6mpSSiv4uPjhed/9913AfD392fT\npk0EBQVRXl5Ob2+vkMEAbNiwgc7OTl544QV6e3txOBzU1dUhlUpZtGgRNpuNyspKbt26xalTp9ix\nYwd5eXk8++yzFBcXU1paikgkoqurC71eT0REBAAdHR2oVCo6OjrIzc3FYrHw3HPPodVqcTgc/OY3\nvyEgIACPx8OyZcswmUyMjo4SFRXFZ599xqJFiwTFIS0tjZGREeAub/XKK6/gcrmIiYmhp6eHdevW\nCd6LqKgozGYzs7OzrF27loyMDF566SX0ej3JycmoVCq+/vprrFYry5cvJysrizNnznyj/fidyBSC\ngoLIz88X2Oro6Gj8/f3R6XTk5+czPz9PTk4OxcXFdHV1kZSUxIEDB/D5fKxcuZLJyUlee+01mpub\nUSgUwgKLjIzE39+f4eFhLl26hE6n49KlS+zfv5+BgQHEYjFpaWmEhIRw8+ZN5ubm6O/v58aNG0gk\nEvr6+khLS8Pr9WI2m3n44YcBcDqdgnmprKyMzz//nNWrV7NixQqmp6cpKSmhoqJCWMCZmZls3LhR\nMAS5XC5ycnLw8/MjJiZGWBR37twhMzOTW7duoVarcbvd6PV6oqKiqKiowGAwYDKZyMzMpKysTOAI\npFIpkZGRREVFIRaL8fPzIyoqiuXLlxMQEEBSUpIg2bpcLiorK/njH//IrVu3OH36NC+//DJDQ0NY\nrVZcLhfj4+NkZ2cLgdLn8wlyrMPhoLu7m6ysLGZnZ+nv70ev11NaWkpQUBBms5mBgQEKCwvR6/WC\npp+SkoJKpRJq5UWLFnH69Gkh/b958yZwV9lZIOoSEhIwmUysXLkSrVZLYWEhAFarVSghd+7cCdzd\nRBcuXOD27dtYLBbKysoICgoSWPezZ8/S2dnJ559/zl/+8hdiYmKQyWTYbDZ0Oh2jo6N0d3cLGVxE\nRAQjIyNkZ2fz8MMPs2rVKm7evIndbmfPnj289957wF1iVCQSsX37dpKTkxkfH2dqaoqBgQE+//xz\nxsfHsdvtwpryer34fD56enpwuVzCtQqFgpCQEEFOv3LlCo8//jh79+4lLy9PCLQqlUowjP31r3+l\ntrYWq9XK2NiYYOxbtGgREomEFStWsGrVKux2uxAcvwkkr7766v+X/fw/gjfeeONVnU5HZ2cnCQkJ\niMVi1qxZQ09PDyMjI0gkEiwWC7Ozs2i1Wm7fvs3KlSvp7u7m0KFDgi6u0WgwGo3ExcVx/PhxHnnk\nEWGzREVFCeTi119/zdq1a3nooYfYtm0b//qv/0pGRgYxMTEkJSVRUFCAx+Ohvb0dk8nE8PAwjzzy\nCCdOnKCiooLs7Gzy8vK4cOEC9fX1FBUVoVQqiYqKQqlUCnWzSqVi69atxMbGCif2lStXBLZ/fHxc\n0Ld/+9vfUlpaytjYGGKxGJ1OR05ODg0NDYyOjrJq1SrcbjcajUZg7BsaGkhKShIWt9frpb6+Hr1e\nL/AnnZ2dOJ1OUlNTkcvlaLVaWlpaiI+PZ2Jigv/4j/8gNTWVXbt2oVKp0Gq1KBQKtFotTU1NzM3N\nkZWVhdlsxmQyUV9fj9FoxOPxkJiYyO3btwkODmZiYgKz2cyCM3VBKistLaWvrw+RSMTs7CwhISEM\nDg7S29uLxWIhNDSUa9euYTQauXz5MkajkZycHGJiYvjiiy8oKytjZmaG69evI5FIWL16NSdOnMDr\n9Qp8UV5eHtHR0Vy7do2pqSlu3rxJYWEharUahULBsWPHOHDgAAcOHKCoqIi6ujp27NhBbGwsRqOR\n4eFh6urqyMjI4NatWzzyyCOMjo4SHx+Pz+dj69at6HQ6bDabkIYnJydz+PBhYmNjSU9PZ2pqipmZ\nGcFJaTAYsNvtmM1m+vr6CAgIIC8vj/vvvx+73Q4gOCc3bNiAw+EgOTmZGzduUF9fz+OPP87c3BxV\nVVUolUqMRqOQXdXU1ODv709ZWRmzs7NCkHW5XNjtdqG8MJvNGAwGVCoVdXV1tLa2Dr/66qt/+O/2\n43eifJDJZKhUKsbHx+nv7ycvL48PP/yQgYEBXC4XAQEBtLS0YDQa0Wg0xMTEoNVqSU9PJzw8HK/X\nS3NzMyMjIxQUFAiMs8fjoa2tjbi4OAwGA/39
/bS3t/PSSy9RUlLC4OAgly9f5t1338VqtXLo0CG8\nXi95eXn4fD5mZmYIDAwUyoOZmRngrlPQZDIRFhYmOCK1Wi1qtZo333yT4uJirly5gr+/v6BY6HQ6\nAKKjozEYDHz55ZdERkYyPj4u+B/EYjGjo6NIJBImJycpLy+nsLCQmZkZLl26REtLC06nk5ycHEJC\nQpidnaWjo4O4uDhsNhthYWHIZDLGxsbo7u7G6/USGhpKUlISFy5cICkpiYsXL2Kz2Th69CifffYZ\nZWVldHd3c/DgQXp7e0lPTycsLIwTJ07w9ddfs337do4cOUJsbCy1tbXcf//9jIyMMDY2hkqlQiwW\no1aruXjxIlarlSVLlrB69WpaWlr48ssvmZ6epri4WFAsEhISaGtr4/jx44KzLzAwEJvNBkBYWBiH\nDx9mfn6e3NxcJicnyczMZG5uDq1Wi0gkIjg4GKfTyfj4OPn5+URERODz+QRXp8vlEjKq69evA3dl\n723bthESEsLGjRuZmZnhD3/4g+DXCA8PR6FQ8PTTTwt264yMDAYHB2lubiY1NRWRSMSGDRsoLi5m\n//79AGRkZFBVVUVISAg7duxgdHRUyCSDgoJobm4mMzMTj8eDUqmksrJScE7OzMwwMDDA/Py8YLFe\n4EAWOJoP+LqwAAAgAElEQVT4+HhUKhVwNxANDg7i8/kIDQ3Fz8+PzMxMISPJzMzk0UcfRavV8vnn\nnwuS5YL/55viO1E+2Gw2DAYDGRkZwoleUlLC/Pw8Go2Gw4cP09TUhNFoJDk5GZlMRnR0tNA78Je/\n/AWbzcYDDzwgnJBw94vdtGkTixcvRqFQEB0djUQioaurS7CVAty6dYuPPvoIsVhMW1sbPp+PqKgo\ngoKC8PPzY+nSpXR3dwv329bWxsDAAFarlbVr1xIfH4/dbsdoNBIYGMj8/DyRkZFcvHiR119/XXAg\nhoaGUlRUxNKlS4mLi2Nqaor5+XlB056eniYpKUlIJ5VKJYODg1RVVREUFERKSgpJSUns3LmT6elp\nLBYLer0ehULB6OgoFy9eFAitxx9/HKPRKPAPsbGxTE9PMzAwQHNzM263m8bGRm7cuMFHH32E3W4n\nPT0dj8eD0WjEYDCwbNkyZmZmmJycZGRkBJ1OJ3jwZ2dnUSqVREZGMjs7S1hYmECuXr16lZmZGaRS\nKd3d3XzxxRfcuXOH1NRUCgoKqKqqwmKxCBmZXq9nxYoVACxatIj4+HieeuopjEYj9fX1HD58mKio\nKOrr67l+/TpTU1OEhYUxNzdHSUkJjY2NlJeXs2XLFvR6PY8//jh+fn5UV1ezadMm4K5foKamBqVS\nSXZ2Nv7+/ixevJjIyEgWL17Mjh070Ov1BAQEcPjwYT777DM6Ojo4d+4cly5d4tq1a4Ll+IMPPqCy\nshIAs9lMbGwsixYtoqqqCrFYTG9vL1qtFpVKRVJSEp2dnZw7d47y8nKsVivnz5/HYrEIGUBQUBA+\nn4+qqiohEzGZTMzNzQF3+RCNRkN4eDhhYWHU1tZy+vRppqenuXr1qpAFtrS0IJVKBbL66tWrDA8P\nY7PZsFgs33g/fieCgkaj4dq1azQ1NXH58mVaWlq4//772bp1Kw8//DBOp5OEhASqqqro6OggMTGR\nrq4uLBYLX375JcuWLSMzMxOZTMbk5KRwOs/Pz+PxeGhtbSU2NhapVEpSUhKFhYU4nU6Cg4Px+XxU\nVlYyNDQk2HL3799Pf38/vb299PT0MDg4SEJCAm63G7hL8mRlZbFx40ba29vp7u6mpqYGj8fDtm3b\nyMjI4MCBA8jlcgoKCpBIJCgUCjQaDYODg1RUVNDW1oa/v7+gYAC4XC6WL19OeHg4bW1txMTEEBER\nQWBgIMHBwcK/C4aW27dvExUVJZzaBQUFQhPT6dOnWbFiBW63m56eHuRyOVarVZD28vPzeeONNzh6\n9CgRERG89NJLJCcnC/6KzMxMioqKhIarBcddW1sbCoWC8PBwRkZGsNlsTE5OYjabCQgI4Pr161RU\nVNDd3c2mTZswGo04nU4aGhrIyMhAKpUyMDBAaWkpXV1dhIaGCg5MuKtCZWVlCb/bQi/BoUOHhMxw\nQaOfm5vjvffeIygoCIVCQWNjo/BsS5cuJSsrS+CX1qxZw4oVK6ipqeHixYssWrQIr9dLYGAg6enp\nQrPRwMAATqcTsVjMyZMnCQsLQyqVChbvzz//nI6ODtatWyes3wW+RqVSIZPJCA0Npaenh+DgYOx2\nO2KxmGXLlrF06VJUKhUKhYKzZ88KrtDr168TEBBAUFAQu3btAuCRRx4hICCAmJgY3nrrLaxWq2BZ\nfvbZZ7HZbLS1tREVFUVwcDAymYzCwkKmp6dJSUmhr6+Pf/qnfyIlJQWpVCo4ML8JvhPlw/z8PIsX\nL2ZsbEzovKupqUGn0/HnP/+ZPXv2UFtby9q1a/nnf/5nzGYzBw8eZGBggMDAQMLDwwUTksViISsr\ni+rqarZu3Up5eTnr16+nsbGR9vZ2gWz76quvcDgcbNu2jdzcXHbs2IFWqyUgIAC73U58fDxlZWVC\nJ9yBAwcEsi0yMhKLxSLU8y0tLcIiqa+v59ixY5hMJl599VVWrlyJxWLhxIkTREdHk5qaitPp5OrV\nqwCcP3+e6Oi7bSKTk5NMTEwI3YXj4+M0NTUxOTkpqApBQUF8/fXXvPfee/zpT39CLpcTFxfHT3/6\nU/R6vXBqFBQUMDk5SWFhIZ2dnTQ3N7Njxw5SU1Npa2vDZrMxNDTExo0bSU5O5tixY2RlZZGZmUlT\nUxMZGRnCKbYQhJxOJyEhIUxOTrJo0SKhdAkODuYvf/kLzc3N/OhHP2J+fp6PP/6YTz75hPDwcFpa\nWigpKWF4eFjYOA6Hg8DAQBITEzl//jxdXV0APPnkk8hkMvr7+wkICOD8+fMkJyfz/PPP09nZSXd3\nN7/4xS/44osviI2Npb+/Hz8/P/z9/fF4PKxYsYITJ07Q19fHsmXLuHPnDnA3U1joowgLC+PHP/4x\nDz30EAaDgbGxMcrKynjzzTdpamrihRde4ODBg8zNzbFt2zZmZmZQKpXExsZy+/ZtSkpK+PnPfw7c\nNVuNjY0RHR1NWFgYarWampoaXnzxRWJjY2lqasLtdmM0Gvn444956KGHhOB6/Phx1Go1AwMDuN1u\nNm/ezJEjRwDYt28fa9euRa1WEx0dzR//+EcGBgbIy8ujtLSUXbt2IZfLqa+vx+12YzAYuHPnjmC1\nT0tLIz8/n48++khQsL4pvhOZgt1uJzAwkOLiYubn5zGbzVgsFsLCwkhJSWHRokXs3r0br9dLRUUF\nNTU1FBQUCG29c3NzXLx4kenpaYxGI0ajEYD33nuPrKwsDh48yNTUlOAhkMlk+Hw+wbDT1dXFpUuX\nuHz5Mr29vbjdbmpra/nyyy8ZHh7G7XYLrdUAw8PDhISE8P777zM4OIjBYMBsNuP1enG5XKjVarZu\n3Yp
UKqWvr4/p6WmWLFlCREQEOp2O+Ph4wsLCBJt2fX09AKtXr2ZkZETYiD09PUKJdPPmTXp7e5md\nnWVgYACj0UhPTw8ej4fbt28LwU6j0aBWqzl9+rRgcY2KiqK5uZlz584J7tHh4WEAIbNSq9XU19eT\nlZVFV1cX4+Pj9PX10dTURHNzMz6fj87OTux2O06nU7Drtra24vF4eOWVV9i9ezc5OTlERUXx8MMP\nCyTxj3/8Y9RqNWKxmPr6epxOJ+fPnyctLY2KigrsdjsFBQUAAuez0MK9efNm/vSnP+HxeMjPz8fP\nz4+GhgYWL16M0Whk9+7dfP/73+exxx5jz549ghtTo9FQW1sr8EASiYTh4WE0Gg29vb1UVFRw9uxZ\nXn75ZU6dOsUTTzyBy+Vi+/btfPXVV2g0GrRaLf39/RiNRg4fPszvf/97JiYmOHXqFP7+/gBIpVLW\nrVvH3Nwc2dnZQmv0yZMncbvdxMTEkJ+fT2RkJFeuXGFgYACHw0FERARbtmwRykmbzUZHRwfFxcUA\nrF+/HoVCgdVqZWRkhOXLl7N+/XqkUqkQ/H0+H+3t7YJ5y2AwIJFIqK6uZmhoiPLyclJSUrBarSxa\ntOgb78fvRFDQarVER0fT3NzMli1bKCgoQCQSCR128fHxBAcHEx4ejp+fHxKJRKifN27cyO7du0lM\nTOTGjRvcuHGDsbExAPR6PTdu3CA/P18wL4WEhHDp0iWhIaa0tBSXy0VnZyd9fX3IZDIUCgWFhYWU\nlJQA0NDQQH19vVCfLvSuL1myBK1WS3Z2NqOjo1RWVpKYmEhJSQnXrl3D6/WSmpqKXq9n8eLFZGVl\ncejQIc6fPy947BfckQAikUiQ7KxWK/Pz86jVaoKCgoiLi8NqtQqZQUhICHfu3MHn82G329m+fTt5\neXlotVp0Oh1Lly6lra1NsMXm5+cLkuj+/fuxWq3Y7XbUajUTExNoNBoSEhIEf8MCb9La2srFixfR\naDRoNBpBQWhvb8fpdLJ69WpBEs3KymJ0dJSxsTGsVisxMTH09fVx69Yt7HY74eHhREREsGjRIpKT\nk4V2dqPRyPj4OHCXdN66dSsDAwPo9XpCQkLIzc0lJiaGrq4uHA4Hr7zyCqdOnWJ8fJwVK1bg8/k4\nd+4cv/rVr/D39xds1Zs3bxYs5AMDA0LLd2lpKS+99BJNTU1ERUXhcrnYsmWL4BytqqoS5MO6ujre\neOMNSktLyc/PZ/PmzchkMn76058CCNZ6jUYjEKe7du0iMTGR4eFhIiMjmZ+fJywsjC1bttDa2kpF\nRQVzc3MEBgYKjs4HHniAmZkZwTXb1NQkBN/x8XEyMjJISEggMzOThoYGGhsbaWlpEezT8fHx9PT0\nMDo6KqxTs9kstPEvNB1+E3wnyoeFzrUFScVkMgmDRQwGA9nZ2ZhMJlpbW/Hz88NmszE3N0dxcTE3\nb97kiy++QK1Wo9PpyMzMpLOzE4B169ZhMpmEkmCh/vrNb35DZWUlOp1OsK+++OKLBAUFCe3Wra2t\nrFu3juHhYXp6enjooYcExjk4OJja2lqKioo4e/YsWVlZrFmzhmPHjvGTn/xE6G+vrq5mdnaWxx9/\nnD//+c/09/fT2NhIbm4uUVFRRERE0NXVJTglFySvhdkLiYmJXL16lR/+8IdCW7ZCoaClpYVt27Yx\nOzvLpUuXGBsbE3z3ra2tAgdSW1vLzp07+fjjj1Gr1Wi1Wvbt28fY2BgPPvggYWFhVFdXU1JSgs/n\n4+zZs8jlciGt/uqrr4R7XAg2C4pHUFAQV65cwWg0IpPJKCoqQqvVMjIywsDAgCCX3bhxg+HhYXbu\n3MnVq1fp7u4mJiYGn8+HXq/H6/WSlpaGQqEA7qbjBoMBh8NBYmIiX3/9NSUlJaSnpyOTyTh69Civ\nvPIKQUFBnDt3joMHDzIzM0NwcDAKhYKUlBTa2tpwu92sX7+ev/u7vwPunugGg4GWlhaOHz9OYmIi\nRUVFaDQaxGIxr7/+OkFBQezZs4dnnnkGsVhMc3MzGzZsoLe3l8nJSZqbmwkJCcHf319oRXY4HExP\nTxMXF8f58+eFXpAFpSE2NhadTsfg4CDZ2dls3rxZsCKfP3+e0NBQgbjNzMwUujq3bNnC8PCwoLQt\nWKAHBwfp7u4WDGnFxcXs3LlT8Nj09vYSGxuLx+NhfHwcn89HcnKyYOj7JvhOZAqhoaEcPXoUpVKJ\nyWRixYoVbNy4UWjB/etf/0p/fz+Tk5M4nU4sFgsrV67E7Xbj5+dHXl4eEomE4OBgwXILcOzYMSEF\nt1qtmEwmgSwaGBigt7eXkydPcv78ef7whz9QXl6OxWJhcHCQ0tJSpqenEYvFwg8fGRkJQH9/P3v3\n7uXmzZvEx8dTXl7OwYMH8fl8HD58mJSUFGZmZtDpdLjdbr744guys7Ox2WyUlZWhVCp5++23GR8f\nZ2ZmhuzsbOAu0bggNRkMBgIDA3nkkUew2+3ClCmlUklaWprAbC8YhBa6BIeGhgRtuqCggDfeeAOb\nzYbVahWmWJWWlnLy5EleeeUVHnvsMeLj47FareTl5eF0OlGpVDQ3N9PY2MjMzAy5ubnU1tbS0dGB\ny+XCaDTS1NREXl4eERERyOVygdA0mUwYDAZ6enqYnp4mIyODoKAgAgMDmZycJCQkhP7+fpxOJwaD\ngeLiYsHDAQieErFYjNVqZdu2bfj5+dHf34/ZbGbDhg1IJBIaGxvp7e3FZrMJv0NoaCjl5eUMDg4y\nNTVFQUEBBoNBWGMXLlwQeAy3280LL7yA2+3G4XCwadMmbty4gcfjISUlhfT0dKKjo/nggw+QyWSc\nPXuWsrIyrl+/Tl1dnWA4S0pKore3l8DAQMLCwpiamiI/P5+CggIee+wxRCIRWq2WqKgoOjs7+dnP\nfsbVq1dpa2sT1lpSUhI2m41r164Jdm+NRoNKpSImJobY2FhkMpng5h0fH0cikQiTwKxWKwqFAoPB\nQF5eHlNTUwQGBhIZGSn4ORak2W+C70RQsFqtjI6OYrFYcLvdvPnmm/T29gp1mtVq5cKFC0xPTwt9\nDwus7UKPRHFxseBJWCDx1q5dyw9+8AOmpqaorq4mNzcXn89HYmKiIG9OTU2RkZFBaWkp/v7+TE9P\ns2HDBtrb2wUVw+12o9PphKaS2dlZ9u3bh8PhQCwWEx8fL0zH+eyzz7h06ZKwqF0uFy6XC6fTiUgk\norGxEavVKhBPdrtdCGILY9EWmPBDhw4JTL5EIuHYsWNcuHBBcK8t2HknJiZwOByC0UgsFjM4OIjH\n4yE9PZ09e/YQHBwsmITMZjOLFy+mpqaGr7/+mtbWVqG3IDw8XAiua9asIS4ujsbGRgICAliyZAnh\n4eFoNBpKSkoYGhpCLpcTERHBnTt3iI6OZvfu3SgUCsrKynA6nYyNjeFwODhw4AATExMCS+90OnG5\nXJw+fRqHwyGw42VlZUxPTwvE8cGDB4VpUCMjI1RWVjI7O8vU
1JQwJi4hIUEYa+b1esnJySE3N5cf\n/ehHQjoeExPDk08+ybJly1ixYgUZGRmUl5ezf/9+Ll26RFBQELm5ucTHx6NWqxkaGmLJkiXs3bsX\njUZDZGQkdXV1BAYG8vzzzwt2ZLVazerVq4UOWYVCwcTEBJGRkcL30NbWRnBwMK2trURGRuJ2u4mP\njxe6TK9evcqqVatIS0sT2uXhrrfi/fff56OPPqKzs1NQG1QqFVFRUbz99ttIJBLMZjOnTp1ibGwM\nrVaLWCwW2uJXrVpFXV2d0EL+TfCdcDS+8847ry5evFgg8xZ84wkJCXz66aekpaWhVCopKCigt7eX\ntLQ0oTNxYmKCNWvWCJJUfn4+27Zt45NPPhFkvIqKCpYuXUp1dTUZGRnMzs5y33330dTUhFgsFrTk\njo4O1qxZw8jIiGBOiomJQafTcfPmTeEkysvLY9u2bZSXlwtzHuLi4jCbzWi1Wtra2sjNzUWr1RIW\nFkZJSQk3b94kMjKSnJwcYc7j7Owso6OjZGZmcvjwYXbv3k1cXJwQYBYmOy1fvpyenh7sdju9vb0C\neSmVSgUjz+LFi0lPT+f48eNMTk4KcwX7+vq4cOGCMLVnocFLq9Vy584dgbxbIALz8/MZHBwU9Hap\nVEp4eLgwozI4OJjOzk7Cw8OFWQmDg4NIpVJhIpLFYqG2tpbJyUnUajXd3d3cf//9wuxGu92O3W5n\n2bJlWK1WoqKiuHDhAs3NzYKVOy4uThgcYzKZqK6uZs+ePXR2diKRSITgtMANDQ8Po9fr0Wg0gn9j\nwe1aVVVFamoqfX19ghvw8OHDOBwOMjMzGR8fx+12s27dOlQqFdeuXaO6ulpwakZGRgpux7y8PLxe\nL21tbZw8eZL77ruP+fl5bty4IZRvWq0Wt9tNZ2enkM0sX76c6OhofD4fS5cupbm5GblczpdffonB\nYKC5uZnW1laysrI4ceIEGRkZ2Gw2kpKSiImJEWZODA4OcvXqVZRKJdevX0cqlVJeXk5aWpqgggwM\nDDA+Pk5MTAxutxuxWIxYLKalpeUbORq/lRHv/28hEon+92/iHu7h//+46fP5Cv67i74TRGNkZCQH\nDhwQutYWDB99fX3cf//9Qq3a1dXFc889R2BgIJ999hnR0dHk5ORgNpuF6bc3b94URn4tyEj79u1D\nJBLR3t5OR0cHY2NjSKVSMjMzBdb94MGDLFq0CK1WS2JiIk6nkzt37giTmJOTk3n//ff505/+xOnT\np/n1r3+Nw+Fg9+7dXL9+nfT0dDQaDXNzcyQlJfHEE0/w9NNPY7FYSE1NFQgkj8dDaWkpV69eRaPR\noNPpuHbtGgcOHODtt9/Gz89P6INftWoVQ0NDggwXHR1NeHg41dXVpKWlAXclRZPJxOrVq7FYLCxe\nvJjy8nLgLuO+bt06SkpKBJehXC4nPj6ef/u3f+NHP/qRUJuHhIRQVFREZ2cnIyMjPProo4K9vLW1\nlYSEBN5991327dvHxMQER48eRa/Xk5eXR1tbG6GhoYJ8e+PGDQICAsjOzsbj8ZCdnS3U0HCXqE1M\nTOTatWsYDAaBGN23bx/PP/884eHhwnzC7OxsoVP2tddew+l00tTUxK5du7hz5w56vZ41a9ag0+mw\nWq34fD4aGxuJi4vjypUrzM3N8bvf/Y7XX3+d1atXMzo6yrVr19i7dy9nzpwhNzeX+vp6qqur2b17\nt8CXLEzFLiwsFIjgwcFBcnJyUCqVREREEBkZydGjR2ltbeX06dMYDAZWrlwpTK+OiIjgjTfeIC8v\nj/r6ejZv3sz4+DiXLl3iJz/5iZA99vX14fV6qaysJDc3l3feeUcoLd966y1+9rOf0dHRQWBgIAqF\ngo6ODtLT0zl69CilpaXcvHmTpqYmHnvsMZKTk/nlL3/Jz3/+c8H1GB8fT29vL7/85S+/0X78znAK\nH330kTCmWywWMzc3xyOPPEJycjI5OTls376dTz75ROjh37p1KyUlJRw5coTp6Wn6+/sJCQnh2Wef\npaioCLgbbLZv386VK1fo6upieHiY1NRU9uzZQ1FREV6vV+gx6O/vJzw8nNjYWKqqqrDb7fT09Ah+\n+uPHjwsa8unTp4mIiGD37t2YzWaysrJISUkRXJNtbW28+OKLiMViIiIiGBsbIz4+Hr1eL3gvnE4n\nUVFRWCwWSktLgbsbvLKyEplMxnPPPYfJZBIIxAUHW21tLU1NTfh8PqFLLioqirS0NGHwa1JSEsuW\nLSMmJobi4mI++OADoR3ZbrfT2NjIU089xdWrVzEajYhEIjIyMqivr0elUpGamkpLSwujo6N/M07+\nwQcfxGw2c+bMGSorKwn8f9h7z+gqr3Nt91pqC/UuLfVeUEG9oQ5IIIqFqcYY44ATx8Z2bMeO03ac\n2E5iZyQucQAbg7FNt+kgqgBJCNR7b0u9997X+n6Qd55kjG/szTn7nDH8jbPfPxJNEmu975zzeZ77\nvm5jY44ePcrCwgLZ2dnExMTQ29tLdHQ0VlZWjI+P4+DgQFpaGv39/axatUog65ubm7G2tqanp4fF\nxUUhI5cUoCtWrGD79u3cu3ePLVu28PTTT/Pqq6+io6PDunXrBAVqYWGBBw8eCGSeNCkYHh5GX18f\nc3NzANzc3AgJCeHUqVMkJSUJIrgEWXFxcUFTU1P0e6qrq8nKyuLGjRsEBQUxNDSESqWis7OTY8eO\nib7VkSNHOHXqFHv37iUyMpLLly/z8OFDlixZwttvv42Pjw8NDQ1ERUWRkJDA4uIiv/3tb7l+/brY\nMCorK1lYWMDLywtfX18A8X7/4Q9/YG5uToyWs7KyBM8yLCyMDz/8kGXLlvHyyy9TUFCAgYEBf/nL\nXxgeHhaMDB0dHbq6uh77efxBLAqmpqasX78elUqFm5sbpqamxMTEoFarqaysFG+uhKoqKioSbP11\n69bh7e0tuuCS8QYQVl4pg0EiIGVlZSGXy4WMeWBggICAAFFbampqcuHCBfz8/FAqlVRVVTE7Oytm\nvVu2bCEsLAyVSkVwcDBmZmZkZmZy9epVzM3N6ezsZHp6mpaWFhwdHcX3sre3Z2xsjH379omphb29\nvWguubu7k5iYSHV1taBVx8fHI5fLBSzUwsKC1157jfr6emFfHhgYoK6ujvT0dNzd3ens7CQnJ4eY\nmBhu3rxJcHAwp06doq6ujpqaGry9vens7MTY2Jh79+6hp6eHTCZDqVRy584d8XMDYswYFRXFsmXL\nuHr1Kl5eXoJraGJiwvDwMP39/XR3d1NTU4O+vj7z8/PC6OPv78/OnTvJy8vD3d2dsbExOjo6sLW1\nxcjICE9PTwwMDAAwNjbGyclJSJtffvlllEole/bs4euvv8bOzo64uDj6+/vJzc0lODiYVatWYWBg\ngLa2NsXFxdjY2FBVVUVVVZXgDxQUFPDNN9+wYcMGYRBSq9XExsYyMTHBrl27aGlpwcDAAD09Pdav\nX4+FhQUrV66koqK
C0NBQgoKC8PT0JC4uTkxL/Pz8+PLLL7lw4QIzMzPk5uZiZGREXV0dq1atQkND\nQ8B1Dhw4IPog3t7evPbaa4yNjfHhhx/i6elJeXk5R48eBR5xGiSz2cOHD5mcnKS6uprBwUGuX78u\nZPkbN27kwYMHVFZWYmpqyuHDh0V/Ynx8HLVaTX5+Phs3bnzs5/EH0Wg8dOjQ7wEhA3ZxcaGkpIRb\nt24RGhpKeno6cXFxGBkZUVxcTH19PSYmJrS2tjIwMEBWVhZdXV0iDMTR0ZFTp07h7e2NpqYmzs7O\nXLx4kY0bN1JQUCAoQGvXrsXGxoa6ujq0tLRITk5mZmYGpVIpzD4AiYmJ6OjoCHNWUlISKpWKnJwc\nLCwsyM3NZdOmTeLf6ujooFKpqK+v5/PPP8fZ2Rl/f39u3ryJnp4eCwsL6Orq4uvrK04T586dY3p6\nGiMjI2QymVA99vf3i7GZBA5pb2/niSeeoKCggICAAPz8/DA0NMTIyIi2tjZWrFjB3Nwcp0+fJioq\niuHhYZYtW4aXlxezs7PcvHmTuLg45ufnSUhIQEtLi++++w6VSiVGW+Pj46xatUpYfuvr68nOzsbC\nwkIoNY2MjBgYGBBjYKmJ2NzcjEKhEE3E8+fPi+aikZERZmZmzM/Pk5aWhqenJ+bm5gwPD1NYWMie\nPXuor6/nJz/5iejgV1dXC8q1pECtqanhpz/9qRhn3r9/n8bGRrZt20ZaWhpBQUHs2rWL0dFRrl+/\nzo4dO4BH04K2tjbMzc2Ry+WMjY3R2dlJSUkJMTExghhlYmKCtbU1vb295OTk4OrqiomJCdnZ2QwM\nDGBra8v58+dZtmwZfX192NnZYWZmhpOTE9nZ2RgZGYl7QaIgffjhh7i5uVFXVyce4PHxcc6cOUNP\nTw++vr6i7JGa1L29vbi4uHDq1CmWLVsmygU/Pz/09PTw8PDg2LFjaGho8Je//IW7d+8iYQikEf/6\n9evJzMyksLDwsRqNP4hF4bPPPvu9VCrk5uaSkZHB1q1b0dTUJDs7G21tbZGec/nyZZ577jk0NTWp\nrq7G1NSUqqoqKioqxDhmfHyctLQ0IVyBR2PFmZkZPDw88PLyEqq1hw8foqGhwcLCAnfv3sXT05O2\ntjZMTU1RKpWkpqYyPT1Na2sr+vr6nD17lvj4eEZGRgQQ08XFhZmZGQoLCwkICGDr1q34+PjQ0dFB\nfIz7jiMAACAASURBVHw8cXFxXLt2jfXr1+Pg4CAovWNjY1hZWdHQ0MC9e/eIjY1FQ0ODgYEBdu7c\niZ6eHsbGxnR0dNDd3S3IT319fVy6dImXXnqJoqIi3NzchBcjKiqKCxcuYGlpKXwKEsh1fHyc+vp6\nXnzxRaytrQkODha8iMjISJ555hlCQ0OprKxkcHCQgwcP4u3tzeLionhotLS08PLyYnh4mJCQEGpr\na1m7di1bt26lsbFRGLpaW1vZvXs3ExMT7Nixg6GhIS5fvoyJiQmFhYWYm5sTExPD/fv3yc/PZ/v2\n7Zw/f55XXnlFBOo0NjbS1dXF6Ogo8/PzzM7OCueso6MjXV1dFBUVoaGhwYEDB/j5z38uyE2rVq3i\n+++/Z3h4mAcPHnD69Gmam5sJCwujpKSEnJwcpqen0dHRITg4mOTkZFpaWsjJycHKyor+/n4CAwNZ\nvnw5i4uLVFdXY21tTUpKCgYGBgwMDHDx4kWefvppent7yc/PJy4ujtu3bxMVFSXuJSsrK5Ft4eHh\ngZ2dHUVFRWhqarJu3Tra2tp45plnhJFqamqKzMxM3n77berr67GxsaGzs5P4+HguXLiAqakp09PT\n+Pj4iNI1NjYWU1NTNDU1uXHjBnfu3OHJJ5/ExMQEIyMjRkdH2bp1K4cPH/4/Z1E4dOjQ7319fVGr\n1fj5+TE4OEh0dLSIYIuIiCAuLg49PT0mJyfFMd/R0VFgvt98801yc3Px9/cnLCyM/fv3C3JPSEgI\no6OjtLa2irgvCwsL2traBOVpyZIlIhDG2NiYzs5OUlJSkMvl3Lt3j4CAADo6OkhLSyMsLIzR0VEi\nIyNFFoOOjg4WFhZkZGQQExNDeXk5CwsLBAUF0d/fL6S+rq6uzM3NYWhoSHFxMTo6Oty/f1/UnU5O\nTtjY2CCXy1EoFCwsLHD58mU0NDRwcnLiwYMHIg9CalJJyrukpCT09fW5ceOGgH4sLi4K9Ly/vz8y\nmQwnJyfu379PTEwMDx8+xMDAgE2bNgkCkp6eHsHBwUxMTDA3NyeMTLq6uigUCiwsLKivr8fQ0FDE\nlunp6RETE0NRUZHoCxUXF9PV1UVaWhpOTk4i9MbV1ZWcnBw0NTXZuHEjgYGBKJVKsrKycHV1JSEh\ngY6ODpYtW8bi4iIKhQIXFxcGBwfx8PBg2bJl6OrqCixaUVERO3fupKOjQzhFb9++jaamJvb29qSl\npbF9+3by8vIwNjbGxMQER0dHhoaG6O7uFtRkCWIrKTQlt2RfXx9zc3Ps3r2b1tZW8b0PHz6Mp6cn\nWlpaoi+SkJCAQqEgLi6O69ev09jYKGzvenp6eHt7C/tzW1ubyC6pqKhAJpPR3t5OcXExJiYm/OpX\nv6Krq4uFhQUGBweJjIxES0sLfX19RkZG0NfXp6mpCW9vb6qrqxkZGaGpqQk7OztkMhk9PT3o6+uz\nZMkSvvnmGxobGx9rUfhB9BQGBwdxc3NjZGREsBEkpPXU1BQGBga88cYbvPHGGwDExsbS19eHo6Mj\nR48eJS8vj+vXr2NoaMjMzIxQb83MzJCUlERxcTH+/v6CC1BZWUlubq5gNBYXFwv59PDwsICgDg8P\nU15ezuTkJHv37hXpSgYGBuK4KMFT+vv7qa2txcvLi9bWViFaaW5uFvjz9PR0jh49ytDQkLDldnR0\nCLy5oaEhFhYWqFQqioqKyMrKEjFv8/Pz9Pf3C+SWhYUFV65cQaFQEBQURE9PD0qlkuzsbBQKBWq1\nWqQQlZeX09nZydWrV/n666/Zv38/OTk5XL58maysLNzd3ZmamhKNtLq6OpRKJSkpKfj7+4vAlYGB\nAfLy8gQdWzJHlZeXk52dTUVFBXV1dVy5coW5uTlcXV15/fXXefHFF1GpVDg6OjI6Ooq2tjZ+fn7A\nI47m8PCwMGhJvg4JFnP79m1B9Pb29sbNzY3q6mqxOy4uLhIXF4epqSl2dnZoampy+vRp3nrrLVGu\nSPeCpaUl9fX1HDhwgJGRES5fvoy9vT0BAQHCW6KjoyOUrIODg3h5ebFjxw727t2LpqYms7OzpKWl\nCUv2s88+K7Ipw8PDhTguNjaWb775Bnd3dxYXF3nppZfIyspidnZWoOWSkpJITk5mamoKTU1NbG1t\nhT1fen8TExNxcXEhMDCQ5uZmkYuhUCiorq7G0NCQ7u5uoXLcsmULAQEBaGtrs7CwILwVy5Yte+zn\n8QexKBgaGtLS0oK3tzfBwcEUFBQIt6K+vr6AgMTFxdHQ0MCNGzeYm5tDpVKR
kJAgjtlbtmzBysqK\nqakp4JEEVVdXV0SAbd68WRzhg4ODheFp165d9Pb2MjIyIsQpu3fvJjMzk4cPHzI9Pc2nn34q9OOW\nlpYsLCyQl5eHpaUl3d3dyOVyzMzMaG1tFcTgZ599Fi8vL2ZmZggKCuKVV14RTsfQ0FCUSiVTU1Os\nWrUKQLjient7uXXrFu3t7eTl5aFWq9m5cycKhQIfHx9hk3VxcWHZsmW4uLhw4MABnn76aR4+fEhL\nSwvGxsbY29uzsLBAY2MjTk5OrF69mvXr1zM9Pc2tW7eoqqpi5cqVzMzMMDU1hYODA0lJSULs0tLS\nwvj4OJWVlSI8187ODh8fH9ra2qirqxPqR5lMJlSj9vb2rFixgpdeegkrKysiIiJ4++232b17N76+\nvkxNTdHW1kZKSgoWFhZERUWJKUFsbCy3b9/GzMxM4OF1dXXZu3cvQ0NDlJaWkpeXx7fffouxsbGQ\ncyuVSjw9PZHJZPzqV7/izp077Nixg4sXLwIICImPjw/JyclERETw1VdfsWXLFkZGRvjyyy+5du0a\nRkZG7N27l46ODmpqarCzs2PFihWMj49z584dzp8/T2RkpFjEPv30UyYmJgRdXEdHh+bmZtLT0/nw\nww9Zt24dHh4e/OIXvyAxMZEHDx5w4sQJEhISgEcxfenp6dy+fVucZAD27dtHeHg4OTk5As+3dOlS\ngXwfHR1lfHycgYEBzp8/j7W1Nba2toyPj7Nlyxamp6fZvHkzarUaS0tLNm/e/NjP4w9iUZBWZz8/\nP4yMjPD19cXV1ZWsrCwCAgLo6+vj+++/p6+vDwcHB3HUvXTpEsHBwTz11FMYGhoyOjpKbm6u2L09\nPDy4fPkypaWlggfY0NCAjo4OXl5eXLp0icHBQQYGBpiZmSE6OloQlOAR8cbS0hK5XM6tW7dEPsP9\n+/cFkHV4eFjs7AsLC+IoHBkZyezsrIgE27hxI9euXaO/vx9tbW0xcWhsbBQsATMzM1paWgTWPjQ0\nlIiICJydnVEqlbS1teHt7Y2BgQFKpVIcz+/fv4+lpSXh4eEiS7Kvr4+hoSGOHj3KSy+9hLa2NqdO\nncLU1FRMbRQKBZqamqKTLpGgNm/eTGxsLD09PTg7O+Pk5ISenh6+vr5C8m1sbCxI2SMjI1hZWWFl\nZcXY2BjOzs4oFArkcrngPg4NDXHmzBngESK/traW/Px8hoeHuXnzpmjqSgErrq6uLFu2TKQ/v/ba\naywuLjI+Pv5vsmNLS0vS09OF5V6C8VpYWHDu3DmefPJJAMFJMDMzo6OjgwcPHnDt2jUmJye5efMm\nzs7OQqshEZsl38G1a9fEyDsyMhI9PT1qamoA2LZtG66urkxPT5OXl0diYiImJibo6OgI1H9ERAQd\nHR00NTWJiYaVlZUA0yQkJPDzn/+c3bt3C9Ds6Ogox48fFypWXV1dOjs7cXJyor+/nytXrmBpaYm5\nuTl79+4V5rLVq1dTUFDAxx9/LF6XmpoaDh36L6sGcf0gxEuTk5MMDAwIb35YWBjW1tYiW0BfX5/I\nyEhGR0cJDg6moaFBMP2cnJywt7cX0JW4uDhhl62srMTNzY2mpiaee+45gRI3NDTkypUr9PT0CB6D\np6enEMr8/e9/Z/v27SJ1KDw8HCcnJ7755hsAfvSjH3Hz5k2USiXOzs4EBgaKo5yNjY2QOP+rtj82\nNpaXX34Ze3t7YbG1s7PD1dVViHqMjY1paWkhNDSUkZERsrOzxc3v4OCAn58fxcXFmJubMzAwQEtL\nC/n5+SQkJLB8+XJMTU1xdnamrq5OAGNUKpXIHli7dq1gXspkMtatW0dDQwOlpaUsLCxw+vRpzpw5\nwxtvvEFPTw96enrk5OSgUCjE0dzBwUHkRUo9n9LSUiIiIujp6UGhUBAaGsrAwAAZGRmMjY0Jn0NV\nVRU9PT2EhISwZ88edHV1mZqaEgRoQMBUs7OzKSoqIiUlBZVKRXd3t0DUeXp6UlBQIDBjBgYGLCws\nkJ+fT0BAANevX8fKykpwP+ERcq+7u5sVK1YwNDRES0sL77zzDk1NTSLnUhqLS6h8KV1renqanTt3\n8re//Y233nqLK1euiP7I7du3WbJkCRYWFqSlpTE0NMSbb75JVlYWGzZsoLm5WYT2RkdH09LSgp+f\nH/fu3cPX15ePPvqI+Ph4RkdHGRgYICYmhnPnztHW1samTZtobGzE1NSUrq4udu7cycWLF0VfQToh\nX7t2DbVajZmZGbdu3cLJyYmdO3fi4+Mj+mHSyPdxrh/ESWFubg4DAwPKy8uZn5/n8OHDpKenMzo6\nykcffQSAnp4eK1eupKmpiQ8//BCVSiUYgl1dXaJTXVhYKGLjrKyssLGxwcHBgatXr7KwsCDSihQK\nBeHh4axatYqgoCBWrFgh0oj19fXJzc1lfn6eNWvWMDw8zIEDB0Q+Q21tLdPT08THx+Pq6srCwoLI\nNPz888+BRwGhp0+f5ne/+x3Hjx+npaVFjJCuXLmCi4sL9+7dw9jYmCNHjgCQnZ2NjY2NmE2npaUx\nNjaGu7s7Tk5OtLa2ilpxcXFRhIcGBwezc+dOgoODSUtLo66ujgcPHlBTU0N5ebkAjUhY+vXr17Ny\n5UoePHgg7MJNTU2Eh4fz/vvvc+/ePZydnQkKCmJ8fByZTEZHRwc3btwQEBkp7Tk4OJjOzk7u37+P\nubk5ExMTnD9/nnv37pGWlkZtbS1mZmaUl5cTHR1NREQECoUCMzMzrKysMDMzQ6lUolAoAP6trnZz\nc8PKygoHBweqqqr47rvvREiwUqnEy8uLgIAA1q9fL9yiLS0tREVFUVZWxvLly4UhytHRUZiJJPNV\nUVERCQkJWFhYMDY2xtKlS7l79y4nT57E2dmZ0NBQwfEcHBxk7969LCws4OvrKzigzs7OWFpaYmpq\nytGjR7GxsRExh8bGxgJeu2bNGmJjY/n9739PamoqTU1NtLS0YG1tjYWFhUiVTkxMBB71QI4fP86q\nVauEelLq40hl2Pnz59m0aRNr165lYWGB2tpaNm7ciL29PXNzcyITpaSkhKtXrz728/iDWBQAkpKS\nMDc3R1NTE09PT3R0dLh9+zZ6enrs3r0bHx8f7OzsOHjwIH5+fqSmpiKXy9HR0RG7bk5ODouLi6JG\nn5+fF7ZeqdewZMkSFAoFjo6OxMbGCmx5X18f5eXleHh4MD8/j6ampogAa2pqEjN5ePTAKxQKZmdn\nBVylra2Nrq4uQkJCMDExQU9Pj6CgIFJSUoiKiuLkyZMcOHCAJUuW8MQTT+Dl5cX+/ftxcHDgxz/+\nMfB/pQ0VFhaioaFBYGAgc3NzGBkZkZeXx9DQEOfPn6egoICCggL8/f1RKBS0t7eTnZ3N8ePHsbW1\nZW5ujpmZGRobG1lYWKC0tBRHR0caGxspLi4mMDBQcAknJydpb29ncnKSiYkJnJycsLKyYmJiQqQn\nOTk5MTc3J0qhjz/+mPz8fHR0dCg
rKxONQ4nl6OPjIxpkLi4uXLt2DZlMRmdnJ/Pz80LYJGHjR0ZG\nRNSfSqXizp079Pf384tf/AJ3d3d0dHTQ1tYmKioKtVqNTCbjL3/5C7q6uly4cIH79+8LXoTklJQw\n9P9K0Q4ICCArKwtdXV1++ctfipxHyURXVVWFmZkZK1euxNbWlvb2dvF/TE9PF6pCySgHj5SH2tra\nqNVqysvLmZqa4tSpUxw8eFCYn95++21cXFwEjdzQ0BB/f3/KysqIjIwUEYOASDcbGBjAysqKDz74\ngPv37wtwbXV1NZ999hm1tbUiY/K7774jNzeXvXv30traio6ODvb29iKTYvPmzaJX8TjXD2JRkHYw\na2trFhcX6e3txcjIiLm5OS5evMidO3eIiYkhIyODv/71r7z99ttClSetnvAIqjI7O8uVK1cARN6f\nFEGenp5OU1MTJSUlaGho0NHRQWlpKUFBQWJlLy8vJzExEZVKxdDQEPb29ri7uyOTycQLKzUyV61a\nRXNzM7q6uoSFhYm+RVhYGAYGBpw9e5aSkhLq6+vx9vYmICCAzMxMduzYwa1btygtLaWgoEDExj33\n3HPExcVhb29PSkoKS5cuJSgoSCxUs7OzDA0NCUCMn58fpqamNDc309LSgpubm4B7SiKkmJgYcUMu\nLi7i6+tLRUWFcNyNjo5y6NAhgoODMTAwID09XYBBGhoaRL/DxMQES0tLNDQ0WL16NW5ubtjb25OY\nmChm4ZJDr7m5mdLSUpKTk1Gr1SJEVyJAd3R0CIHP0qVLsbW15Y9//COACOXR1tZmZmaGyspKampq\nCA8PZ82aNRw9ehQnJyd6e3tJTU2lrq6OhYUFVq5cKRaoxcVF7ty5w9DQEDY2NgDcvHmTjIwMOjo6\nsLGxYXFxkevXr/PRRx8JHYehoSGOjo64urry6aefUlhYSEJCAjt37hQLolS6SiVfcXExCQkJnDx5\nkvr6elJTUwGIjo7m6NGjxMXFiQg+tVotTixjY2OCFi1pQXJzcwXOva2tjT179mBoaChiEz09Pdm9\nezcpKSmMjY1hamqKk5MT5ubmIlZw/fr1tLW18Zvf/AZ41JswNzcXJ6bHuf5bPQWZTNYCjAOLwIJa\nrQ6VyWRmwBnAGWgBtqnV6uH/7OtMTExw5swZLCwsRHKu1Ox79dVXsbKy4ssvvyQlJYWGhgZu3bpF\nf3+/wKSZmZnh7u7OpUuX+OCDD4Qu3dXVldu3bwv7b3JyMk1NTYSFhQlBkqWlJY6OjhQVFaFSqXBw\ncCA/P5/g4GCam5s5ffo0u3fvJiQkBC2tRy9XSEgIk5OTZGRkiORpBwcHwVFQKpVoaGiwZs0aHBwc\nhCJRAqBcu3ZNnGB0dXUF4v3IkSMEBwcLpaTEPlCr1YSGhjI7OyvUhRK6u7q6mqVLl4qodDc3N1Qq\nFU5OTkRERIgR67Vr16isrOSJJ54QdKfnn3+exMREMjIyREaljo4OxsbGVFVVkZmZyfXr1+nt7WVy\ncpI//OEPyGQyTp48KXo6EgJfrVYjl8v5yU9+gq6uLtnZ2SLQ5Omnn2Zubo6JiQn6+/sFlOTjjz/m\nZz/7GU5OToKW1dnZybPPPktPTw9nzpwhMTGR+/fv4+/vL/gDwcHB1NXVkZaWxo4dOwgJCWFkZETQ\nub/66iuhODUzMwMenUQ7Ozvx9fVl//79/OxnP+OZZ56hra2Ne/fuoaWlxdmzZwkMDOS1117j4sWL\n9Pf34+HhwfDwMA0NDbS3t9PU1ISWlpZA0o+PjzM9PU17ezs3btzgypUrHD16lIyMDKKioujr6+OD\nDz7A19cXuVzOkSNHSEpK4uzZs8IDIZPJWLp0Kbdu3eLNN9/km2++ESViWFgYU1NT3Lhxg+joaIKD\ng2lqasLU1JT09HSRHPbzn/+ckpIS5HI5W7du5c033yQgIICBgQG+/fZb3nrrrceeQPy/cVJIVKvV\ngf9iyfwlcEetVnsAd/756//0MjU1JTw8HE9PT1QqlTgCd3d3o1QqyczMxMnJCX19fezs7MTM+Ve/\n+hXJycmMjY0xPz8v0O9SN106/kmmmQsXLhAQEEBjYyOHDx8WAZxNTU3MzMwICXJ0dDTa2tqijHj/\n/fc5duwYZ8+eBaCsrIzAwECioqLEeEilUgm6kqWlJd7e3pibm1NRUUFYWBiRkZHippbSmyV8uNQY\n1dTUZHJyEi8vL9asWUNtbS0ZGRk4ODjg7+9PUFAQdnZ2IuuhtraW2NhYIdONi4ujtbUVQ0NDamtr\nKSoqIj09ncuXL2NkZMT69euZn5+nsbERuVzO7t27GRwcZOfOnUxPT2NoaEhgYCBVVVUEBgYyPT1N\nU1OTIGZ//PHH1NXVsWvXLtavX09UVBSDg4PU19eLrI6pqSkMDQ2Ji4vDycmJuLg4tLS0aGhowNbW\nFhcXF+bn58nMzMTKykqc4ubn54FHOoWOjg7a2tro6+sjLy9PGIDy8vLYtm0b9+7dw8fHh61btxId\nHc3s7CynT5/GyclJdNw1NDR44oknxCmysbFRMDTc3NzIyMiguLhYnDpKSkqIj48XnNCRkRGam5v5\nzW9+g5mZGX5+ftjZ2Qk1p+SDycvLIyEhgSVLlqClpUVqairHjh3DwsKCZ599lqqqKvz9/dHW1hZQ\nn9/97nci7Dg6Oho7OzsiIyNxcnISG5oUE3f27FmysrJE2TI6Oir0Gfv27aOgoEBEB8CjRVqpVGJr\na8vU1JRgQUin0ce5/r8oH1KBb/75+TfAf+nEmJ2dJTMzUxB5iouLyc3NRS6Xo1Qq+fzzz4WMWS6X\ni6BTXV1dXFxc+MlPfiIsy/n5+WLcJ+HOJAWcBMPIysrCxcWFyspKceyV8OGrVq3i3r17tLS0sGHD\nBjZs2MCvf/1rYmJiRKhofn4+N27cIDs7W+C2Dh48iL6+Pi4uLuTl5dHd3U15eTne3t58+umntLW1\n4eTkRH5+PsXFxSxbtozW1lbq6upEsIixsTHa2trU1dXh5OTE1q1b0dDQ4NSpU3z11VcMDw9TX1+P\nUqnExsaG1NRUkcPZ09PDjRs3iIiI4PLly4SHh1NaWoq+vj7+/v7CVVlVVUV/fz/GxsZiV/Tx8WFh\nYYGpqSna29txcnLi+PHjVFRUcPDgQWHUmp+fF699aWkpbW1tLFmyhDfffBNTU1OOHTuGq6sr8/Pz\nyOVy3N3d2bZtG56enhgbGwt0mL29PZs2bSIlJYXjx48L9SYgiFOdnZ10dXVha2uLs7OzAKhoaGiI\ngF6ZTMbExASlpaVoamrS09ODu7s7f/rTnwgODhYuTXhU8unr67N+/XosLS3ZuHEjbm5utLS0iFAY\niaEpkZ5u3ryJu7s7X3zxBV5eXty+fZuCggIyMzNFo/E//uM/2LRpE7Ozs5SUlNDe3s7WrVupqalh\n6dKlvPzyy0RERKClpYVKpRKY9ri4OBQKBbW1tVRXV5Obm0tkZCTnz58HYMOGDfj7+xMZGSnI
z99/\n/z23bt0iPz9f0KYl9a7UW1lcXMTJyYnTp08zMDCAgYEBgYGBeHt7P/YD/N9dFNRAukwmK5LJZD/5\n5+9Zq9Xq7n9+3gNY/+/+oUwm+4lMJiuUyWSFMzMzuLu709zczMzMDM7OzixduhSVSsVzzz3HwYMH\nefLJJ/n222+Feq+2tpZjx45RXFwsjqZ1dXWiGwvQ0tKCjY0NSqWSHTt2EB8fz9///neh/FpYWBDT\ni/Xr1zM4OEhBQQHR0dHo6uqioaHBgwcPaG9vZ2JigtraWgCBdZN2B5VKhYuLC08//TTZ2dnMzc2R\nm5srxkBOTk7CdPX888+LfApdXV08PT3FAyFlPYSEhJCdnU1lZSVr1qxhz549BAQE0NvbKzBhAQEB\n5OXl8fXXX/PHP/6RgYEBJiYmaGxsxMfHR+ROxsfHMzExwZIlSzA1NWV8fJynnnqKq1evYmJiQkND\ng9BKNDY20tPTw5NPPklDQwP29vYMDw9TVlaGubm5sIjPzc0xPz/PwsICs7Oz1NXVcerUKSwtLWlt\nbeXu3bsUFBSQkpLC0NAQR44c4cKFC6jVanR1ddm3bx/PPPMMGhoaREREYG9vL3ZeDw8PwsLCMDU1\nxcHBgS+++IJ//OMffPzxxzx48IDvvvtOeBRu3rzJ3Nwc0dHRBAUFMTAwgLW1Ndu3bxeBsVLSeEpK\nipj5S36Ec+fOER0dLTIxjY2NeeGFF6ivr6empoa1a9eyYcMGYfEeGxvjF7/4BWFhYSLstre3l+Dg\nYJ544gkhkrt37x5Hjx5l3759lJeX8+mnnzI7O4uxsTF79+5l7dq1IkhYEn3V1taysLAgGqOfffYZ\n7u7uQo149uxZEXKzsLBAU1MT2traVFZWkpyczO9//3v8/f3p7+/n9OnTwugmIf+/+uqrx36o/7uL\nQoxarQ4EUoB9Mpks7l//UP0I6/S/pSqp1epDarU6VK1Wh2poaFBeXo6dnR0ODg44Ojri6emJj48P\nJSUlnDx5ktHRUdRqNe+9957AZG3YsIHe3l5xMycmJnLu3DmxO8THxwuNfkNDAz4+PiJEQzLORERE\niLqwsrISDQ0N5ufn0dLSYnR0lPj4eMLCwgRqDB5l+ikUCvLz83n48CGFhYW0trbi5uaGkZER2tra\nJCUlMTY2RlZWFiqVimeffRYtLS20tLSEzdjJyYnu7m7m5uaAR/Xp0qVLBcff19eXxsZGHj58SFtb\nGyqVCrlcLqLfpMBdbW1tHBwccHd3F7bg6elpQkNDRVJxUVERQ0NDDA0N8e6774reg7u7u+jsz87O\nUlpayp/+9CeSkpLE9ODVV1+lsrIST09PGhoasLKyYnR0FF1dXW7evCl2MynGfnh4WPwMhw8fRl9f\nn7CwMJ566inc3NwwMDDggw8+IC4uTmgMdu3aBTway9rb24sSq7GxERsbGwICArC3tyc1NRU/Pz/0\n9fXFfWJtbU1HRwcRERFoamry6quvUldXh729vcgVvXPnDjk5OSiVSuLj43n48CGpqam0tLSgr6+P\nlpYWoaGhwpotyZ6lstLBwYG1a9dy8eJFPD09heTd0tKSkydP8vrrrwu0X0ZGBh9//LEo4WJiYjh0\n6JBgNLq5uYnwXmm0rq+vz3fffUdycjLwaHK2efNmnnrqKZqbm/nVr36FtrY2np6eNDc34+npSX9/\nP6+88govvPACFRUVmJqaUltby4MHD8T91NnZycGDB0Xf6nGu/9aioFarO//5sQ+4AIQDvTKZ+2F+\nlgAAIABJREFUzAbgnx//yxA7TU1N3nnnHSH/lY70qampYrFQq9WsWLGCxcVF3NzcWLFiBdeuXUOl\nUhESEoKHhwdHjhwhOTlZCEtycnKor69HS0tLWHXd3NzQ19dndnZWZCxKYNjOzk4GBweFQaaiooK2\ntjbeffddYd0FyM3NFXjt7u5uNDQ0uHnzJoGBgZiYmLBlyxYRTS/dzJI459KlSygUCtra2igsLMTd\n3V2cKAIDA7lz546QX584cYL5+XnOnDkjci6lUVhNTQ3FxcUiFl6KrZdGbHV1dfT39xMSEkJhYSEv\nvvgi09PTgq3Y2NiIu7s7SqUSNzc3obVwd3cnMzOTjo4OceLJzMzEy8uL+/fvY2ZmJka/c3NzrFy5\nkiVLlmBsbMzCwoJIvzY3N+fWrVtERUXh6elJUlISSqUSXV1dlEol9fX1fPnllzQ1Nf3bTrZ27Vpq\namr46KOPsLe35/XXX6ehoYF169aRkJBAUlISkZGRDA0N0dfXR2lpKd9//73I06yrqyMzMxMfHx8W\nFxfFw+Dm5kZUVBTbtm3D1taWkJAQoeiUpl1SLKGjoyOJiYkMDw+LUk8qLyVZt3QCcXFxwczMjE8+\n+YRt27bR2trKK6+8woULF9i8eTMvvfSSyJdYu3YtQ0NDfP3118K8pFAoRCkn3d/wCEkvKXfn5+dR\nKpViMd20aZPQdvT09FBSUoKbm5sgRK1ZswaAn/3sZwQFBeHt7Y2GxuM/6v+PFwWZTKYvk8kMpc+B\nZKASuAzs/udf2w1c+q++lrGxMXfv3mVycpJ3331XNNz6+/vFylhWVoaJiQnR0dHo6Ojg6enJ5OQk\n5ubmZGZmUl9fz549e8jIyODmzZvAI/HS888/j7m5Ob29vXh4eKBUKtm0aRO+vr50dXUJ08zq1av5\n9a9/TVhYmFAN+vr6Mj4+TlxcHENDQ0LmXF9fj7W1Ne3t7Xh5ebF8+XKeeeYZcax9//33+cc//sGJ\nEydYvXq1iASvra3FwcFBJDktLi7S1dUl6j2lUom2tjZlZWVER0ezbNkyodUfHBzEzs6O4eFhOjo6\nCAoKIjAwkM2bNwtg5+nTpxkfH2diYoLAwEDa29vp7u4mISGBP//5z9jY2KChocFzzz1HcnIyBw8e\nREtLi7m5OT766CPa29spLy/Hx8eH+Ph4nJ2d8fPzw9raWsisLS0tRSJ0QkKC4FlIwFTJjzA3N8ed\nO3coLy+nrKxMhPuEh4fT0tJCUFAQUVFRlJSUUFJSIub0J0+eFIg8KY5+enqa27dvCxn1xx9/LIJs\nxsfHmZqaEic/HR0dMdnR1NQUo7jh4WEuXrzIH//4R4aGhqioqECpVOLq6kpJSQm+vr5UVVUJLUFB\nQQFPP/00dnZ2jIyMCAmz5EKVkHeZmZlYWFgQHh6OhYUFfn5+/O53vxOhPe+88w4AERERFBYWsn//\nfoaGhoQI7Pz587S0tNDX18eSJUvE4rhixQqampoYHh4mPz+f69evc/ToUTZv3sytW7fw8fHh9u3b\naGhoUF1dLdyukj9FyqGQID9S5OHjXP+dk4I1kC2TycqAfCBNrVbfAD4AkmQyWQOw6p+//k+vsbEx\nVq1aRW9vryARjYyMoKenh7a2Nlu3bsXS0pKDBw8il8sxNDQU3nqlUomZmRkaGhqcOHECLy8vsfNW\nVFQIS/PU1BTHjx8XX3t4eBhra2vOnj2Lm5ubmD8
bGBjQ0NDAwsICnZ2dPP3005iamiKXy8WU4MUX\nX+T777/Hzc2NsrIyent7mZiYwMHBQcze+/v7SU1NZf/+/aSkpHDp0iWBJbexsRHHdwMDA9GrsLW1\nFVqN2dlZDA0Nyc/PR1NTE19fXzIyMmhoaMDV1RUNDQ1sbGxQqVT4+PjQ2NjI8uXLiYuLY2Zmhri4\nODw9PUXk+bJly3B0dBRWWunYHxwcjJeXFxs3bqSvr0+MC7/77jtsbW0FySoqKorf/OY36Onpsbi4\nyNTUFLdu3UKlUqGlpUVnZ6d4nSSIiPRw7Ny5kw8++ID6+npOnDgBgLe3N0qlkrGxMQwMDISe4Je/\n/KWQAtfW1hITE4OrqytdXV2cPXuWd999Fx8fH+rr66murhYjWy0tLXJycrCxsWH9+vVC7CMpBM3N\nzXn99dextbUlJycHPz8/6urqOH78OEVFRbS1tbFq1Sqmp6cpKChAoVDQ1dXFkSNHqKurIyIigpMn\nT5KVlYWHh4e4FwwMDPDx8eHVV1+loKCAvr4+tm3bhrm5OS+//DKVlZUkJSVhampKWVkZa9asISQk\nRBCc4+Pjsbe3F7J6KfehoqKChIQEjI2NGRoaYnFxEQMDA4qKitDR0RFWfClJzMTEhOXLl2NsbMzY\n2Bhvv/22SN7u6Oj4v4V4/x+a8/9c/3P9/+f6P4fm7OjoyPbt24X3vKioiNTUVLKzs3FwcGBhYQFr\na2uamppYtWoVt2/f5sknn6SmpkYQeoyMjAgODqalpYWIiAgiIiJ45513RKOmtraWoaEhdHR0BJ+g\nu7tblCeSDDg6OprCwkKhPKuursbW1pbOzk6io6MJCAjg+PHjLF26lMuXL4vdRVLizc3NsXnzZsrK\nylCpVPj5+dHQ0EB/f7+YJUt276tXrwoDUXx8PM8//7zIWZyfn8fW1hZDQ0PUajWnT58WmZMSg9LE\nxEQAQ86cOSO8AAUFBeTn5wtLODwynR04cICkpCShxtuyZQu3b98mNDSUgoICQQp+6qmnKC4uxsDA\ngMXFRU6cOEFYWBhWVlasXr2aY8eOsW7dOvLy8sTsXurQX7p06d8SvKWuOSBKuKamJsbHx7G2fjSY\nkmrjjRs3cvbsWZqamujv7yc4OJjCwkIAbt26JfpO0jiuuroaKysrRkZGKC8vx8nJSYTgwCORWUtL\nC7/97W+prKwU8unw8HARNLxixQra29sFO3FmZoYXXnhBMA0/+eQTduzYgaurK01NTfj6+rJy5Urq\n6up4/vnnuXv3rugrSZOs5cuXU1hYyMzMDCEhIdTX14upmdSrWVhYEHmTCoUCmUwm8ihfeeUVfvvb\n32Jubi4Eb729vURERHDz5k3s7e0ZGRlBoVAwNTUlGoyurq6oVCo0NTVpb28X07H5+XlaWlo4d+7c\nYz2PPwiZs6amJkZGRnh7e6OtrU1sbCx5eXlMTU1hZmaGoaEh7e3tREdHc+7cOezs7JDL5VRWVgKI\nLvng4CByuVwQcT08PDh8+DANDQ0YGRkJmm5paSl2dnY4OzvT0NDA4uIin332GampqTQ2NnLixAkU\nCgV1dXVkZ2fz8OFDOjo6yM/PBx6JrYaHhzExMcHCwoLs7GyMjY0xMzPD0tKS6elpGhsbWb16NZOT\nk3h4eAg0W2JiIqOjo9y7d08YrCQozLPPPouvr68I8/jyyy+5evUqarUaBwcH7t+/z3vvvce1a9fI\ny8ujtbWV4uJi8vLyxLFYmu3r6upSUFDAsWPHMDMzY3h4GJlMhoODAwYGBlhbW1NfX4+fnx8uLi40\nNDQQHBzM5s2buXDhguA99vX18fLLL2NmZoabmxtnzpzBxcWFCxcu0N/fj6urK9999x1TU1PCsyIZ\nlj744ANCQ0Px8vLiyy+/JD8/n5MnTzI0NERUVJQIcL17966QpEvagcHBQY4dO4aLiwvr1q3jmWee\noaSkhEOHDmFmZsYXX3xBXl4e6enpFBcXs2rVKpFuJWHjq6urhbnqiy++ECIlSU5cXl7Ot99+y4kT\nJ/j+++8pKCggPT2d999/n8bGRp577jlKSkqYm5sT2obOzk5eeOEFIUdWqVT4+/vj5ubG4uKiSIKK\niopiamqK+fl5YmJiqK+vR6FQiFDggIAAYasODw8XD7I0ntbS0sLExARnZ2fRI2hra8Pe3l7wIuVy\nuegJbdiwgfr6ekpLSxkfHxe5qbm5uYyPjxMfH//4z+MPAcf2t7/97ffR0dEMDQ2J6Cwpcjs3N5eA\ngABMTEzIyclBLpcTHh6OtbW16HZPTk6iUqkwMzMjLCyM48ePk56eTmBgoMC7eXp6cujQIWH8WbZs\nGVZWVlRXV6Onp4eBgQGHDh1icXGRyMhIzM3NuXDhAra2tri5uQlIbF5enmiKbd++nRs3bhAfHy++\nx/z8PBoaGoyMjKCpqUl4eDgPHz5kdHSUkZER5ubmCAoKore3F09PT5YsWcLY2Bg3b94UCcwqlQpA\nZFVISVDFxcXs2bMHNzc33NzcMDQ0xMrKitraWoKCgnB3dyc3N5e5uTlsbGzEmHHp0qVcvHiRn/70\np8KbII3gXFxcKC0tpb6+nvHxcTEitbCwEEag+fl5PDw8mJ2dRU9Pj6KiIrEQt7S0EBsbK5R1Ut6n\ns7MzNjY2gpDl6+srFsGgoCCKi4tZunSpAM6Ojo5y8eJFFAoFhoaGmJiYEBkZiVwuF/kNFhYW+Pv7\nC0Wm5Khsbm7m4MGDNDY24ujoiJmZGYODg6xYsYLS0lKKiopwdHQUC5QkwZ6dnaW9vZ3U1FTc3d2x\ntrZmZmaGP//5z8zNzQlKtSTfrqmpYWJigqSkJMrLy8nLy2P37t1kZGQgl8sZGBhAX19fINVra2sp\nLy//tyaoXC5namqKuro6urq6OH/+PH/7299ITU2lt7eXzs5OMjMzcXZ2Jjo6mqVLl3Lp0iWUSiU1\nNTUiY/JfG+cmJiZiIRocHBT3lISQk6joxcXFj4Vj+0GUD1LktnR07+rqore3F2tra0xNTWltbcXV\n1ZXQ0FCOHDkiKDP+/v6MjIxgYGBAa2sr/v7+lJSUiF1n06ZNIgdgYGCAn/70p2IcNzc3x9GjR/H2\n9hZvrIuLCzExMeTn59PS0iIoR2q1GmNjY9HAXL16NV1dXZSUlBAREUFRURFmZmYiyNXFxYXly5fT\n3d0tds3+/n5sbW0xNjamtLRUjIkk1iQ8gpzU1NSgVCoJDg7G0dGR5ORkurq6WFxcpL6+nvv376Oh\nocHy5ctpbW0VOYUymQxHR0dMTU1FxmVZWRm6uroiZqy9vR2VSoWOjg42NjbU1taSk5MjoKEDAwOM\njIwwMDAgsjOfeuopPDw88Pf3p7Ozk8bGRjZu3EhRURF1dXXs27ePb7/9Fn9/f9577z127dpFcXEx\n1tbWqFQq0ZiTQDZRUVG4uLggl8uZnJyks7OT8PBwioqKgEcNQZlMhpmZmUDmGRgYUFxcjL6+Pps3\nb8bHx4
e+vj6ysrJ4/fXXqampwd/fn++//17M+999910yMzNZvnw5hw4dYvfu3dTX19PV1UVUVBSH\nDx8mISGBnJwcAd7p6enBwcGB5ORkQe8+cOAAHR0dWFlZ4eHhQUpKCs3NzYyMjACPOA0WFhY4Ozuj\nqakpFtWYmBh8fHwoKipCqVRy8eJFsfi5urqK0Jk33niDiYkJzMzM8PX1xdfXlz/84Q/09PTQ2trK\nhQsXhGBMAthKOSIPHz4Ueo3AwEBycnJwdnZGLpdTX19PUlKSgBn7+/s/9vP4g1gUpBSmO3fuCDu0\nm5sb/f39/P3vf+eNN97A3Nyc0NBQzp07J/BnJ0+eZOPGjWhpafH1119z584d/P39RbCIUqkkIyMD\nY2NjQRianZ0VKdfe3t6UlJSwa9cubt68Kbrpk5OTYqJRUlLCvn37iI6OFrp0CdOVm5srMi+Lioro\n6+sTKsHVq1czOjrKlStX6O3tZeXKlTg4OCCTyWhtbaWyshK1Wk1ERISQzC5fvpyqqirCwsKor69H\nV1eX/Px8IiMjuXv3Llu2bEFLSwtra2sBU5FOVzY2NigUCq5du4apqSmOjo6CDyntKPv37ycgIIBd\nu3YxMjLC1NQUzz33nKg55+bm+Pbbb/Hz8yM2NhZzc3PBZ9TX12dycpLY2Fhee+01/uM//oO0tDQ+\n+eQTkpOTKSsrIzg4GFdXV3bt2kVHRwd1dXWsXr0alUrFyZMnhXvU3NycpUuXMj09LezKko5DU1Pz\n3x7Q4uJiqqurefLJJ9HR0aGxsVGUBZaWlnz99df09vayatUqkpKS8PDwECNcCZcPj/QhEkatsbFR\nuGwDAgKEstXPz4/5+Xk8PT0pKirC19dXRN07ODgIXJuUrQGPnLiVlZVcv36dlJQUgdCXUsf9/f0F\nWObWrVsCLqNQKDAwMKCvr08wNb28vETY8MqVK+np6SE8PJySkhIRS2BnZyf4DxL9WwLdeHl5oaur\ni1wuZ9myZfT09KBSqYQH4nGvH0RPQVL5SbHhgYGBjI6O0tvby1//+leB11Kr1XR3d6NWq2lvbxfz\n6RMnTgj4qATBAIQxZWpqCl9fX/r6+hgcHOTChQsoFAoRaXbs2DEcHByEHl0Sq0iS4a+++oqqqirR\n4NuwYQPGxsZER0cTEhKCXC7H1NQUW1tb9u3bh7u7O/r6+lRVVeHs7ExKSgqurq4MDQ1RU1ODh4cH\nLi4u2NraYmZmJsRW0u5ZVVUluAZ+fn709fURGBiIjo6OCKdJS0tDoVCwbt06oQvIz88nLCyMzMxM\nsbPeuHGDl19+ma1bt7Jjxw48PDz46quvKC8vp7GxkU8++YS33noLQGD0r1y5wiuvvMLg4KDojQwM\nDIgUqZaWFjw9PXF0dCQ4OJi8vDx6enrIyMjA399fqCpnZmYoLS0lOzsbQ0ND6urqaGtro7W1ldbW\nVmZmZrC3t+fhw4e4ubkBj2A6TU1NdHV1CTaCnZ0ddnZ2JCYm8sILL6CpqcmGDRtoa2tjcHCQkZER\nKioqKC8vR09PT9C5Je0EPHLiGhkZceLECaytrYmKimJubo6UlBQCAwMZGBigoaEBAwMDpqenMTc3\np7GxkZiYGEEWT05OFgualZUV8CiBXDoVnD17lrm5OYFok2jciYmJItFaShHr7e3Fzc2NqakpSktL\nGR0dJTs7W5xGPT09GR8fp6Kigq6uLurr65menhZS9+HhYbq6uuju7iY7O5vq6mqys7OFPkPicWhp\naaGnpycIVI9z/SAWBR0dHbS0tPD09GR0dJS8vDxUKhU/+tGPWL58OW5ubmhoaNDT08P169e5fPky\nQ0NDREZG8vDhQwIDAwV81NraWkhbnZycRObi6OgoExMTvPfee9y/fx8HBwd0dXXFTijV+FIG4LZt\n2wgJCRE1m4ODg1i0Ll++LGpsBwcHkYSUmJgoWI+urq6sW7eO4eFh6urq6OjoENOJ8+fP09/fj6mp\nKffu3RMOQWkenZmZyczMDNnZ2VRVVdHY2IiOjo7gO9rY2DAxMUFeXh4PHjxArVbT0NBAXl4eurq6\nhIaGkpqaSnR0NGq1mmeeeYb6+npiYmJEEEpYWBh6enoig6K/v1909q2srNi1axeBgYHI5XKxK6an\np6Otrc1f//pXDh48SE1NDYaGhvT391NYWEhycrKQYEu1ra2tLXfu3GH16tVs375dhKVIjTIptERq\n3FVUVACPGo5yuZw///nP7Nmzh5qaGlFGDQ8PU1FRwY9+9CPefPNNEhISxGvy6aefoqOjw8zMDK2t\nrUKFamBgQH5+Pr6+vvj4+HD37l0iIyOZnp5meHiYyclJwbQsLi7mwIEDZGVlUVJSwv79+zl27BiD\ng4NMT09z5MgRQcuqqqoSm84777wjTo1ff/21IGOXlJQAj+Cx/yo7v337tlisrl+/TlhYmHgdysrK\nMDAwQFNTU5R+VVVV2NraYmlpKVicUiPR0dERBwcHxsfHycvLo7y8nEOHDjEyMoKrq6twiz7O9YMo\nH5YsWSLeoKVLl+Ln58fhw4eRy+U0NzeLgBRTU1PRiHzw4AGvv/66iDqXRpFlZWVs2bKFAwcO0NLS\nwgsvvEBrayvnz58nJyeH1atX4+DggI6ODkVFRbi6uqKtrc369eu5cuUKnZ2d6OjoiCh0bW1tISqS\nJLNPPfUUlZWVIvvS1NRUKAv7+/vR19enu7ubyclJGhsbiYiIYGpqiomJCVxdXcnNzcXKykqkAEn1\nqfTGBQcHY2FhwY9//GNGRkY4ffo06enpeHl5kZKSQmlpKZGRkSIXYWpqir179+Ll5cXnn3+Ora2t\nmLj8K3q+pKRE8A8UCgVlZWUUFhbyzjvv8NZbb2FgYICfnx+//OUv6ezspKWlhfv374txb2xsLGlp\nadjY2KCjoyN6GJcuXeLHP/4xjo6O/4u994yO+jrbvX+jmZE0o957Rb03JBASIFQAUcQD2NhgMLZx\njRPbyUpsJ06C48QlcU2cADYONjbNNr0IgYQKqCLUe0F9NKiNRr3rfCCzn+R91zrhlHVen/U++wti\n0JK0xPz33vd9X9fv4vTp01y5cgVPT0+Sk5M5ePAg4eHhrFy5kurqalxdXZHJZMJvMj4+Tnp6Oi+9\n9BJwH17r6uqKlZWVmIrogLW6wNXg4GBhSzczMyM6OprKykohPvvb3/6Gnp4eExMTQsmnr6+PsbEx\nycnJgpB95MgRenp62LBhA88884z4vQYHB7Nq1Sry8/NJTU0VRiepVCqUmrt27aK0tJRly5ZRUlLC\ns88+S2VlJefPn+f48eNkZ2czMjKCjY0N/v7+SCQSZDIZiYmJpKen4+/vj5WVlZiA/elPf2JyclJs\nijrZtpGREYmJiSwsLGBtbU1HRwe9vb3ExcXR1taGkZER7e3t5Ofn8+KLL9Lc3IyTk5NI7Dpz5gyR\nkZH/57wP/7uWroOsw5ZrNBq2b99OVlaWyPzTPTzV1dWUlpYyODh
[binary payload omitted: base64-encoded image data from the diff, not reproducible as text]
uXRYWFsRGbmFhwfz8vECyR0REkJCQgKmpqTCWOTo6Ag9j2y9dukRRUREzMzNi/Ozv7y/yB+7c\nuQOAXC4XwaxDQ0PIZDLWr19PZGQk4+PjqNVqduzYQWtrK/fu3eOf//mfRUy6lO8ol8t58803/13e\nYWlpKefOnWPLli0kJiZiYGCAWq3m8OHDIp5ufHycqqoq9uzZQ1NTE1FRUWzYsEHwLry9vRkeHmbl\nypU4OTkBD2XOra2thIeHc+nSJQICAsjLyxMYRGdnZyIjIwX74sCBA8IINzAwgJOTk0jDPnXqlGhm\nP8r6QWwKUu0YGBjIkiVLxMRAq9WSlpaGi4sL1tbWVFVViW6qVqslPDycM2fO0NDQQHNzMyUlJYyO\njopAiZmZGVHvxcXFkZaWJghEFhYWbNmyBTMzM+rq6jh+/Dg1NTXExcURGxtLSEiImFjMz88zNjYm\n7L1qtRqtVsu1a9coLy/Hzc2N4OBgGhoasLa25v79+2g0GpqamkQizsqVK4mPj+fBgweYmpqyd+9e\nenp62LhxowjrvHLlCsuWLRNp0E5OTsKctWXLFuFcVCqV2NjY8ODBA5ycnEhNTcXMzIxdu3bh4OBA\nd3c3tra2xMbG0tvbi52dHQMDA2J0JhmXLly4IJx70hxfo9GQnp4uwDbx8fGsWLGCqKgoEZF25swZ\nEVtuZGREaGgofn5+6HQ63n33XRHzLtmr4WFZUFNTw9TUFHZ2dvj7+5OSkgJAeXk5W7ZsAeCjjz5i\ndHQUJycnmpqaaGxsBMDb2xuNRsPJkycFhk1iYJqamhIWFsb+/fsJDw/H1dWV5uZmEVcHD8e9bm5u\nxMXF0d3dTW9vr0hv2rZtG6ampszMzBAWFoaDgwNeXl4YGBhQUVHBvXv3ePDgAV1dXeTm5lJZWSlU\ns/CQ0WBjY4NOp0OtVmNpacnPfvYzrKyssLCwwNvbGyMjI1xdXRkdHaW9vZ3BwUFu376NjY0Nqamp\nhISEsHXrViEyWrlyJffv32d4eJjp6WlOnDhBRkaGmIJMTk4SEhJCUVER58+f51e/+hXe3t7Y2toK\nWpSE2vP29v7f76Rgamoqgjbi4+N54YUXWLduHQYGBoJwExQUxPj4OOvWreOJJ55gYGCAW7duiZ0+\nKiqK6OhoGhsbBXW6vLycVatWYWFhgaOjIx4eHgQFBREWFoazszO9vQ8zZaXTxZkzZ0Tct8SzNDEx\n4Sc/+Qnd3d0iEFay0lpZWWFkZIS3t7egSQUEBGBraytuZumYKKnzfHx8BO0oICAAa2troY7bvXs3\nJSUlVFZW8tprrzE4OIherxdadwMDA5ycnDAwMGBqaorw8HC0Wi2rV68W8fNSCnRXVxe2trZi7OXi\n4iIyKwMCAmhsbGRubo7Z2VlmZ2f/L/beM7rJM13bPuRu2ZaL3LuNe+8FjOlgCKYGCMSQkEKYSd50\ndiYkM8MkmUx6MqQRJkAooST03nEBQ4x7xR1b7kVyl9yk9wfR/c28a+097B/fWtlr7WctLdsytoyk\n537u67rO8zixsrJi3bp1wAOTj4GBAQsXLuTbb7+ls7OToqIipk2bJvQalZWVIoQ2MjKSnJwcZDIZ\nYWFh/PTTT3R2dvL2229TWlqKh4cHc+fOZf369WRmZqLT6UQGZG1tLaampmK35OrqytDQkGAcTkxM\nUFJSwokTJ2htbeX69es0Nzfj5+fHypUrWbp0KRYWFoLBqFAohAciOjpakIz0iPibN29ibW0tUPhe\nXl6cP3+ezs5OamtrGR8fJzo6mvDwcBwcHETTNDU1FScnJwwNDdmwYYMY8U1MTKBWq/n555+ZNm2a\nULp6eXmxatUq1q9fz+HDh0XTt7u7G0tLS5qamrh9+zaff/45169fR6PRcObMGW7dugXAli1b2Lx5\nMzKZjJaWFtavX8/9+/dZvHixuIAGBgby/PPPs3HjRjZv3izI2jY2NkxOTopE6t7eXgEeepjjN6Fo\n/NOf/rQtPDxczOVLS0sZGBjg/v37wk146dIlysrK2LRpEwYGBqxduxaJRIKJiQlXr14lKCiI6upq\nEfV969YtEhMTmTVrloh227NnD3fu3OHrr78WkM558+ZRXFxMdXW14Pv7+PgIq7GtrS2zZs1idHQU\nW1tbzpw5w5o1a5gyZQoeHh4i/MXExISoqCjs7OwEwNTNzY38/HyuXbtGcnIy5ubmdHR0YG1tjZOT\nEw4ODoyPj9PX18fx48dJSkoS/gAHBwchndaXD6dPn6anpwcDAwMkEgkDAwOcP38eqVRKR0cH77//\nPqGhoTQ3NxMZGSmu5ENDQ8KXr9Pp0Gg0ODk5CaZEa2srSqWSpqYmkpOT8fLyoqCggFu3bhEWFoax\nsTFpaWmCgaCvd8vLy5k+fbpoyKrVapKTk7G3t2f27NkcOXJEsAj7+vqEV0EfdKuPxMvKyhJ2dv02\neOnSpVhZWdHS0oKZmRlr1qwhODhYoN79/PwwMzNjYmKCxMREDhw4wOOPPy7Q+hqNhsLCQiYnJ7ly\n5QpbtmzBzMwMOzs7gd/78ccf6enpwcnJicDAQHQ6Ha2trZibm+Pn54ezszMFBQUEBwdTVFRET0+P\n0JHU1dWRnZ2Ni4sLjo6OODo6sm/fPpKTk4VUX6PRkJmZKZy2evUlQGtrK6tWreKJJ56gpqaG3//+\n9zg5OZGRkcE333zDpk2buHjxIo888ojganh7e3Pz5k3a2tqQSCTk5eXx008//UvsoV5388svvxAV\nFcWtW7coKSlhzZo1HDly5H+OotHExIQZM2YwNDTEzz//zIULFwTdd/ny5YyNjQmt+8cff8zevXu5\nfPkyzc3NYnykd5QplUqR+ahn4V28eBE/Pz/q6uoYHx8nKCiI9vZ2jIyMaGpq4tKlS2JLffjwYTIz\nMzl27Jh4TIVCIRYBgEcffZT29nZsbGxoaWkRcuGxsTGam5tRKBQcO3aMKVOmiMVBL6+1sbFBoVBw\n48YNurq6yMvLE0Edubm5TExMYG5uTk5OjgCXVFVVCcHRvHnz8PPzw9TUVCRo37t3j4GBAdasWSM4\nkw0NDaSnp4tA0/DwcNF/0XfpN23ahLW1NVu3bkWn01FVVUVDQwODg4P87ne/IykpiaamJlQqFU1N\nTYyMjFBRUYFCoRC7AX0+pr7+1XMucnJy2LNnD1OnTmXdunUiAEWvTo2NjWXatGnU1tYSERHBsmXL\nAAQ858KFCxgaGnL37l0cHR0FnqyiooIDBw6QmZnJ999/L0rOiIgIsWvYv38/AwMDKJVK0c03MDBg\nZGREJJwfOXKE6upqCgsLkclktLa20tXVJSYdAwMDwIOeysmTJwGEndvf31+UNfpkKn3epJeXF5GR\nkZw8eZJXXnlFTC727dsnIu+cnZ0FKPiTTz7h448/5sMPPyQyMlIg/5qbm3n00UdRKpXMmjVLMCXP\nnz9PT08POTk5gvgllUqJjIzEzMxMNJLnzJlDX18fsbGxbN68
+b/lffhNLAp6wk5YWBgzZszggw8+\nwMjIiJdffhmZTMbMmTMZGxsTM1d9M8/V1RW5XE58fLy4Wu3Zs4dTp04BsH79eiorK/Hx8WHHjh3Y\n2NigVCqxsbHBw8OD4OBgSkpKWLhwobjC6Wt1FxcXUQIEBwezevVqIeW9ffu2MKOYmJgglUqJiYlB\npVLR09PDZ599Jra0XV1dgkA0OTnJxMQEd+/epb6+nsrKSiIjI4UewNLSkrlz54pkoYqKCnGF0U8S\n9IpPPYEnKSmJ4OBgMjIyCA0NJSMjQ/AXPvvsM1JSUnj//fcZGBhgxowZIoHb0tKSxMREYfjRY94X\nLlxIaWmpyJyYOXMmw8PDFBUVCeNZV1cXAQEBTJkyha+//pr29na8vLzw9fWluroad3d3Tp06xQ8/\n/ICjoyNOTk6ChVlQUEBQUBAmJibcu3eP0NDQfwGA3LhxAysrK0JDQ7G3t8fFxUVoPvRWamdnZ5Ys\nWSLKwqtXr9LY2EhkZCTwQC5sbGwswmzhQTO7ra1NMDgMDQ0JDw/H0tISU1NTZDKZQNr39fVx6tQp\nXF1dxcKl0WgIDw8XnIQ//OEPwIP4uaSkJMLDw+nr6xPNSAMDA2GOcnFxEWFD+t6KkZERDQ0NREZG\n8t133+Hh4UFVVRUzZswAEKY5fTDQrl27CAoK4vnnnycyMlI0GEdHR7l27Rp79uxh3rx5zJgxgzlz\n5vDkk0/S3t6OUqkkNzeXrKyshz4ffxPlw+eff75tcnJSjOr27t1LXl4eY2NjJCcnU1payp49e8jK\nyiIkJIQ5c+YwMTEh6my9bXn27Nl0dXWxYMECfvjhBxGokp6ejpeXl+Aw6I1Le/bsoaWlRUiB4+Pj\ncXBwICoqirCwMI4fPy4Iu+Pj42RlZdHT08Pjjz9Of3+/8FDcu3dPXCHDwsJYsWIFq1ev5saNG5SW\nljI0NMTVq1cZHR0VjazR0VHa29sJCAj4lzShmpoasrOzxfbZ3d2dbdu2Cax8ZWUlR44cQaPRcP36\ndczNzRkdHcXAwICpU6cikUjEm97Pz08AUn19fcnPz0cqlXL79m3i4+MJCQlBp9PxzDPPsGbNGmbN\nmoWTkxPu7u78x3/8h9Dtq9Vqpk2bxt69e+nv76eqqgpfX1/kcjlhYWG4uroyb948+vv7WbRoEWq1\nGoVCQWdnp9hl9fb2YmNjw5UrV5g9ezb19fUUFRXR0dGBoaEhubm53Llzh7i4OIFZ1zMz1Wo1ERER\n5ObmCn2/Xng1MTFBYGAgwcHBgrAll8vp6Oigvb1dPPd6IrK+FJLJZCxevJjY2Fhqa2vx9fVlZGRE\niN3S0tIwNzcX1CKlUom9vT1ZWVlMnz6d06dPc/v2bdavXy/KRYlEwrZt2/j73/9OVFSUMIzFxcUJ\nNoO+rNE3c6OjoykvL2fv3r0ijbyoqIi2tjb8/PwICwsTzVkvLy8xArewsECtVvPcc89RVFREd3c3\nO3fu5P79+wI1PzY2homJCbGxsdy4cYP79+//zykf9B57PajD1NRU+MH7+/vJysoSTZ+AgACxXdKb\nXmJiYoQ6EB6IgOABHTk1NZWjR4/S29uLubk5xsbGlJSUYGBgQEZGBv39/XzzzTecP3+evLw8+vr6\nGB8fx9bWlm3bthEcHEx6ejoRERE8//zzABQUFGBmZsalS5fw8PAQicH6puLg4KBQuOkxZpGRkbi5\nuVFaWsqtW7cEjXliYoLx8XHgQf6FRCJhdHSU1157jfb2dm7dukVycjIrVqwgLS2Nzs5OEXyjVqvF\nFbC1tZUjR47Q3d1NW1sboaGh1NTUMG3aNNauXcv4+DheXl4UFRURGxtLVVUVly9fFrsqOzs7du/e\nTUdHB21tbbi7u9Pf309QUBDj4+OcPXuW9PR0dDodvr6+REVFodFoBHH5zJkzDA4OUlNTg06n4+23\n3xb9GalUyvz585k+fTr+/v40NzdTX1/PG2+8QV9fn1DnAcTFxfHuu+8yOjpKXFwcWq2Wjo4OkpKS\niI2NZfHixWzZsgUDAwNhkrO0tCQ7O1v4Ebq6unBzc8Pa2lrkPsybN0/sPvReg2PHjomsiyNHjpCf\nny8Ce/RpWv/xH/9BRkYGSqVS8C1NTU157rkZh77DAAAgAElEQVTnAOjt7cXa2prTp08L27W/vz9O\nTk6kpKSIJnpra6twyfr6+mJgYICHhwfZ2dl8//33LFq0SPQe4EE4cmxsLCqVimPHjuHm5iaCi/Vl\nan9/PwcPHkShUIgdpJ+fHx4eHmLapM9hffnllx/6fPxN7BTee++9bU5OTly7dg0HBwfy8vJIS0sT\nQg4zMzO8vb0FBLWzs5PKykpOnjyJUqnEz88PmUxGXl4epqamaLVaMjMzefXVV8nJycHS0lLw8kdG\nRpDJZOKx9f51vXOxsLAQIyMjEbCij4Fzc3MjISGBgwcPsnbtWhH0otFohH9enww8MDAgroxhYWGs\nXr1azIm7uroYGRmhr6+PRYsWodFoGB0d5cSJE0gkEiE8mjNnDkuWLMHd3V1IYA0NDfnll1/o6uoS\nOxOlUilqeHiwTdbpdDg6OhIREUFzczPFxcVYW1uTnZ2NmZkZSqWSiYkJsROQyWQ0NjYK8ZH+Kq4n\nKw0NDQm81+TkJN7e3jQ3N2NiYkJXVxe7du1Cp9PR1NSEUqkkJCSEq1evMn36dKqqqrh06RJTp05F\no9FQXl4uYutKSkpoa2sTDtYbN26wZs0a7O3tyczMFJkGw8PDODg4kJaWxsmTJwkMDMTAwICAgACG\nhob44osvaGxsJCsrS7y2p0+fxsnJiby8PHJyckhPT2dyclKUqp6enjQ0NGBqaoqRkRGLFi3C2dlZ\nTHWuXLlCc3MzTz31FN999x12dnYiTUz/fBw+fFikXHt7e6PT6YiLiyMgIIBjx47R2dnJqlWraG5u\n5s6dO2zYsIG4uDiMjIwoKiri3LlzzJw5Ey8vL/r7+0lISGDXrl3U1tbyxBNPEBAQQHV1NQ0NDXh5\neXHw4EH6+vrExVO/k9SzKxQKBaamppw6dQqFQsG0adPIyMjg3LlzODg48NNPPz3UTuE3sSh8/fXX\n2+bOnYu7uzuXLl0iPDxcBGgCbN26lVu3bhEfH091dbXQpY+NjREVFYVWq6WxsVGsoCMjIxQVFQke\n48KFCwkJCeHgwYMADA8PiyRqY2NjpkyZgr+/v1ApajQafvrpJ8bGxggICCAyMhJ7e3t6enq4du0a\nf/3rX5FIJGg0GgEbmTJlCs3NzRw6dAg7OzuysrK4efMm8fHxTE5Oije2lZUVU6ZMQSqV4uXlRWlp\nKYaGhpw+fZoPPviA3Nxc0tPTqa+vx9HREbVaTW1tLTdv3iQ3N5cFCxZgampKeno6arWauXPnkp6e\nLhYX/YTBwMCA7OxsOjs7KSsrY/78+Tg7O5OTk4O3tzfW1tYiVzMwMFA8L3oAKzxwg05OTvLXv/6V\nVatWCZWjoaEhY2N
jxMbGEhgYSEBAgDgppVIpJiYmpKSksHfvXpydnYmKikKn07F3716Ki4spLS0l\nJSUFIyMjTE1NRaDsmTNnCA4OxsXFhaioKAwNDbl8+bKIW29sbGTVqlWCz5mTk8OyZcv45ZdfiI2N\nFT0XPfMyLi6OwcFBLly4QEZGBjdu3KCjo4O4uDgMDAyoqqqio6OD5ORkqqqqRP7FhQsXhLhIH0Cj\nUChISkpCLpcTFxdHe3s7J0+eZMuWLdTX16NUKomMjKSxsZHNmzcjlUqpq6sjMzOT5uZmXn31VQYG\nBnjrrbdQKBQkJiYKvHx4eDiHDx9mfHyciYkJiouLcXBw4M6dOwQFBZGWlsazzz7L7Nmz0Wq1QtNR\nW1srLl7j4+O4uLhQU1NDUFAQ3d3dQrRWX1/Prl276Ojo+J9TPvT09Ij/jJ7Nt3btWpqamkSMmX4r\nqK/vCgoKsLKyQqPRCHKSSqUCEM0aAwMDJicnaWtrY9++faxevZr+/n5UKhVjY2PMnz+fRYsWMTw8\nzP79+2loaBARZL6+vjz//PNMmTKF/v5+6urqRIaASqUSwTIhISFYWFgwOjpKSEgIqampnDt3jsHB\nQaytrUU6lVwuZ3BwUAhtPDw8mJycZM6cOUyZMgVANNMyMzOJjIzEwcFBzNdtbW2ZM2cOAwMDGBkZ\ncevWLVJTUwVdqaWlheTkZHp7e2lubsba2hq5XM7k5CSvvfYaUqmUU6dOsWDBAhITE3F3d8fX1xep\nVCpw4X19fVy6dAmpVMqzzz5LeHi4iMe7cOECAwMDyGQyzM3N0Wq1dHV18dVXXwl8e0BAAOvXr6eh\noYEvv/ySvr4+BgYGmJycFETu/Px8Vq5ciampKQqFArlcTldXlwCsKpVKsrKyhIJ09erVNDQ0cPny\nZRQKBatXr+b06dOCeVFUVISXlxfe3t7Mnj0bS0tL4uLisLOz4/jx49TW1gIPyqN33nmHvr4+RkZG\nBLDX1dUVqVRKVFQUeXl5fP/995iYmAhMXXd3tygXXF1diYyM5Nq1a8ITc/36dfFcRkdHk5qaiqWl\nJYODg8THxxMXF0dcXJyAyKSkpODv7y8wbFqtVmRUGBoaCkr0ggULePrppwUiXo8T1Gq17N+/n/Hx\ncebOnUtfXx+PPvqoIDzb2NgwOjrKCy+8gJmZGTU1NYyPjzNr1qyHPh9/E4uCt7c3ZmZm7Ny5k+jo\naK5cuUJlZaVQjhkYGDBjxgxUKhX29vbodDqsrKywtbVlZGQErVbL0qVLmTJlCgkJCaLs0GcD6nQ6\nwsPDGR4eFhHqCxYsYMmSJUxOTuLm5saCBQvEVWHJkiVkZGRgYGDAwMAAcrmcFStWiBJAT8nVjwYb\nGhrQ6XT09PSIwBJra2usrKwwNDQkMDCQhoYG7O3tKSkpEbW4HiqqX2yuXbuGVCpl9uzZrFixAl9f\nX0JDQ1m0aBEZGRliVLlnzx4GBwc5e/Ysly5doqSkhPHxcW7cuIGrqyuPPPIItbW1dHZ2IpfLRVDu\nwoULhVRbj6WTy+VoNBrGxsYoLCyktrZWdLeVSiWXL18mMTFRKPz0H5csWSJCaBMTE7GysmLjxo1k\nZWXx7bffUlFRwQsvvEBISAhhYWGYmpoSExNDSkoKMTExWFpaEhYWJhSeeiGZfuSnVqsZHx8XhKSN\nGzcik8n4+uuvcXBwYGBgAAcHBy5evMjVq1extLREpVJhbGyMQqFg165djI+P88wzzwAPwoa/++47\nwsPDkUgk+Pr6CtpUU1MTLi4uqNVqBgcHBa6tp6eHsbExvL29hcR8aGiIDRs2CKXmhg0bhEx6dHSU\nPXv28Kc//YmkpCRBCY+KiqKsrIx33nkHR0dH5HI5WVlZfPzxx6I/1N3dLUo6eLA43rlzh8bGRvbs\n2UN1dTUlJSVcuHCBjo4OqqqqRAkKD/pyDQ0N9Pf3k5iYyLVr1+jt7WVsbEz07B72+E1Ypzs6OrC0\ntMTDwwNjY2N2795NcXExZWVlaLVa7OzsiImJwdzcHAcHB6EYO378OM8//zwajYYvv/wSY2NjXn/9\ndd577z3gAbREKpUyPDzMRx99RGtrK87OzgQHBwsElrOzs0CGq1Qq7ty5Q0FBAcPDwyxfvpzly5fz\nyy+/UFFRQXR0NICgPOlj41NSUnB3d6enp4ezZ89ibGzMihUruH//Pi4uLuTm5mJubi48GPophU6n\n4/79+2ImnpycTElJCZ6eniLZSV+P9/X1CcHOG2+8gVKp5KuvvhIhLXFxcULXr0/sdnV1pby8HDc3\nN9RqNVVVVSxcuBAHBwfKy8u5e/cu/v7+xMXFERoaSkdHB99//z0XL14kPDxchM5IpVKCgoKIiYmh\ntbVV7Czs7OxISUlBp9MRHx9PcHAwEomEJ554AltbW7Kysli8eDH19fVUVFSgVCqRy+UMDAzQ2toq\n0riLioqE5l+j0Yhm3LZt27CzsyMpKUlMIHbs2EFAQAAymYzs7GysrKxYtGgRMTExwv6+efNm5s+f\nz9y5c0UpMTg4KMxR+mmVlZUVZWVloryTy+XCJu7j4yMSqszMzCgpKRExhCqVio6ODgCys7OFFF4/\nHQsODkYmk+Hv709ZWRmZmZk8+uijuLm5cfToUaysrFi4cCG3bt2ipaWF9957T/w/9GpcmUyGUqnE\nwsICT09PcnJyGB4eZvr06Wzfvh1bW1tGR0f58ssvaWxsFJOg4OBg8vPzCQwMxN3dnbt37xIdHS2a\n8A9z/CZ2Cvpa0N7eni+//JJr164BDxKIr127Rnt7O8nJyZw4cUIYhIyNjXnmmWfo7u5mcHCQNWvW\nEBQURF1dnZh764nDHR0dzJ07FxsbGxISEtBoNBgYGKBWqxkdHRUnm1QqJT8/H2tra2JjY3F0dOT2\n7dtUVVWRn58vtrgVFRW4u7vj6uqKlZWV0MSXlpYK0pKe7mRoaCgixsbGxpBKpfj4+FBbW0tjYyPG\nxsbCYDM8PIyhoaGw7jo6OpKRkUFxcTE9PT18+umnFBQU8N5777Fx40ahLoyOjiY4OJiBgQHc3NwE\ngq2qqkqcCAqFgoSEBBEYa2lpKTIi7OzsGBkZwcrKSgTrHjt2jMOHD1NRUUFERAR+fn4UFRVRXV1N\nXFycCALOzc0VRGdra2t0Op3AnqtUKmpqalCpVFRXVxMWFsbGjRtpaWkRdCM9fVu/4OqVmqdOneK5\n554TidYmJibitVEqlfT392NkZMTChQtZvny56IM89thj9Pf3C+HX+fPngQdTAv0uUKFQEB0djY2N\nDSYmJmRlZeHt7U1CQgKPPfaY2MXY2dkhlUopLi6mqqqKkZERwS0wMHhw6nR2dopEqt7eXnbv3s3N\nmzfJysri8uXL7NmzR1DC9LyEnJwczM3Neeyxx0Ts3OOPP87AwABvvPEG8GD3rNVq6ezspKGhQWD3\n6urq6O/vx8rKSlxgRkZGqKmpoa+vj7/85S8MDAyI58ff358rV66I9+7DHA8T
Re8hkUhuSCSSSolE\nUiGRSF769X47iURyRSKR1P760faffuZNiURSJ5FIqiUSyYJ/9xj6fAUPDw9ef/11ampqqK6uJiQk\nBCsrK6ZPn05hYSF2dnbY2NhgZ2fHli1bsLOzw97eXjj0+vr6xBYPHqC9qqqqhBxYv+IrFAp27NhB\nYWGhAFHU19cLPJmHhwcxMTEMDAyQkJDAsmXLhFxY/4KZmprS09MjRoQymYzi4mJCQ0OJj48XHX2N\nRoOjoyNz5swhPj5e5D/oA2GqqqpERuXNmzexsrKioqKCsLAwwsPDSUxM5N133yUpKYkFCxaIMNl1\n69YRFRXFSy+9RGpqKqOjo6xdu5a8vDzc3NwIDg7m2WefFYhy/YKpUqno7e0VachyuZyamhoOHDhA\nVlYWw8PDfP3115SXl6NUKikpKcHa2ppDhw6J9KuWlhaam5tFI0s/7h0bG6O+vp7i4mJOnTrF+Pg4\n1dXVzJs3j23bthEREcHMmTN58sknGRwcpLy8HLlcTnR0tKBCw4NR8iOPPIKPjw+pqam4ublx584d\nqqqqcHNzY968ebi5uREZGcndu3cZHBxkeHiYyspKEQWYn5/P7du3xfbazs4Od3d3nnvuOVpaWti7\ndy+Ojo64ubmxcuVKqqur0Wq1orRwd3cXY009kl7foD5//ryQK9fX1xMfH/8vArXw8HAeeeQRBgcH\nmZiYwNPTU/RrOjo6MDAwwMvLi5GREVHGKJVKbt26xQ8//AA8EMj5+flhbm4uBGBWVlYkJCRgZmZG\nUlISH3zwATY2NrS3t5OQkCAAxHrD1T+neuvZjw9zPEz5MAG8ptPpCiUSiRVQIJFIrgBPAtd0Ot0H\nEonkD8AfgDckEkkI8BgQCrgCVyUSSYBOp5v8zx5gcHCQtrY2iouLefPNN+nu7iY9PR1jY2N0Oh02\nNjZotVpKS0vZuHEjMTExVFVV4ePjg0ajEQGlbm5u1NTUkJGRwe7du7G2tqa5uZnQ0FBkMhnl5eVs\n3bqVyspKcnJyePPNN4mIiMDf3x9fX18h2dUHe/b29tLX1yeIOnqewvDwMJcuXUKpVPL888/T0dGB\nvb09y5Ytw9DQkObmZnJzc8nPzyc4OFgsIt7e3sjlcvbu3Yubm5vg+ekXGxsbGzQaDTNnzhQyZ51O\nx8jICJaWljg5ObF8+XLi4uK4desW4eHhFBcXI5PJuHfvHllZWURFRQkxVVFREX5+foSEhDAxMcHQ\n0BDh4eGUlZVRV1fHypUrqaqqEhLf9PR0AgMDRbJ0bW0tv/vd7+jt7SU8PJyenh4sLS1pa2vjwoUL\nzJkz518IPwcOHKCuro6AgACWLVsmrOIVFRXs3bsXmUxGRkaGCJnZvn07q1at4ujRo6I5nJKSQlZW\nFrm5ubi6umJjY8OpU6eYMWMGnp6eyOVyysvLCQoKor+/n/Lycj766CPs7e158sknMTQ05N69exga\nGpKQkCB2Co899hhHjx4lMDAQV1dXFAoFH374IU8++SSjo6P09fURFRVFb28v8fHx3L59G1dXVwGz\n0Xsm0tLSKC8vF6DZ5cuXc+/ePXp6eoiKimLBggUcOXKEBQsWEBERQUBAAC0tLWi1Wpqbm1GpVKKM\nvX37NrNmzeL9999n7ty5JCcni56Cubk55ubmqFQqHBwchFlwcHCQkydP0tDQQE1NDd7e3kxMTDA4\nOIhWq2XGjBlCwyCTyejs7KSjo4N58+axe/fuh1oU/u1OQafTtet0usJfPx8EqgA3YCmw99d/thdY\n9uvnS4HDOp1uVKfTNQJ1QMJ/9RjGxsZiRt7d3Y2NjQ1Xr17lzJkz7Nu3TyQgf/TRRyxYsICqqirx\nZCQmJhIXFydCaZOSkoQuXT9PNjAwYHR0lKlTp/LGG29w7tw5nJyceOutt0TdeOzYMaZPn87AwAB+\nfn40NzczdepUvLy8iI6Opri4WPgx9N39RYsW8eOPP3LmzBmqqqooLy9HoVAINHtgYKBg99vZ2XHv\n3j0aGhpoamrC3d2d8PBwnJycqKysBBBXp5GREe7du0d9fT1nzpyhra2NAwcOiAasUqkkISGB3t5e\nOjo6MDc3Z9myZaSkpFBRUUFJSQlz5swhJCQEb29vASu1sLBg586d3Lx5E1tbW06cOEF2djYSiQR3\nd3fc3d1RqVSkpKSI2bc+U0Af4Dt9+nQiIyNJS0sTCV6Ojo7s3r0bGxsbsWDp+yaLFi2iuLgYuVzO\nI488go2NjeBhzJo1CxMTExYuXCjCSoqKihgZGcHCwgKNRoNUKkWhUJCfn8+3335LVVUVSqUSFxcX\nLCwsiI6OxsPDg9WrV7Nnzx5qa2uZMWMGWq0WX19fUlNTAcTrFxMTg42NDTNmzOC1115DJpOh1Wrx\n8/OjoKAAeBAxl5aWhlqt5uLFi9TW1lJYWIiFhQVtbW0MDAwIpoO7uzt+fn7Mnz+fDz/8kJSUFLHQ\nnjp1Cjs7OxQKhcgEbWhooKCggPPnz+Pk5MRXX33F6tWrReLVjh07AISXYerUqSKJ/NdzELVaTX19\nvfB9VFdXExUVhZmZGQqFgtmzZzM8PIxWq6W3t5clS5aIvtXDHP+tnoJEIvEGooFfACedTqc3aXcA\nTr9+7gb8cwHT8ut9/+/v2iSRSPIlEkm+Tqfj888/F1p3Y2NjysrKMDEx4YUXXuAf//gHarUad3d3\nbGxsWLduHZ6enpw9e1aElzY0NJCbmyv85wCTk5Ps3r1bdLB9fHwICQlhxowZ+Pn5ce/ePYKCgoiO\njmb58uV88cUX3L9/n/b2dpydnamqqqKxsVEExOjRXtXV1RgbGzM2NkZTUxMJCQk4ODgwc+ZMvL29\nWbFiBQYGBri4uBAREYFKpWL37t1it5KSksLIyAi9vb0kJiYKe6/+RNV3jIuLi9FqtRw/fhytVktb\nWxuXLl3C29tbZA5GRkZSU1Mj0N/Gxsb09PRw+fJlpFIphYWFAhKiH6OamZkhk8lEIK2ZmRnR0dFc\nvXoVPz8/qqqq6OnpISkpie7ubhFbrxeW3blzh0uXLoldXH19PRs3bkStVlNaWsq7776LnZ0dmZmZ\nqNVqET+v78/oSU5yuRwnJydBJIYHpUhcXByTk5M0NzfT0tLCwoULuXLlilgsTp06RWFhIQqFAldX\nVzH90ANdvvnmGwEXyc7OBh64EvX8Bn3Wpr5XpE/sCgwMpLW1VZjgurq6SE5OFu9JvWpVL3mHBya2\nbdu2kZ2djaOjI42NjTz55JOiEdnZ2UlgYCCenp4olUr2799PbW2tyMDIz8/n1KlTgrilt8D/+OOP\nfPbZZxw9epT+/n6am5sFBk9/cWhsbKS2tpbk5GSUSiXt7e20t7eTmZlJYmKiUHgeOHBA/N6HOnQ6\n3UPdAEugAFjx69d9/8/3Vb9+/ArI+Kf7dwGP/pvfrfvf2//e/vf2//st/2HO9YcaSUokEmPgGPCj\nTqc7/uvdnRKJxEWn07VLJBIXoOv
FBUV\niS62Xgfu7e2Nv78/5eXl2NjY/EvmxIoVK6ipqUGj0YgFISUlhUOHDuHs7Cx8BYsWLWLnzp2kp6cz\nNDQkgCYSiYTLly9TV1eHu7s7a9eu5W9/+xutra0kJCQwPj5OT08PERERYlykz2iQyWRcunRJ4N6W\nLVsm8hoHBgaor69ncnJSJFYpFAqMjY3x9vYWmYVOTk7MnDkTqVSKpaUl1tbWgnycnJzM6dOnRVr3\nypUrmTJlCk1NTTQ2NlJRUYGTk5PoEfT09GBkZMS9e/cIDw/Hx8eHXbt2Cd3IihUrOHHiBDt27CA/\nP5+nn36aP//5zwwNDRETE8PY2Bi5ubk8/fTTHDp0iMjISBISEgQf0dTUVLxpjY2N8fDw0Ft9uXnz\nJgEBASiVSs6cOcP+/fvZuXMn1tbWhISEEBQURGZmJo2NjdTU1HDx4kUMDAzo7e0VNGz9e2zFihUU\nFhbS2trK6tWrSUpKYmxsjLy8PKysrBgfH2fatGkiXVqfClVUVISvry+ffvopQ0NDYlIDD3Z3ISEh\njIyMEBMTIzQQenFYc3Mzo6OjLF68mKNHjwrEn4uLC4ODgyQlJWFjY0N0dDRffvmlKHfc3NwYGxtD\nJpMJ85tSqSQ8PJzHH3+c+vp6wsPDycrKQqPRiN3OnTt3ePnll2lra+POnTsiAVxf7jzM8ZuQOZuZ\nmYnMhNbWVszMzJg7d66og21tbUV+wYsvvkhkZCT+/v5s3ryZTZs2iQDTqVOn4u3tjYuLC/AA8b5q\n1Sr6+/sZGBigoKCAK1eu0NXVJSzT9+7dY+/evWRnZ/Pzzz/z9ttv4+zsLOLfjYyMSExMpKSkRODC\nJRIJwcHBSKVS3nrrLWGy6ejooLW1lXPnzuHm5iaSiOPj47ly5QobNmxg06ZNQlyjnyt7e3sDYGpq\nKlJ/VqxYgVwuFxHuxsbG1NTUUF5eznvvvcf27dtxcXERYSvXr1+nvr6ea9euER8fT15eHoODgxw5\ncgRPT09mz55Nd3c3R48eRalUIpVKmTJlCubm5gwNDbF7925hCHv22WcJDAzk8uXLfPPNNwwNDTFr\n1ixkMhnV1dXk5OQwZ84cvv76azZv3izKtg8//JDKykoGBgb44YcfuHDhAkuXLqWnpwdHR0fq6upE\nkyw9PR1bW1t27txJV1eXeM30QNtr164xbdo0jIyMaGxsxNnZmbS0NPbt2yd2hnrmwtjYGDU1NTg6\nOlJfX4+vry9PPfUULS0tzJw5E3hw8mZlZREdHY2Tk5MwkumTtnt6eqirq+OTTz7hl19+ITMzU1j5\nW1payM7Opqenh8nJSdGv0R/19fVoNBry8vLIz8+nra2Nn3/+mf3792NoaMiPP/7IwYMHuXHjBnPn\nzuXNN99k4cKFSKVSKisrBbqur69PhMwYGRmJUnDx4sXk5+czNDTEpk2bsLe3x9zcHEtLS7GIVVRU\nIJFIMDU15fe//z1Xr14Vo+2YmBgxwn6Y4zexKPT394uocf2Muq+vDzc3NxobG4UgyNLSUtRf/f39\nSKVSZDIZvb29NDQ08N5773HixAkcHR2BB7FxNjY27N+/H39/f3744Qeampo4fPiwAIEODw8ze/Zs\n4acICgriu+++o7+/XxhNzMzMKCgoEFezkpISvv76a5KSknjssceEci8jIwOVSiUcak1NTXR2djI6\nOkpbWxt9fX1CV19VVSWmFHowSExMjICAFBUVMTY2ho2NjWACtLW10dXVhVqtFiBOfTc7MDBQhKhc\nv36ddevWUVNTg62trSiTnJ2dMTIywsjIiLCwMNzc3Jg7dy5lZWVi0fn+++/FjiE0NJTY2FjkcjnF\nxcViAdE3PefNmyes608//TRqtZqxsTEee+wxoUU4ePAgN2/exMjIiNLSUg4cOMDk5CRZWVkMDQ2J\nDFA9eSklJYX29naRnjx79myBYevv72doaIjFixfT19dHaGgok5OT1NXVoVQqKSsr4+7du7zyyitU\nVFRQUFAg9AIWFhZYWVkxMjKCQqEgKCgItVpNS0uLAPgeO3aM4uJirKyscHFxEUrRO3fuIJfLiY2N\nFQnPeoivp6cn4+PjdHR0EBgYKBygra2t+Pv7c/36dQwNDSksLGRoaIi9e/fy6KOPCim7h4cHZWVl\nlJaWYmJiQnFxMfCg3BkcHKSrq0sIpvSZmfoYQl9fX0GE0pdDEomERYsW4evri6WlJfb29ly5coUF\nC/6t20AcvwnIys6dO7e99NJLXLx4EXhAzdE38/r7+7l69SpKpZLJyUkSExNxcXFhYmKCiooKZs+e\njUwm44UXXiA2Npbx8XGGh4fJzs4mMTERe3t7TE1Nsba2xsLCgpUrV6LVaomPjxcwksrKStFY0mq1\n6HQ6TE1NuX37NmVlZaI5NT4+zrVr15g9ezZPP/00e/bsISAgAENDQzIzMyktLaW3t1do5q2srAQh\nKDY2Fjs7O0xNTSkrK6OxsVGYcFxcXNi+fbuQ0sbHx/PTTz8xffp0IaTSR53pTVUymYyJiQlcXV25\ncOEC3t7etLa2cv/+fXHy69Hv/v7+3LhxQ8BTXV1dxU5CT+vp7u4mJCSEadOmCQZgX1+fsFFLJBIK\nCgowNzcXNt/e3l5SU1Nxd3fnxIkTJCYmkp6ejpOTE0NDQ+LEt7W1JS0tTYznVCoVExMTHD16lClT\npgi47v79+1m/fj1SqZS2tjZ0Oh1nz57F39+f0tJS5s+fT3t7O+Pj49y/f5/AwEB8fX05ffo0JSUl\ndHV1ERsbK0CqS5cu5e7duxQVFfHkk0+KRd7b25t79+4xMjLCvn37cHJyoqamhsnJSRFkPDIyglwu\nx8/PDwsLCxQKBfHx8RQUFAiRUElJCXFxcaSmppKZmcnAwACOjo4Cfdfe3i78CN3d3fyf//N/GBwc\nxNnZmbKyMtzd3XFycqK0tJTCwkLc3d1JSEjg9OnTLF26lN7eXhFmpFaraWxsRK1WC5OafpHUczUG\nBgbQarXMmzcPS0tLzp07x/r16wkICCArK4tr1679zyEv/f3vf9+mVqsJCAjgypUrODo60traytjY\nGDdv3sTAwECManQ6HZcuXeKXX35hwYIFdHV1YW5ujlwu54knnmDGjBn09vZy6dIlMjIyKCgoYMOG\nDeKFcnNzw8fHR8hPX3zxRWGkqa+v58qVK/j4+KBQKBgdHSUmJobR0VGB/7579y6ffPIJVVVVzJkz\nh/v373Pu3Dnkcjl2dnY4Ojri6enJqlWrhIXZ2tqahoYGrl69Sm5uLgMDAyxcuJBvvvkGAwMDdDod\nhw4dIjk5mZiYGC5cuMBLL71Ec3MzQUFBhIaGMmXKFBwdHbG3txdsQn2Wgp6y4+TkRExMDAUFBXR3\ndyOVSkWYjEQiobe3FxMTExobG9mwYQNGRkYYGhpSUVFBcHAwqampTExMUFZWhlwuZ2RkhODgYMzM\nzJBIJAJrb2trKxqrJ0+e5Ny5c4Je1NPTIyYYERE
RhISECGWktbU1MTExFBYWotVqiYqKYnJyktdf\nf50tW7ZQXV1NcnIyHh4eWFpaUl1dzV/+8hekUinZ2dmMjo4SHx/Pxx9/jLu7uwCSajQaMjIy+OMf\n/yh8HXfu3KGpqYnt27fz0Ucf4enpKVSF58+fx9/fH5VKJcbGsbGxLF26lHnz5lFbW0t+fj4SiQQr\nKyvc3NyIiYkRkXT6+EE9IcnS0hIvLy+cnJxQqVQsXryY4OBg3N3dWb9+vdilmJiY8OSTTwq9if7+\n9vZ2ZsyYgaGhIRqNhuzsbObOnSsCiBsaGigvL8fe3p6pU6cyY8YMjI2NBduyubkZKysr0TT18/ND\nq9Xi7OwswK/m5ub/s3IfJiYm0Gg0lJWV4eLigoeHh2j6bd68mcnJSRwdHfH29qampobm5maRo+Dp\n6Ymbmxtr1qwhPz+f0dFRgUxXqVTMmDGD+/fvk5eXJ1J99CDSH3/8Ea1Wy7Rp00hNTUWj0fDKK68w\nPDyMo6MjAQEBIonIwcFBpEO3t7fT3NzM559/zpkzZzh69CiZmZkiT8HCwoLbt2+Tl5fH6tWrMTMz\no7i4mLy8PDo7O3FycqKgoICnnnqK7u5uMTbTnzBDQ0NUVlaSnJyMVqtlYGAAnU5Hd3e3IBL19vbS\n0tIiTtT6+np8fHxoa2vD0tKSWbNm4e3tLdDlCoUCT09PJiYmBDvBzMxMjCLNzMxoamqitraW7du3\nc+HCBX766SdBc/7ll1+Ey1O/Mzt27Bienp7s2rWL06dP88orr1BYWMiFCxewsrISTAaVSsWOHTt4\n8cUXaWhowM/Pj8zMTIaHh+nr6xPNR3jQaDQ2NkYqlWJtbc2UKVOIiIhgcnJSTEAcHR3x8vJiz549\nhIaGMmvWLJYvX053d7fYASYlJTFt2jShLbG0tMTY2Jja2lrs7e25ffs2pqam1NbWYm5uzrlz54Qz\n1NHRET8/P0H/PnHiBH/7299oa2vj7NmzTJ06VdToQUFBSCQSysrKcHV1xdPTk/379/PHP/6RhoYG\nCgsLqaysZGxsTGRbNDQ0CI+JWq1mw4YNjI6OYmlpKcbpg4ODbN26Veh05s6dS1RUFDt27OCzzz4T\nY822tjbByzA0NCQsLAyJRIJcLmfx4sUMDQ2Rk5MjIDYPc/wmFgVra2vCw8NRKBQolUrc3NzIzMzE\n0NCQgYEBenp66O3tFaaS4eFhkQzc0tJCVlYWDQ0NpKamolAoCA0NBeDy5cvY29tz+PBhJicnKSgo\nQKPRCFpTWloa69atE3NzfXBKVFQUS5YsobKyksnJSfbu3Su8DPCg3rOwsCAmJobNmzfz6quvkpiY\nyB/+8AecnZ2ZmJhAq9XyxBNPCGKzTqejuLgYV1dXJBIJg4ODgkKkt7VqNBoiIiJwc3MTEev68dj9\n+/eBBzp2hUKBg4MDDg4OGBsbY2tri4eHh4hWd3V15dtvv+Xy5cusXLmSoqIiZDIZcXFxAj+nVquF\n+i0uLo7o6Oh/KUuOHDmCra0tPj4+9Pb2EhERgbu7O8nJyQJaq78CnTx5kkWLFvHFF1+IWlZvU7e0\ntBSKvfT0dC5fvkx1dTWDg4OCszAxMcHjjz8OIBpvd+/eZePGjXR2dnL37l1R0qWmprJlyxYBflGr\n1QQFBVFbWysyOPz9/Vm1ahX379+noqICeIA3CwgIYNq0acIUlZaWxocffijI4VqtVsBRIiMjGR8f\n56uvvmLv3r1YWFjQ3t4uhF1vvvkm8GAhb25uRqfTcePGDSQSCePj4wQGBhIbG0tLSws//vijoCud\nO3eO3//+9/T399PS0kJERAQVFRVs2LCB3NxcQfcKDg5maGiIa9euCbiwHsdWWFjIrVu3xHTKwMCA\n48ePi/ChZ555RuSb2NnZMXPmTJGv+jDHb6J8+OSTT7bpsWvh4eEkJCSImrakpAQfHx/c3d2Ry+XM\nnz+f2bNnExkZKRBiLi4ugjKjh6nevHmT999/n8rKSoaHh1m9erWI3KqsrMTQ0PD/sveeUXGe59r2\nMcDQOwxlYKiiF4kqQKCKhLrVhSJLsmVbih23JM72sh3bSb40xyvbsZNouyq2ZUW2LMlqICRQAySq\nQPTeBxjaMDD0Nt8P/Nxr7/fH3nrXt9d6vb/1Pmt5WYxN0cM8931f13Wex0lbWxsTExMkJiYyPj4u\nbMYS6ESKCtfpdCgUCmGJ9vHxIS0tjZKSEoHd2rdvH5OTkyxbtkyg2k1MTGhsbEQmk2FlZcVLL71E\nSUmJwGP19/dTUVFBVFQUZ86cwcvLi4mJCQYGBti8eTMxMTH09vYKqfb4+Dg3btxApVLh6+uLl5eX\nGD1KdGdpDObi4sLw8DDW1tYoFAqsrKywtLSktbUVpVKJu7s7HR0dDAwM0NzcjLe3NzMzMzx48IAV\nK1YIXmZnZ6egRBkbG/PgwQPRiJQmBUuWLKGqqorp6WnMzc1FuWVpacnAwABarZYNGzawdetWfvSj\nH3HmzBkOHz5MREQEWVlZODo6UlVVRW5uLs888wweHh7k5OQI2pUkEJubm0OlUnH79m0uXbrE9evX\nSU5Opry8nMrKSkJCQgRTU61WU1ZWxujoKLW1tcTFxaFWq6mtrUWn0wlOolarZXR0VFCment7hfFt\naGiIn/zkJ0RHR+Pj44NKpWLlypX8/ve/R6FQcPXqVWJiYhgaGsLMzAy9Xk9RUREBAQH4+/vj5uZG\nZmam6Gs99dRTODo6YmNjg8FgwM3NDYVCgYuLC1999RUPHz7E0tKS27dv89hjjwkOaXJyMnK5HGNj\nY5ELuWLFCszMzPD39xecztTUVA4cOAAshhUXFhbyxRdfoNVq2bx5M6dOnfqfUz5MT08THx9PYmKi\nSJQOCQkREtuEhARsbGywsLDA1NSUmpoaiouLmZycJDk5mcTEROLi4jAyMhKpzbC4o8/MzLBy5Ur+\n8pe/MDo6SmVlJTt27OCxxx7D3NycwsJCKisraWxspLe3l5s3b3Lp0iUuX75MWVkZpqamuLm5MTIy\nItKhV6xYQVFREc8//zyvvvoqISEhdHd3s2zZMu7evSvQ72fOnBFoMV9fX+7evcvhw4fZvHkz5ubm\n1NfXs2LFCurq6oDFo7OdnR0hISGMj49z9+5dampquH//Pvn5+TQ2NlJUVMTQ0JAY1dra2lJSUkJ3\ndzednZ2iB7J69WoCAwOFd7+jo4OxsTExxVlYWBAsB6l0yM3N5dSpUyxdupRXX32VVatW4ePjw9TU\nlIjl8/LyAhZLvs2bN5OWliaQZhMTEwwODhIVFSXktdIEJSMjQ6Q8e3l5iUmBm5sbP/rRj7hy5Qqw\nKNopLS3lZz/7GT4+Ply9epXp6WkqKyuxs7OjsbFRjDFnZ2dpbW0lKyuL+vp6GhsbuXLlCrW1tVRW\nVrJlyxZBntJoNJiamhIXF4e3t7fIXrx58yaDg4N4e3szODgoUqmzsrKora2lqalJ4PFsbGy4cuWK\nyN8AaG
5upq+vT/RI6uvrMTMzY3h4mMuXL2NkZERiYiI7duwgMDCQa9eucf78eXbt2kVBQQEnTpzA\nYDBw7949kpKShLpVpVLh6uoq+I8KhUIwRKQpilQGVVVVid5BR0eHCCL28fHhySefZNmyZcKF+ijX\nD2JRMDIywsbGhtraWjZu3IhGoxEKtrS0NIqKisjLyxMSYJ1Oh5+fn8hdHB8fJycnR4w2FxYWgEVK\n9PT0NB0dHcTFxZGXlyduplSWBAYGYmpqSl1dHVZWVsjlcvGGk5KtHR0dRW4lLB7j0tPTKSws5Pjx\n45SVlREYGEhGRgZ//OMfOXv2rMg3lEZUZ8+eFY08GxsbsUPX1NQIfp4UWS7xAmtqahgYGMDc3Jzh\n4WG8vLxEp1mCuMCihNfU1FSkP0s7eXNzMyqVio8++kjEkW/fvp3x8XFOnDjB+Pg4AQEBjIyMEBgY\nSEhIiKBdaTQa1Gq1AJ1cv35dLLYODg4YGRmRkZFBSUkJISEhInptz549fPbZZ/T29uLj40NycjJO\nTk50dnZSXV1NZWUlx44dw2AwoNVqUavVGBsbi5KvuroaOzs7bG1tsbS0xMrKSgQMt7a2Mjk5yS9/\n+UteeeUVjh07xtDQEEFBQaSkpAjE26ZNmwTNS9r1pROLpCL09/enrq5OAHJTUlKYn5/n66+/pq6u\nTiRXX7lyhZKSErZt2yZQcTMzM4IpuWvXLlJTU9m6dStGRkbodDrKy8spLS0lNjYWCwsLWltb6enp\nwcXFBZ1Ox65du3B0dBRglvPnz4uNQxJbaTQaPDw8UKlUgvgsnabt7OxEIG5XVxft7e2C+yGTyVhY\nWCAqKkpE05uamvLNN9888vP4gygfTpw48auEhAS2bdtGUVERRUVFYoYfHx+Pq6srMTEx4pcql8tx\ncXFBo9GIh+h3v/udUDY6OTkJiEdgYKBIsE5PT2fJkiU0NDSIbq6zszP+/v5YWVkJToIU0hoREYGH\nh4dIalq3bh23b98mLCyM6upqGhsbRY6lRqOhvr6eo0eP8vzzz2NpacmZM2fo6+vDw8MDCwsLIc/+\n5JNP6O/vZ8uWLYSHh6NQKLhw4QLPPvssAwMDfP3112g0Gjo7O3nxxRfJyclBJpMJebSxsTF+fn7k\n5uZSXFyMkZGRODZLmZpyuZyIiAgsLCxEudDW1oaxsTFarZbW1lZcXV3FyLKrqwsTExMiIyMpLCyk\np6cHHx8fAgMDuXr1qpA+S7t9eXk5CQkJgkCUm5vLE088wcmTJwkJCRGxdydOnGBmZoY1a9aQn59P\nXl6eoBzn5ubyu9/9TjwcX375JR9++CF1dXUCjWZvb4+npyeTk5OMj49jMBgYGRlhbm6OtrY2jh07\nRnd3N3Nzc+zcuZOenh7Cw8PJyMgQXItr165x+PBhjI2N0ev1QoDk7u4uVIhFRUW0t7dTVlbGm2++\niY+PD0lJSWzevFlMciQgjZQKde3aNWxtbZmamkKv14t08oGBAQICAvD29mZhYUGUu66uriIDU6fT\niVGj5EvQ6/UcPHiQDz74gIMHD2JiYkJXV5fQ58TExJCXl0dwcLBoIjc0NGBqaoqVlRWjo6O4ubnh\n6enJW2+9RUxMjJCw9/X1cfv27f855YNerxeodCcnJ8HiP3DggCANVVZW0traKuKwJicnuXTpEllZ\nWVy8eJHm5mbc3NyQyWSC4mNnZ0dVVRWlpaUsLCyg1+uZmprC39+f9vZ2goODmZ+f5/Tp09TW1mJv\nb09iYiJr1qzBYDCg0WhwcnISMJSQkBAAfH19WbduHfv27RNH//z8fNLT09m0aRPXr18nIyODgIAA\nFhYW+Nvf/kZ/f7/42SVJd2lpKTqdTghspHQgiZyTmppKRUUF4+PjXL58GRsbG1pbW3FwcMDS0lJA\nOyUcl7+/v5gohISEUFZWRnNzszj9SCBSBwcHkXOYnJyMsbEx1tbWQs9vbW2Nm5sbWq1WxJtJ+PPR\n0VGio6Px8PBgeHhYjCd37NhBX18fSqWSffv2sXr1ak6dOiXm7G1tbZw7d47r16/j5eXFsmXLWLp0\nKf/2b//GyZMnaWhoAODbb7/F1tZW2NLLysro7OzE19cXPz8/4uPjycjIQCaTsXz5cnJycggNDUUm\nk/HRRx9hYmLCU089hYeHBy0tLaLrPjY2RlNTkzjt9fT0MDMzw9TUFEqlEgcHB5YsWcLatWvR6/Uk\nJSURHR2NTqfDzs6OzMxMVq1aRWBgIPHx8cIQtXr1anbv3k14eDgnT54kIiKC1atXs3XrVoqLi3Fw\ncKCrq4tz586Rn5/PZ599Rn9/vzh5Sl4GKS7x9OnTAKIxuXbtWmETd3BwICwsjPHxcRoaGsjMzOTW\nrVvIZDLu3r1LREQEPj4+TE5OsmTJEnQ6HWq1mvLyciGffpTrB7EoWFtbCyWXRNJxd3cXO4XUPZd6\nCpOTk9y+fZv5+XkCAgIwGAyEhYURHx9PX1+fUB46OzszMjLC0qVLGRkZESlTWq2WnTt3EhgYiFKp\nFDPxqakpoeKTEogll6G/v79g3K1atQqDwYBKpcLY2JjMzEyhQKytrSU2Npbg4GA0Go0gQUmOwoKC\nAmJiYqiqqsLe3l5QomCxQ65UKoW8+ubNmzQ0NDAyMkJcXJwIc1Wr1VRUVKBUKoVUWaPRMD8/z717\n97C2tqayshJ7e3vkcjlmZmY4ODggl8uZmprCwcGBwMBAUVtL8uCOjg6CgoKEv0GpVFJeXi4i7CVO\nZWFhoYi5f/fdd4mKiiIoKIiCggJWrlxJa2sr77zzjsis+MlPfkJBQQETExOMjY2JKDcJOpqUlCTo\nU+bm5sJVKhl5ZmdnGRkZERkVUl4iQHx8PKampqSlpREVFUVPT48oI5RKpUCmSw3ptLQ0NBqNaMJ6\neHgwNTXF0qVLOXr0KHv37sXd3Z3Ozk5qamooLy8HFuXXu3btwsTEhPHxcZH7UFNTw4ULF0Q4bkND\ngwCuymQyEYXY1NQkksyNjY2JjY1FpVLx2muvERwczIEDB3jjjTeEHic7O5uZmRn++c9/ivePpaUl\nQUFBODg4YG5ujo2NDUuXLsXExITdu3cLpunvf/97kRMqYeukUJxHuX4QhiiJizg8PExmZia/+MUv\nyM3NRavVCh+9ra0tWVlZgqGwZcsWsSvKZDKef/55Ll26RFJSkkCm5+XlsW/fPvr7+1EqlRQWFvLZ\nZ5+RmJiIwWAQ5Ny0tDSeeOIJScc8ngAAIABJREFUET0mcQ+Hh4c5dOgQarWaqakpoVMwMzOjv78f\nNzc3qqurhbLwX/7lXzhy5AihoaHC7x4dHc3g4CDfffcd58+fp66uDoVCwQsvvIC/vz/Xr18nNTUV\nWKyno6KiKCsrw8HBgdDQUBwdHXn77bcZHh6mqqoKMzMzYmJiMDc3F/yH0dFRhoeHOXXqFCtWrGB4\neJiFhQVqa2vZvHkzQ0NDmJiYiFwNyeyUm5tLf38/Op2Of/7znyg
UCt58802uXbsmuI179uyhvr6e\n6elpYeGOjIzExcWF2tpatm7dSldXF9999x2Dg4PI5XISExOBRQmwJEjz9fVFpVKxa9cuQkNDRRLV\nvn37aG5uFkTrwMBAceKIiorC2tqaoqIioSrs7e2lvr6e8+fPI5PJSE1Npbi4mD//+c/s2bOHixcv\n8vTTT/Pw4UP8/f05dOgQDx8+ZO3atQwNDTE0NISjo6PA2Hd1dYmfTQLPZGZmMjw8TFtbG1u3bhW6\niNOnT3Py5EnWrl0rHjJpQfH29ubFF19k37599Pb2Ul1dTUVFBQkJCQwPD/PWW29hZGTERx99RG9v\nLzMzM6SlpYmNLzMzk9zcXCFHdnZ2prq6GpVKxdTUlGCN9vT0YGdnh06nY2hoiL/+9a8CTSf5Vxoa\nGggJCSEyMhKtVsutW7d45ZVXHvl5/EEsClZWVqjVapGRV1tby7p169DpdAwODpKcnMzQ0BDr1q3D\n2tqagIAAzM3NBeTz8OHD5OXl8fOf/5yxsTH+9re/AZCamkpHRwcLCwsiZVpKQ9LpdMzMzNDU1CTk\nswAnTpwgODhYjCWXLVsmUoSkWLq6ujrUajXz8/O0trbi4uIiUPSAIExLUfbFxcV4eXkxODiIra0t\n5ubm5OTkCKGT9L1dXV0xGAxi5zc1NUUul9Pe3k5TU5PIrDA3N+fmzZt0dnaybNkyTE1NRQkiIcUl\nF2F4eDjnz58X8FVpoqPT6RgYGMDLy4uFhQUCAgKEsUav17NlyxZaW1s5c+YMgYGBQmevUCjQ6XRc\nunSJ9957j8LCQvr7+0lPTxcUpaamJt5++226u7sZHBxkfHwcZ2dnPD090el0lJaW8u6773LixAnu\n3LlDY2OjyLPs7OxEp9Px9ttvs2HDBoaHhwkNDeUf//gHERERTE9P4+/vz5EjR6ipqeHUqVOcP38e\nGxsbtFotH330ER0dHaSlpXH79m3xHvPy8sLNzY3i4mJaWlrw9PTkX/7lX6ivr+fixYvs3LmTzMxM\nSkpKcHV1FdzGyspKbt26RUREBDqdDn9/fzw9PQUYx9TUlKqqKjIyMti/fz+zs7N88cUXbNy4kfT0\ndJydncnNzRWTr9raWkFclsaPe/bsQa1Wi+xOQKDsT5w4webNm/Hw8ODatWtiI9m2bRvBwcEsLCyg\nUqlob28XDlMvLy/S0tKoqKjA0tKSJUuWiJ/3Ua4fxKIwPT2Nj48Pd+7cob+/n+DgYK5fv45OpxMJ\nQ21tbfT09GBhYcGf/vQnIiMjmZ6eZunSpdTV1RETEyMQXFLt//DhQ0JDQ8WR2tnZGVtbW7q7u/nH\nP/7B/fv3Rd3u5OTEW2+9xfr165HJZFy5coXw8HBaW1spLy8XYzkAnU5HWVkZ8/PzPPvsswwPD4v0\npY6ODkZHR4mKiqK/v5/u7m4mJycZGRkhKCiIlpYWli9fjr+/P7Ozs0xMTIhd0tjYmIyMDPbu3cvD\nhw9xcHAgOzub8PBwId1OSEhAJpOxdOlSDAYD1dXVItDVwsICnU4nVJ2SCEyyZvv4+DA/P09vby+T\nk5O4uLgQHR2Nt7e3uJceHh7odDpaWlqwtbXF2tpaSKl7e3sFFm7t2rV8/PHHjI6O8swzz/D1118z\nNzeHnZ0d5eXlREREUF9fzxtvvMHo6CjV1dVYWFhw8+ZNkpKSuHDhAmVlZZSXlxMYGChOYdI04fnn\nnxdH9vj4eIKDg8nMzGTnzp3s27cPCwsLli1bxs2bNwkICCAuLg53d3dOnjzJyMiIWNg++eQTYLGb\n7+7ujq+vr1iE3nnnHSYmJggLC6OlpQWFQoGZmZlgZTg5ObFt2zYuXbqEpaUlHR0deHh4kJ+fL7IZ\nraysxH1ydnampKSEZ599lrm5OYqLi4Wt2c3NjcTERBwdHQVdvKenh7y8PLEZ5OTkiBFqSkoKFRUV\n2Nvbo1Qqxb2vr6/HxsYGuVwu8PkXLlxg9erVhISE0NLSgoODAy+88AIpKSkolUo8PDz+540kZTIZ\ndXV1NDU1YWxsjJWVFYcPH8bHx4dr167x7bff8uDBAxGgaWFhgZGREZWVlRQVFdHQ0MCtW7fEuEaS\nMwcFBeHt7c3+/fs5ePAgDQ0NvPbaa2g0Gp588kneeustXn/9dSH6SUpK4t69e5SVlZGSksK1a9eQ\ny+W0trZibW0tRodJSUk4Ozvz6quvcuHCBVHSZGRk8M4776BWq8nJycHLywtzc3MCAwPx8vLC09OT\no0ePCg6j1H+Q8iTMzMx48sknKS8vJz4+nqqqKjZv3oytrS3R0dEcOXKEvr4+oYhsaWnB2tqavXv3\n0tbWRmdnJy0tLTQ3N6NQKCgtLcXY2BgvLy9RA3t5eWFlZYWFhQW2trY0Nzdz+fJlLC0taWlp4ZNP\nPmFoaAh/f3/a2tpQqVQMDw/T39/PwMAA/v7+XLx4EZVKJSLUb9y4IcogrVZLQ0MDMpkMS0tL9u/f\nj1arxWAwMDU1RVpamtj1Pv/8c5RK5X9YcD09PWloaODDDz/kyy+/5JlnniE0NBRPT0+RBH337l2B\nvpPGg1K6k62tLWvWrMHe3p7bt2+TnJwMQEZGBvn5+cCiBqC+vp7IyEji4uJIT0+nr6+PxMREDhw4\nQENDAwcOHCA9PR0/Pz8xxZFG07a2tty6dQtYdKkGBwezcuVKkWIeGhoqWB9S+tP09DS1tbV4eXmR\nkpKCra0tY2Nj/PrXvxaY+X9v+29tbeXevXsEBASIYJe2tjYWFhZITk6mp6eHc+fO0draysGDB6mt\nrUUul7Nlyxbm5ubIzMzE2tqarVu3smTJEvEee6TnUerU/5+8ZDLZ//kf4v9e//f6///1wGAwxP5X\n/9MPonwICgoSXV8J/SXZaScnJ7G2tqa0tFRw8iTU2PT0NB4eHmzbto3s7GyMjY1xd3fHy8uL7du3\n8+qrr2JmZoa3t7c4PcAi91/KRXBzc8PJyYnBwUFKSkp47LHHyMvLw8HBgT179vDgwQORveDr68uv\nfvUrfvOb32BtbY2ZmRkLCwuCkCN9LGnk6+rqePLJJ7l69Sru7u5oNBrS0tLo6emhqqoKOzs71q9f\nT0tLC0eOHGF4eJhf//rX+Pv7C3Wgn58f9vb2dHd3ExISIkg7SqUSCwsL+vv7RU5ibGwsN2/eZMeO\nHWg0GpRKJVqtFo1Gg0qlEmBYWKzdQ0JCsLa2FqaqoqIiVqxYQV9fH7W1tVhYWLBkyRIePnxIcnIy\nubm52NraEhUVJXIsmpqaUKlUTExM4ODgwODgIB4eHqjVap5++mkyMjIYHx8nNjaW0tJSAgMDCQ8P\np7y8nAcPHojGaWNjIx988IH4ndnY2FBfX4+vry/u7u6CASGFoHz11VcMDQ2xbds2KioqRJiLm5ub\nMAjFxcWRmZnJa6+9xscff0xRUREODg74+/sLxJutrS3Lly/H09OTv//97yxfvpz29nbGxsaIj49H\nr9czOzsrRHDSlOvkyZ
OcOnWKr776iomJCWZnZ4mMjMRgMHDx4kVWrVpFc3MzSqWSd955h9TUVBIS\nEigvL8fKygqFQkFubi7W1taEhITg5OTEm2++ydNPP83rr7/O73//e9HAlkbrcrmcyMhIobA0MzP7\nD/ZqWAQLRUREoNVq+eKLL9i1axc6nY7CwkIx4fmvrh9E+TAyMoJcLqempgalUsng4CApKSls3ryZ\nbdu2ERYWxsGDB0Vwx8LCgqh9pUTmiIgIJiYmhKMSYMuWLTQ0NFBXV8djjz2Gqakp9fX1XLlyhdDQ\nUHp7e0lJScHOzo6ysjKWL1+Oubk5AQEBxMfHU1tbi0ajobGxkRUrVgiWosTEGxkZYXZ2loKCAgoL\nC7GwsKCtrY34+Hi8vb1Zv349Dx48wMTERISJSF1rBwcHBgYGOHDggMhRLCoqIigoCIVCgaOjI0eO\nHGFmZgYPDw9sbGzIyMigublZyGEnJycFwm10dJRr166xZs0asrOzUSqVIvZ+2bJlQms/PT3Nw4cP\nRd+luroaW1tbWlpaiI2NpbKyksnJScLDw/H29haofcmd2NPTQ19fH7Ozs8zOzhISEiLiylauXElo\naCgWFhZMTEyQnZ0tFmOJH1BdXc3t27cxMTER4rDp6WmxWPn5+fHYY4+RkpKCQqFgfn4ehUJBa2sr\nV65cQafTUV1dTWRkJAcPHsTIyEgIw5YsWcLy5cuBxZL01q1bgmI8NTWFr68vVlZWnD9/nscff5zf\n/OY3LCwskJGRwdWrV9m7d6/oK0i6FImzKJPJhEYgOzubvXv3Aosc0Lm5Oerr68nJyeG7775DLpcz\nMDBAT08ParWa999/n7GxMaqqqvD19WXFihVYWlri7e0tCM0KhUJE0AHiyB8ZGUlJSYmw4MtkMt5+\n+20ePnxIQ0MDQ0NDNDU1Cfdta2sr3d3dmJqaEhYWxt69e1m9ejXHjh175OfxB3FSMDExISwsDAsL\nC8LDw6moqEAul1NQUCDqfgkpFhYWRl9fHy4uLqxYsUIAS0tKSnB0dBQyVFgUrPzyl7+kqKiIL774\nQnTjnZycUKlUwjSzbt06IiIiKCkp4e9//zsbNmygvb2dK1eu8PLLLzM3N0dlZaUYtanVaiH1VSgU\nPPfcc3R0dFBVVSUEJNLkwM3NTWgRJH6AZO1eWFgQ/npYJDqNjIyIRa2kpIT29namp6eFnn316tU8\nfPiQW7duceDAAcrKysTDMzk5ydTUFNbW1szPz1NcXMyaNWvo6enB2NiYjo4Oenp6BBy0pqaG5ORk\n4YO4fPkya9eupaqqit7eXkH5SUhIoKGhgYSEBCYmJkSsveREXLFiBd3d3UxPTzM6OkpcXJyA2ly/\nfp2BgQEUCgUNDQ38/Oc/55133mHt2rU0NjYyNjbG2rVrRZ6mkZERubm5WFpakpSUhFwuZ25uDiMj\nI/R6vTAuhYaGYm9vj52dHY6OjjQ1NSGTyQgODsbZ2Zmenh6+++47ERt3+vRptm7dKgRuP/3pT9m1\naxcvvPACBoOB7OxsfvWrX+Hi4kJaWhoKhUIkiH/44YdERkZSWlqKUqkkKSlJPLxS7KDU9/joo4+w\ntLRELpej0WjEYr5//346OjqYn5/nzp07/wHgkpeXR3V1Na6uroSHh/Pll19iY2ODiYkJzc3NeHh4\nEBYWxqeffkpfXx+PP/44FhYWKBQKjh49Sn9/PyEhIRw/fpyenh4h5IuIiODevXvCdv/Iz+P/p6f5\nv+mSjvF37twhNjaWX/7yl/T19XHhwgXeffdd2traOH/+PKampqSnp/Pcc8+JkNK2tjbc3d05c+YM\nBoNBoNEAAgICKC8vp6OjA1jUQ2zbto2zZ8+KI+zSpUvJz89namqKV155hfT0dAoKChgfH0cmk4kQ\nGrVaLTq4Xl5eKJVKwsPDBUS0traWwcFBFhYWqKqq4vDhwxQXF+Pr6yvY/G1tbej1eoKDg7l//z6W\nlpbcu3dPcASlBUTiBvb29gpce3V1NW5ubpSVlREREUFFRQWDg4Ns374dIyMjNBoNo6OjZGVlERsb\ny4MHD1CpVAJ9r9frxTTH2dkZIyMjYmJiaG1tRafTidDbCxcuCG6Cs7Mzly9fFqOya9euCcr0rl27\nMBgM+Pj4UFZWJizh27dvp6ioCG9vb4qKikSwjbOzM76+viKYNzc3l5aWFvbv3y+IxQAdHR3U19fj\n5eVFcHAw3377Lenp6Sxfvpympiba2toEFl2n02EwGAgICGB0dJSEhAR6enrw9PTkz3/+M2FhYcjl\ncmAxCHZ8fJyBgQHeeustKisrSU5OFuXBihUrqKioICQkhJGRETw9PXF3d6e6uppPP/2U2tpa8Xvy\n8PCgoKAAAK1Wy89//nO++uor+vr62LZtGxEREXz88ccCHvvcc8+hUqlwd3dnenoaExMTrl27RnV1\nNTt37kStVmNtbY2JiYmweku/XxsbG7y9venq6sLb2xt7e3suXryIubk5y5cv5+TJk8jlctra2vjD\nH/4ggmrWrVvH0NAQVlZW3L9/X5wMH+X6QSwKWq2Wv/71r2zcuBFLS0vu3r2LSqVi69atDA0NicyB\ntWvX0t7ezo9//GN8fX0FeyE7O1tkPEg1MMCtW7fQ6/U888wznD59mn379vHpp58yODjIvn37BBlJ\nLpeLsJnTp0/j4eHBsWPH8PT0pKysjMbGRsLCwkQuoZR52dzczLZt26ipqcHGxoapqSlcXV2pr6/n\n5MmT7Ny5U4iDJLzY8uXLkcvlwumo1+sF61/KLuzr66OgoABbW1tMTEzIyspCLpeLlO3a2lpCQkJE\n1zoqKorR0VHu3buHk5MTJiYmxMbGMjAwQEVFBVu3bqW3t5f+/n5GRkZEyEp5eTm+vr40NDSg0WgE\nYuyVV14hOTkZFxcXnnvuOYaGhujt7SUsLAw3NzcGBgaIjY0lLy+PyspK4uPjWbZsGVeuXEGpVAq3\n6+zsLJaWlszPz3PgwAF0Oh1GRkaCbWFmZkZUVBRFRUXCgWppaYmZmZnAmgUGBpKfny/MPQsLC9TU\n1HDgwAEUCoXICpXw+sHBwRQUFHDo0CF0Op24t52dnWISIXXqBwYGiIqKQq/X88477xAeHk5cXByV\nlZU4OjoKjLyJiQmrVq2iq6uLAwcOoNVqxclGwvZJFO7KykpKS0uFMjE6OhpbW1uRkJ2YmMiqVavI\ny8vDw8OD4uJiNmzYgMFgwMbGRpwaHRwcmJ6eZn5+nszMTGJjY8Xk5umnn+bcuXNMTExw//59xsfH\nsbOzw9PTk4MHD4p8CGkkaWlpKVCHj3L9IBYFCwsLAgMDkclkfPvtt6SlpXH27FlCQ0MxMzNjbGyM\ngIAAnJ2duXHjBlVVVWJEIzVtbGxssLa25vz586SnpwOL4as6nY4rV64QHBzMwMAAGzZswNnZmd/+\n9rdibhwfH09UVBTJycm88MILjI6OUlZWxquvvkpKSgoTExPMzMwIYYmkPJMagtJ
uPjY2xszMjNCf\nf/PNN1hZWeHo6CiYgkVFRYKY9Je//IU9e/aIciE0NJS+vj4RIPvgwQPeeOMNqqurKSgo4LHHHmN0\ndFQkS0mLWnFxMf7+/igUCoEhGx8fJywsjEOHDjE3Nyci5AFKS0vZtWsXS5YsISAggPHxcdra2oQ9\n3dXVlampKebn54UGXyJES76KwsJChoaGSEtLY3Z2lpaWFpRKJaampuLvq9FoBL7/9u3bQl6ckpIi\nfg+ff/45s7Ozwnch5WwMDg5y9uxZYmNjmZmZobS0VJRJycnJwk4tl8sxMlpsjbW3t3P16lVUKhWt\nra0A4uFUqVQYGRnh7OxMTEwMly5d4uTJkwJFZzAYBLEoNzcXtVqNp6cnY2NjJCcn093dzYYNGzAz\nM+P9998XpKigoCAKCwsxNTVFr9dTW1uLlZUVfX19/PWvfxWW+zt37ggupiRki49fDGPfunUrtbW1\n1NTUoNVqxTPR0NAgLPDSKLiuro74+Hiqq6tpb29n9erVWFhYMDs7y+rVq7l48SJhYWHY2toyOTnJ\nv/7rvwri86NeP4hG48zMDEuWLCE6OpqOjg6+++47ioqKKC8vZ3p6WvAFLl++TH19PXFxcXzwwQcE\nBgYyPj4u6DeSj12KILO0tKSqqkqQhYqLi+np6aGurg4LCwu0Wi0lJSXU1NQIa2liYiKWlpZ8/fXX\nrF27lvj4eA4ePIitra1AvCclJYkS4+bNm8TExIi5fG9vLwqFQiwWsMgIcHNz4+HDh8LerVAoCAgI\nIDg4mK+//hpYbLJ5enpy4sQJESoiCbueeeYZwUGQmk6zs7PMzMyg1WoJCQnB0dGR2NhYjh8/zsjI\nCF9//TWff/45CwsLzM/Ps2nTJkZGRjA1NRWnCZ1OJx5+GxsbgoKC0Ol0lJSUkJWVJcoza2trGhsb\nuXfvHiYmJlRUVDA/P09CQgIjIyP09/cLsdbExIQw40i9n7q6OkZGRpieniY/P5/Q0FDGx8fR6/VE\nR0eLHf369etCOJSamsr9+/e5f/8+pqamTExMCKFVZWUl9+/fx9XVFSMjIxISEkQ2o42NjQDGSP2l\n/v5+GhsbaWxs5JNPPmH9+vVoNBqef/55nnjiCaqqqjh06BBdXV0kJiZiY2NDV1cXOp0OExMTxsbG\nmJiY4G9/+xsbN24Ux/y7d+8yODiIj48PsbGxbNy4URC033//fezs7IQ829LSkszMTOrr6zl06BDr\n16/niSeeECKwsrIyQV7S6/W4u7tz6dIlLCwsaGpqYuXKlURFRQkVaVRUFK2trZibmyOXywkICODQ\noUOEh4cTGRmJs7MzqampGAwG0SR/lOsHsSg4OzvT19dHfn4+CoUCX19fVq1aRXp6ukCyjYyMYG1t\nze7du0lKSmJqaorZ2VkRvPLqq68yPT3NM888w71794BFU8nx48fp7OwUIav79+/HwcEBPz8/0a2+\nfPkyDg4OAmrS0dGBiYkJ69evZ/ny5SxfvpwtW7YIr8Ts7CwRERHU1NQI45bkc1AqlbS1teHt7Y2x\nsTHJycmYmJiQmprKwsICQ0NDGBkZ0dbWJhSLTz75JACFhYVMTEzwxBNPiLgwMzMzQVKanZ0VyK/W\n1lZOnz4totvlcrnofTQ2NmJra8uqVatEKG9hYSFffvklwcHBIoVLiqbv7u7G3t4eExMTOjs7hYIx\nMDAQS0tLSkpK6OvrIyYmRhx9IyIicHBwICMjQ3AYIyMjqaysJDs7m/LycvF1FhYWBGB3eHiYJ598\nkrGxMTFZkBZLAHd3d+bn5zEYDLS3t+Pk5ERdXR1LlixBo9HQ3t6Oh4cH69atY+vWrajVajQaDffv\n32diYgK9Xk9OTg45OTnCFAWLJja1Wi1CYiV/gMFgIDIykh07dojSZXx8nImJCQIDA0U6WVxcHE1N\nTTz++OPcv39f8BYlHkV1dTWZmZl0d3fz7bffikZzY2MjGo2GhoYG0tPTCQ0Nxc7OjuXLl6NSqbh+\n/To+Pj7CSDU1NQUsljtr164V0u6Kigqqqqo4ffo069evZ8mSJej1enx9fdmzZ484pYaEhIheSVdX\nFwMDA5SXlwvF6KNcP4jyAaC7u5uUlBTa29spLCwkICCAhoYGEXeWmpqKh4cHCoWCb7/9lhs3bjA6\nOsqRI0dob2+nu7tbpDIlJCTw/vvvk5SUJObC8fHx5Obminl/bGwsoaGhXLlyBYkpkZ+fz+7du7lz\n5w6mpqYipaegoAArKyu2bt1KVlYWeXl5AKSnpwtMllKpRKFQ0NvbS0BAALW1tbi6ujIxMcGNGzfw\n8PBg6dKlzM/PC9qRs7OziFCDxUZoU1MTSqWSmJgYgebetGkTQ0NDyGQyvLy8yMvLw2AwkJSUhFqt\nFoyB/Px8XFxcqKioIDo6WuQ42tjYsG7dOtRqNba2tjg7O6NQKASoVWpc6vV6IiIiUKlUJCUlERkZ\nyXfffUdvby8ymQxzc3OMjIxITk4W/EULCwvWrFlDdXU12dnZ4iH19/dHJpNRW1tLREQEL730kgjE\nHR8f51e/+hXz8/M4OTmJmDjpfWBmZsaSJUuYm5tjdnaWdevW4eLiIiTXUj/lm2++YXh4WFCK3N3d\n8fDwICgoSEifz507J75ucHAwaWlpTExM8PDhQ+Es1el02Nvbi9Cdjo4ODh8+zPj4OH/5y1/YuHEj\nMTExFBcXCxCuFAZz6dIlVq9ezczMDAkJCVRVVQn4qtTo9PPz4ze/+Q0VFRXcvn1bhAdL/MqVK1eS\nlZVFcnKyGKHGxcVRXl7O8PAwLS0tyOVyvvvuO2xsbLh58yapqakolUrMzc1Rq9XMzMxw48YNgoOD\ncXBw4MGDB5ibm9Pb2yv6E496/SAWhfHxcZF7t2HDBqKiosjOzkaj0eDo6Mjx48cxGAzCG7Ft2zb0\nej0zMzOCf3jnzh1SU1O5cuWKIPjevXuX0dFR0tLSyM/Px2AwsLCwwK1btzAzM0OtVlNaWkpiYiIF\nBQUMDg6iVCrJy8tDoVDQ3NwsoBiurq6iPt2yZQvV1dWYmZnR3d3N7OwsHh4e5OXlUVdXx/z8PFev\nXhWRYE8++SSTk5Pcu3ePBw8eEBcXR29vr2g0Sa5Od3d3goKChIchMzNT/JzLli2jq6sLlUqFk5MT\n169fZ3h4GKVSye3bt3F2dmZ8fJz29nb0ej0KhUI4Aufn55mamkImk1FfX4+Pjw9FRUVMTk7S399P\nQEAAJSUlQnorjT+Hh4cZHBwUhjVpUhMZGSmETJJwSSIO//GPf8TU1JSNGzeSmZmJTCZjZGSEyspK\n6uvrefzxx3n48CF2dnYUFRUJPYQkIU9JSaGyspLKykoCAgIwNTXF3t4eKysr9u7dS1NTE0NDQ1y8\neJGRkRESEhIEf1KaQpSWlmJpaUljYyMrV67kww8/FKenyspKcnNz6e3tFaPYlJQUPvroI6EhkMlk\n9Pf34+rqyo4dO4QsvLOzk5ycHJ599l
neeustAH7xi1/Q19cnypk9e/bg5eWFn58ft27dEhZvrVZL\nV1eXiC5cv349paWl7Nmzh/Lycjw9PSkqKhKmO+lje3t7/P39uXnzJkuWLMHa2loQlioqKrh3757o\nF8zOzqLT6ejs7MTPzw8bGxvROP1vLR9kMplKJpPdlslktTKZrEYmk730/eu/kslk3f9LErX0Oa/J\nZLJmmUzWIJPJ/stoGplMxvj4OBUVFWi1WrKysgSLwGAwkJ+fL7IVFAoFGRkZrF27lqioKBGUotfr\nKS0tJT4+nrm5OWCxLJmgRPdwAAAgAElEQVSZmWFkZISuri4CAgJwdHRk48aNlJWV0dDQwK5du7h7\n966IUnvvvfewsbFh165duLu7c/78eVpbW+no6BBHXL1eL0jScrmcvLw8cnJy0Ov1Ymw5PT2NSqUi\nMzOTxsZGvL29CQsLIyAggLGxMSIjIykqKhJHXkAwE01MTBgYGECtVtPR0cHs7Cx37tyhoaGBrq4u\n8vLy0Ov1dHd3ixm1RC+S9Bz+/v54eXnh7++Pr6+vcCxKu9fKlSuZnp7G3d2dqqoqfHx8RBNOq9XS\n19dHbm4uQUFBBAYGig767t27CQsLIzw8nJUrVxIWFkZVVZUIgDExMWHHjh3MzMwwNzeHn58fTk5O\nzM/P4+HhASBm5lKtq9VqhXZCiq5bunQp3d3dJCcnMzo6ioWFhUhuam5uZmRkhNWrV+Pm5oabmxsV\nFRVidu/q6ioMWdJCLv3ee3p6iI6OJiYmhsOHD+Pu7k5LSwuOjo7s3bsXmUzGiRMnaGlp4auvvuKL\nL77Ax8dH8C2CgoIE5g4QCdCWlpaEhoZSVVWFSqXCwcGB7du3Mzs7K3wXBQUFDA8PC8GWFDa8b98+\nTE1N8fPzE++fsbExDhw4QH9/P8uWLUOj0WBnZ8cLL7xAX18fTU1NaDQacnNzkcvl2Nvbs3r1ajEx\naWhoIDAwUBDJpT7bo1yP0lOYA35uMBhCgQTgJzKZLPT7//aewWBY9v0/md8/4KFAOhAGbAROyGQy\n4//sGxgMBl5//XVUKhUXL15k//79WFhYEBQUhEwmw87OTpiQHjx4wMOHD/nggw+4c+cOdnZ2Aqs+\nNzcncg4BMb7p6+tDLpfz8ccfC6HTm2++ydGjR4mIiECj0ZCYmIi3tzd79uzh6NGjKJVKRkdHWbp0\nKSqVikuXLokba2NjIxqLUvLS6tWrcXd3x8HBgZs3b4qxnOTVNzY2JjIyksTERG7fvs0f/vAHkpOT\nWbp0qVjFDQYDTk5OlJaW0tzcLExMN27c4JtvvhHuvqeffpqwsDDq6+spKyvD39+fp556CoAHDx7Q\n2toqlJQDAwNCDu7r6yvYEFKp4OLiwpo1awS6zNraWvAqAwICcHd3JyQkRIiC2traGB0dxdHRUUyC\n/P39ycjIYGBggOPHj+Ph4YGzszOHDx8W8BWpL6LValm5ciU/+9nP+Mc//iGw5dIEZmhoiLm5Oaqr\nq5mdnSU3N5fCwkKR9zg1NcXQ0BAuLi7MzMzwpz/9iRdffFHIm729vYVsPTIyUjR716xZQ2lpKUuW\nLKGlpYX169dTXl5Oc3Mzn376KRs3buS9995jYmKCN998E1NTU+bm5njxxRfx9PQUyUtSjJ7kbJWS\nnFxdXbl69SpDQ0PExMSI0emFCxewtbWlv7+fXbt2MTQ0hK+vL5aWlqSlpVFaWsqZM2dwdXWlv79f\n9Co+//xzzp07h7e3Ny+99BKpqamYm5tz9+5dzp49y507d1hYWODzzz8nNDRUOCkLCwtxdHQkMjJS\nLKDe3t7/vQGzBoOh12AwlH3/Zz1QB3j8J5/yGPC1wWCYNhgMbUAzEP+ffY/x8XGCg4OZnZ0V9dhz\nzz2Hr68vHR0daLVaOjo6uH//vsiIsLe3F+XA+fPnCQ4OFmIZtVoNLO4OOp2OvLw82tvbSU1N5cyZ\nM/j6+nLv3j1u3LhBfX09Q0NDZGRkiDdaf38/J0+exNHRkQsXLqBQKNi7d68IAJEUgn19fcTFxfHm\nm28KzFlfXx+3bt0Sng1p15XCTB0cHHByciI4OJjGxkacnZ2xtrYGFncHU1NTlEolGzduRC6Xs2HD\nBoqKisjMzCQvLw+tVoterycqKgofHx/s7e25f/8+tbW1nD59mpiYGCIjI4W7UXKASuM2W1tbjI2N\nBaTjiy++YGhoiNu3b9PX10dHRwfR0dFMTk5ib2/P3NwcZ8+eZWZmhrNnz2JhYSHGntLpYHh4mPr6\nesbGxmhpacHU1BSdTkdHRwddXV3Y2dlhbGyMm5ubSAQfHh4WPIyenh4RrKpWq3F0dMTDw4OAgACm\npqYYGBggOzubW7duMTAwgIeHB+np6SQnJxMXF8eWLVuwtLRErVYzMDCAvb09NjY2dHd3c+3aNWBR\nZi1h3szNzamqqhJ/B2dnZ3Jycti9e7eAr27cuJGf/vSnQtlYXl4uFsDx8XHBzvjggw8AmJiY4PXX\nX8fExESQuq2srAQvQ9K2HDx4kCNHjmBlZSUakr29vXzyySfEx8eLMmr//v0YGRlRWlqKo6Mj0dHR\nQqofFRVFc3MzZWVlFBYW4uLiQnl5Oe+9957oq1hbW3P16lXs7e0pKysTvYpHuf63pg8ymcwHiAKK\nvn/pBZlMVimTyU7KZDKH71/zALr+3aep+c8XEeRyOcePHyclJYUXXniBsbExfve736HRaNi5cycZ\nGRlcu3aN2tpasrOz2bZtG0eOHGFubo4LFy5gZWWFVqsV5prw8HDxdSsqKjh69Cjr1q3j+vXrHDt2\njMLCQmpra8nPz+fbb79l+fLlxMbGYmdnR3t7O1lZWWJOffz4cZqamggNDWXTpk3SfcDGxob5+XkG\nBgZ4++23xcNQWloqTEN+fn4cOnRIpDlJGRKJiYl4enoKgpN0UmhtbaW0tBS5XI65uTkzMzP09vby\n8ssvi3HUvXv3+Pzzz/ntb3+Ll5cXZmZm3Lp1i7GxMfbt20doaCj5+fnU1NQwNjaGVqulrKwMCwsL\n5ubmGB4eprq6Gmtra6ampoiJiWF4eBgzMzMxspSO9VNTU8jlctzd3amsrCQlJYWFhQWsrKxYs2aN\noE7LZDIUCgXj4+OcOnWKhoYGrKysmJqaoqGhgYGBAYKCggR3QKPR8NFHH1FWVsaqVatYsWIFe/bs\nARDZmpJCtK6ujqCgIGHQamtrY3x8nOHhYb744gsCAwMJCAgQuo7S0lIGBwe5deuWoEvDIu6/o6OD\niYkJRkZG+NnPfkZgYCAmJiZER0djYWHBwsIC3d3dIspPCoZxcXERFuXm5mbMzMzEw7tlyxZiY2Mp\nKiriq6++ws3NjY8//picnBzB6Vy1ahWhoaGCoens7CzyOuvr64mPj+eVV17h3r17QgL/7rvvYmNj\nw44dO3j88ccxNjbGwsKC0NBQPvnkE0GDloRYTU1N7NixA1dXV3x8fNDr9ZSVlaHX60We6aNej9x
o\nlMlk1sB54GWDwTAqk8n+Dfh/AMP3//4zcPR/4+sdA47B4nE8NzeXxsZGTE1N6ezsFEGgdnZ2vPzy\ny8zOzvLHP/6Rc+fOERsby9DQED4+PszMzHDs2DGsra1pa2sjIyNDdPMl3t/c3Jyo3Z966ilee+01\nMY8/fPgwg4OD4o1kZ2eHg4MDQ0NDhIeHCzm1BCiFxYaO9EZxd3entLSU7OxsVq1axfvvv8/g4CDd\n3d0MDAxQV1fH6OgoGzZsEBLd1NRUXF1duXLlCr6+voKaVFZWxtGjR/H19SUjI0OwECSAiyQKWlhY\nYOXKlcTHx4uuuqOjI3q9ns8++4ytW7fS2NgoxltpaWm4ubnx7rvvintWVFSEhYUFS5cupaysjLVr\n1+Li4iJox5IHISwsTIwqOzo68Pf3R61WizwOrVZLW1sbQUFB+Pj4sG7dOhQKBdPT05iZmaFUKpmb\nmxOS7J07d/Laa69hYmLC9PQ0169fZ9OmTVy8eBFYjHUvLCxkzZo12Nrakp6eLlD27e3tjIyM4OPj\nw9mzZ6mpqSE+Pp7i4mJRrikUCj755BNSUlJEwhIsnsL27t3LG2+8wfvvv8/mzZt58OABjY2NlJeX\nc+HCBV5++WWCg4MJCAjg1KlT4pQgTVpOnz5NbGwszc3NREREAAijWGJiIl5eXty4cQNjY2NBvpac\nkRL+TyaTkZ2dTXNzs2gcSpF60n2GRS2Mk5MTVlZWDA0NERERwYMHD0QPRKVS8dJLL/Hw4UNu376N\nQqEgNTWViYkJkRmSmZnJj3/8Y/bv389vf/vbR300H21RkMlkchYXhNMGg+ECgMFg6Pt3//0T4Or3\nH3YDqn/36Z7fv/YfLoPB8DHwMYCfn5/B0tJS6OqfeOIJiouLqampwcjIiNnZWRISEggMDOSpp54S\nLMOGhgZ+9KMfkZ+fj6urK25ubuzevVtIZqXg04aGBnx9fUUas8QTfOqpp2hoaGDFihWiXvfz86O+\nvh6tVouzszPz8/NiVChFb8lkMqKiolAqlYyMjPD3v/+diYkJtm/fzujoKFqtlpycHDFympycJCYm\nRuxAMzMzIoMiLy+P7du3A4tjqOnpaYFck/TqkqpTEmyZmJgIWe/c3Bw+Pj4UFxfT3d1NWlqaSHP2\n8PDA399fEI4kIdH58+eF1r+5uRkvLy/Gxsbw9/dHp9MJN6nBYMDLywuNRsPy5ctZu3Ytubm5DA4O\nCpt4amqqgJTa2tpSU1NDbW0tc3NzFBQU4O7uLo67ubm5GBkZ0d3djYuLC8bGxszOzjI8PCwW8ocP\nH5KUlMT09DRqtZrq6mqWLl3KN998g0qlws/PTzhEw8LCUKvVIrErIyODzMxMVq5cycLCAmFhYYLm\nnJSUJLwG9vb2zM/P09DQwLVr13Bzc+PUqVPMzs4yPz9PbW0t9fX1TE1NYWpqypEjR7h58yYREREE\nBwfj6ekpwmD8/PwoLy+npaWF+Ph4ampqSEpKYm5ujpGRESYmJgQzUwIDSaaukpISduzYwe7du+nu\n7ubcuXPiZJOamsqNGzcYHh5my5YtFBYWMjw8TFZWlsgBycvLo7S0lEOHDrFhwwaBaXNwcECr1RId\nHc3U1BT5+fn/vScF2WKx9xlQZzAY/vXfve5uMBh6v/9wJ1D9/Z8vA/+UyWT/CiiBAKD4v/o+ko20\nvLyc69evMzMzg7u7O9evX8fc3Bx3d3fi4uLo7u7G29ubpUuXYmFhIQwwlZWV+Pr6cv78eTEl6O3t\nJTU1laCgICYnJykoKCAiIgI3NzempqbIysrC29tbSIHVarUQC6lUKhobG9Hr9SIuTWr8ubq6cvr0\naRISEpiZmRGY9NbWVpydnWlvbxdoMFtbWzZu3MjY2JjQEEh0oR07dghxDyCwbcPDw/j6+gqW5IMH\nDygqKqK6uppNmzYJhqNerxfHw7a2NmQyGU1NTURFRbFixQrUajX19fUkJSWRnZ1NTEwMk5OTYhF0\ncXERoTQTExM0Njb+h259dXU1MzMzuLi4MD09zT//+U+RTSjlHmZlZeHi4sKFCxdob28XAadbt24V\n97Gzs5OZmRkaGxsZHBwkKSmJpKQkPvvsMywtLRkZGRFd90OHDuHg4EB1dbXQGty9excfHx8RpDMx\nMYGXlxfOzs5Cgejo6CiI0U1NTVhaWrJ9+3bRX4LFvsLMzAz79u3D1taWsrIy0VOQSpzy8nJx/4aH\nh4mOjmbDhg3odDo+/vhjrKyssLe3F/0lg8HAli1byM3NxdfXVzQ3n3/+eaqqqjAxMSEtLY2CggI0\nGg22trbY2tpy//59Vq9ejYODAzExMVy4cIHjx4+jUCgAROZJa2srX375JUePHqWrqwsLCwsGBgbY\ntGkTJiYm2Nra4ubmRn9/P48//jiXL1/G2dlZxOS5uLgIxsKjXo9yUlgBHAKqZDLZw+9fex04IJPJ\nlrFYPrQDx7+/STUymewsUMvi5OInBoNh/j/7BlIuwfXr13F0dKSurk6AVqXdPCYmhqamJmpqaoSY\n6JVXXuEnP/kJxsbGTE9Pk5OTg5mZGRs2bCArK0vM2XU6Hf39/YKa/P+2997RcZZn/vfnmVGdGUmj\n3nu1utXdZMmyXORugwEnwTEQIJiEJWEhZAkJsElgiVNgWWoAA8YF994tW5ItWZIly+q99zLqGkmj\nef+wn/sNezYb73l/P+w9r65zdDweyfble57neu77ur4lKytLoAX7+/spLy/n448/pre3l+PHj2Mw\nGAgJCaGlpYWoqCiuXr3KzMwMdXV1AFRVVbFmzRoMBgOHDh3ihRdeoKqqSkiU/a1DVXl5OTdv3sTT\n05Ps7GyWLl0qdPauXr1KZGQkH3zwAXD7qVNfX88nn3wixqJw+ykXGBjIihUr+OabbxgcHGRwcJDq\n6mrRFFyxYoWYOEiShEajYXR0lKGhIerr61m4cCFVVVXY2NiIiYJerxf9krCwMC5cuEBpaSk/+9nP\nKC0tFeQyrVYrgEoeHh4MDw9z5coVkpOThQu2/NrExITNmzfT19fHc889h7W1NWfOnBFFcGBgAJ1O\nJ87MslnO8uXLee+994Q13kMPPYS9vT0dHR2Chm0wGHBwcBAsQUAY3tbW1mJnZ4eTkxMWFhY4OTlh\nZmYmYM7yUzswMJDHH3+crVu3EhUVRXh4OBYWFqJYubi4CNm1xMREPvroI/R6Penp6cLA9+jRo4L7\nYGtrS0NDA//yL//C9evX6ezsxMfHh0OHDonjQHV1Na2trbz77rtUVVUJtKJKpaK3t5ddu3bh6+uL\nwWDgxo0b4p7Iz88nLS2N7u5uocStVqt59dVXUalUVFRU0NLSwvDwMMHBwULyvb29XfBJCgoK6Ovr\n46mnnrqLW/12/MOiYDQaswHpv/jWyf/mz/wW+O3dJjEyMkJVVRVeXl6sWLGC69evs3v3bk6dOiUs\nyXp7ewVWITAwkBs3bvDTn/5U2NHfuHGD7u5uoqKiOH
fuHAALFy6ksLAQMzMz8cQyGAxMT0+LKcXc\nuXOFMezw8LCosD09PXzzzTcYDAaCg4NxdXUVlOyNGzcKGfTIyEjee+89nJ2dyczMxMTERIh6yopC\na9euZWJigqSkJGESamVlJXwc5s+fz4EDB3BwcCAkJASVSkVDQ4MwhNXpdHh4eDA4OIifnx+9vb2c\nPHmSbdu2MTU1RWhoKFlZWbi4uHD+/HkUCgXm5ubCI0M2j01MTCQ/P5+ioiIef/xxrKyscHR05PLl\ny/j7+2NnZ4efnx9eXl7odDphlBsXF0dra6toxPr6+rJ+/XomJibo7u7GYDDQ1dXFgw8+SEREBGq1\nmr1799Le3k5xcTETExPCnm9sbIyuri6hZ7F27Vr6+vpEwZ07dy7l5eXCnFbmUNja2rJ3717Kysp4\n6KGHMDc3F18dHR0cO3aMnp4eFi5cSG1tLWZmZjQ1NZGWlsbbb7+Nm5sbly9fpr29nbS0NL7++mte\neuklxsfHuX79OqampuzatYvQ0FDef/99MjIyqKqqwtLSksrKSjQaDbm5ueTn5+Pl5SVs7uSGpIxH\niYyMFMrZycnJDAwMEBAQgIODg0Cy3rhxg6VLl1JcXIwkSTQ0NBAeHk5/f7/oKQA89thjfP3110RH\nR5OZmSkMbNatWyfEd21sbMREJisri8rKSrq6uli/fj1nzpwROBMZhXs3cV8gGlUqlSATTUxM0NHR\nwbx585AkiZycHCE84eHhwZ///Ge+//3vCwMSKysrXnjhBX72s58JZJe1tTVw+8nb2trKmjVruHr1\nqqATK5VKVq5cSXx8PIcPH8bDw0MwCLu7uwXoKT09nZaWFurq6hgdHRXTB3nn4ebmxujoqPCI1Gg0\nAnQzPDyMtbU1Xl5eAuHm4uLC/v37iY+P5+jRowIf4e/vD9xuWskTgjVr1ognelFREcPDw0xPT7N+\n/Xr27dvH0qVLhYjLpUuXsLa2RqvVsmnTJsrLy8U4cM2aNbS3t+Pi4sKxY8eE0IjMRFSr1WzcuBGt\nVouzszPe3t5iLGpra0tzc7OALk9MTODh4SFMXWNjY7GxsRGy61lZWeTn5xMcHMyNGzcIDw8Xgrey\nF2R5eblQGJ4/fz4nT57E29tbsCQbGhqEdoJOpxOj36amJhwcHPD09OTf//3f8fPzY+vWrdTU1NDU\n1ERCQgJTU1O0tLSIDr+spAy3Va2ef/55vvjiC2ZmZrCwsGDnzp309vaiUChE41Tuh8hjZz8/P9LS\n0igrK6O3t5etW7dy8eJFwSWIiYnhz3/+M88//zwTExNEREQgSRITExNiJDgyMkJ0dDT+/v6iwHzx\nxResX7+e1tZW5s+fT1xcHJcuXRLHqNzcXE6dOkV6ejr29va0tbUJpe7r16+TnZ3N+Pg4Hh4ewtdS\nfjDKZsDj4+NMT0+Tk5MjaON3E/eFl+Tbb7/9m6qqKjZs2MCtW7dISUnBxMSE8fFxAgMDuXLlCrGx\nsVRVVZGeno5arWbx4sUcO3YMf39/YVdfUFBAdXU1GRkZfPXVV/j5+QnIrqurKx9++CHz5s0TN31i\nYiI+Pj64ubkxMjJCfX29KAJhYWH89a9/JTQ0FFtbW5ycnLh8+TI5OTkkJyezceNGfH19yczMxNPT\nk7a2NuLj44VMW3Z2tniiNjQ0iGnF4sWLMRgM1NXVERAQwJ49ewRb85lnniEvL4+kpCTq6uoEsUu+\nKLq7uzl48CBeXl74+vpSU1ODmZkZly9fpqWlhaGhIUZHR3Fzc6O2tpbk5GSGh4dpbm5mfHyc0NBQ\n+vr60Ol0lJeXC+JPYWEhBoMBMzMzduzYIdyzVSoVLS0tojGWkpJCZ2enkKaXadAVFRVi9zV37lwu\nXLggAEh5eXlYWFgQEBBAfHy8cOEaGRmhtLQUf3//b3kzyurDjo6OHD16FBMTE2HF7urqip+fH2Fh\nYQwPD4sjm0z+mZiYID09XZDOZEaprKzt4uIiVKVVKhUhISH4+vrS09MjZNpcXFwIDw9nenqap556\nCoPBQH5+PtXV1fj7+zM+Pk5GRgYmJiZ8+eWXREZGEhkZyfnz5xkfH6ehoYHHH3+cP/3pTwQHB9PS\n0kJGRgZqtZpr167h6+srCp/ciG5vb8fT01MAxnbu3Mn777+PXq9ncHCQoaEhIfkmf2YqlYrS0lIs\nLCyEl+bChQtRKBQCWapWq/Hy8sLKyopLly5x48aNu/KSnFVzno3Z+P9P/O9Rcw4ICOD3v/89JiYm\neHh4UFtbi1arpb6+nujoaLFF7ujo4OGHHyYnJ4euri42bNggLLvPnj0rLNTs7e158skneeGFF8To\ny8TEhImJCR5++GE+++wzwXNPT09nbGwMR0dHBgYGsLOzo6ioiJGREZydncUc38vLS8CjX3nlFayt\nrRkbG+PixYuEhoYKMlNDQwN2dnZ0d3czPj5OS0sLS5cu5fTp04KnMT4+Lhywrl+/TlxcHM8++ywf\nffQRCoWCnp4eTExMxNlQRnP6+vri7u6Oqakpg4ODKBQKbG1tOXDggGDjabVahoaGhDRYeXk5er0e\na2trvvnmGxYtWiQIPlVVVVhZWWFubo5Wq8Xb21sIgMieF25ubkxNTdHT0yPO77KdvCzKMjMzw8jI\niEBRKhQKuru7CQgIoKOjAysrK+F4bWZmhoODAyYmJpSWlmJlZUVjYyMajYZXX32Vv/71r/j6+jI0\nNMSVK1eIjo6mtLSU9PR0vvnmG0ZGRoiNjaWiooKYmBhOnz7N448/TktLC+3t7QQHBwuD3NraWurr\n63nttdf4y1/+QmBgIMePH8fPz4/p6WnUajX79u1j8+bNdHV10dXVJXwW5s2bx5EjRwRnRUZEyk9q\ne3t7fvnLX7Jjxw7s7e3F/6uyspLy8nJOnDjBD37wA77++muefvppsev95S9/ya9//WteffVVnn76\naQFoCwkJwdzcHFdXV5YtW8bTTz9NZmYmERERZGRkkJ+fj729PfX19QQFBQnlJ7mx2Nvby9DQEI6O\njpSXl/Paa69x+PBhhoaGaGlpYdu2bSxbtuyu7sf7Qk9hYmJC4OU//fRT/Pz86OzsFC461dXVaDQa\nnnjiCTIzM/Hz8xPnTrVazfnz53FxcREjN9kJWvYk9Pf3Z+XKlQQFBXHkyBECAwMFBr++vp7h4WFG\nRkaYmpqitbWVBx98kO3bt1NRUSGky48ePSpEVtrb27ly5Qrt7e1s3ryZkpISAaEtKiri+vXrhISE\niN7D1q1baWpqoq6ujqVLl2JlZUV5eTk+Pj489dRTgt47NTVFfX09kZGRhIeHYzAY2LNnD7a2tmLE\nqVarhRP1rVu3qKysRKvVkpaWhqenJxYWFtTV1Qmdh7GxMXJzcwH4xS9+QUtLC+bm5qhUKiFOkpCQ\nIDruOp2OkZER1q5dy/T0tOjCh4SEYDQaqampISYmRsCzZXEQf39/HBwcuHXrFnq9XqhVdXR0iH7H\n5OQkAwMDH
[... remainder of base64-encoded PNG image data elided ...]",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1383d654d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvHl00+ed//vSYlm7Le/yvuMdvAC2icGA2cFAAkkgk6RN\n0yVJ23TapJ3k157SZpJOS9omk9N0SZukSVizgZMBDBgwtjEGbPC+b/K+yrIkS9Z6/+D6e6Z/zJnc\ncydncu/h/Q/YR0hCep7P8/m8l0fk9Xq5h3u4h3tYgvh/+w3cwz3cw1cL94rCPdzDPfwD7hWFe7iH\ne/gH3CsK93AP9/APuFcU7uEe7uEfcK8o3MM93MM/4EsrCiKRaKtIJOoUiUQ9IpHoX76s17mHe7iH\n/1mIvgyfgkgkkgBdwCZgGLgJHPB6vW3/4y92D/dwD/+j+LI6hVVAj9fr7fN6vQ7gOLD7S3qte7iH\ne/gfhPRLet4IYOg//TwMrP6vHqxSqbxisRh/f398fX1xOp1YLBYkEgkqlQqj0YhSqUShUKBQKJie\nnkatVuN0OnE4HLjdbnQ6HVarFV9fX2w2G2NjY/j7++Pj44NUKmVxcZGAgABsNhu+vr7Y7XYcDgc6\nnY6FhQV8fX0xmUxIpVKUSiWTk5P4+fnhdrvRarUYjUZ8fHwYGxsjJiYGuVzO3NwcQUFBTExMEBAQ\ngNVqRSQSCe8/NjZW+J1IJMJsNiOXy3E6nahUKmw2Gy6XC7FYzPj4OBEREUilUnx8fLBYLEilUpxO\nJz4+PsjlciQSCQ6HA5FIhMfjwWq1olKp8Hg8SKVSrFar8P91uVw4nU5kMhl2ux1fX18kEglSqZSp\nqSl8fHyQyWTIZDIcDgculwulUondbhfen9vtxu12IxKJUCqVmM1mxGIxYvHds0Qmk7G4uIjD4cDf\n35/FxUVEIhESiQSr1YparWZhYUF4XgCLxUJQUBCLi4soFAp8fX2xWCxYrVZmZ2dJSkrC6/XidrsR\ni8U4nU7h+3G5XLjdbpRKJRKJBLfbjVQqxe12I5PJEIvFGI1GtFotdrsdl8uFSCRibGyM8PBwfHx8\nUKlUmEwmXC4XWq2W+fl5AgICkMlkzMzM4PF4kMlkwjpSKpUYjUb8/PxYWFhAJpMBYDKZmJmZITg4\nGKfTiVqtRiqVYjKZkMvliEQivF4vXq8Xh8OBVCrFYrEQEBAgfDdyuZzJyUm0Wi1erxexWIxKpWJw\ncJCQkBDsdjtKpVJ4Drlcjt1uR6FQYLVahbVqs9lwu90oFArhcxOJRNjtdiQSCSKRaGmNTXu93uD/\nbvN+WUXhv4VIJPoW8C2A4OBgfv7znxMWFsbt27epqqpi7dq1BAYGEhUVRV1dHYWFhXR1dbF+/Xo6\nOjrIysriyJEjDAwM8M///M9cv36djo4O1Go1RqORd955h3feeYfDhw9z//33c+XKFdLS0khNTaWq\nqooVK1Zw584dnE4nmZmZNDQ0EBkZKWx4hUJBV1cXYrGY0dFRcnNzaWtr4/XXX+fo0aNcvXoVh8NB\nRkYGbW1tWCwWCgsLuXr1KjMzM4yMjPCTn/yE8vJyDAYDCQkJpKWlcfPmTdavX09jYyNut5tHH32U\nrq4uduzYwTPPPCMUwOzsbGQyGb29vaSnp9PY2EhISAjT09OsXLmS+vp6ABYXF5HL5cK/GxwcJCYm\nBoC0tDR6enoYHx/H7XazefNmampqiI6OZnBwkImJCUpKSmhqaiI1NZWenh7hz8HBQQICAsjIyBA+\np9DQUORyOX19fYSGhnLu3Dn27NnD1NQUer2ejz/+mKysLADS09NpampCKpWiUCgICAjAYDDw5JNP\n8tprrxEZGYnRaGRycpLAwEASEhL40Y9+xF//+ldOnTpFWFgYmZmZLC4uMj09jclk4u233+bBBx9k\nz549nD9/nqSkJPr7+8nLy2N2dpbly5fz29/+lvz8fKampnC73Rw5coSxsTEee+wxSktLCQ4OZnZ2\nlmPHjqHRaFCr1bS2thIVFcWqVatwOBxUVlby1FNP0dzczMzMDDU1NeTm5jIyMkJSUhIZGRl4PB7W\nrVvHo48+ytzcHImJiXz++eekp6fT2trK5s2bCQ8PZ3x8nNDQUFpaWtiyZQsTExMMDQ3hcDgwGo0s\nLCxQWFiIXC7n+vXrREdH88Ybb7Bnzx7kcjnJycnCYTQ9PY3H4yE4OJiuri50Oh12u53Gxkb2798v\nFIKlYh0QEIDFYqGxsZGAgAD++Mc/Dn6RvflljQ8jQNR/+jny//6dAK/X+xev15vn9Xrz/P39GRwc\nRKFQkJaWxhNPPCGc6gMDA/j4+HD79m1efPFFxsfHMZvNvPfee2i1Wh544AF+8IMfEBQUhMfjwev1\nEh8fD8CFCxfo6urC4XDw1FNPkZCQQFJSEv/0T/+EXC4nMDCQH/7wh0RFRREREYHL5SIhIYGEhASq\nq6vp7OxkYmICpVLJ+Pg4iYmJANy4cYPs7GyCg4OZmZlhdnaW0NBQTCYToaGhuN1uHn/8cdra2vDx\n8UEsFlNaWoqvry8PPfQQ09PTFBQUMDMzQ3l5ORKJBIA1a9YwNzcnLM6FhQVu3rzJ+Pg4TqeTvr4+\nCgoKGBgYwG63U11dTXt7O1evXsXr9TIyMoJSqUStVqNWq2lvb8dgMNDd3c3i4iJVVVW0t7cDYLVa\n+e53v0tvby9FRUVcunQJj8dDRUUFVVVVXLx4kbGxMaKjo8nMzCQ2NhYfHx8GBgYQiUT4+PhQVFTE\nwMAATqeT/v5+li9fjkqlws/PDx8fH4KCgvDx8SE+Ph6VSkVISAi9vb1kZGQwOTlJVFQUgYGBKBQK\npNK755PT6cRqtXL27Fl8fX0JCQlhZmaG/v5+Vq1aRVBQEIODg8THxzMwMEBSUhJGo5Ho6GiGh4fJ\nysrizp07jI6OEhISwh//+EcAYmJiKC8vp62tDbvdzve+9z0yMzN56qmnmJiYYH5+nmvXrvHJJ5/w\n5JNPEh0dzczMDDabjY0bNxIdHY3T6WTdunV0dnZy9uzZuws7MpJvfvObzM3NsXPnTuLj4zl06BCx\nsbG43W42bdpESUkJ3/72t7Hb7ej1eoqLi5FKpTz00ENs3boVs9lMcHAwBQUFjIzc3Sazs7MYDAY6\nOzs5f/48nZ2dKBQK9Ho9XV1dNDc3c+3aNdRqNVu2bGF6epqGhgZqa2t54YUXmJqaore3l9bWVsLC\nwtBoNF94835ZReEmkCQSieJEIpEMeBgo+68ebDKZADCbzXi9Xn7/+98jkUgIDQ0lOzubnTt3Ulxc\nzBNPPMHMzAw9PT14PB60Wi1+fn6sX7+enp4edu3aBYC/vz8AGo2GF198EY1Gw/T0NElJSXR2dhIa\nGorT6WTz5s14PB4efvhhQkJCiIqKQqFQkJiYyM9+9jNiYmIoKSnBx8eHuLg4PvzwQwCWL1+O2Wym\nqakJX19fSktLyc7OZn5+HqvV
SnBwMOfPn+fo0aOUlZWh0WhobGwkJiaGxsZGNmzYQFhYGPHx8axc\nuZIlsrerq4uEhASGh4exWq1ER0eTn59PY2Mj4eHh6HQ6vF4vISEhdHV1cebMGZqbm1EoFHi9XpKS\nklizZg12u53Z2VkyMjIYHBxk27ZtREREoNVqWbNmjdB9dHd34/V6qa6u5tKlSxw/fpzKykqqqqqY\nnp7mypUrjI2NoVAoCA0NZfny5WzevBmXyyWMXwaDgbCwMCwWCwcPHmR8fByFQsH4+DgSiYTJyUmq\nq6uJjY1leHgYl8vF73//e+RyOenp6Rw4cIDVq1ezZs0aAOGUzc3N5fTp05SVlSESifDz82Pnzp0E\nBgZy69YtzGYzy5YtQy6Xo9VqOXr0KH/6058YHh5m3759yGQy2tra6OrqAuDDDz8kMjKS8fFxpqam\nMBqNhIWFcevWLfLy8hCJRGg0GrZt20ZXVxdVVVWIxWLKy8u5efMm/f39ZGVlcezYMUJDQ0lNTQXA\n5XLxxhtv0NzcTGBgIFVVVXz66aecPHmS0dFR5HI5Fy9e5PPPPxfGsV/+8pfEx8dz584dAgICiIqK\nYmpqColEwre+9S3g7pgll8uRSqVER0ej0WgwmUxMTEyQnJzMN7/5TR5++GHcbjdqtZqqqirOnz/P\nrl272Lp1K2lpaSQmJuLr68uqVas4efLkF968X0pR8Hq9LuC7QDnQDpz0er2t/9Xj5XI5brcbk8mE\n2+0mNTWV4OBgNBoNb775Jp2dnYhEImHRr127lsrKSsLDw5HL5cTGxtLe3k5vby9qtVqY+9xuN0ND\nQyQmJiKVSrHb7cKsZrFYuHnzJkeOHOGll15i3bp1pKWlMTExQXl5OWVlZSQnJ3Pt2jXm5uaYmJjg\nz3/+MwBGo5HAwEAOHDiAwWBALpfT3d1NWloaycnJxMTEsH79erKysjCZTCQlJbFt2zZu375NWFgY\nfn5+2Gw20tLSuHDhAsPDwwDMzMzgdDoZGxtDq9UyNHSXlgkKCmL16tUUFxfT3t7OmTNnyMzM5PTp\n0xQXF+Pv78+uXbuEllOlUhEeHs7FixfJy8ujra2N5cuXExISwq1bt4QCVl5ejsViYXFxkVWrVvHN\nb36T7373uzz99NM89NBDvP3222g0GoKCgpBKpXR1dXH48GHCwsKw2+2UlJSwbds2LBYLPj4+nDx5\nEl9fX2QyGbt27aKzs5PY2FhmZ2f505/+hNVq5fe//z1xcXG43W7Gx8cZGhpCq9Vy5coVAJRKJVVV\nVcTGxvLAAw+wbt06AgICKC0t5erVq9jtdh588EH6+/sFLiElJYXFxUVmZmZQKpV0dnbicrnYvHkz\nWq0WgEcffRSDwYCvry9TU1P09/djMBgICgoiOzubzs5OCgoKMBqNlJeX09/fj1ar5ec//znLli0j\nNDSUnp4eQkNDqaur4/z58wDcunWL2NhY1q5dS01NDREREVRUVGC32/F6vVy7dk1Yexs3bkQikRAY\nGMipU6cQiUTU1tZiMBgYHh5mYGAAg8EAQEBAAFlZWSiVSrq6ukhKSsLPz4/Z2Vl8fX1pb29HpVKh\n1+u5ceMGANu3b2dycpJly5bR3d2NRCIhJSWFzs5OnnrqqS+8f780TsHr9Z4BznyRxzocDjIzM6mq\nqiI1NRVfX18qKirYtWsXVquVsbEx1Go1oaGhFBQUUFxczJ49ewQSrK2tjeeee46EhAS6urqEE33F\nihW0tbUhkUhobGxEr9cLc+eyZcvQ6XTCLF5fX090dDSjo6NMTk7idruJi4vD19eXtrY21qxZw507\nd4C7ReHKlSscOHAAq9XKW2+9hclkorm5maCgILZs2YLNZkOhUBAVFYWfnx+vvPKKQGZ1dHRw/fp1\nxsbGBB4DQKVSMTAwwNq1a4U50t/fn+vXr6PValm5ciWrV6/G19cXpVJJW1sbpaWlTE1N8fHHH7Ni\nxQphBOnv7yc0NJSYmBgsFgtvvPEGa9euRSqVUldXh0wmo7W1la1bt6LVapmcnEQsFmM2m7FarYjF\nYi5dukR4eDhJSUnY7Xbm5+cpLS1FLBZz69YtkpOTiYiIoL6+nqSkJADu3LlDfX09N27coKCggJiY\nGAYHB9FqtQQFBZGTk8PQ0BBJSUkMDQ2RmZnJpUuXWLZsGQA9PT1MTEywefNmUlJSmJ2dRaVSMTEx\nwbZt27hy5QqHDh3CYrHwzDPPUFBQwGeffUZzczMLCwt88sknpKSkoFarUSgUVFVVAdDe3o5Wq2V2\ndpYDBw4wODjIyZMnEYvFfPbZZ6xfv56RkRFCQkJ47bXXmJiY4OjRoxiNRnJycvjRj37E9773PVau\nXCmMs0eOHEEqlbJmzRoMBgNKpZLIyEgcDge3b9/GbrcTFxfH4uIiarWaO3fuUFdXR3Z2No8//jgt\nLS3U1NSQlJTE+Pg4NpuNc+fOATA4OEhYWBgmkwlfX18uXryIn58fcXFxzM/PU1BQwPz8PBaLBT8/\nP8LDw1m9ejXh4eE0NjbS0NCA0+kkPj6eoKAgJicnv/De/Uo4GsViMZs3byYnJ4f09HR27txJQEAA\n165dEzZNZGQkMzMzyOVyMjIySEhI4MaNG7S0tBAaGkpoaCgXLlzgk08+YefOnQC8/fbbzM7OcuXK\nFeE0s1gsjI+Pk5CQQHJyMnFxccKse+fOHXbu3ElQUBArV65EpVIxNTXFqlWrmJ+fZ3DwLk/jdDrZ\nt28f165dIygoCIvFgkKh4Ny5c8hkMqqrq6mvr0cqlTI9PY1KpWJ+fp5ly5YRFBSEwWAgODiYoqIi\nHA4HSqUSAIVCQX5+PrOzs/T09Aitb2pqKnq9HqfTSUhICBKJhMuXL5OYmEh8fLww72dmZtLT00NE\nRAQ5OTloNBrCwsLYuXMna9euxWKxkJeXh7+/P/Pz8/j7++NwOJiZmcHPz4+pqSmmpqYICAhg//79\nGI1GhoeHBeIzKSmJqKgoWltbBeKtra2NgoICcnNzGR0dZXFxEYlEQlRUFLm5uVitVnQ6HTabjfb2\ndsxmM6tWrSIlJQWlUsn8/Dyjo6NCp5Cfn8/TTz9NW1sbp0+fprGxkfLycsxmM0NDQzz66KN84xvf\n4Gtf+xp6vZ6AgADsdjv9/f0UFRWxY8cOtm7dyuOPP05OTg5yuRy4S2aXlpaSlJTED37wA86fP096\nejq9vb2kpqayfPly9uzZg1Kp5I033uA3v/kNUqmUl19+mT/+8Y8kJyfjcDhYXFzk+eefx8fHB4DE\nxESqqqooKytj69atOJ1OOjo6WLZsGd/4xjd48MEHWbduHbGxsTQ1NWE2m4mKiqK9vZ25uTnm5+e5\nceOGoIpt27YNgNLSUtxuN+Hh4UilUpYtW4ZYLKalpQWtVsvAwABGoxGxWMzg4CAWiwWdTseVK1fo\n7+8XCPe+vj6mp6f/v1cURCIRx44dY9u2bej1enJzc1EoFCwuLjI8PMzOnTtpaWnBbDbzy
iuvEBwc\nTGBgIKGhoSQmJrJmzRq6u7vJzc0lNzeXq1evArBlyxaio6PZtGkTVqsVh8NBQ0MDhYWF3L59m4aG\nBmFz2O12enp6mJycpKCgAKfTSX19PeHh4XR1deF2u7l9+zYAer2ejo4OtmzZwurVq4UvcNOmTUxN\nTQktsdVqJSwsjMLCQvR6PZGRkWzevJkNGzag0WhwuVz85Cc/EeS6np4ejh8/TnZ2NuPj40RGRgoy\n6czMDHa7nd/85jfI5XKKi4vp6enh5s2bDA8PExISwtTUFCqVira2Njo7O5HL5Rw/flw4fVNSUkhJ\nSWHNmjVER0fz9NNPs379emJiYpiZmRE29Jo1a5idnSUmJoaFhQUqKyuZm5tDp9NhMBjYu3cvISEh\nqNVqUlNTkclk9PT0EBcXx4oVK0hJScHHx4eenh5UKhVqtZqEhARkMhl9fX1ERkZiMBiIj48nJSWF\nHTt2kJ+fD8D09DTnzp2jvb2dCxcuUF9fT2BgIPX19TQ3Nwvjz2OPPYZcLsdisbCwsMB9992HzWYj\nPT2d2NhYzGYzp0+fJiQkBIDr168TGBhIWVkZmzZtYnJykvT0dFavXo1CoWDNmjWCRCsWi2loaGBm\nZoYHHniAjIwM/Pz8aGxspLa2lvz8fIEH02q1LCwssHPnTqanp9m6davw/S+RpzMzMxgMBmZnZ1mz\nZg1arZaenh6ysrIQi8VIpVLa2to4e/as8LwSiYQHHniAEydOcPv2bcRiMTKZjPz8fHQ6HcPDw/j7\n+2M2m6mpqeHcuXP09/cL8mdWVhZpaWmkpKRgMpmEA+2L4H9NkvzPsFqtzM3N0dHRQXt7O11dXWRl\nZbF9+3YkEglPP/00qampArFYX1+PTCajqKiImpoarFYrBw8eZGhoiLKyMuF0GB0dRSQS0dPTw4YN\nGwgJCcHr9XL58mWB9c/KykIikVBUVERKSgrXrl3D5XLR19fHzp07qaurY/fu3VitVuLi4gAYGRlh\nZmaG+vp6urq6sFqtNDc3U1BQwN69e/nDH/7Ad77zHd555x0+/fRT3njjDZ544gkyMjL4t3/7NzZu\n3EhSUhJms5mysjKWL18OQG5uLr6+vrS2tgpafFhYGENDQ2zcuJG2tjYSExOx2+20tbXhdruZmJhg\n3bp1ZGRk0NnZSV9fH3K5HLFYzPz8PEqlkr6+PmQyGTdu3ODxxx/H5XKxY8cOKisr0el0+Pv78+yz\nz9Le3s78/Dzt7e3MzMxQW1vL6tWrSU9Px9fXl/HxcRYXF6mrq2N0dFToNNxutzAKLak/1dXVNDQ0\n4PF4iIyMFGTXyclJzGYzUqkUj8dDZ2en4EkBmJiYICYmhpUrVzIwMIDH4yEiIoKmpiaUSiW9vb3E\nxsbS1dWF0+nk9ddfB0AqlbJr1y78/f25ePEiAwMDFBYWkpubC9ydt3t6enjuuefo6+tjcXFRkHXj\n4uKwWCxcu3aNlpYWzp49y549e1i9ejXf/va3KSoqYvPmzbjdbgYGBggKCiI8PBy4W8gVCgXd3d3k\n5OQwNTXF97//ffz9/bFYLPzud7/Dz88Ph8NBUVERnZ2dTE1NER4ezokTJ0hOTha4o2XLljE+Pg7c\nHVH/z//5P9jtdqKiohgaGhI6kaX3PDk5KRDmKSkpPPfccwQFBZGSkoJWq+Wzzz7DZDKxdu1asrKy\nKC8v/0L78SvRKeh0OoEBnpycJCIiApVKRUBAAHl5eQDk5eWxZs0aBgcHSUpK4vjx43i9XoqKijCb\nzfzyl7+kqalJ2BCAQESOj49TVVWFv78/lZWVHDlyhJGREaRSKenp6QQEBNDQ0IDD4WBkZITGxkYk\nEgkGg4G0tDRcLhfT09M8+OCDACwsLCCXy5mdnaWkpIQTJ05QUlIizPMbN27k6tWr9Pf3ExsbS1pa\nGjt27EClUuFyuVhYWGD58uXIZDJiYmIEorG/v5+MjAyBRLLb7QQEBKDX66muriYwMJCOjg5SU1OF\n15PJZPj4+BAeHk54eDhisRgfHx8iIiK47777UKvVJCYmEhgYiNfrxWazUV1dzdtvv01bWxvl5eX8\n7Gc/Y3JyUlBPjEYj2dnZ3HfffaSkpAAIcuzCwoLwPp1OJ8PDwwQFBbFp0yYCAwMZHx9nZGSEvLw8\ngoODhdMwNTUVtVrNyMgI4eHhJCQkcO7cOcH0tLRBc3JyuHjxIna7ncTERDo7OykqKkKn07Fq1SrE\nYjEmk4n09HTi4+PZt28fIpEIo9FIRUUF3d3dTExMsH37dgIDAwV+6fz583R3d3PixAmOHDlCVFQU\nPj4+LCwsEBwcLJCPS8ansLAwxsbGWLFiBQ8//DDFxcXcuXNHOID+8Ic/AHeJUZFIxN69e0lKSmJ2\ndhaTycTw8DAfffQRc3NzWCwWbt26hUQiwel04vF4MBgM2O12jEYjo6OjKJVKQkNDBTn92rVrPPHE\nEzz22GPk5uaSmJjI1atXUSqVOBwObDYbJ0+epK6ujrm5OWFMXVLZRCIRRUVFFBcXY7VaycnJ+cL7\nUXLo0KH/N/v5fwSHDx8+FBAQQE9PD/Hx8UilUtavX4/BYGB8fByxWMzMzIzgQOzt7WX9+vX09/fz\n0UcfCa2aRqMhOjqauLg4ysrKeOSRRwgPDyciIoLIyEiSkpLQarWcOXOGzZs3s2/fPnbv3s2//Mu/\nkJaWRkxMDAkJCeTl5eFwOOjq6qKzs5OxsTEOHDjA2bNnuXz5MitWrCAnJ4fLly/T0NBAQUEBarWa\nqKgoVCoVCwsLrFq1Co1Gw+7du0lMTBScateuXcNms7Fy5UpmZmYwmUyUlpby5ptvUlJSwvT0NCKR\niICAAMFgNTs7y9q1awWH21J72tTURHx8PHK5HLPZjNPppKGhgaCgIMGh2dvbi81mE2Z4f39/Wlpa\niIuLY3Z2lj/84Q8kJiayf/9+lEolfn5+yOVyNBoNzc3NeDweMjMzmZ6epr29ndu3bwuafWJiIj09\nPQQFBTE7O8vExIQgHy65/DZu3MjQ0JDgygsKCmJ8fJzBwUEmJycJCQkRTDuVlZXExMSwYsUKYmNj\nOXHiBNu2bcNqtXL9+nUkEgkbNmzg888/x+VyMTs7K3wf0dHR1NTUYDKZaGhoYPXq1QLZeOrUKT74\n4AOOHj1KYWEh9fX17N+/n9jYWKKiopiYmOD27dukp6fT2dnJQw89hNFoFDrDPXv2EBAQwPz8PCMj\nI4IS9vHHHxMbG0tGRoaw+ZdcqsHBwSwuLgoKi0ajITs7m7Vr12Kz2QAEV+n27dux2WwsW7aM+vp6\nGhoaeOKJJ3C5XFRXV6NUKomOjkYsFuN2u7l58ya+vr5s2bIFt9uNXq8nOTmZxcVFgVsYGxtjfHyc\n4OBgVCoVt2/fprW1dezQoUN/+e/241difJDJZKhUKmZmZhgeHiY3N5e//e1vGAwGPB4PCoWC1tZW\nYmJi0Gg0REVFCfNsRESEwPZOTEyQl5dHY2Mj
cFfV6OjoICEhgYCAAIaGhujp6eHHP/4xa9euZWRk\nhOrqal5//XUsFgunTp3C5XKxYsUKRCIRJpNJkA/b2towm83AXbdeR0cHoaGhOBwOsrOz0Wq1qFQq\nfve737F69Wpqa2tRKBQ0NjayZOF2uVxERkYSEBDAp59+il6vZ2ZmhhMnTgB3Cdfp6WnEYrGw4HNz\nczGZTFRXVwsMe25uruC16OvrA2B+fh69Xo+vry/T09OCqUiv15OUlCQQk5cvX2Z+fp7PPvuMY8eO\nsX37dvr7+zlx4gQDAwOkpaWh1+s5f/48Z8+eZffu3Zw6dYro6Ghu3LhBcXEx4+PjwskkkUjQarVU\nVFQIJGJJSQltbW18/PHHmEwm1qxZIygW8fHxdHZ2cvr0aQwGA4uLi2i1WiwWCwBhYWF88sknuN1u\nsrOzMRqNZGZmClZ2sViMXq/HZrMxPT1NdnY2YWFhiEQi5ubmBEt1e3s7GRkZ1NXVAXdJ3D179hAS\nEsLOnTsxm828+eabZGVl0dPTQ2RkJAqFgieffJLp6WkSExNZvny50DmmpKQgFovZsWMHBQUFfPDB\nB8JaqKqqIjQ0lAceeICJiQlBedJoNDQ1NZGZmSnYk2tra1m5ciUOhwOLxcLw8DAej4eZmRni4+MF\nDqS3t5fHzVLNAAAgAElEQVS4uDji4+NRq9UAJCcnMzIygtPpJDg4GLlcTmZmJh999BEzMzNkZGRw\n8OBB/P39OXnyJCqVioiICKEIfVF8JcaHJcNPRkYGERERLFu2jPXr1wte8E8++YSmpiYiIyMFyTI6\nOhqbzcbi4iLvv/8+CwsL7N27F5lMxvz8PHD3g92xYwc5OTn/UG2X5sD6+nq8Xi+dnZ28++67wt/d\nbjcRERHodDokEgmrV69mcHBQMBm1t7czMjKC2Wxmy5YtxMTECKzyki4eGRlJeXk5v/jFL+jp6WFx\ncZGQkBDy8/PJz88nJiaGubk53G63QDSaTCYSExMFxl+hUDAyMkJtbS3+/v6kpqaSnJzM3r17MZlM\nTE5OotPphDzIpUuXGBsbw2g08rWvfY3o6GgsFgvt7e3ExMRgMpkwGAw0NzezuLhIY2MjN2/e5J13\n3sFisZCamorD4SAmJoawsDBBtzcajUxMTKDT6YTMwlI+YkkVCQ0Nxc/PD7vdTm1tLWazGZlMxsDA\nACdPnmRwcJC0tDRycnK4cuWKYB13OBwEBgZSVFQEQFJSEnFxcXznO98hJiaG+vp6PvnkEyIjI2ls\nbKS+vp65uTn0ej1er5cNGzbQ2trKxYsX2bVrFyEhIXzrW99CIpFw/fp1wdBWW1vLjRs3UCqVwui2\natUqQcrbu3cvOp0OjUbDRx99xIcffkhHRwfl5eVUVlZSV1cnFIa//vWvgtQ5OTlJTEwMycnJVFdX\nIxaLMRgM+Pn5CQRrR0cHFy5coKKigrm5OSoqKpienhYk6+DgYLxeLzU1NYJPoaOjQ5Dct23bhkaj\nITw8HL1ez+3btzl//jxGo5GbN2+SkpKCXC6nra0NsVjMiRMnyMnJoba2lomJCSwWCxMTE194P34l\nioJWq6W2tpbGxkYqKytpaWmhuLiY0tJS9u/fj8ViIS4ujqqqKrq6uli2bBl9fX1MTk7y6aefUlBQ\nQEZGBj4+PhiNRpqbm4G75qWlbiE2NhapVEpSUhKrVq3Cbrej0+nweDxUVlYKDL5EIuH48eMMDw8z\nNDSEwWBgaGiI+Ph4HA4HABEREWRmZrJjxw56e3vp6+vjxo0bLC4usnv3btLT0zly5AhKpZL8/HzB\n/6/RaBgbG+PKlSt0dHQgk8kEBQPAbrezZs0aQd2Ijo4WnIg6nQ6tVktAQIBgaOnv7ycyMlJYYDk5\nOXg8HtxuN+Xl5axduxaHw8HAwAAKhULgDJa6sVdffZXPPvuMsLAwnn/+edLS0rBarSgUCjIyMigo\nKECj0eDxeJiamkIqlf6D3XZ0dBSr1crMzAwTExNoNBpu3LhBZWUlfX19bNu2jejoaKxWK/X19aSl\npSGTyRgZGWHTpk309vYSGhqK3W5nbm4OgA8++IDMzEwcDgfNzc2kpaXR3NzMqVOnSE9PJzg4mMLC\nQsxmM263mzfffBOdTodcLqexsVGwWxcWFgp8BkBJSQlFRUXcuHGDK1euCFyJTqcjPT1dCBsNDw8L\nwa7z588L3dd/LnCdnZ1s2bJFWL/BwcF4PB5UKhVSqZSgoCAGBgYIDg4WwlxFRUXk5+ej0WhQKpVc\nuHCBhYUFHA4H169fR6VSodPp2L9/PwAHDx5ErVYTHR3N66+/zvz8PG1tbej1ep555hksFgsdHR1E\nREQQHByMr68vubm5gtNzaGiI73//+6SmpiKVSgUH5hfBV2J88Hg8rFy5ksnJSSHVd/PmTfz8/Pj7\n3//OwYMHuXXrFlu2bOHZZ59lcnKSo0ePMjo6ilarJTw8HIlEglKpZGpqioyMDKqqqti9ezcVFRVs\n27aNxsZGOjo68Hq9LC4uUlZWhsViYffu3eTl5fHggw/i7++Pn58fFouF6OhoXnrpJWZnZwkKCuLY\nsWMC2RYVFcXk5KRgI21qamLbtm1IpVJu377N6dOn6ejo4NChQxQXFzM5OcmZM2eETsdut1NTU4NI\nJOLSpUtERd2NiczNzWE0GlGr1UJ73NTUhNFoZGhoCIVCQXh4OGfOnOHNN9/kL3/5CyqVitjYWH71\nq1+h0+mETiovLw+j0cjKlSvp6emhubmZBx54gLS0NNrb27FYLIyMjLB9+3aSkpI4ffo0mZmZQnYg\nPT2d/Px8AgMDUalU9PT0CKrB7Oys4JSLi4sjKCiI999/n8bGRn76058iEol47733+Nvf/kZYWBgt\nLS1kZ2czOjoqOE7tdjt+fn7CSLM0Bj355JPIZDJhDr948SLJyckUFBTQ1dVFV1cXv/71r/n000+J\njY1lcHBQGD8dDgfr1q3jzJkzGAwGioqK6O3tBaCmpga9Xk9jYyNhYWG88MILHDhwQCAZt23bxuHD\nh2ltbeW5557j+PHjuFwu7r//fsGHEhMTIyhZr7zyCgDd3d1MT08THR2NXq9Ho9FQX1/PD3/4Q2Ji\nYmhubsbpdBIVFcX777/Pvn37kEqltLa28vnnn6NWqxkeHsZms1FaWsqpU6cAePnllykpKRG4qrff\nfpuhoSFycnIoKSnhoYceQi6Xc+fOHex2u1CI5ufnWVhYIDU1lZycHN59911CQ0MZHR39wvvxK0E0\nvvLKK4dKS0sFPz4gtFZLluKMjAwWFxfxeDz09fURHR1NW1sbMpmMyMhIKisryczMFFr8iooKFhYW\nWL9+Pf/xH/8huAXn5ubw9/dnamoKtVqNSqViZGSEoaEhxsfHmZiYQCKRMDw8LEh5Op2O/v5+LBYL\nly9fpqioCL1ez6effip0AEtfzK1bt5icnCQ3N5eYmBjm5+ex2WxkZmaiUqmIjIwUFmJUVBQmk4nO\nzk4
aGhr40Y9+JMRgrVarQB4uJQU9Hg9BQUH09/djNpuJj49Hq9XS3d1NS0sLEokEvV6PQqGgoaGB\nLVu2cOnSJWJiYrhw4YLAf0RERHDr1i0MBgMajYaZmRk0Gg0Gg4Hs7GwqKysJDg7GZrPR0NCA0WhE\nIpHQ3t4u+EeWlI6WlhYiIyMpKipCJpOxYcMGpFKpEMLx9/dn48aNWK1WtFotbW1tXLp0iY6ODgoL\nC2lvb8dqtbJmzRouXrxIfn4+r776KllZWYyOjrJ+/XpeffVV8vPzSUhIEFKV8fHxSCQSUlNT2b17\nN7GxseTl5QmeEq/XK3BSFy5c4MCBA0JkenR0lAsXLuDj48OxY8eEEJparaaoqIjz588jk8nQaDQs\nLCyQlJTEe++9R2NjIw6Hg8HBQYxGI01NTaSlpbFz507Gx8cpKipCKpVy4cIFzGYzGRkZAjkeFhbG\n8ePHSU1NFWLfGzZsIDw8HK1WS2trK3K5nBUrVlBWVsb9998v+DDMZjPJyclERUXhcrnIzc0lISGB\nubk5qqqqUCgUzM3NER4ejkgk4urVq0IhCAsLY3Jykri4OCoqKr4Q0fiVGB/8/f2JjIykubmZXbt2\nkZeXh9frxdfXl2XLlhEfH09wcDARERHIZDIkEgnV1dVMT0+zfft29u/fT1JSEvX19dy8eZOpqSkA\nITyTnZ0tePxDQkKorq7GbDYTFhbGhg0bsNls9PT0MDw8LGTpc3NzKS4uBu5ad+/cucP27duBu2OJ\nXC5n5cqVaDQaMjMzhbEgPj6edevWUVdXx+LiIvHx8eh0OlauXMmKFSv48MMPuXz5MrOzs5SXlwvu\nSLhr4loKGFmtVtxuNxqNBj8/P4G3WMp6hIaG0tfXh9vtxmazsWfPHrKzs/H390en07F69Wra2tpI\nSEhgfn5e4FWcTidHjhwRYrtqtZrZ2Vm0Wi2JiYmCv6GzsxODwUBLSwuXL19GpVIJhqvQ0FC6urpY\nWFigpKQEiURCREQEy5cvZ3JyksnJSUwmk5BcXHrskmyanJxMamoqV69eJS0tTXCrwl3Seffu3QwN\nDREUFERISAg5OTnExsYyMDCAw+HgxRdf5MyZMxiNRoqLi/F4PJw/f57Dhw8jl8sZGBhArVZTWlpK\nbGwsgFDkg4OD2bRpE88//7wQNHM6nezevZuMjAyioqKora0V5MNbt25x+PBhNm3aRF5eHrt27UIm\nk/HSSy8Bd/0C3d3d+Pn5YTAYcLlc7Nu3j/j4eCYnJwkNDcXj8aDX69m6dSttbW3U1NTg9XoFZ+nY\n2Bh79+7FbDYLJqPm5mZsNhtisVjofpOTk8nIyKCpqYnGxkaam5tRqVSCmjQwMMDU1BTbt29HJBIx\nNTXF7Owso6Ojginqi+ArMT5MTEwIjjqLxSK48ZRKJVqtlpycHNrb22ltbUUsFgtM9apVq2hoaODk\nyZPCqZSZmSl0G5s3b6azsxONRoPVahVO6VdffZWrV6/i7+9PSkoKLpeLH/7whwQEBFBeXk53dzet\nra1s2bKFyclJBgYG2L9/P8eOHQPuBpRu3rxJYWEhFy5cICsri40bN1JWVsbPfvYzWlpaOHDgADU1\nNXg8Hh577DHh7of6+npWrFghkEZ9fX2CU3Jp3o+PjyctLY2EhASuX7/OM888I8SyFQoFLS0tlJaW\nsri4yNWrVwVOIT09nba2Nubn50lISODWrVvs37+f9957D7Vajb+/P7/4xS+YmJjg4MGD6PV66urq\nKC4uxul0cuXKFWQyGY888gghISF8/PHHhIWF0dfXh5+fHzqdDpfLRUdHB4GBgVy7do3o6GgkEgmF\nhYXodDpGR0cZHR1ly5YteDwebt26xcTEBPv37+fGjRv09/cLWQ+dTofb7SYtLQ2FQgHcJYeDgoKw\nWq0kJycLuYSluf+TTz7hxz/+MQEBAVy4cAGTyYTZbCYgIEC4f6CtrQ2n08nWrVv5+te/Dtx1CAYH\nB9PU1ERZWZkQrvPz80MikXDo0CF0Oh2PPvoo3/72t5FKpTQ3N7N9+3bBUtzS0kJISAi+vr4Cobyw\nsIDJZCIuLo6LFy8KASW1Wk1NTQ1RUVH4+/tjMBhYsWIFO3fu5OrVq3R3d3Px4kXCwsKEC2QyMzPp\n7OwEYNeuXYyPjxMdHS1wJlu3bmVsbIyuri4GBgaQyWQUFBSwb98+PB4PQ0NDDA4OCh3F9PQ0breb\n5ORkwdD3RfCV6BRCQkI4deoUCoWCzs5O7rvvPrZs2SJwBB9++CEjIyMYjUZh1i4qKsJutyOVSsnO\nzkYsFhMUFIRcLqenpweAzz77jIGBAWw2m+DUW5LfhoeHMRgMnD17loqKCv7yl79w8eJFJiYmGB4e\npqSkRCC/bDYbJpOJiIgI4O6p8+ijj1JfX09sbCwVFRUcPXoUj8dDWVkZiYmJzM/PC5vo448/JjMz\nk/n5ebZu3YpSqeTf//3fmZ2dxWKxCI7GpRSnTCYjODgYPz8/Dh48KIwUbrcblUpFeno6KpVKyFIs\nSV7T09OMjo4SHByMUqkkNzeX3/72t5jNZsxms3CD1Pbt2zl79iwvvPACX//614XAU25urhDeaWtr\no7GxEYvFQnZ2Nrdu3aK7uxu73U50dDTNzc2sWLGCsLAwFAqFQGh2dnYKdx6YTCaysrIIDAxErVYz\nPT1NYGAgIyMj2Gw2QkJCKCwsRKVS0d/fD9wlcaOiogST0u7du/Hx8WF4eJjR0VG2bt2Kr68vLS0t\nGAwGbDYbt2/fJiEhAb1eT0VFhbBW8vLyCAwMBCA0NJRLly6h0+kEV+jSuLawsMCePXtoampicXGR\n9PR00tLSiIqK4i9/+QtSqZRz586xZcsW6urqaGhoEAxnSUlJGAwG/P39CQsLY25ujry8PFauXMnX\nvvY1pFIpOp2OyMhIuru7+dWvfkV1dTUdHR1MTU0JF7dYrVbBQg13yXelUklMTAyxsbH4+voSHBxM\nY2OjkHmQSqWMjY0JoanAwEAhAavVatHr9TgcDmZnZwVp9ovgK1EUzGYzMzMzTE1N4XQ6ee211xga\nGsLtdpOVlYXFYqGiokK4OiswMJDFxUVu3bqFQqHA5XJRWFgoqAy1tbUAbNq0ie985zvMzc0J6TSv\n10tcXBwxMTGkpqYyNzdHRkYGJSUl+Pr6YjabhUx9c3OzcJuNv7+/IOu43W7+9V//lYWFBSQSCXFx\nccINTR988AFXr15FJBJhtVqx2+0sLCxgs9mQSCQ0NTUJrfV7772H2WwWitiSZXjpuriPP/5YCLiI\nRCLKysqoqKhgamoKpVJJRUUFAwMDwmUgS0ajpffidDpJT0/n4MGD/3CyL/k5bty4wenTp2lububm\nzZsCCbckxW7cuJG4uDiamppQq9WsWrUKvV6PVqtl48aNjI2NIZfLiYiIoL+/n6ioKB5++GEUCgVb\nt25lcXFRsDUfO3YMo9GI1+tFrVZjt9ux2+2Ul5djtVoFdnzr
1q0Ch6FUKjl27BhpaWl0dXUxNjZG\ndXU1i4uLzM7O4vV6kcvlxMfHMzExgVarxel0smLFCrKzs3nxxReFdjw2NpYnn3yS++67j7Vr15KR\nkcH58+c5cuQIlZWVBAYGsnz5csFoNj4+zsqVK3nsscfQ6XRERUXR0NCAn58fzz77rEDcqVQq1q9f\nT1NTE0FBQcL1bUtu2qW1FBwcLFx4sri4KFz4Ex4ezvXr14Xo/pIkCXevFHjrrbd455136OnpEdSG\nJXn99ddfRyKRMD4+zrlz5zAajQQEBCCRSLh48SIWi4Xi4mIaGhqEzuaL4CtRFGQyGbGxsXg8HiEI\n1NjYiEql4siRI8TExJCRkSG4GDMzMxGJRBQWFjI6Okp+fj6hoaG4XC5cLhfvv/8+AJWVlXR3d1Nf\nX8+6deuor68nMjISu93OqlWrGBwcxOVysWnTJlpaWqirq6OoqAiTyUR/f7+QC8jJyeHSpUsEBAQA\nd33pBw4coLu7m8HBQeRyOZs3byYkJITk5GTOnTsn8CHp6ek88sgjTExMsHr1avbt24fRaCQ/P5/w\n8HD6+voICgoC7mbol8jSpfbZaDQSHx/P8PAwYrGYpqYmzp8/T2trKwqFgujoaDweD5s2baKwsJDG\nxkZ6e3vp7+9nYWGBmZkZ3nrrLcxmMzExMaSlpTE6OorD4RCMNBqNhtTUVAIDA8nNzaWlpUUwUEml\nUlatWsXs7KyQeFySOJekwaXbrZak4CX1oaWlBR8fH2w2G4WFhUJyb8nuvRRG0ul0XLp0CYBjx47R\n0NBAYGAgDoeD9evXc/PmTS5fviwU7rGxMcF5OjMzQ1RUlBAGSkpKYmFhAa/Xy7vvvisYf65evcqN\nGzf48MMPMRgMfPjhh1y5coXS0lKkUim1tbXs3bsXm83GmTNnKCsrY2hoCJfLRXh4OA899BB2u52k\npCRMJpNQyI1GIx6Ph97eXkFuVSgUDA4OUl1dTWVlpcBBPf744wQEBAg+E4vFwtGjR3G5XBw5coSa\nmhph8y7xAAUFBZSUlCCTyTh+/Dg1NTWcPXuWwcFBfvrTn6JSqbh06RKhoaHYbDakUqngf1EoFPT1\n9Qnk5hfFl3LF+/9TiESi//03cQ/38P9/1Hu93rz/7kFfCaIxIiKCo0eP0tDQgFQqZWFhAZFIxNDQ\nEOvWrcNgMNDR0UFfXx/f/e538fPz4+TJk0RFRbF8+XLGx8eFG5Vu376N0+nk8OHDfPrpp/z5z3/m\n0KFDiMViIcswPT2Nj48PGRkZaLVa4uPjOXbsGMnJyfj7+5OQkIDNZhPadpFIRHJyMm+99RZ/+9vf\nOHPmDIcPH8Zms3Hw4EHq6upIT09Hq9Xi8XhITEzkiSee4JlnnmFiYoLU1FSBQHI4HJSUlHDt2jWB\nvKutreXYsWO8/vrr+Pj4MDc3R19fH8XFxYyOjuLj48OdO3eIiooSyMGldttoNNLR0cHGjRuZmpoi\nLy+PixcvIhKJGBkZYcuWLaxfv57a2lrm5+eFdvuFF17ghRdeoLGxUbg9eOly3ImJCR555BE0Gg0t\nLS20t7eTkJDAG2+8wS9+8QtmZ2c5deoUAQEBZGdn097eTmhoqJCirK+vR6PRkJWVJVygszRDA8JF\nrXV1dQQFBTE6Okp8fDwvv/wyzz77LHq9nsHBQTo7O4UU61NPPcVLL73EwsICLS0t7Nu3j/7+fgIC\nAti4cSM6nQ6TyYTX66WpqYm4uDiuXbuG2+3mT3/6E7/85S/ZsGEDU1NTXL9+nUcffZTz588L+ZK6\nujoefvhhIiMjaWpqwsfHBx8fH4HMhrun9/LlywW/SFRUFGVlZbS2tnLhwgWCgoIoKioSQkt6vZ7f\n/va3ZGdnC16W2dlZqqqq+OlPf0pDQwMREREYDAacTic1NTUsX76c1157jXfffRer1cprr73GSy+9\nJCgcS4nMtLQ0IQZ+69Ytmpubefzxx1m2bBm//vWvefnllzl37hwmk4n4+HgMBgO//vWvv9B+/EqM\nD/Pz87zzzjvIZDJhE3q9Xg4ePChcgHH//ffz97//HT8/P1JSUigtLWX9+vWcOnUKk8nE0NAQISEh\nPP300xQWFgJ37z3Yu3cv165do7e3l7GxMVJTUzl48KBwZ4LRaOTixYsMDQ2h1+sFTmJhYUHwNSiV\nSj7//HPhec+ePUtERIQwFmRlZbFs2TLi4uKQSqW0t7fz/PPPA3eTmjMzMyQkJAiR27S0NOx2OxER\nEUxO/l/svWd01Oe57v0bdY1GvUujNqpIQr1LIHoH010A4xYHdxzHcZpXYsdxEqd547hhsHHAmN6b\nQAJUEOq911FvI2nU24zmfJDnOXuvddbePm/edy3v9e7/FwZsj4w0T7nv+7p+Vz+rVq0CFsRLOTk5\nmJqa8uqrr1JfXy8aRRMTE9TU1FBYWEhFRQU6nY4zZ85QUFCAXC4nODhYuDQDAgIEMyEpKYkvvvgC\nLy8v4uLimJycpKKigueee050x2FBw19SUoKFhQVBQUHU1NQIH4ZWq2VsbIzt27fT19fH7du3ycnJ\nwcrKiq+//hqtVsuDBw9ISkqit7eXhIQEHB0dhfT75s2bqFQqli9fLpD1bW1tODs709fXx/z8PEql\nEliwj+uNT7t27eL+/fvs2LGDvXv38tJLL2FiYsL69esZHR1l06ZNaDQacnNzGRsbo7+/n46ODtRq\nNWq1GgsLC9FoVCgUREdHc+rUKVauXEltbS1hYWE0NjYSGBiIl5eXmGz5+PhQX19PdnY2aWlpwoMx\nPz9PV1cXJ0+eFH2rY8eOcebMGZ566ini4uK4du0aDx8+xMTEhJ///OcEBQXR3NwscHrz8/P84he/\n4NatW/T39xMUFER1dTUajQZ/f3+Cg4OBBUZjTU0N77zzDrOzs/j6+jI+Pk52djYpKSkYGBgQGxvL\nBx98QHh4OC+99JKYenzwwQcMDQ0xODgosPR6IOz3eX4Qm4KtrS0bN25kfn4ehUKBnZ0dycnJ6HQ6\nKisrkUql2NnZcfPmTTQajXCJSSQSNmzYQFBQkJCjSiQSioqKgIUaXU9/9vHxEWKQnJwcIVttbW1F\npVIRHh4uxqISiYTLly8TGhoqiLgzMzNi1rtr1y7i4uLQaDRERkZiZ2dHVlYWly9fxsbGhq6uLsbH\nx1EqlXh6emJqaopSqcTd3R21Ws2LL76IQqEgIyNDnBQAvr6+LFu2jOrqakGrTk1NxdjYWMBCHRwc\neOWVV2hsbCQsLAytVsvg4CCNjY1kZGQI8GteXh7JycmkpaURFRXF6dOnqa+vp7a2lsDAQLq7u7G2\ntubu3btiXNXS0iKal3K5HABra2t6enpITEwkPDyc69evExgYSFRUFPX19djY2DA8PCwmH/X19VhY\nWDA3N4dWq2VgYEAAWouKivD392d0dJSOjg6cnZ2FPsLCwkJ8PT3d6fz587z00ku0trby9NNPc/z4\ncdzd3UlOTmZgYEA
0j5cvX45MJsPQ0JCSkhJcXFyorq4Wo0lYYCkeO3aMjRs3MjExgU6nY35+npSU\nFMbGxtizZw9KpRILCwvRILSzs2P58uVUVlYSHR1NREQEAQEBpKSkiGlJcHAwn376KZcvX2Z2dpb8\n/HwsLS1paGhg5cqVGBsb4+HhwYYNG/jss89EXyUgIIBXXnlFgIP8/f2pqqoS/TB9vICFhQV5eXni\nUBgYGODmzZt0dXXh6OjII488IsxyNjY2HDlyhIqKCqFa1el0FBQUsHXr1u+9Hn8QisbDhw//1sDA\nQMiAvb29KSkpIT09ncjISDIyMkhJScHKyoqSkhLBvG9vb6e/v5+srCwBVBkdHUUul/Ptt9+KBou3\ntzdXrlzhkUceobi4GB8fH8zNzVm/fr0gKxkZGbFu3TrBC9B31QGWL1+OiYkJVlZWnD9/ntWrV6PV\nann48CFOTk7k5uaydetWZmZmxPxYp9PR0NDAp59+KjDpaWlp4mqp1xVIJBLc3d25cOECMzMzWFpa\nYmBggEKhwM3Njf7+fmZmZpiamqKtrY2EhAT6+vrYtGkTRUVFREREsHjxYmQyGVZWVrS1tbF8+XJm\nZ2c5ffo0SUlJwmkYFBQkOv5Lly5Fo9GIv9v58+fR6XRYWFjg4ODA2NgYK1euxNraWoBNHzx4ICCu\nSqUSKysrBgcHMTExobGxkbGxMYGtc3JyYmxsDDc3N65cucLExIQIiNE7Rm/duoW/vz/29vao1WqK\niop46qmnaGxs5Ec/+hFubm4CUurn50dHRwf9/f20tLTQ0NDAgQMHMDMzo729nZycHJqamti5cyc3\nbtwgMjKSPXv2MDY2xs2bN3n00UcxMDBAJpPR0dGBjY0NZmZmAgdXWlpKcnIyRkZGNDU1YWNjg5OT\nE/39/eTn5+Pr64uNjQ25ubmoVCpcXFy4ePEi4eHhqFQq3N3dsbGxwcPDg+zsbCwtLcXnKDIykkWL\nFvHHP/4RhUIh3v+rr75idHSUM2fO0NvbS3BwMJ6enuTk5IgRp16NePLkScLCwlizZg0lJSUsXrwY\nqVSKj48Px48fx9DQkL/85S/cvXsXa2trGhsbkUql1NXVsXnzZu7fv09xcfH3UjT+IDaFjz766Ld7\n9+4lMDBQGGp27tyJgYEBDx48wMzMDBMTE6RSKVevXhXzX/1JVVNTQ3V1Nba2tmIUeO3aNZ566inx\nNU9q+HwAACAASURBVKanp5mamsLPz4+AgACys7OFkw4Wxox6gKhSqcTa2hqlUsnWrVtF/oRMJuPs\n2bOkpKQwPDws2AA+Pj7Mzs5SXFxMeHg4O3fuJCQkhLa2NpYuXcrSpUu5efMmW7ZsEbwAPZ9QTwm+\nf/8+S5YswdDQEJVKxb59+0RnvqOjg76+Pnx9fTEwMKCvr4+rV6/ywgsvUFpaire3t8ilSExM5NKl\nSzg5OeHk5IRarWZoaIjW1lZGR0dpbGzkwIEDuLm5ER0dLXgRcXFxPPHEE0RHR1NdXc3g4CCffPIJ\ngYGBaDQaysrKBNQlMDAQlUolbgsbNmxgx44dKJVKkpOTcXZ2pqOjg7179zI2NiZ4j9evX8fGxkaE\nkyQnJ/Pw4UPy8/PZtWsXly5d4uWXXyY7O5uBgQFaWlro6elhdHRUSL/1iV1yuZzu7m7Ky8vR6XR8\n9tlnvPbaawwNDeHt7c2KFSs4f/48w8PDPHjwgFOnTtHS0kJMTAylpaXk5+czOTmJkZER0dHRrFq1\nio6ODnJzc7Gzs0OtVrN48WKSk5OZnZ2lpqYGJycn1q5di5WVFSqVikuXLrF37156e3spLCwkJSWF\nu3fvkpCQQGZmJgEBATg4OAhhla+vL+7u7gKvtnbtWvF9MjExwcnJiampKe7fv89bb71FQ0MDzs7O\ndHV1kZqaysWLF7GxsWFycpLFixcLpWNycjJ2dnYYGRmRlpbGnTt32L59O7a2tlhbW6NWq9m9ezdH\njhz577MpHD58+LchISHodDpCQkIYHBwUHvy5uTkSEhJISkrC1NRUkIGmpqbw9PREp9NhbGzM66+/\nTkFBAYsXLyY6OpqPP/4Yf39/6urqiI6ORq1W097eztjYGENDQ9jZ2dHV1YWnp6dg87m6ugILwpGe\nnh7Wr1+PmZkZ9+7dY/HixXR1dXHt2jViY2MZHR0lPj5ebETGxsY4Ojpy7949EhMTqaysRKvVEhER\nIdRw4+PjeHl5odFoBPjCxMSEzMxMmpqaBP1Y719wdHRkdnaWa9euIZFIBEjE09NT2H71UuSNGzey\nevVqZDIZaWlpjIyMiMUUGhpKYGCgGHd6e3uTm5tLYmIi2dnZSKVScdMpLi5GJpMRFRXF5OSkQM7r\nr9XOzs7Y29tTX18vICbT09PIZDKSkpIoKSnB0NAQQ0NDSktL6e7u5tatW8jlcgE7VSgU5OfnY2Bg\nwObNmwkPD0epVJKdnY1CoWDp0qV0d3cTGhoqICJ6KIyfnx9hYWGYmZkJKlVZWRmPPvooPT09REVF\nMTc3x927d8Ut7MaNGzz66KMUFBRgbW2NtbU13t7eqFQqenp6cHV1ZWJiQqgA9SzEmZkZAHp6etBo\nNOzfv5+2tjZMTU1xcHDgyJEjBAQEiOg6PYlbX+LcuXOH5uZmbG1tCQgIwMrKSuDii4uL6ejoEME9\n1dXVItCnqKgIa2trfvGLX9Dd3Y1GoxFjbH303fDwMGZmZrS2thIcHEx1dTWjo6MolUrc3NzQ6XT0\n9fWJn9vXX39NU1PTfx/vw9DQkDB4nDhxgvHxcdLT07GxsRHAijfeeIM33ngDAwMDkpOTUalUyOVy\nvv76awoLC7l9+zZWVlZMTU0JDr4+m0B/3VKr1fT09AgAqN5Qo2c+Tk1NMTw8jKOjIyqVitHRUbGY\nn3vuOaqrF6IrZDIZ7u7u2NnZCe2CHt3u7+9PZ2cnGzZsoKamhra2NgIDA5FIJGRkZPDll18yODjI\nzZs3xYmqt/HKZDLs7e3RarXk5+eTlZUlsgRmZ2eFaEmlUuHo6MiNGzdEYI7+Wp2Tk4OTk5PIFNy9\nezcVFRV0d3dz7do1jh07xkcffUR2djaXL18mJycHPz8/Jicn0Wq1dHV10djYSGtrK+vWrRPXVL0i\nsaCggHv37gnK0MzMDBUVFeTk5FBVVUVDQwO3bt0SZKY33niDl19+GQMDA2EAMzU1JTg4WCwCtVot\nxEDe3t64uLhgYmLC0NAQd+7cwdvbG29vbwICAvD396epqYmlS5cKDkFSUhL29vbI5XIkEglnz57l\njTfeYGJiQiQjTU9Pi7i1L774ArVazdWrV/H09CQ8PFzwJkxNTRkdHRXegUWLFrFnzx5xO52ZmeHm\nzZtIJBIAcRvSarXEx8cjkUjw8fFhyZIlHDlyBF9fX+bn53n99de5e/eucIfGxcWxevVq1qxZI/JG\n3dzcBFvD0dGRa9eusWLFCry9vYmOjhao/JycHFxcXKipqRF2fL3KcevWrYSF
hWFiYoJGo2FkZERM\nTb7v84PYFGQyGUqlkqCgICIjIyksLKStrU2Ek+ohIKmpqdTV1XHr1i3xzVu2bJnAiG3btk3482GB\nK6h3W6pUKnbs2MHmzZuRy+WEh4ezdOlSYCEoRB8dFhYWxtzcHHv37uX+/fvk5OQwOTnJhx9+KPT5\nDg4OaDQa8vPzcXJyoru7G0NDQ5GXePHiRdauXctTTz1FYGAgU1NTRERE8MILLwgiUkxMDC0tLUxO\nTrJy5UoAwTzo7+8nPT2d9vZ2CgsLAdizZw+urq4sWrQIFxcXlEqloCf7+vry4YcfCr+FvvyRy+Vo\nNBqam5vx9PRk7dq1bN68menpaW7fvk1VVRWpqalCeent7c2aNWsA0Ol0whmqb7Tq0e165V1dXZ0I\nZ9XpdOTl5REVFYVcLic1NZUXX3wRNzc3EhMTeeutt3jqqaeE3Lu9vZ21a9fi6OhIQkKCEHDpXYr6\nBqb+/Z955hlUKhXl5eU8ePCAEydOCJdrbGwsLS0twtD11ltvkZ6ezuOPP86VKwvBZHogbnBwMKtW\nrSI2NpajR4+ybds21Go1R44cET2fZ599lu7ubmpqanBzc2PFihXioLpw4QLx8fECsPrRRx/9B6Wt\nkZERra2tZGRk8Kc//YmNGzeiUCj4yU9+wqpVq8jPz+fEiRMsXboUnU7H3bt3ycjI4N69e+Tn52Nt\nbQ3Aiy++SGxsrDBn6WXtLi4uGBkZMTw8zNjYGCqVivPnz+Pq6oqbmxvj4+Ps3r2b2dlZduzYgU6n\nw9HRke3bt3/v9fiD2BRMTEwYGRkhNDQUGxsbQkJC8PHx4eHDh4SFhdHb28v58+dRqVR4eXmxfft2\ntFotV69eJSoqiieeeAJra2tGRkbIz88XqjB/f3+uXr1KeXk5fX19Im7N2NiYgIAArl27xtDQECqV\niunpaeLj45mZmRHjqbVr1wo/RXp6utgUcnJyUCgUhIWFMTQ0xPz8PGVlZaJc0OcezszMCFnxli1b\nuHHjBn19fSKTERZowHqWgL29PUqlkra2NjZs2EBcXBxxcXF4enrS0tJCe3s7/v7+WFhY0NzcTHFx\nMZ2dnWRlZeHo6Cjsv5aWlvT19TE8PMyxY8d44YUXMDEx4dtvvxUy2I0bN+Lu7i7GgzqdTiR0bdu2\njSVLltDb24uHhwdeXl5IpVJCQkKYnJwU3AlDQ0Oam5tRq9U4OTnh6OjI+Pg4crlcNAllMhnW1tao\nVCrOnTsHLIyK6+vrKSwsRK1Wk5aWJkq3kydPYmRkJL6/ZmZmzMzMcPDgQdEv0nsJKioqcHR0FJkR\nPT09GBgYEBwcjKOjI5cuXWLbtm3Aguvw4MGD2NnZ0dnZSU5ODrdu3WJycpK0tDS8vb2FzqC1tRUH\nBwd8fHzw9PTk1q1bwrUZHx+PhYUFdXV1wMIkSqFQiBvqsmXLsLW1FWNAa2trEhIS6O7uprm5GXt7\ne6F+7e/vx9LSkuXLl/Paa6+xf/9+AZodHR3l+PHjTExMoFQqkUqldHV14eXlxcDAAFevXsXR0RFb\nW1uefvppAcBdu3YtxcXF/O1vfyM9PZ35+Xlqa2s5fPi/rBrE84MQL+npPdXV1czOzhITE4Ozs7OA\nVshkMhGSEhkZSVNTkxgryeVy5HK5oCWlpKQIF57eOtzU1MS+ffuYnJzEwMAAe3t7rl69Sn9/vyAV\nKRQKwsPDMTQ05NNPP2X79u0CWxYbG4unp6cYFz3zzDOkpaXR3NwsGoe1tbXIZDJcXFyIjIzE3t6e\nqakprl+/zuTkJKmpqRw8eFDYiefm5gTVWB/UoQ/5iImJEV55Dw8PysrKkMvlBAUFifxBlUpFa2ur\n+CDqicd6/Lm1tbW4OkskEmEz19f/et5gQ0MDpaWlaLVaTp8+zenTp/npT39Kf3+/GIc5Ozvj7u6O\noaGh2OSkUqno+ZSXlxMfH09fXx+urq5iZJiVlSW4EOPj41RWVorx75NPPikgt/raHsDT0xNvb2+y\ns7MpLi5m3bp1aLVa+vv7RRq43gE6Pj4uHKKzs7MUFhYSHh4uSjOpVCos2bW1tfT09LBixQoGBwdR\nKpX8+te/pqWlRRC1N2zYgFarpba2lo6ODuRyuaBqP/HEE3z44Ye8+eabXL9+XfRH0tPTRVix3n/w\n+uuvk5uby6ZNm2hvbyczM5O8vDwSEhJQKpUsWrRIbF4ffvghqampolxJSUnhzJkztLe3s337dtGT\n6OnpYc+ePVy8eFHcHCwsLASIWJ9GfffuXby9vYXGJzMzk4iICDHy/T7PD+KmMDs7i0wmo6Kigrm5\nOY4ePcq9e/dQq9V89NFHGBkZiXTd5uZm/vKXv6DVaklKShKE3fr6erq7uyksLBQnkoODA66urnh4\neHD9+nW0Wi2Ojo5iQerrusjISNasWcP09DSvvfYaUqmU/Px8tFot69atQ61W8/nnn4u6rLa2lsnJ\nSZYuXYqfnx9zc3Mixefw4cNIJBLGxsY4e/Ys7777LidOnKClpQWZTEZQUBCXL19GoVBw//59bGxs\nOHr0KPC/6UAymYyHDx9y/fp10W/Rf3CNjIyEBkAqlZKcnExUVBR79uwhOjqamzdvUl9fT25uLrW1\ntVRWVgrgrFQqZWRkhE2bNrFixQoyMzOxs7PjiSeeoKWlhdjYWN577z0BZomMjGRsbEyIdvRlm4WF\nBX5+fmKT7u7uFuPK0dFRLl68yL1797hw4QKlpaXIZDIqKytJSUkhLi4OFxcX7O3tcXBwwNraWkTb\nAwI0KpFI8PPzw8XFBS8vL6qqqjh79ix+fn5C3erv78+iRYvYsGGD8I20traSkJBAWVkZiYmJwhDl\n4eGBh4cHUqlURNSVlpayfPlyXF1dGRkZITg4mKysLE6dOoW3tzdxcXE8fPgQnU7H0NAQzz33HBqN\nhuDgYMEB9fLyEuFEhw8fxsXFRZizbGxsuHDhAqOjo6xZs4alS5fy29/+lq1bt9LS0iLw+Ppeh5mZ\nGampqcBCD+Sbb75h5cqVgkg+MTHB5OQkTz/9NKGhoZw/f56dO3eKzay+vp6tW7fi5ubGzMyMKH1L\nS0u5fv36916PP4hNQSKRsHLlShwcHISk2NTUlLt372Jubs6ePXsICQnBxcWFI0eOEBQUxKZNmwQi\ne25uDk9PT/Ly8tBqtaJGn5ubw9TUVMBV9OouJycnPD09SUlJob6+nrKyMvr7+6msrBTxYIaGhoId\n0NzcLJKrYcHVqQeW6k9EpVJJT08P0dHR2NnZIZPJCA8PZ926dSQnJ3Py5Ek++eQTTE1N2bZtG0FB\nQXz66aciyhwWeiuTk5OiCarvb9jZ2VFQUMDg4CCXLl2isLCQwsJCQkNDcXV1FXP6EydOCMK0Hhwz\nOztLeXk5Hh4eNDU1UVpaSkREhGAY6BkOesKPHiQ6MTFBXFwc5ubmeHt7i1CSgYEBDh06JJqzeklw\ncHAwWq1WoNqWLl1KQEAACoVCBO/
oY+U0Go0gGDc2NooYd0CE9QwMDPCzn/0MPz8/Ab5JSUkBFtgI\nf/jDH7CwsODy5cs8ePAACwsLAZjRarV4eHgwMzMjanQPDw8WL14sshPefPNNPDw8mJ6eJioqiq1b\nt1JbW4u9vT0rVqzA2dmZ9vZ2TExMBChVoVAwMTEhMjph4ZZrbGyMVqsVYJQzZ87w+eefMz8/T0xM\nDD/5yU9ESWVpaSnAPGVlZSQkJFBUVCQal5mZmQAMDAzg6OjIn/70J7Kzs5HL5ZSVlVFbW8tHH30k\nqGNpaWmcO3eOwsJCnn32Wdra2jA2NsbT0xOZTIZEImHHjh3i+/B9nh/EpqDHSTk5OTE/P09/f7/g\nFF65coWsrCzi4uLIysri/fff56233iIoKIj+/n6xe+qdgrOzs9y4sZBrq2+Aubu709fXx927d2lu\nbqakpER4K0pLS8VCtra2pqamRjQgBwcHkcvl+Pr6AohvrN6Ft3r1alpaWjA3Nyc+Pl6IWhISErCw\nsODMmTOUlZVRV1dHSEgIERERZGZm8sQTT3Dnzh3KysooLCwUCO6nnnqKlJQU3NzcWLNmDSEhIURH\nR+Pj48Pc3JyQZefl5eHg4EBoaCiWlpa0tbWJ4Bm9Pdfc3BwnJydSUlJEbazRaAgNDaWqqkrEzOub\nbJGRkchkMjIyMlAoFFhbW9PU1ERTUxM6nQ5ra2scHR1F7qdCoRANRX0/x93dXXAqqqqqWLlyJRKJ\nRCyi+fl5pFIp3d3d5ObmYmFhIRqnf/jDH4AF5aFWqxXuyoqKCqqrq4mNjWXVqlUcP35cYPoeeeQR\nGhoa0Gg0rFy5ksrKSkxNTdFqtWRkZDA4OCh6Fbdv3yYzM5Ouri5cXV3RarXcunWLDz/8kKGhIVG3\n66Xuhw4dorCwkNTUVOGJ6O/vp6GhgfHxcVHylZWVsXTpUk6fPk1jY6OQXicmJvL111+zdOlS+vv7\n8fT0BBYo3+7u7oyOjtLQ0EBGRgaBgYFi4qS/5nd0dPDss88ik8nYtGkT1dXV+Pv78+STTwqRnR5L\np8/d0Gq1bNy4kc7OTn75y18K7L2e3/h9n3+ppyCRSJTAGKAFNDqdLkYikdgBpwFvQAns1ul0w//Z\n++iv2vpGj6urK15eXkgkEl555RWcnZ05evQoq1evprGxkdu3bzM4OEhpaam4pgUFBXHp0iXef/99\noUtXKBTcuXNHsAtXrVqFUqkkNjaWiYkJ2tvbcXJywtXVlaKiInQ6nXgdERFBS0sLp06d4sknnyQq\nKkrUkdHR0UxNTYkFPjExIcJM9Nc4iUTCxo0bhVV7fHycnp4eHBwcuHr1qkgnkkqlTE5OAguBuBER\nERQUFODg4IBcLhe1dmxsrJhiNDU10dvbKwi/epl3U1MTfn5+QosQFxcnPhQ3btygqqqKRx55REwu\nDhw4wJIlS8jOzuZ3v/sdZmZmgipUXV1NVlYWN27coLe3l4mJCX73u9+h0+n49ttvGRsbQ6fTYWRk\nRGxsLLCwuT/zzDNIpVIKCgp4/vnn0el0PProo8zOzqJWq5mbm6OtrQ1LS0sOHTrEK6+8gpeXlwCs\ndnd3s2/fPnp6ejh9+jSpqalkZ2cTEREhyonIyEgqKyu5cOECjz32mMjGsLOzQ6vVcvToUbRarciP\nhAWac1dXFyEhIXz66ae8/PLL7N+/n46ODjIzMzEwMODChQuEhobys5/9jHPnzjE0NERAQICw0iuV\nShoaGkTMICw0BKenp2lvb+fWrVtcv36dL774gszMTJKSkujr6+ODDz4gJCQEExMTvvrqK1auXMm5\nc+eEB0I/Gr9z5w5vvPEGX331FXNzc2g0GvFzT0tLIyUlhcjISEGnunPnDvfu3cPBwYHXX39dhBpv\n27aNn/70p4SFhaFSqTh27BhvvvkmO3bs+F7r+v+Nm8JynU4X8e8smT8HMnQ6nT+Q8d3v/9NHzzD0\n9/cXmQsajYa+vj5aWlrIzMxELpf/h6hvJycnfv7zn7Nu3ToRHuvn50dra6vQpXd0dGBiYiIMO/rc\nxqamJr766iuCg4NZtGiRSFEyNTVlbm6O5ORk8Vp/VT1x4gQXLlwAoLy8nMjISCEm0ZtlwsPDRamy\naNEibG1tqa6uJi4ujsTERAICAkQ4i7u7u0gU1icRGRgYMDk5SUBAgMg+zMzMxN3dXdw0PDw8xJiu\noaGBlJQU+vr6KCsrY8mSJbS3tyOTyairq6O4uJiMjAyuXr2KjY0NmzZtYnZ2lubmZkxNTdm7dy/D\nw8M89thjIpQlPDycmpoawsPDmZqaorm5GRsbG1xdXfn73/9OQ0MD+/btY8OGDf8hITswMJDU1FSm\npqaEkMnDw4PExESMjIxobm4WCeKzs7NkZmYKg1hPT4/wKHh7e9PR0UFHRwcqlYri4mKWLFmCVqul\noKCA7du3c+/ePYKCgti1axcpKSnMzMxw5swZvL29uXv3LlqtFkNDQ7Zs2SLQfXpxWGVlJT4+PmRl\nZVFSUoKHhwe9vb1UVFSwdOlSIQseHx+nqamJt99+W2D+PDw8CAoKYnh4WPhgCgoKWLVqFUZGRhgY\nGLBlyxa++eYb7O3t2bdvH/X19YSEhGBsbExQUBBqtZrf//73TE1Nib6Hu7s78fHxeHp6kpubCyxM\nzrKzszl37hxZWVlC/To8PIytrS1zc3O88MILFBUVCbEdLNyO29rahCDLzc2NxYsX/18Fwvx/UT48\nAnz93euvgf/SiaFnDeo7vSUlJeTm5mJoaEhLSwtffPEFVVVV2NnZiaDTubk5Ue8+//zzgkVXUFAg\nDEZDQ0OMjY0RFhaGubk58/PzFBYWkpWVhbe3t+AZ6uvt1tZWVq1aJXIgN2/ezObNm/n5z39OSkqK\nuIrm5+dz8+ZNsrOzMTAwwNramo8//hgLCwuh1uvu7qaqqgp/f38OHTpEe3s7crmcvLw8ysrKiIiI\nQKlUUldXJ4JFrK2tMTIyoqGhAU9PT7Gznzx5ki+//JLh4WHq6+tpbm7GxcWFjRs34ujoKFKqb968\nSVxcHFeuXCEuLo7y8nJkMplQBl64cIGamhoGBwexsbEhMjKS1157jfDwcObm5picnKSzsxNPT0++\n/fZbKioq+Pzzz/H392fFihVio9ZPHDo6OjAzM+PgwYNYW1tz4sQJodg0MTFBoVCwbds2/Pz8xJh0\nbGwMd3d3tm3bxrp16zh9+jTm5ua4uLgACOJUV1eXyJ1UKBQC6aaPFAwMDMTIyIjR0VFKSkowMDCg\nt7cXX19f/vSnPwksmd4FOjU1hYWFBRs2bMDJyYktW7agUChQKpVERUXh5OREV1cXlZWVhISECHOY\nQqEQysWMjAyKi4vJzs4Wzdtf/epXQg1aWlpKR0cHu3btoq6ujqCgIKE3MDExARbGpiEhIaSmpuLi\n4kJDQ4MA/MTHxwvE+6ZNmwgLCxMwnkceeYRz586RkZFBQUGByAKJiYnhk08+4eLFi2RmZqLR
aPDy\n8uLUqVOoVCpkMhkRERGiufp9nn91U9AB6RKJpFgikTz/3Z8563S6nu9e9wLO/6f/UCKRPC+RSIok\nEknR9PQ0vr6+QrCkV6/BQp398ccfs3XrVo4dOya63dXV1Zw4cYLi4mJmZmYYGxujpqaGrVu3sm7d\nOmAhsFUPR3388cdJTU3lH//4B4aGhuzatUtgvbq7u9m4cSNqtZrCwkLi4uJEcGhubq5oxOln0/rQ\nFTMzM4yMjJBIJHh5efHoo4+Sk5ODRqOhoKBAxKp5eHhQV1eHoaEhP/7xj5FKpWLkFxAQIBbE1NQU\nQ0NDREdHk5WVJcChzzzzDIsXL6a/v1+QpiIiIigsLOSLL77g3XffFf2VpqYmYc22srJiyZIljI+P\nY2ZmJmr/Rx99lFu3bgk8fG1tLW1tbSJgZ/fu3TQ0NCCXyxkaGhJ6gMWLFxMYGMjMzIy43s7MzNDU\n1MSpU6cEmzEjI4P8/Hxhcf7666+5du2aaPweOHCAxx9/HENDQ6Kjo5HL5eLk9ff3JzY2VhC+P/vs\nMz7++GMOHTpETk4OZ8+epbW1ldzcXNLS0tBoNCQnJxMZGYlKpcLZ2Zndu3eLsaNeFLV27VpRMjo6\nOuLo6Mj58+dJSEggICCAlpYWLC0t+dGPfkRtbS1VVVVC7OXk5ERPTw9jY2O8+eabxMTEiODjvr4+\nIiIi2Lp1q5BO37t3T+hDKioqOHz4sCBn7927l/Xr1xMWFiakzYWFhSIRysbGBlgQRfn5+dHT00N4\neDjnz58nMjKSmJgYZmZmaGxsFHDZ1atX8+tf/5rw8HCGh4c5deoUqampGBkZYWNjg0wm46uvvvre\ni/pf3RRSdDpdBLAeeEkikSz99/9Qt4B1+j9SlXQ63WGdThej0+liDA0Nqaqqws3NDU9PT+RyucCA\nl5WVcfLkSdRqNUZGRrz77rs4ODiIjnV/f7+osVesWMHFixeF7Tc1NZWGhgYsLS1paWkhKCiInTt3\nYmZmxsDAAL6+viQlJQkXov6HpL9+jo6OsmzZMmJiYmhvbxcNR/1Czs/PJycnR+jY/fz8sLKywsTE\nhFWrVjEyMkJmZiZzc3Ps27cPU1NTjIyM6OvrE76Lnp4ekTw1OjpKcHAwMzMztLW1ERQURGNjIw8f\nPhSZg2ZmZuIWotFoxO3Jw8NDAED16dLR0dFkZ2djYWFBSUmJ8Ni/8847eHt7o9Pp8PPzw9jYmPj4\neDGpeOedd4S81tvbm1deeUVMZhobG3FycmJkZARzc3PS09OJi4tDLpczOjpKS0sLarWa+fl5Jicn\nOXbsGBYWFoSHh4tQV5lMxt/+9jeWLFmCTCZDo9GwZ88eACHrDggIwN3dXXy98PBwvL292bhxI6Gh\noQJFFxAQIExD8fHxGBoa8vLLL9PQ0CAi7AHu3bvHw4cPaWlpITU1ldzcXDZu3EhbWxtSqVRsUK6u\nrixZsgQfHx/GxsaEzN7Dw4N169Zx+fJlAgICqK2tBRbkyKdPn+a1114T7ta7d+/ywQcfCM5nYmIi\nX331FfX19Xh6euLr60t5eTkajUbItC0sLDh37pxga2g0Gnbs2MFjjz2GUqnkF7/4BaampgQFBdHS\n0kJgYCCDg4O89tprHDhwgLq6OhwcHP7DzXNiYoKuri4++eQTofL9Ps+/tCnodLqu737tBy4CcUCf\nRCJxBfju1/7/8n/CwIC3334bjUZDVFQUY2NjeHp6smXLFiorK3F1dcXAwIDly5cD/5s7cPPmTGJj\nbwAAIABJREFUTebn54mKiiIgIIAvv/yS1atXi4ZgXl4eTU1NQrN+69YtfH19kUqlAmLh5eXF4OAg\n9+/fp729ncHBQZydnRkbG6Oqqoq2tjbee+89wUkEhJknKSmJnp4eJBIJd+7cEWyFHTt2sGrVKpYt\nW0ZoaCg+Pj7C2n3lyhVcXV3p7OyksLAQX19fwRGMiIggPT2dFStWAHDq1Cnm5uYEWr6mpkaYwOrq\n6igpKaG9vZ2wsDBmZmawtbUVVOaGhgZUKhUxMTEUFRVx4MABEbHu4eFBQ0MDfn5+KJVKfH19Rf9E\nX2/rwa/5+flkZ2cTFBREdnY2dnZ25OXlMT8/z+zsLMuWLRO3kPn5ecbHx9FqtTg5OQnHoJ+fH6tX\nr6atrU1wA+vr6zl69KjQbxw7dgyA9evXU1dXx4cffoiHhwevv/46zc3NbNq0ieXLl7N+/XpSUlKE\ncKm0tJSzZ8+Kjb62tlbkSeh0OrEYFAoFCQkJ7N69G1dXV6KiooSis6+vD2tra7Kzs2lsbMTLy4vU\n1FSGhoYoKSkRMJbW1lbs7e0xNjYWNxBvb2/s7e05dOgQO3fupK2tjVdffZUbN26wa9cunn/+eerq\n6sREaWhoiOPHjwu6lp4poedCzM/PAwtj1+3btzM7O8vs7CwtLS1YWFjwwQcfsHv3bpRKpVBFVlZW\n4uvrS3FxMVqtlvXr16PT6Th48KCwbevj877P8/94U5BIJBYSicRS/xpYA1QBV4D93/1r+4HL/9V7\nWVtbC5PNe++9h6GhIZOTkwwMDODv709raytlZWVYWVmRlJSEubm5AHTqASf19fU89dRT3L9/n9u3\nbwML6PhnnnkGOzs7+vr6UCgUtLS0sG3bNsLCwujp6WFiYgIbGxvWrl3Lr371K+Lj4ykrKxMRbGNj\nY8IqrZc519XV4eLiQmdnJ4GBgSQnJ7N3716USiVZWVm88847HDp0iH/+85+sX79eTB1qamqQy+WC\nmzA/P09PT4+o9/SJVOXl5SQnJxMWFkZISIggHMvlchEhFxkZKa6tKpUKDw8PTp8+LdSD4eHhdHR0\n0N3dzYoVK/jjH/8ojEbPPPMM69at49NPPxViqL///e90dHSI/MZly5bh4+NDaGgojo6ONDc3i6u3\nVqsV3pHBwUFycnJEz0CPDJueniY9PZ2KigrKysrQ6XQi1LWjo4OIiAgSExMpLy+nrKxMgEVPnTqF\npaUlvr6+eHp6cu7cOaamprhz5w6Ojo6Mjo7y17/+ldHRUTHKnpycFEGvetWjHpajH8UNDw9z5coV\n3n//feFa1W+I5eXlhISEUFtbi1wux8rKiqKiIp544glh4iouLmb58uVYWFhw+/ZtMjIygAUgrK2t\nLXFxcTg5OREWFsZvf/tbjI2Nkcvl/OY3vxGq2OLiYj777DMGBweFsvTy5cu0tbXR29srphMAK1eu\npKmpSeDZb968ydGjR9m+fTtpaWkEBQWRnp4OLDS+c3JyBCxHLpfj7+9PRkYGHh4emJiY4O/v/73X\n9r9yU3AGciQSSTlQAFzX6XS3gD8CqyUSSSOw6rvf/6fP6OgoK1asoK+vD3d3d5TfhW9IpVKMjY3Z\nvXs3Tk5OHD58WHAV/n2EnF7Pf+rUKQICAsTJW1lZKSzNExMTnDx5kvb2dkZGRhgaGsLJyYkLFy7g\n6+tLY2Mj4+PjyGQyGhsb0Wg0dHV18dhjj2FjY4O
pqalIG3rxxReFuk7vq9DnT9rY2BAcHMzAwADb\ntm3jH//4Bxs3buTq1auo1WokEgmurq7CPSeTyQS/0c3NTTgc9dOAgoICDA0NCQkJERZrPVdBn7wc\nEhJCc3MzSUlJLF26lOnpaZYuXYq/v79wyoWFheHp6SmstGlpaSQlJYkm1NatW0XwjZ6B6erqipGR\nEUVFRSQmJvLLX/4SqVQqUqn02nq9MEkfWtLY2IiNjQ1vv/02pqamPP744/z5z3+msbGRb7/9Fp1O\nJ0KCR0dHhTwc4Gc/+xltbW2EhISIaDmFQkF3dzfnzp3jnXfeYdGiRTQ0NFBdXS0EZYaGhjx8+BAX\nFxc2bNiAm5sbZWVlQiFob2/PwYMHBVI9JCSE+vp60Zfq6Ohg9erVTE9PU1RUJAJ0v/zySxoaGoiJ\nieHbb78lOzsbPz8/IV6ysLAgODiYgwcPUlhYSH9/P7t27cLBwYGDBw9SUVHBihUrsLW1pby8nJUr\nVxIVFSWQ76mpqbi7u7N7926qqqpE7kNFRQXLli0TIBt9WlhxcTGmpqbU1NQICblOp8PW1paUlBSR\nOvXWW29haGhIfn4+nZ2dYk18n+d/aM7/8/zP8/+f578PzdnT05Pdu3eL07O4uJgtW7aQk5MjlHjO\nzs40NzezcuVK0tPT2bp1KzU1NWJOro+XUyqVxMfHk5CQwLvvvivqr/r6eoEO8/DwEPAQ/Ynl6elJ\nU1MTycnJFBUViRj4uro6cWokJSWxePFiTpw4QVBQENeuXRPhqWVlZWg0Gubm5ti2bRsVFRXMz88T\nEhJCU1MTAwMDghmQmpqKVCrl+vXrODs7i9zK559/noCAAAICAoQ/w9LSEp1Ox+nTp8WIUi+Ltba2\nxsbGBk9PT06fPk1QUBCLFy+muLiYgoICQQWCBRXmJ598wurVq+nv70en07Fz507S09OJjo6mqKhI\nnPKPPvoopaWlWFhYoNVqOXXqFDExMTg6OrJmzRq++eYb1q9fT0FBAe7u7gQGBuLk5ERfXx+XL18W\nslqZTMaFCxeIiooCEEnd+huCi4sLOp1O+AX0Y7empiZBdtKLyu7cucNvfvMb5ubmsLS0xM7Ojurq\napydnVGr1VRUVODl5UVXV5do3EZGRtLe3s6vf/1rqqqquHv3LgMDA8TGxorc0BUrVtDR0SHSs6an\npzlw4ABbt25Fp9MJS7pCoaC5uZnQ0FBWrFhBfX09zz33HBkZGcKdOTY2Rk9PD/Hx8ZSWljI9PS0M\nfHrqeFdXl5jc+Pr60tvbi7OzMxKJBCsrKxEh//bbb2NnZydKz/7+fuLi4khLS8PDwwO1Wi2yHmxt\nbamtrUWhUIibW2dnJ2ZmZkKur1QqhSfov3p+EDJnAwMDrKysCAoKwtjYmOTkZAoKCpiamhIjlY6O\nDpKSkjh//rxI36mpqQEWQj59fHwYGBjAxMSEkJAQYIGncPToURobG7GyshI0Xb3r0MfHh4aGBubn\n5/n444/ZvHkzzc3NnDp1ChcXF+rr68nJySEvL4+Ojg4Bb7G1tUWtVmNtbY2DgwMPHz7EysoKOzs7\nHBwcmJ6epqmpibVr14oAEX00+rJlyxgZGeH+/fvCYKV/33379hEcHExnZyeDg4McOXKEa9euMT8/\nj4eHBzk5Obz33ntcv36dvLw82tvbKS0tpbCwEDc3Nx4+fChIQubm5hQVFXHixAns7OxE6pJcLsfS\n0hJnZ2cBf1UoFDQ2NhIVFcWOHTu4fPmy+ID29/fz0ksvYWNjg0Kh4MyZM3h5eXH58mUGBwfx8fHh\n3LlzTE5OCp9CfX09SqWSDz74gOjoaIHHz8/P5+TJkwwNDQmadmlpKffu3RNzfIVCQVtbGyqViuPH\nj4uJw549eyguLubw4cPY29vz+eefU1BQQHp6OsXFxaxatUoYrezt7ens7KSurk40Gj///HP6+voo\nKSkRcuKKigqOHz/OqVOnOHv2rBB7/e53v6OhoYGnn36aiooKNBoNTk5ObNu2jc7OTl544QUhR9aT\nrRQKBVqtFnt7e2pra4mPj2dyclKklzU2NuLq6kprayvz8/OEhYUJ0V5MTAyGhoZ0dHSIMsrQ0BBb\nW1u8vLzEZ1WvdfHx8RGMSX0TfevWrTQ2NlJeXi60IPb29uTn5wsWyfd9fhA4tr/+9a+/TU5OZnh4\nmJCQEJRKJUZGRjg4OJCfn094eDhWVlbk5eVhYmJCfHy8SIQaHx8X7D4HBwfi4uI4fvw4GRkZREZG\nEhoayuDgIIGBgXzxxRdMTEwwPj7O4sWLcXR0pLa2FhMTE8zNzTl8+LBILrK3t+fSpUvC3qz8Lp4t\nLy8PIyMjdDodu3btEhDUwcFBccJLJBKxCGNjY8nNzWV0dJSRkRERB9/f34+/v7+Ah6alpREaGioS\nn/RPfX296IuUlJSwf/9+FAoFvr6+YnHX1dURFRWFr68vBQUFTE9P4+rqSkJCArOzs0IC/vzzz9Pd\n3U1LS4uQJ3t7e1NRUUFdXR1jY2Miu8He3p7y8nIyMjKEWnRmZgapVEpJSQk+Pj54e3vT3t5OSkoK\nhYWFWFpaitPWx8cHNzc30QQNDQ0lMjISQ0NDoqKiKCkpISgoCCsrK6ysrBgZGeHixYsiwl6fnG1i\nYkJubi4ajUbkZjY2NmJmZiYi2FpaWvj444+F6Mve3p6hoSGWL19OWVkZJSUleHp6EhMTQ2BgIA8e\nPODpp59mdnaW9vZ2tmzZgr+/v2Akvvfee8zMzDA6OkpfXx8mJiY899xz1NXVMT4+zurVq6moqCAv\nL4/9+/eLYN6hoSGkUikmJib09PRQV1dHRUUFAwMDeHt7Y2xsjKmpKVNTUzQ0NNDV1cXFixf58MMP\n2bRpEwMDA3R2dpKZmYmPjw9JSUksWrSIy5cv09zcTG1tLVZWVshkMvr6+vDz8xPW6qqqKuLi4sRt\neHx8nNnZWZ588knm5+epqqqipKTke+HYfhDlw/T0tMCsFRYW0tPTQ19fH87OzoLarOf2HzlyBFdX\nV9zd3QkNDRVTgc7OThYvXkxJSYk4dbZv305bWxtqtRqVSsWBAwfw9PSktbWViYkJzpw5Q2BgIE8/\n/TQ5OTkC352fny+aXfrRk/7GAgtp1j09PZSXl4uwEHt7e+E09PLyErr3hoYGkTmhj4ovKysjMDAQ\nAwMDAWIBGBkZESG4kZGReHl5ia81MzNDTU0Nzs7OGBgYkJiYSGdnp4Cg6nQ60ejUcyMqKysxNzcn\nLy8PqVQqYtBMTExwc3OjtraW3NxcTE1NcXFxQaVSMTIygkqlwtbWVqRF6/mOXV1dNDc388gjj1Ba\nWkpDQwMvvPACx48fJzg4mPfff589e/ZQWVmJk5MTs7Oz3Lt3D19fX4aHh5mYmCA2NhaFQoGhoSET\nExN0d3cTHR0tkrft7e2RSCRi9CmRSJBKpcLnsn37dkJCQujv7yczM5Of/OQn1NXVERYWxunTp7Gw\nsODs2b
O88847ZGVlkZyczBdffMH+/ftpaGigu7ubxMREjh49ypIlSxgbG6OlpQVDQ0P6+/uRy+Ui\nxNba2pqPPvqI7u5unJycCAwMZP369bS2tjI8vGDnqampEUAWIyMjurq60Gg0pKSkEBwcTHFxMa2t\nrVy9ehWZTCYSqj08PKioqODVV19lYmICBwcHcct999136enpoa2tjYsXLxIUFMTs7CwjIyNoNBoG\nBgZE2I25ubmwuefm5qJQKDAyMqKxsZG1a9eKJnxoaOj3Xo8/iE0BFkqIjIwMTExMMDY2RqFQoFKp\n+Oijj3j99dexs7MjOjqaixcv0tvbi6mpKd9++61IJf7yyy/JyMgQEFBYUDTev38fW1tbjI2NaWlp\nYWZmBgcHB3JycgS0RKPRcOPGDTIyMkSOo42NDQYGBpSWloqAGb3Rqra2Fnd3d/Ly8picnMTX15eS\nkhIxkmtoaGD16tWo1WouXbpET08Pa9asQS6XY2BgQFtbG1VVVeh0OuLi4oRkNjExkZqaGmJjY6mv\nr8fMzIyRkRHi4uK4d+8eu3btwtTUFCcnJ7y9vYXteNGiRWJyceXKFezt7fHy8kKpVOLn54e/vz/N\nzc188sknhIWFsW/fPtRqNRMTE+zfv5/Z2Vk6OzuZmZnhn//8J2FhYaSkpIhpglQqRSqViqnG66+/\nzq9+9Stu3LjBv/3bv7Fq1SoqKiqIjIwkICCAvXv30t3dTW1tLRs3bkSn0/HPf/6TuLg4UXLpKU7V\n1dWChgQL12YTExORKVpcXIxKpWLXrl2YmJjQ2tpKdXU1c3NzODs78/XXX6NSqUhNTeXIkSP4+/uz\nceNG4bTVz+cvX75MQkICPT09wqvR29tLRESEwNHpR8PBwcECAnz69GnGx8fx8PDg2rVrIrVaH65i\nbGxMdXU1t27dYt26dczNzZGRkcH4+Dj29vaEhYWJPsbt27fZv38/UqkUFxcXZDIZQ0NDODs7o9Pp\nRHAMLBi4ent7xaGjUCiwsLAQXNDx8XGWLFkiDpbS0lICAwPFxE4vfdeLyPSCvu+1Fv/15fyvP8bG\nxvT394tTWa9b//cOMzs7OwH70Gg0tLe3MzExQX9/PydPnsTPz4+pqSnm5uZEys7x48dxcHBgcnKS\nRYsW0dfXJ5phbm5uorY7ffo0np6eLFmyBBcXFxYvXkxTUxONjY0sWrSIL7/8kpqaGlH7b9q0SWgm\noqOjMTMzw8bGBjc3N1588UUxFq2urkYul7N27VoUCgVqtVo0hLy8vAT8VS+2MjMzY2JigqqqKjw9\nPXF2diY4OJjBwUHxdZqbm6mrq+POnTu4urqydu1aZDIZMzMzFBYWEh8fT1ZWFqWlpZibm5OWlsZr\nr73Grl27eOyxx/D39+fYsWOUl5fT2NgoaELz8/NIJBLMzc25fPkyr776KsPDwwLgOjg4SHBwMGVl\nZbS2tuLv74+HhwfR0dEUFBTQ09PD/fv3CQ4OFgImfc9AH5za2NiI8rsAno6ODmZnZ5HL5Tx8+BCF\nQgEsgGD0NytLS0u0Wq1o+i5btoxnn30WY2NjNmzYIMRmQ0NDVFZWUlFRgbm5OcbGxpibm2NlZSWS\nwicmJrCysuKbb77BwcFBlFbr1q0jKioKlUpFc3MzUqlUnNyNjY2kpKSQlJSERCIReL6goCAcHR2B\nhZ5CcnIywcHBXLhwgdnZWbZv3463tzdOTk5oNBqWLFkiRsJeXl4YGxszMDBAQEAAk5OTlJWVMTIy\nIrgQsCD3Hhsbo6Kigq6uLmHZ7u7upqenh8HBQfE6JyeHmpoacnJyxK1bv46MjY2RSqUisu/7PD+Y\nTUEvsBgbGyMvLw+dTsfTTz9NcnKy+MB0d3dz48YNLl26hEqlIj4+ngcPHggX2NjYGM7OzkLa6unp\nKTYItVrN6Ogov//973n48CFyuRwTExPMzMwEqVhPUXJzc2PXrl1ERUXR2toqdPj6D8K1a9fw9/dn\ndnYWDw8PkdKUmprKzMwMiYmJeHt7s2HDBsbHx2lpaRFd55mZGdGks7Gx4f79+6JbPjQ0hEajISsr\ni8nJSXJycqiurhY9AJVKhVQqxdnZmZGREXJzc8nOzkar1dLQ0CAaaLGxsWzevFmkbO3Zs4eGhgaS\nk5OFACkhIQGZTEZYWBgymYzBwUE0Gg02NjY4OjryxBNPEBkZiampqTgV79y5g4GBAX/+85/5/PPP\nqaurw9zcnP7+ftHs6+zspKqqisHBQYyNjXF1dSU9PZ3Vq1ezY8cOwd60srLC1NRULET9YtAnVysU\nCszNzfnDH/7As88+S21trTj51Go1VVVV7N+/nzfeeIPU1FQBLT106JBgMbS1tQlznIWFBQUFBSxa\ntIjg4GAyMjIExn1kZISpqSkGBwf56quvKC4u5uOPP+bevXsUFxdz6NAhjh07hkqlYmJigi+//FKI\njPR6gZCQEN5++218fHwoKSnh66+/prOzk9raWkpLSwVL4uHDh5ibm1NYWEh6ejpjY2NIpVJu3bpF\nTEyMKFErKipE6pWFhYUQlrm4uODg4ICFhQWWlpYUFBSIm4ye05CXl0dpaSmfffYZQ0ND+Pj4/F+5\nJH8Q5YO5ubn4AQUFBREaGsqRI0fEdVEikTA0NCRCS9va2sjLy+PgwYNYWlrS3NyMUqkUUew7d+7k\n008/pa2tjR//+McolUouXrzIw4cPWbduncggKCkpwdfXl7GxMdavX8+1a9fo7e3FyMhI1PnGxsa4\nubn9B0r07t27qa6uxsTEBAMDA+zs7IiIiEClUgmj0/9i772jqyzTtu/fTu+99957SKMEAgRCCaIg\ngooNCy4dxxnrM85YZnRZxl5eFGXUkaZOFBAIJSEkMQkJCSSBdFJ3etvp2an7+4O5z/U877fWOz7P\n9653+a7vuf4x7AAbs+/7uq/zPI/jdwwMDAj9aOnSpUxNTTExMYG/v79AUqytrXFxcREz0OTkJHBz\nh3dxceHhhx9mZGSEo0ePkpOTQ0BAAOvXr6eqqoqEhASio6NZWFhAq9Xy4IMP4u/vz6effoqLi4uM\naQcHB6mvrycoKEhyJpKSkgRoU1lZyYsvvsgzzzwjHoU//OEPdHV1STiKkhK1cuVKTp06haurq4x2\nbW1tOXHihLz/sWPHOHnyJL6+vqSlpfHFF18QFRVFamqqBKooAFYFo5+dnc0TTzwB3Iz6U5qFylSk\ntbVVTlrvvvsuoaGh2NvbU1NTg729PQkJCVRXV4sQ6uOPP/4PNnS4aSm2tLRk3bp1XL58WaLqOzo6\n2Lx5Mw8//LD0iOLi4li7di2FhYVs3rwZLy8vRkZGMDQ0ZGZmhosXL3LnnXdSVlZGUlISly9f5qGH\nHqKyspKTJ09y6NAh8vPzxaAVFhaGSqVCT0+P1atXk52dLQI3Dw8Pmpubeeutt5ienpZN0cTEhMHB\nQaE0qVQqbG1taW1tpbe3l2XLlgngp62tjaKiIh5//HEaGhrk7xw
CMVYSmFJTU1Gr1dja2or11M7Ojrm5OSIjI/n5z3+Os7Mzs7Oz4oZUgBIzMzPMz8/T3t5OXFwc\nzs7OqNVqFEfmAw88wPDwMLW1tZw6dYrU1FRWrlyJr68vNjY2HDhwQNyXw8PDws+bmJhgZmaGS5cu\nYWtrK56EkydPEhYWRlVVFRMTEzQ0NODu7i6J2UuWLOHy5cusXr2a3bt3k5WVxaZNm4TmfPHiRR57\n7DEmJiZobm7Gy8sLa2trOjs75Xo+Ojoq5VJNTY2wISwsLNi1axdNTU10dnbi4OCAq6srFRUVODs7\n09vbK5qI0dFRbG1tOXXqFNPT02zdupX+/n72799PTEwMGzZsYGhoSIjK9vb2BAYGin345MmTvPrq\nq1y/fh1jY2NCQkLw9vbm7t27/Ou//qtg6WNiYoiIiKCurk7Gk6tXr8bZ2RlnZ2fs7e0pKyujqKiI\nHTt2cOfOHf70pz8RHx+PRqOhvr5ecjMU1N3x48dxcXGhvr4ed3d3xsfHJTMzMjJS5Onnz59HpVLJ\nDWRubg4vLy9sbGwoKCjA1NSU0NBQxsbGeOCBB7h48aJoOMbHx/Hx8UGlUlFYWMjc3Jxcv8fHx8Ui\nr6y1a9dKcM/k5CRWVlb8/Oc/Z3BwkJmZGXx8fGhvb8fd3Z2RkRFaWlpQqVTcuXOHPXv2kJycjK2t\nrdw44H7uw4EDBzA2NmZqaoqjR4/yzDPP0NbWhrGxMQsLC8TExPD5559z8eJFrly5wk9/+lPs7Owo\nKSmhrq4OMzMzUbUq4qrvs34QNwVzc3P6+/vp6ekhPj6eZ599VkZWx44d46mnniI4OJjR0VEyMjJ4\n6KGH6O/vJy8vj8jISEZGRoiOjiY6Opra2lpJnS4sLGTNmjVYWVnh6uoqs+9ly5bh5uZGV9d9pqxO\npyM8PFwEOkqDpqysDDMzM1599VU6OjrkxP3kk09oamqSG4Wfnx9zc3NoNBqWLl2Ko6Oj4NyUa2JJ\nSQlqtVpOBSWpytbWVhp6u3bt4tatW9y9e5eXXnpJNgtzc3NmZmYwMjJCo9GgVqsZHx+XcmP16tUE\nBARgMBioq6uTnAVlFl9dXY27u7uMZoOCgqitrRXU3dzcHPb29jz88MMYGRkRGhqKiYkJ6enpfPzx\nx/T29spM3N/fnxUrVlBXV8fU1BQ9PT2Eh4eLkzQkJIQTJ07Q1dXFP//zP1NVVYWXlxdpaWk8/vjj\n3Lx5U24XU1NTEtGuSJ2V8mpiYkIYDffu3ePEiRMyp1cYjA888ABbt24VorUS9dbV1YW1tTVRUVEU\nFBQA92XDg4ODFBQUYGtri7u7O2ZmZvj4+HDx4kX6+vpoaGhAr9ezfPlyQkNDcXZ2JjAwEA8PD9at\nWyfP/Z49e2TEp9frmZqakn6ATqcTOPCDDz7I448/zrFjx3BxcWF+fp7+/4u99w6O8rz39q9V1660\n6nXVe0ENFSQkqgDRRLNNNwE3TNySuAS/9iEZx05cYju4YDtgAwaDjY0B0YVASBQJ1AvqfdV7XUmr\nsu8f8nNPzjtzTvj98ZvxmTk7owGEJbDQ8zz3fX8/n+vq6hJKvXv37vGPf/xDWLrPnDlDVlYWAH/8\n4x/ZvXs3SqWS5uZmtm/fTnNzM8nJySIb4e/vz/PPP8/OnTt55plnBKPTxsZGbMW1Wi09PT3CrvYw\nr1/FSqGvrw89PT1MTU0ZHh7m6tWrovvu6OjIZ599RkdHB01NTbz22ms0NTWxfPlyiouLMTc3Jy0t\nDX9/f9RqNfv27ePo0aPADOHYxcUFlUqFvr4++/fvx9DQkAcPHogT8vnz53Pp0iUqKysFcDM6Oppj\nx46JEMzu3bsZGBgQ+7KAgADc3d1xdnbG3d1dIMkWL14s4B0ajQaVSsXdu3fJzMzkySefxMjISOjV\nVCoVtra2AncGMzN6yXfR2tpKRUUFCoWCxYsXk5+fT0lJCa6urgLxZmZmRl5eHjY2NpiYmPDjjz+y\nceNGcnJyxGGX9DUtLi4WBCrJzH3s2DHCw8NFgEmCllhYWAg0fVBQEAYGBixZsoSWlhZsbW1pamrC\nwsKCW7dusW7dOuEXkMCm27dvF3TlDz/8kOnpaYKCgvDx8cHDw4OTJ0/i5eXF5OQk69ev5/Dhw+Kg\n0c3NDbVazaZNm/5TDXzbtm1otVo6Ojro6OgQ5i65XM7atWv55ptv+Pzzz0lMTORPf/oTlpaW4iwI\nIC4uDplMhrm5udhWHThwABcXF9zc3ATtS6pXx8bGotFoyMrKEjr3np4eOjo6kMlk4kZeX1+Pr68v\nYWFhHD16lM2bN6PVakX6VCpuxcTECKRaY2Mj3d3dvPrqq8TExPDTTz+xcuVKdu/eze7du9m/fz+r\nVq3iq6++4rnnnqOyslLU68+fP49Op8PPz4/Kykru3btHXFwcGo0Ge3t7gcbPysoiKiqK27dv09TU\nxBtvvPHQ1+OvYqVgZGTEwoULGR4e5scffxQ4bH19fdavXy/GWfb29rz//vscPnyYy5cvo1arRa5c\napT19vYK56OZmRl9fX2kpqaKqKjUQGxvbxcXX1paGkNDQ+jp6fHjjz+Snp7Ozz//jKGhoQCCenl5\niW7+hg0bhAJO0orfunWL8fFx1Go1jY2N/Pjjj3h5eWFhYSHGQ/r6+lhZWdHc3CzISPfv3xeBlbt3\n7zI5OYmJiQl37twhMDCQzs5OysrKUCqVmJqasmTJEqGfr62tRaVSCafF5s2bGRgYICYmhvr6elav\nXk1PTw8DAwPCDhUSEkJ7ezs2NjY8/fTTWFpa8vrrrzM1NUVFRQX19fUMDw+zZ88eYmNjaW1tpb+/\nX4xZy8rKUKvVnD59WlTDR0ZGxP63urpaCHY///xz4uLi2LJlC+7u7hgYGLB+/XqRloyNjaWuro6w\nsDDWrl0LzBy2xsbGcunSJQwMDMjJycHOzo7R0VG6urp48OABJ0+eJCMjg0OHDuHv74+lpSXh4eGs\nXbsWMzMzjh8/zuDgoKBjwcyZgtQhMTY25ocffqCyslLUzVtbW2lvb2d4eJiKigqGhoaQyWQUFxdz\n5swZdDod1tbWhIaG4ufnJ7oEs2bNQq1W09bWJpT1YWFhnD17lldeeUV8Pb799lvS0tKwtLQUFKSM\njAw+/vhjPvzwQ959913CwsIE8k+tVvPII4+IswYJTnPp0iX6+/u5c+cOjY2NAnkfEhKCXC7HwsJC\nwIOkDo30UHvY16/ipmBgYMD09DQhISEsWLCAd999FyMjI1588UWUSiWLFy9mcnJSJMVsbGyQyWQ4\nOTlhY2NDVFQUra2t9PT08PXXX3Pu3DlgZjleVlaGu7s7X331lRhF2djY4Orqip+fH4WFhSxcuJDI\nyEixzy0rK8Pe3h57e3taW1sJCAjgscceE6o1aZLR1tYmTvgjIiLo7++ns7OT/fv3Y2pqipmZmeAP\nWltbMzk5ydTUFDk5OdTU1PDgwQNCQ0NRq2ekWgqFgsTERORy
uThQjYuLE/IZaW8v/f3nzZvHnDlz\nCAwMZNu2bYSFhfH4449jbGxMU1OTkLa88847DA0NsWDBApqbm0WzMCoqinXr1uHs7IytrS0rVqxg\n2bJlFBcXk5OTg1arZf78+Wg0GgoKCujr68PCwoKuri58fHzw8vLiq6++oq2tDTc3Nzw8PISJOiUl\nhePHj2Nvb4+dnR09PT1MTEwIPJmhoSEVFRUEBwcLqQtAeno6ZmZmol7t6OjI8PAwjo6OAu7r6OhI\ncnKy2BZKbgsJkb98+XL09PSEzBZmDrNbW1vJzc2lubkZAwMDQkJCMDMzw8DAAIVCgZeXF3PnzqWn\np4ezZ8/i5OREbGwsNTU1jIyMCHxeaWkpe/fuBWZWo3FxcYJALZPJePPNNwFYuHAhgYGBODg4EBMT\nQ2hoKD09PSLa3NDQQFBQEAcPHsTd3Z3y8nLBaFQoFGRkZFBaWkpVVRXffPMNfn5+PPfccwQHBzMy\nMoKBgQFarZb09HSOHTvGkiVLSEhIYOHChezcuVNQxrOyssjIyHjo6/FXUYj6+OOP/zw9PS2WVkeP\nHiU7O5vx8XHmzJlDcXEx33zzDenp6YKUIx3qGBgYiJx4YmIiHR0dJCUlceTIERYsWEBeXh6rV6/G\nzc2Njo4OlEolkZGRyOVyjh07hlqtZsuWLUxPTxMdHY29vT0RERHMmjVL0Iru3LnD5OQkmZmZdHV1\nsW3bNgYHB7GysmJ8fJyKigoxyw4NDWX9+vVs2rSJ9PR0SkpKGBgY4Pr160JyYm9vz/j4OO3t7fj6\n+jI9Pc3Zs2fZsWMH1dXV3Lp1izVr1ggA6Z/+9CdgBu5SWlrKTz/9xOjoKBkZGQIyAjOMR5lMRmtr\nqzj9v337Njk5OXh6epKbm4uJiQn3798nJiaGoKAgdDodu3fvZvPmzSxYsAAnJydUKhVvvPEGY2Nj\nwtcQFxfHsWPHBCnJy8tLMBFcXFxITExkeHiY5cuXMz4+Lp68EmZeOty8ceMGCxYsoK6ujsLCQtra\n2kTdWVryhoeHi+br3LlzBeMxOzubwMBAli9fTkZGBrNnzxYrv+DgYDZs2ICPjw+2trZ0dHTQ3t7O\n0NAQaWlpAuZraGjIihUrsLS0ZPXq1URGRlJdXY2Hhwfj4+MoFAoMDAxE5TspKYmgoCB6e3sFC1Fq\nxWZlZbFjxw5MTExEEOqtt97ik08+ITw8XMBYJIq2q6srw8PDFBQUsHz5ckJCQoiIiODBgwccPXoU\nGxsbcnNzhZjXx8eHoKAgkY/w8PDAw8ODmpoaFAoFY2NjPP300xQWFgqgb0NDAzKZDGNjY7RarRjT\nZ2ZmUldX9z+nEKXVavH19WXRokUC6ZWQkMDy5csZGBggIyMDGxsbvLy88PX1RS6Xi1PlqKgoccgo\n0YMlyoxGoyEhIYHTp0+LFqGBgQHFxcUYGBiwZcsWhoaG+Oqrr7h06RL37t0TTzQrKyv+/Oc/ExAQ\nwMqVKwkJCRElGCnDkJqaikqlwsLCQjgcDQ0NGRwcFOo7iSo8e/ZsVCqVsFsNDg6i0WjEJET6OsDM\n1OSVV16hvb2drKws5syZwyOPPMKqVavo7u4mMDAQMzMzRkdHRRBJrVbz/fff09XVRWtrK8HBwVRV\nVTF37lw2b97M5OQk7u7uFBcXM3v2bKqqqkhLSyMlJYXp6WmUSiVHjx6lra2NtrY2nJycGBwcJCAg\nQPAp16xZg06nE0vk0dFRhoeH8fDw4MKFCwwNDVFTUwPMHJS5uLjg5eWFXC5n8eLFxMfH4+npSXNz\nM/X19bz66qvi6yB1H6Kionj77bfRarVERkYKM9acOXOIiopi5cqVvPbaa+h0OhwcHMS8/vbt24KG\n1NnZKf5dEhMTAVi6dClBQUFYW1vT0NDA4OAgp0+fpqqqSjg7cnNz+e6772hubhYIu1dffZWtW7fS\n19cnyN4mJibs3r0bgJ6eHiwtLUlJSaGiooLw8HC8vLywt7cnNjZWNEWbm5tpa2vD1NQUHx8fZDIZ\nzs7O3Lp1i0OHDpGUlISpqakYT8fHxwuZzJkzZ3BwcODGjRviMLSpqYm+vj5OnDghMHvSBM/V1ZXO\nzk58fHyYmJhgbGyMF1544aGvx1/FSuHtt9+UvTnYAAAgAElEQVT+s6OjI9evX8fOzo7s7GxWrVol\nBCwmJibihrBo0SJhQDp37hy9vb34+vqiVCrJzs7G2NhYRHh///vfi4DLnDlzhFBGqVSip6fH5OQk\n9vb2TE9PY2dnh7GxMQUFBRgZGdHb2yuMRhcuXMDFxYXo6GhOnDjB5s2bhehlbGwMf39/nJychOlI\nQqnZ2NgQHBwsuIbS6bNGoxESGq1Wy/j4OGfOnEEmk4ng0ZIlS1izZg2urq4kJCQInXlOTo44KZdw\n5dISUpKWymQy7OzsCA0NlSqzKJVKMjMzRUZCq9Wi1WoF5qyhoQFHR0dCQkKE3l6j0WBhYcHQ0BCG\nhoZYWFgwNTUlMGdGRkZ0dXVx5MgREfXt6ekhICCAjIwMEhISqKqq4urVq+LgTrIjSdr25uZmAgMD\n0dfXJz09nU2bNmFjY0NmZqZ4ukvbh6SkJC5evIi3tzcGBgb4+voyNDTE/v37aWhoEN4MmUzGuXPn\nsLe35/79+9y6dYvk5GSmp6cxNDQULcz6+npMTEwwMjIiKSkJe3t7hoeHRc27sbGRJ554gkOHDmFp\naYmbm5s4vzEwMODkyZM4ODgIm7bUzPTz8+Ps2bN0dHTw2GOPCZ/D1q1biY6OxsDAgPz8fC5fvsy8\nefNwc3NjcHCQ6OhoDh8+TGVlJTt27MDX11cwHT09PTlx4gRDQ0MsWrRI6AWdnZ2ZPXu2UMwZGhpy\n/vx5GhsbiY2NZfv27Vy6dAk7OztOnTr1UCuFX8VN4cCBA3+W3HlXr14lJCSEiooKYfd5/fXXuXPn\nDpGRkVRVVaGvr091dTUajUYkIevr60UUeWRkRFzcZmZmrFixgoCAAHEmMDY2hkaj4datW+KbSzq8\nGxoaYnR0VODMAwICCA0NxdbWlp6eHq5fv84777wjPo80LfD29qa1tVX02SWSTmRkJBMTEwwPD2Nn\nZ4dSqcTPzw9TU1PRmdfX1yclJYV3332Xu3fvsnLlSurq6rCzs2NkZISqqipu375NVlYWy5Ytw9jY\nmOTkZDQajbh5jI+PMz4+jr+/vzBmZWZm0tnZSWlpKUuXLsXR0ZFbt27h4eEhzN1ubm4i7CO5LQcG\nBtDX12f79u1MTU3xzjvvsH79eoyMjOjp6REGpMjISPz8/PD19aWhoYGMjAwxpo2NjeXEiRPY29sT\nHh7O9PQ03377LQUFBZSWlhIfHy+WuZKGPSUlhcDAQJycnAgLC0NfX5/U1FTWr1+PXC6npqaGDRs2\noFAoxMW+YcMGsrOziYiIQKlUEhQUxNDQkLhAh4eHhRf0+vXrdHR0iAi6RJyW7M4Snj4tLY0VK1bg\n6uoqWp1qtZo5c+Z
ga2tLdHQ0bW1tnD17ltdee00ATUJDQ2lsbGTPnj2YmppSW1srNHYvvfQSQ0ND\nvPHGG9TX1zN37lwBEwoJCeHUqVOiI5Kfny9uaP7+/qxYsYJnnnmGRYsWodPpWLduHXp6eqKmLlnZ\nHR0dqa2txdfXl87OTuRyOTKZjNraWr755hva2tr+52wfurq6qK6uJiAgAGdnZ5RKJRs3bqShoYGe\nnh58fHywtLRk6dKlhIaG0tvbS0FBARYWFkxOTqLT6SguLqavrw9AHNZIBKG2tjaOHTvGxo0bBdlo\ncnKSZcuWsWTJEjQaDSdPnqS+vp64uDgBSNmzZw+enp4MDAxQW1srDq16e3sFjUeKAY+Pj+Pr60tc\nXByXLl1icHAQS0tL7t27R1VVlWBASmUeNzc3dDodixYtEk9EJycn8USXMHTS8tbCwoLFixeLp/ad\nO3eYN28eXl5eDA8P09zcTFxcHL29vWJkKGnq/vCHP6BQKDh37hzLli0jNjYWFxcXPD09hSW5ublZ\nTGoUCgVPPPGEcFN4enqSmprK4OAgFhYWmJqaotPp6Ozs5IsvvhBxZj8/P7Zs2UJ9fT1ffPEF/f39\nDAwMoNVqUSqVTExMiKCSXC4XTMXOzk5x2Nrb2ytuLuPj42zatIn6+nquXr0qFHQXLlxAqVQSHBxM\nQUEB7u7u+Pj4CLdHdHQ0FhYW/Pzzz2I7Y2VlxVtvvUV/fz8jIyNkZ2djZWWFk5MTpqamREZGkpeX\nx8GDB9HT06O6upqWlha6u7uRy+U888wzODs7ExYWxo0bNwRoNj09XTgxIyIihJVJyllERkYSFRUl\nqOBSulOKKEsrYUlss2rVKgCWLVvGrl27sLW1JSwsTGwZJiYm+Pbbb0UgbnBwkPXr14uvtbm5uUiW\nSjzSiYkJcU08zOtXcVPw8PDAxMSEgwcPEhERQVpamiDUSpHWBQsWMDAwILID5ubmgo+g0+mEGUoa\nx8HM6DA0NJSpqSmCg4PRaDTMnTsXBwcHEhMTWblypYgkJyYmivZhcnIy27dvFyEhW1tbHnnkESGD\nsba2Znp6Gh8fH+HAhJn9pbOzs7D+KBQK9PX18fPzo7m5GXt7ex48eMDY2BhDQ0N0d3eTmppKbW0t\nMENzlnIJ69atw9PTk5CQEFavXs327dvFVOLbb79laGiIy5cvk5qaSmFhIVqtlps3b6JSqVi1ahXV\n1dXCu3D//n0qKytZsWIFo6Oj6HQ6QkNDxR57dHSUiYkJCgoKqK6uFqfb0spozpw5eHl5iRN9PT09\nVq9eLdyf0dHRKJVKdu7cye3btzl48CAPHjzg2WefJTAwUMTQpYsmIiIChUJBcHAw1dXVGBsbC7lK\nSEgIvr6+jI6OMjY2Rm9vL319fezcuRNbW1s+//xzbG1tGRgYwMHBgStXrpCamoqpqakwaqnVao4c\nOcLk5CRPPvkkMDMx+uc//0lISAh6enp4eXkxMjJCXFwcarUaJycnNBqNKJNJZGjpLMbLy0t8zI4d\nOwRwZ/v27QwMDDAyMsLExARHjx5l3759zJkzR5C+wsLCKC0t5W9/+xuOjo7Y2tqSnp7ORx99hJ+f\nH2ZmZnR3dzMxMSHgP/39/WRnZ9PQ0MDRo0fF9ZCamkpbW5vYxqxatUpshRsaGhgaGiIqKorMzExx\nPqbVavHx8Xno6/FXEV5qb2/HzMwMV1dXDA0NOXz4MMXFxaKWamVlRVhYGEZGRlhbW9Pb24urqytn\nz54Vrb5PP/0UAwMDXn31VbG8r62tFV2K48eP09LSgoODAwEBAZSWlmJnZ4eTkxO5ubmEhYXR29vL\n3bt3uXfvHhqNBldXV9asWUNOTg4PHjwQshDpianRaJDL5SQkJODi4kJXVxeXL1/G0NBQkIycnJy4\nf/8+crmc2tpaTExM0Gq1BAcHMz09TV1dHcPDw8BMwEYKz9TV1VFTU8OlS5eYN28eg4OD+Pj4YGZm\nxssvv0x/fz+ff/65UOlFR0czOTlJd3e32Bo4OztTWloqeg5lZWWsWLECW1tbSktL/5PXQaJkHzp0\niNTUVEJDQ2ltbWVkZETQh2bPnk1zczPV1dW0tbVhZWUl+g9RUVEEBAQAMxeKdC6wevVq6urqKCoq\noq+vDxsbG4aGhmhtbRX0qKKiIjG7HxsbE2dIUghJGstKc3wpVi6NL1etWsXs2bP58ccf+eyzz9iz\nZw9JSUksWbJEfG2lFRbM5GImJiYwNzenuLiYyclJxsbGsLW1xcfHhytXruDp6SlG3fr6+hQWFiKT\nyaisrKS3t1cE2W7duoWdnZ3AvwUGBuLv74+5uTl+fn6UlpaSmZkpiFs//PAD5ubmrF69mqysLFpa\nWnjnnXfE/4eUxjUzMxN4eDc3N+7cuYNGoyEuLo7PPvsMS0tLJiYmOHDgAA0NDdjY2DBr1iwCAgLI\ny8sTq+6cnBxxCP+wr1/FSkGSjNrY2PDZZ59x48YN9PT0hP6so6OD2NhYzp49i4eHh9hXP/nkkyJw\nItVfa2trRfpLOo3t6Ohg8eLFQuoxPj4uCjxSK06hUGBqaiq6FZGRkdjb25OTk0NFRYWYbwMiuuvk\n5ISZmRkNDQ3cuHGDkpISpqamBB7Mzc0NPT09QegdHR3F1NRUzM8bGhqEgg1mSMb6+vqCPuTo6Mj2\n7dspKCigs7OTjz76iLy8PN5991127dqFp6cnhoaGREREEBwczNDQEM7OzjQ1NQkEmFQdbm5uZs6c\nObS3t2NiYoK5uTmWlpZ4e3tjZWUlLn5ra2tkMhk///wzP/zwA2VlZYSGhuLj40N+fj5VVVVERkay\nY8cOioqKyM7OJjY2VkhiZTIZmzZtYmRkhIGBAaqqqoSCPTg4mF27dtHa2iqeoOPj44I3ADPtw8HB\nQc6dO8fu3btxdXUlMDBQBJmUSiWDg4OiNLd69WqxfAZEgCsoKIiuri4uXrwIzKzibG1tBbo+PDxc\nuCZv3bqFu7s7c+bMYfPmzSiVSkJDQ7G2tsbExISioiIqKioYGRmhvb0dd3d3AZqVCNxS6/abb74R\nKdYrV66IQ9j+/n4iIyNF7VqhUPDYY4/h7OzMb37zG7Zu3crAwACvvfYaMLN6npqaoqOjQ6jqJB/q\nwMAACoVCuCSkMXF/fz9vv/02g4ODonDn5+dHamqq+N59mNfDqOhdZTJZukwmK5PJZA9kMtlLv7zf\nWiaTXZPJZNW//Gj1Lx/zukwmq5HJZJUymSzp3/0Z+vr6DA8P4+7uzquvvkplZSXl5eUEBQVhbm7O\nggULKCwsFDRhOzs7XnnlFSwtLUXJaWJigv7+fiFegZnYsJROS01NRS6XU1JSQltbG4cOHSI3N5fy\n8nLs7e3FQeWGDRsEhHV4eJioqCjWrl0rHBDSP5ixsTF9fX1COKNQKESrLjIyEgsLC+rr6wUwdv78\n+URGRqJQKKiqqhJS2YqKCpRKJQC3b99GqVRSVlbGrFmzCA0NJT4+nrffflu4
HwEaGxvZunUr4eHh\nvPjiiyxatAitVsuWLVvIycnB2dmZwMBAnn76aWQyGfr6+vj7+4sDse7ubsLDwxkbG8PS0pLq6mqO\nHTvGzZs3GR4e5sCBAzx48IDu7m4KCwsxNzfn5MmTwsfY0tJCc3OzqJL39fUhl8uZnJwU+YOLFy+K\nktqiRYvYt28fERERLFiwgMcff5yRkREePHgg9sySMxJmRsmrVq3C29ubxYsXo1KpyM7OpqysDJVK\nxbJly8T+Picnh8HBQUZGRqioqMDa2pr58+eTm5srDm1hZsunUql45plnaG5u5siRI9jZ2eHi4sKG\nDRtEmU4mk+Hu7i7yGlZWVgQEBLBq1Sqh87t06RKTk5PADJMyMjKSrq4ucYYza9YsVq1aJfTwHh4e\n9PX1cePGDfGAcHNzQ6vV4u7uLqZRd+7cERH9rKwsUWCzs7PDwcEBCwsLYmJiMDU1Ze7cubzzzjtY\nWlrS2dkprNNyuZy6ujo6OjoE50G68T/s62G2D5PAyzqdLl8mk5kDeTKZ7BqwE7iu0+nelclke4G9\nwB9lMlkQsBkIBpyBNJlM5qfT6f5LyoO0nCwuLmbv3r10dHSwdu1a0RWwsLAQToFdu3YRERFBVVWV\nsAj39vYyd+5cVCoVVVVVbN++nW+++QYLCwvUarW4uZSWlvL6669TVVVFRkYG//Ef/8GsWbPw8/PD\nw8MDpVLJ2NiYKO1I0tG+vj4iIyPFRTAyMsK1a9fo7e1lz549dHR0YGtry9q1a9HT00OtVpOdnU1e\nXh7+/v64u7tjYmIi2oHffvstzs7O4oYxMjICgKWlJaOjoyxcuFDQqKenpxkbGxOQ2A0bNhAZGcnd\nu3cJCAigoKAApVJJZWUlmZmZYnJTXV1NQUGBCMBMTk4yMjJCcHAwJSUl1NXVsX79ejEW7OzsZNWq\nVQQEBNDT00Nrays1NTU8++yz9Pb2MmvWLLq7uwVwVKJSOTs7U1ZWho2NDSdPnqS2thZvb2+Sk5PF\nE6usrIyjR4+iVCrZtm0bbW1thIeH8/nnn7N+/Xp+/vlnEUdOSEggIyODrKwsVCqVyAAsWLAAV1dX\nbGxsKCsrE6GeoqIi3nvvPRwcHNi5cyeGhoaUl5cLPfulS5cA2LRpE6dPnxbj46mpKd577z127tzJ\n+Pg4AwMDhIeH09PTQ1RUFPfu3cPFxQW5XM6tW7dEp2D58uViSwawbt06wc6MiIggKSmJU6dOkZSU\nRFhYGAEBAajVaiYnJ2lsbKSvr0+o/7Kzs5k3bx7vvfeeaOhKWRW5XI6pqSl9fX0iWRsTE8Pw8DDn\nzp2jpqaGmpoasaKQzoXmzZtHU1MT/f39KJVK2tra6OjoYMmSJXz99dcPdVP4tysFnU7XptPp8n/5\n+RBQDqiAtcDRX/6zo8C6X36+Fvhep9ON63S6eqAGiPnv/gxDQ0OBxpaEKampqZw5c4YjR46wZs0a\n7O3t+fDDD0lKSqK8vBx3d3cmJyeJiYkhKipKzKVjY2NFLl3qmkuilri4OPbu3cv58+dxcHBg7969\nREdH09zczE8//cScOXMYGhrC29ubhoYGgSULDw8X1CdAnO6vWLGC77//ngsXLgjmQGNjI3l5eXR3\nd+Pj4yMwbDY2NlRVVVFfX09TUxMuLi6irSl5H1xcXHBycmJkZITKykpqa2u5ePEiarWaY8eOcfny\nZR48eEBvby+zZ88We1tTU1PWrVtHbGysoO8kJiYSFBSEh4cHn3/+OVVVVcjlcg4ePMjdu3exsrLi\n7NmzZGZmCjuRNC+Pj48Xlmcp1mxtbc34+LiwNCUlJdHc3MzAwAB2dnYcPXpUdBCysrIYHR0lODhY\nxKalGLWVlRVlZWViRm9kZMSyZctEzLmgoECc1YyOjmJiYkJzczO5ubl8+eWXQsXm4OCAQqEQYN+N\nGzdy+PBhqqqqWLhwoYDzzp8/H4DCwkImJiaIiooSq4k//OEPmJubMz09jbe3twDG3L9/n6SkJEZH\nR7l69SpVVVXk5eWhUChobW1lcHBQcA9UKhU+Pj4sW7aMDz74gHnz5okb7dmzZ8WDqbu7m4KCAhoa\nGsjJyeHKlSvY2dlx4MABHnvsMbKzs/H19eXgwYPAzIGriYmJMJGrVCpkMhk6nY7R0VHq6uoICAgQ\nmr2wsDDkcjktLS0sXryY0dFRpqen6e3tZfXq1WL1/DCv/09nCjKZzAOIAO4BDjqdru2X32oHHH75\nuQpQ/8uHNf/yvv/3cz0jk8lyZTJZrk6n4+OPP8bW1lYUPEpLSzExMeGFF17gn//8JyMjIzg7O2Np\nacm2bdtwc3Pj/PnztLW1kZ+fT319PXfv3qWrq4vR0VFgRlx75MgRent7GR4extPTk+DgYObPn4+X\nlxfl5eX4+fkRHh7OunXr+PTTT2loaKC1tRVHR0dREJLm8hLFR6r7jo+P09DQQHR0tNgiSJVefX19\nnJ2dCQkJYWBggKNHj2JmZsaDBw+Ij49ndHSU7u5uYmJihLDEysqKc+fOiWBRYWEhk5OT/Pzzz0xO\nTtLa2sqlS5cEhlyCkFRXV5OamopWq8XQ0JCenh6xXcrPzxeQEElsamJiglKpFKlLExMTQkNDBdWp\nrKyMnp4eYmJi6O7uJi4ujsWLFwsJzr1790hNTUWn02FhYUFdXR07duxAo9FQUlLCn/70J2xsbLh5\n8yZjY2OMjIzg7u4upiBSItXGxkZAYNzd3QHEk3p6epqmpiaam5tZvnw5165dE4GdlJQU8vPzxc01\nNjb2P6HevvjiC2xtbYWFCaC1tVXcILy8vCgsLKSyspLc3Fyys7OxtrbGx8dHTImam5vp6Ohg7ty5\nODs7Y2RkhEqlEoeU0ufNysrirbfeIiMjQ2xDn3jiCeEAlcJc7u7udHd3c+zYMWpqakhNTaW7u5vc\n3FzOnTvH6OgoNTU1Yjx98uRJ9u/fz+nTpxkYGKCpqUmMQq9evcqDBw/EYbTk4mxtbaW1tZWMjAwh\nntHpdJw4cUJ83od6Scqxf/cGmAF5wIZfft3///x+3y8/fgZs/5f3fw08+m8+t+5/3/737X/f/n9/\ny32Ya/2hRpIymcwQOA18p9Ppfv7l3R0ymcxJp9O1yWQyJ6Dzl/e3AK7/8uEuv7zvv3x5eHjwwgsv\nUFhYKDgFSqUSjUZDcXGx6Oy7u7tTVFSEra2tGBWNjIwQFhaGk5MTZ8+eRaVSodPpeO2119i7dy+1\ntbX4+/tjZmZGV1cXnZ2drFmzhs2bN/PJJ5/w3XffERcXJxKLlpaWhIWFkZGRIfwHMFM2GhgYYOfO\nnZw+fRpbW1va2tq4du0aGzZs4OOPP+bQoUOiAZmXl8eyZcuIiopCp9Nx//59kdB79NFHBffh6tWr\nBAQEsHnzZj744ANxqm5vb8/k5KR4imk0GkxNTTE1NRWacycnJ0ZHR4W0tbGxEaVSKcJOtbW17Ny5\nk5SUFLGiSEpKEmGn+vp6EhMT8fD
w4K9//SsbNmwgMzOT4OBgysrKSE5OZmJigsbGRqqqqkhMTOTn\nn38WKr2LFy+SnJyMm5sbpaWlzJ8/X1Scm5qaREBIX18fS0tLnn32WZqamnjppZdYvXo1paWl1NTU\n8MgjjzAxMcG+fft49tlnMTY2ZtGiRaSnp5OUlMTU1BRnz55l9uzZODo6kp2djVarxcbGhnPnzrFw\n4UJWrlzJ119/LRqjhw4dwsHBgYaGBj7++GPRbZCW3Lm5uURHR4tw3Ouvv87WrVtxcnIiOjqalJQU\namtrWbp0KW1tbcyePVvUyG/cuEFcXBxPPfUUfX19NDQ08P333+Po6MiDBw8EwGfXrl1C3vLGG28I\nIKyxsTH37t2jq6uLvr4+QSP39/enoaGBV199lUOHDpGeni46H6WlpZibm9PS0iIs2Y2NjcTHx6PR\naDhz5gwvv/wy5eXlqFQqWlpahN1a4jWuWLHiYS73h5o+yJh52pfrdLqP/uW3UoDf/PLz3wDn/uX9\nm2UymbFMJvMEfIH/dkg6NjaGUqnEyspK+BLLy8sFxNXZ2Zny8nLa29t5+umnRdjExcUFf39/+vr6\nKCgowNfXFxcXFzElGB8fJyoqCjs7O3Fg6OjoyNmzZ9m3bx/ff/89zs7O6Ovr09bWxsjICNPT0+Lv\n8d5777FmzRrKy8v5/e9/T1paGgBBQUHk5uZSU1ODv78/GRkZeHh40N3dTUNDA97e3vzmN79BX1+f\nlpYWIX1ZuHAhe/bs4aeffuKnn34S39jl5eXAzOGSFHZSq9VCgBscHExCQgJxcXECMLtw4ULUajVK\npZKkpCSKi4uxt7fn9u3baDQa+vr6WLRoEePj44IiJDke1Wo1ZmZmJCUlcenSJYqKikhISKCpqYkl\nS5YwNTWFhYWFSM7pdDqhj4+MjBQA04CAAJqbm/nxxx9Rq9V88MEHlJWVkZOTQ15enkDISd/47733\nHrdu3WLbtm3MnTuXgIAA9u/fj7m5ueBiJCQkCDV7QEAAN27c4OLFiyxduhSVSkVaWhqzZs3i+eef\nZ+/evezbt4/IyEg+/fRTFAoFgYGBfPjhh8jlcvz8/Fi6dCkwk5QMDQ2lrKyMzMxMDA0NSUtLw8TE\nBLVazerVq+nr6xMNUTMzM2xsbLh48SKTk5McOXKE7u5uZDIZa9asEVSsp59+mra2NmJjY2loaCAk\nJISamhqmp6eZmpqis7OT5cuXk5KSglar5bvvvhMBqfHxceLj41m9ejWGhoZkZGQgl8uBGUbjK6+8\ngre3N0VFRaINqqenx6JFi/Dz8xM+Tblczl/+8heys7OxsLBgeHhY5FBKSkpEivNhXw9zphAPPA4s\nlslkhb+8rQTeBZbKZLJqYMkvv0an0z0ATgFlwBXguf9u8vDLx6DRaNDX18fDw4MrV65gbm7OxMQE\nRkZGPHjwgC1btpCVlcWJEyf4+9//zuuvv84HH3yAjY0NMTExdHV1iTqyZIdOSEgAoKSkBIVCgZ6e\nHhqNBjs7O1JSUnB1dWVoaIhLly5x7tw5goKCkMlktLe3o9Fo2LdvH3fv3mXWrFmYmpoK0ak0Wzcw\nMMDGxgYDAwOWL18uDr5KS0vx9fVl27ZtuLu7c+vWLZydnTl37hxGRkZs376dDz/8EAMDA8zMzERL\ncmpqSvQOJNHu9PQ0eXl5gjwshWYGBwcZHR2lsbGRlpYWEWP19vbGyMhIiEqlhp0Ud21tbRXWq8LC\nQry8vKirqxNjy/7+foKDg1mwYAHvv/++CEZpNBqcnJxwdXXF29ubv/3tbxgaGoo5e1FREQqFApVK\nxfj4OCEhIZw/f174IHt6ekhMTMTZ2ZmgoCBu376Nj48PjY2NhISEiESjubk5QUFBqNVqAgMD+f3v\nf8+WLVvo6uqiu7ub5ORkrK2tOXnyJH//+9+5c+cOlZWVbNy4kU8++URo5ywsLLC0tBQcASmFGhMT\nQ3BwMC4uLkRFRZGbm8vg4CCrV68mPj6e8vJytmzZQmBgIKtWrWLZsmXY2tqSkJCAu7u7uFlIU4KY\nmBiuXLnC6OgomzZtws/Pj/Xr19PT08MXX3yBi4uLQKxJN+7jx4+TlpZGX18fWq2WhoYGent7iY2N\nFaPD5uZmvv32W/r6+lAoFLS0tJCfn09dXR1ffvnlfwpNLViwAH19ffT19XnzzTfR09Njzpw5LF68\nmJ07d4opxMO+/u32QafT3QZk/8VvJ/4XH/MO8M7D/iUkwIUE32hvbyc9PZ379+8THx/P1NQU2dnZ\nxMTE8NprrxESEoJWq+WDDz4QFmkJoCLdaWEmlx4VFSWCOT4+PtTV1TE2NiZckba2tsTGxnLnzh26\nu7uxtLTkzJkzVFdX09fXR1JSEl5eXrz//vs899xzAFy4cIHQ0FB0Oh1VVVU899xzYuszNjbGkiVL\n+OGHH9i9ezd1dXVcuHCBgYEBmpubcXBwoL+/X2QL9PX1RcCopaUFLy8vBgcHBXtS2ioYGBjg5+fH\nokWLRCV2wYIFHD9+HGdnZwwMDJicnGTp0qUUFRWJMMujjz5KUVERarWa6elpduzYwalTp4SOfmJi\ngrq6OoGEk+K+Op2O/Px8IiMjqampEdo6CSl+5swZPvjgA958801OnjzJ22+/zf/5P/+H48eP85e/\n/IW+vj7Wr1/Pl19+ibu7O2vXruX27QvE3jAAACAASURBVNtiElFaWsr09DSbNm2ioKCArVu38vPP\nP5ORkcHU1BTGxsacPXuW5cuXc+7cOZE+lS5GKSyUnJxMYWEhLS0t3Lx5k9DQUJ566ik0Gg3Z2dki\nyScRsbZs2YKDgwN6enqiuTgwMCAAu05OTiQkJKCvry8SnfHx8dy8eRNPT0/a2tr4j//4D5ydnQHE\nk7mpqYkrV66gr6/PwoUL2bVrFx0dHSQmJqJWq8nIyKC3t5eNGzcyPT3NlStX8PDwENyH4eFh2tra\nBDFcaqT29PSI7EteXh5ZWVkiOj4+Pk55eTnj4+PodDpSU1NZsmQJX3zxBeHh4axfv164LP71uvh3\nr19FzHliYoL6+nomJiaIiIgQqnGJ62dqasq1a9eQyWRs27aNhQsXij1YfX296CdIHIDq6moA0Vd/\n7LHHKCoqYuXKlVy/fl0kCD08PPD19aWkpAQLCwv8/f0JDw9HoVCwfv16qqqqGB0d5R//+AdPPfUU\n8fHxNDU14ejoSGdnpxizffXVVyQnJzM0NMT9+/dxd3dHp9OJcZaTkxNbtmzh3Xffpampiblz5zI9\nPU1PTw8hISFiXKRQKAQANi0tTewh161bh1qtFjXmhoYGpqamMDIy4uWXX0atVmNkZISnpyempqaY\nmJjg4ODA/PnzkcvlAtMllZliY2O5cOGC8GU++uijeHh4oFaraWpqEhfuggULxJREX1+fiooKUZA6\nePAgxcXF7Nq1iw0bNnDmzBkOHDhAbm4ue/bs4Y033mB4eJiIiAgmJia4ffs2u3bt4tSpUwQG
Bgr/\npoODg/g7w0wE2cnJCVtbW06dOiUI0r29vVy4cIFvv/2Wr7/+GisrK4KDgwkMDOTmzZvU1tZSXV3N\nhQsXhCvE0dFRrO60Wi3r168XN5DHHnuM2NhYtFotOTk5mJmZMTk5KQCyUh3dz8+P4uJivL29+eij\nj8QZ1vXr14GZ1Z2Erzc2NkalUqFWqwkNDWVsbAy1Ws3IyAjLly/nzJkzwp/p6OgoTN8WFhZEREQI\nxiTMjDq1Wi0WFhbCmt3f309oaCiPP/44dXV1zJo1i5s3b6LVagWVOycnhxdffJH29nays7MZGhoi\nMDBQbHce5vWriDkbGxtjaWmJjY0NbW1tYrzk6OgoyjZ2dnbI5XKef/55wsLC8PHx4ZlnnuGJJ56g\npqYGlUpFfHw8Xl5eIkc/OjrKo48+yuDgIIODgxQUFJCamkpHRweGhoZ4e3tTWVnJ0aNHuX37Nj/9\n9BNvvvmmmMl7enoK/4MEJ4GZKK7Ujty3b5/Qvnd3d9Pa2kpqaioeHh4iuBQbG0tqaio7duxgz549\nIkdhbGxMc3OzwIUbGRkxMDBAYWEh69atw9bWlsDAQK5duwbMjEJLSkp4++23+eSTT3B0dMTY2JjI\nyEiuX79OZWUl165dIzIykvv37zM8PMypU6dwd3dn0aJFdHZ2cvr0aXp7ezEzM8Pb2xtTU1OGhobE\nUtXGxoZnnnlGHLZ+9dVXDA4OsmjRIhGSunPnDomJiRw4cIDdu3czPj5OR0cH77//PhUVFfT39/P1\n11+TmprKunXr6O7uxsnJibq6Onp7e8XKQalU8s9//pPW1lacnJyAGfuWra0tN27cYO7cuRgaGtLU\n1ISbmxsrVqzgu+++w8PDAysrK/H0Hh8fp7KyEnt7e2pra/Hy8uKpp55CrVaLbdX09DSZmZlC+pqa\nmkpRURG1tbWMj4/T399PdXU17733Hvfv3xcG6tTUVJqamrh9+7Zo10orPel7oba2ltHRUaEgaGlp\n4fTp0xw/flwQyU+dOkVaWhrLli1j7969YrtZXl5OSkoKhw8fFmNmQPgrpCh3Tk4Ow8PD7N69G2tr\nawwNDUWOQWoJwwza8Le//a1A1o+NjREREUFPT89DX4+/ipvC4OAgjY2NqFQqIiMjRcLM2dlZhH0K\nCwtFhLOnp4f+/n5MTU2xtLQUVJy33nqLn376CTs7O2DGs2hpacl3332Hj48Phw8fpr6+nh9++IHh\n4WHa29sZHBxk/vz5JCYmYmhoSGBgIEeOHGFgYICGhgasrKwwNjYmPz9fLMGKioo4cOAA0dHRPPLI\nIyK5t3XrVlH97erqEpkH6YnR3d2Nubk5ISEhlJWViVCMlJScPXs2BgYG6HQ6CgoKmJiYEKuD1NRU\nWlpaBKRlcHAQnU4nth9+fn5MTEyIp8eWLVuorq4W30A2NjY4OjpiYGCAgYGBwKgtXryYsrIywUw8\nePAgarWa2tpaoWC3s7OjuLgYExMTNBoN8+fPZ+7cuSQmJoql665du4RmbtOmTZibmxMVFcX333/P\nnTt30NfXJz8/n+PHjzMxMUF6ejrDw8OCpvyvB41tbW14enqKyUJWVha+vr4MDg4yPDxMUlISAwMD\nhISEIJPJqK6upre3l8LCQnJycvjDH/5ASUkJBQUFgugkdQVGR0dpbm7Gz89P6PH09fVpbGzkzJkz\nlJaWCniJhYUFFRUV3LlzBysrK6Kjo/Hw8CA4OFhAfN3c3JiYmKCtrQ1/f3/BdWhpacHX15f09HR0\nOh15eXmMj49z+PBhHn30UUpLSwkKCsLT05OioiIKCwsxMDAQ1DC1Ws3Q0JCwqff397N48WIBaPXw\n8MDb25u+vj5qa2vR19cX5LFVq1bh4+ODubk5tra2pKWlkZT0b9sG4vWrgKx89dVXf37xxRdFQywl\nJYWJiQlcXFwYGBggLS2N7u5upqamiI2NxdHRkampKSorK8UT7PnnnxcyEAmgIglgjI2NUSqVyOVy\n1q9fL3iMOp1ONN/8/PzE+GtychK5XM69e/coLS0lLi5ORIWvX79OYmIiTz75JEeOHMHf3x+ZTMaN\nGzcEK0+i8Zibm4tpQnR0NFZWVpiamgr9uFTCcXJyYv/+/VhZWYnU3Y8//kh8fDzt7e1iPOni4sLE\nxITYDkxNTeHg4MDVq1dxc3OjtbVVNDP19fWRy+UCy5Wenk53dzfT09OoVCpyc3MZGBigpaVFEJSC\ngoKYO3cudXV1ooocEhLC8PAwMplM3Bg7OzuZmJigp6eH+fPni8ZqTEwMa9euxcHBgaGhIeGctLa2\nZtmyZYJTKR2wnTlzBm9vb8rLy/Hw8ODYsWPs2LFDsBampqZISUnB19eX4uJilixZIt7f0NCAv7+/\nuHmUlJTQ0dFBREQEc+fOJSwsTCynJTtTfX29EK5WVFSg0Wj47rvvhFxGq9Xi5+eHlZWVEBRJjczW\n1lYiIyPJz88X/MnCwkKioqKYP38+N2/eZGhoCDs7O8bHx/Hw8KC1tZX6+noUCgVdXV289NJL4sBW\nkhFLEuKCggJcXV2ZM2cO586dIzk5mZ6eHlGJHhkZobGxUQTEpFDc6OgoCxYswNPTk8HBQWAGPSeX\nywVcxt/fn5s3b3Ljxo3/OeSl/fv3/1ny36WlpYkcwtjYmIjhmpmZoaenx9TUFKmpqWRnZ5OYmCjk\nmzY2NvzmN79h4cKFIvW1fft28vPzefzxx0Uv3tnZGR8fH6ytrWlqauLll1+moqICKysrqquruXbt\nGp6enjQ1NQm6kOQbGB4eJicnhw8++ICKigoSExNpbGzkwoULWFpaYm1tjaOjI56enmzcuBFzc3P6\n+/uxsrKivr6etLQ0srKyhNTmyy+/FMvFEydOEBsbKzwML774Is3NzUJe4+3tjZ2dnaBMS4UsrVYr\nBCNOTk5ERESQn59PV1eXEJYYGhqir69Pb28vBgYG1NfXs23bNoyMjDAwMKCkpAR/f38SEhKEKk7q\nYQQGBor9vlSXtra2Ji8vDw8PD86ePcvFixeZO3euiCB3dnYSEhLCrFmzCAoKIi4uTmRAoqKiKCgo\nYGpqShCZXnnlFVGEi4+Px9XVFaVSSXV1NX/5y18wMzMjMzNTjJg/+ugjET+/efMmU1NTbNu2jTff\nfJOSkhJqamq4d+8e9fX1fPLJJ7z//vvCSt7X18fly5fx9fUV8uDW1lYiIiJITk5myZIlAv0u5StU\nKhXh4eEin3H//n0MDAwoKCgQ4lxPT0/s7Ozo6+sjOTkZf39/XF1d2b59u3B0GBsbs23bNkH6VigU\nmJub09HRwfz58zEwMGBsbIyMjAyWLFnCpUuXxHSorKwMOzs7YmNjWbx4MSYmJgKQ29jYiEKhYMeO\nHfT19eHt7c309DROTk7izMrExOShcWy/iu2DZCoqLi7G0dERd3d3vL29CQsLY8+ePUxMTGBnZ4er\nqytVVVU0NjZSUVGBgYEBrq6uqFQqNm/eTGFhIeP
j40LU0d/fT0JCAvX19eTk5AgtvVar5eLFi/zw\nww/ADMdg3rx5aLVaXnrpJVFU8vX1xdvbW7gKJDt0e3s7TU1NfPTRR6SkpIg4bXd3tzj7uHv3Ljk5\nOWzYsAEjIyPy8vLIzc2lo6MDJycn8vLy2LVrF11dXdy8eROY2T4UFRUxMjJCeXk5sbGx6HQ6BgcH\nmZycFP16acQnFZmsrKyoq6vDw8ODtrY2FAoFCxcuxN3dHQsLC2F/dnV1FUGntrY2cVPw9/cXVqnq\n6mr279/P5cuXOXXqFGq1Go1Gw/3794mKihIroOeff57Tp0/j5ubGoUOHOH/+PL/73e/Iy8sjNTUV\npVJJWFgY7e3t9PX1ceDAAV566SWqq6vx8vIiMzOTkZER+vr6OHnyJGFhYcDMntjQ0BBTU1MsLCzw\n8vJi1qxZYiJRVVWFlZUVKpWKb775hqCgIBISElizZs1/UvvFxMQQHx8vzmMklHtVVRW2trZkZWVh\naGgoJi8XL16ktLSU3t5enJyc8PX1pa2tDblczpkzZ/jrX/9KS0sLly5dEoQrAH9/f2Bm3CjJgY4d\nO8a+ffuoqqoiPz+fsrIyxsfHSU5ORk9Pj/r6enp6ekTcffv27Wi1WqEKgJkt9euvv05oaChOTk4s\nWbKEsLAwDh48yMcff8zhw4e5evUqra2tuLi4CIPYrFmz0NfXx9bWluTkZEZGRsjMzBQj34d5/Spu\nCkqlkpCQENRqtQgYpaenY2BgIPBp0pw6Pz+f0dFRcaAojaIaGhqYN2+eaEUCpKWlYW9vz6lTp5ia\nmhIw0qGhIYFj27RpE4GBgaI3IfH9165dS2VlpWALSoQmmNnvyeVyZs+ezTPPPMPvfvc75syZw2uv\nvYajo6PgNDzxxBOMjY2JMVNRUZE4UBsaGhLFI2trawAx31epVPj6+opwi56eHk1NTUxNTfHdd9+h\nVqtxcHDA3t4eAwMDrKyscHV1xcTEhODgYFQqFV9++SVpaWnixF1C2yclJTEyMiLEu05OTgKrbmpq\nKizV33//PQqFAg8PD9GSdHFxYc6cOQJaOzIygqmpKSkpKaxYsYJPP/0UT09PFAoFxsbGFBUViW2I\nl5cXK1eu5Nq1a1RWVjI0NER/fz9VVVVMTk4Ki3VZWRkpKSnk5OSwc+dOOjs7yc3NRSaTIZfLmTdv\nHi+//DJGRkYiAOTn50dNTQ1GRkZYWFjg6+srcH6SQzEpKQk/Pz9BSZbs1H/9619FLV0ySPn4+AjW\nw4EDBzh69CgKhYKOjg4mJiawtLTkj3/8IzBzI5fGvRkZGeh0OrENiYyMpLm5mRMnTogtxPnz5/nt\nb3/L4OAgarVauEe3b99OVlaWEMEGBgYyMjLCjRs3BG2soaFBaPSktKiUPzh79ix+fn5C8jMwMCCE\nx4sWLRJ+1Yd5/Sq2Dx9++OGfJexaaGgosbGxmJiY0NvbS0lJCe7u7qhUKmxsbEhKSiIxMZFZs2Yx\nMTFBbm4uKpUKV1dX7t27Jw6S7ty5w9tvv01ZWRkjIyNs3LiRW7du0dLSQlVVFUZGRtTV1Qkk1/Dw\nMF5eXgQGBmJoaEhlZSWGhobCXizBO+/evYunpyfLly8nNzeXmJgY7O3t2bRpk7iop6en0dPTE6w/\nPT09FAoFL7zwgpCs+Pr60tHRIaYaJ06cwM3NDY1GQ1dXFytWrCAiIoL29nZqamoICAhAo9Fw+fJl\nYZ92cXHB0dGRhQsXUlVVxfj4uAg0SSlOCwsL7OzsUCgUAh0nSWmbmpro6OigpqZG9PsLCgoER1Ai\nBEuyFolC3NjYSG1trRC9+Pj4UFJSIlqNY2NjdHZ2Ym5uTldXFz09PSxdupTk5GS2bt3KyZMnefzx\nxwkJCeHq1atYWVlRUlJCZmYmTz/9NM7Ozly7do3S0lLCw8OxtrYWTVfJlJSSksKVK1eYP38+JSUl\nFBUVCYOz5JTMy8tjaGiIsrIyYmJiaGlpoby8nIGBAcH9lNiGnZ2d4vBZoVCg0+no6enhueeeY/bs\n2Xh6egoBz3vvvYednR3nz58nKiqKnp4ejI2NGRkZISsri4CAAHx9fXFycuLy5cs4ODggl8t58skn\nRahKp9OJr5+DgwPHjx+nuLgY+f9l772jqjzT/e/PBjaw6Zu6gU3vXToIIhoVsQdNbClqipOZSSaT\nMmfipJ0za2Yyv8wkKz0mmphMTLMXQBQsCKiAIL2DlE3fbHqRst8/yHO/Z971rjmetc5av7zv+t1r\nuVSQ4uZ57ue+ruv7/XzNzLhy5QqbN2/m9u3bWFpakpycjLGxMXK5HJ1Oh6enp0jB9vb2xsfHh+np\naVauXMnOnTsByMrKoqioiK+//pqhoSHWrl3LN9988/+d8kGqFePj4zEyMhI3l0QolpRe5ubmyOVy\nKioqKC4uZnp6msTERGJjY4mJiUEmkxEeHo6R0aL8orOzk9nZWZKTk3nvvfcYGRmhsrKSzZs3s27d\nOkxMTCguLqayspKmpiZ6e3u5evUq586d4+zZs5SWlmJoaCgER9KsNykpiVu3bvHrX/+aV155hYCA\nAHp6eggPDyc/P5/6+npmZmb44YcfRIipq6sr165dY/fu3aSnp2Nubk59fT3JycniaSaXy7GyshIb\nQH5+PtXV1RQUFFBQUEBzczOlpaVotVri4+NRKpUolUpKS0vp7u6mq6tLBKSkpqb+ExtSalZVVlZi\nYWHBwsICjo6OTExMiNFoQUEB3333HeHh4fzbv/0bqampeHl5MT09ja2tLf39/bi7uwOLJd+6detI\nS0ujqqpKNHglrkB1dbXAz1tbW3Pu3Dnm5+cZHR3F09OTqqoqEQm3c+dOzp07BywyJUpLS/ntb3+L\np6cnWVlZzMzMCFBvS0sLn3/+ueAntra2cvHiRZqammhubub8+fPU1tZSVVXF+vXrhbNVCp2Jjo7G\n3d2dhYUFZDIZubm59Pf34+bmxuDgoICVXLhwgbq6OpqamvDw8ODu3btYWFhw7tw51qxZI6Yazc3N\n9PX1sbCwQHh4OI2NjeKBdvbsWUG23rRpk6AgHTt2jIyMDEpLSzl48CB6vZ4bN26QkJAg1K1ubm7i\nupNUuPX19Xh6eoogGEdHR27cuEFlZaWYzrS3tyOTyUR4zJ49e4iMjMTT0/O+78efxaYgk8nEDDw9\nPV103F1cXFi3bh23bt3i2rVraLVaDAwMGBsbw9PTU9Rh09PT5ObmMjw8LI5YgIgC7+joICoqiqKi\nIqanpwXwVcqMMDIyoq6uToBWJWx2b2+vGEd1dnaKWW9HRwc7duzg5s2b7N+/n/Lycnx8fMjOzubt\nt9/mhx9+wNrampCQEMLDw7GwsODEiRMkJyczNzcntP7Nzc1UV1dTWloKIPBqExMT2NnZUVVVxcDA\ngGgqurm5kZqaire3N4WFhdy+fZu5uTlsbW0xNjYmPDycgoIC8SRvampCrVbz+eef4+vrS3d3Nxs2\nbGBycp
LPPvuMyclJ/Pz8GB8fF3jzoKAgrKys6O3tpaurSzAVLl26hKGhIQsLC9jY2GBoaEhmZqbg\nASoUCpRKJdu2bePIkSP09PTg4eFBSkoKTk5OdHZ2UllZSWVlJU8++SSwSPHu6urC0NCQkJAQAKqr\nq/9JaKVQKBgYGCAnJ4eWlhYmJyf5/e9/z4svvsj+/ftFendycjJJSUmEh4eTnp4uItylJung4CA6\nnQ5vb296e3vx8vKioaGBBx98EG9vbxITE5mfn+fYsWPU1tbS3d2NlZUVmZmZFBcXs3btWoGKm5ub\nEzyFLVu28MADDwiAqk6no6ysjNLSUmJiYjAxMaG1tZWenh4cHBwYHR0lIyMDW1tbtFot4eHhnDhx\nAg8PDzw9PcXn7e3tFT2KhYUFjI2NhU9HKpHm5uaEsc3b21voaObn54mKihINdWNjY9E/u5/1s1A0\nmpqaotVq2b9/P0VFRWJ239nZyf79+8XYp66uTrwgDg4OdHZ2YmNjw8TEBG+99RYODg4oFAqBOjMy\nMhJ5DO3t7bz66quMjo6Sm5vL0NAQnp6eIqknJSWFiooK3N3dkclkpKamCqhFR0cH1dXVbNq0ifPn\nz9PX18dnn32GTqdj69attLW1ceXKFRoaGvjwww/x9vZmcHCQ999/X4S+pKSkoFar0Wq1fPTRRwBs\n3LgRV1dXnJyc+PHHH/H8KYtRSrSanp7mrbfeEsKUpqYmHn/8ccbHx3F3d6egoIDa2lpiY2MJCwsT\nuZUymYy+vj7Rj1i+fDl2dnaiXJiYmBCw0oGBAfz9/YU/ZNeuXQKI6u/vj0ql4uzZs0xMTAhV5czM\nDI2Njaxdu1Zo9e/evcszzzzDhx9+KMJrjIyM+Nvf/oa/vz87duzg2rVrtLS08OCDD7J7924++ugj\nPv74Y3p6eti7dy9Hjx7l4Ycf5vjx49TX12NpaYmFhQUqlYqtW7ei0+lob2//p6DeX/3qV5w/f56J\niQmx8QUFBXHq1Cni4+MxNzcHFpPCZ2dnqa2tFac0d3d3oTm4evUqCwsLFBYW8vnnn4uGo7OzM1eu\nXGFiYoLCwkIUCgX9/f0CoXf27FlsbW2ZmJhg5cqV7Nq1i7a2NsLCwjA0NBRoPz8/P3E9tbS0UFJS\nwtjYGNXV1fT29gqitSSll8vlGBgYMDIyInQ4UkJ1fHy8kGo7OTmJIJ7x8XHc3NywsbHh4MGD7Nu3\nj5aWFtGnuN/1szgpSCOT1tZW7O3tUavVKBQKdu3aRX19PYODg1RWVtLW1kZ/fz9+fn5MT09z5swZ\nLl68yLlz52hpacHFxUVkPcDiUbSqqorbt2+Lo6tUz3d0dIjd9rvvvhPW1MTERFJTU9Hr9fT09GBn\nZ8fQ0BDx8fGCVuzl5cXKlSt56KGHBMiisLCQHTt2sHr1ai5cuEBmZqb4/B9++CEDAwMMDAzg5+dH\nW1sb8/PzlJaWMjw8LExOX3zxBU1NTSKXccWKFVRUVDA6OkpWVhbm5ua0tLQIzYWUp3nv3j1GR0fx\n8vISTsvAwEDu3LlDc3MzcrlcEK3MzMywsbHB2tqaxsZGEhMThabBwcGBubk5LCwscHJyEuYsKbVK\ngrFGRkbi6uqKTqfDxsaG8vJyHnzwQfr6+lCr1ezYsYPU1FS+/fZbccG2traKRHG1Wk1ERATh4eF8\n+umnHD58WES+HT9+XMi7L126JHoY7u7u+Pr6Eh8fz4ULFwCIj48nNzcXX19f9Ho9hw4dwtDQkCef\nfBIXFxdaWloEsHR8fJzm5mbhdOzt7RWhQC4uLiiVSry8vEhNTWViYoK4uDgiIiIYHh7GxsaGCxcu\nsHz5cqE5kTwYy5cv58EHHyQkJIQvv/ySkJAQVqxYwfr16ykuLhaJ1sePH+fKlSsikLe9vZ3+/n7h\nZZiYmMDY2Jhvv/0WQDQmJY+Mg4MDtra2BAYGiulUVlYWubm5zM/Pc/XqVSFBn5ycFFzIzs5O7ty5\nI9yi97N+FpuChYUFFRUV4qkdHx+Pi4sLMpkMGxsbysrKREdbmuXm5+cLjJZeryc4OJjY2FiRjAOL\nzjhJ+SaBMWNjY9HpdAIMKoWBNDQ0MD8/T1hYGDExMfj5+ZGcnMzp06dFOMyXX34JQEpKCnq9Xjgl\ns7OzKS0tFQaVyMhIgoOD6evrE/oGSdNfXFxMdHS0OCb/Z0vrmjVrcHV1FWWEdPoYGRkhJiaG0NBQ\n4dWvqKgQmgtTU1N6enqYnZ2lqKhI8Citra1FAK+1tTXGxsbcu3cPpVKJr68v7u7uzM3N4eDgQHNz\nMxqNBn9/fyIiIvDy8kKlUlFZWSki7KOiorCysuLWrVts2LCBoaEh3n77bZYsWYKPjw83btxgxYoV\ntLe38+c//5ng4GDWr1/Pr371KwoLC0X2ZHFxMdeuXRPuvaVLl1JUVAQsSr8LCwuF21Ha/EZGRoQ7\n0dfXF7Vazfz8PLGxsZiamrJ27VoiIiLo6upi+fLlwnIvCeLKyspwdnZmzZo19Pf3MzQ0hKWlJWq1\nmqmpKcLDw9m3bx/btm0T5U5tbS3l5eUAqFQqtm3bJhgeMTExwCLZ+/Tp0xQXFxMVFUVDQ4PIA4FF\nGK+trS0NDQ2oVCpUKhWGhoZERkaiVqt5+eWXCQwMZPv27Rw4cECcbC5duiSs1hMTE8jlcszMzAgI\nCBBjb2nsa2RkxIMPPoihoSF6vZ6//OUv5OXlcebMGZydnUW61v2un0X5IGUaSMKSF198UbjKVq1a\nhbOzM5aWlly6dAk3Nzd8fX1JT0/HzMwMExMTZDIZzz77LOfOnSMxMRF/f38ACgsLeeihh+jv78fZ\n2Zlbt27xxRdfEBcXh1wu5+uvvwYWFWB79uwhJiZGgEkVCgVDQ0M88sgjaDQaJicnhU7BxMSEgYEB\nnJychDbC2dmZF154gb179xIcHIyJiYlI/NVqtRw7dowff/yRuro6VCoVzz33HD4+PsLZBohsibKy\nMuzs7AgMDMTOzo4333wTnU4nIuYiIyMFw1DSMQwNDXH06FGWLl2KTqdjYWGBuro6fHx8GBwcxMnJ\nCbVaLRqnycnJFBQUMDg4iFar5bvvvsPR0ZFXXnmFzMxMZDIZrq6ubN26lYaGBtHsa25uJjw8HEdH\nR6qrq9mwYQPt7e2cOHGCgYEB5HK5yL309PTE19eXS5cu4e3tjZeXF5s3byYoKIicnBwsLCx4+OGH\naW5uFkE7/v7+9PX1iYxEKysrkErfwgAAIABJREFUiouLMTc3p729ne7ubmpqagTTcfXq1ZSVlfHe\ne++xadMmzp49y1NPPUVFRQVeXl48+uijlJeX88ADD6DVaoUTVq/Xc/LkSTo7OwWfUqPRcPfuXbKy\nskTS1vr163F0dBT6gyNHjrBy5UoR4DMzM0N4eDgeHh48//zzbN26le7ubqqrq6moqCA2NpbR0VFe\nf/11ZDIZn332GT09PczMzJCWloahoSEWFhYUFBSQn58v5MgODg5
UV1ejVqu5d+8eMpkMmUyGRqP5\nJ2DQe++9R1lZGV1dXXz55ZcMDg4KEnpYWBhDQ0NcuXKFF1988b7vx5/FpiAJZzIyMoDFWbWkVtRq\ntSxbtkz48a2srITKrqKigrGxMR599FHy8/N54YUXmJiYEDX7ypUruXv3rvg6jz32GFNTU8zNzTE0\nNMTs7CzNzc2kp6czMDCATCbjk08+ITAwUOQvREREkJCQQHt7u8gWqKurE5ONtrY2bG1tsbGxITk5\nGb1ez/T0NIODg5w4cYKOjg5KS0txc3NjaGhISJ1zc3PFqFASljg6OqLX61EoFPT09Agz0N27d2ls\nbCQmJoZ79+5hZmZGXl4eXV1dREZGIpfLxbxaijyXXqeIiAhOnjwpAm8loOfIyAgDAwN4enoyNzeH\nv78/MplMIPHXr19PS0sLP/74I35+foJ2LXXEz5w5w7vvvsvNmzcZHBxk586dNDY2sm3bNpqamnjt\ntdfo6uoSgA+pcTw6OkppaSl///vf+eijj7hy5QqNjY1CBCSRiN944w3WrFmDVqslKCiIL774grCw\nMAEbefzxxwUl+sSJE5ibmzM8PMzBgwfp6OggLS2Nq1evis1DrVbj5OREaWkpra2tqNVq9u3bR11d\nnSBIZWdnU1JSgr29Pbdv38bGxobKykouX74sTps+Pj64uroKMI6xsTFVVVVkZmby0EMPMTs7yz/+\n8Q/Wrl3Lrl27sLW1JT8/X5Cna2pqWL58OV1dXZibm/OPf/yDjIwMurq6ePTRR0Xp6+rqyqpVq/j4\n449FrmV2drbYZDds2EBoaCgLCwt4eHjQ1taGsbExbm5uuLm5iamQ1FeTvt/7WT+LTUHi31+5coX+\n/n4CAwO5cOECw8PDrFu3jpaWFtra2tBoNGi1WgoKCoiIiGB6eprw8HBqamqIiooSCC6p9q+srBTC\nJAsLCxHwqtFoOHToEDdv3mTlypVUVlZibW3N66+/Lp7aZ8+eJTg4mLa2Nu7cucP09LRoqg0PD1NW\nVsb8/LxAoDc0NLBx40ba29sZGxsjOjqaoaEhOjs7mZqaYmRkhMDAQJqbm4mLi8PHx0f0BKSnpJGR\nEVlZWWzdupWKigpsbGwEaUiKmJOah5LyraamBrlcTlJSEgqFQmQ3dnV14eLiwtWrV4Um3sPDg/n5\nebq7u5mamsLR0ZGIiAhh6pHUc5LJxtraGgsLC+bn54UDdGpqSkTLf/7554yMjPD000/z/fffC2FP\neXk5QUFBNDU18eqrr6LT6aitrcXMzEz8H06cOEFZWRnl5eUCFwaLE6OAgAB+/etf4+TkREZGBnFx\ncQQEBJCVlcWDDz4oJOSRkZHk5OTg6+tLTEwMbm5uwsw2NjbGunXrOHToEPB/d/O9vLzEJvTXv/6V\nyclJgoODRT/LxMSEyMhIDA0NsbW1ZePGjZw5cwaFQiFMe4WFhcJ9KflQJPhKaWkpTz/9NPPz85SU\nlIgoOok0Lmkupqen6e7uJj8/HwcHB/R6PXl5eWKEmpSURGVlpVBv9vb2il6ChYUFcrlcaDBOnjxJ\namoqQUFBtLS0oFQq+c1vfkNycjKurq64urqKUfL9rJ9FTwGgoaGB5uZmQSN67LHH8PT05OLFi/z4\n44+UlJQwMjKCtbX1P7kVb968SUNDA5cvXxajTUnz7+/vj4eHBw899BC7du2isbGRV199le7ubvbu\n3ctrr73GgQMHmJmZEZqHoqIiysrKSE5OJicnByMjI1pbWwXvDhZ5jfb29vzud7/j1KlTQoGYmZnJ\nX//6V7q6urh06RKurq4ieNXd3R1XV1f27dsnOIy1tbVCOgyLZcmePXvEsbO6upr09HQsLS2Jjo5m\nz5499PX1IZPJGBsbE99XRkYG7e3tdHV1CcKv9LQzMjLCw8OD2NhYEXVvYWGBQqHAyspKzPZNTExo\naWnh0KFDaLVafHx8aG1txdXVFa1WKxSlPj4+nDlzRrgbR0dHycnJIScnR5zuJOGXhYUFO3bsEOXM\n5OQkaWlpuLi4kJWVxZEjR3BxcWFmZkZ0yN3c3GhoaODTTz/l66+/5qmnniI4OBg3NzfWrVuHi4sL\n165do7e3l/b2dubn53F1dWXp0qUi3WnFihVYW1tz9epVkpKSgEUxT0FBgegF1dfXEx4eTkxMDDt2\n7KC/v5+kpCR2795NU1MTO3bsYPv27Xh6ehIWFoZCocDW1lYg8K5cuQIgZOIpKSlcv34dY2NjQkJC\nCAgIoLW1lb1797J7927m5uaor6/Hw8OD5ORkrKysGBsb48033xS5Jp6enkLx2tbWRmFhoUg4u3nz\nppDvJyUlCQxea2sru3fvFk7X9evXMzc3R3Z2toiikxyT97tk0vHqf+eSyWT/+7+J/7P+z/r//7qt\n1+tj/qt/9LMoHwICAnj44YdxdnamoaEBExMTamtr8fb2ZnJyEgsLC27fvs2yZcvw8PCgpqYGnU4n\nZK8bNmwgLy9PTC/c3NzYuHEjf/jDH4SiUKFQCPmxt7c3ly5dwtTUFGdnZ+zs7ASDf+PGjRQUFKBU\nKsnIyBCOvlOnTuHl5cVrr73GH//4R8zNzTE1NWVhYYF79+5haWmJiYkJCwsL9PT0MD8/T11dHXv3\n7uX8+fM4OzvT29tLWloaPT09omRZtWqVyE0YGhriP/7jP/Dx8RGpz15eXtjY2KDRaAgMDBRuQWdn\nZ0xNTcUT3N7enujoaCGR7enpwcXFhZGRETEqVCgUIgOxq6tLUK57enqwsbGhpKSExMRE+vr6aGho\nEEGvlZWVLF26lIKCAqytrQkPD+fIkSNYWlrS1NSEm5sbk5OTKJVKBgcHcXV1pauriyeeeIKsrCzR\nrZfIwqGhoaJ0kI7rkhHr97//PSYmJlhYWNDQ0CCennK5XBzzAwMDOXr0qGA2VlRU4Ovry+3bt3F2\ndhZotZiYGLKysnjllVf45JNPKC4uRqlU4u3tzfnz50U5GRsbi1qt5tNPPyUuLo67d+8yPj5ObGys\n8MlIIjhpynXkyBGOHj3Kt99+y+TkpCi/9Ho9p0+fZsWKFSIe8M9//jOrV68mMTFRTNIcHBzIz8/H\nwsKCoKAg7OzsePXVV3nqqaf4/e9/z1/+8hfm5uYE16Gqqgq5XE54eDhubm7CyCXZ4aXk86mpKcLC\nwtBqtXz99ddkZGSIBOvCwsL7uh9/FuXD6Ogocrmc2tpaXFxcGBwcZNmyZaSnp7Nx40ZCQkJETuLN\nmzcFE2B0dJSlS5cyOjpKaGgok5OTzM/Pi/o0PT2dhoYGamtr2bBhgzDynDt3TowMpaPcnTt3hOdC\nmkXX1dXR09NDU1MTSUlJQh23sLAgmnWzs7PcvHmTW7duYWZmxt27d0lISMDzpyDcsrIy5HK5CBOR\nRkNKpZKBgQF2794tXJK3bt3C398fe3t77OzsePTRR5mZmcHV1RULCwuysrJoamoSGDGpvp+bm2N0\ndJQLFy6QmpoqSpfKyk
rRd5FkzTMzM1RXV4vGXm1trQiGiYyMFB6GoKAg3N3dsbS0ZHJykqamJhFf\nJhmDZmdnCQ4OZmFhgeXLl5OSkkJwcDAKhYKpqSlyc3NFKVdVVYWrqys1NTVcuXIFuVxOaGgoLi4u\nglUJixqQTZs2sWzZMmEEcnBwoLW1lczMTIaHh6muriYsLIzdu3cLjUVISAi+vr7ExcUJO/rly5fF\nyHdqakpIhE+cOMHOnTv593//d/R6PRcuXCArK4uMjAzs7OwwNTUlPj6eoKAg+vv7Wb16NQYGBgQH\nBzMxMUFubi7btm0DEDqBuro6cnNzOXnyJEZGRvT399Pd3U1HRwfvv/8+4+PjVFVV4ePjQ3JysjCb\nSYRmBwcHMjIyBJXL19dXBN2WlJSINHGAN998k/Lycurq6hgcHBQSfScnJ1pbW9FoNJiYmBASEsK2\nbdtITU3l6aefvu/78WdxUpBkrgqFgtDQUCoqKjA2NubmzZu89tpr/P73v8fDw4OVK1cSEhIiZNAp\nKSlCrJSfny90CZJ+fHx8nAMHDlBcXMw333xDUFAQoaGhImy0s7OT8vJyVq5cSWhoKCUlJXz00Uek\npaXR3t7OuXPneO6555ibm6O6ulrYcru6ulAqleKH+Ytf/IL29nYqKysJDw8XGDdDQ0McHByYnJwU\naU4LCwt0di4GaC0sLPD444+LG6eyslJkB0pZEe3t7dy7dw8DAwNUKhXLly+nsrKSvLw8duzYQXl5\nubh5JicnxclKanStXLmSnp4eDAwM6OrqEuNVWAy3TUxMZGFhATs7O86fP09qaiq1tbX09PQQFhaG\nubk58fHxNDY2CqqzpMfYtm0bd+/eJSkpCY1GI4hZEkLPzMyMS5cuodVqsbW1pbm5md/+9rf8r//1\nv0hJSRFJzitXrhRYfgMDA/Lz8zE3Nxc4ttnZWdFHmZ+fZ2JiguDgYJRKpeBYNDU1IZPJCAoKEjyO\n06dPi4zKb7/9lo0bN+Lk5IShoSEvv/wyW7ZsEQrCnJwc3njjDZGPII2fjYyM+OSTTwgPD6e0tBQX\nFxcSEhKE2Co2NhaNRkNoaChubm588sknmJqaYmxsLMxV2dnZbN++XUys8vLysLe3F2nn+fn5VFVV\noVKpCA0N5ciRI1hYWGBoaEhzczNqtZqgoCAOHz5MX18fjzzyCGZmZtjb24vk9aCgIPbv3093d7fg\ngoaFhYnTwX+H0fiz2BTMzMxQqVRcvXqV2NhYXnvtNfr7+zl58iR/+9vfaGtr49SpUxgbG7Nz5072\n79+PRqMhMjKS9vZ2VCoVX3/9NXq9nvHxcWGu8fPzo7y8XJhERkZGWL9+PceOHSM0NJTy8nLCw8Mp\nKipiamqK3/3ud2zdulUIbQARQtPV1SWizdzd3XFxcSEkJITPP/9cJEVLL3xtbS27du2iuLgYT09P\n0dWWXJkBAQEUFhZibm5OQUGB6GQ7OTkxMTFBS0sL09PT9PT0YGJigr29PTU1NUJMFBISQnl5OYOD\ng2zatAmZTEZ/fz/Dw8Pk5OQI8q9arWZwcJB79+4xNjbG6Ogoa9euZWhoCAMDAyIiImhtbUWn03H0\n6FFUKhWnTp0iNjYWa2trbG1tyczMZNeuXczNzZGTk8OSJUvQ6XRs2bJFNMekaDq9Xs+mTZvE//vW\nrVsiNdne3h5PT09B0crPz6elpYUdO3ag1WqF96Gjo0NkhQYEBHD8+HG2b99OYmKiyOKcnp7m7t27\njIyMsLCwgJ+fHyMjI8THx6PRaHBzc+Odd94hODgYuVwOLHoUpKCa1157jcrKSpKTk0Uwa3JyMlVV\nVQQFBTE8PCw4Hc7Oznz++edUVVXxwAMPiEalRInWarW88MILHD16lN7eXjZs2EBYWBgHDx5kYGAA\nR0dHfv3rX4sj/8zMjPCNVFVVsW3bNqE9kMvlwhxXWVkp8H3u7u7i+rOxseH06dOCHXro0CHkcjlt\nbW385S9/EUE1q1atQqfTYWVlRUFBgZjI3c/6WWwKQ0NDfPDBB6Snp6NQKLh+/Tpubm5s2LABrVYr\nWH4pKSl0dXXx7LPPimw+Nzc38vLySElJQSaT4efnx507d4BFxPvY2BhPPPEE3377Ldu2bePQoUPo\ndDox1tJqtRgZGVFfX09vby/fffcdarWaJ598Ejc3N8rKymhoaCA0NFTkB9rb2+Ps7ExzczMbN26k\nuroaCwsLZmZmcHBwoKGhQQTjSkYcjUbDwsICCQkJGBsbo9VqSUhIICwsTHTe+/v7RU1fWFgojEcX\nL14UYqDx8XEqKysJDAwU2RixsbEUFhZy/fp1lEolRkZGhIWFMTAwQG1tLWvXrqW3t5eBgQHGx8fx\n9fXl2rVrAkvf0NBAV1cX7e3tVFdX89JLL7Fs2TIcHR15+umnGRoaoqenh6CgIJycnETeo4RslyTB\n586dw9PTk6SkJAGzkXItdu3ahU6nE+O40tJSFAoFUVFR3Lp1S5jNJLKzJLn29/enqKgIb29vbG1t\nmZ+fp76+nl27duHg4CCAp97e3ty7d4/AwECKi4t55JFHBPUKFk93S5cuBRY3bQm0GxkZKYJqpGmE\nRJ6ShF2GhoYsX74cjUbDjh07GBwcFCcbU1NTiouLmZubEw7esrIyAXWJj4/HxsaGsbExGhoaiImJ\nYcWKFRQUFODu7k5JSQnp6enMz8+LqRAslpczMzMsLCyQnZ1NdHQ0q1evFtfmsWPHGB8fFyY/Kysr\n1Go1jzzyCKWlpXR2drJs2TJcXFwwNTUVys77WT+LTUGhUAjxzLFjx0hPT+e7774jODgYU1NTJiYm\n8PPzw8nJiby8PMrLywVuS2raSGDO48ePC095eHg4o6OjnD9/nsDAQAYHB1mzZg0ODg689dZbxMXF\nYWtry7Jly1iyZAlJSUn88pe/FE63V155heTkZKanpwW7ERDKM6khaGxsjIODA1NTU0JzMTMzw7Fj\nxzA3NxcuSz8/P27fvs3Jkyfp6enhnXfe4aGHHhLOuKCgIPr6+vDw8GBgYICKigoOHDgggKRbtmxB\np9MxPz+PnZ2diIgrKSnB19dXWGm7urqYmpoiODiYHTt2iAuut7cXvV5PRUUFW7Zswc/PD29vbyYm\nJmhraxM4eBcXFxFHZ2ZmxtDQEDExMZSUlNDd3Y2/v7+4kdPS0rh3794/eU8k/FxfXx+Tk5PCMNbT\n08PIyAjJycmkpaVhb2/PkSNHuHfvngChrl27Fm9vbwYGBjh27BhRUVHMzMxQUlKCra0tBgYGJCUl\nMT09TUVFBUZGRqKElNSIarWa1tZWIZMGhJpTInidPXuWzz77jKqqKiIiIlhYWBDajPz8fDo7O0UD\nNTU1VWQ4yOVyPvjgA8LDw4HFJnlRUZEgcVdWVmJlZUV/fz8ffvghIyMj9Pb2UlBQIIRenp6e9Pf3\ni/7HunXrqK2tFYnisLjZSAna/v7+eHt7c+rUKerq6oiPj6empoa2tjahq5mZmWH
FihWcOXNGOF0n\nJyf529/+RnJysuiz3M/6WTQaJZVaVFSUkMyWlJSI+PDAwEDGx8fJysoSte0777wjgkkiIyPp6OjA\n2NgYnU4nyMBmZmZUVVUJslBxcbHorEt25NLSUqqrqzl27Bh6vZ6lS5dibm7ODz/8wIoVK4iNjRXz\n7+joaGAR33bz5k3Gx8fJy8sjOjoamUwmPAj29vZMTU0JCbZSqcTFxYWqqip0Oh0dHR04ODgQEBBA\nYGAg33//PbCY7qRWq/nkk0+QyWQsX76c2dlZvL292bNnD+bm5iwsLNDf34+1tTWzs7MChBoQEIBS\nqSQmJoann36akZERfvzxR7755hsRYbZmzRqGh4cFszEiIkIcwSUtvRTDV1paSk5ODq2trTg6OmJh\nYUFTUxM3btwQ1GEJpCtBSgYGBkQClQSFValUpKSkUFtbK5yf+fn5BAUFCS9EdHS0OC1dunRJbHqp\nqakUFhaKm25mZkYE7lRVVVFUVCToUwkJCQwMDDAyMoKlpSVTU1MCaAOLNXVTUxNNTU0cOnSIBx54\ngL6+Pp577jn27t1LdXU1u3btoru7m8TERGxsbIQi09DQkLGxMSYnJ/n4449ZvXq1UAhevXpV3Oix\nsbGsX78eKysrTExMeO+997C2tkaj0dDZ2YmpqSkXLlygsbGRxx9/nLVr1/L4449z8OBBhoeHKSkp\nEc3E8fFxXFxcOHfuHKamprS0tJCamkp0dDSlpaUMDAwQFRVFU1MTpqamGBgY4Ofnx65duwgPDyc0\nNBQ7OztWr17NwsKCaJLfz/pZbAp2dnb09/dTWFiIg4MDnp6epKSksHPnTgwNDRkdHWVsbAyFQkFG\nRgapqancu3eP6elpvL29MTAw4N/+7d+Yn58X9muAvLw8nnzySTQaDefOnSMiIoKtW7cK9p+Pjw9R\nUVFkZmaiVCq5fv26MAbJ5XLWrFlDQkICCQkJpKen849//ANYBIyEhoaKaYmBgQGmpqZERkbi5uZG\nR0cHnp6eGBoakpSUhJGREampqYKzaGRkREdHB0qlktzcXPbu3QssxppPTk6yZ88ehoeHMTMzw8jI\nSCRLSWh0AwMDWltb+f777xkcHBT5jlLvo7GxESsrK5YvX46bmxs1NTUUFxdz9OhR/P39MTQ0pK+v\nj7a2NtGAVCqVGBsb09nZyfz8vGBNmJubU15ezsDAAJGRkSxbtkxE6SmVSrKyssTNExYWxp07d8jN\nzaWsrIyZmRkGBweZm5ujt7dX+Ekef/xxJiYmmJ+fx97envLyckZGRgAEqVuv19PZ2YmdnZ0YT/f0\n9IgTSWpqKuvXrxdk5Rs3bjAxMSGmA5cuXRKmKFhEx2s0GjQaDTqdjoaGBlFnh4aGsnHjRtHcm5qa\nYmxsDG9vb2ZmZoSRrqWlhZ07d3Lr1i02btwIIKL2qquryczMpLu7mxMnTmBsbIyxsTHNzc309vbS\n0NDAzp07CQ0NxdzcnLi4OEGYksRUvr6+zMzMAIu9ldTUVAEbqqyspLq6mm+//ZYHHngAHx8fRkdH\nBUN0YmKCmZkZkTs6Pj6ORqOhv79fNKPvd/0sygeZTCbcbe3t7WI019zcLNKQVq5ciaurK46Ojhw/\nfpyioiLGxsbYtWsXXV1d9PT0CJJOYmIi7777LvHx8ZSXl2NmZkZMTAz5+fn09fXh7+9PVFQUAQEB\nXLhwgddffx29Xk9RUZFoNBoYGODq6ipGjubm5qSnp3P27FnR0d2+fbuYMjg7O+Po6Cjs0bW1tTg5\nOTE5OcmlS5dQq9UsWbKE2dlZJicnkcvl2Nvbo1KpqK6uBhafDtJFHxMTQ0BAANevX2ft2rXiie7h\n4UF+fj5zc3MkJSUJPuDw8DD5+fmoVCoqKiqIjIzE3NycmZkZLCwsSElJobu7G0tLS2xtbQVerrW1\nlaCgIKqrq8VoV61Wk5CQQHBwMOfPnxdYdQl8m5iYKI7JpqamrFixgtraWnJzc+ns7KSnpwcfHx9g\nUYb98MMP88ILL6DRaLC1tWV6epo//elPImB3dnZW/Pvu7m5MTEzEDSJF4UnU5Lm5OYKCgrCxseH4\n8eOir+To6IizszPu7u74+Pjg5+dHdHQ0J06cABC5DBKjsry8nP7+fsrKyhgZGUGpVHLx4kVMTExo\nbm7mkUceYXp6mo8++oj09HRRPtnY2ODj4yOSnM6cOUNKSgqzs7MkJiYK9Js0CZGe4H/84x+pqakh\nLy8PHx8fRkZGsLCwYHh4mOXLl5OTkyNs27A41bhz5w5DQ0M0NTVhYmLC8ePHUSgU5OXlsXr1alxc\nXIRPZn5+npycHIKCggSNS3qfUqkUnND7WT+LTWF8fFzk3q1Zs4YlS5Zw8eJFAfX45S9/KcCYw8PD\nrF+/nvHxcTHfNjU15fLly6xevZrz588Lv/3169cZHR1lzZo14kaWvOfSU7GkpIS4uDhu3LghwBpX\nrlzBzs6O5uZm1q9fj5mZGY6OjsLWmp6eTk1NDcbGxmg0GqanpwU7sL6+ntnZWc6fP09cXBw2Njbs\n2bOH2dlZrl+/zu3bt4mJiaG3txdXV1dKSkrw8/MDFqcP/v7+oo7Pzs7GxMQEjUZDRESE4ArY29uT\nk5ODTqcT/oahoSHBZxwZGcHBwYGhoSHRsJuenhYZF56ensLqLUmXJR7g5OQkBgYG2Nvbo9PpGBwc\nFAGnYWFh3Lx5k9DQUK5fv46lpSVubm5CTLVq1Sr+/Oc/i3CazMxMwbisqqqiqamJ7du3U1ZWhqWl\nJTdv3iQsLAw3Nzdu374NLD7RJUKTFBloZ2eHubk527Zto7m5Ga1Wy9mzZxkZGSExMRELCwvMzMzw\n8fFhfHxcCISamppISUnh008/pampCSsrKyoqKsjPz6e3t5eFhQVcXFxYtmwZn376Kb6+vqIZKcXO\nrV+/HnNzc+F9uHjxIvv370dim7700kv09fVRVVXFjRs32LZtG66urnh6enL16lXUajV6vV6Ujenp\n6Vy+fJm0tDRKSkrYtm0bVVVVqNVqbt68KXoKrq6u3Lp1S5xqpQg7MzMzIiMjMTAw4M6dOyJoBxY9\nRMPDw2g0Gnx8fLC0tCQuLo6Kior/2fJBJpO5yWSyKzKZrFYmk9XIZLLf/PT2N2Uymeb/kUQtfcwr\nMpmsWSaTNchksv8ymkZKg66srESr1ZKTk0NPT4/oSF+9epWsrCxsbW2xtbXl4sWLPPDAAyxZsgRn\nZ2fm5uYYGxvj9u3bxMbGioagvb09s7OzDA8P09nZSUBAALa2tqxatYry8nJaWlrIyMigsLAQrVaL\nubk5H374Iebm5mzZsgVHR0dOnTrF3bt36ezsFMai0dFRnJyc6OrqwsjIiOvXr3P58mXGx8cFC296\nehp3d3eys7NpaGjA1dWVoKAg0ZwMCwsThhkpS1IaSRoaGjIwMCBCUaWyob6+no6ODvLz8xkdHaWv\nr4/BwUGCgoLQarXC3LRy5Up8fHxwc3
PD09MTT09Pent7GRoaEmO6pKQk7t27h5OTE3V1dbi7u2Nn\nZ0dkZKTgUUpdfz8/P6ytrRkdHRXW55CQEFJSUggJCaGyshIHBwfGx8cxMTFh8+bN4inv4+ODvb09\n8/PzqFQqZDIZg4OD4mcv9XakEbCEyY+IiKCnp4fk5GRGRkYwMzPD0tISlUpFa2srIyMjLF++XDAK\nqqurxfhTmtE3NDSIjVypVJKWliYyHqKionjsscdwdXWlpaUFe3t7HnzwQQAOHjxIe3s733zzDd98\n8w2enp7I5XLy8vKEp0HSdCwXAAAgAElEQVSiex08eFAQmYKDg6murhbQlo0bNzI/P09WVhb5+fkU\nFRUxOjqKs7MzU1NTwny2detWTE1N8fHxERGCExMT7Ny5k/7+fgHwlUKP+vr6aGxsRKPRcO3aNQwN\nDbG0tCQ1NVX87CTTV01NDfPz86LPdj/rfnoKc8CLer0+GEgAfiWTyYJ/et+7er1+yU+/sgB+et8O\nIARYC3wsk8kM/9UXWFhY4NVXX0WtVnPmzBm2b9+OhYUFAQEBIvIsISEBBwcHiouLKSsr469//St5\neXnY2NgIJNXc3Bz5+fn/FNK5b98++vv7kcvlHD58WCCyDxw4wJ49ewgLC6O3t5fExETc3NzYsmUL\n+/btQ61WMz4+TkhICC4uLpw9e1a8sBKKS6FQcOXKFXQ6nbhAra2tuXLliqDtenl54e7ujqGhIWFh\nYSQlJZGbm8uf/vQnEhMTCQsLE7u4Xq/H1taWkpISWlpaiIuLw8DAgKysLH744QeampoICAjgqaee\nIiIigrq6Ou7cuYOPjw9PPPEEBgYG3L59m7t37wozlKRTcHZ2xsPDg+7ubmZmZhgfHxe8hqSkJLZs\n2YKBgQFmZmbU19ejUCjw9vZGrVYTEBBAQEAA9vb2wgUqbc7V1dV4eXmRmZnJwMAA+/fvR61W4+jo\nyJ49ewgMDKSkpITOzk5RXqWkpPDSSy9x5MgRUf5IExitVsvs7CzV1dXMzs6Km0kiREtaA3t7e+bm\n5nj77bd5/vnncXBwQK1W4/lTzuTg4CDh4eHCdbhixQpxKmttbWXVqlXcuXOH+vp6vvzyS9asWcMH\nH3zA5OQkr776KjKZjOnpaTH+npubY9WqVURHR2NnZycaglJWpqOjI2fPnqWvr4+YmBgx5Tl27BiW\nlpYMDg4KOpVkSktPT6e0tJRvv/0WBwcH+vv72bx5MwBHjhzh+PHjeHh48Nxzz5GWloZCoSA/P5/v\nvvtO9JaOHDlCcHAwrq6uDAwMCOv3kiVLUKlUgtT9Pxowq9fre/R6fdlPfx4D6gDXf/Ehm4Hv9Xr9\njF6vbwOagbh/9TUmJycJCAhgbm6OyMhIZDIZ+/fvx9vbm46ODmFBLiwsFEBUJycn9Ho9CwsLnDp1\nisDAQG7evCmO9LD4dBgeHqagoICOjg5WrFjB0aNHcXd3p6ioSKRCa7VasrOzxVixv7+fI0eOYGtr\ny5kzZ7C3t2fbtm3i5NLd3Y2hoSE9PT3Exsbyhz/8QeDM+vr6hLzW3NxcHMErKiq4e/cuVlZW2Nvb\nExAQQGNjIw4ODiJLUiI0qdVq1q5di6mpKatXr+b27dtkZmaSn5+PTqcTyccS46CwsJCGhgaOHj1K\nVFQUISEhqNVqkSAlKRhtbW2xtLTEwMBAPFGOHj2KVqsV/RYJ6S45F2dnZzl16hQzMzOcPHkShUJB\ncXExd+/eRaVSiUCX+vp6gTwzMjJiZGREpFhLDTyVSkVjYyOAyH6cmpqiu7tbjMykpqfkPJ2cnKSn\np4fc3FyuXbvG4OAgKpWKhx9+mMTERGJiYli7dq1AuEloeQsLCzQaDdnZ2cCizFqKf1MoFFRWVtLR\n0SECba9cuSKEYDqdjrS0NF544QWhjZBCXSQ/jqSUfP/998UGcuDAAYyMjFAoFNTX14sxeXd3Nw4O\nDri6urJ792727duHmZkZx44dw9jYmO7ubg4ePCj8IQA7duzAwMCA0tJSHBwcWLJkCZs2bcLU1JTo\n6GiampooLS3lxo0boo8khQ67uLhgbm5OZmYm1tbWlJWV/RPh679a/62egkwm8wQigVtAEvCsTCZ7\nDChl8TShY3HDuPmfPqyLf72JIJfLefrpp+np6cHf35+xsTE+/vhjEhMT2bx5Mz/++CM2NjZCyvvy\nyy8zODgoABsKhQKdToeJiQkxMTFiNm1kZERpaSl79uyhvr6e06dP88orr3Dr1i1GRkaELyImJgZz\nc3Osra0pLCzk5s2bgtP31FNP0dLSIsCrBw4cQCaTYWFhIcCnb775JhkZGTg6OlJWVoaVlRWRkZF4\ne3vj6+srxC2+vr4iZ6K9vR1TU1P6+/tFGExLSwstLS0iUr63t5epqSmef/55Dh8+TEhICIWFhQL4\nuWXLFoaHh7ly5Qru7u5kZGTg5eVFbm4uxsbG2NjYcO/ePXp6eti0aRPz8/MMDw/T19dHXFwcnZ2d\nREVFodPpkMvlpKWl0dTUJEJ1p6enkcvlgrIkSaLNzc1ZsWIF586dw8PDQ6RVj4+Pc+LECZHDIR1j\nJcaFRGnu7e0lMzMThULB8uXLMTc3x8vLi7fffpv4+HhUKhX5+fl4eHhQV1dHYGAgdXV1xMbGCnLx\n0NAQubm5+Pv7Y2RkhFarpbm5GVNTU1QqFZcvX8bZ2Znly5fz/vvv09nZiUKhEFF+77zzDr/73e8w\nMjIiPDyc2dlZYWYzNDREJpNhbW0tSs729nZ27dpFa2srxsbGlJSUAAhPjaQPiYmJ4dChQwJ7n5yc\njI2NDcbGxnh6eqLRaES/ytTUlIaGBjIyMti4cSPXrl0T8QRvv/0269atY8uWLQKrp1AoCAgI4PXX\nXxeEqrq6Ory9vWlpaWHTpk0YGRmhVqtpb2+nrKwMvV6PmZmZoJHdz7rvTUEmk1kAJ4Dn9Xr9qEwm\n+wT4I6D/6fe/A/v+G5/vaeBpAEtLS/Lz82lsbMTIyIjOzk6USiXu7u5YW1vzwgsvMDs7y1tvvcUP\nP/wgVGhqtZq5uTmeeeYZLCwsaGtr4/z580IqampqSlFREQsLC4yNjTEyMsKTTz7JgQMHCAwMZHR0\nlEcffZT+/n60Wi2XL19GqVRib28vUpGkYFEbGxuhlJydnUWlUolgldLSUi5dusTy5cv5+9//ztDQ\nEN3d3QwMDFBXV8fo6CirV68W8uXVq1fj5OQkFIASuLW8vJwnnngCb29vzp49i1wuR61WU1dXx8aN\nG7G0tBQMgeTkZBFwEhAQgJ2dHUqlksOHD7NhwwYaGxuZnp5mdnaWVatWoVKpePfdd/Hw8BAhOgqF\ngoiICKqqqkhOTsbR0ZGxsTHhJi0oKCAkJARDQ0Px1Pf29kaj0TA3N0dCQgI6nQ6tVktwcDA+Pj6k\npaVha2sr8jeln1Fvby/r169n8+bNvP766yLf48KFC6xdu5bTp08DizkKEvzG0tKSXbt2UVZWJiL
f\nR0dH8fb2Fmi7+Ph4SkpKRHKVSqXi8OHDLFu2jLGxMfLy8oDFGn3btm288cYbvPvuu2zYsIHS0lIa\nGxu5c+cOx44d46WXXhKhtV999ZUYA1dXV6NQKPjqq6+Ijo6mtbVVxNw1NDTg6OhIYmIiPj4+ZGZm\nYmRkxMLCgniNKioqmJ6e5quvvsLAwIDc3Fyamprw8fHBwsKCjz/+mOeff16kX8Mis0M62Umvb1lZ\nGU5OTsJ9+Zvf/IY7d+5w7do1oeuYmppiZmaGa9eucf78eX7xi1+wfft2/vSnP93vrXl/m4JMJpOz\nuCEc1ev1JwH0en3ff3r/58D5n/6qAdz+04erf3rbPy29Xv8Z8BmAt7e33tzcnKioKEZGRtizZ49I\nfG5qamJhYYG4uDj8/f154oknqKmpITIyksbGRnbs2MGNGzdwdHREpVLx0EMPiUZWdnY2iYmJNDQ0\n4OHhgUqlwtjYmNOnTzM8PMzjjz9OY2OjMPz4+Pjg6+tLQ0MDQ0ND2NrasrCwwOjoKM3NzSJ6SzI3\nOTs7Mz4+zieffMLk5CSbNm1idHSUwcFBLl26hJmZmSAhSUx+KVZMylIsKChgw4YNwOIYSkp5MjEx\nITQ0FFjc3KSnm4SKk2boUnCuVLevWbOGEydO4O3tLY7gd+7cobKyUly8p0+fFmNFaQQ6OTmJpaUl\nIyMjmJqaitfdzc2N/v5+Ic+9fv06Wq1W2MZXrVolxDNmZmbU1tZSW1vL7OwsJSUlODk5ERERISLd\nDQwMROydgYEBMzMzDA0NCTFQRUUFiYmJwlxVU1NDWFgYP/74o+jqz83Nce/ePUJDQ0WGQ3p6OpmZ\nmZw7d47k5GTm5+cJDg4WqLv4+Hiqq6vZvHkzSqUSvV5PTU0NOTk5qFQqjh49KhSpDQ0NNDY2ir7M\nzp07BS05ICDgn2p0Ly8v0bSenp6mvr6epUuXMjQ0xPj4OJOTk+h0OmZnZzEyMuLOnTt0d3czOjpK\ncXExGRkZbN++HY1Gw7Fjx3jooYcAeOCBB7h48aKYtt26dQudTkdOTo64rgoLCykpKeGxxx5jzZo1\nZGdns2nTJuzs7NBqtURFRTE1NUVBQYGYcN3P+i83BdlisXcYqNPr9e/8p7c76/X6np/++iBQ/dOf\nzwLfymSydwAXwA8o/q++jhQqW1ZWxsWLF5mbm8PZ2ZmLFy9iYWEhMg+7urrw9PQkIiJCBIWo1WoB\n6jx16pQQwvT09LB69Wr8/f2ZnJzkxo0bBAcHo1KpmJmZISsrCw8PD3bs2MHs7KywnEoS1+bmZsbG\nxjA0NKS8vFwoGp2cnDh69KiwMEuItJaWFnHUdHZ25osvvsDKyor09HTGx8cF6NXd3Z2Ghga2bNnC\nwMCA4PJNT08zPDwskoNnZmYYHR2lvLycwsJC6urqWLduHaampvT19QmTkxRaCotP2iVLlrB06VI0\nGg319fUkJCSQl5cnLpKEhAQR+6bVaoVRRzoaGxgYYGVlRU1NDVNTU4Ki/MMPP6BUKoWASalUcuHC\nBezt7cWURpL/btq0CY1Gg7OzMx0dHYI81NfXR0JCAsnJyRw+fBhjY2NGR0cFLuyRRx5BqVRSW1sr\ntAbSaG9hYQErKysmJibw8vLC1tYWjUZDXFwcSqWSwsJCJiYmaGpqEgpNCfEuk8moqalhdnaW7du3\nY2lpSVlZGfb29tjb2wsMYFVVFVFRUSQmJgoS+Lp165ienuaTTz7B0tISGxsb0V+CxfyOq1evijBc\nuVzOs88+S1VVFSYmJqxZs4bS0lJ6e3uxtLQUzsjU1FRsbW2Jjo7m+PHjPPPMMzg6OgJw+/Zt4uLi\naG1t5auvvmLv3r10dXVhampKb2+vSDiTJjKDg4M88sgjghMh3StOTk7CPn+/635OCknAo0CVTCa7\n89PbDgA7ZTLZEhbLh7vAfgC9Xl8jk8l+BGpZnFz8Sq/Xz/+rL2BsbExfXx/Z2dnY2NjQ0NCAoaEh\nu3fv5qmnnqK5uZno6GiRqHTjxg3Mzc15+eWXeeaZZ4TJ5tKlSxgZGYldc3h4WBiS+vv7WbNmDTKZ\nTMAtjI2NxRH/iy++YGBggOzsbGHE6erqIiIighs3bjA/Py8IvhKPUa/Xc+LECV588UWampr+iYUn\nJU1VV1dTXl6Os7MzRUVFpKamCs7ejRs3CA0N5eDBg8DiU6etrY3Dhw9jYWEhRmSJiYn4+vqSlpbG\nsWPHGBkZYXR0FJVKRXR0NPfu3ROdbENDQwwMDLC0tGRiYoLx8XHa2tpYunQpzc3NIlwlMDBQeBYC\nAwMJDAwkLy+P2tpafvOb31BTU4PnT/FuSqVSmHVcXV0ZHx/n+vXrpKSkYG1tzfT0tJCHGxgYsH37\ndnQ6Hc888wxWVlZcvHhRpDFptVphQJJOJq6urqxevZoPPviAxsZGiouL2b59O3Z2dvT19Qmfh8TK\nOHnyJKampgQEBBATE4NCoRBsQikQyMnJCRMTE2Gjl2TKvr6+7Nu3j927d7NkyRKWLFkiwogSEhJw\ndnYWHM/o6Gg+++wzZmdnWb16NXK5nJCQEM6ePUtERATffPMNtra2tLW18Yc//IGSkhJ6e3vx8PDg\nxIkTGBkZ0dbWJpLSDx48SGNjIy0tLaxYsUKUBl9//bU4AUm9Crlczu3bt1mxYgUDAwN4eHhgYmKC\nQqHgjTfeEP0IyRns5+cnkO/d3d0C4FtaWopOp/uf5Sno9foC4P/NTZH1Lz7mT8B9FzFjY2PCopyW\nlkZxcTHff/892dnZhISEiIZcVlaWSIgqLy/n2WefxcDAAHd3d8r+r/bePCrKM837/zxQrEVZRbEX\n+w4iCCiLCCi4gRq3GGPMYpJOm87SSzqdSedMJu3MpDvTSTrdnV6STtKvMbbG3bivQUBEkEVl33ek\n2Ip9K5b6/aHPPZ050zPO733faJ+X6xwOZcGB25vnuZ/7vq7v9fkWF9PV1SWET3BHCFNUVISFhYWQ\nH09PTzM5OYmFhQXt7e1CZSij2OPi4rh9+zYGg4EjR45gMpmEU5KsJ9i4cSPnzp0Thri///3vv+GU\nnZKSwszMjAC1PPTQQ0xOThIXF4elpaUgHYWFheHq6sqiRYs4fPgwjo6OwoKtsbERa2trmpub6evr\nw9vbm4GBAQIDA9Hr9Zw5c4Znn31WLGA5OTm4urpy6dIlzM3NBWJe1ktMTk6K7PaFCxfYvn07dnZ2\ngja8Zs0aNm3ahLe3Nx4eHvT39zM1NYUkSURFRQkbtdbWVry8vIQWQRYBdXZ2smXLFiIiIkTviJ2d\nHaWlpRiNRkJDQ8V49Ho9Hh4etLe3s27dOgwGg9jpREVFCchrZ2encGdWq9Uij/DYY49haWmJlZUV\nVlZWdHR0cPLkSXp6eli8eDH19fVYWlrS3NxMamoq7777Lq6urmRlZdHR0UFqaioHDx7kJz/5CRMT\nE+KYsH//foKDg/n0009ZtWoVNTU1Av4j+10UFBTg6e
kpnM1NJhMqlYqzZ8+KXVpvby8ODg4sWbKE\nvr4+Ac6RYUJFRUWkp6dz8+ZNkSsJCwsTAic5nn76ab788kvmz58vmrSmpqZYt24dHR0djI2NoVar\nRe7m6tWrVFRU0NnZKa7RkJAQgoKCuHLlyr3ejg+GolGpVKJUKgUPX6/XEx8fjyRJ5OTkCPaBu7s7\nv/3tb3nyySdZuHChoNO89tpr/OhHP0KhUGAymYSOwMfHh9bWVtasWUNeXh4LFy6ku7sbc3NzIV09\nceIEOp2Ojo4Opqam6OzsRKvVigRda2srdXV1jIyMCCZ/f38/XV1d6HQ6RkZGSEpKYmpqCqVSSWRk\nJO3t7QwODqJUKvH29hY+ga6urhw5coSYmBhOnjwpmm8CAgIAhC17f38/69atw83NTfzfZSzYunXr\nOHDgACtWrMDKygoXFxeysrKws7NDrVazefNmysvLBchVvoBcXFw4ffo0q1evxt3dXVjbqVQq4W3o\n6uqKn5+faCzTaDRCttzd3S2Ui83NzYLbMGfOHNG0JVv+hYSEUFRURFhYGEqlkq6uLr7++mtqa2uF\n74W7uzuLFi0SRzhZDNTY2EhcXBy1tbUMDAwINye44/jl5ubG7373O/z8/Ni+fTu1tbU0NzezcOFC\nJicnxRbb0dGRiYkJUYq7fv06P/rRj9izZ4+QbO/Zs4eenh6BqJMXcblio1AoCAwMZNmyZcKp7PHH\nHycrK0tUjCIjI/ntb3/LK6+8wsTEBPPmzcPMzIzx8XFOnjzJnDlzGB4eFt6OlZWVqFQqdu/ezYYN\nG2hra2Px4sXExMSQkZEhxG95eXmcO3eO5cuX4+DgQHt7O/39/YyNjQm02tjYGB4eHuh0OlE21ev1\naLVaOjs7GR8fZ2pqiqtXr5KYmHjP9+MDYUX/3nvv7ayqquLhhx+mtLSUlJQULCwsGB8fJyAggJyc\nHBYuXEhtbS0rVqxAqVSSnJzMyZMnCQgIEAKegoICampqWLNmDXv27CEgIECYn7i5ufGnP/2J+Ph4\nobePjY3Fx8cHnU7H0NAQTU1NgogcGhrK559/TkhICPb29ri4uJCdnc2VK1eEVZivry+ZmZlCFBQb\nGysSkzk5Ofj6+rJp0yZR029sbGTJkiVMTU1RX19PQEAABw4coLe3l4KCAl544QWuX79OXFwc9fX1\n5OTkAHcaxtra2gR4xsvLC19fX2pra7GwsCAzM1MoLuXuuvr6epKSkhgeHqa1tZXx8XFCQkIwGAxC\nVyAfL4qLiwUP4De/+Q3h4eFkZ2dja2sr3KyHh4dJTk4Wi4OcQEtKShLSboVCQWRkJF9//bXI2ufn\n5wu1XkxMDOPj48ybN4/h4WHKy8vx9/fHYDCgVqs5c+YMa9euJSAgAEdHR06cOIFCoSAxMRErKyvc\n3d3x9/cnLCxM7C49PDwYGhoScvMVK1YII+KZmRl6enq4dOkS3t7eAtM3NjaGjY2NsK7v6enB1taW\no0eP4urqSnh4uGiuM5lMFBQUiGrB6Ogoq1evxtzcXOhCIiIiuHTpkmgT/853vsMHH3xAUFCQMJSx\ns7MTClEZ2WdnZ8fAwAC3b9/Gw8MDlUpFQEAAn3/+OR9//DHj4+PiqDgzM0NVVZWoLtja2lJeXo61\ntTXt7e2iIiU7WCkUCpRKpfAAuXz5MsXFxfdkRT9Lc56N2fh/J/5+aM4BAQG88847QnhRV1eHRqOh\noaGByMhIXFxcyMzMRK/Xs2XLFnJzc+nq6mLDhg2isejChQviKe3g4MCOHTt49dVXRenLwsKCsbEx\ntm7dyq5du1Cr1bS2trJixQrGxsZEA5BWqxXbdVdXV+bPn8+NGzfw8vLi+vXrvPXWW7z11lsCYnH5\n8mVCQ0Nxc3MjJCSEhoYGtFot3d3djI6Oit9x9uxZXF1dmZycZGxsjGXLljEwMEB+fj4LFy7kxRdf\n5LPPPhO9AQqFgs7OTtatW0dLSwvXrl3Dx8cHd3d3LCwsGBwcFKyGo0ePYmFhgb+/v/AT6OzsZO3a\ntZSVlTExMYFarebQoUMkJiZiZ2cnIB4yhVqj0eDl5SUAL2fPnsXS0lK0Mnd3d4vze0lJidCG2NjY\nCG6inIuQk6z+/v7o9XqhIZF9LR0cHDA3N6e8vFxAY+3s7HjzzTfZtWsXvr6+DAwMkJ2dLWCyK1eu\n5PDhwwwNDREdHU1VVRULFizg7NmzPPvss7S1tQkAjEqlEhDTuro6du7cye9//3uCgoLE7lKmQh06\ndIjNmzfT1dVFZ2enyNksXryYr776Ci8vL8LDw7l165bw/hwZGcHBwYHXX3+d999/H3t7e6ytrXFw\ncKC6upqysjLOnDnDk08+yb59+/je977H2NgYgYGB/OM//iM/+9nP+Kd/+ie+973viYax0NBQcRxM\nT0/nhRdeICsri3nz5okksoODAw0NDQQGBopjkZxY7O7uZmhoCCcnJyoqKti5c6doGmttbeXpp59m\n5cqV93Q/PhA8BZlY7OjoyK5du/Dz80Ov14uzndzh9p3vfIesrCx8fX2JjY2ltrYWpVLJpUuXRCZe\nrhUDdHZ24urqSkBAAGlpaQQGBnL8+HECAwMxGAwkJiYKQYzcddnW1sYjjzzC97//fSorKxkdHUWj\n0XD8+HFRkrx9+zZXrlyho6ODRx55hNLSUuEufOPGDa5fv05wcDAGg0GANpuamqirq2PZsmXY2dlR\nXl6Ot7c3O3bs4PDhw8CdLjfZxnzu3LlMT09z8OBB7O3tcXNzY8+ePSiVSm7duiXowFVVVWg0GtFa\nbmtrS319PW5ubkiSxOjoKPn5+czMzPD666/T1taGpaWlqBTEx8cL0GpLS4uQUa9fv56pqSna2tqI\niYkhKCiImZkZamtriY6OFp2Lg4OD3Lx5E19fXxwdHcUiJPf33759G6VSiUajYWJigt7eXo4cOSIY\nhNHR0YIcDXDq1CkBT6mvr6ekpIQ33ngDSZJYvnw5gYGBKBQKDAYDJ0+e5NFHH0Wr1QpZuKOjI4WF\nhXz44YdYW1uLPhg5L9DY2MiCBQvIycnhxo0bbN++HZPJxKZNmwTuT2YbbN++naSkJJycnATIRZ4b\nmcUQEBDAwMAAFRUVtLW1oVarhbrUzc2NJ554gujoaCIjI+nv7+fpp5+moaGBDz74gMWLF+Pr60tl\nZSUGg4Hu7m4aGhqAO41iS5cuxdbWVsBtFQoFTk5O3LhxgzVr1ohrRqFQ4OXlRUhICGNjY2i1Wr78\n8kuampoEDPntt9++5/vxgcgpvP/++zujoqLQarWMjo7i7+9PWVkZKSkpwt7MYDAILgHccao+deoU\n5ubmovd9enqawMBAdDode/fuJSkpiZiYGOzs7IQMtru7W8haBwYGhC5A7qx0cnISpbrQ0FA8PT2x\ns7NjdHSUqqoqrly5QkJCAg4ODgLMGRQUxK1bt7hy5QqvvPIKCxcupL+/n8rKSq5cucKzzz6Lm5sb\nCoUCR0dHW
lpaMJlMZGZm8tlnn/HUU09x5swZ1q1bh7OzM+Xl5UxMTLBw4UKam5uxt7dnwYIFwhg3\nODiY3t5ejEYjSqWStLQ0qqurUavV1NbWioYlucyo1WopLS0lJCQELy8vioqKsLS0ZOnSpWL3UVZW\nJoAtskw8NDSU1tZWzp07x7x587h48SI//vGPcXFxEbjylpYWrK2t0Wq11NfXC0/LqakpampqcHBw\nEOrTNWvW4ODg8I1mN9mKLjo6mk8++YSNGzeyfPlympqaRCep3EVZW1tLTU2NQLrfvn1bzIO8iA4O\nDjI4OMjjjz+Ot7c3vb297Nmzh6SkJGJjY7G2tqaurk6UMtVqtShZy9qG5557DkmSsLCw4MCBA0K8\nJcuKZ2ZmBPnJ29sbW1tbsbN1cXHB3d2dgYEBQkND0ev1pKamcv36dUJCQujs7KS2tpZjx44xPj4u\ndkne3t5otVpqamrIzc3F19dXVCZWrVpFRkYGCoVCXKuSJDF37lx6enowNzensLCQpqYmXFxcsLGx\nYXR0FG9vb7E7vAsRuqecwgOxU1CpVEL3LZfkZCBmamoqfn5+PPLIIwwODmJlZUVSUpKgEX/++efU\n19fj4OBATU0Nly5dEj3pmzdvpqioiMnJSQHgXL9+Pbt372ZkZISmpiYiIyNxd3cXPAVvb286Ojqw\ntrbG19eX+vp68aRLT08H7khmNRoNOp2O1tZWQkND6enp4Yc//CFDQ0NcvHiRtrY2UlNT2blzp+AJ\nmEwmiouLWblyJWq1GisrK5566ilBipLt5GWxVklJCampqfT09NDR0UFXVxcDAwOcPXuWiooK1Go1\n0dHRGI1GHnroIR10oy0AACAASURBVDw9PXn44YfZtm0bdnZ29PX1oVKpcHZ2ZuPGjbS1tVFdXU1i\nYqLovZcv0szMTBwcHOjo6CAiIgJHR0f++Mc/8sUXX3D+/HmqqqrYuXMnp06dEi3YExMThIaGYmdn\nJ5J1zz33HGFhYSxYsIClS5fi6enJokWLsLGxobS0lJKSEoGhlxvDOjo6BBTXysqKU6dOMWfOHPLy\n8pg/f75wgV61apXwFs3Ly0On05GTkyNoWcnJyfT29pKcnMz58+e5fPmyUErqdDr2799PV1cX9fX1\nwrOitLQUS0tLFAoF2dnZVFZW4uzsjCRJ9Pf3s3XrVtLS0sjPz8doNPLss8/S3t6On58fcKfsLSct\nZUjrzZs3cXd3R5IknnrqKYqLi4mLi6Ovrw9bW1sqKyvRarWMjIyIKk52djY+Pj5iUd22bRve3t7M\nnz+foqIiAgMDBYZOrricP3+eqakpnJyciIuLEzzQmJgYmpqayM/PJzw8XECJ7zUeiJ3C7373u51P\nP/20OD5UVVXx5JNP0tLSgkajoby8nL179+Lh4YFarebdd98FICQkhNWrV7Nv3z5+97vfoVKpRDPR\nwYMHxZavqamJnp4eFi5cSE1NjTB/lTHYMzMzBAcHC4fnlJQU7O3tGRkZwdraWtBvHB0d+eKLL1i9\nejVGo5GUlBThCO3o6EhpaSmDg4MsXLiQvr4+0aPh6ekpeHmbNm3iiy++IDc3l7lz59Lc3IyLiwvX\nrl3jvffeo6qqijVr1pCbm4ter2dwcJDJyUlRw16wYAFdXV14enoKs1RZ/DUwMMDw8DDXr19HkiTR\nAFRfX4+dnR0NDQ3CozAmJkbg0WR6scwQtLW15cSJEzg4OJCeno5Op8NgMODu7k5kZKTAyTs4ODA0\nNCQafPr6+igpKUGpVDI+Po6joyPj4+MMDw9z6tQpbty4QVRUlBDqzJkzh4yMDJYsWcLIyAgnTpzg\nsccew9bWliVLlnDp0iVCQ0PZunUrOTk5uLi4MDU1xQ9+8AMCAwNJT09n6dKlQjh1+/ZtHn74YS5d\nuiTIU0uXLuXXv/41zs7OxMbGMjk5SU9PD97e3uTl5Qnmhl6vJyAggP7+fnQ6HQcPHhRn/H379gkY\nzb59+wgMDOT69euUlJQI1mVTU5NgJLz77rucOnWK+Ph4goOD6enpob29nbq6Om7dukVaWhqdnZ0E\nBAQI5WpaWpow85Fl6q2trTz22GNcuXKFpqYm0XW5YsUKjEaj2FVERUUREhJCe3s7CoWCX/7yl6Sk\npAhFqex0/ne1U5DddyRJYseOHZhMJj777DPKysp4++23uXXrFnCn9Tc/P5/IyEjhLnT69Glu3LjB\nkiVL0Gq1KBQKwfp/4YUXcHd3Jy0tjaCgIIKCgjAYDAwNDdHa2kp1dTXV1dXCVdnDwwM7OzuB6Lpw\n4QKnTp2itLSUgIAA0fvg7u5OR0cH4+PjmEwmxsbGUCgUvPjii7zxxhukp6fzxBNPiMVgamqK0tJS\nQZOSjV2mpqawsrISIBC5c/Lq1atYWFig0+nw8fGhtrZWXHwlJSWcPn2awcFB8vLy8Pb2RqVSoVAo\nKC0tpa2tTQBtOzo6uHHjBnPnzhUEKoVCwdNPPy18HGSb9FOnTlFQUEB0dDQGg4G8vDw0Go0wP5Wl\n4c3NzeL829XVhaOjI5988omQSEdHR2Nubs7ExAQTExOoVCqhhFy/fj02NjbExMSg0WjYv38/vb29\n9Pf3C3Brfn4+MTExNDc38+qrr1JfX09+fj6JiYk4OjpiNBqFArSzs5Pbt29jZmZGXl4eWq2WjIwM\nioqKBIaus/NOi46dnR1tbW2CZRAWFsayZctEKfC5555DqVSi1WrJzc2lr6+PefPm8dlnn2FlZYWH\nhwdmZmaEh4djb28veln8/Pw4fPgwbm5uGI1Gdu/ezdtvv80rr7wiSrBubm4MDg4KizpnZ2e2bt1K\nR0cHxcXFBAUFCXNYGcVuaWnJG2+8QX19PfHx8bz55pt0dXXh7u4u3K/d3d0ZGhriypUrvPvuuyIp\n+sorrxAVFcXjjz+Or68varVaoO7v6X78372h/0+EnGBTq9U4OTkRERHBSy+9RGhoKGlpacTHx5Oa\nmkpaWhqhoaGsWLECW1tbDhw4QHNzM1qtFp1Oh06nE4k3+HebdkD4HsjATCcnJ+bOnUtycjLW1tZ4\neHgI1Z9sEy/XwtevX09WVpYQlsjy1ePHjwsl3pw5c+jp6cHCwgKj0SgQaE1NTaK3wMbGhjfeeIO8\nvDxxllUqlULv7ufnR0REhGickSSJoqIizM3Nyc7OFscs+QljZ2fH2NgYhw4dwtbWlvj4eCFYkV2p\nXFxcKC8vp7Kykt27d+Pn50dZWRldXV2iCzUwMJDU1FSCgoKYmpqisrJSuFx98cUXWFpaEhUVhclk\nYnp6mo6ODuFlmZ2dzfLlyzEzMxPyYjkX09HRQWlpqUiUubu7ExMTQ01NDVlZWaIy1NraSlNTEwCv\nvfaacHQeGRlh+fLlQoqelZWFRqMRC3tmZiZ2dnacPn0aLy8vQSQKDAzk6tWrdHV18cEHd9p1PDw8\nCAsLw2AwCBjsvn37MBgM+Pr6snfvXhoaGqiqqqKhoYGenh5sbGxISk
oiLS2N1NRUoqKi2LBhA0aj\nUQjOTCYTK1euZMGCBVRWVop29t7eXg4dOkRfX5+AucgK2ZiYGKanp+nt7eXxxx8XSVwbGxthirNh\nwwYuXryIj4+PaMeemZmhr6+PhQsXEhoayuLFi0UreElJifBKmTt3LiUlJXz44YccPnyYgYGBvz9G\no4wYv3z5Mvb29ixevFgg1f/85z+zYcMGDAYDGRkZREVF0dbWRnp6OkeOHGHjxo2EhYXR1NSEp6cn\nW7ZsEZJOvV5PSEgIZ86cQafTcerUKRwdHbl16xaJiYlMTEwwZ84cAgICuH37tuhlt7GxQaFQEBMT\nIxyrVq9eTV7eHUyEnMyTJIn09HRxEcoy2K6uLkFMlp9E8fHxtLe34+HhwYoVK0RFYGJigosXLwLw\nq1/9ioSEBPr6+ti2bRs9PT2Mj4+Tnp7O1NQUw8PDBAcHs3HjRqamptBoNNTV1ZGcnIyXlxc5OTkM\nDQ2hUChYvHgxXV1dgjo1PT1NSEgIra2tGI1GIiIiaG5uFko5ObPt7+9PXl4e3d3d5OXlsXjxYubP\nn8+yZcu4du0aZWVlpKenY2FhQWlpKWvXrqW5uZmEhARqa2s5c+YMDz/8MEajUeRyZCS9bGu3du1a\nRkZGkCSJuLg4IR8HGBgYEAq+Tz/9lA0bNpCQkIBCoWD37t3odDr8/f05dOgQjo6OVFdXs2zZMqam\npvj5z3+Oq6srY2Nj/PjHP+bjjz/mtddeY9++ffj5+ZGZmYm/vz/19fU0Njby7rvvihzQgQMHCA8P\nx8nJCTMzM4KCgnj++edJTEzE3d0dg8HA1NQU5ubm9Pb2ioeBWq1menpatH5fvHhRIP/lc7+rqytn\nz54lPj6eJUuWcPPmTYaGhnjxxRfp7+/nu9/9LqtXr6arq0sg//71X/+VpUuXcvnyZfz9/fHx8cHP\nz4+rV6+SkpLCuXPnmJqa4le/+hVKpRI/Pz/27t1LRUUFZ86cEUfpzs5ORkZG6O/vv+f78YFYFJRK\nJVlZWQLjLq/+AQEB9PT0MDIygkqlYsOGDbz//vu89NJLNDQ0kJKSgpeXF6WlpUiSxIEDB0hNTRXk\nJaPRyJw5c1AoFPj5+ZGTkyP4h9evX0etVosW1enpaZHRlb0Yb9++TXV1NUFBQej1etLT0/nlL3+J\ni4sL3d3d+Pj40N/fT3FxMU1NTXh5eQk0uLOzM1lZWYSFhWEymXB2dsbBwQFvb2/UarXAvct+jseP\nH+eJJ55gdHQUa2trLCwsKCwsZHR0lJdffpnTp08Lv81bt27R2NjIjh07uHr1KgsXLqSsrIzp6WnC\nw8Px9/cXxKGIiAiMRiNhYWFcv36d+fPno9PpaGlpISkpCaPRSG5uLjqdDj8/P3x8fKipqSE9PZ19\n+/aRlpaGTqejra2N3NxcrKysyMzM5NFHHxV9DLIfhSRJtLS00NnZKX6OvMOQF4nx8XFRKpQ9KLy9\nvQVx6MCBAygUCjZt2sTWrVsxmUzU1tbS1NSEh4cH3d3dZGdnk5SUhFKpRKVSCRWqg4MDhYWFfO97\n3yM3N1fkpQBu3rwpOigtLS1xdnYW3Zm1tbW0t7ejVqtZuXIlp0+fFsY4ERERdHR00NDQwMaNG6mq\nqhLJW0Ag7OUdio2NDUajkcHBQbKzs0lJScHb25u3335bLMp1dXVYWVlRXV3NoUOHBDcB7nQLHz16\nlK6uLkwmE4sWLaK0tBRbW1uuXr2KVqvlq6++QqVSoVKp6OzsFD0sdXV1dHZ2CqiwPDaDwSASo/cS\nD8SiMDw8zCOPPEJGRgYVFRXs379fOESbTCaioqKEF0NwcDDZ2dnMzMygUCiorq4mKiqKzMxMkpOT\n0Wq1ItMqi1wsLS2xs7PjxRdf5OjRo7S3t7N48WIsLS0ZGxsTmXq1Wk1FRQX29vYCfSYnp2QcN8Bb\nb73FunXrmJmZwdXVlcLCQpKTk0Vysri4WGzLq6urefTRRzGZTHR1dVFVVcX27dsxNzenrKxMcAkA\nTp48ydatW8nMzKSwsJDAwED6+vooKyvDzMyMy5cvU15eLpiU5eXl4lwtP20tLS35/PPPhf/FyMiI\nyHR7eHgwPj7Ol19+KcxLACHYcXJyYvfu3dTV1eHs7IyjoyPx8fFMT09z7tw5rl27JtD4ckb8woUL\nODg4iByCjY2NaGIyNzdHr9cLRyNzc3MBH8nIyKCuro4lS5bQ1tYmkHSyBqWrq4uEhAQuXLhAQ0MD\nTz31lMDlTU9P4+/vj5OTE9PT0yiVSr766itmZmZ49tln0ev1FBQU4OPjI+r+DQ0NGI1G0tLSOHv2\nrECxyeauKSkp7NixA3t7e86fP4+ZmRkxMTFER0fT3d1NT0+PyLkMDAwIxF1LSwvPPPMMtra2wqio\npaWFnJwcVq1axaJFi0Rfza5duxgeHhalTVnL4uzszOTkJDY2Nly6dAmANWvWYDKZ+Mtf/sLk5CRb\ntmyht7eXsrIyXFxcSEpKoqmpicLCQh566CFaW1sF3i8nJ4fXX38dvV5Pa2srKpWKEydO3PP9+EAs\nCgC//OUvefXVV8XTY9u2bVy4cEHo4DUaDaWlpfj4+NDY2Ii7uztVVVWie9DMzAw3NzdRqoM7Lc7p\n6emcPHmSjz76SCjPZBWjj4+PgJksWLAAtVrNv/3bv/HWW28xMDDABx98wB/+8Ad+/OMfM2fOHFEu\n+vjjjzl48CBff/013t7eQk2oUCj47ne/i5OTE1999ZXIERw9epSFCxeKcusnn3zCokWL6O/vZ2Bg\nQPgHbtq0idzcXJKSkmhoaGBwcJCnn36aTz/9FHd3dyIiIkhOTubUqVPiopCbuBoaGoRF/Ouvv47R\naBTb3Li4ODo7O/H09BSL3sTEBJmZmWg0GiIiIigpKcHLy0uAWhctWsTo6Chnz56lt7eXtLQ0Ghsb\nCQoKoqGhgaioKAoLC9m4cSOlpaVcvXqVwcFBGhsb0el0JCQkiJKypaWlMOeVJAmVSoWPjw9ubm64\nubkJkhXc0YnIi8znn38uYLpZWVksW7YMhUKBj48Pvb293L59m+XLlyNJEuvXrycnJ4fo6GgKCwv5\nyU9+wszMDFFRUcAd8VJLS4tQYGo0GlHp6urq4o033hDuUykpKQLI2t3dLVzFZJ+F1NRUYWK8ePFi\ndu7cyWOPPUZbWxtOTk4cOHCA2NhY1q1bJ1r6LS0tiY+Pp6SkBL1ez5IlS7C1tSUyMhInJyeam5vx\n8fFh+/btXL16lb6+PgoKCpAkSezUgoKCSElJQafTceTIEbq7u3F1deX06dM4ODjws5/9TOAAp6en\nRZVJxvHJprj/XTwQJckPP/xwZ2pqKlqtFltbWyYmJqitrRVmsw899JCopctPbYVCQVhYGGVlZbi5\nuVFaWkpqaiqurq4cPXqUK1eusHbtWi5fvoyPjw9qtZru7m7R73716lXmzZsnauYBAQGClzgzM4Od\nnZ2gOVtbWwv+wJEjR1i2b
JnIe5SVlXHr1i1u376Nn58fTk5OzJkzR1CoT58+jaOjI0NDQ3z11VfU\n19djY2NDfHw8BQUF4gl5+fJldDodVlZWdHV1MTIyQmRkJI2NjTg6OgoATGZmJrGxsbi7u1NZWYmt\nrS12dnbiKREZGUlRUZEgVz3yyCMEBwczPj7OxMQES5cupaysjJCQEHFcGhgYYGBggISEBJFzWbFi\nBRkZGYLg3NfXR2pqqiBLBQUF0d/fLwRU09PT5OfnExYWJoREUVFRQpY9MjKCyWQSC7bsciXfqJIk\ncfDgQRYtWsT58+cZGRkhOjqaBQsWkJ+fT0REBJ988gk1NTX4+voSEBCAtbU1V65coaCggOnpaWJj\nYzl+/DjW1tb09PQwMDDAd77zHcbHx0lNTcXJyYni4mKysrIYHh4mNjZWqArHx8cZHx9ncHBQmPP+\n5je/YWpqCjMzMxISEigsLBTdiF5eXhw6dIh33nlH6EeeeeYZ4UAVHBwssPb29vaEh4eLhrI//vGP\nHDhwgLKyMo4fP878+fNxdHSksrKSqqoqysvLmT9/Pk5OTtjb2zM0NERAQAAWFhZCKp+YmEhUVBSu\nrq4YDAbMzc0pKSnBzs6OtWvXcuDAARYsWCD+VtbW1ly6dOnvpyQpK8v8/PyYnJyks7NT1N1l5dh7\n771HQkICN2/eFBMqW5zPnTtX4NNu3bolJKDXr19n3bp1jI2NMTg4KGS8slNRZWUlPT09dHZ2snfv\nXj766COmpqZITU0V6rPY2FjhPhUaGgpAVVUVHR0dtLS0MDo6yvT0tJAh29nZsXLlSlavXs3ExARP\nP/00hYWFeHh4kJqaSnV1NUajEb1ej0ajoaysTLgYrV+/Hq1WKyzrhoaG+Oijj2hraxOJI71eL/IO\nxcXFwB0sunw2v3DhAi4uLnh4eBAXF8fw8DDXrl0TCsTm5maSk5Opq6sTAiT5/NrR0SHyD++//z6F\nhYUMDAygVqu5efMm+/bto76+Hm9vb8zMzFiwYAHT09OicuTn58eSJUvw9/cnODhYzPn+/ftpb28X\nJKmpqSnKysoYGRmho6ND5EPgTpVA1kMUFBTg7e3N2rVrmTNnDlqtVuRZ5Cdif3+/aO1uamritdde\n4/bt2+h0OgoKCnj//feBO+3ucjJ5cnKS5uZmkpKSCAoKwtfXV7hjRUVFYWNjQ25urih3y9oGrVaL\nyWQSPhvy3Pv7+2Nvb8+NGzfIyMjg3LlzDA0NiQpWU1MTFhYW4kktV4G8vLzEYiMvpq+88gqAEM+F\nhISgVCqprq7GwsKCpqYmJEni9u3btLa2cubMGXHPwB0eRUdHh5Bmm5mZYTAYxFHxXuKB2Cn8/Oc/\n3yk3hFy/fp0FCxYQGBiIg4MDmzdvpqGhgf7+frRaLb6+vrz44ovCfDQpKYnu7m5eeukl2tvb0ev1\nXLhwgWvXrvH555+TkZFBYmIinZ2d4g8xODhIXFwcJpOJ6Oho+vv7uXLlCp6enoLZr9frCQoKEqUc\ne3t79u/fT2FhoaDzHD9+nJdeegknJydsbW1xcHDAyckJk8lEVVUVP//5z4mNjRX5AUmSWLFiBZOT\nk1RUVDAxMYGzszOvvvoqn332GUajEZPJJOTQTU1NomchOjqalpYWtm3bJoxojEYjra2tjIyMkJmZ\nKUxXZIt7e3t79u7dK1qMra2tUSqVuLm5ERERQWVlJZcuXaKuro6GhgZ6e3uFWYmjoyOPPfYYKpWK\nmzdvcvbsWVasWEFiYiJHjhxhyZIlYkF0cnJicnKS+Ph4wYwcGxsTaLfx8XFh2NrT0yPKrPPnz//G\nce7LL78kJiaGsLAwZmZm8PLy4vz58+Tm5vLiiy+i0WgEuTgzM5Pt27fj7e2Nubk5BoOBqKgoxsbG\n0Gg0zJ8/H4PBgIuLCwcPHuSZZ56hpqZGiL5WrFiBv78/hYWF/PrXv2bbtm20t7dz7NgxnJycGBgY\nYM2aNQLMU1FRwUsvvcT09DRFRUVC6CX7N/b19bFmzRrKy8tZv349g4ODtLe389VXX7F7925yc3NZ\nu3YtVlZWmJmZCRMhWT0qY9nz8vLIyMhg69atVFVVoVarWb16NbW1tbz55pvChNbKyopdu3bh6ekp\nDITt7e1Fb9DmzZuxtbVFr9cL5+vc3Ny/n52CpaUlzz//PMXFxcJHsKWlhebmZsLCwgSXwGg0EhQU\nRFdXlxCieHl5idVQTo7JLjt79uxh8+bNAkxRUlJCcXGxKMmFh4cLs9iIiAisrKzo7e2loqJCWKfJ\nZF6VSiUWiCVLlqDT6URpsa+vj/HxcWxsbKisrGRkZAQnJye0Wi0VFRUolUrq6uoYHh5mzpw54oJU\nKpVYWlqKDPmWLVtwd3dHo9EIOe7cuXOpqqriF7/4BR988AH19fXCP0J2bd6/f79gHU5MTHDlyhVh\njRYXF4efnx/r1q2jsbGRK1euiHLa/v376e/vR6VSUVVVhclkEqRn+WYbGRlhYGCAdevWcfHiRdFl\n2NraKuTYsnZ/fHycwMBA8bN6enqora0Vkuqenh56enoIDAwkLi4ODw8PLCwsqKqqEqwDPz8/hoeH\nCQoKYmxsjNWrV1NVVUVgYKBoUJOz+83NzVy9evUbjtXXrl0TykQXFxfxczUaDYODgxw+fBgnJydG\nRkaoqKggKyuL5557TvQ/dHR00NfXJzwsLCwsRIK7uLgYg8GAhYWFSDorlUoKCwupr6/n4MGDvPzy\ny9TX15OQkMDIyAiNjY1ERkaSmprKrVu3GBgYwMXFRex45s2bh4+PD1VVVaJEDXck7wkJCahUKgGF\n8fPzE7mtvr4+AVhxc3MT0F/571BRUUFPTw+pqamCN3mv8UAsCpIkcf78eVasWCEMVGW82qFDh4iL\nixMeig0NDTQ0NFBYWMhXX33Fr371K1xcXDh69CglJSWo1WoxsfITWZIk5s+fj5WVFf39/eJJJvc/\n9Pb24u7ujrm5uTDodHZ2Rq1Wo9FoSEhIoKOjg+effx64g2KXJdRhYWHcuHEDe3t7NBoN4+PjjI6O\nMjAwwIkTJ/j6668FdbmmpoaLFy+yceNGLly4IL5X1v3LSjyj0cilS5eIj4+ns7NTCKFiY2PJzs5m\nfHwcX19f7OzskCSJefPmERsbS0JCglgQLSws0Ov1ohKSkZHBpUuXxLEsNzeXmJgYduzYgUql4uWX\nXyYhIYGUlBQsLS2RJInh4WHOnDkjHKBCQ0NxcHBAp9Ph7e1NRkYGQUFBqNVq0Q5ubm4uBFlyj0h0\ndDR+fn6oVCrc3d25ffs24+PjghjV29tLSUkJcMd5Wy7ZxsfHMzw8zKJFi/jnf/5nPv74Y1JSUggK\nCuLhhx/G0tKShIQE3N3dsbOzw9PTk7S0NA4fPiywe1lZWQACXrNjxw727t2Li4sLx48fR6PR4Ojo\nyMzMDPb29tjb2zM6OkpdXR2Tk5MsXboUjUYjYK0jIyPC5hD+3Y5OtuDLyckhPDwck8
lEZGQk6enp\nRERE4OvrS2RkJD4+PuzZs4d33nkHV1dX8XBzd3enpKSE5ORk4E4VprS0VAB/+/v7CQ4Oxt3dHZ1O\nJ8hg8nF3amoKhUKBSqXCzc0NtVpNe3s7mZmZfP311/+jkuQDcXz48MMPd8q5AZPJhEaj4dFHH6W5\nuZk//OEPAHh6egprrKioKGGbZm9vL9qB/fz8mJ6eJjQ0lD/84Q/88Ic/RKPRYG9vT2ZmpjBkWbJk\nCadPnyYvLw9JksQ5XuYTWFlZUVBQwMqVKwV6TbYoP3jwoMg0yzxGuXkmLS2NtLQ0PDw8hLnr5OQk\npaWlODg4UFlZiVqtxtXVFUdHRyRJoqSkBJPJRGlpKZs3b2Z8fFyIZ2RX56GhIebNmycSTLIPg0wG\nXrJkCcnJybi6upKbmytuKm9vb8ExyM/PJy4uDrjjzjQ+Ps6iRYsoLCwUSj0rKyuBsz958iTHjh1j\n7ty5WFlZERwcLLpLZYDtihUrKC8vp6CggPb2duzs7ERewtnZGZ1Oh1arFW5F7e3tTE5OIkmS4DDK\nKr3k5GT+9Kc/sWvXLpqamlAoFJw4cYKJiQkiIiLw8fGhu7ub5uZmnJycWLNmDZcvX8ZoNFJaWkpT\nUxPm5ubs2bOHtLQ0pqamCAsLIyAggI8//pj09HSBhZcfAjMzMyxbtoyenh6srKxoaGjg1q1brF27\nlpmZGZ588knMzc25du2aaCxSKBQ4OzuLI+ejjz7KzMwMq1atIjMzk6KiInQ6HUVFRZw5c4b29nZM\nJhMbNmygurqao0ePCvoU3CmV6vV6IUZavHgxn376qUik29jYYDKZqKysFLQuZ2dnFi9ejJeXFwaD\ngUOHDnHgwAFSUlKoq6vjT3/6Ex9//LFgYyiVSlpaWigvL//7OT7ITxlZqGRpaUlRURFFRUU888wz\npKen4+zszPT0NF5eXnz66ad0d3fT2NhIX1+fuMFaW1vx8fERZjBFRUU0NjZy8uRJtmzZIrwQPDw8\n2LhxIzY2NkxMTGBubk5dXR3m5uZMTU0xPT2NwWDgwIEDog0WEKRd+fw3NDQk2nKPHTsm/C0vXrxI\nXl4eLS0t2Nvbo1QqaW1tZd68ecLMxd3dHaPRyE9/+lPBEpAvlrCwMNFmbG9vL7wZ9Ho9er1eyKDl\nklZUVBR1dXXs2bOHxx57DC8vL/z8/HBzc8PJyQmNRsPy5cvx8vISxrDr1q3j1q1buLq6kpeXR01N\nDUePHuXkyZOiSeuFF17AwsKCuXPnsm7dOpqbm9Hr9cydO1ewF5uamsSxSqFQiE7Luro6XFxchCu1\nyWQSoqE5c+bwzDPPEBkZibe3N15eXsKfob+/n4KCAmZmZgR8VhbknD9/Hmtra1JSUhgaGsLT05Pm\n5mYWLFiADEVjDwAACuNJREFUt7e3gKKUlpYK1zD5bzc4OMj4+Djl5eWkpaWRl5eHvb29yEFkZmYK\nNajsXKXX6zlx4gQuLi7k5+fj5eUlGBOxsXecECsrK6mrq8PBwYG1a9eSmJjI4OAgjo6OvPnmmwJe\nMzg4iFarFVZ3kiRhZWVFeXm50EysWrVK2Oc98cQTTE5OolKp6OrqYnp6Gl9fXyYnJ+no6BCItpiY\nGMLDw/nhD3+ISqVCr9fz5JNPsnLlSiGykuX99xoPxE7hvffe2xkREcH8+fMpLy9namqKlStXEhwc\nLFpAZcutAwcOEB0dTUVFBTdv3mTevHm4ublhZmYm3JQ8PT359NNP+Yd/+AexrRwdHcXV1RWNRsP0\n9DTDw8PY29vT2dnJQw89hLe3t+jBkM1dMzIy8PX1xc3NjcbGRjQaDWfPnmXbtm3iaSYbiXR3d1Nc\nXEx/fz9DQ0PExsYSGhqKra0t0dHRWFpaEhoaKlDdZ86cETbiwcHBnDhxgoiICGxsbIRS0MvLi7a2\nNkpLS7l+/Tpr1qwhNDQUlUrF0NCQaOzZv38/Hh4eGI1GUYLNyckRHgVGoxFXV1eRAF21apXY6ciJ\nRp1OR11dnRC8REZGivflGzMsLAwvLy+6uroICwvj2rVrosHHy8uL4eFhIWuWd1aydPfy5csif2E0\nGkWS1WAwEBoaSnNzM0eOHEGlUokEb3d3N4GBgZiZmQnHJ4PBgEajEarV5ORkUcKW+yqSkpKEx8LA\nwAD79+/n+eefp66uDnt7e3x9fVEqlTg6OnLt2jWqqqp46aWXRC5pZmaGrq4uwczIycnB2tpaZPMv\nXLiAj48Pp06dYteuXeTn56NSqYTD05YtW0Qj3OnTp8nMzOTGjRu4urry8MMPc/nyZUJCQigvL+fl\nl18mOjqaX/ziF0xNTYk2ctlro6qqCnNzcxYsWCCu5fHxcQwGAzU1NZSUlPDss8+K69HW1lZca5WV\nlQwMDDB//nx6enq4du3aPe0UHohFYd++fTttbW1JSkoiICCA4eFhALFd8vb2Flu2xMREsrKyhLmF\nXq/nsccek+uwNDY2YmZmxpEjR9i2bRs1NTX4+PigUCiwtrbGZDKJ8+BHH30koBhnz57F09NTCFtC\nQ0OZnp6mpKSEwcFBgoODUSqVHDx4EH9/fwAhfMrJycHd3Z1f/OIXGAwGBgcHqampYc6cOaKG3tvb\ny/Xr14VBx+joKM8//zy7du0iKCiIS5cusXz5ctzc3AQpuqWlhdzcXEpKSvjBD34gcOTyruPGjRsM\nDw9ja2tLdna2OB/LctuJiQmR4LOxsaG1tRVra2vOnz8vZN1TU1MsXLhQ0JI0Gg1z584VPRFyEtfG\nxoba2loOHz5MVFSUSHRduXIFPz8/bG1t6e/vZ3R0lJiYGGJjYwX/QjbadXBwEJUg2bFITurl5+dz\n9epVHnnkESHflTHr8sUvN8f95S9/EZ6Po6OjODo6UltbS0hICBqNRiwkcAd6++c//5nvf//7qFQq\nhoeH6e7uJjIykvz8fFpaWkT2v6amhpmZGY4ePSp6ENzc3HB1deXMmTOsWbMGPz8/oqOjGR0d5eTJ\nk6IfpaqqShC4BgcHRV+Kl5cXgYGBPPvss8TFxXHz5k1B+3J2dhYl6vDwcOzs7Fi6dCm7d+8WO4DB\nwUFBdnJwcKCsrIyYmBh8fHxwcXGhqKiI3NxcOjo6UCqVojFPqVQKYLGZmRkjIyP3vCg8EMcH2Rdg\nfHycY8eOodfraW5uZmRkBDc3N6qqqsjIyMDJyYmamhr8/PxIT09n0aJFpKSkcOTIEWpra8UFderU\nKfGzFyxYQF5eHmZmZvT29lJfX8/09DTZ2dnCt7G2tlZcpM7OztTV1fEv//IvTE1NCaxWY2OjSIY1\nNDSInUVlZSWxsbG8/PLLlJWViS1dYGAgpaWlnDt3jkOHDnHs2DHS09PRarW88847eHp6irKXo6Mj\ncCdDfvjwYeEQ1dDQgK2trWg2SkhIwNLSEicnJ2pra6mrq6O6upra2lomJyext7dHrVZTWVnJ8PCw\naJkdGRnBz89PtBnLDAi5rCkLYObMmUNvb
y8mk4mNGzdib29PV1cXixcvRqvVUlxcLDB4stdnZGSk\naO3W6XTCZKe1tVVk9Ovq6ggMDBSY+sDAQGpqarh165Zw6968eTMA6enpomQsOzdrtVphAZeamkpA\nQABeXl489dRT4sk9d+5ccQw1GAyiFbyoqAhAeHTIu4Fjx47h5eWFSqVi48aNHD9+nEWLFhEYGIij\noyMqlYpr167R0dGBu7s7CxcuxMXFRRjbhIeHi587PT3NkSNHyM7Oxs3NjZ6eHk6fPi2amMLDw8nN\nzeXcuXOMjo4KsxbZpEfmZiQmJpKZmQnAD37wA2pqatBqtQQHB7Np0yb8/PyYP38+X3/9tSBYyaV7\n2SLQ1tYWk8lEZ2engOwYjUby8/Pv+X58UGjO3cAI0HO/x/IfwpEHb0wwO67/STyIY4L7My5vk8nk\n9N990wOxKABIklR4L/jpbzMexDHB7Lj+J/Egjgke3HHBA3J8mI3ZmI0HJ2YXhdmYjdn4RjxIi8J/\nmxW9D/Egjglmx/U/iQdxTPDgjuvBySnMxmzMxoMRD9JOYTZmYzYegLjvi4IkSWmSJFVLklQnSdJP\n7/NYmiRJKpUk6aYkSYV339NKknRRkqTau5/vnZX9/38c/0uSpC5Jksr+6r2/OQ5Jkt64O3/VkiSt\n+hbHtFOSpPa783VTkqTV3/KYPCVJuixJUoUkSeWSJP3w7vv3e67+1rju63zdc5hMpvv2AZgD9YAf\nYAncAubex/E0AY7/4b13gZ/eff1T4JffwjiSgWig7L8bBzD37rxZAb5359P8WxrTTuAn/8n3fltj\ncgOi775WATV3f/f9nqu/Na77Ol/3+nG/dwqxQJ3JZGowmUxGYD+w/j6P6T/GemD33de7gQ3/t3+h\nyWTKBgz3OI71wH6TyTRhMpkagTruzOu3Maa/Fd/WmDpMJlPx3ddDQCXgzv2fq781rr8V38q47jXu\n96LgDrT+1b/b+K8n7/92mIBLkiQVSZK04+57LiaTqePuaz3gcn+G9jfHcb/n8PuSJJXcPV7I2/Rv\nfUySJPkAUUA+D9Bc/YdxwQMyX/9V3O9F4UGLRJPJFAmkAy9JkpT811803dnr3fdyzYMyDuAj7hz9\nIoEO4Ff3YxCSJNkBR4AfmUymwb/+2v2cq/9kXA/EfP13cb8XhXbA86/+7XH3vfsSJpOp/e7nLuAY\nd7ZwnZIkuQHc/dx1n4b3t8Zx3+bQZDJ1mkymaZPJNAN8yr9veb+1MUmSZMGdG2+vyWQ6evft+z5X\n/9m4HoT5upe434tCARAoSZKvJEmWwFbg3l0r/g+GJElKSZJU8mtgJVB2dzzb737bduD4/RjffzGO\nE8BWSZKsJEnyBQKBewP8/2+GfOPdjY3cma9vbUzSHSLJn4FKk8n0wV996b7O1d8a1/2er3uO+5Xh\n/KvM62ruZGfrgX+8j+Pw404G+BZQLo8FcAC+BmqBS4D2WxjLl9zZXk5y53z5nf9qHMA/3p2/aiD9\nWxzTHqAUKOHOhe32LY8pkTtHgxLg5t2P1Q/AXP2tcd3X+brXj1lF42zMxmx8I+738WE2ZmM2HrCY\nXRRmYzZm4xsxuyjMxmzMxjdidlGYjdmYjW/E7KIwG7MxG9+I2UVhNmZjNr4Rs4vCbMzGbHwjZheF\n2ZiN2fhG/H8ObACBgUB27gAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1383d59490>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvGd02+eV9ftDbwRAgA0sYKfEXkRSIqlG9WLKkmXH3R7H\nLXHKGzvFuSmTeDyZjB2XiZzliceOl+NlS26ymu3INNUlSiQlNokV7L2AJAiiEwBxP+jqf8cfZo3f\n9cZrfO/S/qIlCSTBP55znnP22fuIQqEQN3ETN3ETNyD+n34DN3ETN/HNws2kcBM3cRNfws2kcBM3\ncRNfws2kcBM3cRNfws2kcBM3cRNfws2kcBM3cRNfwteWFEQi0XaRSNQtEol6RSLR//V1/ZybuImb\n+PtC9HXoFEQikQSwAFuAUeAycE8oFOr4u/+wm7iJm/i74uuqFFYCvaFQqD8UCi0C7wO7v6afdRM3\ncRN/R0i/pu8bD4z8p7+PAqv+qxdrNJqQRCIhPDwchUKB3+/H6XQikUjQaDTYbDZUKhVqtRqVSsXM\nzAxhYWH4/X4WFxcJBoMYDAZcLhcKhQKPx8P4+DgGgwGZTIZEIsHn82E0GvF4PCgUCnw+H4uLi4SH\nh+N2u1EqlczPzyOVSlGr1VitVnQ6HUtLS2i1WuH/JiYmSExMRKVSMT8/T2RkJFNTUxgMBtxuNyKR\nCL/fj8PhIDk5GbfbDYBYLMbhcKBUKvH7/Wg0GjweD36/H7FYzNTUFPHx8UilUqRSKS6XC6lUit/v\nRyqVolKpEIvF+P1+RCIRS0tLuFwu1Go1S0tLyGQyXC4XMpkMqVRKIBDA7/cjl8vxer0oFAqkUikS\niQSr1YpMJkMulyOXy1lcXCQQCHzpPWk0GoLBIMFgEJFIhFqtxuFwIBaLEYuv3yVyufxLz9Hn8yES\niZBIJLhcLsLCwnC73ajVarxeLwBOp5PIyEh8Ph8qlQqFQoHT6cTtdjM7O0tGRgahUIhgMCj8vm63\nG4VCQSAQIBgMolarkUgkBINBpFIpwWAQuVyOWCzGZrOh0+nwer0EAgFEIhETExPExcUhk8nQaDTY\n7XYCgQA6nY6FhQWMRiNyuZzZ2VmWlpaQy+XCOVKr1dhsNvR6PW63G7lcDoDdbmd2dpaoqCj8fj9h\nYWFIpVLsdjtKpRKRSEQoFCIUCrG4uIhUKsXpdGI0GoXPRqlUMj09jU6nA0AkEqHRaBgaGiI6Ohqv\n14tarRa+h1KpxOv1olKphPOhVqvxeDwEg0FUKpXw3EQiEV6vF4lEgkgkQiwWMzk5ORMKhaL+u+D9\nupLCfwuRSPQ48DhAVFQUv/vd74iJiaG5uZna2lrWrFlDZGQkCQkJNDQ0UFZWhsViYcOGDXR3d5OX\nl8f+/fsZGhriySefpL6+nu7ubiGJvPHGG7z11lu89NJL7Nmzh3PnzpGVlcXy5cupra0lPz+fq1ev\n4vf7yc3Npbm5mfj4eJKSklAqlSiVSnp6em48TIqKiujo6OCPf/wj+/fv5/z58ywuLpKTk0NnZydO\np5OKigrOnz/PzMwM4+PjPP3001RXVzMyMkJKSgrZ2dlcuXKFDRs20NraSjAY5IEHHqCrq4tbb72V\nJ554ArvdjkqloqCgALlczsDAANnZ2bS2thIVFYXNZqO4uJimpiYAvF6vkNDUajXDw8MkJiYCkJWV\nRX9/P5OTkwSDQbZt28aFCxdISkpicHCQ6elpNm/ezNWrV8nMzKS3t5fs7Gx6enoYGhrCYDCQn59P\nc3Mzi4uLmEwmVCoVfX19xMTEUF1dze7du5mZmcFkMnH48GFyc3MRiURkZ2dz7do1pFIpSqWSiIgI\nhoeHefjhh3nllVeIj4/HZrNhtVoxGo2kpaXxk5/8hL/85S8cOXIEk8lEXl4ePp+PmZkZFhYWeOut\nt7jjjjvYs2cPX3zxBRkZGQwODlJSUsLc3BwFBQW89NJLlJWVMT09zdLSEgcOHGBiYoIHH3yQW2+9\nlaioKObm5njvvffQ6XRoNBo6Ojowm82UlJTg9/s5d+4cTzzxBO3t7VitVmpra1mxYgVjY2NkZGSQ\nm5vL0tIS69ev54EHHsBut5OWlsZnn31Gbm4ubW1tbN++HZPJxOTkJDExMbS1tbFt2zampqYYGxvD\n6/Vis9nwer2UlZWhVCqpr6/HbDbzpz/9iT179qBUKlm2bBkSiYSpqSmsVitLS0tERUVhsVgwGo14\nvV6uXr3KHXfcAYDP5xOStcFgwOFwcO3aNQwGA6+99trQV4nNr6t9GAPM/+nvCf/PvwkIhUKvh0Kh\nklAoVGIwGBgaGkKtVpOTk8O3v/1tIiIi8Hg8DA0NIZVKaW5u5pe//CWTk5M4nU7eeecd9Ho9t99+\nOz/96U+JiopiaWmJUChEWloaADU1NfT09LC4uMjjjz9OamoqGRkZ3HfffSgUCoxGI08++SQJCQnE\nxcXh9/tJS0sjPT2dixcv0t3dzdTUFEqlksnJSeH7Xr58mcLCQiIjI5mbm2Nubo6YmBjsdjtRUVEE\ng0Huv/9+Ojs7kUqv591du3Yhk8n41re+xezsLGVlZczOzlJdXY1MJgNg9erVzM/PU1JSwuLiIi6X\niytXrjA+Pk4gEGBoaIiysjKGh4fxeDxcuHCBrq4uzp8/TygUYmxsDJVKhUajQaPR0NXVxfDwMD09\nPfh8Ps6dO0dnZycAbrebH/zgB/T397N+/XpOnToFwIkTJzh58iTHjx9nfHycpKQkCgsLSU1NFZKU\nSCRCJpOxdu1aBgcH8fv9DA4OkpeXR1hYGDqdDplMRkREBBKJhNTUVMLCwoiJiRESz/T0NGazmYiI\nCNRqtfCc/H4/Ho+Hzz//HKVSSXR0NLOzs/T19VFSUkJkZCRDQ0OkpKQwODhIeno6NpsNs9nM8PCw\nkODHx8eJiYnh1VdfBSApKYnq6mo6Ojrwer388Ic/JC8vjyeeeIKpqSkWFhaora3l4MGDPPbYYyQm\nJmKz2XC73WzcuJHExEQCgQCVlZV0d3dz/Pjx6wc7IYFHH32U+fl5qqqqSElJ4be//S0JCQksLS2x\nfft2tmzZwne+8x08Hg+xsbGsXbsWuVzOPffcw/bt23E6nURFRVFWVsbY2PUwsdlsDA8P093dzYkT\nJ+ju7kalUhEbG0tPTw9tbW1cvHgRtVrN1q1bmZmZoampiYsXL/KLX/yCmZkZ+vv76ezsxGQyCdXI\nV8HXlRQuAxkikShFJBLJgbuBY//Vi+12OwAOh4NgMMi//du/IRaLiY2NpaCggF27drFhwwYee+wx\n7HY7PT09LC0todfr0ev1rFu3DovFQlVVFQB6vR4ArVbLz3/+c7RarVCa9vb2Eh0dTTAYZOvWrYRC\nIe666y5iYmIwm82oVCpSU1P55S9/SVJSEhs3bkQmk5GamsqhQ4cAKCgowOl0cvXqVRQKBbt376ao\nqAiHw4HD4SAiIoKamhr279/P
sWPH0Gq1XL16lZSUFK5evcrGjRuJi4sjLS2N0tJSlpaWAOjp6SE1\nNZWxsTFcLhfJycmsXLmSa9euYTKZMBqNhEIhDAYDFouF6upqmpubUSqVhEIh0tPTqaioEG6h7Oxs\nBgcH2bFjB/Hx8eh0OtasWUNfXx+5ubnCczx79ixffPEFBw4c4OzZs1y4cIGZmRlOnTrFxMSEEJwF\nBQVs27aNQCCA1+vF5/MxMjJCTEwMTqeTe++9l4mJCVQqFZOTk0KrcuHCBcxmMyMjIwSDQfbt24dC\noSArK4u77rqLlStXUlFRAUB7eztbtmyhuLiYo0ePcuzY9WOj0+moqqoiOjqaxsZGnE4nmZmZKJVK\nwsLCOHDgAK+//jpjY2PccccdyOVyOjo6sFgsAHz88cckJCQwOTmJ1WrFZrNhMploamqipKREODe7\ndu2io6ODc+fOAVBdXU19fT39/f3k5eVx4MABYmNjycrKAiAQCPCnP/2J9vZ2jEYj58+f5/Dhwxw8\neJCxsTHkcjnV1dUcO3aMpaUl/H4///zP/0xKSgotLS0YDAaSkpKYmZlBIpHw+OOPC7GgVCqRSCSY\nzWa0Wi0LCwtYrVaWLVvG448/zl133SWc83PnzlFTU0NVVRXbt28nMzOT9PR0FAoFK1eu5KOPPvrK\nwfu1JIVQKBQAfgBUA53Ah6FQqP2/er1SqSQQCGCz2VhaWiIzM5Po6GhUKhV//vOfsVgshEIhUlNT\nWbVqFZWVlVy8eJGEhARUKhXJycl0dHTQ39+PRqMR+r5gMMjIyAjp6elIpVK8Xq/Qq7lcLi5fvsy7\n777L888/z9q1a8nMzGR8fJzq6mo++eQTli1bRn19PfPz80xOTvLv//7vwPUsHhERwX333cfw8DBy\nuZyenh4yMzNZvnw5ycnJVFZWkp+fj8PhYNmyZezYsYMrV64QFRWFXq/H4/GQmZlJTU2NcDvMzc3h\n9/uZmppCr9czOjqKSCQiKiqK8vJy1q1bR1dXFydPniQ/P5+PP/6YzZs3YzQaqaqqYs+ePSgUCsLC\nwoiLi+P06dOUlpbS0dFBYWEh0dHRQpUzPz/P3/72N+x2u1DCPvzww/zgBz/gySef5L777uPtt99G\nq9USERGBTCaju7ubP/zhD5hMJnw+H5s2bRJuOplMxgcffIBCoUAul1NVVYXFYiEpKYm5uTlee+01\nHA4H+/btIy0tjVAoxPT0NKOjo8KhBtBoNJw7d47k5GT27t3L+vXrMRqN7NmzhwsXLuB2u/nWt77F\n4OCgwKVkZWXh8/mYnZ1Fo9FgsVgIBAJs3rxZuCBufFYKhQKr1crg4CAjIyNERkZSWFhId3c3FRUV\n2Gw2ampqGBgYQK1W88wzz5CTkyNUOSaTifr6empqagC4cuUKycnJrFmzhtraWmJjYzl58qTAJV28\neBGZTCY8L7lcTlRUFJ9++ikA9fX1DA4OMjw8LPwJYDQayc/PR6PR0NPTQ0ZGBnq9ntnZWWQyGZ2d\nnWg0GqKjo7l06RIAO3fuFJJGT08PEomEzMxMuru7+e53v/uV4/dr4xRCodDfgL99ldcuLi6Sn58v\n9P0KhYJTp06xc+dOfD4fw8PDKJVKTCYTa9euZcOGDdx6660sLi4SCoXo6Ojg6aefJi0tDYvFImTF\nwsJCOjo6EIvFtLS0kJCQQG5uLi+//DI5OTno9XrsdjtyuZzGxkaSk5OZmppiYmICgLS0NORyOV1d\nXVRUVNDS0gJcD96zZ89y991343K5+I//+A+cTifx8fFERkayZcsWPB4PGo2GhIQEwsPDef7555HL\n5dhsNvr6+qitrWVsbIykpCSSk5MBBIJzzZo1yOVyJiYmCA8Pp76+Hq1WS0lJCaWlpcjlclQqFZ2d\nnezatYu5uTkOHjxIYWEha9asYWFhgYGBAaKjo0lJScHpdPLKK6+wdu1apFIp9fX1yOVy2tvb2blz\nJzqdjrm5OeRyOQ6HA7fbjUwm48yZM8THx5Oeno7P58PhcLB7927EYjGNjY1kZGQQHx9PU1MTGRkZ\nBINBrl27RlNTEw0NDaxatYrk5GRGRkYIDw8nMjKS4uJixsbGSEtLY2RkhNzcXM6cOUNmZiZwvVqa\nnp5m69atZGVlMTc3h0ajYWpqip07d3Lq1Cl++9vf4vF4ACgvL+fYsWO0trbidrv56KOPyMzMRKfT\noVaruXDhAgCdnZ3odDpsNhv33HMPg4ODfPzxxwB8+umnbNiwgdHRUYxGI/v27WN6epp3330Xm81G\nYWEhP/nJT3jyyScpKSlBJpPR2NjIu+++i0wmY/Xq1YyOjqLRaAQ+p7m5WWhHPR4PWq2WlpYW6uvr\nKSgo4MEHH6S9vV3gt6anp3G5XHz++ecAjIyMYDKZmJ+fRyaTcfLkSXQ6HYmJiTidTlauXInL5cLt\ndmMwGEhMTKSkpISYmBja29tpbGwkEAiQmppKREQEVqv1K8fu/xjR+J8hFovZvHkzLpeL1NRUUlJS\naGhooK6ujtjYWNLS0jCbzdTV1QllZ1JSEg0NDSQnJxMVFUVkZCQnT57k8uXLVFVV8cILL/DXv/6V\njIwMzp07h8PhEFj9mZkZkpOTiYyMRCaToVaraW1txWazceedd/LJJ59gNpvxer3Mzs5SWlrKwsKC\ncBADgQB33HEHFy9eJDIykv7+flQqFdXV1Tz00ENcuHABsVhMYmIiMzMzpKWlMT8/z+rVq1EoFDQ2\nNhIVFUVaWhp9fX2o1WoAVCoVpaWlzM7O4nK5GBsbo6CggKysLEwmE4FAgMjISADOnDnDtm3bSEtL\nQ6vV0tzcTF5eHn19fcTHx2MwGOjv78dkMpGUlIROp8PpdFJaWsrc3ByTk5MYDAZ8Ph9zc3OEhYUx\nMzPD4uIiBoOBDRs2cOjQIZaWligoKKCwsBClUgnA559/TnFxMePj40xNTVFWVkZSUhLHjh0T2PG4\nuDiKi4sZHR0VJhMdHR1kZGRQXFxMTEwMCwsL2O12xsfHhURcXl5OVVUVdXV1jI2NodPpaG9vJzMz\nE6vVygMPPIBGo2F+fp64uDiMRiMWi4XBwUHuuusutFoty5YtIz09Xbho4DqZvW3bNoaGhvjRj35E\nbm4uOTk5DA4Okp2dTX5+PmvWrOHUqVO88sorAifx+9//nrVr15KRkYHH48Hj8fDLX/6SBx98EID0\n9HRqa2tpbW3lscce49KlS7S3t5ORkcHDDz9McXExIyMjWCwWzp8/j8PhoLCwEIvFwsLCAgsLC1y+\nfJmoqCgUCgXbt2/n0KFD3HLLLcLUxOFwYDabsVqtWCwWVq5cycDAABKJBICBgQFSUlIwGo3U1tYy\nPz+PXq9nfn6egYEBjEYj09PTXz0e/w4x/X8MkUjEhx9+yLZt2zCZTKxYsUIYG46Pj7Nt2zba29tx\nu938/ve/JyYmhujoaEwmE+np6axevZr+/n5WrFhBSUmJcBC2bNlCYmIimzZtwu124/P5a
G1tZdWq\nVTQ2NtLQ0IDNZkOr1eJ2u+nr62N6epqysjL8fj9NTU2YTCZ6enoIBoNCpRAbG0tXVxfbtm2jrKyM\nXbt2EQqF2Lx5M7Ozs4yPj6PVanG5XMTExFBeXo7JZCI+Pp5NmzaxadMmwsPDCYVCPP3008K4rq+v\nj4MHD7JixQqmpqZISEhgYWEBkUjE7OwsHo+Hl19+GY1Gw7p16+jr6+PKlSuMjo4SExPDzMwMKpWK\n9vZ2urq6UCqVHDhwgJ6eHqampsjOziYrK4u1a9eSlJTEE088QWVlJWazmdnZWbxeLyKRiIqKCmZm\nZjCbzTidTk6fPo3D4cBoNDI0NMTevXuJiooiLCyM7OxsYVKTmppKYWEh6enpyOVyoZ0LCwsTpjqD\ng4MkJCQwMjJCWloa2dnZ7Ny5k7KyMgCsVivHjx+ns7OTL774gitXrmA0GmlububatWs0NzdTWFjI\ngw8+iFKpxOl04vF4WLduHT6fj5ycHJKTk7Hb7Rw9epSYmBjgepkeERHBsWPH2LRpE1NTU2RlZVFW\nVoZcLmfNmjUoFAoUCgUikYi6ujpsNht79uwhLy8Po9HI1atXuXTpEitXrhR4MK1Wi9PpZOfOndhs\nNrZu3crw8DA6nQ6pVEooFGJmZobh4WHm5uYoLy9Hp9PR09NDbm4uUqkUsVhMe3s7n376KQsLCwBI\nJBL27t3Lhx9+SEtLCxKJROAHwsPDGR8fJzw8HKfTyaVLl/jss88YGBjA5/MhkUiEpLd8+XLsdrvQ\nlnwVfCMqBZfLhc1mo6urC4vFQnd3N/n5+ezcuROA73//++Tk5GA0GgkLC6OxsRGxWExFRQUNDQ0s\nLCxwzz33MDQ0xNGjR4UbbXx8HLFYTF9fHxs3biQ2Nha/38/p06eprq5GIpFQWFgoMOmZmZnU1tYS\nCAQYHBzklltuob6+nqqqKjweDykpKQCMjY0xOztLU1MTFosFp9PJtWvXKC8vZ8uWLbz22ms8/PDD\nvPPOOxw6dIhXX32VRx55hNzcXP7whz+wadMmli1bht1u55NPPqGgoAC43u4oFAra2toEkik6OprF\nxUU2b95MR0cHycnJeDweuru7CQaDWK1WKioqyMnJoaenh56eHlQqlTAX12g0DAwMIJfLqaur45FH\nHmFxcZGqqirOnDmDwWDAaDTy1FNP0dHRwcLCAh0dHczOznLlyhWKi4vJzc1FLpczPj6O1+ulrq5O\naG38fj9+v5+JiQni4+MJBAIsX76cCxcu0NzczNLSEvHx8fT09LB9+3asVitOpxO5XM7S0hJdXV24\nXC7h1rNarSQnJ1NaWsrAwAChUIj4+HhaWlpQqVR0d3ezadMm+vv7CQQC7Nu3D7geRFVVVYSHh3Pi\nxAkGBgYoLy+nuLgYgB07dtDb28tPf/pT+vr68Pv9NDY2srS0RGJiIg6Hg5qaGtrb2/nb3/7G7t27\nKSsr47HHHmPdunXs3LkTr9fL0NAQJpOJ2NhYAHp7e1Gr1fT29lJSUsLs7Cw//OEPCQ8Px2638/LL\nLwu6icrKSiwWC5OTk8TFxfHRRx+Rnp7O8PAwwWCQzMxMJicngeu81a9//Wu8Xi9xcXGMjo6ybds2\n/H4/AImJiUxNTREIBNi2bRsZGRn87Gc/w2g0kp2djU6n49ixYywsLLBu3Try8vKE1uS/wzeiUjAY\nDJSWliKRSJicnCQ+Pp6wsDAMBgPFxcVIpVLKysqEcVxKSgoHDx4kFApRVlaG0+nk2Wef/VIwAcTF\nxQnjxNraWrRaLWfPnuXAgQOMjY2hVCrJzc1Fr9cLPeD4+DjXrl1DIpEwOjpKTk6OEHw3ZsEejweV\nSsXs7CwbN27kww8/ZOPGjaxcuZKFhQU2btxIbW0tAwMDJCUlkZmZyc6dOwXBldvtJj8/H5lMRmJi\nIiMj13VeQ0NDZGVl0dXVhUajwefzERERQVRUFLW1tURERAiEZmVlJeXl5YIAKS4ujpiYGKRSKXK5\nHJPJREVFBWFhYWRkZBAZGSkInmpra3nzzTdpb2+nurqaX//610xPT2O324UEXVRUREVFhdDr3xjH\nut1uBgYGyMnJwe/3C2Td5s2biYyMZHp6muHhYVasWEFUVBT5+fmIxWKysrLQ6/WMj48TGxtLamoq\n1dXVhIeH4/V6Bd1FYWEhNTU1eL1eMjIy6OnpYfXq1V86IwsLCyxfvpykpCRuv/12pFIpCwsLnDp1\nit7eXiYmJti5cycREREcPHgQ+H/H0x988AH79+/HbDYL7aTJZGJmZkbQZng8HiIjI5mZmaGwsJB7\n772X9evXC7zF3XffLYw61Wo1IpGIPXv2kJaWxuzsLAsLC4yPj3Po0CHm5+dxuVw0NTUJIrobBLjH\n48FmszE5OYlGoyEmJobU1FQALl26xD/8wz/wwAMPUFJSQlpaGufPn0elUhEIBPD5fHz00UfU19cL\nQiqFQsHMzAzp6ekArFmzhnXr1uFyuSgqKvrK8Sh55pln/k/i+e+CF1988ZnIyEgsFgspKSnIZDIq\nKysZGxtjamoKiUTCzMwMfr+fiIgI+vv7qaysZGBggMOHD6PX6xGLxWi1WsxmM6mpqRw5coR7772X\n2NhYQYeQkZGBTqfj+PHjbNmyhb1793Lrrbfy61//mszMTBITE0lNTWXFihV4vV6hapmamuLOO+/k\n888/5/Tp0xQUFFBUVMTp06dpbm6mvLwcrVZLQkKCcCuUlJSg1+u57bbbSEtLIywsjOHhYS5duoTb\n7aa0tFQQ5ezatYs///nPbN68mZmZGcRiMeHh4RQVFQlcx9q1awWh0tjYGOHh4bS1tZGSkoJSqcTh\ncOD3+2lubsZoNKJQKFhYWKC/vx+3201WVhZqtRqDwSB83dzcHK+++ioJCQncfffdqNVqwsPDUSqV\naLVa2traCIVC5OfnMzs7S3t7O83NzSQmJgokWm9vLxEREdjtdqanp4WR6Q2F5A0C74YqT6/XC+W0\n1WolMjKSuro6EhMTOXv2rKCLSE5O5sMPP2Tnzp24XC7q6+uRSCRs3LiRTz75BL/fz/z8PGfPnqWg\noACz2cyFCxew2Ww0NTWxatUqwsLCUKlUHDlyhHfeeYf33nuP8vJympqauPPOOzGbzZjNZqampmhu\nbiY3N5euri7uueceFhYWMJvNiMVi9uzZg9FoxGazMTExgUQiISMjg48//pjk5GRycnKEhHpDERoV\nFYXP52NycpLR0VF0Oh3FxcWsXr1aaBelUikymYwdO3bg9XrJzMyksbGRpqYmvv3tbxMMBqmtrUWt\nVpOUlIRIJCIQCNDY2IhcLmfLli0EAgFiY2NZtmwZi4uLgmJ0fHyc8fFxoqKiBJKzvb194plnnnn9\nv4vHb0T7IJfLUavVuN1uRkZGWLVqFW+99Rb9/f2IRCIUCgXt7e0CYZaQkIBCoaCsrEwop69cucLU\n1BSlpaW0trYC14UwN3pdg8HAyMgIvb29/Oxn
P2Pt2rWMjo5y/vx5XnrpJZxOJ0ePHiUYDFJQUIBM\nJsNutwtB3tnZicPhACAnJ4euri5iYmJYXFyksLCQiIgIlEolL7/8MqtWrcLn86FQKGhtbUUikaDX\n64VS1Wg0cvjwYWJiYpidneXDDz8ErpfAs7OzwjM5e/YsRUVF2O12Ll68yLVr13C73RQXFwtS4YGB\nAZaWlnA4HMTExKBQKJidnWV4eBi/309sbCwZGRmcOnWKtLQ0Tp06hc1mo7GxUQi6gYEB3n//fQYH\nB8nKyiIhIYEvvviCzz77jL1793LkyBHMZjOXLl2isrKSyclJZmZm0Gg0wu928uRJnE4nJSUlbN68\nmfb2dg4ePCgQrCKRiCtXrpCSkkJvby9Hjx5leHgYl8uFXq/H6XQCYDKZOHToEMFgkBUrVjA/P09u\nbi6BQACj0YhEIiE+Pp7FxUWsVit5eXnExcUBUFFRgVqtxufz0dnZSW5uLvX19cB1Enf37t3ExMSw\na9cunE4n+/bto6CggIGBAeLj41EqlTz66KNMTU2RnJxMQUEBw8PDtLS0kJmZiVgspqqqilWrVvHu\nu+8KZ+HChQvExMRw++23MzExgcfjQalUotFosFqtZGdn4/V6kclkNDQ0UFxcLHxmIyMjhEIh5ubm\nCAaDREds64BcAAAgAElEQVRHA9Df309ycrIg/BKLxaSnpzM6OipUMhqNhvz8fA4dOiQ8i7vvvhu9\nXs+hQ4dQq9WYzWbcbjf/O8bHb0T7cEPRlZubi9lsZtmyZWzYsAGpVIpOp+PgwYO0tLRgNpvJyspC\npVKRkJCA2+3G7XZz4MAB3G43e/bsQS6XCyRQX18fO3bsoKioSPgaiUQiTAtaW1tZWlrCYrEIH3J3\nd7cQTAaDAYlEQmlp6ZeIms7OTsbGxnA6nWzZskUgtm5UCmKxWAisf/qnfxIUhZGRkZSWllJSUoLZ\nbMZmsxEMBoWbw263k5KSwvj4OAqFQqgK6urq0Gq15OTkkJGRwe7du3E4HExPT6PValGpVMzNzXHh\nwgUmJiaw2Ww88MADwviqs7OT5ORknE4nQ0NDtLS0CKTrlStXePPNN3E4HGRnZ+P3+zGbzcTFxVFR\nUSEoNicmJoRpxY0SVqPREBsby+LiIrGxsWi1Wnw+H/X19YJfYXh4mI8//piRkREyMzNZsWIFZ86c\nYXx8nMLCQqH6W7NmDQAZGRmkpaXx3e9+l6SkJC5fvszHH39MfHw8V69epbGxEZvNRmxsrEDudnR0\ncOLECW655Raio6P5zne+g1gspq6uThC0Xbp0iYaGBtRqNfn5+UilUlatWiVMSW677Tb0ej0qlYoP\nP/yQDz74QFATnj17losXL5KdnQ3Am2++KYw6p6enSUpKYtmyZdTW1iIWi4WJi1arJSkpSdCWnDlz\nhvn5eU6cOMH09DQqlYqkpCSio6MJhULU1tYK56yrq0vwnezYsYOwsDDi4+Mxm81cvXqVL774QuC1\nMjMzUavVdHR0IJFIOHjwIEVFRTQ0NDA1NYXb7f7/3vRBr9dz8eJFmpubOXPmDG1tbaxevZq9e/dy\n++2343A4SEpK4sKFC/T397N8+XJGRkaYmZnh8OHDlJaWkp+fj0KhwGaz0dbWBsDS0hKLi4t0d3eT\nnJyMXC4nIyNDaA9uBPDZs2cZHR0lOjoaiUTCBx98wNjYGGNjY4yOjjI6OkpKSgqLi4sAxMfHk5eX\nx86dOxkcHKSnp4f6+no8Hg+33XYbWVlZHDhwAK1WS3l5uTD21Ol0TExMcP78ebq7u1EoFGzZsoVV\nq657xbxeL6tXryY2NhaLxUJ8fLyQaCIjI9FqtRgMBtRqNd3d3QwPDxMfH8/c3BxKpZKCggLBTFRT\nU8OaNWuEakKtVjM/P4/T6WRsbIyVK1fy/PPPc/ToUWJjY/n5z39Ofn6+EMx5eXmsWbOGsLAwAoEA\nVqsVqVRKV1cXKpUKk8nE+Pg4LpdLmLjodDouX77MmTNn6O/vZ/v27QKJd6M8v5HoNm7cSH9/PzEx\nMXi9Xubn5wF49913yc3NZXFxkatXr5KVlUVbWxvHjh0jNzeXqKgoKioqWFhYIBQK8frrr2MwGFAq\nlbS0tNDf309BQQEVFRUUFhYK5q1Nmzaxdu1a4f1lZmYikUgwGo3k5uaiUCjwer2CpFwikfDFF18I\nvJRSqWRoaIiPPvqI7u5utm7dKpzfyMhIQqEQarUasVhMZGQkw8PDREdH4/F4kMvlrF69mrKyMtRq\nNRqNhhMnTuByufB6vdTX16PRaAgPDxd4q7vvvpuwsDDMZjP79u3DZrPR0dGByWTie9/7Hm63G4vF\ngslkIjIyErlcLqhqMzIyGBsb4wc/+AHZ2dlIpVJBgflV8I1oH5aWligtLWViYgK5XI7T6aS5uZmw\nsDD++te/8sADD9DS0sKOHTt44oknsFqtfPjhh4yOjgoPTiQSoVKpmJ6eJjc3l1OnTrFr1y5Onz7N\ntm3baGtro6Ojg1AohM/n4/jx4zidTm699VZKSkqED+FGKZuQkMBvfvMbwQn5/vvvC2RbfHw8VquV\n9PR0lEolzc3N3HLLLYIG4ejRo3R1dfHb3/6WtWvXMj09TXV1NSaTiaysLMxms3CrnD59WhC8zM/P\nMz8/T1hYGB6PR+jjbTYbg4ODqFQq4uPjOX78OK+//jqvvvqq4Gx84YUXMBgMZGRkoFKpKCoqYm5u\njlWrVtHT00NLSwvf+ta3yMnJoaOjA7fbzejoKFVVVWRmZgpBV1BQQFNTE/n5+ZSXl2MwGIQRmt1u\nRyqVMjc3R2ZmJhaLhdTUVIxGI++++y4tLS385je/AeDtt9/mjTfewGQy0dHRQV5eHhMTE0Ii9ng8\nhIeHk5aWxpkzZxgYGADg0UcfRS6XMzIyQlhYGCdOnGDZsmX88Ic/xGKx0NnZyQsvvMDhw4cxm80M\nDAwglUoJCwsjGAyyfv16PvvsM4aGhli3bh2Dg4PAdWVhbGwsra2tmEwmfv7zn3PfffcRFRXFzMwM\nu3bt4rnnnqOjo4Of/vSnfPDBBwQCAW677Tah9E9MTKS3t5cNGzbwr//6r8D1anR2dpakpCRMJhNa\nrZbGxkaeeuop4VYPBoPExsby9ttvc8cddwjq0E8++QSNRsPExARut5tdu3Zx9OhRAJ577jk2bdpE\nWFgYCQkJvPXWW4yOjlJUVMSWLVu48847hWrX6/USERHB0NAQdrsdn88nVGVvv/02UVFRjI+Pf+V4\n/EYQjb///e+f2bNnDzk5OfT19QHX57/x8fHY7XbCw8PJzc0Vxl8DAwPExcVhsVhQKBQkJCRw+vRp\nwaEXCoUEBnv9+vUcP34cpVLJ8PAw8/PzhIeHMzMzg1qtRqvVMj4+ztjYGNPT00xOThIKhRgZGWFo\naAi5XE54eDiDg4M4HA5Onz7N2rVrMZlMHD16FLlcjl6vx+fzERUVRUNDA1arlaKiIpKSknA6nfh8\nPnJ
zc4UeLyoqCqvVKugQuru7aW5u5sc//rFg+XW73TgcDsFnMDMzQygUwmg0MjAwIFizdTodfX19\ndHZ2IhaLBSdjc3Mz27Zt4/Tp0yQlJfHFF1/g9/tZsWIFCQkJXL58maGhIXQ6HVarlbCwMEZGRlix\nYgWnT58mMjJS4GpsNhsSiYSOjg5UKhU+n4+4uDhEIhHt7e0kJCQIKswNGzYgk8mIjo5Gp9Oh1+up\nrKzE6XSi0+mwWCzU1NTQ3d1NeXk53d3duFwuysvLOXHiBBUVFbz44ovk5+czMTFBZWUlL774ImVl\nZaSmpjI9PU1kZCRpaWlIJBJycnLYtWsXycnJFBcX09vbK4zthoeHCQQC1NTUcM8992C32wkGg4yN\njXHixAlBmm232ykuLiYsLIy1a9dy8uRJgSvxer2kpqby9ttvc/XqVRYXFxkaGsJms9Ha2kp2dja3\n3HILU1NTrFmzBqlUysmTJ7Hb7eTk5BAIBNDr9URFRfHRRx+xfPlyxGIxS0tLbNiwgfj4ePR6Pe3t\n7ahUKgoLCzl27Bh79+4VdBgLCwtkZmZiNptZWlpixYoVpKWlYbfbqa2tFVrmuLg4xGIx586dIyYm\nhomJCWJiYrBaraSkpHDy5MmvRDR+I9qH8PBw4uLiaGtro6qqitLSUvx+PwqFgszMTDIyMoiJiSEu\nLg6NRoNMJqOurk5wpt12220sW7ZMkNfekHRGRETQ1NREQUEBLpcLn89HTEwMly5dwuv1Eh8fz/r1\n64W5//DwMBKJBKVSSXFxMevWrQOgtbVVqFTguqJRrVYLE4acnBxGR0epqakhOTmZdevWceXKFfx+\nPykpKYSHh1NcXExhYSEff/wxZ86cYW5uTgjUGyWjSCRieHhYMBgFg0E0Gg06nU4QEsnlcsxmM9HR\n0YL+3+PxsHPnTgoLCwkPD0ev17Ny5Uo6OjpIS0tjYWGBkpISYZfD/v37mZubw+12o9FomJubQ6fT\nkZGRIegbent7GR0d5dq1a5w8eZKwsDC0Wi2BQICYmBgsFgtut5tNmzYhkUhISEigsLCQ6elpJiYm\nhEM6NDREb28vbreb+Ph4TCYTy5cvJzs7m9raWrKzszGbzczNzQHXCdZbb71VGHXGxMRQXFxMamoq\nQ0ND+P1+/vEf/5Hjx48zNzfHunXrWFpaorq6mpdffhmpVMrQ0BBhYWHceuutgrZkdHSU/v5+oqKi\n2Lp1K08//bRQNSwuLrJr1y7hvdTW1iISiVhcXOTy5cu88MILbN68mdLSUqqqqpDL5Tz77LMAJCcn\n09vbi06nY2hoiKWlJfbs2UNKSgrT09OCazYuLo4tW7bQ0dFBbW0tS0tLGI1GYXx522234XA4GBq6\n7m5ua2vD4/EgFouZmZkhJyeHZcuWkZOTw9WrV2lpaRFG8F1dXSQlJQkt9c6dOxGLxYLxa3x8XGjP\nvgq+Ee3D1NQUNpsNn8+Hy+Wip6dHmEio1WoqKiro7u4WHG8ulwu4PtNuaGjgwIEDwq2Ul5dHb28v\nAJs3b8ZisaDX63G5XMTHxxMTE8Mf/vAHzpw5IwRCIBDgqaeewmAwUF1dTU9PDx0dHWzZsgWr1crw\n8DC33347H3zwAYBQEVRUVHDixAkKCgrYvHkzn376Kb/61a9oa2vjrrvuor6+nqWlJe6//37279/P\n4OAgjY2N5OfnExsbS2xsLP39/TQ3Nwu/19jYGMnJyYKU+/Lly3zve9/DbrcLVUBbWxvh4eEsLi5y\n4cIFrFYrKpVK0DgsLCyQmpoqjN7efvttwsLCCA8P51e/+hXj4+M8+OCDxMXFUV9fz4YNG/D5fJw5\ncwapVMr9999PXFwc77//PtHR0QwMDAi6kUAgQFdXFxEREdTV1dHb24tYLKa8vBy9Xs/k5CSTk5Ps\n2LEDsVjM1atXmZ6eZu/evVy+fJnBwUFSUlIQiURERkYSDAbJyspCo9EA170PkZGRuN1uMjIy+OST\nT9iwYQPZ2dkoFAoOHjzIU089hdFopKamBrvdLqgttVot2dnZdHV14ff72bFjBw899BBwffwXFRVF\na2srx44dIz09nTVr1qDX61EoFDz77LNotVoeeughHn/8caRSKe3t7ezYsYOhoSHm5+dpb28nKioK\npVIpWJHdbjcLCwskJydz4sQJTCYTly9fRqfT0dDQQFxcnGBuKyoqoqqqivPnz9PV1UVNTQ0mk0lY\nIJOXlyec8RvVR2JiIuHh4cjlcrZt28bY2BgWi4WBgQGUSiXl5eXceeed+P1+RkdHGRoaIiEhgcXF\nRWZmZggEAixbtkwQ9H0VfCMqhejoaA4fPoxGoxHKyi1btqBUKgUycWJigunpaTweD9PT06xevRq3\n2y2oEsViMRERESgUCiEp3OgtXS4XDoeDjo4OQUp8g0Ssrq7mxIkTvPHGG5w4cYKJiQnGx8fZsGED\ndrtd6H9v3Hxw/da5//77aWpqIikpiRMnTrB//34CgQCffPIJ6enpOJ1OQcp8+PBh8vPzmZ+fZ/Pm\nzWg0Gl555RVmZ2dxOBzk5eUBCC5OhUJBVFQURqORe+65B6/XSygUErZA5eTkoNVqycrKIjIykpyc\nHMGnMTExQVRUFBqNhhUrVvDiiy/icDhYWFggPDyc+fl59uzZQ01NDb/+9a955JFHyMjIwO/3U1pa\nSjAYRK/X093dLdxWxcXFXLlyBYvFgtfrJTExkba2NgoKCoREdaOs7urqwmAwMDw8jNPp/JISdWZm\nBqPRyPj4OB6PR9ghoNFohLbxBsMuFoux2+3s3r0bmUzGyMiIoOq7wbSPjIzg9XppaWkhPT2d6Oho\nTpw4wdjYmLCX4oZXJCYmhlOnTmEwGEhLS8Pr9fLjH/9YOBu33347vb29BAIBQSKcmJjI66+/jkgk\nErQt9fX1NDU1MTo6ClyflgwNDREeHo7JZMJut1NSUkJJSQkPPfQQMpkMg8FAfHw8FouF5557jrNn\nz2KxWLBarcLiFpfLRV1dnUA63zB0JSYmkpKSgkqlIjIykmvXrglbwKRSKaOjo8zOzgqLbAoLC3E4\nHOj1ekwmE36/n7m5OS5fvvyV4/EbkRQcDgc2m42pqSkWFxfZt28fY2NjhEIh8vLyBAmq3W7HYDAQ\nGRkpeBNUKhVLS0tCNZGcnCxYSTdt2sTjjz+O3W7n8uXLrFixArguEU1MTCQ7O5uFhQXy8vLYtGkT\nSqUSt9vNtm3b6Ovro729XVhrZTAYmJqaAq63D//yL/8iyHOTk5ORSCSMj4/zzjvvcPbsWcRiMW63\nWzh0brcbsVhMW1sbdrudxMRE3nnnHVwuF/39/QDCWrQbq8COHDnCuXPn6O/vJxQKceTIEaqrq5ma\nmkKhUAhy3rm5OSFxZGdnIxaLGRsbw+/3k5OTw3333UdkZCS1tbWCE7S4uJi6ujqOHDlCa2sr9fX1\ntLS0YDKZMBgMBINBKisrSU1N5dq1a2i1WlatWkVsbCx6vZ6N
GzcyMTEhcDpDQ0OYzWbuuusuVCoV\n27Ztw+PxYLVacTgcvPfee9hsNkKhEGFhYXi9XrxeLzU1NbhcLoEd3759O/Pz88Iqvvfee4/c3Fws\nFgvj4+PU1tYKNumlpSWUSiUpKSlMTU2h0+lYXFykqKiIoqIifvGLXwhEY1JSEo8++qig8svNzeXz\nzz/n/fffF+TeWVlZgi7AarVSXFzMfffdh8FgwGw209zcTHh4OP/rf/0vgbi7IdC6evUqkZGRwvq2\n2NhYpFIpW7Zsoaenh6ioKDo6OoiOjhaEXxkZGcTFxVFXV0dlZSVZWVlfGn0rFArefPNN3n77bfr6\n+oSJlFqtJjk5mT/+8Y9IpVImJyeprq7GbrcTERGBVCqlpqYGp9PJ+vXraWlpQavVfuV4/EYQjX/6\n05+eWblyJUqlEpvNhsFgwOv1kpKSwgcffEB2djZqtZrS0lJGR0dZtmwZTqdTGEFu3LgRtVpNS0sL\nRUVF7N69m7/+9a/ExMSg0+k4f/48K1eupKGhgaysLJaWlli1ahXXrl1DJBIJbsWenh4qKyuZnp4W\nxj03rM/Nzc1ER0dTU1PDihUr2L17NydPnkQkErFs2bIvkWBdXV0UFhai1+uJjY1l06ZNNDc3Yzab\nKSoqEvY8+v1+ZmZmyM3N5dChQ9x1110kJSUhlUqZmpoSVoetXr2agYEBQdx1gzC9UR35fD5KSkrI\nysri+PHj2Gw25ufn0el0DA8Pc+rUKWFyYTab6e7uJjw8nN7eXoqLi1EoFGi1WkQikWBtFovFwq6I\nuLg4bDabQCDeeDaJiYlYrVZGR0eRyWQMDQ2Rnp7OzMwMDQ0NzM7OolarGRwcZP369UgkEtxut2Bi\nWr16NQ6HQyCKW1tbiY6OFpL70tIS6enpdHZ2Ul9fz3333UdfXx9yuVwg6G64JScnJzEajYSHhzM1\nNYXRaOTgwYOEh4dz/vx5IeAaGhpQKBQcOXIEj8dDbm4uMzMzeDwetmzZgkaj4eLFi4IDdm5uTrDc\nd3Z2smLFChYXF+nq6uJvf/sbFRUVhEIhmpubyc7OprOzUzi/N9baORwO1qxZQ2JiIiKRiFWrVtHR\n0YFSqeTYsWMYjUba29vp6OggNzdXWOt2wzWcmJiIVqvl9OnTjIyMUFdXh0qloqGhAZlMxunTp4Vt\nVvHx8YI3JzExEa/XK+xsbGtr+0pE49ey4v1/FyKR6H/+TdzETfz/H42hUKjkv3vRN4JojI+PZ//+\n/TQ3NwsmlRs3VWVlJUNDQ3R3d9Pf38/3v/999Ho9H330EfHx8RQUFDA9PS1sVGpsbCQYDPLCCy9w\n9OhR3njjDf7xH/8RiURCT08P3d3dzMzMIJPJyM3NRafTkZKSwvvvv8/y5csJDw8nNTVVMP7c2F6c\nnp7OX/7yF15//XWOHz/OSy+9hMfj4e6776a+vp6cnBxh+3N6ejqPPPII3/ve9wSLbnd3N4DgeLx0\n6RI6nQ6DwcClS5c4cOAA+/btQy6XMz8/L+xOnJiYQCqV0traitlsJjY2lvr6eqHcttlsdHd3s2HD\nBqxWKyUlJUIFMzY2xtatW6msrKSuro6FhQWUSiWpqan86le/4mc/+xnXrl1jeHhY2O50w2Z97733\nEhYWRkdHB52dnaSkpPDaa6/xm9/8htnZWQ4fPozRaKSgoACLxUJ0dDSTk5P4fD6amprQarXk5eWx\nuLhIbm6u8Nzh+lQoNTWVhoYGQaefnJzMc889x49+9CNiY2OFzzw/Px+JRMJ3v/tdfve73+F2u2lr\na+P2229ncHAQo9HIxo0bMRqNAsN+Y/XdxYsXCQaDvPbaazz77LNs3LgRq9VKXV0d999/PzU1NRQW\nFgrLT+68804SExO5evUqMpkMmUzGypUrBbPW+Pg4+fn5qNVq4uLiMJvNfPLJJ4Ki0mg0snbtWjQa\nDYuLi8TExPDHP/6RgoICrl27xvbt25mbm6O2tpZf/OIXtLS0EBcXJ0jSL126RH5+Pi+//DJvvfUW\nLpeLffv28eyzz9LT0yMoLnt6esjOzubo0aNs3bqVxsZGWltbeeihh1i+fDnPP/88//Iv/zd77xkd\n5ZWm7V6lnHPOpVQKKKOMSCKanMFgsOmmHduph3bbX3u1PXaP2+7kto2xARtoosEGDCIIgZAQQjln\nlXLOEsollarOD/rdZ2atb81wTn9nLc86s/8gQBJQ1H7evZ/nvq/796SmpjIyMoK3tzdtbW188skn\nT7QffxI9hdHRUU6cOIGBgYHAUQPs2bOHwMBAwsPD2bp1KydPnsTKyorAwEDWr19PcnIyV69eZWRk\nRLyxX375ZeHNd3Z2ZuPGjYKx193dTVBQELt27SI+Pl4g4O7cuSNINx4eHuTk5Aho7PDwMCYmJly/\nfl1wBG/duoWLiwu7du2it7eX0NBQAgMDkcvlQvX3L//yLwBCcejr64u9vT3u7u5CC+/u7k5fXx/L\nly8HHouXHjx4gIGBAa+88gr19fXMzs4yPDws7NkFBQWUl5ej0Wj47rvvyMvLw8XFhaCgIAF+9fPz\nIyEhAXd3d+Li4vjmm2/w9PQkOjqayclJysrK2L9/Pzk5Obi5uaHVagkODqa0tBRTU1MUCgU1NTUM\nDg6io6ODVqtlamqKDRs20NPTQ1paGg8fPsTCwkI0WHNycoiNjaW3t5fY2Fjs7e0ZHx/Hw8OD1NRU\nBgcHWbhwoUDWt7a24ujoKKjL0l06MjISHR0dlixZwtatW8nIyGDLli3s27ePl19+GUNDQ1avXs3Y\n2Bhr1qxhdnaW3NxcxsbGGBwcpL29XYjATE1NsbW1BcDb25uoqCjOnz9PcnIytbW1hIaGolQqUSgU\neHh4iHGsXC6ntraW+/fvk5qaSmRkpEAFdnV1cfbsWdG3OnnyJBcvXmTfvn3ExMRw/fp1Hj58iIGB\nAW+//Tb+/v40NzcTFxfHokWL0Gq1/OY3vxG9oYCAAKqrq5mbm8PX11cUe0me/t577zEzM4Ovry8T\nExNkZWWRlJSEjo4OcXFx/PGPfyQ0NJRXXnmF/Px8zM3N+eMf/8jQ0BADAwNCoSoh/55k/SSKgrW1\nNU899RQajUYo5KS7WkVFBcbGxlhaWnLjxg3UajV5eXkChiFBKqXRjo6Ojqjs1tbW9PT0MDo6KiAf\nY2NjQvDR1NREU1MTg4ODhIeHMzQ0hEqlAuDKlSuEhITQ1NREZWUlKpVKADC2bt0qOvVRUVHY2NiQ\nkZHB1atXsbKyoqOjQyDR3N3dMTQ0FHCR4eFhXn75ZeRyOWlpaaJJB4/tyYsWLaK6uprTp08LKK2e\nnh4ODg4EBwdjZ2fHK6+8IuCrc3NzDA8P09TUJExPnZ2d5OXlkZiYSFpaGhEREXz33XfU19dTW1uL\nQqEQVKN79+5haGiIVqulqamJe/fu0dbWhpubGzKZTNidY2NjCQsL4+bNmygUCkEPku71AwMD9PT0\nCNv37Owsc3Nz9PX
1ERQUxLZt2ygpKcHPz4/R0VE6OjqEg8/X11fQp6ysrPDw8KCqqopLly7x0ksv\n0dTUxDPPPMOpU6eEJ6Ovr4+8vDxCQ0NZuHAhZmZmyGQyioqKBJKsurpaCJkKCws5ceIEa9asYWJi\nAq1Wi1qtZsGCBUxMTPD000/T2NiIqakpRkZGrF69Gmtra5YuXUpFRQVRUVFERETg7+/PggULhAIz\nMDCQL774gh9//JGZmRmxMSXug76+Pu7u7qxevZqjR48KapS/vz+vvPIKo6OjfPjhh/j5+VFdXc2Z\nM2cAqK+vFzkQUtGrrKykv7+fGzdu0NnZiZ2dHRs2bCArK4vy8nIsLS05cuQI5eXlwsCn0WjIy8tj\n48aNT7wffxKNxiNHjrwnmYgCAgLw8vKiqKiIO3fuEB4ezt27d0lKSsLS0pK8vDwaGhqwsrKira2N\nvr4+7t+/T1dXFzKZTBiTzp07R0hIiJgOXL16lfXr11NUVCTsxqtWrcLd3Z2amhp0dXVZsWIFk5OT\nNDU1CfWhTCZj8eLFGBgYYGFhwcWLF1mxYgUajYacnBzR1d+wYQMqlYqWlhaxyZRKJV999RXu7u6E\nhoaSmpoqAj0knL1MJsPV1ZUffvgBtVotHHE+Pj44OzvT19eHSqVienqatrY24uPj6evrY82aNZSU\nlBAREUFoaChmZmaYmJjQ2trKkiVLmJmZ4cKFCwJGGhISgkKhYGpqitu3b5OUlIRarWbJkiWi4aXV\naoW/YmxsjKVLl2JpaSnAptnZ2djb26Orq0tzczMWFhYMDAwI8pI0+pROAZJz8/r162ICY25uLuAs\nN2/exM/PD1tbW0ZGRigqKuLZZ59FqVRy4MABXFxcMDQ0pLa2Fj8/Pzo6Oujt7aW5uZnm5maef/55\njI2N6ezs5MGDBzQ0NLB582Zu3rxJWFgYu3fvZmxsjJs3b7Jjxw50dHSEctPS0lIE3HR2dlJcXExS\nUpIwzNna2uLo6EhPTw+FhYV4e3tjZWVFdnY2g4ODIuciPDycgYEBXFxcsLKyws3NjaysLMzMzGht\nbRVCOIVCwUcffYS3tzdNTU1YWlpy8uRJxsbGuHDhAt3d3UKbkpWVJZrUfX19yOVyzp49S0hICCtX\nrqS0tJSQkBBMTU3x8vLi1KlT6Ojo8Je//IX09HTMzc1paGjA1NRUcDwzMzMpKip6okbjT6IofP75\n52Jpa/EAACAASURBVO89/fTTKBQKCgoKyMzMZMuWLejq6pKVlSVAIkZGRly/fp19+/ZhaGiIUqnE\nysqKqqoqgdjW0dFhYmKClJQUnn32WSF7npqaYnJyUvD7srOz0dXVJT8/H3hMfs7KyhJzZ4l/sH79\neiYnJ2lra8PExIQLFy6wcOFCRkZGsLW1paenB7lczuzsLIWFhYSHh7N582YCAwOF/n7BggVcv36d\ndevW4e3tLe6lo6OjghJ87949FixYIBRse/bswdjYGCsrK9rb24XXQiaT0dPTw40bN/jFL35BSUkJ\nnp6eYioTHx8vMGQODg4MDw8L78SjR49obGzk+eefx83Njfnz56NUKunu7iYmJoYdO3YQGRlJdXU1\nAwMDHD58WABZS0pKSExMRF9fH4VCIQAkDQ0NrFixgi1bttDa2kp8fDwODg60t7ezY8cOxsbG2Lx5\nM6Ojo9y4cUMwFy0tLVmwYAF5eXnk5eWxefNmrl69yiuvvEJWVhb9/f00NzfT3d3N6OgoarWa8fFx\nwWRwcXGhs7NTwHEOHz7Mq6++Ku7QS5cu5fLly+IOf/78eZqampg/f75QvkqA2sjISJKTk+ns7OTh\nw4dYWVkxNjZGcHCw0MNI48SVK1cKJsSVK1fYvXs3vb29FBUVkZCQQEZGBjExMWRlZeHr64udnZ1g\nVPj6+uLm5kZJSYngknZ0dPD0009jYGAgDFT37t3jrbfeQqlU4ujoSFtbG4sWLeLSpUtCeh0UFMTA\nwABBQUHExcVhZWWFrq4uN2/e5O7du2zatAlra2txktu+fTvffPPNf5+i8PXXX78XEhIi5uwDAwPi\n3qRSqYiPjychIUE42SS3n6enJ3Nzc+jr6/PGG2+Qn59PSEgIUVFRHDp0SNyNo6KiRN9hdHSUoaEh\nrKys6O7uxt3dHX19fQwNDYWX3dzcnN7eXlatWoWRkRH3798nODiYrq4url69Khh9cXFxWFtbAwh0\nd0ZGBrGxsVRVVaHVaomMjGR0dFSwENzc3Jibm8Pc3Jzi4mIMDAzIyspCqVSSkJCAh4cHzs7OGBkZ\nCVDHjRs30Gq1uLm5kZeXh5ubGzExMTg6OtLe3k5VVRVr165l2bJlmJubc/v2bR49esTo6CgajUZI\nZOfNmwc8lubm5+eLN6+JiQlr165FpVJRVFSEhYUFkZGRTE1NoVar6e7uxsTERBC1bW1tqa2tFbbt\nmZkZzMzMiI+PF294fX19SktL6erqIj09HScnJ/T09JDJZKLJqKury+rVqwkNDaWlpYXs7Gy8vb1J\nSkqiu7tbeAdcXV3x8fERVCGJWmVjY4OrqysVFRXs3LmTnp4e5s+fz/T0tAi3cXV15caNG+zYsYP8\n/HyhfJVGyN3d3Tg5OTE5OSmuOwYGBkJqD4/xexqNhn379gk/jJ2dHceOHUOhUIiYvra2NhYsWICn\npydxcXHcvXuXpqYmrKys8Pf3F1clKbtCOrGYm5tTVVWFRqOho6ODwsJCrKys+M1vfiNcm48ePRKO\nW+kKIl1Lg4ODqaqqYmxsjLa2NpydndFoNPT29mJmZoaRkRF///vfaWho+O/jfRgeHsbb25vh4WFO\nnTrF6Ogod+7cEUYjY2NjfvOb33Dw4EGBYJPmx6dOnSI/P5+0tDSRpyA9/aenp1m+fDllZWWEhYUJ\nnbmEIdfT0xOnE11dXaFclPBc4+PjVFVVMTIywvPPP09V1ePoClNTUzEnl4rCwMAAdXV1+Pr6ChxY\nVVWVmN3r6Ohw9+5dvv32WwYGBkhJScHJyUk84QHMzMywtbVFrVZTUFDA/fv3yc7OFoVEUq5JUWs3\nb97EwcGBiIgI+vv7aWpqIjs7GwcHB9RqNTo6OmzdupXy8nI6Ozu5ceMGJ0+e5LPPPiMjI4Nr166R\nnZ2Nj4+PwIR1d3fT2NhIa2sry5cvJzg4WPgeBgYGxElOogxJsWUPHz6ksrISpVJJenq68H386le/\n4sUXX0RPTw8XFxeRYxkYGIhWq6W7u5uRkREhDPPy8sLJyQl9fX0GBwe5ffs2Hh4euLu7o1AoUCgU\ngrzl6OiIVqslISEBW1tbXFxc0Gg0fP/990KtKIl2pqensbe3R6lU8s033zA8PExKSgoeHh6EhYXh\n6enJ6OioyGmU/m7BwcHs2bOHffv2ieyQmzdvIpPJgMd5EpJPJSYmBj09PTw8PFi4cCHHjh0TGRcH\nDx4kPT0dlUqFpaWlgNEkJyczNjaGTCbD2dlZ9LTs7Oy4fv06S5YsEWav9vZ2
rK2tBdSlsrISMzMz\n4TXR1dVl/fr1omjOzc3x6NEjuru7CQ0NfeL9+JMoCtL9KzAwkKioKIqKimhubmZ8fBxjY2NOnjzJ\n8PCwAF/euXOH6elpYZU1NTUV6DPJnw8Ia7OLiwsDAwNs3bqV9evX4+bmJhiEWq2WvXv3Cqz6vHnz\nUKlU7N69m/T0dDIzM5mamuKvf/0rxsbGAGLTFRQU4OjoSFdXF7q6utja2tLR0cGVK1dYvXo1e/fu\nxc/PTzAZDxw4QHNzs3gDSUahZcuWAY9DcUZHR+nr6xMTkaKiImQyGbt378bZ2RmFQoGzszNtbW34\n+voSERGBXC7nT3/6E7t27RJsSEtLSwFSbWxsxNPTkxUrVrB27VpmZma4ffs2FRUVJCUlCXy9p6cn\nixcvZnZ2Fo1GQ3t7O6Ojo1RWVjIzMyPMWAEBAbS3t1NXVyfCWTUaDfn5+YSHh+Pk5MSCBQt49dVX\nRVDK22+/zc9//nNBkmptbWXp0qXY2dkRGxsrpgRJSUmiwA8NDYnvv3//fnp7eyktLeXBgwecOXNG\nhA5LkFcfHx9kMhm//vWvuXv3Lrt27RKhKxK9KygoiOXLlxMdHc2xY8fYtGmTyB69fv06BgYGPPfc\nc6JpKhG4JyYmuH37NpcuXSI2NlYAVg8dOiRCau3s7NDR0REN208++UREyf3yl79k2bJl5OXlcebM\nGWHkSk9PJz09nYyMDPLy8kR4zUsvvUR0dDS5ubkij9Pf318wP4aHhwVo54cffhDYQZVKxY4dO5ib\nm2PLli3A4wKzefPmJ96PP4miIFk/582bh5WVFcHBwcjlcnJycggPD6erq4srV67Q29uLp6cnmzZt\nQqPRkJKSQnh4OLt378bCwoJHjx6JIyI8LgopKSmUlZXR09MjwKdSDFxKSoogC01NTRETE4NKpcLT\n0xNdXV2WL1+Oo6MjRkZGpKWliaJw//590RsYHBxEq9UK33x4eLgIPVWr1SKqbN26ddy6dUswJ6Xu\ndWNjo9D9S5749vZ2nnrqKaKjo4mJicHDwwOlUklLSwt+fn7CK1BQUEB7eztZWVk4ODgQGxsrCmRf\nXx/Dw8OcPHmSF154AQMDA7777juBNHvqqafw8PDAwMBAjKsePXqEVqtlw4YNJCYm0tfXh4eHB15e\nXpiamhIYGCg4CBYWFujq6tLY2MjIyIjI3pBCcZydnUWupXQd++GHH9BqtSIPsaioiEePHpGWlibo\nyGfOnEFPT0+8vtL15M0330RPT4+pqSlxbaisrMTOzo6MjAw0Go2IqgsODsbe3p6rV6+KrntFRQWv\nv/668GU8ePCA1NRUsdk9PT2xt7dn4cKFtLa2Ym9vj6+vL66urqSlpYmRt/Qa19bWArBt2zbkcjlT\nU1Pk5eWxaNEiQeySsjUkVkhzczN2dnYsXboUBwcHBgYGMDc3Z/Hixbz66qvs27dPBNSMjo5y6tQp\nxsbGaGpqwszMjK6uLjw8POjr6+PatWvY29tjbW3N3r17sbKyYmpqimXLllFcXMxf//pX7ty5g0aj\noaamhqNHjz7xfvxJiJckek91dTUzMzMi6cbFxUUcXaOjoxkeHhbNsfHxccE8dHV15fz583R0dJCQ\nkICnpycA1dXVeHt709DQwJ49e4QV1dbWlpSUFIGomp2dxdvbm5CQEGQyGUeOHGHTpk00NTWhUqmI\njo7G3d2dU6dOAfDcc89x+/Zt8QSWuvNSiGpYWBg2NjZMTExw48YNpqenSU5O5s0338TDw4Ouri5m\nZ2dxcXHB19dX/D0k+600F3/w4AFubm5C5KJQKCgvL8fGxkaErubn57N48WIWL14scgnr6urEXVWr\n1YrciNWrV4u0Ih0dHVatWkV9fT2lpaXMzs5y8eJFfvjhB15//XVGRkYwMjKioKAABwcHXF1dRa5h\nX1+foBir1WrKy8uJjo6mr68PR0dHEZ774MEDpqamGB0dZXBwkMrKSoaHhwkMDGT37t2YmpqKCHrp\nyevp6Sk68AUFBaxatQq1Wk1fX5/gHnp7e4s8SSnrQq1WU1hYSEREBCkpKbi4uGBiYiIs2TU1NfT0\n9LBkyRIGBwdpbm7m3XffFWNjKYFKShzr7OzExcVFFKU9e/bw6aefcvDgQW7cuCFCgdPS0jAxMcHW\n1laIhV577TXy8vJYtWoVTU1N3Llzh7y8POLi4mhtbcXf3x89PT38/f05fPiwOK319fWRkJBAW1sb\nra2tbNy4UfQkuru7efrpp/nhhx/Q0dHBxMREhPnevHlTpFHfu3cPLy8vdu3ahUKhICsri/DwcMzM\nzJ54P/4kTgpSo6qsrIzZ2VmOHz/OvXv3GBkZ4dChQ+jr62NhYcGqVatobm7mz3/+szBBPXr0iM7O\nTmpra+no6BBMP3h8bJKUZ7du3WJubk6gzl1dXYmPj2fZsmVERESQnJzM9PQ0b7zxBqamphQWFjI3\nNycMOl9//TXh4eHAY36eFEAideel/oRUkScmJrh8+TK///3vOXHiBEqlElNTU4KCgrh69Sre3t5k\nZWVhbW3NN998AzymAzk5OWFmZiYCPgYHB/Hy8hLFREdHR4BYLC0tSUpKIioqil27dhEVFcWtW7eo\nr68nJyeHmpoaKisrRQGVRnCrVq1i8eLFZGZmCidmc3Mz0dHRvP/++6IxKOG9pETr27dvizg8qQck\nneSk8ezY2BgpKSncvXuX8+fPk5OTg66urkDsRURE4OzsLI7b5ubmIuIOHjcGpVAahUKBq6srXl5e\nlJeXc+HCBcE8lDZXYGCg0BRIuhKp4RkfHy8MUe7u7iJ7VKFQIJPJKCkpYcmSJSIMJygoiKysLM6f\nP4+HhwcxMTHk5OQIp+GBAweYm5sjJCREaFY8PT2xtbXFzs6Or7/+GicnJ8HOtLCw4NKlS0xOTrJq\n1SoWLVrE7373OzZv3kxra6vwytjZ2aFUKjE0NBQMD5VKxblz50hOTsbNzQ13d3cRE7d//37mzZvH\n5cuX2bFjB2vXrkWj0aBUKtm4cSOurq6oVCoePHjAxMQEpaWl4hr1JOsnURRkMhnJycnY2dkhk8nw\n9fXF2NiYjIwMTExM2L17N/PmzcPJyYmvv/4ahULBU089hYGBAfr6+qjVajw8PMjLy2Nubo6lS5cC\nj8eMBgYGzM7OYmdnx/T0NAYGBtja2uLl5UV8fDz19fWUl5fT399PZWUlAQEBYqIxNzdHbW0tDQ0N\nmJubiyf6+Pi4sKX29fWRmZlJW1sbvb29Iu9AAoSuWLGCpKQkzp8/z1dffYW+vj4bNmwgMDCQQ4cO\n4e7uzoEDB4DHvZXJyUkKCwvR1dUlNDSUubk5bG1tKSgoYHBwkCtXrlBQUEBhYSEBAQGiv/DgwQNO\nnTqFvb09MzMzTE1N0dTUxOzsLGVlZQIlVlJSQmRkJEFBQRgYGAiGw8TEhMDQSX2Z+fPnY2xsjIeH\nB5OTk7i7uzMwMMDnn39OSUkJBgY
GVFRUiMahtHkUCgVJSUkoFArkcjnp6eno6urS2dnJzMyM6FkM\nDg7S1NTEyMgIa9asAR6j+e7du8fAwABvvfUWfn5+YiSdmJgIPGYjfPDBB5iZmXHp0iWBQTc3N8fG\nxgaNRoObmxtTU1Piju7u7k5ISAj379/HxMSEgwcP4ubmxvT0NJGRkWzatIm6ujpsbW1FhoUUSFtc\nXMzt27dFLqednZ04jU5MTIj3ijRhunDhAkePHkWj0TB//nx++ctfilwOqViEhIRQUlJCTEwMhYWF\nYh9I6Wb9/f3Y29vz8ccfk5WVJU6MdXV1HDp0iKqqKnR1dYXTMz8/n5///OfCnObp6SmarJs3bxav\nw5Osn0RRMDY2FvdSjUYj8OGTk5NcvXqV+/fvExERwf379/m3f/s33n77bYKCgujv72d6elpkPC5f\nvpyZmRlu3rwJINKnnZ2d6e/vFyOikpISgVwrLi4mIiJCOOxqamqEBFqacEjhGlZWVsD/DXlZvnw5\nTU1NGBoaiuOzs7MzMTExmJubc+HCBcrKyqivr2fevHmEh4eTlZXF7t27uXv3LmVlZRQUFIjG6N69\ne0lMTMTFxYUVK1YQHBxMZGQkcrkctVrNzMyM6JvY2toSFBSEubk5bW1tNDU14enpSXV1NY6OjsJ/\nn5CQgFwuFygvidGYmZlJf38/IyMjHDt2jLCwMExNTbl7967AvDU0NNDQ0IBGoxEBsTo6Oixfvhy5\nXI6rqytJSUmin+Pq6kpHR4doQkqKvn+/iYyMjOjq6iI3N1dIqp2cnPj4448BhHdFX1+fyclJKioq\nqKqqIjY2luXLl3PmzBnc3NwYHx9n3bp11NfXMzc3R3JyMtXV1RgaGqLRaLh79y79/f2iV5GWlkZm\nZqYIo9FoNNy8eZPPP/+coaEhITpzdHTE3d2dQ4cOkZ+fT1JSEtu3b8fFxYXe3l7q6+tFgw+gtLSU\npKQkoRhdu3YtWq2W6OhoTp06xaJFixgYGBAOSanf8ujRIzGp8ff3R61Wi9cEHgfM7t+/HzMzM556\n6ikqKyvx8/PjmWeeYcWKFUxPT2NnZ4dcLsfBwYGhoSE0Go3Iy3znnXfQ0dER+0o6MT3J+qd6CjKZ\nrAUYA+YAtVarnS+TyWyA7wAvoAXYrtVqh/+z7zM2NsbFixexs7MT/EMJx/7yyy/j5OTEt99+y9Kl\nS2lsbOTWrVsMDAxQVlYmKu+8efO4cuUKv//974Uu3dPTU/D2HB0dRTBJdHQ04+PjAn3m7OxMYWEh\nMpkMJycnSkpKCAsLo7W1lbNnz7Jv3z4RLwcQFRWFSqUiKyuLnTt3Mjk5iZeXF5mZmczOzgrzk6SY\nnJqaYmxsjO7ubpFnqKurK6YrklP1xIkThIWFUVhYiJ2dHc7OzvT09KCnp8f8+fOZnJwkIiKChoYG\nent7GR8fp7q6GoVCgYuLC42NjeI6I0WvDQ8PY2try61bt6isrGTDhg00NDQgl8t58803iY2NJSsr\ni9/97nciS9HMzExo/1NSUgTO/sMPPxSeC6kpqaury/z589FqtRgaGrJv3z5MTU0pKSnhhRdeQKvV\nsnXrVlHQVCoVnZ2dGBoa8tlnn/Haa6/h6uoqpN5dXV3s2bOH7u5uzp49y5IlS8jKyiIsLExsgPDw\ncIqKijh37hx79uwhNjZWTADm5uY4evQoarVaFE9AiJOCgoI4fPgwr7zyCs899xzt7e0C1/7jjz8S\nFBTEW2+9xffffy9OPZIsu6mpidraWnR0dEhKSgIenxqnp6dpaWnh1q1bpKamcvjwYe7fv09iYiKd\nnZ2cOnWK4OBgDAwMOHHiBMnJyXz//fccPHiQ1NRUcdK6c+cOb7zxBt988w2zs7Oo1Wrmz5+PSqUi\nNTWVpKQk8f9vY2NDamoqmZmZODk58eabb1JYWIiOjg6bN2/mV7/6FaGhoQwMDHD8+HEOHjwosH//\n1fo/cVJYotVqw/+dJfM3wF2tVusH3P3Hz//TJXVo/fz8xNEPoKenh6amJu7evYujo6OQHksOtF//\n+tciX0+lUuHj4yMksIA4SklJO9evXxe4tm+//VYErkqjQelzExISMDAwEGGdf/jDHzh79iyXLl0C\nELqH6OhowXRQq9WEhoYyOzuLg4MDQUFB2NvbU1NTQ0xMDAkJCfj7++Pk5ISPjw9ubm6i8Hn9I4pe\nArP4+fmxevVqGhsbycjIwNHRkeDgYMLDw3F3d8fW1hYDAwMaGhrElKC8vFxEokubuqSkhPT0dFJS\nUrC0tGTt2rVMT0/T3NyMoaEh27ZtE2o3lUqFhYUFoaGhwp0oSb6trKxwdnbmb3/7G0qlkt27d7Ny\n5UpiY2MZHh5GqVTi5+fHwoULmZ6extTUlJiYGFxdXYmNjRX5D87OzuIqcv/+fRwcHISUVxIKef0j\nul7iDRYVFZGUlCR8BVu2bCE9PR2FQsH27dtJTExkamqKCxcuIJfLycjIQKvVoq+vz/r168WprqGh\ngfj4eCoqKpDL5WRmZlJcXIy7uzudnZ2Ul5ezcOFCampqxNcplUreffddgfmTxrEjIyMiWyQvL48V\nK1YIYdZTTz3FuXPnsLa2Zvfu3TQ0NBAQECA2/sjICJ988glqtZqKigri4uJwcXEhNjZWBO7AY6LT\ngwcP+P7777l//z6jo6NCoWptbY1Go+GVV16hsLAQMzMzARw2NDQU5j4pEi8kJITJyckn3tD/X1wf\nNgAn//HxSeC/dGKoVCru37+PqampyBV8+PAh8Hhkd+zYMaqrqwXfvqenRzQnfXx8+MUvfoFGo8Hf\n35/8/HzhuJNmuSEhIaJDXVRUJEaKdXV1Ih6spaWFlpYWli5dSlZWFq2traxdu5b169fz1ltviTwG\nePxGSE1NFZh2S0tLvvzyS8zNzfHz86OwsFCIpHx8fDh06JAwROXm5lJRUSFUfHV1dWRlZQGP8y/0\n9PRQKpW4ubmJ2fK5c+c4fvw4w8PD1NfX09DQgKOjI6tWrcLW1pawsDC6u7u5efMmUVFRXLt2jejo\naEpLSzEzM2PevHloNBquXLki3I/W1tZERETwyiuvEBERgUajYXp6ms7OTtzc3Dh79iylpaV8+eWX\n+Pv7k5ycjFqtFqGmFRUVdHR0YGRkJPiWZ8+exdXVVTRe5XK5mNNLaUkSVGXTpk2sXLmSixcvYmRk\nJNKhp6enuXTpEp2dnSK01sfHRzQl9fT0WLRoEQEBARgYGDA6Oiq0HD09PXh7e/Pxxx+LJqmrqyuA\naJBKgTHr1q1DLpfT0tJCWFiY0JtIoJOWlhbS09Px9PTk22+/xc/PT8QESpHyAO+8847wvZSWltLZ\n2cmWLVuoq6vD39+fF198kYSEBIyNjdHV1aW7uxs/Pz8WLFggJO5VVVUCxXb16lUA1q5dS0hIiCga\n69at4/vvv+fu3bsUFhZibW2Nvb094eHhHDp0iEuXLpGRkSH6a9999x39/f1YWFgQHh4uMkGf
ZP2z\nRUEL3JHJZEUymewX//g1R61W2/2Pj3sAx//dF8pksl/IZLJCmUxWOD09jY+PD62trYyPj+Pu7o6f\nnx+6urrs37+fr776io0bN/Ltt98SEREh/vPOnDlDYWEhk5OTIgRm48aNrFy5EoCWlhZcXV1pbm5m\n165dLF68mM8++ww9PT22b9/O5OQk/f39dHd3s3r1aoFtk8jHALm5uXR1dTE+Pi6uBVFRUczNzWFs\nbIy+vj56enq4ubmxfft27t+/z8zMDHl5eZiYmAglX3V1NTo6OrzwwguYmJiIsZ6/v78oNtK/IzIy\nkgcPHlBZWcmqVat49tlnCQ4OFkw/KehESnf64IMP6OnpYXJyksbGRgIDA5mensbc3JwFCxYISpWZ\nmRljY2Ps3LmT1NRUwcSsrKykvb2dlpYWBgYGePrpp6mvr8fDw4OhoSEqKytxcHAQpiqVSiWOtyqV\nisbGRi5evIiDg4MIuyksLGTFihWMj49z+vRpbty4wczMDPr6+vz85z9n165d6OrqEhERgZubm3jy\n+vn5ER0dLcxFX331FV9++SWHDh0iOzubCxcu0NraSm5uLqmpqWg0GhITE4mMjGRwcBBHR0e2b9/O\n0NDQf5hqrFy5UlwZHRwccHR05NKlS8THxxMQECCmQwcOHKCmpoby8nJWr17Nxo0bBStifHycgwcP\nCgYjPIYOh4eHC4Lz4OAg9+/f59SpU7z88suUl5dz7NgxwbLYvXs3q1evJiQkREib8/LyqKurQ6PR\niIbgF198ga+vL729vYSFhXHlyhXCwsKIiooS9HFdXV2qqqpYtmwZ7777LuHh4Tx69Ej4c/T09ERS\n1fHjx594U/+zRWGBVqsNB1YDL8tksoX//je1jy/L/1uqklarPaLVaudrtdr50sjKxcUFuVyOh4eH\nGDeVl5dz7tw5Aav83e9+h729PaOjo6xevZqenh6USiV9fX0kJyeLkBCAhQsXUldXh4WFBS0tLQQE\nBLBlyxYhoZUq9uzsLNPT08LXLjW6JiYmWLx4MZGRkbS1tYkwGAk5n5ubK+bpPT09IkhWX1+f5ORk\nRkZGuHfvHrOzs+zduxcjIyP09fXp7e2lu7sbLy8vurq6hLR1bGyMwMBA4bZUKBQolUry8/Pp7Oxk\nbm4OQ0NDgV6X2JGGhoa4u7sLYCwggKvZ2dnijj88PEx/fz/vv/8+Hh4eaDQafHx80NPTE1r/8vJy\n3nnnHRYvXiwCd19++WWqqqrw8/OjoaFBvP5GRkakp6czf/58kWHR3NzM4OCgyK44c+YMJiYmhIWF\nsWHDBpEH+umnn5KYmCgSqHbu3AlAdna2CAN2c3MTdGcp62D16tUEBwdjZGSEl5cXfn5+2Nvb09nZ\nSWxsrOhDSTzD8vJyAO7du0dubi7Nzc0sXLiQhw8fsnr1alpaWoTmIjIyEmdnZ5KSkvD29ubRo0dM\nTU0JDuWqVav48ccf8fPzo6amBng89r548SKvvvoqenp6eHp6kp6ezkcffYRSqaS2tpaYmBhOnTol\nCq2Pjw9lZWWo1Woh0zYzM+OHH34Q6lYJ/b9z507a2tp4++23xThV6h0NDg7y2muv8dJLL1FfX4+t\nrS01NTVkZmYCjx8ynZ2dfPnll6KZ/STrnyoKWq228x8/9gGXgRigVyaTOQP848f/MsROR0eH3/72\nt0IRODExgYeHB+vWraOyslKYaZKTk4WtWAp5kUxHCoWC48ePs2zZMvT0HvdPc3JyaGxsFJr1dU0K\n4AAAIABJREFU1NRUfHx8RIRaVVWV4Aymp6fT0tIibLGSf72lpYUPP/wQHx8fERmfn5+Pjo4OCQkJ\ndHc/PhTdvn1bBL9u3ryZ5ORkFi1aJBpk0uddu3ZNjBELCgrw8fERo6OwsDDu3r3LkiVLkMlkXLhw\ngbm5Oc6dO4dGo6GqqkpMEerq6igrK6O9vZ3Q0FBmZmYEGzAyMpL6+noGBgaEbPz5558XuRdubm40\nNDTg5+cn5NJSJJ6LiwsPHjygp6dHNOuys7Px8/MjOzsba2trga6fmZlh0aJFGBoaYmlpiVarFT4A\ne3t7srKyiI2NxdfXlyVLlghjVVNTE/X19Zw8eVLg4//+978Dj5uzdXV1fPbZZ7i5ufH666/T0NDA\nmjVrWLJkCevWrRO9CylL8cKFC8ITUl1dLZiMWq1WbAZvb2/i4uLYunUrzs7OREZGijj63t5e4SmQ\nMhQWLVrEyMiISMtSKpU0NzeLfs6/h7fY2Nhw6NAhNm3aRHt7O6+++iqpqals27aNZ599VhSo5cuX\n8+jRI86cOSMYGM7OzuKIv2jRIubm5oDHYcObNm1iZmYGlUpFQ0MDZmZm/OlPf2Lnzp20t7eLdKuq\nqiq8vb0pLS1FrVazcuVKtFotb7zxBpGRkSIc90nX/+uiIJPJTGUymbn0MbACqASuAvv+8Wn7gB//\nq+9laWnJvXv3GBsb48MPP0Qmk4mjvaRILC4uxsLCgsTERIyNjYWnwNbWVmQzPvvss2RmZpKWlgY8\n9ig899xz2NjYCF96c3MzmzdvFjFm0vdYuXIl7777LgkJCZSVlWFtbS1oz0lJSQJCCo9DaJ2cnGhv\nb0ehUJCYmMju3btpbm4mMzOT999/n88//5xTp06xYsUKenp6sLOzo6amRohzLCwshAFJMkQ1Nzej\nr69PeXk5CQkJhIWFMW/ePFasWMGjR49EKG17ezvh4eHi2CqNTi9evMjY2Bjj4+OEhYXR3t5OV1cX\nixYt4uOPP8bJyQkjIyP279/PypUr+frrr9HT02N2dpZPP/2Ujo4OamtrmTdvniA5BwcHY2trS3Nz\ns5D6SgKexMREBgcHycnJEc0uc3NzDA0NUalUpKWlUV5eTllZGVqtFj09PWJiYmhvbyciIoKYmBjK\nysooKysTk53vvvsOc3NzfHx88PDw4NKlS8zMzHDnzh1Bc/rLX/7C0NCQQNdJ2H8PDw+MjY2FClFH\nR0eM4oaHh7l69SofffQRw8PD/8GsVlFRIZLEJUhqfn4+O3fuFCE8xcXFLF68GFNTU27fvi1cmJmZ\nmaJR7ujoSEhICO+99x66urq4uLjw/vvvi1NbUVERX3/9NQMDA6hUKszMzPjxxx/p6Oigp6cHAwMD\nURyTk5NpbGxkYGCAvLw8bt26xTfffMOGDRtITU3Fz8+PO3fuoNVqKSwsFIgBADc3NxQKBXfu3MHN\nzU1kqD7p+mdOCo7AA5lMVgbkA9e1Wu0t4A/AcplMpgSW/ePn/+kaHR0lOTmZ3t5eMZ6SMGj6+vrs\n3LkTBwcHIf6R4sY1Gg1NTU1Cz3/u3Dn8/f3FrLeyshIDAwMhOT579ixtbW0CKe/g4MCVK1cE+GJ8\nfBxTU1Pq6+tRq9V0dHSwfft2wcaTpgQvvPACFy9exNfXl7KyMvr6+kQvREqM6u/vZ9OmTXz55Zes\nWbOGa9euiXuzs7Mz3t7eaDQazMzMRACIs7Mzjo6
OzM3NiTeNZDGW1HaS8UdHR0fkUAQGBgol38KF\nC1GpVCQlJYmgm7GxMUJDQ3F3d8fU1BRjY2NSU1MFTcnf358NGzbQ19dHS0uLgMlIJ7SioiLi4uJ4\n++23MTExYW5uTvj+NRoNOjo6dHV1iaDXxsZGrK2t+V//639haGjIjh07+POf/0xDQwPfffcdWq0W\nX19fEX8nycMBDh48SGtrq9ik8fHxyOVyOjo6uHDhAr/73e8E87KqqoqBgQFkMhm6urrk5OTg6OjI\nU089hYuLCyUlJSxatAh47Ct57bXXBFI9KCiIuro6Tp8+TWFhIe3t7SxbtgyVSkVhYSGurq50d3dz\n/PhxamtrBcpNOjVJugszMzMCAwPFSLCvr48tW7ZgZ2cnGJhLlizB3Nyc8vJyFi9eTEREhHhILFy4\nEGdnZ7Zu3UplZSUxMTEAYhpiaWnJ4OAgc3NzmJmZUVJSIsAzUnyATCbD2tqaBQsWCBbEW2+9ha6u\nLnl5eXR2dv4/Qrz/D835f9b/rP//rP8+NGcPDw+2b98unp5FRUVs2LCBBw8e4O7uLvILm5qaSE5O\nJi0tjY0bN4rxmpTlFxkZKSCZ8fHxfPDBB2zbto22tjbq6uoYHBzE0NAQV1dXZmdn6e3txd/fn6am\nJtzd3WlsbCQhIYGioiLc3d1xdXWlrq4OZ2dnOjs7iY+PJyQkhL///e8EBgaSkpIibLWlpaXMzc0x\nOzvLxo0bBVw1ODiYhoYGBgYG6OrqQqvVCrv39evXcXBwICoqiiVLlvDCCy/g5+cnFG7Ozs6iwp8/\nf56tW7eK46JMJhO5kR4eHly4cIGAgABCQkIoKioiLy+PZ555RvyZk5OTHD58mGXLltHf349Wq2XL\nli3cuXOHyMhIiouLBVZt27ZtlJWVYWpqytzcHOfPn2f+/PnY29uzfPlyTp8+zZo1aygoKMDV1RU/\nPz8cHBzo7e3l2rVrbNq0CSsrK0xNTbly5QoREREA9PX1CS2JhGrTarU4OTnR29vLhg0buHjxojg2\nR0ZGCglwWloa7777Lmq1WiDjJBrSyMgIFRUVeHp6Cik1QEREBG1tbfz2t7+lvLyce/fuCeJ1Z2cn\nBgYGLF26lLa2NiwtLRkeHmZ6epqXXnqJDRs2oNVq+ctf/sKePXuQy+U0NTURHBzMkiVLqKur48CB\nA9y5c4eenh5kMhnj4+N0d3cTGxtLSUkJ09PTREREoFQqCQ8PF+lj0uTGx8dHhMBKPEypJ/Huu+9i\nZ2dHZ2cn9vb2AoibmpqKm5sbIyMjAg5jbW1NXV0dcrkcjUaDrq4u7e3torE9MzNDS0sL33///RPt\nx5+EzFlHRwdzc3Mh8liwYAG5ublMTk6KkUp7ezsJCQlcvHgRFxcXjIyMqK6uBkAulyOXyxkYGMDQ\n0JDg4GDg8ZTg+PHjKJVKzM3NWbhwoXD1ubu74+npiVKpZG5ujsOHD7NmzRqamprE0bmuro4HDx6Q\nm5tLR0cHRUVFwGOxlZRebWdnR25ursC129raMj09TWNjIytXrmRmZgZ/f3/a2tpITExk8eLFjI6O\nkpGRQWhoKDKZTER6PfPMMyKsdnBwkGPHjpGSkoJGo8Hd3Z2srCw++OADUlJSyMnJoa2tjZKSEgoK\nCnB2dubhw4dCxmtsbExBQQGnT5/G1tZWJEdLVwhHR0eUSqW4Pkhv3C1btnDt2jUsLCxob2+nt7eX\nF198UaDvL168iFwu5+rVq8KsJZl+JJ9CfX09ra2t/OlPfyIyMhI/Pz+OHTtGbm4u58+fZ3h4mNjY\nWGZnZyktLSUjI0Pch6Ug2cHBQU6dOoWXlxerV69m165dFBcXc+TIEWxtbTl69Cj5+fmkp6dTUlJC\ncnKyoELZ2toKk5w02Tl69KjgMRQUFGBqakplZSWnT58W7tDi4mLu3bvHe++9R319Pc8++yyVlZWi\ncbphwwY6Ozt5+eWXhetQKvw+Pj7Cp1JTU0NsbCyTk5PMzs6SkJCAUqnE2dmZ5uZmNBoNISEhWFtb\nExMTw/z589HR0RGiI3js77C0tMTLywtXV1fkcrnIifT29sba2lqMvGdmZti4cSNKpZLS0lJGR0dx\nc3PDzs6OnJwckRT1pOsngWP785///N6CBQsYHh4mKCiIlpYW9PX1sbW1JS8vj7CwMCwtLcnNzcXA\nwIC4uDiRFjw+Ps7ExARqtRpbW1tiYmI4ffo0d+/eJSoqiuDgYAYHB/H39+ebb75hamqK8fFxQkJC\nxL1OV1cXfX19jhw5IqSl9vb2YlIgaSgGBgbIyclBX18frVbLtm3bhPx0aGhIPOFlMhlDQ0NCnpyb\nmyuCUGdmZggPD6e/v19sorGxMVJTU5k3bx4eHh7C7qzVaqmrqxM05+LiYvbu3YuPjw8+Pj5YWlri\n6OhIXV0dkZGR+Pr6UlBQwNTUFK6ursTFxaFSqVAoFPz4448cOHCAjo4O0dCMjY1FLpdTXl5OdXW1\n0PTPzMxga2tLWVkZd+/eRa1W4+vri0qlwsTEhOLiYuRyuUg6TkxMFMo6KSjY29sbV1dXBgYGhBkp\nPDwcXV1dIiMjKS0tRaFQCJn6o0ePuHz5sjgdWVlZERsbi4GBAdnZ2ajVauzt7UUIq4GBAU5OTkxN\nTaFUKvniiy+oqanBw8MDGxsbhoeHWbJkCaWlpRQVFeHp6UlUVBT+/v5kZ2fz7LPPolKpaG9vZ/36\n9WLMPDk5ye9//3tUKhVjY2P09vaip6fHz372MxoaGhgfH2fZsmWUl5eTm5vL3r17yczMxMDAgKGh\nIaFd6e7upra2loqKCgYGBvDy8hLYv8nJSRoaGujs7OTy5cv87W9/Y+3atSJtKzMzE7lcTmJiIoGB\ngfz44480NjZSU1ODhYWFiLWTy+U0NDRgbW1NRUUFMTEx9PX1YWhoyMTEhBiFAxJt7IlwbD+J64MU\nbZWQkEB+fj7d3d309vbi6OiItbU1HR0dgtt/7NgxEX82b948hoaGsLe3p6Ojg7CwMOHeA9iwYQNt\nbW08evSIoaEhnn/+edzd3cWvnT9/Hn9/f/bv3092drZQkBUUFAgSlL29vTiqSw3MZcuW0dPTQ2lp\nKdHR0ZSUlGBra8v4+Dijo6N4eHgI6rJSqaShoYGuri6ioqKwsrISgag6OjrMzs4KXcWjR4+ora2l\nsbGRyMhIQUvq7u5mampKiIiklOfu7m4xKdFqtbi7u2NlZSXCdMrLyzEyMiInJwcTExM6OjpE/qIk\nqMrJycHQ0FAg6CT2gZQgvXXrVhQKBaGhoXR1ddHY2MiGDRsoLS1FqVTy/PPPc/bsWRQKBX/4wx94\n+umnqa2tFcnREnZ+aGiIsbExoTfQ0dERnoL58+dTWloKIEZ9NjY25OXlAWBiYkJZWRkWFhZs2rRJ\nRKRlZmby5ptv0tDQQGRkJGfPnhXz/vfff5+srCwSEhI4cuQIzzzzDPX19XR3dxMfH8/x48dJTExk\nfHycpq
YmoTb09PRk5cqVGBoaYmNjw2effSaO+AqFQjAShocf23lqamqws7P7D3miarVabOji4mJa\nWlpISUnBzMyMkZER5HI5bm5uVFRU8Mtf/pKJiQmh1g0ODuZf//Vf6e7uprW1lcuXLxMQECAiBmZn\nZ+nv78fd3V3kS7i7u4sEdoVCgY6ODvX19axatUrAjKUQ4ydZP4miAAiGob6+PgYGBnh7ezMwMMAX\nX3zBG2+8gZWVFfPnz+fSpUsiYPXy5cts3LgRQ0NDjhw5wp07d4R8GB4rGiVmgIGBAc3NzahUKuzs\n7Lh//z4BAQFUVFQwOztLamqqGDONjY0JslBJSQnPP/88cXFxQpdeV1cnYtynpqbw9vampKSEnp4e\nrKysUCqVLF++nNHRUa5evUpPTw8rVqwQacqtra2C9xgdHS0ks4mJiVRVVREdHY1SqRREqujoaDIy\nMti+fTtGRkY4ODjg6ekpbMeShdre3p4rV65gY2MjjuFeXl4ikOTLL78kNDSUZ555RsBv9+7dy8zM\nDJ2dnUxPT3P69GnCw8NZsGCBmCaYmpqKLIdFixbx+uuv884773Dr1i0OHTrEkiVLqKioIDw8HIVC\nwZ49ewTObN26dWg0Gk6dOkV0dDRmZmbY2dkRFBQkKMk2NjaC/iSd2qRJVFFREYODg+zYsQMjIyOa\nm5upqqoSQNdTp07R29vL4sWLOXbsGP7+/qxbt46enh5MTU3FfP7q1avExcXR09NDQ0MD/v7+Qi04\nNjbG9PQ0gYGB6OnpMW/ePPLz8wkNDeW7775jcnISZ2dnbt68ibW1Nb6+viJgVl9fn6qqKm7duiV8\nOOnp6cKrIJmSpH7AM888g6mpKU5OTpibmzM0NISTkxMymYygoCARNiw9eGJiYigqKsLX1xczMzOs\nra2xtrb+D1cCtVpNWVmZ0Lzo6+sTHh5Ob28vGo1GAIOfeC/+Mxv5/9SS3gTSU0Li+PX09PCHP/yB\nwMBAbGxsxJhwZmaGjo4OJicn6e3t5cyZM/j4+DA5OYlarSYoKAh4jPaSnlgKhYKenh56enr48ccf\ncXFxEclIP/zwAx4eHsK2HBISIgJnFQoFJ06coLa2VvQU1qxZg6WlJXFxcYSHh2NkZISFhQUuLi68\n+OKLBAQEYGZmRlVVFS4uLiQnJyOXyxkZGaG6uhq5XI6XlxcuLi7Y2NiIGb2BgQHj4+NUVlbi6uqK\nvb09gYGBDA0NERUVhYmJCfX19VRXV5OWloajoyMrV67E3NxckIfi4+PJzs6muLgYExMT0tLSeP31\n19m6dSs7duzA19eXEydOUFZWRm1tLZ9++im/+tWvRINKCl997bXXGB4eprW1ldWrV4vrUUlJicDC\nSU8oaeyVkZFBQEAAY2NjqNVqkQz+8OFDzM3NUSqVtLW10djYSEdHh6BP5ebmIpfLAYS4SdI8aDQa\nPD09cXNzY8GCBezfv1+kRLW2ttLb2ysk7hUVFRgaGqKnpyf4ChIQdmJiAjMzM86ePYutra3oaaxY\nsYL58+cLWbSUy2FjY0NDQwMLFiwQFOWVK1fi6OhIQEAA9vb2wGNmR2JiIkFBQVy+fFk0mj08PLC3\nt0etVpOQkEBQUJAAxujr64vXc2pqipKSEkZGRoT6FMDf35+xsTEhUJN8OpKmQUrD6unp4cGDB9TU\n1JCdnS0ANZJC09DQEBMTExHZ9yTrJ1MUDA0N8fPzY3x8nJycHLRaLfv37ycpKUkAObu7u0lJSeHK\nlSv09PQIGW9wcLDoFTg5OVFWVgYgItrgcSTb8PAwH330Efn5+bi6uqKvr4+xsbFQw8nlclQqFS4u\nLmzdupXw8HBaW1uxsrLC1dUVOzs7AK5fvy5UgBJmfmhoSCjtJK7iqlWrmJiYoLm5mY6ODhG/fu3a\nNWFKysjIEA7BkZER1Gq1oCVnZ2dTVVVFU1OTkGZLfYTR0VEePHhARkYGc3Nz1NfXk5+fj6mpKbGx\nsaxfv16AaXfv3k19fT2JiYlC+y9lBYSGhmJhYcHw8DCzs7OiWbpt2zaioqLQ09MT1460tDR0dHT4\n5JNPOHr0KLW1tRgbG9Pb20tBQQHJycl0dHSIJCN9fX0cHR1JT09n2bJlbNmyBV9fX9GYldyT0mkE\nHt99dXR08PLywsTEhI8++oj9+/dTXV0tkG2S+Gj37t0cPHhQzPpHR0f5/PPPBcuxpaVFmOMkmlZA\nQACBgYGkp6cLJufo6CjT09PCZlxYWMiRI0fIyMiguLiYQ4cOceLECeF/OHnyJCdOnAAeI/8mJiYI\nDg7mt7/9LZ6enpSUlHDu3Dk6OjqoqqqioqICjUYj0p4MDQ3Jy8vj7t27jI2NYWJiwq1bt4iIiBCv\nQ1lZmQgGMjU1pbW1ldLSUpydnYU5zNLSkvz8fAHHcXV1FbyNoqIiDh8+zNDQEN7e3sIt+iTrJ3F9\nMDY2Ji4ujtnZWQIDAwkJCeHo0aPCHqynp8fAwIAYO7a0tJCXl8ebb76JpaWlkKDGx8dTWlrK1q1b\n+eqrr2htbeXAgQO0tLRw+fJl8vPzWblyJW5ubujp6VFaWoq3tzdjY2OsWLGCGzduiCeLh4cH8Lhg\nSU9tSTK7bds2qqqqRHSd5FTs6+sTRqeBgQGmp6dpaGggISGBqakpJiYm8PPzIy8vT8BPnZychKhp\nYmICmUwmTELx8fEMDQ1x7tw57ty5g5+fHytWrKC8vJzIyEjCw8OF0OlnP/sZ3t7eAgkmBZIODAxQ\nW1srnvKGhobEx8cLoE1FRQXvvvsuv/71rzEzMyMs7P9i7z2jo7zPtd/fqI96QXWkUa+MupCoQgKE\nBIhqDNjBdnDZjuPY2dlur9dOcZzYfuOEeBuXxHaIK4QANsaYItElJCEhgYS6UO99pFGXRprzgTz3\nSc5Z6w3vPmed5b3Ofr4YxlogNPP8n7tc1++K5d///d/p7e2ls7OT69evi6MyLS2NM2fO4OXlhZWV\nFb6+vri4uHD69Gkee+wxwsLC+Oabb8jJyUGr1bJy5UoOHjxIbGwsqamp1NXV4eHhgUqlYmpqiqGh\nIUZHR6W3hruzhICAAFxcXETN2tzcLJXW/v37pXJUgm2Tk5MpLy+XLda7774roUDh4eHA3SrM3t6e\njIwMqfg+//xz2tvb2bRpE48//jiVlZUkJSWRmJgobtnNmzej1WqZmJjAwsKCmZkZrl69KsHCCjnp\n8ccfF+zZ4cOHuXLlCv39/URGRso2TKVSsXr1anJycoiOjmZgYAAvLy9aWlp48803BZev3BOKyzE2\nNla2EU1NTfT397Ns2TKamppQq9W0t7dTVFTEU089RUNDA97e3jQ3N2MwGDh69KiYqO71+k5UCjMz\nM7S0tMibr9frue+++0QxZzQaZQ14+/Zt6TO7urpETXflyhXa2tpISkoSK7Kio79+/Tqurq7Ex8dL\niIYiFTWZTISFhaHRaGSt2NbWxjfffCMOx0uXLtHZ2SnrLWWYpIS
SkpLEdj40NIROp2PdunW4ubnh6uqKk5MT1dXV4gO4du0av/71r0lJScHDw0OEVSaTCX9/f5qb\nm/nss89wcXGhvb0dHx8fJicnsba2Ji4ujvj4eJGnnzt3DjMzM5KTk4F7414/Pz/s7e0pKyuT0sFg\nMLBnzx7Onz/P7OwskZGRGAwGgoODUalUVFZWCtXIzMyM6elp+vr6CAoKks/vxo0bWVlZYWlpSYJ2\nfvCDHzA+Ps7MzIxwOX19fTEYDHR1dWEymaioqODw4cPiB0lISBBmR0ZGBr///e+xsLBgZmaGTz75\nhCeeeAKdTie9neTkZN555x0uXLhAYWEhTz/9tLynra2tWFlZCcVagRl/nesbcVKwtrZGr9czODhI\nWloaTz/9NDk5OahUKvEEREZGMjk5ybZt24S5UFZWRlxcHGNjYyQlJZGUlERzc7Ngr27dukVmZqZY\nWP39/QkLCyM2NhYXFxf6+vpktBkTE8PFixdFmOPg4EBtbS2Wlpb89V//Nb29vXLcO3bsmDzG0tKS\n4OBgsU+Hhobi5OQkIzorKyusra0FOKJk/Y2Pj0sathJce/DgQcrLy7l58yYvvPACIyMjMoZaWFjA\nzMwMHx+fP3FXzs3NsX79eoKDgzEajcKL1Ol0aLVapqamaGxsxMvLi4qKCnE+trS0iCV8ZWVFTijm\n5uasWrUKW1tbcnNzeeuttxgaGqKqqor09HRCQkJITk6WyPWBgQFiYmKoqKgQevIXX3xBX18fP//5\nz2lpaSEgIIDNmzfz4IMPUlFRwcrKCu3t7QIIMTc3l11WKa+UksJoNHL79m0+/fRTenp6KCoqoqen\nR0jHiqlI6Skp4a329vYkJiZSVlYGILF4Sv3v7e0tzAVFVNXZ2SnhxtHR0bi5uREeHk5AQAAbN27E\nw8MDc3NzDh48KJ+FpaUlZmdnOXXqFGvXrmV4eJjR0VH8/f3Zs2cPDz30kCxkClvS3t6enp4eKioq\nOHr0KMXFxczOzvL555/L633ppZd44okncHBwoLe3l4MHD9LX18eOHTvEGBYREcEzzzzDkSNHeOKJ\nJ6ivr5dNwGg0CpVKyZ78utc34qQwPj6OmZkZNjY2TE1NceHCBebm5lhcXMTT05Pf/va3Uv8rQTD/\n8A//QGtrK3Z2dly+fFmMUi+//DIffPABcG+27evry8rKCmZmZvz617/G3Nyc9vZ2iX7LzMzk3Llz\ntLW1kZOTw9zcHImJiRw/fhwXFxdWVlZ44oknmJycZHBwELhHiVZqQGXi4evrS1ZWFnZ2djIj9vX1\n5caNG1y/fp1Dhw4Ji2Bubg4fHx/c3d0ZHh4WSnRjYyPd3d1YWlrS399PW1sbdnZ2ZGZmcuvWLerr\n6/H29iYwMBA7OztsbW2pra2VAJRPPvmE/fv3U1lZSXx8PEVFRdjY2DA9PU19fT0pKSmSTuTt7c1H\nH31EdHQ0X375pRw3MzIycHR0FOtuVFQUlpaW5OTkMDAwIAIhJycnysrK2LVrF21tbRiNRvR6Penp\n6TzyyCOEhobywgsv8D/Ze/PgKMt0f//q7Pu+dvZ9D9kJSUggQTBhCYKIIAjuzOjoLOKIzqgHnRkd\nUTiO4oAigggMIJsgSAhbAoHsZE8nIftG6Gyd7nQ6S3//wPc5c75V5zuc+tWvylN13qouIAoJod/n\nfZ77/tzXtX37dqanpwkNDSUgIABfX18OHz4s3qQrVqzg66+/FlJgHx8fOjs7Wb16NSqVis7OTjw9\nPVm7dq3Y+ktfh6mpqbBhffXVV3z66acsXLiQP/7xj9jb23P16lURDJIKeJJG3sXFhc8++0zAY6Oi\nokRk2s/Pjzlz5qDRaLh58yaJiYmUl5eLBxcgMhCtra2ic7Nv3z7WrFnD5OQkcrkclUpFUVGRiCTL\nZDLxdxocHOT3v/89ycnJ/OMf/yA7O5tNmzbx/PPPs2PHDnJycti9ezcvvvgiTU1NgkV65swZwclU\nKBQUFxcze/ZskY+ws7NDp9Nx48YNAdvp6upiy5YtD3w//ix2CiYmJsybNw+VSsXRo0c5f/48MzMz\nGBoa8sgjj4j0mqOjI++//z67d+/mhx9+oK2tTQhU7e3txXy/5Hy0srJiaGiIvLw8oSifnp4mICBA\nDJhIbSqVSoWBgQHfffcd165d4/vvvxfFnp6eHvz9/UVWXSL32tra0tvbS09PDwUFBeh0Onp6eujq\n6uLYsWP4+/tjY2MjxmolHVp3dzeFhYUMDAxQUlIiahVFRUVi0KqwsJCQkBDu3bsnpkbNzc1ZtGgR\nwcHBmJqa0tLSglwuR6FQoFKpWL16NSMjI8JIvWTJEpRKpTAxm5mZiYSls7MzTz/9NA4ODmzZsoWp\nqSkUCoWofWzatInU1FR6e3sZHBykra0NtVpNbW0tXV1dnDhxQnQy1Gq1OP9Kzo6ioiK2bdvGnDlz\nWL16tSiuLV26FBcXF+Li4khKSqKlpYXo6GiWLl0KQGRkJMnJyZw7dw5jY2PKyspwdnZGq9UyMDBA\nbW0thw8f5sqVK3zxxReEhoZiY2NDTEwMy5Ytw8LCgm+//ZbR0VGGhoZIS0sD7rshNBqN8EwePnwY\nhUJBZWUllpaWYochofpHRkZEveXUqVPCKiVxFqV5lYiICDo7O+nr6yMjI0OkWE+cOMGrr75Kc3Mz\nGo2G/fv3k5+fj62trZhfuHr1Kh999BEff/wxH3zwATExMXzxxRfA/UVHOt5mZGTg5uZGQUEBP/74\nI6Ojo2L+QiJ+S3U1Gxsb7O3tmT9/PkNDQ8yaNYvnn3/+vzX78LNYFIyMjNDr9URHRzNv3jw++OAD\njI2Nefnll7GysmLevHlMTU3h4uKCr6+vmCqUsGVxcXHizbtnzx5OnToFwJo1a6irq8PHx4fdu3eL\np4SDg4NgKlZUVDB37lzi4uKws7MT2QgHBwdcXFzo7e0lJCSElStXiijvrVu3SE5Opq+vT1T4Y2Ji\nxFTbp59+ipmZGVZWVvT39wsC0dTUFNPT05SUlNDY2Eh1dbWYeoT7mLesrCzMzc3Jz8+ntraW5ORk\n9Ho9xsbGojAljQGnpaWRlJREWFgYa9euJSYmhg0bNmBmZkZHRwc7duwgNTWV9957D5VKRXp6ujBw\nm5ubExsbyyOPPIKnpyfOzs5kZ2ezYMECampqxGI1d+5cMWuiVCrF4FNgYKCYjpSISxKJStK2/+Mf\n/xD1A4ncXFNTQ0BAACYmJtTX1xMeHi6Q/HA/jmxlZUVkZCROTk64uLgwNjaGm5sbdnZ2Ygp28eLF\nWFtb4+LiwoULF1AoFMTFxWFgYCCOnn5+fty5cwe4f0Tt6enh1q1bYsw9MjISKysrDA0NhS80OTmZ\n/v5+Tp48ibOzs5hNkBwdRkZG1NTUiCev1KoNDw8XKLU333xTAHClMfGkpCQiIiIYGhoSLU+pHbln\nzx58fX1paGgQ/gsrKyuuXbtGbW0tCoWCPXv2EBAQwC9/+UvC
wsKEWm9ycpLLly9z4MABsrKySE1N\nZf78+Tz11FPcvXuXwcFBioqKuHbt2gPfjz+Lgajt27e/MzMzg0qlor29nX379oknb1JSElVVVXz1\n1VdcvnyZ0NBQFixYwNTUFGq1GiMjI4FMy8rKEoGOr7/+moyMDCoqKsjJycHHx4e+vj5sbGwEwv3A\ngQN0dHSIScnExEScnJwET/DcuXO0trZy/fp1pqenuX79Ov39/TzxxBNiMEdiOSoUCsbHx0ULaNWq\nVVy6dImamhqUSqWoHEtvkvHxcfr6+ggKCkKv13Py5EmefPJJmpqaKCwsZNmyZdja2uLj48Mf/vAH\nZDIZUVFRVFdXiwLh1atXsbS0ZGJigpmZGdLS0jA0NKSnpwcDAwMCAgIoKCigtLQUX19fysrKMDU1\npaysTMwFyGQyNm3axJo1a0hPT8fNzQ0PDw/eeustgYQbHx8nNTWV/fv3MzIyQkNDAwEBATg6OhIe\nHo63tzdZWVloNBoWLVrE5OSk2EF1dXUREBCASqXC2tqaS5cuCQtYVVUVfX19GBoaUlRUxI0bN0hI\nSCAmJoZLly5hZ2dHamoqGo1GhJ3CwsJ4+OGHKSgoIDY2VrAnIiIieOSRRwgODhYIv76+PlQqFXl5\neUL5Z2JiQk5ODg4ODixevJj4+Hiam5vx9fVFp9NhZWWFqampYGA89NBDBAUFibHxS5cukZaWxunT\np7lx4wZPPvkkZmZmODg4MD09zdatW/n000+JiYnBycmJxsZGkpOTBfF5ZGSEkpIScnJyiIqKIj4+\nnpqaGvbt24eDgwMlJSWUl5fT399PQECAWGwkGKyPjw8tLS1YWFig0+l49tlnqaysZGhoiK+//po7\nd+5gYGAgshxSS7igoICWlpb/OQNRk5OTBAYGCgOzgYEBc+fOJTs7m5GREa5du4ajo6Mg+1pYWAjU\ndWJiIgkJCcTFxVFcXIxMJhMeBa1WS1paGidOnGB0dBQLCwuMjIwEAXnVqlVMTEywd+9ezp8/z82b\nN8XkoYODA2+//Tbh4eE8/PDDREVF8cILLwCIDENeXh7u7u6CNCSp7CWZiqSAd3JyEuDRuro6bt68\niUqlYnx8nOnpaSFCkX7UarVs3ryZ/v5+ioqKSEpKYuXKlSKJFx4eLtByFhYWdHV10dnZyeHDh7l7\n9y69vb2Eh4eLhOPq1atFb76mpoaYmBiam5u5dOmSOKNaWlqyb98+wS2UeABSK/GHH35g6dKl6PV6\n/Pz8iI6OZnx8nLGxMby9vTl79qxQuBsYGPDb3/5WqNrMzc2ZN28eKSkp+Pj4COfib37zG0ZGRtBo\nNGL2ISEhgffeew+dTkd8fDx6vZ7+/n5mz55NXFwc2dnZbNmyRVC8pX79zZs3heF7YGAADw8PbG1t\nyczMBO5nViIiIkRNZHR0lOPHj9PU1MTAwABHjx6ltLRUPCgmJyc5deoUr732GuvWrROk77CwMMzM\nzMR7QalUYmtry+nTp2lsbCQmJkYkOpOSkrCwsECpVNLd3U1fXx8WFhaEhIRgaGiIl5cXBQUF7Nmz\nR4xfS5KalJQUYmNjGRoa4tixYzg7O5Ofny94Dp2dnQwMDHD48GFaW1tFCE4qjEqj5NPT0+h0Ol56\n6aUHvh9/FoVGCYAi0XRqa2tZvHgxJSUl2NvbEx0dTXh4uLjJm5qaaG5u5vTp09y5c4ecnBxsbGwY\nGBgQoRVADIfY2dmJ3n1nZyc2NjZiKz9//nzhlxgdHaWuro6YmBiBdOvr6+P48eMCTgKIMeqhoSEK\nCgqIjIzEwMAAR0dHJiYm0Gg04ukTFhaGj4+PIENLY8F3794lJydHCGAALly4QFJSEu7u7rz88sss\nWLAAnU6HjY0NiYmJGBoainHekZER9Ho9NTU1TE1N8e2335KYmCjm9Y2NjVm9ejW3bt3i4sWLeHh4\ncPnyZczNzeno6GBsbAw7Ozt6enrw8vLi6NGjAhUmJfjUarXogMB9BL2lpSVyuVyMtyuVSrZu3Ypc\nLqe7uxt3d3cWLlzIhQsXWLJkCXl5eezatYtXX30VrVaLVqsV7bG9e/cyNjaGs7OzELZOT0+TkZHB\n2bNnqaysFAIZKQfw3XffkZmZKepMSqWSTz5AvMXaAAAgAElEQVT5hKmpKfLz88Uu4Pz580RGRnLy\n5EngPppPr9eLQpyEzVOpVDg5OTF37lyGhoa4ffs2wcHBfPfddwwMDPDiiy/y5z//GVdXV+zs7Kir\nq2NmZkYMLpWWlmJlZUVycjItLS0iU3H8+HFsbGzYtGkTV69epbq6mt/97nc4OjpSVlZGcXExra2t\nrF+/nrVr1zI0NERiYiLbt28HIDY2Fi8vL4qKioS8+MyZM/T09PD444+j0WiYmZnB1dWVxx57DBMT\nE1pbWxkaGuKTTz7BxsaG559/nt///vf89a9/FanZB7l+FseHnTt3vrNgwQI8PT05f/48UVFRKBQK\nsrKykMlkbNmyhWvXrhEbG0tTUxPGxsbU19czPj7OrFmz0Ov1Yuy3o6MDjUZDRUWFeIo8/PDDhISE\ncODAAfR6PTqdjrGxMQoLCzE0NCQwMJDAwEABNNFoNBw6dEhEaaOjo3FyckKpVHLx4kXee+894P4T\nvaOjQ7Qlu7u7OXnyJLa2thQUFIik4OTkpABkWFtbi/CUj48PdXV1GBgYcPr0aT744AOuX78uKMKO\njo6oVCqampooKCigqKiIRYsWYWZmxpIlSxgfHyczM5Pc3Fx0Op34egEBQJFU5gsXLsTNzY3CwkJ8\nfHywtrYWrbOAgACR2gsPD0epVCKTyVi/fj0zMzO899575ObmipvQyMiIyclJ4uLiCAoKws/Pj7a2\nNq5evYqtrS2mpqYkJiZy5MgRXFxciIqKwsDAQEy5VlVVkZqaiqGhISYmJiItevr0aREWio6OBiAv\nL4/ly5djaWmJQqEgNzcXS0tLbt26RUFBgQDBxMbGYmNjI87bU1NTxMXFoVarOXfuHBs2bCA/P5/+\n/n4SEhIwNDSkrq6Oe/fukZCQQG1tLf7+/oSGhnL58mUefvhh4alwcXGho6OD2bNn4+zsTEJCAr29\nvRw/fpzXXnuN5uZmhoaGiImJoaOjg+effx5TU1Pu3LlDfn6+SIKqVCreeOMN7ty5w9y5c5k1axZ1\ndXVERkZy5MgRkaspLy/Hzc2N4uJigoODWbRokfCGAixbtgxDQ0Mxpj49PY1WqxXuzYCAALErAWhp\naeHrr7+mu7v7f87xYWBgAIVCQWhoKHK5HGtra1atWsWdO3dQKpUiHCSNCQ8NDVFbW4utrS06nY6Z\nmRmhPJPJZEKoIY3g9vb2cujQIVatWsXY2BhKpZKpqSkeeughsrKyUKvVHDlyhObmZhITE4Xa/YUX\nXsDHx4fh4WHu3LkjIqiDg4O0trZiYGBAUFAQ5ubmTExMEBQUJKLIEtHn1q1bIlEpBW3CwsLw9vZG\nr9eTkZGBv78/AO7
u7mJBiY2NFVhwpVKJtbU1WVlZ4nhVWFgo8glSoEbyV3Z0dGBjY4OjoyPT09P8\n+te/xsLCglOnTvHQQw+RkpKCp6enEOQ2NTXR1dXF4OAgeXl52NjY8Oyzz4p+va+vr6h6S0VKvV7P\n3bt32bVrl2ijBgcHs2rVKlpaWti1a5dwKUjHk4mJCUpKSli2bJkwf0nwWYn7ODg4yNWrV0VAbdWq\nVbS1tXHu3DlaWlpYs2YNZ86cwcbGhoiICEFoDggIYN68eSL9KoWzmpubgfvt6a1btzI8PMz4+DhF\nRUXY29vj6uqKhYUFsbGxVFZW8uWXXwL3cWg9PT2CbLRp0yY8PDxEq1cqKl6+fBlPT08CAgKIiYlh\n7ty5wlsZExNDQkICSUlJqNVqCgoKmDt3rhDmhoWFodfrsbCwwMfHB1NTUxYvXgzcP+489dRTuLi4\niNpJR0cHExMTHDhwQDwQVCoVS5cuRaVSMTIygpWVlZDWmpubo1AomJqaEvfEg1w/i0XB19cXc3Nz\ndu/eTWxsLBcvXqS+vp6qqio8PDwwNjZm3rx5jI6O4uzszPT0NFZWVjg5OTE+Ps7MzAzLli0jMDBQ\ntOMAcnNzBZsvJCSEiYkJ5s6di6urK/PmzSMnJweZTIa7u7sQcdjZ2ZGTk8P69esxNDQUUJYVK1Zg\nY2MDgIODAzMzMwQEBGBlZUVLSwszMzMMDg4il8tJSUnB3t5emJoCAgKEHVga8FGpVAwNDXHx4kXx\n9ebn52NpaUlmZibLli3D19dXtOvWrVuHqakply9fZt++fahUKi5cuEBeXh6VlZWi8Ojl5cXixYtp\nbm4WKPXS0lIUCgUPP/ywaMtJRzKpWDo5OUllZSUKhYLw8HABb8nPzycxMZHAwEAMDAyE1yE7O5u2\ntjb8/PyIj4/Hzs6Op59+mhs3brBnzx7q6up49tlniYyMFDuj2NhYMjIyiI+Px9ramoiICBQKBcbG\nxqL/HxUVJeAx4+PjDA4OolQq2bhxo8isuLi4MDo6ipubm/BBSFRmQ0NDOjs72bt3L5OTkzz99NPA\n/Y7R7t27RXw6ICCA8fFxUlJS6O7uFtIalUpFaGgod+7cEbZtLy8vUc/SaDRs2LBBAHfWrVvH8PCw\n8Hnu27ePd955hzlz5uDk5ER6ejoxMTFUV1fz0Ucf4e7ujrOzM5cuXeKjjz4iKCgIa2tr7t27h06n\nE5q74eFhbt68SWtrK/v376e2tpaysjIuXLhAT0+PUBwsXrxYFBUlbVx8fDyFhYUolUomJibEA+tB\nr59FTUE600tevH379lFVVUVNTY04U0dFRSGTybC1tWVgYAC5XM7p06f55S9/iVarZceOHZiYmLB5\n82b+9Kf7KsvW1lZRK/jmm2+Ek1JChbm4uIipN8mbeOPGDW7evIlGo8HLy4tly5ZRUlJCfX29sEKr\nVCpBebKwsCA9PV1AVC9cuICpqSm5ubl0dXXh4uJCSUkJ5ubmNDU1YWJiwuTkJBEREUxPT9PS0iKs\n03PmzKGyshJfX19aWlpoaWnhhx9+ID09ndHRUaGqe+211xgeHmbnzp2Ym5tjZWUl2nHSWPXdu3fx\n8PAQC6tGo6G2tpbs7GycnJyoqamhvLyckJAQkpKSiIyMpKurS8RmY2Nj6e3tRa1WC49DTEyM4DoO\nDAxgZ2cnot8JCQkCob5+/Xrs7e0pLCwkJyeHO3fuUFNTI4hGarWa3t5ejIyMCA4O5vbt27i4uAD3\ng13m5ub4+/vz1ltvYWtrK4anZs2axa5du8TO8cqVK1haWpKdnU18fDxHjx7l3//933nxxRfJyckh\nKytLeDpHR0dFeEwairKxsaGiooLJyUm0Wi2Ojo4EBgaK6dWEhAT6+/uRyWRUVFQgk8nEUUEaYrt+\n/TpOTk5MTExw5coVQkJCCAwMxNTUlPDwcGpra8Uxx8HBQfgoly5dSlFRET09Pbz77rtYWVmxePFi\nLly4ACCOd5IVqqioiImJCebMmcOnn36Kra0ter1eiGrs7OyIiooiNDRUdGnkcjnFxcWiCP+g189i\np2BpaUlwcDCOjo58+umn5Ofni+hrQUEBAwMDJCYmcvr0aRGZNTU15emnn6a/v5+xsTEef/xxgQyX\n+t6Ojo4EBATQ399PRkYGdnZ2oj1kZGQkjh6+vr6YmZlhYmJCWVkZtra2xMXF4eLiQmlpKQ0NDZSV\nlQmHQF1dHV5eXoKu1NrayqVLl8Rwj6GhoUi1GRgYiAk1nU6HmZkZvr6+tLa20tHRgZGRkTg+qNVq\nUS8xMjLC3d2ddevWcfv2be7du8eOHTsoLi7m/fff55lnnhHb/7i4OJGTkIxTXV1d1NXViSJmV1cX\nSUlJ9PX1iYXExsZG3GBjY2MC2GpsbMyZM2c4evQo9fX1REVFERAQQHl5OQqFgvj4eNauXUtVVRUl\nJSXMnj2bgIAAsTN69NFHUavVjIyM0NTUxPDwsBgJ37hxI729vcyZM4fo6Gi0Wi1eXl6ClG1gYIBK\npeLUqVNs2rQJb29vwsPDMTAwoLi4GDs7O8bHx1GpVBgbG5OTk8PKlSvFwvr4448zOjpKeHg49+7d\nEyPZSqUSJycnli5dSkdHB9HR0djZ2QnTk4RyX7NmDTY2NsyaNUt8L27fvk1jY6No0Xp7e4ukpNRS\nHRwcpL+/ny+++IKbN29SWFjIuXPn+Prrr5menmZoaIi4uDhqamqEiGj16tV4eHiwYcMGnnjiCUZG\nRsRgmEQb6+vro7W1lYmJCZRKJXfu3GFkZETo4TQajeiMjIyM8MEHH6BSqRgcHBSLbl5ennjvPsj1\nLxcFmUzmJZPJLstksjqZTFYrk8le+enjDjKZLE8mkzX99KP9P/2eLTKZrFkmkzXKZLJF/+pzGBoa\nitbW5s2baWhooL6+noiICGxtbcnIyKC2tlagy1xdXdm8eTO2trZCvDE9PS0GWaSnQ11dHQqFgrGx\nMS5evIiZmRk1NTX09/fz9ddfU1lZSWNjo8CvTU5Okpubi6enJ3FxcWg0GuLi4sjNzRVfo/QPJg0W\nSTIWKXIcHh4utscSMdjBwYG5c+cSGxuLhYUFzc3Ngkn4z47KwsJCbGxsqK2tFdLX9PR03n33XZKT\nk8nMzBQGpLVr1xIXF8fLL79MZmYmU1NTrFmzhtLSUuRyuVCcyWQyDAwMCAkJoaWlBaVSycDAALGx\nsUxOTuLg4CBGv69cucLY2Biff/45VVVV3L17l9u3b2NhYcGhQ4fo6+tDr9eL9J80fTkyMoKlpSUz\nMzM0NzdTXV3NhQsXBFA1PT2dN998k/j4eDIyMli7dq2QmDg7OxMdHS3CNXq9HrVaTU5ODv7+/mRl\nZSGXy7lx4wb19fV4eHiQmZmJXC4nKiqK8vJyhoaGGBsbEwBUCR9348YN0TGSAl8vvPACXV1d7N27\nFycnJ7y8vFi5cqVIu+r1ery8vHB1dcXDwwMHBwfCw8NFYXd8fFxMkMJ9JmV8
fDwDAwOihhMREcGy\nZctEh8DPz4/R0VFBIZcGvCYnJ/H29mZ8fJyRkRFu3LjBN998A/yHvMbExARHR0dcXV2FqcrCwoLZ\ns2ezdetWbG1tuXfvHsnJyXR0dIgCZ39/P7du3cLKyoqAgAABlHmQ60GOD1PA7/R6fblMJrMGymQy\nWR6wEcjX6/Xvy2Sy14HXgd/LZLJw4HEgApADF2UyWbBer5/+rz6BSqUSGvXXX3+d/v5+li9fLoCc\n0lappqaGp556iujoaFpaWggICGBycpKRkRFSUlJE5HfdunXs2bMHW1tbOjo6CA0NxcrKipqaGl5/\n/XWampq4du0af/jDH5g1axb+/v74+vqK4lZRUREGBgYMDw+LV1xcnOApqNVqqqqqGBoa4vnnn+fu\n3bu4urqSm5uLTCaju7ubGzduUFlZSVBQEF5eXpibm4t034EDB3B3dxckaLVaDSCOJPPmzaOwsJDQ\n0FBRWbawsMDNzY2VK1fS2toqMvWSFUqhUHD16lUiIyNpbGykqamJiooKIXydnp5GrVYTHh5OdXU1\nLS0trFixgvr6elFkXLx4sQjLSOdWiaAtdSWsrKzo7e3l/PnzZGRkIJfLqaurw9HRkSNHjtDU1CQQ\n80NDQwwNDdHQ0MD+/fuxsbFhzZo1DAwMEB0dzd///ndyc3M5deqUiCPPnTuXq1evcuvWLeRyOXZ2\ndoJkJKHWa2trBUfg9u3bvP/++7i7u7Nx40ZMTExoaGjA0NCQ2bNnc+7cOQBWr17N8ePHRTF7amqK\nDz/8kPXr1zMxMYFKpWLWrFmiG1FSUoKXlxcWFhYUFBRga2vL1NQU2dnZ1NTU4OnpCSBmP+7du0d8\nfDzZ2dkcOXKEhQsXEh0dTWhoKB0dHaJQODw8LI6x169fJzU1lffff58FCxaQnJwsFhszMzNBNJei\n9klJSYyNjXHq1CmamppobW3Fz8+PmZkZ1Gq1MK11dHSIorCUO8nKyhIR6v/Pi4Jer+8Fen/6uUom\nk9UDHkAuMO+n/20fcAX4/U8fP6zX6yeAVplM1gwkAUX/1eeQGAASONTBwYG8vDwmJiY4fPgw586d\no6mpiW3btlFRUcHly5cJDAxkenqapKQksd1cuXLlfyIvjY2NYWBgIPr7ycnJvPHGG6KusHnzZgYG\nBqipqaG4uJgdO3Zw+/ZtgYlftGiRIBJ9+eWXzJ49G0BkHaS2m1qtFts9KysrKisrhYhFyq47OztT\nXV2NRqMRW3lTU1P0ej0NDQ0AeHp6Ymtri1qtFiCXO3fuCMW9ZGTS6XTExsaKQtKsWbPIzc2ls7OT\n4uJiBgcHWbp0KQqFArlczs6dO5k/fz4WFhZiNHfFihWcOHFC8CSkbkRJSYnAgktZBmnst6enR+RE\nAgICqK6uxtLSEl9fX/bt24ezs7OozicnJ4vzdWlpKc7OzsyfPx8HBweuXLlCT0+P4GEsWLBA9P3L\ny8vRaDTY29szPj6Om5sbXV1dTE5Ocv78eVauXMnQ0JDIRcTExNDb28vSpUv56quvyMzMJCcnh/z8\nfKHZA7h9+7YIRNnb24sA1uTkJH19ffj5+VFZWUlwcDA3b94UO4Nz586J2pC9vT29vb2MjIyIGog0\n2LZo0SK2bdvGY489xo4dOwgPD6eyspI1a9aIMFRlZSUAFRUVGBoa4uLiwueff87q1as5c+YMq1ev\n5pVXXgHuA3fGxsZITU0VrWOZTIZMJkOr1dLa2kpKSgrm5ua0tLTw2GOPMTg4SF9fHwsWLODQoUNC\np7dkyRIxifsg13+rpiCTyXyBWOAW4PrTggHQB0iuaw/gn7+Crp8+9n//Wc/LZLJSmUxWqtfr+fjj\nj3FycsLV1RUTExNqamowNzfnlVdeYdeuXajVauRyOQ4ODqxfvx4/Pz++//57enp6KC8vp7W1lRs3\nbnDv3j3xBpuenmb//v0MDg4yNjaGn58f4eHhZGRk4OvrS2Njo5hwW758OZ999pkwJrm5udHQ0CBi\noxMTE+J83tzcjLGxsaj4JiQk4OTkREpKCr6+vixfvhwjIyNhU5LIOJaWltTX1wsst1KpJCEhQSxi\nDg4OnDx5UlSMKysrmZyc5Pjx40xPT9PT08OZM2fw9PRkaGgImUxGbGwszc3NXLhwgYmJCYyNjUXB\n09LSkvLyctzd3ZkzZw4tLS2YmJiI/IZ045ubmxMdHS0M1bW1tdy7d4/ExESxNc3KysLZ2ZmSkhKK\ni4vJy8sThqbW1lbWrVsnpMBbtmzBzs6Oa9euCYu2l5cXpaWlNDU1YWFhQVxcHI6Ojjg5OWFhYYG3\ntzdwvyWZkJAg8PSdnZ0sWrRI6PwsLCw4ffo0ZWVlovuRnJws8iipqans2rULJycnjh07JrBpktoO\n7qv2bt++TUNDAyUlJdy8eRNHR0eBj3N2dqa7u1so2zw9PTExMUEulwv0unTcuXHjBlu3buXq1asC\n1//ss8+iVCqF7jAyMhIfHx+USiX79+8X/1737t2jpKSEkydPotVqUSgUor508OBB/va3v3Hs2DHB\nYMjPz0epVPLjjz8KCnRTUxNJSUkMDQ2JY53E2ZSI0wcPHvxvjU6j1+sf6AVYAWXAip9+Pfx//feh\nn378FFj3Tx/fAzz6L/5s/f++/vf1v6//31+lD3KvP1BLUiaTGQPfAd/q9frjP324XyaTuev1+l6Z\nTOYO3P3p492A1z/9ds+fPvZfXr6+vrz00ktUVlbi4+MjAkzj4+NUVVWRm5uLQqHA29ubqqoqnJ2d\n6enpETmCWbNm4ebmxqlTp/Dw8ECv1/P73/+e119/nZaWFkJCQrCysmJgYEBIVZ944gm2b9/OoUOH\nRFJNyrFLBmsLCwsx1pycnMzo6ChPPfUUJ06cwNHRkb6+Pi5cuMCKFSvYvn07X375JSUlJTQ3N1NW\nVsbChQtJSEhAr9dTUlKCgYEBFy5cYNWqVbz99tvs3btXgF0ee+wxPvzwQ1FVd3FxYWpqSjzFNBoN\n5ubmAuX2z4YqqbjV3t4uJkFtbGxoaWnh2Wef5cyZM8yaNQuFQsHChQtRq9XCZJyZmYmPjw/vv/8+\njzzyCNeuXRNOwiVLlqDT6ejq6qKxsZHMzExOnDghJCw//PADixcvxsfHh+rqajIyMhgfHxdcylu3\nbmFnZ4eRkRG2tra88MILdHZ28tvf/pbs7Gxqa2tpbm5mxYoVTE1N8dZbb/Hiiy9iamrK/PnzRbJw\nenqaEydOEB8fL+Q7Op0OJycnTp06RUZGBtnZ2Xz11VcCxLpnzx5cXV1pa2tj27ZtfPfdd5SVlQmw\nTVlZGQkJCSiVSgIDA3njjTdYs2YN7u7uotPV2tpKVlYWvb29YhJ3aGiIK1euMHv2bJ599lkGBwdp\nb2/n8OHDuLm5UVdXh6urK93d3WzcuJFLly4xMTHBG2+8QW1tLUZGRpiYmFBcXMzAwADDw8OMjo4K\nAFBbWxubN2/miy++4MqVKyQlJRE
fH09tbS3W1tYimi1JjVNSUlCr1Zw6dYrf/OY3NDQ04OHhQVdX\nF+bm5lRUVBAYGIiLiws5OTkPcrs/UPdBxv2nfb1er//4n/7TaWDDTz/fAJz6p48/LpPJTGUymR8Q\nBPw/m6Tj4+NiDtzT05PU1FQaGhoICgoiJiYGuVxOQ0MD/f39PP/883R0dIjqcHBwMENDQ+Iv7+Hh\nIboEExMTxMfH4+joKNJ1bm5unDx5kjfeeINDhw7h5uYmJgslfqGdnR1eXl58+OGHLF26lPr6el59\n9VXy8/MBCAsLo7S0lObmZkJCQrh69Sq+vr4MDAwIZfzGjRsxMjKit7cXmUyGo6Mj8+bN4xe/+AVH\njx4VsVapcAb3WZWGhoYEBQXR1dUlCqgSDzI5ORlPT0+WLl1KRkYGXV1dWFtbs3DhQqqqqnB1daWg\noAC1Ws3Q0BCZmZlMTEwQFhaGo6MjycnJXL9+nc7OTiwtLXnooYc4f/481dXVpKam0tnZKToZdnZ2\nfPPNNyJ6a2NjQ3t7O3FxcYJ3GBwcTE9PD0eOHKG7u5tt27ZRV1dHaWkpZWVl5ObmCqbF6Ogof/3r\nXykoKGDNmjWkpKQQEhLCjh07sLa2FgNRaWlpuLu7o1QqCQsLIz8/nzNnzrBw4UI8PT25ePEi0dHR\nvPTSS2zevJm33nqLhIQEdu7cibW1NWFhYXz88cdYWloSEhLCggULgPstyaioKOrr6wVGLj8/HzMz\nMzo7O1m8eDFDQ0N4eHjwhz/8ASsrK+zt7fnhhx+Ymppi3759Av2+dOlSgdDbtGkTvb29JCcn097e\nTlRUFM3NzczMzDAzM8PAwACLFi3i+++/R6fTcfDgQdRqNSEhIeh0OlJSUsjJycHIyEg8iOB+TuG3\nv/2tEM1MTk4SEBCAgYEB8+fPF0wNNzc3LC0t2bp1K7du3RLJ2ZqaGiorK6murmZkZETcEw9yPUhN\nIRVYD2TKZLLKn145wPvAQzKZrAlY8NOv0ev1tcARoA44D7z4/+o8SJcEoJQitVZWVoyPj2NsbExd\nXR1r1qzh1q1bHDhwgA8//JDXXntN8ACTkpIYGBhAp9PR398vet7SxFltbS2WlpbIZDLUajXOzs6c\nPXsWHx8fkY2XcvfGxsYMDAwwPj7Om2++yfXr14mIiMDMzEwUlyS/gaGhIU5OTsJKbGNjQ2xsrGAG\nrF27Fk9PTy5fvoy7uzunTp3C1NSUdevW8dFHHwmN2NTUFAAzMzPCvajRaIiJiRFZeIVCIdyBer1e\nTBe2tbXR3d0t9PSSO1NiTpw8eRJXV1cUCoUYaZasV9XV1SIoJQFPR0dHiYiIID09nb/85S+C5KzR\naHB3dxeR3r/85S+Ympoil8vZsGGDgJV4eHig1WqJiorizJkzmJmZiWh5ZmYm7u7uhIaGUlhYSEBA\nAB0dHURFRYkwkBRf7ujoIDg4mF//+tc8/vjj3L17l4GBAZYtW4a9vT0HDx7kww8/pKCggPr6elat\nWsX27dtFV8DGxgYbGxtRU5BGmyWugaenJwkJCZSVlaFSqViyZAlpaWnU19eLzMvixYtZsGABjo6O\npKSk4O3tLbIcEkA1MTGRH3/8kfHxcR577DGCgoJYvnw5SqWSnTt3Cl+mXq+nqqoKFxcXvv32Wy5d\nusTw8DCTk5O0tbUxODjI7NmzReuwu7ubb775huHhYSwtLens7KSsrIyWlhZ27tzJtWvXhNdDcoTI\nZDIxZi9Zp9evX4+NjY1IjD7I9S8XBb1eX6jX62V6vT5ar9fH/PT6Qa/XK/V6fZZerw/S6/UL9Hr9\n4D/9nj/p9foAvV4fotfrz/2rz2FpaSl60iYmJmJWYdeuXdy7d4/W1laOHz9OYmIiW7duFdqt7du3\nC7Gqg4MDSqVSFBnhPrDD1dVVhIyCgoKYmpoSklUJhLpixQoCAgJQKpWYm5tz8uRJjh8/zt69e+nu\n7sbFxYUPPviA7777DrgvD5HQ301NTWzatAljY2O8vLxwcHBgwYIFHDlyhNHRUVpbWzl37hz5+flC\nV9bR0cH8+fOFRVkqYHZ3d+Pv7y90eSMjI4yOjorFMSgoiJycHBISEvD29haW5La2NlQqlZjncHJy\nor6+ntraWlasWEFjYyPt7e20tLSwbt06ent78ff3Z3x8nKGhISFulSLfKpUKjUZDZWWlCBhJMxIz\nMzOMj49z4sQJtm3bRm9vL++88w7vvvsuLS0tHDhwgF/84hfMnz+ft99+G41GIxyO9fX1nD17lpMn\nT3Lx4kWam5tFclJCxl+9epWLFy+i1Wr5/vvvqa2t5ciRI9y9e1cMfel0OgYHB6mtrSUhIUEccQoK\nCvDw8OCpp54iNjaWW7duiSSfdFN5enoSEhJCaGgow8PDrFmzhqCgIDFq7eXlRXp6OlFRUdjb26PV\naklMTGR8fFwAfv70pz/R2toK3F98JM7m7t27OXr0KDqdjg0bNjBv3jxWrlwpmJaNjY2kpKSwYsUK\nNBoN3t7e3L17l/7+flQqFX19fbS0tAD329NyuRylUilCbBYWFhQVFWFiYkJERAQ2NjYCiNPa2kpe\nXh5ZWVns2rWL8vJy7O3tqa6uprm5WYS7HuT6WcScpdVSIjQHBQUJKay0TZK27k888YToWUsyjbGx\nMRwdHent7cXd3V0MwUhTgY8++ihVVXoVqoMAACAASURBVFVkZ2dz5coV0c708/MjMDCQ6upq7Ozs\nRCfC2tqa5cuXC/nIJ598wtNPP01KSgrt7e24urqKVl5OTg5///vfWbp0qTBCS+3Jc+fOibbg2rVr\nef/997lz5w5paWnIZDJRmZZQWRYWFsKMdfHiRaysrOjp6WH58uWCmPTPNGBzc3NeffVVQRKS2AWm\npqa4uLgwd+5cLC0tsbKywtbWFjs7O8zNzQXurLW1FUdHRx599FG8vb3p6uqiq6uL27dv4+DgQHp6\nOuPj42Iysq6ujvDwcPz8/ITX8ZlnnuGRRx7hxIkTfPrpp5SUlPDiiy+yZcsWxsbGiI2NRafTcf36\ndTZs2MCxY8cIDQ1l9uzZQqQjfc3wHw4ER0dHjh07xpUrV/D39xejw3v37mXv3r3Y2dkRFhZGaGio\nWHCbmpr4/vvvxffW1dVV7JgmJydZvnw5t2/fpqenh5UrV5KYmIhWq6WsrExkVCT/hE6nIy8vj9DQ\nUOrq6ggKCmLHjh2CwHT16lXgfocrLCwMrVaLmZkZ7u7udHZ2Mn/+fMbHx+ns7ESlUrFw4UJBBHN3\nd8fV1ZXR0VESExNFHevzzz8Xk5ByuRydToednR1FRUUi6h4TE8OTTz5JW1sbYWFhXL58WVCkly1b\nRllZGS+99BJ3796luLgYlUpFSEgIAwMDD3w//ixizqampoJd19/fj62tLdnZ2cjlchITE4X01dLS\nkhdffJFZs2bh5+fH888/z4YNGwSINT09nYCAAFxd73dHtVotK1euFE/cyspKMVAi5esVCgX7
9++n\nqKiIkydP8vbbb2NpaYlarRZIdIn+FBsbC9yP4kqSFOlMa2pqKqKuly5dIjAwkLCwMHx9fUlJSSEv\nL4+NGzfyq1/9ioaGBmZmZjAyMqKrq0vgwk1MTBgeHqaiooLly5fj4uJCeHg4eXl5ACgUCurr6/ng\ngw/45JNPcHV1xczMjPj4eAHgOH/+PAkJCRQXF6NWqwUnYd68efT393P8+HEGBwextLQkMDAQCwsL\nVCoVBw8eRKVS4ejoyHPPPUd8fDw3btxg9+7djI2NkZ6ejrW1tRjjzszM5PPPP+e5555Dq9Vy9+5d\ntm3bRmNjI4ODg3zxxRfk5+fzyCOPMDIyglwup729nXv37omZEgsLCz777DO6urpwc3MD/gOxd/ny\nZZKTkzE2NqazsxM/Pz8WLVrEwYMH8fPzw9HRkYKCAry8vJiYmBDpyJaWFgIDA3nmmWfo7OwUeLOZ\nmRkKCgqIjo7G2dmZH3/8kerqatrb29FqtSiVSurr6/noo48oLi7m8uXL+Pr6ioG1wsJCBgcHmZyc\nZHh4mLlz5wL3pTDS2L5US5HEuAcPHkQmk3H8+HGOHTvGjz/+yEMPPcSWLVtYuHAh1tbWNDc3c/Lk\nSb766iuUSqW4J4yNjQkODsbExIQlS5ZQWlqKWq3mhRdewM7ODplMhqmpqZg9qa6uRiaTYWhoyMsv\nv8yPP/7IyMiIABFLU50Pcv0sFoXR0VE6OjqQy+XExcWh1WoZGRnBw8ODtrY22tvbqaysFLBSpVKJ\nSqXC3NxcFBEbGxt55513xAw/3Jer2NracvjwYfz9/dmzZw8tLS0cOXKEsbExent7GR4eJi0tjYyM\nDAwNDQkLC+PQoUNi6y/l4ysqKkQRqLKykp07d5KYmMijjz7K6Ogog4ODrFmzBqVSydWrV+np6aGt\nrY2+vj40Gg3t7e0C7x0dHU1tbS1NTU2UlpaKuHBsbCyGhobo9XoqKirQ6XRYW1szOjoqFjOlUola\nrRbbQUNDQ8zMzATOOzo6mitXrrB27VoUCoUwZ9nb2ws6lPT3lMvlZGRkCKZDfX09X375JZ2dnbS0\ntBAcHExcXBwODg7U1dVhbm6OWq0mPT2d1NRUMjMz0el0yGQyNmzYIGZJVq9ejY2NDXFxcfzjH/+g\nsLAQmUxGWVkZhw8fZmpqiitXrogttLe3t3iSpaSk0N/fL7yb6enpQqGnVqtRq9UsWLCAwcFBMSTX\n3NzM4OAg5eXlFBcX85vf/EYU2qSztKWlpeho9fT0EBISwtjYmNDxdXZ2iuOKtbW1OE7U19dTWFiI\nra0tSUlJ+P5keJZqF97e3uh0Onp7ewkODhZch56eHoKCgrh27RrT09NUVFQwPT3N3r17WbVqFQ0N\nDYSFheHv709VVRUVFRUYGRlx+/Zt4P6sikql4u7du3R1dTE8PExGRoYAtPr4+IijpjRod/PmTQAW\nL14sJnidnJzIz8/noYceeuD78WexKFhbW5Oens7FixcpLy/n3LlzlJaWolQq6ejo4OLFi4yMjNDY\n2EhAQAA+Pj7A/W3Y2rVrsbe3Z/PmzcjlcoaGhigsLAQQJOM5c+ZgZGREWloaW7ZsISoqCgcHB2Fp\nGhgYwN/fn1deeQV/f38iIyPFjfnDDz8QExMjRmThPlTzd7/7Hbt27RJQkdOnT6NUKrl16xZOTk4i\nMGVnZ0dAQACrVq3Cy8tLoOIleEd2dragRJ89e5bh4WGio6O5dOkS7u7uwknh7u6Oj48POp0OHx8f\nwsLCRFX6xIkTODk5oVarKS4uFpFcR0dHAfg8e/Ys7e3tTE5OYmFhQWFhIW1tbcLAJYW7nnrqKRQK\nBaampnR3d+Pl5cXMzAz29vaCdnz79m3q6+vp6+sjPT1ddDGysrJ4+umnycjIIDAwULQBvby8iIqK\noqGhAblczqFDh2hpaeHEiROMjY0J1wHc390FBQWh0+nw8/Pj3XffJSQkhG+//ZaoqChmZmYoKioS\n0tXp6WnMzc3p7++nsLCQ6Oho1qxZg729Pdu2bROcBunINzIywuzZs2loaECtVvPtt99y9+5dCgoK\n/pOrs6urCyMjI9avXy/Gq0NDQ1EqldTW1grhUGNjI1lZWQKJJ72nYmNjmZiYoLa2FjMzMwA2b96M\nRqPBxcWFhoYGZDIZHh4eosMwMTEhQkZTU1N0dnYyOTlJRUWFSFfm5+eTn58vkp6mpqZs2LCBxx57\nDLlcjoWFBdnZ2YSEhHDlyhWys7NZtGiRMGU9yPWzqCnMzMyIf/RTp06JbWBBQQHt7e1YWFjg5OQk\nvkGSSCQzM1MUfDZu3MiyZcuwtrYWK6anpydXr17lueeeo7W1VQg6pThxZ2cn//Zv/4apqSnLli2j\npqaGsrIyYmNjUSgUGBkZkZqaKnrvEqJt2bJloiNSXV3NDz/8gFwuJy0tDRMTE1xdXYW3oL6+HjMz\nMyoqKoTWTUK7/fnPfyYjI4OwsDDg/lZ01qxZnDlzhjfffJOamhpiY2NxcXFhaGgIjUYj5CMSzr6l\npQWNRsPw8DD+/v54enqKtKGPjw+Wlpb09/fj4OBAW1sb5ubmQmHW0dHB4OAgN2/eJCwsTCjUiouL\nhVzG1NQUJycnDAwMcHZ2FtOVxcXFJCQk8Nlnn9Hd3c3ixYsFK8LIyIi4uDjCwsIwMjLCycmJgYEB\nkpKSSEtL49y5c+h0OrKzs9Fqtbz99tusXr0auA83CQkJERSjv/3tb9TW1vLVV19RWFhISkoK27dv\nZ86cOXh4eHDs2DFCQkJYunQpc+bMYfv27TQ2NtLc3Iynpye7du1i9+7dFBcX4+fnR0dHhzijS0Ke\nxsZGUlNTSUpKErBbhUKBRqNBLpcTHR0tEH4rV67kyy+/FDWQyspKbG1tSU9PR6vVioi5NDodGBhI\neXk5lZWVjI+Ps379eoG4s7CwwMzMjNjYWBFnltgaGo2Gc+fO8dBDD9HV1SUGryQWyOTkJIODg2i1\nWiorK3F3d+ePf/wjkZGReHh4YGpqyty5c3F0dBTt3Qe9fhY7hampKSYmJqiqqsLNzQ1fX1+RTX/2\n2WeZnJzE2dkZLy8v7ty5Q0dHB42NjaLi7+npyeOPP05NTQ0TExMi5z00NERqaiqtra2CaSBp2H/8\n8UeOHj2KXq9nzpw5pKenMz09zYsvvohGoxGz9f7+/ixfvhwnJyccHByA++OyHR0dfPzxx5w+fZrj\nx49z/fp1Qd01NzensLCQsrIyHn30UUxMTLh9+zbl5eX09fXh6upKWVkZTz31lLA4A8TFxVFVVcXY\n2Bj19fXMnj0bvV4vWleDg4NiCyy5Jnt7e7G1tRX5iN7eXoHF9/b2xtraGj8/P9rb2/H29mZmZobw\n8HDBMzAxMRH1kZ6eHpqbm/n000/5/vvvOXr0qNgdlJSUkJCQIJR1v/zlLzl+/DheXl588cUXnD17\nlldeeUWAQGxtbYmJiWFgYICRkRF27tzJq6++ikK
hwMfHh2vXrok8xcGDBwV+TYLfWlpaCpp1WFiY\nGHevr6/Hzs4OuVzOnj17CA4OZu7cuSxZskTMChgZGZGQkMCcOXNEgdrKygpjY2MUCgXOzs7cunUL\nY2NjWltbMTU15fz589TW1gpQTmBgIL29vZiYmHDixAn+8pe/0NXVxfnz50lOThYc0NDQUAwMDIRg\n19vbm/379/PWW2+hUCgoLy+npqYGrVZLbm6uUNwPDw8zZ84ctFota9euZXJyUqgC4P6R+vXXXycq\nKgp3d3fmz59PZGQkX331FTt27GDv3r3k5eXR19eHl5cXWq0WQ0NDIiIiMDU1xdHRkaVLl6LRaCgo\nKBAt3we5fhaLgq2tLVFRUeLs5OrqypUrV4TAY3BwkIGBAe7du8ft27fRaDT4+/vj6upKb28v165d\no7Ozk7S0NDo7O4UAND8/HxcXF44ePcrMzAxlZWVotVpGR0fR6XQsWLCA1atXiyGY/v5+NBoNkZGR\n5ObmihDKN998g4GBgZB/dnZ2CpLQc889x69//Wtmz57Na6+9hrOzM5OTk+h0OjZu3IharWZ8fFyA\nOlxdXcV4cGpqKvHx8WKxmZiYICoqCi8vL7GFTktLw8DAgPb2dqanpzlw4ABdXV24urri5uaGsbGx\nCH1JHRsPDw/+/ve/c/HiRXJzc6murhYZigULFqBWq5mYmKCpqQlXV1fhfDQ3N8fCwgKtVsuhQ4cw\nNTXF29tbTElKhV9zc3Ox/TY3N+f06dMsWrSIzz77TNirJA6BVDwNCQnh4Ycf5vz58zQ1NTE6Oipq\nQdPT0zzxxBMAwutZUlLCk08+yb179ygvLxe1k7S0NF555RWxmE1NTeHv709bW5uonQQFBbF69Wra\n29upqakBEKh2iZIsFbP/9Kc/kZKSQlZWFgYGBsINGh0dzfT0NJ9//jn79+/HzMyMvr4+0RGQuAfS\njmZmZkZ0JCYmJggMDCQuLo7Ozk4OHTqEpaUlfX19nD59mpdeeonh4WG6urqEwm/t2rUUFRUJwGpY\nWBgqlYpLly7h5OQEQHt7OxMTE1RUVIixaInRKFnOXV1deeaZZ1CpVNjZ2WFvb8+8efPE9+FBrp8F\nuHXbtm3veHh4YGBgwKxZs5gzZw7m5uYMDAxQVVVFQEAAcrlcpAIzMzOJiIhgamqKkpIS3N3d8fX1\npaioCK1WS1dXF9evX2fr1q3izfvYY49RWFhIV1eXCOvcuXMHlUpFSkoKKpUKPz8/wsLCMDU1FTuR\nmZkZRkZGcHR0FFtPPz8/Hn74YUpLS4UMddWqVeh0OvHEMzAwwMDAQJihLC0t+dWvfiXMx5Klqrq6\nmri4OL799lu8vb0FBi0nJ0f08Jubm4mIiECr1XL27FnBJJTL5bi5uTFv3jwaGxuZmJhAq9XS3t6O\nk5MTIyMjODg44OjoiKWlJebm5rS3t+Pu7o6bmxsdHR309/eL7o1Op6OyspK0tDQiIiKEGDUyMhJA\nFMLa2tpobm4WluzAwEBqamrEIjExMSGKqgMDAwwMDLBw4UJyc3N54oknOHToEOvWrSMyMpILFy7g\n4OBATU0N165d44UXXhAi3urqamJiYrC3txfiEy8vL65fv86ZM2e4cOECGRkZ/4kiJRmcOzs7KS8v\nR6VSUVNTQ3JyMj09PdTX14tjg729vUi6KpVKxsbG6O/vx9raWkzs/uIXvyAuLg5fX1+8vb1JS0tj\n27ZtODo68v3334uotJmZGWq1mhs3bhAeHk5YWJjockgC4KefflocvyREvTQEeODAASorK7GysuLS\npUuivWhlZcXcuXMxMTHB2NiY4eFhMQRmamqKr68vgYGBjI+PM2/ePGFHO3PmDNevXxcBqIULF/Lt\nt9/+zwG36nQ6EhMThe5bemJK04JJSUnY29uL7H9dXR1lZWWMj4+TnJwsnAAymUxYfOB+BVen05Ga\nmsrf/vY3hoeHqaurY/HixeTk5GBmZkZpaSk1NTU0Nzdz9+5dLl++zJkzZzhz5gylpaUYGhri6urK\n8PCwaBmlpqZy69YtXnrpJd58801CQkLo7+8nMjJSJOx0Oh1HjhwRmDl3d3cKCgpYu3YtOTk5WFtb\ni7OsFHOW7NphYWFoNBouX75MdXU1hYWFXLt2jaamJsrKysQkob29PXZ2dpSWlv4f9t47Ks7z3Nu9\nhhlg6H3oMPQiQIgihBCodySr2JZkybYk920nx3FJnDhO4iQ7246dbkuWq+zYkizJklVAAoRRA1FE\n71X0MgwwdAaYme8P8j5f9llnZWuvtc86Xmd97z8yIwOjgXne57nv331dImMg0aNXrVol4tIqlYqO\njg4mJyeprq7GyspKGLcmJiawsLCgv7+f/Px8Tp8+zeLFi/nxj3/M6tWr8ff3R6/X4+TkhFarxcfH\nB5PJxNzcHJs2bWLDhg1UV1czOzvL1NQUWq2W2NhYampqGBkZYW5uDicnJy5evMjc3BwTExOo1WpB\nbXJzc2PPnj1cunQJWNg1lpSU8MMf/hC1Wk1mZqY4Nzs6OtLe3s7nn3/O8PCwENVeu3ZNYP8zMjKo\nq6ujurqazZs3CwSbdFyKj4/Hz88Pg8GAwWAgNzeXwcFBfH190Wq1xMXF0dnZKVqWzc3N+Pr6imj4\n5cuXWbdunYhlSyxMg8EgIs7SDe3SpUtYWlqybNky0tPTCQsL49q1a3zzzTfs3r2b0tJSPvzwQ0wm\nEwUFBSQnJwueggR6kYJkUnFS4pnOzMygUqkoLCyksrJSpFGlboq7uzsBAQE8/vjjxMbGolar7/v9\n+L1YFGQymQCFbNq0ib6+PlQqFX5+fqSnp1NYWMiNGzcYGxtDLpczOjoqKvE2NjZMT0+Tk5MjhlOk\n2PDQ0JDAsC9evJjCwkIBYp2YmGBkZITg4GDkcrkAiJqbm4v0XH9/PwUFBYIpIOXdJatUcXExTz/9\nNJWVlQQGBpKdnc0777zD119/LXL4UnLy3LlzLFu2TEBnNRoNTU1N1NbWisqwWq1GoVAwPj4u0mga\njQalUolOp8Pf359Vq1ahVqspKCjg7t27GAwGnJycsLCwIDo6mtu3b9PR0UFbW5sotn388ccEBgbS\n39/Pli1bmJqa4pNPPmFyclK0+vz9/YmMjBTDYxqNhs7OTry9vbG3tyc3NxeZTIbJZBK0qytXrggJ\nrbW1NU5OTjz88MOioq9Wq1m5ciWenp50dnZSVVVFdXU1hw4dAkCr1dLR0YFCoRBHvtraWqHv+2fQ\nSFZWFq2trUxPT/Pqq6/y0ksv8cwzzzA2NkZQUBDJyckkJycTHR3N5s2bUSgUgvcofS/pLjswMEBA\nQACtra088MADAvhrMBg4d+4cNTU19PT0YG9vT0ZGBnfv3mXDhg3C9m0wGEQWZseOHYLhYGZmxsjI\nCGVlZcLCZW1tLVDsErJ/9+7dIoG7ePFivvnmG/z9/VGr1eLr9vX14e3tjb+/PyaTCblcLiDGzs7O\nhISEoN
frxc1AoklLCoP4+HhCQkJwdnbGwsKC06dP3/f78XvRfZCCP88++yy3bt2irKxMdAeeffZZ\nli5dyqpVq6irq8Pa2hq1Wi1U8lIr7u2338bJyQlra2uBOjM3NxfFt87OTl5//XVGR0fJzs4WC4v0\nYq9YsYKGhgZ8fHyQy+Ui9CL1sGtqati2bRuXL18WLD6dTsfOnTsFo7GpqYn33nuPwMBAhoaG+POf\n/yzSbGlpafj4+DA8PMyRI0cwGAykp6fj6+uLSqXi66+/JiAggPr6es6cOYNOp2NmZobf//73vPnm\nm5hMJpqamnj00UcFn0DalSxdupTY2Fi0Wq24gw0MDJCSkoKZmRlpaWm4uLjQ3d3NwMAAk5OTGI1G\n9Ho99+7dIywsjNraWmxtbdm/fz+5ubnY2toSGhqKu7s7Fy9eZHJykp6eHhwcHJienhYLuJTVb29v\n57nnnuOvf/0rS5cuFZbld955h5CQEPbt28etW7dobm5m9+7dPPbYY7z//vscPXqUvr4+Dh06xIkT\nJ9i9ezfnzp2jqakJOzs77OzsRDdnZGREoPWdnJzo6uriueeeIyMjQyxwAwMDhIeH8+2335KUlCQI\n3GFhYRgMBurq6gTXwtfXl4iICHx8fLh16xZGo5H8/HyOHj2KTqcTx7MbN24wNTVFfn4+lpaW9Pf3\nC3jLpUuXcHZ2Ftv3ffv2ibShubk5np6eJCQkEBISIjIozc3NlJSUMDY2RlVVFVqtFisrK/r6+nj+\n+eeB/11wHRsbQ6vVMjQ0xNKlS8W0qEqlQi6X4+npyezsLO3t7YyNjeHr64uTkxMffvghhw4doqWl\nBWdnZwlRcF/X92KnMDExgaWlJS0tLcJlqFQq2bt3rxB2VFZW0trayuDgIKGhoczNzXHp0iVycnK4\nePEiTU1NeHt7I5PJMBqNwMJWVDpvGgwGJiYmMBgMhIWF0dnZSVBQELOzs5w6dUqwCJOTk1mzZg1m\nZmZoNBqcnJwYGhoiKSlJiFYCAgJYvXo1Dz30kABZFBYWsnfvXtauXcvVq1fJzMwUz/PIkSMMDQ0x\nNDRESEgI9+7dE4XP0dFREbD57LPPaGlpYf369UIIW15ejk6nE/MW7e3t2NnZYWNjI/5NEk4sMDBQ\nTFqGh4dTUVFBa2sr5ubmaLVaHB0dsba2xtHREVtbWxobG1m6dCkymUz4ECV6toeHh0Cmq9VqZmZm\nmJiYYGxsjCVLluDt7c3IyAiOjo5UVVWxc+dOBgYG8PX1Zc+ePaxevVoElXp6eujs7OT06dNcvXoV\nHx8fYmJiWLRoER9++CGffvqpeB0lG1dPTw85OTmUlZXR2dkpiq/SAJJMJmPp0qVcv35dODSOHz+O\nmZkZzzzzDN7e3ty7d0/kFCYnJ2lubmZoaEhQr/V6PdPT0wLeExgYyKpVq5ieniY+Pp6YmBhGR0dx\ndHTk6tWrrFixgpCQEBISEsQ2Py0tjR07dhAZGcnnn39OREQEa9asETtcJycnuru7OXPmDLm5uRw7\ndkzAWDUaDUVFReK4aGFhwcmTJ4GFsJVGo2HlypXMzs7i5uaGi4uLwNBJcyQ5OTnMz88LV0hISAgz\nMzP4+fkxPDxMd3c3FRUVYlr0fq7vxaJgY2NDVVUVcrkcLy8vUlJS8PLyQi6X4+TkRElJiaAtS4z7\n/Px84V4wmUxERESQmJjI4OCgSAhKAaXo6GiheIuLi0On07F582YCAgJwc3NDoVDQ1NQkMOLx8fGE\nhoaSnJzMxYsXcXNzIzAwkM8//xxY4AiaTCaxncvKyhKo8IaGBhYvXkxkZCSDg4NCcqpQKHB3d6es\nrExIRR0cHJiYmBCr+IYNG/D29iYwMBBLS0vRL5cy8jExMahUKjGf4OXlRUhIiDAq6/V6bt++jZ2d\nnfj6koXJwcEBCwsLUR8IDg4WZ2uVSsW9e/fo7+8nKCiImJgYAgIC8PDwoLa2FgsLC2JiYliyZAl2\ndnYUFxcLzf27775LVFQUQUFBFBQUiCDPb37zG8LDw0lPT+f5558nPz9fqOhKSkq4fv26SP4lJyeL\nAJOFhQX5+fl4eXkJUcrs7Cw6nQ4nJycxQiyxJGJjY7G2tmbr1q3ExMSILlRcXBze3t4CmV5eXo6X\nlxfr169Ho9Gg1Wqxs7PD19eX6elpoqKiOHjwILt27RI8BOmGYjQaRe1DmrSNj48HFrolFy5coLi4\nmNjYWBobGwUGzszMjIKCAlxcXGhoaBARbrlcLtyir776KmFhYTz88MP87Gc/E6lZSUgsjVpLg3Mh\nISG4uLigVCpF29fc3Jzt27djbm6OyWTirbfe4tq1a1y8eFG0yKV5oPu5vhfHh7GxMcEzvHLlCi+/\n/DLXr19naGiIDRs24O7uLqqyAQEBmJubCyGnVEh68cUXuXjxIsuXLyc0NBRYQGU9+OCDaDQaPD09\nKSoq4rPPPiMpKQm5XM5XX30FwNq1azl48CBLly4VYFJJLrJ//37Rq3dxcQEWoJqtra2oVCrKy8uZ\nmZnB1dWVH/3oRxw6dIjw8HCUSiVmZmbEx8czMjLCqVOnOH36NPX19cIVGRQUJCbbABFWKisrw9XV\nlfDwcFxcXPjNb37DyMiIWDils6oEgBkfH2doaIivvvqK5ORkdDodRqOR+vp6QkNDGR4eRqVS4e3t\njUKhwM3NTbwRh4aGRFbA3d2dV155hczMTEwmEz4+PuzatUu4JSsrK2lpaRGLkwRiuXfvHt988w0D\nAwMolUoSEhJQKBSo1WpCQ0O5du0a/v7+HDp0iO3btxMeHk52djb29vbCBCYRrcPDw4XXQzI9FRcX\nY2trK7oldXV1zM/PYzQa2bhxIxUVFfztb39j+/btXLp0iSeeeILq6mp8fX3Zv38/RUVFrF69WrS2\nHR0dMRgMfPvtt/T29uLv74+3t7fY0Vy9elWwMbZv345KpUKtVnP8+HG++OILVq9eLaYZpaK4n58f\nr7zyCg888AC9vb3U1tZSU1NDXFwc4+Pj/PznP0cul/PRRx/R19fH7OwsmzZtQqFQiFzLrVu32LBh\nA7CguK+rq8PLy0toA+RyOX19fdjY2KDVahkYGOAPf/gDpaWldHZ2cvz4cQYHB0WEOioqShi3Xnrp\npft+P34vFgUbGxt6enrYtWsXMpmMuro61q1bJ7bcK1asYHh4mHXr1uHg4EBkZCSWlpZUVlYyOTnJ\no48+Kv7hU1NTvP/++wCsWrVK64zh+gAAIABJREFUJMQAHnvsMaanp5mfn2dwcBC9Xk9HRwebN28W\nzMNjx44RFhYmZgGio6NJSkqivb1dxJwbGhpEBLWtrQ0nJycxVQgICezZs2fp7OyktLRU9PulLoqU\nobCxsaGvbwF1KWUYpPOlubk5nZ2dtLe309zczJIlS0SsNzc3l66uLuLi4pDL5XR3dyOTyYRd2s7O\njsjISJYsWcKFCxcYHBz8T0atkZERtFotarUag8EgFtLGxkbGxsZ
IT0+ntbWVs2fPEhISgr+/P/b2\n9qhUKnQ6HRcuXOCPf/wjhYWFDA4OsnfvXhobG3n44YdpbW3ljTfeEKCYyclJ3N3d8fLyYnx8nLt3\n7/KnP/2Jv/71r+Tl5dHU1CS+v0Q8/tWvfsX69evRarWEhYVx/PhxoqKihKH88ccfp7Gxkc8//5yz\nZ88KsfCRI0fo6upiw4YN3LhxQ+zCfHx8xE6ttbVVjFg3NDSQkZHBli1buHLlCsXFxbi4uFBeXo6T\nkxNVVVVcv36dyMhIoRDw9vYWsF1zc3NqamrIzMwUBKkTJ06wfv169u7di6OjI7du3WJ4eJiKigrq\n6upIS0uju7sba2trTpw4wY4dO+ju7ubAgQPi6CtZyz744AMRhc/KyiI4OJjs7Gy2bNkiWsVSTkOp\nVOLr64u3tzebN2+murpa1NXq6+vv+/34vVgUpDx/Xl4eGo2G8PBwrly5wsjICFu2bKGlpYXOzk6h\n87558yYxMTFMTU2xePFiamtrWbJkCR9//DHu7u6Eh4cDC5Nj4eHhFBYWYmtri5ubmzivStHXNWvW\niHbXL3/5S9atW4fRaCQjI4OIiAja29tFYEoKGel0OlGneO655xgcHKSlpYX09HRhdE5KSmJiYoKO\njg4h7IiIiKClpUXEaaWRV+kuKZfLycjIYPfu3VRWVuLo6Mi1a9eIiopCo9GQk5PDihUrkMvlQqxb\nW1uLubk5KSkpWFtbMzY2hl6vF5OJN2/eFKPZ/v7+zM/P09vby9TUFG5ubuIuJ2Us3N3dGR0dpaWl\nBUdHR+Fz0Gq1YrjL2tqaNWvW8NFHHzE2NsZTTz3F119/zezsLNnZ2VRVVQnPxBtvvCGYDVZWVmRl\nZbF8+XJOnz5NeXm5IChLAZ2hoSHCwsJ4/vnnxZY9Pj5eGMR37tzJgw8+iKOjI/Hx8Vy+fJmgoCCW\nLl2Kv78/f//738VU7NatW4UbUqPRiLRsU1MTYWFhvPPOOwJ739raiouLC9bW1sTHx4sqf3p6Ohcv\nXhS2bslBIbkZpeSlra2tMEofPnwYo9HI3bt3mZiYEKP9SUlJQjAzMzNDV1cXN27cwNXVFZPJxHff\nfYeFhQWwcEStqqoS6c2+vj7Gx8epra3F3t4eCwsLpqamqK+v59y5c6SlpYl6laOjIy+++CIrVqzA\n29sbLy8vMcB1P9f3oqYAC3fflpYWEXGViM05OTmcPn2awsJCtFotTk5OAnBSVVVFQUEBDQ0NfPfd\nd5iZmQkBKkBoaCj+/v7s3r1b3Ml++ctf0tvby8GDB/n5z3/Oa6+9xuzsrEDA5+fnU15eLsadpZCT\nREaGBV6jq6srr776KufPn8fX15fQ0FAyMjJ466236OrqIjs7G3d3dywtLcXz8PLy4tChQ6KwWldX\nR0JCgqhkW1pacujQISorK0lMTKSmpobNmzcLaerBgwcZGBjAzMyMyclJ2trasLKyYufOnWLRbG1t\npaWlBTc3N0pLS1EoFPj7+5OQkIBMJsPX1xdbW1vxy3zv3j1RxGxpaeHTTz9leHiY4OBg2traBOhD\nq9Wi1WoJDAwURGk/Pz8xwZmVlcWaNWtEStHCwgJbW1vRNZBSnBs2bMDT05PMzEyOHz8u/BdSF8Pb\n25vGxkY++OADvvzyS5544gkh5N28eTOenp7cvHkTjUZDe3s7MpkMPz8/UlJSeOihh7Czs2P16tU4\nODiQl5fH8uXLAcjMzCQ/P1/IXhobG4mOjiYhIYE9e/ag1WpZsWIF+/bto6Wlhb1797Jnzx78/f3F\nZKKzszPd3d3Y29uLaLqEz0tNTeX27dsCgBIaGkpbWxuHDh3iwIEDwMJch1qtJiUlBTs7O6ampvjV\nr36Fvb29mFWRRsglOrlkkC4sLKSzsxOApKQkuru7OX36NK2trTzyyCPU19cLQa3BYCAzMxM7Ozsx\nMSndeO7nkv13WhX/b10ymez/+yfxf67/c/3//yo1mUwJ/9X/9L04PkjVV09PTxoaGlAqldTW1hIY\nGMjU1BR2dnaUlJSQmpoqvATDw8Po9XoBMr127ZqYT/Dz82Pbtm288cYbWFhYCCKRyWRCJpMREBDA\ntWvXhHXJ2dkZrVZLaWkp6enpFBQU4ODgwM6dO6moqMBoNPLtt98SEBDAz372M8EutLS0xGAwMDc3\nh729PZaWlhiNRnp7ezEYDNTX13Pw4EEuX76Mp6cn/f39bNq0id7eXjGPsG7dOu7du8f+/fsZGhri\nzTffJCgoiMnJSYaHh1Gr1Tg6OorR3Vu3bgmSs7W1NRqNhqGhIVxcXIiPjycvL4/t27czMDCAl5cX\nExMT9Pf34+PjI4qfknouNDRUTFE6ODhQVlYmeJeNjY0olUoCAgKorq5m2bJlFBQUCNnv8ePHBXTF\nz89PiGi1Wq0o2h0+fJjMzEympqaIj4/n7t27hIaGEhUVRXl5OWVlZVhaWrJkyRKam5v5y1/+wmuv\nvYalpaVIfAYEBODp6SmGfVpbWwkPD+fEiRNCdFJVVUVwcDClpaV4eHjg5eXFzMwMCQkJXLlyhdde\ne4333nuPu3fvChFMRkaGOE7Gx8fj4+PDRx99RGJiIh0dHUxMTJCQkMDY2JiwdHV1dREeHk5CQgJ/\n//vfOXHiBGfOnBHTq9IZ//z586xevZrm5mZ8fHyEASopKYmysjJsbGzEfI+trS0RERG4urryxhtv\n8MQTT/Dqq6/y9ttvYzAYBNm8pqYGc3NzoqOj8fHxob29HUtLS7RaLUajUdTOpqeniYmJYWhoiC+/\n/JIdO3YwMjIiOj73c30vjg9jY2MoFApqa2vx9vZmcHCQtLQ0tmzZwvbt24mMjGT//v0UFxdz584d\nDAYDPj4+6HQ6li9fztjYGFFRUUxNTWEwGASCa+PGjTQ1NdHQ0MDWrVuxtLQUhaXIyEgGBgZITk7G\n3t6eqqoqkScPDg4mISGBhoYG+vv7aWpqYvny5WJcVorI6nQ65ufnuXPnDkVFRVhbW3Pv3j2SkpJQ\nq9Vs3LiR8vJylEolSqUSHx8fWlpakMlkODo6Mjg4yP79+8UPq7CwkNDQUNGTfvTRR5mdncXHxwdb\nW1syMjJobm4WklxJjjI3N8f4+DhZWVmsXr2a3NxcYZyenp4WHAKVSsXs7KzQrsFCYdHe3p729nZi\nYmKorq5mcnKS8PBwcdSYmZmhra0NNzc3+vv70Wg0zM3NMTc3R2RkJAaDgZUrV5KWlkZkZCRWVlZM\nT0+Tm5srjnLV1dV4e3tTW1vL9evXUSgUREdHC+yYJGwNCAhg+/btrFixAjc3N+bn53F1daWtrY3L\nly+j0+moqakhKiqKRx55BLlcjpWVlRhTlnIXZmZmXL9+XSj5ZmZm8Pf3x9ramvPnz7N3715+9atf\nYTKZyMnJISsri507d+Ls7IxSqSQpKYmIiAi0Wi3r1q0TtK3JyUm+++47du7cCUB2djZzc3PU1dXx\n3Xffce7cOSwsLNBoNK
KbIYXYamtrxVHD2tqagIAAQkNDRUdox44dooApQVIWLVpEaWkpWq1WBLF+\n85vfUF5eTn19PVqtlubmZgYGBlCpVLS1tdHV1YW5uTnh4eHs2rWLVatW8eSTT973+/F7sVOQy+VE\nRUXR1tZGVFQUFRUVgiTzxhtv8OMf/xi1Ws3q1auJiYkRfESJljQ/P8+NGzdELkEKlkxNTfHaa69x\n9+5dvvrqKyIiIoiOjsbBwQFvb2+6urqEr2DRokWUlJTw/vvvs2nTJjo7O7l8+TIvvPAC8/Pz1NbW\nCm1cT08PTk5OmJmZoVKpeO655wQdKjY2loCAALGAODs7MzU1hVwuJzY2FqPRKEa7TSYTBw8eFG+c\nqqoqdDodKpUKo9FIUVER7e3tYizWy8uL1atXU1FRQW5uLvv27aO8vBxXV1eMRqOA0trY2DA3N0dx\ncTEbNmwQZuSenh56e3tFK7OpqYmkpCQRlc7MzCQ1NZXGxkY0Gg2RkZGC5izZpmdmZgT+7MEHH6S9\nvZ2UlBR6enpEgVPiDiqVSnJyckTIqbm5mZdffpm3335bJEgnJyf/kzLezMyM27dvY2VlJeA4c3Nz\nwkbd3d0tioPSFKAkyTUzMyMyMlJ4Qc6fPy+waadOnWLr1q24u7sjl8v5yU9+wo4dO3juueeABcDN\nL37xC1xdXdm6dauocVhYWHD06FGio6MpLS3F29ubpKQkmpubgYXzfU9PD4sWLcLPz4+jR49iaWmJ\nQqGgv78fa2trMjMz2bt3L52dnYL96OnpSVhYGFZWVuTl5VFZWYmHhwfR0dF88skn2NraolAoaG1t\nxcvLi4iICD799FM0Gg2PPPII1tbWuLi48PTTT9PX10d4eDhPPfUUvb29NDU1CTtZfn4+wH+L0fi9\nWBSkbXxeXh5Lly7ll7/8JQMDA5w/f57f//73tLW1ce7cOaytrTGZTDz99NP09PSI0VR3d3c+//xz\nTCaTSN3BwmpbUVFBe3s7ZmZmjI2NsXnzZs6cOSMWn6ioKO7cuYNer+fVV19l586d3L59m5mZGQAx\n/9/d3S2IT76+vnh5eREZGclHH30ktnfSC19TUyOQ9FJASqlU0tbWJpj/BQUF2NjYkJ+fLyrZHh4e\nooA4NTVFX1+fQMvX1tbi4eFBRUWF8BRKyHMpfTk8PExWVhYJCQmUl5fj6+vL0NAQs7Oz4nVZv349\nIyMjyOVyoqOjaWtrY3h4WDgwvv32W1Htd3JyIisri4ceeoi5uTlycnKIiYlhbGyMnTt3YjKZUKvV\nlJeX4+npidFo5IEHHqCkpAR/f3+KiopQKpW4urri4uIiCsdarZabN2/S0tLCvn370Gq1LFq0CFho\nSTY0NODn50dYWBhnz55l7969JCcn09jYSFtbm3Ap6nQ6ZDIZQUFBjI6OsnTpUkGL+uMf/ygmXgFh\ngR4cHORnP/sZ1dXVpKamotfrmZubE3j38PBwRkdH8fHxwcfHh/LycmHhXr9+vShUlpSUAAvdkh/9\n6EecOnWKvr4+tmzZQnR0NMeOHRNt4BdeeEHE2WdnZzEzM+Py5cvU19eze/duYfA2NzcXw3HV1dVo\ntVpsbW3F7590lLx48SIKhYLExESOHTsmTNP//u//zo0bN0hKSmLt2rXodDrs7e3Jz8//b0FWvheL\nwvDwMH/961/ZunUrVlZW3Lx5E7VazdatWwW2XRoG6uzs5Ac/+IHg+qnVar777jvS0tKQyWSEhoYK\nkaekVj98+DAnT55k165dfPLJJwwPD/PQQw9hY2PD8PAwcrmcxsZGBgYGOHXqFL6+vhw6dAhfX1/K\ny8tpamoSE5CwECzx8vKitbWVbdu2UVNTg62trYijNjY2cvz4cbZt24ZOpxNxU6PRSFJSEgqFgsHB\nQZKTk4mKihIz9BqNhuTkZDQaDbdv38bR0RG5XE52djZmZmbi7F5TU0NYWJjYESQmJlJQUMDNmzdx\ndHTEzMyMuLg4hoaGaGpqYv369QwMDKDVapmcnCQoKIj8/HwqKirw9fWlvr5eIODr6+v58Y9/zIoV\nK1CpVBw6dIihoSEGBgYICwvD3d2d4eFh4uPjuX37NlVVVSQlJbF48WIuXbpEUFAQKSkpyOVyUWvR\n6/Xs27ePkZERFAoFMzMzlJSUYGNjQ0JCAoWFhQIsKtGIJENWWFgY+fn5IsknCXkPHDiAm5sbn376\nqaBOzc7OEhYWRklJCQcOHBC5EFiYmE1OTkYmk1FfX49CoUCr1bJ48WJGR0f5/e9/T0xMDPHx8VRW\nVuLg4MDg4CDDw8OYm5uTlpZGX18fDz74IFqtVuxsLCwsKCkpETzNyspKysvLsbOzQ6fTkZKSgrOz\nMxMTE7S0tBAfH8+6desoKipiYmKCsrIyNm7ciMFgwMbGRqRxnZyc0Ov1mEwmrl69yuLFi1mzZg2+\nvr4cPnyYb775hrGxMYqKipiensbW1hZfX1/27dsn4LGpqal4enqiVCpFsvN+ru/FomBlZSXmCr7+\n+mu2bNnCV199RVRUlOAHhoSE4ObmRm5uLhUVFZSUlNDR0YGNjQ0qlQorKytsbW05ffq0AHZId7XM\nzEyRlNu4cSMuLi688847JCQk4OLiQlpaGvHx8SxfvlzIQcvKynjjjTdYvny5ULNJwRLJ5BMQECAy\n697e3mLU28/Pj5mZGc6ePStMQ1Jhr7KyknPnztHX1ycsxRJoNjw8HI1GI/D11dXVvP7665SWlnL3\n7l127tzJyMgIRqMRV1dXHBwcGB4epqioSAwvFRQU0Nvby+zsLBERETz00EPMz89ja2vLwMAAsLCT\nSU9PJzAwUPwb2trasLa2xsbGBi8vL6amppifnxdTikuWLOHu3bv09fUREhJCUVERw8PDbNy4kdnZ\nWVpaWsTsiYODg5gMnJycpKOjQ8Bsx8bGWLZsGRs2bMDNzY3PPvtMcBIANm3aREBAAIODg5w5c4Yl\nS5Ygk8koKirCyckJuVxOcnKy8FJYWFiIQSOpverj40NbWxsGg0HEhn18fMRxz9fXl8uXL3PkyBHq\n6upYsmQJ8/PztLe3C4FMd3e3KFiuWbMGrVYrvCTvv/8+0dHRwAIM5c6dO5ibm4sBJzs7O4aHh/nL\nX/4imJz5+fmUlJQwNDSEr68vvb29xMfHo1Ao2LRpEw0NDdTU1IjxfCsrK5qbm8UIvFqt5sKFCzQ0\nNLBs2TLq6upob28XNvGZmRnWrl3LhQsXiIyMFBH6d999l5SUFBZEb/d3fS8KjVJKLT4+nvb2ds6e\nPUtxcbEAVkZERDAxMSGoPQkJCbz77rtikESi3ygUCkZGRsSkoLW1NbW1tYIsVFpaKqAiSqWS4eFh\nwVP45ptvMBgMrFmzBjs7O06fPs3KlStJTExk7969glwEkJyczJ07d5icnOTatWvCFSBxCdzc3NDr\n9VhZWQllnJeXF9XV1WLX4ObmRlhYGBEREZw6dQpAaO+OHj2KTCYjLS1NZP0ff/x
x7OzsgIXzoaSk\n1+v1Ilfg5OREfHw8Tz/9NCMjI5w+fZpTp05hMpmYn59nzZo14m5tZmYmnBPz8/OCdixJcUpLS8nJ\nyaGtrQ1XV1dsbW1pbW2lsLAQhUJBVVUVBoNBoMkkT2d3d7c4qvT29uLh4UFqaiq1tbXCgSltZ6em\npsQcgbRbys7OFoveypUrKSgoID8/X3A21Go109PT1NTUUFBQIGYJpLmXsbExbG1tmZycpKioCL1e\nL16ztrY2Wltb+fTTT1m9ejUajYYXXniBxx9/nLq6Ovbs2UN/fz/Lly/H2dmZgYEBdDqdyFhMTk7y\nwQcfsHbtWpEQzMvLY3BwkICAABITE0lPT8fOzg6FQsHf/vY3HBwcRMFRoVBw5coVmpubOXz4MFu2\nbOHRRx8VE7fSwgcLxXdPT08uXbokBuFWrlxJbGwsFRUVaLVaYmJiaGlpwdLSEgsLC4KDg9m7dy/R\n0dGEh4fj7OzM2rVrMRqNAh57P9f3YlFwcXERNF6VSoW/vz9r167lscceQ6FQCBeepaUlu3btYs2a\nNczNzTE9PU1AQABmZmb89Kc/BeC5554TxZXc3FwOHTpEX18fV65cISYmhh07duDg4CBEMEuWLOHK\nlSs4ODhQWFhIe3s7vb29WFpasnHjRpKTk0lKSmLz5s2cOHECWGBKLlq0SHRL5HI51tbWLF68WAA5\npLHs5cuXI5PJSE1NxWAwoNVqRdHP2dmZnJwcwRcoLCxkamqKxx57jKGhIWxsbJDL5XR1deHh4SHQ\n6LAQbpG4BQkJCaL12t3dTXNzMw4ODqxcuRJvb2/q6+u5e/cup06dIjg4GIVCwcDAAB0dHZiZmQnO\no7m5Ob29vczPz6PT6QgODhbDahI8JTU1laKiIqKiokRxsre3F51OR2RkJJWVlYLKPTMzw+DgIHNz\nc2g0GiwtLdHpdDz22GOiU+Ti4kJZWZlIXXp4eAgPY3d3N87OzjQ2NqJWq+nt7RVtPmkSsbe3l/7+\nflEX0uv15OXlkZOTI6Y5AVEM7e3tZWRkRMwHmJmZsWjRItLT07GxscHMzAy9Xs/ExIQAzBQWFhIf\nH09bWxsPP/wwJSUlbNu2DVgIyOl0Oqqrq8nIyKC7u5tz586JN2prayu9vb0CuRYTE4ONjQ1xcXF4\nenqSk5ODl5cXNTU1gqAEC8edVatWERMTg16vp6amhrq6Os6cOUNaWhoBAQGMjY3h7+/Ptm3bmJyc\nZGZmhkWLFgnYcH9/v5gwluZ27uf6XhwfZDIZPT09BAcHc+/ePe7cuUN4eDiNjY20t7czOzvL2rVr\n8fLywt3dnfPnz5Ofn49Op+PRRx8VvxhyuZzc3FyWL1/On//8Z5KSkigvL8fKyoq4uDhu377N0NCQ\nmAQMDw8nKyuL119/HZPJRH5+Prt27aKwsFAo1Obm5sT5d9OmTZw7d054DPbs2YNSqUShUODl5YWr\nqysjIyOEh4dTU1ODh4cHs7OzXLt2DV9fX2JiYpidnWV6ehq9Xo9KpRJFRFi4O0gj4EuXLiUyMlJg\nuqW5CynRNzc3R2pqqpjBGBoa4ubNm3h7e1NeXk5cXJyoc9ja2rJixQp6e3uxs7PD0dERNzc3urq6\n6OjoIDQ0lPr6evHGlhgAERERZGVlidyFpaWlGFmuqqrC3t4epVLJ2rVrqaur49q1a3R2dtLf3y8E\nN3V1dURFRfHyyy/T1dWFi4sLer2et956SwBv5ufnCQ4OBhALckhIiOBSrlu3DldXVxwdHZmfnyci\nIgI7OzvOnDkjpLAqlQovLy8BKwkKCiIuLo5z5xYk6ZKXQbJul5WVMTg4SGlpqSjIZWVliRH+ffv2\nMTs7y7Fjx9i8eTNLly6lpKQER0dHAgIChMnp4sWLpKamip+HhH6TyWSiuBcaGspvf/tbampqyM7O\nRq1WMzo6ipWVFTqdjlWrVpGTk8OyZctErSIhIYGKigqGhoZobGzEysqKc+fOoVQqycvLY926dXh5\neQm8PSzcBMPCwgSNy8rKit7eXqG/u9/re7EoTExMsHPnTvLy8ti8eTOLFy8mOzub9vZ2bG1teeGF\nFwC4du0aExMTbNmyhfHxcebm5oTcRBJeXLp0iatXrwJw69YtxsbGWLdundg9zM7OcuPGDTFEVF5e\nztKlSykoKBDzArm5ubi6utLY2CiwbS4uLqJ1uGXLFhEm6enpYXp6Gh8fH+7cuUNdXR2zs7NcunSJ\nZcuWYW9vz8GDB5mfn+f27duUl5ezZMkS+vv78fLyori4mJCQEGDhLikVEAcHB8nIyMDc3Jy+vj5i\nYmLo7OwkICAAd3d3rl69KsjDubm5Avfd3t7O6OgoKpWK4eFhPD09BS0b/nc3pbS0FL1eL2hE5eXl\n2NjYMDU1JWot0lFMIj1HRkZSXFxMZGQkt27dws7ODj8/P+bm5vD09GTDhg28+eabKJVKNm7cyJUr\nVzAYDAwPD1NXV0djYyM7d+4U7MGqqipiY2Px9vYWEXIp819dXS34EM7Oztjb27Nr1y5aWloYHBzk\nwoULwuEgAWf9/f2ZmpqivLwcW1tbWlpaWLlyJR988AEtLS04ODhQVVUl6MYGgwFvb29SU1P58MMP\nCQkJIS4uDqPRKF7bTZs2YWdnJzIoV69e5cknn+TXv/41AC+//DIajUZ83QcffFDMWEhuS0kI3N3d\nzdq1a8nPz8fJyYmysjJ2795NfX09Pj4+FBUVCbqXJKaVdrXXr18nKCgIGxsbFi9ejEwmo6ysjIKC\nAiwsLMT7QOJzSDmHhIQEqqqq/mePDzKZzFcmk+XJZLI6mUxWK5PJ/q9/PP4rmUzW838zUUuf81OZ\nTNYik8kaZTLZxv/ySZiZicLR0NAQ2dnZ9PX1icx8Xl4eGRkZ4m6RnZ3N6tWrxUy6BFe9e/cuiYmJ\nAsfm6uqKwWBgdHSUrq4uQkNDBfy1oqKCe/fusW3bNgoKCoRK7ciRI9jY2LB9+3bc3Nz49ttvaW9v\np7u7W7Q6x8fHhYzDzMxMSFHHxsZQq9UCMurt7c3Vq1dpaGjA29ubiIgI/P39ReqsuLiYqakpcXdw\nd3cX9u3BwUG6urpEb/vmzZs0NDTQ2dkp8OiDg4MMDQ2xaNEiRkdHiY2Nxd/fnzVr1hAUFISfnx/+\n/v74+fmJ87FEBEpKSmJ2dhaVSkVzc7MAjUjVeElsExwcTHBwMHZ2doyPj7Nt2zbCw8NZtGgRq1at\nEkcGV1dXxsbGsLa25oEHHmB+fp65uTlRAJ2bm0OlUqFQKESnQVKfSZQpQLTxFi9ejEajISUlhbGx\nMVHz8PT0FFmFtLQ0QbWuqqrCwsICZ2dnPD09iY6OpqmpSRQanZ2d2bBhA319fcTGxhIXF8ejjz6K\nt7c3bW1tAoluMBj44osv6O7u5uTJk5w4cQK1Wo1cLue7774jPDycjo4OvLy8ADh27Bi3b9/G2tqa\nyMhIamtr8fLywtHRkW3btmEymcjKyu
L69evk5+czNTWFh4cH09PTeHp6Mjo6yo4dO7CysiI4OFjs\nsCYnJ9m3bx8ajUag8h0cHHj22WdFXay3t5ebN29iZmaGhYUFaWlp6HQ6+vv7aWxsFKRoo9H4P+6S\nnAdeNplMkcAy4HmZTBb5j7/70z+bqP/xg44E9gKLgE3AEZlMJv9X38BoNPLTn/4UX19fzp8/z759\n+7CzsyMsLEzcpZOTkwWosqSkhN/97ndkZmZib28vlNzSmVviE3h7e/P444+j0WgwNzfns88+Q6/X\nU1lZyWuvvcaBAwdEGGoyc0AgAAAgAElEQVTZsmV4eXmxbds2Dh48iJeXlwjJeHl5cfnyZVHAtLOz\nEwCQvLw8RkdHWb16NZ6enjg6OpKbm0tiYiJKpZLAwEACAwORyWQsWrSIFStWkJ2dza9//WuWLVtG\ndHS06KUbjUacnZ0pLi6mra1NuBQzMjI4efIkzc3NhIeH8/TTTws4anl5OSEhIRw+fBgzMzNKS0tp\nb2+ntLRUEIbm5uZwd3fH19eXvr4+9Hq9wJc5ODiQkJBAeno6RqNRVL2VSiVqtRovLy9CQ0PFgipN\ngTo7O5OVlUVtbS1qtZrLly8zODjIs88+i5+fH87Ozjz22GMEBQVRVFQkxLxarZbU1FReeeUVvvji\nC3p6ejCZTGIQSPI1VldXo9fruXHjhgC0aDQaJiYm6O3txd7entnZWd59911effVV3N3d8fHxQa1W\n4+DgINqN0tThypUrKSkpITAwkHv37rF+/XoqKytpbGzkiy++YOPGjRw9ehS9Xs/LL78sos3/9m//\nJniga9euFRo9qTC6a9cutFotbm5uXLx4kb6+PgFgKS8v59y5c9jY2DA6OsquXbsYGBjAx8cHR0dH\nNm3aRElJCSdPnhR1te3btwOIkXA/Pz9efPFF1q1bh1Kp5MaNG5w6dYq8vDzMzMw4fvy4CE5JxyFX\nV1fi4uJwd3cXMfT/0UXBZDL1mUymsn/89zhQD3j/i095ADhlMpn0JpPpHtACLP1X32NqakoIP+Lj\n4zEzM+Ppp58mKCiIrq4uRkZG6Orq4s6dOzg5ORETEyN+iUwmE+fPnycsLIzCwkIsLCzo6ekBwNHR\nEZ1OR35+Pl1dXaxcuVLkEAoKCsjJyREewuzsbEE37unp4fPPP8fJyYkLFy7g7OzMrl27xB2up6cH\nuVxOb28vCQkJ/PSnP8Xa2hqVSoVGoxGeRKnDodFoKC8vp62tDXt7e1xdXQkNDaW5uVnk7wFBVvbx\n8WHTpk3ivF5WVsbly5e5ceMGIyMjTE5OCjqSo6Mjt2/fpqmpiZMnTxIXFydch6Ojo8IoBQt3S4kI\nNDo6ilar5fTp0wwNDZGfn8/Q0BC9vb1ERUUJv8H8/DwXLlxAr9dz4cIFQcDu6OgQYSvpeDA+Pk5j\nYyOAIAt3dnaKYI6rqytNTU3IZDLBF5BsV9LV1dUlPBYhISHi55GTk8ONGzcYHh4WzMaUlBQSEhLY\nuHEjdnZ2dHd3o9FosLW1FR9fuXIFQMya9Pf3o1QqqaqqoqenB3d3d5ydncnLyyM9PR25XC5G9l98\n8UVcXFyYm5ujrKyMhoYGgoKCmJ6eJiUlBYC//OUvyGQyZmZm+PnPf46lpSVWVlY0NTVha2uLpaUl\nvb29uLi44O3tzYEDBzh48CDW1tacOXMGhUJBT08Px44dIyEhQRyj9uzZg5mZGWVlZbi5ubFkyRK2\nbt2KjY0N8fHxdHR0UFZWRlFRESqVipqaGo4dO4ZKpRK0JWlSsqysTMS97+f6b3UfZDKZGlgCFP3j\noR/IZLIqmUz2qUwmc/rHY95A1z99Wjf/ehFBoVDwzDPPkJqayg9+8APGxsb43e9+R39/Pzt27ODS\npUtkZGRQW1srmPhPPPEEBoOB06dPY2lpyejoKBYWFiQmJop0nNQ6e/zxx1m1ahW5ubkcPnyY0tJS\nGhsbuX37Nt988w3x8fEimiu5K3t6evD09OTJJ58UQziSpFMulwt2v1ar5c0336S1tRWj0UhJSQm2\ntrbExcURFBTEI488IpyYRqNRmIEkVPfg4KCoVbS1tVFSUoK5ubkgKw0MDPDDH/6Q1NRUoqOjyc/P\n5+OPP+a3v/2tGPS6ceOGgMguWrSI/Px86urqmJiYYHh4WJwp5+fnGR0dpaGhQYzrSscFCwsL1q1b\nx/j4OGZmZlhaWoqZBImylJSUhNFoxNramlWrVokdg0R4npiY4Msvv6S+vl70ziUhcFhYGM7Ozjg4\nONDX18cHH3xAcXExK1euJCUlhd27dwMLsWFPT0/q6+vp6+sTXQJJvyfFvkdGRvjyyy8JDw8nKChI\n2LqLi4vRaDTk5uZib28vvm5PTw9dXV1Csffaa68REhKCubk5MTEx4vXp6elBoVAIwnh4eDiurq50\ndXWxb98+2traRGAJFmSuiYmJFBYWcvLkSby9vfnss8/47rvvyMnJITIykrVr1wqPhqRAbGpqQqlU\nirmaV155hYKCAjo6OgB49913sbOzY8eOHRw8eBCFQoG1tTUhISF89tlnYrGpq6tDJpPR3t7Oxo0b\nUalU+Pj4MDExQXl5OePj4yLUd7/XfRcaZTKZLfAN8KLJZBqTyWRHgd8Apn/8+Qfg8H/j6z0NPA0L\n23HpzKxQKOjq6sLR0RG1Wo29vT2vvvoqc3Nz/Md//Adff/01sbGxTExM4OPjw9zcHM8//zz29vZ0\ndHTw7bffimq+UqkUA1QS0v3ZZ5/lJz/5CSEhIQK3NjAwwODgIHl5ebi4uAixa3R0NH19fXh4eODk\n5CSMwHq9Hnd3d+7du4enpyd3797l6tWrpKWl8ac//Ynh4WFhiJb682vXrqWtrY3Z2VnWr1+Pu7s7\nly9fFq02WNhuPvnkkwQHB3P+/Hnkcjl+fn40NDSwfft2bGxs6OrqwmQysXz5chISEujp6RFbeycn\nJz7++GPS09OFC9FoNLJ69Wrc3d1577338PX1FU5Oa2troqKihKJOpVIxOTmJyWQSfMHIyEjRHerp\n6cHf35/e3l7m5uZISkoSic3o6GiCg4PZvHkzzs7OTE5OolQqRVuvt7eXzZs3s2PHDn7xi1+gUCjQ\n6/VkZmayadMmLl68CCx4FAoLC1m1ahW2trY88sgjIqHa0tJCR0cHgYGBfP311zQ0NJCcnCykPBYW\nFqhUKo4fP86KFSsYGxsT2riJiQl2797Nm2++yR/+8Ae2bNlCWVkZTU1NVFZWcuLECV577TVCQ0MJ\nCAjg888/x83NDaPRSE1NDZaWlnz22WfExcVx7949oXdrbGxEpVKxbNkyQkNDuXTpEgqFQtCyExMT\nqa6uZmpqipMnT2Iymbh27RoNDQ0EBwfj4ODAkSNHePHFF5HL5WLaUcpK2NjYMDQ0JIJvrq6u1NTU\noFKpePHFF0WB097enpSUFGEnu379OpcuXeKZZ55hz549vPXWW/f71ry/RUEmk5mzsCB8ZTKZzgG
Y\nTKaBf/r7j4DL//iwB/D9p0/3+cdj/+kymUwfAh8CBAYGmiTijU6n4/DhwxQVFQmzr0wmIzExkbCw\nMIHQiouLE5LXwsJCVCoVnp6ePPzwwxw5cgRAeP8aGxvx8/MTha4LFy4wNjbGgQMHaGlpITExEYCQ\nkBCCgoJoaWlhZGQEZ2dnjEYj4+PjtLa2/qfns3jxYjw9PZmYmODIkSNMTEywfft2xsbGGBoaIicn\nBysrK3FGjouLw8zMTIhUbt68yfj4OLdu3SI9PR2AxMRE9Hq9SElGRUVhZmYmWlfvvvsuixYtwtzc\nHLVajV6vx2g0olarRcJz7dq1nD17lsDAQEJCQggPD6e6upqamhpcXV1JSkoiIyODpUuXUldXR1tb\nmwCdSNp7CbgiwWk1Gg1LliwhLS1NFGWVSiVGo5F169ZhaWkpshrt7e3U1tYyOztLeXk5KpVKLDy3\nbt0S22VXV1fMzc0Flaqurg5YGApbvnw5s7Oz9PT0UFdXR3R0NKdPn8bb21ukRSVSlMRYlLodGRkZ\npKSkYDAYhDMTFnYgNTU1bN++Xfxc6+rqyMrKwt3dna+++oqJiQlmZ2dpamqisbGR0tJSLCwseOih\nh7hx4wZRUVGEhYXh5+cnkoeBgYHiaDg7O0t9fT2pqanCeD4zMyNcoHK5nLt379Lb2yuwdLt27eKR\nRx6hv7+fs2fPsmvXLgDWrFnDtWvX0Ol0bNmyhcLCQgG0sbGxETaq0tJS9u/fz/r168nOzmbbtm24\nuLgIsc309DT5+fmi5Xs/13+5KMgW8pGfAPUmk+mP//S4p8lk6vvHhzsBSVZ3ETghk8n+CHgBIUDx\nf/V91Gq1eIFzcnKYm5vD1dWVa9euYWNjg5ubm7DuBgUFER0djYWFhbAWVVRUEBAQQGZmpgjC9Pf3\ns3btWkJCQpiamqKoqIjIyEjc3d3FXcrf358HH3wQvV4vzpsSUrylpYWJiQkUCgXl5eWigOTh4cHJ\nkydJSkpibm5O+BVaW1txdnYW1elPPvkEe3t7NmzYIKrOlZWV+Pj4UF9fz86dO9FqtRgMBmBhFn54\neBhHR0eCg4OZm5sT6Lfbt2/T0NBAenq6OHZIyUGZTEZbWxtGo5HW1lZiY2NJSUkRQabExESuX79O\nbGws09PTJCQk0NHRgUqlEqZjvV5PW1ubSDs6ODhQV1fH9PS0QL+fO3cOBwcHli9fLryHV69excXF\nhW+//ZaOjg6xkG3btg2NRiPo0waDQSDWExMTSU1N5dNPP8Xc3JzR0VERc96/fz/Ozs7U1dURGhrK\nkiVLuH79uqA729vbi/kNNzc3Ojs7SUxMxNnZmZs3bzI1NUVLSwvW1tZs27ZNIN4BAXzds2cP9vb2\nlJWV4eTkhJubG4ODg0RERFBXV0dsbCxJSUmMjo4SExPDli1bMBgMvPfee9jb2+Po6CjqSwDbtm0j\nLy9POCjNzc154YUXqKqqwtzcnPXr11NWViYKpGNjY1y/fp3Vq1fj4uIi8hTPPvusGPsvLS0lMTGR\ntrY2vvrqKw4cOCC4jgMDA2zatElE+93d3RkaGmLfvn1kZmbi4eFBfHy8cI7OzMwIneH9XPezU0gB\nHgWqZTJZxT8e+xmwTyaTxbJwfGgHngEwmUy1MpnsNFDHQufieZPJZPhX30CKB2dkZODg4CA8jgcO\nHOC5556jubmZpUuX0tLSQkVFBQUFBRw+fJiXXnqJ559/HrlczvT0NFlZWSgUCjZu3MjVq1dFMU2K\n4Upz8bdu3RLFL4l+K2HIMjMzhRuiq6tLtA4NBoMg+DY0NIh205kzZ3jppZe4d+8eHh4eQqsGC4tS\nZWUld+/excvLSyjQ6+vrBaU4MjKSY8eOAQt3nba2Nj7++GOsra3ZvXs3MpmMpKQkAgMDxYTn6Oio\naIvGxcUxOzvLli1bKC4uRi6XC1GqlHJrb29n2bJltLS0iFkRadFpbW0lJCSEkJAQbty4QV1dHc8/\n/zy1tbX4+PhQU1ODk5MTdnZ2KJVKvL29mZiY4ObNm6SlpeHg4CBQdtbW1lhYWPDggw8yMjLCU089\nhb29PdnZ2bS0tIhKvU6n4+2338bGxobZ2VmBXv/b3/5GU1MTxcXF7N27FxcXFwYHB3Fzc8PBwQGj\n0Yi7uztnz57F0tKSsLAw4uPjsbKyorW1FQcHB1xdXbG2thYoPGmMfnx8nImJCUJDQ3niiSfYu3cv\ncXFxxMXFoVQq0Wq1JCYm4uHhIcSt8fHxfPjhh8zPz7N+/XoUCgURERFcunSJmJgYvvrqK5ycnLh3\n7x6vv/46ZWVl9PX14ePjwzfffCMW68bGRrq7uzl69CgtLS1ipsHe3p6RkRG++OIL1Go18/PzwhZm\nYWFBaWkpq1atEoBdpVKJjY0Nr7/+OlZWVjQ2NgprVUhICE5OTsTGxtLf38/8/Dz+/v6UlZWh0+l4\n6qmn7nNJuI9FwWQy3Qb+n6YpMv/F5/w78O/3+yTGx8dpamoSYJKioiJOnDjBlStXiIqKwtLSEo1G\nQ0ZGBk5OToSFhVFZWckPf/hD5HK5CONI29ysrCxg4VxWXl6Oubk5Pj4+dHV1CVKShYWF6FmbTCYh\nCk1MTKSvr4+hoSHOnTuHyWQiODgYlUrF+Pg4ADt37hRTZzExMbz33nu4u7szMjKChYUFa9asEV9T\nursbDAaSkpJQKpUMDAzg+L/ae/PoKK8rfff5pNI8VElVGkqzVJqFZpAEQiAxiRmEDY4JjrHbJnY7\nTjdx4qTjLNvp5cTuOB1naMdO28ZmsI0BAWESMwgEQhIIoXme59I8lVQa6v4hvnPjXp0Ofe/9GbKu\n9losZAH29uGr852z97ufV6UiIiICd3d3kpOTOXToEGq1mtDQUGxsbGhoaMDGxkZQpL29venr60On\n09HR0cGpU6fYuXOnyC8nJ0fIZhUKhfDJkP/s5OQk8fHx3Llzh6tXr4q2r1KpJCcnh9WrV+Po6ChG\nhuWZCNn0t7OzE0dHR1paWvD29mbz5s2CWj09PU1XVxePP/44sbGx2NnZcejQITo7OykrK2N8fJzQ\n0FDxIe3s7BR0po0bN9LX10dDQwMAcXFxVFRUCKXe0NAQra2tqFQqDh48KK6MMhrdyspKrIdM/q6v\nr8fKyorm5maWLVvGr371K9zd3YVoacmSJRw5coRXXnkFo9Eo2reZmZkEBQWxZ88e0tPTqaqqQqFQ\niCnYvLw8CgoK8PLyEnBgk8mEvb09586do6amhpiYGGHsm5KSIhy3ZR2HQqEgPz+fDRs2UFRUhCRJ\nwkBYLkjD7BX1O9/5Dl999RVRUVHCDnB6epoNGzbQ2dnJxMQEtra2wgvz1q1bFBcX093dTUZGBmfP\nniUkJITg4GAx4fsg8UgoGu3s7LC1tRU99K6uLhYtWoSZmRnXrl0T9BkvLy/ee+89vvOd74i3p1Kp\n5Ac/+AE/+MEPhBmGTKjx9fWlra2N1atXk5+fz/z589
Hr9eI0ERcXx6lTp3B3d6ejo4Pp6Wn0ej3O\nzs5MTk6ycuVK4VItQ0dhlubc1dWFVqtlbGxMzDV0dXURExNDW1sbg4ODYhbez89P+ARmZmaSmJjI\nyZMn6ejoEP8fMAv2lMVWmzdvRqvVikEuGQu2adMmvvrqK1atWoWNjQ1ubm5kZ2djb2+PUqnk8ccf\np6ysjKamJlpaWti8eTOdnZ24urpy7tw50tPT8fb2JjAwkN7eXpRKJVu2bEGtVuPu7k5gYKDwvJDb\nevLY9fj4uPCFPHfuHHFxcTg4ODA5OUlAQAC3bt2iqKiIoKAgcQqS/15PnjxJVVUVFRUVuLm54enp\nycKFC8nKysLHx0fQhuvr60lISBB29ZIkib8bpVKJVqvlD3/4AwEBAezcuZPq6mqampqIi4tjYmKC\ntrY2rK2tcXFxESInQJjWfv7552JA6MCBA+j1eiRJEvLtiooKURg1NzcnODiYJUuWUFtbS09PD9u3\nbxdAH4DY2Fh+97vfsXv3biFKg9khv1OnTqFUKsXQl06no7q6GqVSyd69e8nIyKC1tZXk5GSB0pOZ\nHbm5uYKkpVar6ejoYHBwkImJCUEgGx0dFbRmufPV2dmJs7Mz3d3djI+PMzU1xc2bN0UL9UHikbCi\nf/fdd9+UgRMlJSWkpaUJDHZQUBA5OTnMnz+fmpoaVq1ahb29PUuWLBHz+7LBa0FBAVVVVaxfv579\n+/cL/wa5CPmf//mfJCYmotFoBIfAz88PrVbL0NCQsBmrr68nJCSEffv2ERISgpOTE25ubly/fp1r\n166RlpbG5s2bCQgI4OrVq6KDsGDBAkwmE0NDQ+Tk5BAQEMCWLVuE/XtTU5OYfKyrqyMgIIBDhw6J\n8efvfve75Ofns2DBAmpqaoQLlvyA6/V6MjMz8fb2JiAggOrqahQKBVeuXKGlpUUMjnl4eFBbW0tK\nSgrj4+O0trYyPj5OSEgIvb29DAwMCOqRPAcwNTUlJvvCwsLIyckRzlOtra2MjY2RnJwsBpxGR0fp\n7+8nJSWFyspKITmPiYnh8uXLTE1NUVpayq1bt7C2tiYwMJAFCxYwPj5OZGQkIyMjlJWVodPp6Ovr\nQ6lUcubMGdavX09gYCAajYY///nPKBQKFi9eLHB2Op2OiIgIRkZGqK6uxsvLi+HhYeGQtXLlSmEN\nNz09LYq+vr6+AqU+Pj6OjY2N6DT09vYKE2A3NzeioqKYmZlh165dmEwmioqKqK2tFczQNWvWYGZm\nxpdffkl8fDwxMTGcP39ejKA/++yz/OY3vyEkJITGxkbWrl2Lra0tt27dws/Pj+bmZhITE0Vht62t\nDS8vLxwdHdHpdOzZs4cPP/yQ8fFxhoaGxAuhuroag8Eg8q+oqMDS0pL29nZMJpPgWIyNjYkWpo+P\nj6BP3759+4Gs6OdoznMxF///ib8fmnNgYCDvvPMO5ubmeHl5iaJRQ0MD0dHR4ogs31tv3rxJd3e3\nOBrLQ0HT09MMDQ2hVqt5/vnneeWVV3Bzc8PMzAwLCwsMBgNPPvkkn376qbgfr1y5EoPBICYcnZ2d\nKSoqYmhoCHd3d2JiYigsLMTHx4f8/Hxef/113njjDRwcHDAYDFy+fJnw8HDc3d0JDQ2loaEBJycn\n9Ho9BoOBlpYWVqxYwdmzZ3Fzc2NqagqDwcCKFSvo7+8nPz+f+Ph4/vEf/5FPP/1UoNUsLS3p6upi\n/fr1tLa2cvPmTfz9/YV/5eDgoOgSHD9+XDgBKZVKhoaG6OrqYt26dRQVFTE5OYmDgwOZmZkkJydj\nb28vhDMODg5YWVnh5OSEt7c3vr6+dHd3c+7cOSwsLHB3dxfXKvn+XlJSwksvvSSKjzMzM4yOjuLh\n4SHy0uv1BAQE0NXVhZ2dHU1NTcLXUq1WY25uLoxNmpqasLe35yc/+QmfffYZ/v7+DA4OCtPU0tJS\nVqxYQWZmJsPDw8THx1NRUUFcXBznzp1j586dtLa20t7eTmhoqKAly/yE119/nffff5+goCBhHiNP\njx4+fFhYC3Z3dxMYGCiur8ePHxe+D/JU6OjoKGNjY6jVal599VV+9atfCdir7BlZVlbG6dOneeqp\np/j888958cUXxan3X/7lX/j5z3/Oz372M1588UVMJhMDAwOEhoZiaWmJVqtl7dq1vPDCC1y7do15\n8+axZs0abt++jVqtpr6+nqCgIHEtktuyPT09DA8Po9FoqKio4I033uDkyZMMDAzQ2trKzp07xfX3\nb8UjwVOYmJgQo8fyQ9HV1cXExAQdHR1iwu2ZZ54hOzsbf39/EhISqKqqws7OjsuXL+Pm5ib6sjIR\nt6urS9yTV69eTVBQEMePHycwMJC+vj4WL15MXV2dOKIZjUZaW1vZunUr3//+96msrGRsbEzIneWW\nZHt7Ozk5OXR0dLB161ZKSkpQqVS0t7dTWFhIQUEBwcHB9Pb20tHRwY4dO2hoaKC2tpa0tDTs7Owo\nKSnB19eX559/Xoz3yjZ0kZGRhIeHMzU1xaFDh1CpVGi1Wvbv34+dnR337t1jeHiYe/fuUVlZiVKp\nZNmyZXh5eYkipVarFfLbvLxZAeqrr75KW1sbVlZW2NraolAoWLhwIQkJCdjZ2YmipsFgYOPGjUxP\nT9Pa2kpCQgIhISGYTCZqa2uJi4tDrVZjZ2cn8vD19UWtVguNwujoqLib29nZCSxbb28vmZmZFBcX\n09vbS2xsrCBHwyxA9ebNm4JMXFxczKuvvgrAihUrCAwMxMzMjL6+Pk6fPs3WrVtxdnZm9erV2Nra\n4uzszO3bt/nDH/6AtbW1GHFetmyZYDzGx8cL05+nn34agMcee0zYuVVUVBAYGMjOnTsFVVqhUJCU\nlMTo6CibNm0SpLCgoCAGBwcpLS0VBVEfHx+2bNmCp6en8CiNjY2lr69P2MO/9957LF68WFgW9Pf3\n09vbS319PTArvFu6dCm2traMjIyQlpaGQqFAo9Fw9+5d1q1bh5mZGZOTk1hYWAhDovHxcZydnfny\nyy9pbGwUArW33nrrgT+Pj0RN4de//vWb8qCJwWBAp9NRWlrK0qVLMRgMuLi4iLuhXJCys7Pj1KlT\nmJub8+1vf5vx8XFmZmYIDg7Gw8ODAwcOkJKSIkxje3t7xditzDUYGhpiYGCA/v5+pqenxWDOxMQE\nNTU1hIeHC7z62NgYVVVVXLt2jUWLFn2tXSZzIW/cuMHu3buZP38+w8PDQrAj8x4tLS1xc3MTkuer\nV6+yZ88eduzYwZkzZ9i4cSNubm6iYi/rCWSLNFm1FxQURF9fn3jbpaenU1NTg6OjI7W1tQwPD4su\nzcWLF3FychIWej4+PhQWFmJpaUlqaip+fn54enpSUlJCb28vcXFxghMYGhpKS0sLWVlZREREcPHi\nRXbv3i1kzwEBATQ1NYmTRmNjoxALTU5OUl1djZOTE0NDQ3h5ebFmzRphzRYSEiLGwg0GAzExMfzp\nT38iIyODF
<remainder of base64 PNG data omitted: tiled visualization of the RBM weight matrix>",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f1383d592d0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "batch_size = 100\n",
-    "num_train_batch = dat.shape[0] / batch_size\n",
-    "for epoch in range(10):\n",
-    "    w.to_host()  # move the tensor to the host to get its values\n",
-    "    wnp = tensor.to_numpy(w)\n",
-    "    image = Image.fromarray(utils.tile_raster_images(X = wnp.T, img_shape=(28, 28), \n",
-    "                                                     tile_shape=(10, 10), tile_spacing=(1,1)))\n",
-    "\n",
-    "    plt.figure()\n",
-    "    plt.imshow(image, cmap='Greys_r')\n",
-    "\n",
-    "    err = 0.0\n",
-    "    w.to_device(dev) # move the tensor back to the device    \n",
-    "    tposhidrandom = tensor.Tensor((batch_size, hdim), dev)\n",
-    "    for b in range(num_train_batch):\n",
-    "        # positive phase\n",
-    "        tdata = tensor.from_numpy(dat[(b * batch_size):((b + 1) * batch_size), : ])\n",
-    "        tdata.to_device(dev)\n",
-    "        tposhidprob = tensor.mult(tdata, w)\n",
-    "        tposhidprob.add_row(hb)\n",
-    "        tposhidprob = tensor.sigmoid(tposhidprob)        \n",
-    "        tposhidrandom.uniform(0.0, 1.0)\n",
-    "        tposhidsample = tensor.gt(tposhidprob, tposhidrandom)\n",
-    "\n",
-    "        # negative phase\n",
-    "        tnegdata = tensor.mult(tposhidsample, w.T())\n",
-    "        tnegdata.add_row(vb)\n",
-    "        tnegdata = tensor.sigmoid(tnegdata)\n",
-    "        tneghidprob = tensor.mult(tnegdata, w)\n",
-    "        tneghidprob.add_row(hb)\n",
-    "        tneghidprob = tensor.sigmoid(tneghidprob)\n",
-    "        \n",
-    "        err += tensor.sum(tensor.square((tdata - tnegdata)))\n",
-    "        # compute gradients\n",
-    "        gw = tensor.mult(tnegdata.T(), tneghidprob) - tensor.mult(tdata.T(), tposhidprob)\n",
-    "        gvb = tensor.sum(tnegdata, 0) - tensor.sum(tdata, 0)\n",
-    "        ghb = tensor.sum(tneghidprob, 0) - tensor.sum(tposhidprob, 0)\n",
-    "\n",
-    "        # update parameters\n",
-    "        opt.apply_with_lr(epoch, 0.01 / batch_size, gw, w, 'w')\n",
-    "        opt.apply_with_lr(epoch, 0.01 / batch_size, gvb, vb, 'vb')\n",
-    "        opt.apply_with_lr(epoch, 0.01 / batch_size, ghb, hb, 'hb')\n",
-    "\n",
-    "    print 'Epoch %d, Reconstruction error per image = %f' % (epoch, err / num_train_batch / batch_size)"
-   ]
-  },
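
The cell above (deleted in this diff) runs CD-1: a positive phase on the data, a single Gibbs step for the negative phase, then a gradient update of w, vb and hb. As a plain-NumPy sketch of the same logic (the sigmoid and cd1_step helpers are ours, purely illustrative; v is a batch of visible row vectors, W, vb, hb the weights and biases):

    import numpy as np

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    def cd1_step(v, W, vb, hb, lr=0.01):
        # positive phase: hidden probabilities and a binary sample from the data
        pos_h_prob = sigmoid(v.dot(W) + hb)
        pos_h_sample = (pos_h_prob > np.random.rand(*pos_h_prob.shape)).astype(v.dtype)
        # negative phase: one Gibbs step down to the visible layer and up again
        neg_v = sigmoid(pos_h_sample.dot(W.T) + vb)
        neg_h_prob = sigmoid(neg_v.dot(W) + hb)
        # gradients with the same sign convention as the cell above,
        # so a plain descent step increases the likelihood approximation
        gW = neg_v.T.dot(neg_h_prob) - v.T.dot(pos_h_prob)
        gvb = neg_v.sum(axis=0) - v.sum(axis=0)
        ghb = neg_h_prob.sum(axis=0) - pos_h_prob.sum(axis=0)
        n = float(v.shape[0])
        W -= lr / n * gW
        vb -= lr / n * gvb
        hb -= lr / n * ghb
        return ((v - neg_v) ** 2).sum()  # reconstruction error, as monitored above
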
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "## Observation\n",
-    "\n",
-    "We can see that the reconstruction error is decreasing and the gabor filters in the figures of the weight matrix is becoming clearer."
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/doc/en/docs/notebook/regression.ipynb b/doc/en/docs/notebook/regression.ipynb
deleted file mode 100755
index a61aed6..0000000
--- a/doc/en/docs/notebook/regression.ipynb
+++ /dev/null
@@ -1,288 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Train a linear regression model\n",
-    "\n",
-    "In this notebook, we are going to use the tensor module from PySINGA to train a linear regression model. We use this example to illustrate the usage of tensor of PySINGA. Please refer the [documentation page](http://singa.apache.org/en/docs/tensor.html) to for more tensor functions provided by PySINGA. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "%matplotlib inline\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "To import the tensor module of PySINGA, run "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from singa import tensor"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## The ground-truth\n",
-    "\n",
-    "Our problem is to find a line that fits a set of 2-d data points.\n",
-    "We first plot the ground truth line, "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<matplotlib.legend.Legend at 0x7f3c7ebe0050>"
-      ]
-     },
-     "execution_count": 3,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEKCAYAAAD9xUlFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl8lOW5//HPDQQSwg5hTUJYZQsgBlBwAZcWkIogx3qq\ntZUiarVqWyuL4oqKWhdOq1JcihylHiWAiChuWHBBBcQkhC3sgbBDAiSBJHP9/sg0P0SWAJl5MjPf\n9+s1r8w8c8/MdZPwfOdZ5hpnZoiIiABU8boAERGpPBQKIiJSRqEgIiJlFAoiIlJGoSAiImUUCiIi\nUkahICIiZRQKIiJSRqEgIiJlqnldwOlq1KiRJSUleV2GiEhIWbp06W4zizvVuJALhaSkJJYsWeJ1\nGSIiIcU5t6k847T7SEREyigURESkjEJBRETKhNwxheMpKioiOzubwsJCr0vxXHR0NPHx8URFRXld\nioiEoLAIhezsbGrXrk1SUhLOOa/L8YyZsWfPHrKzs2nVqpXX5YhICAro7iPn3EbnXLpzbrlz7ien\nDLlS/+Ocy3LOpTnnepzJ6xQWFtKwYcOIDgQA5xwNGzbUFpOInLFgbCn0N7PdJ7hvINDOf+kNvOT/\nedoiPRD+Q/8OInI2vD7QPASYZqUWA/Wcc808rklEpFIpKvHx4udZ/LBlf8BfK9ChYMAnzrmlzrlR\nx7m/BbDlqNvZ/mU/4pwb5Zxb4pxbsmvXrgCVGnyLFi2ic+fOdO/enYKCAnJychg8ePBJHzN37lwe\neOCBIFUoIl7L2JrL1S98yVMfruaDjO0Bf71Ah8KFZtad0t1EtzvnLj6TJzGzKWaWYmYpcXGn/JR2\nyHjzzTcZO3Ysy5cvJyYmhmeffZabb775pI+58soree+998jPzw9SlSLihcKiEp76cBVDXviSHXmH\neen6HowZ2CHgrxvQUDCzrf6fO4FZQK9jhmwFEo66He9fFlIeeOABnn/++bLb9913H5MmTTrpY155\n5RXefvttxo8fz/XXXw9AamoqAwYMAOC5555jxIgRAKSnp9OlSxfy8/NxztGvXz/mzp0boNmIiNe+\n27iXQZMW8eLn6xh2bgs+/dMlDEwOzp71gB1ods7FAlXM7ID/+s+AR44ZNge4wzn3FqUHmHPNLOds\nXvfh91aQuS3vbJ7iJzo1r8ODv+h8wvtHjBjBsGHDuPvuu/H5fLz11lt89tlndO/e/bjjp0+fzsiR\nI/niiy8YPHgww4cPZ8OGDdSvX58aNWoAcNddd9GvXz9mzZrFY489xj/+8Q9q1qwJQEpKCosWLeLa\na6+t0HmKiLcOHi7mqQ9XMe3rTbSoF8O0Eb24uH1w944E8uyjJsAs/9kw1YDpZvahc+5WADObDMwD\nBgFZQD5wUwDrCZikpCQaNmzI999/z44dOzj33HNp2bIly5cvL/dz5OTkcPSusSpVqjB16lS6du3K\nLbfcQt++fcvua9y4Mdu2bavQOYiIt/69ZhfjZqazLbeA3/ZJ4i8/P4fYGsH/KFnAXtHM1gPdjrN8\n8lHXDbi9Il/3ZO/oA2nkyJFMnTqV7du3M2LECA4cOMBFF1103LHTp0+nU6dOP1oWExPzk88XrF27\nllq1av0kAAoLC4mJianYCYiIJ/bnH+HRuStJXZZNm7hY3rnlAlKSGnhWT1h8orkyGDp0KA888ABF\nRUVMnz6dqlWrntaWQvv27dm4cWPZ7dzcXO68804WLlzIHXfcwYwZMxg+fDgAa9asoUuXLhU9BREJ\nsnnpOTzwbgb78ou4o39b7ri0LdFRVT2tSaFQQapXr07//v2pV68eVaue/i81NjaWNm3akJWVRdu2\nbfnjH//I7bffTvv27Xn11Vfp378/F198MY0bN2bBggU88cQTAZiFiATDzrxCxr+bwfwVO+jSog6v\nj+hF5+Z1vS4LUChUGJ/Px+LFi3nnnXfK/ZipU6f+6PYdd9zB1KlTmTBhAq+99lrZ8oSEBLKysgDY\nsWMHBQUFJCcnV0jdIhI8ZsY7S7OZMDeTw8U+xgzswMgLW1GtqtefI/7/FAoVIDMzk8GDBzN06FDa\ntWt3xs8zdOhQ9uzZc9Ixmzdv5plnnjnj1xARb2zZm8+4WeksWrubXkkNmHhNMq3janld1k8oFCpA\np06dWL9+fYU818iRI096f8+ePSvkdUQkOEp8xrSvN/LUh6up4uDRq7twfa9EqlSpnH3KwiYUzEzN\n4Cj9dxCRyiFr5wHunZHGss376XdOHI8NTaZFvcp95mBYhEJ0dDR79uyJ+PbZ//k+hejoaK9LEYlo\nRSU+Jn++jr99lkXNGlV57pfduLp7i5BYP4VFKMTHx5OdnU04Ncs7U//55jUR8UZ6di5/mfEDq7Yf\nYHDXZjx0VWca1arhdVnlFhahEBUVpW8aExFPFRaV8Nwna3h54Xoa1arBlF+fx886N/W6rNMWFqEg\nIuKlbzfsZXRqGht2H+K6ngmMHdSRujGh+T3pCgURkTN0oLCIJz9cxRuLN5PQIIY3R/amb9tGXpd1\nVhQKIiJnYMGqnYyblc72vEJ+d2Er/vyz9tSsHvqr1NCfgYhIEO09dIRH52Yy6/uttGtci9Tb+tAj\nsb7XZVUYhYKISDmYGXPTcnhozgpyC4q487J23N6/DTWqedvArqIpFERETmFHXiH3z87g48wddI2v\nyxsje9OxWR2vywoIhYKIyAmYGf/33RYem7eSI8U+xg3qwIi+lauBXUVTKIiIHMfmPfmMmZnGV+v2\n0LtVA568pitJjWK9LivgFAoiIkcp8Rn//HIDf/1oNdWqVGHC1V34VSVuYFfRFAoiIn5rdpQ2sFu+\nZT+XdmjMY0O70Kxu5W5gV9EUCiIS8Y4U+3jp83X8fcFaatWoxqTrunNVt+Yh0cCuoikURCSi/bBl\nP6NT01i1/QBXdWvOg7/oRMMQamBX0RQKIhKRCo6U8OzHq3n1iw3E1a7BKzemcHmnJl6X5TmFgohE\nnK/W7WbszHQ27cnnv3slMnZQB+pEh2YDu4qmUBCRiJFXWMQT81bxr28307JhTabf3Js+bUK7gV1F\nC3goOOeqAkuArWY2+Jj7+gHvAhv8i2aa2SOBrklEIs8nmTu4f3YGOw8UcvNFrfjTFecQUz28WlRU\nhGBsKdwFrARO9JnwRceGhYhIRdlz8DAPv5fJnB+20aFpbf7x6/PollDP67IqrYCGgnMuHrgSeAz4\nUyBfS0TkaGbGnB+28dCcFRw8XMzdl7fj9/3aUr1a+LaoqAiB3lJ4HrgXqH2SMX2cc2nAVuAeM1tx\n7ADn3ChgFEBiYmIg6hSRMJKTW8D9szL4dNVOuiXU46lrunJO05OthuQ/AhYKzrnBwE4zW+o/dnA8\ny4BEMzvonBsEzAbaHTvIzKYAUwBSUlIsQCWLSIjz+Yy3vtvCE/NWUuTzcf+VHbmpbyuqRkiLiooQ\nyC2FvsBV/pV9NFDHOfeGmd3w
nwFmlnfU9XnOuRedc43MbHcA6xKRMLRx9yHGzExj8fq99GnTkInD\nupLYsKbXZYWcgIWCmY0FxkLZWUb3HB0I/uVNgR1mZs65XkAVYE+gahKR8FNc4uO1LzfwzEdrqF61\nChOHJfPLngkR2aKiIgT9cwrOuVsBzGwyMBy4zTlXDBQA15mZdg+JSLms2p7H6Blp/JCdy+UdmzDh\n6i40rRvtdVkhzYXaOjglJcWWLFnidRki4qHDxSW8sGAdLy7Iom5MFA8P6cyVyc20dXASzrmlZpZy\nqnH6RLOIhJRlm/cxekYaa3ceZOi5LRg/uBMNYqt7XVbYUCiISEjIP1LMMx+t4bUvN9C0TjSv/TaF\nSzuogV1FUyiISKX3ZdZuxsxMY8veAm44P5HRAzpQWw3sAkKhICKVVm5BEU/MW8lb322hVaNY3hp1\nPue3buh1WWFNoSAildJHK7Zz/+wM9hw6wq2XtOHuy9sRHaUGdoGmUBCRSmXXgcM89N4K3k/LoUPT\n2rz6m54kx9f1uqyIoVAQkUrBzJi9fCsPv5dJ/uES/nxFe27t14aoqmpgF0wKBRHx3Nb9Bdw3K53P\nV++iR2I9nhrelbaN1cDOCwoFEfGMz2e8+e1mJs5bic/ggcGd+E2fJDWw85BCQUQ8sX7XQcakpvPt\nxr1c1K4Rjw9NJqGBGth5TaEgIkFVXOLj5UUbeO6TNURXq8LTw7sy/Lx4taioJBQKIhI0mdvyuDf1\nBzK25vHzzk14dEgXGtdRA7vKRKEgIgFXWFTC3z/LYvK/11GvZnVeur4HA5ObeV2WHIdCQUQCaumm\nvYxOTSdr50Gu6RHP+MEdqVdTDewqK4WCiATEocPFPD1/Na9/vZHmdWN4fUQvLmkf53VZcgoKBRGp\ncIvW7mLszHSy9xXwmwta8pcBHahVQ6ubUKDfkohUmNz8Iia8n8k7S7Np3SiWd269gJ5JDbwuS06D\nQkFEKsSHGdsZ/24Gew8d4ff92nDnZWpgF4oUCiJyVnYeKOShOSuYl76dTs3q8M/f9qRLCzWwC1UK\nBRE5I2ZG6rKtPDo3k4KiEv7y83MYdXFrNbALcQoFETlt2fvyGTcrg4VrdpHSsj4Tr+lK28a1vC5L\nKoBCQUTKzecz/nfxJp78cBUAjwzpzA29W1JFDezChkJBRMola+dBxqSmsWTTPi5q14gnhiUTX18N\n7MJNwEPBOVcVWAJsNbPBx9zngEnAICAf+K2ZLQt0TSJSfkUlPqYsXM+kT9cSE1WVZ/6rG8N6tFAD\nuzAVjC2Fu4CVQJ3j3DcQaOe/9AZe8v8UkUogY2su985IIzMnj4FdmvLwkM40rq0GduEsoKHgnIsH\nrgQeA/50nCFDgGlmZsBi51w951wzM8sJZF0icnKFRSVM+nQtUxaup0FsdSbfcB4DujT1uiwJgkBv\nKTwP3Auc6Hv1WgBbjrqd7V+mUBDxyHcb9zJ6Rhrrdx/i2pR47hvUibo1o7wuS4IkYKHgnBsM7DSz\npc65fmf5XKOAUQCJiYkVUJ2IHOvg4WKe+nAV077eRHz9GN74XW8ubNfI67IkyAK5pdAXuMo5NwiI\nBuo4594wsxuOGrMVSDjqdrx/2Y+Y2RRgCkBKSooFrmSRyPTvNbsYNzOdbbkF3NQ3iXt+dg6xamAX\nkQL2WzezscBYAP+Wwj3HBALAHOAO59xblB5gztXxBJHg2XfoCI++n8nMZVtpExfLjFsv4LyWamAX\nyYL+VsA5dyuAmU0G5lF6OmoWpaek3hTsekQikZkxL307D87JYH9+EX+4tC13XNqWGtXUwC7SBSUU\nzOxz4HP/9clHLTfg9mDUICKlduYVMv7dDOav2EFyi7pMG9GbTs2Pd8a4RCLtNBSJEGbGO0uzmTA3\nk8PFPsYM7MDIC1tRTQ3s5CgKBZEIsGVvPmNnpvNF1m56JTVg4jXJtI5TAzv5KYWCSBgr8Rmvf7WR\np+evpmoVx4Sru/CrXolqYCcnpFAQCVNrdxxgdGoayzbvp985cTw+NJnm9WK8LksqOYWCSJgpKvEx\n+fN1/O2zLGJrVOX5X3ZnSPfmamAn5aJQEAkjadn7uXdGGqu2H2Bw12Y8dFVnGtWq4XVZEkIUCiJh\noLCohOc+XsPLi9YTV7sGL9+YwhWdmnhdloQghYJIiFu8fg9jUtPYuCef63omMHZQR+rGqIGdnBmF\ngkiIOlBYxMQPVvHmN5tJaBDD9JG96dNWDezk7CgURELQglU7GTcrne15hYzo24p7ft6emtX131nO\nnv6KRELI3kNHeHRuJrO+30q7xrVIva0PPRLre12WhBGFgkgIMDPmpuXw0JwV5BYUceelbbldDewk\nABQKIpXcjrxC7p+dwceZO+gaX5c3b+5Nh6ZqYCeBoVAQqaTMjP/7bguPzVvJkWIf4wZ1YERfNbCT\nwFIoiFRCm/fkM2ZmGl+t20PvVg148pquJDWK9bosiQAKBZFKpMRn/PPLDfz1o9VUq1KFx4cmc13P\nBDWwk6BRKIhUEqu3lzawW75lP5d1aMyEoV1oVlcN7CS4FAoiHjtS7OPFz7N4YUEWtaOjmHRdd67q\npgZ24g2FgoiHfthS2sBu9Y4DDOnenAcGd6KhGtiJhxQKIh4oOFLCsx+v5tUvNtC4djSv/iaFyzqq\ngZ14T6EgEmRfrdvN2JnpbNqTz696JzJmYAfqRKuBnVQOCgWRIMkrLOKJeav417ebadmwJv+6+Xwu\naNPQ67JEfkShIBIEn67cwX2zMth5oJBRF7fmj5e3J6a6WlRI5aNQEAmgPQcP8/B7mcz5YRvnNKnN\nP359Ht0S6nldlsgJBSwUnHPRwEKghv91ZpjZg8eM6Qe8C2zwL5ppZo8EqiaRYDEz5vywjYffy+RA\nYRF/vLw9t/VrQ/VqalEhlVsgtxQOA5ea2UHnXBTwhXPuAzNbfMy4RWY2OIB1iARVTm4B98/K4NNV\nO+mWUI+nh3elfZPaXpclUi6nDAXn3B+AN8xs3+k8sZkZcNB/M8p/sdOuUCRE+HzGW99t4Yl5Kyny\n+bj/yo7c1LcVVdWiQkJIebYUmgDfOeeWAa8B8/0r/FNyzlUFlgJtgRfM7JvjDOvjnEsDtgL3mNmK\n8pUuUnls3H2IMTPTWLx+L33aNGTisK4kNqzpdVkip82VZ/3uSj9v/zPgJiAFeBt41czWletFnKsH\nzAL+YGYZRy2vA/j8u5gGAZPMrN1xHj8KGAWQmJh43qZNm8rzsiIBV1zi47UvN/DMR2uoXq0K91/Z\nkWtTEtSiQiod59xSM0s51bhyHfXybxls91+KgfrADOfcU+V8/H5gATDgmOV5ZnbQf30eEOWc+8k3\nj5vZFDNLMbOUuLi48rykSMCtzMlj2Etf8fi8VVzULo5P/nQJv+yZqECQkFaeYwp3ATcCu4FXgL+Y\nWZFzrgqwFrj3BI+LA4rMbL9zLga4AnjymDFNgR1mZs65XpSG1J6zmZBIoB0uLuGFz7J48
fN11I2J\n4u+/Opcrk5spDCQslOeYQgNgmJn9aJ+Nmfmccyc7a6gZ8Lr/uEIV4G0zm+ucu9X/+MnAcOA251wx\nUABcV97jFSJeWLZ5H6NnpLF250GGntuCBwZ3on5sda/LEqkw5TqmUJmkpKTYkiVLvC5DIkz+kWL+\nOn8N//xqA83qRPPYsGT6n9PY67JEyq28xxT0iWaRU/hi7W7GzEwje18BN5yfyOgBHaitBnYSphQK\nIieQW1DEY+9n8vaSbFo1iuX/Rp1P79ZqYCfhTaEgchzzV2xn/OwMdh88zC0Xt+aPV7QnOkoN7CT8\nKRREjrL74GEenLOC99Ny6NC0Nq/8JoWu8WpgJ5FDoSBCaQO7Wd9v5ZG5meQfLuHPV7Tn1n5tiKqq\nBnYSWRQKEvG27i/gvlnpfL56Fz0S6/HU8K60bawGdhKZFAoSsXw+481vNjHxg1X4DB78RSduvCBJ\nDewkoikUJCKt33WQManpfLtxLxe1a8TjQ5NJaKAGdiIKBYkoxSU+Xl60gec+WUN0tSo8Nbwr/3Ve\nvFpUiPgpFCRirNiWy+jUNDK25jGgc1MeubozjWtHe12WSKWiUJCwV1hUwt8+W8vkf6+nfs3qvHR9\nDwYmN/O6LJFKSaEgYW3ppr3cOyONdbsOcU2PeMYP7ki9mmpgJ3IiCgUJS4cOF/P0/NW8/vVGmteN\n4fURvbikvb6LQ+RUFAoSdhau2cXYmelsyy3gxvNb8pcBHahVQ3/qIuWh/ykSNvbnH2HC+yuZsTSb\n1nGxvHPLBaQkNfC6LJGQolCQsPBhRg73z17Bvvwj/L5fG+68rJ0a2ImcAYWChLSdBwp58N0VfJCx\nnc7N6zD1pp50aVHX67JEQpZCQUKSmTFjaTYT3l9JQVEJowd0YORFrdTATuQsKRQk5GzZm8+4Weks\nWrublJb1eXJ4V9rE1fK6LJGwoFCQkOHzGf+7eBNPfrgKBzwypDM39G5JFTWwE6kwCgUJCVk7DzIm\nNY0lm/ZxSfs4Hhvahfj6amAnUtEUClKpFZX4mLJwPZM+WUvNGlV59tpuDD23hRrYiQSIQkEqrYyt\nudw7I43MnDwGJTfl4au6EFe7htdliYQ1hYJUOoVFJUz6dC1TFq6nQWx1Jt9wHgO6NPW6LJGIELBQ\ncM5FAwuBGv7XmWFmDx4zxgGTgEFAPvBbM1sWqJqk8vtu415Gz0hj/e5D/DIlgXGDOlK3ZpTXZYlE\njEBuKRwGLjWzg865KOAL59wHZrb4qDEDgXb+S2/gJf9PiTAHDxfz1IermPb1JuLrx/DG73pzYbtG\nXpclEnECFgpmZsBB/80o/8WOGTYEmOYfu9g5V88518zMcgJVl1Q+C1bv5L6Z6eTkFXJT3yTu+dk5\nxKqBnYgnAvo/zzlXFVgKtAVeMLNvjhnSAthy1O1s/zKFQgTYd+gIj87NZOb3W2kTF8uMWy/gvJZq\nYCfipYCGgpmVAN2dc/WAWc65LmaWcbrP45wbBYwCSExMrOAqJdjMjHnp23lwTgb784u4o39b/nBZ\nW2pUUwM7Ea8FZRvdzPY75xYAA4CjQ2ErkHDU7Xj/smMfPwWYApCSknLsLigJITvzChn/bgbzV+wg\nuUVdpo3oTafmdbwuS0T8Ann2URxQ5A+EGOAK4Mljhs0B7nDOvUXpAeZcHU8IT2bGO0uyefT9TI4U\n+xg7sAO/u7AV1dTATqRSCeSWQjPgdf9xhSrA22Y21zl3K4CZTQbmUXo6ahalp6TeFMB6xCNb9uYz\ndmY6X2TtplerBkwclkxrNbATqZQCefZRGnDucZZPPuq6AbcHqgbxVonPeP2rjTw9fzVVHDx6dReu\n75WoBnYilZjO+5OAWLvjAKNT01i2eT/9zonj8aHJNK8X43VZInIKCgWpUEUlPiZ/vo6/fZZFbI2q\nPP/L7gzp3lwN7ERChEJBKkxa9n7unZHGqu0H+EW35jz4i040qqUGdiKhRKEgZ62wqITnPl7Dy4vW\n06hWDV6+MYUrOjXxuiwROQMKBTkri9fvYezMdDbsPsR/90pgzMCO1I1RAzuRUKVQkDNyoLCIiR+s\n4s1vNpPYoCbTR/amT1s1sBMJdQoFOW0LVu1k3Kx0duQVMvLCVvzpZ+2pWV1/SiLhQP+Tpdz2HjrC\nI++tYPbybbRvUosXr+/DuYn1vS5LRCqQQkFOycyYm5bDQ3NWkFdYxF2XteP2/m2pXk0tKkTCjUJB\nTmp7biH3z87gk5U76BZflyeH96ZDUzWwEwlXCgU5LjPjre+28Pj7Kyny+bhvUEdGXNiKqmpRIRLW\nFAryE5v2HGJMajpfr9/D+a0bMHFYV5IaxXpdlogEgUJBypT4jH9+uYG/frSaqCpVeHxoMtf1TFAD\nO5EIolAQAFZvP8C9qWn8sGU/l3VozIShXWhWVw3sRCKNQiHCHSn28eLnWbywIIva0VH8z3+fyy+6\nNlMDO5EIpVCIYMu37Gf0jDRW7zjAVf4Gdg3VwE4koikUIlDBkRKe+Wg1r325gca1o3n1Nylc1lEN\n7EREoRBxvlq3mzGp6Wzem8+veicyZmAH6kSrgZ2IlFIoRIi8wiKemLeSf327haSGNfnXzedzQZuG\nXpclIpWMQiECfJK5g/tmp7PrwGFuubg1d1/enpjqVb0uS0QqIYVCGNtz8DAPvZfJez9so0PT2kz5\ndQrdEup5XZaIVGIKhTBkZry7fBsPv7eCg4eL+ePl7bmtXxs1sBORU1IohJlt+wu4f3YGn63aSfeE\nejw1vCvtm9T2uiwRCREKhTDh8xnTv93MxA9WUeIzxg/uxG/7JKmBnYicloCFgnMuAZgGNAEMmGJm\nk44Z0w94F9jgXzTTzB4JVE3hasPuQ4xJTeObDXvp27YhTwztSmLDml6XJSIhKJBbCsXAn81smXOu\nNrDUOfexmWUeM26RmQ0OYB1hq7jEx6tfbODZj9dQvVoVnrwmmWtTEtSiQkTOWMBCwcxygBz/9QPO\nuZVAC+DYUJAzsDInj9GpaaRl53JFpyZMuLoLTepEe12WiIS4oBxTcM4lAecC3xzn7j7OuTRgK3CP\nma0IRk2h6nBxCS98lsWLn6+jXs0oXvhVDwYlN9XWgYhUiICHgnOuFpAK3G1mecfcvQxINLODzrlB\nwGyg3XGeYxQwCiAxMTHAFVdeSzftY3RqGlk7DzKsRwvGX9mJ+rHVvS5LRMKIM7PAPblzUcBcYL6Z\nPVuO8RuBFDPbfaIxKSkptmTJkoorMgTkHynm6fmrmfrVRprVieaxYcn0P6ex12WJSAhxzi01s5RT\njQvk2UcOeBVYeaJAcM41BXaYmTnnegFVgD2BqikUfbF2N2NnpbFlbwE3XtCSewd0oFYNnUksIoER\nyLVLX+DXQLpzbrl/2TggEcDMJgPDgducc8VAAXCdBXLTJYTkFhTx2PuZvL0km1aNYnn7lgvo1aqB\n12WJSJgL5NlHXwAnPfppZn8H/h6oGkLV/BXb
GT87gz2HjnBbvzbcdVk7oqPUwE5EAk/7ISqRXQcO\n89CcFbyfnkPHZnV49Tc9SY6v63VZIhJBFAqVgJkx6/utPDI3k/zDJfzl5+cw6uLWRFVVAzsRCS6F\ngse27i9g3Mx0/r1mF+e1rM+T13SlbeNaXpclIhFKoeARn89445tNPPnBKgx46BeduPGCJKqogZ2I\neEih4IF1uw4yJjWN7zbu46J2jXh8aDIJDdTATkS8p1AIouISHy8v2sBzn6whuloVnh7eleHnxatF\nhYhUGgqFIFmxLZfRqWlkbM1jQOemPHJ1ZxrXVgM7EalcFAoBVlhUwt8/y2Lyv9dRr2Z1Xrq+BwOT\nm3ldlojIcSkUAmjJxr2MTk1j3a5DXNMjnvGDO1KvphrYiUjlpVAIgEOHSxvYvf71RprXjWHaiF5c\n3D7O67JERE5JoVDBFq7ZxdiZ6WzLLeDG80sb2MWqgZ2IhAitrSrI/vwjTHh/JTOWZtM6LpZ3brmA\nlCQ1sBOR0KJQqAAfpOcw/t0V7Ms/wu392/CHS9XATkRCk0LhLOw8UMiD767gg4ztdG5eh9dH9KRz\nczWwE5HQpVA4A2bGjKXZTHh/JQVFamAnIuFDoXCatuzNZ9ysdBat3U3PpPpMvKYrbeLUwE5EwoNC\noZx8PmOwHbfqAAAHD0lEQVTa1xt5av5qHPDokM5c37ulGtiJSFhRKJRD1s4DjE5NZ+mmfVzSPo7H\nhyXTol6M12WJiFQ4hcJJFJX4mLJwPZM+WUtM9ao8e203hp7bQg3sRCRsKRROIGNrLvfOSCMzJ48r\nk5vx0FWdiatdw+uyREQCSqFwjMKiEiZ9upYpC9fTILY6k284jwFdmnpdlohIUCgUjvLthr2MSU1j\n/e5D/DIlgXGDOlK3ZpTXZYmIBI1CATh4uJgnP1jF/y7eRHz9GN74XW8ubNfI67JERIIu4kNhweqd\n3DcznZy8Qkb0bcU9P29PzeoR/88iIhEqYGs/51wCMA1oAhgwxcwmHTPGAZOAQUA+8FszWxaomo62\n79ARHp2byczvt9KucS1m3NqH81rWD8ZLi4hUWoF8S1wM/NnMljnnagNLnXMfm1nmUWMGAu38l97A\nS/6fAWNmvJ+ew4PvriC3oIg7L23L7Ze2pUY1NbATEQlYKJhZDpDjv37AObcSaAEcHQpDgGlmZsBi\n51w951wz/2Mr3I68QsbPzuCjzB0kt6jLGyN707FZnUC8lIhISArKznPnXBJwLvDNMXe1ALYcdTvb\nv6zCQ2HBqp3c+db3HCn2MXZgB353YSuqqYGdiMiPBDwUnHO1gFTgbjPLO8PnGAWMAkhMTDyjOlo1\niqVHYn0euqozrRrFntFziIiEu4C+VXbORVEaCG+a2czjDNkKJBx1O96/7EfMbIqZpZhZSlzcmX3X\ncVKjWF4f0UuBICJyEgELBf+ZRa8CK83s2RMMmwPc6EqdD+QG6niCiIicWiB3H/UFfg2kO+eW+5eN\nAxIBzGwyMI/S01GzKD0l9aYA1iMiIqcQyLOPvgBO2k7Uf9bR7YGqQURETo9OvxERkTIKBRERKaNQ\nEBGRMgoFEREpo1AQEZEyrvQEoNDhnNsFbDrDhzcCdldgOaFAc44MmnNkOJs5tzSzU376N+RC4Ww4\n55aYWYrXdQST5hwZNOfIEIw5a/eRiIiUUSiIiEiZSAuFKV4X4AHNOTJozpEh4HOOqGMKIiJycpG2\npSAiIicRlqHgnBvgnFvtnMtyzo05zv3OOfc//vvTnHM9vKizIpVjztf755runPvKOdfNizor0qnm\nfNS4ns65Yufc8GDWFwjlmbNzrp9zbrlzboVz7t/BrrGileNvu65z7j3n3A/+OYd0t2Xn3GvOuZ3O\nuYwT3B/Y9ZeZhdUFqAqsA1oD1YEfgE7HjBkEfEBpF9fzgW+8rjsIc+4D1PdfHxgJcz5q3GeUtmkf\n7nXdQfg916P0e9AT/bcbe113EOY8DnjSfz0O2AtU97r2s5jzxUAPIOME9wd0/RWOWwq9gCwzW29m\nR4C3gCHHjBkCTLNSi4F6zrlmwS60Ap1yzmb2lZnt899cTOm33IWy8vyeAf5A6bf/7QxmcQFSnjn/\nCphpZpsBzCzU512eORtQ2//FXrUoDYXi4JZZccxsIaVzOJGArr/CMRRaAFuOup3tX3a6Y0LJ6c7n\nd5S+0whlp5yzc64FMBR4KYh1BVJ5fs/tgfrOuc+dc0udczcGrbrAKM+c/w50BLYB6cBdZuYLTnme\nCOj6K5DfvCaVkHOuP6WhcKHXtQTB88BoM/OVvomMCNWA84DLgBjga+fcYjNb421ZAfVzYDlwKdAG\n+Ng5t8jM8rwtKzSFYyhsBRKOuh3vX3a6Y0JJuebjnOsKvAIMNLM9QaotUMoz5xTgLX8gNAIGOeeK\nzWx2cEqscOWZczawx8wOAYeccwuBbkCohkJ55nwTMNFKd7hnOec2AB2Ab4NTYtAFdP0VjruPvgPa\nOedaOeeqA9cBc44ZMwe40X8U/3wg18xygl1oBTrlnJ1zicBM4Ndh8q7xlHM2s1ZmlmRmScAM4Pch\nHAhQvr/td4ELnXPVnHM1gd7AyiDXWZHKM+fNlG4Z4ZxrApwDrA9qlcEV0PVX2G0pmFmxc+4OYD6l\nZy68ZmYrnHO3+u+fTOmZKIOALCCf0ncaIaucc34AaAi86H/nXGwh3EysnHMOK+WZs5mtdM59CKQB\nPuAVMzvuqY2hoJy/50eBqc65dErPyBltZiHbPdU59y+gH9DIOZcNPAhEQXDWX/pEs4iIlAnH3Uci\nInKGFAoiIlJGoSAiImUUCiIiUkahICIiZRQKIiJSRqEgIiJlFAoiZ8n/fQ1pzrlo51ysv6d/F6/r\nEjkT+vCaSAVwzk0AoiltQpdtZk94XJLIGVEoiFQAf1+e74BCoI+ZlXhcksgZ0e4jkYrRkNIveKlN\n6RaDSEjSloJIBXDOzaH0W8FaAc3M7A6PSxI5I2HXJVUk2PzfblZkZtOdc1WBr5xzl5rZZ17XJnK6\ntKUgIiJldExBRETKKBRERKSMQkFERMooFEREpIxCQUREyigURESkjEJBRETKKBRERKTM/wMY2i8P\nOiX0cgAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f3cce2f9cd0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "a, b = 3, 2\n",
-    "f = lambda x: a * x + b\n",
-    "gx = np.linspace(0.,1,100)\n",
-    "gy = [f(x) for x in gx]\n",
-    "plt.plot(gx, gy,  label='y=f(x)')\n",
-    "plt.xlabel('x')\n",
-    "plt.ylabel('y')\n",
-    "plt.legend(loc='best')\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Generating the trainin data\n",
-    "\n",
-    "Then we generate the training data points by adding a random error to sampling points from the ground truth line.\n",
-    "30 data points are generated."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[<matplotlib.lines.Line2D at 0x7f3c7b71a410>]"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFcVJREFUeJzt3X+MZWdZwPHvQ7e4pVscYhfYdjsulPoLYwFna1OJWTCM\ndGls3PSPpkIjMVkWKsGo8dcfEGmMmhiDZJVNRaJEKzG6CxULioFKCRa6W+nSUjS7S7G73Vha3cJs\nZzUDj3+cM/bu7Z2Zc+/ce8+P+X6Syb33nDP3Pj3pPvfMc573fSMzkSR11/PqDkCSNFkmeknqOBO9\nJHWciV6SOs5EL0kdZ6KXpI4z0UtSx5noJanjTPSS1HGb6vrgSy+9NHfs2FHXx0tSKx05cuTJzNw6\nzO9USvQR8SjwLeDbwFJmzvXt3wV8DPhauelgZr53tffcsWMHhw8fHiZWSdrwIuLrw/7OMFf0r8vM\nJ1fZf29m3jBsAJKkybJGL0kdVzXRJ/BPEXEkIvaucMx1EXE0Ij4REa8cU3ySpHWqWrp5bWaeiogX\nA5+KiK9m5md79j8AzGbmQkTsBj4KXNX/JuWXxF6A2dnZdYYuSaqi0hV9Zp4qH58ADgHX9O3/ZmYu\nlM/vBi6MiEsHvM8dmTmXmXNbtw5101iSWm9xEe68E26/vXg8d246n7vmFX1EXAw8LzO/VT6fB97b\nd8xLgf/MzIyIayi+QJ6aRMCS1Eb33w/z87C0BGfPwsUXw223wT/+I+zcOdnPrlK6eQlwKCKWj78z\nMz8ZEfsAMvMAcBPw9ohYAhaBm9OlqyQJKK7k5+fhzJlnty0sFI/z83D6NGzePLnPXzPRZ+YJ4OoB\n2w/0PN8P7B9vaJLUDYcOFVfygywtwcGDcMstk/v82kbGStKoFheL5Hn8OFx5JezZM9kr4vU6frwo\n1wxy9iycODHZzzfRS2qVOmvdo7ryyiLO5XJNr4svhpe/fLKf74ApSa3RW+teWIDM4vHMmWL7tLpY\nhrVnD2xa4bJ606Zi/ySZ6CW1RpVadxNt3lz8xTEzA1u2QETxODNTbJ902cnSjaTWqLvWvR47d8Lj\njxdfVidOFOWaad1bMNFLao26a93rddFFk+2uWYmlG0mtUXetu61M9JJao+5ad1tZupHUKnXWutvK\nRC+pdeqqdbeVpRtJ6jgTvSR1nIlekjrORC9JHWeil6SOM9FLUseZ6CWp40z0ktRxJnpJ6jgTvSR1\nnIlekjrORC9JHWeil6SOqzR7ZUQ8CnwL+DawlJlzffsD+ENgN/AM8HOZ+cB4Q5Wk+i0uFlMkHz9e\nrHjVhimSh5mm+HWZ+eQK+64Hrip/fgz4QPkoSSNpYkK9/36Yny8WIj97tli+8LbbikVPdu6sN7bV\njGs++huBD2dmAvdFxExEbMvM02N6f0kbSBMT6uJiEdOZM89uW167dn4eTp+u/4toJVVr9An8U0Qc\niYi9A/ZfDjzW8/pkuU2ShtKbUBcWILN4PHOm2H7uXD1xHTpUfPEMsrQEBw9ON55hVE30r83MV1GU\naG6LiJ8Y5cMiYm9EHI6Iw9/4xjdGeQtJHdfUhHr8ePHXxSBnzxbLGjZVpUSfmafKxyeAQ8A1fYec\nAq7oeb293Nb/Pndk5lxmzm3dunW0iCV1WlMT6pVXFiWkQS6+uFi7tqnWTPQRcXFEXLL8HJgHHuo7\n7C7g1ihcCzxtfV7SKJqaUPfsgU0r3NXctKnY31RVruhfAnwuIh4Evgj8fWZ+MiL2RcS+8pi7gRPA\nMeBPgHdMJFpJndfUhLp5c3EzeGYGtmyBiOJxZqbY3tQbsVCh6yYzTwBXD9h+oOd5AreNNzRJG9Fy\nQu3vutm0qf6EunMnPP54cR/hxInir4smtH2uZVztlZI0Nk1OqBddBLfcUncUwzHRS2qkNibUpnKu\nG0nqOBO9JHWcpRtJjdO0eW6aFs+wTPSSGqVp89w0LZ5RRNEZOX1zc3N5+PDhWj5bUjMtLsJll50/\ncdiymZnpTxzWtHgAIuJI/1Txa7FGL6kxmjbPTdPiGZWJXlJjNG2em6bFMypr9JIaY3mem+V53nsN\nmudm0jdJh42nqazRS2qMc+dg27ZqNfFBN0mXp0kY103SYeKZFmv0klqt6sRh01qcpM0TmfWydCOp\nUarMc1PlJum4pk9o8rw7VZnoJdVuUK19tUQ97ZukbZ93x0QvqVajDEjqyk3SabFGL6k2q9Xad+0a\nfBMUmrs4SVOZ6CXVZrVa+zPPwOWXF1f8/bpyk3RaLN1Iqs1qtXYokv38/OA2xi7cJJ0WE73UcU2e\neXG1Wvuy1bpo2n6TdFpM9FKHNX3mxT17inhW06apBprKGr3UUdMaVLQey7X2F7xg5WPsolk/E73U\nUW2ZeXHnTjh5cuVkbxfN+pnopY5q08yLL3oR3HOPXTSTUrlGHxEXAIeBU5l5Q9++XcDHgK+Vmw5m\n5nvHFaSk4bVtUJFdNJMzzM3YdwGPAC9cYf+9/V8Akuqz2o3OppZD7KKZjEqlm4jYDrwJ+OBkw5E0\nLg4q0rKqV/TvA34VuGSVY66LiKPAKeBXMvPh9QYnNVGT+9L7WQ4RVEj0EXED8ERmHilr8YM8AMxm\n5kJE7AY+Clw14L32AnsBZmdnRw5aqkvT+9IHsRyiNVeYiojfAd4CLAGbKWr0BzPzzav8zqPAXGY+\nudIxrjCltllchMsua9ZqQ9p4JrLCVGb+RmZuz8wdwM3Ap/uTfES8NCKifH5N+b5PDROI1HRt6UuX\n+o08BUJE7APIzAPATcDbI2IJWARuzroWo5UmpE196VKvoRJ9Zt4D3FM+P9CzfT+wf5yBSU3Ttr50\naZkjY6WKXOxCbWWilyqyL11t5TTF0hDsS1cbmeilIdmXrraxdCNJHWeil6SOM9FLUseZ6CWp40z0\nktRxJnpJ6jgTvSR1nIlekjrORC9JHefIWKkD2rS8oabPRC+1XBuXN9R0meilFltcLJJ87/KGy/Pl\nz8+7vKEK1uilFnN5Q1VhopdazOUNVYWJXmqx5eUNB3F5Qy0z0asRFhfhzjvh9tuLx3Pn6o6oHVze\nUFV4M1a1s2tkdMvLG/afv02bXN5QzzLRq1Z2jayfyxtqLSZ61apK14jL9q3N5Q21Gmv0qpVdI9Lk\nVU70EXFBRPxrRHx8wL6IiPdHxLGIOBoRrxlvmOoqu0akyRvmiv5dwCMr7LseuKr82Qt8YJ1xaYOw\na0SavEo1+ojYDrwJ+G3glwYcciPw4cxM4L6ImImIbZl5enyhatp6J8q64opi22OPjXfSLLtGpMmr\nejP2fcCvApessP9y4LGe1yfLbecl+ojYS3HFz+zs7FCBarr6Wx4zn923Zct42x83YteIs01qmtZM\n9BFxA/BEZh6JiF3r+bDMvAO4A2Bubi7XOFw1GdTy2GsS7Y8bqWvEcQOatio1+h8HfjoiHgU+Arw+\nIv6i75hTwBU9r7eX29RCq7U8
9nLSrOH1fokuLBR/KS0sFK/n5x0RrMlYM9Fn5m9k5vbM3AHcDHw6\nM9/cd9hdwK1l9821wNPW59trtZbHXrY/Ds/ZJlWHkQdMRcQ+gMw8ANwN7AaOAc8Abx1LdKrFcsvj\ncolmJbY/Ds9xA6rDUIk+M+8B7imfH+jZnsBt4wxM9dmzp6gZr8X2x+Gt9iXqF6cmxZGxeo7llseZ\nmaLDJuL8/Vu2FPtsfxye4wZUB+e60UD9LY/btxfbT57cGO2Pk+K4AdXBRK8VbaSWx2naiOMGVC8T\nvVQDv0Q1TdboJanjTPSS1HEmeknqOGv0G0SbJtFqU6xSG5joN4A2TaLVpliltojMeiaRnJuby8OH\nD9fy2RvJ4iJcdtngmShnZpq1+HabYpXqEhFHMnNumN+xRt9xbZpEq02xSm1iou+4Nk2i1aZYpTYx\n0XdcmxbfblOsUptYo2+hYbpSzp2DbdvWV/eeVhfMOGKVum6UGr1dNy0zbFfKeifRmmYXTJMm/LLF\nU13iFX2LrKcrZTlxDTOJVl1dMKPEOk6DvtyWv2xs8VTdvKLvuCpdKStNlDXKJFrr+bz1qHPCr0EL\no09iMXRpmrwZ2yLj6EpZXIQ774Tbby8eV1uMeiN2wdjiqS7yir5F1rsM3bD19o247N1G/HJT93lF\n3yLrWYautySxsACZxeOZM8X2QVf2bVn2bpi/UtZii6e6yETfIoPWcq26fusoJYn1fN603H9/ccP4\nbW+D97yneNy2rdg+irZ8uUnDsHTTMqMuQzdqSaLJy95N4sZpk1o8pXEx0bfQKF0p66m3N3XZu0l1\nBTX5y00axZqJPiI2A58Fvqs8/m8y8z19x+wCPgZ8rdx0MDPfO95QtR579hQ3Xgdpa0likjdOm/rl\nJo2iyhX9/wCvz8yFiLgQ+FxEfCIz7+s77t7MvGH8IWoculiS2IhdQdIo1kz0WQydXf6ndGH5U89w\nWq1L10oSXfwrRZqESjX6iLgAOAK8AvijzPzCgMOui4ijwCngVzLz4QHvsxfYCzA7Ozty0Bpdl0oS\nXfwrRZqEoea6iYgZ4BDwzsx8qGf7C4HvlOWd3cAfZuZVq72Xc920UxMn+6p7bhxpmkaZ62boSc0i\n4t3AM5n5+6sc8ygwl5lPrnSMib59nOxLqt9ElhKMiK3llTwRcRHwBuCrfce8NCKifH5N+b5PDROI\nmm2UkbWSmqHKyNhtwGfK+vv9wKcy8+MRsS8i9pXH3AQ8FBEPAu8Hbs665j/WRDjZl9ReVbpujgKv\nHrD9QM/z/cD+8YamJnGyL6m9nOtGlTjZl9ReJnpV4mRfUnuZ6FVJG2aylDSYk5qpsq6NrJU2ChO9\nhtKlkbXSRmHpRpI6ziv6ITRx+L8krcVEX9GwC2tLUlOY6CuYxJJ1kjQt1ugrcPi/pDYz0Vfg8H9J\nbWair8Dh/5LazERfgcP/JbWZib4Ch/9LajO7bipy+L+ktjLRD8Hh/5LayNKNJHWcV/Qd4hQNkgYx\n0TfIehK1UzRIWknUtYb33NxcHj58uJbPbqJBiXrTpmqJenERLrvs/Ckals3MOEWD1CURcSQz54b5\nHWv0DdA7l87CAmQWj2fOFNvPnVv9952iQdJqTPQNsN5E7RQNklZjom+A9SZqp2iQtJo1E31EbI6I\nL0bEgxHxcET81oBjIiLeHxHHIuJoRLxmMuF203oTtVM0SFpNlSv6/wFen5lXA68C3hgR1/Ydcz1w\nVfmzF/jAWKPsuPUmaqdokLSaNdsrs2jLKZfZ4MLyp79V50bgw+Wx90XETERsy8zTY422o5YT9Upd\nN1UStVM0SFpJpT76iLgAOAK8AvijzPxC3yGXA4/1vD5ZbjPRVzSORO0UDZIGqZToM/PbwKsiYgY4\nFBE/nJkPDfthEbGXorTD7OzssL/eeSZqSZMwVNdNZp4BPgO8sW/XKeCKntfby239v39HZs5l5tzW\nrVuHjVWSNIIqXTdbyyt5IuIi4A3AV/sOuwu4tey+uRZ42vq8JDVDldLNNuDPyzr984C/zsyPR8Q+\ngMw8ANwN7AaOAc8Ab51QvJKkIVXpujkKvHrA9gM9zxO4bbyhSZLGwZGxktRxJnpJ6jgTvSR1nAuP\n9HGVJkldY6Lv4SpNkrrIRF/qXfxj2UI5w8/8vKs0SWova/QlV2mS1FUm+pKrNEnqKhN9yVWaJHWV\nib7kKk2SuspEX3KVJkldZddND1dpktRFnU30ow58cvEPSV3TyUTvwCdJelbnEr0DnyTpfJ27GevA\nJ0k6X+cSvQOfJOl8nUv0DnySpPN1LtE78EmSzte5RO/AJ0k6X+e6bsCBT5LUq5OJHhz4JEnLOle6\nkSSdb81EHxFXRMRnIuIrEfFwRLxrwDG7IuLpiPhS+fPuyYQrSRpWldLNEvDLmflARFwCHImIT2Xm\nV/qOuzczbxh/iJKk9Vjzij4zT2fmA+XzbwGPAJdPOjBJ0ngMVaOPiB3Aq4EvDNh9XUQcjYhPRMQr\nxxCbJGkMKnfdRMQW4G+BX8zMb/btfgCYzcyFiNgNfBS4asB77AX2AszOzo4ctCSpukpX9BFxIUWS\n/8vMfM60YJn5zcxcKJ/fDVwYEZcOOO6OzJzLzLmtW7euM3RJUhVVum4C+FPgkcz8gxWOeWl5HBFx\nTfm+T40zUEnSaKqUbn4ceAvw5Yj4UrntN4FZgMw8ANwEvD0iloBF4ObMzAnEK0ka0pqJPjM/B8Qa\nx+wH9o8rKEnS+DgyVpI6rlVz3Yy64LckbWStSfQu+C1Jo2lFonfBb0kaXStq9C74LUmja0Wid8Fv\nSRpdKxK9C35L0uhakehd8FuSRteKRO+C35I0ulZ03YALfkvSqFqT6MEFvyVpFK0o3UiSRmeil6SO\nM9FLUseZ6CWp46Ku9UEi4hvA12v58Mm7FHiy7iAayPPyXJ6TwTwvz7V8Tr43M4dai7W2RN9lEXE4\nM+fqjqNpPC/P5TkZzPPyXOs5J5ZuJKnjTPSS1HEm+sm4o+4AGsrz8lyek8E8L8818jmxRi9JHecV\nvSR1nIl+HSLijRHxbxFxLCJ+fcD+n42IoxHx5Yj4fERcXUec07TWOek5bmdELEXETdOMry5VzktE\n7IqIL0XEwxHxz9OOcdoq/Pv57oj4u4h4sDwnb60jzmmKiA9FxBMR8dAK+yMi3l+es6MR8ZpKb5yZ\n/ozwA1wAHAdeDjwfeBD4ob5jrgNeVD6/HvhC3XHXfU56jvs0cDdwU91xN+G8ADPAV4DZ8vWL6467\nAefkN4HfK59vBf4LeH7dsU/4vPwE8BrgoRX27wY+AQRwbdWc4hX96K4BjmXmicz8X+AjwI29B2Tm\n5zPzv8uX9wHbpxzjtK15TkrvBP4WeGKawdWoynm5BTiYmf8BkJldPzdVzkkCl0REAFsoEv0Kq0d3\nQ2Z+luK/cyU3Ah/Own3ATERsW+t9TfSjuxx4rOf1yXLbSn6e4pu4y9Y8JxFxOfAzwAemG
Ffdqvy/\n8n3AiyLinog4EhG3Ti26elQ5J/uBHwQeB74MvCszvzOd8Bpr2LwDtGw++raKiNdRJPrX1h1LA7wP\n+LXM/E5xoabSJuBHgZ8ELgL+JSLuy8x/rzesWv0U8CXg9cCVwKci4t7M/Ga9YbWPiX50p4Arel5v\nL7edJyJ+BPggcH1mPjWl2OpS5ZzMAR8pk/ylwO6IWMrMj04nxFpUOS8ngacy8yxwNiI+C1wNdDXR\nVzknbwV+N4vi9LGI+BrwA8AXpxNiI1XKO/0s3YzufuCqiHhZRDwfuBm4q/eAiJgFDgJv2SBXZmue\nk8x8WWbuyMwdwN8A7+h4kocK5wX4GPDaiNgUES8Afgx4ZMpxTlOVc/IfFH/hEBEvAb4fODHVKJvn\nLuDWsvvmWuDpzDy91i95RT+izFyKiF8A/oGig+BDmflwROwr9x8A3g18D/DH5RXsUnZ4oqaK52TD\nqXJeMvORiPgkcBT4DvDBzBzYYtcFFf9fuR34s4j4MkWXya9lZqdntIyIvwJ2AZdGxEngPcCF8P/n\n5G6KzptjwDMUf/Ws/b5ly44kqaMs3UhSx5noJanjTPSS1HEmeknqOBO9JHWciV6SOs5EL0kdZ6KX\npI77P/o8W9CbuMwEAAAAAElFTkSuQmCC\n",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f3c7ec4bcd0>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "nb_points = 30\n",
-    "\n",
-    "# generate training data\n",
-    "train_x = np.asarray(np.random.uniform(0., 1., nb_points), np.float32)\n",
-    "train_y = np.asarray(f(train_x) + np.random.rand(30), np.float32)\n",
-    "plt.plot(train_x, train_y, 'bo', ms=7)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Training via SGD\n",
-    "\n",
-    "Assuming that we know the training data points are sampled from a line, but we don't know the line slope and intercept. The training is then to learn the slop (k) and intercept (b) by minimizing the error, i.e. ||kx+b-y||^2. \n",
-    "1. we set the initial values of k and b (could be any values).\n",
-    "2. we iteratively update k and b by moving them in the direction of reducing the prediction error, i.e. in the gradient direction. For every iteration, we plot the learned line."
-   ]
-  },
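
In step 2, the gradients come from the squared error L(k, b) = sum_i (k*x_i + b - y_i)^2, giving dL/dk = 2 * sum_i x_i * (k*x_i + b - y_i) and dL/db = 2 * sum_i (k*x_i + b - y_i). A minimal NumPy sketch of one update, with the sgd_step helper name ours (the notebook performs the equivalent with SINGA tensors):

    import numpy as np

    def sgd_step(x, y, k, b, lr=0.05):
        e = k * x + b - y            # prediction error of the current line
        gk = 2.0 * (x * e).mean()    # dL/dk, averaged over the batch
        gb = 2.0 * e.mean()          # dL/db, averaged over the batch
        # step against the gradient to reduce the error
        return k - lr * gk, b - lr * gb, (e ** 2).mean()

    # e.g., starting from the notebook's init k, b = 2.0, 0.0:
    # for i in range(15):
    #     k, b, loss = sgd_step(train_x, train_y, k, b)
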
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "def plot(idx, x, y):\n",
-    "    global gx, gy, axes\n",
-    "    # print the ground truth line\n",
-    "    axes[idx/5, idx%5].plot(gx, gy, label='y=f(x)')     \n",
-    "    # print the learned line\n",
-    "    axes[idx/5, idx%5].plot(x, y, label='y=kx+b')\n",
-    "    axes[idx/5, idx%5].legend(loc='best')\n",
-    "\n",
-    "# set hyper-parameters\n",
-    "max_iter = 15\n",
-    "alpha = 0.05\n",
-    "\n",
-    "# init parameters\n",
-    "k, b = 2.,0."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "SINGA tensor module supports basic linear algebra operations, like `+ - * /`, and advanced functions including axpy, gemm, gemv, and random function (e.g., Gaussian and Uniform).\n",
-    "\n",
-    "SINGA Tensor instances could be created via **tensor.Tensor()** by specifying the shape, and optionally the device and data type. Note that every Tensor instance should be initialized (e.g., via **set_value()** or random functions) before reading data from it. You can also create Tensor instances from numpy arrays,\n",
-    "\n",
-    "* numpy array could be converted into SINGA tensor via **tensor.from_numpy(np_ary)** \n",
-    "* SINGA tensor could be converted into numpy array via **tensor.to_numpy()**; Note that the tensor should be on the host device. tensor instances could be transferred from other devices to host device via **to_host()**\n",
-    "\n",
-    "Users cannot read a single cell of the Tensor instance. To read a single cell, users need to convert the Tesnor into a numpy array.\n"
-   ]
-  },
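
A short end-to-end sketch of these conventions (illustrative only; it assumes just the calls named above plus the uniform initializer used by the RBM cell earlier in this diff):

    import numpy as np
    from singa import tensor

    t = tensor.Tensor((2, 3))   # shape only; device and dtype are optional
    t.set_value(0.0)            # initialize before reading
    t.uniform(0.0, 1.0)         # or fill it from a random distribution

    a = np.ones((2, 3), dtype=np.float32)
    t2 = tensor.from_numpy(a)   # numpy -> SINGA tensor
    t2.to_host()                # to_numpy requires the tensor on the host device
    a2 = tensor.to_numpy(t2)    # SINGA tensor -> numpy
    print(a2[0, 1])             # read a single cell via the numpy array
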
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "loss at iter 0 = 8.897375\n",
-      "loss at iter 1 = 7.806258\n",
-      "loss at iter 2 = 6.850243\n",
-      "loss at iter 3 = 6.012600\n",
-      "loss at iter 4 = 5.278670\n",
-      "loss at iter 5 = 4.635614\n",
-      "loss at iter 6 = 4.072176\n",
-      "loss at iter 7 = 3.578499\n",
-      "loss at iter 8 = 3.145944\n",
-      "loss at iter 9 = 2.766943\n",
-      "loss at iter 10 = 2.434863\n",
-      "loss at iter 11 = 2.143896\n",
-      "loss at iter 12 = 1.888950\n",
-      "loss at iter 13 = 1.665564\n",
-      "loss at iter 14 = 1.469831\n"
-     ]
-    },
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAArwAAAHVCAYAAAATqShMAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XlcVNX7wPHPFUEWFwTFDRF3QwXctyy1PS3TzFyq7zez\nrLTE8mv1s6zMNsv2faVUxL3S1DYttUVTFnEXFRVBdpCdYeb8/kBLZIABZud5v168ksPM3OfOPN15\n7j3nnqMppRBCCCGEEMJZNbB1AEIIIYQQQliSFLxCCCGEEMKpScErhBBCCCGcmhS8QgghhBDCqUnB\nK4QQQgghnJoUvEIIIYQQwqlJwSuEEEIIIZyaFLxCCCGEEMKpScErhBBCCCGcWkNLvGiLFi1UYGCg\nJV5aWNnevXvTlVItLfHakifORXJFmELyRJhKckWYwtQ8sUjBGxgYyJ49eyzx0sLKNE07ZanXljxx\nLpIrwhSSJ8JUkivCFKbmiQxpEEIIIYQQTs2kK7yapiUAuYAeKFVK9bdkUMJxSa4IU0ieCFNJrghT\nSJ6I6tRkSMNIpVS6xSIRNpWRV0xj94Y0auhijpeTXHFi53KKaN3M3RwvJXnixHIKdLg21PB0M8vI\nOckVJybHFGGKvOJSDErR1N21Vs+3yBhe4Vh+OpjCU+v2MbF/e+bd2MOmseh0OhITEykqKrJpHI7C\n3d0df39/XF1rdwCoifziUp7fcIDNcefYMucq2nl7WHybVZFcMZ018wRg76lMHl0Rw1XdWvLy+N5W\n2WZlJE9qxpq5otMbeP3HI4T/nsC3s4bRo3VTi2+zyngkV0xm7WNKfGouM5bupYtfYz6+u3YX700t\neBXws6ZpeuBjpdQnlz9A07QHgAcAAgICahWMsK7cIh0vbDzIqj2JXNGmKbeGtjXHy1aZK9XlSWJi\nIk2aNCEwMBBN08wRj9NSSpGRkUFiYiIdO3a06LZiz2QzOzKaU5kFPDyiM35NGtX1Jet8TJFcMY01\n80RvUHz023He+Oko7bw9uHNAe3O8rBxTrMSauZKcU8gjEdHsOZXF1EEBBPp61fUl5ZhiJdbME4Dv\n9yUzb00sHm4u/GdoYK1fx9SC90ql1FlN0/yAnzRNO6yU2n7pAy4k1ycA/fv3V7WOSFjFXycymLs6\nlqTsQh4e0Zmwa7vh1tAs9zBWmSvV5UlRUZEcbEykaRq+vr6kpaVZbBsXC5g3fzqKX5NGRN4/mEGd\nfM3x0nU+pkiumMYaeQKQmlvEYytj2RmfzpjgNrw0vnetux4vI8cUK7FWrmw/mkbYyhiKdHrenhTK\n2NB25nhZOaZYibXyRKc38Ormw3y28yR9A7x5f2pf2jSrfc+iSQWvUurshf+mapq2HhgIbK/6WcIe\nFen0LPnxCJ/tPEkHH09WPziUfh2am+31zZErcrAxnSXfq7PZhcyJjGF3Qiajg9vw0m29aeZpnu4r\ncx1TJFdMY+n36bejaTy+Koa84lJeGd+bOwe0N9s25ZhiXZZ8r/QGxVs/H+W9bfF082vC+1P70sWv\nsVleW44p1mXp9yk1t4hZEdHsPpnJf4Z0YP7ooDpflKu24NU0zQtooJTKvfDv64GFddqqsIn9Z3N4\nbFUMR1PymDoogPmjrzDXDSWA5Ioz+S42ifnr4zAYFEvuCGF833ZmO8BJnjiPi2MwP/7tBN1bNWHF\n/YPp2qqJ2V5fcsV5pOYW8eiKaP46kcnE/v48f2svPNzMcpO05ImT+Tshk5nLozhfpOOtO0O5rY9Z\negBMmoe3FbBT07RYYDfwvVJqi1m2LqyiVG/g3V+Ocdv7v5NdoCP83gG8OK63WYvdC+ptruzYsYOe\nPXsSGhpKYWEhycnJjBkzpsrnbNy4kQULFlgpQtPkFul4bGUMj66IpqtfYzbPvorb+/mb+2y+3uYJ\nOE+unM4oYMJHf/LxbyeYMiiAb2cNM2uxe0G9zRVnyROAP46nc/PbO4k5k81rE4JZPCHEbMXuBfU2\nT8B5ckUpxRc7TzL5k7/wdHPhm5nDzFbs/rMBc//069dPCftwPDVXjX1vp+rwxEY1KyJKZeUX1+j5\nwB5lgRxRleTJwYMHa76TdmDGjBlq6dKl//w+d+5c9c0331T5HIPBoEJDQ1V+fn6dtm2u92xPQoa6\n8tVfVMcnN6o3fjyidKX6Gj1fcsU0tsoVc75fG2LPql4Ltqhez25R3+9LqtFzJU9M4wzHFL3eoN7+\n+ajq+ORGNer1bepw8vkaPV9yxTTOcEzJK9KpWRFRqsMTG9X0r/5WOYUlJj/X1DyRacmclMGgWLbr\nFC9tOkSjhi68O7kPt4SYZRYGq3l+wwEOJp0362sGtW3Ks7f0rPTvCxYswMfHh7CwMADmz5+Pn58f\ns2fPrvQ5n332GatWreKHH35g8+bNLF++nLVr17Jo0SIA3nzzTeLi4vjiiy+Ii4tj8uTJ7N69G09P\nT0aMGMHGjRuZOHGiWfezJkr1Bt7dGs+7W4/R1tuD1Q8OoV8HH5vFUxuSK9ZRWKJn4cYDrNh9hj4B\n3rwzqQ/tfTxtFk9NSZ5YT0ZeMWErY9hxLJ2xoW15aVxvvBo5TskhuWI9x9PyeHDpXo6n5fG/G7rz\n0NWdadDA/GOEHSf7hMmScwqZt2YfO46lc1W3lrw2IZhWTc0yqbfTmzZtGuPHjycsLAyDwUBkZCRb\nt24lNDTU6OMjIiKYPn06O3fuZMyYMUyYMIGTJ0/SvHlzGjUqm7pr9uzZjBgxgvXr1/Piiy/y8ccf\n4+lZViT079+fHTt22OyAczqjgLCV0USdzmZcn3YsHNuTJua5s97p1bdcOXzuPI9ERBOflsdDIzrz\n2HXdcHWR1emrU9/yBMrGYD4SEU1mQQkvjevN5IHmu4nRmdXHXNmyP5m5q/fh1rABX08bxJVdW1hs\nW1LwOhGlFN/GJLHg2/3o9IoXbuvFXYMCHPZAU9WZsKUEBgbi6+tLdHQ0KSkp9OnThw4dOhATE2Py\nayQnJ9OyZct/fm/QoAHh4eEEBwczY8YMhg0b9s/f/Pz8SEpKMus+mEIpxbqosyz4dj8NGmjmnBrI\nJiRXLEcpRcTu0yzccJAm7q58PW0gw7u2rP6JdkjyxLIMBsWnO06w+Icj+Df3YN1DQ+nVrplNYqkr\nyRXLKtUbeO2HI3y8/QQh7b35cGpf2lp4MSMpeJ1EZn4Jz3yzn+/jkukb4M0bE0MJbFHnibzrpenT\npxMeHs65c+eYNm0aubm5DB8+3OhjIyIiCAoKKtfm4eFRYaWeY8eO0bhx4woHl6KiIjw8rLtiWU6B\njvnfxLFxXzIDA314484Q/Js7Tre0PXH6XCnU8dS6fWyKO8fwri14Y2IoLeu+6Ei94+x5ApBdUMLc\n1bH8fCiVm3q15tUJweaah7leqQ+5kp5XzCMR0fx5IoO7BgfwzJggGjU0602MRknB6wS2HU5l3tp9\nZBeU8L8buvPg1Z1xscD4l/pi
3LhxLFiwAJ1OR0REBC4uLjU6w+7WrRsJCQn//J6Tk8Ojjz7K9u3b\nmTVrFmvWrGHChAkAHD16lF69epl7Fyq160QGc1bGkJpbLLliBs6cK1Gns3gkIpqU80U8cWMPZlzV\nySLj6uoDZ84TgJgz2cxcHkVqbhHP3RLEf4bK4g215ey5svdUFg8v30t2gY4ld4Rwez9/q21bCl4H\nll9cyqLvD7Fi92l6tG7CV/cOJKitbdcidwZubm6MHDkSb29vXFxqftbp5eVF586diY+Pp0uXLsyZ\nM4eZM2fSrVs3Pv/8c0aOHMlVV12Fn58f27Zt4+WXX7bAXpSn0xt46+ejfPDrcQJ9vVjz0FBC23tb\nfLvOzhlzxWBQfLz9BK//eIQ2zdxZ9eAQ+gaYb3Ga+sgZ8wTKhrt8+XsCL28+hF8Td9Y8OJQQOa7U\niTPnytK/TvHCxoO0aebBuocH0LOtlYe7mDKVQ01/ZFoyy9t9MkMNf3WrCnxyo3rp+4OqSFdqke1Q\nD6eF0ev1KiQkRB09erTWr7Fu3To1f/78Kh9z7tw5NWrUqFpv46Lq3rPjqbnqlnd3qA5PbFTzVseq\nvCJdnbdpjORK7VgrV0x5v1LPF6m7PvtLdXhio3po2R6VXWD61ECmkjypHXs6piilVE5hiXpw6R7V\n4YmN6r7w3So7X3LFHBwpV0x9v/KLdWr2irIpx+790vy5YmqeyBVeB1NcqueNn47yyfYT+Df3YOUD\nQxjY0bGmkLJnBw8eZMyYMYwbN46uXbvW+nXGjRtHRkZGlY85ffo0S5YsqfU2qqOUYuXfZ3h+w0Hc\nGjbgw6l9ual3G4ttr75xplwB2HEsjTkrY8kt0smd9WbkbHkCZat2zoyIIjGrkP+7uQf3D+8kuWIG\nzpgrJ9PzeWjZXo6k5PL4dd2YObKLzYZGScHrQA4mneexVTEcPpfL5IHtmT86iMYONK+hIwgKCuLE\niRNmea3p06dX+fcBAwaYZTvGZOWX8NS6OLYcOMfQzr4smRhCm2bWvznBmTlLruj0Bt746Sgf/nqc\nrn6NWT59EN1bm33FtHrLWfIE/p2x4/kNB/HxdGPlA4PpHygXXMzFmXIF4McD53h8VSwuLhpf3TuQ\nq7rZdnYXqZYcgN6g+Hj7cd786Sjenm588d/+jOrRytZhCTv1e3w6j62KITO/hKduKrv6IjcbCWPO\nZBbwaGQ00aezmTwwgAVjgsy95KtwEvnFpfzf+ji+jUni6m4tefPOUHy83GwdlrBDpXoDSy6cRAf7\nN+ODqX3tYiYgKXjtXEJ6Po+vjmXvqSxG927Dott60VwOMsKI4lI9S34sG+7SuaUXn/9ngMPOgSks\nb1NcMk+s3QcK3pvShzHBjrUSo7Cew+fO8/DyKBLS85l7fTceHmG7bmlh3zLyink0Mprf4zOYPDCA\nZ28Jwt3VPk6ipeC1U0oplu86zYvfH8LVpWxhgFtD2so4KWFUfGouj66I4WDyeaYOCuDp0XKlThhX\npNOzcONBInadJrS9N+9OdqzlgYV1rdpzhgXf7qeJuyvLpg9iaGfLrYQlHFv06SweXh5FRn4JiycE\nM7F/e1uHVI4UvHYo5XwR89bs47ejaQzv2oLFE4Jl/KUwSilFXnEpd72zE69GDfn0nv5cFyTDXYRx\nR1NyeSQimiMpucy4uhNzr+8uywMLowpL9Dzz7X7W7E1kaGdf3poUil8TWaJeVKSUYtmu0yzccIBW\nTd3tdoU9OdLZmQ2xSVz/5nZ2nczg+Vt78tW9A6XYdQCBgYGkp6db9fV0egOnMgrILtAxsKMPW2YP\nl2LXAdgiV5RS5BeXcut7O8nIL+araQN56qYrpNi1Y7bIk4t0egNj39/J2qhEHh3VhaX3DZJi147Z\nMlcKS/Q8vjqWZ77Zz5VdWrDxkSvtstgFucJrN7ILSnjm2wNsiE0itL03b0wMoVPLxrYOS1hYeHg4\nCQkJPPfcczV6Xm6RjjOZheiVwtvDla/u7SNj6pxcbXNFbzBwNquQrAId/TuULSUtxYvzqm2eXJRV\nUEJabjHpeSV2cWe9sJy65kqp3sC4D37nSEouc67txiOj7Htst8kFr6ZpLsAe4KxSaozlQqp/fjua\nxrw1sWTklfD4dd14aERnGjrolRez5snmJ+FcnFni+kfr3nDTK5X+ecGCBfj4+BAWFgbA/Pnz8fPz\nY/bs2Sa9fGFhIePHj2f8+PGEhoZy3333sXv3bvR6PQMHDmTlypUmL+W4ePFiNm/ejIeHBxEREXTp\n0gWDQXHufBHpecW4u7rQsbkXCecb2vVBxiokVyrkCkBBSSmnMwvQlSqaeTTk62n1/MRI8sRonkDZ\nCntJOYVk5pfg6tKATY8Op3WzenxiJLlSaa4AnC/UkZpbTHJOEV/8dwAju/uZ9Lq2VJMrvLOBQ4Cs\nXWsmBSWlvLTpEMv+Ok1Xv8bOcle9Q+fJtGnTGD9+PGFhYRgMBiIjI9m6dSuhoaFGHx8REUFQUBAA\neXl5TJo0iXvuuYd77rkHgFtvvZWnn36awsJC7rrrrhqtW96sWTPi4uL4+uuvCQsLY/X6bzmTWUCR\nTk+Lxo1o3dTdoYsXRz+Jtudc2bBhA+l5xZzLKcbVRaNTSy9O57o6dL44KnvOk40bNwJQrNNzOrOA\nQp2elk0a0bCxW/0udm3EEXJFKUXK+WJSc4to2EBj4yNXOs5Nr6Ysxwb4A78Ao4CN1T1elhau3p6E\nTHX14rKlgV/YcEAVllhmaeC6ogZLO5ojT+xhacdrr71WRUVFqc2bN6vbb7/dpOd06NBBBQcHq2XL\nlpVrLy4uVsHBwWrgwIGqtLTsM05PT1chISEqJCREtW/fXrVq1eqf3/ft2/fP6x0/fvyf12je3Eft\nS8xWB87mqPOF5ZdltIf3TKmaLwMKPAZESK6UMUeulJSUKB8fH3UiLU/FnslSCel5SleqV0rZx/ul\nVK3yxAWIljwpY848UUqp7PxitT8xW+0/m61yLiwlbQ/vmVL1c2lhe84VXaleHU/NVbFnstTpjHx1\n4MABc+12nZiaJ6Ze4X0LmAdUuvyOpmkPAA8ABAQE1KDkrl9KSg28/UvZhMxtmnmw4v7BDO7ka+uw\nzKXaPHEE06dPJzw8nHPnzjFt2jRyc3MZPny40cdeeoY9bNgwtmzZwpQpU/6ZPi4jI4O8vDx0Oh1F\nRUV4eXnh6+tLTEwMUPUYKk3T0OkNJGQUoIAmjRri39zDYYe7XErTNH9gNPAiZYWvQ7KnXAHIK9Jh\nUGWLBLTz9sDHy80ZpjJ06F4jsL88ufjvpOxC0vOK8XRrSICPB24NHXsqQ0fvNQL7zZWCklJOZxSg\nMyj8m3vg49WIQymOdWyp9ptT07QxQKpSam9Vj1NKfaKU6q+U6t+ypQxyN+bwufOMff933t92nNv7\n+rMlbLjTFLum5ommaQ9omrZH07Q9aWlpVoquZsaNG8eWLVv4+++/ueGGG2jSpAkxMTFGf
y4ebAAW\nLlxI8+bNmTlz5j9tM2bM4IUXXmDq1Kk88cQTNYrjq2URHEvJY+3qVQwcNJgOvp5OUexecPHkyFDZ\nAyRXTBcZGcm5nEI+/HIpof0H0tmvMb6NGzl8sXvJidFnto6lLuwlT1auXAnA8ogVBPcdQHpeMS0a\nN6JTSy+HL3YvuHhy5LDsLVciIyPpN2AQx9PyAejc0gsfr0Zm2FPrM+UK7zDgVk3Tbgbcgaaapi1T\nSt1l2dCch96g+GzHCZb8eJSmHk47V6pJeaKU+gT4BKB///7K+mFWz83NjZEjR+Lt7Y2LS82+BN5+\n+22mTZvGvHnz6NWrF66urkyZMgW9Xs/QoUPZunUro0aNqvI1DAaF3qA4lZzK+GuH4uXpwcrIFQ5f\nvFx06cmRpmkjKnuc5Er1uXLRyaRUrhrcHw93d1atjMTDTlY2MgOn6F20lzzJysqiV+/e4OLKa+9/\nTgdfT5p5OMfKnc7Sa2RPuRIcHIzm4sqL73yKl5sLAT4OftHFlHEPF3+AEcgY3ho5nZGv7vjwD9Xh\niY3qga//Vum5RbYOqUaoxRiquuSJPYyh0uv1KiQkRB09etTq2y4o1qnDyedV7JkslZRVoPQGQ7XP\nsYf3TCnTcwV4GUgEEoBzQAGwrKrnSK4Yl11QrPafzVb7E7NVVn5xlY+1h/dLqRrlyRjgAyXHFPPE\nYDCopOwCFXsmSx09d14V6Sq/b8Qe3jOlavb9A6wB+lWVK5SdGO0B9gQEBFTYnj3stz3kilJKFetK\n1dFzZd9FydmFymDku8ge3i+lTM8TBy7V7ZtSisjdp7nxre0cSj7PkjtC+Oiufvg2dsyugPri4MGD\ndOnShWuuuYauXbtabbtKKdJyi4hPy8egFB1beNHG24MGTnJV91JKqaeUUv5KqUBgErBVOWCPka1y\nBcp6Ac5mFXAqowC3hg3o0qox3p7OcaXuEhd7jRKASGCUpmnLbBtSzdkyTy4qKTVwMi2ftNxifL3c\n6NyyMY2cYwgD4DxDL+0hVwDOF+k4lppHid5AoK8XrZu5O0UPY40WnlBK/Qr8apFInEhqbhFPro1j\n6+FUhnTy5fWJIbTzrj+rpTlyngQFBXHixAmrblNXauBMVgF5xaU083Clnbdz3Jjm7GyRKwBFF6aQ\n+md6umbuTntiBDwFcGHoy1xHPDGyVZ5cVLZITQEGBQE+ns54YgROMvTS1rmilCIlt5jU80W4u7rQ\nwdfTqU6MZKU1M9sUl8z89XEUlOhZMCaI/w4NlLkva0gp5RRnk6bIKSghMbsQpcC/uQfNPWt2V31Z\nb47jquvJUX3KFaUUWQU6krILaaBpBLbwoqm7q8nPrc/qU55cpNS/86W6u5aNv3Q3YWy3I+aKOU+O\n6mOuQNmqaWeyCskt0tHc04123h5V1i6OmCdS8JpJToGOZ7/bzzcxSQT7N+ONiaF08ZOlgWvK3d2d\njIwMfH19nfqgozcokrMLySwowdPNhfbNPWlUwxuNlFJkZGTg7l4/J4ivL7kCF5cHLiK7sITGjRrS\n3scTVxN7AZwhT+pyYlSf8uQind7A6cwC8otL8fF0o201xctFzpArdVEfcwWgsKSUUxemHDNlOkNH\nzRMpeM1g57F0/rcmltTcYmZf05VZo7qY/GUkyvP39ycxMRF7nYbKHEpKDWQVlFCqVzR2b0hD94ac\nyKjdwdXd3R1/f38zR+gY6kOuQFm+ZOaXoDcomno0pGEjV+LTa/YakifOnycXFev0ZBboUErh7elK\nbm5DjqSY/nxHz5W6nBzVt1yBsjm7swt1uGgaPl5upJ5vQKoJz3PEPJGCtw4KS/S8svkQX/15is4t\nvVj30FBC2nvbOiyH5urqSseOHW0dhkXoDYqPfjvOmz8dxa9JI968M5RBTjIPsy04c65A2Y1pX/x+\nkle3HKZl40a8M7kP/QN9bB2Ww3H2PLlIb1C8vy2eN38+QeeWjflgal+6tXLoNYCsrr7kCpTdC/D8\nhgOs2H2GK7u04J3JffDxcsrx3f+QgreWok9n8fiqWE6k53PvsECeuLGHSeOjRP2UmFXAYytj2Z2Q\nyZjgNrw4rjfNPEwbfynqn4y8YuaujmXbkTRu6NmKV28PdtabjYQZpOcVM2dlDDuOpTOuTzsW3dYL\nr0by9S6MO5NZwMPLo4g7m8PMkZ157LruuNSDe43k/4ga0ukNvPPLMd7fFk/rpu5E3D+IoZ1b2Dos\nYce+i01i/vo4lIIld4Qwvm+7ejU+TNTMH/HphK2MIbtQx8KxPbl7cAfJF1Gp3SczeWRFFFkFOl4e\n35tJA9pLvohK/XY0jdmR0ej1ylkXwaqUFLw1cCwllzmrYth/9jzj+7bjuVt7mnyXtKh/cot0PPvt\nAdZFn6VvgDdv3dmHAF9PW4cl7FSp3sDbvxzjvW3xdGzhRfi9Awlq29TWYQk7ZTAoPt5+gtd/PEL7\n5h58+bDki6icwaB4b1s8b/58lO6tmvDRXf0IbOFl67CsSgpeE1wcS7f4hyM0btSQj+7qx429Wts6\nLGHH9p7KJGxlDGezCpl9TVceGdVF5tYVlUrKLmR2ZDR/J2RxRz9/nh/bE083OTwL47LyS3h8dSxb\nD6cyuncbXrm9N03k4ouoRE6BjjmrYth6OJVxfdrx0rjeeLjVvyGYckStxpnMAuaujmXXyUyuvcKP\nl8cH07KJrJYmjCvVG3h3azzvbj1Gu+YerH5wCP06yI1GonI/HDjHvDX7yq7wTgplbGg7W4ck7Fj0\n6SxmRUSTmlskQ15EtQ4k5fDQsiiScwrrfb5IwVsJpRSr9yaycMNBABZPCOaOfv71NlFE9U5nFBC2\nMpqo09mM79OO58f2lKsuolJFOj0vbTrE13+eone7Zrw7uU+962IUplNK8cXvCby86RCtm7mz5kGZ\nFUhUbc3eROavj6O5pxsrZwyhb0BzW4dkU1LwGpGWW8xT6+L4+VAKgzr68PodIbT3kbGXwjilFOui\nzvLsdwfQNHhnch9uDWlr67CEHYtPzeORFdEcSj7PfVd2ZN6N3Z1qCU9hXjmFOuatieWHAylcF9SK\n1yeE0MxTTqaFccWlep7fcJCIXacZ0smXd6f0oUVj6ZmWgvcyW/afY/76OHKLS3l69BVMG9ZRlgYW\nlcop1PH0N/vZEJvEwEAf3rgzBP/mcnIkjFNKsWZvIgu+PYC7awO++G9/RvWoP3dJi5qLS8zh4Yi9\nJGcXMf/mK5g+vKP0NIpKnc0u5OFle4lNzOHBqzsz9/pucv/IBVLwXnC+SMdz3x1gXdRZerZtyoo7\nQ2XSblGlXScyeGxVLCnni5h7fTceGtGlXsxlKGonr7iUp9fH8U1MEkM6+fLmnaG0buZYS3MK61FK\nseyvU7yw8RC+jd1YOWOw3A8gqrTzWDqPrIhCp1dyc70RUvACfxxPZ+6qWFJyi3lkVBceGdUVt4Zy\nRiSM0+kNvPXzUT749TgdfDxZ89BQQmUsnajC
/rM5zIqI4nRmAXOu7casUXJyJCqXV1zKU+vi2BCb\nxIjuLXljYqjTr4Ilas9gUHz423GW/HiELn6N+eiufnRq2djWYdmdel3wFun0vLrlMF/+nkCnFl6s\neXAIfer5oG5RtZPp+YRFRhObmMPE/v48e0tPWdFIVOrijUavbD5Ei8aNiHxgCAM7ylU6UblDyeeZ\nuTyKhIx8/ndDdx66urMMqxOVyinU8fiqWH4+lMKtIW155fbeMqVhJap9VzRNcwe2A40uPH6NUupZ\nSwdmafsSs5mzMobjafn8Z0gHnrzpino5L50wjVKKVXvO8Nx3B3Fr2IAPp/blpt5tbB2WsGOZ+SX8\nb3UsvxxO5bqgViy+PZjmcpVOVEIpxeo9iTzz7X6aebgScf9gBnfytXVYwo4dSj7PQ8v2kphVyIIx\nQdw7LFDGd1fBlNOAYmCUUipP0zRXYKemaZuVUn9ZODaL0OkNvL8tnne3xtOycSOW3jeQ4V1b2jos\nh+esJ0ZQNsn7U+vi2HLgHEM7+7JkYghtmnnYOixhx/48nkHYymiy8nU8d0sQ/xkqX0SicgUlpTzz\nzQHWRiUyrIsvb93ZR+Z7F1VaH53IU+viaOruSuQDg+kfKD1H1am24FVKKSDvwq+uF36UJYOylPjU\nPB5fFUPmOrCoAAAgAElEQVRsYg63hbbl+Vt7ydQu5uNUJ0YX/R6fzmOrYsjML+H/bu7B9Cs7Sfei\nqNSlC48E+nrx+X8G0KtdM1uHJexYfGouDy2LIj4tj9nXdOXRa7rK+G5RqZJSA4u+P8jXf55iYEcf\n3pvSB78mcvOrKUwa6KFpmguwF+gCvK+U2mXkMQ8ADwAEBASYM8Y6MxgUX/2ZwCubD+Pp5sL7U/oy\nOli6o83JmU6MoGwewyU/HuWT7Sfo3FIKF3Ny1t6A5JxCZkfGsPtkJrf39WfhWBnfLar2TfRZnloX\nh6ebC0unDeLKri1sHZKwY8k5hTy8PIro09ncP7wjT9zYQ6YcqwGTjsZKKT0QqmmaN7Be07ReSqn9\nlz3mE+ATgP79+9tNoXM2u5D/rY7lj+MZjOrhxyvje+PXVM6GLMHRT4wuik/N5dEVMRxMPs/UQQE8\nPTpIxnebl9P1Bvx8MIW5a2LRlRp4884QxvXxt3VITsFZT46KdHqe33CAFbvPMLCjD+9O7kMr+V4S\nVfgjPp1HVkRTpNPzwdS+3Cz3kNRYjS4/KKWyNU3bBtwI7K/u8baklGJ99Fme/fYABqV4ZXxv7hzQ\nXsbRWZAjnxjBhXkvd51m0caDeDVqyGf39OfaIFkUwNycqTeguFTPy5sOE/5HAr3aNeXdyX3pKMsD\nm5PTnRydTM/n4eVRZTccjejM49fJwgCickopPt5+gsVbDtOpZdmUY138ZMqx2jBlloaWgO5CsesB\nXAe8avHI6iAjr5j56/ez5cA5BgQ2Z8kdoQT4yupX1uJIJ0YXpecV8+Taffx8KJWrurXk9QnB0hNg\nQc7QG3AirWx54ANJ57l3WCBP3tRDlgc2M2c6OQLYuC+JJ9fG0dBF48v/DmBkDz9bh+QUnLUn4HyR\njv+tLltSenRwGxbfHizDpOrAlHeuDfDVhS+oBsAqpdRGy4ZVez8dTOGpdfs4X1jKUzf1YPrwTnID\ngBU44onRRb8eSWXu6n2cL9KxYEwQ/x0aKDemWZij9was3Vs2fVSjhg2kJ8DCTDk5snfFpXpe+v4Q\nX/15ij4B3rw3pS/tvGWmFzNyup6Aoym5PLh0L6cyC3h69BXcd6UsKV1XpszSsA/oY4VY6iS3SMcL\nGw+yak8iV7RpyrLpIfRo3dTWYdUnDnViBGXj6F7ZXNYd3b1VE5beN5Ar2kjOWJOj9QbkF5fyzDf7\nWRd9loEdfXh7UqhMUWdh1Z0c2XtPwJnMAmZGRLEvMYfpV3Zk3o09ZCVPM3O2noBvY87y5No4vBo1\nJGL6IAbJfMxm4RTXxnedyODx1bEkZRfy8IjOhF3bTQ4oVuYoJ0YXHT53ntkrYjiSkst/h5Z1R7u7\nSne0NThqb8D+szk8siKaUxn5Mn2UDVR2cmTPPQE/HjjH3NWxKODju/txQ8/Wtg7JaTnDMKmSUgMv\nbTpE+B8JDAhszvtT+srQOjNy6IK3SKdnyY9H+GznSQJ8PFn94BD6dZDJl0XlDAZF+B8JvLLlME3d\nXfny3gGM7C7j6KzMoXoDlCrLmZc3HcbHy01WwLIiRz050ukNLN5ymE93nKR3u2Z8MLUv7X3kPhJL\ncvRhUinni3h4eRR7T2UxbVhHnrq5B65yM6NZOWzBu/9sDo+tiuFoSh5TBwUwf/QVsn60qFJqbhFz\nV+9j+9E0runhx6sTgmnRWFYzsjZH6g3Iyi/hf2v28fOhFK69wo/FE0LwkeWBrcmhTo4AkrILmRUR\nRdTpbO4e3IGnx1whNzNakaMNkwL460QGsyKiKSgp5d3JfbglpK2tQ3JKDlchluoNfPjrcd7+5Rg+\nXm6E3zuAEXKFTlTj54MpzFu7j/ziUl4Y25O7BneQGwBElXadyGB2ZNkqe7JOvW040skRlN0AO2dl\nDCWlBilcrMhRewKUUny24ySvbDlMBx9PIu4fRLdWTWwdltNyqIL3RFoej62KJeZMNreEtOWFsT3x\n9pSrLaJyhSV6Xtx0kGV/nSaoTVPenhRKVzmgiCroDYp3tx7jnV+OEeDjybqHh8oqe6JKpXoDb/58\nlPe3HadH6yZ8MLUvnVrKXKlW5HA9AXnFpcxbE8umuHPc2LM1r90RTBN3V1uH5dQcouBVSrH0r1O8\ntOkQjRq68M7kPtwqZ86iGvvP5jA7MprjafncP7wjc2/oLl2LokrncoqYHRnNrpOZjOvTjhdu60Vj\nmfdSVCHlfBGPrijLmckD2/PsLT3lBlgrc7SegPjUXGYs3UtCRgH/d3MP7h/eSXqPrMDuj+TJOYXM\nW7OPHcfSubpbSxZPCJYlGEWVDAbFZztP8NoPR/DxcmPZfbJGvajeL4dSmLs6luJSA6/fEcKEfrI8\nsKja7/HpzI6MJr9YzxsTQxjfV3JGVG3jviTmrdmHp5sLy+4bxJDOcgNspc7uBe9A8DLPe2S3Ba9S\niu9ik3jmm/3o9IpFt/Vi6qAAOQsSVTqXU8Rjq2L443gGN/RsxSvjg2kuNxmJKhSX6lm85Qif7zzJ\nFW2a8t6UPnSW7mhRhYvDXt7+5RhdWjZmxf19ZaiUqJJOb+CVzYf5fOdJ+gZ488HUfrRuJhfvjDq3\nHz4aVvbvIbPghhfN8rJ2WfBm5Zfw9Df7+T4umb4B3rwxMZRAWZ9eVGPL/mSeWBtHSamBV8b35s4B\n7eUESVQpIT2fR1ZEE3c2R+ZjFiZJzysmLDKGnfHpjO/TjkXjeskMQaJKqeeLmBURze6ETP4zpAPz\nRwfJWgHG6Irg/YGQferftmFhZnt5u/u/dNvhVOat3Ud2QQn/u6E7M67qREOZi05UIb+4lIUbDrJy\nzxm
C/Zvx1p2hcsOIqNY30WeZvz4O14YN+OTuflwviwKIauw6kcEjK6LJKdTJSXVtFedBYSZ429/C\nD5bwd0ImDy+PIq+olLcnhTI2tJ2tQ7JPz112Y/DkSOh+k1k3YTcFb35xKYu+P8SK3afp0boJX907\nkKC2ssyrqFrsmWxmR0ZzKrOAh0d0Zs513WSyblGl/OJSnv3uAGv2JjIgsDlvT+pDW29ZHlhUzmBQ\nfLT9OK//cIQOvl6Ey/dTzSkF6+6HuNVlvz+TDi7OOyuBUoovfk/g5U2HaO/jybL7BtG9tQx7qWDd\nDNgX+e/v3W6CySvAAieSdlHw/p2QyeOrYjmTVcCMqzvx2HXd5G56USW9QfHRb8d586ej+DVpxApZ\n/UqY4EBS2fLAJ9PzefSarjw6qov0IIkqZeWX8NiqGLYdSWNMcBteHt9bpo+qqd2fwqa5//4+/HGn\nLnbzi0t5Yu0+Nu5L5vqgVrw+MYSmkjPlZRyHd/uWb3skCnw7W2yTNi14i0v1vPHTUT7ZfgL/5h6s\nmjGEAYGyNLCoWmJWAY+timX3yUzGBLfhxdt608xTDiaickopvv7zFC9uOkRzT1eWTx/E0M4yc4dZ\nleRDQ3do4DwXK6JOZzFreRTpeSWyYE1tbH4Sdn347+9tQuG+H6Gh865wGZ+ax0PL9nI8LY8nbuzB\ng1fLlGMVXD58of1guO8Hi2/WZgXvwaTzPLYqhsPncpk8sGxpYJnvUlTnu9gk5q+PQylYckcI4/u2\nk4OJqFJ2QQnz1uzjx4MpjOzektfvCMFXlpQ2n9RD8MHgsn/f9BoMesC28ZiBUorPd57klc2HaePt\nztqHhtLbXxYfMdnhTRA5uXzbY4egqXPPn785Lpm5q2Nxd3Vh6X2DGNZFTqrL+fJmOPV7+bbncqy2\neatXmHqD4uPtZV3RzTzc+OK//RnVo5W1wxAOJrdIx7PfHWBd1Fn6BHjz9p19CPD1tHVYws79nZDJ\n7BXRpOUV8/ToK7jvyo5ygmQuOYnwZs/ybVeMsU0sZpRTqON/q2P58WAK1we14rU7QmjmIT1IJinI\nhMUdy7f1uh0mfGGbeKykVG9g8Q9H+GT7CULbe/PhXX1p00zuC/jHmb/h82vLt83aCy26WDWMagte\nTdPaA18DrQAFfKKUers2G0tIz+fx1bHsPZXFzb1bs+i23vjIHKmiGntPZRG2MpqzWYUy7lKYRG9Q\nfLAtnjd/Pkp7H0/WPjSUYH9vW4flHJKi4ZMR5dsmRUCP0TYJx5ziEnN4OGIvydlFcoJUU5d3U4NV\nr97ZSlpuMbMioth1MpO7B3fg6TFXyD1IFykFz1923O05Hu740ibhmHKFtxR4XCkVpWlaE2Cvpmk/\nKaUOmroRpRTLd53mxe8P0dBF4807Q7gtVLqiRdVK9Qbe3RrPu1uP0dbbg9UPDqFfBxnjbRVKwbaX\nYPvist+fzbbIXbOWkHK+iLDIGP48kcHY0LYsuq2X3GRkDueT4I0ryreNXgIDptsmHjNSSrHsr1O8\nsPEQLRq7serBIfQNaG7rsByDsUJ3QaZTjeWuzN5TZVOO5RTqWHJHCLfL6oz/ssMToGoLXqVUMpB8\n4d+5mqYdAtoBJhW8ecWlzIqI4tcjaVzZpQWv3REsl/qdkDl7AgBOZxQQtjKaqNPZjOvTjoVje0rR\nYg1Kwc/Pwu+XfHRBYx2m2N12OJXHV8dSWKJn8YRg7ujnLyfWdVVSAC+1qdjuJFfvcot0PLkuju/3\nJTOye0vemBgqqzOaYvkdcOzH8m0WvsveXiil+OqPBBZ9f4h2zT0Iv3cgV7SRaeoA+H4u/P1p+bZH\no8Gnk23iuUSNxvBqmhYI9AF2GfnbA8ADAAEB/04o7enqgqtLA56/tSd3D+5Agwby5eOk6twTAGUH\nkvXRZ1nw7QE0DZmo21oMBlh42RUt364w/SfwsP8rXSWlBl774TCf7jhJj9ZNeG9KX7r4yeIjdWIs\nJwAWZEED5xhSdDDpPDMjojidWcATN/ZgxlWd5DuqOjER8M1D5dtufBUGP2ibeKysoKSUp9bF8W1M\nEtf08OONO0NljDeAXgcvGLlJz45OjE0ueDVNawysBcKUUucv/7tS6hPgE4D+/furi+0NGmh8cnc/\nucpiL/JS4fPrICvBrGtU17UnAMpuFnn6m/1siE1iYKAPb9wZgn9zuTHNovSl8IKR+YufPAPujnHF\n4lRG2fLA+xJzuHtwB+aPvkKWB64rY92RT52FRs5xEqGUYuXfZ3j2uwM083AlYvogBsk83lUrzIJX\nAyu221FBY2kn0vJ4aFkUR1NzmXt9Nx4e0UVOkMAuhy8YY1LBq2maK2XF7nKl1LqabkSKXTtwbj98\nNKx8W/ebLbKp2vQEAKyLSmRTXDJzr+/GQyO64CIHEsspLYZFfhXbZ8dC80CLbtqcw1++jTnL/PX7\naaDBR3f148ZesjxwnRj74grbD97trR+LhRSUlPL0+v2siz7LlV1a8NakUFrINHVVc5CCxpJ+OHCO\nuatiaeii8dW9A7mqW0tbh2R74WMgYUf5tkdjwKej8cfbmCmzNGjA58AhpdQblg9JmNVbvSH7dPm2\nG16CITMtsrna9gQA3DMkkCGdfenR2jGuLDqkysZjWneOzDoPfykoKeW57w6wak8i/To0553JfWgn\nywPXnrGC5r6fof0A68diQcdScnl4eRTxaXmEXduVR0Z1lRPrqhjLi7nx0Lj+FHulegOv/3iUj347\nToh/Mz64q58ca4py4JXyF6xwaQTPpNomHhOZcoV3GHA3EKdpWsyFtv9TSm2yXFiizowdqCavhO43\nWmyTde0JcGmgSbFrKcbmxwSbfHnVdfhLSamBce//wdHUXGaN7ELYtV1lmrraMnacuPY5uHKOtSMx\nypy9AeuiEpm/fj9ejVxYJosCVO3tUMg6Wb5txFMw4knbxGMj6XnFPLoimj+OZzBlUADP3hIkU445\n8NV+U2Zp2AnIKbAj0BXCi0a6dIeFwXXPW3TT0hNgp7LPwFu9KrY/kWAXN6PVZviLW8MGTBrYnu6t\nmjBUipbaMfalBfb4xVXn3oAinZ7nvjtA5N9nGNTRh3cm96FVU3fLRezIjm+FpeMqtttfXlhc1Oks\nZi6PIjO/hMUTgpnY33mG9dTKKwFlV3YvNe8keDrOVKGylq8zSD8G7/Wv2H73eug8ylpRSE+APUmK\ngU+urtj+VCI0amL9eIyoy/CXe4fZ5xgxuxc5FQ5vrNhupwVNXXsDTqTl8fDyKA6fy2XmyM7Mubab\n9AYYY9DDQiOFi53mxeXM2ROglGLZrtMs3HCA1s3KlpXu1a4eLyudcRze7Vu+zX8ATP/ZNvHUgRS8\njuy312Dboortjx+BJta9eUd6AuxEwu8QbuRmRDu7w76uw19EDf39GXz/eMV2BylooPLegKpuhE05\nX0x6Xgnh9w5gRHcjN2kK41f7HWihmQvMMi1mYYme+evjWBd9
lpHdW/LWnX1o5lmPpxxz4OELxkjB\n64gq646sJ6vbCCOO/gAREyu229EV3Ytk+IsVnY2CT0dWbHewL62qegOq6gkY0tmXHfNG4uEmx8UK\njH2PPPAbtA21fix1ZI5pMU9l5DNj6V6OpOTy2HXdmDWyHk85Ziw35qeAq2MPBZKC15E4zrg7YS37\nVsG6+yu2P50KDe12qiUZ/mJpld2k6IDHirr2Bkixe5lvZ0L0svJtna+Bu52jo6Wq+wIqYzAopn+1\nh7S8YsLvHcjV9XXKsaRo+GRE+ba+98Ct79okHHOTgtfeVbbaETjkl5cwk92fwqa5FdufyQAX+/7f\nWoa/WFBlx4un06Ch4y2XK70BZmRsLCY41fdIdfcFVDb8pUEDjSUTQ2ju6UZ7n3q62JGTDV8wxr6/\nGeuztCPw/sCK7U3awuOHrB+PsA+/vgq/vlSx3fHG3AlzM/aFNecANPO3fizmI70B5lAPihlTegKq\nGv4S7O9t8RjtknOM4TaJUxe8Op2OxMREioqKbB2K6Ypy/p3644ZV/7Z7+Px709Eh8xe87u7u+Pv7\n4+pajwfo27M102D/2ortTvalJWrB2BfWpAjoMdr6sZiZ9AbUkZOOxbyc9ATUwuFNEDm5fNt1C2HY\nbNvEYwVOXfAmJibSpEkTAgMD7X9546Tosv82awRccjexX0+Ld0UqpcjIyCAxMZGOHWW6J7siha6o\njLFi5uonYOT/WT8WYV+M5caEL6DX7daPxTqkJ6Am6sEVf2OcuuAtKiqy/2L3YqF7uTahVutS0DQN\nX19f0tLSrLI9YYKvboGT2yu214ODkqiGsS+rNqEw4zfrxyLsy9+fw/ePlW/TGsCzWbaJx0qkJ8BE\n9bTQvcipC17Afovdygrdtn2sG8cFdvs+1TcfDIFUIzPp1KODkqjE8z6g9BXbJTdEcS68bGSstuSG\nANgbDhsuG6pwx1fQ8zabhGMrTl/w2hWDHs7tM/43GxW6wk68GgiFRq7CyBeWWDsd4lZXbJfcEFDv\nr9qJKigFzxu5Ga+e5ocUvNZQdB4yj1dsb9QEfLtUaN6xYwcPPvggrq6u/Pnnn2RnZ3P//fezcaOR\nJUEv2LhxI7t372bhwoXmjFxYmsytLCoTvaxsztTLSW4IMH7seOwwNG1j/ViE/ZEToQqk4LWkjONQ\nXGEqwLIit4rVr5YvX85TTz3FXXfdBcCCBQu4/34jiwtcYvTo0TzzzDM8+eSTeHrW03kEHYkUuqIy\nKQfhwyEV2yU3BMCXo+HUzvJtI+fD1fNsE4+wLz89C7+/Vb7t/q3Qrp9t4rEj9abgfX7DAQ4mGSk+\n6yCobVOevaVnxT9cGJ+74LUP8fFuStj9UwGY/94q/Fq1ZvbsyocvfPbZZ6xatYoffviBzZs3s3z5\nctauXcuiRYsAePPNN4mLi+OLL74gLi6OyZMns3v3bjw9PRkxYgQbN25k4kQjS8wK+yCFrqhM0Xl4\npX3FdskNAXBmN3x+XcV2yQ8BoC+FF3wrtkt+/KPeFLxWcdmNaNMmjWX89LmEPfs6BoOByJUT2Lp1\nK6Ghxtcqj4iIYPr06ezcuZMxY8YwYcIETp48SfPmzWnUqGyZ2NmzZzNixAjWr1/Piy++yMcff/zP\nFd3+/fuzY8cOKXjtkRS6ojKVjbOz7+WhhbVUtnqeHDvERTJ8wSTVFryapn0BjAFSlVK9LB+SZRi9\nEmsOSkFyjNE/BQ4ajW/rt4iOjiYlJYU+ffrQoUMHYmKMP96Y5ORkWrb8d13vBg0aEB4eTnBwMDNm\nzGDYsGH//M3Pz4+kpKTa74swPyl0RVWM5UfYfvA2cqVX1D/1aBUsUQuRU+HwZff2PHYImra1TTx2\nzpQrvOHAe8DXlg3FwZSWQOoB43+7ZMaF6dOnEx4ezrlz55g2bRq5ubkMHz7c6NMiIiIICgoq1+bh\n4VFhpbhjx47RuHHjCsVtUVERHh4etdgZYXZS6IqqGMuPu9dD51HWj0XYH2P58cCvMpuPKFOcBy+3\nK9/WYRjcK+tsVKXaglcptV3TtEDLh+Ig8tMh50zF9qbtoLFfheZx48axYMECdDodERERuLi41OgK\nb7du3UhISPjn95ycHB599FG2b9/OrFmzWLNmDRMmTADg6NGj9OrlsBfhnYMUuqIqxvJjxP/BiCes\nH4uwP3++Dz9ctlJe/2kw5k3bxCPsjwxfqDWzjeHVNO0B4AGAgIAAc72s/Ug9BKVFFdv9gqocZ+fm\n5sbIkSPx9vbGxcWlxpv18vKic+fOxMfH06VLF+bMmcPMmTPp1q0bn3/+OSNHjuSqq67Cz8+Pbdu2\n8fLLL9d4G+biLMNfakUKXVEVY/nReyLc/qn1YxH2JzcFlnSr2C7HD3HRstsh/ufybU+eAfemtonH\nAZmt4FVKfQJ8AtC/f39lrte1uTou/WswGPjrr79YvdrIxPGVCA8PL/f7rFmzCA8PZ9GiRXzxxRf/\ntLdv3574+HgAUlJSKCwspHfv3iZvxwLCqW/DX4wVMq6eMD/Z+rEI+7N0HBzfWrFdChlxkVyxE1Ux\ndjLU9z9w6zu2iceBySwNlTHD0r8HDx5kzJgxjBs3jq5du9Y6lHHjxpGRkVHlY06fPs2SJUtqvQ1z\nqFfDX4x9SXkHQFic9WMR9mfHG/DL8xXbpZARFxk7hsxPAVd368ci7JOcDJmVFLyXUgZIjjX+t1rc\nLBAUFMSJEyfqGFSZ6dOnV/n3AQMGmGU7lubwQ1+MHYBa9oCZu6wfi4Ny6qEv8b/AsvEV2+VLSlxk\nbKnoqWugq5E5dkX9FDEJjm4u3/ZMOri42iYeJ2HKtGQrgBFAC03TEoFnlVKfWzowq9IVQtrhiu2N\nmoJvZ+vH48QcduiLsUK36w0wdZX1Y3F84Tjb0JfzyfBGj4rtUuiKi07/BV/cUL6tVS946HfbxCPs\nT/ZpeOuyYYljP4A+U20Tj5MxZZaGydYIxCZykyH3XMV2367QqLH14xH2x1ihGzwJxn9s/VichFMN\nfdEVwYutKrZLoSsuKi2GRRVn8JEcEeXI8AWLq59DGtKPQUlexfbWwdCg5jMpCCdk7OAz6CG46RXr\nx1JP2fXwl0pXR0uDhm7Wj8dJOfzwFyliRHWMDXGRxUUson4VvGa4EU1UzimGvxj7grrlbej3X6uH\nUt/Z7fAXYzkyNx4at6zYLuoqHEcc/vL5DXDmr/Jt806Cp49t4hH2J/MEvHNZ7SFjuS2qfhS8Dlbo\nBgYGsmfPHlq0aGGXr1cZhx3+UtnVuts/h94TrB+PsE+y+pXVOdzwl32rYN395dvGfwrBE20Tj7BP\nlx9L/AfA9J+NP1aYjfMWvCX5ZQPAky5bLMK9Gfh0sk1MFhYeHk5CQgLPPfecrUNxDJUVupMjoftN\n1o9H2KePhsO5feXb5GRIXMrYTYshk2HcR7aJR9in8DGQsKN8mwxxsRrnK3hTD8EHg8v+fcMld9Dv\n/aps7K45te5d5Zj
OBQsW4OPjQ1hYGADz58/Hz8+P2bNnm/TyhYWFjB8/nvHjxxMaGsp9993H7t27\n0ev1DBw4kJUrV5q8lPDixYvZvHkzHh4eRERE0KVLF5Oe55QMBljYvGL7tB8hYJD146lnHGboy4Yw\n2Ptl+bbRb8CA+2wTjzDKpmO9KztpliLGqux+rHdyLHx8Vfm2mX9DSyOr6wmLcZ6CN+pr+O6Riu2t\nepXNXRe70uohTZs2jfHjxxMWFobBYCAyMpKtW7cSGhpq9PEREREEBQUBkJeXx6RJk7jnnnu45557\nALj11lt5+umnKSws5K677jK52AVo1qwZcXFxfP3114SFhbFx48a676Cj0ZfCC74V2x/eBX5GppQS\nFmH3Q192fwqb5pZvGzAdRtt2YRdhnM3Gehsb4iI3G9lKOPY41tvYCVHPcXBHuE3Cqe8cv+BdMRmO\nbKrYviALjhz5d6JmG9xdHxgYiK+vL9HR0aSkpNCnTx86dOhATExMtc8dO3Ys8+bNY+rUf+ffW7Bg\nAQMGDMDd3Z133ilbVjAjI4NrrrkGgMzMTEpKSvjmm28AWLp06T9LDU+ePPmf/86ZM8es+2n3Sktg\nkZEbih6NdtrhLaIWjm+DpbeVb2vbp2ycrhAXffcoRH1Vvm3OAWjmb5t4hH2O9X4rGLJPlW+TK/82\n5bgFr7Gza7C7hJo+fTrh4eGcO3eOadOmkZuby/Dhw40+9tIrvMOGDWPLli1MmTIF7cIVg4yMDPLy\n8tDpdBQVFeHl5YWvr+8/BXRVY3i1S646aPXlCoSuEF5sXbFdvpzEpdKPwXv9K7bb2bGkPrKr4S8J\nOyF8dPk2WRTAoVhl+MuJX+HrseXbHjsETdtaZnvCZI5V8FY29rLbTTAl0vrxmGDcuHEsWLAAnU5H\nREQELi4uJl3hXbhwIQsXLmTmzJl88MEHAMyYMYMXXniBkydP8sQTT/Dee++ZHMfKlSt58sknWbly\nJUOGDKn1/jiE4lx42UhBO/cYNDYyAbyonwoyYXHHiu1S6NoNuxj+UpwHL7cr39YmFGb8Zpt4RK1Z\ndPiLsfpkyCy44UWzbkbUnmMUvPnp8JqRJX5vfRf63mP9eGrAzc2NkSNH4u3tjYtLzRa1ePvtt5k2\nbRrz5s2jV69euLq6MmXKFPR6PUOHDmXr1q2MGjXKpNfKysoiODiYRo0asWLFitrsiv0z9sUEMv+l\nKJno92UAACAASURBVK+ysdwy/lJcThaOEKZYeTcc+q58m+SJ3bHvgvfSGRcu9dCf0CrI+vHUgsFg\n4K+//mL16tXVP/iChISEf/795Zf/3iV+8eY1FxcXdu3aVeF5//3vf6t8vVdffdXkGBxKUQ68YqR7\n6qlEaNTE+vEI+ySrowlTfTsTopeVb5M8EZc7sxs+v2yhiCcSwMNIT7SwOfsseA9+C6uMXLn9vyRw\n87J+PLV08OBBxowZw7hx4+jatautw3E+JQXwUpuK7f+XDG6e1o9H2K+vx5aNrbuUnBCJy6UdgfcH\nlm+bvQ+ad7BNPMIkVh/rbdDDwst6DSetgB43W2yTou7sq+A99jMsv718W+NWMPeobeKpo6CgIE6c\nOGHrMJxPST58dQuc3Vu+/Zn0f2flEAIgbg2svWzeXLmBRFyu6HzZjYt5Kf+2TfgSeo23XUzCZFYd\n6x21FL6b9e/vbUJgxnarbV7Unv0UvO8NgPRLCtvBD8ONL9f5ZZVS9WdWgjpQynrTV9bJ5euPtx8M\n926CBjUbHy3qga9ugZMXvoi0BmVX6rzb2zYmYX/yUuH1S3rg7ggvmytViMutfxBiL9wD0+FK+M93\n8t3jQOyn4O12Y1nBO2MHtAk2y0u6u7uTkZGBr6+vFL1VUEqRkZGBu7u7rUOpnlLg6gX9/gM3vCQ3\nGYnKdb4GTu6AR/aCr5GbXoUAUIaynsTgiXD9IltHI+xZ+4FlQy5n7gJvK6/qJ+rMpIJX07QbgbcB\nF+AzpZT5V3G4/oWyHzPy9/cnMTGRtLQ0s76uM3J3d8ff3wHmpvXtDPOTbB2FcARXhpX9CFGVJq0d\ndticsLL+08p+hEOqtuDVNM0FeB+4DkgE/tY07Tul1EFLB1dXrq6udOxoZJ5NIYQQQghRbzQw4TED\ngXil1AmlVAkQCYyt5jlCCCGEEELYBVMK3nbAmUt+T7zQVo6maQ9omrZH07Q9MoRACCGEEELYC1MK\nXpMopT5RSvVXSvVv2bKluV5WCCGEEEKIOtGqm45K07QhwHNKqRsu/P4UgFKq0jnDNE1LA05d1twC\nSK9TtPbF2fYHjO9TB6WURc5g6kmegPPtU2X7I7lSN862PyDHFEtxtn2SY4plONv+QB2OKaYUvA2B\no8A1wFngb2CKUupATSLUNG2PUqp/TZ5jz5xtf8A+9skeYjA3Z9sne9kfe4nDXJxtf8A+9skeYjA3\nZ9sne9kfe4nDXJxtf6Bu+1TtLA1KqVJN02YBP1A2LdkXNS12hRBCCCGEsBWT5uFVSm0CNlk4FiGE\nEEIIIczObDetmeATK27LGpxtf8A+9skeYjA3Z9sne9kfe4nDXJxtf8A+9skeYjA3Z9sne9kfe4nD\nXJxtf6AO+1TtGF4hhBBCCCEcmTWv8AohhBBCCGF1UvAKIYQQQginZtaCV9O0GzVNO6JpWrymaU8a\n+bumado7F/6+T9O0vubcviWYsE8jNE3L0TQt5sLPAlvEaSpN077QNC1V07T9lfzdKp+Rs+WK5Inl\nPh/JFckVE+NwqjwByRXJFdNInpj4+SilzPJD2ZRlx4FOgBsQCwRd9pibgc2ABgwGdplr+5b4MXGf\nRgAbbR1rDfbpKqAvsL+Sv1v8M3K2XJE8sdznI7li/z/2kCvOlieSK5IrkifmzxNzXuEdCMQrpU4o\npUqASGDsZY8ZC3ytyvwFeGua1saMMZibKfvkUJRS24HMKh5ijc/I2XJF8sRyn4/kip2zk1xxtjwB\nyRXJFdNInpj4+Ziz4G0HnLnk98QLbTV9jD0xNd6hFy6rb9Y0rad1QrMYa3xGzpYrkieW+3wkVyRX\nzLUNR8oTkFwByRVTSJ6Y+PmYtPCEqFIUEKCUytM07WbgG6CrjWMS9kfyRJhKckWYSnJFmELyBPNe\n4T0LtL/kd/8LbTV9jD2pNl6l1HmlVN6Ff28CXDVNa2G9EM3OGp+Rs+WK5InlPh/JFckVc23DkfIE\nJFdAcsUUkicmfj7mLHj/BrpqmtZR0zQ3YBLw3WWP+Q6458IddoOBHKVUshljMLdq90nTtNaapmkX\n/j2Qsvc0w+qRmo81PiNnyxXJE8t9PpIrkiumcLY8AckVyRXTSJ6Y+PmYbUiDUqpU07RZwA+U3TX4\nhVLqgKZpD174+0fAJsrurosHCoB7zbV9SzBxnyYAD2maVgoUApPUhdsI7ZGmaSsou2OzhaZpicCz\ngCtY7zNytlyRPLHc5yO5IrliCmfLE5BcQXLFJJInpn8+srSwEEIIIYRwarLS
mhBCCCGEcGpS8Aoh\nhBBCCKcmBa8QQgghhHBqUvAKIYQQQginJgWvEEIIIYRwalLwCiGEEEIIpyYFrxBCCCGEcGpS8Aoh\nhBBCCKdmtpXWLtWiRQsVGBhoiZcWVrZ37950pf6fvfMOi+Lq4vA7KgqWiGKvWKPYUMGaZnrxS6Ip\n1tgFjcb0nphi2pfkiyXVjt0YY5pJTDPNEgXEig07YgOk9937/YEGWUEW2N2Z2T3v8/DIXGbvnJn9\neefce889V9V1Rt2iE/dCtCLYg+hEsBfRimAP9urEKQ6vv78/ERERzqhacDGaph1zVt2iE/dCtCLY\ng+hEsBfRimAP9upEQhoEQRAEQRAEt8auEV5N044CqYAFyFNKBTnTKMG8iFYEexCdCPYiWhHsQXQi\nlERpQhr6KaXinWaJ4E6IVgR7EJ0I9iJaEexBdCIUi1NieIsiNzeX2NhYsrKyXHVJ0+Lt7U2TJk3w\n8vJyyfUsVsWMXw9wW4cGdGxc0yXXLA7RSelwtVYOn0tj6T/Heemu9lSooLnkmsUhWrEfV+tEKcXs\nvw7TpYkvvVv5ueSaxSE6KR2u1sqp5Ew+/eMQL90VQOVK+kZZilbsx9U6AVj6zzHqX+XNLQH1y/R5\nex1eBfyqaZoFmK2UmmN7gqZpIUAIQLNmzS6rIDY2lho1auDv74+m6fuiNDJKKRISEoiNjaVFixZO\nv15yRi6Pfh7FH/vPoYEjHN4rakV04jhcrZU/D5zjkeXbqFSxAqP6+NPMr2p5qpM2xUW4Wic5eVZe\n+GoXqyNjGdazmSMcXmlTXISrtbInLpkxYeGkZ1sYHNyMgEZXlac6aVNchKt1opTi/Z/38/Hvh7ir\nU8MyO7z2dqeuUUoFAncAkzRNu64Ig+YopYKUUkF1616eHSIrKws/Pz8RUQlomoafn59LepgHzqRy\nz8cb2BgTz5sDOvLErVc7otorakV04jhcpRWlFPM3HGH0wq008vXhm0l9y+vsgrQpLsOVbUpieg7D\n529hdWQsU25qwxv3dnREtdKmuAhXauX3/Wd58LPNVNQ0Vk/sXV5nF6RNcRmu1EmuxcpTX+zk498P\nMTi4KTMHB5a5LrtGeJVSJy/8e1bTtK+AHsBfpb2YiMg+XPGc1u0+xROrdlCtSiVWjO9FkH9th9Tr\nCK2ITuzH2c8qO8/Cy1/vZlVELLcG1Gf6oECqVSl/JJS0Ka7FFc8p5mwaYxeFcyo5i5mDA7knsLFD\n6pU2xbW44lkt23KMqd/soV2DGiwYFUz9q7zLXae0Ka7FFc8pLTuPiUsj+ftgPI/f3JYpN7Uu13VL\nHOHVNK2apmk1Lv4O3ArsLvMVBV2xWBXv/7SfCUu30bZ+DdY+co3DnF3RinsRn5bN8HlbWBURy5Qb\nW/PZ8O4OcXZFJ+7Hpph4Bn6ykbSsPFaM7+kwZ1e04l5YrYq3f9zLi1/t5ro2dVgV2tshzq7oxP04\nm5rFoNmb2XQogf/e14lHb25TbifbnpCG+sAGTdN2AFuB75VS68p1VRPx999/06FDBwIDA8nMzOTU\nqVP079//ip9Zu3YtU6dOdZGF9pOcmcu4ReF89HsMg4KasjKkl0Mam0vwWK24k04gP7bu7g83sOtk\nMh8O6coTt17tyEVqHqsTcD+trNh6nBELttKgpjdfT+pL9+aO6UBfwGO14m46ycq18MiKKGb/eZjh\nvZoxd0SQQzrQF/BYnYD7aeXQuTQGfrKJI/HpzBsZxKDgy+Oty4RSyuE/3bt3V7ZER0dfVmYGQkND\n1ZIlS/49fuqpp9TXX399xc9YrVYVGBio0tPTy3xdRz+vA6dT1A3v/a5aPf+9Wrz5qLJarXZ9DohQ\nTtCIEp04RCdKOf6Z/bAzTrV76UfV661f1c4TSXZ/TrRiH+7SpuRZrGrad3tU82fXqhHzt6jkzBy7\nPic6sQ93alMS0rLVwE82qubPrlWz/4yR94+DcZc2RSmlIo4mqi6v/aS6T/tZ7Thx3q7P2KsTj9lp\nberUqcyYMePf4xdffJGZM2de8TPz5s1j1apVvPzyywwbNgyAL7/8kttvvx2A6dOnM2bMGAB27dpF\nx44dycjIQNM0brjhBtauXeukuykdP+05zb0fbyQ1K48VIb14qFdziVMqBk/WifVCerqJy7bRrmEN\nvpncl05N9E1TZ2Q8WSvp2XmELolg3oYjjOzdnPkjg7jK23XpicyEJ+sE4Eh8OgM/2cjuk8l8Mqwb\nIde1kvdPMXi6Vn7ac5qhc//B18eLLyf2oXMTX4fW77I8vJfy2nd7iI5LcWidAY2u4pX/dCj272PG\njGHgwIE89thjWK1WVq5cyfr16wkMLHrF3/Llyxk3bhwbNmygf//+3H///Rw5coRatWpRpUoVAB59\n9FFuuOEGvvrqK958801mz55N1ar5q9eDgoL4+++/efDBBx16n6XhogMza30MXZrU5LOHutOwpo9u\n9pQW0YnryMjJ46kvdvDDrtMM7NaYtwZ0wturoq42lQbRiuuIS8pk7KII9p9O4bW7OzCyj7+u9pQG\n0YlrCT+aSMjiCDRNY/n4XnRvXktvk+xGtOJalvxzjFe+2U3nJr7MHxmEX/UqDr+GLg6vHvj7++Pn\n50dUVBRnzpyha9euNG/enO3bt9tdx6lTp7g0lUmFChUICwujc+fOhIaG0rdv33//Vq9ePeLi4hx6\nD6UhJSuXx1du57d9Z3mgexOm3dvRVA6MXniaTgBOJmUyflEEe0+n8MKd7Rh/bUsZgbEDT9TKjhNJ\njFscQVaOhQWjgrnh6nq62mMGPFEnAGt3xvHEqh009vUhbHQwzf2q6W2S4fFErSileO+n/XzyxyFu\nalePj4Z2w6eyc3wVXRzeK/VwnMm4ceMICwvj9OnTjBkzhtTUVK699toiz12+fDkBAQGFynx8fC7L\nO3fw4EGqV69+mWiysrLw8dFnNDXmbBohiyM4npjBa3d3YERvc4YwiE6cT+SxREKXRJKda2XBqGD6\nmdSBEa04nx92neKJVdupU70Ky8b1pG39GrrZUlZEJ85HKcVnfx7mv+v2EexfizkPBVGrWmXd7Ckr\nohXnk5Nn5bk1O1mz7SRDejRj2j0dqFTReZG2HjPCCzBgwACmTp1Kbm4uy5cvp2LFiqXqObVt25aj\nR4/+e5ycnMyUKVP466+/mDx5MqtXr+b+++8H4MCBA3Ts6JCk66Xi5z2neWLVDry9KrBsXE96ttR3\nW08z4gk6Afgi4gQvfrWbRr7erAwJonU98zkweuMJWlFK8ckfh3jvp/10a+bLnBFB1HHCdKM74wk6\nAcizWHn5mz2s2Hqc/3RpxHv3d5aZxVLiKVpJzcrl4WXb+PtgPE/c0pZHbixfjl178CiHt3LlyvTr\n1w9fX18qViz9f8Jq1arRqlUrYmJiaN26NY8//jiTJk2ibdu2zJ8/n379+nHddddRr149fv/9d95+\n+20n3EXRWK2Kmb8dZOZvB+ncpCa
fDe9OI1/zxOsaCXfWCeS/lN75cR/zNhyhb2s/Ph7aDd+q5huB\nMQLurpXsPAsvrNnNl9tiuSewEf+9TxyYsuDuOoH8TQImLdvGnwfO8fANrXjKsakMPQZP0MrZlCxG\nLQxn/5lU3r2/Mw8GNXXNhe1J5VDaH6Om+7BYLKpLly7qwIEDZa5jzZo16sUXX7ziOadPn1Y33nhj\nma+hVOmeV3JmjhobFq6aP7tWPfH5dpWZk1eua18KHpgWxkw6Uap0zywpI0eNmL9FNX92rXrlm90q\nN89S7utfRLRSNozYpiiVn0rqgU83qebPrlUzfjlgdyqpkhCdlA2jtilKKRWXlKFum/6navn892rF\nlmPlvv5FRCtlw6htilJKHTyTqvq8/Ztq//KP6vd9Z8p1/YvYqxOPSUsWHR1N69atuemmm2jTpk2Z\n6xkwYAD+/v5XPOf48eP873//K/M1SsOhc2nc+/FGft9/llf/E8D7D8gITHlwV50AHD6XxoBPNrIx\nJp63B3bi1budGy/l7rizVmLO5rcr22OTmDWkq0N2OfJU3FknANFxKQz4eBOx5zNZMCqYwT0ctEmA\nB+LuWok8lsj9n20iO8/C5yG9Xb7o1WNCGgICAjh8+LBD6ho3btwV/x4cHOyQ65TEr9FnePzz7VSu\nlB+v20vidcuNO+oE4O+D55i0bBuVKkpst6NwV61sOBjPxGWRVKlUgZUhvejWzDyppIyIu+oE4I/9\nZ5m0bBs1vL1YFdqbgEZXufT67oY7a+WnPaeZsiKKRr4+LBrdg2Z+VV16ffAgh9edsFoVH66PYfqv\nB+jUOD+/bmOJ1xWKQCnFwo1HeeP7aNrWr8HcEUE0re36hkYwB8u2HGPqN3toXbc680cF0aSWaEUo\nmuVbjvPyN7tpW78GC0cF06CmQ7epF9yIJZuPMvXbPQQ29WX+yGBq65S1Qxxek5GalcuTq3bwc/QZ\nBnZtzFsDzbVBgOA6svMsTP16D59HnODWgPpMHxToyL3rBTfCYlW89cNe5m84Qr+r6zJrSFdqyM5p\nQhFYrYr3ft7Pp38c4oar6/LR0G5Ul3ZFKAKlFO/+lK+Vm9vX58MhXZ2WY9ceRKUm4vC5NMYvjuBo\nQgZT+wcwuq+/xNUJRRKfls2EJZFEHDvPIze25vGb28qKaaFI0rLzmLIiivX7zjKqjz8v3dVeYruF\nIsnKtfDkFzv4fucphvZsxuuyDkAohpw8K899uZM1Ua7JsWsP4vCahPX7zvDoiu14VarAkrE96NOq\njt4mCQYlOi6F8YsjSEjP5sMhXflPl0Z6myQYlJNJmYwNC+fg2TSm3duRh3o119skwaCcT89h/OII\nIo6d57k72hF6nezIKBTNpTl2n7q1LZP6OT/Hrj1I16wE/P39iY+P160+q1Ux67eDjF0UQTO/qnw7\nua84uwZEb51cZN3uU9z36SYsVsUXoX3E2TUgRtHK9hNJ3PPRRk6ez2ThqGBxdg2GUXQCcDQ+nYGf\nbmLnyWQ+HtqNCde3MoQDI+RjJK2cScli0Ox/2Hwogffu78zkG42T4UUcXicRFhbGq6++Wq460rLz\nmLgskg9+OcA9XRqxekIfWUTiZjhCJ5AfKzXz14NMWLqNqxvU4NvJfenUpGb5DRQMg6O0AvD9zlMM\nmr0Zn8oVWPNwH65rW9ch9Qr640idQH4qqQGfbCQpI4fl43pyV+eGDqtb0BdHayXmbBoDP9nE0YR0\n5o8K5gFXbShhJx7j8E6dOpUZM2b8e/ziiy8yc+ZMuz+fmZnJHXfcwdy5cwkPD6dz585kZWWRnp5O\nhw4d2L17t911vfvuu3Tq1IkePXoQExNT5Dl5FisDPt7Ir3vP8tJd7Zk+KFDXYG9PwWw6AbAqxeTl\nUUz/9QADuzVmZUgv6l0lK6adjRm1opTiw98OMmn5Njo2rsnXD/elTX3ZUtqZmFEnF1m7M44hc7dQ\n08eLNQ/3Jci/tt3XEkqPmbUSfjSR+z7dRHaelc9DenO9ATvR+sTw/vgcnN7l2DobdII73in2z2PG\njGHgwIE89thjWK1WVq5cyfr16wkMDCzy/OXLlxMQEABAWloagwcPZsSIEYwYMQKAu+++m5deeonM\nzEyGDx9eqv2oa9asya5du1i8eDGPPfYYa9euLfT3lMxczqZmE5+WzeIxPejb2kNDGEQnV9QJ5C8M\niE/N5ofdp3jhznaMv9ZD4+pEKyVqRSnFE6t28FXUSe4NbMQ7nrhNsOikRJ1c5LM/D/HOj/sIal6L\nOSOCdEslpRuiFbu1sm73Kaas3E4TXx8Wjelh2NSXHrNozd/fHz8/P6Kiojhz5gxdu3alefPmbN++\nvcTP3nPPPTzzzDMMGzbs37KpU6cSHByMt7c3s2bNAiAhIYGbbroJgMTERHJycvj6668BWLJkCZ06\ndQJgyJAh//77+OOP/1unUopzqdmcTsmiUgWNbydfY1jhuCtm0MlF0rPzOJaQQZ5VsWBkMP3auXbX\nGk/HTFrJs1iJT8vhq6iTPHlLWybfaIxFJJ6AmXQC+e+hpIwc3vnxMP07N+T9B7p4XsdIJ8ymFYBF\nm47y6nd76NrUl3k65ti1B30c3iv0cJzJuHHjCAsL4/Tp04wZM4bU1FSuvfbaIs+9tOfUt29f1q1b\nx9ChQ/99SSQkJJCWlkZubi5ZWVlUq1YNPz+/f4UZFhbG0aNHi4yPufRFc/F3i1URez6D5MxcfH0q\nU6lGFXF2RSdF/g6QmJ7DyaRMvCpq1K1Rhc6e7uyKVor8HfJTSR1NSCfHYuWjoV3p39mDFzKKTor8\n/SIWq+J4YgZp2RYmXN+KZ2672nPTGYpWivz9IkopkjNzeeXbw9wSUJ9Zg/XNsWsPdsfwappWUdO0\nKE3Tih/XNjgDBgxg3bp1hIeHc9ttt1GjRg22b99e5M9FEQG8/vrr1KpVi0mTJv1bFhoayrRp0xg2\nbBjPPvtsqez4/PPP//23d+/eZOdaOHQujZTMXBrW9KFpbR8ZfdERo+oE8huZuKRMYs9nUK1yRVrX\nrY6X5MHUDSNrBfLTAx06l4bVCnWrV/FsZ1dHjK4TgNw8K4fPpZGWlUetql48d0c7z3V2dcQMWrEq\nxYnzmaRm5TGsZzM+G97d8M4ulG6E91FgL2DazbIrV65Mv3798PX1pWLF0n05M2fOZMyYMTzzzDN0\n7NgRLy8vhg4disVioU+fPqxfv54bb7zRrrrOnz9P586dqVKlCnPDFhNzLg0A/zrVTL+7kaZpFYEI\n4KRSqr/e9pQFI+pkxYoV5FmtnEjMJDUrF7/qVWhU01s6RjpjVK0AJKRlE5eURRWvCvj7VeNQsnSM\n9MLIOgHIzMmfBbBYFf51qhKb6jHRjobD6FqxWK0cS8ggLTuPmj6VeOPejuZ5DymlSvwBmgC/ATcC\na0s6v3v37sqW6Ojoy8pcjcViUV26dFEHDhzQ2xRltVrVmeRMtePEebX/dIrKzs0r9HcjPC+l
lAIi\nlB0aUQVaeQJYLjpxLFk5eWrfqRS180SSik/LKvQ3IzwzpcqklYpAlGjFsVitVnXyfIbaceK8OnIu\nTeVZrEopYzwvpUQnRiMlM0ftjk1S0XHJKiM7/z1khGemVOm1Upof0UrpycmzqP2n899DCWnZhnhe\nStmvE3u7/DOAZwBrcSdomhaiaVqEpmkR586dK6Xb7Xyio6Np3bo1N910E23atNHVlotxUqdTsvD1\nqUzrutWpXMn40wEloWlaE+AuYJ7etpQVI+nkIqlZucScS8NiVbSoWw2/alX0NslRXJw1MiVG1IrF\nauVoQgbxadnUqV6F5n5VqWj+aWnRiZNITM/maHwGXpUq0KpudVNMS18Js4deGlkrWbkWDp1NIyfP\nin+dqoZenFYcJc5baJrWHzirlIrUNO2G4s5TSs0B5gAEBQUph1noIAICAjh8+LDeZpCdZ+FYQgbZ\nuRYa1vSmTvUq5pkOKJmLHaNiE3tqmhYChAA0a9bMRWbZj1F0AvmzLwlpOZxKzqSKV0X8/aq6RccI\nCnWO3iR/VsB0GEkrADl5Fo4mZJCda6Wxrw9+1c3fMRKdOAelFGdSsjibmk0Nby+a1XaLjhGYPPTS\niFqB/IxARxPS0dBoVbcaPpXNGfJizwhvX+BuTdOOAiuBGzVNW1qWi+WPPHsuqVm5xJxNI9dixb9O\nNerWKDoG04zP6dKO0ZXOU0rNUUoFKaWC6tYtOjG1Ge/f0ViV4uT5TOKSM6nh7UWrYmYBTPysHDJr\nZOL7dygZ2XnEnE2/0LZUvczZNfFzKlEn9mDi+3c4VqviRGImZ1OzqV2t8mWzAGZ9Vo6aYTTr/TuL\n5IwcDsenU6lCBVrXK3B2zficSnR4lVLPK6WaKKX8gcHAeqXU8NJeyNvbm4SEBFM+pPKilOJsahZH\n49PxqliB1vWqF7s4TSlFQkIC3t6m2ynLIR0jT9bJRXItVo6cSycxI4d6NYqfljarVhzVORKt5JOU\nkcOh+HQqVIBWdS9vW9xdJyV1jEQnBeRZrByJTycpM4cGNb1p7OtDBa2ws2tGrVyg3J1o0Uph4tOy\nOZaYgY9XRVrVrfbvoItZdeKycekmTZoQGxuLEeN7nYlVKZIycsnIsVC1ckV8q3pxOPHKU0fe3t40\nadLERRY6BqXU88DzABdCX54qS8fIU3VykVyLlYS0HCxKUatqZc6nVuR8XPHnm1ErFHSO7gS8gas0\nTVtaWr14ulYAUrJyScnMo0qlCvhVq8yR80W3Le6sk5LC6UQn+eRZrCSk55BnzW9bElIrklDEeWbU\niqNCL0Ur+SiV37akZuXh41WBStUqczChcNtiRp2UyuFVSv0B/FGWC3l5edGiRYuyfNS0nEjMYPzi\nCPafSeWZ29oxobeHbvtaCjxRJxdZt/sUj3++g5o+XswZ0Z3OTXz1NskpOKpz5Mlaycq18NyXO/l6\nexwDuzXm7YGdqOIm8d0XEZ04jshj5xm/OAKrUswdEUR3/9p6m+RoHNKJFq3kb1f/9OodfLM9juG9\nmvHa3R3dJb7bc7YWdjV/HzzHIyuisFoVYaN7cH3bouNV3ZHydIw8EaUUs36LYfqvBwhs6such7pT\n7ypzTRUJriMhLZuQJZFEHjvP07ddzcM3tJKOtFAsP+w6xWOfb6dhTW/CRvegRZ1qepvkcBzVOfJ0\nUrJyCV0cyebDCW7ZtojD62CUUsz9+zDv/LiPNvVqMPuh7vi7YQMjOIbMHAtPrd7B9ztPMbBrj2RH\nVwAAIABJREFUY94a2Mmj9q2XzlHpOHAmlTFh4ZxLzeaTYd24s1NDvU1yCaKT0nPxXfTWD/vo1syX\nuSOC3CJzh+AcTidnMWrhVmLOpvHBg10Y2M1c4Qr2IA6vA8nMsfDslzv5dkccd3ZqwHv3d6FaFXnE\nQtHEJWUyfnEE0adSeP6OdoRcJyEvQvH8sf8sk5dH4VO5IqtCe9OlqXuGvAjlJ89i5ZVv97Bsy3Hu\n6tSQ/z3YxWM60tI5Kj0HzqQyasFWkjNzWTg6mGvbuOeMtHhjDuJEYgYhSyLZdzrFLacCBMcSeew8\noUsiyc61MH9kEDe2q6+3SYKBWbz5KK9+u4erG1zF/JFBNPL10dskwaCkZ+fxyIoo1u87S+j1LXn2\ntnZUcJMYTMHxbD2SyLhF4VTxqsjnob3p2Lim3iY5DXF4HcDGmHgmLd+G1apYMCqYflfX09skwcCs\njozlhTW7aOjrzYrxPWlTv9h9OgQPJ89iZdraaBZtPsbN7esxc3BXmTUSiuVMShZjwsLZdzqVN+7t\nyPBezfU2STAwF+O7m9byIWx0D5rWrqq3SU5FWs5yoJRi/oYjvPXDXlrVrc6cEUFuuSBAcAwWq+Kd\nH/cy9+8j9GnlxyfDuuFb1XzbMwquITUrl8nLo/jzwDnGXdOC5+9s7zarpQXHs+90CmMWhpOUmcu8\nkUEy8CJckYUbj/D62mi6NavFvBFB1DLhVsGlRRzeMpKZY+G5NTv5Znsct3dowPsPdqG6jLwIxZCS\nlcuUFVH8sf8cI3s356X+AXhVtGejQ8ETOZGYwdhF4Rw+l85bAzoxtKfxtuEWjMNfB87x8LJtVKuS\nH9/tztPSQvmwWhXvrNvHnL8Oc1uH+swc3NVj4rvFQysDJxIzmLA0kuhTKTx1a1sm9Wst8bpCsRyJ\nT2fconCOJWTw5oCODOsp04xC8UQeO0/I4ghyLVYWjelB39Z19DZJMDCfhx/nxa9207pedRaODqZh\nTYnvFoomO8/CU1/s5LsdcYzo3ZxX/tPBo2aNxOEtJZsuxOvmWRULRgbTr51MGwnF8/fBc0xato2K\nFTSWjutJr5Z+epskGJhvtp/k6dU7aVjTmwWjgmlVt7reJgkGRSnF/34+wEe/x3Bd27p8PLRrsVvW\nC8KlOXafuf1qJl7veQvrxeG1E6UUCzYe5a0f9tKyTjVmP9SdlvIyEopBKUXYpqO88f1eWtetzryR\nQW6/IEAoO0opZvx6kJm/HaRHi9rMHt7dI2LqhLKRnWfhmdX5IXWDg5sy7d6OEiIlFMulOXanD+rC\ngK7ul2PXHsThtYPMHAsvfLWLr6JOcmtAfT4YFCjxukKx5ORZmfrNblaGn+CWgPpMF70IVyArN995\n+XZHHPd1a8LbAztRuZI4L0LRJGXkELIkkq1HEiUFplAiB86kMnLBVlKz8ggb3YNr2nhuiJS8hUvg\nZFImoUsi2BOXwhO3tGVyv9aS01Aolvi0bCYujST86Hkm92vNE7e0Fb0IxXIuNZuQJRFEHU/y2GlG\nwX6OJ2QwKmwrsYmZzBrSlbu7NNLbJMHAbDmcwPjFEXh7VeTz0F50aOTZixnF4b0Cmw8lMGn5NnLz\nrMwbEcRN7WVzAKF49p5KYdyiCOLTspk5OJB7AhvrbZJgYPafzt8mOCE9m8+Gd+P2jp6xTbBQNrYd\nP8/4RRFYlGLpuJ70aFFbb5MEA/P9zlM8/vl2mtb
2YdGYHjSpZbKQuqxkWDEErnsaWvVzSJXi8BaB\nUoqFG4/y5g978ferypwRQbJ4RLgi63af5olV27nK24svJvSmcxPZ9lUont/3n+WR5VFUvbBNsOhF\nuBLrdp/i0ZXbaVDTm4WjgmX9iHBF5m84whvfR9O9WS3mjQwyX773Q+thyYD8309cLw6vs8jKzY/X\nXbPtJDe3r8/0QV1k5atQLEopPlwfwwe/HCCwqS9zHupOvau89TZLMChKKRZtOsrra6Np1+Aq5o8K\nkjRSQrFc3NzozR/20rWpL3NHBOFXvYreZgkGxWpVvH1hc6PbOzRgxuBA8+XY/XI87FqV/3u3kXDD\nsw6rWhzeS4hLyiR0SSS7Tibz2M1tmHJjG4m/FIolM8fCU6t38P3OUwzs2pi3BnYyX+MiuIw8i5XX\n10azePMxbgmoz4xBgbJNsFAsl+rljo4NmD7IhM5LUSy9D3IzYcQ3UFEGkxzFpTl2R/Xx5+X+AebK\nsZt2Ft5vU3A86nvwv8ahl5DW9gL/HE5g0rJtZOdZmTsiiFsCJF5XKJ64pExCLixmfO6OdoRe11IW\nGwnFknJhm+C/Dpwj9LqWPHN7O3O9jASXkp6dx5QVUfy27ywh17XkudvbmX/wJfkkTA8oOK4g7oej\nSM7MZcKS/By7pnwfLb4XDv9ecPz8Saji+LAdj1fcxSnGad/vpXnt/Hjd1vUkPkoonshj5wldEklW\nrkUWMwolciIxgzFh4RyJT+edgZ0Y3EO2CRaK52xKFmMWhRMdl8K0ezvyUC832Jlxy2z48Zn83ytW\nzndozOSQGZi4pExGLwzncHwaMwYFcm9Xky2WftUmc8SryU67lEc7vFm5Fl76ejerI2O5uX09PhgU\nyFUSrytcgS8jY3l+zS4a+nqzYnxP2tSvobdJgoGJPJZIyOJI8qyKxWN70KeVm+bAjP4GVo2AQUuh\n/X/0tsa07D+dyuiFW0nKzGW+O+zkmZsFb14yIHDLNOg7RT973Iz9p1MZtbAgx66ptiFPiYMP2hcc\n1+8IEzc69ZIlOryapnkDfwFVLpy/Win1ilOtcgGnkjOZsCSSHbHJTLmpDY/dJPG6QvFYrIr/rtvH\nnL8O06eVHx8P7SY7YQlX5OuokzyzeieNfPO3CXbLlfVWC8wKhKTj+ceV3fAeXcTGmHgmLInE50Lm\njo6NTZ4z9ZvJELWk4PjRnVDLDUarDcI/F3Ls+njl6yWg0VV6m2Q/X4yGPWsKjiduhvoBxZ/vIOwZ\n4c0GblRKpWma5gVs0DTtR6XUP062zWlsPZLIw8siycyxMPuh7tzWoYHeJgkGJiUrl0dXRPH7/nOM\n6N2cl/sHyDaeQrFYrYoZvx5g1voYeraozWfuuk3w8X9gwW0FxxM2QINO+tljYlZFnOCFNbtoVbc6\nC0YH09jX5Jk7bKepX0mSEAYH8t2OOJ5ctYNmflUJGx1srhy7LgxhsKVEh1cppYC0C4deF36UM41y\nFkoplv5zjNe+i6ZZ7aqsDOlF63oyJe0I3HUm4Eh8OuMWhXMsIYO3BnRiaE8Pir/MPA//9YeGXSDk\nT3lh2UFWroUnv8jP3PFgUBPeuNcNtwlWCl67JG9wk2AY+4voowwopfjglwN8uD6Ga9vU4eNh3cwd\nVnd6F3x2ycr6qnXgmUP62eOGzPv7MG98v5dg/1rMHWGiHLvpCfBey4LjGo3gyb0uNcGuGF5N0yoC\nkUBr4GOl1JYizgkBQgCaNTOeU5CVa2HqN7tZFRHLje3qMWOwxOs6GLebCdhwMJ5Jy7dRQYOl43rS\nq6Wf3ia5jo2z4JeX83+PP+hQZ8ZdO0dnU7MYvziSnbFJPH9HO0LMtlLaHjZ9CD+/VHA8bDW0uUU/\ne0xMdp6F577cxVdRJxkU1JQ3BnQ098zRa7VBWQqOJ4VD3bb62eNmWK2KN3/Yy/wNR8yXpm7lMNi3\ntuB4/Hpo3N3lZtjl8CqlLECgpmm+wFeapnVUSu22OWcOMAcgKCjIUCPAp5OzCF0ayY4TSTxyY2se\nv7mtxOs6GHebCbiYuaN13erMGxlE09ommjIqD+nx8F6rguO+j8Etrzn6Km7XObq4rXRieg6fDuvO\n7R3dMEzKdiryhTioXE0fW0xOckYuIUsi2HIkkadubcukfq3N2zmyWuH1WoXLXDhN7Qlk51l4ctUO\n1u48Zb4cuzqGMNhSqiwNSqkkTdN+B24Hdpd0vhEIP5rIxKXbyMzJ47PhbvoiKi1//Bf+eCs/8XfL\nGxxWrTvMBOTkWZn6zW5Whp/gloD6TB8USHVP2RzAtmF6cj/UcPz/F3fqHAGs33eGR5ZHUd27El9M\ncIPFRrYc/gMW31O4zEUvLXecDTiRmMGohVs5kZjJzMGB3BNosjRSlxI+D75/suC41yS4/S397HFD\nkjNzCVmc3zky1cxRcixM71C4TOeOkD1ZGuoCuRecXR/gFuC/TresnCilWLrlOK99u4emtatKCimA\n+Bj46JJphOqOzR9r9pmAhLRsJi7dxtajiTx8QyueuvVqz5gJOPgrLLuv4LhxEIz/zamXdIfOkVKK\nhRuP8sb30bRveBXzRwbToKabbStt2wl6+B+o177oc52DW80GbD+RxLhF4eRaFEvG9qCnmcOkbLXx\n4mnw0mexnTt2jCA/m9TIBVs5Ep9urs6RrTYeCIMOA3Qx5VLsGbpqCCy68IKqAKxSSq0t4TO6kp1n\n4ZVv9rAy/AQ3XF2XmYO7UtPHg+N1rVYIuwuObyooe+YIVK3tlMuZcSbg4pR0fFq2uRqW8mLbMDlh\nO8eiMHvnKNdi5dVv97Bsy3FuDajPjMGBVK3sRjMB54/BzM6Fy3QYnXGn2YB1u0/z6Moo6l/lzcLR\nwbQya5q6hEPwYbfCZfqHMLhVxwjyc+yOXLCV9Ow8Fo3uQR+z5Ng1cIYOe7I07AS6usAWh3AmJYsJ\nSyOJOp7EpH6teOKWq80T6+IMZgbC+SMFx/fNh073O/wyZp0JAPhpz2ke/3w7V3l78cWE3nRu4lvy\nh8zOnq/hi5GFy/RxaEzXOUrOzGXy8m38fTCeCde34pnb3GwmwPaF9eASCLhbH1soeTbADDMBCy7M\nBHRp4su8kUHUqV5Fb7PKhq027vkYug7Xx5ZLcKeOEcDmQwmELImgauWKrJrQm/YNTZBj13b9Bxih\nI1QINxqSyN/VaMLSbaRn5/HpsG7c0amh3ibpR1G98JcToKLTvnLTzQQopfhofQz/++UAXZr6Mveh\n7tS7ys2mpIvC9qU1/ndo3K3oc52AmTtHxxLSGROWn6bu3fs682BwU71NchzZqfB2k8JlBnhhlTQb\nYOSZAItVMW1tNGGbjnJ7hwbMGGyilfW2GGjxUVG4Q5gUFM6xu2hMD3PkZLbVRudBMHCOPrZcAbdx\neJdvOc4r3+6mka8PS8f25OoGHhyvayu+4HFw1/+cekmzzQRk5lh4anV+vtQBXRvz9sBO5n0R2cu2\nJfDt5M
Jl+ry0TNc5gvwFsKFLIrEqxZKxPendysTxl7bYthltboNhq/SxpRjMNhuQkZPHlBXb+XXv\nGcZf24Ln72hvzpmAn16EzR8VLjOYswvmD5OCghy7PfxrM3dEEDWrmiAU07btmHoeKhgzvZ7pHd7s\nPAuvfhvNiq3Hub5tXWYN7moOkTiDiIWw9rHCZQZsmPTmVHIm4xdHsCcuhefuaEeoWVa9lofLFh9t\ngXrtdDHFbJ0jgDXbYnnuy100qeXD/FHBtKjjJum4bDeRAJiaCBWM0fkz62zA2dQsxi2KYPfJZF6/\npwMjevvrbVLZsG03xv4CTXvoY4udmK1jBPk5dt/4fi8LNh7hzk4N+OBBE8wEGHRG6EqY2uE9eyFe\nd9vxJCZeWFXvkfG6ReVBHLkWWlyrjz0GJvLYeSYszd9Wet6IIG5q79hMFYZj3i0Qu7VwmcEbJSNh\ntebvhPXR7zH0bunHp8O7mWdno5J4rRYoa+Ey42nDdLMBB86kMnphOInpOcw1axtjMmfGrB0juLA7\n46odfL/LRDl2/9ceUuMKjltcDyO/1c8eOzGtwxt57DwTl0aSmpXHx0O7cVdnD43Xfb8tpJ0pXGbg\nhklPvoyM5fk1u2hQ05vl4zwgTZ3t6MzEzVA/QB9bTEhmjoWnvsh/EQ0Obsrr93R0n22CbbXx6A6o\n5a+LKVfCbLMBG2PimbA0Em+viqwK7U2nJibMyWyrjYqV4eVz+thiP6brGEH+BiTjl0Sw9UgiL97Z\nnnHXtjD+bKOtPl6Oh4rmmFU3pcO7Yutxpn6zm4Y1fVg8tgftGphgBaOjsc2pC/DccfA2YQPrZCxW\nxX/X7WPOX4fp08qPj4d2o1Y1NxmlKwrbBgmkE1RKzqZkMX5xBDtPJvPCne0Yf62bhL388DRstVlM\nItpwCKsjY3nuy520rFuNhaNNstjIFtu246WzUMn4GSXM1jECiEvKZNTC/By7s4Z05e4ujfQ26crk\nZcMb9QqXmaztMJXDm5Nn5dXv9rB8y3GubVOHD4d0dZ/pxdJg2yjVC4CHN+tji8FJycplyooo/th/\njhG9m/Ny/wBz71d/JYqKxxz7KzQN1scek7L3VApjw8I5n5HL7OHdubWDm+zOaNtuDJwHnR/QxxY3\nQinF9F8PMuu3g1zTug6fDO/GVd7mGPH6lwM/wfIHC5eZzJkxE3tPpTBq4VYysi0sGtODPq0MnmN3\n9nVwakfBsV9reCRSP3vKiGkc3rOpWTy8dBsRx84z4fpWPH2bB8brzr8VTthkWpFGqViOxKczblF+\nCqk37u3I8F7N9TbJeciorkP4be8ZpqyIosaFnMxusU1wzK+w9L7CZaINh5CTZ+W5L3eyJuokD3Rv\nwpsDOpkv7MW27bhlGvSdoo8tHsCmmHhCl0RStUpFvpjY2/gz1Lb6eCEOKptz0a4pHN6o4/kLjVIy\n8/hwSFf+Y/Shf0dT1MhdjxC48z197DEBGw7GM2n5NipouF8KqUuxWuB1mx3zJoVD3bb62GNSlFLM\n33CEN3/YS8dGNZk3Moj67pCT2fZldfVdMGS5Pra4GckZuUxYGsnmwwk8cUtbHrmxtbnCXop6r0hH\nyKl8s/0kT32xgxZ1qhE2ugeNjBz2UtS7xeT6MLzDuyr8BC99vZv6Nauw5uE+5thxxJHIyF2pUEqx\nePMxXl8bTeu61Zk7IohmflX1Nss5iDYcQq7Fyivf5odK3d6hAR8M6mL+bYLTzsH7rQuXiTYcxonE\nDEaHhXMsIZ3pg7owoGuTkj9kJD4KhvgDhctEH05DKcXcvw/z1g/76NmiNnNGBFHTx8BhL6tGQPQ3\nhcvcQB+GbdVz8qxMWxvNkn+OcU3r/Hhdt15oZEtcFMy5oXCZjrlTzUBOXr7jsmLrcW5uX58ZgwOp\nXsWwEi87uVnwpk2qo8f3QE2TvXQNQHJmLpOWbWNDTDwTb2jF07e6wTbB0hFyKttPJDFuUTi5lvwN\nSHq1NNnska0+pkRB7Zb62OIBXLrb3l2dG/LBg12oUsnAOXZt9fHMEahau+hzTYYhvYFzqdk8vCyS\n8KPnCbmuJc/cdjWV3HWhUVHIC6vUJKRlM3HpNrYeTeThCzmZTe+4FIVow2EcjU9n7KJwjidm8N79\nnXkgyOTbBBc1Re3c7cQ9jp/3nGbKyijqVK/CypAetK5XXW+T7Cc5FqZ3KFwmbYdTuTTH7pi+LXjp\nLgPvtucBIS6Gawl3nEgidEkkSZk55kjV4UhWDoN9NqkDX0kCM8WF6cDeUymMWxRBfFo2MwcHck9g\nY71NcjyZSfBfm0V3btTzdjVbjyQSuiQCBSwd25OeZhuls0U6Qk5nwYYjTPs+ms5NfJk3Ioi6NYyf\nrutfbPXRtCeM/VkfWzyES3PsvnRXe8Zda+BR9B+ega2zC5e5YfthKId3VUR+vG7d6lX4cmIfOjRy\ngxXS9lBUz8qAe9kbkZ/2nObxz7dTw7sSq0J706Wpb8kfMhvizDiU1ZGxPL9mJ01rV2XByGD8zb5N\nsK0+HtkGfq30scUNuXRK+rYO9ZkxqCs+lQ08JW2LrT5kEMXpnEzKZNSCrRxLyDD+wJ2tPh6Phppu\nOGiEQRzeXIuVN9ZGs2jzMfq08uOjod2o7SnxuuLMlAmlFB//HsP7Px+gS1Nf5jzU3T1W1V9K2ll4\nv03hMhOnhNEbq1Xx/s/7+eSPQ/Rt7ccnQ7tTs6qBF46UxNonIGJ+4TJpOxxKRk4ej67czi/RZxh7\nTQteuLO9edJhbp0LPzxVuEz04XT+zbGbYyFsTLCxc+za+h9urg/dHd74tGweXraNrUcSGX9tC569\nvZ1nxOue3g2f9S1cNn49NO5e9PnCv2TmWHh69Q7W7jzFvYGNeOe+znh7mWjExR6kI+RQMnMsPLFq\nOz/uPs2QHs14/Z4O5t6AxFYf982HTvfrY4ubcjY1i/GLIth1MpnX7u7AyD7+eptkP7b6eHAJBNyt\njy0exMYLOXarV6nEFxMMnGP3r/dg/RuFyzzg/aKrw7vjRBITlkaSmJ7DjEGB3NvVPYfRL0OcmTJz\nKjmT8Ysj2BOXwrO3t2PC9W6y5etFEo/ArMDCZSbaq9yInE3JYtzifMflpbvaM/YaE+xXXxwHfobl\nNrujSdvhcA6eSWXUwnAS03OY81AQNwfUL/lDRsCSC9NsRhRFHy7BNDl2bf2PyRFQp03R57oZujm8\nqyNjeeGrXf/G67rFjkYl8e0U2LaocNnU81DBxCNNLmTb8fOELokkM8fCXDO9hOxFOkIOZ09cMuMW\nRZCcmWt+zdjqI+AeeHCxPra4MZsOxTNhSSSVK1Xk89BedG5iknUB77aEjITCZdJ+OB2lFHP+Oszb\nP5ogx66HhTDY4nKHN9di5c3v9xK26Si9W/rx8TAPide1FVrj7vkhDIJdfBkZy/NrdtGgpjfLxvWk\nbf0aepvkOE7vgs+uKVwmHaFy80v0GR5dGUVNn/xtgk27CLaoWG4
Pe1G5ijXbYnn2y520qFONBaOC\naVLLJJvW2L5fnj0GPiZx1E2MaXLsbl8OX08sXOaBbUiJDq+maU2BxUB9QAFzlFIzy3Kx+LRsJi/f\nxj+HExnTtwUv3OkB8boyalcuLFbFu+v2Mfuvw/Ru6ccnw7q51wYkog+Ho5Ri3t9HeOvHvXRqXJN5\nI4KoZ9YFjaIPl6CUYtZvMUz/9QB9Wvnx6fDuxh2lu5RTO2H2tYXLRB8uISvXwuOf568LGHdhQaMh\nc+zatiGhf0HDLvrYojP2jPDmAU8qpbZpmlYDiNQ07RelVHRpLrQrNpnQJREkpOeYcyvG0pJ0AmZ0\nLFw24ltoeb0+9piQlKxcHl0Rxe/7z/FQr+ZM/U+AuRcaXcrRjRB2Z+EyeVGVm5w8K1O/2c3K8BPc\n0bEBHzwYaK4UUhcpKlXh1ESoYMJ7MTg5eVaeX7OLL7fFcl+3Jrw9sBOVK5mgnbF1ZILGQv8P9LHF\nw0jKyGH84gjCj543do5dDw9hsKVEh1cpdQo4deH3VE3T9gKNAbsd3jXb8qej63hKvK4Hjso4ciYA\nCnbBOpaQwbR7O/JQr+Ylf8gseKA+XEFyRi4Tl0Wy6VACk/q14slbTLrb3tvNINtGD6IPp5CcmcuE\nJZFsPpzA4ze3ZcpNrc2xoFEcGd2IPZ/ByAVbOZGYyUdDu9K/swFz7O7/EVYMLlwmGildDK+maf5A\nV2BLEX8LAUIAmjVr9m95alYub/2wj67NfPl4aDf8qptod5rS8uursGF64TLP2drTITMBkJ/a5eFl\n29A0WDy2h7HzGJYGWWHvNI7GpzMmLJzY85l88GAXBnYz6QySByWB15sTiRmMCQvnaEK6eTTzyyuw\ncUbhMmlDXEZ0XH6O3cxcC4vH9qCXEXdotG1DRn0P/tcUfa6HYbcnpmladeBL4DGlVIrt35VSc4A5\nAEFBQepieQ1vL1aF9qJp7aruMx1dFLYiq9kUHt+tjy064IiZAKUUizcf4/W10bSqW415I4Jp5meS\nRSMlYauPGo3gyb362OJm/HM4gQlLI9GApeN60qOFCbdbXvc8/PNJ4TJxZJzGztgkxoRFkJNnYdEY\nk3SqbduQkD+hUWDR5woOZ8PBeCYsjaSGdyVWT+jD1Q0MuHBaRv6viF0Or6ZpXuQ7u8uUUmtKe5GW\ndauX9iPmQaanL+NKMwFXYtGmo7z6XTQ3t6/H9EGB1PA2waKRktjxOXwVUrhM9OGw8JcvIk7wwle7\naFa7KgtGBdPcz4S70Nm2IYNXQLs7iz5XKDe/RJ9hyooo/KpXZmVIT1rXM6DjcinZafC2zSi/h7ch\nruarqFie/mInrepWJ2xMMA1rGizHbtx2mGOzPkg0chn2ZGnQgPnAXqWURMRfJD0e3rPZr37Yl9Dm\nZn3sMQglzQQUF/oCcG/XxmTnWRl/bUtzxl7aYuvItLkVhn2hjy3GotzhL1ar4t2f9vPZn4e4pnUd\nPh7WzRyr6i8l5jdYOrBwmbyknErYxiO8tjaazo1rMm9kMHVrGDzE7oMOkBJbcFyzGTy+Sz97PAyl\nFJ/9eZj/rttHr5a1mf2QAXPs2r5nZHF8sdgzwtsXeAjYpWna9gtlLyilfnCeWQZHRnWLxJ6ZgOJC\nXwB8q1Ym9PpWRX3MXGz+GH56oXCZ6ONfyhv+kp1nYcqKKH7ac4ZhPZvx6t0m3CbYtg3pNhLunqWP\nLQbGUbMBFqvize/3smDjEW4JqM/MwYFUrWzwtRW2GvGc9SCGwGJVvP7dHhZtPsZ/ujTi/Qc6Gy/H\nroQwlAp7sjRsANxguM0BbJkDPz5duOyls1DJ4KMELkBmAi5wmSMzAu7+UB9bTEBZFsJWrliBGt5e\nTO0fwOi+/uZYVX+RjER4t0XhMnlJXYlyzwZk5lh4dGUUP0efYXRff166K4CKRp5BOvgLLLu/cJlo\nxKVk5Vp4bOV21u05zfhrW/D8HQbLsRt/ED4KKlwmGikR6S7ai60jU70BPLVfH1uMiWfPBMiobqkp\n60JYTdN47/7O5nJ04fI2pE5bmByujy0mobyzAedSsxm3KJydJ5N55T8BjO7bouQP6YmtRu5fCB0H\nFn2u8C+OXBeQlJHDuEURRB4/z8v9Axh7jcE0Y6uRgXOh84P62GIyxOEtiRmdIelY4TJxZC7Do2cC\nbBug2/8LvSboY4tJKO9CWFM5u1YrvF6rcJlsHV1qipsNuNK6gKjj5zl4No3Zw7tza4cGrjG0LBS1\n0Yi8Z0qDQ9JinkjMYNTCCzl2h3Tjrs4NnWNtWZEQhnIhDm9xZKXAO00Ll8miNOFSfnvFCsWmAAAg\nAElEQVQd/v5f4TJpgErEo8JfZl8Pp7YXLhONlJorzQZcaV3ArR0a8Ncz/ahj5PzvPzwNW+cULhON\nlApHpMXcE5fMqIXhZBkxx25yLEzvULhMNFJqxOEtClmUJpTEZaO670CvifrYYj48I/zFViNPxUD1\nuvrYYmLKOxtgaGfXViNPHoAa9fWxxU0oy7oAq1Xx2MrteFXQWDaxD23rGyhVna1G7vkYug7XxxaT\nIw7vpRS1Hd/zJ6GKG+cRFkrHt1Ng26LCZdIZKhVuH/7y1/uwflrhMtFImXDb2QAZsXMKZV0XUKGC\nxqfDu1G9ihcNanq7zN4SkRAGhyIO70VshVUvAB7erI8tgjGx1cjQVdD2Nn1sEYyJrUZGroUW1+pj\ni3vgfrMBs7pC4uGC4x6hcOe7+tnjJpR3JsBQG5BkJsF/mxcuE2e33IjDO/dGOBlZuEyEJVzKRz0g\n3iYjh2hEuJRTO2G2jWMrGik3bjcbYNsheiUJzLQA06C41UzAF6Ngz1cFxwNmQ5fBxZ4u2I/nOry5\nmfCmzardBxZBh3v1sUcwJrYvqNHroHlvfWwRjImtRrqPhv/M0McWwZgcWg9LBhQukw6RI3GPmQDp\nEDkVz3R4ZVGaUBJrQmDn54XLRCPCpci0o2APtu+b0L+hYWd9bHFTTD8TkJMObzUqXCZticPxLIf3\nZGR+CMOlPHsUfGoVebrggRSVD3NyJNRprY89gjF5vQ5YcwuOA+6FBxcVf77geVjyYJpNaitxYgRb\nNsyAX18pOL7jPegZop89boznOLy2veyW/WDE1/rYIhiTJQPh0G+Fy+QFJVyKbCIh2MPmT+Cn5wuO\n29wGw1bpZ49gTGz9EmlLnIr7O7w/PgtbPitcJk6McClFjeo+Hg01G+tjj2BM/nwXfn/zkgINXk3S\nzRzBoNg6MS+cgspV9bFFMCaWXJhWp3CZ+CVOx30dXqsFXq9duGzwCmh3pz72CMbkm8kQtaRwmTQ8\ngi22ToyEQgm2pJyCD9oVLpO2RLBl22L49pGC43s/g8Ah+tnjQbinw/tRMMQfKFwmDY9wKUV1iJ47\nDt5FLGgUPJcDP8PyBwqXSVsi2LLgdjh+Sd72h76GVv30s0cwJrYd55cToKJ7umFGxL2e9PmjMLNL\n4TLZKU2wZeUw2Le24L
h2K5iyTT97BGNi+3KSxYtCUchuWEJJFDXAIjpxOe7j8F6WC3MU/GemLqYI\nBiUvB96oW7hM4usEWxIOwYfdCpfJy0mwJeY3WDqw4Nj/Whi1tvjzBc9k9xpYPbrgWLIw6Ib5HV7b\neBiQl5NwOQvugOObCo6b9IBxv+hnj2BMZPtowR5sdfLUQaheTx9bBONiq5OXzkKlKvrYIpjY4S1q\nZb3ETQm2FLWjnjQ6gi3ZqfB2k8Jl0nEWbMnNgjfrFy4TnQi2FOWfiE50x5wO77xbIHZr4TIRk2DL\nxz3h3L6C47Z3wNCV+tkjGJOw/nD074Ljfi/C9c/oZ49gTH5+GTbNKji+413oGaqfPYIxsV3oeuNL\ncN3T+tkj/EuJDq+maQuA/sBZpVRH55t0BVLi4IP2hcskPZBgS1YKvNO0cNnURKhQUR97BGNS1CiM\n7F0vFMVlGwRIeyIUwWU5mOOgcjV9bBEuw54R3jDgI2Cxc00pAVshdbwf7p+vjy2CcXm3JWQkFBx3\nGQoDPtXPHsGYbJwFv7xccNziehj5rX72CMYk9TT87+rCZTKbKNgiIQymoESHVyn1l6Zp/s43pRiO\n/wMLbBaNiJAEW4qKwZTROqEobDvPkrpQKIrvHoXIsILjkD+hUaBu5ggG5ew++KRnwXGfKXDrNP3s\nEYrFYTG8mqaFACEAzZo1K3+FRfWYhqyEq+8of92CU9At/OXz4bD3u4Lj3pPhtjeLP1/wTOKiYM4N\nBcfN+sCYH3UzRzAoMlon2MuX42DXFwXHEsJgaBzm8Cql5gBzAIKCglS5Kvv7f/Db6wXHDTrBhA3l\nqlJwCWG4MvwlJx3ealS4TEZ1haL4sDskxBQcP7kfajQo/nzBM7HdvOjO96HHeN3MEQyMbDhiOoyV\npSE3C2YFQuqpgrLnToD3VfrZJNiNS8Nftq+ArycUHD+wCDrc65JLCybCNtTFpzY8e0Q/ewTjsulD\n+PmlguMXT4OXj372CMYkIxHebVFw3H86BI3Rzx7Bbozj8EYthW8mFRzf/RF0e0g/ewSnUO7QF0su\nTKtTcNysN4xZ5yDrBFfgstCXPV/DFyMLjp/YB1c1dNrlBJNitebn6rZk5x/f/g70mqivTYIxifkV\nlt5XcPziGfDy1s8eoVRUKOkETdNWAJuBqzVNi9U0baxTLLno7HZ6IH9aWpxdt0QpNUcpFaSUCqpb\nt27JH7Dl2CW7pU2OFGfXnIQBtzv9Kj9fyMLQfVT+dKM4u0JRnN1T4Ow+vkecXaF4/no//98eoflt\niji7psKeLA1DXGEITx2EipXBx7fkcwXPxf9aeGQb+LXS2xKhjLgs9CXk9/xcqZKnW7gS9Tvmtym1\nW0r8v3BlBi/P/7dqbX3tEMqEcUIaZB9ywR4qVBBnV7CPanVKPkcwNC4Jf9E0aVME+xBH19SUGNIg\nCPbisvAXwSPQNC1E07QITdMizp07p7c5gj6E4YrwF0EQ3B7jjPAKpsdl4S+CR+DQVIeCKdF94yNB\nENwGGeEVBEEQBEEQ3BpNKccPnGiadg44ZlNcB4h3+MX0w93uB4q+p+ZKqTKkUygZD9EJuN89FXc/\ndmnlQujLDRfqOQO8opSaX8JnPEEr7nY/4IA25cII79riYngvTXUIXA3st8MGs+Nu91SuNqUsSJti\nWsrcpjjF4S3yQpoWoZQKcsnFXIC73Q8Y456MYIOjcbd7Msr9GMUOR+Fu9wOOuaeSHF5X2GA03O2e\njHI/RrHDUbjb/UD57klCGgRBEARBEAS3RhxeQRAEwZBI5hdBEByFK7M0zHHhtVyBu90PGOOejGCD\no3G3ezLK/RjFDkfhbvcD5bwnB2V+kedqfIxyP0axw1G42/1AOe7JZTG8giAIgiAIgqAHEtIgCIIg\nCIIguDXi8AqCIAiCIAhujUMdXk3Tbtc0bb+maTGapj1XxN81TdNmXfj7Tk3Tujny+s7Ajnu6QdO0\nZE3Ttl/4maqHnfaiadoCTdPOapq2u5i/u+Q7cjetiE6c9/2IVkQrdtrhVjoB0YpoxT5EJ3Z+P0op\nh/wAFYFDQEugMrADCLA5507gR0ADegFbHHV9Z/zYeU83kJ8jUnd77byn64BuwO5i/u7078jdtCI6\ncd73I1ox/o8RtOJuOhGtiFZEJ47XiSNHeHsAMUqpw0qpHGAlcI/NOfcAi1U+/wC+mqY1dKANjsae\nezIVSqm/gMQrnOKK78jdtCI6cd73I1oxOAbRirvpBEQrohX7EJ3Y+f040uFtDJy45Dj2QllpzzES\n9trb58Kw+o+apnVwjWlOwxXfkbtpRXTivO9HtCJacdQ1zKQTEK2AaMUeRCd2fj+uzMPrrmwDmiml\n0jRNuxP4Gmijs02C8RCdCPYiWhHsRbQi2IPoBMeO8J4Eml5y3ORCWWnPMRIl2quUSlFKpV34/QfA\nS9O0Oq4z0eG44jtyN62ITpz3/YhWRCuOuoaZdAKiFRCt2IPoxM7vx5EObzjQRtO0FpqmVQYGA9/a\nnPMtMOLCCrteQLJS6pQDbXA0Jd6TpmkNNE3TLvzeg/xnmuBySx2HK74jd9OK6MR5349oRbRiD+6m\nExCtiFbsQ3Ri5/fjsJAGpVSepmmTgZ/IXzW4QCm1R9O0CRf+/hnwA/mr62KADGC0o67vDOy8p/uB\niZqm5QGZwGB1YRmhEdHy96a/AaijaVos8ArgBa77jtxNK6IT530/ohXRij24m05AtIJoxS5EJ/Z/\nP7K1sCAIgiAIguDWyE5rgiAIgiAIglsjDq8gCIIgCILg1ojDKwiCIAiCILg14vAKgiAIgiAIbo04\nvIIgCIIgCIJbIw6vIAiCIAiC4NaIwysIgiAIgiC4NeLwCoIgCIIgCG6NOLyCIAiCIAiCW+OwrYUv\npU6dOsrf398ZVQsuJjIyMl4pVdcZdYtO3AvRimAPohPBXkQrgj3YqxOnOLz+/v5EREQ4o2rBxWia\ndsxZdYtO3AvRimAPohPBXkQrgj3YqxO7HF5N044CqYAFyFNKBZXdNMGdEa0I9iA6EexFtCLYg+hE\nKInSjPD2U0rFO80SwZ0QrQj2IDoR7EW0ItiD6EQoFlm0JgAQl5SJxar0NkMwOEopTiRm6G2GYALO\npGSRnWfR2wzBBEibIthDfFo2GTl5Zf68vSO8CvhV0zQLMFspNae0F8rNzSU2NpasrKzSftTj8Pb2\npkmTJnh5ebnkejtjkxi9MJz7ujfhhTvbl7e6cmlFdFI6XKkVq1Ux7ftovoiI5fsp19Dcr1p5qpM2\nxYW4uk05Ep/O8Hlb6NPKj/ce6FLe6qRNcSGu1spH6w8ya30Mayb2oWPjmuWpStoUF+JqnZxNyWLI\n3H9oUaca80YGl6kOex3ea5RSJzVNqwf8omnaPqXUX5eeoGlaCBAC0KxZs8sqiI2NpUaNGvj7+6Np\nWpmM9QSUUiQkJBAbG0uLFi2cfr0NB+MJXRKBb9XKDA5u6ogqr6gV0YnjcKVW8ix
WnvlyJ2u2nWRM\n3xY0rVW1vFVKm+IiXN2m7D+dyrB5W7Aqxcg+/o6oUtoUF+FKrSil+OCXA3y4PoYBXRvTrkGN8lYp\nbYqLcHWbciYliyFz/uF0ShZvDehU5nrsCmlQSp288O9Z4CugRxHnzFFKBSmlgurWvTw7RFZWFn5+\nfiKiEtA0DT8/P5f0ML/bEcfosK00rV2VNQ/3oWXd6uWusyStiE4ch6u0kpVrYcLSbazZdpInbmnL\ny/3bU6FC+b4faVNchyvblJ2xSQyas5mKFWBVaK/yjtgB0qa4EldpRSnF2z/u48P1MQwKasr7D3Sh\nUsXyRVhKm+I6XNmmnE7OYvCcfziTksWiMT3o2dKvzHWVqDBN06ppmlbj4u/ArcDuslxMRGQfrnhO\nYRuPMGVlFF2b1uLz0N7Uv8q73HU6SiuiE/tx9rNKzcpl1MKt/Lr3DK/f04EpN7Up9zWlTXE9rnhO\nWw4nMHTuFqpXqcQXoX1oXa/cI3bSpuiAs5+VUorXvotmzl+HeahXc94e2ImK5exAS5vielzxnOKS\nMhk0ZzPnUrNZPLYHwf61y1WfPSEN9YGvLtxcJWC5Umpdua4q6Mal00i3BNTnwyFd8faq6KjqRStu\nRGJ6DqMWbmVPXAozBgVyb9fGjqpadOJm/HngHKFLImjs68PScT1pWNPHUVWLVtwIq1Xx4te7WbH1\nOOOuacGLd7V3lOMkOnEzYs9nMGTuPySl57J4bA+6NatV7jpLHOFVSh1WSnW58NNBKfVmua9qIv7+\n+286dOhAYGAgmZmZnDp1iv79+1/xM2vXrmXq1KkustB+LFbFC1/t+nca6dNh3Rzp7Hq0VtxJJwCn\nkjN5cPZm9p9OZc5D3R3p7Hq0TsD9tLJu92nGLQqnZZ3qfB7a25HOrkdrxd10YrEqnl69kxVbj/Pw\nDa0c6ex6tE7A/bRyIjGDwXP+ITkjlyXjejrE2QVJS1Yiy5Yt4/nnn2f79u34+PjwwQcfMH78+Ct+\n5q677uK7774jI8M4qVayci1MXBrJiq0nmNSvFe/c16ncMVNCAe6iE/7f3nmHR1V0f/xzgYTeQkd6\nE5BOSAI2EPW1F0DpLQ0VFewFxcrPV18rViAJoSWA2FGwISpqGjUQeq/pvWd3fn8ssEsksEn2tt35\nPE8e9uze3Dk3+2XumTsz52DbYT/m0384k13E0kA/RvZqpbdLboU7aeWrrSeYGbWFPlc0JjokgOYN\nauvtktvgTjoptViZvWobX2w5weM39eDpW3rKpQMuxJ20cizdFuzmFpWxIjiAAe2buOzcqpQWvhyv\nfLeLpFM5Lj1n77aNeOnOqyr8fO7cufj4+DB79mwA5syZQ8uWLZk1a1aFvxMWFsbq1av58ccfWbdu\nHStWrOCLL77g9ddfB+C9994jMTGRiIgIEhMTGT9+PHFxcdSrV4/hw4ezdu1a7r//fpdeZ1XILiwl\nZGkC8UcyeOnO3ky/Wv1dla5A6kR7dp7MZtriOKwCokMC6Nuu+puOtEBqRXtWxB7lha93EtC5GYum\n+tKgti63k0ohdaI9JWVWHonewo+7knn21p48cH1XvV1yCqkV7Tmans/4hTHkl1hYEezvkk2vjhi/\nh3IRgYGBjBo1itmzZ2O1Wlm5ciUbNmxgwIABFz0+KiqK4OBgNm3axB133MGYMWM4fPgwTZs2pXZt\n21OMWbNmMXz4cL766ivmzZvHggULqFfPlq7J19eXP//8U3chpeQUMSUijoOpeXwwbiB39W+rqz9G\nx1N1AhB3OIOgyHga1qnFsmB/uroga4c748laWfTHIeb9sJuRPVvysYuXRrkbnqyTolILD63YwoY9\nKcy9ozeB15jjYYteeLJWjqTlM35RDEWlFqJC/LmqresftugS8F5qhKMWnTp1olmzZmzdupXk5GQG\nDhxIx44d2bZtm9PnOH36NI6pTGrUqEFkZCT9+vVjxowZXH311ec/a9myJadOnXLpNVSWw2n5TA6P\nJSO/hIhpQ7i2+7/TsBgZqRPt+G1PCg+u2EzbJnVZHuRP2yauW4epBVIr2iCE4L1f9jP/1/3c3rcN\n740dgHct8yyNkjrRjsISC6HLEvhzfxrz7u3DRP+OertUKaRWtONQah7jF8VQahFEhQTQq00jVdrx\nmCe8AMHBwURGRnLmzBkCAwPJzc3l2muvveixUVFR9O7d+4L36tat+6+8c/v376dBgwb/Ek1RURF1\n6+oXNJyrniawTU33d+E6GHfHk3QC8M22kzyxejs92zRkyXQ/msl1mE7jSVoRQjDv+92EbTrMfYPb\n8d/R/aqdTspT8CSdAOQVlxEUGU/8kQz+N6Yf9/m6pKiRR+BpWjmQYgt2rVZBdEgAV1a/AEmFeFTA\ne++99zJ37lxKS0uJioqiZs2alRo59ejRgyNHjpy3s7OzefTRR/njjz94+OGHWbNmDWPGjAFg3759\n9OnTx9WX4BSO1dOWBfm5pKCEJ+EpOgFYFnOUud/sxK+TD4um+tKojjZlIt0FT9GKxSp44Ww6qWnD\nOjH3jt7VLj7iSXiKTgByikqZFhHH9hPZvDd2AHcPcF2GF0/Ak7SyPzmX8YtiAYgODaBHK/WCXfCw\nLA3e3t6MGDGC+++/n5o1K7/mrH79+nTt2pUDBw4A8NhjjzFz5kx69OhBeHg4zz77LCkpKQD89ttv\n3H777S713xnUqJ7maXiCToQQfLRhPy9+vZORPVuyJNBPBrtVwBO0Umqx8vjqbUTHHWPmiK68dKcM\ndiuLJ+gEIKughElhsSSezObjCQNlsFsFPEUr+5JzGb8oBkWBlRoEu4Dtxufqn8GDB4vyJCUl/es9\nrbFYLKJ///5i3759VT7Hl19+KebMmXPJY86cOSNuuOGGKrchRNX+XpF/HRadnl0r7vv0b5FVUFKt\n9s8BJAgVNCKkTlyiEyEq/zezWq3ite92iY7PrBWzV24VJWWWavsghNRKVTFyn1JUWiZClsSLjs+s\nFR//tr9a7Z9D6qRqGLlPEUKItNwiccv7f4juz/8gfkk6U20fhJBaqSpG7lOEECLpVLYY+OpPYsjr\nP4sDKbnV8kEI53XiMU94k5KS6NatGyNHjqR79+5VPs+9995Lp06dLnnMsWPHeOedd6rcRmURQvDO\nT3t56dtd3NirFUuD/GhcVz6tqwrurBOAMouVp9fsIGzTYaYN68Q79/XHS+ZjrhLurpWCkjKClyTw\nU1Iyr9x1FQ8N76Zp++6Cu+sEbNmAxi2M4XBaHmFTfWXu7iriCVpJOpXDhEUxeNeswaoZQzXNBuQx\na3h79+7NoUOHXHKu4ODgS34+ZMgQl7TjDGUWKy9+s5PouOOM9W3PvHv7yIIS1cBddQK2FEGzVm7l\nx13JzBrZndk3dpfJ36uBO2slp6iUwMXxbDmWKTcdVRN31gnYqjJOXBTLmZwiFk/zY2jXZpr74C64\nu1Z2nsxmUngsdb1qEh0SQKfm9TVt32
MCXnfEMYB5eEQ3nri5hwxgJBclr7iMGcsS+OtAusyHKbkk\nGfklTI2IY/fpHD4cP4jb+7XR2yWJQTmeUcCEsBiy8ktZGuiHbycfvV2SGJTEE7Zgt0HtWkSHBNCh\nWT3NfZABr0k5Vz0t7rC5qqdJtCczv4RpkfHsPJnNO/f1Z/Tgdnq7JDEoKTlFTAqP5Uh6AQunDOaG\nnnJqWnJxjqTlM2FRDHnFZSwP9pepLyUVsv14FpPCY2lUx4uVoQG099E+2AUZ8JqSC6unybQvkoo5\nk13E5PBYjmYU8NmkwdzUWwYwkotzIrOAiWGxpOYWEzl9CMO6NtfbJYlBOZCSx4RFMZRarESHBqhS\nFUviHmw9lsmU8Dia1PciOiSAdk31CXZBBrymw+zV0yTacSQtn0nhsWTml8gARnJJDqXmMTEslvzi\nMlYE+zOwQ1O9XZIYlD1ncpgUFgsorAwdqmqhAIm52Xw0k6kRcTRr4E10SIDuFTzl7qbL0KlTJ9LS\n0gxxvsQT2Yz59G8KSiysDA2Qwa6BMJJOAHafzmHMZ/+QX1xGVEiADHYNhBG1cv+Cfygps7IydKgM\ndg2C0XQCtk1H4xbGULOGwqoZ6lbFkjiPEbWScCSDKeGxtGhYm5Wh+ge7IANe1YiMjOTll1922fk2\n7U9j3MJ/qONVkzUPDKVfO7leyh1wtU4ANh/NYOyCf6hVQ+HzB4bKtXVughpa2XY8i3ELY6hVw5Yi\nqHdbdWrYS7RDDZ2AbWp6/KIY6nvXYrXG6aQk6qCWVuIOZzA1Io5WjeqwMjSANo31D3ZBryUN656F\nM4muPWfrvnDrfyv8eO7cufj4+DB79mwA5syZQ8uWLZk1a5ZTpy8sLGTUqFGMGjWKAQMGEBQURFxc\nHBaLBT8/P1atWuV0ib633nqLdevWUbduXaKioujW7dL5Lb/bforHV2+ja4sGLAn0o1WjOk61Y3qk\nTiqlE4Df96UyY1kCbRrXZVmQn67rpTRFaqXSWok5lE5QZDzNGtRmRbC/bhtJNEXqpNI6AVsAM31x\nHM0b1iYqJIArDPC0TnWkVqqklX8OphMYGU/bJnWIDgmgpYHiFY9ZwxsYGMioUaOYPXs2VquVlStX\nsmHDBgYMGHDR46OioujduzcAeXl5jBs3jilTpjBlyhQA7rrrLl544QUKCwuZNGlSpepRN27cmMTE\nRJYuXcrs2bNZu3Zthccu+fsIL3+3iyEdfVg01VcWlFAZs+oEYO2OUzy2ahvdWzZkaZAfzRvUdrot\nSeUxs1Z+25vCA8s208GnHsuD/T1nEK0DZtYJwN8H0ghakkCbJnWICg6gdWOpFbVwB60ELomnXdN6\nRIX407KhwbTiTDm2yv4YtWTfjTfeKLZs2SLWrVsnRo8e7dTvdOzYUfTr108sX778gveLi4tFv379\nhJ+fnygrKxNCCJGWlib69+8v+vfvL9q3by9atWp13t6xY8f58x08eFAIIURJSYnw8fG5aLtJSUni\n7R/3iI7PrBUhS+JFYUlZVS+7WuCBpR3NpBMhbH+zFTFHRadn14oxn/4lsgtdU1a6skitGFsr5/5e\nP+w4Jbo9/7247YM/RFpuUZWuuzpInRhbJ0LY/2a/7UkWPeb8IG5+93eRkiO1ogVm0orj3+vPfani\nyhd+EDe9u1FzrTirE495wgu2yiORkZGcOXOGwMBAcnNzufbaay96rOPI6eqrr2b9+vVMmDDhfGGH\n9PR08vLyKC0tpaioiPr169OsWTO2bdsG2NbGHDly5KLrYxyLQ1ysUIQQgsyCEj7ccEJWT9MBs+jk\nHLlFpTz/VSLDr2zBpxMHU9e7ZlUvXVJJzKaVLzaf4Kk12xnQvgmLp8sS5FphNp0A/JyUzMwVW+je\nqgHLgvzxqe9dlUuXVBIzauXP/akEL0mgc/P6rAj2p5lBZxc9Koq69957Wb9+PfHx8fznP/+hYcOG\nbNu27aI/50QE8Oqrr9K0aVNmzpx5/r0ZM2bw2muvMXHiRJ555plK+bFq1arz/w4dOvSCz6xWwbGM\nAvKLLTw8ohv/Hd1XBrsaYwadgG1gdDq7kOzCMu7s35aFk31lsKsxZtEK2KrtPfH5doZ2bcayIH8Z\n7GqImXQCUFhi4cHlm+nVthFRwQEy2NUQs2ll494UgpYk0KVFA6JCAgwb7IIHreEF8Pb2ZsSIETRp\n0oSaNSsXGHzwwQcEBgby9NNP06dPH7y8vJgwYQIWi4Vhw4axYcMGbrjhBqfOlZmZSb9+/ahduzbR\n0dHn3y+zWjmaXkB+cRlN6nnx5LArK+WjxDUYXSdgC3ZPZhWSkV9Cg9o1+WBsP2rUkGWltcYMWgFI\nyS0iq6CUG3u15KMJg6jjJQdGWmIWnQBkFpSQkV9ydhZgCA3ryIGRlphJK0WlFkJXbaZ7qwYsD/Kn\nqdEHRs6se6jsj1HXxlgsFtG/f3+xb98+vV35FyVlFrH3TI7YcSJLZOYXG+LvJYRnrqEysk6EEMJi\ntYojaXli+/FMcTqr0BB/MyGkVoyI1WoVp7MKxfbjmWJT/DZRUmbR2yWpEwOTnlcsth/PFL/HbhV5\nRaV6uyO1YmCyC0rEz39vFnfM/1Nk5hfr6ouzOnF6rlxRlJqKomxVFOXyW/UMSFJSEt26dWPkyJF0\n795db3cuoLjUwsGUPErKrHRqVo8m9Qw+SnJjjKwTAItVcCQtn+zCUto0rit3TOuI0bUihOB0dhEp\nuUX41POmaT1vvOTyKM0xuk7OkZ5XzInMAhrW8aJ5A2/q1/aoCWBDYBatZBeWcjSjAK+aCsuD/U0T\ns1RG0bOA3YApM5P37t2bQ4cO6e3GvygoKeNIWgEAXVrUp563uTsZRVFqAgnASWaH/vgAACAASURB\nVCHEHXr7U1mMqhOAMouVI+kFFJaU0a5pPbmuTmeMrBUhBCczC8koKKF5g9q0aVyH3GS9vfJMjKyT\nc6TmFnM6u5BGdbzo0Kwee1Pl8ig9MINWsgtLOJZeSF3vmjRvUNtUewGcGu4ritIOuB0Iq05jtifP\nknPkFpVyKDWfGsqFwa7J/07nBkZVxuTXrwqlFiuH0vIpLLXQoVn988Gumf9Wrpg1MvP1q4VVCI5n\n2ILdlg3r0MbkswBSJ+qSklPE6exCGte1BbueHupKrVRMVoE92O3UrC41LpO9wWg4O7/1PvA0YK1q\nQ3Xq1CE9PV2K6SxZBSUcSS/Au1YNurZscH4TiRCC9PR06tQx303KFQMjqZN/U1Jm4WCqbclL52b1\nzo+ozayVs1RrcCS18m+sVsGx9AKyCkto3bjO+SUvUidSJ+URQpCcU8SZnCKa1POmg48t2DWzVqo7\nOJJaqZisghKOZxRSz7smnZrVIysz03Q6uez8uaIodwApQojNiqIMv8RxoUAoQIcOHf71ebt27Thx\n4gSpqalV99ZNyCsuI7ugFO9aNWjWwJsDGReOkurUqUO7du108q5anBsYNazoAKmTylFqsZKWVwJ
C\n0KxBbY7nXDhGNatWHAZH84DHq3IOqZULsQpBRl4JRWVWmtTzIj23FulnP5M6kTopT3ZhKblFZdSv\nXZNadb3Zc3bJi1m1cpZqLb2UWrk4BSVlZObbYpYaDbzZn66or5OyYogeD0NnQreRLjmlMwtGrwbu\nUhTlNqAO0EhRlOVCiEmOBwkhFgILAXx9ff81PPLy8qJz584ucNm8CCF49+d9fLjhGDf1bsWH4/u7\nTXogZwdGUifOs/VYJtMWx1PHqwbLgvzp0arCcYQZqfbgSGrFTnZhKYGR8Ww9lsn/xvTnpgGmDVjK\nc1mdXA6pkwsRQvDq2iQW/3Wcif4deO3uPm6R0tAVgyOplX/z1dYTPLF6O0M6+RAxbYg2mxkLMuCt\ns99Dm34uC3gvu6RBCPGcEKKdEKITMA7YUD7YlVyeMouV579K5MMNBxjr255PJ7pdLsxzA6MjwErg\nBkVRluvrknn5c38qE8NiaVLPizUPDHOrYNdxcHSp44QQC4UQvkII3xYtWmjknflIzytmwqIYdpzI\n4uMJgxg92D2CXWd1oihKqKIoCYqiJMgnc5fGahW88PVOFv91hMCrO/P6PToFu2XFapz1sksvpVYq\nx5rNJ3h89Xb8Ozdj8XSNgt2MQ/Zgt0VPuPFll51a5qjRgKJSCzOjthAdd5yZI7q6ZfU0OTByHesS\nTxMYGU8Hn3p8/sBQ2vvU09slVyMHRy7iTHYRYxfGcCAlj0VTfLm1bxu9XXIlTulEDoycw2IVPPPF\nDlbEHuPB4V158Y5ely0Z63JK8uHlxvB6S7CUuuy0chDtelYnHOepNdu5umtzIqYN0SaD1LFYmD/Q\n9rrPaJgZ69LTV+oKhBAbgY0u9cDNySkqJXhJAnGHM3jpzt5Mv1pOl0gqZlX8MZ77MpGBHZoSMXUI\njeuZJ+WLswghngOeAzi7/OVJOTiqPMczCpgYFkt6XjFLAv0I6NJMb5dcitSJ6yizWHl89Xa+3X6K\nWSO7M/vG7toHu1nH4f0+ttd1m0JNl/ZtTi29lDjHyrhjPPtlItd2b86iKb7azEavngpJX9teD38O\nhj/r8ibMnfTV4KTkFDElIo6DqXnMHz+Qu/q31dslTZADo6qx8I+D/N8Pe7i+Rws+nTTI9DmZJepx\nICWPSWGxFJZaWBESwID2TfR1SAiID4Met0CT9vr6IrmAkjIrj0ZvZf2uMzx9y5U8NLyb9k4ci4WI\nm22vu4yAKV+79PRycOQ6VsQeZc5XOxl+ZQs+mzRYm2D35cb217e+Bf4zVGlG3lFV4nBaPpPDY8nI\nLyFi2hCu7S6nTyQXRwjB2z/t5ePfDnJ7vza8d/8AvGu515KXipCDo8qTdCqHyeGxKAqsDA2gVxud\nawEV58EbV9heF2bC9U+7vAmpk6pRVGph5oot/LonhRfv6E3QNTrMMP7+Fvw2z/b62idg5FztfZA4\nxbJ/jvDiN7u4oWdLPtFqn5FjsDv0YdWCXZABryoknshm2uI4BBAdEkB/vZ++SAyLxSqY+81OVsQe\nY7xfB16/pw813WDHtEQdth7LZGpEHA1q12J5sD9dWjTQ16H0g/DhILt97ZP6+SK5gMISC6HLEvhz\nfxqv39OHSQEdtXfCMZgZE2Fbl6kycnBUNZb8fYSXvt3Fjb1a8vHEQdSupXGwO3KubUCkIjLgdTF/\nHUgjdGkCTep5syzIT/8bksSwlJRZeXz1NtbuOM0D13flmVuu1H5d3eVYcR/Ubgijw8FovnkYfx9M\nI3hJAi0a1mZFsD/tmuq8mXHvOogeZ3vd/WaY+Lm+/kjOk19cRtCSeGIPZ/DWmH7c76vDMhPHYOb2\ndzUJdiVVI2LTYV5dm8TNvVvx0YRB6s8wWq3walO7PeUb6DJc3TaRAa9LWbvjFI+t2kaX5g1YGuRH\nq0bmqkIi0Y7CEgsPrtjMxr2pPHdrT2Zc31Vvly6ktAjmtbLbYyL080XChj3JPLh8Cx186rEi2J+W\nevctjsHMza/DsEf080VyATlFpUxfHM+241m8P3YAdw+4QlsHLKXwWnO7PX4lXHmrtj5InCbsz0O8\n/v1ubrmqNfPHD1Q/2C3MgjcdZhseS4LG2mhUBrwuYsnfR3j5u134dmxK2NQh58u/SiTlyS4sJSgy\nni3HMnljVF/G+/27qIKulJ+mnpOsny8Svt9xmlkrt9KrTSOWBPrhU99bX4ccg91JX7osKbyk+mQV\nlDA1Io5dp3L4cPxAbtM6TV3GIXtaKYAn9kLD1tr6IHGaBb8f5I11e7i9bxveHzcAL7XTpSbvgk+H\n2e0XUqGWdv2ZZ+yMUREhBO/8tPfs2pdWLAvyN36w+9MLtpvWnh/09sTjSM0tZtzCGLafyOLD8YOM\nF+wmLLYHux2GwcvZ4CVnKvRidcJxHonewoD2TVgR4q9vsFuce2GwO329DHYNhK0ASSy7T+fy6aTB\n2ge726IvDHbnZspg18B8svEAb6zbw5392/KBFsHu9lUXBrsvZ2sa7IJ8wlstyixWXvxmJ9Fxxxnr\n25559/YxdkEJqwX+2wFK8mx2iyv19cfDOJ5RwOTwWJJzigmfOoTrehgsc8cF09TzYNjD+vkiOb+J\n5NruzVkwebC+aeoOboBl99rtJw9AA4Pp14NJyS1iUlgsR9MLWDTVl+u17ltW3A/7f7TbL2dr276k\nUnz4637e+Xkfdw9oyzv39Vc/blk1CXZ/Z7d10ocMeKtIUamFWSu38uOuZB4a3pWn/mPADUeOZJ+E\n93rbbXnD0pT9yblMDo+joKSM5cH+DO7Y9PK/pBXl19zd/g4MCdbPHwmfbDzAW+v3cnPvVnw4YaA2\nO6YrYk0Q7Fxjt2UwYyjOZBcxYVEMp7OLWDx9CMO6Nr/8L7kSx4EySH0YnPd/2cf7v+zn3oFX8PZ9\n/dXPCmQgfciAtwpkF5YSsjSB+CMmqZ72RQgkrra9btIBZu2QO+41ZPvxLKYtjqNWzRqsfmAoPVvr\nnDfVkSN/QeRtdjt0I7QdWNHREpURQvC/H/fyycaD3D2gLW/f11/9qcZLYaCbleTfnMgsYMIiW773\npUF+DOnko60DUh+mQQjBe7/sZ/6v+xk9qB1vjennUcEuyIC30jhWT/tgnAmqpzkK7rqn4YY5+vni\ngfx9II2QpQn4NPBmRVAAHZrpnErKkeVj4MDPdntuBtTQ8Umih2O1Cl5dm0Tk30cY79ee1+/pq29O\nZoPdrCQXcjQ9nwmLYsktKmV5sL/21fYc9eEbCHe8p237Eqex7TXax0e/HeC+we14c3Q/anhYsAsy\n4K0Uh9PymRIRS3peiTHXYDqSlwpvO5SQvPV/4B+qnz8eyI+7zvBI1FY6N69vvDR1BuyMPBmLVfDs\nFzv4fPMJgq/pzJzbe+m3RKp8jsxrHocbX9LHF8lFOZiax4RFMZSUWYkKCaDPFY0v/0uuorQQ5jls\nRrtvCVx1j3btSyqFEIK3ftzLpxsPMt6vPfPu6euRwS
7IgNdpTFU97bc34Pf/2u1Ht4JPF/388UDW\nbD7B02u2069dEyKnD6FJPZ1TSTni2BnVaw5PH9TPFwklZVYeW7WN7xNPM2tkd2bf2F2/YPf0dlhw\nnd1+YBO07quPL5KLsvdMLhPDYgFBdGiAtkuk9q6H6LF2e3aibZmcxJAIIXhj3R4W/nGIif4deO3u\nPuoGu0LAKw6xUb9xMGqBeu1VEhnwOsGm/WnMWGarnrY0yI+uRq6eZtCRlScRvukwr61N4pputt31\n9Wsb5L9Z9gl47yq7fe8C6D9OP38kFJVaeGjFFjbsSeH523oSep2OBUi+mwWbI+32i2lQ0+ApFj2M\nnSezmRwei3etGqwIHkq3lhreiz4ZCilJdntuJtQwcFYiD0cIwbzvdxO26TBThnbklbuuUncgnZcC\nb3e32+OioedtFR+vAwa5ExuXc9XTurZowJJAg01LO1J+ZAUy2NUYIQTv/byP+RsOcGuf1rw/boC+\nu+sd+eoB2B5tt585CnUNPEvhAeQXlxG8JIGYw+m8fk8fJgV0vPwvqYUcKBuebcezmBIeS4PatYgK\nCaBT8/raNS71YSqEsO0HWPzXEaYN68RLd/ZWN9jd/Z0t9dg5njoI9TXOFuIEMuC9BKapnnYmET67\nxm7f+hb4z9DPHw/EahW8/N0ulv5zlLG+7fm/UTpvOHJE3qwMR3ZBKdMi49hxIpt37+/PvQPb6eeM\n1IfhSTiSwbTF8fjU9yYqxJ92TTXc/Cr1YSqEELz87S6W/HOUwKs78+IdKu8HWDkR9qy12y9lGTYL\nlAx4L4IQgnd/3seHGw5wU+9WfDh+IHW8DPKkrjwRt8Cxf+z2s8ehjoHSXnkApRYrT32+na+3nSLk\n2s48f5uOG47KI29WhiMtr5gp4XEcSMnj4wmDuKWPjtWopD4Mz98H0whekkDrRnWICgmgdWMNZxml\nPkyF1SqY++1OlsccI/S6Ljx3a09170Um04cMeMthsQpe+Hon0XHHjF89zWRic0eKSi3MXLGFX/ek\n8NR/ruSh4V2NEewWZMBbDvmh2/tD0E/6+SMB4HR2IZPCYjmZVahPRaxzZB6FD/rZ7R63woSV+vgi\nqZDf96USujSBjs3qsTzYn5YNZbAruThWq2DO2dhlxvVdePYWGeyWRwa8DjhWT5s5oitP3mzQ6mkl\n+fB/5fL/mkBs7kZOUSnBkQnEH83Qfw2mI1/PhG3L7bZMG2QIjqUXMDE8hsz8UpYG+uPXWeMiAef4\n5mHYusxuPxQDLXvp44ukQn5JSuahFVvo2rIBy4P8aNagtjYNF2XbStCfY/A0uPMDbdqWVAmrVfD8\nV4msjD+uTeVXEwa7IAPe8+QUlRKyJIHYwxnMvaM3gdcYtHraslFw8Fe7PWoR9LtfP388lLS8YqZG\nxLH3TK6xCpCU74jkTmpDcCDFlkqquMxKVIg//drptGHQpDcqT2Nd4mkeid5K77aNWBrop11aw8Q1\n8EWQ3Z7xJ7TpV/HxEt1xzOH9yA3dePymHjLYrQAZ8GKrnjZ1cTwHUnL5YNwA7h5whd4uXZzyQpNp\ng3ThZFYhk8NiOZVtm5YecWVLvV2yYeKOyJ3ZeTKbKRFx1FAUVoUO5crWDfVxROrDFHyz7SSPr97O\ngPZNWDx9CI3qaNTHzx8IGYfsthwsGx6LVfDUmu18ueUks0Z257GbeqjboMn7EI8PeB2rp0VMG8K1\n3Q1aPc3kQnMXDqTkMTk8lrziMpYF+Wtfu/5iyJR0hmXz0UymLY6jUR0vVgT7a5tKyhHZf5iC1QnH\neeaLHfh18iFi2hDtcnhLfZiOMouVJ89uln78ph48OrL75X+pOjhqpNO1MG1txccaFI8OeB2rp60M\nDdBvmvFSpOyGTwIufE92RrqQeCKbqYvtT+p6tzVANow938PKCXb7ytthfJR+/kjO89eBNEKWJtCq\nUR2WB/tzRZO62jtRVgKvlxvEy/7DkCyPOcoLX+/k2u7NWTjZl7reGmUGksGu6SizWHl89Xa+3X6K\np/5zJTNHdFOvsfIboE1csOiyAa+iKHWAP4DaZ49fI4QwfWF1U1RPK98RjV0Ove7UxxcPJ+ZQOsFL\nEmhSz4vlQTo+qXOkvD6e2AcNW+nji+QCfklK5qGoLXRuVp9lwX7a7q4/x84vYc10u33PpzBgQsXH\nS3QjYtNhXl2bxMieLfl44iDt0mDKYNd0lFqszF61je93nOaZW3ry4HAVqzPu+xGiHPYIzd4JTdqr\n157KOPOEtxi4QQiRpyiKF7BJUZR1QogYlX1TjXPV07o0b8DSIINWT5MdkWH4OSmZmVFb6OhTj2VB\n/trmwawIqQ/D8t12W/9yVdtGLNFyw5Ej5fUx5wx46fCEWXJZPt14kDfX7+GWq1ozf/xAvGtptG5W\n9iGmo9Ri5dHorazbeUb9UuRrH4OECLvtBmu6LxvwCiEEkHfW9Dr7I9R0Sk1MUT3NhB2Ru84EfLX1\nBE9+voM+bRuxeLofPvV1CF7KY0J9eAqr4o/x7JeJDOnoQ/g0XxpqteHIEakPUyCE4INf9/P+L/u5\ns39b3r2/P15a5HwvzIQ3O9ltmXbMFJSUWXkkegs/7krmhdt7EXxtF/Uac9M+xKk1vIqi1AQ2A92A\nj4UQsRc5JhQIBejQoUP5j3VHCMF7P+9j/oYD3NirFR9NMGD1tK0r4JuH7LZXfZhzSj9/KofbzQRE\n/nWYl79LYljXZiyc4ksDrTaQVET2SXiv94XvmbAjctfB0eK/DvPKd0lc16MFCyYN1m4NpiNueqNy\nN4QQ/O/HvXyy8SCjB7XjrTH9tClFvn0VfBVqt0N/h7YD1G9XUi1KyqzMjNrCz0nJvHRnb6ZfrWLa\nVDfuQ5y6gwshLMAARVGaAF8pitJHCLGz3DELgYUAvr6+hnoCbIrqaeVFNms7NO2kiytVwZ1mAoQQ\nzP/1AO/9so+be7divhFKS38cAKm77fbENdD9Jv38qR5uNTgSQvDxbwd4+6d9/Ocqm15q15LBritw\nx8GREILXv99N+KbDjPfrwLx7+lBDi2D3nV6Q6/AAxQ2mqD2B4jILDy23VfN89e6rmDK0k3qNuWEf\n4kilHlkJIbIURfkNuAXYebnjjYBj9TRNKpBUBTcRmTvMBFitglfXJhH59xFGD2rHm6P76j84Kq+P\nl7LAaBquBO42OHpz/V4++/0gowZewVtj+mmvl/SD8OEgu90+AIJ+1NYH9XCrwZHVKnjp210siznK\ntGGdeOnO3trcj9zkHuNpFJVaeHD5Zn7bm6p+NU8P0IgzWRpaAKVng926wE3Am6p75gIMXz2tOA/e\nKFfkwsQiM/tMQKnFyjNrdvDl1pMEXt2ZF27vpc2Tl0vhpp2QuwyOzgUvE/078NrdGj2pc+SLEEhc\nbbcf3gzNVUxRpDHuNDiyWAXPfbmD1QknmHF9F569pacMdl2EO84EFJVaCF22mT/2pfJ/9/Zlgr+K\nfaAHaASce
8LbBlhy9gZVA1gthDB8xmHDV09bfBsc/ctuD5oKd83Xzx8XYtaZgIejtvLL7mSeuKkH\nD9/QTd+ZAKsFXi1X1MKNOiGzD47KLFae+SKRL7acIPS6Ljx3q0bBiyMecpNyZnBkdByLBDw6sjuP\n3dhdBruuxa1mAgpLLIQuS2DTgTTeHN2XsUM0CnZ9usKjW9RrS2ecydKwAxiogS8uw7F6WvjUIVzX\nw2DV0/6VMigZvAyQ6qoamHkmILeolJCltpmA1+6+islqrpFyhi3L4NuH7faQYLj9Hf38UREzDo5K\nyqzMWmlLDfTYjT14dKQOgyPPCWQuOzgy+kxASZmV2au28kPiGfWLBDjiWRpxm5mAwhILwUvj+ftg\nOm+N7sd9virlvS0thHmt7fZ/3oChD1V8vBvgdpXWdp7MZmqErXpadEgA/dsbrHqa+3ZCppwJSM8r\nZtrieHafzuH9sQaYCSivj2eOQl2DabiamHlwVFRq4YHlm9m4N1X91EAV4b59yCWpaHBk5JmA4jIL\nM1ds4ZfdKdrpJT8d/ufQzrBH4ObX1W9XZ9xhmVRBSRmBkfHEHc7gnfv6M2pQO3UaOrkFFo2w2zPj\noUUPddoyEG4V8P51II3QpbbqacuC/OhipOppqfvg4yEXvudGNyozzgScyipkcngsJzILWTB5MCN7\n6VylzHMCGVMOjvKKywiKjCfuSAZvjOrLeD+Nb5ilRTCvnEbdVyOA+QdHM5Zt5vd9qervrj9H+dSW\nHhLIgPmXSeUXlzE9Mp6EIxm8e/8A7hmo0sOX3/4Pfnf4L/RiGtQ0YD0CFXCbgNfQ1dPKBzL3RcJV\n9+riisTGodQ8JofHkVNYytJAP/y7NNPXIc8Jdk05OMoqKGHq4nh2nszWZyZgx2r4MsRujw6HvmO0\n9UEfTDk4KigpI3hJAv8cSld/DeY53uoCBel22+TZXKqKGZdJ5RWXMX1xHJuPZvKemv3LG+2hOMdu\nu/F95mK4RcC79J8jvPStQauneVAgYxbOLXsBiA4NoM8VjS/zGypS/sl/DS+Ym6afP5J/kZpbzOTw\nWA6l5vPpxEHcfFXry/+SKynfh7yQArVqa+uDTphxcJRbVEpgZDybj2by7v39uXegStPSjnj4fcbM\nMwG5RaVMWxzPtuNZzB8/kDv6tVWnIQ/XCJg84DV89TQpMMMRdziDoMh4GtX1YmmQH131XPZSvphE\n4E/QwV8/fyT/4lRWIZPCYjmdXUT4NF+u7a7xBljZh5iK7IJSpiyOY9fJbD4cP4jb+7VRv1GpETDp\nTEBOUSlTI+JIPJHNR+MHcmtflfQiNQKYOOA1dPW0HZ/Dl8F2Wz61MwQb9iTz4PIttGtal2VB/rRt\nUlc/Z2QHZHiOpuczYVEsOYWlLAvyw7eTz+V/yZVIjZiKzPwSJkfEsu9MHp9oNRMgNQKYcyYgu7CU\nKRG2wdHHEwfxH7X0IjVyHlMGvIaunlZeXI9sgWZd9fFFcp5vtp3kidXb6dmmIUum+9GsgY5TwrID\nMjz7knOZFBZLqcWqz7IXqRFTcW7Zy+G0fBZMGcyIK1uq36jUiGnJLihlckQsu0/n8OmkwdzUW6UN\n01IjF2C6gNexetpLd/Zm+tUGqp4mxWVIlv1zhLnf7sKvkw9hU31pWEenNd5lxfB6uRuh1IjhSDyR\nzZSIWLxq1mDVjKH0aNVQu8bTDsBHg+22Xyjc9j/t2pdUmuScIiYsiuFUVhER04Zwdbfm6jZYPu3Y\nDS/AdU+p26bEZWQVlDAp3DYT8NkkFbMDOcYjja6Ax5PUacdEmCrgNWz1NA9MF2QGhBB8tOEA7/y8\njxt7teSjCYP0W+Md8xmsf8Zuj5gD1z+tjy+SCkk4ksH0xbY13lEh/nRsVl+7xr+cATtW2u3ZidDE\neLlCJXZOZhUyYVEMabnFLAn0w6+zystetkXB1w/a7Ue3gY+BHvpILklmfgkTw2I5kJrHgsmDGdFT\nhZkASxm85pB16IYX4bonXd+OCTFNwGvY6mnfPwHxYXZ7wES45xP9/JEAtmB33ve7Cdt0mFEDr+DN\nMf3w0muNtxtW1nNHNu1PI2RpAm0a12F5sMZrvOXskOk4ll7A+EUx5BSVsjzYn4Edmqrb4Ht9IPu4\n3fbQtGNmJT2vmIlhsRxKy2fh5MEMV2PZS/pB+HCQ3Z7xB7Tp7/p2TIopAt7EE9lMW2zA6mkenC7I\nyJRZrDz3ZSKfbz7BtGGdmHtHb2rU0OnGIAMZU/DTrjM8HLWVLi3qsyzInxYNNfx/LDViOg6l5jFh\nUSxFZRaiQzRY4y01YmrS8oqZuCiWI+n5REwdwjXdVVj2sutr+Hyq3Z5zBrx03JhtQAwf8DpWT9M9\njZQjsgMyJI4bGmeN7M7sG7vrt6FRasQUfLPtJI+v3k6fKxqzZPoQmtTz1q5xqRHTsS85l4lhsVit\nguiQAHq1aaRug1IjpiY1t5gJi2I4nlmg3hrvL4Ih8XO7LTVyUQwd8BqyelrGYZg/4ML3pLgMQV5x\nGTOWJfDXgXR9NzSe3g4LrrPbbQdB6G/6+CK5JNFxx3j+q0T8OvkQPm0IDWpr1CWWX/dfrzk8fVCb\ntiVVZtepbCaHx1GrhsKqGQF0a6nyhkYZ7JqalJwiJoTFcjKzkMXT/BjaVYWKnlIjTmPYgNeQ1dPe\n6goFDvl0xy6HXnfq54/kPJn5JUyLtJV+fee+/owerEF1o4ux5C44/LvdfjgBmnfXxxfJJQn78xCv\nf7+b4Ve24LNJg7Xb0Lh3PUSPtdvjV8GVt2jTtqTKbD+exZSIOOp712RFSACdm6u8odExkGneAx6O\nV7c9iUtJzili/MIYzuQUETl9iDrl62WwWykMF/AatnqaFJZhOZNdxOTwWI5mFPCZmjkNL4fUiCkQ\nQvDhhgO8+/M+bu3Tmg/GDcS7lkYbGj/0hfT9dvvFdKhpuG5YUo6EIxlMWxxP0/peRAUH0N6nnnqN\n5aXC293s9l0fwaDJ6rUncTlnsosYvyiGlJwilgT6MUSNojXyflNpDNXTGrZ6mhSWYTmSls+k8Fiy\nCkqJnD6EYV1VzoFZEVIjpkAIwX/X72HB74cYPagdb47uq10fIzViSv45mE7QknhaNapDVIg/bRqr\nuBEo6RtYPcVuP74HGmlQnljiMk5nFzJ+YQxpeSUsDfJjcEcZ7BoFwwS8hqyedmgjLL3bbrcbAsG/\n6OaO5EJ2n85hcngcFquVqBB/+rXTIXtHcR68US4ftOx8DInVKpj77U6WxxxjckBHXrnrKu2yd8gb\nlCn5c38qIUsTaNe0HlHB/rRUcx9JxK1w7G+7LdOOmY6TWbZgNzPfFuwOUiNVnWNf0qInzIx1fRtu\niiECXsfqaXPv6E3gNQZIpF3+BjV7JzRpr48vkn+x+aitQED92rVYGTpUeiOWCgAAESdJREFU/c0j\nFyN2AaxzKB4xdgX0ukN7PySXpcxi5ek1O/hy60keuL4rz9yi4YBaBr
[... base64-encoded PNG data elided: matplotlib figure of the intermediate fitting results produced by the cell below ...]",
-      "text/plain": [
-       "<matplotlib.figure.Figure at 0x7f3c7b4f9e50>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "# to plot the intermediate results\n",
-    "fig, axes = plt.subplots(3, 5, figsize=(12, 8))\n",
-    "x = tensor.from_numpy(train_x)\n",
-    "y = tensor.from_numpy(train_y)\n",
-    "# sgd\n",
-    "for idx in range(max_iter):\n",
-    "    y_ = x * k + b\n",
-    "    err = y_ - y\n",
-    "    loss = tensor.sum(err * err) / nb_points\n",
-    "    print('loss at iter %d = %f' % (idx, loss))\n",
-    "    da1 = tensor.sum(err * x) / nb_points\n",
-    "    db1 = tensor.sum(err) / nb_points\n",
-    "    # update the parameters\n",
-    "    k -= da1 * alpha\n",
-    "    b -= db1 * alpha\n",
-    "    plot(idx, tensor.to_numpy(x), tensor.to_numpy(y_))\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "We can see that the learned line is becoming closer to the ground truth line (in blue color).\n",
-    "## Next: [MLP example](./mlp.ipynb)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [default]",
-   "language": "python",
-   "name": "python2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/doc/en/docs/notebook/requirements.txt b/doc/en/docs/notebook/requirements.txt
deleted file mode 100644
index 21e293b..0000000
--- a/doc/en/docs/notebook/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-matplotlib=2.0.0=np112py27_0
-nb_conda_kernels=2.0.0=py27_0
-nb_conda=2.0.0=py27_0
-pillow=4.0.0=py27_1
-tqdm=4.11.2=py27_0
diff --git a/doc/en/docs/notebook/rnn.ipynb b/doc/en/docs/notebook/rnn.ipynb
deleted file mode 100644
index 054ac19..0000000
--- a/doc/en/docs/notebook/rnn.ipynb
+++ /dev/null
@@ -1,257 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# RNN for Character Level Language Modeling"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Dataset pre-processing\n",
-    "\n",
-    "### sample data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import cPickle as pickle\n",
-    "import numpy as np\n",
-    "import argparse\n",
-    "\n",
-    "# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))\n",
-    "from singa import layer\n",
-    "from singa import loss\n",
-    "from singa import device\n",
-    "from singa import tensor\n",
-    "from singa import optimizer\n",
-    "from singa import initializer\n",
-    "from singa.proto import model_pb2\n",
-    "from tqdm import tnrange"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "class Data(object):\n",
-    "\n",
-    "    def __init__(self, fpath, batch_size=32, seq_length=100, train_ratio=0.8):\n",
-    "        '''Data object for loading a plain text file.\n",
-    "        Args:\n",
-    "            fpath, path to the text file.\n",
-    "            train_ratio, split the text file into train and test sets, where\n",
-    "                train_ratio of the characters are in the train set.\n",
-    "        '''\n",
-    "        self.raw_data = open(fpath, 'r').read()  # read text file\n",
-    "        chars = list(set(self.raw_data))\n",
-    "        self.vocab_size = len(chars)\n",
-    "        self.char_to_idx = {ch: i for i, ch in enumerate(chars)}\n",
-    "        self.idx_to_char = {i: ch for i, ch in enumerate(chars)}\n",
-    "        data = [self.char_to_idx[c] for c in self.raw_data]\n",
-    "        # seq_length + 1 for the data + label\n",
-    "        nsamples = len(data) / (1 + seq_length)\n",
-    "        data = data[0:nsamples * (1 + seq_length)]\n",
-    "        data = np.asarray(data, dtype=np.int32)\n",
-    "        data = np.reshape(data, (-1, seq_length + 1))\n",
-    "        # shuffle all sequences\n",
-    "        np.random.shuffle(data)\n",
-    "        self.train_dat = data[0:int(data.shape[0]*train_ratio)]\n",
-    "        self.num_train_batch = self.train_dat.shape[0] / batch_size\n",
-    "        self.val_dat = data[self.train_dat.shape[0]:]\n",
-    "        self.num_test_batch = self.val_dat.shape[0] / batch_size\n",
-    "        print 'train dat', self.train_dat.shape\n",
-    "        print 'val dat', self.val_dat.shape\n",
-    "        \n",
-    "def numpy2tensors(npx, npy, dev):\n",
-    "    '''batch, seq, dim -- > seq, batch, dim'''\n",
-    "    tmpx = np.swapaxes(npx, 0, 1)\n",
-    "    tmpy = np.swapaxes(npy, 0, 1)\n",
-    "    inputs = []\n",
-    "    labels = []\n",
-    "    for t in range(tmpx.shape[0]):\n",
-    "        x = tensor.from_numpy(tmpx[t])\n",
-    "        y = tensor.from_numpy(tmpy[t])\n",
-    "        x.to_device(dev)\n",
-    "        y.to_device(dev)\n",
-    "        inputs.append(x)\n",
-    "        labels.append(y)\n",
-    "    return inputs, labels\n",
-    "\n",
-    "\n",
-    "def convert(batch, batch_size, seq_length, vocab_size, dev):\n",
-    "    '''convert a batch of data into a sequence of input tensors'''\n",
-    "    y = batch[:, 1:]\n",
-    "    x1 = batch[:, :seq_length]\n",
-    "    x = np.zeros((batch_size, seq_length, vocab_size), dtype=np.float32)\n",
-    "    for b in range(batch_size):\n",
-    "        for t in range(seq_length):\n",
-    "            c = x1[b, t]\n",
-    "            x[b, t, c] = 1\n",
-    "    return numpy2tensors(x, y, dev)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Create the network"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "\n",
-    "def get_lr(epoch):\n",
-    "    return 0.001 / float(1 << (epoch / 50))\n",
-    "\n",
-    "data = Data('static/linux_input.txt')\n",
-    "# SGD with L2 gradient normalization\n",
-    "opt = optimizer.RMSProp(constraint=optimizer.L2Constraint(5))\n",
-    "cuda = device.create_cuda_gpu()\n",
-    "rnn = layer.LSTM(name='lstm', hidden_size=32, num_stacks=1, dropout=0.5, input_sample_shape=(data.vocab_size,))\n",
-    "rnn.to_device(cuda)\n",
-    "rnn_w = rnn.param_values()[0]\n",
-    "rnn_w.uniform(-0.08, 0.08)  \n",
-    "\n",
-    "dense = layer.Dense('dense', data.vocab_size, input_sample_shape=(32,))\n",
-    "dense.to_device(cuda)\n",
-    "dense_w = dense.param_values()[0]\n",
-    "dense_b = dense.param_values()[1]\n",
-    "print 'dense w ', dense_w.shape\n",
-    "print 'dense b ', dense_b.shape\n",
-    "initializer.uniform(dense_w, dense_w.shape[0], 0)\n",
-    "print 'dense weight l1 = %f' % (dense_w.l1())\n",
-    "dense_b.set_value(0)\n",
-    "print 'dense b l1 = %f' % (dense_b.l1())\n",
-    "\n",
-    "g_dense_w = tensor.Tensor(dense_w.shape, cuda)\n",
-    "g_dense_b = tensor.Tensor(dense_b.shape, cuda)\n",
-    "\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Conduct SGD"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "lossfun = loss.SoftmaxCrossEntropy()\n",
-    "train_loss = 0\n",
-    "for epoch in range(3):\n",
-    "    bar = tnrange(data.num_train_batch, desc='Epoch %d' % 0)\n",
-    "    for b in bar:\n",
-    "        batch = data.train_dat[b * batch_size: (b + 1) * batch_size]\n",
-    "        inputs, labels = convert(batch, batch_size, seq_length, data.vocab_size, cuda)\n",
-    "        inputs.append(tensor.Tensor())\n",
-    "        inputs.append(tensor.Tensor())\n",
-    "\n",
-    "        outputs = rnn.forward(model_pb2.kTrain, inputs)[0:-2]\n",
-    "        grads = []\n",
-    "        batch_loss = 0\n",
-    "        g_dense_w.set_value(0.0)\n",
-    "        g_dense_b.set_value(0.0)\n",
-    "        for output, label in zip(outputs, labels):\n",
-    "            act = dense.forward(model_pb2.kTrain, output)\n",
-    "            lvalue = lossfun.forward(model_pb2.kTrain, act, label)\n",
-    "            batch_loss += lvalue.l1()\n",
-    "            grad = lossfun.backward()\n",
-    "            grad /= batch_size\n",
-    "            grad, gwb = dense.backward(model_pb2.kTrain, grad)\n",
-    "            grads.append(grad)\n",
-    "            g_dense_w += gwb[0]\n",
-    "            g_dense_b += gwb[1]\n",
-    "            # print output.l1(), act.l1()\n",
-    "            bar.set_postfix(train_loss=batch_loss / seq_length)\n",
-    "        train_loss += batch_loss\n",
-    "\n",
-    "        grads.append(tensor.Tensor())\n",
-    "        grads.append(tensor.Tensor())\n",
-    "        g_rnn_w = rnn.backward(model_pb2.kTrain, grads)[1][0]\n",
-    "        dense_w, dense_b = dense.param_values()\n",
-    "        opt.apply_with_lr(epoch, get_lr(epoch), g_rnn_w, rnn_w, 'rnnw')\n",
-    "        opt.apply_with_lr(epoch, get_lr(epoch), g_dense_w, dense_w, 'dense_w')\n",
-    "        opt.apply_with_lr(epoch, get_lr(epoch), g_dense_b, dense_b, 'dense_b')\n",
-    "    print '\\nEpoch %d, train loss is %f' % (epoch, train_loss / data.num_train_batch / seq_length)\n",
-    "\n"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Checkpoint"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "with open('%s_%d.bin' % (model_path, epoch), 'wb') as fd:\n",
-    "    print 'saving model to %s' % model_path\n",
-    "    d = {}\n",
-    "    for name, w in zip(['rnn_w', 'dense_w', 'dense_b'],[rnn_w, dense_w, dense_b]):\n",
-    "        d[name] = tensor.to_numpy(w)\n",
-    "    d['idx_to_char'] = data.idx_to_char\n",
-    "    d['char_to_idx'] = data.char_to_idx\n",
-    "    d['hidden_size'] = hidden_size\n",
-    "    d['num_stacks'] = num_stacks\n",
-    "    d['dropout'] = dropout\n",
-    "    pickle.dump(d, fd)"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [conda env:conda]",
-   "language": "python",
-   "name": "conda-env-conda-py"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/doc/en/docs/notebook/static/bp.PNG b/doc/en/docs/notebook/static/bp.PNG
deleted file mode 100644
index ac5db33..0000000
--- a/doc/en/docs/notebook/static/bp.PNG
+++ /dev/null
Binary files differ
diff --git a/doc/en/docs/notebook/static/digit.jpg b/doc/en/docs/notebook/static/digit.jpg
deleted file mode 100644
index 8350d88..0000000
--- a/doc/en/docs/notebook/static/digit.jpg
+++ /dev/null
Binary files differ
diff --git a/doc/en/docs/notebook/static/models.PNG b/doc/en/docs/notebook/static/models.PNG
deleted file mode 100644
index 9ebcfa2..0000000
--- a/doc/en/docs/notebook/static/models.PNG
+++ /dev/null
Binary files differ
diff --git a/doc/en/docs/notebook/static/sgd.png b/doc/en/docs/notebook/static/sgd.png
deleted file mode 100644
index 9eac916..0000000
--- a/doc/en/docs/notebook/static/sgd.png
+++ /dev/null
Binary files differ
diff --git a/doc/en/docs/notebook/static/singav1-sw.png b/doc/en/docs/notebook/static/singav1-sw.png
deleted file mode 100644
index e443c6e..0000000
--- a/doc/en/docs/notebook/static/singav1-sw.png
+++ /dev/null
Binary files differ
diff --git a/doc/en/docs/notebook/utils.py b/doc/en/docs/notebook/utils.py
deleted file mode 100755
index ff772ad..0000000
--- a/doc/en/docs/notebook/utils.py
+++ /dev/null
@@ -1,138 +0,0 @@
-""" This file contains different utility functions that are not connected
-in anyway to the networks presented in the tutorials, but rather help in
-processing the outputs into a more understandable way.
-
-For example ``tile_raster_images`` helps in generating a easy to grasp
-image from a set of samples or weights.
-"""
-
-import numpy
-
-
-def scale_to_unit_interval(ndar, eps=1e-8):
-    """ Scales all values in the ndarray ndar to be between 0 and 1 """
-    ndar = ndar.copy()
-    ndar -= ndar.min()
-    ndar *= 1.0 / (ndar.max() + eps)
-    return ndar
-
-
-def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),
-                       scale_rows_to_unit_interval=True,
-                       output_pixel_vals=True):
-    """
-    Transform an array with one flattened image per row, into an array in
-    which images are reshaped and laid out like tiles on a floor.
-
-    This function is useful for visualizing datasets whose rows are images,
-    and also columns of matrices for transforming those rows
-    (such as the first layer of a neural net).
-
-    :type X: a 2-D ndarray or a tuple of 4 channels, elements of which can
-    be 2-D ndarrays or None;
-    :param X: a 2-D array in which every row is a flattened image.
-
-    :type img_shape: tuple; (height, width)
-    :param img_shape: the original shape of each image
-
-    :type tile_shape: tuple; (rows, cols)
-    :param tile_shape: the number of images to tile (rows, cols)
-
-    :param output_pixel_vals: if output should be pixel values (i.e. int8
-    values) or floats
-
-    :param scale_rows_to_unit_interval: if the values need to be scaled before
-    being plotted to [0,1] or not
-
-
-    :returns: array suitable for viewing as an image.
-    (See:`Image.fromarray`.)
-    :rtype: a 2-d array with same dtype as X.
-
-    """
-
-    assert len(img_shape) == 2
-    assert len(tile_shape) == 2
-    assert len(tile_spacing) == 2
-
-    # The expression below can be re-written in a more C style as
-    # follows :
-    #
-    # out_shape    = [0,0]
-    # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -
-    #                tile_spacing[0]
-    # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -
-    #                tile_spacing[1]
-    out_shape = [
-        (ishp + tsp) * tshp - tsp
-        for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)
-    ]
-
-    if isinstance(X, tuple):
-        assert len(X) == 4
-        # Create an output numpy ndarray to store the image
-        if output_pixel_vals:
-            out_array = numpy.zeros((out_shape[0], out_shape[1], 4),
-                                    dtype='uint8')
-        else:
-            # X is a tuple here and has no dtype of its own; assume the first
-            # channel is given and use its dtype
-            out_array = numpy.zeros((out_shape[0], out_shape[1], 4),
-                                    dtype=X[0].dtype)
-
-        # colors default to 0, alpha defaults to 1 (opaque)
-        if output_pixel_vals:
-            channel_defaults = [0, 0, 0, 255]
-        else:
-            channel_defaults = [0., 0., 0., 1.]
-
-        for i in range(4):
-            if X[i] is None:
-                # if channel is None, fill it with zeros of the correct
-                # dtype
-                dt = out_array.dtype
-                if output_pixel_vals:
-                    dt = 'uint8'
-                out_array[:, :, i] = numpy.zeros(
-                    out_shape,
-                    dtype=dt
-                ) + channel_defaults[i]
-            else:
-                # use a recurrent call to compute the channel and store it
-                # in the output
-                out_array[:, :, i] = tile_raster_images(
-                    X[i], img_shape, tile_shape, tile_spacing,
-                    scale_rows_to_unit_interval, output_pixel_vals)
-        return out_array
-
-    else:
-        # if we are dealing with only one channel
-        H, W = img_shape
-        Hs, Ws = tile_spacing
-
-        # generate a matrix to store the output
-        dt = X.dtype
-        if output_pixel_vals:
-            dt = 'uint8'
-        out_array = numpy.zeros(out_shape, dtype=dt)
-
-        for tile_row in range(tile_shape[0]):
-            for tile_col in range(tile_shape[1]):
-                if tile_row * tile_shape[1] + tile_col < X.shape[0]:
-                    this_x = X[tile_row * tile_shape[1] + tile_col]
-                    if scale_rows_to_unit_interval:
-                        # if we should scale values to be between 0 and 1
-                        # do this by calling the `scale_to_unit_interval`
-                        # function
-                        this_img = scale_to_unit_interval(
-                            this_x.reshape(img_shape))
-                    else:
-                        this_img = this_x.reshape(img_shape)
-                    # add the slice to the corresponding position in the
-                    # output array
-                    c = 1
-                    if output_pixel_vals:
-                        c = 255
-                    out_array[
-                        tile_row * (H + Hs): tile_row * (H + Hs) + H,
-                        tile_col * (W + Ws): tile_col * (W + Ws) + W
-                    ] = this_img * c
-        return out_array
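-
-
-if __name__ == '__main__':
-    # Minimal usage sketch (assumes only numpy): tile ten random 8x8
-    # "images" into a 2x5 grid with one pixel of spacing between tiles.
-    samples = numpy.random.rand(10, 64)
-    tiled = tile_raster_images(samples, img_shape=(8, 8),
-                               tile_shape=(2, 5), tile_spacing=(1, 1))
-    print(tiled.shape)  # (17, 44) = (2*8 + 1, 5*8 + 4)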
diff --git a/doc/en/docs/software_stack.md b/doc/en/docs/software_stack.md
deleted file mode 100644
index c60b6a5..0000000
--- a/doc/en/docs/software_stack.md
+++ /dev/null
@@ -1,99 +0,0 @@
-# Software Stack
-
-SINGA's software stack includes three major components, namely core, IO and
-model. Figure 1 illustrates these components together with the hardware.
-The core component provides memory management and tensor operations;
-IO provides classes for reading data from (and writing data to) disk and network; and the
-model component provides data structures and algorithms for machine learning models,
-e.g., layers for neural network models and optimizers/initializers/metrics/losses for
-general machine learning models.
-
-
-<img src="../_static/images/singav1-sw.png" align="center" width="500px"/>
-<br/>
-<span><strong>Figure 1 - SINGA V1 software stack.</strong></span>
-
-## Core
-
-[Tensor](tensor.html) and [Device](device.html) are the two core abstractions in SINGA. The Tensor class represents a
-multi-dimensional array, which stores model variables and provides linear algebra
-operations for machine learning
-algorithms, including matrix multiplication and random functions. Each tensor
-instance (i.e. a tensor) is allocated on a Device instance.
-Each Device instance (i.e. a device) is created against one hardware device,
-e.g. a GPU card or a CPU core. Devices manage the memory of tensors and execute
-tensor operations on their execution units, e.g. CPU threads or CUDA streams.
-
-Depending on the hardware and the programming language, SINGA has implemented
-the following specific device classes:
-
-* **CudaGPU** represents an Nvidia GPU card. The execution units are the CUDA streams.
-* **CppCPU** represents a normal CPU. The execution units are the CPU threads.
-* **OpenclGPU** represents a general GPU card from either Nvidia or AMD.
-  The execution units are the CommandQueues. Given that OpenCL is compatible with
-  many hardware devices, e.g. FPGA and ARM, the OpenclGPU has the potential to be
-  extended for other devices.
-
-Different types of devices use different programming languages to write the kernel
-functions for tensor operations:
-
-* CppMath (tensor_math_cpp.h) implements the tensor operations using Cpp for CppCPU
-* CudaMath (tensor_math_cuda.h) implements the tensor operations using CUDA for CudaGPU
-* OpenclMath (tensor_math_opencl.h) implements the tensor operations using OpenCL for OpenclGPU
-
-In addition, different types of data, such as float32 and float16, could be supported by adding
-the corresponding tensor functions.
-
-Typically, users create a device instance and pass it when creating multiple
-tensor instances. When users call the Tensor functions, these functions invoke
-the corresponding implementation (CppMath/CudaMath/OpenclMath) automatically. In
-other words, the implementation of Tensor operations is transparent to users.
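-
-To make this concrete, below is a minimal sketch of the typical usage (based on
-the SINGA 1.x Python API used by the examples in this repository; the shapes and
-values are arbitrary):
-
-```python
-from singa import device, tensor
-
-# create a device instance backed by the first CUDA GPU
-dev = device.create_cuda_gpu()
-
-# allocate tensors on that device; operations on them are dispatched
-# to the CUDA implementation (CudaMath) automatically
-a = tensor.Tensor((2, 3), dev)
-a.uniform(-1, 1)
-b = a * a  # element-wise multiplication, executed on the GPU
-print(tensor.to_numpy(b))  # copy the result back to host memory
-```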
-
-Most machine learning algorithms can be expressed using (dense or sparse) tensors.
-Therefore, the Tensor abstraction enables SINGA to run a wide range of models,
-including deep learning models and other traditional machine learning models.
-
-The Tensor and Device abstractions are extensible to support a wide range of hardware devices
-using different programming languages. A new hardware device can be supported by
-adding a new Device subclass and the corresponding implementation of the Tensor
-operations (xxxMath).
-
-Optimizations in terms of speed and memory are implemented by Device, which
-manages both operation execution and memory malloc/free. More optimization details
-are described in the [Device page](device.html).
-
-
-## Model
-
-On top of the Tensor and Device abstractions, SINGA provides some higher level
-classes for machine learning modules.
-
-* [Layer](layer.html) and its subclasses are specific to neural networks. Every layer provides
-  functions for forward propagating features and backward propagating gradients w.r.t. the training loss functions.
-  Layers wrap complex operations so that users can easily create neural nets
-  by connecting a set of layers.
-
-* [Initializer](initializer.html) and its subclasses provide various methods for initializing
-  model parameters (stored in Tensor instances), e.g., following uniform or Gaussian distributions.
-
-* [Loss](loss.html) and its subclasses define the training objective (loss) functions.
-  Each subclass implements both the computation of the loss value and the computation of the
-  gradient of the prediction w.r.t. the objective. Example loss functions include squared error and cross entropy.
-
-* [Metric](metric.html) and its subclasses provide functions for measuring the
-  performance of the model, e.g., the accuracy.
-
-* [Optimizer](optimizer.html) and its subclasses implement the methods for updating
-  model parameter values using parameter gradients, including SGD, AdaGrad and RMSProp. A short
-  sketch of how these classes compose is given below.
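-
-A minimal sketch of how these classes compose into a single training step (based on
-the SINGA 1.x Python API used by the examples in this repository; the layer sizes,
-data and learning rate are arbitrary):
-
-```python
-import numpy as np
-from singa import layer, loss, optimizer, tensor
-from singa.proto import model_pb2
-
-dense = layer.Dense('dense', 10, input_sample_shape=(32,))  # a Layer
-w, b = dense.param_values()
-w.uniform(-0.1, 0.1)  # initialize the parameters
-b.set_value(0)
-lossfun = loss.SoftmaxCrossEntropy()  # a Loss
-opt = optimizer.SGD(momentum=0.9)     # an Optimizer
-
-x = tensor.from_numpy(np.random.rand(8, 32).astype(np.float32))  # a batch of 8 samples
-y = tensor.from_numpy(np.zeros((8,), dtype=np.int32))            # dummy labels
-
-act = dense.forward(model_pb2.kTrain, x)            # forward propagate features
-lvalue = lossfun.forward(model_pb2.kTrain, act, y)  # loss for this batch
-print(lvalue.l1())                                  # average loss value, as in the notebooks
-grad = lossfun.backward()                           # gradient w.r.t. the prediction
-grad, (gw, gb) = dense.backward(model_pb2.kTrain, grad)
-opt.apply_with_lr(0, 0.01, gw, w, 'w')              # update the parameters
-opt.apply_with_lr(0, 0.01, gb, b, 'b')
-```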
-
-
-## IO
-
-The IO module consists of classes for data loading, data preprocessing and message passing.
-
-* Reader and its subclasses load string records from disk files
-* Writer and its subclasses write string records to disk files
-* Encoder and its subclasses encode Tensor instances into string records
-* Decoder and its subclasses decode string records into Tensor instances
-* Endpoint represents a communication endpoint and provides functions for passing messages between processes.
-* Message represents a communication message exchanged between Endpoint instances. It carries both metadata and payload.
diff --git a/doc/en/downloads.md b/doc/en/downloads.md
deleted file mode 100644
index a7d5189..0000000
--- a/doc/en/downloads.md
+++ /dev/null
@@ -1,108 +0,0 @@
-## Download SINGA
-
-* Latest code: please clone the dev branch from [Github](https://github.com/apache/incubator-singa)
-
-* v1.1.0 (12 February 2017):
-     * [Apache SINGA 1.1.0](http://www.apache.org/dyn/closer.cgi/incubator/singa/1.1.0/apache-singa-incubating-1.1.0.tar.gz)
-      [\[MD5\]](https://dist.apache.org/repos/dist/release/incubator/singa/1.1.0/apache-singa-incubating-1.1.0.tar.gz.md5)
-      [\[KEYS\]](https://dist.apache.org/repos/dist/release/incubator/singa/1.1.0/KEYS)
-    * [Release Notes 1.1.0](releases/RELEASE_NOTES_1.1.0.html)
-    * New features and major updates,
-        * Create Docker images (CPU and GPU versions)
-        * Create Amazon AMI for SINGA (CPU version)
-        * Integrate with Jenkins for automatically generating Wheel and Debian packages (for installation), and updating the website.
-        * Enhance the FeedForwardNet, e.g., multiple inputs and verbose mode for debugging
-        * Add Concat and Slice layers
-        * Extend CrossEntropyLoss to accept instance with multiple labels
-        * Add image_tool.py with image augmentation methods
-        * Support model loading and saving via the Snapshot API
-        * Compile SINGA source on Windows
-        * Compile mandatory dependent libraries together with SINGA code
-        * Enable Java binding (basic) for SINGA
-        * Add version ID in checkpointing files
-        * Add Rafiki toolkit for providing RESTful APIs
-        * Add examples pretrained from Caffe, including GoogleNet
-
-
-
-* v1.0.0 (8 September 2016):
-    * [Apache SINGA 1.0.0](https://archive.apache.org/dist/incubator/singa/1.0.0/apache-singa-incubating-1.0.0.tar.gz)
-      [\[MD5\]](https://archive.apache.org/dist/incubator/singa/1.0.0/apache-singa-incubating-1.0.0.tar.gz.md5)
-      [\[KEYS\]](https://archive.apache.org/dist/incubator/singa//1.0.0/KEYS)
-    * [Release Notes 1.0.0](releases/RELEASE_NOTES_1.0.0.html)
-    * New features and major updates,
-        * Tensor abstraction for supporting more machine learning models.
-        * Device abstraction for running on different hardware devices, including CPU, (Nvidia/AMD) GPU and FPGA (to be tested in later versions).
-        * Replace GNU autotool with cmake for compilation.
-        * Support Mac OS
-        * Improve Python binding, including installation and programming
-        * More deep learning models, including VGG and ResNet
-        * More IO classes for reading/writing files and encoding/decoding data
-        * New network communication components directly based on Socket.
-        * Cudnn V5 with Dropout and RNN layers.
-        * Replace the website building tool, moving from Maven to Sphinx
-        * Integrate Travis-CI
-
-
-* v0.3.0 (20 April 2016):
-    * [Apache SINGA 0.3.0](https://archive.apache.org/dist/incubator/singa/0.3.0/apache-singa-incubating-0.3.0.tar.gz)
-      [\[MD5\]](https://archive.apache.org/dist/incubator/singa/0.3.0/apache-singa-incubating-0.3.0.tar.gz.md5)
-      [\[KEYS\]](https://archive.apache.org/dist/incubator/singa/0.3.0/KEYS)
-    * [Release Notes 0.3.0](releases/RELEASE_NOTES_0.3.0.html)
-    * New features and major updates,
-        * [Training on GPU cluster](v0.3.0/gpu.html) enables training of deep learning models over a GPU cluster.
-        * [Python wrapper improvement](v0.3.0/python.html) makes it easy to configure the job, including neural net and SGD algorithm.
-        * [New SGD updaters](v0.3.0/updater.html) are added, including Adam, AdaDelta and AdaMax.
-        * [Installation](v0.3.0/installation.html) has fewer dependent libraries for single node training.
-        * Heterogeneous training with CPU and GPU.
-        * Support cuDNN V4.
-        * Data prefetching.
-        * Fix some bugs.
-
-
-
-* v0.2.0 (14 January 2016):
-    * [Apache SINGA 0.2.0](https://archive.apache.org/dist/incubator/singa/0.2.0/apache-singa-incubating-0.2.0.tar.gz)
-      [\[MD5\]](https://archive.apache.org/dist/incubator/singa/0.2.0/apache-singa-incubating-0.2.0.tar.gz.md5)
-      [\[KEYS\]](https://archive.apache.org/dist/incubator/singa/0.2.0/KEYS)
-    * [Release Notes 0.2.0](releases/RELEASE_NOTES_0.2.0.html)
-    * New features and major updates,
-        * [Training on GPU](v0.2.0/gpu.html) enables training of complex models on a single node with multiple GPU cards.
-        * [Hybrid neural net partitioning](v0.2.0/hybrid.html) supports data and model parallelism at the same time.
-        * [Python wrapper](v0.2.0/python.html) makes it easy to configure the job, including neural net and SGD algorithm.
-        * [RNN model and BPTT algorithm](v0.2.0/general-rnn.html) are implemented to support applications based on RNN models, e.g., GRU.
-        * [Cloud software integration](v0.2.0/distributed-training.html) includes Mesos, Docker and HDFS.
-        * Visualization of neural net structure and layer information, which is helpful for debugging.
-        * Linear algebra functions and random functions against Blobs and raw data pointers.
-        * New layers, including SoftmaxLayer, ArgSortLayer, DummyLayer, RNN layers and cuDNN layers.
-        * Update Layer class to carry multiple data/grad Blobs.
-        * Extract features and test performance for new data by loading previously trained model parameters.
-        * Add Store class for IO operations.
-
-
-* v0.1.0 (8 October 2015):
-    * [Apache SINGA 0.1.0](https://archive.apache.org/dist/incubator/singa/apache-singa-incubating-0.1.0.tar.gz)
-      [\[MD5\]](https://archive.apache.org/dist/incubator/singa/apache-singa-incubating-0.1.0.tar.gz.md5)
-      [\[KEYS\]](https://archive.apache.org/dist/incubator/singa/KEYS)
-    * [Amazon EC2 image](https://console.aws.amazon.com/ec2/v2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-b41001e6)
-    * [Release Notes 0.1.0](releases/RELEASE_NOTES_0.1.0.html)
-    * Major features include,
-        * Installation using GNU build utility
-        * Scripts for job management with zookeeper
-        * Programming model based on NeuralNet and Layer abstractions.
-        * System architecture based on Worker, Server and Stub.
-        * Training models from three different model categories, namely, feed-forward models, energy models and RNN models.
-        * Synchronous and asynchronous distributed training frameworks using CPU
-        * Checkpoint and restore
-        * Unit test using gtest
-
-**Disclaimer**
-
-Apache SINGA is an effort undergoing incubation at The Apache Software
-Foundation (ASF), sponsored by the Apache Incubator PMC. Incubation is
-required of all newly accepted projects until a further review indicates that
-the infrastructure, communications, and decision making process have stabilized
-in a manner consistent with other successful ASF projects. While incubation
-status is not necessarily a reflection of the completeness or stability of the
-code, it does indicate that the project has yet to be fully endorsed by the
-ASF.
diff --git a/doc/en/index.rst b/doc/en/index.rst
deleted file mode 100755
index 56c7be9..0000000
--- a/doc/en/index.rst
+++ /dev/null
@@ -1,155 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-.. SINGA documentation master file, created by
-   sphinx-quickstart on Sat Jul  9 20:36:57 2016.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-Welcome to Apache SINGA
-=======================
-
-Recent News
------------
-
-* **Version 1.1.0** is now available, 12 Feb, 2017. `Download SINGA v1.1.0 <downloads.html>`_
-
-* A tutorial on SINGA V1 will be given at `SGInnovate <https://www.eventbrite.sg/e/ai-eveningssginnovate-apache-singa-tickets-31505061487>`_, on 23 March, 2017
-
-* **Version 1.0.0** is now available, 9 Sep, 2016. `Download SINGA v1.0.0 <downloads.html>`_
-
-* SINGA will be presented at `REWORK <https://www.re-work.co/events/deep-learning-singapore/schedule>`_, 21 Oct, 2016.
-
-* SINGA was presented at `PyDataSG <http://www.meetup.com/PyData-SG/events/229691286/>`_, 16 Aug, 2016.
-
-* **Version 0.3.0** is now available, 20 April, 2016. `Download SINGA v0.3.0 <downloads.html>`_
-
-* **Version 0.2.0** is now available, 14 Jan, 2016. `Download SINGA v0.2.0 <downloads.html>`_.
-
-* SINGA will be presented at `Strata+Hadoop <http://strataconf.com/big-data-conference-sg-2015/public/schedule/detail/45123>`_ on 2 Dec, 2015
-
-* SINGA was presented at `ACM Multimedia <http://www.acmmm.org/2015/at-a-glance/>`_ Best Paper session and Open Source Software Competition session, 26-30 Oct, 2015 (`Slides <http://www.comp.nus.edu.sg/~dbsystem/singa//assets/file/mm2015.ppt>`_)
-
-* **Version 0.1.0** is now available, 8 Oct, 2015. `Download SINGA v0.1.0 <downloads.html>`_.
-
-* SINGA was presented at `workshop on deep learning <http://www.comp.nus.edu.sg/~dbsystem/singa/workshop>`_  held on 16 Sep, 2015
-
-* SINGA was presented at `BOSS <http://boss.dima.tu-berlin.de/>`_ of `VLDB 2015 <http://www.vldb.org/2015/>`_ at Hawaii, 4 Sep, 2015. (slides: `overview <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/singa-vldb-boss.pptx>`_, `basic <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/basic-user-guide.pptx>`_, `advanced <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/advanced-user-guide.pptx>`_)
-
-* SINGA was presented at `ADSC/I2R Deep Learning Workshop <http://adsc.illinois.edu/contact-us>`_, 25 Aug, 2015.
-
-* A tutorial on SINGA was given at VLDB summer school at Tsinghua University,  25-31 July, 2015.
-
-* A half day tutorial on SINGA was given at I2R, 29 June, 2015.
-
-* SINGA was presented at `DanaC <http://danac.org/>`_ of `SIGMOD 2015 <http://www.sigmod2015.org/index.shtml>`_ at Melbourne, 31 May - 4 June, 2015.
-
-* SINGA has been accepted by `Apache Incubator <http://incubator.apache.org/>`_, 17 March, 2015.
-
-Getting Started
----------------
-* Try SINGA on `AWS <https://aws.amazon.com/marketplace/pp/B01NAUAWZW>`_ or via `Docker <https://hub.docker.com/r/nusdbsystem/singa/>`_.
-
-* Install SINGA via `python wheel files <./docs/installation.html#from-wheel>`_, `Debian packages <./docs/installation.html#from-debian-package>`_ or from `source <./docs/installation.html#from-source>`_.
-
-* Refer to the `Jupyter notebooks <http://nbviewer.jupyter.org/github/apache/incubator-singa/blob/master/doc/en/docs/notebook/index.ipynb>`_ for some basic examples and the `model zoo page <./docs/model_zoo/index.html>`_ for more examples.
-
-.. |logo1| image:: _static/github.png
-   :scale: 100%
-   :align: middle
-   :target: https://github.com/apache/incubator-singa
-.. |logo2| image:: _static/awsmp.gif 
-   :scale: 100%
-   :align: middle
-   :target: https://aws.amazon.com/marketplace/seller-profile?id=5bcac385-12c4-4802-aec7-351e09b77b4c
-.. |logo3| image:: _static/docker.png
-   :scale: 50%
-   :align: middle
-   :target: https://hub.docker.com/r/nusdbsystem/singa/
-.. |logo4| image:: _static/jupyter.png
-   :scale: 25%
-   :align: middle
-   :target: http://nbviewer.jupyter.org/github/apache/incubator-singa/blob/master/doc/en/docs/notebook/index.ipynb
-
-+---------+---------+---------+---------+
-| |logo1| | |logo2| | |logo3| | |logo4| |
-+---------+---------+---------+---------+
-
-Documentation
--------------
-
-* Documentation and Python APIs are listed `here <docs.html>`_.
-* `C++ APIs <http://www.comp.nus.edu.sg/~dbsystem/singa/api/>`_ are generated by Doxygen.
-* Research publication list is available `here <http://www.comp.nus.edu.sg/~dbsystem/singa/research/publication/>`_.
-
-How to contribute
-----------------------
-
-* Please subscribe to our development mailing list dev-subscribe@singa.incubator.apache.org.
-
-* If you find any issues using SINGA, please report it to the `Issue Tracker <https://issues.apache.org/jira/browse/singa>`_.
-
-* You can also contact `SINGA committers <community/team-list.html>`_ directly.
-
-More details on contributing to SINGA are described `here <develop/how-contribute.html>`_.
-
-Citing SINGA
-------------
-
-Please cite the following two papers if you use SINGA in your research:
-
-* B. C. Ooi, K.-L. Tan, S. Wang, W. Wang, Q. Cai, G. Chen, J. Gao, Z. Luo, A. K. H. Tung, Y. Wang, Z. Xie, M. Zhang, and K. Zheng. `SINGA: A distributed deep learning platform <http://www.comp.nus.edu.sg/~ooibc/singaopen-mm15.pdf>`_. ACM Multimedia (Open Source Software Competition) 2015 (`BibTex <http://www.comp.nus.edu.sg/~dbsystem/singa//assets/file/bib-oss.txt>`_).
-
-* W. Wang, G. Chen, T. T. A. Dinh, B. C. Ooi, K.-L.Tan, J. Gao, and S. Wang. `SINGA: putting deep learning in the hands of multimedia users <http://www.comp.nus.edu.sg/~ooibc/singa-mm15.pdf>`_. ACM Multimedia 2015 (`BibTex <http://www.comp.nus.edu.sg/~dbsystem/singa//assets/file/bib-singa.txt>`_, `Slides <files/mm2015.ppt>`_).
-
-.. toctree::
-   :hidden:
-
-   downloads
-   docs/index
-
-.. toctree::
-   :hidden:
-   :maxdepth: 2
-   :caption: Development
-
-   develop/schedule
-   develop/how-contribute
-   develop/contribute-code
-   develop/contribute-docs
-
-.. toctree::
-   :hidden:
-   :maxdepth: 2
-   :caption: Community
-
-   community/source-repository
-   community/mail-lists
-   community/issue-tracking
-   community/team-list
-
-
-
-License
-----------
-SINGA is released under `Apache License Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>`_.
-
-Disclaimers
------------
-
-Apache SINGA is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by the Apache Incubator. Incubation is required of all newly accepted projects until a further review indicates that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. While incubation status is not necessarily a reflection of the completeness or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
-
diff --git a/doc/en/releases/RELEASE_NOTES_0.1.0.md b/doc/en/releases/RELEASE_NOTES_0.1.0.md
deleted file mode 100644
index f0de7a5..0000000
--- a/doc/en/releases/RELEASE_NOTES_0.1.0.md
+++ /dev/null
@@ -1,99 +0,0 @@
-# singa-incubating-0.1.0 Release Notes
-
----
-
-SINGA is a general distributed deep learning platform for training big deep learning models over large datasets. It is
-designed with an intuitive programming model based on the layer abstraction. SINGA supports a wide variety of popular
-deep learning models.
-
-This release includes following features:
-
-  * Job management
-    * [SINGA-3](https://issues.apache.org/jira/browse/SINGA-3)  Use Zookeeper to check stopping (finish) time of the system
-    * [SINGA-16](https://issues.apache.org/jira/browse/SINGA-16)  Runtime Process id Management
-    * [SINGA-25](https://issues.apache.org/jira/browse/SINGA-25)  Setup glog output path
-    * [SINGA-26](https://issues.apache.org/jira/browse/SINGA-26)  Run distributed training in a single command
-    * [SINGA-30](https://issues.apache.org/jira/browse/SINGA-30)  Enhance easy-to-use feature and support concurrent jobs
-    * [SINGA-33](https://issues.apache.org/jira/browse/SINGA-33)  Automatically launch a number of processes in the cluster
-    * [SINGA-34](https://issues.apache.org/jira/browse/SINGA-34)  Support external zookeeper service
-    * [SINGA-38](https://issues.apache.org/jira/browse/SINGA-38)  Support concurrent jobs
-    * [SINGA-39](https://issues.apache.org/jira/browse/SINGA-39)  Avoid ssh in scripts for single node environment
-    * [SINGA-43](https://issues.apache.org/jira/browse/SINGA-43)  Remove Job-related output from workspace
-    * [SINGA-56](https://issues.apache.org/jira/browse/SINGA-56)  No automatic launching of zookeeper service
-    * [SINGA-73](https://issues.apache.org/jira/browse/SINGA-73)  Refine the selection of available hosts from host list
-
-
-  * Installation with GNU Auto tool
-    * [SINGA-4](https://issues.apache.org/jira/browse/SINGA-4)  Refine thirdparty-dependency installation
-    * [SINGA-13](https://issues.apache.org/jira/browse/SINGA-13)  Separate intermediate files of compilation from source files
-    * [SINGA-17](https://issues.apache.org/jira/browse/SINGA-17)  Add root permission within thirdparty/install.
-    * [SINGA-27](https://issues.apache.org/jira/browse/SINGA-27)  Generate python modules for proto objects
-    * [SINGA-53](https://issues.apache.org/jira/browse/SINGA-53)  Add lmdb compiling options
-    * [SINGA-62](https://issues.apache.org/jira/browse/SINGA-62)  Remove building scrips and auxiliary files
-    * [SINGA-67](https://issues.apache.org/jira/browse/SINGA-67)  Add singatest into build targets
-
-
-  * Distributed training
-    * [SINGA-7](https://issues.apache.org/jira/browse/SINGA-7)  Implement shared memory Hogwild algorithm
-    * [SINGA-8](https://issues.apache.org/jira/browse/SINGA-8)  Implement distributed Hogwild
-    * [SINGA-19](https://issues.apache.org/jira/browse/SINGA-19)  Slice large Param objects for load-balance
-    * [SINGA-29](https://issues.apache.org/jira/browse/SINGA-29)  Update NeuralNet class to enable layer partition type customization
-    * [SINGA-24](https://issues.apache.org/jira/browse/SINGA-24)  Implement Downpour training framework
-    * [SINGA-32](https://issues.apache.org/jira/browse/SINGA-32)  Implement AllReduce training framework
-    * [SINGA-57](https://issues.apache.org/jira/browse/SINGA-57)  Improve Distributed Hogwild
-
-
-  * Training algorithms for different model categories
-    * [SINGA-9](https://issues.apache.org/jira/browse/SINGA-9)  Add Support for Restricted Boltzman Machine (RBM) model
-    * [SINGA-10](https://issues.apache.org/jira/browse/SINGA-10)  Add Support for Recurrent Neural Networks (RNN)
-
-
-  * Checkpoint and restore
-    * [SINGA-12](https://issues.apache.org/jira/browse/SINGA-12)  Support Checkpoint and Restore
-
-
-  * Unit test
-    * [SINGA-64](https://issues.apache.org/jira/browse/SINGA-64)  Add the test module for utils/common
-
-
-  * Programming model
-    * [SINGA-36](https://issues.apache.org/jira/browse/SINGA-36)  Refactor job configuration, driver program and scripts
-    * [SINGA-37](https://issues.apache.org/jira/browse/SINGA-37)  Enable users to set parameter sharing in model configuration
-    * [SINGA-54](https://issues.apache.org/jira/browse/SINGA-54)  Refactor job configuration to move fields in ModelProto out
-    * [SINGA-55](https://issues.apache.org/jira/browse/SINGA-55)  Refactor main.cc and singa.h
-    * [SINGA-61](https://issues.apache.org/jira/browse/SINGA-61)  Support user defined classes
-    * [SINGA-65](https://issues.apache.org/jira/browse/SINGA-65)  Add an example of writing user-defined layers
-
-
-  * Other features
-    * [SINGA-6](https://issues.apache.org/jira/browse/SINGA-6)  Implement thread-safe singleton
-    * [SINGA-18](https://issues.apache.org/jira/browse/SINGA-18)  Update API for displaying performance metric
-    * [SINGA-77](https://issues.apache.org/jira/browse/SINGA-77)  Integrate with Apache RAT
-
-
-Some bugs are fixed during the development of this release
-
-  * [SINGA-2](https://issues.apache.org/jira/browse/SINGA-2) Check failed: zsock_connect
-  * [SINGA-5](https://issues.apache.org/jira/browse/SINGA-5) Server early terminate when zookeeper singa folder is not initially empty
-  * [SINGA-15](https://issues.apache.org/jira/browse/SINGA-15) Fix a bug in the ConnectStub function, which gets stuck when connecting layer_dealer_
-  * [SINGA-22](https://issues.apache.org/jira/browse/SINGA-22) Cannot find openblas library when it is installed in default path
-  * [SINGA-23](https://issues.apache.org/jira/browse/SINGA-23) Libtool version mismatch error.
-  * [SINGA-28](https://issues.apache.org/jira/browse/SINGA-28) Fix a bug from topology sort of Graph
-  * [SINGA-42](https://issues.apache.org/jira/browse/SINGA-42) Issue when loading checkpoints
-  * [SINGA-44](https://issues.apache.org/jira/browse/SINGA-44) Fix a bug when resetting metric values
-  * [SINGA-46](https://issues.apache.org/jira/browse/SINGA-46) Fix a bug in updater.cc to scale the gradients
-  * [SINGA-47](https://issues.apache.org/jira/browse/SINGA-47) Fix a bug in data layers that leads to out-of-memory when group size is too large
-  * [SINGA-48](https://issues.apache.org/jira/browse/SINGA-48) Fix a bug in trainer.cc that assigns the same NeuralNet instance to workers from diff groups
-  * [SINGA-49](https://issues.apache.org/jira/browse/SINGA-49) Fix a bug in HandlePutMsg func that sets param fields to invalid values
-  * [SINGA-66](https://issues.apache.org/jira/browse/SINGA-66) Fix bugs in Worker::RunOneBatch function and ClusterProto
-  * [SINGA-79](https://issues.apache.org/jira/browse/SINGA-79) Fix bug in singatool that can not parse -conf flag
-
-
-Features planned for the next release
-
-  * [SINGA-11](https://issues.apache.org/jira/browse/SINGA-11) Start SINGA using Mesos
-  * [SINGA-31](https://issues.apache.org/jira/browse/SINGA-31) Extend Blob to support xpu (cpu or gpu)
-  * [SINGA-35](https://issues.apache.org/jira/browse/SINGA-35) Add random number generators
-  * [SINGA-40](https://issues.apache.org/jira/browse/SINGA-40) Support sparse Param update
-  * [SINGA-41](https://issues.apache.org/jira/browse/SINGA-41) Support single node single GPU training
-
diff --git a/doc/en/releases/RELEASE_NOTES_0.2.0.md b/doc/en/releases/RELEASE_NOTES_0.2.0.md
deleted file mode 100644
index f2133e3..0000000
--- a/doc/en/releases/RELEASE_NOTES_0.2.0.md
+++ /dev/null
@@ -1,84 +0,0 @@
-# singa-incubating-0.2.0 Release Notes
-
----
-
-SINGA is a general distributed deep learning platform for training big deep
-learning models over large datasets. It is designed with an intuitive
-programming model based on the layer abstraction. SINGA supports a wide variety
-of popular deep learning models.
-
-This release includes the following **major features**:
-
-* [Training on GPU](../docs/gpu.html) enables training of complex models on a single node with multiple GPU cards.
-* [Hybrid neural net partitioning](../docs/hybrid.html) supports data and model parallelism at the same time.
-* [Python wrapper](../docs/python.html) makes it easy to configure the job, including neural net and SGD algorithm.
-* [RNN model and BPTT algorithm](../docs/general-rnn.html) are implemented to support applications based on RNN models, e.g., GRU.
-* [Cloud software integration](../docs/distributed-training.md) includes Mesos, Docker and HDFS.
-
-
-**More details** are listed as follows,
-
-  * Programming model
-    * [SINGA-80] New Blob Level and Address Level Math Operation Interface
-    * [SINGA-82] Refactor input layers using data store abstraction
-    * [SINGA-87] Replace exclude field to include field for layer configuration
-    * [SINGA-110] Add Layer member datavec_ and gradvec_
-    * [SINGA-120] Implemented GRU and BPTT (BPTTWorker)
-
-
-  * Neuralnet layers
-    * [SINGA-91] Add SoftmaxLayer and ArgSortLayer
-    * [SINGA-106] Add dummy layer for test purpose
-    * [SINGA-120] Implemented GRU and BPTT (GRULayer and OneHotLayer)
-
-
-  * GPU training support
-    * [SINGA-100] Implement layers using CUDNN for GPU training
-    * [SINGA-104] Add Context Class
-    * [SINGA-105] Update GNU make files for compiling CUDA-related code
-    * [SINGA-98] Add Support for AlexNet ImageNet Classification Model
-
-
-  * Model/Hybrid partition
-    * [SINGA-109] Refine bridge layers
-    * [SINGA-111] Add slice, concate and split layers
-    * [SINGA-113] Model/Hybrid Partition Support
-
-
-  * Python binding
-    * [SINGA-108] Add Python wrapper to singa
-
-
-  * Predict-only mode
-    * [SINGA-85] Add functions for extracting features and test new data
-
-
-  * Integrate with third-party tools
-    * [SINGA-11] Start SINGA on Apache Mesos
-    * [SINGA-78] Use Doxygen to generate documentation
-    * [SINGA-89] Add Docker support
-
-
-  * Unit test
-    * [SINGA-95] Add make test after building
-
-
-  * Other improvements
-    * [SINGA-84] Header Files Rearrange
-    * [SINGA-93] Remove the asterisk in the log tcp://169.254.12.152:*:49152
-    * [SINGA-94] Move call to google::InitGoogleLogging() from Driver::Init() to main()
-    * [SINGA-96] Add Momentum to Cifar10 Example
-    * [SINGA-101] Add ll (ls -l) command in .bashrc file when using docker
-    * [SINGA-114] Remove short logs in tmp directory
-    * [SINGA-115] Print layer debug information in the neural net graph file
-    * [SINGA-118] Make protobuf LayerType field id easy to assign
-    * [SINGA-97] Add HDFS Store
-
-
-  * Bugs fixed
-    * [SINGA-85] Fix compilation errors in examples
-    * [SINGA-90] Miscellaneous trivial bug fixes
-    * [SINGA-107] Error from loading pre-trained params for training stacked RBMs
-    * [SINGA-116] Fix a bug in InnerProductLayer caused by weight matrix sharing
-
-
diff --git a/doc/en/releases/RELEASE_NOTES_0.3.0.md b/doc/en/releases/RELEASE_NOTES_0.3.0.md
deleted file mode 100644
index 4298aa6..0000000
--- a/doc/en/releases/RELEASE_NOTES_0.3.0.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# singa-incubating-0.3.0 Release Notes
-
----
-
-SINGA is a general distributed deep learning platform for training big deep
-learning models over large datasets. It is designed with an intuitive
-programming model based on the layer abstraction. SINGA supports a wide variety
-of popular deep learning models.
-
-This release includes following features:
-
-  * GPU Support
-    * [SINGA-131] Implement and optimize hybrid training using both CPU and GPU
-    * [SINGA-136] Support cuDNN v4
-    * [SINGA-134] Extend SINGA to run over a GPU cluster
-    * [SINGA-157] Change the priority of cudnn library and install libsingagpu.so
-
-  * Remove Dependences
-    * [SINGA-156] Remove the dependency on ZMQ for single process training
-    * [SINGA-155] Remove zookeeper for single-process training
-
-  * Python Binding
-    * [SINGA-126] Python Binding for Interactive Training
-
-  * Other Improvements
-    * [SINGA-80] New Blob Level and Address Level Math Operation Interface
-    * [SINGA-130] Data Prefetching
-    * [SINGA-145] New SGD based optimization Updaters: AdaDelta, Adam, AdaMax
-
-  * Bugs Fixed
-    * [SINGA-148] Race condition between Worker threads and Driver
-    * [SINGA-150] Mesos Docker container failed
-    * [SINGA-141] Undesired Hash collision when locating process id to worker…
-    * [SINGA-149] Docker build fail
-    * [SINGA-143] The compilation cannot detect libsingagpu.so file
-
-
diff --git a/doc/en/releases/RELEASE_NOTES_1.0.0.md b/doc/en/releases/RELEASE_NOTES_1.0.0.md
deleted file mode 100644
index dde2c63..0000000
--- a/doc/en/releases/RELEASE_NOTES_1.0.0.md
+++ /dev/null
@@ -1,91 +0,0 @@
-# singa-incubating-1.0.0 Release Notes
-
----
-
-SINGA is a general distributed deep learning platform for training big deep
-learning models over large datasets. It is designed with an intuitive
-programming model based on the layer abstraction. SINGA supports a wide variety
-of popular deep learning models.
-
-This release includes following features:
-
-  * Core abstractions including Tensor and Device
-      * [SINGA-207]  Update Tensor functions for matrices
-      * [SINGA-205]  Enable slice and concatenate operations for Tensor objects
-      * [SINGA-197]  Add CNMem as a submodule in lib/
-      * [SINGA-196]  Rename class Blob to Block
-      * [SINGA-194]  Add a Platform singleton
-      * [SINGA-175]  Add memory management APIs and implement a subclass using CNMeM
-      * [SINGA-173]  OpenCL Implementation
-      * [SINGA-171]  Create CppDevice and CudaDevice
-      * [SINGA-168]  Implement Cpp Math functions APIs
-      * [SINGA-162]  Overview of features for V1.x
-      * [SINGA-165]  Add cross-platform timer API to singa
-      * [SINGA-167]  Add Tensor Math function APIs
-      * [SINGA-166]  light built-in logging for making glog optional
-      * [SINGA-164]  Add the base Tensor class
-
-
-  * IO components for file read/write, network and data pre-processing
-      * [SINGA-233]  New communication interface
-      * [SINGA-215]  Implement Image Transformation for Image Pre-processing
-      * [SINGA-214]  Add LMDBReader and LMDBWriter for LMDB
-      * [SINGA-213]  Implement Encoder and Decoder for CSV
-      * [SINGA-211]  Add TextFileReader and TextFileWriter for CSV files
-      * [SINGA-210]  Enable checkpoint and resume for v1.0
-      * [SINGA-208]  Add DataIter base class and a simple implementation
-      * [SINGA-203]  Add OpenCV detection for cmake compilation
-      * [SINGA-202]  Add reader and writer for binary file
-      * [SINGA-200]  Implement Encoder and Decoder for data pre-processing
-
-
-
-  * Module components including layer classes, training algorithms and Python binding
-      * [SINGA-235]  Unify the engines for cudnn and singa layers
-      * [SINGA-230]  OpenCL Convolution layer and Pooling layer
-      * [SINGA-222]  Fixed bugs in IO
-      * [SINGA-218]  Implementation for RNN CUDNN version
-      * [SINGA-204]  Support the training of feed-forward neural nets
-      * [SINGA-199]  Implement Python classes for SGD optimizers
-      * [SINGA-198]  Change Layer::Setup API to include input Tensor shapes
-      * [SINGA-193]  Add Python layers
-      * [SINGA-192]  Implement optimization algorithms for SINGA v1 (Nesterov, AdaGrad, RMSProp)
-      * [SINGA-191]  Add "autotune" for CudnnConvolution Layer
-      * [SINGA-190]  Add prelu layer and flatten layer
-      * [SINGA-189]  Generate python outputs of proto files
-      * [SINGA-188]  Add Dense layer
-      * [SINGA-187]  Add popular parameter initialization methods
-      * [SINGA-186]  Create Python Tensor class
-      * [SINGA-184]  Add Cross Entropy loss computation
-      * [SINGA-183]  Add the base classes for optimizer, constraint and regularizer
-      * [SINGA-180]  Add Activation layer and Softmax layer
-      * [SINGA-178]  Add Convolution layer and Pooling layer
-      * [SINGA-176]  Add loss and metric base classes
-      * [SINGA-174]  Add Batch Normalization layer and Local Response Normalization layer.
-      * [SINGA-170]  Add Dropout layer and CudnnDropout layer.
-      * [SINGA-169]  Add base Layer class for V1.0
-
-
-  * Examples
-      * [SINGA-232]  Alexnet on Imagenet
-      * [SINGA-231]  Batch-normalized VGG model for cifar-10
-      * [SINGA-228]  Add Cpp Version of Convolution and Pooling layer
-      * [SINGA-227]  Add Split and Merge Layer and add ResNet Implementation
-
-  * Documentation
-      * [SINGA-239]  Transfer documentation files of v0.3.0 to github
-      * [SINGA-238]  RBM on mnist
-      * [SINGA-225]  Documentation for installation and Cifar10 example
-      * [SINGA-223]  Use Sphinx to create the website
-
-  * Tools for compilation and some utility code
-      * [SINGA-229]  Complete install targets
-      * [SINGA-221]  Support for Travis-CI
-      * [SINGA-217]  build python package with setup.py
-      * [SINGA-216]  add jenkins for CI support
-      * [SINGA-212]  Disable the compilation of libcnmem if USE_CUDA is OFF
-      * [SINGA-195]  Channel for sending training statistics
-      * [SINGA-185]  Add CBLAS and GLOG detection for singav1
-      * [SINGA-181]  Add NVCC supporting for .cu files
-      * [SINGA-177]  Add fully cmake supporting for the compilation of singa_v1
-      * [SINGA-172]  Add CMake supporting for Cuda and Cudnn libs
diff --git a/doc/en/releases/RELEASE_NOTES_1.1.0.md b/doc/en/releases/RELEASE_NOTES_1.1.0.md
deleted file mode 100644
index 75d086d..0000000
--- a/doc/en/releases/RELEASE_NOTES_1.1.0.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# singa-incubating-1.1.0 Release Notes
-
----
-
-SINGA is a general distributed deep learning platform for training big deep
-learning models over large datasets.
-
-This release includes the following features:
-
-  * Core components
-      * [SINGA-296] Add sign and to_host function for pysinga tensor module
-
-  * Model components
-      * [SINGA-254] Implement Adam for V1
-      * [SINGA-264] Extend the FeedForwardNet to accept multiple inputs
-      * [SINGA-267] Add spatial mode in batch normalization layer
-      * [SINGA-271] Add Concat and Slice layers
-      * [SINGA-275] Add Cross Entropy Loss for multiple labels
-      * [SINGA-278] Convert trained caffe parameters to singa
-      * [SINGA-287] Add memory size check for cudnn convolution
-
-  * Utility functions and CI
-      * [SINGA-242] Compile all source files into a single library.
-      * [SINGA-244] Separating swig interface and python binding files
-      * [SINGA-246] Imgtool for image augmentation
-      * [SINGA-247] Add windows support for singa
-      * [SINGA-251] Implement image loader for pysinga
-      * [SINGA-252] Use the snapshot methods to dump and load models for pysinga
-      * [SINGA-255] Compile mandatory dependent libraries together with SINGA code
-      * [SINGA-259] Add maven pom file for building java classes
-      * [SINGA-261] Add version ID into the checkpoint files
-      * [SINGA-266] Add Rafiki python toolkits
-      * [SINGA-273] Improve license and contributions
-      * [SINGA-284] Add python unittest into Jenkins and link static libs into whl file
-      * [SINGA-280] Jenkins CI support
-      * [SINGA-288] Publish wheel of PySINGA generated by Jenkins to public servers
-
-  * Documentation and usability
-      * [SINGA-263] Create Amazon Machine Image
-      * [SINGA-268] Add IPython notebooks to the documentation
-      * [SINGA-276] Create docker images
-      * [SINGA-289] Update SINGA website automatically using Jenkins
-      * [SINGA-295] Add an example of image classification using GoogleNet
-
-  * Bugs fixed
-      * [SINGA-245] float as the first operand cannot multiply with a tensor object
-      * [SINGA-293] Bug from compiling PySINGA on Mac OS X with multiple versions of Python
-
-
diff --git a/python/rafiki/__init__.py b/doc/environment.yml
similarity index 70%
copy from python/rafiki/__init__.py
copy to doc/environment.yml
index 3aa745b..38b97fb 100644
--- a/python/rafiki/__init__.py
+++ b/doc/environment.yml
@@ -1,4 +1,7 @@
 #
+# singa documentation build configuration file, created by
+# sphinx-quickstart on Sat Jul  9 20:36:57 2016.
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -7,13 +10,19 @@
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#      http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
-__version__ = "0.1.1"
+
+name: singa
+channels:
+  - shicong
+  - conda-forge
+dependencies:
+  - singa=3.0.0.rc0=cpu_py37 [--no-deps]
+  - python_abi=3.7 [--no-deps]
\ No newline at end of file
diff --git a/doc/en/docs/index.rst b/doc/index.rst
similarity index 67%
rename from doc/en/docs/index.rst
rename to doc/index.rst
index ee78290..3e99a14 100644
--- a/doc/en/docs/index.rst
+++ b/doc/index.rst
@@ -15,25 +15,24 @@
    specific language governing permissions and limitations
    under the License.
 
+Python APIs for Apache SINGA
+============================
 
-Documentation
-=============
+This website serves as the reference for the Python APIs of Apache SINGA.
+Apache SINGA is a library for distributed deep learning.
+The website of Apache SINGA is at http://singa.apache.org.
 
 .. toctree::
 
-   installation
-   software_stack
    device
    tensor
-   layer
-   net
-   initializer
-   loss
-   metric
-   optimizer
-   data
-   image_tool
-   snapshot
-   converter
+   autograd
+   onnx
+   module
+   opt
    utils
-   model_zoo/index
+
+License
+----------
+SINGA is released under `Apache License Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>`_.
diff --git a/doc/en/docs/loss.rst b/doc/module.rst
similarity index 96%
rename from doc/en/docs/loss.rst
rename to doc/module.rst
index 18c587a..df3753a 100644
--- a/doc/en/docs/loss.rst
+++ b/doc/module.rst
@@ -20,6 +20,6 @@
 =========
 
 
-.. automodule:: singa.loss
+.. automodule:: singa.module
    :members:
    :show-inheritance:
diff --git a/doc/notebook/index.ipynb b/doc/notebook/index.ipynb
deleted file mode 100644
index f4e1e49..0000000
--- a/doc/notebook/index.ipynb
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "![Apache Singa](http://singa.apache.org/en/_static/singa.png)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Welcome to this tutorial for Apache Incubator-singa using Jupyter Notebook.\n",
-    "\n",
-    "Please install [PySINGA](http://singa.apache.org/en/docs/installation.html#install-pysinga) before running these tutorials.\n",
-    "\n",
-    "1. [Regression](../en/docs/notebook/regression.ipynb )\n",
-    "\n",
-    "2. [MLP Tutorial](../en/docs/notebook/mlp.ipynb)\n",
-    "\n",
-    "3. [RBM Tutorial](../en/docs/notebook/rbm.ipynb)\n",
-    "\n",
-    "\n",
-    "To learn more about Jupyter, please check [IPython in Depth](https://www.youtube.com/watch?v=xe_ATRmw0KM).\n",
-    "\n",
-    "If you want to use PySINGA and jupyter notebooks in virtual environment, please use conda virtual environment and install the following extension. Then you can select the kernel of the virtual environment in the browser. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "conda install nb_conda_kernel"
-   ]
-  }
- ],
- "metadata": {
-  "anaconda-cloud": {},
-  "kernelspec": {
-   "display_name": "Python [default]",
-   "language": "python",
-   "name": "python2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.12"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/doc/en/docs/net.rst b/doc/onnx.rst
similarity index 92%
rename from doc/en/docs/net.rst
rename to doc/onnx.rst
index 7aff364..7d76de5 100644
--- a/doc/en/docs/net.rst
+++ b/doc/onnx.rst
@@ -16,11 +16,10 @@
    under the License.
 
 
-FeedForward Net
+ONNX
 ===============
 
-.. automodule:: singa.net
+.. automodule:: singa.sonnx
    :members:
    :member-order: bysource
-   :show-inheritance:
    :undoc-members:
diff --git a/doc/en/docs/optimizer.rst b/doc/opt.rst
similarity index 95%
rename from doc/en/docs/optimizer.rst
rename to doc/opt.rst
index e6f1da9..1f3165a 100644
--- a/doc/en/docs/optimizer.rst
+++ b/doc/opt.rst
@@ -20,7 +20,7 @@
 =========
 
 
-.. automodule:: singa.optimizer
+.. automodule:: singa.opt
    :members:
    :member-order: bysource
    :show-inheritance:
diff --git a/doc/en/docs/snapshot.rst b/doc/snapshot.rst
similarity index 100%
rename from doc/en/docs/snapshot.rst
rename to doc/snapshot.rst
diff --git a/doc/en/docs/tensor.rst b/doc/tensor.rst
similarity index 98%
rename from doc/en/docs/tensor.rst
rename to doc/tensor.rst
index d9e7f18..5a2898b 100644
--- a/doc/en/docs/tensor.rst
+++ b/doc/tensor.rst
@@ -42,7 +42,3 @@
 
 .. automodule:: singa.tensor
    :members:
-
-
-CPP API
----------
diff --git a/doc/en/docs/utils.rst b/doc/utils.rst
similarity index 100%
rename from doc/en/docs/utils.rst
rename to doc/utils.rst
diff --git a/doc/zh/index.rst b/doc/zh/index.rst
deleted file mode 100644
index 2707001..0000000
--- a/doc/zh/index.rst
+++ /dev/null
@@ -1,120 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
-
-SINGA Documentation (Chinese)
-==============================
-
-
-Latest News
------------
-
-* **Version 1.1.0** was released on 12 Feb 2017. `Download page <../en/downloads.html>`_
-
-* A talk on SINGA V1 was given at `SGInnovate <https://www.eventbrite.sg/e/ai-eveningssginnovate-apache-singa-tickets-31505061487>`_ on 23 March 2017
-
-* **Version 1.0.0** was released on 9 Sep 2016. `Download page <../en/downloads.html>`_
-
-* We presented the latest progress of SINGA at `REWORK <https://www.re-work.co/events/deep-learning-singapore/schedule>`_ on 21 Oct 2016.
-
-* A talk on SINGA was given at `PyDataSG <http://www.meetup.com/PyData-SG/events/229691286/>`_ on 16 Aug 2016.
-
-* **Version 0.3.0** was released on 20 April 2016. `Download page <../en/downloads.html>`_
-
-* **Version 0.2.0** was released on 14 Jan 2016. `Download page <../en/downloads.html>`_.
-
-* We gave a talk on SINGA at `Strata+Hadoop <http://strataconf.com/big-data-conference-sg-2015/public/schedule/detail/45123>`_ on 2 Dec 2015
-
-* We presented SINGA in the Best Paper and Open Source Software Competition sessions of `ACM Multimedia <http://www.acmmm.org/2015/at-a-glance/>`_, 26-30 Oct 2015 (`slides <http://www.comp.nus.edu.sg/~dbsystem/singa//assets/file/mm2015.ppt>`_)
-
-* **Version 0.1** was released on 8 Oct 2015. `Download page for SINGA v0.1.0 <../en/downloads.html>`_.
-
-* We organized a `workshop on deep learning <http://www.comp.nus.edu.sg/~dbsystem/singa/workshop>`_ and presented SINGA there, 16 Sep 2015
-
-* SINGA was presented at `BOSS <http://boss.dima.tu-berlin.de/>`_, the open source software workshop held with VLDB 2015, on 4 Sep 2015 (`overview <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/singa-vldb-boss.pptx>`_, `basic <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/basic-user-guide.pptx>`_, `advanced <http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/advanced-user-guide.pptx>`_)
-
-* We gave a tutorial on SINGA at the VLDB Summer School, July 2015, Tsinghua University.
-
-* We presented SINGA at `DanaC <http://danac.org/>`_, a workshop of `SIGMOD 2015 <http://www.sigmod2015.org/index.shtml>`_, 31 May - 4 June 2015, Melbourne.
-
-* SINGA joined the `Apache Incubator <http://incubator.apache.org/>`_ on 17 March 2015.
-
-Getting Started with SINGA
---------------------------
-* Try SINGA without installation on `AWS <https://aws.amazon.com/marketplace/pp/B01NAUAWZW>`_ or via `Docker <https://hub.docker.com/r/nusdbsystem/singa/>`_.
-
-* Install SINGA via `conda <../en/docs/installation.html#from-conda>`_, `Debian packages <../en/docs/installation.html#from-debian-package>`_, or from `source <../en/docs/installation.html#from-source>`_.
-
-* For more usage and examples, see the `Jupyter notebooks <http://nbviewer.jupyter.org/github/apache/incubator-singa/blob/master/doc/en/docs/notebook/index.ipynb>`_
-
-.. |logo1| image:: _static/github.png
-   :scale: 100%
-   :align: middle
-   :target: https://github.com/apache/incubator-singa
-.. |logo2| image:: _static/awsmp.gif 
-   :scale: 100%
-   :align: middle
-   :target: https://aws.amazon.com/marketplace/seller-profile?id=5bcac385-12c4-4802-aec7-351e09b77b4c
-.. |logo3| image:: _static/docker.png
-   :scale: 50%
-   :align: middle
-   :target: https://hub.docker.com/r/nusdbsystem/singa/
-.. |logo4| image:: _static/jupyter.png
-   :scale: 25%
-   :align: middle
-   :target: http://nbviewer.jupyter.org/github/apache/incubator-singa/blob/master/doc/en/docs/notebook/index.ipynb
-
-+---------+---------+---------+---------+
-| |logo1| | |logo2| | |logo3| | |logo4| |
-+---------+---------+---------+---------+
-
-.. toctree::
-   :hidden:
-
-   downloads
-   docs/index
-
-.. toctree::
-   :hidden:
-   :maxdepth: 2
-   :caption: Development
-
-   develop/schedule
-   develop/how-contribute
-   develop/contribute-code
-   develop/contribute-docs
-
-.. toctree::
-   :hidden:
-   :maxdepth: 2
-   :caption: Community
-
-   community/source-repository
-   community/mail-lists
-   community/issue-tracking
-   community/team-list
-
-
-
-License
-----------
-SINGA is released under `Apache License Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>`_.
-
-Disclaimers
------------
-
-Apache SINGA is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by the Apache Incubator. Incubation is required of all newly accepted projects until a further review indicates that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. While incubation status is not necessarily a reflection of the completeness or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index f372692..e7297b6 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -16,5 +16,5 @@
 # limitations under the License.
 #
 
-ADD_SUBDIRECTORY(cifar10)
-ADD_SUBDIRECTORY(imagenet/alexnet)
+ADD_SUBDIRECTORY(cpp/imagenet)
+ADD_SUBDIRECTORY(cpp/cifar10)
diff --git a/examples/caffe/README.md b/examples/caffe/README.md
deleted file mode 100644
index 92c9325..0000000
--- a/examples/caffe/README.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# Use parameters pre-trained from Caffe in SINGA
-
-In this example, we use SINGA to load the VGG parameters trained by Caffe to do image classification.
-
-## Run this example
-You can run this example by simply executing `run.sh vgg16` or `run.sh vgg19`.
-The script does the following work.
-
-### Obtain the Caffe model
-* Download the caffe model prototxt and the parameter binary file.
-* Currently we only support the latest caffe format; if your model is in a
-    previous version of caffe, please update it to the current format (caffe
-    provides tools for this).
-* After updating, we obtain two files, i.e., the prototxt and the parameter
-    binary file.
-
-### Prepare test images
-A few sample images are downloaded into the `test` folder.
-
-### Predict
-The `predict.py` script creates the VGG model and reads the parameters,
-
-    usage: predict.py [-h] model_txt model_bin imgclass
-
-where `imgclass` refers to the synsets of imagenet dataset for vgg models.
-You can start the prediction program by executing the following command:
-
-    python predict.py vgg16.prototxt vgg16.caffemodel synset_words.txt
-
-Then type in the image path, and the program will output the top-5 labels.
-
-More Caffe models would be tested soon.
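
The top-5 step the README describes reduces to sorting softmax probabilities. Below is a minimal numpy sketch of that selection; the label list here is a hypothetical stand-in for the contents of synset_words.txt:

    import numpy as np

    def topk_labels(prob, labels, k=5):
        # indices of the k largest probabilities, best first
        order = np.argsort(-prob)[:k]
        return [labels[i] for i in order]

    labels = ['tabby cat', 'tiger cat', 'Egyptian cat', 'beagle', 'goldfish', 'ostrich']
    prob = np.asarray([0.45, 0.25, 0.15, 0.08, 0.05, 0.02], dtype=np.float32)
    print(topk_labels(prob, labels))  # the five most likely labels
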
diff --git a/examples/caffe/predict.py b/examples/caffe/predict.py
deleted file mode 100644
index 663cd87..0000000
--- a/examples/caffe/predict.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-import numpy as np
-import os
-import argparse
-from PIL import Image
-
-from singa import device
-from singa import tensor
-from singa import converter
-from singa import layer
-from singa import net
-
-#for debug: print norm of each layer
-#net.verbose = True
-
-
-def convert_model(prototxt, caffemodel):
-    cvt = converter.CaffeConverter(net_proto=prototxt, param_path=caffemodel)
-    model = cvt.create_net()
-    cvt.convert_params(model)
-    return model
-
-
-def check_path(path):
-    assert os.path.exists(path), 'File not found: ' + path
-
-
-def read_image(img_path):
-    # According to the VGG paper (Very Deep Convolutional Networks for
-    # Large-Scale Image Recognition), the input images are zero-centered by
-    # mean pixel (rather than mean image) subtraction.
-    mean_RGB = [123.68, 116.779, 103.939]
-
-    img = Image.open(img_path)
-    img = img.convert('RGB')
-    resized = img.resize((224, 224))
-    # order of axes: width,height,channel
-    img_ary = np.asarray(resized, dtype=np.float32)
-    img_ary -= mean_RGB
-    # HWC -> CHW
-    img_ary = np.swapaxes(img_ary, 0, 2)
-    return np.asarray(img_ary)
-
-
-def predict(net, dev, synset_list, topk=5):
-    '''Predict the label of each image.
-
-    Args:
-        net, a pretrained neural net
-        dev, the device on which the net runs
-        synset_list, the synset of labels
-        topk, return the topk labels for each image.
-    '''
-    while True:
-        img_path = raw_input("Enter input image path('quit' to exit): ")
-        if img_path == 'quit':
-            return
-        if not os.path.exists(img_path):
-            print 'Path is invalid'
-            continue
-        img = read_image(img_path)
-        x = tensor.from_numpy(img.astype(np.float32)[np.newaxis,:])
-        x.to_device(dev)
-        y = net.predict(x)
-        y.to_host()
-        prob = tensor.to_numpy(y)
-        lbl = np.argsort(-prob[0])  # sort prob in descending order
-        print [synset_list[lbl[i]] for i in range(topk)]
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description='Convert caffe vgg into singa. \
-            This tool only supports the caffe model format as of 29-Nov-2016. \
-            You can use the caffe tool to update models in older formats.')
-    parser.add_argument('model_txt', default='./vgg16.prototxt')
-    parser.add_argument('model_bin', default='./vgg16.caffemodel')
-    parser.add_argument('imgclass', default='./synset_words.txt')
-    args = parser.parse_args()
-
-    check_path(args.model_txt)
-    check_path(args.model_bin)
-    check_path(args.imgclass)
-
-    model = convert_model(args.model_txt, args.model_bin)
-    dev = device.get_default_device()
-    model.to_device(dev)
-
-    with open(args.imgclass, 'r') as fd:
-        syn_li = [line.split(' ', 1)[1].strip('\n') for line in fd.readlines()]
-
-    predict(model, dev, synset_list=syn_li, topk=5)
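
The read_image function above pairs VGG-style mean-pixel subtraction with a channel axis swap. A minimal numpy sketch of the same preprocessing, with a random array standing in for a decoded 224x224 RGB image:

    import numpy as np

    mean_RGB = np.asarray([123.68, 116.779, 103.939], dtype=np.float32)
    img = np.random.randint(0, 256, size=(224, 224, 3)).astype(np.float32)  # stand-in image
    img -= mean_RGB                # zero-center each channel by its mean pixel value
    img = np.swapaxes(img, 0, 2)   # move channels to the front: shape (3, 224, 224)
    print(img.shape)
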
diff --git a/examples/caffe/run.sh b/examples/caffe/run.sh
deleted file mode 100755
index 031d678..0000000
--- a/examples/caffe/run.sh
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-if [[ $# -ne 1 ]]; then
-    echo "usage: $0 model_name"
-    echo "   model_name: [vgg16|vgg19], ..."
-    exit -1
-fi
-
-if [[ $1 == "vgg19" ]]; then
-    echo "Downloading label list..."
-    if [[ ! -f synset_words.txt ]]; then
-        wget -c https://www.dropbox.com/s/qe66xwwc78q7fe5/synset_words.txt?dl=0 -O synset_words.txt
-    fi
-    echo "Downloading vgg19..."
-    if [[ ! -f vgg19.prototxt ]]; then
-        wget -c https://www.dropbox.com/s/ehi6dxxp3s1rl8t/vgg19.prototxt?dl=0 -O vgg19.prototxt
-    fi
-
-    if [[ ! -f vgg19.caffemodel ]]; then
-        wget -c https://www.dropbox.com/s/y8ksbfp3iq0kvdn/vgg19.caffemodel?dl=0 -O vgg19.caffemodel
-    fi
-    echo "Downloading test images..."
-    if [[ ! -d test ]]; then
-        wget -c https://www.dropbox.com/s/ch5ktahijwof6ka/test.tar.gz?dl=0 -O test.tar.gz
-        tar -zxvf test.tar.gz
-    fi
-    echo "Converting..."
-    python predict.py ./vgg19.prototxt ./vgg19.caffemodel ./synset_words.txt
-
-elif [[ $1 == "vgg16" ]]; then
-    echo "Downloading label list..."
-    if [[ ! -f synset_words.txt ]]; then
-        wget -c https://www.dropbox.com/s/qe66xwwc78q7fe5/synset_words.txt?dl=0 -O synset_words.txt
-    fi
-    echo "Downloading vgg16..."
-    if [[ ! -f vgg16.prototxt ]]; then
-        wget -c https://www.dropbox.com/s/ilpt58tle8jqtxj/vgg16.prototxt?dl=0 -O vgg16.prototxt
-    fi
-
-    if [[ ! -f vgg16.caffemodel ]]; then
-        wget -c https://www.dropbox.com/s/3qidow3qr77ruob/vgg16.caffemodel?dl=0 -O vgg16.caffemodel
-    fi
-    echo "Downloading test images..."
-    if [[ ! -d test ]]; then
-        wget -c https://www.dropbox.com/s/ch5ktahijwof6ka/test.tar.gz?dl=0 -O test.tar.gz
-        tar -zxvf test.tar.gz
-    fi
-    echo "Converting..."
-    python predict.py ./vgg16.prototxt ./vgg16.caffemodel ./synset_words.txt
-else
-    echo "unsupported model: $1"
-fi
diff --git a/examples/char-rnn/README.md b/examples/char-rnn/README.md
deleted file mode 100644
index dcaf652..0000000
--- a/examples/char-rnn/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Train Char-RNN over plain text
-
-Recurrent neural networks (RNNs) are widely used for modelling sequential data,
-e.g., natural language sentences. This example describes how to implement an RNN
-application (or model) using SINGA's RNN layers.
-We will use the [char-rnn](https://github.com/karpathy/char-rnn) model as an
-example, which trains over sentences or source code, with each character as an
-input unit. In particular, we will train an RNN using GRU over the Linux kernel
-source code. After training, we expect the model to generate meaningful code.
-
-
-## Instructions
-
-* Compile and install SINGA. Currently the RNN implementation depends on cuDNN version >= 5.05.
-
-* Prepare the dataset. Download the [kernel source code](http://cs.stanford.edu/people/karpathy/char-rnn/).
-Other plain text files can also be used.
-
-* Start the training,
-
-        python train.py linux_input.txt
-
-  Some hyper-parameters could be set through command line,
-
-        python train.py -h
-
-* Sample characters from the model by providing the number of characters to sample and the seed string.
-
-        python sample.py 'model.bin' 100 --seed '#include <std'
-
-  Please replace 'model.bin' with the path to one of the checkpoint files.
-
diff --git a/examples/char-rnn/sample.py b/examples/char-rnn/sample.py
deleted file mode 100644
index 9b6e757..0000000
--- a/examples/char-rnn/sample.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-'''Sample characters from the pre-trained model'''
-import sys
-import cPickle as pickle
-import numpy as np
-import argparse
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-from singa import layer
-from singa import tensor
-from singa import device
-from singa.proto import model_pb2
-
-
-def sample(model_path, nsamples=100, seed_text='', do_sample=True):
-    with open(model_path, 'rb') as fd:
-        d = pickle.load(fd)
-        rnn_w = tensor.from_numpy(d['rnn_w'])
-        idx_to_char = d['idx_to_char']
-        char_to_idx = d['char_to_idx']
-        vocab_size = len(idx_to_char)
-        dense_w = tensor.from_numpy(d['dense_w'])
-        dense_b = tensor.from_numpy(d['dense_b'])
-        hidden_size = d['hidden_size']
-        num_stacks = d['num_stacks']
-        dropout = d['dropout']
-
-    cuda = device.create_cuda_gpu()
-    rnn = layer.LSTM(name='lstm', hidden_size=hidden_size,
-                     num_stacks=num_stacks, dropout=dropout,
-                     input_sample_shape=(len(idx_to_char),))
-    rnn.to_device(cuda)
-    rnn.param_values()[0].copy_data(rnn_w)
-    dense = layer.Dense('dense', vocab_size, input_sample_shape=(hidden_size,))
-    dense.to_device(cuda)
-    dense.param_values()[0].copy_data(dense_w)
-    dense.param_values()[1].copy_data(dense_b)
-    hx = tensor.Tensor((num_stacks, 1, hidden_size), cuda)
-    cx = tensor.Tensor((num_stacks, 1, hidden_size), cuda)
-    hx.set_value(0.0)
-    cx.set_value(0.0)
-    if len(seed_text) > 0:
-        for c in seed_text:
-            x = np.zeros((1, vocab_size), dtype=np.float32)
-            x[0, char_to_idx[c]] = 1
-            tx = tensor.from_numpy(x)
-            tx.to_device(cuda)
-            inputs = [tx, hx, cx]
-            outputs = rnn.forward(model_pb2.kEval, inputs)
-            y = dense.forward(model_pb2.kEval, outputs[0])
-            y = tensor.softmax(y)
-            hx = outputs[1]
-            cx = outputs[2]
-        sys.stdout.write(seed_text)
-    else:
-        y = tensor.Tensor((1, vocab_size), cuda)
-        y.set_value(1.0 / vocab_size)
-
-    for i in range(nsamples):
-        y.to_host()
-        prob = tensor.to_numpy(y)[0]
-        if do_sample:
-            cur = np.random.choice(vocab_size, 1, p=prob)[0]
-        else:
-            cur = np.argmax(prob)
-        sys.stdout.write(idx_to_char[cur])
-        x = np.zeros((1, vocab_size), dtype=np.float32)
-        x[0, cur] = 1
-        tx = tensor.from_numpy(x)
-        tx.to_device(cuda)
-        inputs = [tx, hx, cx]
-        outputs = rnn.forward(model_pb2.kEval, inputs)
-        y = dense.forward(model_pb2.kEval, outputs[0])
-        y = tensor.softmax(y)
-        hx = outputs[1]
-        cx = outputs[2]
-    print ''
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='sample chars from char-rnn')
-    parser.add_argument('model', help='the model checkpoint file')
-    parser.add_argument('n', type=int, help='num of characters to sample')
-    parser.add_argument('--seed', help='seed text string which warms up the '
-                        'rnn states for sampling', default='')
-    args = parser.parse_args()
-    assert args.n > 0, 'n must be > 0'
-    sample(args.model, args.n, seed_text=args.seed)
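
sample.py above supports two decoding modes: drawing the next character from the softmax distribution (do_sample=True) or greedily taking the argmax. A minimal sketch of the difference on a toy distribution:

    import numpy as np

    prob = np.asarray([0.7, 0.2, 0.1])
    greedy = int(np.argmax(prob))                       # always index 0
    sampled = int(np.random.choice(len(prob), p=prob))  # index 0 about 70% of the time
    print(greedy, sampled)
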
diff --git a/examples/char-rnn/train.py b/examples/char-rnn/train.py
deleted file mode 100644
index d28646e..0000000
--- a/examples/char-rnn/train.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-'''Train a Char-RNN model using plain text files.
-The model is created following https://github.com/karpathy/char-rnn
-The train file could be any text file,
-e.g., http://cs.stanford.edu/people/karpathy/char-rnn/
-'''
-import cPickle as pickle
-import numpy as np
-import argparse
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-from singa import layer
-from singa import loss
-from singa import device
-from singa import tensor
-from singa import optimizer
-from singa import initializer
-from singa.proto import model_pb2
-from singa import utils
-
-
-class Data(object):
-
-    def __init__(self, fpath, batch_size=32, seq_length=100, train_ratio=0.8):
-        '''Data object for loading a plain text file.
-
-        Args:
-            fpath, path to the text file.
-            train_ratio, split the text file into train and test sets, where
-                train_ratio of the characters are in the train set.
-        '''
-        self.raw_data = open(fpath, 'r').read()  # read text file
-        chars = list(set(self.raw_data))
-        self.vocab_size = len(chars)
-        self.char_to_idx = {ch: i for i, ch in enumerate(chars)}
-        self.idx_to_char = {i: ch for i, ch in enumerate(chars)}
-        data = [self.char_to_idx[c] for c in self.raw_data]
-        # seq_length + 1 for the data + label
-        nsamples = len(data) / (1 + seq_length)
-        data = data[0:nsamples * (1 + seq_length)]
-        data = np.asarray(data, dtype=np.int32)
-        data = np.reshape(data, (-1, seq_length + 1))
-        # shuffle all sequences
-        np.random.shuffle(data)
-        self.train_dat = data[0:int(data.shape[0]*train_ratio)]
-        self.num_train_batch = self.train_dat.shape[0] / batch_size
-        self.val_dat = data[self.train_dat.shape[0]:]
-        self.num_test_batch = self.val_dat.shape[0] / batch_size
-        print 'train dat', self.train_dat.shape
-        print 'val dat', self.val_dat.shape
-
-
-def numpy2tensors(npx, npy, dev):
-    '''batch, seq, dim -- > seq, batch, dim'''
-    tmpx = np.swapaxes(npx, 0, 1)
-    tmpy = np.swapaxes(npy, 0, 1)
-    inputs = []
-    labels = []
-    for t in range(tmpx.shape[0]):
-        x = tensor.from_numpy(tmpx[t])
-        y = tensor.from_numpy(tmpy[t])
-        x.to_device(dev)
-        y.to_device(dev)
-        inputs.append(x)
-        labels.append(y)
-    return inputs, labels
-
-
-def convert(batch, batch_size, seq_length, vocab_size, dev):
-    '''convert a batch of data into a sequence of input tensors'''
-    y = batch[:, 1:]
-    x1 = batch[:, :seq_length]
-    x = np.zeros((batch_size, seq_length, vocab_size), dtype=np.float32)
-    for b in range(batch_size):
-        for t in range(seq_length):
-            c = x1[b, t]
-            x[b, t, c] = 1
-    return numpy2tensors(x, y, dev)
-
-
-def get_lr(epoch):
-    return 0.001 / float(1 << (epoch / 50))
-
-
-def train(data, max_epoch, hidden_size=100, seq_length=100, batch_size=16,
-          num_stacks=1, dropout=0.5, model_path='model'):
-    # SGD with L2 gradient normalization
-    opt = optimizer.RMSProp(constraint=optimizer.L2Constraint(5))
-    cuda = device.create_cuda_gpu()
-    rnn = layer.LSTM(
-        name='lstm',
-        hidden_size=hidden_size,
-        num_stacks=num_stacks,
-        dropout=dropout,
-        input_sample_shape=(
-            data.vocab_size,
-        ))
-    rnn.to_device(cuda)
-    print 'created rnn'
-    rnn_w = rnn.param_values()[0]
-    rnn_w.uniform(-0.08, 0.08)  # init all rnn parameters
-    print 'rnn weight l1 = %f' % (rnn_w.l1())
-    dense = layer.Dense(
-        'dense',
-        data.vocab_size,
-        input_sample_shape=(
-            hidden_size,
-        ))
-    dense.to_device(cuda)
-    dense_w = dense.param_values()[0]
-    dense_b = dense.param_values()[1]
-    print 'dense w ', dense_w.shape
-    print 'dense b ', dense_b.shape
-    initializer.uniform(dense_w, dense_w.shape[0], 0)
-    print 'dense weight l1 = %f' % (dense_w.l1())
-    dense_b.set_value(0)
-    print 'dense b l1 = %f' % (dense_b.l1())
-
-    g_dense_w = tensor.Tensor(dense_w.shape, cuda)
-    g_dense_b = tensor.Tensor(dense_b.shape, cuda)
-
-    lossfun = loss.SoftmaxCrossEntropy()
-    for epoch in range(max_epoch):
-        train_loss = 0
-        for b in range(data.num_train_batch):
-            batch = data.train_dat[b * batch_size: (b + 1) * batch_size]
-            inputs, labels = convert(batch, batch_size, seq_length,
-                                     data.vocab_size, cuda)
-            inputs.append(tensor.Tensor())
-            inputs.append(tensor.Tensor())
-
-            outputs = rnn.forward(model_pb2.kTrain, inputs)[0:-2]
-            grads = []
-            batch_loss = 0
-            g_dense_w.set_value(0.0)
-            g_dense_b.set_value(0.0)
-            for output, label in zip(outputs, labels):
-                act = dense.forward(model_pb2.kTrain, output)
-                lvalue = lossfun.forward(model_pb2.kTrain, act, label)
-                batch_loss += lvalue.l1()
-                grad = lossfun.backward()
-                grad /= batch_size
-                grad, gwb = dense.backward(model_pb2.kTrain, grad)
-                grads.append(grad)
-                g_dense_w += gwb[0]
-                g_dense_b += gwb[1]
-                # print output.l1(), act.l1()
-            utils.update_progress(
-                b * 1.0 / data.num_train_batch, 'training loss = %f' %
-                (batch_loss / seq_length))
-            train_loss += batch_loss
-
-            grads.append(tensor.Tensor())
-            grads.append(tensor.Tensor())
-            g_rnn_w = rnn.backward(model_pb2.kTrain, grads)[1][0]
-            dense_w, dense_b = dense.param_values()
-            opt.apply_with_lr(epoch, get_lr(epoch), g_rnn_w, rnn_w, 'rnnw')
-            opt.apply_with_lr(
-                epoch, get_lr(epoch),
-                g_dense_w, dense_w, 'dense_w')
-            opt.apply_with_lr(
-                epoch, get_lr(epoch),
-                g_dense_b, dense_b, 'dense_b')
-        print '\nEpoch %d, train loss is %f' % \
-            (epoch, train_loss / data.num_train_batch / seq_length)
-
-        eval_loss = 0
-        for b in range(data.num_test_batch):
-            batch = data.val_dat[b * batch_size: (b + 1) * batch_size]
-            inputs, labels = convert(batch, batch_size, seq_length,
-                                     data.vocab_size, cuda)
-            inputs.append(tensor.Tensor())
-            inputs.append(tensor.Tensor())
-            outputs = rnn.forward(model_pb2.kEval, inputs)[0:-2]
-            for output, label in zip(outputs, labels):
-                output = dense.forward(model_pb2.kEval, output)
-                eval_loss += lossfun.forward(model_pb2.kEval,
-                                             output, label).l1()
-        print 'Epoch %d, evaluation loss is %f' % \
-            (epoch, eval_loss / data.num_test_batch / seq_length)
-
-        if (epoch + 1) % 30 == 0:
-            # checkpoint the file model
-            with open('%s_%d.bin' % (model_path, epoch), 'wb') as fd:
-                print 'saving model to %s' % model_path
-                d = {}
-                for name, w in zip(
-                        ['rnn_w', 'dense_w', 'dense_b'],
-                        [rnn_w, dense_w, dense_b]):
-                    w.to_host()
-                    d[name] = tensor.to_numpy(w)
-                    w.to_device(cuda)
-                d['idx_to_char'] = data.idx_to_char
-                d['char_to_idx'] = data.char_to_idx
-                d['hidden_size'] = hidden_size
-                d['num_stacks'] = num_stacks
-                d['dropout'] = dropout
-
-                pickle.dump(d, fd)
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description='Train multi-stack LSTM for '
-        'modeling character sequences from plain text files')
-    parser.add_argument('data', type=str, help='training file')
-    parser.add_argument('-b', type=int, default=32, help='batch_size')
-    parser.add_argument('-l', type=int, default=64, help='sequence length')
-    parser.add_argument('-d', type=int, default=128, help='hidden size')
-    parser.add_argument('-s', type=int, default=2, help='num of stacks')
-    parser.add_argument('-m', type=int, default=50, help='max num of epoch')
-    args = parser.parse_args()
-    data = Data(args.data, batch_size=args.b, seq_length=args.l)
-    train(data, args.m,  hidden_size=args.d, num_stacks=args.s,
-          seq_length=args.l, batch_size=args.b)
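
The get_lr schedule in train.py above halves the base rate of 0.001 once every 50 epochs via a bit shift. A self-contained restatement:

    def get_lr(epoch, base=0.001, step=50):
        # 1 << (epoch // step) doubles the divisor every `step` epochs
        return base / float(1 << (epoch // step))

    print([get_lr(e) for e in (0, 49, 50, 100, 150)])
    # [0.001, 0.001, 0.0005, 0.00025, 0.000125]
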
diff --git a/examples/cifar10/alexnet.py b/examples/cifar10/alexnet.py
deleted file mode 100644
index 02437b3..0000000
--- a/examples/cifar10/alexnet.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-''' This model is created following the structure from
-https://code.google.com/p/cuda-convnet/source/browse/trunk/example-layers/layers-18pct.cfg
-Using the same hyper-parameter settings and data pre-processing, the final
-validation accuracy is about 82%.
-'''
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-from singa import layer
-from singa import metric
-from singa import loss
-from singa import net as ffnet
-
-
-def create_net(use_cpu=False):
-    if use_cpu:
-        layer.engine = 'singacpp'
-
-    net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())
-    W0_specs = {'init': 'gaussian', 'mean': 0, 'std': 0.0001}
-    W1_specs = {'init': 'gaussian', 'mean': 0, 'std': 0.01}
-    W2_specs = {'init': 'gaussian', 'mean': 0, 'std': 0.01, 'decay_mult': 250}
-
-    b_specs = {'init': 'constant', 'value': 0, 'lr_mult': 2, 'decay_mult': 0}
-    net.add(layer.Conv2D('conv1', 32, 5, 1, W_specs=W0_specs.copy(), b_specs=b_specs.copy(), pad=2, input_sample_shape=(3,32,32,)))
-    net.add(layer.MaxPooling2D('pool1', 3, 2, pad=1))
-    net.add(layer.Activation('relu1'))
-    net.add(layer.LRN(name='lrn1', size=3, alpha=5e-5))
-    net.add(layer.Conv2D('conv2', 32, 5, 1, W_specs=W1_specs.copy(), b_specs=b_specs.copy(), pad=2))
-    net.add(layer.Activation('relu2'))
-    net.add(layer.AvgPooling2D('pool2', 3, 2,  pad=1))
-    net.add(layer.LRN('lrn2', size=3, alpha=5e-5))
-    net.add(layer.Conv2D('conv3', 64, 5, 1, W_specs=W1_specs.copy(), b_specs=b_specs.copy(), pad=2))
-    net.add(layer.Activation('relu3'))
-    net.add(layer.AvgPooling2D('pool3', 3, 2, pad=1))
-    net.add(layer.Flatten('flat'))
-    net.add(layer.Dense( 'dense', 10, W_specs=W2_specs.copy(), b_specs=b_specs.copy()))
-    for (p, specs) in zip(net.param_values(), net.param_specs()):
-        filler = specs.filler
-        if filler.type == 'gaussian':
-            p.gaussian(filler.mean, filler.std)
-        else:
-            p.set_value(0)
-        print specs.name, filler.type, p.l1()
-
-    return net
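
The filler loop at the end of create_net reads each parameter's spec and fills gaussian weights or constant biases. A minimal numpy sketch of that dispatch, with hypothetical specs standing in for net.param_specs():

    import numpy as np

    specs = [('conv1/weight', 'gaussian', 0.0, 0.0001),
             ('conv1/bias', 'constant', 0.0, 0.0)]
    params = {}
    for name, ftype, mean, std in specs:
        if ftype == 'gaussian':
            params[name] = np.random.normal(mean, std, size=32).astype(np.float32)
        else:
            params[name] = np.zeros(32, dtype=np.float32)
    for name, p in params.items():
        print(name, float(np.abs(p).sum()))  # a rough analogue of printing p.l1()
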
diff --git a/examples/cifar10/caffe/caffe_net.py b/examples/cifar10/caffe/caffe_net.py
deleted file mode 100644
index 2db131a..0000000
--- a/examples/cifar10/caffe/caffe_net.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-import urllib
-from singa import converter
-
-
-def create_net(use_cpu):
-    urllib.urlretrieve("https://raw.githubusercontent.com/BVLC/caffe/master/examples/cifar10/cifar10_full_train_test.prototxt", "train_test.prototxt")
-    urllib.urlretrieve("https://raw.githubusercontent.com/BVLC/caffe/master/examples/cifar10/cifar10_full_solver.prototxt", "solver.prototxt")
-    input_sample_shape = [3, 32, 32, ]
-
-    cvt = converter.CaffeConverter("train_test.prototxt", "solver.prototxt",
-                                   input_sample_shape)
-    net = cvt.create_net()
-    for (p, specs) in zip(net.param_values(), net.param_specs()):
-        filler = specs.filler
-        if filler.type == 'gaussian':
-            p.gaussian(filler.mean, filler.std)
-        else:
-            p.set_value(0)
-        print specs.name, filler.type, p.l1()
-
-    return net
diff --git a/examples/cifar10/predict.py b/examples/cifar10/predict.py
deleted file mode 100644
index dca44fe..0000000
--- a/examples/cifar10/predict.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-'''Predicting the labels for new images using the pre-trained alexnet model'''
-import cPickle as pickle
-import numpy as np
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-
-from singa import device
-from singa import tensor
-import alexnet
-
-
-def predict(net, images, dev, topk=5):
-    '''Predict the label of each image.
-
-    Args:
-        net, a pretrained neural net
-        images, a batch of images [batch_size, 3, 32, 32], which have been
-            pre-processed
-        dev, the training device
-        topk, return the topk labels for each image.
-    '''
-    x = tensor.from_numpy(images.astype(np.float32))
-    x.to_device(dev)
-    y = net.predict(x)
-    y.to_host()
-    prob = tensor.to_numpy(y)
-    # prob = np.average(prob, 0)
-    labels = np.flipud(np.argsort(prob))  # sort prob in descending order
-    return labels[:, 0:topk]
-
-
-def load_dataset(filepath):
-    print 'Loading data file %s' % filepath
-    with open(filepath, 'rb') as fd:
-        cifar10 = pickle.load(fd)
-    image = cifar10['data'].astype(dtype=np.uint8)
-    image = image.reshape((-1, 3, 32, 32))
-    label = np.asarray(cifar10['labels'], dtype=np.uint8)
-    label = label.reshape(label.size, 1)
-    return image, label
-
-
-def load_train_data(dir_path, num_batches=5):
-    labels = []
-    batchsize = 10000
-    images = np.empty((num_batches * batchsize, 3, 32, 32), dtype=np.uint8)
-    for did in range(1, num_batches + 1):
-        fname_train_data = dir_path + "/data_batch_{}".format(did)
-        image, label = load_dataset(fname_train_data)
-        images[(did - 1) * batchsize:did * batchsize] = image
-        labels.extend(label)
-    images = np.array(images, dtype=np.float32)
-    labels = np.array(labels, dtype=np.int32)
-    return images, labels
-
-
-def load_test_data(dir_path):
-    images, labels = load_dataset(dir_path + "/test_batch")
-    return np.array(images,  dtype=np.float32), np.array(labels, dtype=np.int32)
-
-
-def compute_image_mean(train_dir):
-    images, _ = load_train_data(train_dir)
-    return np.average(images, 0)
-
-if __name__ == '__main__':
-    model = alexnet.create_net(True)
-    model.load('model', 20)  # the checkpoint from train.py
-    dev = device.get_default_device()
-    model.to_device(dev)
-
-    mean = compute_image_mean('cifar-10-batches-py')
-    test_images, _ = load_test_data('cifar-10-batches-py')
-    # predict for two images
-    print predict(model, test_images[0:2] - mean, dev)
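
compute_image_mean above averages the training set per pixel, and the __main__ block subtracts that mean image before prediction. A minimal numpy sketch with random stand-in data:

    import numpy as np

    train = np.random.rand(100, 3, 32, 32).astype(np.float32)  # stand-in for CIFAR-10 images
    mean = np.average(train, 0)                                # per-pixel mean image
    test = np.random.rand(2, 3, 32, 32).astype(np.float32)
    print((test - mean).shape)                                 # (2, 3, 32, 32), zero-centered
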
diff --git a/examples/cifar10/resnet.py b/examples/cifar10/resnet.py
deleted file mode 100644
index 6b573e9..0000000
--- a/examples/cifar10/resnet.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-"""The resnet model is adapted from http://torch.ch/blog/2016/02/04/resnets.html
-The best validation accuracy we achieved is about 83% without data augmentation.
-The performance could be improved by tuning some hyper-parameters, including
-learning rate, weight decay, max_epoch, parameter initialization, etc.
-"""
-
-import cPickle as pickle
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-# use the python modules by installing py singa in build/python
-# pip install -e .
-
-from singa import layer
-from singa import initializer
-from singa import metric
-from singa import loss
-from singa import net as ffnet
-
-
-def Block(net, name, nb_filters, stride):
-    split = net.add(layer.Split(name + "-split", 2))
-    if stride > 1:
-        net.add(layer.Conv2D(name + "-br2-conv", nb_filters, 1, stride, pad=0), split)
-        br2bn = net.add(layer.BatchNormalization(name + "-br2-bn"))
-    net.add(layer.Conv2D(name + "-br1-conv1", nb_filters, 3, stride, pad=1), split)
-    net.add(layer.BatchNormalization(name + "-br1-bn1"))
-    net.add(layer.Activation(name + "-br1-relu"))
-    net.add(layer.Conv2D(name + "-br1-conv2", nb_filters, 3, 1, pad=1))
-    br1bn2 = net.add(layer.BatchNormalization(name + "-br1-bn2"))
-    if stride > 1:
-        net.add(layer.Merge(name + "-merge"), [br1bn2, br2bn])
-    else:
-        net.add(layer.Merge(name + "-merge"), [br1bn2, split])
-
-
-def create_net(use_cpu=False):
-    if use_cpu:
-        layer.engine = 'singacpp'
-
-    net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())
-    net.add(layer.Conv2D("conv1", 16, 3, 1, pad=1, input_sample_shape=(3, 32, 32)))
-    net.add(layer.BatchNormalization("bn1"))
-    net.add(layer.Activation("relu1"))
-
-    Block(net, "2a", 16, 1)
-    Block(net, "2b", 16, 1)
-    Block(net, "2c", 16, 1)
-
-    Block(net, "3a", 32, 2)
-    Block(net, "3b", 32, 1)
-    Block(net, "3c", 32, 1)
-
-    Block(net, "4a", 64, 2)
-    Block(net, "4b", 64, 1)
-    Block(net, "4c", 64, 1)
-
-    net.add(layer.AvgPooling2D("pool4", 8, 8, border_mode='valid'))
-    net.add(layer.Flatten('flat'))
-    net.add(layer.Dense('ip5', 10))
-    print 'Start initialization............'
-    for (p, name) in zip(net.param_values(), net.param_names()):
-        # print name, p.shape
-        if 'mean' in name or 'beta' in name:
-            p.set_value(0.0)
-        elif 'var' in name:
-            p.set_value(1.0)
-        elif 'gamma' in name:
-            initializer.uniform(p, 0, 1)
-        elif len(p.shape) > 1:
-            if 'conv' in name:
-                # initializer.gaussian(p, 0, math.sqrt(2.0/p.shape[1]))
-                initializer.gaussian(p, 0, 9.0 * p.shape[0])
-            else:
-                initializer.uniform(p, p.shape[0], p.shape[1])
-        else:
-            p.set_value(0)
-        # print name, p.l1()
-
-    return net
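
Block() above splits the input into two branches and merges them by summation: a transform branch of 3x3 convolutions plus an identity (or 1x1-conv downsample) branch. A minimal numpy sketch of that merge semantics, with trivial stand-in branches:

    import numpy as np

    def block(x, transform, shortcut=lambda t: t):
        # Merge sums the transform branch with the (possibly projected) input
        return transform(x) + shortcut(x)

    x = np.ones(4, dtype=np.float32)
    print(block(x, transform=lambda t: 0.1 * t))  # [1.1 1.1 1.1 1.1]
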
diff --git a/examples/cifar10/train.py b/examples/cifar10/train.py
deleted file mode 100644
index d54d694..0000000
--- a/examples/cifar10/train.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-""" CIFAR10 dataset is at https://www.cs.toronto.edu/~kriz/cifar.html.
-It includes 5 binary dataset, each contains 10000 images. 1 row (1 image)
-includes 1 label & 3072 pixels.  3072 pixels are 3 channels of a 32x32 image
-"""
-
-import cPickle
-import numpy as np
-import os
-import argparse
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-from singa import utils
-from singa import optimizer
-from singa import device
-from singa import tensor
-from singa.proto import core_pb2
-from caffe import caffe_net
-
-import alexnet
-import vgg
-import resnet
-
-
-def load_dataset(filepath):
-    print 'Loading data file %s' % filepath
-    with open(filepath, 'rb') as fd:
-        cifar10 = cPickle.load(fd)
-    image = cifar10['data'].astype(dtype=np.uint8)
-    image = image.reshape((-1, 3, 32, 32))
-    label = np.asarray(cifar10['labels'], dtype=np.uint8)
-    label = label.reshape(label.size, 1)
-    return image, label
-
-
-def load_train_data(dir_path, num_batches=5):
-    labels = []
-    batchsize = 10000
-    images = np.empty((num_batches * batchsize, 3, 32, 32), dtype=np.uint8)
-    for did in range(1, num_batches + 1):
-        fname_train_data = dir_path + "/data_batch_{}".format(did)
-        image, label = load_dataset(fname_train_data)
-        images[(did - 1) * batchsize:did * batchsize] = image
-        labels.extend(label)
-    images = np.array(images, dtype=np.float32)
-    labels = np.array(labels, dtype=np.int32)
-    return images, labels
-
-
-def load_test_data(dir_path):
-    images, labels = load_dataset(dir_path + "/test_batch")
-    return np.array(images,  dtype=np.float32), np.array(labels, dtype=np.int32)
-
-
-def normalize_for_vgg(train_x, test_x):
-    mean = train_x.mean()
-    std = train_x.std()
-    train_x -= mean
-    test_x -= mean
-    train_x /= std
-    test_x /= std
-    return train_x, test_x
-
-
-def normalize_for_alexnet(train_x, test_x):
-    mean = np.average(train_x, axis=0)
-    train_x -= mean
-    test_x -= mean
-    return train_x, test_x
-
-
-def vgg_lr(epoch):
-    return 0.1 / float(1 << ((epoch / 25)))
-
-
-def alexnet_lr(epoch):
-    if epoch < 120:
-        return 0.001
-    elif epoch < 130:
-        return 0.0001
-    else:
-        return 0.00001
-
-
-def resnet_lr(epoch):
-    if epoch < 81:
-        return 0.1
-    elif epoch < 122:
-        return 0.01
-    else:
-        return 0.001
-
-
-def caffe_lr(epoch):
-    if epoch < 8:
-        return 0.001
-    else:
-        return 0.0001
-
-
-def train(data, net, max_epoch, get_lr, weight_decay, batch_size=100,
-          use_cpu=False):
-    print 'Start initialization............'
-    if use_cpu:
-        print 'Using CPU'
-        dev = device.get_default_device()
-    else:
-        print 'Using GPU'
-        dev = device.create_cuda_gpu()
-
-    net.to_device(dev)
-    opt = optimizer.SGD(momentum=0.9, weight_decay=weight_decay)
-    for (p, specs) in zip(net.param_names(), net.param_specs()):
-        opt.register(p, specs)
-
-    tx = tensor.Tensor((batch_size, 3, 32, 32), dev)
-    ty = tensor.Tensor((batch_size,), dev, core_pb2.kInt)
-    train_x, train_y, test_x, test_y = data
-    num_train_batch = train_x.shape[0] / batch_size
-    num_test_batch = test_x.shape[0] / batch_size
-    idx = np.arange(train_x.shape[0], dtype=np.int32)
-    for epoch in range(max_epoch):
-        np.random.shuffle(idx)
-        loss, acc = 0.0, 0.0
-        print 'Epoch %d' % epoch
-        for b in range(num_train_batch):
-            x = train_x[idx[b * batch_size: (b + 1) * batch_size]]
-            y = train_y[idx[b * batch_size: (b + 1) * batch_size]]
-            tx.copy_from_numpy(x)
-            ty.copy_from_numpy(y)
-            grads, (l, a) = net.train(tx, ty)
-            loss += l
-            acc += a
-            for (s, p, g) in zip(net.param_names(), net.param_values(), grads):
-                opt.apply_with_lr(epoch, get_lr(epoch), g, p, str(s), b)
-            # update progress bar
-            utils.update_progress(b * 1.0 / num_train_batch,
-                                  'training loss = %f, accuracy = %f' % (l, a))
-        info = '\ntraining loss = %f, training accuracy = %f, lr = %f' \
-            % (loss / num_train_batch, acc / num_train_batch, get_lr(epoch))
-        print info
-
-        loss, acc = 0.0, 0.0
-        for b in range(num_test_batch):
-            x = test_x[b * batch_size: (b + 1) * batch_size]
-            y = test_y[b * batch_size: (b + 1) * batch_size]
-            tx.copy_from_numpy(x)
-            ty.copy_from_numpy(y)
-            l, a = net.evaluate(tx, ty)
-            loss += l
-            acc += a
-
-        print 'test loss = %f, test accuracy = %f' \
-            % (loss / num_test_batch, acc / num_test_batch)
-    net.save('model', 20)  # save model params into checkpoint file
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='Train dcnn for cifar10')
-    parser.add_argument('model', choices=['vgg', 'alexnet', 'resnet', 'caffe'],
-            default='alexnet')
-    parser.add_argument('data', default='cifar-10-batches-py')
-    parser.add_argument('--use_cpu', action='store_true')
-    args = parser.parse_args()
-    assert os.path.exists(args.data), \
-        'Pls download the cifar10 dataset via "download_data.py py"'
-    print 'Loading data ..................'
-    train_x, train_y = load_train_data(args.data)
-    test_x, test_y = load_test_data(args.data)
-    if args.model == 'caffe':
-        train_x, test_x = normalize_for_alexnet(train_x, test_x)
-        net = caffe_net.create_net(args.use_cpu)
-        # for cifar10_full_train_test.prototxt
-        train((train_x, train_y, test_x, test_y), net, 160, alexnet_lr, 0.004,
-              use_cpu=args.use_cpu)
-        # for cifar10_quick_train_test.prototxt
-        #train((train_x, train_y, test_x, test_y), net, 18, caffe_lr, 0.004,
-        #      use_cpu=args.use_cpu)
-    elif args.model == 'alexnet':
-        train_x, test_x = normalize_for_alexnet(train_x, test_x)
-        net = alexnet.create_net(args.use_cpu)
-        train((train_x, train_y, test_x, test_y), net, 2, alexnet_lr, 0.004,
-              use_cpu=args.use_cpu)
-    elif args.model == 'vgg':
-        train_x, test_x = normalize_for_vgg(train_x, test_x)
-        net = vgg.create_net(args.use_cpu)
-        train((train_x, train_y, test_x, test_y), net, 250, vgg_lr, 0.0005,
-              use_cpu=args.use_cpu)
-    else:
-        train_x, test_x = normalize_for_alexnet(train_x, test_x)
-        net = resnet.create_net(args.use_cpu)
-        train((train_x, train_y, test_x, test_y), net, 200, resnet_lr, 1e-4,
-              use_cpu=args.use_cpu)
diff --git a/examples/cifar10/vgg.py b/examples/cifar10/vgg.py
deleted file mode 100644
index 89c6fe8..0000000
--- a/examples/cifar10/vgg.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-""" The VGG model is adapted from http://torch.ch/blog/2015/07/30/cifar.html.
-The best validation accuracy we achieved is about 89% without data augmentation.
-The performance could be improved by tuning some hyper-parameters, including
-learning rate, weight decay, max_epoch, parameter initialization, etc.
-"""
-
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../../build/python'))
-
-from singa import layer
-from singa import initializer
-from singa import metric
-from singa import loss
-from singa import net as ffnet
-
-
-def ConvBnReLU(net, name, nb_filers, sample_shape=None):
-    net.add(layer.Conv2D(name + '_1', nb_filers, 3, 1, pad=1,
-                         input_sample_shape=sample_shape))
-    net.add(layer.BatchNormalization(name + '_2'))
-    net.add(layer.Activation(name + '_3'))
-
-
-def create_net(use_cpu=False):
-    if use_cpu:
-        layer.engine = 'singacpp'
-    net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())
-    ConvBnReLU(net, 'conv1_1', 64, (3, 32, 32))
-    net.add(layer.Dropout('drop1', 0.3))
-    ConvBnReLU(net, 'conv1_2', 64)
-    net.add(layer.MaxPooling2D('pool1', 2, 2, border_mode='valid'))
-    ConvBnReLU(net, 'conv2_1', 128)
-    net.add(layer.Dropout('drop2_1', 0.4))
-    ConvBnReLU(net, 'conv2_2', 128)
-    net.add(layer.MaxPooling2D('pool2', 2, 2, border_mode='valid'))
-    ConvBnReLU(net, 'conv3_1', 256)
-    net.add(layer.Dropout('drop3_1', 0.4))
-    ConvBnReLU(net, 'conv3_2', 256)
-    net.add(layer.Dropout('drop3_2', 0.4))
-    ConvBnReLU(net, 'conv3_3', 256)
-    net.add(layer.MaxPooling2D('pool3', 2, 2, border_mode='valid'))
-    ConvBnReLU(net, 'conv4_1', 512)
-    net.add(layer.Dropout('drop4_1', 0.4))
-    ConvBnReLU(net, 'conv4_2', 512)
-    net.add(layer.Dropout('drop4_2', 0.4))
-    ConvBnReLU(net, 'conv4_3', 512)
-    net.add(layer.MaxPooling2D('pool4', 2, 2, border_mode='valid'))
-    ConvBnReLU(net, 'conv5_1', 512)
-    net.add(layer.Dropout('drop5_1', 0.4))
-    ConvBnReLU(net, 'conv5_2', 512)
-    net.add(layer.Dropout('drop5_2', 0.4))
-    ConvBnReLU(net, 'conv5_3', 512)
-    net.add(layer.MaxPooling2D('pool5', 2, 2, border_mode='valid'))
-    net.add(layer.Flatten('flat'))
-    net.add(layer.Dropout('drop_flat', 0.5))
-    net.add(layer.Dense('ip1', 512))
-    net.add(layer.BatchNormalization('batchnorm_ip1'))
-    net.add(layer.Activation('relu_ip1'))
-    net.add(layer.Dropout('drop_ip2', 0.5))
-    net.add(layer.Dense('ip2', 10))
-    print 'Start intialization............'
-    for (p, name) in zip(net.param_values(), net.param_names()):
-        print name, p.shape
-        if 'mean' in name or 'beta' in name:
-            p.set_value(0.0)
-        elif 'var' in name:
-            p.set_value(1.0)
-        elif 'gamma' in name:
-            initializer.uniform(p, 0, 1)
-        elif len(p.shape) > 1:
-            if 'conv' in name:
-                initializer.gaussian(p, 0, 3 * 3 * p.shape[0])
-            else:
-                p.gaussian(0, 0.02)
-        else:
-            p.set_value(0)
-        print name, p.l1()
-
-    return net
diff --git a/examples/cnn/README.md b/examples/cnn/README.md
new file mode 100644
index 0000000..b081aff
--- /dev/null
+++ b/examples/cnn/README.md
@@ -0,0 +1,46 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+# Image Classification using Convolutional Neural Networks
+
+Examples inside this folder show how to train CNN models using 
+SINGA for image classification.
+
+* `data` includes the scripts for preprocessing image datasets.
+  Currently, MNIST, CIFAR10 and CIFAR100 are included.
+
+* `model` includes the CNN model construction code; each model is a
+  subclass of `Module` that wraps its neural network operations
+  (a minimal sketch follows this diff). The computational graph is then
+  enabled to optimize memory usage and execution efficiency.
+
+* `autograd` includes the code to train CNN models by calling the
+  [neural network operations](../../python/singa/autograd.py) imperatively.
+  No computational graph is created in this mode.
+
+* `train_cnn.py` is the training script, which controls the training flow by
+  doing BackPropagation and SGD update.
+
+* `train_multiprocess.py` is the script for distributed training on a single
+  node with multiple GPUs; it uses Python's multiprocessing module and NCCL.
+
+* `train_mpi.py` is the script for distributed training (among multiple nodes) 
+  using MPI and NCCL for communication.
+
+* `benchmark.py` tests the training throughput using `ResNet50` as the workload.
\ No newline at end of file
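
The README above describes the `model` examples as subclasses of `Module` that wrap their network operations. As a point of reference, here is a minimal sketch of that construction pattern, built only from the `layer` API that appears later in this change (`Conv2d`, `ReLU`, `MaxPool2d`, `Flatten`, `Linear`); the exact `Module` base class and its training hooks are not shown in this diff, so treat the class skeleton as an assumption rather than SINGA's definitive interface.

```python
from singa import layer


# Minimal sketch of the construction pattern the README describes.
# In the real examples this class would subclass SINGA's Module;
# the base class and its hooks are assumptions here.
class TinyCNN:

    def __init__(self, num_classes=10):
        # one conv block followed by a small classifier head
        self.conv = layer.Conv2d(1, 8, 3, padding=1)
        self.relu = layer.ReLU()
        self.pool = layer.MaxPool2d(2, 2, padding=0)
        self.flatten = layer.Flatten()
        self.linear = layer.Linear(8 * 14 * 14, num_classes)

    def forward(self, x):
        # for a 1x28x28 input: conv -> 8x28x28, pool -> 8x14x14
        y = self.pool(self.relu(self.conv(x)))
        return self.linear(self.flatten(y))
```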
diff --git a/examples/cnn/autograd/cifar10_multiprocess.py b/examples/cnn/autograd/cifar10_multiprocess.py
new file mode 100644
index 0000000..b5e51ad
--- /dev/null
+++ b/examples/cnn/autograd/cifar10_multiprocess.py
@@ -0,0 +1,43 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from resnet_cifar10 import *
+import multiprocessing
+import sys
+
+if __name__ == '__main__':
+
+    # Generate a NCCL ID to be used for collective communication
+    nccl_id = singa.NcclIdHolder()
+
+    # number of GPUs to be used
+    world_size = int(sys.argv[1])
+
+    # Testing the experimental partial-parameter update asynchronous training
+    partial_update = True
+
+    process = []
+    for local_rank in range(0, world_size):
+        process.append(
+            multiprocessing.Process(target=train_cifar10,
+                                    args=(True, local_rank, world_size, nccl_id,
+                                          partial_update)))
+
+    for p in process:
+        p.start()
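
The launcher above generates one NCCL id and starts one worker process per GPU, passing each worker its `local_rank`; note that it returns as soon as the children are started. A variant that also waits for the workers to finish, using only standard-library calls plus the names defined in the script above, could look like this sketch:

```python
import multiprocessing
import sys

from resnet_cifar10 import *

# Sketch: same process-per-GPU launch pattern as above, but joining
# the workers so the parent exits only after training completes.
if __name__ == '__main__':
    nccl_id = singa.NcclIdHolder()  # one id shared by all ranks
    world_size = int(sys.argv[1])   # number of GPUs to use
    partial_update = True

    workers = []
    for local_rank in range(world_size):
        p = multiprocessing.Process(target=train_cifar10,
                                    args=(True, local_rank, world_size,
                                          nccl_id, partial_update))
        p.start()
        workers.append(p)

    for p in workers:
        p.join()  # wait for every rank to finish
```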
diff --git a/examples/cnn/autograd/mnist_cnn.py b/examples/cnn/autograd/mnist_cnn.py
new file mode 100644
index 0000000..ff2e1dc
--- /dev/null
+++ b/examples/cnn/autograd/mnist_cnn.py
@@ -0,0 +1,303 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import singa_wrap as singa
+from singa import autograd
+from singa import layer
+from singa import tensor
+from singa import device
+from singa import opt
+import numpy as np
+import os
+import sys
+import gzip
+import codecs
+import time
+
+
+class CNN:
+
+    def __init__(self):
+        self.conv1 = layer.Conv2d(1, 20, 5, padding=0)
+        self.conv2 = layer.Conv2d(20, 50, 5, padding=0)
+        self.linear1 = layer.Linear(4 * 4 * 50, 500)
+        self.linear2 = layer.Linear(500, 10)
+        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
+        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
+        self.relu1 = layer.ReLU()
+        self.relu2 = layer.ReLU()
+        self.relu3 = layer.ReLU()
+        self.flatten = layer.Flatten()
+
+    def forward(self, x):
+        y = self.conv1(x)
+        y = self.relu1(y)
+        y = self.pooling1(y)
+        y = self.conv2(y)
+        y = self.relu2(y)
+        y = self.pooling2(y)
+        y = self.flatten(y)
+        y = self.linear1(y)
+        y = self.relu3(y)
+        y = self.linear2(y)
+        return y
+
+
+def check_dataset_exist(dirpath):
+    if not os.path.exists(dirpath):
+        print(
+            'The MNIST dataset does not exist. Please download the mnist dataset using download_mnist.py (e.g. python3 download_mnist.py)'
+        )
+        sys.exit(0)
+    return dirpath
+
+
+def load_dataset():
+    train_x_path = '/tmp/train-images-idx3-ubyte.gz'
+    train_y_path = '/tmp/train-labels-idx1-ubyte.gz'
+    valid_x_path = '/tmp/t10k-images-idx3-ubyte.gz'
+    valid_y_path = '/tmp/t10k-labels-idx1-ubyte.gz'
+
+    train_x = read_image_file(check_dataset_exist(train_x_path)).astype(
+        np.float32)
+    train_y = read_label_file(check_dataset_exist(train_y_path)).astype(
+        np.float32)
+    valid_x = read_image_file(check_dataset_exist(valid_x_path)).astype(
+        np.float32)
+    valid_y = read_label_file(check_dataset_exist(valid_y_path)).astype(
+        np.float32)
+    return train_x, train_y, valid_x, valid_y
+
+
+def read_label_file(path):
+    with gzip.open(path, 'rb') as f:
+        data = f.read()
+        assert get_int(data[:4]) == 2049
+        length = get_int(data[4:8])
+        parsed = np.frombuffer(data, dtype=np.uint8, offset=8).reshape((length))
+        return parsed
+
+
+def get_int(b):
+    return int(codecs.encode(b, 'hex'), 16)
+
+
+def read_image_file(path):
+    with gzip.open(path, 'rb') as f:
+        data = f.read()
+        assert get_int(data[:4]) == 2051
+        length = get_int(data[4:8])
+        num_rows = get_int(data[8:12])
+        num_cols = get_int(data[12:16])
+        parsed = np.frombuffer(data, dtype=np.uint8, offset=16).reshape(
+            (length, 1, num_rows, num_cols))
+        return parsed
+
+
+def to_categorical(y, num_classes):
+    y = np.array(y, dtype="int")
+    n = y.shape[0]
+    categorical = np.zeros((n, num_classes))
+    categorical[np.arange(n), y] = 1
+    categorical = categorical.astype(np.float32)
+    return categorical
+
+
+def accuracy(pred, target):
+    y = np.argmax(pred, axis=1)
+    t = np.argmax(target, axis=1)
+    a = y == t
+    return np.array(a, "int").sum()
+
+
+# Function to all reduce NUMPY Accuracy and Loss from Multiple Devices
+def reduce_variable(variable, dist_opt, reducer):
+    reducer.copy_from_numpy(variable)
+    dist_opt.all_reduce(reducer.data)
+    dist_opt.wait()
+    output = tensor.to_numpy(reducer)
+    return output
+
+
+# Function to synchronize SINGA TENSOR initial model parameters
+def synchronize(tensor, dist_opt):
+    dist_opt.all_reduce(tensor.data)
+    dist_opt.wait()
+    tensor /= dist_opt.world_size
+
+
+# Data augmentation
+def augmentation(x, batch_size):
+    xpad = np.pad(x, [[0, 0], [0, 0], [4, 4], [4, 4]], 'symmetric')
+    for data_num in range(0, batch_size):
+        offset = np.random.randint(8, size=2)
+        x[data_num, :, :, :] = xpad[data_num, :, offset[0]:offset[0] + 28,
+                                    offset[1]:offset[1] + 28]
+        if_flip = np.random.randint(2)
+        if (if_flip):
+            x[data_num, :, :, :] = x[data_num, :, :, ::-1]
+    return x
+
+
+# Data partition
+def data_partition(dataset_x, dataset_y, global_rank, world_size):
+    data_per_rank = dataset_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    return dataset_x[idx_start:idx_end], dataset_y[idx_start:idx_end]
+
+
+def train_mnist_cnn(DIST=False,
+                    local_rank=None,
+                    world_size=None,
+                    nccl_id=None,
+                    spars=0,
+                    topK=False,
+                    corr=True):
+
+    # Define the hyperparameters suitable for mnist_cnn
+    max_epoch = 10
+    batch_size = 64
+    sgd = opt.SGD(lr=0.005, momentum=0.9, weight_decay=1e-5)
+
+    # Prepare training and validation data
+    train_x, train_y, test_x, test_y = load_dataset()
+    IMG_SIZE = 28
+    num_classes = 10
+    train_y = to_categorical(train_y, num_classes)
+    test_y = to_categorical(test_y, num_classes)
+
+    # Normalization
+    train_x = train_x / 255
+    test_x = test_x / 255
+
+    if DIST:
+        # For Distributed GPU Training
+        sgd = opt.DistOpt(sgd,
+                          nccl_id=nccl_id,
+                          local_rank=local_rank,
+                          world_size=world_size)
+        dev = device.create_cuda_gpu_on(sgd.local_rank)
+        # Dataset partition for distributed training
+        train_x, train_y = data_partition(train_x, train_y, sgd.global_rank,
+                                          sgd.world_size)
+        test_x, test_y = data_partition(test_x, test_y, sgd.global_rank,
+                                        sgd.world_size)
+        world_size = sgd.world_size
+    else:
+        # For Single GPU
+        dev = device.create_cuda_gpu()
+        world_size = 1
+
+    # create model
+    model = CNN()
+
+    tx = tensor.Tensor((batch_size, 1, IMG_SIZE, IMG_SIZE), dev, tensor.float32)
+    ty = tensor.Tensor((batch_size, num_classes), dev, tensor.int32)
+    num_train_batch = train_x.shape[0] // batch_size
+    num_test_batch = test_x.shape[0] // batch_size
+    idx = np.arange(train_x.shape[0], dtype=np.int32)
+
+    if DIST:
+        # Synchronize the initial parameters
+        autograd.training = True
+        x = np.random.randn(batch_size, 1, IMG_SIZE,
+                            IMG_SIZE).astype(np.float32)
+        y = np.zeros(shape=(batch_size, num_classes), dtype=np.int32)
+        tx.copy_from_numpy(x)
+        ty.copy_from_numpy(y)
+        out = model.forward(tx)
+        loss = autograd.softmax_cross_entropy(out, ty)
+        for p, g in autograd.backward(loss):
+            synchronize(p, sgd)
+
+    # Training and Evaluation Loop
+    for epoch in range(max_epoch):
+        start_time = time.time()
+        np.random.shuffle(idx)
+
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Starting Epoch %d:' % (epoch))
+
+        # Training Phase
+        autograd.training = True
+        train_correct = np.zeros(shape=[1], dtype=np.float32)
+        test_correct = np.zeros(shape=[1], dtype=np.float32)
+        train_loss = np.zeros(shape=[1], dtype=np.float32)
+
+        for b in range(num_train_batch):
+            x = train_x[idx[b * batch_size:(b + 1) * batch_size]]
+            x = augmentation(x, batch_size)
+            y = train_y[idx[b * batch_size:(b + 1) * batch_size]]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out = model.forward(tx)
+            loss = autograd.softmax_cross_entropy(out, ty)
+            train_correct += accuracy(tensor.to_numpy(out), y)
+            train_loss += tensor.to_numpy(loss)[0]
+            if DIST:
+                if (spars == 0):
+                    sgd.backward_and_update(loss, threshold=50000)
+                else:
+                    sgd.backward_and_sparse_update(loss,
+                                                   spars=spars,
+                                                   topK=topK,
+                                                   corr=corr)
+            else:
+                sgd(loss)
+
+        if DIST:
+            # Reduce the Evaluation Accuracy and Loss from Multiple Devices
+            reducer = tensor.Tensor((1,), dev, tensor.float32)
+            train_correct = reduce_variable(train_correct, sgd, reducer)
+            train_loss = reduce_variable(train_loss, sgd, reducer)
+
+        # Output the Training Loss and Accuracy
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Training loss = %f, training accuracy = %f' %
+                  (train_loss, train_correct /
+                   (num_train_batch * batch_size * world_size)),
+                  flush=True)
+
+        # Evaluation Phase
+        autograd.training = False
+        for b in range(num_test_batch):
+            x = test_x[b * batch_size:(b + 1) * batch_size]
+            y = test_y[b * batch_size:(b + 1) * batch_size]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out_test = model.forward(tx)
+            test_correct += accuracy(tensor.to_numpy(out_test), y)
+
+        if DIST:
+            # Reduce the Evaluation Accuracy from Multiple Devices
+            test_correct = reduce_variable(test_correct, sgd, reducer)
+
+        # Output the Evaluation Accuracy
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Evaluation accuracy = %f, Elapsed Time = %fs' %
+                  (test_correct / (num_test_batch * batch_size * world_size),
+                   time.time() - start_time),
+                  flush=True)
+
+
+if __name__ == '__main__':
+
+    DIST = False
+    train_mnist_cnn(DIST=DIST)
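
The `augmentation` helper above implements the classic pad-crop-flip recipe: pad each image by 4 pixels per side, cut a random 28x28 window back out, and mirror it horizontally with probability 1/2. The snippet below replays the same logic on a dummy batch in plain NumPy, so it can be sanity-checked without SINGA or a GPU:

```python
import numpy as np

# Dummy batch in NCHW layout, matching the MNIST shapes used above.
batch_size = 4
x = np.random.rand(batch_size, 1, 28, 28).astype(np.float32)

xpad = np.pad(x, [[0, 0], [0, 0], [4, 4], [4, 4]], 'symmetric')
for i in range(batch_size):
    dy, dx = np.random.randint(8, size=2)      # random crop offset in [0, 8)
    x[i] = xpad[i, :, dy:dy + 28, dx:dx + 28]  # crop back to 28x28
    if np.random.randint(2):
        x[i] = x[i, :, :, ::-1]                # horizontal flip

assert x.shape == (batch_size, 1, 28, 28)
```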
diff --git a/examples/cnn/autograd/mnist_dist.py b/examples/cnn/autograd/mnist_dist.py
new file mode 100644
index 0000000..3586127
--- /dev/null
+++ b/examples/cnn/autograd/mnist_dist.py
@@ -0,0 +1,25 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from mnist_cnn import *
+
+if __name__ == '__main__':
+
+    DIST = True
+    train_mnist_cnn(DIST=DIST)
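
`mnist_dist.py` only flips the `DIST` switch; everything else (NCCL setup, `data_partition`, the all-reduce helpers) comes from `mnist_cnn.py` through the star import. In particular, `data_partition` hands every rank a contiguous, equally sized shard and silently drops any remainder, as this self-contained snippet shows:

```python
import numpy as np

def data_partition(dataset_x, dataset_y, global_rank, world_size):
    # Same logic as in mnist_cnn.py: contiguous, equal shards per rank.
    data_per_rank = dataset_x.shape[0] // world_size
    idx_start = global_rank * data_per_rank
    idx_end = (global_rank + 1) * data_per_rank
    return dataset_x[idx_start:idx_end], dataset_y[idx_start:idx_end]

x = np.arange(10).reshape(10, 1)
y = np.arange(10)
# With world_size=4, ranks 0..3 each see 2 samples; the last 2 are dropped.
for rank in range(4):
    part_x, part_y = data_partition(x, y, rank, 4)
    print(rank, part_y)  # -> 0 [0 1], 1 [2 3], 2 [4 5], 3 [6 7]
```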
diff --git a/examples/cnn/autograd/mnist_multiprocess.py b/examples/cnn/autograd/mnist_multiprocess.py
new file mode 100644
index 0000000..f5c2763
--- /dev/null
+++ b/examples/cnn/autograd/mnist_multiprocess.py
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from mnist_cnn import *
+import multiprocessing
+import sys
+
+if __name__ == '__main__':
+
+    # Generate a NCCL ID to be used for collective communication
+    nccl_id = singa.NcclIdHolder()
+
+    # number of GPUs to be used
+    world_size = int(sys.argv[1])
+
+    process = []
+    for local_rank in range(0, world_size):
+        process.append(
+            multiprocessing.Process(target=train_mnist_cnn,
+                                    args=(True, local_rank, world_size, nccl_id)))
+
+    for p in process:
+        p.start()
diff --git a/examples/cnn/autograd/resnet_cifar10.py b/examples/cnn/autograd/resnet_cifar10.py
new file mode 100644
index 0000000..14005bc
--- /dev/null
+++ b/examples/cnn/autograd/resnet_cifar10.py
@@ -0,0 +1,291 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+try:
+    import pickle
+except ImportError:
+    import cPickle as pickle
+
+from singa import singa_wrap as singa
+from singa import autograd
+from singa import tensor
+from singa import device
+from singa import opt
+from PIL import Image
+import numpy as np
+import os
+import sys
+import time
+
+
+def load_dataset(filepath):
+    with open(filepath, 'rb') as fd:
+        try:
+            cifar10 = pickle.load(fd, encoding='latin1')
+        except TypeError:
+            cifar10 = pickle.load(fd)
+    image = cifar10['data'].astype(dtype=np.uint8)
+    image = image.reshape((-1, 3, 32, 32))
+    label = np.asarray(cifar10['labels'], dtype=np.uint8)
+    label = label.reshape(label.size, 1)
+    return image, label
+
+
+def load_train_data(dir_path='cifar-10-batches-py', num_batches=5):
+    labels = []
+    batchsize = 10000
+    images = np.empty((num_batches * batchsize, 3, 32, 32), dtype=np.uint8)
+    for did in range(1, num_batches + 1):
+        fname_train_data = dir_path + "/data_batch_{}".format(did)
+        image, label = load_dataset(check_dataset_exist(fname_train_data))
+        images[(did - 1) * batchsize:did * batchsize] = image
+        labels.extend(label)
+    images = np.array(images, dtype=np.float32)
+    labels = np.array(labels, dtype=np.int32)
+    return images, labels
+
+
+def load_test_data(dir_path='cifar-10-batches-py'):
+    images, labels = load_dataset(check_dataset_exist(dir_path + "/test_batch"))
+    return np.array(images, dtype=np.float32), np.array(labels, dtype=np.int32)
+
+
+def check_dataset_exist(dirpath):
+    if not os.path.exists(dirpath):
+        print(
+            'Please download the cifar10 dataset using download_data.py (e.g. python ~/singa/examples/cifar10/download_data.py py)'
+        )
+        sys.exit(0)
+    return dirpath
+
+
+def normalize_for_resnet(train_x, test_x):
+    mean = [0.4914, 0.4822, 0.4465]
+    std = [0.2023, 0.1994, 0.2010]
+    train_x /= 255
+    test_x /= 255
+    for ch in range(3):  # normalize all three RGB channels
+        train_x[:, ch, :, :] -= mean[ch]
+        train_x[:, ch, :, :] /= std[ch]
+        test_x[:, ch, :, :] -= mean[ch]
+        test_x[:, ch, :, :] /= std[ch]
+    return train_x, test_x
+
+
+def resize_dataset(x, IMG_SIZE):
+    num_data = x.shape[0]
+    dim = x.shape[1]
+    X = np.zeros(shape=(num_data, dim, IMG_SIZE, IMG_SIZE), dtype=np.float32)
+    for n in range(0, num_data):
+        for d in range(0, dim):
+            X[n, d, :, :] = np.array(Image.fromarray(x[n, d, :, :]).resize(
+                (IMG_SIZE, IMG_SIZE), Image.BILINEAR),
+                                     dtype=np.float32)
+    return X
+
+
+def augmentation(x, batch_size):
+    xpad = np.pad(x, [[0, 0], [0, 0], [4, 4], [4, 4]], 'symmetric')
+    for data_num in range(0, batch_size):
+        offset = np.random.randint(8, size=2)
+        x[data_num, :, :, :] = xpad[data_num, :, offset[0]:offset[0] + 32,
+                                    offset[1]:offset[1] + 32]
+        if_flip = np.random.randint(2)
+        if (if_flip):
+            x[data_num, :, :, :] = x[data_num, :, :, ::-1]
+    return x
+
+
+def accuracy(pred, target):
+    y = np.argmax(pred, axis=1)
+    t = np.argmax(target, axis=1)
+    a = y == t
+    return np.array(a, "int").sum()
+
+
+def to_categorical(y, num_classes):
+    y = np.array(y, dtype="int")
+    n = y.shape[0]
+    categorical = np.zeros((n, num_classes))
+    for i in range(0, n):
+        categorical[i, y[i]] = 1
+    categorical = categorical.astype(np.float32)
+    return categorical
+
+
+# Function to all reduce NUMPY Accuracy and Loss from Multiple Devices
+def reduce_variable(variable, dist_opt, reducer):
+    reducer.copy_from_numpy(variable)
+    dist_opt.all_reduce(reducer.data)
+    dist_opt.wait()
+    output = tensor.to_numpy(reducer)
+    return output
+
+
+# Function to synchronize SINGA TENSOR initial model parameters
+def synchronize(tensor, dist_opt):
+    dist_opt.all_reduce(tensor.data)
+    dist_opt.wait()
+    tensor /= dist_opt.world_size
+
+
+# Data partition
+def data_partition(dataset_x, dataset_y, global_rank, world_size):
+    data_per_rank = dataset_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    return dataset_x[idx_start:idx_end], dataset_y[idx_start:idx_end]
+
+
+def train_cifar10(DIST=False,
+                  local_rank=None,
+                  world_size=None,
+                  nccl_id=None,
+                  partial_update=False):
+
+    # Define the hyperparameters suitable for train_cifar10
+    sgd = opt.SGD(lr=0.005, momentum=0.9, weight_decay=1e-5)
+    max_epoch = 5
+    batch_size = 32
+
+    train_x, train_y = load_train_data()
+    test_x, test_y = load_test_data()
+    train_x, test_x = normalize_for_resnet(train_x, test_x)
+    IMG_SIZE = 224
+    num_classes = 10
+
+    if DIST:
+        # For Distributed GPU Training
+        sgd = opt.DistOpt(sgd,
+                          nccl_id=nccl_id,
+                          local_rank=local_rank,
+                          world_size=world_size)
+        dev = device.create_cuda_gpu_on(sgd.local_rank)
+        # Dataset partition for distributed training
+        train_x, train_y = data_partition(train_x, train_y, sgd.global_rank,
+                                          sgd.world_size)
+        test_x, test_y = data_partition(test_x, test_y, sgd.global_rank,
+                                        sgd.world_size)
+        world_size = sgd.world_size
+    else:
+        # For Single GPU
+        dev = device.create_cuda_gpu()
+        world_size = 1
+
+    from resnet import resnet50
+    model = resnet50(num_classes=num_classes)
+
+    tx = tensor.Tensor((batch_size, 3, IMG_SIZE, IMG_SIZE), dev, tensor.float32)
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    num_train_batch = train_x.shape[0] // batch_size
+    num_test_batch = test_x.shape[0] // batch_size
+    idx = np.arange(train_x.shape[0], dtype=np.int32)
+
+    if DIST:
+        # Synchronize the initial parameters
+        autograd.training = True
+        x = np.random.randn(batch_size, 3, IMG_SIZE,
+                            IMG_SIZE).astype(np.float32)
+        y = np.zeros(shape=(batch_size,), dtype=np.int32)
+        tx.copy_from_numpy(x)
+        ty.copy_from_numpy(y)
+        out = model(tx)
+        loss = autograd.softmax_cross_entropy(out, ty)
+        param = []
+        for p, _ in autograd.backward(loss):
+            synchronize(p, sgd)
+            param.append(p)
+
+    for epoch in range(max_epoch):
+        start_time = time.time()
+        np.random.shuffle(idx)
+
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Starting Epoch %d:' % (epoch))
+
+        # Training Phase
+        autograd.training = True
+        train_correct = np.zeros(shape=[1], dtype=np.float32)
+        test_correct = np.zeros(shape=[1], dtype=np.float32)
+        train_loss = np.zeros(shape=[1], dtype=np.float32)
+
+        for b in range(num_train_batch):
+            x = train_x[idx[b * batch_size:(b + 1) * batch_size]]
+            x = augmentation(x, batch_size)
+            x = resize_dataset(x, IMG_SIZE)
+            y = train_y[idx[b * batch_size:(b + 1) * batch_size]]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out = model(tx)
+            loss = autograd.softmax_cross_entropy(out, ty)
+            train_correct += accuracy(tensor.to_numpy(out),
+                                      to_categorical(y, num_classes)).astype(
+                                          np.float32)
+            train_loss += tensor.to_numpy(loss)[0]
+            if not partial_update:
+                sgd.backward_and_update(loss)
+            else:
+                sgd.backward_and_partial_update(loss)
+
+        if DIST:
+            # Reduce the Training Accuracy and Loss from Multiple Devices
+            reducer = tensor.Tensor((1,), dev, tensor.float32)
+            train_correct = reduce_variable(train_correct, sgd, reducer)
+            train_loss = reduce_variable(train_loss, sgd, reducer)
+
+        # Output the Training Loss and Accuracy
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Training loss = %f, training accuracy = %f' %
+                  (train_loss, train_correct /
+                   (num_train_batch * batch_size * world_size)),
+                  flush=True)
+
+        if partial_update:
+            # synchronize parameters before evaluation phase
+            for p in param:
+                synchronize(p, sgd)
+
+        # Evaluation Phase
+        autograd.training = False
+        for b in range(num_test_batch):
+            x = test_x[b * batch_size:(b + 1) * batch_size]
+            x = resize_dataset(x, IMG_SIZE)
+            y = test_y[b * batch_size:(b + 1) * batch_size]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out_test = model(tx)
+            test_correct += accuracy(tensor.to_numpy(out_test),
+                                     to_categorical(y, num_classes))
+
+        if DIST:
+            # Reduce the Evaluation Accuracy from Multiple Devices
+            test_correct = reduce_variable(test_correct, sgd, reducer)
+
+        # Output the Evaluation Accuracy
+        if ((DIST == False) or (sgd.global_rank == 0)):
+            print('Evaluation accuracy = %f, Elapsed Time = %fs' %
+                  (test_correct / (num_test_batch * batch_size * world_size),
+                   time.time() - start_time),
+                  flush=True)
+
+
+if __name__ == '__main__':
+
+    DIST = False
+    train_cifar10(DIST=DIST)
diff --git a/examples/cnn/autograd/resnet_dist.py b/examples/cnn/autograd/resnet_dist.py
new file mode 100644
index 0000000..6f9b56c
--- /dev/null
+++ b/examples/cnn/autograd/resnet_dist.py
@@ -0,0 +1,87 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# the code is modified from
+# https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
+
+from singa import autograd
+from singa import tensor
+from singa import device
+from singa import opt
+
+import numpy as np
+from tqdm import trange
+
+if __name__ == "__main__":
+    sgd = opt.SGD(lr=0.1, momentum=0.9, weight_decay=1e-5)
+    sgd = opt.DistOpt(sgd)
+
+    if (sgd.global_rank == 0):
+        print("Start intialization...........", flush=True)
+
+    dev = device.create_cuda_gpu_on(sgd.local_rank)
+
+    from resnet import resnet50
+    model = resnet50()
+
+    niters = 100
+    batch_size = 32
+    IMG_SIZE = 224
+
+    tx = tensor.Tensor((batch_size, 3, IMG_SIZE, IMG_SIZE), dev)
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    autograd.training = True
+    x = np.random.randn(batch_size, 3, IMG_SIZE, IMG_SIZE).astype(np.float32)
+    y = np.random.randint(0, 1000, batch_size, dtype=np.int32)
+    tx.copy_from_numpy(x)
+    ty.copy_from_numpy(y)
+
+    import time
+
+    dev.Sync()
+    start = time.time()
+    fd = 0
+    softmax = 0
+    update = 0
+    with trange(niters) as t:
+        for _ in t:
+            dev.Sync()
+            tick = time.time()
+            x = model(tx)
+            dev.Sync()
+            fd += time.time() - tick
+            tick = time.time()
+            loss = autograd.softmax_cross_entropy(x, ty)
+            dev.Sync()
+            softmax += time.time() - tick
+            sgd.backward_and_update(loss)
+
+    dev.Sync()
+    end = time.time()
+    throughput = float(sgd.world_size * niters * batch_size) / (end - start)
+    titer = (end - start) / float(niters)
+    tforward = float(fd) / float(niters)
+    tsoftmax = float(softmax) / float(niters)
+    tbackward = titer - tforward - tsoftmax
+
+    if (sgd.global_rank == 0):
+        print("\nThroughput = {} per second".format(throughput), flush=True)
+        print("Total={}, forward={}, softmax={}, backward={}".format(
+            titer, tforward, tsoftmax, tbackward),
+              flush=True)
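
The throughput reported above follows directly from the loop structure: each of the `world_size` ranks pushes `batch_size` images through the model per iteration, so the aggregate rate is `world_size * niters * batch_size / elapsed`. A quick worked example with made-up numbers:

```python
# Hypothetical numbers, purely to illustrate the arithmetic above.
world_size, niters, batch_size = 4, 100, 32
elapsed = 20.0                                   # seconds end-to-end
throughput = world_size * niters * batch_size / elapsed
titer = elapsed / niters                         # per-iteration time (per rank)
print(throughput, titer)                         # 640.0 images/s, 0.2 s
```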
diff --git a/examples/cnn/autograd/sparsification_mnist.py b/examples/cnn/autograd/sparsification_mnist.py
new file mode 100644
index 0000000..cc9b585
--- /dev/null
+++ b/examples/cnn/autograd/sparsification_mnist.py
@@ -0,0 +1,45 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from mnist_cnn import *
+import multiprocessing
+import sys
+
+if __name__ == '__main__':
+
+    # Generate a NCCL ID to be used for collective communication
+    nccl_id = singa.NcclIdHolder()
+
+    # number of GPUs to be used
+    world_size = int(sys.argv[1])
+
+    # Use sparsification with parameters
+    topK = False  # When topK is False, sparsification uses a constant absolute threshold
+    corr = True  # If True, the locally accumulated gradient is used for error correction
+    sparsThreshold = 0.05  # The constant absolute threshold for sparsification
+
+    process = []
+    for local_rank in range(0, world_size):
+        process.append(
+            multiprocessing.Process(target=train_mnist_cnn,
+                                    args=(True, local_rank, world_size, nccl_id,
+                                          sparsThreshold, topK, corr)))
+
+    for p in process:
+        p.start()
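
The `topK` and `sparsThreshold` settings above select between two rules for deciding which gradient entries survive: a constant magnitude threshold, or the K largest-magnitude entries; per the comments above, `corr` additionally keeps the dropped residual in a local accumulator. The NumPy sketch below illustrates only the two selection rules; it is a conceptual picture, not SINGA's internal fused implementation:

```python
import numpy as np

grad = np.array([0.02, -0.30, 0.07, 0.50, -0.01], dtype=np.float32)

# Threshold mode (topK=False): keep entries with |g| >= sparsThreshold.
thresh = 0.05
sparse_thresh = np.where(np.abs(grad) >= thresh, grad, 0.0)

# Top-K mode (topK=True): keep the K largest-magnitude entries.
k = 2
keep = np.argsort(np.abs(grad))[-k:]
sparse_topk = np.zeros_like(grad)
sparse_topk[keep] = grad[keep]

print(sparse_thresh)  # [ 0.  -0.3  0.07  0.5  0. ] -> 3 entries kept
print(sparse_topk)    # [ 0.  -0.3  0.    0.5  0. ] -> 2 entries kept
```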
diff --git a/examples/cnn/autograd/xceptionnet.py b/examples/cnn/autograd/xceptionnet.py
new file mode 100644
index 0000000..357e47d
--- /dev/null
+++ b/examples/cnn/autograd/xceptionnet.py
@@ -0,0 +1,303 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+from singa import autograd
+from singa import tensor
+from singa import device
+from singa import layer
+from singa import opt
+
+import numpy as np
+from tqdm import trange
+
+# the code is modified from
+# https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/xception.py
+
+
+class Block(layer.Layer):
+
+    def __init__(self,
+                 in_filters,
+                 out_filters,
+                 reps,
+                 strides=1,
+                 padding=0,
+                 start_with_relu=True,
+                 grow_first=True):
+        super(Block, self).__init__()
+
+        if out_filters != in_filters or strides != 1:
+            self.skip = layer.Conv2d(in_filters,
+                                     out_filters,
+                                     1,
+                                     stride=strides,
+                                     padding=padding,
+                                     bias=False)
+            self.skipbn = layer.BatchNorm2d(out_filters)
+        else:
+            self.skip = None
+
+        self.layers = []
+
+        filters = in_filters
+        if grow_first:
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(in_filters,
+                                      out_filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(out_filters))
+            filters = out_filters
+
+        for i in range(reps - 1):
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(filters,
+                                      filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(filters))
+
+        if not grow_first:
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(in_filters,
+                                      out_filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(out_filters))
+
+        if not start_with_relu:
+            self.layers = self.layers[1:]
+        else:
+            self.layers[0] = layer.ReLU()
+
+        if strides != 1:
+            self.layers.append(layer.MaxPool2d(3, strides, padding + 1))
+
+        self.register_layers(*self.layers)
+
+        self.add = layer.Add()
+
+    def forward(self, x):
+        y = self.layers[0](x)
+        for lyr in self.layers[1:]:  # avoid shadowing the imported `layer` module
+            if isinstance(y, tuple):
+                y = y[0]
+            y = lyr(y)
+
+        if self.skip is not None:
+            skip = self.skip(x)
+            skip = self.skipbn(skip)
+        else:
+            skip = x
+        y = self.add(y, skip)
+        return y
+
+
+__all__ = ['Xception']
+
+
+class Xception(layer.Layer):
+    """
+    Xception optimized for the ImageNet dataset, as specified in
+    https://arxiv.org/pdf/1610.02357.pdf
+    """
+
+    def __init__(self, num_classes=1000):
+        """ Constructor
+        Args:
+            num_classes: number of classes
+        """
+        super(Xception, self).__init__()
+        self.num_classes = num_classes
+
+        self.conv1 = layer.Conv2d(3, 32, 3, 2, 0, bias=False)
+        self.bn1 = layer.BatchNorm2d(32)
+        self.relu1 = layer.ReLU()
+
+        self.conv2 = layer.Conv2d(32, 64, 3, 1, 1, bias=False)
+        self.bn2 = layer.BatchNorm2d(64)
+        self.relu2 = layer.ReLU()
+        # do relu here
+
+        self.block1 = Block(64,
+                            128,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=False,
+                            grow_first=True)
+        self.block2 = Block(128,
+                            256,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block3 = Block(256,
+                            728,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=True,
+                            grow_first=True)
+
+        self.block4 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block5 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block6 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block7 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+
+        self.block8 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block9 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block10 = Block(728,
+                             728,
+                             3,
+                             1,
+                             start_with_relu=True,
+                             grow_first=True)
+        self.block11 = Block(728,
+                             728,
+                             3,
+                             1,
+                             start_with_relu=True,
+                             grow_first=True)
+
+        self.block12 = Block(728,
+                             1024,
+                             2,
+                             2,
+                             start_with_relu=True,
+                             grow_first=False)
+
+        self.conv3 = layer.SeparableConv2d(1024, 1536, 3, 1, 1)
+        self.bn3 = layer.BatchNorm2d(1536)
+        self.relu3 = layer.ReLU()
+
+        # do relu here
+        self.conv4 = layer.SeparableConv2d(1536, 2048, 3, 1, 1)
+        self.bn4 = layer.BatchNorm2d(2048)
+
+        self.relu4 = layer.ReLU()
+        self.globalpooling = layer.MaxPool2d(10, 1)
+        self.flatten = layer.Flatten()
+        self.fc = layer.Linear(2048, num_classes)
+
+    def features(self, input):
+        x = self.conv1(input)
+        x = self.bn1(x)
+        x = self.relu1(x)
+
+        x = self.conv2(x)
+        x = self.bn2(x)
+        x = self.relu2(x)
+
+        x = self.block1(x)
+        x = self.block2(x)
+        x = self.block3(x)
+        x = self.block4(x)
+        x = self.block5(x)
+        x = self.block6(x)
+        x = self.block7(x)
+        x = self.block8(x)
+        x = self.block9(x)
+        x = self.block10(x)
+        x = self.block11(x)
+        x = self.block12(x)
+
+        x = self.conv3(x)
+        x = self.bn3(x)
+        x = self.relu3(x)
+
+        x = self.conv4(x)
+        x = self.bn4(x)
+        return x
+
+    def logits(self, features):
+        x = self.relu4(features)
+        x = self.globalpooling(x)
+        x = self.flatten(x)
+        x = self.fc(x)
+        return x
+
+    def forward(self, input):
+        x = self.features(input)
+        x = self.logits(x)
+        return x
+
+
+if __name__ == '__main__':
+    model = Xception(num_classes=1000)
+    print('Start initialization............')
+    dev = device.create_cuda_gpu_on(0)
+    #dev = device.create_cuda_gpu()
+
+    niters = 20
+    batch_size = 16
+    IMG_SIZE = 299
+    sgd = opt.SGD(lr=0.1, momentum=0.9, weight_decay=1e-5)
+
+    tx = tensor.Tensor((batch_size, 3, IMG_SIZE, IMG_SIZE), dev)
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    autograd.training = True
+    x = np.random.randn(batch_size, 3, IMG_SIZE, IMG_SIZE).astype(np.float32)
+    y = np.random.randint(0, 1000, batch_size, dtype=np.int32)
+    tx.copy_from_numpy(x)
+    ty.copy_from_numpy(y)
+
+    with trange(niters) as t:
+        for _ in t:
+            x = model(tx)
+            loss = autograd.softmax_cross_entropy(x, ty)
+            sgd(loss)
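
`SeparableConv2d` is what keeps this Xception affordable relative to plain convolutions: a k x k depthwise pass over each input channel followed by a 1 x 1 pointwise mix, costing roughly Cin*k*k + Cin*Cout weights instead of Cin*Cout*k*k. A quick check for the `conv3` layer above (1024 -> 1536 channels, 3 x 3 kernel), ignoring biases:

```python
# Weight counts (ignoring biases) for a 3x3 conv with 1024 -> 1536 channels.
cin, cout, k = 1024, 1536, 3
regular = cin * cout * k * k          # 14,155,776 weights
separable = cin * k * k + cin * cout  # 9,216 + 1,572,864 = 1,582,080 weights
print(regular / separable)            # roughly 8.9x fewer parameters
```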
diff --git a/examples/cnn/benchmark.py b/examples/cnn/benchmark.py
new file mode 100644
index 0000000..a182139
--- /dev/null
+++ b/examples/cnn/benchmark.py
@@ -0,0 +1,121 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# the code is modified from
+# https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
+
+from singa import opt
+from singa import device
+from singa import tensor
+
+import argparse
+import time
+import numpy as np
+from tqdm import trange
+
+
+def train_resnet(DIST=True, graph=True, sequential=False, verbosity=0):
+
+    # Define the hyperparameters suitable for train_resnet
+    niters = 100
+    batch_size = 32
+    sgd = opt.SGD(lr=0.1, momentum=0.9, weight_decay=1e-5)
+
+    IMG_SIZE = 224
+
+    # For distributed training, sequential has better throughput in the current version
+    if DIST == True:
+        sgd = opt.DistOpt(sgd)
+        world_size = sgd.world_size
+        local_rank = sgd.local_rank
+        global_rank = sgd.global_rank
+        sequential = True
+    else:
+        local_rank = 0
+        world_size = 1
+        global_rank = 0
+        sequential = False
+
+    dev = device.create_cuda_gpu_on(local_rank)
+
+    tx = tensor.Tensor((batch_size, 3, IMG_SIZE, IMG_SIZE), dev)
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    x = np.random.randn(batch_size, 3, IMG_SIZE, IMG_SIZE).astype(np.float32)
+    y = np.random.randint(0, 1000, batch_size, dtype=np.int32)
+    tx.copy_from_numpy(x)
+    ty.copy_from_numpy(y)
+
+    dev.SetVerbosity(verbosity)
+    dev.SetSkipIteration(5)
+
+    # construct the model
+    from model import resnet
+    model = resnet.resnet50(num_channels=3, num_classes=1000)
+
+    model.train()
+    model.set_optimizer(sgd)
+    model.compile([tx], is_train=True, use_graph=graph, sequential=sequential)
+
+    # train model
+    dev.Sync()
+    start = time.time()
+    with trange(niters) as t:
+        for _ in t:
+            model(tx, ty, dist_option='fp32', spars=None)
+
+    dev.Sync()
+    end = time.time()
+    titer = (end - start) / float(niters)
+    throughput = float(niters * batch_size * world_size) / (end - start)
+    if global_rank == 0:
+        print("Throughput = {} per second".format(throughput), flush=True)
+        print("TotalTime={}".format(end - start), flush=True)
+        print("Total={}".format(titer), flush=True)
+        dev.PrintTimeProfiling()
+
+
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser(
+        description='Throughput test using Resnet 50')
+    parser.add_argument('--dist',
+                        '--enable-dist',
+                        default=False,
+                        action='store_true',
+                        help='enable distributed training',
+                        dest='DIST')
+    parser.add_argument('--no-graph',
+                        '--disable-graph',
+                        default=True,
+                        action='store_false',
+                        help='disable graph',
+                        dest='graph')
+    parser.add_argument('--verbosity',
+                        '--log-verbosity',
+                        default=0,
+                        type=int,
+                        help='logging verbosity',
+                        dest='verbosity')
+
+    args = parser.parse_args()
+
+    train_resnet(DIST=args.DIST,
+                 graph=args.graph,
+                 sequential=False,
+                 verbosity=args.verbosity)
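
For reference, some assumed invocations of this benchmark. Only the flags come from the argparse definitions above; the MPI launcher is an assumption based on `DistOpt` being created without an explicit NCCL id, which leaves rank discovery to the communication runtime:

```
# Assumed invocations (launcher is an example; only the flags are
# defined in benchmark.py itself):
python benchmark.py                      # single GPU, graph enabled
python benchmark.py --no-graph           # single GPU, graph disabled
mpiexec -n 4 python benchmark.py --dist  # 4 ranks; assumes MPI-driven DistOpt
```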
diff --git a/examples/cnn/data/cifar10.py b/examples/cnn/data/cifar10.py
new file mode 100644
index 0000000..d61d84e
--- /dev/null
+++ b/examples/cnn/data/cifar10.py
@@ -0,0 +1,89 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+try:
+    import pickle
+except ImportError:
+    import cPickle as pickle
+
+import numpy as np
+import os
+import sys
+
+
+def load_dataset(filepath):
+    with open(filepath, 'rb') as fd:
+        try:
+            cifar10 = pickle.load(fd, encoding='latin1')
+        except TypeError:
+            cifar10 = pickle.load(fd)
+    image = cifar10['data'].astype(dtype=np.uint8)
+    image = image.reshape((-1, 3, 32, 32))
+    label = np.asarray(cifar10['labels'], dtype=np.uint8)
+    label = label.reshape(label.size, 1)
+    return image, label
+
+
+def load_train_data(dir_path='/tmp/cifar-10-batches-py', num_batches=5):
+    labels = []
+    batchsize = 10000
+    images = np.empty((num_batches * batchsize, 3, 32, 32), dtype=np.uint8)
+    for did in range(1, num_batches + 1):
+        fname_train_data = dir_path + "/data_batch_{}".format(did)
+        image, label = load_dataset(check_dataset_exist(fname_train_data))
+        images[(did - 1) * batchsize:did * batchsize] = image
+        labels.extend(label)
+    images = np.array(images, dtype=np.float32)
+    labels = np.array(labels, dtype=np.int32)
+    return images, labels
+
+
+def load_test_data(dir_path='/tmp/cifar-10-batches-py'):
+    images, labels = load_dataset(check_dataset_exist(dir_path + "/test_batch"))
+    return np.array(images, dtype=np.float32), np.array(labels, dtype=np.int32)
+
+
+def check_dataset_exist(dirpath):
+    if not os.path.exists(dirpath):
+        print(
+            'Please download the cifar10 dataset using python data/download_cifar10.py'
+        )
+        sys.exit(0)
+    return dirpath
+
+
+def normalize(train_x, val_x):
+    mean = [0.4914, 0.4822, 0.4465]
+    std = [0.2023, 0.1994, 0.2010]
+    train_x /= 255
+    val_x /= 255
+    for ch in range(3):  # normalize all three RGB channels
+        train_x[:, ch, :, :] -= mean[ch]
+        train_x[:, ch, :, :] /= std[ch]
+        val_x[:, ch, :, :] -= mean[ch]
+        val_x[:, ch, :, :] /= std[ch]
+    return train_x, val_x
+
+def load():
+    train_x, train_y = load_train_data()
+    val_x, val_y = load_test_data()
+    train_x, val_x = normalize(train_x, val_x)
+    train_y = train_y.flatten()
+    val_y = val_y.flatten()
+    return train_x, train_y, val_x, val_y

diff --git a/examples/cnn/data/cifar100.py b/examples/cnn/data/cifar100.py
new file mode 100644
index 0000000..88b943f
--- /dev/null
+++ b/examples/cnn/data/cifar100.py
@@ -0,0 +1,81 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+try:
+    import pickle
+except ImportError:
+    import cPickle as pickle
+
+import numpy as np
+import os
+import sys
+
+
+def load_dataset(filepath):
+    with open(filepath, 'rb') as fd:
+        try:
+            cifar100 = pickle.load(fd, encoding='latin1')
+        except TypeError:
+            cifar100 = pickle.load(fd)
+    image = cifar100['data'].astype(dtype=np.uint8)
+    image = image.reshape((-1, 3, 32, 32))
+    label = np.asarray(cifar100['fine_labels'], dtype=np.uint8)
+    label = label.reshape(label.size, 1)
+    return image, label
+
+
+def load_train_data(dir_path='/tmp/cifar-100-python'):
+    images, labels = load_dataset(check_dataset_exist(dir_path + "/train"))
+    return np.array(images, dtype=np.float32), np.array(labels, dtype=np.int32)
+
+
+def load_test_data(dir_path='/tmp/cifar-100-python'):
+    images, labels = load_dataset(check_dataset_exist(dir_path + "/test"))
+    return np.array(images, dtype=np.float32), np.array(labels, dtype=np.int32)
+
+
+def check_dataset_exist(dirpath):
+    if not os.path.exists(dirpath):
+        print(
+            'Please download the cifar100 dataset using python data/download_cifar100.py'
+        )
+        sys.exit(0)
+    return dirpath
+
+
+def normalize(train_x, val_x):
+    mean = [0.4914, 0.4822, 0.4465]
+    std = [0.2023, 0.1994, 0.2010]
+    train_x /= 255
+    val_x /= 255
+    for ch in range(0, 3):  # normalize all three RGB channels
+        train_x[:, ch, :, :] -= mean[ch]
+        train_x[:, ch, :, :] /= std[ch]
+        val_x[:, ch, :, :] -= mean[ch]
+        val_x[:, ch, :, :] /= std[ch]
+    return train_x, val_x
+
+
+def load():
+    train_x, train_y = load_train_data()
+    val_x, val_y = load_test_data()
+    train_x, val_x = normalize(train_x, val_x)
+    train_y = train_y.flatten()
+    val_y = val_y.flatten()
+    return train_x, train_y, val_x, val_y
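
Both loaders share the per-channel standardization above; the loop must cover all three RGB channels or the blue channel would keep its raw scale. A self-contained check with synthetic data (no dataset download needed; the random arrays merely stand in for CIFAR batches):

```python
import numpy as np

mean = [0.4914, 0.4822, 0.4465]
std = [0.2023, 0.1994, 0.2010]

# synthetic NCHW float32 images in [0, 255], standing in for CIFAR data
x = np.random.uniform(0, 255, (8, 3, 32, 32)).astype(np.float32)
x /= 255
for ch in range(3):  # all three channels, matching the fixed loop above
    x[:, ch, :, :] -= mean[ch]
    x[:, ch, :, :] /= std[ch]

# every channel is shifted and scaled; means are ~0 only on real CIFAR data
print(x.mean(axis=(0, 2, 3)), x.std(axis=(0, 2, 3)))
```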

diff --git a/examples/cnn/data/download_cifar10.py b/examples/cnn/data/download_cifar10.py
new file mode 100755
index 0000000..a010b2e
--- /dev/null
+++ b/examples/cnn/data/download_cifar10.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+
+from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
+import urllib.request, urllib.parse, urllib.error
+import tarfile
+import os
+import sys
+
+
+def extract_tarfile(filepath):
+    if os.path.exists(filepath):
+        print('The tar file already exists. Extracting it now...')
+        with tarfile.open(filepath, 'r') as f:
+            f.extractall('/tmp/')
+        print('Finished!')
+        sys.exit(0)
+
+
+def do_download(dirpath, gzfile, url):
+    print('Downloading CIFAR from %s' % (url))
+    urllib.request.urlretrieve(url, gzfile)
+    extract_tarfile(gzfile)
+    print('Finished!')
+
+
+if __name__ == '__main__':
+    dirpath = '/tmp/'
+    gzfile = dirpath + 'cifar-10-python.tar.gz'
+    url = 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
+    do_download(dirpath, gzfile, url)
diff --git a/python/rafiki/__init__.py b/examples/cnn/data/download_cifar100.py
old mode 100644
new mode 100755
similarity index 73%
copy from python/rafiki/__init__.py
copy to examples/cnn/data/download_cifar100.py
index 3aa745b..59f9d23
--- a/python/rafiki/__init__.py
+++ b/examples/cnn/data/download_cifar100.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -16,4 +17,10 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+from download_cifar10 import do_download
+
+if __name__ == '__main__':
+    dirpath = '/tmp/'
+    gzfile = dirpath + 'cifar-100-python.tar.gz'
+    url = 'http://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz'
+    do_download(dirpath, gzfile, url)
diff --git a/examples/cnn/data/download_mnist.py b/examples/cnn/data/download_mnist.py
new file mode 100644
index 0000000..65acb0e
--- /dev/null
+++ b/examples/cnn/data/download_mnist.py
@@ -0,0 +1,49 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import urllib.request
+
+
+def check_exist_or_download(url):
+
+    download_dir = '/tmp/'
+    name = url.rsplit('/', 1)[-1]
+    filename = os.path.join(download_dir, name)
+
+    if not os.path.isfile(filename):
+        print("Downloading %s" % url)
+        urllib.request.urlretrieve(url, filename)
+    else:
+        print("Already Downloaded: %s" % url)
+
+
+if __name__ == '__main__':
+
+    # URLs of the MNIST dataset
+    train_x_url = 'http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz'
+    train_y_url = 'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz'
+    valid_x_url = 'http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz'
+    valid_y_url = 'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz'
+
+    # download the MNIST dataset
+    check_exist_or_download(train_x_url)
+    check_exist_or_download(train_y_url)
+    check_exist_or_download(valid_x_url)
+    check_exist_or_download(valid_y_url)
diff --git a/examples/cnn/data/mnist.py b/examples/cnn/data/mnist.py
new file mode 100644
index 0000000..9cd1a84
--- /dev/null
+++ b/examples/cnn/data/mnist.py
@@ -0,0 +1,91 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import numpy as np
+import os
+import sys
+import gzip
+import codecs
+
+
+def check_dataset_exist(dirpath):
+    if not os.path.exists(dirpath):
+        print(
+            'The MNIST dataset does not exist. Please download the mnist dataset using python data/download_mnist.py'
+        )
+        sys.exit(0)
+    return dirpath
+
+
+def load_dataset():
+    train_x_path = '/tmp/train-images-idx3-ubyte.gz'
+    train_y_path = '/tmp/train-labels-idx1-ubyte.gz'
+    valid_x_path = '/tmp/t10k-images-idx3-ubyte.gz'
+    valid_y_path = '/tmp/t10k-labels-idx1-ubyte.gz'
+
+    train_x = read_image_file(check_dataset_exist(train_x_path)).astype(
+        np.float32)
+    train_y = read_label_file(check_dataset_exist(train_y_path)).astype(
+        np.float32)
+    valid_x = read_image_file(check_dataset_exist(valid_x_path)).astype(
+        np.float32)
+    valid_y = read_label_file(check_dataset_exist(valid_y_path)).astype(
+        np.float32)
+    return train_x, train_y, valid_x, valid_y
+
+
+def read_label_file(path):
+    with gzip.open(path, 'rb') as f:
+        data = f.read()
+        assert get_int(data[:4]) == 2049
+        length = get_int(data[4:8])
+        parsed = np.frombuffer(data, dtype=np.uint8, offset=8).reshape((length))
+        return parsed
+
+
+def get_int(b):
+    return int(codecs.encode(b, 'hex'), 16)
+
+
+def read_image_file(path):
+    with gzip.open(path, 'rb') as f:
+        data = f.read()
+        assert get_int(data[:4]) == 2051
+        length = get_int(data[4:8])
+        num_rows = get_int(data[8:12])
+        num_cols = get_int(data[12:16])
+        parsed = np.frombuffer(data, dtype=np.uint8, offset=16).reshape(
+            (length, 1, num_rows, num_cols))
+        return parsed
+
+
+def normalize(train_x, val_x):
+    train_x /= 255
+    val_x /= 255
+    return train_x, val_x
+
+
+def load():
+    train_x, train_y, val_x, val_y = load_dataset()
+    train_x, val_x = normalize(train_x, val_x)
+    train_x = train_x.astype(np.float32)
+    val_x = val_x.astype(np.float32)
+    train_y = train_y.astype(np.int32)
+    val_y = val_y.astype(np.int32)
+    return train_x, train_y, val_x, val_y
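
get_int reads the big-endian 32-bit fields of the IDX headers (magic numbers 2049 for label files, 2051 for image files). On Python 3 the same value can be cross-checked against int.from_bytes or struct:

```python
import codecs
import struct

def get_int(b):
    return int(codecs.encode(b, 'hex'), 16)

header = bytes([0x00, 0x00, 0x08, 0x03])  # IDX magic for ubyte image files
assert get_int(header) == 2051
assert get_int(header) == int.from_bytes(header, 'big')
assert get_int(header) == struct.unpack('>i', header)[0]
print('big-endian header parse OK')
```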
diff --git a/examples/cnn/model/alexnet.py b/examples/cnn/model/alexnet.py
new file mode 100644
index 0000000..988596e
--- /dev/null
+++ b/examples/cnn/model/alexnet.py
@@ -0,0 +1,115 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import layer
+from singa import model
+
+
+class AlexNet(model.Model):
+
+    def __init__(self, num_classes=10, num_channels=1):
+        super(AlexNet, self).__init__()
+        self.num_classes = num_classes
+        self.input_size = 224
+        self.dimension = 4
+        self.conv1 = layer.Conv2d(num_channels, 64, 11, stride=4, padding=2)
+        self.conv2 = layer.Conv2d(64, 192, 5, padding=2)
+        self.conv3 = layer.Conv2d(192, 384, 3, padding=1)
+        self.conv4 = layer.Conv2d(384, 256, 3, padding=1)
+        self.conv5 = layer.Conv2d(256, 256, 3, padding=1)
+        self.linear1 = layer.Linear(4096)
+        self.linear2 = layer.Linear(4096)
+        self.linear3 = layer.Linear(num_classes)
+        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
+        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
+        self.pooling3 = layer.MaxPool2d(2, 2, padding=0)
+        self.avg_pooling1 = layer.AvgPool2d(3, 2, padding=0)
+        self.relu1 = layer.ReLU()
+        self.relu2 = layer.ReLU()
+        self.relu3 = layer.ReLU()
+        self.relu4 = layer.ReLU()
+        self.relu5 = layer.ReLU()
+        self.relu6 = layer.ReLU()
+        self.relu7 = layer.ReLU()
+        self.flatten = layer.Flatten()
+        self.dropout1 = layer.Dropout()
+        self.dropout2 = layer.Dropout()
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+    def forward(self, x):
+        y = self.conv1(x)
+        y = self.relu1(y)
+        y = self.pooling1(y)
+        y = self.conv2(y)
+        y = self.relu2(y)
+        y = self.pooling2(y)
+        y = self.conv3(y)
+        y = self.relu3(y)
+        y = self.conv4(y)
+        y = self.relu4(y)
+        y = self.conv5(y)
+        y = self.relu5(y)
+        y = self.pooling3(y)
+        y = self.avg_pooling1(y)
+        y = self.flatten(y)
+        y = self.dropout1(y)
+        y = self.linear1(y)
+        y = self.relu6(y)
+        y = self.dropout2(y)
+        y = self.linear2(y)
+        y = self.relu7(y)
+        y = self.linear3(y)
+        return y
+
+    def train_one_batch(self, x, y, dist_option, spars):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+
+        if dist_option == 'fp32':
+            self.optimizer(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a AlexNet model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained
+    """
+    model = AlexNet(**kwargs)
+
+    return model
+
+
+__all__ = ['AlexNet', 'create_model']
diff --git a/examples/cnn/model/cnn.py b/examples/cnn/model/cnn.py
new file mode 100644
index 0000000..28ecd6c
--- /dev/null
+++ b/examples/cnn/model/cnn.py
@@ -0,0 +1,87 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import layer
+from singa import model
+
+
+class CNN(model.Model):
+
+    def __init__(self, num_classes=10, num_channels=1):
+        super(CNN, self).__init__()
+        self.num_classes = num_classes
+        self.input_size = 28
+        self.dimension = 4
+        self.conv1 = layer.Conv2d(num_channels, 20, 5, padding=0, activation="RELU")
+        self.conv2 = layer.Conv2d(20, 50, 5, padding=0, activation="RELU")
+        self.linear1 = layer.Linear(500)
+        self.linear2 = layer.Linear(num_classes)
+        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
+        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
+        self.relu = layer.ReLU()
+        self.flatten = layer.Flatten()
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+    def forward(self, x):
+        y = self.conv1(x)
+        y = self.pooling1(y)
+        y = self.conv2(y)
+        y = self.pooling2(y)
+        y = self.flatten(y)
+        y = self.linear1(y)
+        y = self.relu(y)
+        y = self.linear2(y)
+        return y
+
+    def train_one_batch(self, x, y, dist_option, spars):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+
+        if dist_option == 'fp32':
+            self.optimizer(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a CNN model.
+
+    Args:
+        pretrained (bool): if True, returns a pre-trained model (currently unused)
+    """
+    model = CNN(**kwargs)
+
+    return model
+
+
+__all__ = ['CNN', 'create_model']
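
The same if/elif chain over dist_option recurs in every model's train_one_batch in this change. A dictionary dispatch is one compact alternative; the sketch below uses a hypothetical StubOpt so it runs without SINGA installed, with method names mirroring the optimizer calls above:

```python
class StubOpt:
    """Hypothetical stand-in for opt.SGD / opt.DistOpt, for illustration only."""

    def __call__(self, loss):
        print('fp32 update on', loss)

    def backward_and_update_half(self, loss):
        print('fp16 update on', loss)

    def backward_and_partial_update(self, loss):
        print('partial update on', loss)

    def backward_and_sparse_update(self, loss, topK, spars):
        print('sparse update on', loss, 'topK =', topK, 'spars =', spars)


def apply_update(optimizer, loss, dist_option='fp32', spars=None):
    dispatch = {
        'fp32': lambda: optimizer(loss),
        'fp16': lambda: optimizer.backward_and_update_half(loss),
        'partialUpdate': lambda: optimizer.backward_and_partial_update(loss),
        'sparseTopK': lambda: optimizer.backward_and_sparse_update(
            loss, topK=True, spars=spars),
        'sparseThreshold': lambda: optimizer.backward_and_sparse_update(
            loss, topK=False, spars=spars),
    }
    dispatch[dist_option]()


apply_update(StubOpt(), loss=0.7, dist_option='sparseTopK', spars=0.05)
```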
diff --git a/examples/cnn/model/resnet.py b/examples/cnn/model/resnet.py
new file mode 100644
index 0000000..2b2a7fd
--- /dev/null
+++ b/examples/cnn/model/resnet.py
@@ -0,0 +1,280 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# the code is modified from
+# https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
+
+from singa import layer
+from singa import model
+
+
+def conv3x3(in_planes, out_planes, stride=1):
+    """3x3 convolution with padding"""
+    return layer.Conv2d(
+        in_planes,
+        out_planes,
+        3,
+        stride=stride,
+        padding=1,
+        bias=False,
+    )
+
+
+class BasicBlock(layer.Layer):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
+        self.bn1 = layer.BatchNorm2d(planes)
+        self.conv2 = conv3x3(planes, planes)
+        self.bn2 = layer.BatchNorm2d(planes)
+        self.relu1 = layer.ReLU()
+        self.add = layer.Add()
+        self.relu2 = layer.ReLU()
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu1(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out = self.add(out, residual)
+        out = self.relu2(out)
+
+        return out
+
+
+class Bottleneck(layer.Layer):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = layer.Conv2d(inplanes, planes, 1, bias=False)
+        self.bn1 = layer.BatchNorm2d(planes)
+        self.relu1 = layer.ReLU()
+        self.conv2 = layer.Conv2d(planes,
+                                  planes,
+                                  3,
+                                  stride=stride,
+                                  padding=1,
+                                  bias=False)
+        self.bn2 = layer.BatchNorm2d(planes)
+        self.relu2 = layer.ReLU()
+        self.conv3 = layer.Conv2d(planes,
+                                  planes * self.expansion,
+                                  1,
+                                  bias=False)
+        self.bn3 = layer.BatchNorm2d(planes * self.expansion)
+
+        self.add = layer.Add()
+        self.relu3 = layer.ReLU()
+
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu1(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu2(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out = self.add(out, residual)
+        out = self.relu3(out)
+
+        return out
+
+
+class ResNet(model.Model):
+
+    def __init__(self, block, layers, num_classes=10, num_channels=3):
+        self.inplanes = 64
+        super(ResNet, self).__init__()
+        self.num_classes = num_classes
+        self.input_size = 224
+        self.dimension = 4
+        self.conv1 = layer.Conv2d(num_channels,
+                                  64,
+                                  7,
+                                  stride=2,
+                                  padding=3,
+                                  bias=False)
+        self.bn1 = layer.BatchNorm2d(64)
+        self.relu = layer.ReLU()
+        self.maxpool = layer.MaxPool2d(kernel_size=3, stride=2, padding=1)
+        self.layer1, layers1 = self._make_layer(block, 64, layers[0])
+        self.layer2, layers2 = self._make_layer(block, 128, layers[1], stride=2)
+        self.layer3, layers3 = self._make_layer(block, 256, layers[2], stride=2)
+        self.layer4, layers4 = self._make_layer(block, 512, layers[3], stride=2)
+        self.avgpool = layer.AvgPool2d(7, stride=1)
+        self.flatten = layer.Flatten()
+        self.fc = layer.Linear(num_classes)
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+        self.register_layers(*layers1, *layers2, *layers3, *layers4)
+
+    def _make_layer(self, block, planes, blocks, stride=1):
+        downsample = None
+        if stride != 1 or self.inplanes != planes * block.expansion:
+            conv = layer.Conv2d(
+                self.inplanes,
+                planes * block.expansion,
+                1,
+                stride=stride,
+                bias=False,
+            )
+            bn = layer.BatchNorm2d(planes * block.expansion)
+
+            def _downsample(x):
+                return bn(conv(x))
+
+            downsample = _downsample
+
+        layers = []
+        layers.append(block(self.inplanes, planes, stride, downsample))
+        self.inplanes = planes * block.expansion
+        for i in range(1, blocks):
+            layers.append(block(self.inplanes, planes))
+
+        def forward(x):
+            for blk in layers:  # avoid shadowing the 'layer' module
+                x = blk(x)
+            return x
+
+        return forward, layers
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.bn1(x)
+        x = self.relu(x)
+        x = self.maxpool(x)
+
+        x = self.layer1(x)
+        x = self.layer2(x)
+        x = self.layer3(x)
+        x = self.layer4(x)
+
+        x = self.avgpool(x)
+        x = self.flatten(x)
+        x = self.fc(x)
+
+        return x
+
+    def train_one_batch(self, x, y, dist_option, spars):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+
+        if dist_option == 'fp32':
+            self.optimizer(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def resnet18(pretrained=False, **kwargs):
+    """Constructs a ResNet-18 model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
+
+    return model
+
+
+def resnet34(pretrained=False, **kwargs):
+    """Constructs a ResNet-34 model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
+
+    return model
+
+
+def resnet50(pretrained=False, **kwargs):
+    """Constructs a ResNet-50 model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
+
+    return model
+
+
+def resnet101(pretrained=False, **kwargs):
+    """Constructs a ResNet-101 model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
+
+    return model
+
+
+def resnet152(pretrained=False, **kwargs):
+    """Constructs a ResNet-152 model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on ImageNet
+    """
+    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
+
+    return model
+
+
+__all__ = [
+    'ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152'
+]
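
_make_layer returns a closure plus the block list: the closure chains the blocks in order, while the list is handed to register_layers so the blocks' parameters stay visible to the model (a bare closure would hide them). The same pattern in dependency-free form:

```python
def make_stage(blocks):
    # blocks: ordered list of callables (stand-ins for BasicBlock/Bottleneck)
    def forward(x):
        for blk in blocks:
            x = blk(x)
        return x
    # return the list too, so the caller can register the blocks' parameters
    return forward, blocks

stage, registered = make_stage([lambda x: x + 1, lambda x: x * 2])
print(stage(3))         # (3 + 1) * 2 = 8
print(len(registered))  # 2 blocks available for registration
```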
diff --git a/examples/cnn/model/xceptionnet.py b/examples/cnn/model/xceptionnet.py
new file mode 100644
index 0000000..524e3f6
--- /dev/null
+++ b/examples/cnn/model/xceptionnet.py
@@ -0,0 +1,308 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+# the code is modified from
+# https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/xception.py
+
+from singa import layer
+from singa import model
+
+
+class Block(layer.Layer):
+
+    def __init__(self,
+                 in_filters,
+                 out_filters,
+                 reps,
+                 strides=1,
+                 padding=0,
+                 start_with_relu=True,
+                 grow_first=True):
+        super(Block, self).__init__()
+
+        if out_filters != in_filters or strides != 1:
+            self.skip = layer.Conv2d(in_filters,
+                                     out_filters,
+                                     1,
+                                     stride=strides,
+                                     padding=padding,
+                                     bias=False)
+            self.skipbn = layer.BatchNorm2d(out_filters)
+        else:
+            self.skip = None
+
+        self.layers = []
+
+        filters = in_filters
+        if grow_first:
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(in_filters,
+                                      out_filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(out_filters))
+            filters = out_filters
+
+        for i in range(reps - 1):
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(filters,
+                                      filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(filters))
+
+        if not grow_first:
+            self.layers.append(layer.ReLU())
+            self.layers.append(
+                layer.SeparableConv2d(in_filters,
+                                      out_filters,
+                                      3,
+                                      stride=1,
+                                      padding=1,
+                                      bias=False))
+            self.layers.append(layer.BatchNorm2d(out_filters))
+
+        if not start_with_relu:
+            self.layers = self.layers[1:]
+        else:
+            self.layers[0] = layer.ReLU()
+
+        if strides != 1:
+            self.layers.append(layer.MaxPool2d(3, strides, padding + 1))
+
+        self.register_layers(*self.layers)
+
+        self.add = layer.Add()
+
+    def forward(self, x):
+        y = self.layers[0](x)
+        for lyr in self.layers[1:]:  # avoid shadowing the 'layer' module
+            if isinstance(y, tuple):
+                y = y[0]
+            y = lyr(y)
+
+        if self.skip is not None:
+            skip = self.skip(x)
+            skip = self.skipbn(skip)
+        else:
+            skip = x
+        y = self.add(y, skip)
+        return y
+
+
+class Xception(model.Model):
+    """
+    Xception optimized for the ImageNet dataset, as specified in
+    https://arxiv.org/pdf/1610.02357.pdf
+    """
+
+    def __init__(self, num_classes=10, num_channels=3):
+        """ Constructor
+        Args:
+            num_classes: number of classes
+        """
+        super(Xception, self).__init__()
+        self.num_classes = num_classes
+        self.input_size = 299
+        self.dimension = 4
+
+        self.conv1 = layer.Conv2d(num_channels, 32, 3, 2, 0, bias=False)
+        self.bn1 = layer.BatchNorm2d(32)
+        self.relu1 = layer.ReLU()
+
+        self.conv2 = layer.Conv2d(32, 64, 3, 1, 1, bias=False)
+        self.bn2 = layer.BatchNorm2d(64)
+        self.relu2 = layer.ReLU()
+        # do relu here
+
+        self.block1 = Block(64,
+                            128,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=False,
+                            grow_first=True)
+        self.block2 = Block(128,
+                            256,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block3 = Block(256,
+                            728,
+                            2,
+                            2,
+                            padding=0,
+                            start_with_relu=True,
+                            grow_first=True)
+
+        self.block4 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block5 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block6 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block7 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+
+        self.block8 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block9 = Block(728,
+                            728,
+                            3,
+                            1,
+                            start_with_relu=True,
+                            grow_first=True)
+        self.block10 = Block(728,
+                             728,
+                             3,
+                             1,
+                             start_with_relu=True,
+                             grow_first=True)
+        self.block11 = Block(728,
+                             728,
+                             3,
+                             1,
+                             start_with_relu=True,
+                             grow_first=True)
+
+        self.block12 = Block(728,
+                             1024,
+                             2,
+                             2,
+                             start_with_relu=True,
+                             grow_first=False)
+
+        self.conv3 = layer.SeparableConv2d(1024, 1536, 3, 1, 1)
+        self.bn3 = layer.BatchNorm2d(1536)
+        self.relu3 = layer.ReLU()
+
+        # do relu here
+        self.conv4 = layer.SeparableConv2d(1536, 2048, 3, 1, 1)
+        self.bn4 = layer.BatchNorm2d(2048)
+
+        self.relu4 = layer.ReLU()
+        self.globalpooling = layer.MaxPool2d(10, 1)
+        self.flatten = layer.Flatten()
+        self.fc = layer.Linear(num_classes)
+
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+    def features(self, input):
+        x = self.conv1(input)
+        x = self.bn1(x)
+        x = self.relu1(x)
+
+        x = self.conv2(x)
+        x = self.bn2(x)
+        x = self.relu2(x)
+
+        x = self.block1(x)
+        x = self.block2(x)
+        x = self.block3(x)
+        x = self.block4(x)
+        x = self.block5(x)
+        x = self.block6(x)
+        x = self.block7(x)
+        x = self.block8(x)
+        x = self.block9(x)
+        x = self.block10(x)
+        x = self.block11(x)
+        x = self.block12(x)
+
+        x = self.conv3(x)
+        x = self.bn3(x)
+        x = self.relu3(x)
+
+        x = self.conv4(x)
+        x = self.bn4(x)
+        return x
+
+    def logits(self, features):
+        x = self.relu4(features)
+        x = self.globalpooling(x)
+        x = self.flatten(x)
+        x = self.fc(x)
+        return x
+
+    def forward(self, x):
+        x = self.features(x)
+        x = self.logits(x)
+        return x
+
+    def train_one_batch(self, x, y, dist_option, spars):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+        if dist_option == 'fp32':
+            self.optimizer(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a Xceptionnet model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained
+    """
+    model = Xception(**kwargs)
+
+    return model
+
+
+__all__ = ['Xception', 'create_model']
diff --git a/examples/cnn/train_cnn.py b/examples/cnn/train_cnn.py
new file mode 100644
index 0000000..e4fd962
--- /dev/null
+++ b/examples/cnn/train_cnn.py
@@ -0,0 +1,313 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import singa_wrap as singa
+from singa import device
+from singa import tensor
+from singa import opt
+import numpy as np
+import time
+import argparse
+from PIL import Image
+
+
+# Data Augmentation
+def augmentation(x, batch_size):
+    xpad = np.pad(x, [[0, 0], [0, 0], [4, 4], [4, 4]], 'symmetric')
+    for data_num in range(0, batch_size):
+        offset = np.random.randint(8, size=2)
+        x[data_num, :, :, :] = xpad[data_num, :,
+                                    offset[0]:offset[0] + x.shape[2],
+                                    offset[1]:offset[1] + x.shape[2]]
+        if_flip = np.random.randint(2)
+        if (if_flip):
+            x[data_num, :, :, :] = x[data_num, :, :, ::-1]
+    return x
+
+
+# Calculate Accuracy
+def accuracy(pred, target):
+    # y is network output to be compared with ground truth (int)
+    y = np.argmax(pred, axis=1)
+    a = y == target
+    correct = np.array(a, "int").sum()
+    # print(correct)
+    return correct
+
+
+# Data partition according to the rank
+def partition(global_rank, world_size, train_x, train_y, val_x, val_y):
+    # Partition training data
+    data_per_rank = train_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    train_x = train_x[idx_start:idx_end]
+    train_y = train_y[idx_start:idx_end]
+    # Partition evaluation data
+    data_per_rank = val_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    val_x = val_x[idx_start:idx_end]
+    val_y = val_y[idx_start:idx_end]
+    return train_x, train_y, val_x, val_y
+
+
+# All-reduce a numpy accuracy/loss value across multiple devices
+def reduce_variable(variable, dist_opt, reducer):
+    reducer.copy_from_numpy(variable)
+    dist_opt.all_reduce(reducer.data)
+    dist_opt.wait()
+    output = tensor.to_numpy(reducer)
+    return output
+
+
+def resize_dataset(x, image_size):
+    num_data = x.shape[0]
+    dim = x.shape[1]
+    X = np.zeros(shape=(num_data, dim, image_size, image_size),
+                 dtype=np.float32)
+    for n in range(0, num_data):
+        for d in range(0, dim):
+            X[n, d, :, :] = np.array(Image.fromarray(x[n, d, :, :]).resize(
+                (image_size, image_size), Image.BILINEAR),
+                                     dtype=np.float32)
+    return X
+
+
+def run(global_rank,
+        world_size,
+        local_rank,
+        max_epoch,
+        batch_size,
+        model,
+        data,
+        sgd,
+        graph,
+        verbosity,
+        dist_option='fp32',
+        spars=None):
+    dev = device.create_cuda_gpu_on(local_rank)
+    dev.SetRandSeed(0)
+    np.random.seed(0)
+
+    if data == 'cifar10':
+        from data import cifar10
+        train_x, train_y, val_x, val_y = cifar10.load()
+    elif data == 'cifar100':
+        from data import cifar100
+        train_x, train_y, val_x, val_y = cifar100.load()
+    elif data == 'mnist':
+        from data import mnist
+        train_x, train_y, val_x, val_y = mnist.load()
+
+    num_channels = train_x.shape[1]
+    image_size = train_x.shape[2]
+    data_size = np.prod(train_x.shape[1:train_x.ndim]).item()
+    num_classes = (np.max(train_y) + 1).item()
+    #print(num_classes)
+
+    if model == 'resnet':
+        from model import resnet
+        model = resnet.resnet50(num_channels=num_channels,
+                                num_classes=num_classes)
+    elif model == 'xceptionnet':
+        from model import xceptionnet
+        model = xceptionnet.create_model(num_channels=num_channels,
+                                         num_classes=num_classes)
+    elif model == 'cnn':
+        from model import cnn
+        model = cnn.create_model(num_channels=num_channels,
+                                 num_classes=num_classes)
+    elif model == 'alexnet':
+        from model import alexnet
+        model = alexnet.create_model(num_channels=num_channels,
+                                     num_classes=num_classes)
+    elif model == 'mlp':
+        import os, sys, inspect
+        current = os.path.dirname(
+            os.path.abspath(inspect.getfile(inspect.currentframe())))
+        parent = os.path.dirname(current)
+        sys.path.insert(0, parent)
+        from mlp import model
+        model = model.create_model(data_size=data_size,
+                                    num_classes=num_classes)
+
+    # For distributed training, sequential gives better performance
+    if hasattr(sgd, "communicator"):
+        DIST = True
+        sequential = True
+    else:
+        DIST = False
+        sequential = False
+
+    if DIST:
+        train_x, train_y, val_x, val_y = partition(global_rank, world_size,
+                                                   train_x, train_y, val_x,
+                                                   val_y)
+    '''
+    # check dataset shape correctness
+    if global_rank == 0:
+        print("Check the shape of dataset:")
+        print(train_x.shape)
+        print(train_y.shape)
+    '''
+
+    if model.dimension == 4:
+        tx = tensor.Tensor(
+            (batch_size, num_channels, model.input_size, model.input_size), dev,
+            tensor.float32)
+    elif model.dimension == 2:
+        tx = tensor.Tensor((batch_size, data_size), dev, tensor.float32)
+        train_x = np.reshape(train_x, (train_x.shape[0], -1))
+        val_x = np.reshape(val_x, (val_x.shape[0], -1))
+
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    num_train_batch = train_x.shape[0] // batch_size
+    num_val_batch = val_x.shape[0] // batch_size
+    idx = np.arange(train_x.shape[0], dtype=np.int32)
+
+    # attach the model to the computational graph
+    model.set_optimizer(sgd)
+    model.compile([tx], is_train=True, use_graph=graph, sequential=sequential)
+    dev.SetVerbosity(verbosity)
+
+    # Training and Evaluation Loop
+    for epoch in range(max_epoch):
+        start_time = time.time()
+        np.random.shuffle(idx)
+
+        if global_rank == 0:
+            print('Starting Epoch %d:' % (epoch))
+
+        # Training Phase
+        train_correct = np.zeros(shape=[1], dtype=np.float32)
+        test_correct = np.zeros(shape=[1], dtype=np.float32)
+        train_loss = np.zeros(shape=[1], dtype=np.float32)
+
+        model.train()
+        for b in range(num_train_batch):
+            # Generate the batch data for this iteration
+            x = train_x[idx[b * batch_size:(b + 1) * batch_size]]
+            if model.dimension == 4:
+                x = augmentation(x, batch_size)
+                if (image_size != model.input_size):
+                    x = resize_dataset(x, model.input_size)
+            y = train_y[idx[b * batch_size:(b + 1) * batch_size]]
+
+            # Copy the batch data into the input tensors
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+
+            # Train the model
+            out, loss = model(tx, ty, dist_option, spars)
+            train_correct += accuracy(tensor.to_numpy(out), y)
+            train_loss += tensor.to_numpy(loss)[0]
+
+        if DIST:
+            # Reduce the training accuracy and loss from multiple devices
+            reducer = tensor.Tensor((1,), dev, tensor.float32)
+            train_correct = reduce_variable(train_correct, sgd, reducer)
+            train_loss = reduce_variable(train_loss, sgd, reducer)
+
+        if global_rank == 0:
+            print('Training loss = %f, training accuracy = %f' %
+                  (train_loss, train_correct /
+                   (num_train_batch * batch_size * world_size)),
+                  flush=True)
+
+        # Evaluation Phase
+        model.eval()
+        for b in range(num_val_batch):
+            x = val_x[b * batch_size:(b + 1) * batch_size]
+            if model.dimension == 4:
+                if (image_size != model.input_size):
+                    x = resize_dataset(x, model.input_size)
+            y = val_y[b * batch_size:(b + 1) * batch_size]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out_test = model(tx)
+            test_correct += accuracy(tensor.to_numpy(out_test), y)
+
+        if DIST:
+            # Reduce the evaluation accuracy from multiple devices
+            test_correct = reduce_variable(test_correct, sgd, reducer)
+
+        # Output the Evaluation Accuracy
+        if global_rank == 0:
+            print('Evaluation accuracy = %f, Elapsed Time = %fs' %
+                  (test_correct / (num_val_batch * batch_size * world_size),
+                   time.time() - start_time),
+                  flush=True)
+
+    dev.PrintTimeProfiling()
+
+
+if __name__ == '__main__':
+    # use argparse to get the command-line config: max_epoch, model, data, etc. for single-GPU training
+    parser = argparse.ArgumentParser(
+        description='Training using the autograd and graph.')
+    parser.add_argument('model',
+                        choices=['resnet', 'xceptionnet', 'cnn', 'mlp', 'alexnet'],
+                        default='cnn')
+    parser.add_argument('data',
+                        choices=['cifar10', 'cifar100', 'mnist'],
+                        default='mnist')
+    parser.add_argument('-m',
+                        '--max-epoch',
+                        default=10,
+                        type=int,
+                        help='maximum epochs',
+                        dest='max_epoch')
+    parser.add_argument('-b',
+                        '--batch-size',
+                        default=64,
+                        type=int,
+                        help='batch size',
+                        dest='batch_size')
+    parser.add_argument('-l',
+                        '--learning-rate',
+                        default=0.005,
+                        type=float,
+                        help='initial learning rate',
+                        dest='lr')
+    # determine which gpu to use
+    parser.add_argument('-i',
+                        '--device-id',
+                        default=0,
+                        type=int,
+                        help='which GPU to use',
+                        dest='device_id')
+    parser.add_argument('-g',
+                        '--disable-graph',
+                        default=True,
+                        action='store_false',
+                        help='disable graph',
+                        dest='graph')
+    parser.add_argument('-v',
+                        '--log-verbosity',
+                        default=0,
+                        type=int,
+                        help='logging verbosity',
+                        dest='verbosity')
+
+    args = parser.parse_args()
+
+    sgd = opt.SGD(lr=args.lr, momentum=0.9, weight_decay=1e-5)
+    run(0, 1, args.device_id, args.max_epoch, args.batch_size, args.model,
+        args.data, sgd, args.graph, args.verbosity)
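
partition gives each rank an equal contiguous shard (rows beyond world_size * data_per_rank are silently dropped), and accuracy counts argmax matches. Both are plain numpy, so the arithmetic is easy to sanity-check in isolation:

```python
import numpy as np

def shard_bounds(n_rows, global_rank, world_size):
    # same slicing arithmetic as partition() above, for one array
    per_rank = n_rows // world_size
    return global_rank * per_rank, (global_rank + 1) * per_rank

print(shard_bounds(50000, 0, 4))  # (0, 12500)
print(shard_bounds(50000, 3, 4))  # (37500, 50000)

def accuracy(pred, target):
    y = np.argmax(pred, axis=1)
    return np.array(y == target, "int").sum()

pred = np.array([[0.9, 0.1], [0.2, 0.8], [0.6, 0.4]])
print(accuracy(pred, np.array([0, 1, 1])))  # 2 of 3 correct
```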
diff --git a/examples/cnn/train_mpi.py b/examples/cnn/train_mpi.py
new file mode 100644
index 0000000..fd78b12
--- /dev/null
+++ b/examples/cnn/train_mpi.py
@@ -0,0 +1,84 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+from singa import singa_wrap as singa
+from singa import opt
+import argparse
+import train_cnn
+
+if __name__ == '__main__':
+    # use argparse to get the command-line config: max_epoch, model, data, etc. for MPI-based distributed training
+    parser = argparse.ArgumentParser(
+        description='Training using the autograd and graph.')
+    parser.add_argument('model',
+                        choices=['resnet', 'xceptionnet', 'cnn', 'mlp'],
+                        default='cnn')
+    parser.add_argument('data', choices=['cifar10', 'cifar100', 'mnist'], default='mnist')
+    parser.add_argument('-m',
+                        '--max-epoch',
+                        default=10,
+                        type=int,
+                        help='maximum epochs',
+                        dest='max_epoch')
+    parser.add_argument('-b',
+                        '--batch-size',
+                        default=64,
+                        type=int,
+                        help='batch size',
+                        dest='batch_size')
+    parser.add_argument('-l',
+                        '--learning-rate',
+                        default=0.005,
+                        type=float,
+                        help='initial learning rate',
+                        dest='lr')
+    parser.add_argument('-d',
+                        '--dist-option',
+                        default='fp32',
+                        choices=['fp32', 'fp16', 'partialUpdate', 'sparseTopK', 'sparseThreshold'],
+                        help='distributed training options',
+                        dest='dist_option')  # currently partialUpdate supports graph=False only
+    parser.add_argument('-s',
+                        '--sparsification',
+                        default=0.05,
+                        type=float,
+                        help='the sparsity parameter used for sparsification, between 0 and 1',
+                        dest='spars')
+    parser.add_argument('-g',
+                        '--disable-graph',
+                        default=True,
+                        action='store_false',
+                        help='disable graph',
+                        dest='graph')
+    parser.add_argument('-v',
+                        '--log-verbosity',
+                        default=0,
+                        type=int,
+                        help='logging verbosity',
+                        dest='verbosity')
+
+    args = parser.parse_args()
+
+    sgd = opt.SGD(lr=args.lr, momentum=0.9, weight_decay=1e-5)
+    sgd = opt.DistOpt(sgd)
+
+    train_cnn.run(sgd.global_rank, sgd.world_size, sgd.local_rank, args.max_epoch,
+                  args.batch_size, args.model, args.data, sgd, args.graph,
+                  args.verbosity, args.dist_option, args.spars)
diff --git a/examples/cnn/train_multiprocess.py b/examples/cnn/train_multiprocess.py
new file mode 100644
index 0000000..9972ddd
--- /dev/null
+++ b/examples/cnn/train_multiprocess.py
@@ -0,0 +1,107 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+from singa import singa_wrap as singa
+from singa import opt
+import argparse
+import train_cnn
+import multiprocessing
+
+def run(args, local_rank, world_size, nccl_id):
+    sgd = opt.SGD(lr=args.lr, momentum=0.9, weight_decay=1e-5)
+    sgd = opt.DistOpt(sgd, nccl_id=nccl_id, local_rank=local_rank, world_size=world_size)
+    train_cnn.run(sgd.global_rank, sgd.world_size, sgd.local_rank, args.max_epoch,
+                  args.batch_size, args.model, args.data, sgd, args.graph,
+                  args.verbosity, args.dist_option, args.spars)
+
+
+if __name__ == '__main__':
+    # use argparse to get the command-line config: max_epoch, model, data, etc. for multi-process distributed training
+    parser = argparse.ArgumentParser(
+        description='Training using the autograd and graph.')
+    parser.add_argument('model',
+                        choices=['resnet', 'xceptionnet', 'cnn', 'mlp'],
+                        default='cnn')
+    parser.add_argument('data', choices=['cifar10', 'cifar100', 'mnist'], default='mnist')
+    parser.add_argument('-m',
+                        '--max-epoch',
+                        default=10,
+                        type=int,
+                        help='maximum epochs',
+                        dest='max_epoch')
+    parser.add_argument('-b',
+                        '--batch-size',
+                        default=64,
+                        type=int,
+                        help='batch size',
+                        dest='batch_size')
+    parser.add_argument('-l',
+                        '--learning-rate',
+                        default=0.005,
+                        type=float,
+                        help='initial learning rate',
+                        dest='lr')
+    parser.add_argument('-w',
+                        '--world-size',
+                        default=2,
+                        type=int,
+                        help='number of gpus to be used',
+                        dest='world_size')
+    parser.add_argument('-d',
+                        '--dist-option',
+                        default='fp32',
+                        choices=['fp32', 'fp16', 'partialUpdate', 'sparseTopK',
+                                 'sparseThreshold'],
+                        help='distributed training options',
+                        dest='dist_option')  # currently partialUpdate supports graph=False only
+    parser.add_argument('-s',
+                        '--sparsification',
+                        default=0.05,
+                        type=float,
+                        help='the sparsity parameter used for sparsification, between 0 and 1',
+                        dest='spars')
+    parser.add_argument('-g',
+                        '--disable-graph',
+                        default=True,
+                        action='store_false',
+                        help='disable graph',
+                        dest='graph')
+    parser.add_argument('-v',
+                        '--log-verbosity',
+                        default=0,
+                        type=int,
+                        help='logging verbosity',
+                        dest='verbosity')
+
+    args = parser.parse_args()
+
+    # Generate a NCCL ID to be used for collective communication
+    nccl_id = singa.NcclIdHolder()
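+    # every process must receive the same id, otherwise their NCCL
+    # communicators cannot rendezvous with each other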
+
+    process = []
+    for local_rank in range(0, args.world_size):
+        process.append(
+            multiprocessing.Process(target=run,
+                                    args=(args, local_rank, args.world_size, nccl_id)))
+
+    for p in process:
+        p.start()
+
+    for p in process:
+        p.join()
diff --git a/examples/cifar10/CMakeLists.txt b/examples/cpp/cifar10/CMakeLists.txt
similarity index 77%
rename from examples/cifar10/CMakeLists.txt
rename to examples/cpp/cifar10/CMakeLists.txt
index 8026467..3fc4e57 100644
--- a/examples/cifar10/CMakeLists.txt
+++ b/examples/cpp/cifar10/CMakeLists.txt
@@ -20,16 +20,16 @@
 INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/include)
 
 
-ADD_EXECUTABLE(alexnet alexnet.cc)
-ADD_DEPENDENCIES(alexnet singa)
-TARGET_LINK_LIBRARIES(alexnet singa) 
+ADD_EXECUTABLE(cnn cnn.cc)
+ADD_DEPENDENCIES(cnn singa)
+TARGET_LINK_LIBRARIES(cnn singa)
 
 IF(USE_CUDNN)
 
-ADD_EXECUTABLE(alexnet-parallel alexnet-parallel.cc)
-ADD_DEPENDENCIES(alexnet-parallel singa)
-TARGET_LINK_LIBRARIES(alexnet-parallel singa)
-#SET_TARGET_PROPERTIES(alexnet-parallel PROPERTIES LINK_FLAGS "${LINK_FLAGS} -pthread")
+ADD_EXECUTABLE(cnn-parallel cnn-parallel.cc)
+ADD_DEPENDENCIES(cnn-parallel singa)
+TARGET_LINK_LIBRARIES(cnn-parallel singa)
+#SET_TARGET_PROPERTIES(cnn-parallel PROPERTIES LINK_FLAGS "${LINK_FLAGS} -pthread")
 
 ADD_EXECUTABLE(vgg-parallel vgg-parallel.cc)
 ADD_DEPENDENCIES(vgg-parallel singa)
diff --git a/examples/cifar10/README.md b/examples/cpp/cifar10/README.md
similarity index 78%
rename from examples/cifar10/README.md
rename to examples/cpp/cifar10/README.md
index 65df5e6..7bb63e7 100644
--- a/examples/cifar10/README.md
+++ b/examples/cpp/cifar10/README.md
@@ -1,3 +1,21 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
 # Train CNN over Cifar-10
 
 
@@ -41,7 +59,7 @@
 
         python train.py vgg cifar-10-batches-py
 
-    To train other models, please replace 'vgg' to 'alexnet', 'resnet' or 'caffe', 
+    To train other models, please replace 'vgg' with 'alexnet', 'resnet' or 'caffe',
     where 'caffe' refers to the alexnet model converted from Caffe. By default
     the training would run on a CudaGPU device, to run it on CppCPU, add an additional
     argument
diff --git a/examples/cifar10/cifar10.h b/examples/cpp/cifar10/cifar10.h
similarity index 100%
rename from examples/cifar10/cifar10.h
rename to examples/cpp/cifar10/cifar10.h
diff --git a/examples/cifar10/alexnet-parallel.cc b/examples/cpp/cifar10/cnn-parallel.cc
similarity index 96%
rename from examples/cifar10/alexnet-parallel.cc
rename to examples/cpp/cifar10/cnn-parallel.cc
index 8cc3352..4bee575 100644
--- a/examples/cifar10/alexnet-parallel.cc
+++ b/examples/cpp/cifar10/cnn-parallel.cc
@@ -154,20 +154,20 @@
     train_y = train.second;
 
     LOG(INFO) << "Slicing training data...";
-    train_x_1.Reshape(Shape{nsamples / 2, train.first.shape(1),
+    train_x_1 = Tensor(Shape{nsamples / 2, train.first.shape(1),
         train.first.shape(2), train.first.shape(3)});
     LOG(INFO) << "Copying first data slice...";
     CopyDataToFrom(&train_x_1, train_x, train_x.Size() / 2);
-    train_x_2.Reshape(Shape{nsamples / 2, train.first.shape(1),
+    train_x_2 = Tensor(Shape{nsamples / 2, train.first.shape(1),
         train.first.shape(2), train.first.shape(3)});
     LOG(INFO) << "Copying second data slice...";
     CopyDataToFrom(&train_x_2, train_x, train_x.Size() / 2, 0,
                    train_x.Size() / 2);
-    train_y_1.Reshape(Shape{nsamples / 2});
+    train_y_1 = Tensor(Shape{nsamples / 2});
     train_y_1.AsType(kInt);
     LOG(INFO) << "Copying first label slice...";
     CopyDataToFrom(&train_y_1, train_y, train_y.Size() / 2);
-    train_y_2.Reshape(Shape{nsamples / 2});
+    train_y_2 = Tensor(Shape{nsamples / 2});
     train_y_2.AsType(kInt);
     LOG(INFO) << "Copying second label slice...";
     CopyDataToFrom(&train_y_2, train_y, train_y.Size() / 2, 0,
diff --git a/examples/cifar10/alexnet.cc b/examples/cpp/cifar10/cnn.cc
similarity index 98%
rename from examples/cifar10/alexnet.cc
rename to examples/cpp/cifar10/cnn.cc
index 61097b6..8af8a2f 100644
--- a/examples/cifar10/alexnet.cc
+++ b/examples/cpp/cifar10/cnn.cc
@@ -144,7 +144,7 @@
     auto train = data.ReadTrainData();
     size_t nsamples = train.first.shape(0);
     auto mtrain =
-        Reshape(train.first, Shape{nsamples, train.first.Size() / nsamples});
+         Reshape(train.first, Shape{nsamples, train.first.Size() / nsamples});
     const Tensor& mean = Average(mtrain, 0);
     SubRow(mean, &mtrain);
     train_x = Reshape(mtrain, train.first.shape());
diff --git a/examples/cifar10/download_data.py b/examples/cpp/cifar10/download_data.py
similarity index 81%
rename from examples/cifar10/download_data.py
rename to examples/cpp/cifar10/download_data.py
index 7129b03..a0b73c5 100755
--- a/examples/cifar10/download_data.py
+++ b/examples/cpp/cifar10/download_data.py
@@ -17,7 +17,10 @@
 # limitations under the License.
 # 
 
-import urllib
+from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
+import urllib.request, urllib.parse, urllib.error
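+# standard_library.install_aliases() (from the python-future package) lets
+# the Python 3 style urllib imports above keep working under Python 2 as well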
 import tarfile
 import os
 import sys
@@ -26,17 +29,17 @@
 
 def extract_tarfile(filepath):
     if os.path.exists(filepath):
-        print 'The tar file does exist. Extracting it now..'
+        print('The tar file already exists. Extracting it now...')
         with tarfile.open(filepath, 'r') as f:
             f.extractall('.')
-        print 'Finished!'
+        print('Finished!')
         sys.exit(0)
 
 
 def check_dir_exist(dirpath):
     if os.path.exists(dirpath):
-        print 'Directory %s does exist. To redownload the files, '\
-            'remove the existing directory and %s.tar.gz' % (dirpath, dirpath)
+        print('Directory %s already exists. To redownload the files, '
+              'remove the existing directory and %s.tar.gz' % (dirpath, dirpath))
         return True
     else:
         return False
@@ -45,10 +48,10 @@
 def do_download(dirpath, gzfile, url):
     if check_dir_exist(dirpath):
         sys.exit(0)
-    print 'Downloading CIFAR10 from %s' % (url)
-    urllib.urlretrieve(url, gzfile)
+    print('Downloading CIFAR10 from %s' % (url))
+    urllib.request.urlretrieve(url, gzfile)
     extract_tarfile(gzfile)
-    print 'Finished!'
+    print('Finished!')
 
 
 if __name__ == '__main__':
diff --git a/examples/cifar10/run-parallel.sh b/examples/cpp/cifar10/run-parallel.sh
similarity index 100%
rename from examples/cifar10/run-parallel.sh
rename to examples/cpp/cifar10/run-parallel.sh
diff --git a/examples/cifar10/run.sh b/examples/cpp/cifar10/run.sh
similarity index 100%
rename from examples/cifar10/run.sh
rename to examples/cpp/cifar10/run.sh
diff --git a/examples/cifar10/vgg-parallel.cc b/examples/cpp/cifar10/vgg-parallel.cc
similarity index 97%
rename from examples/cifar10/vgg-parallel.cc
rename to examples/cpp/cifar10/vgg-parallel.cc
index 90e9fce..33c533b 100644
--- a/examples/cifar10/vgg-parallel.cc
+++ b/examples/cpp/cifar10/vgg-parallel.cc
@@ -223,20 +223,20 @@
     train_y = train.second;
 
     LOG(INFO) << "Slicing training data...";
-    train_x_1.Reshape(Shape{nsamples / 2, train.first.shape(1),
+    train_x_1 = Tensor(Shape{nsamples / 2, train.first.shape(1),
         train.first.shape(2), train.first.shape(3)});
     LOG(INFO) << "Copying first data slice...";
     CopyDataToFrom(&train_x_1, train_x, train_x.Size() / 2);
-    train_x_2.Reshape(Shape{nsamples / 2, train.first.shape(1),
+    train_x_2 = Tensor(Shape{nsamples / 2, train.first.shape(1),
         train.first.shape(2), train.first.shape(3)});
     LOG(INFO) << "Copying second data slice...";
     CopyDataToFrom(&train_x_2, train_x, train_x.Size() / 2, 0,
                    train_x.Size() / 2);
-    train_y_1.Reshape(Shape{nsamples / 2});
+    train_y_1 = Tensor(Shape{nsamples / 2});
     train_y_1.AsType(kInt);
     LOG(INFO) << "Copying first label slice...";
     CopyDataToFrom(&train_y_1, train_y, train_y.Size() / 2);
-    train_y_2.Reshape(Shape{nsamples / 2});
+    train_y_2 = Tensor(Shape{nsamples / 2});
     train_y_2.AsType(kInt);
     LOG(INFO) << "Copying second label slice...";
     CopyDataToFrom(&train_y_2, train_y, train_y.Size() / 2, 0,
diff --git a/examples/imagenet/alexnet/CMakeLists.txt b/examples/cpp/imagenet/CMakeLists.txt
similarity index 100%
rename from examples/imagenet/alexnet/CMakeLists.txt
rename to examples/cpp/imagenet/CMakeLists.txt
diff --git a/examples/imagenet/alexnet/README.md b/examples/cpp/imagenet/README.md
similarity index 77%
rename from examples/imagenet/alexnet/README.md
rename to examples/cpp/imagenet/README.md
index be6797c..c3d261e 100644
--- a/examples/imagenet/alexnet/README.md
+++ b/examples/cpp/imagenet/README.md
@@ -1,3 +1,21 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
 # Train AlexNet over ImageNet
 
 Convolution neural network (CNN) is a type of feed-forward neural
diff --git a/examples/imagenet/alexnet/alexnet.cc b/examples/cpp/imagenet/alexnet.cc
similarity index 98%
rename from examples/imagenet/alexnet/alexnet.cc
rename to examples/cpp/imagenet/alexnet.cc
index 4ac1130..2d8db2d 100644
--- a/examples/imagenet/alexnet/alexnet.cc
+++ b/examples/cpp/imagenet/alexnet.cc
@@ -174,7 +174,7 @@
   size_t b = 0;
   size_t n_read;
   Timer timer, ttr;
-  Tensor prefetch_x, prefetch_y;
+  Tensor prefetch_x(Shape{batchsize, 3, kCropSize, kCropSize}),
+      prefetch_y(Shape{batchsize}, kInt);
   string binfile = bin_folder + "/train1.bin";
   timer.Tick();
   data.LoadData(kTrain, binfile, batchsize, &prefetch_x, &prefetch_y, &n_read,
diff --git a/examples/imagenet/alexnet/create_data.sh b/examples/cpp/imagenet/create_data.sh
similarity index 100%
rename from examples/imagenet/alexnet/create_data.sh
rename to examples/cpp/imagenet/create_data.sh
diff --git a/examples/imagenet/alexnet/ilsvrc12.cc b/examples/cpp/imagenet/ilsvrc12.cc
similarity index 100%
rename from examples/imagenet/alexnet/ilsvrc12.cc
rename to examples/cpp/imagenet/ilsvrc12.cc
diff --git a/examples/imagenet/alexnet/ilsvrc12.h b/examples/cpp/imagenet/ilsvrc12.h
similarity index 97%
rename from examples/imagenet/alexnet/ilsvrc12.h
rename to examples/cpp/imagenet/ilsvrc12.h
index 74fffbb..05b3451 100644
--- a/examples/imagenet/alexnet/ilsvrc12.h
+++ b/examples/cpp/imagenet/ilsvrc12.h
@@ -43,6 +43,12 @@
 using std::string;
 using namespace singa::io;
 namespace singa {
+
+/// size for resizing
+const size_t kImageSize = 256;
+const size_t kImageNBytes = 3 * kImageSize * kImageSize;
+/// size for cropping
+const size_t kCropSize = 227;
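+// (moved from the private section of ILSVRC to namespace scope so that
+// callers, e.g. alexnet.cc, can size the prefetch tensors themselves)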
 /// For reading ILSVRC2012 image data as tensors.
 class ILSVRC {
  public:
@@ -105,11 +111,7 @@
   void WriteMean(Tensor &mean, string path);
 
  private:
-  /// size for resizing
-  const size_t kImageSize = 256;
-  const size_t kImageNBytes = 3 * kImageSize * kImageSize;
-  /// size for cropping
-  const size_t kCropSize = 227;
+
   Tensor mean;
   string last_read_file = "";
 
@@ -299,9 +301,7 @@
 
 size_t ILSVRC::LoadData(int flag, string file, size_t read_size, Tensor *x,
                         Tensor *y, size_t *n_read, int nthreads) {
-  x->Reshape(Shape{read_size, 3, kCropSize, kCropSize});
-  y->AsType(kInt);
-  y->Reshape(Shape{read_size});
+
   if (file != last_read_file) {
     if (reader != nullptr) {
       reader->Close();
diff --git a/examples/imagenet/alexnet/run.sh b/examples/cpp/imagenet/run.sh
similarity index 100%
rename from examples/imagenet/alexnet/run.sh
rename to examples/cpp/imagenet/run.sh
diff --git a/examples/gan/README.md b/examples/gan/README.md
new file mode 100644
index 0000000..c805f7b
--- /dev/null
+++ b/examples/gan/README.md
@@ -0,0 +1,34 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Train a Generative Adversarial Network (GAN) model
+
+This example trains a Generative Adversarial Network (GAN) model on the MNIST dataset.
+
+## Running instructions
+
+1. Download the pre-processed [MNIST dataset](https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz)
+
+2. Start the training
+
+        python vanilla.py mnist.pkl.gz
+
+By default the training code runs on the CPU. To run it on a GPU card, start
+the program with an additional argument
+
+        python vanilla.py mnist.pkl.gz --use_gpu
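+
+The folder also provides a least-squares GAN, lsgan.py, which accepts the
+same arguments, e.g.
+
+        python lsgan.py mnist.pkl.gz --use_gpu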
diff --git a/python/rafiki/__init__.py b/examples/gan/download_mnist.py
similarity index 66%
copy from python/rafiki/__init__.py
copy to examples/gan/download_mnist.py
index 3aa745b..b042a7c 100644
--- a/python/rafiki/__init__.py
+++ b/examples/gan/download_mnist.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -6,14 +7,22 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-#
+# 
 #     http://www.apache.org/licenses/LICENSE-2.0
-#
+# 
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+# 
 
-__version__ = "0.1.1"
+import argparse
+from utils import download_data
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='download the pre-processed MNIST dataset')
+    parser.add_argument('gzfile', type=str, help='the dataset path')
+    parser.add_argument('url', type=str, help='dataset url')
+    args = parser.parse_args()
+    download_data(args.gzfile, args.url)
\ No newline at end of file
diff --git a/examples/gan/lsgan.py b/examples/gan/lsgan.py
new file mode 100644
index 0000000..39f243e
--- /dev/null
+++ b/examples/gan/lsgan.py
@@ -0,0 +1,186 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import device
+from singa import opt
+from singa import tensor
+
+import argparse
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+from model import lsgan_mlp
+from utils import load_data
+from utils import print_log
+
+
+class LSGAN():
+
+    def __init__(self,
+                 dev,
+                 rows=28,
+                 cols=28,
+                 channels=1,
+                 noise_size=100,
+                 hidden_size=128,
+                 batch=128,
+                 interval=1000,
+                 learning_rate=0.001,
+                 iterations=1000000,
+                 d_steps=3,
+                 g_steps=1,
+                 dataset_filepath='mnist.pkl.gz',
+                 file_dir='lsgan_images/'):
+        self.dev = dev
+        self.rows = rows
+        self.cols = cols
+        self.channels = channels
+        self.feature_size = self.rows * self.cols * self.channels
+        self.noise_size = noise_size
+        self.hidden_size = hidden_size
+        self.batch = batch
+        self.batch_size = self.batch // 2
+        self.interval = interval
+        self.learning_rate = learning_rate
+        self.iterations = iterations
+        self.d_steps = d_steps
+        self.g_steps = g_steps
+        self.dataset_filepath = dataset_filepath
+        self.file_dir = file_dir
+        self.model = lsgan_mlp.create_model(noise_size=self.noise_size,
+                                            feature_size=self.feature_size,
+                                            hidden_size=self.hidden_size)
+
+    def train(self):
+        train_data, _, _, _, _, _ = load_data(self.dataset_filepath)
+        dev = self.dev
+        dev.SetRandSeed(0)
+        np.random.seed(0)
+
+        #sgd = opt.SGD(lr=self.learning_rate, momentum=0.9, weight_decay=1e-5)
+        sgd = opt.Adam(lr=self.learning_rate)
+
+        noise = tensor.Tensor((self.batch_size, self.noise_size), dev,
+                              tensor.float32)
+        real_images = tensor.Tensor((self.batch_size, self.feature_size), dev,
+                                    tensor.float32)
+        real_labels = tensor.Tensor((self.batch_size, 1), dev, tensor.float32)
+        fake_labels = tensor.Tensor((self.batch_size, 1), dev, tensor.float32)
+        subtrahend_labels = tensor.Tensor((self.batch_size, 1), dev,
+                                          tensor.float32)
+
+        # attach the model to the graph
+        self.model.set_optimizer(sgd)
+        self.model.compile([noise],
+                           is_train=True,
+                           use_graph=False,
+                           sequential=True)
+
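+        # LSGAN least-squares targets: 1 for real images, -1 for fake images,
+        # and 0 as the value the generator drives the discriminator towards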
+        real_labels.set_value(1.0)
+        fake_labels.set_value(-1.0)
+        subtrahend_labels.set_value(0.0)
+
+        for iteration in range(self.iterations):
+
+            for d_step in range(self.d_steps):
+                idx = np.random.randint(0, train_data.shape[0], self.batch_size)
+                real_images.copy_from_numpy(train_data[idx])
+
+                self.model.train()
+
+                # Training the Discriminative Net
+                _, d_loss_real = self.model.train_one_batch_dis(
+                    real_images, real_labels)
+
+                noise.uniform(-1, 1)
+                fake_images = self.model.forward_gen(noise)
+                _, d_loss_fake = self.model.train_one_batch_dis(
+                    fake_images, fake_labels)
+
+                d_loss = tensor.to_numpy(d_loss_real)[0] + tensor.to_numpy(
+                    d_loss_fake)[0]
+
+            for g_step in range(self.g_steps):
+                # Training the Generative Net
+                noise.uniform(-1, 1)
+                _, g_loss_tensor = self.model.train_one_batch(
+                    noise, subtrahend_labels)
+
+                g_loss = tensor.to_numpy(g_loss_tensor)[0]
+
+            if iteration % self.interval == 0:
+                self.model.eval()
+                self.save_image(iteration)
+                print_log('Iteration {}: G_LOSS: {}, D_LOSS: {}'.format(
+                    iteration, g_loss, d_loss))
+
+    def save_image(self, iteration):
+        demo_row = 5
+        demo_col = 5
+        if not hasattr(self, "demo_noise"):
+            self.demo_noise = tensor.Tensor(
+                (demo_col * demo_row, self.noise_size), self.dev,
+                tensor.float32)
+        self.demo_noise.uniform(-1, 1)
+        gen_imgs = self.model.forward_gen(self.demo_noise)
+        gen_imgs = tensor.to_numpy(gen_imgs)
+        show_imgs = np.reshape(
+            gen_imgs, (gen_imgs.shape[0], self.rows, self.cols, self.channels))
+        fig, axs = plt.subplots(demo_row, demo_col)
+        cnt = 0
+        for r in range(demo_row):
+            for c in range(demo_col):
+                axs[r, c].imshow(show_imgs[cnt, :, :, 0], cmap='gray')
+                axs[r, c].axis('off')
+                cnt += 1
+        fig.savefig("{}{}.png".format(self.file_dir, iteration))
+        plt.close()
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Train GAN over MNIST')
+    parser.add_argument('filepath', type=str, help='the dataset path')
+    parser.add_argument('--use_gpu', action='store_true')
+    args = parser.parse_args()
+
+    if args.use_gpu:
+        print('Using GPU')
+        dev = device.create_cuda_gpu()
+    else:
+        print('Using CPU')
+        dev = device.get_default_device()
+
+    if not os.path.exists('lsgan_images/'):
+        os.makedirs('lsgan_images/')
+
+    rows = 28
+    cols = 28
+    channels = 1
+    noise_size = 100
+    hidden_size = 128
+    batch = 128
+    interval = 1000
+    learning_rate = 0.0005
+    iterations = 1000000
+    d_steps = 1
+    g_steps = 1
+    dataset_filepath = args.filepath
+    file_dir = 'lsgan_images/'
+    lsgan = LSGAN(dev, rows, cols, channels, noise_size, hidden_size, batch,
+                  interval, learning_rate, iterations, d_steps, g_steps,
+                  dataset_filepath, file_dir)
+    lsgan.train()
diff --git a/examples/gan/model/gan_mlp.py b/examples/gan/model/gan_mlp.py
new file mode 100644
index 0000000..d1c46a1
--- /dev/null
+++ b/examples/gan/model/gan_mlp.py
@@ -0,0 +1,104 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import layer
+from singa import model
+from singa import autograd
+
+
+class GAN_MLP(model.Model):
+
+    def __init__(self, noise_size=100, feature_size=784, hidden_size=128):
+        super(GAN_MLP, self).__init__()
+        self.noise_size = noise_size
+        self.feature_size = feature_size
+        self.hidden_size = hidden_size
+
+        # Generative Net
+        self.gen_net_fc_0 = layer.Linear(self.hidden_size)
+        self.gen_net_relu_0 = layer.ReLU()
+        self.gen_net_fc_1 = layer.Linear(self.feature_size)
+        self.gen_net_sigmoid_1 = layer.Sigmoid()
+
+        # Discriminative Net
+        self.dis_net_fc_0 = layer.Linear(self.hidden_size)
+        self.dis_net_relu_0 = layer.ReLU()
+        self.dis_net_fc_1 = layer.Linear(1)
+        self.dis_net_sigmoid_1 = layer.Sigmoid()
+        self.binary_cross_entropy = layer.BinaryCrossEntropy()
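+        # the gen_net_/dis_net_ attribute prefixes propagate into the
+        # parameter names, which is what lets train_one_batch() and
+        # train_one_batch_dis() update only one sub-network at a time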
+
+    def forward(self, x):
+        # Cascaded Net
+        y = self.forward_gen(x)
+        y = self.forward_dis(y)
+        return y
+
+    def forward_dis(self, x):
+        # Discriminative Net
+        y = self.dis_net_fc_0(x)
+        y = self.dis_net_relu_0(y)
+        y = self.dis_net_fc_1(y)
+        y = self.dis_net_sigmoid_1(y)
+        return y
+
+    def forward_gen(self, x):
+        # Generative Net
+        y = self.gen_net_fc_0(x)
+        y = self.gen_net_relu_0(y)
+        y = self.gen_net_fc_1(y)
+        y = self.gen_net_sigmoid_1(y)
+        return y
+
+    def train_one_batch(self, x, y):
+        # Training the Generative Net
+        out = self.forward(x)
+        loss = self.binary_cross_entropy(out, y)
+        # Only update the Generative Net
+        for p, g in autograd.backward(loss):
+            if "gen_net" in p.name:
+                self.optimizer.apply(p.name, p, g)
+        return out, loss
+
+    def train_one_batch_dis(self, x, y):
+        # Training the Discriminative Net
+        out = self.forward_dis(x)
+        loss = self.binary_cross_entropy(out, y)
+        # Only update the Discriminative Net
+        for p, g in autograd.backward(loss):
+            if "dis_net" in p.name:
+                self.optimizer.apply(p.name, p, g)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a CNN model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained
+    """
+    model = GAN_MLP(**kwargs)
+
+    return model
+
+
+__all__ = ['GAN_MLP', 'create_model']
diff --git a/examples/gan/model/lsgan_mlp.py b/examples/gan/model/lsgan_mlp.py
new file mode 100644
index 0000000..c67222e
--- /dev/null
+++ b/examples/gan/model/lsgan_mlp.py
@@ -0,0 +1,101 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import layer
+from singa import model
+from singa import autograd
+
+
+class LSGAN_MLP(model.Model):
+
+    def __init__(self, noise_size=100, feature_size=784, hidden_size=128):
+        super(LSGAN_MLP, self).__init__()
+        self.noise_size = noise_size
+        self.feature_size = feature_size
+        self.hidden_size = hidden_size
+
+        # Generative Net
+        self.gen_net_fc_0 = layer.Linear(self.hidden_size)
+        self.gen_net_relu_0 = layer.ReLU()
+        self.gen_net_fc_1 = layer.Linear(self.feature_size)
+        self.gen_net_sigmoid_1 = layer.Sigmoid()
+
+        # Discriminative Net
+        self.dis_net_fc_0 = layer.Linear(self.hidden_size)
+        self.dis_net_relu_0 = layer.ReLU()
+        self.dis_net_fc_1 = layer.Linear(1)
+        self.mse_loss = layer.MeanSquareError()
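+        # unlike the vanilla GAN, the LSGAN discriminator has no output
+        # sigmoid: its raw score is fitted to the targets with an MSE loss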
+
+    def forward(self, x):
+        # Cascaded Net
+        y = self.forward_gen(x)
+        y = self.forward_dis(y)
+        return y
+
+    def forward_dis(self, x):
+        # Discriminative Net
+        y = self.dis_net_fc_0(x)
+        y = self.dis_net_relu_0(y)
+        y = self.dis_net_fc_1(y)
+        return y
+
+    def forward_gen(self, x):
+        # Generative Net
+        y = self.gen_net_fc_0(x)
+        y = self.gen_net_relu_0(y)
+        y = self.gen_net_fc_1(y)
+        y = self.gen_net_sigmoid_1(y)
+        return y
+
+    def train_one_batch(self, x, y):
+        # Training the Generative Net
+        out = self.forward(x)
+        loss = self.mse_loss(out, y)
+        # Only update the Generative Net
+        for p, g in autograd.backward(loss):
+            if "gen_net" in p.name:
+                self.optimizer.apply(p.name, p, g)
+        return out, loss
+
+    def train_one_batch_dis(self, x, y):
+        # Training the Discriminative Net
+        out = self.forward_dis(x)
+        loss = self.mse_loss(out, y)
+        # Only update the Discriminative Net
+        for p, g in autograd.backward(loss):
+            if "dis_net" in p.name:
+                self.optimizer.apply(p.name, p, g)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a CNN model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained
+    """
+    model = LSGAN_MLP(**kwargs)
+
+    return model
+
+
+__all__ = ['LSGAN_MLP', 'create_model']
diff --git a/examples/gan/utils.py b/examples/gan/utils.py
new file mode 100644
index 0000000..050d184
--- /dev/null
+++ b/examples/gan/utils.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+
+import gzip
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+import pickle
+import sys
+import time
+
+try:
+    import urllib.request as ul_request
+except ImportError:
+    import urllib as ul_request
+
+def print_log(s):
+    t = time.ctime()
+    print('[{}]{}'.format(t, s))
+
+def load_data(filepath):
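+    # the pre-processed mnist.pkl.gz was pickled under Python 2, so
+    # encoding='bytes' is needed for Python 3 to unpickle it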
+    with gzip.open(filepath, 'rb') as f:
+        train_set, valid_set, test_set = pickle.load(f, encoding='bytes')
+        traindata = train_set[0].astype(np.float32)
+        validdata = valid_set[0].astype(np.float32)
+        testdata = test_set[0].astype(np.float32)
+        trainlabel = train_set[1].astype(np.float32)
+        validlabel = valid_set[1].astype(np.float32)
+        testlabel = test_set[1].astype(np.float32)
+        return traindata, trainlabel, validdata, validlabel, testdata, testlabel
+
+def download_data(gzfile, url):
+    if os.path.exists(gzfile):
+        print('Already downloaded!')
+        sys.exit(0)
+    print('Downloading data from %s' % (url))
+    ul_request.urlretrieve(url, gzfile)
+    print('Finished!')
+
+def show_images(filepath):
+    with open(filepath, 'rb') as f:
+        imgs = pickle.load(f)
+        r, c = 5, 5
+        fig, axs = plt.subplots(r, c)
+        cnt = 0
+        for i in range(r):
+            for j in range(c):
+                axs[i, j].imshow(imgs[cnt, :, :, 0], cmap='gray')
+                axs[i, j].axis('off')
+                cnt += 1
+        plt.show()
\ No newline at end of file
diff --git a/examples/gan/vanilla.py b/examples/gan/vanilla.py
new file mode 100644
index 0000000..49c8ec4
--- /dev/null
+++ b/examples/gan/vanilla.py
@@ -0,0 +1,175 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import device
+from singa import opt
+from singa import tensor
+
+import argparse
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+from model import gan_mlp
+from utils import load_data
+from utils import print_log
+
+
+class VANILLA():
+
+    def __init__(self,
+                 dev,
+                 rows=28,
+                 cols=28,
+                 channels=1,
+                 noise_size=100,
+                 hidden_size=128,
+                 batch=128,
+                 interval=1000,
+                 learning_rate=0.001,
+                 iterations=1000000,
+                 dataset_filepath='mnist.pkl.gz',
+                 file_dir='vanilla_images/'):
+        self.dev = dev
+        self.rows = rows
+        self.cols = cols
+        self.channels = channels
+        self.feature_size = self.rows * self.cols * self.channels
+        self.noise_size = noise_size
+        self.hidden_size = hidden_size
+        self.batch = batch
+        self.batch_size = self.batch // 2
+        self.interval = interval
+        self.learning_rate = learning_rate
+        self.iterations = iterations
+        self.dataset_filepath = dataset_filepath
+        self.file_dir = file_dir
+        self.model = gan_mlp.create_model(noise_size=self.noise_size,
+                                          feature_size=self.feature_size,
+                                          hidden_size=self.hidden_size)
+
+    def train(self):
+        train_data, _, _, _, _, _ = load_data(self.dataset_filepath)
+        dev = self.dev
+        dev.SetRandSeed(0)
+        np.random.seed(0)
+
+        # sgd = opt.SGD(lr=self.learning_rate, momentum=0.9, weight_decay=1e-5)
+        sgd = opt.Adam(lr=self.learning_rate)
+
+        noise = tensor.Tensor((self.batch_size, self.noise_size), dev,
+                              tensor.float32)
+        real_images = tensor.Tensor((self.batch_size, self.feature_size), dev,
+                                    tensor.float32)
+        real_labels = tensor.Tensor((self.batch_size, 1), dev, tensor.float32)
+        fake_labels = tensor.Tensor((self.batch_size, 1), dev, tensor.float32)
+
+        # attach the model to the graph
+        self.model.set_optimizer(sgd)
+        self.model.compile([noise],
+                           is_train=True,
+                           use_graph=False,
+                           sequential=True)
+
+        real_labels.set_value(1.0)
+        fake_labels.set_value(0.0)
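+        # standard GAN targets: 1 for real images, 0 for generated ones; the
+        # generator is later trained against the real label to fool the critic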
+
+        for iteration in range(self.iterations):
+            idx = np.random.randint(0, train_data.shape[0], self.batch_size)
+            real_images.copy_from_numpy(train_data[idx])
+
+            self.model.train()
+
+            # Training the Discriminative Net
+            _, d_loss_real = self.model.train_one_batch_dis(
+                real_images, real_labels)
+
+            noise.uniform(-1, 1)
+            fake_images = self.model.forward_gen(noise)
+            _, d_loss_fake = self.model.train_one_batch_dis(
+                fake_images, fake_labels)
+
+            d_loss = tensor.to_numpy(d_loss_real)[0] + tensor.to_numpy(
+                d_loss_fake)[0]
+
+            # Training the Generative Net
+            noise.uniform(-1, 1)
+            _, g_loss_tensor = self.model.train_one_batch(
+                noise, real_labels)
+
+            g_loss = tensor.to_numpy(g_loss_tensor)[0]
+
+            if iteration % self.interval == 0:
+                self.model.eval()
+                self.save_image(iteration)
+                print_log('Iteration {}: G_LOSS: {}, D_LOSS: {}'.format(
+                    iteration, g_loss, d_loss))
+
+    def save_image(self, iteration):
+        demo_row = 5
+        demo_col = 5
+        if not hasattr(self, "demo_noise"):
+            self.demo_noise = tensor.Tensor(
+                (demo_col * demo_row, self.noise_size), self.dev,
+                tensor.float32)
+        self.demo_noise.uniform(-1, 1)
+        gen_imgs = self.model.forward_gen(self.demo_noise)
+        gen_imgs = tensor.to_numpy(gen_imgs)
+        show_imgs = np.reshape(
+            gen_imgs, (gen_imgs.shape[0], self.rows, self.cols, self.channels))
+        fig, axs = plt.subplots(demo_row, demo_col)
+        cnt = 0
+        for r in range(demo_row):
+            for c in range(demo_col):
+                axs[r, c].imshow(show_imgs[cnt, :, :, 0], cmap='gray')
+                axs[r, c].axis('off')
+                cnt += 1
+        fig.savefig("{}{}.png".format(self.file_dir, iteration))
+        plt.close()
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Train GAN over MNIST')
+    parser.add_argument('filepath', type=str, help='the dataset path')
+    parser.add_argument('--use_gpu', action='store_true')
+    args = parser.parse_args()
+
+    if args.use_gpu:
+        print('Using GPU')
+        dev = device.create_cuda_gpu()
+    else:
+        print('Using CPU')
+        dev = device.get_default_device()
+
+    if not os.path.exists('vanilla_images/'):
+        os.makedirs('vanilla_images/')
+
+    rows = 28
+    cols = 28
+    channels = 1
+    noise_size = 100
+    hidden_size = 128
+    batch = 128
+    interval = 1000
+    learning_rate = 0.0005
+    iterations = 1000000
+    dataset_filepath = args.filepath
+    file_dir = 'vanilla_images/'
+    vanilla = VANILLA(dev, rows, cols, channels, noise_size, hidden_size, batch,
+                      interval, learning_rate, iterations, dataset_filepath,
+                      file_dir)
+    vanilla.train()
diff --git a/examples/imagenet/googlenet/README.md b/examples/imagenet/googlenet/README.md
deleted file mode 100644
index e597fc6..0000000
--- a/examples/imagenet/googlenet/README.md
+++ /dev/null
@@ -1,66 +0,0 @@
----
-name: GoogleNet on ImageNet
-SINGA version: 1.0.1
-SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744
-parameter_url: https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
-parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d
-license: unrestricted https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet
----
-
-# Image Classification using GoogleNet
-
-
-In this example, we convert GoogleNet trained on Caffe to SINGA for image classification.
-
-## Instructions
-
-* Download the parameter checkpoint file into this folder
-
-        $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
-        $ tar xvf bvlc_googlenet.tar.gz
-
-* Run the program
-
-        # use cpu
-        $ python serve.py -C &
-        # use gpu
-        $ python serve.py &
-
-* Submit images for classification
-
-        $ curl -i -F image=@image1.jpg http://localhost:9999/api
-        $ curl -i -F image=@image2.jpg http://localhost:9999/api
-        $ curl -i -F image=@image3.jpg http://localhost:9999/api
-
-image1.jpg, image2.jpg and image3.jpg should be downloaded before executing the above commands.
-
-## Details
-
-We first extract the parameter values from [Caffe's checkpoint file](http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel) into a pickle version
-After downloading the checkpoint file into `caffe_root/python` folder, run the following script
-
-    # to be executed within caffe_root/python folder
-    import caffe
-    import numpy as np
-    import cPickle as pickle
-
-    model_def = '../models/bvlc_googlenet/deploy.prototxt'
-    weight = 'bvlc_googlenet.caffemodel'  # must be downloaded at first
-    net = caffe.Net(model_def, weight, caffe.TEST)
-
-    params = {}
-    for layer_name in net.params.keys():
-        weights=np.copy(net.params[layer_name][0].data)
-        bias=np.copy(net.params[layer_name][1].data)
-        params[layer_name+'_weight']=weights
-        params[layer_name+'_bias']=bias
-        print layer_name, weights.shape, bias.shape
-
-    with open('bvlc_googlenet.pickle', 'wb') as fd:
-        pickle.dump(params, fd)
-
-Then we construct the GoogleNet using SINGA's FeedForwardNet structure.
-Note that we added a EndPadding layer to resolve the issue from discrepancy
-of the rounding strategy of the pooling layer between Caffe (ceil) and cuDNN (floor).
-Only the MaxPooling layers outside inception blocks have this problem.
-Refer to [this](http://joelouismarino.github.io/blog_posts/blog_googlenet_keras.html) for more detials.
diff --git a/examples/imagenet/googlenet/serve.py b/examples/imagenet/googlenet/serve.py
deleted file mode 100644
index 57e005d..0000000
--- a/examples/imagenet/googlenet/serve.py
+++ /dev/null
@@ -1,240 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-''' This model is created following Caffe implementation of GoogleNet
-https://github.com/BVLC/caffe/blob/master/models/bvlc_googlenet/
-'''
-import os
-import sys
-import time
-import numpy as np
-import threading
-import traceback
-from argparse import ArgumentParser
-from scipy.misc import imread, imresize
-import numpy as np
-
-from singa.layer import Layer, Conv2D, Activation, MaxPooling2D, AvgPooling2D,\
-        Split, Concat, LRN, Dropout, Flatten, Dense
-from singa import layer
-from singa import net as ffnet
-from singa import device
-from singa import tensor
-from rafiki.agent import Agent, MsgType
-
-
-def add_to_tuple(x):
-    '''return a tuple with the last two values incremented by 1'''
-    if len(x) == 3:
-        return (x[0], x[1] + 1, x[2] + 1)
-    else:
-        return (x[0], x[1], x[2] + 1, x[3] + 1)
-
-class EndPadding(Layer):
-    '''Pad the end of the spatial axis with 1 row and 1 column of zeros.
-
-    This layer is inserted before the pooling layers outside the inception
-    block. We need such a layer because Caffe (ceil) and cuDNN (floor) have
-    different rounding strategies for the pooling layer.
-    http://joelouismarino.github.io/blog_posts/blog_googlenet_keras.html
-    '''
-    def __init__(self, name, input_sample_shape=None):
-        super(EndPadding, self).__init__(name)
-        if input_sample_shape is not None:
-            assert len(input_sample_shape) == 3, 'input must has 4 dims'
-            self.output_sample_shape = add_to_tuple(input_sample_shape)
-
-    def get_output_sample_shape(self):
-        return self.output_sample_shape
-
-    def setup(self, input_sample_shape):
-        assert len(input_sample_shape) == 3, 'input must has 4 dims'
-        self.output_sample_shape = add_to_tuple(input_sample_shape)
-        self.has_setup = True
-
-    def forward(self, flag, x):
-        '''pad zeros'''
-        tmp = tensor.to_numpy(x)
-        shape = add_to_tuple(x.shape)
-        ret = np.zeros(shape)
-        ret[:,:,:-1, :-1] = tmp
-        y = tensor.from_numpy(ret)
-        y.to_device(x.device)
-        return y
-
-    def backward(self, falg, dy):
-        '''remove paddings'''
-        tmp = tensor.to_numpy(dy)
-        dx = tensor.from_numpy(tmp[:,:,:-1,:-1])
-        dx.to_device(dy.device)
-        return dx, []
-
-# b_specs = {'init': 'constant', 'value': 0, 'lr_mult': 2, 'decay_mult': 0}
-
-def conv(net, src, name, num, kernel, stride=1, pad=0, suffix=''):
-    net.add(Conv2D('%s/%s' % (name, suffix), num, kernel, stride, pad=pad), src)
-    return net.add(Activation('%s/relue_%s' % (name, suffix)))
-
-def pool(net, src, name, kernel, stride):
-    net.add(EndPadding('%s/pad' % name), src)
-    ret = net.add(MaxPooling2D('%s' % name, 3, 2, pad=0))
-    return ret
-
-def inception(net, src, name, nb1x1, nb3x3r, nb3x3, nb5x5r, nb5x5, nbproj):
-    split = net.add(Split('%s/split' % name, 4), src)
-
-    c1x1 = conv(net, split, name, nb1x1, 1, suffix='1x1')
-
-    c3x3r = conv(net, split, name, nb3x3r, 1, suffix='3x3_reduce')
-    c3x3 = conv(net, c3x3r, name, nb3x3, 3, pad=1, suffix='3x3')
-
-    c5x5r = conv(net, split, name, nb5x5r, 1, suffix='5x5_reduce')
-    c5x5 = conv(net, c5x5r, name, nb5x5, 5, pad=2, suffix='5x5')
-
-    pool = net.add(MaxPooling2D('%s/pool' % name, 3, 1, pad=1), split)
-    cproj = conv(net, pool, name, nbproj, 1, suffix='pool_proj')
-
-    return net.add(Concat('%s/output' % name, 1), [c1x1, c3x3, c5x5, cproj])
-
-
-def create_net(shape, weight_path='bvlc_googlenet.pickle'):
-    net = ffnet.FeedForwardNet()
-    net.add(Conv2D('conv1/7x7_s2', 64, 7, 2, pad=3, input_sample_shape=shape))
-    c1 = net.add(Activation('conv1/relu_7x7'))
-    pool1 = pool(net, c1, 'pool1/3x3_s2', 3, 2)
-    norm1 = net.add(LRN('pool1/norm1', 5, 0.0001, 0.75))
-    c3x3r = conv(net, norm1 , 'conv2', 64, 1, suffix='3x3_reduce')
-    c3x3 = conv(net, c3x3r, 'conv2', 192, 3, pad=1, suffix='3x3')
-    norm2 = net.add(LRN('conv2/norm2', 5, 0.0001, 0.75))
-    pool2 = pool(net, norm2, 'pool2/3x3_s2', 3, 2)
-
-    i3a=inception(net, pool2, 'inception_3a', 64, 96, 128, 16, 32, 32)
-    i3b=inception(net, i3a, 'inception_3b', 128, 128, 192, 32, 96, 64)
-    pool3=pool(net, i3b, 'pool3/3x3_s2', 3, 2)
-    i4a=inception(net, pool3, 'inception_4a', 192, 96, 208, 16, 48, 64)
-    i4b=inception(net, i4a, 'inception_4b', 160, 112, 224, 24, 64, 64)
-    i4c=inception(net, i4b, 'inception_4c', 128, 128, 256, 24, 64, 64)
-    i4d=inception(net, i4c, 'inception_4d', 112, 144, 288, 32, 64, 64)
-    i4e=inception(net, i4d, 'inception_4e', 256, 160, 320, 32, 128, 128)
-    pool4=pool(net, i4e,'pool4/3x3_s2', 3, 2)
-    i5a=inception(net, pool4, 'inception_5a', 256, 160, 320, 32, 128, 128)
-    i5b=inception(net, i5a, 'inception_5b', 384, 192, 384, 48, 128, 128)
-    pool5=net.add(AvgPooling2D('pool5/7x7_s1', 7, 1, pad=0))
-    drop5=net.add(Dropout('drop', 0.4))
-    flat=net.add(Flatten('flat'))
-    dense=net.add(Dense('loss3/classifier', 1000))
-    # prob=net.add(Softmax('softmax'))
-
-    net.load(weight_path, use_pickle=True)
-    print 'total num of params %d' % (len(net.param_names()))
-    # SINGA and Caffe have different layout for the weight matrix of the dense
-    # layer
-    for key, val in zip(net.param_names(), net.param_values()):
-        # print key
-        if key == 'loss3/classifier_weight':
-            tmp = tensor.to_numpy(val)
-            tmp = tmp.reshape(tmp.shape[::-1])
-            val.copy_from_numpy(np.transpose(tmp))
-    return net
-
-
-def serve(agent, use_cpu, parameter_file, topk=5):
-    if use_cpu:
-        print 'running with cpu'
-        dev = device.get_default_device()
-        layer.engine = 'singacpp'
-    else:
-        print "runing with gpu"
-        dev = device.create_cuda_gpu()
-    agent = agent
-
-    print 'Start intialization............'
-    net = create_net((3, 224, 224), parameter_file)
-    net.to_device(dev)
-    print 'End intialization............'
-
-    labels = np.loadtxt('synset_words.txt', str, delimiter='\t ')
-    while True:
-        key, val = agent.pull()
-        if key is None:
-            time.sleep(0.1)
-            continue
-        msg_type = MsgType.parse(key)
-        if msg_type.is_request():
-            try:
-                response = ""
-                img = imread(val['image'], mode='RGB').astype(np.float32)
-                height,width = img.shape[:2]
-                img[:, :, 0] -= 123.68
-                img[:, :, 1] -= 116.779
-                img[:, :, 2] -= 103.939
-                img[:,:,[0,1,2]] = img[:,:,[2,1,0]]
-                img = img.transpose((2, 0, 1))
-                img = img[:,(height-224)//2:(height+224)//2,(width-224)//2:(width+224)//2]
-                images = np.expand_dims(img, axis=0)
-
-                x = tensor.from_numpy(images.astype(np.float32))
-                x.to_device(dev)
-                y = net.predict(x)
-                prob = np.average(tensor.to_numpy(y), 0)
-                # sort and reverse
-                idx = np.argsort(-prob)[0:topk]
-                for i in idx:
-                    response += "%s:%s<br/>" % (labels[i], prob[i])
-            except:
-                traceback.print_exc()
-                response = "Sorry, system error during prediction."
-            agent.push(MsgType.kResponse, response)
-        elif MsgType.kCommandStop.equal(msg_type):
-                print 'get stop command'
-                agent.push(MsgType.kStatus, "success")
-                break
-        else:
-            print 'get unsupported message %s' % str(msg_type)
-            agent.push(MsgType.kStatus, "Unknown command")
-            break
-        # while loop
-    print "server stop"
-
-
-def main():
-    try:
-        # Setup argument parser
-        parser = ArgumentParser(description="GooleNet for image classification")
-        parser.add_argument("-p", "--port", default=9999, help="listen port")
-        parser.add_argument("-C", "--use_cpu", action="store_true")
-        parser.add_argument("--parameter_file", default="bvlc_googlenet.pickle",
-                help="relative path")
-
-        # Process arguments
-        args = parser.parse_args()
-        port = args.port
-
-        # start to train
-        agent = Agent(port)
-        serve(agent, args.use_cpu, args.parameter_file)
-        agent.stop()
-
-    except SystemExit:
-        return
-    except:
-        traceback.print_exc()
-        sys.stderr.write("  for help use --help \n\n")
-        return 2
-
-
-if __name__ == '__main__':
-    main()
diff --git a/examples/imagenet/resnet/README.md b/examples/imagenet/resnet/README.md
deleted file mode 100644
index 9ea12cd..0000000
--- a/examples/imagenet/resnet/README.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-name: Resnets on ImageNet
-SINGA version: 1.1
-SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
-parameter_url: https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
-license: Apache V2, https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE
----
-
-# Image Classification using Residual Networks
-
-
-In this example, we convert Residual Networks trained on [Torch](https://github.com/facebook/fb.resnet.torch) to SINGA for image classification.
-
-## Instructions
-
-* Download one parameter checkpoint file (see below) and the synset word file of ImageNet into this folder, e.g.,
-
-        $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
-        $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
-        $ tar xvf resnet-18.tar.gz
-
-* Usage
-
-        $ python serve.py -h
-
-* Example
-
-        # use cpu
-        $ python serve.py --use_cpu --parameter_file resnet-18.pickle --model resnet --depth 18 &
-        # use gpu
-        $ python serve.py --parameter_file resnet-18.pickle --model resnet --depth 18 &
-
-  The parameter files for the following model and depth configuration pairs are provided:
-  * resnet (original resnet), [18](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz)|[34](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-34.tar.gz)|[101](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-101.tar.gz)|[152](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-152.tar.gz)
-  * addbn (resnet with a batch normalization layer after the addition), [50](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-50.tar.gz)
-  * wrn (wide resnet), [50](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/wrn-50-2.tar.gz)
-  * preact (resnet with pre-activation) [200](https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-200.tar.gz)
-
-* Submit images for classification
-
-        $ curl -i -F image=@image1.jpg http://localhost:9999/api
-        $ curl -i -F image=@image2.jpg http://localhost:9999/api
-        $ curl -i -F image=@image3.jpg http://localhost:9999/api
-
-image1.jpg, image2.jpg and image3.jpg should be downloaded before executing the above commands.
-
-## Details
-
-The parameter files were extracted from the original [torch files](https://github.com/facebook/fb.resnet.torch/tree/master/pretrained) via
-the convert.py program.
-
-Usage:
-
-    $ python convert.py -h
diff --git a/examples/imagenet/resnet/convert.py b/examples/imagenet/resnet/convert.py
deleted file mode 100644
index 042d2ec..0000000
--- a/examples/imagenet/resnet/convert.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-'''Extract the net parameters from the torch file and store them as python dict
-using cPickle'''
-
-import os
-import torchfile
-import numpy as np
-import cPickle as pickle
-from argparse import ArgumentParser
-
-import model
-
-verbose = False
-
-def add_param(idx, name, val, params):
-    if type(params) == dict:
-        assert name not in params, 'duplicated param %s' % name
-        params[name] = val
-    else:
-        assert params[idx].size() == val.size, 'size mismatch for %s: %s - %s' % (name, (params[idx].shape,), (val.shape,))
-        params[idx].copy_from_numpy(val)
-
-    if verbose:
-        print name, val.shape
-
-
-def conv(m, idx, params, param_names):
-    outplane = m['weight'].shape[0]
-    name = param_names[idx]
-    val = np.reshape(m['weight'], (outplane, -1))
-    add_param(idx, name, val, params)
-    return idx + 1
-
-
-def batchnorm(m, idx, params, param_names):
-    add_param(idx, param_names[idx], m['weight'], params)
-    add_param(idx + 1, param_names[idx + 1], m['bias'], params)
-    add_param(idx + 2, param_names[idx + 2], m['running_mean'], params)
-    add_param(idx + 3, param_names[idx + 3], m['running_var'], params)
-    return idx + 4
-
-
-def linear(m, idx, params, param_names):
-    add_param(idx, param_names[idx], np.transpose(m['weight']), params)
-    add_param(idx + 1, param_names[idx + 1], m['bias'], params)
-    return idx + 2
-
-
-def traverse(m, idx, params, param_names):
-    ''' Traverse all modules of the torch checkpoint file to extract params.
-
-    Args:
-        m, a TorchObject
-        idx, index for the current cursor of param_names
-        params, an empty dictionary (name->numpy) to dump the params via pickle;
-            or a list of tensor objects which should be in the same order as
-            param_names, called to initialize net created in SINGA directly
-            using param values from torch checkpoint file.
-
-    Returns:
-        the updated idx
-    '''
-    module_type = m.__dict__['_typename']
-    if module_type in ['nn.Sequential', 'nn.ConcatTable']:
-        for x in m.modules:
-            idx = traverse(x, idx, params, param_names)
-    elif 'SpatialConvolution' in module_type:
-        idx = conv(m, idx, params, param_names)
-    elif 'SpatialBatchNormalization' in module_type:
-        idx = batchnorm(m, idx, params, param_names)
-    elif 'Linear' in module_type:
-        idx = linear(m, idx, params, param_names)
-    return idx
-
-
-if __name__ == '__main__':
-    parser = ArgumentParser(description='Convert params from torch to python '
-            'dict. \n resnet could have depth of 18, 34, 101, 152; \n'
-            'wrn has depth 50; preact has depth 200; addbn has depth 50')
-    parser.add_argument("infile", help="torch checkpoint file")
-    parser.add_argument("model", choices=['resnet', 'wrn', 'preact', 'addbn'])
-    parser.add_argument("depth", type=int, choices=[18, 34, 50, 101, 152, 200])
-    args = parser.parse_args()
-
-    net = model.create_net(args.model, args.depth)
-    # model.init_params(net)
-    m = torchfile.load(args.infile)
-    params = {}
-    # params = net.param_values()
-    param_names = net.param_names()
-    traverse(m, 0, params, param_names)
-    miss = [name for name in param_names if name not in params]
-    if len(miss) > 0:
-        print 'The following params are missing from torch file'
-        print miss
-
-    outfile = os.path.splitext(args.infile)[0] + '.pickle'
-    with open(outfile, 'wb') as fd:
-        pickle.dump(params, fd)
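A minimal sketch of reading the resulting dump back (Python 2, matching this script's use of cPickle; the file name depends on the input given to convert.py):

    import cPickle as pickle

    with open('resnet-18.pickle', 'rb') as fd:
        params = pickle.load(fd)
    for name, val in params.items():
        print name, val.shape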
diff --git a/examples/imagenet/resnet/model.py b/examples/imagenet/resnet/model.py
deleted file mode 100644
index bf90da3..0000000
--- a/examples/imagenet/resnet/model.py
+++ /dev/null
@@ -1,275 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-'''These models are created following https://github.com/facebook/fb.resnet.torch.git
-and https://github.com/szagoruyko/wide-residual-networks
-'''
-from singa.layer import Conv2D, Activation, MaxPooling2D, AvgPooling2D,\
-        Split, Merge, Flatten, Dense, BatchNormalization, Softmax
-from singa import net as ffnet
-from singa import initializer
-from singa import layer
-
-ffnet.verbose = True
-
-conv_bias = False
-
-def conv(net, prefix, n, ksize, stride=1, pad=0, bn=True, relu=True, src=None):
-    '''Add a convolution layer and optionally a batchnorm and relu layer.
-
-    Args:
-        prefix, a string for the prefix of the layer name
-        n, num of filters for the conv layer
-        bn, if true add batchnorm
-        relu, if true add relu
-
-    Returns:
-        the last added layer
-    '''
-    ret = net.add(Conv2D(
-        prefix + '-conv', n, ksize, stride, pad=pad, use_bias=conv_bias), src)
-    if bn:
-        ret = net.add(BatchNormalization(prefix + '-bn'))
-    if relu:
-        ret = net.add(Activation(prefix + '-relu'))
-    return ret
-
-
-def shortcut(net, prefix, inplane, outplane, stride, src, bn=False):
-    '''Add a conv shortcut layer if inplane != outplane; or return the source
-    layer directly.
-
-    Args:
-        prefix, a string for the prefix of the layer name
-        bn, if true add a batchnorm layer after the conv layer
-
-    Returns:
-        return the last added layer or the source layer.
-    '''
-    if inplane == outplane:
-        return src
-    return conv(net, prefix + '-shortcut', outplane, 1, stride, 0, bn, False, src)
-
-
-def bottleneck(name, net, inplane, midplane, outplane, stride=1, preact=False, add_bn=False):
-    '''Add three conv layers, with a>=b<=c filters.
-
-    The default structure is
-    input
-         -split - conv1-bn1-relu1-conv2-bn2-relu2-conv3-bn3
-                - conv-bn or dummy
-         -add
-         -relu
-
-    Args:
-        inplane, num of feature maps of the input
-        midplane, num of feature maps of the middle layer
-        outplane, num of feature maps of the output
-        preact, if true, move the bn3 and relu before conv1, i.e., pre-activation as in the identity-mappings paper
-        add_bn, if true, move the last bn after the addition layer (for resnet-50)
-    '''
-    assert not (preact and add_bn), 'preact and batchnorm after addition cannot be true at the same time'
-    split = net.add(Split(name + '-split', 2))
-    if preact:
-        net.add(BatchNormalization(name + '-preact-bn'))
-        net.add(Activation(name + '-preact-relu'))
-    conv(net, name + '-0', midplane, 1, 1, 0, True, True)
-    conv(net, name + '-1', midplane, 3, stride, 1, True, True)
-    br0 = conv(net, name + '-2', outplane, 1, 1, 0, not (preact or add_bn), False)
-    br1 = shortcut(net, name, inplane, outplane, stride, split, not add_bn)
-    ret = net.add(Merge(name + '-add'), [br0, br1])
-    if add_bn:
-        ret = net.add(BatchNormalization(name + '-add-bn'))
-    if not preact:
-        ret = net.add(Activation(name + '-add-relu'))
-    return ret
-
-
-def basicblock(name, net, inplane, midplane, outplane, stride=1, preact=False, add_bn=False):
-    '''Add two conv layers, with a<=b filters.
-
-    The default structure is
-    input
-         -split - conv1-bn1-relu1-conv2-bn2
-                - conv or dummy
-         -add
-         -relu
-
-    Args:
-        inplane, num of feature maps of the input
-        midplane, num of feature maps of the middle layer
-        outplane, num of feature maps of the output
-        preact, if true, move the bn2 and relu before conv1, i.e., pre-activation as in the identity-mappings paper
-        add_bn, if true, move the last bn after the addition layer (for resnet-50)
-    '''
-    assert not (preact and add_bn), 'preact and batchnorm after addition cannot be true at the same time'
-    split = net.add(Split(name + '-split', 2))
-    if preact:
-        net.add(BatchNormalization(name + '-preact-bn'))
-        net.add(Activation(name + '-preact-relu'))
-    conv(net, name + '-0', midplane, 3, stride, 1, True, True)
-    br0 = conv(net, name + '-1', outplane, 3, 1, 1, not preact, False)
-    br1 = shortcut(net, name, inplane, outplane, stride, split, False)
-    ret = net.add(Merge(name + '-add'), [br0, br1])
-    if add_bn:
-        ret = net.add(BatchNormalization(name + '-add-bn'))
-    if not preact:
-        ret = net.add(Activation(name + '-add-relu'))
-    return ret
-
-
-def stage(sid, net, num_blk, inplane, midplane, outplane, stride, block, preact=False, add_bn=False):
-    block('stage%d-blk%d' % (sid, 0), net, inplane, midplane, outplane, stride, preact, add_bn)
-    for i in range(1, num_blk):
-        block('stage%d-blk%d' % (sid, i), net, outplane, midplane, outplane, 1, preact, add_bn)
-
-def init_params(net, weight_path=None):
-    if weight_path is None:
-        for pname, pval in zip(net.param_names(), net.param_values()):
-            print pname, pval.shape
-            if 'conv' in pname and len(pval.shape) > 1:
-                initializer.gaussian(pval, 0, pval.shape[1])
-            elif 'dense' in pname:
-                if len(pval.shape) > 1:
-                    initializer.gaussian(pval, 0, pval.shape[0])
-                else:
-                    pval.set_value(0)
-            # init params from batch norm layer
-            elif 'mean' in pname or 'beta' in pname:
-                pval.set_value(0)
-            elif 'var' in pname:
-                pval.set_value(1)
-            elif 'gamma' in pname:
-                initializer.uniform(pval, 0, 1)
-    else:
-        net.load(weight_path, use_pickle='pickle' in weight_path)
-
-
-cfg = { 18: [2, 2, 2, 2],  # basicblock
-        34: [3, 4, 6, 3],  # basicblock
-        50: [3, 4, 6, 3],  # bottleneck
-        101: [3, 4, 23, 3], # bottleneck
-        152: [3, 8, 36, 3], # bottleneck
-        200: [3, 24, 36, 3]} # bottleneck
-
-
-def create_addbn_resnet(depth=50):
-    '''Original resnet with the last batchnorm of each block moved to after the addition layer'''
-    net = ffnet.FeedForwardNet()
-    net.add(Conv2D('input-conv', 64, 7, 2, pad=3, use_bias=False, input_sample_shape=(3, 224, 224)))
-    net.add(BatchNormalization('input-bn'))
-    net.add(Activation('input_relu'))
-    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
-    conf = cfg[depth]
-    if depth > 34:
-        stage(0, net, conf[0], 64, 64, 256, 1, bottleneck, add_bn=True)
-        stage(1, net, conf[1], 256, 128, 512, 2, bottleneck, add_bn=True)
-        stage(2, net, conf[2], 512, 256, 1024, 2, bottleneck, add_bn=True)
-        stage(3, net, conf[3], 1024, 512, 2048, 2, bottleneck, add_bn=True)
-    else:
-        stage(0, net, conf[0], 64, 64, 64, 1, basicblock, add_bn=True)
-        stage(1, net, conf[1], 64, 128, 128, 2, basicblock, add_bn=True)
-        stage(2, net, conf[2], 128, 256, 256, 2, basicblock, add_bn=True)
-        stage(3, net, conf[3], 256, 512, 512, 2, basicblock, add_bn=True)
-    net.add(AvgPooling2D('avg', 7, 1, pad=0))
-    net.add(Flatten('flat'))
-    net.add(Dense('dense', 1000))
-    return net
-
-
-def create_resnet(depth=18):
-    '''Original resnet, where there is a relu after the addition layer'''
-    net = ffnet.FeedForwardNet()
-    net.add(Conv2D('input-conv', 64, 7, 2, pad=3, use_bias=False, input_sample_shape=(3, 224, 224)))
-    net.add(BatchNormalization('input-bn'))
-    net.add(Activation('input_relu'))
-    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
-    conf = cfg[depth]
-    if depth > 34:
-        stage(0, net, conf[0], 64, 64, 256, 1, bottleneck)
-        stage(1, net, conf[1], 256, 128, 512, 2, bottleneck)
-        stage(2, net, conf[2], 512, 256, 1024, 2, bottleneck)
-        stage(3, net, conf[3], 1024, 512, 2048, 2, bottleneck)
-    else:
-        stage(0, net, conf[0], 64, 64, 64, 1, basicblock)
-        stage(1, net, conf[1], 64, 128, 128, 2, basicblock)
-        stage(2, net, conf[2], 128, 256, 256, 2, basicblock)
-        stage(3, net, conf[3], 256, 512, 512, 2, basicblock)
-    net.add(AvgPooling2D('avg', 7, 1, pad=0))
-    net.add(Flatten('flat'))
-    net.add(Dense('dense', 1000))
-    return net
-
-def create_preact_resnet(depth=200):
-    '''Resnet with the batchnorm and relu moved to before the conv layer for each block'''
-    net = ffnet.FeedForwardNet()
-    net.add(Conv2D('input-conv', 64, 7, 2, pad=3, use_bias=False, input_sample_shape=(3, 224, 224)))
-    net.add(BatchNormalization('input-bn'))
-    net.add(Activation('input_relu'))
-    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
-    conf = cfg[depth]
-    if depth > 34:
-        stage(0, net, conf[0], 64, 64, 256, 1, bottleneck, preact=True)
-        stage(1, net, conf[1], 256, 128, 512, 2, bottleneck, preact=True)
-        stage(2, net, conf[2], 512, 256, 1024, 2, bottleneck, preact=True)
-        stage(3, net, conf[3], 1024, 512, 2048, 2, bottleneck, preact=True)
-    else:
-        stage(0, net, conf[0], 64, 64, 64, 1, basicblock, preact=True)
-        stage(1, net, conf[1], 64, 128, 128, 2, basicblock, preact=True)
-        stage(2, net, conf[2], 128, 256, 256, 2, basicblock, preact=True)
-        stage(3, net, conf[3], 256, 512, 512, 2, basicblock, preact=True)
-    net.add(BatchNormalization('final-bn'))
-    net.add(Activation('final-relu'))
-    net.add(AvgPooling2D('avg', 7, 1, pad=0))
-    net.add(Flatten('flat'))
-    net.add(Dense('dense', 1000))
-    return net
-
-
-def create_wide_resnet(depth=50):
-    '''Similar to the original resnet, except that a<=b<=c for the bottleneck block'''
-    net = ffnet.FeedForwardNet()
-    net.add(Conv2D('input-conv', 64, 7, 2, pad=3, use_bias=False, input_sample_shape=(3, 224, 224)))
-    net.add(BatchNormalization('input-bn'))
-    net.add(Activation('input_relu'))
-    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
-
-    stage(0, net, 3, 64, 128, 256, 1, bottleneck)
-    stage(1, net, 4, 256, 256, 512, 2, bottleneck)
-    stage(2, net, 6, 512, 512, 1024, 2, bottleneck)
-    stage(3, net, 3, 1024, 1024, 2048, 2, bottleneck)
-
-    net.add(AvgPooling2D('avg_pool', 7, 1, pad=0))
-    net.add(Flatten('flag'))
-    net.add(Dense('dense', 1000))
-    return net
-
-
-def create_net(name, depth, use_cpu):
-    if use_cpu:
-        layer.engine = 'singacpp'
-    if name == 'resnet':
-        return create_resnet(depth)
-    elif name == 'wrn':
-        return create_wide_resnet(depth)
-    elif name == 'preact':
-        return create_preact_resnet(depth)
-    elif name == 'addbn':
-        return create_addbn_resnet(depth)
-
-
-if __name__ == '__main__':
-    create_net('wrn', 50, False)
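As a usage sketch, any of the four variants can be instantiated and its parameters listed (assuming a working SINGA install; `use_cpu=True` keeps the layers on the singacpp engine):

    import model

    net = model.create_net('resnet', 18, use_cpu=True)
    for name, val in zip(net.param_names(), net.param_values()):
        print name, val.shape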
diff --git a/examples/imagenet/resnet/serve.py b/examples/imagenet/resnet/serve.py
deleted file mode 100644
index ba5adb1..0000000
--- a/examples/imagenet/resnet/serve.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-import time
-import numpy as np
-import threading
-import traceback
-from scipy.misc import imread, imresize
-from argparse import ArgumentParser
-
-from singa import device
-from singa import tensor
-from singa import data
-from singa import image_tool
-from singa import metric
-from rafiki.agent import Agent, MsgType
-import model
-
-tool = image_tool.ImageTool()
-num_augmentation = 10
-crop_size = 224
-mean = np.array([0.485, 0.456, 0.406])
-std = np.array([0.229, 0.224, 0.225])
-def image_transform(img):
-    '''Input an image path and return a set of augmented images (type Image)'''
-    global tool
-    return tool.load(img).resize_by_list([256]).crop5((crop_size, crop_size), 5).flip(2).get()
-
-
-def predict(net, images, num=10):
-    '''predict probability distribution for one net.
-
-    Args:
-        net: neural net (vgg or resnet)
-        images: a batch of augmented images (type numpy)
-        num: num of augmentations
-    '''
-    prob = net.predict(images)
-    prob = tensor.to_numpy(prob)
-    prob = prob.reshape((images.shape[0] / num, num, -1))
-    prob = np.average(prob, 1)
-    return prob
-
-
-def allowed_file(filename):
-    return '.' in filename and filename.rsplit('.', 1)[1] in \
-        ["PNG", "png", "jpg", "JPG", "JPEG", "jpeg"]
-
-
-def serve(net, label_map, dev, agent, topk=5):
-    '''Serve to predict image labels.
-
-    It prints the topk food names for each image.
-
-    Args:
-        label_map: a list of food names, corresponding to the index in meta_file
-    '''
-
-    images = tensor.Tensor((num_augmentation, 3, crop_size, crop_size), dev)
-    while True:
-        msg, val = agent.pull()
-        if msg is None:
-            time.sleep(0.1)
-            continue
-        msg = MsgType.parse(msg)
-        if msg.is_request():
-            try:
-                # process images
-                im = [np.array(x.convert('RGB'), dtype=np.float32).transpose(2, 0, 1) for x in image_transform(val['image'])]
-                im = np.array(im) / 256
-                im -= mean[np.newaxis, :, np.newaxis, np.newaxis]
-                im /= std[np.newaxis, :, np.newaxis, np.newaxis]
-                images.copy_from_numpy(im)
-                print "input: ", images.l1()
-                # do prediction
-                prob = predict(net, images, num_augmentation)[0]
-                idx = np.argsort(-prob)
-                # prepare results
-                response = ""
-                for i in range(topk):
-                    response += "%s:%f <br/>" % (label_map[idx[i]], prob[idx[i]])
-            except:
-                traceback.print_exc()
-                response = "sorry, system error during prediction."
-            agent.push(MsgType.kResponse, response)
-        elif msg.is_command():
-            if MsgType.kCommandStop.equal(msg):
-                print 'get stop command'
-                agent.push(MsgType.kStatus, "success")
-                break
-            else:
-                print 'get unsupported command %s' % str(msg)
-                agent.push(MsgType.kStatus, "Unknown command")
-        else:
-            print 'get unsupported message %s' % str(msg)
-            agent.push(MsgType.kStatus, "unsupported msg; going to shutdown")
-            break
-    print "server stop"
-
-def main():
-    try:
-        # Setup argument parser
-        parser = ArgumentParser(description="Wide residual network")
-
-        parser.add_argument("--port", default=9999, help="listen port")
-        parser.add_argument("--use_cpu", action="store_true",
-                            help="If set, load models onto CPU devices")
-        parser.add_argument("--parameter_file", default="wrn-50-2.pickle")
-        parser.add_argument("--model", choices = ['resnet', 'wrn', 'preact', 'addbn'], default='wrn')
-        parser.add_argument("--depth", type=int, choices = [18, 34, 50, 101, 152, 200], default='50')
-
-        # Process arguments
-        args = parser.parse_args()
-        port = args.port
-
-        # start the agent to serve http requests
-        agent = Agent(port)
-
-        net = model.create_net(args.model, args.depth, args.use_cpu)
-        if args.use_cpu:
-            print 'Using CPU'
-            dev = device.get_default_device()
-        else:
-            print 'Using GPU'
-            dev = device.create_cuda_gpu()
-            net.to_device(dev)
-        model.init_params(net, args.parameter_file)
-        print 'Finish loading models'
-
-        labels = np.loadtxt('synset_words.txt', str, delimiter='\t ')
-        serve(net, labels, dev, agent)
-
-        # acc = evaluate(net, '../val_list.txt',  'image/val', dev)
-        # print acc
-
-        # wait the agent finish handling http request
-        agent.stop()
-    except SystemExit:
-        return
-    except:
-        traceback.print_exc()
-        sys.stderr.write("  for help use --help \n\n")
-        return 2
-
-
-if __name__ == '__main__':
-    main()
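Besides the curl commands in the README, a running server can be queried from Python; a minimal sketch assuming the third-party `requests` package and a local JPEG:

    import requests

    with open('image1.jpg', 'rb') as f:
        r = requests.post('http://localhost:9999/api', files={'image': f})
    print r.text  # top-k 'label:probability' entries separated by <br/>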
diff --git a/examples/index.rst b/examples/index.rst
deleted file mode 100644
index d08a557..0000000
--- a/examples/index.rst
+++ /dev/null
@@ -1,29 +0,0 @@
-..
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-..
-..     http://www.apache.org/licenses/LICENSE-2.0
-..
-.. Unless required by applicable law or agreed to in writing, software
-.. distributed under the License is distributed on an "AS IS" BASIS,
-.. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-.. See the License for the specific language governing permissions and
-.. limitations under the License.
-..
-
-Model Zoo
-=========
-
-.. toctree::
-
-   cifar10/README
-   char-rnn/README
-   imagenet/alexnet/README
-   imagenet/googlenet/README
-
-
diff --git a/examples/mlp/model.py b/examples/mlp/model.py
new file mode 100644
index 0000000..ab6a0bf
--- /dev/null
+++ b/examples/mlp/model.py
@@ -0,0 +1,114 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import layer
+from singa import model
+from singa import tensor
+
+
+class MLP(model.Model):
+
+    def __init__(self, data_size=10, perceptron_size=100, num_classes=10):
+        super(MLP, self).__init__()
+        self.num_classes = num_classes
+        self.dimension = 2
+
+        self.relu = layer.ReLU()
+        self.linear1 = layer.Linear(perceptron_size)
+        self.linear2 = layer.Linear(num_classes)
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+    def forward(self, inputs):
+        y = self.linear1(inputs)
+        y = self.relu(y)
+        y = self.linear2(y)
+        return y
+
+    def train_one_batch(self, x, y, dist_option, spars):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+
+        if dist_option == 'fp32':
+            self.optimizer(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def create_model(pretrained=False, **kwargs):
+    """Constructs a CNN model.
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained
+    """
+    model = MLP(**kwargs)
+
+    return model
+
+
+__all__ = ['MLP', 'create_model']
+
+if __name__ == "__main__":
+
+    import numpy as np
+    from singa import opt
+    from singa import device
+
+    # generate the boundary
+    f = lambda x: (5 * x + 1)
+    bd_x = np.linspace(-1.0, 1, 200)
+    bd_y = f(bd_x)
+    # generate the training data
+    x = np.random.uniform(-1, 1, 400)
+    y = f(x) + 2 * np.random.randn(len(x))
+    # convert training data to 2d space
+    label = np.asarray([5 * a + 1 > b for (a, b) in zip(x, y)]).astype(np.int32)
+    data = np.array([[a, b] for (a, b) in zip(x, y)], dtype=np.float32)
+
+    dev = device.create_cuda_gpu_on(0)
+    sgd = opt.SGD(0.05)
+    tx = tensor.Tensor((400, 2), dev, tensor.float32)
+    ty = tensor.Tensor((400,), dev, tensor.int32)
+    model = MLP(data_size=2, perceptron_size=3, num_classes=2)
+
+    # attach model to graph
+    model.set_optimizer(sgd)
+    model.compile([tx], is_train=True, use_graph=True, sequential=False)
+    model.train()
+
+    for i in range(1001):
+        tx.copy_from_numpy(data)
+        ty.copy_from_numpy(label)
+        out, loss = model(tx, ty, 'fp32', spars=None)
+
+        if i % 100 == 0:
+            print("training loss = ", tensor.to_numpy(loss)[0])
diff --git a/examples/mlp/native.py b/examples/mlp/native.py
new file mode 100644
index 0000000..00f4c0d
--- /dev/null
+++ b/examples/mlp/native.py
@@ -0,0 +1,90 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import tensor
+from singa.tensor import Tensor
+from singa import autograd
+from singa import opt
+import numpy as np
+
+if __name__ == "__main__":
+
+    autograd.training = True
+
+    # prepare training data in numpy array
+
+    # generate the boundary
+    f = lambda x: (5 * x + 1)
+    bd_x = np.linspace(-1.0, 1, 200)
+    bd_y = f(bd_x)
+    # generate the training data
+    x = np.random.uniform(-1, 1, 400)
+    y = f(x) + 2 * np.random.randn(len(x))
+    # convert training data to 2d space
+    label = np.asarray([5 * a + 1 > b for (a, b) in zip(x, y)])
+    data = np.array([[a, b] for (a, b) in zip(x, y)], dtype=np.float32)
+
+    def to_categorical(y, num_classes):
+        """
+        Converts a class vector (integers) to binary class matrix.
+
+        Args
+            y: class vector to be converted into a matrix
+                (integers from 0 to num_classes).
+            num_classes: total number of classes.
+
+        Return
+            A binary matrix representation of the input.
+        """
+        y = np.array(y, dtype="int")
+        n = y.shape[0]
+        categorical = np.zeros((n, num_classes))
+        categorical[np.arange(n), y] = 1
+        return categorical
+
+    label = to_categorical(label, 2).astype(np.float32)
+    print("train_data_shape:", data.shape)
+    print("train_label_shape:", label.shape)
+
+    inputs = Tensor(data=data)
+    target = Tensor(data=label)
+
+    w0 = Tensor(shape=(2, 3), requires_grad=True, stores_grad=True)
+    w0.gaussian(0.0, 0.1)
+    b0 = Tensor(shape=(3,), requires_grad=True, stores_grad=True)
+    b0.set_value(0.0)
+
+    w1 = Tensor(shape=(3, 2), requires_grad=True, stores_grad=True)
+    w1.gaussian(0.0, 0.1)
+    b1 = Tensor(shape=(2,), requires_grad=True, stores_grad=True)
+    b1.set_value(0.0)
+
+    sgd = opt.SGD(0.05)
+    # training process
+    for i in range(1001):
+        x = autograd.matmul(inputs, w0)
+        x = autograd.add_bias(x, b0)
+        x = autograd.relu(x)
+        x = autograd.matmul(x, w1)
+        x = autograd.add_bias(x, b1)
+        loss = autograd.softmax_cross_entropy(x, target)
+        sgd(loss)
+
+        if i % 100 == 0:
+            print("training loss = ", tensor.to_numpy(loss)[0])
diff --git a/examples/mnist/README.md b/examples/mnist/README.md
deleted file mode 100644
index 60a85e0..0000000
--- a/examples/mnist/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# Train an RBM model on the MNIST dataset
-
-This example trains an RBM model on the
-MNIST dataset. The RBM model and its hyper-parameters are set following
-[Hinton's paper](http://www.cs.toronto.edu/~hinton/science.pdf).
-
-## Running instructions
-
-1. Download the pre-processed [MNIST dataset](https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz)
-
-2. Start the training
-
-        python train.py mnist.pkl.gz
-
-By default the training code runs on the CPU. To run it on a GPU card, start
-the program with an additional argument
-
-        python train.py mnist.pkl.gz --use_gpu
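The archive is a gzipped pickle of (train, validation, test) splits; a minimal sketch for inspecting it (Python 3 shown, hence the encoding argument; shapes are indicative):

    import gzip
    import pickle

    with gzip.open('mnist.pkl.gz', 'rb') as f:
        train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
    print(train_set[0].shape)  # images, e.g. (50000, 784)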
diff --git a/examples/onnx/159008.jpg b/examples/onnx/159008.jpg
new file mode 100644
index 0000000..2b90845
--- /dev/null
+++ b/examples/onnx/159008.jpg
Binary files differ
diff --git a/examples/onnx/arcface.py b/examples/onnx/arcface.py
new file mode 100644
index 0000000..6050418
--- /dev/null
+++ b/examples/onnx/arcface.py
@@ -0,0 +1,116 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+from PIL import Image
+from sklearn import preprocessing
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    w, h = img.size
+    img = img.crop((0, (h - w) // 2, w, h - (h - w) // 2))
+    img = img.resize((112, 112))
+    img = np.array(img).astype(np.float32)
+    img = np.rollaxis(img, 2, 0)
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image():
+    # download image
+    img1 = Image.open(
+        check_exist_or_download(
+            'https://angus-doc.readthedocs.io/en/latest/_images/aurelien.jpg'))
+    img2 = Image.open(
+        check_exist_or_download(
+            'https://angus-doc.readthedocs.io/en/latest/_images/gwenn.jpg'))
+    return img1, img2
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    download_dir = '/tmp'
+    url = 'https://s3.amazonaws.com/onnx-model-zoo/arcface/resnet100/resnet100.tar.gz'
+    model_path = os.path.join(download_dir, 'resnet100', 'resnet100.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference demo
+    logging.info("preprocessing...")
+    img1, img2 = get_image()
+    img1 = preprocess(img1)
+    img2 = preprocess(img2)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img1, img2])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.Tensor(device=dev, data=np.concatenate((img1, img2), axis=0))
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify the output against the reference test data (optional)
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'resnet100', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    embedding = tensor.to_numpy(y)
+    embedding = preprocessing.normalize(embedding)
+    embedding1 = embedding[0]
+    embedding2 = embedding[1]
+
+    # Compute squared distance between embeddings
+    dist = np.sum(np.square(embedding1 - embedding2))
+    # Compute cosine similarity between embeddings
+    sim = np.dot(embedding1, embedding2.T)
+    # log the predictions
+    logging.info('Distance = %f' % (dist))
+    logging.info('Similarity = %f' % (sim))
\ No newline at end of file
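How the two scores are typically consumed downstream (a sketch; the threshold below is an illustrative assumption, not a value taken from this example):

    # hypothetical decision rule on the cosine similarity
    THRESHOLD = 0.5  # assumed; tune on a validation set
    logging.info('Same identity: %s' % (sim > THRESHOLD))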
diff --git a/examples/onnx/bert/bert-squad.py b/examples/onnx/bert/bert-squad.py
new file mode 100644
index 0000000..936968e
--- /dev/null
+++ b/examples/onnx/bert/bert-squad.py
@@ -0,0 +1,165 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import zipfile
+import numpy as np
+import json
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+import tokenization
+from run_onnx_squad import read_squad_examples, convert_examples_to_features, RawResult, write_predictions
+
+import sys
+sys.path.append(os.path.dirname(__file__) + '/..')
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+max_answer_length = 30
+max_seq_length = 256
+doc_stride = 128
+max_query_length = 64
+n_best_size = 20
+batch_size = 1
+
+
+def load_vocab():
+    url = 'https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip'
+    download_dir = '/tmp/'
+    filename = os.path.join(download_dir, 'uncased_L-12_H-768_A-12',
+                            'vocab.txt')
+    with zipfile.ZipFile(check_exist_or_download(url), 'r') as z:
+        z.extractall(path=download_dir)
+    return filename
+
+
+def preprocess():
+    vocab_file = load_vocab()
+    tokenizer = tokenization.FullTokenizer(vocab_file=vocab_file,
+                                           do_lower_case=True)
+    predict_file = os.path.join(os.path.dirname(__file__), 'inputs.json')
+    # print content
+    with open(predict_file) as json_file:
+        test_data = json.load(json_file)
+        print("The input is:", json.dumps(test_data, indent=2))
+
+    eval_examples = read_squad_examples(input_file=predict_file)
+
+    # Use the convert_examples_to_features method from run_onnx_squad to get features from the input
+    input_ids, input_mask, segment_ids, extra_data = convert_examples_to_features(
+        eval_examples, tokenizer, max_seq_length, doc_stride, max_query_length)
+    return input_ids, input_mask, segment_ids, extra_data, eval_examples
+
+
+def postprocess(eval_examples, extra_data, all_results):
+    output_dir = 'predictions'
+    os.makedirs(output_dir, exist_ok=True)
+    output_prediction_file = os.path.join(output_dir, "predictions.json")
+    output_nbest_file = os.path.join(output_dir, "nbest_predictions.json")
+    write_predictions(eval_examples, extra_data, all_results, n_best_size,
+                      max_answer_length, True, output_prediction_file,
+                      output_nbest_file)
+
+    # print results
+    with open(output_prediction_file) as json_file:
+        test_data = json.load(json_file)
+        print("The result is:", json.dumps(test_data, indent=2))
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://media.githubusercontent.com/media/onnx/models/master/text/machine_comprehension/bert-squad/model/bertsquad-10.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'download_sample_10',
+                              'bertsquad10.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    input_ids, input_mask, segment_ids, extra_data, eval_examples = preprocess()
+
+    m = None
+    dev = device.create_cuda_gpu()
+    n = len(input_ids)
+    bs = batch_size
+    all_results = []
+
+    for idx in range(0, n):
+        logging.info("starting infer sample {}...".format(idx))
+        item = eval_examples[idx]
+        inputs = [
+            np.array([item.qas_id], dtype=np.int32),
+            segment_ids[idx:idx + bs].astype(np.int32),
+            input_mask[idx:idx + bs].astype(np.int32),
+            input_ids[idx:idx + bs].astype(np.int32),
+        ]
+
+        x_batch = []
+        for inp in inputs:
+            tmp_tensor = tensor.from_numpy(inp)
+            tmp_tensor.to_device(dev)
+            x_batch.append(tmp_tensor)
+
+        # prepare the model
+        if m is None:
+            logging.info("model compling...")
+            m = MyModel(onnx_model)
+            # m.compile(x_batch, is_train=False, use_graph=True, sequential=True)
+
+        logging.info("model running for sample {}...".format(idx))
+        outputs = m.forward(*x_batch)
+
+        logging.info("hanlde the result of sample {}...".format(idx))
+        result = []
+        for outp in outputs:
+            result.append(tensor.to_numpy(outp))
+
+        in_batch = result[1].shape[0]
+        start_logits = [float(x) for x in result[1][0].flat]
+        end_logits = [float(x) for x in result[0][0].flat]
+        for i in range(0, in_batch):
+            unique_id = len(all_results)
+            all_results.append(
+                RawResult(unique_id=unique_id,
+                          start_logits=start_logits,
+                          end_logits=end_logits))
+    # postprocessing
+    logging.info("postprocessing...")
+    postprocess(eval_examples, extra_data, all_results)
\ No newline at end of file
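postprocess() writes its output under the predictions/ directory; a minimal sketch for reading the selected answers back:

    import json

    with open('predictions/predictions.json') as f:
        print(json.dumps(json.load(f), indent=2))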
diff --git a/examples/onnx/bert/inputs.json b/examples/onnx/bert/inputs.json
new file mode 100644
index 0000000..b9e313b
--- /dev/null
+++ b/examples/onnx/bert/inputs.json
@@ -0,0 +1,27 @@
+{
+  "version": "1.4",
+  "data": [
+    {
+      "paragraphs": [
+        {
+          "context": "In its early years, the new convention center failed to meet attendance and revenue expectations.[12] By 2002, many Silicon Valley businesses were choosing the much larger Moscone Center in San Francisco over the San Jose Convention Center due to the latter's limited space. A ballot measure to finance an expansion via a hotel tax failed to reach the required two-thirds majority to pass. In June 2005, Team San Jose built the South Hall, a $6.77 million, blue and white tent, adding 80,000 square feet (7,400 m2) of exhibit space",
+          "qas": [
+            {
+              "question": "where is the businesses choosing to go?",
+              "id": "1"
+            },
+            {
+              "question": "how may votes did the ballot measure need?",
+              "id": "2"
+            },
+            {
+              "question": "By what year many Silicon Valley businesses were choosing the Moscone Center?",
+              "id": "3"
+            }
+          ]
+        }
+      ],
+      "title": "Conference Center"
+    }
+  ]
+}
diff --git a/examples/onnx/bert/run_onnx_squad.py b/examples/onnx/bert/run_onnx_squad.py
new file mode 100644
index 0000000..f9a60da
--- /dev/null
+++ b/examples/onnx/bert/run_onnx_squad.py
@@ -0,0 +1,507 @@
+# Copyright 2018 The Google AI Language Team Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Inference for squad/bert using onnx.
+
+This is going to do the same as 'python run_squad.py --do_predict=True ...' using a squad/bert model
+that was converted to onnx. Lots of code was taken from run_squad.py.
+You run it with:
+
+
+python onnx_squad.py --model $SQUAD_MODEL/squad.onnx \
+                     --vocab_file $BERT_BASE_DIR/uncased_L-12_H-768_A-12/vocab.txt \
+                     --predict_file $SQUAD_DATA/dev-v1.1.json \
+                     --bert_config_file $BERT_BASE_DIR/uncased_L-12_H-768_A-12/bert_config.json \
+                     --output /tmp/
+"""
+
+import collections
+import json
+import math
+
+import numpy as np
+import six
+import tokenization
+
+RawResult = collections.namedtuple("RawResult",
+                                   ["unique_id", "start_logits", "end_logits"])
+
+Feature = collections.namedtuple("Feature", [
+    "unique_id", "tokens", "example_index", "token_to_orig_map",
+    "token_is_max_context"
+])
+
+
+class SquadExample(object):
+    """A single training/test example for simple sequence classification."""
+
+    def __init__(self,
+                 qas_id,
+                 question_text,
+                 doc_tokens,
+                 orig_answer_text=None,
+                 start_position=None,
+                 end_position=None):
+        self.qas_id = qas_id
+        self.question_text = question_text
+        self.doc_tokens = doc_tokens
+        self.orig_answer_text = orig_answer_text
+        self.start_position = start_position
+        self.end_position = end_position
+
+    def __str__(self):
+        return self.__repr__()
+
+    def __repr__(self):
+        s = []
+        s.append("qas_id: %s" % (tokenization.printable_text(self.qas_id)))
+        s.append("question_text: %s" %
+                 (tokenization.printable_text(self.question_text)))
+        s.append("doc_tokens: [%s]" % (" ".join(self.doc_tokens)))
+        if self.start_position:
+            s.append("start_position: %d" % (self.start_position))
+        if self.end_position:
+            s.append("end_position: %d" % (self.end_position))
+        return ", ".join(s)
+
+
+def _check_is_max_context(doc_spans, cur_span_index, position):
+    """Check if this is the 'max context' doc span for the token."""
+
+    # Because of the sliding window approach taken to scoring documents, a single
+    # token can appear in multiple documents. E.g.
+    #  Doc: the man went to the store and bought a gallon of milk
+    #  Span A: the man went to the
+    #  Span B: to the store and bought
+    #  Span C: and bought a gallon of
+    #  ...
+    #
+    # Now the word 'bought' will have two scores from spans B and C. We only
+    # want to consider the score with "maximum context", which we define as
+    # the *minimum* of its left and right context (the *sum* of left and
+    # right context will always be the same, of course).
+    #
+    # In the example the maximum context for 'bought' would be span C since
+    # it has 1 left context and 3 right context, while span B has 4 left context
+    # and 0 right context.
+    best_score = None
+    best_span_index = None
+    for (span_index, doc_span) in enumerate(doc_spans):
+        end = doc_span.start + doc_span.length - 1
+        if position < doc_span.start:
+            continue
+        if position > end:
+            continue
+        num_left_context = position - doc_span.start
+        num_right_context = end - position
+        score = min(num_left_context,
+                    num_right_context) + 0.01 * doc_span.length
+        if best_score is None or score > best_score:
+            best_score = score
+            best_span_index = span_index
+
+    return cur_span_index == best_span_index
+
+
+def convert_examples_to_features(examples, tokenizer, max_seq_length,
+                                 doc_stride, max_query_length):
+    """Loads a data file into a list of `InputBatch`s."""
+
+    res_input_ids = []
+    res_input_mask = []
+    res_segment_ids = []
+    extra = []
+    unique_id = 0
+
+    for (example_index, example) in enumerate(examples):
+        query_tokens = tokenizer.tokenize(example.question_text)
+
+        if len(query_tokens) > max_query_length:
+            query_tokens = query_tokens[0:max_query_length]
+
+        tok_to_orig_index = []
+        orig_to_tok_index = []
+        all_doc_tokens = []
+        for (i, token) in enumerate(example.doc_tokens):
+            orig_to_tok_index.append(len(all_doc_tokens))
+            sub_tokens = tokenizer.tokenize(token)
+            for sub_token in sub_tokens:
+                tok_to_orig_index.append(i)
+                all_doc_tokens.append(sub_token)
+
+        # The -3 accounts for [CLS], [SEP] and [SEP]
+        max_tokens_for_doc = max_seq_length - len(query_tokens) - 3
+
+        # We can have documents that are longer than the maximum sequence length.
+        # To deal with this we do a sliding window approach, where we take chunks
+        # of up to our max length with a stride of `doc_stride`.
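+        # Worked example (hypothetical numbers): with 300 doc tokens,
+        # max_tokens_for_doc=100 and doc_stride=128, the loop below yields
+        # the spans (start=0, length=100), (100, 100) and (200, 100), since
+        # each step advances by min(length, doc_stride) = 100.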
+        _DocSpan = collections.namedtuple("DocSpan", ["start", "length"])
+        doc_spans = []
+        start_offset = 0
+        while start_offset < len(all_doc_tokens):
+            length = len(all_doc_tokens) - start_offset
+            if length > max_tokens_for_doc:
+                length = max_tokens_for_doc
+            doc_spans.append(_DocSpan(start=start_offset, length=length))
+            if start_offset + length == len(all_doc_tokens):
+                break
+            start_offset += min(length, doc_stride)
+
+        for (doc_span_index, doc_span) in enumerate(doc_spans):
+            tokens = []
+            token_to_orig_map = {}
+            token_is_max_context = {}
+            segment_ids = []
+            tokens.append("[CLS]")
+            segment_ids.append(0)
+            for token in query_tokens:
+                tokens.append(token)
+                segment_ids.append(0)
+            tokens.append("[SEP]")
+            segment_ids.append(0)
+
+            for i in range(doc_span.length):
+                split_token_index = doc_span.start + i
+                token_to_orig_map[len(
+                    tokens)] = tok_to_orig_index[split_token_index]
+
+                is_max_context = _check_is_max_context(doc_spans,
+                                                       doc_span_index,
+                                                       split_token_index)
+                token_is_max_context[len(tokens)] = is_max_context
+                tokens.append(all_doc_tokens[split_token_index])
+                segment_ids.append(1)
+            tokens.append("[SEP]")
+            segment_ids.append(1)
+
+            input_ids = tokenizer.convert_tokens_to_ids(tokens)
+
+            # The mask has 1 for real tokens and 0 for padding tokens. Only real
+            # tokens are attended to.
+            input_mask = [1] * len(input_ids)
+
+            # Zero-pad up to the sequence length.
+            while len(input_ids) < max_seq_length:
+                input_ids.append(0)
+                input_mask.append(0)
+                segment_ids.append(0)
+            res_input_ids.append(np.array(input_ids, dtype=np.int64))
+            res_input_mask.append(np.array(input_mask, dtype=np.int64))
+            res_segment_ids.append(np.array(segment_ids, dtype=np.int64))
+            feature = Feature(unique_id=unique_id,
+                              tokens=tokens,
+                              example_index=example_index,
+                              token_to_orig_map=token_to_orig_map,
+                              token_is_max_context=token_is_max_context)
+            extra.append(feature)
+            unique_id += 1
+    return np.array(res_input_ids), np.array(res_input_mask), np.array(
+        res_segment_ids), extra
+
+
+def read_squad_examples(input_file):
+    """Read a SQuAD json file into a list of SquadExample."""
+    with open(input_file, "r") as f:
+        input_data = json.load(f)["data"]
+
+    def is_whitespace(c):
+        if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
+            return True
+        return False
+
+    examples = []
+    for idx, entry in enumerate(input_data):
+        for paragraph in entry["paragraphs"]:
+            paragraph_text = paragraph["context"]
+            doc_tokens = []
+            char_to_word_offset = []
+            prev_is_whitespace = True
+            for c in paragraph_text:
+                if is_whitespace(c):
+                    prev_is_whitespace = True
+                else:
+                    if prev_is_whitespace:
+                        doc_tokens.append(c)
+                    else:
+                        doc_tokens[-1] += c
+                    prev_is_whitespace = False
+                char_to_word_offset.append(len(doc_tokens) - 1)
+
+            for qa in paragraph["qas"]:
+                qas_id = qa["id"]
+                question_text = qa["question"]
+                start_position = None
+                end_position = None
+                orig_answer_text = None
+                example = SquadExample(qas_id=qas_id,
+                                       question_text=question_text,
+                                       doc_tokens=doc_tokens,
+                                       orig_answer_text=orig_answer_text,
+                                       start_position=start_position,
+                                       end_position=end_position)
+                examples.append(example)
+    return examples
+
+
+def write_predictions(all_examples, all_features, all_results, n_best_size,
+                      max_answer_length, do_lower_case, output_prediction_file,
+                      output_nbest_file):
+    """Write final predictions to the json file."""
+    example_index_to_features = collections.defaultdict(list)
+    for feature in all_features:
+        example_index_to_features[feature.example_index].append(feature)
+
+    unique_id_to_result = {}
+    for result in all_results:
+        unique_id_to_result[result.unique_id] = result
+
+    _PrelimPrediction = collections.namedtuple(  # pylint: disable=invalid-name
+        "PrelimPrediction", [
+            "feature_index", "start_index", "end_index", "start_logit",
+            "end_logit"
+        ])
+
+    all_predictions = collections.OrderedDict()
+    all_nbest_json = collections.OrderedDict()
+    for (example_index, example) in enumerate(all_examples):
+        features = example_index_to_features[example_index]
+        prelim_predictions = []
+        for (feature_index, feature) in enumerate(features):
+            if feature.unique_id not in unique_id_to_result:
+                print("feature not in unique_id_to_result:", feature.unique_id)
+                continue
+            result = unique_id_to_result[feature.unique_id]
+
+            start_indexes = _get_best_indexes(result.start_logits, n_best_size)
+            end_indexes = _get_best_indexes(result.end_logits, n_best_size)
+            for start_index in start_indexes:
+                for end_index in end_indexes:
+                    # We could hypothetically create invalid predictions, e.g., predict
+                    # that the start of the span is in the question. We throw out all
+                    # invalid predictions.
+                    if start_index >= len(feature.tokens):
+                        continue
+                    if end_index >= len(feature.tokens):
+                        continue
+                    if start_index not in feature.token_to_orig_map:
+                        continue
+                    if end_index not in feature.token_to_orig_map:
+                        continue
+                    if not feature.token_is_max_context.get(start_index, False):
+                        continue
+                    if end_index < start_index:
+                        continue
+                    length = end_index - start_index + 1
+                    if length > max_answer_length:
+                        continue
+                    prelim_predictions.append(
+                        _PrelimPrediction(
+                            feature_index=feature_index,
+                            start_index=start_index,
+                            end_index=end_index,
+                            start_logit=result.start_logits[start_index],
+                            end_logit=result.end_logits[end_index]))
+
+        prelim_predictions = sorted(prelim_predictions,
+                                    key=lambda x: (x.start_logit + x.end_logit),
+                                    reverse=True)
+
+        _NbestPrediction = collections.namedtuple(  # pylint: disable=invalid-name
+            "NbestPrediction", ["text", "start_logit", "end_logit"])
+
+        seen_predictions = {}
+        nbest = []
+        for pred in prelim_predictions:
+            if len(nbest) >= n_best_size:
+                break
+            feature = features[pred.feature_index]
+
+            tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
+            orig_doc_start = feature.token_to_orig_map[pred.start_index]
+            orig_doc_end = feature.token_to_orig_map[pred.end_index]
+            orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)]
+            tok_text = " ".join(tok_tokens)
+
+            # De-tokenize WordPieces that have been split off.
+            tok_text = tok_text.replace(" ##", "")
+            tok_text = tok_text.replace("##", "")
+
+            # Clean whitespace
+            tok_text = tok_text.strip()
+            tok_text = " ".join(tok_text.split())
+            orig_text = " ".join(orig_tokens)
+
+            final_text = get_final_text(tok_text, orig_text, do_lower_case)
+            if final_text in seen_predictions:
+                continue
+
+            seen_predictions[final_text] = True
+            nbest.append(
+                _NbestPrediction(text=final_text,
+                                 start_logit=pred.start_logit,
+                                 end_logit=pred.end_logit))
+
+        # In very rare edge cases we could have no valid predictions. So we
+        # just create a nonce prediction in this case to avoid failure.
+        if not nbest:
+            nbest.append(
+                _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0))
+
+        assert len(nbest) >= 1
+
+        total_scores = []
+        for entry in nbest:
+            total_scores.append(entry.start_logit + entry.end_logit)
+
+        probs = _compute_softmax(total_scores)
+
+        nbest_json = []
+        for (i, entry) in enumerate(nbest):
+            output = collections.OrderedDict()
+            output["text"] = entry.text
+            output["probability"] = probs[i]
+            output["start_logit"] = float(entry.start_logit)
+            output["end_logit"] = float(entry.end_logit)
+            nbest_json.append(output)
+
+        all_predictions[example.qas_id] = nbest_json[0]["text"]
+        all_nbest_json[example.qas_id] = nbest_json
+
+    with open(output_prediction_file, "w") as writer:
+        writer.write(json.dumps(all_predictions, indent=4) + "\n")
+
+    with open(output_nbest_file, "w") as writer:
+        writer.write(json.dumps(all_nbest_json, indent=4) + "\n")
+
+
+def get_final_text(pred_text, orig_text, do_lower_case):
+    """Project the tokenized prediction back to the original text."""
+
+    # When we created the data, we kept track of the alignment between original
+    # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So
+    # now `orig_text` contains the span of our original text corresponding to the
+    # span that we predicted.
+    #
+    # However, `orig_text` may contain extra characters that we don't want in
+    # our prediction.
+    #
+    # For example, let's say:
+    #   pred_text = steve smith
+    #   orig_text = Steve Smith's
+    #
+    # We don't want to return `orig_text` because it contains the extra "'s".
+    #
+    # We don't want to return `pred_text` because it's already been normalized
+    # (the SQuAD eval script also does punctuation stripping/lower casing but
+    # our tokenizer does additional normalization like stripping accent
+    # characters).
+    #
+    # What we really want to return is "Steve Smith".
+    #
+    # Therefore, we have to apply a semi-complicated alignment heuristic between
+    # `pred_text` and `orig_text` to get a character-to-character alignment. This
+    # can fail in certain cases in which case we just return `orig_text`.
+
+    def _strip_spaces(text):
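+        # Illustrative: _strip_spaces("a b") returns ("ab", {0: 0, 1: 2}),
+        # mapping each index in the space-free string back to its position
+        # in the original string.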
+        ns_chars = []
+        ns_to_s_map = collections.OrderedDict()
+        for (i, c) in enumerate(text):
+            if c == " ":
+                continue
+            ns_to_s_map[len(ns_chars)] = i
+            ns_chars.append(c)
+        ns_text = "".join(ns_chars)
+        return (ns_text, ns_to_s_map)
+
+    # We first tokenize `orig_text`, strip whitespace from the result
+    # and `pred_text`, and check if they are the same length. If they are
+    # NOT the same length, the heuristic has failed. If they are the same
+    # length, we assume the characters are one-to-one aligned.
+    tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case)
+
+    tok_text = " ".join(tokenizer.tokenize(orig_text))
+
+    start_position = tok_text.find(pred_text)
+    if start_position == -1:
+        return orig_text
+    end_position = start_position + len(pred_text) - 1
+
+    (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text)
+    (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text)
+
+    if len(orig_ns_text) != len(tok_ns_text):
+        return orig_text
+
+    # We then project the characters in `pred_text` back to `orig_text` using
+    # the character-to-character alignment.
+    tok_s_to_ns_map = {}
+    for (i, tok_index) in six.iteritems(tok_ns_to_s_map):
+        tok_s_to_ns_map[tok_index] = i
+
+    orig_start_position = None
+    if start_position in tok_s_to_ns_map:
+        ns_start_position = tok_s_to_ns_map[start_position]
+        if ns_start_position in orig_ns_to_s_map:
+            orig_start_position = orig_ns_to_s_map[ns_start_position]
+
+    if orig_start_position is None:
+        return orig_text
+
+    orig_end_position = None
+    if end_position in tok_s_to_ns_map:
+        ns_end_position = tok_s_to_ns_map[end_position]
+        if ns_end_position in orig_ns_to_s_map:
+            orig_end_position = orig_ns_to_s_map[ns_end_position]
+
+    if orig_end_position is None:
+        return orig_text
+
+    output_text = orig_text[orig_start_position:(orig_end_position + 1)]
+    return output_text
+
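+# Illustrative example, mirroring the comment above: with
+# pred_text="steve smith" and orig_text="Steve Smith's",
+# get_final_text(pred_text, orig_text, do_lower_case=True) returns
+# "Steve Smith", dropping the stray "'s".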
+
+def _get_best_indexes(logits, n_best_size):
+    """Get the n-best logits from a list."""
+    index_and_score = sorted(enumerate(logits),
+                             key=lambda x: x[1],
+                             reverse=True)
+    best_indexes = []
+    for i in range(len(index_and_score)):
+        if i >= n_best_size:
+            break
+        best_indexes.append(index_and_score[i][0])
+    return best_indexes
+
+
+def _compute_softmax(scores):
+    """Compute softmax probability over raw logits."""
+    if not scores:
+        return []
+
+    max_score = None
+    for score in scores:
+        if max_score is None or score > max_score:
+            max_score = score
+
+    exp_scores = []
+    total_sum = 0.0
+    for score in scores:
+        x = math.exp(score - max_score)
+        exp_scores.append(x)
+        total_sum += x
+
+    probs = []
+    for score in exp_scores:
+        probs.append(score / total_sum)
+    return probs
\ No newline at end of file
diff --git a/examples/onnx/bert/tokenization.py b/examples/onnx/bert/tokenization.py
new file mode 100644
index 0000000..09b9b4f
--- /dev/null
+++ b/examples/onnx/bert/tokenization.py
@@ -0,0 +1,395 @@
+# coding=utf-8
+# Copyright 2018 The Google AI Language Team Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tokenization classes."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import re
+import unicodedata
+import six
+
+
+def validate_case_matches_checkpoint(do_lower_case, init_checkpoint):
+  """Checks whether the casing config is consistent with the checkpoint name."""
+
+  # The casing has to be passed in by the user and there is no explicit check
+  # as to whether it matches the checkpoint. The casing information probably
+  # should have been stored in the bert_config.json file, but it's not, so
+  # we have to heuristically detect it to validate.
+
+  if not init_checkpoint:
+    return
+
+  m = re.match("^.*?([A-Za-z0-9_-]+)/bert_model.ckpt", init_checkpoint)
+  if m is None:
+    return
+
+  model_name = m.group(1)
+
+  lower_models = [
+      "uncased_L-24_H-1024_A-16", "uncased_L-12_H-768_A-12",
+      "multilingual_L-12_H-768_A-12", "chinese_L-12_H-768_A-12"
+  ]
+
+  cased_models = [
+      "cased_L-12_H-768_A-12", "cased_L-24_H-1024_A-16",
+      "multi_cased_L-12_H-768_A-12"
+  ]
+
+  is_bad_config = False
+  if model_name in lower_models and not do_lower_case:
+    is_bad_config = True
+    actual_flag = "False"
+    case_name = "lowercased"
+    opposite_flag = "True"
+
+  if model_name in cased_models and do_lower_case:
+    is_bad_config = True
+    actual_flag = "True"
+    case_name = "cased"
+    opposite_flag = "False"
+
+  if is_bad_config:
+    raise ValueError(
+        "You passed in `--do_lower_case=%s` with `--init_checkpoint=%s`. "
+        "However, `%s` seems to be a %s model, so you "
+        "should pass in `--do_lower_case=%s` so that the fine-tuning matches "
+        "how the model was pre-training. If this error is wrong, please "
+        "just comment out this check." % (actual_flag, init_checkpoint,
+                                          model_name, case_name, opposite_flag))
+
+
+def convert_to_unicode(text):
+  """Converts `text` to Unicode (if it's not already), assuming utf-8 input."""
+  if six.PY3:
+    if isinstance(text, str):
+      return text
+    elif isinstance(text, bytes):
+      return text.decode("utf-8", "ignore")
+    else:
+      raise ValueError("Unsupported string type: %s" % (type(text)))
+  elif six.PY2:
+    if isinstance(text, str):
+      return text.decode("utf-8", "ignore")
+    else:
+      raise ValueError("Unsupported string type: %s" % (type(text)))
+  else:
+    raise ValueError("Not running on Python2 or Python 3?")
+
+
+def printable_text(text):
+  """Returns text encoded in a way suitable for print or `tf.logging`."""
+
+  # These functions want `str` for both Python2 and Python3, but in one case
+  # it's a Unicode string and in the other it's a byte string.
+  if six.PY3:
+    if isinstance(text, str):
+      return text
+    elif isinstance(text, bytes):
+      return text.decode("utf-8", "ignore")
+    else:
+      raise ValueError("Unsupported string type: %s" % (type(text)))
+  elif six.PY2:
+    if isinstance(text, str):
+      return text
+    else:
+      raise ValueError("Unsupported string type: %s" % (type(text)))
+  else:
+    raise ValueError("Not running on Python2 or Python 3?")
+
+
+def load_vocab(vocab_file):
+  """Loads a vocabulary file into a dictionary."""
+  vocab = collections.OrderedDict()
+  index = 0
+  with open(vocab_file, "rb") as reader:
+    while True:
+      token = reader.readline()
+      token = token.decode("utf-8", "ignore")
+      if not token:
+        break
+      token = token.strip()
+      vocab[token] = index
+      index += 1
+  return vocab
+
+
+def convert_by_vocab(vocab, items):
+  """Converts a sequence of [tokens|ids] using the vocab."""
+  output = []
+  for item in items:
+    output.append(vocab[item])
+  return output
+
+
+def convert_tokens_to_ids(vocab, tokens):
+  return convert_by_vocab(vocab, tokens)
+
+
+def convert_ids_to_tokens(inv_vocab, ids):
+  return convert_by_vocab(inv_vocab, ids)
+
+
+def whitespace_tokenize(text):
+  """Runs basic whitespace cleaning and splitting on a piece of text."""
+  text = text.strip()
+  if not text:
+    return []
+  tokens = text.split()
+  return tokens
+
+
+class FullTokenizer(object):
+  """Runs end-to-end tokenziation."""
+
+  def __init__(self, vocab_file, do_lower_case=True):
+    self.vocab = load_vocab(vocab_file)
+    self.inv_vocab = {v: k for k, v in self.vocab.items()}
+    self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
+    self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
+
+  def tokenize(self, text):
+    split_tokens = []
+    for token in self.basic_tokenizer.tokenize(text):
+      for sub_token in self.wordpiece_tokenizer.tokenize(token):
+        split_tokens.append(sub_token)
+
+    return split_tokens
+
+  def convert_tokens_to_ids(self, tokens):
+    return convert_by_vocab(self.vocab, tokens)
+
+  def convert_ids_to_tokens(self, ids):
+    return convert_by_vocab(self.inv_vocab, ids)
+
+
+class BasicTokenizer(object):
+  """Runs basic tokenization (punctuation splitting, lower casing, etc.)."""
+
+  def __init__(self, do_lower_case=True):
+    """Constructs a BasicTokenizer.
+
+    Args:
+      do_lower_case: Whether to lower case the input.
+    """
+    self.do_lower_case = do_lower_case
+
+  def tokenize(self, text):
+    """Tokenizes a piece of text."""
+    text = convert_to_unicode(text)
+    text = self._clean_text(text)
+
+    # This was added on November 1st, 2018 for the multilingual and Chinese
+    # models. This is also applied to the English models now, but it doesn't
+    # matter since the English models were not trained on any Chinese data
+    # and generally don't have any Chinese data in them (there are Chinese
+    # characters in the vocabulary because Wikipedia does have some Chinese
+    # words in the English Wikipedia).
+    text = self._tokenize_chinese_chars(text)
+
+    orig_tokens = whitespace_tokenize(text)
+    split_tokens = []
+    for token in orig_tokens:
+      if self.do_lower_case:
+        token = token.lower()
+        token = self._run_strip_accents(token)
+      split_tokens.extend(self._run_split_on_punc(token))
+
+    output_tokens = whitespace_tokenize(" ".join(split_tokens))
+    return output_tokens
+
+  def _run_strip_accents(self, text):
+    """Strips accents from a piece of text."""
+    text = unicodedata.normalize("NFD", text)
+    output = []
+    for char in text:
+      cat = unicodedata.category(char)
+      if cat == "Mn":
+        continue
+      output.append(char)
+    return "".join(output)
+
+  def _run_split_on_punc(self, text):
+    """Splits punctuation on a piece of text."""
+    chars = list(text)
+    i = 0
+    start_new_word = True
+    output = []
+    while i < len(chars):
+      char = chars[i]
+      if _is_punctuation(char):
+        output.append([char])
+        start_new_word = True
+      else:
+        if start_new_word:
+          output.append([])
+        start_new_word = False
+        output[-1].append(char)
+      i += 1
+
+    return ["".join(x) for x in output]
+
+  def _tokenize_chinese_chars(self, text):
+    """Adds whitespace around any CJK character."""
+    output = []
+    for char in text:
+      cp = ord(char)
+      if self._is_chinese_char(cp):
+        output.append(" ")
+        output.append(char)
+        output.append(" ")
+      else:
+        output.append(char)
+    return "".join(output)
+
+  def _is_chinese_char(self, cp):
+    """Checks whether CP is the codepoint of a CJK character."""
+    # This defines a "chinese character" as anything in the CJK Unicode block:
+    #   https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
+    #
+    # Note that the CJK Unicode block is NOT all Japanese and Korean characters,
+    # despite its name. The modern Korean Hangul alphabet is a different block,
+    # as is Japanese Hiragana and Katakana. Those alphabets are used to write
+    # space-separated words, so they are not treated specially and are
+    # handled like all of the other languages.
+    if ((cp >= 0x4E00 and cp <= 0x9FFF) or  #
+        (cp >= 0x3400 and cp <= 0x4DBF) or  #
+        (cp >= 0x20000 and cp <= 0x2A6DF) or  #
+        (cp >= 0x2A700 and cp <= 0x2B73F) or  #
+        (cp >= 0x2B740 and cp <= 0x2B81F) or  #
+        (cp >= 0x2B820 and cp <= 0x2CEAF) or
+        (cp >= 0xF900 and cp <= 0xFAFF) or  #
+        (cp >= 0x2F800 and cp <= 0x2FA1F)):  #
+      return True
+
+    return False
+
+  def _clean_text(self, text):
+    """Performs invalid character removal and whitespace cleanup on text."""
+    output = []
+    for char in text:
+      cp = ord(char)
+      if cp == 0 or cp == 0xfffd or _is_control(char):
+        continue
+      if _is_whitespace(char):
+        output.append(" ")
+      else:
+        output.append(char)
+    return "".join(output)
+
+
+class WordpieceTokenizer(object):
+  """Runs WordPiece tokenziation."""
+
+  def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=200):
+    self.vocab = vocab
+    self.unk_token = unk_token
+    self.max_input_chars_per_word = max_input_chars_per_word
+
+  def tokenize(self, text):
+    """Tokenizes a piece of text into its word pieces.
+
+    This uses a greedy longest-match-first algorithm to perform tokenization
+    using the given vocabulary.
+
+    For example:
+      input = "unaffable"
+      output = ["un", "##aff", "##able"]
+
+    Args:
+      text: A single token or whitespace separated tokens. This should have
+        already been passed through `BasicTokenizer`.
+
+    Returns:
+      A list of wordpiece tokens.
+    """
+
+    text = convert_to_unicode(text)
+
+    output_tokens = []
+    for token in whitespace_tokenize(text):
+      chars = list(token)
+      if len(chars) > self.max_input_chars_per_word:
+        output_tokens.append(self.unk_token)
+        continue
+
+      is_bad = False
+      start = 0
+      sub_tokens = []
+      while start < len(chars):
+        end = len(chars)
+        cur_substr = None
+        while start < end:
+          substr = "".join(chars[start:end])
+          if start > 0:
+            substr = "##" + substr
+          if substr in self.vocab:
+            cur_substr = substr
+            break
+          end -= 1
+        if cur_substr is None:
+          is_bad = True
+          break
+        sub_tokens.append(cur_substr)
+        start = end
+
+      if is_bad:
+        output_tokens.append(self.unk_token)
+      else:
+        output_tokens.extend(sub_tokens)
+    return output_tokens
+
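+# Usage sketch (illustrative; assumes a vocab containing these pieces):
+#   wt = WordpieceTokenizer(vocab={"un": 0, "##aff": 1, "##able": 2})
+#   wt.tokenize("unaffable")  # -> ["un", "##aff", "##able"]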
+
+def _is_whitespace(char):
+  """Checks whether `chars` is a whitespace character."""
+  # \t, \n, and \r are technically contorl characters but we treat them
+  # as whitespace since they are generally considered as such.
+  if char == " " or char == "\t" or char == "\n" or char == "\r":
+    return True
+  cat = unicodedata.category(char)
+  if cat == "Zs":
+    return True
+  return False
+
+
+def _is_control(char):
+  """Checks whether `chars` is a control character."""
+  # These are technically control characters but we count them as whitespace
+  # characters.
+  if char == "\t" or char == "\n" or char == "\r":
+    return False
+  cat = unicodedata.category(char)
+  if cat in ("Cc", "Cf"):
+    return True
+  return False
+
+
+def _is_punctuation(char):
+  """Checks whether `chars` is a punctuation character."""
+  cp = ord(char)
+  # We treat all non-letter/number ASCII as punctuation.
+  # Characters such as "^", "$", and "`" are not in the Unicode
+  # Punctuation class but we treat them as punctuation anyways, for
+  # consistency.
+  if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or
+      (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)):
+    return True
+  cat = unicodedata.category(char)
+  if cat.startswith("P"):
+    return True
+  return False
diff --git a/examples/onnx/densenet121.py b/examples/onnx/densenet121.py
new file mode 100644
index 0000000..c8a8f95
--- /dev/null
+++ b/examples/onnx/densenet121.py
@@ -0,0 +1,111 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
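+    # Standard ImageNet-style preprocessing (descriptive note): resize to
+    # 256x256, centre-crop to 224x224 via the (16, 16, 240, 240) box, scale
+    # to [0, 1], reorder HWC -> CHW, normalise each channel with the
+    # ImageNet mean/std, and add a batch dimension.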
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
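+# Thin wrapper over sonnx.SONNXModel that keeps only the first graph output;
+# train_one_batch is a no-op stub because these examples run inference only.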
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    download_dir = '/tmp'
+    url = 'https://s3.amazonaws.com/download.onnx/models/opset_9/densenet121.tar.gz'
+    model_path = os.path.join(download_dir, 'densenet121', 'model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference demo
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.Tensor(device=dev, data=img)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'densenet121', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
\ No newline at end of file
diff --git a/examples/onnx/fer_emotion.py b/examples/onnx/fer_emotion.py
new file mode 100644
index 0000000..e980580
--- /dev/null
+++ b/examples/onnx/fer_emotion.py
@@ -0,0 +1,107 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    input_shape = (1, 1, 64, 64)
+    img = img.resize((64, 64), Image.ANTIALIAS)
+    img_data = np.array(img).astype(np.float32)
+    img_data = np.resize(img_data, input_shape)
+    return img_data
+
+
+def get_image_label():
+    labels = [
+        'neutral', 'happiness', 'surprise', 'sadness', 'anger', 'disgust',
+        'fear', 'contempt'
+    ]
+    # download image
+    image_url = 'https://microsoft.github.io/onnxjs-demo/img/fear.8d1417fa.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://onnxzoo.blob.core.windows.net/models/opset_8/emotion_ferplus/emotion_ferplus.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'emotion_ferplus', 'model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'emotion_ferplus', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/gpt2/gpt2.py b/examples/onnx/gpt2/gpt2.py
new file mode 100644
index 0000000..dd27334
--- /dev/null
+++ b/examples/onnx/gpt2/gpt2.py
@@ -0,0 +1,110 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+from singa import autograd
+import onnx
+
+import sys
+sys.path.append(os.path.dirname(__file__) + '/..')
+from utils import download_model
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+from transformers import GPT2Tokenizer
+
+tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
+length = 20
+
+
+def preprocess():
+    text = "Here is some text to encode : Hello World"
+    tokens = tokenizer.encode(text)
+    tokens = np.array(tokens)
+    return tokens.reshape([1, 1, -1]).astype(np.float32)
+
+
+def postprocess(out):
+    text = tokenizer.decode(out)
+    return text
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+    url = 'https://github.com/onnx/models/raw/master/text/machine_comprehension/gpt-2/model/gpt2-lm-head-10.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'GPT-2-LM-HEAD', 'model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    input_ids = preprocess()
+
+    logging.info("model compling...")
+    dev = device.get_default_device()
+    x = tensor.Tensor(device=dev, data=input_ids)
+    model = MyModel(onnx_model)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # inputs, ref_outputs = load_dataset(
+    #     os.path.join('/tmp', 'GPT-2-LM-HEAD', 'test_data_set_0'))
+    # outputs = sg_ir.run(inputs)
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, o, 4)
+
+    logging.info("model running...")
+    output = []
+
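+    # Greedy decoding loop (descriptive note): each step feeds the sequence
+    # so far through the model, takes the logits at the last position,
+    # picks the argmax token id, and concatenates it onto the input for the
+    # next step; `length` new tokens are generated in total.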
+    for i in range(length):
+        logging.info("word {} generating...".format(i))
+        y = model.forward(x)
+        y = autograd.reshape(y, y.shape[-2:])[-1, :]
+        y = tensor.softmax(y)
+        y = tensor.to_numpy(y)[0]
+        y = np.argsort(y)[-1]
+        output.append(y)
+        y = np.array([y]).reshape([1, 1, -1]).astype(np.float32)
+        y = tensor.Tensor(device=dev, data=y)
+        x = tensor.concatenate([x, y], 2)
+
+    text = postprocess(output)
+    print(text)
\ No newline at end of file
diff --git a/examples/onnx/gpt2/requirements.txt b/examples/onnx/gpt2/requirements.txt
new file mode 100644
index 0000000..14693ad
--- /dev/null
+++ b/examples/onnx/gpt2/requirements.txt
@@ -0,0 +1 @@
+transformers==2.5.1
\ No newline at end of file
diff --git a/examples/onnx/mobilenet.py b/examples/onnx/mobilenet.py
new file mode 100644
index 0000000..ad394ca
--- /dev/null
+++ b/examples/onnx/mobilenet.py
@@ -0,0 +1,113 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://s3.amazonaws.com/onnx-model-zoo/mobilenet/mobilenetv2-1.0/mobilenetv2-1.0.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'mobilenetv2-1.0',
+                              'mobilenetv2-1.0.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'mobilenetv2-1.0', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/resnet18.py b/examples/onnx/resnet18.py
new file mode 100644
index 0000000..b66c3fb
--- /dev/null
+++ b/examples/onnx/resnet18.py
@@ -0,0 +1,111 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet18v1/resnet18v1.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'resnet18v1', 'resnet18v1.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'resnet18v1', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
\ No newline at end of file
diff --git a/examples/onnx/ro_bert_a/requirements.txt b/examples/onnx/ro_bert_a/requirements.txt
new file mode 100644
index 0000000..14693ad
--- /dev/null
+++ b/examples/onnx/ro_bert_a/requirements.txt
@@ -0,0 +1 @@
+transformers==2.5.1
\ No newline at end of file
diff --git a/examples/onnx/ro_bert_a/ro_bert_a.py b/examples/onnx/ro_bert_a/ro_bert_a.py
new file mode 100644
index 0000000..b6b02ed
--- /dev/null
+++ b/examples/onnx/ro_bert_a/ro_bert_a.py
@@ -0,0 +1,95 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+from singa import autograd
+import onnx
+
+import sys
+sys.path.append(os.path.dirname(__file__) + '/..')
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+from transformers import RobertaTokenizer
+
+tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
+
+def preprocess():
+    text = "This film is so good"
+    tokens = tokenizer.encode(text, add_special_tokens=True)
+    tokens = np.array(tokens)
+    return tokens.reshape([1, -1]).astype(np.float32)
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+    url = 'https://media.githubusercontent.com/media/onnx/models/master/text/machine_comprehension/roberta/model/roberta-sequence-classification-9.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'roberta-sequence-classification-9', 'roberta-sequence-classification-9.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    input_ids = preprocess()
+
+    logging.info("model compling...")
+    dev = device.get_default_device()
+    x = tensor.Tensor(device=dev, data=input_ids)
+    model = MyModel(onnx_model)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # inputs, ref_outputs = load_dataset(
+    #     os.path.join('/tmp', 'roberta-sst-9', 'test_data_set_0'))
+    # outputs = sg_ir.run(inputs)
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, o, 4)
+
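+    # Descriptive note: the classifier emits sequence-classification logits;
+    # the code below takes the last row, softmaxes it, and maps the top
+    # class to a label (0 -> negative, otherwise positive).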
+    logging.info("model running...")
+    y = model.forward(x)
+    y = autograd.reshape(y, y.shape[-2:])[-1, :]
+    y = tensor.softmax(y)
+    y = tensor.to_numpy(y)[0]
+    y = np.argsort(y)[::-1]
+    if y[0] == 0:
+        print("Prediction: negative")
+    else:
+        print("Prediction: positive")
\ No newline at end of file
diff --git a/examples/onnx/shufflenetv1.py b/examples/onnx/shufflenetv1.py
new file mode 100644
index 0000000..139d6d3
--- /dev/null
+++ b/examples/onnx/shufflenetv1.py
@@ -0,0 +1,108 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import logging
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == '__main__':
+    download_dir = '/tmp'
+    url = 'https://github.com/onnx/models/raw/master/vision/classification/shufflenet/model/shufflenet-9.tar.gz'
+    model_path = os.path.join(download_dir, 'shufflenet', 'model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference demo
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.Tensor(device=dev, data=img)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'shufflenet', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/shufflenetv2.py b/examples/onnx/shufflenetv2.py
new file mode 100644
index 0000000..60f84a4
--- /dev/null
+++ b/examples/onnx/shufflenetv2.py
@@ -0,0 +1,114 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+
+
+def preprocess(img):
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://github.com/onnx/models/raw/master/vision/classification/shufflenet/model/shufflenet-v2-10.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'model', 'test_shufflenetv2',
+                              'model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'model', 'test_shufflenetv2',
+    #                           'model.onnx'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/squeezenet.py b/examples/onnx/squeezenet.py
new file mode 100644
index 0000000..861550c
--- /dev/null
+++ b/examples/onnx/squeezenet.py
@@ -0,0 +1,111 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
+    img = img.resize((224, 224))
+    img = img.crop((0, 0, 224, 224))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == '__main__':
+    download_dir = '/tmp'
+    url = 'https://github.com/onnx/models/raw/master/vision/classification/squeezenet/model/squeezenet1.1-7.tar.gz'
+    model_path = os.path.join(download_dir, 'squeezenet1.1',
+                              'squeezenet1.1.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference demo
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.Tensor(device=dev, data=img)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the test dataset
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'squeezenet1.1', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/superresolution.py b/examples/onnx/superresolution.py
new file mode 100644
index 0000000..caaae19
--- /dev/null
+++ b/examples/onnx/superresolution.py
@@ -0,0 +1,104 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import numpy as np
+from PIL import Image
+from resizeimage import resizeimage
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
+    img = resizeimage.resize_cover(img, [224, 224], validate=False)
+    img_ycbcr = img.convert('YCbCr')
+    img_y_0, img_cb, img_cr = img_ycbcr.split()
+    img_ndarray = np.asarray(img_y_0)
+    img_4 = np.expand_dims(np.expand_dims(img_ndarray, axis=0), axis=0)
+    img_5 = img_4.astype(np.float32) / 255.0
+    return img_5, img_cb, img_cr
+
+
+def get_image():
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://github.com/onnx/models/raw/master/vision/super_resolution/sub_pixel_cnn_2016/model/super-resolution-10.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'super_resolution',
+                              'super_resolution.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # preprocess
+    logging.info("preprocessing...")
+    img = get_image()
+    img_y, img_cb, img_cr = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img_y.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # inference
+    logging.info("model running...")
+    x_batch = tensor.Tensor(device=dev, data=img_y)
+    img_y = model.forward(x_batch)
+    array_img_y = tensor.to_numpy(img_y)
+    img_out_y = Image.fromarray(np.uint8((array_img_y[0] * 255.0).clip(0,
+                                                                       255)[0]),
+                                mode='L')
+
+    # postprocess
+    logging.info("postprocessing...")
+    final_img = Image.merge("YCbCr", [
+        img_out_y,
+        img_cb.resize(img_out_y.size, Image.BICUBIC),
+        img_cr.resize(img_out_y.size, Image.BICUBIC),
+    ]).convert("RGB")
+    final_img.show()
diff --git a/examples/onnx/tiny_yolov2.py b/examples/onnx/tiny_yolov2.py
new file mode 100644
index 0000000..72d3666
--- /dev/null
+++ b/examples/onnx/tiny_yolov2.py
@@ -0,0 +1,162 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+from PIL import Image, ImageDraw
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
+    img = np.array(img).astype(np.float32)
+    img = np.rollaxis(img, 2, 0)
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image():
+    image_url = 'https://raw.githubusercontent.com/simo23/tinyYOLOv2/master/person.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    img = img.resize((416, 416))
+    return img
+
+
+def postprocess(out):
+    numClasses = 20
+    anchors = [1.08, 1.19, 3.42, 4.41, 6.63, 11.38, 9.42, 5.11, 16.62, 10.52]
+
+    def sigmoid(x, derivative=False):
+        return x * (1 - x) if derivative else 1 / (1 + np.exp(-x))
+
+    def softmax(x):
+        scoreMatExp = np.exp(np.asarray(x))
+        return scoreMatExp / scoreMatExp.sum(0)
+
+    # color lookup table, one entry per class (must have numClasses entries)
+    clut = [(0, 0, 0), (255, 0, 0), (255, 0, 255), (0, 0, 255), (0, 255, 0),
+            (0, 255, 128), (128, 255, 0), (128, 128, 0), (0, 128, 255),
+            (128, 0, 128), (255, 0, 128), (128, 0, 255), (255, 128, 128),
+            (128, 255, 128), (255, 255, 0), (255, 128, 128), (128, 128, 255),
+            (255, 128, 128), (128, 255, 128), (0, 128, 128)]
+    label = [
+        "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat",
+        "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person",
+        "pottedplant", "sheep", "sofa", "train", "tvmonitor"
+    ]
+
+    img = get_image()
+    draw = ImageDraw.Draw(img)
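+    # Decode the raw 125x13x13 output: each of the 13x13 grid cells predicts
+    # 5 boxes, each with (tx, ty, tw, th, tc) plus 20 class scores. Box
+    # centers are offset from the cell corner via sigmoid, sizes scale the
+    # anchor priors, and 32 is the network stride on the 416x416 input.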
+
+    for cy in range(13):
+        for cx in range(13):
+            for b in range(5):
+                channel = b * (numClasses + 5)
+                tx = out[channel][cy][cx]
+                ty = out[channel + 1][cy][cx]
+                tw = out[channel + 2][cy][cx]
+                th = out[channel + 3][cy][cx]
+                tc = out[channel + 4][cy][cx]
+                x = (float(cx) + sigmoid(tx)) * 32
+                y = (float(cy) + sigmoid(ty)) * 32
+
+                w = np.exp(tw) * 32 * anchors[2 * b]
+                h = np.exp(th) * 32 * anchors[2 * b + 1]
+
+                confidence = sigmoid(tc)
+
+                classes = np.zeros(numClasses)
+                for c in range(0, numClasses):
+                    classes[c] = out[channel + 5 + c][cy][cx]
+
+                classes = softmax(classes)
+                detectedClass = classes.argmax()
+                if 0.5 < classes[detectedClass] * confidence:
+                    color = clut[detectedClass]
+                    x = x - w / 2
+                    y = y - h / 2
+                    draw.line((x, y, x + w, y), fill=color)
+                    draw.line((x, y, x, y + h), fill=color)
+                    draw.line((x + w, y, x + w, y + h), fill=color)
+                    draw.line((x, y + h, x + w, y + h), fill=color)
+                    draw.text((x, y), label[detectedClass], fill=color)
+                    logging.info("bounding box: (%.2f, %.2f, %.2f, %.2f)" %
+                                 (x, y, x + w, y + h))
+                    logging.info('class=%s ; probability=%f' %
+                                 (label[detectedClass],
+                                  classes[detectedClass] * confidence))
+    img.save("result.png")
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+
+    url = 'https://onnxzoo.blob.core.windows.net/models/opset_8/tiny_yolov2/tiny_yolov2.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'tiny_yolov2', 'Model.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img = get_image()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the reference test data
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'tiny_yolov2', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    out = tensor.to_numpy(y)[0]
+    postprocess(out)
\ No newline at end of file
diff --git a/examples/onnx/training/model.json b/examples/onnx/training/model.json
new file mode 100644
index 0000000..f72d1a7
--- /dev/null
+++ b/examples/onnx/training/model.json
@@ -0,0 +1,112 @@
+{
+    "resnet18v1": {
+        "name": "ResNet-18 Version 1",
+        "description": "ResNet v1 uses post-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet18v1/resnet18v1.tar.gz",
+        "path": "resnet18v1/resnet18v1.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 512
+    },
+    "resnet34v1": {
+        "name": "ResNet-34 Version 1",
+        "description": "ResNet v1 uses post-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet34v1/resnet34v1.tar.gz",
+        "path": "resnet34v1/resnet34v1.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 512
+    },
+    "resnet50v1": {
+        "name": "ResNet-50 Version 1",
+        "description": "ResNet v1 uses post-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet50v1/resnet50v1.tar.gz",
+        "path": "resnet50v1/resnet50v1.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "resnet101v1": {
+        "name": "ResNet-101 Version 1",
+        "description": "ResNet v1 uses post-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet101v1/resnet101v1.tar.gz",
+        "path": "resnet101v1/resnet101v1.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "resnet152v1": {
+        "name": "ResNet-152 Version 1",
+        "description": "ResNet v1 uses post-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet152v1/resnet152v1.tar.gz",
+        "path": "resnet152v1/resnet152v1.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "resnet18v2": {
+        "name": "ResNet-18 Version 2",
+        "description": "ResNet v2 uses pre-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet18v2/resnet18v2.tar.gz",
+        "path": "resnet18v2/resnet18v2.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 512
+    },
+    "resnet34v2": {
+        "name": "ResNet-34 Version 2",
+        "description": "ResNet v2 uses pre-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet34v2/resnet34v2.tar.gz",
+        "path": "resnet34v2/resnet34v2.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 512
+    },
+    "resnet50v2": {
+        "name": "ResNet-50 Version 2",
+        "description": "ResNet v2 uses pre-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet50v2/resnet50v2.tar.gz",
+        "path": "resnet50v2/resnet50v2.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "resnet101v2": {
+        "name": "ResNet-101 Version 2",
+        "description": "ResNet v2 uses pre-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet101v2/resnet101v2.tar.gz",
+        "path": "resnet101v2/resnet101v2.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "resnet152v2": {
+        "name": "ResNet-152 Version 2",
+        "description": "ResNet v2 uses pre-activation for the residual blocks",
+        "url": "https://s3.amazonaws.com/onnx-model-zoo/resnet/resnet152v2/resnet152v2.tar.gz",
+        "path": "resnet152v2/resnet152v2.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 2048
+    },
+    "vgg16": {
+        "name": "VGG-16",
+        "url": "https://github.com/onnx/models/raw/master/vision/classification/vgg/model/vgg16-7.tar.gz",
+        "path": "vgg16/vgg16.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 4096
+    },
+    "vgg16bn": {
+        "name": "VGG-16 with batch normalization",
+        "description": "VGG have batch normalization applied after each convolutional layer",
+        "url": "https://github.com/onnx/models/raw/master/vision/classification/vgg/model/vgg16-bn-7.tar.gz",
+        "path": "vgg16-bn/vgg16-bn.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 4096
+    },
+    "vgg19": {
+        "name": "VGG-19",
+        "url": "https://github.com/onnx/models/raw/master/vision/classification/vgg/model/vgg19-7.tar.gz",
+        "path": "vgg19/vgg19.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 4096
+    },
+    "vgg19bn": {
+        "name": "VGG-19 with batch normalization",
+        "description": "VGG have batch normalization applied after each convolutional layer",
+        "url": "https://github.com/onnx/models/raw/master/vision/classification/vgg/model/vgg19-bn-7.tar.gz",
+        "path": "vgg19-bn/vgg19-bn.onnx",
+        "last_layers": -3,
+        "last_layers_dim": 4096
+    }
+}
\ No newline at end of file
diff --git a/examples/onnx/training/train.py b/examples/onnx/training/train.py
new file mode 100644
index 0000000..8407bfe
--- /dev/null
+++ b/examples/onnx/training/train.py
@@ -0,0 +1,354 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import sys, os
+import json
+from singa import singa_wrap as singa
+from singa import opt
+from singa import device
+from singa import tensor
+from singa import sonnx
+from singa import layer
+from singa import autograd
+import numpy as np
+import time
+import argparse
+from PIL import Image
+import onnx
+import logging
+from tqdm import tqdm
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+sys.path.append(os.path.dirname(__file__) + '/../../cnn')
+sys.path.append(os.path.dirname(__file__) + '/..')
+from utils import download_model
+
+# Data Augmentation
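+# (random crop from a 4-pixel symmetric pad, plus a random horizontal flip)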
+def augmentation(x, batch_size):
+    xpad = np.pad(x, [[0, 0], [0, 0], [4, 4], [4, 4]], 'symmetric')
+    for data_num in range(0, batch_size):
+        offset = np.random.randint(8, size=2)
+        x[data_num, :, :, :] = xpad[data_num, :,
+                                    offset[0]:offset[0] + x.shape[2],
+                                    offset[1]:offset[1] + x.shape[3]]
+        if_flip = np.random.randint(2)
+        if (if_flip):
+            x[data_num, :, :, :] = x[data_num, :, :, ::-1]
+    return x
+
+
+# Calculate Accuracy
+def accuracy(pred, target):
+    # y is network output to be compared with ground truth (int)
+    y = np.argmax(pred, axis=1)
+    a = y == target
+    correct = np.array(a, "int").sum()
+    # print(correct)
+    return correct
+
+
+# Data partition according to the rank
+def partition(global_rank, world_size, train_x, train_y, val_x, val_y):
+    # Partition training data
+    data_per_rank = train_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    train_x = train_x[idx_start:idx_end]
+    train_y = train_y[idx_start:idx_end]
+    # Partition evaluation data
+    data_per_rank = val_x.shape[0] // world_size
+    idx_start = global_rank * data_per_rank
+    idx_end = (global_rank + 1) * data_per_rank
+    val_x = val_x[idx_start:idx_end]
+    val_y = val_y[idx_start:idx_end]
+    return train_x, train_y, val_x, val_y
+
+
+# Function to all reduce NUMPY Accuracy and Loss from Multiple Devices
+def reduce_variable(variable, dist_opt, reducer):
+    reducer.copy_from_numpy(variable)
+    dist_opt.all_reduce(reducer.data)
+    dist_opt.wait()
+    output = tensor.to_numpy(reducer)
+    return output
+
+
+def resize_dataset(x, image_size):
+    num_data = x.shape[0]
+    dim = x.shape[1]
+    X = np.zeros(shape=(num_data, dim, image_size, image_size),
+                 dtype=np.float32)
+    for n in range(0, num_data):
+        for d in range(0, dim):
+            X[n, d, :, :] = np.array(Image.fromarray(x[n, d, :, :]).resize(
+                (image_size, image_size), Image.BILINEAR),
+                                     dtype=np.float32)
+    return X
+
+
+class MyModel(sonnx.SONNXModel):
+
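+    # Transfer learning: reuse the ONNX backbone up to `last_layers` and
+    # attach a fresh Linear head mapping the backbone feature dimension
+    # (`in_dim`, taken from model.json) to the dataset's `num_classes`.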
+    def __init__(self, onnx_model, num_classes=10, num_channels=3, last_layers=-1, in_dim=1000):
+        super(MyModel, self).__init__(onnx_model)
+        self.num_classes = num_classes
+        self.input_size = 224
+        self.dimension = 4
+        self.num_channels = num_channels
+        self.last_layers = last_layers
+        self.linear = layer.Linear(in_dim, num_classes)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x, last_layers=self.last_layers)[0]
+        y = self.linear(y)
+        return y
+
+    def train_one_batch(self, x, y, dist_option, spars):
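+        # `dist_option` selects how gradients are synchronized across
+        # devices: plain fp32/fp16 all-reduce, partial parameter updates,
+        # or sparse updates (top-K or threshold based).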
+        out = self.forward(x)
+        loss = autograd.softmax_cross_entropy(out, y)
+        if dist_option == 'fp32':
+            self.optimizer.backward_and_update(loss)
+        elif dist_option == 'fp16':
+            self.optimizer.backward_and_update_half(loss)
+        elif dist_option == 'partialUpdate':
+            self.optimizer.backward_and_partial_update(loss)
+        elif dist_option == 'sparseTopK':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=True,
+                                                      spars=spars)
+        elif dist_option == 'sparseThreshold':
+            self.optimizer.backward_and_sparse_update(loss,
+                                                      topK=False,
+                                                      spars=spars)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+def run(global_rank,
+        world_size,
+        local_rank,
+        max_epoch,
+        batch_size,
+        model_config,
+        data,
+        sgd,
+        graph,
+        verbosity,
+        dist_option='fp32',
+        spars=None):
+    dev = device.create_cuda_gpu_on(local_rank)
+    dev.SetRandSeed(0)
+    np.random.seed(0)
+
+    if data == 'cifar10':
+        from data import cifar10
+        train_x, train_y, val_x, val_y = cifar10.load()
+    elif data == 'cifar100':
+        from data import cifar100
+        train_x, train_y, val_x, val_y = cifar100.load()
+
+    num_channels = train_x.shape[1]
+    image_size = train_x.shape[2]
+    data_size = np.prod(train_x.shape[1:train_x.ndim]).item()
+    num_classes = (np.max(train_y) + 1).item()
+
+    # read and make onnx model
+    download_model(model_config['url'])
+    onnx_model = onnx.load(os.path.join('/tmp', model_config['path']))
+    model = MyModel(onnx_model,
+                    num_channels=num_channels,
+                    num_classes=num_classes,
+                    last_layers=model_config['last_layers'],
+                    in_dim=model_config['last_layers_dim'])
+
+    # For distributed training, sequential gives better performance
+    if hasattr(sgd, "communicator"):
+        DIST = True
+        sequential = True
+    else:
+        DIST = False
+        sequential = False
+
+    if DIST:
+        train_x, train_y, val_x, val_y = partition(global_rank, world_size,
+                                                   train_x, train_y, val_x,
+                                                   val_y)
+    '''
+    # check dataset shape correctness
+    if global_rank == 0:
+        print("Check the shape of dataset:")
+        print(train_x.shape)
+        print(train_y.shape)
+    '''
+
+    if model.dimension == 4:
+        tx = tensor.Tensor(
+            (batch_size, num_channels, model.input_size, model.input_size), dev,
+            tensor.float32)
+    elif model.dimension == 2:
+        tx = tensor.Tensor((batch_size, data_size), dev, tensor.float32)
+        # np.reshape returns a new array, so keep the results
+        train_x = np.reshape(train_x, (train_x.shape[0], -1))
+        val_x = np.reshape(val_x, (val_x.shape[0], -1))
+
+    ty = tensor.Tensor((batch_size,), dev, tensor.int32)
+    num_train_batch = train_x.shape[0] // batch_size
+    num_val_batch = val_x.shape[0] // batch_size
+    idx = np.arange(train_x.shape[0], dtype=np.int32)
+
+    # attach the model to the graph
+    model.set_optimizer(sgd)
+    model.compile([tx], is_train=True, use_graph=graph, sequential=sequential)
+    dev.SetVerbosity(verbosity)
+
+    # Training and Evaluation Loop
+    for epoch in range(max_epoch):
+        start_time = time.time()
+        np.random.shuffle(idx)
+
+        if global_rank == 0:
+            print('Starting Epoch %d:' % (epoch))
+
+        # Training Phase
+        train_correct = np.zeros(shape=[1], dtype=np.float32)
+        test_correct = np.zeros(shape=[1], dtype=np.float32)
+        train_loss = np.zeros(shape=[1], dtype=np.float32)
+
+        model.train()
+        for b in tqdm(range(num_train_batch)):
+            # Generate the batch data for this iteration
+            x = train_x[idx[b * batch_size:(b + 1) * batch_size]]
+            if model.dimension == 4:
+                x = augmentation(x, batch_size)
+                if (image_size != model.input_size):
+                    x = resize_dataset(x, model.input_size)
+            y = train_y[idx[b * batch_size:(b + 1) * batch_size]]
+
+            # Copy the batch data into input tensors
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+
+            # Train the model
+            out, loss = model(tx, ty, dist_option, spars)
+            train_correct += accuracy(tensor.to_numpy(out), y)
+            train_loss += tensor.to_numpy(loss)[0]
+
+        if DIST:
+            # Reduce the Training Accuracy and Loss from Multiple Devices
+            reducer = tensor.Tensor((1,), dev, tensor.float32)
+            train_correct = reduce_variable(train_correct, sgd, reducer)
+            train_loss = reduce_variable(train_loss, sgd, reducer)
+
+        if global_rank == 0:
+            print('Training loss = %f, training accuracy = %f' %
+                  (train_loss, train_correct /
+                   (num_train_batch * batch_size * world_size)),
+                  flush=True)
+
+        # Evaluation Phase
+        model.eval()
+        for b in tqdm(range(num_val_batch)):
+            x = val_x[b * batch_size:(b + 1) * batch_size]
+            if model.dimension == 4:
+                if (image_size != model.input_size):
+                    x = resize_dataset(x, model.input_size)
+            y = val_y[b * batch_size:(b + 1) * batch_size]
+            tx.copy_from_numpy(x)
+            ty.copy_from_numpy(y)
+            out_test = model(tx)
+            test_correct += accuracy(tensor.to_numpy(out_test), y)
+
+        if DIST:
+            # Reduce the Evaluation Accuracy from Multiple Devices
+            test_correct = reduce_variable(test_correct, sgd, reducer)
+
+        # Output the Evaluation Accuracy
+        if global_rank == 0:
+            print('Evaluation accuracy = %f, Elapsed Time = %fs' %
+                  (test_correct / (num_val_batch * batch_size * world_size),
+                   time.time() - start_time),
+                  flush=True)
+
+    dev.PrintTimeProfiling()
+
+
+def loss(out, y):
+    return autograd.softmax_cross_entropy(out, y)
+
+
+if __name__ == '__main__':
+
+    with open(os.path.join(os.path.dirname(__file__),
+                           'model.json')) as json_file:
+        model_config = json.load(json_file)
+
+    # use argparse to get command config: max_epoch, model, data, etc. for single gpu training
+    parser = argparse.ArgumentParser(
+        description='Training using the autograd and graph.')
+    parser.add_argument('--model',
+                        choices=list(model_config.keys()),
+                        help='please refer to the models.json for more details',
+                        default='resnet18v1')
+    parser.add_argument('--data',
+                        choices=['cifar10', 'cifar100'],
+                        default='cifar10')
+    parser.add_argument('--epoch',
+                        '--max-epoch',
+                        default=10,
+                        type=int,
+                        help='maximum epochs',
+                        dest='max_epoch')
+    parser.add_argument('--bs',
+                        '--batch-size',
+                        default=32,
+                        type=int,
+                        help='batch size',
+                        dest='batch_size')
+    parser.add_argument('--lr',
+                        '--learning-rate',
+                        default=0.005,
+                        type=float,
+                        help='initial learning rate',
+                        dest='lr')
+    # determine which gpu to use
+    parser.add_argument('--id',
+                        '--device-id',
+                        default=0,
+                        type=int,
+                        help='which GPU to use',
+                        dest='device_id')
+    parser.add_argument('--no-graph',
+                        '--disable-graph',
+                        default=True,
+                        action='store_false',
+                        help='disable graph',
+                        dest='graph')
+    parser.add_argument('--verbosity',
+                        '--log-verbosity',
+                        default=1,
+                        type=int,
+                        help='logging verbosity',
+                        dest='verbosity')
+
+    args = parser.parse_args()
+
+    sgd = opt.SGD(lr=args.lr, momentum=0.9, weight_decay=1e-5)
+    run(0, 1, args.device_id, args.max_epoch, args.batch_size, model_config[args.model],
+        args.data, sgd, args.graph, args.verbosity)
diff --git a/examples/onnx/utils.py b/examples/onnx/utils.py
new file mode 100644
index 0000000..b8f7b34
--- /dev/null
+++ b/examples/onnx/utils.py
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import urllib.request
+import tarfile
+import glob
+import onnx
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def download_model(url):
+    download_dir = '/tmp/'
+    with tarfile.open(check_exist_or_download(url), 'r') as t:
+        t.extractall(path=download_dir)
+
+
+def load_dataset(test_data_dir):
+    # load inputs
+    inputs = []
+    inputs_num = len(glob.glob(os.path.join(test_data_dir, 'input_*.pb')))
+    for i in range(inputs_num):
+        input_file = os.path.join(test_data_dir, 'input_{}.pb'.format(i))
+        onnx_tensor = onnx.TensorProto()
+        with open(input_file, 'rb') as f:
+            onnx_tensor.ParseFromString(f.read())
+        inputs.append(onnx.numpy_helper.to_array(onnx_tensor))
+
+    # load reference outputs
+    ref_outputs = []
+    ref_outputs_num = len(glob.glob(os.path.join(test_data_dir, 'output_*.pb')))
+    for i in range(ref_outputs_num):
+        output_file = os.path.join(test_data_dir, 'output_{}.pb'.format(i))
+        onnx_tensor = onnx.TensorProto()
+        with open(output_file, 'rb') as f:
+            onnx_tensor.ParseFromString(f.read())
+        ref_outputs.append(onnx.numpy_helper.to_array(onnx_tensor))
+    return inputs, ref_outputs
+
+
+def check_exist_or_download(url):
+    download_dir = '/tmp/'
+    name = url.rsplit('/', 1)[-1]
+    filename = os.path.join(download_dir, name)
+    if not os.path.isfile(filename):
+        logging.info("Downloading %s" % url)
+        urllib.request.urlretrieve(url, filename)
+    return filename
diff --git a/examples/onnx/vgg16.py b/examples/onnx/vgg16.py
new file mode 100644
index 0000000..369cee9
--- /dev/null
+++ b/examples/onnx/vgg16.py
@@ -0,0 +1,111 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
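+    # Resize to 256x256, center-crop to 224x224, then normalize each channel
+    # with the standard ImageNet mean/std used by the pretrained VGG models.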
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+    url = 'https://s3.amazonaws.com/onnx-model-zoo/vgg/vgg16/vgg16.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'vgg16', 'vgg16.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the reference test data
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'vgg16', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/onnx/vgg19.py b/examples/onnx/vgg19.py
new file mode 100644
index 0000000..a2c3ea7
--- /dev/null
+++ b/examples/onnx/vgg19.py
@@ -0,0 +1,111 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+import numpy as np
+from PIL import Image
+
+from singa import device
+from singa import tensor
+from singa import sonnx
+import onnx
+from utils import download_model, check_exist_or_download
+
+import logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
+
+
+def preprocess(img):
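+    # Resize to 256x256, center-crop to 224x224, then normalize each channel
+    # with the standard ImageNet mean/std used by the pretrained VGG models.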
+    img = img.resize((256, 256))
+    img = img.crop((16, 16, 240, 240))
+    img = np.array(img).astype(np.float32) / 255.
+    img = np.rollaxis(img, 2, 0)
+    for channel, mean, std in zip(range(3), [0.485, 0.456, 0.406],
+                                  [0.229, 0.224, 0.225]):
+        img[channel, :, :] -= mean
+        img[channel, :, :] /= std
+    img = np.expand_dims(img, axis=0)
+    return img
+
+
+def get_image_label():
+    # download label
+    label_url = 'https://s3.amazonaws.com/onnx-model-zoo/synset.txt'
+    with open(check_exist_or_download(label_url), 'r') as f:
+        labels = [l.rstrip() for l in f]
+
+    # download image
+    image_url = 'https://s3.amazonaws.com/model-server/inputs/kitten.jpg'
+    img = Image.open(check_exist_or_download(image_url))
+    return img, labels
+
+
+class MyModel(sonnx.SONNXModel):
+
+    def __init__(self, onnx_model):
+        super(MyModel, self).__init__(onnx_model)
+
+    def forward(self, *x):
+        y = super(MyModel, self).forward(*x)
+        return y[0]
+
+    def train_one_batch(self, x, y):
+        pass
+
+
+if __name__ == "__main__":
+    url = 'https://s3.amazonaws.com/onnx-model-zoo/vgg/vgg19/vgg19.tar.gz'
+    download_dir = '/tmp/'
+    model_path = os.path.join(download_dir, 'vgg19', 'vgg19.onnx')
+
+    logging.info("onnx load model...")
+    download_model(url)
+    onnx_model = onnx.load(model_path)
+
+    # inference
+    logging.info("preprocessing...")
+    img, labels = get_image_label()
+    img = preprocess(img)
+    # sg_ir = sonnx.prepare(onnx_model) # run without graph
+    # y = sg_ir.run([img])
+
+    logging.info("model compling...")
+    dev = device.create_cuda_gpu()
+    x = tensor.PlaceHolder(img.shape, device=dev)
+    model = MyModel(onnx_model)
+    model.compile([x], is_train=False, use_graph=True, sequential=True)
+
+    # verify against the reference test data
+    # from utils import load_dataset
+    # inputs, ref_outputs = load_dataset(os.path.join('/tmp', 'vgg19', 'test_data_set_0'))
+    # x_batch = tensor.Tensor(device=dev, data=inputs[0])
+    # outputs = sg_ir.run([x_batch])
+    # for ref_o, o in zip(ref_outputs, outputs):
+    #     np.testing.assert_almost_equal(ref_o, tensor.to_numpy(o), 4)
+
+    logging.info("model running...")
+    x = tensor.Tensor(device=dev, data=img)
+    y = model.forward(x)
+
+    logging.info("postprocessing...")
+    y = tensor.softmax(y)
+    scores = tensor.to_numpy(y)
+    scores = np.squeeze(scores)
+    a = np.argsort(scores)[::-1]
+    for i in a[0:5]:
+        logging.info('class=%s ; probability=%f' % (labels[i], scores[i]))
diff --git a/examples/qabot/README.md b/examples/qabot/README.md
new file mode 100644
index 0000000..fdbab08
--- /dev/null
+++ b/examples/qabot/README.md
@@ -0,0 +1,31 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Train a question answering (QAbot) model
+
+This example shows how to implement a question answering (QAbot)
+application using SINGA's cuDNN RNN layers.
+
+We use an [LSTM](https://www.mitpressjournals.org/doi/abs/10.1162/neco.1997.9.8.1735) model together with max pooling
+to train the QAbot.
+
+## Instructions
+
+* Start the training:
+
+        python qabot_train.py
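+
+* Optionally, tune the run with the flags defined via `argparse` in
+  `qabot_train.py` (the values below are the defaults), for example:
+
+        python qabot_train.py --max-epoch 30 --batch-size 50 --learning-rate 0.01 --device-id 0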
diff --git a/examples/qabot/qabot_data.py b/examples/qabot/qabot_data.py
new file mode 100644
index 0000000..4494855
--- /dev/null
+++ b/examples/qabot/qabot_data.py
@@ -0,0 +1,282 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import numpy as np
+import random
+
+download_dir = "/tmp/"
+import os
+import urllib
+
+
+def check_exist_or_download(url):
+    ''' download data into tmp '''
+    name = url.rsplit('/', 1)[-1]
+    filename = os.path.join(download_dir, name)
+    if not os.path.isfile(filename):
+        print("Downloading %s" % url)
+        urllib.request.urlretrieve(url, filename)
+    return filename
+
+
+def unzip_data(download_dir, data_zip):
+    data_dir = download_dir + "insuranceQA-master/V2/"
+    if not os.path.exists(data_dir):
+        print("extracting %s to %s" % (download_dir, data_dir))
+        from zipfile import ZipFile
+        with ZipFile(data_zip, 'r') as zipObj:
+            zipObj.extractall(download_dir)
+    return data_dir
+
+
+def get_label2answer(data_dir):
+    import gzip
+    label2answer = dict()
+    with gzip.open(data_dir +
+                   "/InsuranceQA.label2answer.token.encoded.gz") as fin:
+        for line in fin:
+            pair = line.decode().strip().split("\t")
+            idxs = pair[1].split(" ")
+            idxs = [int(idx.replace("idx_", "")) for idx in idxs]
+            label2answer[int(pair[0])] = idxs
+    return label2answer
+
+
+pad_idx = 0
+pad_string = "<pad>"
+pad_embed = np.zeros((300,))
+
+insuranceqa_train_filename = "/InsuranceQA.question.anslabel.token.100.pool.solr.train.encoded.gz"
+insuranceqa_test_filename = "/InsuranceQA.question.anslabel.token.100.pool.solr.test.encoded.gz"
+insuranceQA_url = "https://github.com/shuzi/insuranceQA/archive/master.zip"
+insuranceQA_cache_fp = download_dir + "insuranceQA_cache.pickle"
+google_news_pretrain_embeddings_link = "https://s3.amazonaws.com/dl4j-distribution/GoogleNews-vectors-negative300.bin.gz"
+
+
+def get_idx2word(data_dir):
+    idx2word = dict()
+    with open(data_dir + "vocabulary", encoding="utf-8") as vc_f:
+        for line in vc_f:
+            pair = line.strip().split("\t")
+            idx = int(pair[0].replace("idx_", ""))
+            idx2word[idx] = pair[1]
+
+    # add padding string to idx2word lookup
+    idx2word[pad_idx] = pad_string
+
+    return idx2word
+
+
+def get_train_raw(data_dir, data_filename):
+    ''' deserialize a training data file
+        args:
+            data_dir: directory of the data file
+            data_filename: name of the data file
+        return:
+            train_raw: list of QnA pairs; the list length == number of
+                samples, and each pair has 3 fields:
+                    0: question sentence, idx encoded; use idx2word to
+                        decode, idx2vec to get the embeddings.
+                    1: answer labels, each corresponding to an answer
+                        sentence; use label2answer to decode.
+                    2: top-K candidate answers, used as negative answers
+                        for training.
+    '''
+    train_raw = []
+    import gzip
+    with gzip.open(data_dir + data_filename) as fin:
+        for line in fin:
+            tpl = line.decode().strip().split("\t")
+            question = [
+                int(idx.replace("idx_", "")) for idx in tpl[1].split(" ")
+            ]
+            ans = [int(label) for label in tpl[2].split(" ")]
+            candis = [int(label) for label in tpl[3].split(" ")]
+            train_raw.append((question, ans, candis))
+    return train_raw
+
+
+def limit_encode_train(train_raw, label2answer, idx2word, q_seq_limit,
+                       ans_seq_limit, idx2vec):
+    ''' encode training data into embedded word-vector sequences under the
+        given sequence limits
+        return:
+            questions_encoded: np ndarray of shape
+                (number of samples, seq length, vector size)
+            poss_encoded: same layout, sequences for the positive answers
+            negs_encoded: same layout, sequences for the negative answers
+    '''
+    questions = [question for question, answers, candis in train_raw]
+    # choose 1 answer from answer pool
+    poss = [
+        label2answer[random.choice(answers)]
+        for question, answers, candis in train_raw
+    ]
+    # choose 1 candidate from candidate pool
+    negs = [
+        label2answer[random.choice(candis)]
+        for question, answers, candis in train_raw
+    ]
+
+    # filter out words not in idx2vec
+    questions_filtered = [
+        [idx for idx in q if idx in idx2vec] for q in questions
+    ]
+    poss_filtered = [[idx for idx in ans if idx in idx2vec] for ans in poss]
+    negs_filtered = [[idx for idx in ans if idx in idx2vec] for ans in negs]
+
+    # crop to seq limit
+    questions_crop = [
+        q[:q_seq_limit] + [0] * max(0, q_seq_limit - len(q))
+        for q in questions_filtered
+    ]
+    poss_crop = [
+        ans[:ans_seq_limit] + [0] * max(0, ans_seq_limit - len(ans))
+        for ans in poss_filtered
+    ]
+    negs_crop = [
+        ans[:ans_seq_limit] + [0] * max(0, ans_seq_limit - len(ans))
+        for ans in negs_filtered
+    ]
+
+    # encoded, word idx to word vector
+    questions_encoded = [[idx2vec[idx] for idx in q] for q in questions_crop]
+    poss_encoded = [[idx2vec[idx] for idx in ans] for ans in poss_crop]
+    negs_encoded = [[idx2vec[idx] for idx in ans] for ans in negs_crop]
+
+    # make nd array
+    questions_encoded = np.array(questions_encoded).astype(np.float32)
+    poss_encoded = np.array(poss_encoded).astype(np.float32)
+    negs_encoded = np.array(negs_encoded).astype(np.float32)
+    return questions_encoded, poss_encoded, negs_encoded
+
+
+def get_idx2vec_weights(wv, idx2word):
+    idx2vec = {k: wv[v] for k, v in idx2word.items() if v in wv}
+
+    # add padding embedding (all zeros) to idx2vec lookup
+    idx2vec[pad_idx] = pad_embed
+    return idx2vec
+
+
+def prepare_data(use_cache=True):
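+    ''' download the pretrained Google News word2vec embeddings and the
+        insuranceQA dataset, build the label/word/vector lookups, and cache
+        everything as a pickle under /tmp for later runs '''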
+    import pickle
+    if not os.path.isfile(insuranceQA_cache_fp) or not use_cache:
+        # no cache is found, preprocess data from scratch
+        print("prepare data from scratch")
+
+        # get the pretrained word vectors
+        from gensim.models.keyedvectors import KeyedVectors
+        google_news_pretrain_fp = check_exist_or_download(
+            google_news_pretrain_embeddings_link)
+        wv = KeyedVectors.load_word2vec_format(google_news_pretrain_fp,
+                                               binary=True)
+
+        # prepare insurance QA dataset
+        data_zip = check_exist_or_download(insuranceQA_url)
+        data_dir = unzip_data(download_dir, data_zip)
+
+        label2answer = get_label2answer(data_dir)
+        idx2word = get_idx2word(data_dir)
+        idx2vec = get_idx2vec_weights(wv, idx2word)
+
+        train_raw = get_train_raw(data_dir, insuranceqa_train_filename)
+        test_raw = get_train_raw(data_dir, insuranceqa_test_filename)
+        with open(insuranceQA_cache_fp, 'wb') as handle:
+            pickle.dump((train_raw, test_raw, label2answer, idx2word, idx2vec),
+                        handle,
+                        protocol=pickle.HIGHEST_PROTOCOL)
+    else:
+        # load from cached pickle
+        with open(insuranceQA_cache_fp, 'rb') as handle:
+            (train_raw, test_raw, label2answer, idx2word,
+             idx2vec) = pickle.load(handle)
+
+    return train_raw, test_raw, label2answer, idx2word, idx2vec
+
+
+def limit_encode_eval(train_raw,
+                      label2answer,
+                      idx2word,
+                      q_seq_limit,
+                      ans_seq_limit,
+                      idx2vec,
+                      top_k_candi_limit=6):
+    ''' encode evaluation data into embedded word-vector sequences under the
+        given sequence limits
+        return:
+            questions_encoded: np ndarray of shape
+                (number of samples, seq length, vector size)
+            candi_pools_encoded: candidate answer pools, encoded likewise
+            ans_count: number of ground-truth answers per question
+    '''
+    questions = [question for question, answers, candis in train_raw]
+
+    # combine the ground-truth and candidate answer labels
+    candi_pools = [
+        list(answers + candis)[:top_k_candi_limit]
+        for question, answers, candis in train_raw
+    ]
+    assert all([len(pool) == top_k_candi_limit for pool in candi_pools])
+
+    ans_count = [len(answers) for question, answers, candis in train_raw]
+    assert all([c > 0 for c in ans_count])
+
+    # encode ans
+    candi_pools_encoded = [[label2answer[candi_label]
+                            for candi_label in pool]
+                           for pool in candi_pools]
+
+    # filter out words not in idx2vec
+    questions_filtered = [
+        [idx for idx in q if idx in idx2vec] for q in questions
+    ]
+    candi_pools_filtered = [[[idx
+                              for idx in candi_encoded
+                              if idx in idx2vec]
+                             for candi_encoded in pool]
+                            for pool in candi_pools_encoded]
+
+    # crop to seq limit
+    questions_crop = [
+        q[:q_seq_limit] + [0] * max(0, q_seq_limit - len(q))
+        for q in questions_filtered
+    ]
+    candi_pools_crop = [[
+        candi[:ans_seq_limit] + [0] * max(0, ans_seq_limit - len(candi))
+        for candi in pool
+    ]
+                        for pool in candi_pools_filtered]
+
+    # encoded, word idx to word vector
+    questions_encoded = [[idx2vec[idx] for idx in q] for q in questions_crop]
+    candi_pools_encoded = [[[idx2vec[idx]
+                             for idx in candi]
+                            for candi in pool]
+                           for pool in candi_pools_crop]
+    questions_encoded = np.array(questions_encoded).astype(np.float32)
+    candi_pools_encoded = np.array(candi_pools_encoded).astype(np.float32)
+
+    # candi_pools_encoded shape:
+    #    (number of sample QnA,
+    #     number of candidates per pool,
+    #     number of word idxs per candidate,
+    #     300-dim word embedding per word idx)
+    # e.g. 10 QnA to test,
+    #      each question with 5 candidate answers,
+    #      each answer with 8 words,
+    #      each word embedded as a vector of size 300
+    return questions_encoded, candi_pools_encoded, ans_count
diff --git a/examples/qabot/qabot_model.py b/examples/qabot/qabot_model.py
new file mode 100644
index 0000000..d5a9d88
--- /dev/null
+++ b/examples/qabot/qabot_model.py
@@ -0,0 +1,152 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from singa import autograd, layer, model
+
+
+class QAModel_mlp(model.Model):
+
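+    # Convention shared by all QA models here: `a_batch` packs positive and
+    # negative answers along dim 0 (first half positive, second half
+    # negative, as concatenated in qabot_train.do_train), so it is split in
+    # two halves before computing the cosine similarities.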
+    def __init__(self, hidden_size):
+        super().__init__()
+        self.linear_q = layer.Linear(hidden_size)
+        self.linear_a = layer.Linear(hidden_size)
+
+    def forward(self, q, a_batch):
+        q = autograd.reshape(q, (q.shape[0], -1))  # bs, seq_q*data_s
+        a_batch = autograd.reshape(a_batch,
+                                   (a_batch.shape[0], -1))  # 2bs, seq_a*data_s
+
+        q = self.linear_q(q)  # bs, hid_s
+        a_batch = self.linear_a(a_batch)  # 2bs, hid_s
+
+        a_pos, a_neg = autograd.split(a_batch, 0,
+                                      [q.shape[0], q.shape[0]])  # 2*(bs, hid)
+
+        sim_pos = autograd.cossim(q, a_pos)
+        sim_neg = autograd.cossim(q, a_neg)
+        return sim_pos, sim_neg
+
+
+class QAModel(model.Model):
+
+    def __init__(self,
+                 hidden_size,
+                 num_layers=1,
+                 bidirectional=True,
+                 return_sequences=False):
+        super(QAModel, self).__init__()
+        self.hidden_size = hidden_size
+        self.lstm_q = layer.CudnnRNN(hidden_size=hidden_size,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+        self.lstm_a = layer.CudnnRNN(hidden_size=hidden_size,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+
+    def forward(self, q, a_batch):
+        q = self.lstm_q(q)  # bs, Hidden*2
+        a_batch = self.lstm_a(a_batch)  # 2bs, Hidden*2
+
+        bs_a = q.shape[0]
+        # bs, hid*2
+        a_pos, a_neg = autograd.split(a_batch, 0, [bs_a, bs_a])
+
+        sim_pos = autograd.cossim(q, a_pos)
+        sim_neg = autograd.cossim(q, a_neg)
+        return sim_pos, sim_neg
+
+
+class QAModel_mean(model.Model):
+
+    def __init__(self, hidden_size, bidirectional=True, return_sequences=True):
+        super(QAModel_mean, self).__init__()
+        self.hidden_size = hidden_size
+        self.lstm_q = layer.CudnnRNN(hidden_size=hidden_size,
+                                     batch_first=True,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+        self.lstm_a = layer.CudnnRNN(hidden_size=hidden_size,
+                                     batch_first=True,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+
+    def forward(self, q, a_batch):
+        q = self.lstm_q(q)  # bs, seq, Hidden*2
+        a_batch = self.lstm_a(a_batch)  # 2bs, seq, Hidden*2
+
+        # bs, hid*2
+        q = autograd.reduce_mean(q, [1], keepdims=0)
+        # (2bs, hid*2)
+        a_batch = autograd.reduce_mean(a_batch, [1], keepdims=0)
+
+        # 2*(bs, seq, hid*2)
+        a_pos, a_neg = autograd.split(a_batch, 0, [q.shape[0], q.shape[0]])
+
+        sim_pos = autograd.cossim(q, a_pos)
+        sim_neg = autograd.cossim(q, a_neg)
+        return sim_pos, sim_neg
+
+
+class QAModel_maxpooling(model.Model):
+
+    def __init__(self,
+                 hidden_size,
+                 q_seq,
+                 a_seq,
+                 num_layers=1,
+                 bidirectional=True,
+                 return_sequences=True):
+        super(QAModel_maxpooling, self).__init__()
+        self.hidden_size = hidden_size
+        self.lstm_q = layer.CudnnRNN(hidden_size=hidden_size,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+        self.lstm_a = layer.CudnnRNN(hidden_size=hidden_size,
+                                     bidirectional=bidirectional,
+                                     return_sequences=return_sequences)
+        self.q_pool = layer.MaxPool2d((q_seq, 1))
+        self.a_pool = layer.MaxPool2d((a_seq, 1))
+
+    def forward(self, q, a_batch):
+        # bs, seq, Hidden*2
+        q = self.lstm_q(q)
+        # bs, 1, seq, hid*2
+        q = autograd.reshape(q, (q.shape[0], 1, q.shape[1], q.shape[2]))
+        # bs, 1, 1, hid*2
+        q = self.q_pool(q)
+        # bs, hid*2
+        q = autograd.reshape(q, (q.shape[0], q.shape[3]))
+
+        # 2bs, seq, Hidden*2
+        a_batch = self.lstm_a(a_batch)
+        # 2bs, 1, seq, hid*2
+        a_batch = autograd.reshape(
+            a_batch, (a_batch.shape[0], 1, a_batch.shape[1], a_batch.shape[2]))
+        # 2bs, 1, 1, hid*2
+        a_batch = self.a_pool(a_batch)
+        # 2bs, hid*2
+        a_batch = autograd.reshape(a_batch,
+                                   (a_batch.shape[0], a_batch.shape[3]))
+
+        # 2*(bs, hid*2)
+        a_pos, a_neg = autograd.split(a_batch, 0, [q.shape[0], q.shape[0]])
+
+        sim_pos = autograd.cossim(q, a_pos)
+        sim_neg = autograd.cossim(q, a_neg)
+        return sim_pos, sim_neg
\ No newline at end of file
diff --git a/examples/qabot/qabot_train.py b/examples/qabot/qabot_train.py
new file mode 100644
index 0000000..45893e0
--- /dev/null
+++ b/examples/qabot/qabot_train.py
@@ -0,0 +1,159 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import numpy as np
+import time
+import random
+from tqdm import tqdm
+import argparse
+
+from singa import autograd, tensor, device, opt
+from qabot_data import limit_encode_train, limit_encode_eval, prepare_data
+from qabot_model import QAModel_maxpooling
+
+
+def do_train(m, tq, ta, train, meta_data, args):
+    '''
+    the batch size needs to be large enough to see all the negative answers
+    '''
+    m.train()
+    for epoch in range(args.epochs):
+        total_loss = 0
+        start = time.time()
+
+        q, ans_p, ans_n = limit_encode_train(train, meta_data['label2answer'],
+                                             meta_data['idx2word'],
+                                             args.q_seq_limit,
+                                             args.ans_seq_limit,
+                                             meta_data['idx2vec'])
+        bs = args.bs
+
+        for i in tqdm(range(len(q) // bs)):
+            tq.copy_from_numpy(q[i * bs:(i + 1) * bs])
+            a_batch = np.concatenate(
+                [ans_p[i * bs:(i + 1) * bs], ans_n[i * bs:(i + 1) * bs]])
+            ta.copy_from_numpy(a_batch)
+
+            p_sim, n_sim = m.forward(tq, ta)
+            l = autograd.ranking_loss(p_sim, n_sim)
+            m.optimizer(l)
+
+            total_loss += tensor.to_numpy(l)
+        print(
+            "epoch %d, time used %d sec, loss: " % (epoch, time.time() - start),
+            total_loss * bs / len(q))
+
+
+def do_eval(m, tq, ta, test, meta_data, args):
+    q, candis, ans_count = limit_encode_eval(test, meta_data['label2answer'],
+                                             meta_data['idx2word'],
+                                             args.q_seq_limit,
+                                             args.ans_seq_limit,
+                                             meta_data['idx2vec'],
+                                             args.number_of_candidates)
+    m.eval()
+    candi_pool_size = candis.shape[1]
+    correct = 0
+    start = time.time()
+    for i in tqdm(range(len(q))):
+        # the batch size bs must equal both the number of repeated copies of
+        # the question and half the candidate pool size:
+        # i.e. 1 question repeated n times, with n == candi_pool_size // 2
+        _q = np.repeat([q[i]], candi_pool_size // 2, axis=0)
+        tq.copy_from_numpy(_q)
+        ta.copy_from_numpy(candis[i])
+
+        (first_half_score, second_half_score) = m.forward(tq, ta)
+
+        first_half_score = tensor.to_numpy(first_half_score)
+        second_half_score = tensor.to_numpy(second_half_score)
+        scores = np.concatenate((first_half_score, second_half_score))
+        pred_max_idx = np.argmax(scores)
+
+        if pred_max_idx < ans_count[i]:
+            correct += 1
+
+    print("eval top %s " % (candi_pool_size), " accuracy", correct / len(q),
+          " time used %d sec" % (time.time() - start))
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-m',
+                        '--max-epoch',
+                        default=30,
+                        type=int,
+                        help='maximum epochs',
+                        dest='epochs')
+    parser.add_argument('-b',
+                        '--batch-size',
+                        default=50,
+                        type=int,
+                        help='batch size',
+                        dest='bs')
+    parser.add_argument('-l',
+                        '--learning-rate',
+                        default=0.01,
+                        type=float,
+                        help='initial learning rate',
+                        dest='lr')
+    parser.add_argument('-i',
+                        '--device-id',
+                        default=0,
+                        type=int,
+                        help='which GPU to use',
+                        dest='device_id')
+
+    args = parser.parse_args()
+
+    args.hid_s = 64
+    args.q_seq_limit = 10
+    args.ans_seq_limit = 50
+    args.embed_size = 300
+    args.number_of_candidates = args.bs * 2
+    assert args.number_of_candidates <= 100, "number_of_candidates should be <= 100"
+
+    dev = device.create_cuda_gpu_on(args.device_id)
+
+    # tensor container
+    tq = tensor.random((args.bs, args.q_seq_limit, args.embed_size), dev)
+    ta = tensor.random((args.bs * 2, args.ans_seq_limit, args.embed_size), dev)
+
+    # model
+    m = QAModel_maxpooling(args.hid_s,
+                           q_seq=args.q_seq_limit,
+                           a_seq=args.ans_seq_limit)
+    m.compile([tq, ta], is_train=True, use_graph=True, sequential=False)
+    m.optimizer = opt.SGD(args.lr, 0.9)
+
+    # get data
+    train_raw, test_raw, label2answer, idx2word, idx2vec = prepare_data()
+    meta_data = {
+        'label2answer': label2answer,
+        'idx2word': idx2word,
+        'idx2vec': idx2vec
+    }
+
+    print("training...")
+    do_train(m, tq, ta, train_raw, meta_data, args)
+
+    print("Eval with train data...")
+    do_eval(m, tq, ta, random.sample(train_raw, 2000), meta_data, args)
+
+    print("Eval with test data...")
+    do_eval(m, tq, ta, test_raw, meta_data, args)
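+
+# Example invocation (flags as defined by the argparse options above):
+#   python3 qabot_train.py -m 30 -b 50 -l 0.01 -i 0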
diff --git a/examples/rbm/README.md b/examples/rbm/README.md
new file mode 100644
index 0000000..8345014
--- /dev/null
+++ b/examples/rbm/README.md
@@ -0,0 +1,37 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Train an RBM model using SINGA Tensor APIs
+
+This example trains an RBM model over the MNIST dataset
+using SINGA's Python Tensor APIs.
+The RBM model and its hyper-parameters are set following
+[Hinton's paper](http://www.cs.toronto.edu/~hinton/science.pdf).
+
+## Running instructions
+
+1. Download the pre-processed [MNIST dataset](https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz)
+
+2. Start the training
+
+        python train.py mnist.pkl.gz
+
+By default the training code runs on the CPU. To run it on a GPU card, start
+the program with an additional argument
+
+        python train.py mnist.pkl.gz --use_gpu
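+
+## How the update works
+
+`train.py` implements one step of contrastive divergence (CD-1) per batch.
+The sketch below restates that update in plain numpy; it is an illustration
+with made-up variable names, not the script's own code.
+
+        import numpy as np
+
+        def sigmoid(x):
+            return 1.0 / (1.0 + np.exp(-x))
+
+        # v: (batch, 784) visible data; W: (784, 1000); vb, hb: bias vectors
+        def cd1_grads(v, W, vb, hb):
+            # positive phase: hidden probabilities and a binary sample
+            h_prob = sigmoid(v @ W + hb)
+            h_sample = (h_prob > np.random.rand(*h_prob.shape)).astype(np.float32)
+            # negative phase: reconstruct the visible units, then hidden probs
+            v_neg = sigmoid(h_sample @ W.T + vb)
+            h_neg = sigmoid(v_neg @ W + hb)
+            # gradients = negative-phase statistics - positive-phase statistics
+            gW = v_neg.T @ h_neg - v.T @ h_prob
+            gvb = v_neg.sum(0) - v.sum(0)
+            ghb = h_neg.sum(0) - h_prob.sum(0)
+            return gW, gvb, ghb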
diff --git a/examples/mnist/train.py b/examples/rbm/train.py
old mode 100644
new mode 100755
similarity index 65%
rename from examples/mnist/train.py
rename to examples/rbm/train.py
index 0a00358..a2419ab
--- a/examples/mnist/train.py
+++ b/examples/rbm/train.py
@@ -14,48 +14,50 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =============================================================================
+from __future__ import division
+from __future__ import print_function
+from builtins import range
 
 import numpy as np
 import os
+import sys
 import gzip
 import argparse
-import cPickle
-from singa import initializer
-from singa import utils
-from singa import optimizer
+try:
+    import pickle
+except ImportError:
+    import cPickle as pickle
+
+from singa import opt
 from singa import device
 from singa import tensor
 
 
-from singa.proto import core_pb2
-
-
-
 def load_train_data(file_path):
     f = gzip.open(file_path, 'rb')
-    train_set, valid_set, test_set = cPickle.load(f)
+    if sys.version_info.major > 2:
+        train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
+    else:
+        train_set, valid_set, test_set = pickle.load(f)
     traindata = train_set[0].astype(np.float32)
     validdata = valid_set[0].astype(np.float32)
-    print traindata.shape, validdata.shape
+    print(traindata.shape, validdata.shape)
     return traindata, validdata
 
 
-
 def train(data_file, use_gpu, num_epoch=10, batch_size=100):
-    print 'Start intialization............'
-    lr = 0.1   # Learning rate
-    weight_decay  = 0.0002
+    print('Start initialization............')
+    lr = 0.0005   # Learning rate
+    weight_decay = 0.0002
     hdim = 1000
     vdim = 784
-    opt = optimizer.SGD(momentum=0.8, weight_decay=weight_decay)
-
     tweight = tensor.Tensor((vdim, hdim))
     tweight.gaussian(0.0, 0.1)
-    tvbias = tensor.from_numpy(np.zeros(vdim, dtype = np.float32))
-    thbias = tensor.from_numpy(np.zeros(hdim, dtype = np.float32))
-    opt = optimizer.SGD(momentum=0.5, weight_decay=weight_decay)
+    tvbias = tensor.from_numpy(np.zeros(vdim, dtype=np.float32))
+    thbias = tensor.from_numpy(np.zeros(hdim, dtype=np.float32))
+    sgd = opt.SGD(lr=lr, momentum=0.9, weight_decay=weight_decay)
 
-    print 'Loading data ..................'
+    print('Loading data ..................')
     train_x, valid_x = load_train_data(data_file)
 
     if use_gpu:
@@ -66,18 +68,18 @@
     for t in [tweight, tvbias, thbias]:
         t.to_device(dev)
 
-    num_train_batch = train_x.shape[0] / batch_size
-    print "num_train_batch = %d " % (num_train_batch)
+    num_train_batch = train_x.shape[0] // batch_size
+    print("num_train_batch = %d " % (num_train_batch))
     for epoch in range(num_epoch):
         trainerrorsum = 0.0
-        print 'Epoch %d' % epoch
+        print('Epoch %d' % epoch)
         for b in range(num_train_batch):
             # positive phase
             tdata = tensor.from_numpy(
-                    train_x[(b * batch_size):((b + 1) * batch_size), : ])
+                train_x[(b * batch_size):((b + 1) * batch_size), :])
             tdata.to_device(dev)
             tposhidprob = tensor.mult(tdata, tweight)
-            tposhidprob.add_row(thbias)
+            tposhidprob = tposhidprob + thbias
             tposhidprob = tensor.sigmoid(tposhidprob)
             tposhidrandom = tensor.Tensor(tposhidprob.shape, dev)
             tposhidrandom.uniform(0.0, 1.0)
@@ -85,41 +87,43 @@
 
             # negative phase
             tnegdata = tensor.mult(tposhidsample, tweight.T())
-            tnegdata.add_row(tvbias)
+            tnegdata = tnegdata + tvbias
             tnegdata = tensor.sigmoid(tnegdata)
 
             tneghidprob = tensor.mult(tnegdata, tweight)
-            tneghidprob.add_row(thbias)
+            tneghidprob = tneghidprob + thbias
             tneghidprob = tensor.sigmoid(tneghidprob)
             error = tensor.sum(tensor.square((tdata - tnegdata)))
             trainerrorsum = error + trainerrorsum
 
-            tgweight = tensor.mult(tnegdata.T(), tneghidprob) -\
-                    tensor.mult(tdata.T(), tposhidprob)
+            tgweight = tensor.mult(tnegdata.T(), tneghidprob) \
+                - tensor.mult(tdata.T(), tposhidprob)
             tgvbias = tensor.sum(tnegdata, 0) - tensor.sum(tdata, 0)
             tghbias = tensor.sum(tneghidprob, 0) - tensor.sum(tposhidprob, 0)
 
-            opt.apply_with_lr(epoch, lr / batch_size, tgweight, tweight, 'w')
-            opt.apply_with_lr(epoch, lr / batch_size, tgvbias, tvbias, 'vb')
-            opt.apply_with_lr(epoch, lr / batch_size, tghbias, thbias, 'hb')
+            sgd.apply('w', tweight, tgweight)
+            sgd.apply('vb', tvbias, tgvbias)
+            sgd.apply('hb', thbias, tghbias)
 
-        print 'training errorsum = %f' % (trainerrorsum)
+        print('training error average = %f' %
+              (tensor.to_numpy(trainerrorsum) / train_x.shape[0]))
 
         tvaliddata = tensor.from_numpy(valid_x)
         tvaliddata.to_device(dev)
         tvalidposhidprob = tensor.mult(tvaliddata, tweight)
-        tvalidposhidprob.add_row(thbias)
+        tvalidposhidprob = tvalidposhidprob + thbias
         tvalidposhidprob = tensor.sigmoid(tvalidposhidprob)
         tvalidposhidrandom = tensor.Tensor(tvalidposhidprob.shape, dev)
-        initializer.uniform(tvalidposhidrandom, 0.0, 1.0)
+        tvalidposhidrandom.uniform(0.0, 1.0)
         tvalidposhidsample = tensor.gt(tvalidposhidprob, tvalidposhidrandom)
 
         tvalidnegdata = tensor.mult(tvalidposhidsample, tweight.T())
-        tvalidnegdata.add_row(tvbias)
+        tvalidnegdata = tvalidnegdata + tvbias
         tvalidnegdata = tensor.sigmoid(tvalidnegdata)
 
         validerrorsum = tensor.sum(tensor.square((tvaliddata - tvalidnegdata)))
-        print 'valid errorsum = %f' % (validerrorsum)
+        print('valid error average = %f' %
+              (tensor.to_numpy(validerrorsum) / valid_x.shape[0]))
 
 
 if __name__ == '__main__':
@@ -129,5 +133,5 @@
     args = parser.parse_args()
 
     assert os.path.exists(args.file), 'Pls download the MNIST dataset from' \
-            'https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz'
+        ' https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz'
     train(args.file, args.use_gpu)
diff --git a/examples/rnn/README.md b/examples/rnn/README.md
new file mode 100644
index 0000000..36d60ab
--- /dev/null
+++ b/examples/rnn/README.md
@@ -0,0 +1,35 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Train an RNN model over the IMDB dataset
+
+Recurrent neural networks (RNNs) are widely used for modelling sequential data,
+e.g., natural language sentences. This example describes how to implement an RNN
+application (or model) using SINGA's cuDNN RNN layers.
+We use the [LSTM](https://www.mitpressjournals.org/doi/abs/10.1162/neco.1997.9.8.1735) model as an
+example, trained on the IMDB dataset.
+
+## Instructions
+
+* Prepare the dataset,
+
+        python imdb_data.py
+
+* Start the training,
+
+        python imdb_train.py
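+
+## Data flow
+
+`imdb_data.py` cleans the raw reviews, maps each token to a pretrained
+word2vec vector, and pickles the result; `imdb_train.py` then feeds
+fixed-length batches into an LSTM classifier. A rough numpy sketch of the
+tensor shapes involved (a simplified illustration using the training
+script's default sizes, not the scripts' exact code):
+
+        import numpy as np
+
+        batch_size, seq_limit, embed_size = 128, 50, 300
+        # each review: seq_limit word vectors, padded or truncated
+        x = np.zeros((batch_size, seq_limit, embed_size), dtype=np.float32)
+        # each label: one-hot over {negative, positive}
+        y = np.zeros((batch_size, 2), dtype=np.float32)
+        # the LSTM encodes x; a final Linear layer emits 2 scores per review,
+        # trained with softmax cross entropy against y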
diff --git a/examples/rnn/char_rnn.py b/examples/rnn/char_rnn.py
new file mode 100644
index 0000000..2979b95
--- /dev/null
+++ b/examples/rnn/char_rnn.py
@@ -0,0 +1,258 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+'''Train a Char-RNN model using plain text files.
+The model is created following https://github.com/karpathy/char-rnn
+The training file can be any plain text file,
+e.g., http://cs.stanford.edu/people/karpathy/char-rnn/
+'''
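+# Note on the input encoding (see convert() below): each character is mapped
+# to an integer index and then to a one-hot vector of length vocab_size, so
+# each batch becomes a list of seq_length tensors of shape
+# (batch_size, vocab_size), fed to the LSTM one time step at a time.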
+
+from __future__ import division
+from __future__ import print_function
+from builtins import range
+import numpy as np
+import sys
+import argparse
+from tqdm import tqdm
+
+from singa import device
+from singa import tensor
+from singa import autograd
+from singa import layer
+from singa import model
+from singa import opt
+
+
+class CharRNN(model.Model):
+
+    def __init__(self, vocab_size, hidden_size=32):
+        super(CharRNN, self).__init__()
+        self.rnn = layer.LSTM(vocab_size, hidden_size)
+        self.cat = layer.Cat()
+        self.reshape1 = layer.Reshape()
+        self.dense = layer.Linear(hidden_size, vocab_size)
+        self.reshape2 = layer.Reshape()
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+        self.optimizer = opt.SGD(0.01)
+        self.hidden_size = hidden_size
+        self.vocab_size = vocab_size
+
+    def reset_states(self, dev):
+        self.hx.to_device(dev)
+        self.cx.to_device(dev)
+        self.hx.set_value(0.0)
+        self.cx.set_value(0.0)
+
+    def initialize(self, inputs):
+        batchsize = inputs[0].shape[0]
+        self.hx = tensor.Tensor((batchsize, self.hidden_size))
+        self.cx = tensor.Tensor((batchsize, self.hidden_size))
+        self.reset_states(inputs[0].device)
+
+    def forward(self, inputs):
+        x, hx, cx = self.rnn(inputs, (self.hx, self.cx))
+        self.hx.copy_data(hx)
+        self.cx.copy_data(cx)
+        x = self.cat(x)
+        x = self.reshape1(x, (-1, self.hidden_size))
+        return self.dense(x)
+
+    def train_one_batch(self, x, y):
+        out = self.forward(x)
+        y = self.reshape2(y, (-1, 1))
+        loss = self.softmax_cross_entropy(out, y)
+        self.optimizer(loss)
+        return out, loss
+
+    def get_states(self):
+        ret = super().get_states()
+        ret[self.hx.name] = self.hx
+        ret[self.cx.name] = self.cx
+        return ret
+
+    def set_states(self, states):
+        self.hx.copy_from(states[self.hx.name])
+        self.cx.copy_from(states[self.cx.name])
+        super().set_states(states)
+
+
+class Data(object):
+
+    def __init__(self, fpath, batch_size=32, seq_length=100, train_ratio=0.8):
+        '''Data object for loading a plain text file.
+
+        Args:
+            fpath, path to the text file.
+            train_ratio, split the text file into train and test sets, where
+                train_ratio of the characters are in the train set.
+        '''
+        with open(fpath, 'r', encoding='iso-8859-1') as fd:
+            self.raw_data = fd.read()  # read the whole text file
+        chars = list(set(self.raw_data))
+        self.vocab_size = len(chars)
+        self.char_to_idx = {ch: i for i, ch in enumerate(chars)}
+        self.idx_to_char = {i: ch for i, ch in enumerate(chars)}
+        data = [self.char_to_idx[c] for c in self.raw_data]
+        # seq_length + 1 for the data + label
+        nsamples = len(data) // (1 + seq_length)
+        data = data[0:nsamples * (1 + seq_length)]
+        data = np.asarray(data, dtype=np.int32)
+        data = np.reshape(data, (-1, seq_length + 1))
+        # shuffle all sequences
+        np.random.shuffle(data)
+        self.train_dat = data[0:int(data.shape[0] * train_ratio)]
+        self.num_train_batch = self.train_dat.shape[0] // batch_size
+        self.val_dat = data[self.train_dat.shape[0]:]
+        self.num_test_batch = self.val_dat.shape[0] // batch_size
+        print('train dat', self.train_dat.shape)
+        print('val dat', self.val_dat.shape)
+
+
+def numpy2tensors(npx, npy, dev, inputs=None, labels=None):
+    '''(batch, seq, dim) --> (seq, batch, dim)'''
+    tmpy = np.swapaxes(npy, 0, 1).reshape((-1, 1))
+    if labels:
+        labels.copy_from_numpy(tmpy)
+    else:
+        labels = tensor.from_numpy(tmpy)
+    labels.to_device(dev)
+    tmpx = np.swapaxes(npx, 0, 1)
+    inputs_ = []
+    for t in range(tmpx.shape[0]):
+        if inputs:
+            inputs[t].copy_from_numpy(tmpx[t])
+        else:
+            x = tensor.from_numpy(tmpx[t])
+            x.to_device(dev)
+            inputs_.append(x)
+    if not inputs:
+        inputs = inputs_
+    return inputs, labels
+
+
+def convert(batch,
+            batch_size,
+            seq_length,
+            vocab_size,
+            dev,
+            inputs=None,
+            labels=None):
+    '''convert a batch of data into a sequence of input tensors'''
+    y = batch[:, 1:]
+    x1 = batch[:, :seq_length]
+    x = np.zeros((batch_size, seq_length, vocab_size), dtype=np.float32)
+    for b in range(batch_size):
+        for t in range(seq_length):
+            c = x1[b, t]
+            x[b, t, c] = 1
+    return numpy2tensors(x, y, dev, inputs, labels)
+
+
+def sample(model, data, dev, nsamples=100, use_max=False):
+    while True:
+        cmd = input('Do you want to sample text from the model [y/n]')
+        if cmd == 'n':
+            return
+        else:
+            seed = input('Please input some seeding text, e.g., #include <c: ')
+            inputs = []
+            for c in seed:
+                x = np.zeros((1, data.vocab_size), dtype=np.float32)
+                x[0, data.char_to_idx[c]] = 1
+                tx = tensor.from_numpy(x)
+                tx.to_device(dev)
+                inputs.append(tx)
+            model.reset_states(dev)
+            outputs = model(inputs)
+            y = tensor.softmax(outputs[-1])
+            sys.stdout.write(seed)
+            for i in range(nsamples):
+                prob = tensor.to_numpy(y)[0]
+                if use_max:
+                    cur = np.argmax(prob)
+                else:
+                    cur = np.random.choice(data.vocab_size, 1, p=prob)[0]
+                sys.stdout.write(data.idx_to_char[cur])
+                x = np.zeros((1, data.vocab_size), dtype=np.float32)
+                x[0, cur] = 1
+                tx = tensor.from_numpy(x)
+                tx.to_device(dev)
+                outputs = model([tx])
+                y = tensor.softmax(outputs[-1])
+
+
+def evaluate(model, data, batch_size, seq_length, dev, inputs, labels):
+    model.eval()
+    val_loss = 0.0
+    for b in range(data.num_test_batch):
+        batch = data.val_dat[b * batch_size:(b + 1) * batch_size]
+        inputs, labels = convert(batch, batch_size, seq_length, data.vocab_size,
+                                 dev, inputs, labels)
+        model.reset_states(dev)
+        y = model(inputs)
+        loss = autograd.softmax_cross_entropy(y, labels)[0]
+        val_loss += tensor.to_numpy(loss)[0]
+    print('            validation loss is %f' %
+          (val_loss / data.num_test_batch / seq_length))
+
+
+def train(data,
+          max_epoch,
+          hidden_size=100,
+          seq_length=100,
+          batch_size=16,
+          model_path='model'):
+    # SGD with L2 gradient normalization
+    cuda = device.create_cuda_gpu()
+    model = CharRNN(data.vocab_size, hidden_size)
+    model.graph(True, False)
+
+    inputs, labels = None, None
+
+    for epoch in range(max_epoch):
+        model.train()
+        train_loss = 0
+        for b in tqdm(range(data.num_train_batch)):
+            batch = data.train_dat[b * batch_size:(b + 1) * batch_size]
+            inputs, labels = convert(batch, batch_size, seq_length,
+                                     data.vocab_size, cuda, inputs, labels)
+            out, loss = model(inputs, labels)
+            model.reset_states(cuda)
+            train_loss += tensor.to_numpy(loss)[0]
+
+        print('\nEpoch %d, train loss is %f' %
+              (epoch, train_loss / data.num_train_batch / seq_length))
+
+        evaluate(model, data, batch_size, seq_length, cuda, inputs, labels)
+        sample(model, data, cuda)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Train a multi-stack LSTM for '
+        'modeling character sequences from plain text files')
+    parser.add_argument('data', type=str, help='training file')
+    parser.add_argument('-b', type=int, default=32, help='batch_size')
+    parser.add_argument('-l', type=int, default=64, help='sequence length')
+    parser.add_argument('-d', type=int, default=128, help='hidden size')
+    parser.add_argument('-m', type=int, default=50, help='max num of epoch')
+    args = parser.parse_args()
+    data = Data(args.data, batch_size=args.b, seq_length=args.l)
+    train(data,
+          args.m,
+          hidden_size=args.d,
+          seq_length=args.l,
+          batch_size=args.b)
diff --git a/examples/rnn/imdb_data.py b/examples/rnn/imdb_data.py
new file mode 100644
index 0000000..973f9e5
--- /dev/null
+++ b/examples/rnn/imdb_data.py
@@ -0,0 +1,283 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import re
+import os
+import pickle
+import urllib
+import tarfile
+import numpy as np
+import pandas as pd
+import nltk
+from nltk.stem import PorterStemmer
+from nltk.tokenize.toktok import ToktokTokenizer
+from gensim.models.keyedvectors import KeyedVectors
+from sklearn.model_selection import train_test_split
+from bs4 import BeautifulSoup
+'''
+    data collection preprocessing constants
+'''
+download_dir = '/tmp/'
+preprocessed_imdb_data_fp = download_dir + 'imdb_processed.pickle'
+imdb_dataset_link = "https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz"
+google_news_pretrain_embeddings_link = "https://s3.amazonaws.com/dl4j-distribution/GoogleNews-vectors-negative300.bin.gz"
+
+
+def pad_batch(b, seq_limit):
+    ''' pad (or truncate) a batch of encoded sequences to seq_limit
+        and one-hot encode the sentiment labels
+    '''
+    batch_seq = []
+    batch_senti_onehot = []
+    batch_senti = []
+    for r in b:
+        # r[0] encoded sequence
+        # r[1] label 1 or 0
+        encoded = None
+        if len(r[0]) >= seq_limit:
+            encoded = r[0][:seq_limit]
+        else:
+            encoded = r[0] + [0] * (seq_limit - len(r[0]))
+
+        batch_seq.append(encoded)
+        batch_senti.append(r[1])
+        if r[1] == 1:
+            batch_senti_onehot.append([0, 1])
+        else:
+            batch_senti_onehot.append([1, 0])
+    batch_senti = np.array(batch_senti).astype(np.float32)
+    batch_senti_onehot = np.array(batch_senti_onehot).astype(np.float32)
+    batch_seq = np.array(batch_seq).astype(np.int32)
+    return batch_seq, batch_senti_onehot, batch_senti
+
+
+def pad_batch_2vec(b, seq_limit, embed_weights):
+    ''' convert a batch of encoded sequences to pretrained word vectors
+        via the embed weights (lookup dictionary), padding to seq_limit
+    '''
+    batch_seq = []
+    batch_senti_onehot = []
+    batch_senti = []
+    for r in b:
+        # r[0] encoded sequence
+        # r[1] label 1 or 0
+        encoded = None
+        if len(r[0]) >= seq_limit:
+            encoded = r[0][:seq_limit]
+        else:
+            encoded = r[0] + [0] * (seq_limit - len(r[0]))
+
+        batch_seq.append([embed_weights[idx] for idx in encoded])
+        batch_senti.append(r[1])
+        if r[1] == 1:
+            batch_senti_onehot.append([0, 1])
+        else:
+            batch_senti_onehot.append([1, 0])
+    batch_senti = np.array(batch_senti).astype(np.float32)
+    batch_senti_onehot = np.array(batch_senti_onehot).astype(np.float32)
+    batch_seq = np.array(batch_seq).astype(np.float32)
+    return batch_seq, batch_senti_onehot, batch_senti
+
+
+def check_exist_or_download(url):
+    ''' download data into tmp '''
+    name = url.rsplit('/', 1)[-1]
+    filename = os.path.join(download_dir, name)
+    if not os.path.isfile(filename):
+        print("Downloading %s" % url)
+        urllib.request.urlretrieve(url, filename)
+    return filename
+
+
+def unzip_data(download_dir, data_gz):
+    data_dir = download_dir + 'aclImdb'
+    if not os.path.exists(data_dir):
+        print("extracting %s to %s" % (download_dir, data_dir))
+        with tarfile.open(data_gz) as tar:
+            tar.extractall(download_dir)
+    return data_dir
+
+
+def strip_html(text):
+    ''' lambda fn for cleaning html '''
+    soup = BeautifulSoup(text, "html.parser")
+    return soup.get_text()
+
+
+def remove_between_square_brackets(text):
+    ''' lambda fn for cleaning square brackets'''
+    return re.sub(r'\[[^]]*\]', '', text)
+
+
+def remove_special_characters(text, remove_digits=False):
+    ''' lambda fn for removing special chars; optionally remove digits too '''
+    pattern = r'[^a-zA-Z\s]' if remove_digits else r'[^a-zA-Z0-9\s]'
+    text = re.sub(pattern, '', text)
+    return text
+
+
+def simple_stemmer(text):
+    ''' lambda fn for stemming '''
+    ps = PorterStemmer()
+    text = ' '.join([ps.stem(word) for word in text.split()])
+    return text
+
+
+def remove_stopwords(text, tokenizer, stopword_list, is_lower_case=False):
+    ''' lambda fn for removing stopwords '''
+    tokens = tokenizer.tokenize(text)
+    tokens = [token.strip() for token in tokens]
+    if is_lower_case:
+        filtered_tokens = [
+            token for token in tokens if token not in stopword_list
+        ]
+    else:
+        filtered_tokens = [
+            token for token in tokens if token.lower() not in stopword_list
+        ]
+    filtered_text = ' '.join(filtered_tokens)
+    return filtered_text
+
+
+def tokenize(x):
+    ''' lambda fn for tokenize sentences '''
+    ret = []
+    for w in x.split(" "):
+        if w != '':
+            ret.append(w)
+    return ret
+
+
+def encode_token(words, wv, w2i):
+    ''' lambda fn for encoding string seq to int seq 
+        args: 
+            wv: word vector lookup dictionary
+            w2i: word2index lookup dictionary
+    '''
+    ret = []
+    for w in words:
+        if w in wv:
+            ret.append(w2i[w])
+    return ret
+
+
+def preprocess():
+    ''' collect and preprocess raw data from the ACL IMDB dataset
+    '''
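+    # pipeline overview (as implemented below): download and extract the
+    # dataset, strip html/bracketed text/special characters, stem, remove
+    # stopwords, tokenize, build the vocab (index 0 reserved for <pad>),
+    # encode tokens as indices, gather word2vec weights for the embedding
+    # lookup, split train/val, and pickle everything.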
+    nltk.download('stopwords')
+
+    print("preparing raw imdb data")
+    data_gz = check_exist_or_download(imdb_dataset_link)
+    data_dir = unzip_data(download_dir, data_gz)
+
+    # imdb dirs
+    # vocab_f = data_dir + '/imdb.vocab'
+    train_pos_dir = data_dir + '/train/pos/'
+    train_neg_dir = data_dir + '/train/neg/'
+    test_pos_dir = data_dir + '/test/pos/'
+    test_neg_dir = data_dir + '/test/neg/'
+
+    # nltk helpers
+    tokenizer = ToktokTokenizer()
+    stopword_list = nltk.corpus.stopwords.words('english')
+
+    # load pretrained word2vec binary
+    print("loading pretrained word2vec")
+    google_news_pretrain_fp = check_exist_or_download(
+        google_news_pretrain_embeddings_link)
+    wv = KeyedVectors.load_word2vec_format(google_news_pretrain_fp, binary=True)
+
+    # parse flat files to memory
+    data = []
+    for data_dir, label in [(train_pos_dir, 1), (train_neg_dir, 0),
+                            (test_pos_dir, 1), (test_neg_dir, 0)]:
+        for filename in os.listdir(data_dir):
+            if filename.endswith(".txt"):
+                with open(os.path.join(data_dir, filename),
+                          "r",
+                          encoding="utf-8") as fhdl:
+                    data.append((fhdl.read(), label))
+
+    # text review cleaning
+    print("cleaning text review")
+    imdb_data = pd.DataFrame(data, columns=["review", "label"])
+    imdb_data['review'] = imdb_data['review'].apply(strip_html)
+    imdb_data['review'] = imdb_data['review'].apply(
+        remove_between_square_brackets)
+    imdb_data['review'] = imdb_data['review'].apply(remove_special_characters)
+    imdb_data['review'] = imdb_data['review'].apply(simple_stemmer)
+    imdb_data['review'] = imdb_data['review'].apply(remove_stopwords,
+                                                    args=(tokenizer,
+                                                          stopword_list))
+    imdb_data['token'] = imdb_data['review'].apply(tokenize)
+
+    # build word2index and index2word
+    w2i = dict()
+    i2w = dict()
+
+    # add vocab <pad> as index 0
+    w2i["<pad>"] = 0
+    i2w[0] = "<pad>"
+
+    idx = 1  # start from idx 1
+    for index, row in imdb_data['token'].iteritems():
+        for w in row:
+            if w in wv and w not in w2i:
+                w2i[w] = idx
+                i2w[idx] = w
+                assert idx < 28241
+                idx += 1
+    assert len(w2i) == len(i2w)
+    print("vocab size: ", len(w2i))
+
+    # encode tokens to int
+    imdb_data['encoded'] = imdb_data['token'].apply(encode_token,
+                                                    args=(wv, w2i))
+
+    # select word vector weights for embedding layer from vocab
+    embed_weights = []
+    for w in w2i.keys():
+        val = None
+        if w in wv:
+            val = wv[w]
+        else:
+            val = np.zeros([
+                300,
+            ])
+        embed_weights.append(val)
+    embed_weights = np.array(embed_weights)
+    print("embedding layer lookup weight shape: ", embed_weights.shape)
+
+    # split into train and test
+    train_data = imdb_data[['encoded', 'label']].values
+    train, val = train_test_split(train_data, test_size=0.33, random_state=42)
+
+    # save preprocessed for training
+    imdb_processed = {
+        "train": train,
+        "val": val,
+        "embed_weights": embed_weights,
+        "w2i": w2i,
+        "i2w": i2w
+    }
+    print("saving preprocessed file to ", preprocessed_imdb_data_fp)
+    with open(preprocessed_imdb_data_fp, 'wb') as handle:
+        pickle.dump(imdb_processed, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+
+if __name__ == "__main__":
+    preprocess()
diff --git a/examples/rnn/imdb_model.py b/examples/rnn/imdb_model.py
new file mode 100644
index 0000000..5698c0c
--- /dev/null
+++ b/examples/rnn/imdb_model.py
@@ -0,0 +1,58 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+from singa import autograd
+from singa import layer
+from singa import model
+
+
+class IMDBModel(model.Model):
+
+    def __init__(self,
+                 hidden_size,
+                 mode='lstm',
+                 return_sequences=False,
+                 bidirectional="False",
+                 num_layers=1):
+        super().__init__()
+        batch_first = True
+        self.lstm = layer.CudnnRNN(hidden_size=hidden_size,
+                                   batch_first=batch_first,
+                                   rnn_mode=mode,
+                                   return_sequences=return_sequences,
+                                   num_layers=num_layers,
+                                   dropout=0.9,
+                                   bidirectional=bidirectional)
+        self.l1 = layer.Linear(64)
+        self.l2 = layer.Linear(2)
+
+    def forward(self, x):
+        y = self.lstm(x)
+        y = autograd.reshape(y, (y.shape[0], -1))
+        y = self.l1(y)
+        y = autograd.relu(y)
+        y = self.l2(y)
+        return y
+
+    def train_one_batch(self, x, y):
+        out = self.forward(x)
+        loss = autograd.softmax_cross_entropy(out, y)
+        self.optimizer(loss)
+        return out, loss
+
+    def set_opt(self, optimizer):
+        self.optimizer = optimizer
diff --git a/examples/rnn/imdb_train.py b/examples/rnn/imdb_train.py
new file mode 100644
index 0000000..4952639
--- /dev/null
+++ b/examples/rnn/imdb_train.py
@@ -0,0 +1,176 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import pickle
+import os
+import sys
+import numpy as np
+from singa import tensor
+from singa import device
+from singa import opt
+from imdb_data import pad_batch_2vec, preprocessed_imdb_data_fp
+from imdb_model import IMDBModel
+import argparse
+
+if not os.path.isfile(preprocessed_imdb_data_fp):
+    sys.exit(
+        "Imdb dataset is not found, run python3 examples/rnn/imdb_data.py to prepare data"
+    )
+
+# load preprocessed data
+imdb_processed = None
+with open(preprocessed_imdb_data_fp, 'rb') as handle:
+    imdb_processed = pickle.load(handle)
+
+# use argparse to parse the command-line config (max epoch, batch size,
+# learning rate, device) for single-GPU training
+parser = argparse.ArgumentParser()
+parser.add_argument('-m',
+                    '--max-epoch',
+                    default=5,
+                    type=int,
+                    help='maximum epochs',
+                    dest='max_epoch')
+parser.add_argument('-b',
+                    '--batch-size',
+                    default=128,
+                    type=int,
+                    help='batch size',
+                    dest='bs')
+parser.add_argument('-l',
+                    '--learning-rate',
+                    default=0.01,
+                    type=float,
+                    help='initial learning rate',
+                    dest='lr')
+# determine which gpu to use
+parser.add_argument('-i',
+                    '--device-id',
+                    default=0,
+                    type=int,
+                    help='which GPU to use',
+                    dest='device_id')
+# training params
+parser.add_argument('--mode',
+                    default='lstm',
+                    help='relu, tanh, lstm, gru',
+                    dest='mode')
+parser.add_argument('-s', '--return-sequences',
+                    default=False,
+                    action='store_true',
+                    help='return sequences',
+                    dest='return_sequences')
+parser.add_argument('-d', '--bidirectional',
+                    default=False,
+                    action='store_true',
+                    help='bidirectional lstm',
+                    dest='bidirectional')
+parser.add_argument('-n', '--num-layers',
+                    default=2,
+                    type=int,
+                    help='num layers',
+                    dest='num_layers')
+
+args = parser.parse_args()
+
+# parameters
+seq_limit = 50
+embed_size = 300
+hid = 32
+
+# gpu device
+dev = device.create_cuda_gpu_on(args.device_id)
+
+# create placeholder
+tx = tensor.Tensor((args.bs, seq_limit, embed_size), dev, tensor.float32)
+ty = tensor.Tensor((args.bs, 2), dev, tensor.float32)
+tx.gaussian(0, 1)
+ty.gaussian(0, 1)
+
+# create model
+m = IMDBModel(hid,
+              mode=args.mode,
+              return_sequences=args.return_sequences,
+              bidirectional=args.bidirectional,
+              num_layers=args.num_layers)
+m.set_opt(opt.SGD(args.lr, 0.9))
+
+m.compile([tx], is_train=True, use_graph=True, sequential=False)
+
+# training
+m.train()
+x_train, y_onehot_train, y_train = pad_batch_2vec(
+    imdb_processed['train'], seq_limit, imdb_processed['embed_weights'])
+x_test, y_onehot_test, y_test = pad_batch_2vec(imdb_processed['val'], seq_limit,
+                                               imdb_processed['embed_weights'])
+
+for epoch in range(args.max_epoch):
+    i = 0
+    l = 0
+    correct = 0
+    trials = 0
+    while (i + 1) * args.bs < len(x_train):
+        l_idx = i * args.bs
+        r_idx = l_idx + args.bs
+        x_batch = x_train[l_idx:r_idx]
+        y_onehot_batch = y_onehot_train[l_idx:r_idx]
+        y_batch = y_train[l_idx:r_idx]
+        i += 1
+
+        # reuse placeholders
+        tx.copy_from_numpy(x_batch)
+        ty.copy_from_numpy(y_onehot_batch)
+
+        # train one batch
+        out, loss = m(tx, ty)
+
+        # save output
+        l += tensor.to_numpy(loss)
+        scores = tensor.to_numpy(out)
+        correct += (y_batch == np.argmax(scores, 1)).sum()
+        trials += len(y_batch)
+
+    print("epoch %d loss %s; acc %.3f" % (epoch, l /
+                                          (trials / args.bs), correct / trials))
+    l = 0
+
+# testing:
+m.eval()
+
+i = 0
+correct = 0
+trials = 0
+while (i + 1) * args.bs < len(x_test):
+    l_idx = i * args.bs
+    r_idx = l_idx + args.bs
+    x_batch = x_test[l_idx:r_idx]
+    y_onehot_batch = y_onehot_test[l_idx:r_idx]
+    y_batch = y_test[l_idx:r_idx]
+    i += 1
+
+    # reuse same tensors
+    tx.copy_from_numpy(x_batch)
+    ty.copy_from_numpy(y_onehot_batch)
+
+    # make inference
+    out = m(tx)
+
+    # save correct predictions
+    scores = tensor.to_numpy(out)
+    correct += (y_batch == np.argmax(scores, 1)).sum()
+    trials += len(y_batch)
+
+print("eval acc %.3f" % (correct / trials))
diff --git a/include/singa/core/common.h b/include/singa/core/common.h
index 2c6d1d8..a408650 100644
--- a/include/singa/core/common.h
+++ b/include/singa/core/common.h
@@ -18,22 +18,26 @@
 
 #ifndef SINGA_CORE_COMMON_H_
 #define SINGA_CORE_COMMON_H_
-#include <random>
-#include <chrono>
-#include "singa/singa_config.h"
 #include <atomic>
+#include <chrono>
 #include <memory>
+#include <random>
+
+#include "singa/singa_config.h"
 #include "singa/utils/logging.h"
 
 #ifdef USE_CUDA
-#include <cuda_runtime.h>
 #include <cublas_v2.h>
+#include <cuda_runtime.h>
 #include <curand.h>
 #ifdef USE_CUDNN
 #include <cudnn.h>
 #endif
-#endif // USE_CUDA
+#endif  // USE_CUDA
 
+#ifdef USE_DNNL
+#include <dnnl.hpp>
+#endif  // USE_DNNL
 
 #ifdef USE_OPENCL
 #include "singa/utils/opencl_utils.h"
@@ -45,44 +49,40 @@
 
 namespace lang {
 /// To implemente functions using cpp libraries
-typedef struct _Cpp { } Cpp;
+typedef struct _Cpp {
+} Cpp;
 /// To implemente functions using cuda libraries
-typedef struct _Cuda { } Cuda;
+typedef struct _Cuda {
+} Cuda;
 /// To implement function using opencl libraries
-typedef struct _Opencl { } Opencl;
+typedef struct _Opencl {
+} Opencl;
 }  // namespace lang
 
+class Device;
 /// Block represent a chunk of memory (on device or host).
 class Block {
  public:
-  Block(void* ptr, size_t size, size_t offset = 0)
-      : data_(ptr), size_(size), offset_(offset) {
+  Block(void* ptr, size_t size, Device* device = nullptr, size_t offset = 0)
+      : data_(ptr), size_(size), offset_(offset), device_(device) {
     ref_count_ = 1;  // std::make_shared<std::atomic<int>>(1);
   }
   // Disabled as it is not used currently.
   // Block(void* ptr, size_t size, size_t offset, std::shared_ptr<atomic<int>>
   //  ref) : data_(ptr), size_(size), offset_(offset), ref_count_(ref) {}
-  void* mutable_data() {
-    initialized_ = true;
-    return static_cast<char*>(data_) + offset_;
-  }
-  const void* data() const {
-    CHECK(initialized_) << "Must initialize data before reading it";
-    return static_cast<char*>(data_) + offset_;
-  }
+  void* mutable_data();
+  const void* data() const;
+  void free_data();
+
   size_t size() const { return size_; }
   size_t offset() const { return offset_; }
   int IncRefCount() {
     return ++ref_count_;  // Note do not use ref_count_++;
   }
-  int DecRefCount() {
-    return --ref_count_;
-  }
+  int DecRefCount() { return --ref_count_; }
   int ref_count() const { return ref_count_.load(); }
 
-  bool initialized() const {
-    return initialized_;
-  }
+  bool initialized() const { return initialized_; }
 
  private:
   Block() {}
@@ -90,6 +90,7 @@
   size_t size_ = 0;
   size_t offset_ = 0;
   bool initialized_ = false;
+  Device* device_ = nullptr;
   // Disabled as it is not used currently.
   // std::shared_ptr<std::atomic<int>> ref_count_ = nullptr;
   std::atomic<int> ref_count_;
@@ -99,12 +100,25 @@
   std::mt19937 random_generator;
 #ifdef USE_CUDA
   cublasHandle_t cublas_handle;
-  cudaStream_t stream;
-  curandGenerator_t curand_generator;
+  cudaStream_t stream;
+  curandGenerator_t curand_generator;
+
 #ifdef USE_CUDNN
   cudnnHandle_t cudnn_handle;
 #endif
-#endif // USE_CUDA
+#endif  // USE_CUDA
+
+#ifdef USE_DIST
+  // cuda streams used by communicator
+  cudaStream_t c1;
+  cudaStream_t c2;
+  cudaStream_t s;
+#endif
+
+#ifdef USE_DNNL
+  dnnl::engine dnnl_engine;
+  dnnl::stream dnnl_stream;
+#endif  // USE_DNNL
 
 #ifdef USE_OPENCL
   // This stores the context ID of the OpenCL context controlled by ViennaCL.
diff --git a/include/singa/core/device.h b/include/singa/core/device.h
index 1a960d8..50644c0 100644
--- a/include/singa/core/device.h
+++ b/include/singa/core/device.h
@@ -19,30 +19,35 @@
 #ifndef SINGA_CORE_DEVICE_H_
 #define SINGA_CORE_DEVICE_H_
 
-#include <type_traits>
-#include <vector>
-#include <string>
+#include <chrono>
 #include <functional>
+#include <map>
 #include <memory>
+#include <mutex>
+#include <string>
+#include <type_traits>
+#include <unordered_set>
+#include <vector>
 
-#include "singa/singa_config.h"
 #include "singa/core/common.h"
 #include "singa/core/memory.h"
 #include "singa/core/scheduler.h"
 #include "singa/proto/core.pb.h"
+#include "singa/singa_config.h"
+#include "singa/utils/safe_queue.h"
 
 #ifdef USE_CUDA
 #include "singa/utils/cuda_utils.h"
-#endif // USE_CUDA
+#endif  // USE_CUDA
 
 #ifdef USE_OPENCL
 #include "singa/utils/opencl_utils.h"
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
 
-using std::vector;
-using std::string;
 using std::function;
 using std::shared_ptr;
+using std::string;
+using std::vector;
 
 namespace singa {
 
@@ -50,15 +55,21 @@
 /// There are three types of devices distinguished by their programming
 /// languages, namely cpp, cuda and opencl.
 class Device {
-  public:
+ public:
   // Device() = default;
-  virtual ~Device() {}
+  virtual ~Device();
   /// Constructor with device ID, num of executors (e.g., cuda streams),
   /// max mem size to use (in MB)
   Device(int id, int num_executors);
 
+  void Reset();
+
   virtual void SetRandSeed(unsigned seed) = 0;
 
+  void EnableGraph(bool enable) { graph_enabled_ = enable; }
+
+  static void EnableLazyAlloc(bool enable) { lazy_alloc_ = enable; }
+
   /// Called by Tensor.
   Block* NewBlock(int size);
 
@@ -67,47 +78,62 @@
 
   /// Return the size (bytes) of memory in use
   /// TODO(wangwei) override this function for all devices.
-  virtual size_t GetAllocatedMem() {
-    return 0u;
-  }
+  virtual size_t GetAllocatedMem() { return 0u; }
 
   /// Copy data within or across devices.
   virtual void CopyDataToFrom(Block* dst, Block* src, size_t nBytes,
-                      CopyDirection direction, int dst_offset, int src_offset);
+                              CopyDirection direction, int dst_offset,
+                              int src_offset, Context* ctx);
 
   void CopyDataFromHostPtr(Block* dst, const void* src, size_t nBytes,
-                           size_t dst_offset = 0);
+                           size_t dst_offset = 0, Context* ctx = nullptr);
   /// Submit the operation to the device, which may execute it right now or
   /// delay it depending on the scheduler.
   void Exec(function<void(Context*)>&& fn, const vector<Block*> read_blocks,
-                    const vector<Block*> write_blocks,
-                    bool use_rand_generator = false);
+            const vector<Block*> write_blocks, string op_name = "no_name",
+            bool use_rand_generator = false);
+
+  void RunGraph(bool serial = false);
+
+  void ResetGraph() { graph_->Reset(); }
 
   // Wait for one event.
   // void WaitFor();
 
   /// wait for all operations submitted to this device.
-  void Sync();
-
-  /// Return the programming language for this device.
-  LangType lang() const {
-    return lang_;
-  }
-
-  virtual std::shared_ptr<Device> host() const { return host_;}
-
-  Context* context(int k) {
-    return &ctx_;
-  }
+  virtual void Sync();
 
   int id() const { return id_; }
 
- private:
-  Device() {};
+  /// Return the programming language for this device.
+  LangType lang() const { return lang_; }
+
+  Context* context(int k) { return &ctx_; }
+
+  bool graph_enabled() const { return graph_enabled_; }
+
+  /// Verbosity of the time profiling function:
+  /// verbosity == 0 (default) -> no logging
+  /// verbosity == 1 -> display forward and backward propagation time
+  /// verbosity == 2 -> display each operation time (OP_ID, op name, time)
+  int verbosity() const { return verbosity_; }
+  /// the number of initial iteration that is skipped for time profiling
+  int skip_iteration() const { return skip_iteration_; }
+
+  virtual std::shared_ptr<Device> host() const { return host_; }
+
+  void PrintTimeProfiling();
+  void SetVerbosity(int verbosity) { verbosity_ = verbosity; };
+  void SetSkipIteration(int skip_iteration) {
+    skip_iteration_ = skip_iteration;
+  };
 
  protected:
   /// Execute one operation on one executor.
   virtual void DoExec(function<void(Context*)>&& fn, int executor) = 0;
+  virtual void TimeProfilingDoExec(function<void(Context*)>&& fn, int executor,
+                                   Node* node) = 0;
+  virtual void EvaluateTimeElapsed(Node* node) = 0;
 
   virtual void CopyToFrom(void* dst, const void* src, size_t nBytes,
                           CopyDirection direction, Context* ctx) = 0;
@@ -118,20 +144,33 @@
   /// Free device memory.
   virtual void Free(void* ptr) = 0;
 
+ private:
+  Device(){};
+
  protected:
+  friend class Block;
+  friend class Graph;
+
   int id_ = 0;
   int num_executors_ = 0;
   unsigned seed_ = 0;
-  // Scheduler* scheduler_ = nullptr;
-  // VirtualMemory* vm_ = nullptr;
+  bool graph_enabled_ = false;
+  int verbosity_ = 0;
+  int skip_iteration_ = 5;
+  /// The computational graph
+  Graph* graph_ = nullptr;
   /// Programming language type, could be kCpp, kCuda, kOpencl
   LangType lang_;
-  // SafeQueue<Operation> op_queue_;
-  // SafeQueue<Operation> op_log_;
   /// The host device
   std::shared_ptr<Device> host_;
   // TODO(wangwei) define multiple contexts, one per executor
   Context ctx_;
+  // Scheduler* scheduler_ = nullptr;
+  // VirtualMemory* vm_ = nullptr;
+  // SafeQueue<Operation> op_queue_;
+  // SafeQueue<Operation> op_log_;
+
+  static bool lazy_alloc_;
 };
 
 /// a singleton CppDevice as the host for all devices.
@@ -141,14 +180,17 @@
 /// It runs cpp code.
 class CppCPU : public Device {
  public:
-  ~CppCPU() {};
+  ~CppCPU();
   CppCPU();
 
-  std::shared_ptr<Device> host() const override { return defaultDevice;}
+  std::shared_ptr<Device> host() const override { return defaultDevice; }
   void SetRandSeed(unsigned seed) override;
 
  protected:
   void DoExec(function<void(Context*)>&& fn, int executor) override;
+  void TimeProfilingDoExec(function<void(Context*)>&& fn, int executor,
+                           Node* node) override;
+  void EvaluateTimeElapsed(Node* node) override;
 
   void CopyToFrom(void* dst, const void* src, size_t nBytes,
                   CopyDirection direction, Context* ctx) override;
@@ -160,7 +202,6 @@
   void Free(void* ptr) override;
 };
 
-
 // Implement Device using OpenCL libs.
 // class OpenclDevice : public Device { };
 
@@ -176,9 +217,15 @@
 
   void SetRandSeed(unsigned seed) override;
   size_t GetAllocatedMem() override;
+  void Sync() override;
 
  protected:
   void DoExec(function<void(Context*)>&& fn, int executor) override;
+  void TimeProfilingDoExec(function<void(Context*)>&& fn, int executor,
+                           Node* node) override;
+  void EvaluateTimeElapsed(Node* node) override;
+
+  void SyncBeforeCountingTime();
 
   void CopyToFrom(void* dst, const void* src, size_t nBytes,
                   CopyDirection direction, Context* ctx) override;
@@ -193,7 +240,7 @@
   void Setup();
 
  private:
-	shared_ptr<DeviceMemPool> pool_;
+  shared_ptr<DeviceMemPool> pool_;
 };
 
 /// CudaCPU which uses cudaMallocHost to allocate pinned memory for host.
@@ -204,22 +251,22 @@
 
 // Implement Device using OpenCL libs.
 class OpenclDevice : public singa::Device {
-public:
-
+ public:
   // TODO: Constructor arguments to consider:
   // Path to kernel sources?
   // Select only certain device types?
   OpenclDevice(int id = 0, int num_executors = 1);
   ~OpenclDevice();
 
-// Overridden, inherited methods
+  // Overridden, inherited methods
   void SetRandSeed(unsigned seed) override;
 
   virtual void CopyDataToFrom(Block* dst, Block* src, size_t nBytes,
-                      CopyDirection direction, int dst_offset = 0,
-                      int src_offset = 0) override;
+                              CopyDirection direction, int dst_offset = 0,
+                              int src_offset = 0,
+                              Context* ctx = nullptr) override;
 
-protected:
+ protected:
   /// The OpenCL device that this object represents.
   /// Each OpenclDevice contains exactly one cl::Device for the lifetime of the
   /// object.
@@ -233,7 +280,7 @@
   /// OpenCL programs, then stores them in the Kernels map.
   void BuildPrograms();
 
-// Overridden, inherited methods.
+  // Overridden, inherited methods.
 
   void DoExec(function<void(Context*)>&& fn, int executor) override;
 
@@ -249,22 +296,24 @@
   /// This has the effect of freeing up device memory.
   void Free(void* ptr) override;
 
-private:
-
+ private:
   static const std::string cl_src_path;
 };
 #endif  // USE_OPENCL
 
 /// This class queries all available calculating devices on a given machine
-/// grouped according to manufacturer or device drivers. All methods should be static.
-/// If CUDA or OPENCL are not enabled, then the respective related methods should
+/// grouped according to manufacturer or device drivers. All methods should be
+/// static.
+/// If CUDA or OPENCL are not enabled, then the respective related methods
+/// should return something that indicates their absence (for example, 0 devices);
 /// however they should always be available regardless of compile-time switches.
 class Platform {
-public:
-
+ public:
   /// Return the default host device
   static std::shared_ptr<Device> GetDefaultDevice() {
+    // cannot reset the cpu device; calling Reset() on it leads to an error
+    // defaultDevice->Reset();
     return defaultDevice;
   }
 
@@ -285,13 +334,14 @@
   static const std::string DeviceQuery(int id, bool verbose = false);
 
   /// Create a set of CudaGPU Device using 'num_devices' free GPUs.
-  static const std::vector<std::shared_ptr<Device>>
-  CreateCudaGPUs(const size_t num_devices, size_t init_size = 0);
+  static const std::vector<std::shared_ptr<Device>> CreateCudaGPUs(
+      const size_t num_devices, size_t init_size = 0);
 
   /// Create a set of CudaGPU Device using given GPU IDs.
-  static const std::vector<std::shared_ptr<Device>>
-  CreateCudaGPUsOn(const std::vector<int> &devices, size_t init_size = 0);
-  
+  static const std::vector<std::shared_ptr<Device>> CreateCudaGPUsOn(
+      const std::vector<int>& devices, size_t init_size = 0);
+
+  static std::vector<std::shared_ptr<Device>> UsedDevice;
   /// This function is implementd by Caffe (http://caffe.berkeleyvision.org/).
   /// This function checks the availability of GPU #device_id.
   /// It attempts to create a context on the device by calling cudaFree(0).
@@ -307,35 +357,34 @@
   /// the permission. cudaFree(0) is one of those with no side effect,
   /// except the context initialization.
   static bool CheckDevice(const int device_id);
-#endif // USE_CUDA
+  static std::mutex mtx_;
+#endif  // USE_CUDA
 
 #ifdef USE_OPENCL
 
   const int GetNumOpenclPlatforms();
-  
+
   const int GetNumOpenclDevices();
-  
+
   static const std::shared_ptr<Device> GetDefaultOpenclDevice();
 
-  /// Create a \p num_devices set of valid OpenCL devices, regardless of
-  /// platforms.  If there are fewer valid devices than requested, then this
-  /// method will return as many as possible. If OpenCL is not in use, this
-  /// method will return an empty array.
+/// Create a \p num_devices set of valid OpenCL devices, regardless of
+/// platforms.  If there are fewer valid devices than requested, then this
+/// method will return as many as possible. If OpenCL is not in use, this
+/// method will return an empty array.
 //  static const std::vector<std::shared_ptr<Device>>
 //  CreateOpenclDevices(const size_t num_devices);
 
-  /// Create a set of valid OpenCL devices, regardless of platforms, assigning
-  /// \p id to each device in sequence.
-  /// If there are fewer valid devices than requested, then this method will
-  /// return as many as possible.
-  /// If OpenCL is not in use, this method will return an empty array.
+/// Create a set of valid OpenCL devices, regardless of platforms, assigning
+/// \p id to each device in sequence.
+/// If there are fewer valid devices than requested, then this method will
+/// return as many as possible.
+/// If OpenCL is not in use, this method will return an empty array.
 //  const std::vector<std::shared_ptr<Device>>
 //  CreateOpenclDevices(const vector<int> &id);
-#endif // USE_OPENCL
-
+#endif  // USE_OPENCL
 };
 
-
 }  // namespace singa
 
 #endif  // SINGA_CORE_DEVICE_H_
diff --git a/include/singa/core/memory.h b/include/singa/core/memory.h
index f664f95..a43532e 100644
--- a/include/singa/core/memory.h
+++ b/include/singa/core/memory.h
@@ -19,8 +19,9 @@
 #ifndef SINGA_CORE_MEMORY_H_
 #define SINGA_CORE_MEMORY_H_
 
-#include <mutex>
 #include <atomic>
+#include <mutex>
+
 #include "singa/proto/core.pb.h"
 #include "singa/singa_config.h"
 
@@ -28,7 +29,6 @@
 #include "cnmem.h"
 #endif
 
-
 namespace singa {
 
 /// Manage device memory pool including garbage collection, memory opt.
@@ -36,18 +36,21 @@
 
 class DeviceMemPool {
  public:
-  virtual void Malloc(void** ptr, const size_t size)  = 0;
-  virtual void Free(void* ptr)  = 0;
+  virtual void Malloc(void** ptr, const size_t size) = 0;
+  virtual void Free(void* ptr) = 0;
 
   /// Return a pair for free and total memory managed by this pool.
   virtual std::pair<size_t, size_t> GetMemUsage() {
     return std::make_pair(0u, 0u);
   }
+  virtual std::pair<size_t, size_t> GetMemUsage(int id) {
+    return std::make_pair(0u, 0u);
+  }
   virtual ~DeviceMemPool(){};
 
  protected:
   size_t usage_;
-//  size_t init_size_ = 0, max_size_ = 0;
+  //  size_t init_size_ = 0, max_size_ = 0;
 };
 
 #ifdef USE_CUDA
@@ -62,6 +65,7 @@
   void Free(void* ptr);
 
   std::pair<size_t, size_t> GetMemUsage() override;
+  std::pair<size_t, size_t> GetMemUsage(int id) override;
 
   // release all memory and set cnmem manager to uninitialized
   ~CnMemPool();
@@ -69,16 +73,12 @@
  protected:
   void Init();
 
-
  private:
-
   MemPoolConf conf_;
   // whether the (global) memory pool has been initialized
   bool initialized_ = false;
   // lock on the initialized variable
   std::mutex mtx_;
-
-  static std::atomic<int> pool_count;
 };
 
 class CudaMemPool : public DeviceMemPool {
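
A short sketch of the pool interface above, including the per-device
GetMemUsage(int id) overload added in this change; how the pool is
constructed is left out:

#include "singa/core/memory.h"
#include "singa/utils/logging.h"

void ReportUsage(singa::DeviceMemPool* pool, int gpu_id) {
  auto total = pool->GetMemUsage();          // (free, total) over the pool
  auto per_dev = pool->GetMemUsage(gpu_id);  // (free, total) for one GPU
  LOG(INFO) << "pool free/total: " << total.first << "/" << total.second
            << "; gpu " << gpu_id << ": " << per_dev.first << "/"
            << per_dev.second;
}
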
diff --git a/include/singa/core/scheduler.h b/include/singa/core/scheduler.h
index 3673c6b..b430101 100644
--- a/include/singa/core/scheduler.h
+++ b/include/singa/core/scheduler.h
@@ -15,11 +15,212 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 #ifndef SINGA_CORE_SCHEDULER_H_
 #define SINGA_CORE_SCHEDULER_H_
 
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "singa/core/common.h"
+#include "singa/utils/safe_queue.h"
+
+using std::function;
+using std::string;
+using std::unordered_map;
+using std::vector;
+
 namespace singa {
 
+class Node;
+class Edge;
+class Graph;
+class Device;
+class BlkInfo;
+
+typedef std::vector<Node *> NodeVec;
+typedef std::vector<Edge *> EdgeVec;
+typedef std::vector<Block *> BlockVec;
+typedef std::unordered_set<Block *> BlockSet;
+typedef std::function<void(Context *)> OpFunc;
+typedef std::unordered_map<Block *, BlkInfo *> Blk2InfoMap;
+typedef std::chrono::high_resolution_clock::time_point TimePoint;
+
+enum BlockType { kUnknow, kInput, kParam, kInter, kEnd };
+
+class Node {
+ public:
+  Node(int id, OpFunc &&op, string op_name)
+      : id_(id), op_(std::move(op)), op_name_(op_name) {}
+
+  void AddInEdge(Edge *in_edge);
+  void AddOutEdge(Edge *out_edge);
+
+  // getters of Node
+  int id() const { return id_; }
+  string op_name() const { return op_name_; }
+  const EdgeVec &in_edges() const { return in_edges_; }
+  const EdgeVec &out_edges() const { return out_edges_; }
+  float time_elapsed() const { return time_elapsed_; }
+
+  // time profiling
+  void time_elapsed_inc(float time) { time_elapsed_ += time; }
+
+ private:
+  friend Graph;
+
+  int id_;
+  OpFunc op_;
+  EdgeVec in_edges_;
+  EdgeVec out_edges_;
+
+  string op_name_;
+  float time_elapsed_ = 0;
+
+#ifdef USE_CUDA
+  cudaEvent_t start_;
+  cudaEvent_t end_;
+  friend class CudaGPU;
+#endif  // USE_CUDA
+};
+
+class Edge {
+ public:
+  Edge(int id, Block *blk, Node *src_node, Node *dst_node)
+      : id_(id), blk_(blk), src_node_(src_node), dst_node_(dst_node) {}
+
+  void SetBlock(Block *blk);
+  void SetSrcNode(Node *src_node);
+  void SetDstNode(Node *dst_node);
+
+  // getters of Edge
+  int id() const { return id_; }
+  Block *block() const { return blk_; }
+  Node *src_node() const { return src_node_; }
+  Node *dst_node() const { return dst_node_; }
+
+ private:
+  friend Graph;
+
+  int id_;
+  Block *blk_;
+  Node *src_node_;
+  Node *dst_node_;
+};
+
+class BlkInfo {
+ public:
+  BlkInfo(int id, Block *blk, BlockType type = BlockType::kUnknow)
+      : id_(id), blk_(blk), type_(type), graph_ref_(0), write_edge_(nullptr) {}
+
+  // getters of BlkInfo
+  int id() const { return id_; }
+  Block *block() const { return blk_; }
+  BlockType type() const { return type_; }
+  int graph_ref() const { return graph_ref_; }
+  Edge *write_edge() const { return write_edge_; }
+  const NodeVec &used_nodes() const { return used_nodes_; }
+  Node *used_node(const size_t idx) const;
+
+ private:
+  friend Graph;
+
+  int id_;
+  Block *blk_;
+  BlockType type_;
+  int graph_ref_;
+  Edge *write_edge_;    // the edge of last node that writes data into blk
+  NodeVec used_nodes_;  // the nodes that use this block (in order of execution)
+};
+
+class Graph {
+ public:
+  struct CBData {
+    Graph *graph_;
+    Node *node_;
+
+    CBData(Graph *graph, Node *node) : graph_(graph), node_(node) {}
+  };
+
+  ~Graph();
+  Graph(Device *device);
+
+  void Reset();
+  void Debug();
+  void RunGraph();
+  void RunInSerial();
+  void PrintTimeProfiling();
+  void AddOperation(OpFunc &&op, const BlockVec &read_blocks,
+                    const BlockVec &write_blocks, string op_name = "no_name");
+
+  // getters of Graph
+  const NodeVec &nodes() const { return nodes_; }
+  const EdgeVec &edges() const { return edges_; }
+  const Blk2InfoMap &blocks() const { return blocks_; }
+
+  const BlockSet &leaf_blocks() const { return leaf_blocks_; }
+
+  bool dirty() const { return dirty_; }
+  const NodeVec &begin_nodes() const { return begin_nodes_; }
+  const std::vector<NodeVec> &next_nodes() const { return next_nodes_; }
+  const std::vector<BlockVec> &free_blocks() const { return free_blocks_; }
+  int iteration() const { return iteration_; }
+
+  Node *node(const size_t idx) const;
+  Edge *edge(const size_t idx) const;
+  BlkInfo *block(Block *blk) const;
+
+  Node *begin_node(const size_t idx) const;
+  const NodeVec &next_nodes(const size_t idx) const;
+  const BlockVec &free_blocks(const size_t idx) const;
+
+ private:
+  void Analyze();
+  void FreeLoop();
+  void AnalyzeNodes();
+  void AnalyzeEdges();
+  void TimeProfilingDoExec(Node *curNode);
+  void AddSyncOp(function<void(Context *)> &&op, string op_name = "no_name");
+
+  void step() { iteration_++; }
+  void time_elapsed_inc(float time) { time_elapsed_ += time; }
+  void TakeStartTime(TimePoint &start);
+  void EvaluateTimeElapsed(const TimePoint &start);
+
+  // static void CUDART_CB Callback(cudaStream_t stream, cudaError_t status,
+  //                                void *data);
+
+ private:
+  Device *device_;
+
+  // nodes, edges and blocks included in the calculation graph
+  NodeVec nodes_;
+  EdgeVec edges_;
+  Blk2InfoMap blocks_;
+
+  // Leaf blocks written by the previous operations, used for sync op
+  BlockSet leaf_blocks_;
+
+  // Computational graph analysis
+  bool dirty_ = false;
+  bool in_serial_ = false;
+  NodeVec begin_nodes_;
+  std::vector<NodeVec> next_nodes_;
+  std::vector<BlockVec> free_blocks_;
+
+  // Time Profiling
+  int iteration_ = 0;
+  float time_elapsed_ = 0;
+
+  SafeQueue<int> free_queue_;
+};
+
 /// Scheduling Tensor operations with dependency detection.
 class Scheduler {};
 
diff --git a/include/singa/core/tensor.h b/include/singa/core/tensor.h
index 6621fa0..aea988d 100644
--- a/include/singa/core/tensor.h
+++ b/include/singa/core/tensor.h
@@ -18,18 +18,17 @@
 
 #ifndef SINGA_CORE_TENSOR_H_
 #define SINGA_CORE_TENSOR_H_
-
-#include <vector>
-#include <tuple>
 #include <memory>
+#include <tuple>
+#include <vector>
 
 #include "singa/core/common.h"
 #include "singa/core/device.h"
 #include "singa/proto/core.pb.h"
 #include "singa/utils/logging.h"
 
-using std::vector;
 using std::tuple;
+using std::vector;
 namespace singa {
 
 typedef vector<size_t> Shape;
@@ -51,95 +50,125 @@
 /// Tensor.
 /// For all operations, if the result tensor is passed as an argument,
 /// then it must be set up correctly (shape, device). Otherwise, runtime error
-/// like SegmentFault would happen. Simply type/device check would be conducted.
+/// like a segmentation fault would happen. Simple type/device checks apply.
 class Tensor {
  public:
   ~Tensor();
   Tensor();
-  explicit Tensor(Shape &&shape, DataType dtype = kFloat32);
+
+  /// Constructor using default device.
   explicit Tensor(const Shape &shape, DataType dtype = kFloat32);
-  Tensor(Shape &&shape, std::shared_ptr<Device> dev, DataType dtype = kFloat32);
+
+  /// Constructor with shape, device and data type
   Tensor(const Shape &shape, std::shared_ptr<Device> dev,
          DataType dtype = kFloat32);
 
-  /// Copy Tensor to share the internal data.  No deep copy.
+  /// Copy constructor.  No deep copy.
   Tensor(const Tensor &from);
-  /// Copy Tensor to share the internal data.  No deep copy.
+
+  /// Move constructor.  No deep copy.
   Tensor(Tensor &&from);
 
+  // --------------------------------------------------------------------------
+  // ---Following methods return info of the class without making any changes--
+  // --------------------------------------------------------------------------
+
   /// For functions in xx_math.cc to access the block.
   /// Users should not operate against Block directly.
   /// block_ is allocated in constructors.
   Block *block() const { return block_; }
-  void SetBlock(Block *block);
 
   std::shared_ptr<Device> device() const { return device_; }
 
-  /// return immutable Tensor values with given type.
+  /// Return immutable Tensor values with given type.
   template <typename SType>
   const SType *data() const {
     return static_cast<const SType *>(block()->data());
   }
 
-  /// used for swig code to convert Tensor into numpy array.
-  /// It gets data into 'value'
-  template <typename SType>
-  void GetValue(SType *value, const size_t num) {
-    CHECK(device_ == defaultDevice);
-    const SType* ptr = data<SType>();
-    for(size_t i = 0; i < num; i++) value[i] = ptr[i];
-  }
-
   /// data type, including kFloat16, kFloat32, kInt
-  const DataType data_type() const { return data_type_; }
+  DataType data_type() const { return data_type_; }
 
   const Shape &shape() const { return shape_; }
 
-  const size_t shape(const size_t idx) const {
+  size_t shape(const size_t idx) const {
     CHECK_LT(idx, shape_.size());
     return shape_.at(idx);
   }
 
   size_t nDim() const { return shape_.size(); }
 
+  size_t n_dim() const { return shape_.size(); }
+
   bool empty() const { return nDim() == 0; }
 
-  bool transpose() const { return transpose_; }
+  /// Strides are non-increasing for a contiguous tensor; a transposed tensor
+  /// has an increasing stride somewhere (stride-0 broadcast dims are skipped)
+  bool transpose() const {
+    if (!stride_.empty()) {
+      auto last = stride_.front();
+      for (auto s : stride_) {
+        if (s > last && last > 0) return true;
+        if (s > 0) last = s;
+      }
+    }
+    return false;
+  }
 
-  /// return true if the content of the tensor is initialized
+  bool broadcasted() const {
+    int strideProduct = 1;
+    for (const auto &i : stride_) strideProduct *= i;
+    if (strideProduct == 0) {
+      return true;
+    }
+    return false;
+  }
+
+  const vector<int> &stride() const { return stride_; }
+
+  /// Return true if the content of the tensor is initialized
   bool initailized() const {
     return block_ != nullptr && block_->initialized();
   }
 
-  /// return number of total elements
-  size_t Size() const {
+  /// Return number of total elements
+  size_t Size() const { return size(); }
+
+  size_t size() const {
     if (block_ == nullptr) return 0u;
     CHECK_EQ(block_->size() % SizeOf(data_type_), 0u);
     return block_->size() / SizeOf(data_type_);
   }
 
-  /// return memory size (i.e., Bytes)
+  /// Return memory size (i.e., Bytes)
   size_t MemSize() const { return block_->size(); }
 
-  /// Reset the tensor shape, it may reallocate block, if MemSize() changes.
-  void Reshape(const Shape &shape);
-  void Reshape(Shape &&shape);
+  size_t mem_size() const { return block_->size(); }
 
-  /// Reset the shape, device, and data type as given tensor.
-  /// If block size changes, then reallocate a new block. The previous block
-  /// would
-  /// be deleted.
-  void ResetLike(const Tensor &t);
+  /// used for swig code to convert Tensor into numpy array.
+  /// It gets data into 'value'
+  template <typename SType>
+  void GetValue(SType *value, const size_t num) const;
 
-  /// Reset the data type, it would reallocate block if type changes.
-  void AsType(const DataType type);
+  template <typename SType>
+  void get_value(SType *value, const size_t num) const;
 
-  /// Reset the device.
-  /// If the target device is a diff device, then do deep data copy.
-  void ToDevice(std::shared_ptr<Device> dev);
+  /// Serialize data, shape and transpose to protobuf object.
+  void ToProto(singa::TensorProto *proto) const;
 
-  /// Equivalent to ToDevice(host_dev).
-  void ToHost();
+  void to_proto(singa::TensorProto *proto) const;
+
+  /// Return average L1 norm
+  float L1() const;
+
+  float l1() const;
+
+  /// Return average L2 norm
+  float L2() const;
+
+  float l2() const;
+  // --------------------------------------------------------------------------
+  // ---Following methods changes the internal data
+  // --------------------------------------------------------------------------
 
   /// Set each element of the tensor to be x
   template <typename SType>
@@ -149,7 +178,7 @@
   /// memory with 'offset' (elements).
   template <typename SType>
   void CopyDataFromHostPtr(const SType *src, const size_t num,
-                           const size_t offset = 0);
+                           const size_t offset = 0) const;
 
   /// Copy data from another Tensor which may be on a diff device.
   /// Meta data would not be copied!
@@ -158,33 +187,39 @@
   /// Deserialize data, shape and transpose from protobuf object.
   void FromProto(const singa::TensorProto &proto);
 
-  /// Serialize data, shape and transpose to protobuf object.
-  void ToProto(singa::TensorProto *proto) const;
+  /// TODO(wangwei) merge RepeatData into Repeat?
+  void RepeatData(const vector<size_t> &repeats, int axis, int total_repeats,
+                  const Tensor &other);
+
+  // --------------------------------------------------------------------------
+  // ---Following methods returns a new Tensor without change original tensor
+  // --------------------------------------------------------------------------
+
+  Tensor Repeat(const vector<size_t> &repeats, int axis,
+                std::shared_ptr<Device> device = nullptr);
 
   /// Return an identical Tensor whose data is deep-copied to the given
   /// device. If 'device' is nullptr, clone on the current device.
   Tensor Clone(std::shared_ptr<Device> device = nullptr) const;
 
-  // Tensor operations
+  void Clone(Tensor *&other, std::shared_ptr<Device> device = nullptr) const;
 
-  /// Matrix transpose.  Valid only if shape.size() == 2.
-  /// No data copy, just set the transpose_ filed of the returned tensor.
-  Tensor T() const;
-
-  /// Copy the meta info with data block shared.
+  // --------------------------------------------------------------------------
+  // ---Following methods change the tensor and return itself
+  // --------------------------------------------------------------------------
+  /// Copy assignment
   Tensor &operator=(const Tensor &in);
 
-  /// Copy the meta info with data block shared.
+  /// Move assignment
   Tensor &operator=(Tensor &&in);
 
   Tensor &operator+=(const Tensor &in);
-  // void operator+=(Tensor&& in);
+
   Tensor &operator-=(const Tensor &in);
-  // void operator-=(Tensor&& in);
+
   Tensor &operator*=(const Tensor &in);
-  // void operator*=(Tensor&& in);
+
   Tensor &operator/=(const Tensor &in);
-  // void operator/=(Tensor&& in);
 
   // Scalar operations.
 
@@ -204,26 +239,75 @@
   template <typename SType>
   Tensor &operator/=(const SType x);
 
-  /// Return average L1 norm
-  float L1() const;
-  /// Return average L2 norm
-  float L2() const;
+  /// If the tensor is transposed, transform it to contiguous memory
+  Tensor &Contiguous();
+
+  /// change the shape (and stride); the block may be reallocated.
+  Tensor &Reshape(const Shape &shape);
+
+  /// Resize the memory and return itself
+  Tensor &Resize(const Shape &shape);
+
+  /// Matrix transpose.  Valid only if shape.size() == 2.
+  Tensor &T();
+
+  /// Reverse the shape vector
+  Tensor &Transpose();
+
+  /// Change the axes
+  Tensor &Transpose(const vector<size_t> &axes);
+
+  /// Return a view of the input tensor whose shape is broadcasted to be
+  /// compatible with the given shape
+  Tensor &Broadcast(const Shape &shape, const int ignore_last_dim = 0);
+
+  /// Reset the shape, device, and data type as given tensor.
+  /// If block size changes, then reallocate a new block.
+  /// The previous block would be deleted.
+  Tensor &ResetLike(const Tensor &t);
+
+  /// Reset the data type, it would reallocate block if type changes.
+  Tensor AsType(const DataType type);
+
+  /// Reset the device.
+  /// If the target device is a diff device, then do deep data copy.
+  Tensor &ToDevice(std::shared_ptr<Device> dev);
+
+  /// Equivalent to ToDevice(host_dev).
+  Tensor &ToHost();
 
  protected:
-  bool transpose_ = false;
+  // generate strides automatically if stride field is not passed
+  void generate_stride() {
+    stride_.clear();
+    if (shape_.size() == 0) {
+      stride_.push_back(1);
+      return;
+    }
+
+    size_t dim = Size();
+    int cumulative_product = 1;
+    for (size_t n = 0; n < shape_.size(); ++n) {
+      cumulative_product = cumulative_product * shape_[n];
+      stride_.push_back(dim / cumulative_product);
+    }
+  }
+
+  void set_strides(const vector<int> new_strides) { stride_ = new_strides; }
+
+ protected:
   DataType data_type_ = kFloat32;
   std::shared_ptr<Device> device_ = nullptr;
   /// Note: block_ is allocated in lazy manner to avoid frequent malloc/free.
   /// If you want to get an allocated Block, use block() instead of block_.
   Block *block_ = nullptr;
   Shape shape_ = {};
-};
+  vector<int> stride_ = {};
+};  // end of tensor class
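
As a worked example of generate_stride() above, assuming a row-major tensor
of shape {2, 3, 4} (so Size() == 24):

// generate_stride() trace for shape_ = {2, 3, 4}:
//   n = 0: cumulative_product = 2   -> stride = 24 / 2  = 12
//   n = 1: cumulative_product = 6   -> stride = 24 / 6  = 4
//   n = 2: cumulative_product = 24  -> stride = 24 / 24 = 1
// stride_ == {12, 4, 1}; after Transpose() the stride vector is permuted,
// so transpose() detects the increase and reports true.
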
 
-typedef Shape::iterator ShapeIter;
 inline size_t Product(const Shape &shape, int start = 0, size_t len = 0) {
   if (len == 0) len = shape.size();
-  if (len == 0)
-    return 0;
+  if (len == 0) return 0;
   CHECK_LE(len, shape.size());
   size_t v = 1;
   for (unsigned int i = start; i < len; i++) v *= shape[i];
@@ -241,37 +325,99 @@
   return static_cast<ToType>(x);
 }
 
-Tensor Reshape(const Tensor &in, const Shape &s);
-Tensor Reshape(const Tensor &in, Shape &&s);
+Tensor Boradcast(const Shape &shape);
 
-// For tensors with sparse content, e.g., missing columns or rows.
-// class SparseTensor : public Tensor {};
+/// Reshape the given tensor to generate a new tensor; the total volume must
+/// match. The result shares memory with 'in' when possible.
+Tensor Reshape(const Tensor &in, const Shape &s);
+
+Tensor Contiguous(const Tensor &in);
+
+Tensor Resize(const Tensor &in, const Shape &s);
+
+/// Reverse the shape vector
+Tensor Transpose(const Tensor &in);
+
+/// Return a view of the input tensor whose shape is broadcasted to be
+/// compatible with the given shape
+Tensor Broadcast(const Tensor &in, const Shape &shape,
+                 const int ignore_last_dim = 0);
+
+/// Change the axes
+Tensor Transpose(const Tensor &in, const vector<size_t> &axes);
 
 /// Copy 'num' elements of src to dst.
 /// The first 'src_offset' ('dst_offset') elements will be skipped.
 void CopyDataToFrom(Tensor *dst, const Tensor &src, const size_t num,
                     const size_t dst_offset = 0, const size_t src_offset = 0);
 
+void RepeatDataToFrom(bool broadcast_flag, const vector<size_t> &repeats,
+                      int axis, Tensor *dst, const Tensor &in,
+                      const size_t num);
+
 // =============Element-wise operations====================================
 Tensor Abs(const Tensor &in);
+Tensor Erf(const Tensor &in);
+Tensor Ceil(const Tensor &in);
+Tensor Floor(const Tensor &in);
+Tensor Round(const Tensor &in);
+Tensor RoundE(const Tensor &in);
 Tensor Exp(const Tensor &in);
 Tensor Log(const Tensor &in);
 Tensor ReLU(const Tensor &in);
 Tensor Sigmoid(const Tensor &in);
 Tensor Sign(const Tensor &in);
+Tensor SoftPlus(const Tensor &in);
+Tensor SoftSign(const Tensor &in);
 Tensor Sqrt(const Tensor &in);
 Tensor Square(const Tensor &in);
+Tensor Cos(const Tensor &in);
+Tensor Cosh(const Tensor &in);
+Tensor Acos(const Tensor &in);
+Tensor Acosh(const Tensor &in);
+Tensor Sin(const Tensor &in);
+Tensor Sinh(const Tensor &in);
+Tensor Asin(const Tensor &in);
+Tensor Asinh(const Tensor &in);
+Tensor Tan(const Tensor &in);
 Tensor Tanh(const Tensor &in);
+Tensor Atan(const Tensor &in);
+Tensor Atanh(const Tensor &in);
+Tensor Transform(const Tensor &in);
 
 void Abs(const Tensor &in, Tensor *out);
+void Erf(const Tensor &in, Tensor *out);
+void Ceil(const Tensor &in, Tensor *out);
+void Floor(const Tensor &in, Tensor *out);
+void Round(const Tensor &in, Tensor *out);
+void RoundE(const Tensor &in, Tensor *out);
 void Exp(const Tensor &in, Tensor *out);
 void Log(const Tensor &in, Tensor *out);
 void ReLU(const Tensor &in, Tensor *out);
 void Sigmoid(const Tensor &in, Tensor *out);
 void Sign(const Tensor &in, Tensor *out);
+void SoftPlus(const Tensor &in, Tensor *out);
+void SoftSign(const Tensor &in, Tensor *out);
 void Sqrt(const Tensor &in, Tensor *out);
 void Square(const Tensor &in, Tensor *out);
+void Cos(const Tensor &in, Tensor *out);
+void Cosh(const Tensor &in, Tensor *out);
+void Acos(const Tensor &in, Tensor *out);
+void Acosh(const Tensor &in, Tensor *out);
+void Sin(const Tensor &in, Tensor *out);
+void Sinh(const Tensor &in, Tensor *out);
+void Asin(const Tensor &in, Tensor *out);
+void Asinh(const Tensor &in, Tensor *out);
+void Tan(const Tensor &in, Tensor *out);
 void Tanh(const Tensor &in, Tensor *out);
+void Atan(const Tensor &in, Tensor *out);
+void Atanh(const Tensor &in, Tensor *out);
+void Transform(const Tensor &in, Tensor *out);
+
+/// Element-wise operation, out[i]= (in2[i] > 0) ? in1[i] : 0.f
+Tensor ReLUBackward(const Tensor &in1, const Tensor &in2);
+void ReLUBackward(const Tensor &in1, const Tensor &in2, Tensor *out);
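
Each element-wise op above comes in an allocating form and an out-parameter
form; a small sketch using the declarations above (`x` is an illustrative,
already-initialized tensor):

singa::Tensor ApplyReLU(const singa::Tensor& x) {
  singa::Tensor y = singa::ReLU(x);          // allocating form
  singa::Tensor out(x.shape(), x.device());  // result set up by the caller
  singa::ReLU(x, &out);                      // out-parameter form
  return out;
}
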
 
 /// Element-wise operation, out[i]=in[i]^x
 template <typename SType>
@@ -291,8 +437,8 @@
 void LT(const Tensor &in, const SType x, Tensor *out);
 
 /// Element-wise operation, out[i]= (in1[i] < in2[i]) ? 1.f : 0.f
-Tensor operator<(const Tensor &in1, const Tensor& in2);
-void LT(const Tensor &in1, const Tensor& in2, Tensor *out);
+Tensor operator<(const Tensor &in1, const Tensor &in2);
+void LT(const Tensor &in1, const Tensor &in2, Tensor *out);
 
 /// Element-wise operation, out[i]= (in[i] <= x) ? 1.f : 0.f
 template <typename SType>
@@ -301,8 +447,8 @@
 void LE(const Tensor &in, const SType x, Tensor *out);
 
 /// Element-wise operation, out[i]= (in1[i] <= in2[i]) ? 1.f : 0.f
-Tensor operator<=(const Tensor &in1, const Tensor& in2);
-void LE(const Tensor &in1, const Tensor& in2, Tensor *out);
+Tensor operator<=(const Tensor &in1, const Tensor &in2);
+void LE(const Tensor &in1, const Tensor &in2, Tensor *out);
 
 /// Element-wise operation, out[i]= (in[i] > x) ? 1.f : 0.f
 template <typename SType>
@@ -311,9 +457,8 @@
 void GT(const Tensor &in, const SType x, Tensor *out);
 
 /// Element-wise operation, out[i]= (in1[i] > in2[i]) ? 1.f : 0.f
-Tensor operator>(const Tensor &in1, const Tensor& in2);
-void GT(const Tensor &in1, const Tensor& in2, Tensor *out);
-
+Tensor operator>(const Tensor &in1, const Tensor &in2);
+void GT(const Tensor &in1, const Tensor &in2, Tensor *out);
 
 /// Element-wise operation, out[i]= (in[i] >= x) ? 1.f : 0.f
 template <typename SType>
@@ -322,9 +467,18 @@
 void GE(const Tensor &in, const SType x, Tensor *out);
 
 /// Element-wise operation, out[i]= (in1[i] >= in2[i]) ? 1.f : 0.f
-Tensor operator>=(const Tensor &in1, const Tensor& in2);
-void GE(const Tensor &in1, const Tensor& in2, Tensor *out);
+Tensor operator>=(const Tensor &in1, const Tensor &in2);
+void GE(const Tensor &in1, const Tensor &in2, Tensor *out);
 
+/// Element-wise operation, out[i]= (in[i] == x) ? 1.f : 0.f
+template <typename SType>
+Tensor operator==(const Tensor &in, const SType x);
+template <typename SType>
+void EQ(const Tensor &in, const SType x, Tensor *out);
+
+/// Element-wise operation, out[i]= (in1[i] == in2[i]) ? 1.f : 0.f
+Tensor operator==(const Tensor &in1, const Tensor &in2);
+void EQ(const Tensor &in1, const Tensor &in2, Tensor *out);
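
The new operator== overloads mirror the existing <, <=, >, >= family and
yield 0/1 masks; a minimal sketch:

singa::Tensor EqualMask(const singa::Tensor& a, const singa::Tensor& b) {
  // mask[i] = (a[i] == b[i]) ? 1.f : 0.f, per the declarations above
  return a == b;
}
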
 
 Tensor operator+(const Tensor &lhs, const Tensor &rhs);
 void Add(const Tensor &lhs, const Tensor &rhs, Tensor *out);
@@ -366,6 +520,7 @@
 
 template <typename SType = float>
 SType Sum(const Tensor &in);
+
 // ============Matrix (row/column) operations==================================
 /// Average elements in the Tensor, currently only support vector and matrix.
 /// if 'axis' is 0, average all rows into a single row
@@ -394,8 +549,13 @@
 void MultRow(const Tensor &v, Tensor *M);
 /// Do softmax for each row. 'in' could be a 1-d or 2-d Tensor.
 Tensor SoftMax(const Tensor &in);
+Tensor SoftMax(const Tensor &in, int axis);
+Tensor SoftMaxBackward(const Tensor &in, int axis, const Tensor &fdout);
+
+Tensor RowMax(const Tensor &in);
 /// Do softmax for each row. 'in' could be a 1-d or 2-d Tensor.
 void SoftMax(const Tensor &in, Tensor *out);
+void SoftMax(const Tensor &in, Tensor *out, int axis);
 /// Sub column 'v' by each column of matrix M
 void SubColumn(const Tensor &v, Tensor *M);
 /// Sub row 'v' by each row of matrix M; write results into 'out'
@@ -410,6 +570,7 @@
 /// if 'axis' is 1, sum all columns into a single column
 /// TODO(wangwei) support arbitrary Tensor like numpy.sum
 Tensor Sum(const Tensor &in, const int axis);
+Tensor SumAll(const Tensor &in);
 
 // ================Random operations==========================================
 /// For each element x set x = 1 if random() < p; otherwise x = 0.
@@ -429,6 +590,8 @@
 template <typename SType>
 void Axpy(SType alpha, const Tensor &in, Tensor *out);
 
+void Axpy(const Tensor &alpha, const Tensor &in, Tensor *out);
+
 /// Do matrix-vector or matrix-matrix multiplication depending
 /// on the Tensor shape.  result = A * B
 Tensor Mult(const Tensor &A, const Tensor &B);
@@ -452,19 +615,29 @@
 /// each instance, t[i] could be 2 or [0, 0, 1]. If one instance could have
 /// multiple labels, then t[i] could be [1, 0, 1].
 /// The loss is computed into 'loss'.
 void ComputeCrossEntropy(const Tensor &p, const Tensor &t, Tensor *loss);
+
 /// Compute the dx, given prediction probability 'p' (p=softmax(x)) and
 /// the target (ground truth) labels 't'. 'p' and 't' are either 1-d vector
 /// or 2-d matrix. dx has the same shape as 'p' and is computed into p.
 void SoftmaxCrossEntropyBwd(const Tensor &t, Tensor *p);
 
+/// To be called by pysinga autograd operations;
+/// swig ignores the const qualifier
+/// http://www.swig.org/Doc3.0/SWIGPlus.html#SWIGPlus_const
+Tensor CrossEntropyFwd(const Tensor &p, const Tensor &t);
+Tensor SoftmaxCrossEntropyBwd(const Tensor &p, const Tensor &t);
+
 /// Return a tensor consisting of rows ([start, end)) from 'in'. It copies the
 /// values from 'in'. 'in' is a 2D Tensor.
 Tensor CopyRows(const Tensor &in, const size_t start, const size_t end);
 /// Alias of CopyRows
 Tensor SliceRows(const Tensor &in, const size_t start, const size_t end);
 /// Slice the input tensor along the given axis to generate a new tensor
-Tensor SliceOn(const Tensor &in, const size_t start, const size_t end, int axis);
+Tensor SliceOn(const Tensor &in, const size_t start, const size_t end,
+               int axis);
 /// Return a tensor consisting of columns ([start, end)) from 'in'. It copies
 /// the values from 'in'. 'in' is a  2D Tensor.
 Tensor CopyColumns(const Tensor &in, const size_t start, const size_t end);
@@ -474,7 +647,7 @@
 /// tensor in 'in' is a 2D tensor. Values are copied, no memory sharing.
 Tensor ConcatenateRows(const vector<Tensor> &in);
 /// Return a tensor that concatenates the input tensors along the given axis.
-Tensor ConcatOn(const vector<Tensor> &in, int axis);
+Tensor ConcatOn(const std::vector<Tensor> &in, int axis);
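
A sketch of slicing and re-joining along an axis with SliceOn and ConcatOn
as declared above; the split point is illustrative:

singa::Tensor RoundTrip(const singa::Tensor& in) {
  auto head = singa::SliceOn(in, 0, 2, /*axis=*/0);            // rows [0, 2)
  auto tail = singa::SliceOn(in, 2, in.shape(0), /*axis=*/0);  // rows [2, n)
  return singa::ConcatOn({head, tail}, /*axis=*/0);
}
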
 /// Alias name for function ConcatenateRows
 Tensor ConcatRows(const vector<Tensor> &in);
 /// Return a tensor which is horizontally stacked from tensors in 'in'. Each
diff --git a/include/singa/io/communicator.h b/include/singa/io/communicator.h
new file mode 100644
index 0000000..3f738ea
--- /dev/null
+++ b/include/singa/io/communicator.h
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SINGA_DIST_COMMUNICATOR_H_
+#define SINGA_DIST_COMMUNICATOR_H_
+
+#ifdef USE_DIST
+
+#include <cuda_runtime.h>
+#include <cusparse.h>
+#include <mpi.h>
+#include <nccl.h>
+#include <unistd.h>
+
+#include <cstdint>
+#include <iostream>
+#include <memory>
+
+#include "cuda_fp16.h"
+#include "singa/core/tensor.h"
+using std::vector;
+
+namespace singa {
+
+#define CUSPARSE_CHECK(cmd)                                             \
+  do {                                                                  \
+    cusparseStatus_t e = cmd;                                           \
+    if (e != CUSPARSE_STATUS_SUCCESS) {                                 \
+      printf("Falied: Cusparse Error %s:%d '%d'\n", __FILE__, __LINE__, \
+             int(e));                                                   \
+      exit(EXIT_FAILURE);                                               \
+    }                                                                   \
+  } while (0)
+
+#define MPICHECK(cmd)                                                  \
+  do {                                                                 \
+    int e = cmd;                                                       \
+    if (e != MPI_SUCCESS) {                                            \
+      printf("Failed: MPI error %s:%d '%d'\n", __FILE__, __LINE__, e); \
+      exit(EXIT_FAILURE);                                              \
+    }                                                                  \
+  } while (0)
+
+#define NCCLCHECK(cmd)                                              \
+  do {                                                              \
+    ncclResult_t r = cmd;                                           \
+    if (r != ncclSuccess) {                                         \
+      printf("Failed, NCCL error %s:%d '%s'\n", __FILE__, __LINE__, \
+             ncclGetErrorString(r));                                \
+      exit(EXIT_FAILURE);                                           \
+    }                                                               \
+  } while (0)
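
All three wrappers use the do { ... } while (0) idiom so each macro expands
to a single statement, which keeps unbraced control flow well-formed; an
illustrative sketch:

if (initialized)
  MPICHECK(MPI_Barrier(MPI_COMM_WORLD));  // safe without braces
else
  printf("skipping barrier\n");
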
+
+class NcclIdHolder {
+ public:
+  ncclUniqueId id;
+  NcclIdHolder();
+  ~NcclIdHolder();
+};
+
+class Communicator {
+ public:
+  int global_rank;
+  int world_size;
+  int local_rank;
+
+  Communicator(int limit);
+  Communicator(int local_rank, int world_size, const NcclIdHolder &holder,
+               int size);
+  ~Communicator();
+  void synch(Tensor &t);
+  void fusedSynch(vector<Tensor> &t, bool send = true);
+  void synchHalf(Tensor &t);
+  void fusedSynchHalf(vector<Tensor> &t, bool send = true);
+  void fusedSparsification(vector<Tensor> &t, Tensor &accumulation,
+                           float sparsThreshold, bool topK);
+  void fusedSparsification(vector<Tensor> &t, float sparsThreshold, bool topK);
+  void sparsification(Tensor &t, Tensor &accumulation, float sparsThreshold,
+                      bool topK);
+  void sparsification(Tensor &t, float sparsThreshold, bool topK);
+  void wait();
+
+ private:
+  void generateBlocks(Tensor &t);
+  void generateBlocks(std::vector<Tensor> &t);
+  void allReduce(int size, void *sendbuff, void *recvbuff,
+                 ncclDataType_t ncclType, Context *ctx);
+  void setup();
+  void sparsInit();
+  void halfInit();
+  void _fusedSparsification(vector<Tensor> &t, Tensor *accumulation,
+                            float sparsThreshold, bool topK, Context *ctx);
+  void _sparsification(Tensor &t, Tensor *accumulation, float sparsThreshold,
+                       bool topK, Context *ctx);
+  void valSparsAllReduce(size_t num, float *accumulation, Context *ctx);
+  void topKSparsAllReduce(size_t num, float *accumulation, Context *ctx);
+
+  // last group of synchronized memory blocks
+  std::shared_ptr<Device> device_ = nullptr;
+  std::vector<Block *> blocks_;
+  std::vector<Block *> prev_blocks_;
+
+  ncclUniqueId id;
+  ncclComm_t comm;
+  cudaEvent_t event;
+
+  bool UseMPI;
+  size_t maxSize;
+
+  // normal synch
+  size_t sendBuffOffset = 0;
+  float *fusedSendBuff;
+  float *fusedRecvBuff;
+
+  // half synch
+  bool halfInitialized;
+  __half *fusedSendBuffHalf;
+  __half *fusedRecvBuffHalf;
+
+  // sparsification
+  cusparseHandle_t cusparse_handle;
+  cusparseMatDescr_t descrC;
+  bool sparsInitialized;
+  int *xInd;
+  float *xVal;
+  int *nnz;
+  int *nnzAll;
+  int *nnzGPU;
+  int *nnzAllGPU;
+  float threshold;
+  float *sparsSendBuff;
+  float *sparsRecvBuff;
+  float *backupBuff;
+  int *fusedIndex;
+};
+}  // namespace singa
+
+#endif  // USE_DIST
+#endif
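
A heavily simplified sketch of all-reducing a gradient tensor with the
Communicator above, assuming a USE_DIST build launched under MPI; the buffer
limit is illustrative:

#include "singa/io/communicator.h"

void AllReduceGrad(singa::Tensor& grad) {
  static singa::Communicator comm(/*limit=*/1 << 24);  // MPI-based constructor
  comm.synch(grad);  // all-reduce across ranks, in place
  comm.wait();       // block until the NCCL operation finishes
}
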
diff --git a/include/singa/io/decoder.h b/include/singa/io/decoder.h
index bf9a1bc..00f4ca8 100644
--- a/include/singa/io/decoder.h
+++ b/include/singa/io/decoder.h
@@ -64,7 +64,7 @@
   }
   std::vector<Tensor> Decode(std::string value) override;
 
-  const bool has_label() const { return has_label_; }
+  bool has_label() const { return has_label_; }
 
  private:
   /// if true, the first value is the label
diff --git a/include/singa/io/transformer.h b/include/singa/io/transformer.h
index d9a9263..b6db1ef 100644
--- a/include/singa/io/transformer.h
+++ b/include/singa/io/transformer.h
@@ -56,12 +56,12 @@
 
   Tensor Apply(int flag, Tensor& input) override;
 
-  const bool featurewise_center() const { return featurewise_center_; }
-  const bool featurewise_std_norm() const { return featurewise_std_norm_; }
-  const bool horizontal_mirror() const { return horizontal_mirror_; }
-  const int resize_height() const { return resize_height_; }
-  const int resize_width() const { return resize_width_; }
-  const float rescale() const { return rescale_; }
+  bool featurewise_center() const { return featurewise_center_; }
+  bool featurewise_std_norm() const { return featurewise_std_norm_; }
+  bool horizontal_mirror() const { return horizontal_mirror_; }
+  int resize_height() const { return resize_height_; }
+  int resize_width() const { return resize_width_; }
+  float rescale() const { return rescale_; }
   const Shape crop_shape() const { return crop_shape_; }
   const string image_dim_order() const { return image_dim_order_; }
 
diff --git a/include/singa/utils/channel.h b/include/singa/utils/channel.h
index b640e90..e9cbefb 100644
--- a/include/singa/utils/channel.h
+++ b/include/singa/utils/channel.h
@@ -1,31 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_CHANNEL_H_
 #define SINGA_UTILS_CHANNEL_H_
 
 #include <google/protobuf/message.h>
 
-#include <iostream>
 #include <fstream>
+#include <iostream>
 #include <map>
 #include <string>
 
diff --git a/include/singa/utils/cuda_utils.h b/include/singa/utils/cuda_utils.h
index 2fe7d27..1ac99be 100644
--- a/include/singa/utils/cuda_utils.h
+++ b/include/singa/utils/cuda_utils.h
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 // from caffe include/caffe/util/device_alternative.hpp
 #ifndef SINGA_UTILS_CUDA_UTILS_H_
 #define SINGA_UTILS_CUDA_UTILS_H_
@@ -11,29 +28,29 @@
 
 inline const char* cublasGetErrorString(cublasStatus_t error) {
   switch (error) {
-  case CUBLAS_STATUS_SUCCESS:
-    return "CUBLAS_STATUS_SUCCESS";
-  case CUBLAS_STATUS_NOT_INITIALIZED:
-    return "CUBLAS_STATUS_NOT_INITIALIZED";
-  case CUBLAS_STATUS_ALLOC_FAILED:
-    return "CUBLAS_STATUS_ALLOC_FAILED";
-  case CUBLAS_STATUS_INVALID_VALUE:
-    return "CUBLAS_STATUS_INVALID_VALUE";
-  case CUBLAS_STATUS_ARCH_MISMATCH:
-    return "CUBLAS_STATUS_ARCH_MISMATCH";
-  case CUBLAS_STATUS_MAPPING_ERROR:
-    return "CUBLAS_STATUS_MAPPING_ERROR";
-  case CUBLAS_STATUS_EXECUTION_FAILED:
-    return "CUBLAS_STATUS_EXECUTION_FAILED";
-  case CUBLAS_STATUS_INTERNAL_ERROR:
-    return "CUBLAS_STATUS_INTERNAL_ERROR";
+    case CUBLAS_STATUS_SUCCESS:
+      return "CUBLAS_STATUS_SUCCESS";
+    case CUBLAS_STATUS_NOT_INITIALIZED:
+      return "CUBLAS_STATUS_NOT_INITIALIZED";
+    case CUBLAS_STATUS_ALLOC_FAILED:
+      return "CUBLAS_STATUS_ALLOC_FAILED";
+    case CUBLAS_STATUS_INVALID_VALUE:
+      return "CUBLAS_STATUS_INVALID_VALUE";
+    case CUBLAS_STATUS_ARCH_MISMATCH:
+      return "CUBLAS_STATUS_ARCH_MISMATCH";
+    case CUBLAS_STATUS_MAPPING_ERROR:
+      return "CUBLAS_STATUS_MAPPING_ERROR";
+    case CUBLAS_STATUS_EXECUTION_FAILED:
+      return "CUBLAS_STATUS_EXECUTION_FAILED";
+    case CUBLAS_STATUS_INTERNAL_ERROR:
+      return "CUBLAS_STATUS_INTERNAL_ERROR";
 #if CUDA_VERSION >= 6000
-  case CUBLAS_STATUS_NOT_SUPPORTED:
-    return "CUBLAS_STATUS_NOT_SUPPORTED";
+    case CUBLAS_STATUS_NOT_SUPPORTED:
+      return "CUBLAS_STATUS_NOT_SUPPORTED";
 #endif
 #if CUDA_VERSION >= 6050
-  case CUBLAS_STATUS_LICENSE_ERROR:
-    return "CUBLAS_STATUS_LICENSE_ERROR";
+    case CUBLAS_STATUS_LICENSE_ERROR:
+      return "CUBLAS_STATUS_LICENSE_ERROR";
 #endif
   }
   return "Unknown cublas status";
@@ -41,32 +58,32 @@
 
 inline const char* curandGetErrorString(curandStatus_t error) {
   switch (error) {
-  case CURAND_STATUS_SUCCESS:
-    return "CURAND_STATUS_SUCCESS";
-  case CURAND_STATUS_VERSION_MISMATCH:
-    return "CURAND_STATUS_VERSION_MISMATCH";
-  case CURAND_STATUS_NOT_INITIALIZED:
-    return "CURAND_STATUS_NOT_INITIALIZED";
-  case CURAND_STATUS_ALLOCATION_FAILED:
-    return "CURAND_STATUS_ALLOCATION_FAILED";
-  case CURAND_STATUS_TYPE_ERROR:
-    return "CURAND_STATUS_TYPE_ERROR";
-  case CURAND_STATUS_OUT_OF_RANGE:
-    return "CURAND_STATUS_OUT_OF_RANGE";
-  case CURAND_STATUS_LENGTH_NOT_MULTIPLE:
-    return "CURAND_STATUS_LENGTH_NOT_MULTIPLE";
-  case CURAND_STATUS_DOUBLE_PRECISION_REQUIRED:
-    return "CURAND_STATUS_DOUBLE_PRECISION_REQUIRED";
-  case CURAND_STATUS_LAUNCH_FAILURE:
-    return "CURAND_STATUS_LAUNCH_FAILURE";
-  case CURAND_STATUS_PREEXISTING_FAILURE:
-    return "CURAND_STATUS_PREEXISTING_FAILURE";
-  case CURAND_STATUS_INITIALIZATION_FAILED:
-    return "CURAND_STATUS_INITIALIZATION_FAILED";
-  case CURAND_STATUS_ARCH_MISMATCH:
-    return "CURAND_STATUS_ARCH_MISMATCH";
-  case CURAND_STATUS_INTERNAL_ERROR:
-    return "CURAND_STATUS_INTERNAL_ERROR";
+    case CURAND_STATUS_SUCCESS:
+      return "CURAND_STATUS_SUCCESS";
+    case CURAND_STATUS_VERSION_MISMATCH:
+      return "CURAND_STATUS_VERSION_MISMATCH";
+    case CURAND_STATUS_NOT_INITIALIZED:
+      return "CURAND_STATUS_NOT_INITIALIZED";
+    case CURAND_STATUS_ALLOCATION_FAILED:
+      return "CURAND_STATUS_ALLOCATION_FAILED";
+    case CURAND_STATUS_TYPE_ERROR:
+      return "CURAND_STATUS_TYPE_ERROR";
+    case CURAND_STATUS_OUT_OF_RANGE:
+      return "CURAND_STATUS_OUT_OF_RANGE";
+    case CURAND_STATUS_LENGTH_NOT_MULTIPLE:
+      return "CURAND_STATUS_LENGTH_NOT_MULTIPLE";
+    case CURAND_STATUS_DOUBLE_PRECISION_REQUIRED:
+      return "CURAND_STATUS_DOUBLE_PRECISION_REQUIRED";
+    case CURAND_STATUS_LAUNCH_FAILURE:
+      return "CURAND_STATUS_LAUNCH_FAILURE";
+    case CURAND_STATUS_PREEXISTING_FAILURE:
+      return "CURAND_STATUS_PREEXISTING_FAILURE";
+    case CURAND_STATUS_INITIALIZATION_FAILED:
+      return "CURAND_STATUS_INITIALIZATION_FAILED";
+    case CURAND_STATUS_ARCH_MISMATCH:
+      return "CURAND_STATUS_ARCH_MISMATCH";
+    case CURAND_STATUS_INTERNAL_ERROR:
+      return "CURAND_STATUS_INTERNAL_ERROR";
   }
   return "Unknown curand status";
 }
@@ -76,27 +93,26 @@
 //
 
 // CUDA: various checks for different function calls.
-#define CUDA_CHECK(condition) \
-  /* Code block avoids redefinition of cudaError_t error */ \
-  do { \
-    cudaError_t error = condition; \
+#define CUDA_CHECK(condition)                                         \
+  /* Code block avoids redefinition of cudaError_t error */           \
+  do {                                                                \
+    cudaError_t error = condition;                                    \
     CHECK_EQ(error, cudaSuccess) << " " << cudaGetErrorString(error); \
   } while (0)
 
-#define CUBLAS_CHECK(condition) \
-  do { \
-    cublasStatus_t status = condition; \
-    CHECK_EQ(status, CUBLAS_STATUS_SUCCESS) << " " \
-      << cublasGetErrorString(status); \
+#define CUBLAS_CHECK(condition)                 \
+  do {                                          \
+    cublasStatus_t status = condition;          \
+    CHECK_EQ(status, CUBLAS_STATUS_SUCCESS)     \
+        << " " << cublasGetErrorString(status); \
   } while (0)
 
-#define CURAND_CHECK(condition) \
-  do { \
-    curandStatus_t status = condition; \
-    CHECK_EQ(status, CURAND_STATUS_SUCCESS) << " " \
-      << curandGetErrorString(status); \
+#define CURAND_CHECK(condition)                 \
+  do {                                          \
+    curandStatus_t status = condition;          \
+    CHECK_EQ(status, CURAND_STATUS_SUCCESS)     \
+        << " " << curandGetErrorString(status); \
   } while (0)
 
-
 #endif  // USE_CUDA
 #endif  // SINGA_UTILS_CUDA_UTILS_H_
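
Typical use of the check macros above around raw CUDA calls, as a sketch
(device id and allocation size are illustrative):

void* DeviceAlloc(size_t bytes) {
  void* ptr = nullptr;
  CUDA_CHECK(cudaSetDevice(0));
  CUDA_CHECK(cudaMalloc(&ptr, bytes));  // CHECK_EQ aborts with file:line info
  return ptr;
}
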
diff --git a/include/singa/utils/dnnl_utils.h b/include/singa/utils/dnnl_utils.h
new file mode 100644
index 0000000..7fc7f50
--- /dev/null
+++ b/include/singa/utils/dnnl_utils.h
@@ -0,0 +1,54 @@
+/************************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+#ifndef SINGA_UTILS_MKLDNN_UTILS_H_
+#define SINGA_UTILS_MKLDNN_UTILS_H_
+
+namespace singa {
+
+using namespace dnnl;
+
+inline dnnl::memory::format_tag get_dnnl_format_tag(const Tensor &x) {
+  memory::format_tag format_tag_;
+  switch (x.nDim()) {
+    case 1: {
+      format_tag_ = memory::format_tag::a;
+      break;
+    }
+    case 2: {
+      format_tag_ = memory::format_tag::ab;
+      break;
+    }
+    case 3: {
+      format_tag_ = memory::format_tag::abc;
+      break;
+    }
+    case 4: {
+      format_tag_ = memory::format_tag::abcd;
+      break;
+    }
+    default: {
+      LOG(FATAL) << x.nDim() << " dim is not supported";
+    }
+  }
+  return format_tag_;
+}
+}  // namespace singa
+#endif  // SINGA_UTILS_MKLDNN_UTILS_H_
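
A sketch of feeding the helper above into a DNNL memory descriptor; the dims
come from the tensor, and f32 is an assumed data type:

dnnl::memory::desc MakeDesc(const singa::Tensor& x) {
  dnnl::memory::dims dims(x.shape().begin(), x.shape().end());
  return dnnl::memory::desc(dims, dnnl::memory::data_type::f32,
                            singa::get_dnnl_format_tag(x));
}
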
diff --git a/include/singa/utils/factory.h b/include/singa/utils/factory.h
index b53caef..472b34c 100644
--- a/include/singa/utils/factory.h
+++ b/include/singa/utils/factory.h
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_FACTORY_H_
 #define SINGA_UTILS_FACTORY_H_
@@ -32,7 +32,7 @@
  * returns pointer to the base class.
  */
 #define CreateInstance(SubClass, BaseClass) \
-  [](void)->BaseClass* {return new SubClass();}
+  [](void) -> BaseClass* { return new SubClass(); }
 
 /**
  * Factory template to generate class (or a sub-class) object based on id.
@@ -41,7 +41,7 @@
  * 2. call Create() func to call the creation function and return
  * a pointer to the base calss.
  */
-template<typename T, typename ID = std::string>
+template <typename T, typename ID = std::string>
 class Factory {
  public:
   /**
@@ -51,8 +51,7 @@
    * @param id Identifier of the creating function/class
    * @param func a function that creates a layer instance
    */
-  static void Register(const ID& id,
-                       const std::function<T*(void)>& creator) {
+  static void Register(const ID& id, const std::function<T*(void)>& creator) {
     Registry* reg = GetRegistry();
     // CHECK(reg->find(id) == reg->end())
     //  << "The id " << id << " has been registered";
@@ -67,14 +66,13 @@
   static T* Create(const ID& id) {
     Registry* reg = GetRegistry();
     CHECK(reg->find(id) != reg->end())
-      << "The creation function for " << id << " has not been registered";
+        << "The creation function for " << id << " has not been registered";
     return (*reg)[id]();
   }
 
   static const std::vector<ID> GetIDs() {
     std::vector<ID> keys;
-    for (const auto entry : *GetRegistry())
-      keys.push_back(entry.first);
+    for (const auto entry : *GetRegistry()) keys.push_back(entry.first);
     return keys;
   }
 
@@ -87,7 +85,7 @@
   }
 };
 
-template<typename Base, typename Sub, typename ID = std::string>
+template <typename Base, typename Sub, typename ID = std::string>
 class Registra {
  public:
   Registra(const ID& id) {
diff --git a/include/singa/utils/integer.h b/include/singa/utils/integer.h
index 9c2799d..e8dc903 100644
--- a/include/singa/utils/integer.h
+++ b/include/singa/utils/integer.h
@@ -24,50 +24,47 @@
 
 #include <cstdint>
 
-namespace singa{
+namespace singa {
 static bool isNetworkOrder() {
-    int test = 1;
-    return (1 != *(uint8_t*)&test);
+  int test = 1;
+  return (1 != *(uint8_t*)&test);
 }
 
 template <typename T>
 static inline T byteSwap(const T& v) {
-    int size = sizeof(v);
-    T ret;
-    uint8_t *dest = reinterpret_cast<uint8_t *>(&ret);
-    uint8_t *src = const_cast<uint8_t*>(reinterpret_cast<const uint8_t*>(&v));
-    for (int i = 0; i < size; ++i) {
-        dest[i] = src[size - i - 1];
-    }
-    return ret;
+  int size = sizeof(v);
+  T ret;
+  uint8_t* dest = reinterpret_cast<uint8_t*>(&ret);
+  uint8_t* src = const_cast<uint8_t*>(reinterpret_cast<const uint8_t*>(&v));
+  for (int i = 0; i < size; ++i) {
+    dest[i] = src[size - i - 1];
+  }
+  return ret;
 }
 
 template <typename T>
-static inline T hton(const T& v)
-{
-    return isNetworkOrder() ? v : byteSwap(v);
+static inline T hton(const T& v) {
+  return isNetworkOrder() ? v : byteSwap(v);
 }
 
 template <typename T>
-static inline T ntoh(const T& v) 
-{
-    return hton(v);
+static inline T ntoh(const T& v) {
+  return hton(v);
 }
 
-static inline int appendInteger(char* buf) {return 0;}
-static inline int readInteger(char* buf) {return 0;}
+static inline int appendInteger(char* buf) { return 0; }
+static inline int readInteger(char* buf) { return 0; }
 
-template<typename Type, typename... Types>
+template <typename Type, typename... Types>
 static int appendInteger(char* buf, Type value, Types... values) {
-    *(Type*)buf = hton(value);
-    return sizeof(Type) + appendInteger(buf + sizeof(Type), values...);
+  *(Type*)buf = hton(value);
+  return sizeof(Type) + appendInteger(buf + sizeof(Type), values...);
 }
 
-template<typename Type, typename... Types>
+template <typename Type, typename... Types>
 static int readInteger(char* buf, Type& value, Types&... values) {
-    value = ntoh(*(Type*)buf);
-    return sizeof(Type) + readInteger(buf + sizeof(Type), values...);
+  value = ntoh(*(Type*)buf);
+  return sizeof(Type) + readInteger(buf + sizeof(Type), values...);
 }
-
-}
+}  // namespace singa
 #endif
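
A sketch of the variadic helpers above: two integers packed into a buffer in
network byte order and read back (buffer size shown is for uint32_t plus
uint16_t):

void PackUnpack() {
  char buf[sizeof(uint32_t) + sizeof(uint16_t)];
  uint32_t a = 42;
  uint16_t b = 7;
  int n = singa::appendInteger(buf, a, b);  // writes 6 bytes, host-to-network
  uint32_t a2;
  uint16_t b2;
  singa::readInteger(buf, a2, b2);  // a2 == 42, b2 == 7
  (void)n;
}
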
diff --git a/include/singa/utils/logging.h b/include/singa/utils/logging.h
index 00ac02b..9b9e643 100644
--- a/include/singa/utils/logging.h
+++ b/include/singa/utils/logging.h
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 // adapted from google::tensorflow::logging
 
@@ -25,6 +25,7 @@
 #define SINGA_UTILS_LOGGING_H_
 
 #include <stdlib.h>
+
 #include <sstream>
 #include <string>
 #ifdef USE_GLOG
@@ -34,7 +35,7 @@
 namespace singa {
 
 /// Global functions for both glog and built-in log
-void InitLogging(const char *argv);
+void InitLogging(const char* argv);
 /// Make it so that all log messages go only to stderr
 void LogToStderr();
 /// Make it so that all log messages of at least a particular severity are
@@ -83,8 +84,7 @@
   ::singa::logging::LogMessage(__FILE__, __LINE__, singa::WARNING)
 #define _SINGA_LOG_ERROR \
   ::singa::logging::LogMessage(__FILE__, __LINE__, singa::ERROR)
-#define _SINGA_LOG_FATAL \
-  ::singa::logging::LogMessageFatal(__FILE__, __LINE__)
+#define _SINGA_LOG_FATAL ::singa::logging::LogMessageFatal(__FILE__, __LINE__)
 
 #define LOG(severity) _SINGA_LOG_##severity
 
@@ -92,17 +92,16 @@
 /// controlled by NDEBUG, so the check will be executed regardless of
 /// compilation mode.  Therefore, it is safe to do things like:
 ///    CHECK(fp->Write(x) == 4)
-#define CHECK(condition)              \
-  if (!(condition)) \
-  LOG(FATAL) << "Check failed: " #condition " "
+#define CHECK(condition) \
+  if (!(condition)) LOG(FATAL) << "Check failed: " #condition " "
 
 // Function is overloaded for integral types to allow static const
 // integrals declared in classes and not defined to be used as arguments to
 // CHECK* macros. It's not encouraged though.
 template <typename T>
-  inline const T& GetReferenceableValue(const T& t) {
-    return t;
-  }
+inline const T& GetReferenceableValue(const T& t) {
+  return t;
+}
 inline char GetReferenceableValue(char t) { return t; }
 inline unsigned char GetReferenceableValue(unsigned char t) { return t; }
 inline signed char GetReferenceableValue(signed char t) { return t; }
@@ -149,8 +148,7 @@
 
 // Build the error message string. Specify no inlining for code size.
 template <typename T1, typename T2>
-string* MakeCheckOpString(const T1& v1, const T2& v2,
-    const char* exprtext);
+string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext);
 
 // A helper class for formatting "expr (V1 vs. V2)" in a CHECK_XX
 // statement.  See MakeCheckOpString for sample usage.  Other
@@ -187,17 +185,17 @@
 // The (int, int) specialization works around the issue that the compiler
 // will not instantiate the template version of the function on values of
 // unnamed enum type - see comment below.
-#define SINGA_DEFINE_CHECK_OP_IMPL(name, op)                         \
-  template <typename T1, typename T2>                                \
-  inline string* name##Impl(const T1& v1, const T2& v2,              \
-                            const char* exprtext) {                  \
-    if (v1 op v2)                                                    \
-      return NULL;                                                   \
-    else                                                             \
+#define SINGA_DEFINE_CHECK_OP_IMPL(name, op)                        \
+  template <typename T1, typename T2>                               \
+  inline string* name##Impl(const T1& v1, const T2& v2,             \
+                            const char* exprtext) {                 \
+    if (v1 op v2)                                                   \
+      return NULL;                                                  \
+    else                                                            \
       return ::singa::logging::MakeCheckOpString(v1, v2, exprtext); \
-  }                                                                  \
-  inline string* name##Impl(int v1, int v2, const char* exprtext) {  \
-    return name##Impl<int, int>(v1, v2, exprtext);                   \
+  }                                                                 \
+  inline string* name##Impl(int v1, int v2, const char* exprtext) { \
+    return name##Impl<int, int>(v1, v2, exprtext);                  \
   }
 
 // We use the full name Check_EQ, Check_NE, etc. in case the file including
@@ -205,12 +203,12 @@
 // This happens if, for example, those are used as token names in a
 // yacc grammar.
 SINGA_DEFINE_CHECK_OP_IMPL(Check_EQ,
-                           == )  // Compilation error with CHECK_EQ(NULL, x)?
-SINGA_DEFINE_CHECK_OP_IMPL(Check_NE, != )  // Use CHECK(x == NULL) instead.
-SINGA_DEFINE_CHECK_OP_IMPL(Check_LE, <= )
-SINGA_DEFINE_CHECK_OP_IMPL(Check_LT, < )
-SINGA_DEFINE_CHECK_OP_IMPL(Check_GE, >= )
-SINGA_DEFINE_CHECK_OP_IMPL(Check_GT, > )
+                           ==)  // Compilation error with CHECK_EQ(NULL, x)?
+SINGA_DEFINE_CHECK_OP_IMPL(Check_NE, !=)  // Use CHECK(x == NULL) instead.
+SINGA_DEFINE_CHECK_OP_IMPL(Check_LE, <=)
+SINGA_DEFINE_CHECK_OP_IMPL(Check_LT, <)
+SINGA_DEFINE_CHECK_OP_IMPL(Check_GE, >=)
+SINGA_DEFINE_CHECK_OP_IMPL(Check_GT, >)
 #undef SINGA_DEFINE_CHECK_OP_IMPL
 
 // In optimized mode, use CheckOpString to hint to compiler that
@@ -232,9 +230,9 @@
 #define CHECK_LT(val1, val2) CHECK_OP(Check_LT, <, val1, val2)
 #define CHECK_GE(val1, val2) CHECK_OP(Check_GE, >=, val1, val2)
 #define CHECK_GT(val1, val2) CHECK_OP(Check_GT, >, val1, val2)
-#define CHECK_NOTNULL(val)                            \
+#define CHECK_NOTNULL(val)                           \
   ::singa::logging::CheckNotNull(__FILE__, __LINE__, \
-                                  "'" #val "' Must be non NULL", (val))
+                                 "'" #val "' Must be non NULL", (val))
 
 #ifndef NDEBUG
 // DCHECK_EQ/NE/...
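For orientation amid the macro reflow above, a minimal usage sketch — hedged, relying only on the declarations visible in this hunk (InitLogging's new const char* signature, CHECK, CHECK_EQ, and the LOG(severity) dispatch):

#include "singa/utils/logging.h"

int main(int argc, char** argv) {
  singa::InitLogging(argv[0]);         // signature after this patch: const char*
  int nread = 4;                       // stand-in for a real I/O result
  CHECK(nread == 4) << "short read";   // executed in all build modes, per the comment above
  CHECK_EQ(nread, 4);                  // dispatches through Check_EQ / CHECK_OP
  LOG(WARNING) << "checks passed";     // expands to _SINGA_LOG_WARNING
  return 0;
}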
diff --git a/include/singa/utils/opencl_utils.h b/include/singa/utils/opencl_utils.h
index 0445f13..471f396 100644
--- a/include/singa/utils/opencl_utils.h
+++ b/include/singa/utils/opencl_utils.h
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_OPENCL_UTILS_H_
 #define SINGA_UTILS_OPENCL_UTILS_H_
@@ -30,41 +30,39 @@
 #define CL_HPP_TARGET_OPENCL_VERSION 120
 
 #ifndef VIENNACL_WITH_OPENCL
-  #define VIENNACL_WITH_OPENCL
+#define VIENNACL_WITH_OPENCL
 #endif
 
 #ifndef __APPLE__
-  #include "CL/cl.h"
+#include "CL/cl.h"
 #else
-  #include "OpenCL/cl.h"
+#include "OpenCL/cl.h"
 #endif
 
 #include <viennacl/backend/opencl.hpp>
-
-#include <viennacl/ocl/device.hpp>
-#include <viennacl/ocl/platform.hpp>
 #include <viennacl/ocl/backend.hpp>
+#include <viennacl/ocl/device.hpp>
 #include <viennacl/ocl/device_utils.hpp>
-#include <viennacl/ocl/utils.hpp>
-#include <viennacl/ocl/program.hpp>
 #include <viennacl/ocl/kernel.hpp>
+#include <viennacl/ocl/platform.hpp>
+#include <viennacl/ocl/program.hpp>
+#include <viennacl/ocl/utils.hpp>
 
-
-inline viennacl::ocl::handle<cl_mem>
-WrapHandle(cl_mem in, viennacl::ocl::context &ctx) {
+inline viennacl::ocl::handle<cl_mem> WrapHandle(cl_mem in,
+                                                viennacl::ocl::context &ctx) {
   if (in != nullptr) {
     viennacl::ocl::handle<cl_mem> memhandle(in, ctx);
     memhandle.inc();
     return memhandle;
   } else {
     cl_int err;
-    cl_mem dummy = clCreateBuffer(ctx.handle().get(), CL_MEM_READ_WRITE, 0,
-                                  nullptr, &err);
+    cl_mem dummy =
+        clCreateBuffer(ctx.handle().get(), CL_MEM_READ_WRITE, 0, nullptr, &err);
     viennacl::ocl::handle<cl_mem> memhandle(dummy, ctx);
     return memhandle;
   }
 }
 
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
 
-#endif // SINGA_UTILS_OPENCL_UTILS_H_
+#endif  // SINGA_UTILS_OPENCL_UTILS_H_
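WrapHandle, reflowed above, either adopts a live cl_mem (calling inc() so the reference count survives the wrap) or, for a nullptr, fabricates a zero-sized placeholder buffer. A hedged caller-side sketch, assuming an OpenCL build and ViennaCL's current_context() accessor:

#include "singa/utils/opencl_utils.h"

void Demo(cl_mem maybe_null) {
  viennacl::ocl::context &ctx = viennacl::ocl::current_context();
  // Works for both cases: a real buffer gets its refcount bumped,
  // nullptr is replaced by a zero-sized dummy allocation.
  viennacl::ocl::handle<cl_mem> h = WrapHandle(maybe_null, ctx);
}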
diff --git a/include/singa/utils/safe_queue.h b/include/singa/utils/safe_queue.h
index 1f791a4..ae79c85 100644
--- a/include/singa/utils/safe_queue.h
+++ b/include/singa/utils/safe_queue.h
@@ -1,32 +1,32 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_SAFE_QUEUE_H_
 #define SINGA_UTILS_SAFE_QUEUE_H_
 
 #include <algorithm>
-#include <queue>
+#include <condition_variable>
 #include <list>
 #include <mutex>
-#include <condition_variable>
+#include <queue>
 #include <thread>
 
 /**
@@ -36,9 +36,7 @@
 class SafeQueue {
  public:
   SafeQueue() = default;
-  ~SafeQueue() {
-    std::lock_guard<std::mutex> lock(mutex_);
-  }
+  ~SafeQueue() { std::lock_guard<std::mutex> lock(mutex_); }
 
   /**
    * Push an element into the queue. Blocking operation.
@@ -69,11 +67,10 @@
     std::unique_lock<std::mutex> lock(mutex_);
 
     if (queue_.empty()) {
-      if (timeout == 0)
-        return false;
+      if (timeout == 0) return false;
 
-      if (condition_.wait_for(lock, std::chrono::microseconds(timeout))
-          == std::cv_status::timeout)
+      if (condition_.wait_for(lock, std::chrono::microseconds(timeout)) ==
+          std::cv_status::timeout)
         return false;
     }
 
@@ -89,15 +86,13 @@
   bool TryPop(T& e) {
     std::unique_lock<std::mutex> lock(mutex_);
 
-    if (queue_.empty())
-      return false;
+    if (queue_.empty()) return false;
 
     e = queue_.front();
     queue_.pop();
     return true;
   }
 
-
   /**
    * @return Number of elements in the queue.
    */
@@ -115,7 +110,7 @@
 /**
  * Thread safe priority queue.
  */
-template<typename T>
+template <typename T>
 class PriorityQueue {
  public:
   PriorityQueue() = default;
@@ -174,15 +169,13 @@
   /**
    * @return Number of elements in the queue.
    */
-  unsigned int Size() const {
-    return queue_.Size();
-  }
+  unsigned int Size() const { return queue_.Size(); }
 
  private:
   struct Element {
     T data;
     int priority;
-    inline bool operator<(const Element &other) const {
+    inline bool operator<(const Element& other) const {
       return priority < other.priority;
     }
   };

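With the stray blank lines collapsed, the queue API is legible again; a hedged producer/consumer sketch (it assumes SafeQueue is templated on the element type, like the PriorityQueue below it, and that Push enqueues a copy):

#include <iostream>
#include <thread>
#include "singa/utils/safe_queue.h"

int main() {
  SafeQueue<int> q;
  std::thread producer([&q] {
    for (int i = 0; i < 3; ++i) q.Push(i);
  });
  int v, seen = 0;
  while (seen < 3) {
    if (q.TryPop(v)) {            // non-blocking; signature shown in the hunk above
      std::cout << v << '\n';
      ++seen;
    } else {
      std::this_thread::yield();  // back off until the producer catches up
    }
  }
  producer.join();
  return 0;
}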
diff --git a/include/singa/utils/singleton.h b/include/singa/utils/singleton.h
index de831c4..c395924 100644
--- a/include/singa/utils/singleton.h
+++ b/include/singa/utils/singleton.h
@@ -1,30 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_SINGLETON_H_
 #define SINGA_UTILS_SINGLETON_H_
 
 /// Thread-safe implementation for C++11 according to
 //  http://stackoverflow.com/questions/2576022/efficient-thread-safe-singleton-in-c
-template<typename T>
+template <typename T>
 class Singleton {
  public:
   static T* Instance() {
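The singleton relies on C++11's guarantee that a function-local static is initialized exactly once even under concurrent first calls (per the Stack Overflow link above); a short sketch with a hypothetical payload type:

#include "singa/utils/singleton.h"

struct Context { int device_id = 0; };  // hypothetical payload, for illustration only

void Configure() {
  Singleton<Context>::Instance()->device_id = 1;       // first call constructs it
  int id = Singleton<Context>::Instance()->device_id;  // same instance: id == 1
  (void)id;
}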
diff --git a/include/singa/utils/string.h b/include/singa/utils/string.h
index 35177e2..db31354 100644
--- a/include/singa/utils/string.h
+++ b/include/singa/utils/string.h
@@ -1,29 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #ifndef SINGA_UTILS_TOKENIZER_H_
 #define SINGA_UTILS_TOKENIZER_H_
 
-#include <string>
 #include <algorithm>
+#include <string>
+
 #include "singa/utils/logging.h"
 
 namespace singa {
@@ -51,8 +52,8 @@
   return -1;
 }
 
-template<typename T>
-inline std::string VecToStr(const std::vector<T> & in) {
+template <typename T>
+inline std::string VecToStr(const std::vector<T>& in) {
   std::string out = "(";
 
   for (auto x : in) {
@@ -76,14 +77,13 @@
 
 class Tokenizer {
  public:
-  Tokenizer(const std::string& str, const std::string& sep): start_(0),
-  sep_(sep), buf_(str) {}
-  Tokenizer & operator>>(std::string& out) {
+  Tokenizer(const std::string& str, const std::string& sep)
+      : start_(0), sep_(sep), buf_(str) {}
+  Tokenizer& operator>>(std::string& out) {
     CHECK_LT(start_, buf_.length());
     int start = start_;
     auto pos = buf_.find_first_of(sep_, start);
-    if (pos == std::string::npos)
-      pos = buf_.length();
+    if (pos == std::string::npos) pos = buf_.length();
     start_ = (unsigned int)(pos + 1);
     out = buf_.substr(start, pos);
     return *this;
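The reconstructed constructor initializer list and operator>> read much better. One caution for callers: CHECK_LT aborts if extraction is attempted after the buffer is exhausted, and the pre-existing substr(start, pos) call appears to pass an end offset where std::string::substr expects a count, so tokens after the first may carry extra text. A hedged sketch that relies only on the first extraction:

#include <string>
#include "singa/utils/string.h"

void Demo() {
  singa::Tokenizer t("conv,relu,pool", ",");
  std::string tok;
  t >> tok;  // first token: "conv" (substr from offset 0 up to the first separator)
  t >> tok;  // advances past the separator; CHECK_LT fires once input runs out
}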
diff --git a/include/singa/utils/timer.h b/include/singa/utils/timer.h
index 291c733..fa30b14 100644
--- a/include/singa/utils/timer.h
+++ b/include/singa/utils/timer.h
@@ -40,12 +40,13 @@
   /// The returned value is the count of the time metric.
   template <typename T = Milliseconds>
   int Elapsed() const {
-    static_assert(std::is_same<T, Seconds>::value ||
-                      std::is_same<T, Milliseconds>::value ||
-                      std::is_same<T, Hours>::value ||
-                      std::is_same<T, Microseconds>::value,
-                  "Template arg must be Seconds | Milliseconds | Hours | Microseconds");
-    auto now  = std::chrono::high_resolution_clock::now();
+    static_assert(
+        std::is_same<T, Seconds>::value ||
+            std::is_same<T, Milliseconds>::value ||
+            std::is_same<T, Hours>::value ||
+            std::is_same<T, Microseconds>::value,
+        "Template arg must be Seconds | Milliseconds | Hours | Microseconds");
+    auto now = std::chrono::high_resolution_clock::now();
     return std::chrono::duration_cast<T>(now - last_).count();
   }
   /// Return the string rep of current wall time
@@ -54,5 +55,5 @@
  private:
   std::chrono::high_resolution_clock::time_point last_;
 };
-}
+}  // namespace singa
 #endif
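The static_assert is now one readable expression over the four accepted duration aliases; usage is unchanged. A hedged sketch, assuming the class is singa::Timer, its constructor records the start point, and the aliases are nested in the class:

#include <iostream>
#include "singa/utils/timer.h"

void Profile() {
  singa::Timer timer;  // assumed to capture the start time on construction
  // ... workload under measurement ...
  std::cout << timer.Elapsed() << " ms\n";  // template default is Milliseconds;
                                            // Seconds/Microseconds/Hours also accepted
}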
diff --git a/include/singa/utils/tinydir.h b/include/singa/utils/tinydir.h
index abb7000..0702f94 100644
--- a/include/singa/utils/tinydir.h
+++ b/include/singa/utils/tinydir.h
@@ -32,7 +32,7 @@
 #define WIN32_LEAN_AND_MEAN
 #include <windows.h>
 #ifdef _MSC_VER
-#pragma warning (disable : 4996)
+#pragma warning(disable : 4996)
 #endif
 #else
 #include <dirent.h>
@@ -40,7 +40,6 @@
 #include <sys/stat.h>
 #endif
 
-
 /* types */
 
 #define _TINYDIR_PATH_MAX 4096
@@ -58,49 +57,47 @@
 #define _TINYDIR_FUNC static __inline__
 #endif
 
-/* Allow user to use a custom allocator by defining _TINYDIR_MALLOC and _TINYDIR_FREE. */
-#if    defined(_TINYDIR_MALLOC) &&  defined(_TINYDIR_FREE)
+/* Allow user to use a custom allocator by defining _TINYDIR_MALLOC and
+ * _TINYDIR_FREE. */
+#if defined(_TINYDIR_MALLOC) && defined(_TINYDIR_FREE)
 #elif !defined(_TINYDIR_MALLOC) && !defined(_TINYDIR_FREE)
 #else
 #error "Either define both alloc and free or none of them!"
 #endif
 
 #if !defined(_TINYDIR_MALLOC)
-	#define _TINYDIR_MALLOC(_size) malloc(_size)
-	#define _TINYDIR_FREE(_ptr)    free(_ptr)
-#endif //!defined(_TINYDIR_MALLOC)
+#define _TINYDIR_MALLOC(_size) malloc(_size)
+#define _TINYDIR_FREE(_ptr) free(_ptr)
+#endif  //! defined(_TINYDIR_MALLOC)
 
-typedef struct
-{
-	char path[_TINYDIR_PATH_MAX];
-	char name[_TINYDIR_FILENAME_MAX];
-	char *extension;
-	int is_dir;
-	int is_reg;
+typedef struct {
+  char path[_TINYDIR_PATH_MAX];
+  char name[_TINYDIR_FILENAME_MAX];
+  char *extension;
+  int is_dir;
+  int is_reg;
 
 #ifdef _WIN32
 #else
-	struct stat _s;
+  struct stat _s;
 #endif
 } tinydir_file;
 
-typedef struct
-{
-	char path[_TINYDIR_PATH_MAX];
-	int has_next;
-	size_t n_files;
+typedef struct {
+  char path[_TINYDIR_PATH_MAX];
+  int has_next;
+  size_t n_files;
 
-	tinydir_file *_files;
+  tinydir_file *_files;
 #ifdef _WIN32
-	HANDLE _h;
-	WIN32_FIND_DATAA _f;
+  HANDLE _h;
+  WIN32_FIND_DATAA _f;
 #else
-	DIR *_d;
-	struct dirent *_e;
+  DIR *_d;
+  struct dirent *_e;
 #endif
 } tinydir_dir;
 
-
 /* declarations */
 
 _TINYDIR_FUNC
@@ -124,439 +121,385 @@
 _TINYDIR_FUNC
 int _tinydir_file_cmp(const void *a, const void *b);
 
-
 /* definitions*/
 
 _TINYDIR_FUNC
-int tinydir_open(tinydir_dir *dir, const char *path)
-{
-	if (dir == NULL || path == NULL || strlen(path) == 0)
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	if (strlen(path) + _TINYDIR_PATH_EXTRA >= _TINYDIR_PATH_MAX)
-	{
-		errno = ENAMETOOLONG;
-		return -1;
-	}
+int tinydir_open(tinydir_dir *dir, const char *path) {
+  if (dir == NULL || path == NULL || strlen(path) == 0) {
+    errno = EINVAL;
+    return -1;
+  }
+  if (strlen(path) + _TINYDIR_PATH_EXTRA >= _TINYDIR_PATH_MAX) {
+    errno = ENAMETOOLONG;
+    return -1;
+  }
 
-	/* initialise dir */
-	dir->_files = NULL;
+  /* initialise dir */
+  dir->_files = NULL;
 #ifdef _WIN32
-	dir->_h = INVALID_HANDLE_VALUE;
+  dir->_h = INVALID_HANDLE_VALUE;
 #else
-	dir->_d = NULL;
+  dir->_d = NULL;
 #endif
-	tinydir_close(dir);
+  tinydir_close(dir);
 
-	strcpy(dir->path, path);
+  strcpy(dir->path, path);
 #ifdef _WIN32
-	strcat(dir->path, "\\*");
-	dir->_h = FindFirstFileA(dir->path, &dir->_f);
-	dir->path[strlen(dir->path) - 2] = '\0';
-	if (dir->_h == INVALID_HANDLE_VALUE)
+  strcat(dir->path, "\\*");
+  dir->_h = FindFirstFileA(dir->path, &dir->_f);
+  dir->path[strlen(dir->path) - 2] = '\0';
+  if (dir->_h == INVALID_HANDLE_VALUE)
 #else
-	dir->_d = opendir(path);
-	if (dir->_d == NULL)
+  dir->_d = opendir(path);
+  if (dir->_d == NULL)
 #endif
-	{
-		errno = ENOENT;
-		goto bail;
-	}
+  {
+    errno = ENOENT;
+    goto bail;
+  }
 
-	/* read first file */
-	dir->has_next = 1;
+  /* read first file */
+  dir->has_next = 1;
 #ifndef _WIN32
-	dir->_e = readdir(dir->_d);
-	if (dir->_e == NULL)
-	{
-		dir->has_next = 0;
-	}
+  dir->_e = readdir(dir->_d);
+  if (dir->_e == NULL) {
+    dir->has_next = 0;
+  }
 #endif
 
-	return 0;
+  return 0;
 
 bail:
-	tinydir_close(dir);
-	return -1;
+  tinydir_close(dir);
+  return -1;
 }
 
 _TINYDIR_FUNC
-int tinydir_open_sorted(tinydir_dir *dir, const char *path)
-{
-	/* Count the number of files first, to pre-allocate the files array */
-	size_t n_files = 0;
-	if (tinydir_open(dir, path) == -1)
-	{
-		return -1;
-	}
-	while (dir->has_next)
-	{
-		n_files++;
-		if (tinydir_next(dir) == -1)
-		{
-			goto bail;
-		}
-	}
-	tinydir_close(dir);
+int tinydir_open_sorted(tinydir_dir *dir, const char *path) {
+  /* Count the number of files first, to pre-allocate the files array */
+  size_t n_files = 0;
+  if (tinydir_open(dir, path) == -1) {
+    return -1;
+  }
+  while (dir->has_next) {
+    n_files++;
+    if (tinydir_next(dir) == -1) {
+      goto bail;
+    }
+  }
+  tinydir_close(dir);
 
-	if (tinydir_open(dir, path) == -1)
-	{
-		return -1;
-	}
+  if (tinydir_open(dir, path) == -1) {
+    return -1;
+  }
 
-	dir->n_files = 0;
-	dir->_files = (tinydir_file *)_TINYDIR_MALLOC(sizeof *dir->_files * n_files);
-	if (dir->_files == NULL)
-	{
-		errno = ENOMEM;
-		goto bail;
-	}
-	while (dir->has_next)
-	{
-		tinydir_file *p_file;
-		dir->n_files++;
+  dir->n_files = 0;
+  dir->_files = (tinydir_file *)_TINYDIR_MALLOC(sizeof *dir->_files * n_files);
+  if (dir->_files == NULL) {
+    errno = ENOMEM;
+    goto bail;
+  }
+  while (dir->has_next) {
+    tinydir_file *p_file;
+    dir->n_files++;
 
-		p_file = &dir->_files[dir->n_files - 1];
-		if (tinydir_readfile(dir, p_file) == -1)
-		{
-			goto bail;
-		}
+    p_file = &dir->_files[dir->n_files - 1];
+    if (tinydir_readfile(dir, p_file) == -1) {
+      goto bail;
+    }
 
-		if (tinydir_next(dir) == -1)
-		{
-			goto bail;
-		}
+    if (tinydir_next(dir) == -1) {
+      goto bail;
+    }
 
-		/* Just in case the number of files has changed between the first and
-		second reads, terminate without writing into unallocated memory */
-		if (dir->n_files == n_files)
-		{
-			break;
-		}
-	}
+    /* Just in case the number of files has changed between the first and
+    second reads, terminate without writing into unallocated memory */
+    if (dir->n_files == n_files) {
+      break;
+    }
+  }
 
-	qsort(dir->_files, dir->n_files, sizeof(tinydir_file), _tinydir_file_cmp);
+  qsort(dir->_files, dir->n_files, sizeof(tinydir_file), _tinydir_file_cmp);
 
-	return 0;
+  return 0;
 
 bail:
-	tinydir_close(dir);
-	return -1;
+  tinydir_close(dir);
+  return -1;
 }
 
 _TINYDIR_FUNC
-void tinydir_close(tinydir_dir *dir)
-{
-	if (dir == NULL)
-	{
-		return;
-	}
+void tinydir_close(tinydir_dir *dir) {
+  if (dir == NULL) {
+    return;
+  }
 
-	memset(dir->path, 0, sizeof(dir->path));
-	dir->has_next = 0;
-	dir->n_files = 0;
-	if (dir->_files != NULL)
-	{
-		_TINYDIR_FREE(dir->_files);
-	}
-	dir->_files = NULL;
+  memset(dir->path, 0, sizeof(dir->path));
+  dir->has_next = 0;
+  dir->n_files = 0;
+  if (dir->_files != NULL) {
+    _TINYDIR_FREE(dir->_files);
+  }
+  dir->_files = NULL;
 #ifdef _WIN32
-	if (dir->_h != INVALID_HANDLE_VALUE)
-	{
-		FindClose(dir->_h);
-	}
-	dir->_h = INVALID_HANDLE_VALUE;
+  if (dir->_h != INVALID_HANDLE_VALUE) {
+    FindClose(dir->_h);
+  }
+  dir->_h = INVALID_HANDLE_VALUE;
 #else
-	if (dir->_d)
-	{
-		closedir(dir->_d);
-	}
-	dir->_d = NULL;
-	dir->_e = NULL;
+  if (dir->_d) {
+    closedir(dir->_d);
+  }
+  dir->_d = NULL;
+  dir->_e = NULL;
 #endif
 }
 
 _TINYDIR_FUNC
-int tinydir_next(tinydir_dir *dir)
-{
-	if (dir == NULL)
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	if (!dir->has_next)
-	{
-		errno = ENOENT;
-		return -1;
-	}
+int tinydir_next(tinydir_dir *dir) {
+  if (dir == NULL) {
+    errno = EINVAL;
+    return -1;
+  }
+  if (!dir->has_next) {
+    errno = ENOENT;
+    return -1;
+  }
 
 #ifdef _WIN32
-	if (FindNextFileA(dir->_h, &dir->_f) == 0)
+  if (FindNextFileA(dir->_h, &dir->_f) == 0)
 #else
-	dir->_e = readdir(dir->_d);
-	if (dir->_e == NULL)
+  dir->_e = readdir(dir->_d);
+  if (dir->_e == NULL)
 #endif
-	{
-		dir->has_next = 0;
+  {
+    dir->has_next = 0;
 #ifdef _WIN32
-		if (GetLastError() != ERROR_SUCCESS &&
-			GetLastError() != ERROR_NO_MORE_FILES)
-		{
-			tinydir_close(dir);
-			errno = EIO;
-			return -1;
-		}
+    if (GetLastError() != ERROR_SUCCESS &&
+        GetLastError() != ERROR_NO_MORE_FILES) {
+      tinydir_close(dir);
+      errno = EIO;
+      return -1;
+    }
 #endif
-	}
+  }
 
-	return 0;
+  return 0;
 }
 
 _TINYDIR_FUNC
-int tinydir_readfile(const tinydir_dir *dir, tinydir_file *file)
-{
-	if (dir == NULL || file == NULL)
-	{
-		errno = EINVAL;
-		return -1;
-	}
+int tinydir_readfile(const tinydir_dir *dir, tinydir_file *file) {
+  if (dir == NULL || file == NULL) {
+    errno = EINVAL;
+    return -1;
+  }
 #ifdef _WIN32
-	if (dir->_h == INVALID_HANDLE_VALUE)
+  if (dir->_h == INVALID_HANDLE_VALUE)
 #else
-	if (dir->_e == NULL)
+  if (dir->_e == NULL)
 #endif
-	{
-		errno = ENOENT;
-		return -1;
-	}
-	if (strlen(dir->path) +
-		strlen(
+  {
+    errno = ENOENT;
+    return -1;
+  }
+  if (strlen(dir->path) +
+          strlen(
 #ifdef _WIN32
-			dir->_f.cFileName
+              dir->_f.cFileName
 #else
-			dir->_e->d_name
+              dir->_e->d_name
 #endif
-		) + 1 + _TINYDIR_PATH_EXTRA >=
-		_TINYDIR_PATH_MAX)
-	{
-		/* the path for the file will be too long */
-		errno = ENAMETOOLONG;
-		return -1;
-	}
-	if (strlen(
+              ) +
+          1 + _TINYDIR_PATH_EXTRA >=
+      _TINYDIR_PATH_MAX) {
+    /* the path for the file will be too long */
+    errno = ENAMETOOLONG;
+    return -1;
+  }
+  if (strlen(
 #ifdef _WIN32
-			dir->_f.cFileName
+          dir->_f.cFileName
 #else
-			dir->_e->d_name
+          dir->_e->d_name
 #endif
-		) >= _TINYDIR_FILENAME_MAX)
-	{
-		errno = ENAMETOOLONG;
-		return -1;
-	}
+          ) >= _TINYDIR_FILENAME_MAX) {
+    errno = ENAMETOOLONG;
+    return -1;
+  }
 
-	strcpy(file->path, dir->path);
-	strcat(file->path, "/");
-	strcpy(file->name,
+  strcpy(file->path, dir->path);
+  strcat(file->path, "/");
+  strcpy(file->name,
 #ifdef _WIN32
-		dir->_f.cFileName
+         dir->_f.cFileName
 #else
-		dir->_e->d_name
+         dir->_e->d_name
 #endif
-	);
-	strcat(file->path, file->name);
+  );
+  strcat(file->path, file->name);
 #ifndef _WIN32
-	if (stat(file->path, &file->_s) == -1)
-	{
-		return -1;
-	}
+  if (stat(file->path, &file->_s) == -1) {
+    return -1;
+  }
 #endif
-	_tinydir_get_ext(file);
+  _tinydir_get_ext(file);
 
-	file->is_dir =
+  file->is_dir =
 #ifdef _WIN32
-		!!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY);
+      !!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY);
 #else
-		S_ISDIR(file->_s.st_mode);
+      S_ISDIR(file->_s.st_mode);
 #endif
-	file->is_reg =
+  file->is_reg =
 #ifdef _WIN32
-		!!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_NORMAL) ||
-		(
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DEVICE) &&
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) &&
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_ENCRYPTED) &&
+      !!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_NORMAL) ||
+      (!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DEVICE) &&
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) &&
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_ENCRYPTED) &&
 #ifdef FILE_ATTRIBUTE_INTEGRITY_STREAM
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_INTEGRITY_STREAM) &&
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_INTEGRITY_STREAM) &&
 #endif
 #ifdef FILE_ATTRIBUTE_NO_SCRUB_DATA
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_NO_SCRUB_DATA) &&
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_NO_SCRUB_DATA) &&
 #endif
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_OFFLINE) &&
-			!(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_TEMPORARY));
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_OFFLINE) &&
+       !(dir->_f.dwFileAttributes & FILE_ATTRIBUTE_TEMPORARY));
 #else
-		S_ISREG(file->_s.st_mode);
+      S_ISREG(file->_s.st_mode);
 #endif
 
-	return 0;
+  return 0;
 }
 
 _TINYDIR_FUNC
-int tinydir_readfile_n(const tinydir_dir *dir, tinydir_file *file, size_t i)
-{
-	if (dir == NULL || file == NULL)
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	if (i >= dir->n_files)
-	{
-		errno = ENOENT;
-		return -1;
-	}
+int tinydir_readfile_n(const tinydir_dir *dir, tinydir_file *file, size_t i) {
+  if (dir == NULL || file == NULL) {
+    errno = EINVAL;
+    return -1;
+  }
+  if (i >= dir->n_files) {
+    errno = ENOENT;
+    return -1;
+  }
 
-	memcpy(file, &dir->_files[i], sizeof(tinydir_file));
-	_tinydir_get_ext(file);
+  memcpy(file, &dir->_files[i], sizeof(tinydir_file));
+  _tinydir_get_ext(file);
 
-	return 0;
+  return 0;
 }
 
 _TINYDIR_FUNC
-int tinydir_open_subdir_n(tinydir_dir *dir, size_t i)
-{
-	char path[_TINYDIR_PATH_MAX];
-	if (dir == NULL)
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	if (i >= dir->n_files || !dir->_files[i].is_dir)
-	{
-		errno = ENOENT;
-		return -1;
-	}
+int tinydir_open_subdir_n(tinydir_dir *dir, size_t i) {
+  char path[_TINYDIR_PATH_MAX];
+  if (dir == NULL) {
+    errno = EINVAL;
+    return -1;
+  }
+  if (i >= dir->n_files || !dir->_files[i].is_dir) {
+    errno = ENOENT;
+    return -1;
+  }
 
-	strcpy(path, dir->_files[i].path);
-	tinydir_close(dir);
-	if (tinydir_open_sorted(dir, path) == -1)
-	{
-		return -1;
-	}
+  strcpy(path, dir->_files[i].path);
+  tinydir_close(dir);
+  if (tinydir_open_sorted(dir, path) == -1) {
+    return -1;
+  }
 
-	return 0;
+  return 0;
 }
 
 /* Open a single file given its path */
 _TINYDIR_FUNC
-int tinydir_file_open(tinydir_file *file, const char *path)
-{
-	tinydir_dir dir;
-	int result = 0;
-	int found = 0;
-	char dir_name_buf[_TINYDIR_PATH_MAX];
-	char file_name_buf[_TINYDIR_FILENAME_MAX];
-	char *dir_name;
-	char *base_name;
+int tinydir_file_open(tinydir_file *file, const char *path) {
+  tinydir_dir dir;
+  int result = 0;
+  int found = 0;
+  char dir_name_buf[_TINYDIR_PATH_MAX];
+  char file_name_buf[_TINYDIR_FILENAME_MAX];
+  char *dir_name;
+  char *base_name;
 #ifdef _WIN32
-	char drive_buf[_TINYDIR_PATH_MAX];
-	char ext_buf[_TINYDIR_FILENAME_MAX];
+  char drive_buf[_TINYDIR_PATH_MAX];
+  char ext_buf[_TINYDIR_FILENAME_MAX];
 #endif
-	
-	if (file == NULL || path == NULL || strlen(path) == 0)
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	if (strlen(path) + _TINYDIR_PATH_EXTRA >= _TINYDIR_PATH_MAX)
-	{
-		errno = ENAMETOOLONG;
-		return -1;
-	}
 
-	/* Get the parent path */
+  if (file == NULL || path == NULL || strlen(path) == 0) {
+    errno = EINVAL;
+    return -1;
+  }
+  if (strlen(path) + _TINYDIR_PATH_EXTRA >= _TINYDIR_PATH_MAX) {
+    errno = ENAMETOOLONG;
+    return -1;
+  }
+
+/* Get the parent path */
 #ifdef _WIN32
-	if (_splitpath_s(
-			path,
-			drive_buf, sizeof drive_buf,
-			dir_name_buf, sizeof dir_name_buf,
-			file_name_buf, sizeof file_name_buf,
-			ext_buf, sizeof ext_buf))
-	{
-		errno = EINVAL;
-		return -1;
-	}
-	/* Concatenate the drive letter and dir name to form full dir name */
-	strcat(drive_buf, dir_name_buf);
-	dir_name = drive_buf;
-	/* Concatenate the file name and extension to form base name */
-	strcat(file_name_buf, ext_buf);
-	base_name = file_name_buf;
+  if (_splitpath_s(path, drive_buf, sizeof drive_buf, dir_name_buf,
+                   sizeof dir_name_buf, file_name_buf, sizeof file_name_buf,
+                   ext_buf, sizeof ext_buf)) {
+    errno = EINVAL;
+    return -1;
+  }
+  /* Concatenate the drive letter and dir name to form full dir name */
+  strcat(drive_buf, dir_name_buf);
+  dir_name = drive_buf;
+  /* Concatenate the file name and extension to form base name */
+  strcat(file_name_buf, ext_buf);
+  base_name = file_name_buf;
 #else
-	strcpy(dir_name_buf, path);
-	dir_name = dirname(dir_name_buf);
-	strcpy(file_name_buf, path);
-	base_name = basename(file_name_buf);
+  strcpy(dir_name_buf, path);
+  dir_name = dirname(dir_name_buf);
+  strcpy(file_name_buf, path);
+  base_name = basename(file_name_buf);
 #endif
-	
-	/* Open the parent directory */
-	if (tinydir_open(&dir, dir_name) == -1)
-	{
-		return -1;
-	}
 
-	/* Read through the parent directory and look for the file */
-	while (dir.has_next)
-	{
-		if (tinydir_readfile(&dir, file) == -1)
-		{
-			result = -1;
-			goto bail;
-		}
-		if (strcmp(file->name, base_name) == 0)
-		{
-			/* File found */
-			found = 1;
-			goto bail;
-		}
-		tinydir_next(&dir);
-	}
-	if (!found)
-	{
-		result = -1;
-		errno = ENOENT;
-	}
-	
+  /* Open the parent directory */
+  if (tinydir_open(&dir, dir_name) == -1) {
+    return -1;
+  }
+
+  /* Read through the parent directory and look for the file */
+  while (dir.has_next) {
+    if (tinydir_readfile(&dir, file) == -1) {
+      result = -1;
+      goto bail;
+    }
+    if (strcmp(file->name, base_name) == 0) {
+      /* File found */
+      found = 1;
+      goto bail;
+    }
+    tinydir_next(&dir);
+  }
+  if (!found) {
+    result = -1;
+    errno = ENOENT;
+  }
+
 bail:
-	tinydir_close(&dir);
-	return result;
+  tinydir_close(&dir);
+  return result;
 }
 
 _TINYDIR_FUNC
-void _tinydir_get_ext(tinydir_file *file)
-{
-	char *period = strrchr(file->name, '.');
-	if (period == NULL)
-	{
-		file->extension = &(file->name[strlen(file->name)]);
-	}
-	else
-	{
-		file->extension = period + 1;
-	}
+void _tinydir_get_ext(tinydir_file *file) {
+  char *period = strrchr(file->name, '.');
+  if (period == NULL) {
+    file->extension = &(file->name[strlen(file->name)]);
+  } else {
+    file->extension = period + 1;
+  }
 }
 
 _TINYDIR_FUNC
-int _tinydir_file_cmp(const void *a, const void *b)
-{
-	const tinydir_file *fa = (const tinydir_file *)a;
-	const tinydir_file *fb = (const tinydir_file *)b;
-	if (fa->is_dir != fb->is_dir)
-	{
-		return -(fa->is_dir - fb->is_dir);
-	}
-	return strncmp(fa->name, fb->name, _TINYDIR_FILENAME_MAX);
+int _tinydir_file_cmp(const void *a, const void *b) {
+  const tinydir_file *fa = (const tinydir_file *)a;
+  const tinydir_file *fb = (const tinydir_file *)b;
+  if (fa->is_dir != fb->is_dir) {
+    return -(fa->is_dir - fb->is_dir);
+  }
+  return strncmp(fa->name, fb->name, _TINYDIR_FILENAME_MAX);
 }
 
 #endif
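Through all the restyling, tinydir keeps its open/iterate/close contract; a hedged listing sketch built only from the functions shown above:

#include <stdio.h>
#include "singa/utils/tinydir.h"

int ListRegularFiles(const char *path) {
  tinydir_dir dir;
  if (tinydir_open(&dir, path) == -1) return -1;   // errno set by tinydir_open
  while (dir.has_next) {
    tinydir_file file;
    if (tinydir_readfile(&dir, &file) == 0 && file.is_reg)
      printf("%s\n", file.name);                   // regular files only
    if (tinydir_next(&dir) == -1) break;           // advance; -1 signals an error
  }
  tinydir_close(&dir);
  return 0;
}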
diff --git a/java/pom.xml b/java/pom.xml
index b91d65c..1f68adb 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -31,8 +31,8 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
         <configuration>
-          <source>1.5</source>
-          <target>1.5</target>
+          <source>1.8</source>
+          <target>1.8</target>
         </configuration>
       </plugin>
     </plugins>
@@ -52,4 +52,47 @@
 	    <scope>compile</scope>
 	</dependency>
   </dependencies>
+  <profiles>
+    <profile>
+      <id>check-licence</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.rat</groupId>
+            <artifactId>apache-rat-plugin</artifactId>
+            <version>0.13</version>
+            <configuration>
+              <excludes combine.children="append">
+                <exclude>rafiki/**</exclude>
+                <exclude>test/gtest/**</exclude>
+                <exclude>examples/onnx/bert/inputs.json</exclude>
+                <exclude>examples/onnx/test_onnx_backend.py</exclude>
+                <exclude>examples/onnx/gpt2/requirements.txt</exclude>
+                <exclude>examples/onnx/ro_bert_a/requirements.txt</exclude>
+                <exclude>include/singa/utils/tinydir.h</exclude>
+                <exclude>include/singa/utils/cuda_utils.h</exclude>
+                <exclude>src/core/tensor/distribution.cl</exclude>
+                <exclude>src/model/layer/im2col.cl</exclude>
+                <exclude>src/model/layer/pooling.cl</exclude>
+                <exclude>src/proto/caffe.proto</exclude>
+                <exclude>cmake/Thirdparty/FindViennaCL.cmake</exclude>
+                <exclude>cmake/Thirdparty/FindOpenCL.cmake</exclude>
+                <exclude>cmake/Thirdparty/GetGitRevisionDescription.cmake.in</exclude>
+                <exclude>cmake/Thirdparty/GetGitRevisionDescription.cmake</exclude>
+                <exclude>src/api/numpy.i</exclude>
+                <exclude>tool/cpplint.py</exclude>
+                <exclude>tool/docker/devel/centos6/cuda10/cuda.repo</exclude>
+                <exclude>travis*.log</exclude>
+                <exclude>.gitmodules</exclude>
+                <exclude>java/target/*</exclude>
+                <exclude>miniconda.sh</exclude>
+                <exclude>**/*.json</exclude>
+              </excludes>
+              <consoleOutput>True</consoleOutput>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 </project>
diff --git a/java/src/test/java/org/apache/singa/TestTensor.java b/java/src/test/java/org/apache/singa/TestTensor.java
index c66f41a..7ef294e 100644
--- a/java/src/test/java/org/apache/singa/TestTensor.java
+++ b/java/src/test/java/org/apache/singa/TestTensor.java
@@ -19,41 +19,38 @@
 
 package test;
 
+import junit.framework.*;
 import org.apache.singa.swig.*;
 
-
-import junit.framework.*;
 import static org.junit.Assert.*;
 
 public class TestTensor extends TestCase {
 
-	 protected void setUp(){
-	     System.loadLibrary("singa_wrap");
-	   }
-	public void testTensorFunc() {
-		Shape s = new Shape(2);
-		s.set(0, 2);
-		s.set(1, 3);
+    protected void setUp() {
+        System.loadLibrary("singa_wrap");
+    }
 
-		Tensor t1 = new Tensor(s);
-		t1.SetFloatValue(0.1f);
-		Tensor t2 = singa_wrap.Square(t1);
-		float[] data = new float[6];
+    public void testTensorFunc() {
+        Shape s = new Shape(2);
+        s.set(0, 2);
+        s.set(1, 3);
 
-		t2.GetFloatValue(data, 6);
-		for(int i = 0; i < 6; i++)
-			assertEquals(data[i], 0.01, 1e-4);
+        Tensor t1 = new Tensor(s);
+        t1.SetFloatValue(0.1f);
+        Tensor t2 = singa_wrap.Square(t1);
+        float[] data = new float[6];
 
-		for (int i =0; i< 6; i++)
-			data[i] = i * 1.0f;
-		Tensor t3 = new Tensor(s);
-		t3.CopyFloatDataFromHostPtr(data, 6);
+        t2.GetFloatValue(data, 6);
+        for (int i = 0; i < 6; i++)
+            assertEquals(data[i], 0.01, 1e-4);
 
+        for (int i = 0; i < 6; i++)
+            data[i] = i * 1.0f;
+        Tensor t3 = new Tensor(s);
+        t3.CopyFloatDataFromHostPtr(data, 6);
 
-		t3.GetFloatValue(data, 6);
-		for(int i = 0; i < 6; i++)
-			assertEquals(data[i], i * 1.0f, 1e-4);
-
-	}
-
+        t3.GetFloatValue(data, 6);
+        for (int i = 0; i < 6; i++)
+            assertEquals(data[i], i * 1.0f, 1e-4);
+    }
 }
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
index 177326e..22a7c7d 100644
--- a/python/CMakeLists.txt
+++ b/python/CMakeLists.txt
@@ -93,8 +93,15 @@
 file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/python/singa/proto)
 file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/python/rafiki)
 file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/src/api)
+
+IF(USE_PYTHON3)
+	SET(SWIG_PYTHON3 "-py3")
+ELSE()
+	SET(SWIG_PYTHON3 "")
+ENDIF()
+
 execute_process(
-    COMMAND swig -c++ -python -I${CMAKE_SOURCE_DIR}/include
+    COMMAND swig -c++ -python ${SWIG_PYTHON3} -I${CMAKE_SOURCE_DIR}/include
     -outdir ${CMAKE_BINARY_DIR}/python/singa
     -o ${CMAKE_BINARY_DIR}/src/api/singa_wrap.cxx
     ${CMAKE_SOURCE_DIR}/src/api/singa.i)
@@ -106,6 +113,12 @@
 create_symlinks(${python_source_files})
 
 
+execute_process(
+    COMMAND ${PYTHON_EXECUTABLE} -c "from __future__ import print_function; import numpy; print(numpy.get_include())"
+    OUTPUT_VARIABLE NUMPY_INCLUDE_DIR)
+
+#message(status "numpy path ${NUMPY_INCLUDE_DIR}")
+
 IF(USE_CUDA)
 # remain this custom command to avoid cuda objs can't find
 ADD_CUSTOM_COMMAND(
@@ -115,14 +128,21 @@
 ENDIF(USE_CUDA)
 
 ADD_LIBRARY(_singa_wrap SHARED $<TARGET_OBJECTS:singa_objects>  ${python_srcs} ${proto_pys} ${global_cuda_objs})
-TARGET_LINK_LIBRARIES(_singa_wrap ${SINGA_LINKER_LIBS} ${PYTHON_LIBRARIES})
-TARGET_INCLUDE_DIRECTORIES(_singa_wrap PRIVATE ${PYTHON_INCLUDE_DIRS})
-SET_TARGET_PROPERTIES(_singa_wrap
-PROPERTIES PREFIX ""
-LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/python/singa
-)
 
-#SETUP
+# On macOS, Python 3.6 is already linked into the python executable, so there is no need to link python3.6 into Singa.
+IF(APPLE)
+    TARGET_LINK_LIBRARIES(_singa_wrap ${SINGA_LINKER_LIBS})
+    SET_TARGET_PROPERTIES(_singa_wrap PROPERTIES PREFIX ""
+                                                 LINK_FLAGS "-undefined dynamic_lookup"
+                                                 LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/python/singa)
+ELSE()
+    TARGET_LINK_LIBRARIES(_singa_wrap ${SINGA_LINKER_LIBS} ${PYTHON_LIBRARIES})
+    SET_TARGET_PROPERTIES(_singa_wrap PROPERTIES PREFIX "" LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/python/singa)
+ENDIF()
+
+TARGET_INCLUDE_DIRECTORIES(_singa_wrap PRIVATE ${PYTHON_INCLUDE_DIRS} ${NUMPY_INCLUDE_DIR})
+
+# substitute ${var} in setup.py.in to generate setup.py
 SET(SETUP_PY_IN "setup.py.in")
 SET(SETUP_PY    "${CMAKE_BINARY_DIR}/python/setup.py")
 CONFIGURE_FILE(${SETUP_PY_IN} ${SETUP_PY})
diff --git a/python/rafiki/agent.py b/python/rafiki/agent.py
deleted file mode 100644
index d9e4e7a..0000000
--- a/python/rafiki/agent.py
+++ /dev/null
@@ -1,219 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-from multiprocessing import Process, Queue
-from flask import Flask,request, send_from_directory, jsonify
-from flask_cors import CORS, cross_origin
-import os, traceback, sys
-import time
-from werkzeug.utils import secure_filename
-from werkzeug.datastructures import CombinedMultiDict, MultiDict
-import pickle
-import uuid
-
-class MsgType:
-   def __init__(self, name):
-       self.name = name
-   def __str__(self):
-       return self.name
-   def __repr__(self):
-       return "<Msg: %s>" % self
-   def equal(self,target):
-       return str(self) == str(target)
-
-   def is_info(self):
-       return self.name.startswith('kInfo') 
-   def is_command(self):
-       return self.name.startswith('kCommand') 
-   def is_status(self):
-       return self.name.startswith('kStatus') 
-   def is_request(self):
-       return self.name.startswith('kRequest') 
-   def is_response(self):
-       return self.name.startswith('kResponse') 
-
-   @staticmethod
-   def parse(name):
-       return getattr(MsgType,str(name))
-   @staticmethod
-   def get_command(name):
-       if name=='stop':
-           return MsgType.kCommandStop
-       if name=='pause':
-           return MsgType.kCommandPause
-       if name=='resume':
-           return MsgType.kCommandResume
-       return MsgType.kCommand 
-
-types =  ['kInfo','kInfoMetric',
-           'kCommand','kCommandStop','kCommandPause','kCommandResume',
-           'kStatus','kStatusRunning','kStatusPaused','kStatusError',
-           'kRequest','kResponse']
-
-for t in types:
-    setattr(MsgType,t,MsgType(t))
-
-#####   NOTE the server currently only can handle request sequentially
-
-app = Flask(__name__)
-top_k_=5
-
-class Agent():
-
-    def __init__(self,port):
-        info_queue = Queue()
-        command_queue = Queue()
-        self.p = Process(target=start, args=(port, info_queue,command_queue))
-        self.p.start()
-        self.info_queue=info_queue
-        self.command_queue=command_queue
-        return
-
-    def pull(self):
-        if not self.command_queue.empty():
-            msg,data=self.command_queue.get()
-            if msg.is_request():
-                data = pickle.loads(data)
-            return msg,data
-        return None,None 
-
-    def push(self,msg,value):
-        self.info_queue.put((msg,value))
-        return
-
-    def stop(self):
-        #sleep a while, wait for http response finished
-        time.sleep(1)
-        self.p.terminate()
-
-def start(port,info_queue,command_queue):
-    global info_queue_, command_queue_, data_
-    info_queue_=info_queue
-    command_queue_=command_queue
-    data_ = []
-    app.run(host='0.0.0.0', port=port)
-    return
-
-def getDataFromInfoQueue(need_return=False):
-    global info_queue_, data_
-    if not need_return:
-        while not info_queue_.empty():
-            msg,d = info_queue_.get()
-            data_.append(d)
-    else:
-        while True: # loop until get answer
-            while not info_queue_.empty():
-                msg,d = info_queue_.get()
-                if msg.is_info():
-                    data_.append(d)
-                else:
-                    return msg,d
-            time.sleep(0.01)
-
-@app.route("/")
-@cross_origin()
-def index():
-    try:
-        req=send_from_directory(os.getcwd(),"index.html", mimetype='text/html')
-    except:
-        traceback.print_exc()
-        return "error"
-    return req
-
-# support two operations for user to monitor the training status
-@app.route('/getAllData')
-@cross_origin()
-def getAllData():
-    global data_
-    try:
-        getDataFromInfoQueue()
-    except:
-        traceback.print_exc()
-        return failure("Internal Error")
-    return success(data_)
-
-
-@app.route('/getTopKData')
-@cross_origin()
-def getTopKData():
-    global data_
-    try:
-        k = int(request.args.get("k", top_k_))
-    except:
-        traceback.print_exc()
-        return failure("k should be integer")
-    try:
-        getDataFromInfoQueue()
-    except:
-        traceback.print_exc()
-        return failure("Internal Error")
-    return success(data_[-k:])
-
-@app.route("/api", methods=['POST'])                                                                  
-@cross_origin()                                                                                           
-def api():
-    global info_queue_,command_queue_ 
-    try:
-        files=transformFile(request.files)
-        values = CombinedMultiDict([request.args,request.form,files])
-        req_str = pickle.dumps(values)
-        command_queue_.put((MsgType.kRequest,req_str))
-        msg,response=getDataFromInfoQueue(True)
-        deleteFiles(files)
-        return response
-    except:                                                                            
-        traceback.print_exc()
-        return failure("Internal Error")
-
-@app.route("/command/<name>", methods=['GET','POST'])                                                                  
-@cross_origin()                                                                                           
-def command(name):
-    global info_queue_,command_queue_ 
-    try:
-        command=MsgType.get_command(name)
-        command_queue_.put((command,""))
-        msg,response=getDataFromInfoQueue(True)
-        return response
-    except:                                                                            
-        traceback.print_exc()
-        return failure("Internal Error")
-
-def success(data=""):
-    '''return success status in json format'''
-    res = dict(result="success", data=data)
-    return jsonify(res)
-def failure(message):
-    '''return failure status in json format'''
-    res = dict(result="message", message=message)
-    return jsonify(res)
-
-def transformFile(files):
-    result= MultiDict([])
-    for f in files.keys():
-        file = files[f]
-        unique_filename = str(uuid.uuid4())+secure_filename(file.filename)
-        filepath=os.path.join(os.getcwd(),unique_filename)
-        file.save(filepath)
-        result.add(f,filepath)
-    return result
-
-def deleteFiles(files):
-    for f in files.keys():
-        filepath = files[f]    
-        os.remove(filepath)
-        #print "remove",filepath
-    return
diff --git a/python/setup.py.in b/python/setup.py.in
index 6b3e5b5..19b5f11 100644
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -22,15 +22,16 @@
 
 setup(
     name='singa',
-
+
+    # PACKAGE_VERSION will be substituted by cmake with the real version
     version='${PACKAGE_VERSION}',
 
     description='A General Deep Learning System',
 
-    url='https://github.com/apache/incubator-singa',
+    url='https://github.com/apache/singa',
 
-    author='Apache SINGA (incubating)',
-    author_email='dev@singa.incubator.apache.org',
+    author='Apache SINGA',
+    author_email='dev@singa.apache.org',
 
     license='Apache 2',
 
@@ -47,25 +48,30 @@
 
         # Specify the Python versions you support here. In particular, ensure
         # that you indicate whether you support Python 2, Python 3 or both.
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
+        #'Programming Language :: Python :: 2',
+        #'Programming Language :: Python :: 2.6',
+        #'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
         ],
 
     keywords='deep learning singa apache',
 
-    packages=['rafiki', 'singa', 'singa.proto'],
+    packages=['singa', 'singa.proto'],
 
     # py_modules=["singa"],
 
-    install_requires=[
-        'numpy>=1.11.0',
-        'protobuf>=2.5.0',
-        'unittest-xml-reporting',
-        'flask>=0.10.1',
-        'flask_cors>=3.0.2',
-        'pillow>=2.3.0'
-        ],
+    #install_requires=[
+    #    'numpy>=1.11.0',
+    #    'protobuf==3.6.1',
+    #    'unittest-xml-reporting',
+    #    'flask>=0.10.1',
+    #    'flask_cors>=3.0.2',
+    #    'pillow>=2.3.0',
+    #    'future',
+    #    'tqdm',
+    #    'openblas==0.2.19',
+    #    'glog==0.3.4',
+    #    ],
 
     #List additional groups of dependencies here (e.g. development
     #dependencies). You can install these using the following syntax,
diff --git a/python/singa/autograd.py b/python/singa/autograd.py
new file mode 100644
index 0000000..76a645e
--- /dev/null
+++ b/python/singa/autograd.py
@@ -0,0 +1,5765 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import division
+
+from collections import Counter, deque
+import numpy as np
+
+from singa import tensor
+from singa import utils
+from .tensor import Tensor
+from . import singa_wrap as singa
+
+CTensor = singa.Tensor
+training = False
+
+
+def axis_helper(y_shape, x_shape):
+    """
+    check along which axes x has been broadcasted
+    Args:
+        y_shape: the shape of the result
+        x_shape: the shape of x
+    Return:
+        a tuple referring to the broadcasted axes
+    """
+    res = []
+    j = len(x_shape) - 1
+    for i in range(len(y_shape) - 1, -1, -1):
+        if j < 0 or x_shape[j] != y_shape[i]:
+            res.append(i)
+        j -= 1
+    return tuple(res[::-1])
+
+
+def back_broadcast(y_shape, x_shape, x):
+    """
+    for a broadcasted tensor, restore the shape of x from y_shape to x_shape
+    Args:
+        y_shape: the shape of result
+        x_shape: the shape of x
+        x: the input
+    Return:
+        a tensor
+    """
+    if y_shape != x_shape:
+        x = tensor.from_raw_tensor(x)
+        axis = axis_helper(y_shape, x_shape)
+        x = tensor.sum(x, axis)
+        x = tensor.reshape(x, x_shape)
+        x = x.data
+    return x
+
+
+def infer_dependency(op):
+    """
+    Infer the dependency of all operations with the
+    given op as the last operation.
+    Operator A depends on B if A uses the output(s) of B.
+
+    Args:
+        op: an Operator instance, e.g. the loss operation.
+
+    Return:
+        a Counter instance with the operation as the key,
+        and the number of operations that depend on it as the value;
+        and a Counter instance with the id of the output tensor as the key, and
+        the number of operations that depend on it as the value.
+    """
+
+    # current op is not inserted into the dependency_count
+    # if the current op is not a terminal op, then this function may just
+    # count dependency of a branch.
+    op_count = Counter()
+    tensor_count = Counter()
+    queue = deque([op])
+    while len(queue) > 0:
+        cur_op = queue.pop()
+        for src_op, xid, _, _ in cur_op.src:
+            if src_op not in op_count:
+                op_count[src_op] = 1
+                queue.append(src_op)
+            else:
+                op_count[src_op] += 1
+            tensor_count[xid] += 1
+    return op_count, tensor_count
+
+
+def gradients(y, dy=None):
+    """
+    Compute the gradients of the output w.r.t the parameters
+
+    Args:
+        y: the output tensor, e.g., the loss
+        dy: gradient of the target w.r.t y; None indicates the gradient is 1.0;
+            it can be used to rescale the loss.
+
+    Return:
+        a dictionary storing the gradient tensors of all tensors
+            whose stores_grad is true (e.g. parameter tensors)
+    """
+    grads = {}  # mapping: x->dx if x.stores_grad
+    for p, dp in backward(y, dy):
+        # TODO: this fn is only a helper for test cases for now.
+        #   1. could implement __hash__ or
+        #   2. make grad an attribute of the tensor class
+        #      p.grad = dp
+        grads[id(p)] = dp
+    return grads
+
+
+def backward(y, dy=None):
+    """
+    Run the backward propagation starting at y.
+    Args:
+        y: a Tensor instance, usually the loss
+        dy: a number or a Tensor instance, for the gradient of the
+            objective/loss w.r.t y, usually None, i.e., 1.0
+    Return:
+        yield the parameter (tensor with stores_grad=True) and the
+            gradient tensors.
+    """
+    assert isinstance(y, Tensor), "wrong input type."
+    op_dep, tensor_dep = infer_dependency(y.creator)
+    assert y.size() == 1, ("y must be a Tensor with a single value; "
+                           "size of y is %d" % y.size())
+
+    # by default the dy is a tensor with 1.0 for each sample;
+    if dy is None:
+        dy = float(1.0)
+    elif isinstance(dy, Tensor):
+        dy = dy.data
+    else:
+        dy = float(dy)
+
+    # ready is a queue of (operation, dy list)
+    ready = deque([(y.creator, (dy,))])
+    not_ready = {}  # mapping: op->[dy]
+
+    if y.stores_grad:
+        # gradients[y] = dy
+        if isinstance(dy, float):
+            # dy is a python float; np.array(dy) has no device(), so the
+            # gradient tensor is placed on y's device instead
+            tg = tensor.from_numpy(np.array(dy, dtype=np.float32))
+            tg.to_device(y.device)
+        else:
+            tg = Tensor(device=dy.device(), data=dy)
+        yield (y, tg)
+
+    while len(ready) > 0:
+        op, dys = ready.pop()
+        if not op.requires_grad or isinstance(op, Dummy):
+            continue
+        # if not isinstance(op, tensor.Dummy):
+        dxs = op._do_backward(*dys)
+        # TODO src and dx must match
+
+        assert len(op.src) == len(dxs), (
+            "the number of src ops (=%d) and dx (=%d) not match" %
+            (len(op.src), len(dxs)))
+        for (src_op, x_id, y, y_stores_grad), dx in zip(op.src, dxs):
+            # prefix x is w.r.t op; prefix y is w.r.t src_op.
+            # x_id is the python id of one input arg of src_op, denoted as x.
+            # y_idx (below) is the index of x among the outputs of src_op.
+            # not_ready[src_op][y_idx] records the intermediate gradient
+            # of the y_idx'th output of src_op. 'intermediate gradient'
+            # indicates that if this output is used by multiple child
+            # operations, then we have to add up the gradients (dx) from
+            # all of those child operations. When src_op is ready, the
+            # gradients of all its outputs are available, i.e. all child
+            # operations have been backwarded.
+            # y is None if y.stores_grad is false; otherwise it is a Tensor
+
+            if isinstance(src_op, Dummy) and (not src_op.stores_grad):
+                continue
+
+            y_idx = src_op.y_id2idx[x_id]
+            if src_op not in not_ready:
+                # src_op may have multiple outputs
+                not_ready[src_op] = [None for _ in src_op.y_id2idx]
+                not_ready[src_op][y_idx] = dx
+            else:
+                dxs_ = not_ready[src_op]
+                if dxs_[y_idx] is None:
+                    dxs_[y_idx] = dx
+                else:
+                    # add the gradient from another child operation that
+                    # uses the y_idx'th output of src_op as an input arg
+                    dxs_[y_idx] += dx
+
+            op_dep[src_op] -= 1
+            tensor_dep[x_id] -= 1
+            if y_stores_grad and tensor_dep[x_id] == 0:
+                # store the gradient for final return, e.g. for parameters.
+                # yielding may be delayed: we output only after all of
+                # src_op's output tensors have received their gradients
+                g = not_ready[src_op][y_idx]
+                tg = Tensor(device=g.device(),
+                            data=g,
+                            name=src_op.grad_name(y_idx))
+                yield (y, tg)
+
+            if op_dep[src_op] == 0:
+                if src_op.requires_grad is True:
+                    assert not isinstance(
+                        src_op, Dummy), "Dummy op does not do backward()"
+                    ready.append((src_op, not_ready[src_op]))
+                del not_ready[src_op]
+        del op  # delete the operation to free all tensors from this op
+
+
+class Operator(object):
+    """
+    An operation includes the forward and backward function of
+    tensor calculation.
+    Steps to add a specific operation Xxxx:
+    1. create a subclass of Operator, name it as Xxxx
+    2. override the forward() and backward(); The arguments of forward()
+       and backward() should only include CTensor;
+    """
+
+    op_count = 0
+
+    def __init__(self, name=None):
+        if name is None:
+            self.name = "{}#{}".format(self.__class__.__name__,
+                                       Operator.op_count)
+            Operator.op_count += 1
+        else:
+            self.name = name
+
+    def __call__(self, *xs):
+        return self._do_forward(*xs)
+
+    def output_name(self, idx):
+        """
+        Args:
+            idx: index of the output among all outputs
+
+        Return:
+            the name of the output tensor
+        """
+        return "{}:{}".format(self.name, idx)
+
+    def grad_name(self, idx):
+        """
+        Args:
+            idx: index of the output among all outputs
+
+        Return:
+            the name of the gradient of the output tensor
+        """
+        return "{}_g".format(self.output_name(idx))
+
+    def _do_forward(self, *xs):
+        """
+        Do not call this function from user code. It is called by __call__().
+        Args:
+            xs, Tensor instance(s)
+        Returns:
+            Tensor instance(s)
+        """
+        # TODO add the pre hook
+        assert all([isinstance(x, Tensor) for x in xs
+                   ]), "xs should include only Tensor instances"
+
+        # need to do backward if any of its input arg needs gradient
+        self.requires_grad = any([x.requires_grad for x in xs])
+
+        self.src = []
+        for x in xs:
+            if x.stores_grad:
+                # store the tensor whose gradient needs to be returned in
+                # backward(), e.g. if x is a parameter
+                self.src.append((x.creator, id(x), x, x.stores_grad))
+            else:
+                # for intermediate tensors, they will be released soon;
+                # no need to store them --> use None
+                self.src.append((x.creator, id(x), None, x.stores_grad))
+
+        # get the CTensor (data) if the input arg is Tensor
+        xs = tuple(x.data for x in xs)
+        ys = self.forward(*xs)
+        if not isinstance(ys, tuple):
+            ys = (ys,)
+        # create Tensor based on CTensor(data);
+        # assume outputs are all Tensor instances
+        ys = tuple(
+            Tensor(
+                device=y.device(),
+                data=y,
+                requires_grad=self.requires_grad,
+                creator=self,
+                name=self.output_name(idx),
+            ) for idx, y in enumerate(ys))
+        # map from python id to output index
+        self.y_id2idx = {id(y): i for i, y in enumerate(ys)}
+        # TODO add the post hook
+        return ys
+
+    def _do_backward(self, *dys):
+        dxs = self.backward(*dys)
+        if not isinstance(dxs, tuple):
+            dxs = (dxs,)
+        return dxs
+
+    def forward(self, *xs):
+        """Forward propagation.
+        Args:
+            xs: input args consisting of only CTensors.
+        Returns:
+            CTensor instance(s)
+        """
+        raise NotImplementedError
+
+    def backward(self, *dys):
+        """ Backward propagation.
+        Args:
+            dys: input args consisting of only CTensors.
+        Returns:
+            CTensor instance(s)
+        """
+        raise NotImplementedError
+
+    def get_params(self):
+        return []
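+
+
+# Hedged sketch of defining a new operator as described above (illustrative;
+# `Square` is not defined elsewhere in this file). forward()/backward() see
+# only CTensors; the wrapper function passes Tensor instances to __call__:
+#
+#     class Square(Operator):
+#
+#         def forward(self, x):
+#             if training:
+#                 self.input = x
+#             return singa.Square(x)
+#
+#         def backward(self, dy):
+#             # d(x^2)/dx = 2x
+#             return singa.__mul__(dy, singa.MultFloat(self.input, 2.0))
+#
+#     def square(x):
+#         return Square()(x)[0]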
+
+
+class Dummy(Operator):
+    """Dummy operation whice serves as a placehoder for autograd
+    Args:
+        name(string): set it for debug
+    """
+
+    def __init__(self, tensor, name=None):
+        super(Dummy, self).__init__(name)
+        self.src = []
+        self.y_id2idx = {id(tensor): 0}
+        self.tensor = tensor
+        self.requires_grad = False
+
+    def output_name(self, idx):
+        return self.name
+
+    def grad_name(self, idx):
+        return "{}_g".format(self.name)
+
+    def __getattr__(self, name):
+        return self.tensor.__getattribute__(name)
+
+
+class Mean(Operator):
+    """
+    Element-wise mean of each of the input CTensors.
+    """
+
+    def __init__(self):
+        super(Mean, self).__init__()
+
+    def forward(self, *l):
+        """
+        Args:
+            l (a list of CTensor): a list of CTensor for element-wise mean.
+        Returns:
+            a new CTensor.
+        """
+        if training:
+            self.l = len(l)
+        assert (len(l) > 0)
+        x = singa.Tensor(list(l[0].shape()), l[0].device())
+        x.SetFloatValue(0.0)
+        for i in range(len(l)):
+            x += l[i]
+        return singa.MultFloat(x, 1 / len(l))
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy.
+        Returns:
+            a list of dx (CTensor).
+        """
+        return [singa.MultFloat(dy, 1 / self.l)] * self.l
+
+
+def mean(*l):
+    """
+    Element-wise mean of the input tensors.
+    Args:
+        l (a list of Tensor): tensors to average element-wise.
+    Returns:
+        a new Tensor.
+    """
+    return Mean()(*l)[0]
+
+
+class ReLU(Operator):
+    """
+    Relu means rectified linear function, i.e., y = max(0, x) is applied to
+    CTensor elementwise.
+    """
+
+    def __init__(self):
+        super(ReLU, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            a new CTensor whose element y = x if x >= 0; otherwise 0.
+        """
+        if training:
+            self.input = x
+        return singa.ReLU(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy.
+        Returns:
+            dx (CTensor): dL / dx = dy if x >= 0; otherwise 0.
+        """
+        return singa.ReLUBackward(dy, self.input)
+
+
+def relu(x):
+    """
+    Relu means rectified linear function, i.e., y = max(0, x) is applied to
+    CTensors elementwise.
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        a new Tensor whose element y = x if x >= 0; otherwise 0.
+    """
+    return ReLU()(x)[0]
+
+
+class Less(Operator):
+    """
+    Returns the tensor resulting from performing the less logical operation
+    elementwise on the input CTensors x and y.
+    """
+
+    def __init__(self):
+        super(Less, self).__init__()
+
+    def forward(self, x, y):
+        """
+        Return a<b, where a and b are CTensor.
+        """
+        cur = singa.LTFloat(singa.__sub__(x, y), 0)
+        if training:
+            self.cache = cur
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss.
+        Raises:
+            AssertionError: no backward function for this operator.
+        """
+        assert False, ('no backward function for less')
+
+
+def less(x, y):
+    """
+    Return a<b, where a and b are CTensor.
+    """
+    return Less()(x, y)[0]
+
+
+class Clip(Operator):
+    """
+    Clip operator limits the given input within an interval. The interval
+    is specified by the inputs 'min' and 'max'.
+    """
+
+    def __init__(self, min, max):
+        """
+        Args:
+            min (float): min value, under which element is replaced by min.
+            max (float): max value, above which element is replaced by max.
+        """
+        super(Clip, self).__init__()
+        self.max = max
+        self.min = min
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): input tensor
+        Returns:
+            a new CTensor with np.clip(x,min,max)
+        """
+        self.mask = singa.Tensor(list(x.shape()), x.device())
+        self.mask.SetFloatValue(1.0)
+
+        if self.min is not None:
+            self.min = float(self.min)
+            mask0 = singa.LTFloat(x, self.min)
+            mask1 = singa.GEFloat(x, self.min)
+            self.mask = singa.__mul__(mask1, self.mask)
+            x = singa.__add__(singa.MultFloat(mask0, self.min),
+                              singa.__mul__(mask1, x))
+
+        if self.max is not None:
+            self.max = float(self.max)
+            mask0 = singa.GTFloat(x, self.max)
+            mask1 = singa.LEFloat(x, self.max)
+            self.mask = singa.__mul__(mask1, self.mask)
+            x = singa.__add__(singa.MultFloat(mask0, self.max),
+                              singa.__mul__(mask1, x))
+
+        return x
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        return singa.__mul__(dy, self.mask)
+
+
+def clip(x, min=None, max=None):
+    """
+    Clip operator limits the given input within an interval. The interval
+    is specified by the inputs 'min' and 'max'.
+    Args:
+        x (Tensor): input tensor
+        min (float): Minimum value, under which element is replaced by min.
+        max (float): Maximum value, above which element is replaced by max.
+    Returns:
+        a new Tensor with np.clip(x,min,max).
+    """
+    return Clip(min, max)(x)[0]
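+
+
+# Hedged clip example (illustrative; assumes float32 numpy inputs):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([-2., 0.5, 3.], dtype=np.float32))
+#     y = clip(x, min=-1.0, max=1.0)   # values become [-1.0, 0.5, 1.0]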
+
+
+class Identity(Operator):
+    """
+    Init an identity operator
+    """
+
+    def __init__(self):
+        super(Identity, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the same CTensor x.
+        """
+        return x
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy.
+        Returns:
+            dx (CTensor): dL / dx.
+        """
+        return dy
+
+
+def identity(x):
+    """
+    Init a identity operator.
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the same Tensor with x.
+    """
+    return Identity()(x)[0]
+
+
+class Matmul(Operator):
+    """
+    Init matrix multiplication operator.
+    """
+
+    def __init__(self):
+        super(Matmul, self).__init__()
+
+    def forward(self, x, w):
+        """
+        Return `np.matmul(x,w)`, where x and w are CTensor.
+        """
+        # todo, cannot do Mult for dims more than 2
+        if training:
+            self.input = (x, w)
+        res = singa.Mult(x, w)
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss.
+        Returns:
+            a tuple for (dx, dw).
+        """
+        return (
+            singa.Mult(dy, singa.DefaultTranspose(self.input[1])),
+            singa.Mult(singa.DefaultTranspose(self.input[0]), dy),
+        )
+
+
+def matmul(x, w):
+    """
+    Return `np.matmul(x,w)`, where x and w are Tensor.
+    """
+    return Matmul()(x, w)[0]
+
+
+class Greater(Operator):
+    """
+    Returns the tensor resulting from performing the greater logical
+    operation elementwise on the input tensors x and y.
+    """
+
+    def __init__(self):
+        super(Greater, self).__init__()
+
+    def forward(self, x, y):
+        """
+        Return a>b, where a and b are CTensor.
+        """
+        cur = singa.GTFloat(singa.__sub__(x, y), 0)
+        if training:
+            self.cache = cur
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss.
+        Raises:
+            AssertionError: no backward function for this operator.
+        """
+        assert False, ('no backward function for greater')
+
+
+def greater(x, y):
+    """
+    Return a>b, where a and b are Tensor.
+    """
+    return Greater()(x, y)[0]
+
+
+class AddBias(Operator):
+    """
+    Add Bias to each row / column of the Tensor, depending on the axis arg.
+    """
+
+    def __init__(self, axis=0):
+        """
+        To indicate the calculation axis, 0 for row, 1 for column.
+        Args:
+            axis (int): 0 or 1, default is 0.
+        """
+        super(AddBias, self).__init__()
+        self.axis = axis
+
+    def forward(self, x, b):
+        """
+        Args:
+            x (CTensor): matrix.
+            b (CTensor): bias to be added.
+        Return:
+            the result Tensor
+        """
+        if self.axis == 0:
+            singa.AddRow(b, x)
+        elif self.axis == 1:
+            singa.AddColumn(b, x)
+        return x
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss.
+        Return:
+            a tuple for (dx, db), dx is data for dL / dx, db is data
+            for dL / db.
+        """
+        if self.axis == 0:
+            return dy, singa.Sum(dy, 0)
+        elif self.axis == 1:
+            # the bias was added per column, so reduce over axis 1
+            return dy, singa.Sum(dy, 1)
+
+
+def add_bias(x, b, axis=0):
+    """
+    Add Bias to each row / column of the Tensor, depending on the axis arg.
+    Args:
+        x (Tensor): matrix.
+        b (Tensor): bias to be added.
+        axis (int): 0 or 1, default is 0.
+    Return:
+        the result Tensor
+    """
+    assert x.ndim() == 2, "1st arg required 2d tensor. got shape: %s" % (
+        x.shape)
+    assert b.ndim() == 1, "2nd arg required 1d tensor. got shape: %s" % (
+        b.shape)
+    assert axis in [0, 1], "allowed axis: 0 or 1"
+    return AddBias(axis)(x, b)[0]
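+
+
+# Hedged add_bias example (illustrative): with axis=0 the 1d bias is added
+# to every row, so its length must equal the number of columns:
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.zeros((2, 3), dtype=np.float32))
+#     b = tensor.from_numpy(np.array([1., 2., 3.], dtype=np.float32))
+#     y = add_bias(x, b, axis=0)   # every row of y is [1., 2., 3.]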
+
+
+class Reshape(Operator):
+    """
+    Reshape the input tensor similar to np.reshape.
+    """
+
+    def __init__(self, shape):
+        """
+        Args:
+            shape (list of int): Specified shape for output. At most one
+                dimension of the new shape can be -1. In this case, the
+                value is inferred from the size of the tensor and the
+                remaining dimensions. A dimension could also be 0,
+                in which case the actual dimension value is unchanged
+                (i.e. taken from the input tensor).
+        """
+        super(Reshape, self).__init__()
+        self.shape = shape
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): matrix.
+        Return:
+            the result CTensor
+        """
+        self._shape = x.shape()
+        shape = list(self.shape)
+        # handle the shape with 0
+        shape = [
+            self._shape[i]
+            if i < len(self._shape) and shape[i] == 0 else shape[i]
+            for i in range(len(shape))
+        ]
+        # handle the shape with -1
+        hidden_shape = int(np.prod(self._shape) // np.abs(np.prod(shape)))
+        self.cache = [int(s) if s != -1 else hidden_shape for s in shape]
+        return singa.Reshape(x, self.cache)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        return singa.Reshape(dy, self._shape)
+
+
+def reshape(x, shape):
+    """
+    Reshape the input tensor similar to np.reshape.
+    Args:
+        x (Tensor): matrix.
+        shape (list of int): Specified shape for output. At most one
+            dimension of the new shape can be -1. In this case, the
+            value is inferred from the size of the tensor and the
+            remaining dimensions. A dimension could also be 0,
+            in which case the actual dimension value is unchanged
+            (i.e. taken from the input tensor).
+    Return:
+        the result Tensor
+    """
+    return Reshape(shape)(x)[0]
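+
+
+# Hedged reshape example showing the 0 and -1 conventions (illustrative):
+# for an input x of shape (2, 3, 4),
+#
+#     y = reshape(x, [0, -1])    # 0 keeps dim 2; -1 infers 12 -> shape (2, 12)
+#     z = reshape(x, [4, 2, 3])  # explicit shape with the same size of 24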
+
+
+class PRelu(Operator):
+    """
+    PRelu applies the function `f(x) = slope * x` for x < 0,
+    `f(x) = x` for x >= 0 to the data tensor elementwise.
+    """
+
+    def __init__(self):
+        super(PRelu, self).__init__()
+
+    def forward(self, x, slope):
+        """
+        Args:
+            x (CTensor): matrix.
+        Return:
+            the result CTensor
+        """
+        mask0 = singa.LTFloat(x, 0.0)
+        res = singa.__mul__(x, mask0)
+        res = singa.__mul__(res, slope)
+        res += singa.ReLU(x)
+        if training:
+            self.input = x
+            self.slope = slope
+            self.mask0 = mask0
+            self.shape0 = list(x.shape())
+            self.shape1 = list(slope.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        dx1mask = singa.GEFloat(self.input, 0.0)
+        dx2 = singa.__mul__(self.mask0, self.slope)
+        dx = singa.__add__(dx1mask, dx2)
+        dx = singa.__mul__(dy, dx)
+        dslope = singa.__mul__(dy, singa.__mul__(self.mask0, self.input))
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx, dslope
+        # handle broadcast
+        dx = back_broadcast(self.shape3, self.shape0, dx)
+        dslope = back_broadcast(self.shape3, self.shape1, dslope)
+        return dx, dslope
+
+
+def prelu(x, slope):
+    """
+    PRelu applies the function `f(x) = slope * x` for x < 0,
+    `f(x) = x` for x >= 0 to the data tensor elementwise.
+    Args:
+        x (Tensor): matrix.
+        slope (Tensor): the slope coefficient for negative inputs.
+    Return:
+        the result Tensor
+    """
+    return PRelu()(x, slope)[0]
+
+
+class Add(Operator):
+    """
+    Performs element-wise binary addition.
+    """
+
+    def __init__(self):
+        super(Add, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `a+b`, where a and b are CTensor.
+        """
+        res = singa.__add__(a, b)
+        if training:
+            self.shape0 = list(a.shape())
+            self.shape1 = list(b.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy(CTensor): dL / dy
+        Return:
+            a tuple for (dx0, dx1), dx0 is data for dL / da, dx1 is data
+            for dL / db.
+        """
+        dx0, dx1 = dy, dy
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx0, dx1
+        # handle broadcast
+        dx0 = back_broadcast(self.shape3, self.shape0, dx0)
+        dx1 = back_broadcast(self.shape3, self.shape1, dx1)
+        return dx0, dx1
+
+
+def add(a, b):
+    """
+    Return `a+b`, where a and b are Tensor.
+    """
+    return Add()(a, b)[0]
+
+
+class Elu(Operator):
+    """
+    `f(x) = alpha * (exp(x) - 1.)` for x < 0, `f(x) = x` for x >= 0 is
+    applied to the tensor elementwise.
+    """
+
+    def __init__(self, alpha=1.):
+        """
+        Args:
+            alpha (float): Coefficient of ELU, default is 1.0
+        """
+        super(Elu, self).__init__()
+        self.alpha = alpha
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): matrix
+        Returns:
+            a CTensor for the result
+        """
+        #f(x) = alpha * (exp(x) - 1.) for x < 0, f(x) = x for x >= 0
+        if training:
+            self.input = x
+        x1 = singa.LTFloat(x, 0.0)
+        x1 *= x
+        x1 = singa.MultFloat(singa.SubFloat(singa.Exp(x1), 1.0), self.alpha)
+        x2 = singa.ReLU(x)
+        x1 += x2
+        return x1
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        dx1mask = singa.LTFloat(self.input, 0.0)
+        dx = singa.MultFloat(singa.Exp(self.input), self.alpha)
+        dx *= dx1mask
+
+        dx2mask = singa.GEFloat(self.input, 0.0)
+
+        dx += dx2mask
+        dx *= dy
+        return dx
+
+
+def elu(x, alpha=1):
+    """
+    `f(x) = alpha * (exp(x) - 1.)` for x < 0, `f(x) = x` for x >= 0 is
+    applied to the tensor elementwise.
+    Args:
+        x (Tensor): matrix
+        alpha (float): Coefficient of ELU, default is 1.0
+    Returns:
+        a Tensor for the result
+    """
+    return Elu(alpha)(x)[0]
+
+
+class Equal(Operator):
+    """
+    Returns the tensor resulting from performing the equal logical operation
+    elementwise on the input tensors x and y.
+    """
+
+    def __init__(self):
+        super(Equal, self).__init__()
+
+    def forward(self, x, y):
+        """
+        Return `a=b`, where a and b are CTensor.
+        """
+        return singa.__eq__(x, y)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no backward function for equal')
+
+
+def equal(x, y):
+    """
+    Return `a=b`, where a and b are Tensor.
+    """
+    return Equal()(x, y)[0]
+
+
+class SeLU(Operator):
+    """
+    `y = gamma * (alpha * e^x - alpha)` for x <= 0, `y = gamma * x` for x > 0
+    is applied to the tensor elementwise.
+    """
+
+    def __init__(self, alpha=1.67326, gamma=1.0507):
+        """
+        Args:
+            alpha (float): Coefficient of SELU default to 1.67326
+            gamma (float): Coefficient of SELU default to 1.0507
+        """
+        super(SeLU, self).__init__()
+        self.alpha = alpha
+        self.gamma = gamma
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): matrix
+        Returns:
+            a CTensor for the result
+        """
+        #y = gamma * (alpha * e^x - alpha) for x <= 0, y = gamma * x for x > 0
+        if training:
+            self.input = x
+        x1 = singa.LEFloat(x, 0.0)
+        x1 *= x
+        x1 = singa.MultFloat(singa.SubFloat(singa.Exp(x1), 1.0),
+                             self.alpha * self.gamma)
+        x2 = singa.ReLU(x)
+        x2 = singa.MultFloat(x2, self.gamma)
+        x1 += x2
+        return x1
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        dx1mask = singa.LEFloat(self.input, 0.0)
+        dx1 = singa.MultFloat(singa.Exp(self.input), self.gamma * self.alpha)
+        dx1 = singa.__mul__(dx1mask, dx1)
+
+        dx2mask = singa.GTFloat(self.input, 0.0)
+        dx2 = singa.MultFloat(dx2mask, self.gamma)
+
+        dx = singa.__add__(dx1, dx2)
+        dx *= dy
+        return dx
+
+
+def selu(x, alpha=1.67326, gamma=1.0507):
+    """
+    `y = gamma * (alpha * e^x - alpha)` for x <= 0, `y = gamma * x` for x > 0
+    is applied to the tensor elementwise.
+    Args:
+        x (Tensor): matrix
+        alpha (float): Coefficient of SELU default to 1.67326
+        gamma (float): Coefficient of SELU default to 1.0507
+    Returns:
+        a Tensor for the result
+    """
+    return SeLU(alpha, gamma)(x)[0]
+
+
+class SoftMax(Operator):
+    """
+    Apply SoftMax for each row of the Tensor or each column of the Tensor
+    according to the parameter axis.
+    """
+
+    def __init__(self, axis=1):
+        """
+        Args:
+            axis (int): axis of softmax, default to 1
+        """
+        super(SoftMax, self).__init__()
+        self.axis = axis
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): the input 1d or 2d tensor
+        Returns:
+            the result CTensor
+        """
+        self.output = singa.SoftMax(x, self.axis)
+        return self.output
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        return singa.SoftMaxBackward(dy, self.axis, self.output)
+
+
+def softmax(x, axis=1):
+    """
+    Apply SoftMax for each row of the Tensor or each column of the Tensor
+    according to the parameter axis.
+    Args:
+        x (Tensor): the input 1d or 2d tensor
+        axis (int): axis of softmax, default to 1
+    Returns:
+        the result Tensor
+    """
+    return SoftMax(axis)(x)[0]
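+
+
+# Hedged softmax example (illustrative): with axis=1, each row of a 2d
+# input is normalized to sum to 1:
+#
+#     import numpy as np
+#     from singa import tensor
+#     logits = tensor.from_numpy(np.array([[1., 2., 3.]], dtype=np.float32))
+#     p = softmax(logits, axis=1)   # the single row of p sums to 1.0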
+
+
+class Sum(Operator):
+    """
+    Element-wise sum of each of the input tensors
+    """
+
+    def __init__(self):
+        super(Sum, self).__init__()
+
+    def forward(self, *l):
+        """
+        Args:
+            l (a list of CTensor): tensors to sum element-wise
+        Returns:
+            a CTensor for the result
+        """
+        if training:
+            self.l = len(l)
+        assert (len(l) > 0)
+        x = singa.Tensor(list(l[0].shape()), l[0].device())
+        x.SetFloatValue(0.0)
+        for i in range(len(l)):
+            x += l[i]
+        return x
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        return [dy] * self.l
+
+
+def sum(*l):
+    """
+    Element-wise sum of the input tensors.
+    Args:
+        l (a list of Tensor): tensors to sum element-wise
+    Returns:
+        a Tensor for the result
+    """
+    return Sum()(*l)[0]
+
+
+class BinaryCrossEntropy(Operator):
+    """
+    Calculate the binary cross entropy loss for a batch of training data.
+    """
+
+    def __init__(self, t):
+        super(BinaryCrossEntropy, self).__init__()
+        self.t = t.data
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): 1d or 2d tensor, the prediction data(output)
+                         of current network.
+            t (CTensor): 1d or 2d tensor, the target data for training.
+        Returns:
+            loss (CTensor): scalar.
+        """
+        posx = singa.AddFloat(x, 0.0001)
+        loss = singa.SumAll(singa.__mul__(self.t, singa.Log(posx)))
+        negt = singa.AddFloat(singa.MultFloat(self.t, -1.0), 1.0)
+        negx = singa.AddFloat(singa.MultFloat(x, -1.0), 1.0001)
+        negLoss = singa.SumAll(singa.__mul__(negt, singa.Log(negx)))
+        loss += negLoss
+        loss /= -x.shape()[0]
+        self.x = singa.AddFloat(x, 0.0001)
+        return loss
+
+    def backward(self, dy=1.0):
+        """
+        Args:
+            dy (float or CTensor): scalar, accumulate gradient from outside
+                                of current network, usually equal to 1.0
+        Returns:
+            dx (CTensor): data for the dL /dx, L is the loss, x is the output
+                          of current network. note that this is true for
+                          dy = 1.0
+        """
+
+        dx = singa.__div__(self.t, self.x)
+        negt = singa.AddFloat(self.t, -1.0)
+        negx = singa.AddFloat(self.x, -0.9999)
+        dx -= singa.__div__(negt, negx)
+        dx *= float(-1.0 / self.x.shape()[0])
+        if isinstance(dy, float):
+            # dtype of dy: float
+            dx *= dy
+            return dx
+        elif isinstance(dy, CTensor):
+            pass  # TODO, broadcast elementwise multiply seems not support
+
+
+def binary_cross_entropy(x, t):
+    return BinaryCrossEntropy(t)(x)[0]
+
+
+class CrossEntropy(Operator):
+    """
+    Calculate the cross entropy (negative log likelihood) loss for a batch
+    of training data.
+    """
+
+    def __init__(self, t):
+        super(CrossEntropy, self).__init__()
+        self.t = t.data
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): 1d or 2d tensor, the prediction data(output)
+                         of current network.
+            t (CTensor): 1d or 2d tensor, the target data for training.
+        Returns:
+            loss (CTensor): scalar.
+        """
+        loss = singa.SumAll(singa.__mul__(self.t, singa.Log(x)))
+        loss /= -x.shape()[0]
+        self.x = x
+        return loss
+
+    def backward(self, dy=1.0):
+        """
+        Args:
+            dy (float or CTensor): scalar, accumulate gradient from outside
+                                of current network, usually equal to 1.0
+        Returns:
+            dx (CTensor): data for the dL /dx, L is the loss, x is the output
+                          of current network. note that this is true for
+                          dy = 1.0
+        """
+
+        dx = singa.__div__(self.t, self.x)
+        dx *= float(-1.0 / self.x.shape()[0])
+        if isinstance(dy, float):
+            # dtype of dy: float
+            dx *= dy
+            return dx
+        elif isinstance(dy, CTensor):
+            pass  # TODO, broadcast elementwise multiply seems not support
+
+
+def cross_entropy(x, t):
+    assert x.ndim() == 2, "1st arg required 2d tensor. got shape: " + str(
+        x.shape)
+    assert t.ndim() <= 2, "2nd arg required <=2d tensor. got shape: " + str(
+        t.shape)
+    # x is the predicted probability (not logits; Log is applied directly)
+    # and t is the ground truth.
+    return CrossEntropy(t)(x)[0]
+
+
+class RankingLoss(Operator):
+
+    def __init__(self, M=0.2):
+        super().__init__()
+        # margin
+        self.M = M
+
+    def forward(self, pos, neg):
+        # L = max{0, M - fn(pos) + fn(neg)}
+        zero = singa.Tensor(list(pos.shape()), pos.device())
+        zero.SetFloatValue(0.0)
+        val = singa.AddFloat(singa.__sub__(neg, pos), self.M)
+        gt_zero = singa.__gt__(val, zero)
+        if training:
+            self.inputs = (gt_zero,)  # (BS,)
+        all_loss = singa.__mul__(gt_zero, val)
+        loss = singa.SumAll(all_loss)
+        loss /= (pos.shape()[0])
+        return loss
+
+    def backward(self, dy=1.0):
+        assert training, "enable training mode to do backward"
+        # dpos = -1 if M-pos+neg > 0 else 0
+        # dneg =  1 if M-pos+neg > 0 else 0
+        gt_zero = self.inputs[0]
+        dpos_factor = singa.Tensor(list(gt_zero.shape()), gt_zero.device())
+        dpos_factor.SetFloatValue(-1.0 / gt_zero.Size())
+        dneg_factor = singa.Tensor(list(gt_zero.shape()), gt_zero.device())
+        dneg_factor.SetFloatValue(1.0 / gt_zero.Size())
+        dpos = singa.__mul__(gt_zero, dpos_factor)
+        dneg = singa.__mul__(gt_zero, dneg_factor)
+        return dpos, dneg
+
+
+def ranking_loss(pos, neg, M=0.2):
+    assert pos.shape == neg.shape, "input and target shape different: %s, %s" % (
+        pos.shape, neg.shape)
+    return RankingLoss(M)(pos, neg)[0]
+
+
+class SoftMaxCrossEntropy(Operator):
+
+    def __init__(self, t):
+        super(SoftMaxCrossEntropy, self).__init__()
+        self.t = t.data
+
+    def forward(self, x):
+        self.p = singa.SoftMax(x)
+        ret = singa.CrossEntropyFwd(self.p, self.t)
+        loss = singa.SumAll(ret)
+        loss /= x.shape()[0]
+        return loss
+
+    def backward(self, dy=1.0):
+        dx = singa.SoftmaxCrossEntropyBwd(self.p, self.t)
+        dx /= float(self.p.shape()[0])
+        return dx
+
+
+def softmax_cross_entropy(x, t):
+    assert x.ndim() == 2, "1st arg required 2d tensor. got shape: " + str(
+        x.shape)
+    assert t.ndim() <= 2, "2nd arg required <=2d tensor. got shape: " + str(
+        t.shape)
+    # x is the logits and t is the ground truth.
+    return SoftMaxCrossEntropy(t)(x)[0]
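+
+
+# Hedged softmax_cross_entropy example (illustrative; the target t is
+# assumed to be one-hot here):
+#
+#     import numpy as np
+#     from singa import tensor
+#     logits = tensor.from_numpy(
+#         np.random.randn(4, 10).astype(np.float32))
+#     onehot = np.eye(10, dtype=np.float32)[[0, 3, 7, 9]]
+#     target = tensor.from_numpy(onehot)
+#     loss = softmax_cross_entropy(logits, target)   # scalar loss Tensor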
+
+
+class MeanSquareError(Operator):
+
+    def __init__(self, t):
+        super(MeanSquareError, self).__init__()
+        self.t = t.data
+
+    def forward(self, x):
+        self.err = singa.__sub__(x, self.t)
+        sqr = singa.Square(self.err)
+        loss = singa.SumAll(sqr)
+        self.n = 1
+        for s in x.shape():
+            self.n *= s
+        loss /= self.n
+        return loss
+
+    def backward(self, dy=1.0):
+        dx = self.err
+        dx *= float(2 / self.n)
+        dx *= dy
+        return dx
+
+
+def mse_loss(x, t):
+    assert x.shape == t.shape, "input and target shape different: %s, %s" % (
+        x.shape, t.shape)
+    return MeanSquareError(t)(x)[0]
+
+
+def ctensor2numpy(x):
+    """
+    To be used in SoftMax Operator.
+    Convert a singa_tensor to numpy_tensor.
+    """
+    np_array = x.GetFloatValue(int(x.Size()))
+    return np_array.reshape(x.shape())
+
+
+class Flatten(Operator):
+    """
+    Flattens the input tensor into a 2D matrix. If input tensor has shape
+    `(d_0, d_1, ... d_n)` then the output will have shape `(d_0 X d_1 ...
+    d_(axis-1), d_axis X d_(axis+1) ... X dn)`.
+    """
+
+    def __init__(self, axis=1):
+        """
+        Args:
+            axis (int): Indicate up to which input dimensions (exclusive)
+                should be flattened to the outer dimension of the output. The
+                value for axis must be in the range [-r, r], where r is the
+                rank of the input tensor. Negative value means counting
+                dimensions from the back. When axis = 0, the shape of the
+                output tensor is `(1, d_0 X d_1 ... d_n)`, where the shape
+                of the input tensor is `(d_0, d_1, ... d_n)`.
+        """
+        super(Flatten, self).__init__()
+        self.axis = axis
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): the input tensor
+        Returns:
+            the result CTensor
+        """
+        self.shape = list(x.shape())
+        shape, axis = self.shape, self.axis
+        # the axis must be within the range [-r, r-1], r = rank of the input
+        assert -len(shape) <= axis <= len(shape) - 1, \
+            "the axis must be within [-%d, %d]" % (len(shape), len(shape) - 1)
+        # calculate the new shape
+        new_shape = (1, int(np.prod(shape))) if axis == 0 else (
+            int(np.prod(shape[0:axis]).astype(int)),
+            int(np.prod(shape[axis:]).astype(int)))
+        y = singa.Reshape(x, new_shape)
+        return y
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss
+        Returns:
+            dx (CTensor): data for the dL / dx, L is the loss,
+        """
+        dx = singa.Reshape(dy, self.shape)
+        return dx
+
+
+def flatten(x, axis=1):
+    """
+    Flattens the input tensor into a 2D matrix. If input tensor has shape
+    `(d_0, d_1, ... d_n)` then the output will have shape `(d_0 X d_1 ...
+    d_(axis-1), d_axis X d_(axis+1) ... X dn)`.
+    Args:
+        x (Tensor): the input tensor
+        axis (int): Indicate up to which input dimensions (exclusive)
+            should be flattened to the outer dimension of the output. The
+            value for axis must be in the range [-r, r], where r is the
+            rank of the input tensor. Negative value means counting
+            dimensions from the back. When axis = 0, the shape of the
+            output tensor is `(1, d_0 X d_1 ... d_n)`, where the shape
+            of the input tensor is `(d_0, d_1, ... d_n)`.
+    Returns:
+        the result Tensor
+    """
+    return Flatten(axis)(x)[0]
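+
+
+# Hedged flatten examples (illustrative): for an input x of shape (2, 3, 4),
+#
+#     flatten(x, axis=0)   # -> shape (1, 24)
+#     flatten(x, axis=1)   # -> shape (2, 12)
+#     flatten(x, axis=2)   # -> shape (6, 4)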
+
+
+class ScatterElements(Operator):
+    """
+    ScatterElements operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Changelog.md#ScatterElements-11
+
+    Example usage:
+    data = [
+        [0.0, 0.0, 0.0],
+        [0.0, 0.0, 0.0],
+        [0.0, 0.0, 0.0],
+    ]
+    axis = 0
+    indices = [
+        [1, 0, 2],
+        [0, 2, 1],
+    ]
+    updates = [
+        [1.0, 1.1, 1.2],
+        [2.0, 2.1, 2.2],
+    ]
+    output = [
+        [2.0, 1.1, 0.0]
+        [1.0, 0.0, 2.2]
+        [0.0, 2.1, 1.2]
+    ]
+
+    """
+
+    def __init__(self, indices, updates, axis=0):
+        """
+        Args:
+            indices (Tensor): index tensor
+            updates (Tensor): source tensor
+            axis (int): which axis to scatter on. A negative value means
+                counting dimensions from the back. Accepted range is [-r, r-1]
+                where r = rank(destination tensor).
+        """
+        super(ScatterElements, self).__init__()
+        self.indices = indices
+        self.updates = updates
+        self.axis = axis
+
+    def forward(self, x):
+        x_shape = x.shape()
+        x_rank = len(x_shape)
+        if isinstance(self.indices, Tensor):
+            self.indices = tensor.to_numpy(self.indices)
+        elif isinstance(self.indices, (list, tuple)):
+            self.indices = np.array(self.indices)
+        if isinstance(self.updates, Tensor):
+            self.updates = tensor.to_numpy(self.updates)
+        elif isinstance(self.updates, (list, tuple)):
+            self.updates = np.array(self.updates)
+        # indices are used as integer positions below; astype returns a new
+        # array, so the result must be assigned
+        self.indices = self.indices.astype(np.int32)
+        _x = tensor.to_numpy(tensor.from_raw_tensor(x))
+        _x = _x.astype(np.float32)
+
+        assert x_rank == 2, "Only support 2D input."
+        assert x_rank == len(
+            self.indices.shape
+        ), "Index should have the same number of dimensions as output"
+        assert -x_rank < self.axis <= x_rank, "Axis is out of range"
+        assert np.logical_and(
+            -_x.shape[self.axis] <= self.indices,
+            self.indices < _x.shape[self.axis]).all(
+            ), "index values should be between %d and %d" % (
+                -_x.shape[self.axis], _x.shape[self.axis] - 1)
+
+        self.axis = self.axis % x_rank
+        u_shape = self.updates.shape
+        y = _x.copy()
+        for i in range(u_shape[0]):
+            for j in range(u_shape[1]):
+                idx = int(self.indices[i][j])
+                if self.axis == 0:
+                    y[idx][j] = self.updates[i][j]
+                else:
+                    y[i][idx] = self.updates[i][j]
+        y = tensor.from_numpy(y)
+        y.to_device(x.device())
+        return y.data
+
+    def backward(self, dy):
+        mask = np.ones(dy.shape(), dtype=np.float32)
+        u_shape = self.updates.shape
+        for i in range(u_shape[0]):
+            for j in range(u_shape[1]):
+                idx = int(self.indices[i][j])
+                if self.axis == 0:
+                    mask[idx][j] = 0.
+                else:
+                    mask[i][idx] = 0.
+        mask = tensor.from_numpy(mask)
+        mask.to_device(dy.device())
+        return singa.__mul__(dy, mask.data)
+
+
+def scatter_elements(x, indices, updates, axis=0):
+    """
+    Produces a ScatterElements operator
+    Args:
+        x (Tensor): input tensor.
+        indices (Tensor): index tensor
+        updates (Tensor): source tensor
+        axis (int): which axis to scatter on. A negative value means
+            counting dimensions from the back. Accepted range is [-r, r-1]
+            where r = rank(destination tensor).
+    Returns:
+        the output Tensor.
+    """
+    return ScatterElements(indices, updates, axis)(x)[0]
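+
+
+# Hedged scatter_elements example mirroring the ScatterElements docstring
+# (illustrative):
+#
+#     import numpy as np
+#     from singa import tensor
+#     data = tensor.from_numpy(np.zeros((3, 3), dtype=np.float32))
+#     indices = [[1, 0, 2], [0, 2, 1]]
+#     updates = [[1.0, 1.1, 1.2], [2.0, 2.1, 2.2]]
+#     out = scatter_elements(data, indices, updates, axis=0)
+#     # out == [[2.0, 1.1, 0.0], [1.0, 0.0, 2.2], [0.0, 2.1, 1.2]]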
+
+
+
+class Concat(Operator):
+    """
+    Concatenate a list of tensors into a single tensor. All input tensors must
+    have the same shape, except for the dimension size of the axis to
+    concatenate on.
+    """
+
+    def __init__(self, axis=0):
+        """
+        Args:
+            axis (int): Which axis to concat on. A negative value means
+                counting dimensions from the back. Accepted range is [-r, r-1]
+                where r = rank(inputs).
+        """
+        super(Concat, self).__init__()
+        self.axis = axis
+
+    def forward(self, *xs):
+        """
+        Args:
+            xs (a list of CTensor): List of tensors for concatenation
+        Returns:
+            a CTensor for the result
+        """
+        if self.axis < 0:
+            self.axis = self.axis % len(xs[0].shape())
+        if training:
+            offset = 0
+            self.slice_point = []
+            for t in xs:
+                offset += t.shape()[self.axis]
+                self.slice_point.append(offset)
+        x = singa.VecTensor(list(xs))
+        return singa.ConcatOn(x, self.axis)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss
+        Returns:
+            dxs (a tuple of CTensor): data for the dL / dxs, L is the loss,
+        """
+        assert hasattr(
+            self, "slice_point"), "Please set training to True before doing BP."
+        assert self.slice_point[-1] == dy.shape()[self.axis], "Shape mismatch."
+        dxs = []
+        last_offset = 0
+        for p in self.slice_point:
+            dxs.append(singa.SliceOn(dy, last_offset, p, self.axis))
+            last_offset = p
+        return tuple(dxs)
+
+
+def cat(xs, axis=0):
+    """
+    Concatenate a list of tensors into a single tensor. All input tensors must
+    have the same shape, except for the dimension size of the axis to
+    concatenate on.
+    Args:
+        xs (a list of Tensor): List of tensors for concatenation
+        axis (int): Which axis to concat on. A negative value means
+            counting dimensions from the back. Accepted range is [-r, r-1]
+            where r = rank(inputs).
+    Returns:
+        a Tensor for the result
+    """
+    return Concat(axis)(*xs)[0]
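+
+
+# Hedged cat example (illustrative): concatenating two (2, 3) tensors on
+# axis 0 yields shape (4, 3); on axis 1 it would yield (2, 6):
+#
+#     import numpy as np
+#     from singa import tensor
+#     a = tensor.from_numpy(np.ones((2, 3), dtype=np.float32))
+#     b = tensor.from_numpy(np.zeros((2, 3), dtype=np.float32))
+#     y = cat([a, b], axis=0)   # shape (4, 3)
+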
+"""
+def make_slice(arr, axis, i):  # type: ignore
+        slc = [slice(None)] * arr.ndim
+        slc[axis] = i
+        return slc
+"""
+
+class _Conv2d(Operator):
+    """
+    Init a conv 2d operator
+    """
+
+    def __init__(self, handle, odd_padding=(0, 0, 0, 0)):
+        """
+        Args:
+            handle (object): ConvHandle for cpu or CudnnConvHandle for gpu
+            odd_padding (tuple of four ints): the odd padding is the value
+                that cannot be handled by the tuple padding (w, h) mode, so
+                we need to first pad the input, then use the normal padding
+                method.
+        """
+        super(_Conv2d, self).__init__()
+        self.handle = handle
+        self.odd_padding = odd_padding
+
+    def forward(self, x, W, b=None):
+        """
+        Args:
+            x (CTensor): input
+            W (CTensor): weight
+            b (CTensor): bias
+        Returns:
+            CTensor
+        """
+        assert x.nDim() == 4, "The input should be 4D."
+        if self.odd_padding != (0, 0, 0, 0):
+            x = utils.handle_odd_pad_fwd(x, self.odd_padding)
+
+        if training:
+            if self.handle.bias_term:
+                self.inputs = (x, W, b)
+            else:
+                self.inputs = (x, W)
+
+        if not self.handle.bias_term:
+            # create empty bias tensor for Cpp API
+            b = CTensor((self.handle.num_filters,), x.device())
+            b.SetFloatValue(0.0)
+
+        if (type(self.handle) != singa.ConvHandle):
+            return singa.GpuConvForward(x, W, b, self.handle)
+        else:
+            return singa.CpuConvForward(x, W, b, self.handle)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): dL / dy
+        Returns:
+            dx (CTensor): dL / dx
+        """
+        assert training is True and hasattr(
+            self, "inputs"), "Please set training to True before doing BP."
+
+        if (type(self.handle) != singa.ConvHandle):
+            dx = singa.GpuConvBackwardx(dy, self.inputs[1], self.inputs[0],
+                                        self.handle)
+            dW = singa.GpuConvBackwardW(dy, self.inputs[0], self.inputs[1],
+                                        self.handle)
+            db = singa.GpuConvBackwardb(
+                dy, self.inputs[2],
+                self.handle) if self.handle.bias_term else None
+        else:
+            dx = singa.CpuConvBackwardx(dy, self.inputs[1], self.inputs[0],
+                                        self.handle)
+            dW = singa.CpuConvBackwardW(dy, self.inputs[0], self.inputs[1],
+                                        self.handle)
+            db = singa.CpuConvBackwardb(
+                dy, self.inputs[2],
+                self.handle) if self.handle.bias_term else None
+        if self.odd_padding != (0, 0, 0, 0):
+            dx = utils.handle_odd_pad_bwd(dx, self.odd_padding)
+
+        if db is not None:
+            return dx, dW, db
+        else:
+            return dx, dW
+
+
+def conv2d(handle, x, W, b=None, odd_padding=(0, 0, 0, 0)):
+    """
+    Conv 2d operator
+    Args:
+        handle (object): ConvHandle for cpu or CudnnConvHandle for gpu
+        x (Tensor): input
+        W (Tensor): weight
+        b (Tensor): bias
+        odd_padding (tuple of four ints): the odd padding is the value
+            that cannot be handled by the tuple padding (w, h) mode, so
+            we need to first pad the input, then use the normal padding
+            method.
+    """
+    if b is None:
+        return _Conv2d(handle, odd_padding)(x, W)[0]
+    else:
+        return _Conv2d(handle, odd_padding)(x, W, b)[0]
+
+
+class _BatchNorm2d(Operator):
+    """
+    Carries out batch normalization as described in the paper
+    https://arxiv.org/abs/1502.03167.
+    """
+
+    def __init__(self, handle, running_mean, running_var, name=None):
+        """
+        Args:
+            handle (object): BatchNormHandle for cpu and CudnnBatchNormHandle
+                for gpu
+            running_mean (Tensor): the running mean
+            running_var (Tensor): the running variance
+            name (string): the name assigned to this operator
+        """
+        super(_BatchNorm2d, self).__init__(name)
+        self.handle = handle
+        self.running_mean = running_mean.data
+        self.running_var = running_var.data
+
+    def forward(self, x, scale, bias):
+        """
+        Args:
+            x (CTensor): the input tensor
+            scale (CTensor): the scale tensor
+            bias (CTensor): the bias tensor
+        Returns:
+            the result CTensor
+        """
+        if training:
+            if (type(self.handle) == singa.BatchNormHandle):
+                y, mean, var = singa.CpuBatchNormForwardTraining(
+                    self.handle, x, scale, bias, self.running_mean,
+                    self.running_var)
+
+                self.cache = (x, scale, mean, var, y, bias)
+            else:
+                y, mean, var = singa.GpuBatchNormForwardTraining(
+                    self.handle, x, scale, bias, self.running_mean,
+                    self.running_var)
+
+                self.cache = (x, scale, mean, var)
+
+        else:
+
+            if (type(self.handle) == singa.BatchNormHandle):
+                y = singa.CpuBatchNormForwardInference(
+                    self.handle,
+                    x,
+                    scale,
+                    bias,
+                    self.running_mean,
+                    self.running_var,
+                )
+            else:
+                y = singa.GpuBatchNormForwardInference(
+                    self.handle,
+                    x,
+                    scale,
+                    bias,
+                    self.running_mean,
+                    self.running_var,
+                )
+        return y
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss
+        Returns:
+            dx (CTensor): data for the dL / dx, L is the loss
+            ds (CTensor): data for the dL / ds, L is the loss
+            db (CTensor): data for the dL / db, L is the loss
+        """
+        assert training is True and hasattr(
+            self, "cache"), "Please set training to True before doing BP."
+
+        if (type(self.handle) == singa.BatchNormHandle):
+            x, scale, mean, var, y, bias = self.cache
+            dx, ds, db = singa.CpuBatchNormBackwardx(self.handle, y, dy, x,
+                                                     scale, bias, mean, var)
+        else:
+            x, scale, mean, var = self.cache
+            dx, ds, db = singa.GpuBatchNormBackward(self.handle, dy, x, scale,
+                                                    mean, var)
+
+        return dx, ds, db
+
+
+def batchnorm_2d(handle, x, scale, bias, running_mean, running_var):
+    """
+    Carries out batch normalization as described in the paper
+    https://arxiv.org/abs/1502.03167.
+    Args:
+        handle (object): BatchNormHandle for cpu and CudnnBatchNormHandle
+            for gpu
+        x (Tensor): the input tensor
+        scale (Tensor): the scale tensor
+        bias (Tensor): the bias tensor
+        running_mean (Tensor): the running mean
+        running_var (Tensor): the running variance
+    Returns:
+        the result Tensor
+    """
+    return _BatchNorm2d(handle, running_mean, running_var)(x, scale, bias)[0]
+
+
+class _Pooling2d(Operator):
+    """
+    Init a pool 2d operator
+    """
+
+    def __init__(self, handle, odd_padding=(0, 0, 0, 0)):
+        """
+        Args:
+            handle (object): PoolingHandle for cpu or CudnnPoolingHandle for
+                gpu
+            odd_padding (tuple of four int): the odd padding is the value
+                that cannot be handled by the tuple padding (w, h) mode, so
+                the input needs to be padded first, then the normal
+                padding method is used.
+        """
+        super(_Pooling2d, self).__init__()
+        self.handle = handle
+        self.odd_padding = odd_padding
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): the input tensor
+        Returns:
+            the result CTensor
+        """
+        assert x.nDim() == 4, "The input should be 4D."
+        if self.odd_padding != (0, 0, 0, 0):
+            x = utils.handle_odd_pad_fwd(x, self.odd_padding, True)
+
+        if (type(self.handle) != singa.PoolingHandle):
+            y = singa.GpuPoolingForward(self.handle, x)
+        else:
+            y = singa.CpuPoolingForward(self.handle, x)
+        if training:
+            self.cache = (x, y)
+        return y
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the dL / dy, L is the loss
+        Returns:
+            dx (CTensor): data for the dL / dx, L is the loss,
+        """
+        if (type(self.handle) != singa.PoolingHandle):
+            dx = singa.GpuPoolingBackward(self.handle, dy, self.cache[0],
+                                          self.cache[1])
+        else:
+            dx = singa.CpuPoolingBackward(self.handle, dy, self.cache[0],
+                                          self.cache[1])
+        if self.odd_padding != (0, 0, 0, 0):
+            dx = utils.handle_odd_pad_bwd(dx, self.odd_padding)
+
+        return dx
+
+
+def pooling_2d(handle, x, odd_padding=(0, 0, 0, 0)):
+    """
+    Pooling 2d operator
+    Args:
+        handle (object): PoolingHandle for cpu or CudnnPoolingHandle for
+            gpu
+        x (Tensor): input
+        odd_padding (tuple of four int): the odd padding is the value
+            that cannot be handled by the tuple padding (w, h) mode, so
+            the input needs to be padded first, then the normal
+            padding method is used.
+    Returns:
+        the result Tensor
+    """
+    return _Pooling2d(handle, odd_padding)(x)[0]
+
+
+class Tanh(Operator):
+    """
+    Calculates the hyperbolic tangent of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Tanh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        out = singa.Tanh(x)
+        if training:
+            self.cache = (out,)
+        return out
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.__mul__(self.cache[0], self.cache[0])
+        dx = singa.MultFloat(dx, -1.0)
+        dx = singa.AddFloat(dx, 1.0)
+        dx *= dy
+        return dx
+
+
+def tanh(x):
+    """
+    Calculates the hyperbolic tangent of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Tanh()(x)[0]
+
+
+class Cos(Operator):
+    """
+    Calculates the cosine of the given input tensor, element-wise.
+    """
+
+    def __init__(self):
+        super(Cos, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Cos(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Sin(self.input)
+        dx = singa.MultFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def cos(x):
+    """
+    Calculates the cosine of the given input tensor, element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+
+    return Cos()(x)[0]
+
+
+class Cosh(Operator):
+    """
+    Calculates the hyperbolic cosine of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Cosh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Cosh(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Sinh(self.input)
+        dx *= dy
+        return dx
+
+
+def cosh(x):
+    """
+    Calculates the hyperbolic cosine of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Cosh()(x)[0]
+
+
+class Acos(Operator):
+    """
+    Calculates the arccosine (inverse of cosine) of the given input tensor,
+    element-wise.
+    """
+
+    def __init__(self):
+        super(Acos, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Acos(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Square(self.input)
+        dx = singa.MultFloat(dx, -1.0)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.PowFloat(dx, -0.5)
+        dx = singa.MultFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def acos(x):
+    """
+    Calculates the arccosine (inverse of cosine) of the given input tensor,
+    element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Acos()(x)[0]
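+
+
+# Illustrative sketch, not part of the SINGA API: Acos.backward above chains
+# Square, MultFloat, AddFloat and PowFloat into d(acos x)/dx =
+# -(1 - x^2)^(-1/2). The same derivative as a numpy reference:
+def _acos_grad_reference(x, dy):
+    """Reference gradient of arccos (hypothetical helper for verification)."""
+    return -dy / np.sqrt(1.0 - x * x)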
+
+
+class Acosh(Operator):
+    """
+    Calculates the hyperbolic arccosine of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Acosh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Acosh(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.SubFloat(self.input, 1.0)
+        dx = singa.Sqrt(dx)
+        temp = singa.AddFloat(self.input, 1.0)
+        temp = singa.Sqrt(temp)
+        dx = singa.__mul__(dx, temp)
+        dx = singa.PowFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def acosh(x):
+    """
+    Calculates the hyperbolic arccosine of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Acosh()(x)[0]
+
+
+class Sin(Operator):
+    """
+    Calculates the sine of the given input tensor, element-wise.
+    """
+
+    def __init__(self):
+        super(Sin, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Sin(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Cos(self.input)
+        dx *= dy
+        return dx
+
+
+def sin(x):
+    """
+    Calculates the sine of the given input tensor, element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Sin()(x)[0]
+
+
+class Sinh(Operator):
+    """
+    Calculates the hyperbolic sine of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Sinh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Sinh(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Cosh(self.input)
+        dx *= dy
+        return dx
+
+
+def sinh(x):
+    """
+    Calculates the hyperbolic sine of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Sinh()(x)[0]
+
+
+class Asin(Operator):
+    """
+    Calculates the arcsine (inverse of sine) of the given input tensor, element-wise.
+    """
+
+    def __init__(self):
+        super(Asin, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Asin(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Square(self.input)
+        dx = singa.MultFloat(dx, -1.0)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.PowFloat(dx, -0.5)
+        dx *= dy
+        return dx
+
+
+def asin(x):
+    """
+    Calculates the arcsine (inverse of sine) of the given input tensor, element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+
+    return Asin()(x)[0]
+
+
+class Asinh(Operator):
+    """
+    Calculates the hyperbolic arcsine of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Asinh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Asinh(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Square(self.input)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.PowFloat(dx, -0.5)
+        dx *= dy
+        return dx
+
+
+def asinh(x):
+    """
+    Calculates the hyperbolic arcsine of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Asinh()(x)[0]
+
+
+class Tan(Operator):
+    """
+    Calculates the tangent of the given input tensor, element-wise.
+    """
+
+    def __init__(self):
+        super(Tan, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Tan(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Cos(self.input)
+        dx = singa.Square(dx)
+        dx = singa.PowFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def tan(x):
+    """
+    Calculates the tangent of the given input tensor, element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Tan()(x)[0]
+
+
+class Atan(Operator):
+    """
+    Calculates the arctangent (inverse of tangent) of the given input tensor, element-wise.
+    """
+
+    def __init__(self):
+        super(Atan, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Atan(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Square(self.input)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.PowFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def atan(x):
+    """
+    Calculates the arctangent (inverse of tangent) of the given input tensor, element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Atan()(x)[0]
+
+
+class Atanh(Operator):
+    """
+    Calculates the hyperbolic arctangent of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Atanh, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        return singa.Atanh(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Square(self.input)
+        dx = singa.MultFloat(dx, -1.0)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.PowFloat(dx, -1.0)
+        dx *= dy
+        return dx
+
+
+def atanh(x):
+    """
+    Calculates the hyperbolic arctangent of the given input tensor element-wise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Atanh()(x)[0]
+
+
+class Sigmoid(Operator):
+    """
+    `y = 1 / (1 + exp(-x))`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Sigmoid, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        out = singa.Sigmoid(x)
+        if training:
+            self.cache = (out,)
+        return out
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.MultFloat(self.cache[0], -1.0)
+        dx = singa.AddFloat(dx, 1.0)
+        dx = singa.__mul__(self.cache[0], dx)
+        dx *= dy
+        return dx
+
+
+def sigmoid(x):
+    """
+    `y = 1 / (1 + exp(-x))`, is applied to the tensor elementwise.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Sigmoid()(x)[0]
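+
+
+# Illustrative sketch, not part of the SINGA API: Sigmoid.backward above
+# computes dy * y * (1 - y) from the cached forward output y. numpy
+# reference of the same rule:
+def _sigmoid_grad_reference(x, dy):
+    """Reference gradient of sigmoid (hypothetical helper for verification)."""
+    y = 1.0 / (1.0 + np.exp(-x))
+    return dy * y * (1.0 - y)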
+
+
+class Mul(Operator):
+    """
+    Performs element-wise binary multiplication (with Numpy-style broadcasting
+    support).
+    """
+
+    def __init__(self):
+        super(Mul, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `np.multiply(a,b)`, where a and b are CTensor.
+        """
+        # TODO: mul is not supported for int tensors, so cast them to
+        # float32 first and cast the result back
+        _a, _b = a, b
+        dtype0 = _a.data_type()
+        dtype1 = _b.data_type()
+        if dtype0 == singa.kInt or dtype1 == singa.kInt:
+            _a = a.AsType(singa.kFloat32)
+            _b = b.AsType(singa.kFloat32)
+            res = singa.__mul__(_a, _b)
+            res = res.AsType(singa.kInt)
+        else:
+            res = singa.__mul__(_a, _b)
+        if training:
+            self.input = (_a, _b)
+            self.shape0 = list(_a.shape())
+            self.shape1 = list(_b.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a tuple for (da, db), da is data for dL / da, db is data
+                for dL / db.
+        """
+        dx0 = singa.__mul__(dy, self.input[1])
+        dx1 = singa.__mul__(dy, self.input[0])
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx0, dx1
+        # handle broadcast
+        dx0 = back_broadcast(self.shape3, self.shape0, dx0)
+        dx1 = back_broadcast(self.shape3, self.shape1, dx1)
+        return dx0, dx1
+
+
+def mul(x, y):
+    """
+    Return `np.multiply(x,y)`, where x and y are Tensor.
+    """
+    return Mul()(x, y)[0]
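+
+
+# Illustrative sketch, not part of the SINGA API: when the operands were
+# broadcast, Mul.backward reduces each gradient back to its input shape via
+# back_broadcast. A numpy sketch of that reduction (assumed semantics):
+def _broadcast_grad_reference(dy, in_shape):
+    """Sum dy over the axes introduced or stretched by broadcasting."""
+    # drop leading axes that broadcasting prepended
+    while dy.ndim > len(in_shape):
+        dy = dy.sum(axis=0)
+    # collapse axes that were stretched from size 1
+    for i, s in enumerate(in_shape):
+        if s == 1 and dy.shape[i] != 1:
+            dy = dy.sum(axis=i, keepdims=True)
+    return dy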
+
+
+class Unsqueeze(Operator):
+    """
+    Insert single-dimensional entries to the shape of an input tensor (data).
+    """
+
+    def __init__(self, axis):
+        """
+        Args:
+            axis (int or list of int): the dimension(s) to be inserted.
+        """
+        super(Unsqueeze, self).__init__()
+        if type(axis) is int:
+            self.axis = [axis]
+        else:
+            self.axis = axis
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        self.cache = x.shape()
+        cur = list(self.cache)
+        # TODO: optimize once scalar tensors are supported
+        if len(self.cache) == 1 and self.axis == [0]:
+            return x
+        for i in self.axis:
+            cur.insert(i, 1)
+        return singa.Reshape(x, cur)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        return singa.Reshape(dy, self.cache)
+
+
+def unsqueeze(x, axis=-1):
+    """
+    Insert single-dimensional entries to the shape of an input tensor (data).
+    Args:
+        x (Tensor): Input tensor
+        axis (int or list of int): the dimension(s) to be inserted.
+    Returns:
+        Tensor, the output
+    """
+    return Unsqueeze(axis)(x)[0]
+
+
+class Transpose(Operator):
+    """
+    Transpose the input tensor similar to numpy.transpose.
+    """
+
+    def __init__(self, perm):
+        """
+        Args:
+            perm (list of ints): the permutation of the axes; the i-th axis
+                of the output corresponds to axis perm[i] of the input.
+        """
+        super(Transpose, self).__init__()
+        self.perm = list(perm)
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        return singa.Transpose(x, self.perm)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        cur = []
+        for i in range(len(self.perm)):
+            cur += [self.perm.index(i)]
+        return singa.Transpose(dy, cur)
+
+
+def transpose(x, shape):
+    """
+    Transpose the input tensor similar to numpy.transpose.
+    Args:
+        x (Tensor): Input tensor
+        shape (list of ints): the permutation of the axes; the i-th axis
+            of the output corresponds to axis shape[i] of the input.
+    Returns:
+        Tensor, the output
+    """
+    return Transpose(shape)(x)[0]
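+
+
+# Illustrative sketch, not part of the SINGA API: Transpose.backward builds
+# the inverse permutation with perm.index(i), since transposing dy by the
+# inverse permutation undoes the forward transpose. numpy reference:
+def _transpose_grad_reference(dy, perm):
+    """Apply the inverse permutation of perm to dy (hypothetical helper)."""
+    inv = [perm.index(i) for i in range(len(perm))]
+    return np.transpose(dy, inv)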
+
+
+def add_all(*xs):
+    assert len(xs) > 1, 'add_all needs at least two inputs'
+    y = add(xs[0], xs[1])
+    for x in xs[2:]:
+        y = add(y, x)
+    return y
+
+
+class Abs(Operator):
+    """
+    `y = abs(x)`, is applied to the tensor elementwise.
+    """
+
+    def forward(self, a):
+        """
+        Return `abs(a)`, where a is CTensor.
+        """
+        if training:
+            self.input = a
+        return singa.Abs(a)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Sign(self.input)
+        dx *= dy
+        return dx
+
+
+def abs(a):
+    """
+    Return abs(a), where a is Tensor.
+    """
+    return Abs()(a)[0]
+
+
+class Exp(Operator):
+    """
+    `y = exp(x)`, is applied to the tensor elementwise.
+    """
+
+    def forward(self, a):
+        """
+        Return `exp(a)`, where a is Tensor.
+        """
+        if training:
+            self.input = a
+        return singa.Exp(a)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Exp(self.input)
+        dx *= dy
+        return dx
+
+
+def exp(a):
+    """
+    Return `exp(a)`, where a is Tensor.
+    """
+    return Exp()(a)[0]
+
+
+class LeakyRelu(Operator):
+    """
+    `f(x) = alpha * x` for x < 0, `f(x) = x` for x >= 0, is applied to the tensor elementwise.
+    """
+
+    def __init__(self, a):
+        """
+        Args:
+            a (float): Coefficient of leakage.
+        """
+        super(LeakyRelu, self).__init__()
+        self.a = a
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = x
+        x1 = singa.LTFloat(x, 0.0)
+        x1 = singa.__mul__(x, x1)
+        x1 = singa.MultFloat(x1, self.a)
+        x2 = singa.ReLU(x)
+        x1 = singa.__add__(x1, x2)
+        return x1
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        # TODO(wangwei) check the correctness
+        dx1 = singa.GTFloat(self.input, 0.0)
+        dx2 = singa.LTFloat(self.input, 0.0)
+        dx2 = singa.MultFloat(dx2, self.a)
+        dx = singa.__add__(dx1, dx2)
+        dx *= dy
+        return dx
+
+
+def leakyrelu(x, a=0.01):
+    """
+    `f(x) = alpha * x` for x < 0, `f(x) = x` for x >= 0 is applied to the tensor
+    elementwise.
+    Args:
+        x (Tensor): Input tensor
+        a (float): Coefficient of leakage, default to 0.01.
+    Returns:
+        Tensor, the output
+    """
+    return LeakyRelu(a)(x)[0]
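+
+
+# Illustrative sketch, not part of the SINGA API: LeakyRelu.forward builds
+# its output from a 0/1 mask of the negative entries plus ReLU(x). The same
+# construction in numpy:
+def _leakyrelu_reference(x, a=0.01):
+    """numpy analogue of the mask construction above (hypothetical helper)."""
+    return a * x * (x < 0) + np.maximum(x, 0.0)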
+
+
+class Sign(Operator):
+    """
+    Calculate the sign of the given input tensor element-wise. If input > 0,
+    output 1. if input < 0, output -1. if input == 0, output 0.
+    """
+
+    def __init__(self):
+        super(Sign, self).__init__()
+
+    def forward(self, a):
+        """
+        Args:
+            a (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.input = a
+        return singa.Sign(a)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.MultFloat(dy, 0.0)
+        return dx
+
+
+def sign(a):
+    """
+    Calculate the sign of the given input tensor element-wise. If input > 0,
+    output 1. if input < 0, output -1. if input == 0, output 0.
+    Args:
+        a (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Sign()(a)[0]
+
+
+class Pow(Operator):
+    """
+    `f(x) = a^b`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Pow, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `a^b`, where a and b are CTensor.
+        """
+        res = singa.Pow(a, b)
+        if training:
+            self.input = (a, b)
+            self.shape0 = list(a.shape())
+            self.shape1 = list(b.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a tuple for (da, db), da is data for dL / da, db is data
+                for dL / db.
+        """
+        da1 = singa.__mul__(
+            self.input[1],
+            singa.Pow(self.input[0], singa.SubFloat(self.input[1], 1.0)))
+        dx0 = singa.__mul__(da1, dy)
+        db1 = singa.__mul__(singa.Pow(self.input[0], self.input[1]),
+                            singa.Log(self.input[0]))
+        dx1 = singa.__mul__(db1, dy)
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx0, dx1
+        # handle broadcast
+        dx0 = back_broadcast(self.shape3, self.shape0, dx0)
+        dx1 = back_broadcast(self.shape3, self.shape1, dx1)
+        return dx0, dx1
+
+
+def pow(a, b):
+    """
+    Return `a^b`, where a and b are Tensor.
+    """
+    return Pow()(a, b)[0]
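+
+
+# Illustrative sketch, not part of the SINGA API: before broadcast handling,
+# Pow.backward implements da = b * a^(b-1) * dy and db = a^b * ln(a) * dy.
+# numpy reference of the same formulas:
+def _pow_grad_reference(a, b, dy):
+    """Reference gradients of a^b (hypothetical helper for verification)."""
+    da = b * np.power(a, b - 1.0) * dy
+    db = np.power(a, b) * np.log(a) * dy
+    return da, db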
+
+
+class SoftSign(Operator):
+    """
+    Calculates the softsign `(x/(1+|x|))` of the given input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(SoftSign, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `(x/(1+|x|))`, where x is CTensor.
+        """
+        # y = x / (1 + np.abs(x))
+        if training:
+            self.input = x
+        x1 = singa.AddFloat(singa.Abs(x), 1.0)
+        y = singa.__div__(x, x1)
+
+        return y
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.AddFloat(singa.Abs(self.input), 1.0)
+        dx = singa.PowFloat(singa.Square(dx), -1.0)
+        dx = singa.__mul__(dy, dx)
+        return dx
+
+
+def softsign(x):
+    """
+    Return `(x/(1+|x|))`, where x is Tensor.
+    """
+    return SoftSign()(x)[0]
+
+
+class Sqrt(Operator):
+    """
+    `y = x^0.5`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Sqrt, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `x^0.5`, where x is CTensor.
+        """
+        if training:
+            self.input = x
+        return singa.Sqrt(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.PowFloat(self.input, -0.5)
+        dx = singa.MultFloat(dx, 0.5)
+        dx = singa.__mul__(dy, dx)
+        return dx
+
+
+def sqrt(x):
+    """
+    Return `x^0.5`, where x is Tensor.
+    """
+    return Sqrt()(x)[0]
+
+
+class SoftPlus(Operator):
+    """
+    `y = ln(exp(x) + 1)` is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(SoftPlus, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `ln(exp(x) + 1)`, where x is CTensor.
+        """
+        # f(x) = ln(exp(x) + 1)
+        if training:
+            self.input = x
+        x1 = singa.AddFloat(singa.Exp(x), 1.0)
+        y = singa.Log(x1)
+        return y
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.Exp(singa.MultFloat(self.input, -1.0))
+        dx = singa.PowFloat(singa.AddFloat(dx, 1.0), -1.0)
+        dx = singa.__mul__(dy, dx)
+        return dx
+
+
+def softplus(x):
+    """
+    Return `ln(exp(x) + 1)`, where x is Tensor.
+    """
+    return SoftPlus()(x)[0]
+
+
+class Sub(Operator):
+    """
+    Performs element-wise binary subtraction (with Numpy-style broadcasting
+    support).
+    """
+
+    def __init__(self):
+        super(Sub, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `a-b`, where a and b are CTensor.
+        """
+        ori_type = None
+        if a.data_type() != singa.kFloat32:
+            ori_type = a.data_type()
+            a = a.AsType(singa.kFloat32)
+            b = b.AsType(singa.kFloat32)
+        res = singa.__sub__(a, b)
+        if ori_type is not None:
+            res = res.AsType(ori_type)
+        if training:
+            self.shape0 = list(a.shape())
+            self.shape1 = list(b.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a tuple for (da, db), da is data for dL / da, db is data
+                for dL / db.
+        """
+        dx0 = dy
+        dx1 = singa.MultFloat(dy, -1.0)
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx0, dx1
+        # handle broadcast
+        dx0 = back_broadcast(self.shape3, self.shape0, dx0)
+        dx1 = back_broadcast(self.shape3, self.shape1, dx1)
+        return dx0, dx1
+
+
+def sub(a, b):
+    """
+    Return a-b, where a and b are Tensor.
+    """
+    return Sub()(a, b)[0]
+
+
+# optimize min to support multi inputs
+class Min(Operator):
+    """
+    Element-wise min of each of the input tensors (with Numpy-style
+    broadcasting support).
+    """
+
+    def __init__(self):
+        super(Min, self).__init__()
+        self.masks = []
+
+    def _min(self, a, b):
+        """
+        Args:
+            a (CTensor): First operand
+            b (CTensor): Second operand
+        Returns:
+            CTensor, the output
+            tuple of CTensor, mask tensor
+        """
+        m = singa.__sub__(a, b)
+        mask0 = singa.LEFloat(m, 0)
+        mask1 = singa.GTFloat(m, 0)
+        res = singa.__add__(singa.__mul__(mask0, a), singa.__mul__(mask1, b))
+        return res, (mask0, mask1)
+
+    def forward(self, *x):
+        """
+        Args:
+            *x (a list of CTensor): List of tensors for min.
+        Returns:
+            CTensor, the output
+        """
+        assert (len(x) > 0)
+        self.l = len(x)
+        if len(x) == 1:
+            res, masks = self._min(x[0], x[0])
+            self.masks.append(masks)
+            return x[0]
+        res, masks = self._min(x[0], x[1])
+        self.masks.append(masks)
+        for i in range(2, len(x)):
+            res, masks = self._min(res, x[i])
+            self.masks.append(masks)
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a tuple for (*dx), dx is data for dL / dx.
+        """
+        if self.l == 1:
+            # the single input is always the minimum; pass dy through
+            return singa.__mul__(self.masks[0][0], dy)
+        else:
+            ret = []
+            cumulation = None
+            for mask0, mask1 in self.masks[::-1]:
+                if not cumulation:
+                    ret.insert(0, mask1)
+                    cumulation = mask0
+                else:
+                    ret.insert(0, singa.__mul__(cumulation, mask1))
+                    cumulation = singa.__mul__(cumulation, mask0)
+            ret.insert(0, cumulation)
+            # route dy through the accumulated 0/1 masks
+            return tuple(singa.__mul__(m, dy) for m in ret)
+
+
+def min(*l):
+    """
+    Element-wise min of each of the input tensors (with Numpy-style
+    broadcasting support).
+    Args:
+        *x (a list of Tensor): List of tensors for min.
+    Returns:
+        Tensor, the output
+    """
+    return Min()(*l)[0]
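+
+
+# Illustrative sketch, not part of the SINGA API: Min.backward routes the
+# gradient through the pairwise masks saved in forward; multiplying the
+# mask0's together marks where an earlier input remained the running
+# minimum, so that dx_i = mask_i * dy. A dense numpy analogue, assuming at
+# least two inputs:
+def _min_masks_reference(xs):
+    """Return one 0/1 mask per input marking where it is the running min."""
+    res, masks = xs[0], []
+    for x in xs[1:]:
+        m0 = res <= x
+        masks.append((m0, ~m0))
+        res = np.where(m0, res, x)
+    ret, cum = [], None
+    for m0, m1 in reversed(masks):
+        ret.insert(0, m1 if cum is None else cum & m1)
+        cum = m0 if cum is None else cum & m0
+    ret.insert(0, cum)
+    return [m.astype(np.float32) for m in ret]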
+
+
+class Log(Operator):
+    """
+    `y = log(x)`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Log, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `log(x)`, where x is CTensor.
+        """
+        if training:
+            self.input = x
+        return singa.Log(x)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        dx = singa.PowFloat(self.input, -1)
+        dx = singa.__mul__(dy, dx)
+        return dx
+
+
+def log(x):
+    """
+    Return log(x), where x is Tensor.
+    """
+    return Log()(x)[0]
+
+
+class HardSigmoid(Operator):
+    """
+    `y = max(0, min(1, alpha * x + beta))`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self, alpha=0.2, gamma=0.5):
+        """
+        Args:
+            alpha (float): Value of alpha.
+            gamma (float): Value of beta in the formula above.
+        """
+        super(HardSigmoid, self).__init__()
+        self.alpha = alpha
+        self.gamma = gamma
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): matrix
+        Returns:
+            a CTensor for the result
+        """
+        x = singa.AddFloat(singa.MultFloat(x, self.alpha), self.gamma)
+        if training:
+            self.cache = x
+
+        x = singa.ReLU(x)
+        mask1 = singa.LTFloat(x, 1.0)
+        mask2 = singa.GEFloat(x, 1.0)
+
+        ans = singa.__add__(singa.__mul__(x, mask1), mask2)
+        return singa.ReLU(ans)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        mask0 = singa.GTFloat(self.cache, 0.0)
+        mask1 = singa.LTFloat(self.cache, 1.0)
+        mask = singa.__mul__(mask0, mask1)
+        return singa.__mul__(singa.MultFloat(mask, self.alpha), dy)
+
+
+def hardsigmoid(x, alpha=0.2, gamma=0.5):
+    """
+    `y = max(0, min(1, alpha * x + beta))`, is applied to the tensor elementwise.
+    Args:
+        x (Tensor): matrix
+        alpha (float): Value of alpha.
+        gamma (float): Value of beta in the formula above.
+    Returns:
+        a Tensor for the result
+    """
+    return HardSigmoid(alpha, gamma)(x)[0]
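+
+
+# Illustrative sketch, not part of the SINGA API: the mask arithmetic in
+# HardSigmoid.forward is equivalent to clipping alpha * x + gamma into
+# [0, 1]:
+def _hardsigmoid_reference(x, alpha=0.2, gamma=0.5):
+    """numpy equivalent of HardSigmoid.forward (hypothetical helper)."""
+    return np.clip(alpha * x + gamma, 0.0, 1.0)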
+
+
+class Squeeze(Operator):
+    """
+    Remove single-dimensional entries from the shape of a tensor. Takes a
+    parameter axes with a list of axes to squeeze. If axes is not provided,
+    all the single dimensions will be removed from the shape. If an axis is
+    selected with shape entry not equal to one, an error is raised.
+    """
+
+    def __init__(self, axis=[]):
+        """
+        Args:
+            axis (list of ints): List of integers indicating the dimensions
+                to squeeze. Negative value means counting dimensions from
+                the back. Accepted range is [-r, r-1] where r = rank(data).
+        """
+        super(Squeeze, self).__init__()
+        self.axis = axis
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        self.cache = x.shape()
+        newshape = []
+        if (self.axis == []):
+            newshape = list(filter(lambda i: i != 1, self.cache))
+        else:
+            for id, i in enumerate(self.axis):
+                assert i < len(self.cache)
+                self.axis[id] = i % len(self.cache)
+                assert self.cache[
+                    i] == 1, "the length of axis {} is {}, which should be 1".format(
+                        i, self.cache[i])
+            for ind, v in enumerate(self.cache):
+                if ind not in self.axis:
+                    newshape.append(v)
+        # TODO: optimize once scalar tensors are supported
+        if newshape == []:
+            return x
+        return singa.Reshape(x, newshape)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        return singa.Reshape(dy, self.cache)
+
+
+def squeeze(x, axis=[]):
+    """
+    Remove single-dimensional entries from the shape of a tensor. Takes a
+    parameter axes with a list of axes to squeeze. If axes is not provided,
+    all the single dimensions will be removed from the shape. If an axis is
+    selected with shape entry not equal to one, an error is raised.
+    Args:
+        x (Tensor): Input tensor
+        axis (list of ints): List of integers indicating the dimensions
+            to squeeze. Negative value means counting dimensions from
+            the back. Accepted range is [-r, r-1] where r = rank(data).
+    Returns:
+        Tensor, the output
+    """
+    return Squeeze(axis)(x)[0]
+
+
+class Div(Operator):
+    """
+    Performs element-wise binary division (with Numpy-style broadcasting support).
+    """
+
+    def __init__(self):
+        super(Div, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `np.divide(a,b)`, where a and b are CTensor.
+        """
+        ori_type = None
+        if a.data_type() != singa.kFloat32:
+            ori_type = a.data_type()
+            a = a.AsType(singa.kFloat32)
+            b = b.AsType(singa.kFloat32)
+        res = singa.__mul__(a, singa.PowFloat(b, -1.0))
+        # res = singa.__div__(a, b)
+        if ori_type is not None:
+            res = res.AsType(ori_type)
+        if training:
+            # cache -a and 1/b for the backward pass
+            self.input = (singa.MultFloat(a, -1.0), singa.PowFloat(b, -1.0))
+            self.shape0 = list(a.shape())
+            self.shape1 = list(b.shape())
+            self.shape3 = list(res.shape())
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a CTensor tuple for (da, db), da is data for dL / da, db is data
+                for dL / db.
+        """
+        # dy/dx_0 = b^(-1)
+        # dy/dx_1 = (-a) * b^(-2)
+        dx0 = singa.__mul__(dy, self.input[1])
+        dx1 = singa.__mul__(self.input[0], singa.PowFloat(self.input[1], 2.0))
+        dx1 = singa.__mul__(dy, dx1)
+        if (type(dy) == float) or self.shape0 == self.shape1:
+            assert self.shape0 == self.shape1, ('should have same shape')
+            return dx0, dx1
+        # handle broadcast
+        dx0 = back_broadcast(self.shape3, self.shape0, dx0)
+        dx1 = back_broadcast(self.shape3, self.shape1, dx1)
+        return dx0, dx1
+
+
+def div(a, b):
+    """
+    Return `np.divide(a,b)`, where a and b are Tensor.
+    """
+    return Div()(a, b)[0]
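+
+
+# Illustrative sketch, not part of the SINGA API: Div caches (-a, 1/b) so
+# that backward can form d(a/b)/da = 1/b and d(a/b)/db = -a / b^2. numpy
+# reference of those gradients (before broadcast handling):
+def _div_grad_reference(a, b, dy):
+    """Reference gradients of a/b (hypothetical helper for verification)."""
+    return dy / b, dy * (-a) / (b * b)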
+
+
+class Shape(Operator):
+    """
+    Takes a tensor as input and outputs a tensor containing the shape of the
+    input tensor.
+    """
+
+    def __init__(self):
+        super(Shape, self).__init__()
+
+    def forward(self, x):
+        """
+        Args:
+            x (CTensor): Input tensor
+        Returns:
+            CTensor, the output
+        """
+        cur = list(x.shape())
+        cur = tensor.from_numpy(np.array(cur))
+        cur.to_device(x.device())
+        return cur.data
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            list of int, the shape of dy
+        """
+        return list(dy.shape())
+
+
+def shape(x):
+    """
+    Takes a tensor as input and outputs a tensor containing the shape of the
+    input tensor.
+    Args:
+        x (Tensor): Input tensor
+    Returns:
+        Tensor, the output
+    """
+    return Shape()(x)[0]
+
+
+# optimize max to support multi inputs
+class Max(Operator):
+    """
+    Element-wise max of each of the input tensors (with Numpy-style
+    broadcasting support).
+    """
+
+    def __init__(self):
+        super(Max, self).__init__()
+        self.masks = []
+
+    def _max(self, a, b):
+        """
+        Args:
+            a (CTensor): First operand
+            b (CTensor): Second operand
+        Returns:
+            CTensor, the output
+            tuple of CTensor, mask tensor
+        """
+        m = singa.__sub__(a, b)
+        mask0 = singa.GEFloat(m, 0)
+        mask1 = singa.LTFloat(m, 0)
+        res = singa.__add__(singa.__mul__(mask0, a), singa.__mul__(mask1, b))
+        return res, (mask0, mask1)
+
+    def forward(self, *x):
+        """
+        Args:
+            *x (a list of CTensor): List of tensors for max.
+        Returns:
+            CTensor, the output
+        """
+        assert (len(x) > 0)
+        self.l = len(x)
+        if len(x) == 1:
+            res, masks = self._max(x[0], x[0])
+            self.masks.append(masks)
+            return x[0]
+        res, masks = self._max(x[0], x[1])
+        self.masks.append(masks)
+        for i in range(2, len(x)):
+            res, masks = self._max(res, x[i])
+            self.masks.append(masks)
+        return res
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            a tuple for (*dx), dx is data for dL / dx.
+        """
+        if self.l == 1:
+            # the single input is always the maximum; pass dy through
+            return singa.__mul__(self.masks[0][0], dy)
+        else:
+            ret = []
+            cumulation = None
+            for mask0, mask1 in self.masks[::-1]:
+                if not cumulation:
+                    ret.insert(0, mask1)
+                    cumulation = mask0
+                else:
+                    ret.insert(0, singa.__mul__(cumulation, mask1))
+                    cumulation = singa.__mul__(cumulation, mask0)
+            ret.insert(0, cumulation)
+            # route dy through the accumulated 0/1 masks
+            return tuple(singa.__mul__(m, dy) for m in ret)
+
+
+def max(*l):
+    """
+    Element-wise max of each of the input tensors (with Numpy-style broadcasting support).
+    Args:
+        *x (a list of Tensor): List of tensors for max.
+    Returns:
+        Tensor, the output
+    """
+    return Max()(*l)[0]
+
+
+class And(Operator):
+    """
+    Returns the tensor resulting from performing the `and` logical operation
+    elementwise on the input tensors A and B (with Numpy-style broadcasting
+    support).
+    """
+
+    def __init__(self):
+        super(And, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `np.logical_and(a,b)`, where a and b are CTensor.
+        """
+        m = singa.__mul__(a, b)
+        cur = singa.PowFloat(singa.Sign(m), 2)
+
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def _and(a, b):
+    """
+    Return `np.logical_and(a,b)`, where a and b are Tensor.
+    """
+    return And()(a, b)[0]
+
+
+class Or(Operator):
+    """
+    Returns the tensor resulting from performing the `or` logical operation
+    elementwise on the input tensors A and B (with Numpy-style broadcasting
+    support).
+    """
+
+    def __init__(self):
+        super(Or, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `np.logical_or(a,b)`, where a and b are CTensor.
+        """
+        m = singa.__add__(singa.PowFloat(singa.Sign(a), 2.0),
+                          singa.PowFloat(singa.Sign(b), 2.0))
+        cur = singa.Sign(m)
+
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): data for the `dL / dy`, L is the loss.
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def _or(a, b):
+    """
+    Return `np.logical_or(a,b)`, where a and b are Tensor.
+    """
+    return Or()(a, b)[0]
+
+
+class Not(Operator):
+    """
+    Returns the negation of the input tensor element-wise.
+    """
+
+    def __init__(self):
+        super(Not, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `np.logical_not(x)`, where x is CTensor.
+        """
+        mask0 = singa.GEFloat(x, 0)
+        mask1 = singa.LEFloat(x, 0)
+        cur = singa.__mul__(mask0, mask1)
+
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def _not(x):
+    """
+    Return `np.logical_not(x)`, where x is Tensor.
+    """
+    return Not()(x)[0]
+
+
+class Xor(Operator):
+    """
+    Performs the `xor` logical operation elementwise on the input tensors A
+    and B (with Numpy-style broadcasting support).
+    """
+
+    def __init__(self):
+        super(Xor, self).__init__()
+
+    def forward(self, a, b):
+        """
+        Return `np.logical_xor(a,b)`, where a and b are CTensor.
+        """
+        m = singa.__sub__(singa.PowFloat(singa.Sign(a), 2.0),
+                          singa.PowFloat(singa.Sign(b), 2.0))
+        cur = singa.PowFloat(singa.Sign(m), 2.0)
+
+        return cur
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def _xor(a, b):
+    """
+    Return `np.logical_xor(a,b)`, where a and b are Tensor.
+    """
+    return Xor()(a, b)[0]
+
+
+class Negative(Operator):
+    """
+    `y = -x`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Negative, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `-x`, where x is CTensor.
+        """
+        #y=-x
+        return singa.MultFloat(x, -1)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        return singa.MultFloat(dy, -1)
+
+
+def negative(x):
+    """
+    Return `-x`, where x is Tensor.
+    """
+    return Negative()(x)[0]
+
+
+class Reciprocal(Operator):
+    """
+    `y = 1/x`, is applied to the tensor elementwise.
+    """
+
+    def __init__(self):
+        super(Reciprocal, self).__init__()
+
+    def forward(self, x):
+        """
+        Return `1/x`, where x is CTensor.
+        """
+        #y=1/x elementwise
+        if training:
+            self.input = x
+
+        return singa.PowFloat(x, -1)
+
+    def backward(self, dy):
+        """
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        #dy/dx = -1/x**2
+        dx = singa.MultFloat(singa.PowFloat(self.input, -2), -1)
+        return singa.__mul__(dy, dx)
+
+
+def reciprocal(x):
+    """
+    Return 1/x, where x is Tensor.
+    """
+    return Reciprocal()(x)[0]
+
+
+class Gemm(Operator):
+    """
+    Init a General Matrix multiplication(Gemm) operator. Compute `Y = alpha *
+    A' * B' + beta * C`, where input tensor A has shape (M, K) or (K, M), input
+    tensor B has shape (K, N) or (N, K), input tensor C is broadcastable to
+    shape (M, N), and output tensor Y has shape (M, N).
+    `A' = transpose(A)` if transA else A
+    `B' = transpose(B)` if transB else B
+    """
+
+    def __init__(self, alpha=1.0, beta=1.0, transA=0, transB=0):
+        """
+        Args:
+            alpha (float): Scalar multiplier for the product of input tensors
+                A * B.
+            beta (float): Scalar multiplier for input tensor C.
+            transA (int): Whether A should be transposed
+            transB (int): Whether B should be transposed
+        Returns:
+            CTensor, the output
+        """
+        super(Gemm, self).__init__()
+        self.alpha = alpha
+        self.beta = beta
+        self.transA = transA
+        self.transB = transB
+
+    def forward(self, A, B, C=None):
+        """
+        forward propagation of Gemm
+        Args:
+            A (CTensor): The shape of A should be (M, K) if transA is 0, or
+                (K, M) if transA is non-zero.
+            B (CTensor): The shape of B should be (K, N) if transB is 0, or
+                (N, K) if transB is non-zero.
+            C (CTensor): (optional), Optional input tensor C. If not specified,
+                the computation is done as if C is a scalar 0. The shape of C
+                should be unidirectional broadcastable to (M, N).
+        Returns:
+            tensor, the output
+        """
+        _A = singa.DefaultTranspose(A) if self.transA == 1 else A
+        _B = singa.DefaultTranspose(B) if self.transB == 1 else B
+        if training:
+            self.inputs = (_A, _B, C)
+        tmpM = singa.MultFloat(singa.Mult(_A, _B), self.alpha)
+        if C:
+            tmpM = singa.__add__(tmpM, singa.MultFloat(C, self.beta))
+        return tmpM
+
+    def backward(self, dy):
+        """
+        backward propagation of Gemm
+        Args:
+            dy (CTensor): the gradient tensor from upper operations, with
+                shape (M, N).
+        Returns:
+            CTensor, the gradient over A
+            CTensor, the gradient over B
+            CTensor(optional), the gradient over C
+        """
+        _A, _B, C = self.inputs
+        # y = alpha * A  * B  => da = alpha * dy * BT
+        # y = alpha * A  * BT => da = alpha * dy * B
+        # y = alpha * AT * B  => da = alpha * B * dyT = alpha * (dy * BT)T
+        # y = alpha * AT * BT => da = alpha * BT * dyT = alpha * (dy * B)T
+        da = singa.MultFloat(singa.Mult(dy, singa.DefaultTranspose(_B)),
+                             self.alpha)
+        if self.transA:
+            da = singa.DefaultTranspose(da)
+
+        # y = alpha * A  * B  => db = alpha * AT * dy
+        # y = alpha * AT * B  => db = alpha * A * dy
+        # y = alpha * A  * BT => db = alpha * dyT * A = alpha * (AT * dy)T
+        # y = alpha * AT * BT => db = alpha * dyT * AT = alpha * (A * dy)T
+        db = singa.MultFloat(singa.Mult(singa.DefaultTranspose(_A), dy),
+                             self.alpha)
+        if self.transB:
+            db = singa.DefaultTranspose(db)
+        if C:
+            dc = back_broadcast(dy.shape(), C.shape(),
+                                singa.MultFloat(dy, self.beta))
+            return da, db, dc
+        else:
+            return da, db
+
+
+def gemm(A, B, C=None, alpha=1.0, beta=1.0, transA=0, transB=0):
+    """
+    Init a General Matrix multiplication(Gemm) operator. Compute `Y = alpha *
+    A' * B' + beta * C`, where input tensor A has shape (M, K) or (K, M), input
+    tensor B has shape (K, N) or (N, K), input tensor C is broadcastable to
+    shape (M, N), and output tensor Y has shape (M, N).
+    `A' = transpose(A)` if transA else A
+    `B' = transpose(B)` if transB else B
+    Args:
+        A (Tensor): The shape of A should be (M, K) if transA is 0, or
+            (K, M) if transA is non-zero.
+        B (Tensor): The shape of B should be (K, N) if transB is 0, or
+            (N, K) if transB is non-zero.
+        C (Tensor): (optional), Optional input tensor C. If not specified,
+            the computation is done as if C is a scalar 0. The shape of C
+            should be unidirectional broadcastable to (M, N).
+        alpha (float): Scalar multiplier for the product of input tensors A * B.
+        beta (float): Scalar multiplier for input tensor C.
+        transA (int): Whether A should be transposed
+        transB (int): Whether B should be transposed
+    Returns:
+        Tensor, the output
+    """
+    if C:
+        return Gemm(alpha, beta, transA, transB)(A, B, C)[0]
+    else:
+        return Gemm(alpha, beta, transA, transB)(A, B)[0]
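+
+
+# Illustrative sketch, not part of the SINGA API: for the transA=transB=0
+# case, the comment block in Gemm.backward reduces to dA = alpha * dy @ B.T
+# and dB = alpha * A.T @ dy. numpy reference:
+def _gemm_grad_reference(A, B, dy, alpha=1.0):
+    """Reference gradients of Y = alpha * A @ B (hypothetical helper)."""
+    return alpha * (dy @ B.T), alpha * (A.T @ dy)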
+
+
+class GlobalAveragePool(Operator):
+    """
+    Init a GlobalAveragePool operator
+    """
+
+    def __init__(self, data_format='channels_first'):
+        """
+        Args:
+            data_format (string): A string, we support two formats:
+                channels_last and channels_first, default is channels_first.
+                channels_first means the format of input is (N x C x H x W)
+                channels_last means the format of input is (N x H x W x C)
+        """
+        super(GlobalAveragePool, self).__init__()
+        self.data_format = data_format
+
+    def forward(self, x):
+        """
+        forward propagation of GlobalAveragePool
+        Args:
+            x (CTensor): the input tensor
+        Returns:
+            CTensor, the output
+        """
+        if training:
+            self.mask = singa.Tensor(x.shape(), x.device())
+
+        shape = list(x.shape())
+
+        # (N x C x H x W) for channels_first
+        if self.data_format == 'channels_first':
+            axes = tuple(i for i in range(2, len(shape)))
+            self.shape_divisor = 1 / np.prod(shape[2:])
+        else:  # (N x H x W x C) for channels_last
+            axes = tuple(i for i in range(1, len(shape) - 1))
+            self.shape_divisor = 1 / np.prod(shape[1:-1])
+
+        # output shape
+        # (N x C x 1 x 1) for channels_first
+        # (N x 1 x 1 x C) for channels_last
+        for i in axes:
+            shape[i] = 1
+
+        x = tensor.from_raw_tensor(x)
+        x = tensor.sum(x, axis=axes)
+        x = tensor.reshape(x, shape)
+        return singa.MultFloat(x.data, self.shape_divisor)
+
+    def backward(self, dy):
+        """
+        backward propagation of GlobalAveragePool
+        Args:
+            dy (CTensor): the gradient tensor from upper operations
+        Returns:
+            CTensor, the gradient over input
+        """
+        self.mask.SetFloatValue(self.shape_divisor)
+        return singa.__mul__(self.mask, dy)
+
+
+def globalaveragepool(x, data_format='channels_first'):
+    """
+    GlobalAveragePool operator
+    Args:
+        x (Tensor): the input tensor
+        data_format (string): A string, we support two formats:
+            channels_last and channels_first, default is channels_first.
+            channels_first means the format of input is (N x C x H x W)
+            channels_last means the format of input is (N x H x W x C)
+    Returns:
+        Tensor, the output
+    """
+    return GlobalAveragePool(data_format)(x)[0]
+
+
+class ConstantOfShape(Operator):
+    """
+    Init a ConstantOfShape, generate a tensor with given value and shape.
+    """
+
+    def __init__(self, value=0.):
+        """
+        Args:
+            value (float): (Optional) The value of the output elements. Should
+                be a one-element value. If not specified, it defaults to 0 and
+                datatype float32
+        """
+        super(ConstantOfShape, self).__init__()
+        self.value = value
+
+    def forward(self, x):
+        """
+        forward of ConstantOfShape
+        Args:
+            x: CTensor, 1D tensor. The shape of the expected output tensor.
+                All values must be >= 0.
+        Returns:
+            the output CTensor. If attribute 'value' is specified, the value
+                and datatype of the output tensor is taken from 'value'. If
+                attribute 'value' is not specified, the value in the output
+                defaults to 0, and the datatype defaults to float32.
+        """
+        x_shape = tensor.to_numpy(tensor.from_raw_tensor(x)).astype(
+            np.int64).tolist()
+        assert np.min(x_shape) >= 0, ('shape cannot be negative')
+        x = CTensor(x_shape, x.device())
+        x.SetFloatValue(self.value)
+        return x
+
+    def backward(self, dy):
+        """
+        backward of ConstantOfShape
+        Args:
+            dy (CTensor): gradient tensor.
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def constant_of_shape(x, value=0):
+    """
+    Init a ConstantOfShape, generate a tensor with given value and shape.
+    Args:
+        x: Tensor, 1D tensor. The shape of the expected output tensor.
+            All values must be >= 0.
+        value (float): (Optional) The value of the output elements. Should
+            be a one-element value. If not specified, it defaults to 0 and
+            datatype float32
+    Returns:
+        the output Tensor. If attribute 'value' is specified, the value
+            and datatype of the output tensor is taken from 'value'. If
+            attribute 'value' is not specified, the value in the output
+            defaults to 0, and the datatype defaults to float32.
+    """
+    return ConstantOfShape(value)(x)[0]
+
+
+class Dropout(Operator):
+    """
+    Init a Dropout, which scales the masked input data by the following equation:
+    `output = scale * data * mask`, `scale = 1. / (1. - ratio)`.
+    """
+
+    def __init__(self, seed=0, ratio=0.5):
+        """
+        Args:
+            seed (int): the random seed
+            ratio (float): the ratio of random dropout, with value in [0, 1).
+        """
+        super(Dropout, self).__init__()
+        self.ratio = ratio
+        self.seed = int(seed)
+        self.init_seed = False
+
+    def forward(self, x):
+        """
+        forward of Dropout
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        if not self.init_seed:
+            x.device().SetRandSeed(self.seed)
+            self.init_seed = True
+        if training:
+            self.scale = 1 / (1 - self.ratio)
+            self.mask = singa.Tensor(list(x.shape()), x.device())
+            singa.Bernoulli(1 - self.ratio, self.mask)
+            x = singa.MultFloat(singa.__mul__(self.mask, x), self.scale)
+        return x
+
+    def backward(self, dy):
+        """
+        backward of Dropout
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        if training:
+            dy = singa.MultFloat(singa.__mul__(self.mask, dy), self.scale)
+        return dy
+
+
+def dropout(x, seed=0, ratio=0.5):
+    """
+    Init a Dropout, which scales the masked input data by the following
+    equation: `output = scale * data * mask`, `scale = 1. / (1. - ratio)`.
+    Args:
+        x (Tensor): input tensor.
+        seed (int): the random seed.
+        ratio (float): the ratio of random dropout, with value in [0, 1).
+    Returns:
+        the output Tensor.
+    """
+    return Dropout(seed, ratio)(x)[0]
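+
+
+# Illustrative sketch, not part of the SINGA API: Dropout.forward is the
+# inverted-dropout scheme, zeroing a `ratio` fraction of the entries and
+# scaling the survivors by 1/(1-ratio) so the expected activation is
+# unchanged at training time. numpy analogue:
+def _dropout_reference(x, ratio=0.5, rng=None):
+    """Inverted dropout in numpy (hypothetical helper for illustration)."""
+    if rng is None:
+        rng = np.random.default_rng()
+    mask = (rng.random(x.shape) > ratio).astype(x.dtype)
+    return x * mask / (1.0 - ratio)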
+
+
+class ReduceSum(Operator):
+    """
+    Init a ReduceSum, computes the sum of the input tensor's element along
+    the provided axes.
+    """
+
+    def __init__(self, axes=None, keepdims=1):
+        """
+        Args:
+            axes (list of int): A list of integers, along which to reduce.
+                Accepted range is [-r, r-1] where r = rank(data). The default
+                is None, which reduces over all the dimensions of the input tensor.
+            keepdims (int): Keep the reduced dimensions or not; the default
+                1 means the reduced dimensions are kept.
+        """
+        super(ReduceSum, self).__init__()
+        self.axes = axes
+        self.keepdims = keepdims
+
+    def forward(self, x):
+        """
+        forward of ReduceSum
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        _x = tensor.from_raw_tensor(x)
+        x_shape = list(_x.shape)
+        # handle the special axes
+        if self.axes is None:
+            self.axes = [i for i in range(len(x_shape))]  # axes = None
+        else:
+            self.axes = [i if i >= 0 else len(x_shape) + i for i in self.axes
+                        ]  # axes has negative
+        self.axes.sort(reverse=True)
+        for axis in self.axes:
+            _x = tensor.sum(_x, axis)
+            x_shape[axis] = 1
+        if self.keepdims == 1:
+            _x = tensor.reshape(_x, x_shape)
+        self.cache = (x_shape, x)
+        return _x.data
+
+    def backward(self, dy):
+        """
+        backward of ReduceSum
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        x_shape, x = self.cache
+        dy = singa.Reshape(dy, x_shape)
+        # each input element contributes exactly once to the sum, so the
+        # gradient is dy broadcast to the input shape with weight 1
+        mask = singa.Tensor(list(x.shape()), x.device())
+        mask.SetFloatValue(1.0)
+        dy = singa.__mul__(mask, dy)
+        return dy
+
+
+def reduce_sum(x, axes=None, keepdims=1):
+    """
+    Init a ReduceSum, which computes the sum of the input tensor's elements
+    along the provided axes.
+    Args:
+        x (Tensor): input tensor.
+        axes (list of int): A list of integers, along which to reduce.
+            Accepted range is [-r, r-1] where r = rank(data). The default
+            is None, which reduces over all the dimensions of the input tensor.
+        keepdims (int): Keep the reduced dimension or not; the default 1
+            means keeping the reduced dimension.
+    Returns:
+        the output Tensor.
+    """
+    return ReduceSum(axes, keepdims)(x)[0]
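+
+# A small sketch of the expected semantics (comment only, assuming the
+# numpy round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.arange(6, dtype=np.float32).reshape(2, 3))
+#     y = reduce_sum(x, axes=[1], keepdims=0)
+#     # tensor.to_numpy(y) -> [3., 12.]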
+
+
+class ReduceMean(Operator):
+    """
+    Init a ReduceMean, which computes the mean of the input tensor's elements
+    along the provided axes.
+    """
+
+    def __init__(self, axes=None, keepdims=1):
+        """
+        Args:
+            axes (list of int): A list of integers, along which to reduce.
+                Accepted range is [-r, r-1] where r = rank(data). The default
+                is None, which reduces over all the dimensions of the input tensor.
+            keepdims (int): Keep the reduced dimension or not; the default 1
+                means keeping the reduced dimension.
+        """
+        super(ReduceMean, self).__init__()
+        self.axes = axes
+        self.keepdims = keepdims
+
+    def forward(self, x):
+        """
+        forward of ReduceMean
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        _x = tensor.from_raw_tensor(x)
+        x_shape = list(_x.shape)
+        # handle the special axes
+        if self.axes is None:
+            self.axes = [i for i in range(len(x_shape))]  # axes = None
+        else:
+            self.axes = [i if i >= 0 else len(x_shape) + i for i in self.axes
+                        ]  # axes has negative
+        self.axes.sort(reverse=True)
+        for axis in self.axes:
+            _x = tensor.sum(_x, axis)
+            x_shape[axis] = 1
+        if self.keepdims == 1:
+            _x = tensor.reshape(_x, x_shape)
+        self.cache = (x_shape, x)
+        scale = np.prod(x_shape) / np.prod(x.shape())
+        self.scale = scale
+        _x = singa.MultFloat(_x.data, scale)
+        return _x
+
+    def backward(self, dy):
+        """
+        backward of ReduceMean
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        x_shape, x = self.cache
+        dy = singa.Reshape(dy, x_shape)
+        mask = singa.Tensor(list(x.shape()), x.device())
+        mask.SetFloatValue(1.0)
+        dy = singa.__mul__(mask, dy)
+        dy = singa.MultFloat(dy, self.scale)
+        return dy
+
+
+def reduce_mean(x, axes=None, keepdims=1):
+    """
+    Init a ReduceMean, which computes the mean of the input tensor's elements
+    along the provided axes.
+    Args:
+        x (Tensor): input tensor.
+        axes (list of int): A list of integers, along which to reduce.
+            Accepted range is [-r, r-1] where r = rank(data). The default
+            is None, which reduces over all the dimensions of the input tensor.
+        keepdims (int): Keep the reduced dimension or not; the default 1
+            means keeping the reduced dimension.
+    Returns:
+        the output Tensor.
+    """
+    return ReduceMean(axes, keepdims)(x)[0]
+
+
+class Slice(Operator):
+    """
+    Init a Slice, which produces a slice of the input tensor along multiple axes.
+    Similar to numpy: https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html
+    """
+
+    def __init__(self, starts, ends, axes=None, steps=None):
+        """
+        Args:
+            starts (list of int): starting indices of corresponding axis
+            ends (list of int): ending indices of corresponding axis
+            axes (list of int): axes that `starts` and `ends` apply to.
+                Negative value means counting dimensions from the back.
+                Accepted range is [-r, r-1] where r = rank(data).
+            steps (list of int): slice step of corresponding axis in `axes`.
+                Negative value means slicing backward. 'steps' cannot be 0.
+                Defaults to 1.
+        """
+        super(Slice, self).__init__()
+        self.starts = starts
+        self.ends = ends
+        self.axes = axes
+        self.steps = steps
+
+    def forward(self, x):
+        """
+        forward of Slice
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        x_shape = list(x.shape())
+        # handle the special axes
+        if self.axes is None:
+            self.axes = [i for i in range(len(x_shape))]  # axes = None
+        else:
+            self.axes = [i if i >= 0 else len(x_shape) + i for i in self.axes
+                        ]  # axes has negative
+        self.cache = []
+        # handle the special steps
+        if self.steps is None:
+            self.steps = [1] * len(x_shape)  # steps = None
+        for idx, axis in enumerate(self.axes):
+            axis = int(axis)
+            start, end, step = self.starts[idx], self.ends[idx], self.steps[idx]
+            if end > x_shape[axis]:
+                end = x_shape[axis]
+            self.cache.append((axis, x_shape[axis], start, end, step))
+            xs = []
+            for step_idx in range(x_shape[axis])[start:end:step]:
+                xs.append(singa.SliceOn(x, step_idx, step_idx + 1, axis))
+            assert len(xs) > 0, "Cannot support empty tensor"
+            x = singa.VecTensor(xs)
+            x = singa.ConcatOn(x, axis)
+        return x
+
+    def backward(self, dy):
+        """
+        backward of Slice
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        for axis, shape, start, end, step in self.cache[::-1]:
+            data_idxes = tuple(range(shape)[start:end:step])
+            dys = []
+            data_idx = 0
+            for step_idx in range(shape):
+                if step_idx in data_idxes:
+                    tmp_tensor = singa.SliceOn(dy, data_idx, data_idx + 1, axis)
+                    data_idx += 1
+                else:
+                    tmp_shape = list(dy.shape())
+                    tmp_shape[axis] = 1
+                    tmp_tensor = singa.Tensor(tmp_shape, dy.device())
+                    tmp_tensor.SetFloatValue(0.)
+                dys.append(tmp_tensor)
+            dys = singa.VecTensor(dys)
+            dy = singa.ConcatOn(dys, axis)
+        return dy
+
+
+def slice(x, starts, ends, axes=None, steps=None):
+    """
+    Init a Slice, which produces a slice of the input tensor along multiple axes.
+    Similar to numpy: https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html
+    Args:
+        x (Tensor): input tensor.
+        starts (list of int): starting indices of corresponding axis
+        ends (list of int): ending indices of corresponding axis
+        axes (list of int): axes that `starts` and `ends` apply to.
+            Negative value means counting dimensions from the back.
+            Accepted range is [-r, r-1] where r = rank(data).
+        steps (list of int): slice step of corresponding axis in `axes`.
+            Negative value means slicing backward. 'steps' cannot be 0.
+            Defaults to 1.
+    Returns:
+        the output Tensor.
+    """
+    return Slice(starts, ends, axes, steps)(x)[0]
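+
+# Note that this helper shadows Python's built-in `slice` inside this module.
+# A minimal sketch of its semantics (comment only, assuming the numpy round
+# trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.arange(10, dtype=np.float32).reshape(2, 5))
+#     y = slice(x, starts=[1], ends=[4], axes=[1])  # == x[:, 1:4], shape (2, 3)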
+
+
+class Ceil(Operator):
+    """
+    Ceil takes one input data (Tensor) and produces one output data (Tensor),
+    where the ceil function, `y = ceil(x)`, is applied to the tensor
+    elementwise.
+    """
+
+    def __init__(self):
+        super(Ceil, self).__init__()
+
+    def forward(self, x):
+        """
+        forward of Ceil
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        return singa.Ceil(x)
+
+    def backward(self, dy):
+        """
+        backward of Ceil
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        dy = singa.Tensor(dy.shape(), dy.device())
+        dy.SetFloatValue(0.)
+        return dy
+
+
+def ceil(x):
+    """
+    Ceil takes one input data (Tensor) and produces one output data (Tensor),
+    where the ceil function, `y = ceil(x)`, is applied to the tensor
+    elementwise.
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    return Ceil()(x)[0]
+
+
+class Floor(Operator):
+    """
+    Floor takes one input data (Tensor) and produces one output data (Tensor),
+    where the floor function, `y = floor(x)`, is applied to the tensor
+    elementwise.
+    """
+
+    def __init__(self):
+        super(Floor, self).__init__()
+
+    def forward(self, x):
+        """
+        forward of floor
+        Args: 
+            x (CTensor): input tensor
+        Returns:
+            the output CTensor    
+        """
+        return singa.Floor(x)
+
+    def backward(self, dy):
+        """
+        backward of floor. Derivative of floor is 0
+        Args: 
+            dy (CTensor): gradient tensor
+        Returns:
+            the gradient tensor over the input tensor. 
+        """
+        dy = singa.Tensor(dy.shape(), dy.device())
+        dy.SetFloatValue(0.)
+        return dy
+
+
+def floor(x):
+    """
+    Floor takes one input data (Tensor) and produces one output data (Tensor),
+    where the floor function, `y = floor(x)`, is applied to the tensor
+    elementwise.
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    return Floor()(x)[0]
+
+
+class Split(Operator):
+    """
+    Init a Split, which splits a tensor into a list of tensors along the
+    specified 'axis'.
+    """
+
+    def __init__(self, axis, parts, num_output=None):
+        """
+        Args:
+            axis (int): which axis to split on. A negative value means
+                counting dimensions from the back. Accepted range is
+                [-r, r-1] where r = rank(input).
+            parts (list of int): length of each output along 'axis'. If
+                'parts' is None, the tensor is split into equal-sized parts.
+            num_output (int): if 'parts' is None, the tensor is split into
+                'num_output' equal-sized parts.
+        """
+        super(Split, self).__init__()
+        self.axis = axis
+        self.parts = parts
+        self.num_output = num_output
+        if self.parts is None:
+            assert self.num_output is not None, "at least one of 'parts' and 'num_output' must be given"
+
+    def forward(self, x):
+        """
+        forward of Split
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        x_shape = list(x.shape())
+        self.axis = self.axis % len(x_shape)
+        if self.parts is None:
+            self.parts = [x_shape[self.axis] // self.num_output
+                         ] * self.num_output
+        xs = []
+        _s = 0
+        for _l in self.parts:
+            xs.append(singa.SliceOn(x, _s, _s + _l, self.axis))
+            _s += _l
+        return tuple(xs)
+
+    def backward(self, *dys):
+        """
+        backward of Split
+        Args:
+            dys: list of CTensor, gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        dys = singa.VecTensor(dys)
+        dy = singa.ConcatOn(dys, self.axis)
+        return dy
+
+
+def split(x, axis, parts, num_output=None):
+    """
+    Init a Split, which splits a tensor into a list of tensors along the
+    specified 'axis'.
+    Args:
+        x (Tensor): input tensor.
+        axis (int): which axis to split on. A negative value means
+            counting dimensions from the back. Accepted range is
+            [-r, r-1] where r = rank(input).
+        parts (list of int): length of each output along 'axis'. If 'parts'
+            is None, the tensor is split into equal-sized parts.
+        num_output (int): if 'parts' is None, the tensor is split into
+            'num_output' equal-sized parts.
+    Returns:
+        a tuple of output Tensors.
+    """
+    return Split(axis, parts, num_output)(x)
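+
+# A minimal sketch (comment only, assuming the numpy helpers used elsewhere
+# in this module); note that split returns a tuple of Tensors, one per part:
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.ones((4, 6), dtype=np.float32))
+#     y1, y2 = split(x, axis=1, parts=[2, 4])   # shapes (4, 2) and (4, 4)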
+
+
+class Gather(Operator):
+    """
+    Init a Gather: given a data tensor of rank r >= 1 and an indices tensor
+    of rank q, gather entries of the axis dimension of data (by default the
+    outermost one as axis=0) indexed by indices, and concatenate them in an
+    output tensor of rank `q + (r - 1)`.
+    """
+
+    def __init__(self, axis, indices):
+        """
+        Args:
+            axis (int): which axis to slice on. A negative value means counting
+                dimensions from the back. Accepted range is [-r, r-1]
+                where r = rank(input).
+            indices (list of int): entries of the axis dimension of data.
+        """
+        super(Gather, self).__init__()
+        self.axis = axis
+        self.indices = indices
+
+    def forward(self, x):
+        """
+        forward of Gather
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        self.x_shape = list(x.shape())
+        self.axis = self.axis % len(self.x_shape)  # handle the negative value
+        _shape = self.x_shape[self.axis]
+        xs = []
+        for indice in self.indices:
+            # each indice is a sub-indice
+            if isinstance(indice, (tuple, list, np.ndarray)):
+                sub_xs = []
+                for idx in indice:
+                    idx = int(idx % _shape)
+                    tmp_tensor = singa.SliceOn(x, idx, idx + 1, self.axis)
+                    sub_xs.append(tmp_tensor)
+                sub_xs = singa.VecTensor(sub_xs)
+                tmp_tensor = singa.ConcatOn(sub_xs, self.axis)
+                _slice_shape = list(tmp_tensor.shape())
+                _slice_shape.insert(self.axis, 1)  # add a new axis to concat
+                tmp_tensor = singa.Reshape(tmp_tensor, _slice_shape)
+            else:
+                indice = int(indice % _shape)
+                tmp_tensor = singa.SliceOn(x, indice, indice + 1, self.axis)
+            xs.append(tmp_tensor)
+        xs = singa.VecTensor(xs)
+        return singa.ConcatOn(xs, self.axis)
+
+    def backward(self, dy):
+        """
+        backward of Gather
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        _shape = self.x_shape[self.axis]
+
+        def construct_dx(dy, axis, indices, _shape):
+            dys = []
+            data_idx = 0
+            data_idxes = tuple(indices)
+            for step_idx in range(_shape):
+                if step_idx in data_idxes:
+                    tmp_tensor = singa.SliceOn(dy, data_idx, data_idx + 1, axis)
+                    data_idx += 1
+                else:
+                    tmp_shape = list(dy.shape())
+                    tmp_shape[axis] = 1
+                    tmp_tensor = singa.Tensor(tmp_shape, dy.device())
+                    tmp_tensor.SetFloatValue(0.)
+                dys.append(tmp_tensor)
+            dys = singa.VecTensor(dys)
+            dy = singa.ConcatOn(dys, axis)
+            return dy
+
+        if isinstance(self.indices[0], tuple) or isinstance(
+                self.indices[0], list):
+            dx = singa.Tensor(self.x_shape, dy.device())
+            dx.SetFloatValue(0.)
+            for data_idx in range(len(self.indices)):
+                # get a piece of the dy and remove its new axis added at forward
+                tmp_tensor = singa.SliceOn(dy, data_idx, data_idx + 1,
+                                           self.axis)
+                _slice_shape = list(tmp_tensor.shape())
+                del _slice_shape[self.axis]
+                tmp_tensor = singa.Reshape(tmp_tensor, _slice_shape)
+                # construct dx and sum them together
+                tmp_tensor = construct_dx(tmp_tensor, self.axis,
+                                          self.indices[data_idx],
+                                          self.x_shape[self.axis])
+                dx = singa.__add__(dx, tmp_tensor)
+            return dx
+        else:
+            return construct_dx(dy, self.axis, self.indices, _shape)
+
+
+def gather(x, axis, indices):
+    """
+    Init a Gather: given a data tensor of rank r >= 1 and an indices tensor
+    of rank q, gather entries of the axis dimension of data (by default the
+    outermost one as axis=0) indexed by indices, and concatenate them in an
+    output tensor of rank `q + (r - 1)`.
+    Args:
+        x (Tensor): input tensor.
+        axis (int): which axis to slice on. A negative value means counting
+            dimensions from the back. Accepted range is [-r, r-1]
+            where r = rank(input).
+        indices (list of int): entries of the axis dimension of data.
+    Returns:
+        the output Tensor.
+    """
+    return Gather(axis, indices)(x)[0]
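+
+# A minimal sketch of gather along axis 0 (comment only, assuming the numpy
+# round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.arange(12, dtype=np.float32).reshape(4, 3))
+#     y = gather(x, axis=0, indices=[0, 2])   # rows 0 and 2, shape (2, 3)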
+
+
+class Tile(Operator):
+    """
+    Init a Tile, which constructs a tensor by tiling a given tensor. This is
+    the same as numpy.tile: https://docs.scipy.org/doc/numpy/reference/generated/numpy.tile.html
+    """
+
+    def __init__(self, repeats):
+        """
+        Args:
+            repeats (list of int): 1D list of the same length as the input's
+                number of dimensions, giving the number of repeated copies
+                along each dimension.
+        """
+        super(Tile, self).__init__()
+        self.repeats = [repeats] if isinstance(repeats, int) else repeats
+
+    def forward(self, x):
+        """
+        forward of Tile
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        self.x_shape = list(x.shape())
+        # add new axis from head
+        if len(self.x_shape) < len(self.repeats):
+            append_len = len(self.repeats) - len(self.x_shape)
+            new_shape = [1] * append_len + self.x_shape
+            x = singa.Reshape(x, new_shape)
+        for axis, rp in enumerate(self.repeats):
+            if rp == 1:
+                continue
+            xs = []
+            for idx in range(rp):
+                xs.append(x.Clone())
+            xs = singa.VecTensor(xs)
+            x = singa.ConcatOn(xs, axis)
+        return x
+
+    def backward(self, dy):
+        """
+        backward of Tile
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensor over input tensor.
+        """
+        for axis, rp in enumerate(self.repeats):
+            if rp == 1:
+                continue
+            _slice_shape = list(dy.shape())
+            ori_len = _slice_shape[axis] // rp
+            _slice_shape[axis] = ori_len
+            _dy = singa.Tensor(_slice_shape, dy.device())
+            _dy.SetFloatValue(0.)
+
+            for idx in range(rp):
+                tmp_tensor = singa.SliceOn(dy, ori_len * idx,
+                                           ori_len * (idx + 1), axis)
+                _dy = singa.__add__(_dy, tmp_tensor)
+            dy = _dy
+        # remove the new axis we added at forward
+        if len(self.x_shape) < len(self.repeats):
+            dy = singa.Reshape(dy, self.x_shape)
+        return dy
+
+
+def tile(x, repeats):
+    """
+    Init a Tile, which constructs a tensor by tiling a given tensor. This is
+    the same as numpy.tile: https://docs.scipy.org/doc/numpy/reference/generated/numpy.tile.html
+    Args:
+        x (Tensor): input tensor.
+        repeats (list of int): 1D list of the same length as the input's
+            number of dimensions, giving the number of repeated copies
+            along each dimension.
+    Returns:
+        the output Tensor.
+    """
+    return Tile(repeats)(x)[0]
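+
+# A minimal sketch mirroring numpy.tile (comment only, assuming the numpy
+# round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([[1., 2.]], dtype=np.float32))
+#     y = tile(x, [2, 3])   # same values as np.tile, shape (2, 6)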
+
+
+class NonZero(Operator):
+    """
+    Init a NonZero, which returns the indices of the non-zero elements of
+    the input (one row per input dimension), like numpy.nonzero:
+    https://docs.scipy.org/doc/numpy/reference/generated/numpy.nonzero.html
+    """
+
+    def __init__(self):
+        super(NonZero, self).__init__()
+
+    def forward(self, x):
+        """
+        forward of NonZero
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        y = tensor.to_numpy(tensor.from_raw_tensor(x))
+        y = np.array((np.nonzero(y))).astype(np.int32)
+        y = tensor.from_numpy(y)
+        y.to_device(x.device())
+        return y.data
+
+    def backward(self, dy):
+        """
+        backward of NonZero
+        Args:
+            dy (CTensor): gradient tensor.
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def nonzero(x):
+    """
+    Init a NonZero, which returns the indices of the non-zero elements of
+    the input (one row per input dimension), like numpy.nonzero:
+    https://docs.scipy.org/doc/numpy/reference/generated/numpy.nonzero.html
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    return NonZero()(x)[0]
+
+
+class Cast(Operator):
+    """
+    The operator casts the elements of a given input tensor to a data type
+    specified by the 'to' argument and returns an output tensor of the same
+    size in the converted type.
+    """
+
+    def __init__(self, to):
+        """
+        Args:
+            to (int): data type, float32 = 0; int = 2.
+        """
+        super(Cast, self).__init__()
+        self.to = to
+
+    def forward(self, x):
+        """
+        forward of Cast
+        Args:
+            x (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        if x.data_type() != self.to:
+            x = x.AsType(self.to)
+        return x
+
+    def backward(self, dy):
+        """
+        backward of Cast
+        Args:
+            dy (CTensor), gradient tensor.
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def cast(x, to):
+    """
+    The operator casts the elements of a given input tensor to a data type
+    specified by the 'to' argument and returns an output tensor of the same
+    size in the converted type.
+    Args:
+        x (Tensor): input tensor.
+        to (int): data type, float32 = 0; int = 2.
+    Returns:
+        the output Tensor.
+    """
+    return Cast(to)(x)[0]
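+
+# A minimal sketch (comment only); the `to` codes follow the docstring
+# above, i.e. 0 for float32 and 2 for int:
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([1.7, 2.3], dtype=np.float32))
+#     y = cast(x, 2)   # converts the values to int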
+
+
+class OneHot(Operator):
+    """
+    Produces a one-hot tensor based on inputs.
+    """
+
+    def __init__(self, axis, depth, values):
+        """
+        Args:
+            axis (int): Axis along which the one-hot representation is added.
+                Default: axis=-1, which means the additional dimension is
+                inserted as the innermost/last dimension in the output
+                tensor.
+            depth (int): Scalar specifying the number of classes in one-hot
+                tensor. This is also the size of the one-hot dimension
+                (specified by 'axis' attribute) added on in the output tensor.
+                The values in the 'indices' input tensor are expected to be in
+                the range [-depth, depth-1].
+            values (float): Rank 1 tensor containing exactly two elements, in
+                the format [off_value, on_value], where 'on_value' is the
+                value used for filling locations specified in 'indices', and
+                'off_value' is used for all other locations.
+        """
+        super(OneHot, self).__init__()
+        self.axis = axis
+        self.depth = depth
+        self.values = values
+
+    def forward(self, indices):
+        """
+        forward of OneHot; this implementation is borrowed from ONNX
+        Args:
+            indices (CTensor): tensor of class indices. The values are
+                expected to be in the range [-depth, depth-1].
+        Returns:
+            the output CTensor.
+        """
+        values = tensor.to_numpy(tensor.from_raw_tensor(indices))
+        rank = len(values.shape)
+        depth_range = np.arange(self.depth)
+        if self.axis < 0:
+            self.axis += (rank + 1)
+        ls = values.shape[0:self.axis]
+        rs = values.shape[self.axis:rank]
+        targets = np.reshape(depth_range, (1,) * len(ls) + depth_range.shape +
+                             (1,) * len(rs))
+        values = np.reshape(np.mod(values, self.depth), ls + (1,) + rs)
+        np_tensor = np.asarray(targets == values, dtype=np.float32)
+        np_tensor = np_tensor * (self.values[1] -
+                                 self.values[0]) + self.values[0]
+        tmp_tensor = tensor.from_numpy(np_tensor)
+        tmp_tensor.to_device(indices.device())
+        return tmp_tensor.data
+
+    def backward(self, dy):
+        """
+        backward of OneHot
+        Args:
+            dy (CTensor):gradient tensor.
+        Raises:
+            AssertionError: no backward function for this operator
+        """
+        assert False, ('no gradient for backward function')
+
+
+def onehot(axis, indices, depth, values):
+    """
+    Produces a one-hot tensor based on inputs.
+    Args:
+        axis (int): Axis along which the one-hot representation is added.
+            Default: axis=-1, which means the additional dimension is
+            inserted as the innermost/last dimension in the output
+            tensor.
+        indices (Tensor): tensor of class indices. The values are expected
+            to be in the range [-depth, depth-1].
+        depth (int): Scalar specifying the number of classes in one-hot
+            tensor. This is also the size of the one-hot dimension
+            (specified by 'axis' attribute) added on in the output tensor.
+            The values in the 'indices' input tensor are expected to be in
+            the range [-depth, depth-1].
+        values (float): Rank 1 tensor containing exactly two elements, in
+            the format [off_value, on_value], where 'on_value' is the
+            value used for filling locations specified in 'indices', and
+            'off_value' is used for all other locations.
+    Returns:
+        the output Tensor.
+    """
+    return OneHot(axis, depth, values)(indices)[0]
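+
+# A minimal sketch (comment only, assuming the numpy round trip used
+# elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     idx = tensor.from_numpy(np.array([1, 0, 2], dtype=np.float32))
+#     y = onehot(-1, idx, depth=3, values=[0., 1.])
+#     # tensor.to_numpy(y) -> [[0., 1., 0.], [1., 0., 0.], [0., 0., 1.]]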
+
+
+class _RNN(Operator):
+    """ RNN operation with c++ backend
+    """
+
+    def __init__(
+            self,
+            handle,
+            return_sequences=False,
+            #  batch_first=True,
+            use_mask=False,
+            seq_lengths=None):
+        assert singa.USE_CUDA, "Not able to run without CUDA"
+        super(_RNN, self).__init__()
+        self.handle = handle
+        self.return_sequences = return_sequences
+        self.use_mask = use_mask
+        if use_mask:
+            assert type(seq_lengths) == Tensor, "wrong type for seq_lengths"
+        self.seq_lengths = seq_lengths
+
+    def forward(self, x, hx, cx, w):
+        if training:
+            if self.use_mask:
+                (y, hy,
+                 cy) = singa.GpuRNNForwardTrainingEx(x, hx, cx, w,
+                                                     self.seq_lengths.data,
+                                                     self.handle)
+            else:
+                (y, hy,
+                 cy) = singa.GpuRNNForwardTraining(x, hx, cx, w, self.handle)
+            self.inputs = {
+                'x': x,
+                'hx': hx,
+                'cx': cx,
+                'w': w,
+                'y': y,
+                'hy': hy,
+                'cy': cy
+            }
+        else:
+            if self.use_mask:
+                (y, hy,
+                 cy) = singa.GpuRNNForwardInferenceEx(x, hx, cx, w,
+                                                      self.seq_lengths.data,
+                                                      self.handle)
+            else:
+                (y, hy,
+                 cy) = singa.GpuRNNForwardInference(x, hx, cx, w, self.handle)
+
+        if self.return_sequences:
+            # (seq, bs, data)
+            return y
+        else:
+            # return last time step of y
+            # (seq, bs, data)[-1] -> (bs, data)
+            last_y_shape = (y.shape()[1], y.shape()[2])
+            last_y = singa.Tensor(list(last_y_shape), x.device())
+
+            src_offset = y.Size() - last_y.Size()
+            # def copy_data_to_from(dst, src, size, dst_offset=0, src_offset=0):
+            singa.CopyDataToFrom(last_y, y, last_y.Size(), 0, src_offset)
+            return last_y
+
+    def backward(self, grad):
+        assert training is True and hasattr(
+            self, "inputs"), "Please set training as True before do BP. "
+
+        # (seq, bs, hid)
+        dy = None
+        if self.return_sequences:
+            assert grad.shape() == self.inputs['y'].shape(), (
+                "grad shape %s != y shape %s" %
+                (grad.shape(), self.inputs['y'].shape()))
+            dy = grad
+        else:
+            # grad (bs, directions*hidden) -> dy (seq, bs, directions*hidden)
+            #   empty space filled by zeros
+            assert grad.shape() == (self.inputs['y'].shape()[1],
+                                    self.inputs['y'].shape()[2]), (
+                                        "grad y shape %s != last y shape %s" %
+                                        (grad.shape(),
+                                         (self.inputs['y'].shape()[1],
+                                          self.inputs['y'].shape()[2])))
+            dy = singa.Tensor(list(self.inputs['y'].shape()), grad.device())
+            dy.SetFloatValue(0.0)
+            dst_offset = dy.Size() - grad.Size()
+            singa.CopyDataToFrom(dy, grad, grad.Size(), dst_offset, 0)
+
+        # states grad are zeros, since states are not used in forward pass
+        dhy = singa.Tensor(list(self.inputs['hy'].shape()), grad.device())
+        dhy.SetFloatValue(0.0)
+        dcy = singa.Tensor(list(self.inputs['cy'].shape()), grad.device())
+        dcy.SetFloatValue(0.0)
+
+        if self.use_mask:
+            (dx, dhx,
+             dcx) = singa.GpuRNNBackwardxEx(self.inputs['y'], dy, dhy, dcy,
+                                            self.inputs['w'], self.inputs['hx'],
+                                            self.inputs['cx'],
+                                            self.seq_lengths.data, self.handle)
+            dW = singa.GpuRNNBackwardWEx(self.inputs['x'], self.inputs['hx'],
+                                         self.inputs['y'],
+                                         self.seq_lengths.data, self.handle)
+        else:
+            (dx, dhx,
+             dcx) = singa.GpuRNNBackwardx(self.inputs['y'], dy, dhy, dcy,
+                                          self.inputs['w'], self.inputs['hx'],
+                                          self.inputs['cx'], self.handle)
+            dW = singa.GpuRNNBackwardW(self.inputs['x'], self.inputs['hx'],
+                                       self.inputs['y'], self.handle)
+
+        return dx, dhx, dcx, dW
+
+
+class CosSim(Operator):
+    """
+    Init a cosine similarity operator
+    """
+
+    def __init__(self):
+        super(CosSim, self).__init__()
+
+    @classmethod
+    def dot(cls, a, b):
+        """
+        dot multiply
+        Args:
+            a (CTensor): 2d input tensor.
+            b (CTensor): 2d input tensor.
+        Returns:
+            CTensor: the output CTensor.
+        """
+        batch_size = a.shape()[0]
+        ret = []
+        for indice in range(batch_size):
+            tmp_a = singa.SliceOn(a, indice, indice + 1, 0)  # 1 * d
+            tmp_b = singa.SliceOn(b, indice, indice + 1, 0)  # 1 * d
+            tmp_b = singa.DefaultTranspose(tmp_b)
+            tmp_tensor = singa.Mult(tmp_a, tmp_b)  # 1 * d * d * 1
+            ret.append(tmp_tensor)
+        ret = singa.VecTensor(ret)
+        ret = singa.ConcatOn(ret, 0)  # b * 1
+        return singa.Reshape(ret, [ret.shape()[0]])  # b
+
+    def forward(self, a, b):
+        """
+        forward of CosSim
+        Args:
+            a (CTensor): input tensor.
+            b (CTensor): input tensor.
+        Returns:
+            the output CTensor.
+        """
+        ad = CosSim.dot(a, a)
+        bd = CosSim.dot(b, b)
+        ap = singa.PowFloat(ad, 0.5)
+        bp = singa.PowFloat(bd, 0.5)
+        ret = singa.__div__(CosSim.dot(a, b), singa.__mul__(ap, bp))
+        if training:
+            self.cache = (a, b, ad, bd, ap, bp, ret)
+        return ret
+
+    def backward(self, dy):
+        """
+        backward of CosSim
+        follow https://math.stackexchange.com/a/1923705
+        Args:
+            dy (CTensor): gradient tensor.
+        Return:
+            the gradient tensor over input tensor.
+        """
+        a, b, ad, bd, ap, bp, ret = self.cache
+        ab = singa.__mul__(ap, bp)
+        ab = singa.Reshape(ab, list(ab.shape()) + [1])  # b * 1
+        ad = singa.Reshape(ad, list(ad.shape()) + [1])  # b * 1
+        bd = singa.Reshape(bd, list(bd.shape()) + [1])  # b * 1
+        ret = singa.Reshape(ret, list(ret.shape()) + [1])  # b * 1
+        dy = singa.Reshape(dy, list(dy.shape()) + [1])  # broadcast
+        da = singa.__sub__(singa.__div__(b, ab),
+                           singa.__div__(singa.__mul__(ret, a), ad))
+        db = singa.__sub__(singa.__div__(a, ab),
+                           singa.__div__(singa.__mul__(ret, b), bd))
+        da = singa.__mul__(dy, da)
+        db = singa.__mul__(dy, db)
+        return da, db
+
+
+def cossim(a, b):
+    """
+    Produces a cosine similarity operator
+    Args:
+        a (Tensor): input tensor.
+        b (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    assert a.shape == b.shape, "shape not match for cossim"
+    assert a.ndim() == 2, "shape should be in 2d for cossim"
+    assert b.ndim() == 2, "shape should be in 2d for cossim"
+    return CosSim()(a, b)[0]
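+
+# A minimal sketch of the row-wise cosine similarity (comment only, assuming
+# the numpy round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     a = tensor.from_numpy(np.array([[1., 0.], [0., 1.]], dtype=np.float32))
+#     b = tensor.from_numpy(np.array([[1., 0.], [1., 0.]], dtype=np.float32))
+#     s = cossim(a, b)   # shape (2,), values [1., 0.]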
+
+
+class Expand(Operator):
+    """
+    Expand operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#Expand
+
+    Example usage::
+    data = [[1.], [2.], [3.]]
+
+    # dim_changed
+    shape = [2, 1, 6]
+    output = [[[1., 1., 1., 1., 1., 1.], 
+               [2., 2., 2., 2., 2., 2.],
+               [3., 3., 3., 3., 3., 3.]],
+              [[1., 1., 1., 1., 1., 1.],
+               [2., 2., 2., 2., 2., 2.],
+               [3., 3., 3., 3., 3., 3.]]]
+
+    # dim_unchanged
+    shape = [3, 4]
+    output = [[1., 1., 1., 1.],
+              [2., 2., 2., 2.],
+              [3., 3., 3., 3.]]
+    """
+
+    def __init__(self, shape):
+        """
+        Args:
+            shape (list[int]): indicates the shape you want to expand to,
+                following the broadcast rule
+        """
+        super(Expand, self).__init__()
+        self.shape = shape
+
+    def forward(self, x):
+        if isinstance(self.shape, np.ndarray):
+            self.shape = self.shape.tolist()
+        else:
+            self.shape = list(self.shape)
+        self.x_shape = list(x.shape())
+        x_shape = self.x_shape.copy()
+        self.dim_changed = True if len(self.shape) != len(x_shape) else False
+        if self.dim_changed:
+            tmp_tensor = singa.Tensor(self.shape, x.device())
+            tmp_tensor.SetFloatValue(1.)
+            x = singa.__mul__(x, tmp_tensor)
+        else:
+            for axis, s_1, s_2 in zip(range(len(self.shape)), self.shape,
+                                      x_shape):
+                if s_1 == s_2:
+                    continue
+                xs = [x] * (s_1 // s_2)
+                x = singa.VecTensor(xs)
+                x = singa.ConcatOn(x, axis)
+        return x
+
+    def backward(self, dy):
+        x_shape = self.x_shape
+        if self.dim_changed:
+            dy = tensor.from_raw_tensor(dy)
+            if len(self.shape) > len(x_shape):
+                x_shape = [1] * (len(self.shape) - len(x_shape)) + x_shape
+            # walk the axes in reverse so that summing out an axis does not
+            # shift the indices of the axes still to be processed
+            for axis, s in zip(range(len(self.shape))[::-1], x_shape[::-1]):
+                if s == 1:
+                    dy = tensor.sum(dy, axis)
+            dy = dy.data
+        else:
+            for axis, s_1, s_2 in zip(
+                    range(len(self.shape))[::-1], self.shape[::-1],
+                    x_shape[::-1]):
+                if s_1 > s_2:
+                    duplic = s_1 // s_2
+                    dxs = []
+                    for i in range(s_2):
+                        tmp_tensor = None
+                        for j in range(duplic):
+                            if not tmp_tensor:
+                                tmp_tensor = singa.SliceOn(
+                                    dy, j * s_2 + i, j * s_2 + i + 1, axis)
+                            else:
+                                tmp_tensor += singa.SliceOn(
+                                    dy, j * s_2 + i, j * s_2 + i + 1, axis)
+                        dxs.append(tmp_tensor)
+                    dxs = singa.VecTensor(dxs)
+                    dy = singa.ConcatOn(dxs, axis)
+        dy = singa.Reshape(dy, self.x_shape)
+        return dy
+
+
+def expand(x, shape):
+    """
+    Produces an Expand operator
+    Args:
+        x (Tensor): input tensor.
+        shape (list[int]): indicates the shape you want to expand to,
+            following the broadcast rule
+    Returns:
+        the output Tensor.
+    """
+    return Expand(shape)(x)[0]
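+
+# A minimal sketch of the dim_unchanged case from the class docstring
+# (comment only, assuming the numpy round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([[1.], [2.], [3.]], dtype=np.float32))
+#     y = expand(x, [3, 4])   # broadcasts the size-1 axis, shape (3, 4)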
+
+
+class Pad(Operator):
+    """
+    Pad operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#Pad
+
+    Example usage::
+        data = 
+        [
+            [1.0, 1.2],
+            [2.3, 3.4],
+            [4.5, 5.7],
+        ] 
+        pads = [0, 2, 0, 0]
+
+        # constant mode
+        mode = 'constant'
+        constant_value = 0.0
+        output =
+        [
+            [0.0, 0.0, 1.0, 1.2],
+            [0.0, 0.0, 2.3, 3.4],
+            [0.0, 0.0, 4.5, 5.7],
+        ]
+
+        # reflect mode
+        mode = 'reflect'
+        output =
+        [
+            [1.0, 1.2, 1.0, 1.2],
+            [2.3, 3.4, 2.3, 3.4],
+            [4.5, 5.7, 4.5, 5.7],
+        ]
+
+        # edge mode
+        mode = 'edge'
+        output =
+        [
+            [1.0, 1.0, 1.0, 1.2],
+            [2.3, 2.3, 2.3, 3.4],
+            [4.5, 4.5, 4.5, 5.7],
+        ]
+    """
+
+    def __init__(self, mode, pads, constant=0.):
+        """
+        Args:
+            mode (string): Supported modes: `constant`(default), `reflect`, `edge`.
+            pads (list[int]): list of integers indicating the number of
+                padding elements to add at the beginning and end of each
+                axis, in the format [x1_begin, x2_begin, ..., x1_end,
+                x2_end, ...].
+            constant (float): A scalar value to be used if the mode chosen is 
+                `constant`
+        """
+        super(Pad, self).__init__()
+        self.mode = mode
+        if self.mode not in ("constant", "reflect", "edge"):
+            assert False, ('Only support three modes: constant, reflect, edge')
+        self.constant = constant
+        self.pads = pads
+        self.pad_width = ()
+
+    def forward(self, x):
+        if not self.pad_width:
+            half_width = len(self.pads) // 2
+            for i in range(half_width):
+                self.pad_width += ((self.pads[i], self.pads[i + half_width])),
+
+        for axis, pads in zip(range(len(x.shape())), self.pad_width):
+            for pad, is_left in zip(pads, (True, False)):
+                if pad == 0:
+                    continue
+                pad_shape = list(x.shape())
+                if self.mode == "constant":
+                    pad_shape[axis] = pad
+                    padding = singa.Tensor(list(pad_shape), x.device())
+                    padding.SetFloatValue(self.constant)
+                    if is_left:
+                        x = singa.ConcatOn(singa.VecTensor([padding, x]), axis)
+                    else:
+                        x = singa.ConcatOn(singa.VecTensor([x, padding]), axis)
+                elif self.mode == "reflect":
+                    axis_shape = pad_shape[axis]
+                    if is_left:
+                        padding = singa.SliceOn(x, 0, pad, axis)
+                        x = singa.ConcatOn(singa.VecTensor([padding, x]), axis)
+                    else:
+                        padding = singa.SliceOn(x, axis_shape - pad, axis_shape,
+                                                axis)
+                        x = singa.ConcatOn(singa.VecTensor([x, padding]), axis)
+                elif self.mode == "edge":
+                    axis_shape = pad_shape[axis]
+                    if is_left:
+                        padding = []
+                        for _ in range(pad):
+                            padding.append(singa.SliceOn(x, 0, 1, axis))
+                        padding.append(x)
+                        padding = singa.VecTensor(padding)
+                        x = singa.ConcatOn(padding, axis)
+                    else:
+                        padding = [x]
+                        for _ in range(pad):
+                            padding.append(
+                                singa.SliceOn(x, axis_shape - 1, axis_shape,
+                                              axis))
+                        padding = singa.VecTensor(padding)
+                        x = singa.ConcatOn(padding, axis)
+        return x
+
+    def backward(self, dy):
+        for axis, pads in zip(range(len(dy.shape())), self.pad_width):
+            for pad, is_left in zip(pads, (True, False)):
+                if pad == 0:
+                    continue
+                axis_shape = list(dy.shape())[axis]
+                if is_left:
+                    dy = singa.SliceOn(dy, pad, axis_shape, axis)
+                else:
+                    dy = singa.SliceOn(dy, 0, axis_shape - pad, axis)
+        return dy
+
+
+def pad(x, mode, pads, constant=0.):
+    """
+    Produces a pad operator
+    Args:
+        x (Tensor): input tensor.
+        mode (string): Supported modes: `constant`(default), `reflect`, `edge`.
+        pads (list[int]): list of integers indicating the number of padding
+            elements to add at the beginning and end of each axis, in the
+            format [x1_begin, x2_begin, ..., x1_end, x2_end, ...].
+        constant (float): A scalar value to be used if the mode chosen is 
+            `constant`
+    Returns:
+        the output Tensor.
+    """
+    return Pad(mode, pads, constant)(x)[0]
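+
+# A minimal sketch of constant-mode padding (comment only); pads are given
+# as [begin_0, begin_1, ..., end_0, end_1, ...]:
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.ones((2, 2), dtype=np.float32))
+#     y = pad(x, "constant", [0, 1, 0, 1])   # pads axis 1 on both sides, shape (2, 4)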
+
+
+class UpSample(Operator):
+    """
+    UpSample operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#upsample
+
+    Example usage::
+    data = [[[[1, 2],
+              [3, 4],]]]
+
+    # nearest
+    scales = [1.0, 1.0, 2.0, 3.0]
+    output = [[[[1, 1, 1, 2, 2, 2],
+                [1, 1, 1, 2, 2, 2],
+                [3, 3, 3, 4, 4, 4],
+                [3, 3, 3, 4, 4, 4],]]]
+    """
+
+    def __init__(self, mode, scales):
+        """
+        Args:
+            mode (string): interpolation mode; only `nearest` is supported.
+            scales (list[int]): The scale array along each dimension. It takes
+                value greater than or equal to 1.
+        """
+        super(UpSample, self).__init__()
+        self.scales = scales
+        self.mode = mode.lower()
+        if self.mode != "nearest":
+            assert False, "only support nearest mode."
+
+    def forward(self, x):
+        if isinstance(self.scales, np.ndarray):
+            self.scales = self.scales.tolist()
+        else:
+            self.scales = list(self.scales)
+        self.x_shape = list(x.shape())
+        for axis, s in zip(range(len(self.scales)), self.scales):
+            s = int(s)
+            if s == 1:
+                continue
+            x = x.Repeat([
+                s,
+            ], axis)
+        return x
+
+    def backward(self, dy):
+        x_shape = self.x_shape.copy()
+        for axis, s_1, s_2 in zip(
+                range(len(self.scales))[::-1], self.scales[::-1],
+                x_shape[::-1]):
+            s_1 = int(s_1)
+            if s_1 != 1:
+                duplic = s_1
+                dxs = []
+                for i in range(s_2):
+                    tmp_tensor = None
+                    for j in range(duplic):
+                        if not tmp_tensor:
+                            tmp_tensor = singa.SliceOn(dy, i * duplic + j,
+                                                       i * duplic + j + 1, axis)
+                        else:
+                            tmp_tensor += singa.SliceOn(dy, i * duplic + j,
+                                                        i * duplic + j + 1,
+                                                        axis)
+                    dxs.append(tmp_tensor)
+                dxs = singa.VecTensor(dxs)
+                dy = singa.ConcatOn(dxs, axis)
+        dy = singa.Reshape(dy, self.x_shape)
+        return dy
+
+
+def upsample(x, mode, scales):
+    """
+    Produces an upsample operator
+    Args:
+        x (Tensor): input tensor.
+        mode (string): interpolation mode; only `nearest` is supported.
+        scales (list[int]): The scale array along each dimension. It takes
+            value greater than or equal to 1.
+    Returns:
+        the output Tensor.
+    """
+    return UpSample(mode, scales)(x)[0]
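+
+# A minimal sketch matching the nearest-mode example in the class docstring
+# (comment only, assuming the numpy round trip used elsewhere in this module):
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([[[[1., 2.], [3., 4.]]]], dtype=np.float32))
+#     y = upsample(x, "nearest", [1., 1., 2., 3.])   # shape (1, 1, 4, 6)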
+
+
+class DepthToSpace(Operator):
+    """
+    DepthToSpace operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#DepthToSpace
+
+    Example usage::
+    blocksize = 2
+    # (1, 8, 2, 3) input tensor
+    data = [[[[0., 1., 2.],
+            [3., 4., 5.]],
+            [[9., 10., 11.],
+            [12., 13., 14.]],
+            [[18., 19., 20.],
+            [21., 22., 23.]],
+            [[27., 28., 29.],
+            [30., 31., 32.]],
+            [[36., 37., 38.],
+            [39., 40., 41.]],
+            [[45., 46., 47.],
+            [48., 49., 50.]],
+            [[54., 55., 56.],
+            [57., 58., 59.]],
+            [[63., 64., 65.],
+            [66., 67., 68.]]]]
+
+    # DCR mode
+    # (1, 2, 4, 6) output tensor
+    output = [[[[0., 18., 1., 19., 2., 20.],
+                [36., 54., 37., 55., 38., 56.],
+                [3., 21., 4., 22., 5., 23.],
+                [39., 57., 40., 58., 41., 59.]],
+               [[9., 27., 10., 28., 11., 29.],
+                [45., 63., 46., 64., 47., 65.],
+                [12., 30., 13., 31., 14., 32.],
+                [48., 66., 49., 67., 50., 68.]]]]
+
+    # CRD mode
+    # (1, 2, 4, 6) output tensor
+    output = [[[[0., 9., 1., 10., 2., 11.],
+                [18., 27., 19., 28., 20., 29.],
+                [3., 12., 4., 13., 5., 14.],
+                [21., 30., 22., 31., 23., 32.]],
+               [[36., 45., 37., 46., 38., 47.],
+                [54., 63., 55., 64., 56., 65.],
+                [39., 48., 40., 49., 41., 50.],
+                [57., 66., 58., 67., 59., 68.]]]]
+    """
+
+    def __init__(self, blocksize, mode="DCR"):
+        """
+        Args:
+            blocksize (int): Blocks of [blocksize, blocksize] are moved.
+            mode (string): DCR (default) for depth-column-row order re-
+                arrangement. Use CRD for column-row-depth order.
+        """
+        super(DepthToSpace, self).__init__()
+        self.blocksize = blocksize
+        self.mode = mode.upper()
+
+    def forward(self, x):
+        if training:
+            self.x_shape = x.shape()
+        b, c, h, w = x.shape()
+        blocksize = self.blocksize
+        if self.mode == "DCR":
+            x = singa.Reshape(
+                x, [b, blocksize, blocksize, c // (blocksize**2), h, w])
+            x = singa.Transpose(x, [0, 3, 4, 1, 5, 2])
+            x = singa.Reshape(
+                x, [b, c // (blocksize**2), h * blocksize, w * blocksize])
+        elif self.mode == "CRD":
+            x = singa.Reshape(
+                x, [b, c // (blocksize**2), blocksize, blocksize, h, w])
+            x = singa.Transpose(x, [0, 1, 4, 2, 5, 3])
+            x = singa.Reshape(
+                x, [b, c // (blocksize**2), h * blocksize, w * blocksize])
+        else:
+            assert False, ("only support two methods: DCR and CRD.")
+        return x
+
+    def backward(self, dy):
+        b, c, h, w = self.x_shape
+        blocksize = self.blocksize
+        dy = singa.Reshape(
+            dy, [b, c // (blocksize**2), h, blocksize, w, blocksize])
+        if self.mode == "DCR":
+            dy = singa.Transpose(dy, [0, 3, 5, 1, 2, 4])
+        elif self.mode == "CRD":
+            dy = singa.Transpose(dy, [0, 1, 3, 5, 2, 4])
+        else:
+            assert False, ("only support two methods: DCR and CRD.")
+        dy = singa.Reshape(dy, self.x_shape)
+        return dy
+
+
+def depth_to_space(x, blocksize, mode="DCR"):
+    """
+    Produces a DepthToSpace operator
+    Args:
+        x (Tensor): input tensor.
+        blocksize (int): Blocks of [blocksize, blocksize] are moved.
+        mode (string): DCR (default) for depth-column-row order re-
+            arrangement. Use CRD for column-row-depth order.
+    Returns:
+        the output Tensor.
+    """
+    return DepthToSpace(blocksize, mode)(x)[0]
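+
+# A minimal shape-level sketch (comment only): with blocksize 2, the channel
+# dimension shrinks by 4 while height and width double.
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.arange(16, dtype=np.float32).reshape(1, 4, 2, 2))
+#     y = depth_to_space(x, 2)   # shape (1, 1, 4, 4)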
+
+
+class SpaceToDepth(Operator):
+    """
+    SpaceToDepth operator following ONNX Operator Schemas, reverse of DepthToSpace
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#SpaceToDepth
+    """
+
+    def __init__(self, blocksize, mode="DCR"):
+        """
+        Args:
+            blocksize (int): Blocks of [blocksize, blocksize] are moved.
+            mode (string): DCR (default) for depth-column-row order re-
+                arrangement. Use CRD for column-row-depth order.
+        """
+        super(SpaceToDepth, self).__init__()
+        self.blocksize = blocksize
+        self.mode = mode.upper()
+
+    def forward(self, x):
+        blocksize = self.blocksize
+        b, c, h, w = x.shape()
+        # keep the output shape in a local so inference (training == False)
+        # also works; it is cached for backward only during training
+        out_shape = (b, c * (blocksize**2), h // blocksize, w // blocksize)
+        if training:
+            self.x_shape = out_shape
+        b, c, h, w = out_shape
+        x = singa.Reshape(
+            x, [b, c // (blocksize**2), h, blocksize, w, blocksize])
+        if self.mode == "DCR":
+            x = singa.Transpose(x, [0, 3, 5, 1, 2, 4])
+        elif self.mode == "CRD":
+            x = singa.Transpose(x, [0, 1, 3, 5, 2, 4])
+        else:
+            assert False, ("only support two methods: DCR and CRD.")
+        x = singa.Reshape(x, list(out_shape))
+        return x
+
+    def backward(self, dy):
+        b, c, h, w = self.x_shape
+        blocksize = self.blocksize
+        if self.mode == "DCR":
+            dy = singa.Reshape(
+                dy, [b, blocksize, blocksize, c // (blocksize**2), h, w])
+            dy = singa.Transpose(dy, [0, 3, 4, 1, 5, 2])
+            dy = singa.Reshape(
+                dy, [b, c // (blocksize**2), h * blocksize, w * blocksize])
+        elif self.mode == "CRD":
+            dy = singa.Reshape(
+                dy, [b, c // (blocksize**2), blocksize, blocksize, h, w])
+            dy = singa.Transpose(dy, [0, 1, 4, 2, 5, 3])
+            dy = singa.Reshape(
+                dy, [b, c // (blocksize**2), h * blocksize, w * blocksize])
+        else:
+            assert False, ("only support two methods: DCR and CRD.")
+        return dy
+
+
+def space_to_depth(x, blocksize, mode="DCR"):
+    """
+    Produces a SpaceToDepth operator
+    Args:
+        x (Tensor): input tensor.
+        blocksize (int): Blocks of [blocksize, blocksize] are moved.
+        mode (string): DCR (default) for depth-column-row order re-
+            arrangement. Use CRD for column-row-depth order.
+    Returns:
+        the output Tensor.
+    """
+    return SpaceToDepth(blocksize, mode)(x)[0]
+
+
+class Where(Operator):
+    """
+    Where operator following ONNX Operator Schemas
+    https://github.com/onnx/onnx/blob/master/docs/Operators.md#Where
+    and Numpy
+    https://numpy.org/doc/stable/reference/generated/numpy.where.html
+    Example usage::
+    condition = [[True, False],
+                 [True, True]]
+    x = [[1, 2],
+         [3, 4]]
+    y = [[9, 8],
+         [7, 6]]
+
+    output = [[1, 8],
+              [3, 4]]
+    """
+
+    def __init__(self, condition):
+        """
+        Args:
+            condition (Tensor): When True (nonzero), yield X, otherwise yield Y
+        """
+        super(Where, self).__init__()
+        self.condition = condition
+
+    def forward(self, a, b):
+        if isinstance(self.condition, list):
+            self.condition = np.array(self.condition)
+        if isinstance(self.condition, np.ndarray):
+            self.condition = self.condition.astype(np.float32)
+            self.condition = tensor.from_numpy(self.condition)
+            self.condition.to_device(a.device())
+            self.condition = self.condition.data
+        self.neg_condition = singa.AddFloat(singa.MultFloat(self.condition, -1.), 1.)
+        _a, _b = a, b
+        dtype0 = _a.data_type()
+        dtype1 = _b.data_type()
+        if dtype0 == singa.kInt or dtype1 == singa.kInt:
+            _a = a.AsType(singa.kFloat32)
+            _b = b.AsType(singa.kFloat32)
+            res = singa.__add__(singa.__mul__(self.condition, _a),
+                             singa.__mul__(self.neg_condition, _b))
+            res = res.AsType(singa.kInt)
+        else:
+            res = singa.__add__(singa.__mul__(self.condition, _a),
+                             singa.__mul__(self.neg_condition, _b))
+        return res
+
+    def backward(self, dy):
+        da = singa.__mul__(self.condition, dy)
+        db = singa.__mul__(self.neg_condition, dy)
+        return da, db
+
+
+def where(x, y, condition):
+    """
+    Produces a Where operator
+    Args:
+        x (Tensor): input tensor.
+        y (Tensor): input tensor.
+        condition (Tensor): When True (nonzero), yield X, otherwise yield Y
+    Returns:
+        the output Tensor.
+    """
+    return Where(condition)(x, y)[0]
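+
+# A minimal sketch matching the example in the class docstring (comment
+# only); the condition may be given as a nested Python list:
+#
+#     import numpy as np
+#     from singa import tensor
+#     x = tensor.from_numpy(np.array([[1., 2.], [3., 4.]], dtype=np.float32))
+#     y = tensor.from_numpy(np.array([[9., 8.], [7., 6.]], dtype=np.float32))
+#     z = where(x, y, [[True, False], [True, True]])   # [[1., 8.], [3., 4.]]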
+
+
+class Round(Operator):
+    """
+    Element-wise round the input
+    """
+
+    def __init__(self):
+        super(Round, self).__init__()
+
+    def forward(self, x):
+        return singa.Round(x)
+
+    def backward(self, dy):
+        dy = singa.Tensor(dy.shape(), dy.device())
+        dy.SetFloatValue(0.)
+        return dy
+
+
+def round(x):
+    """
+    Element-wise round the input
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    return Round()(x)[0]
+
+
+class Rounde(Operator):
+    """
+    Element-wise round the input; in case of halves, round to the nearest
+    even integer
+    """
+
+    def __init__(self):
+        super(Rounde, self).__init__()
+
+    def forward(self, x):
+        return singa.RoundE(x)
+
+    def backward(self, dy):
+        dy = singa.Tensor(dy.shape(), dy.device())
+        dy.SetFloatValue(0.)
+        return dy
+
+
+def rounde(x):
+    """
+    Element-wise round the input; in case of halves, round to the nearest
+    even integer
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
+    """
+    return Rounde()(x)[0]
+
+
+class Embedding(Operator):
+    """
+    Init an embedding operator
+    """
+
+    def __init__(self):
+        super(Embedding, self).__init__()
+
+    def forward(self, x, w):
+        """
+        forward of embedding
+        Args:
+            x (CTensor): input tensor.
+            w (CTensor): weight tensor.
+        Returns:
+            the output CTensor.
+        """
+        x = tensor.to_numpy(tensor.from_raw_tensor(x))
+        if training:
+            self.cache = (x, w.shape())
+
+        xs = []
+        x = x.tolist()
+        for indices in x:
+            sub_xs = []
+            for idx in indices:
+                # look up one row of the embedding table
+                idx = int(idx)
+                tmp_tensor = singa.SliceOn(w, idx, idx + 1, 0)
+                sub_xs.append(tmp_tensor)
+            # stack the rows of one sample into (1, seq_length, embed_dim)
+            sub_xs = singa.VecTensor(sub_xs)
+            tmp_tensor = singa.ConcatOn(sub_xs, 0)
+            tmp_tensor = singa.Reshape(tmp_tensor,
+                                       [1] + list(tmp_tensor.shape()))
+            xs.append(tmp_tensor)
+        # concatenate all samples into (batch_size, seq_length, embed_dim)
+        xs = singa.VecTensor(xs)
+        xs = singa.ConcatOn(xs, 0)
+        return xs
+
+    def backward(self, dy):
+        """
+        backward of embedding
+        Args:
+            dy (CTensor): gradient tensor.
+        Returns:
+            the gradient tensors over the input and over the weight.
+        """
+        x, w_shape = self.cache
+        dy_shape = dy.shape()
+        # construct the dx
+        dx = tensor.sum(tensor.from_raw_tensor(dy), axis=2)
+
+        # construct the dw
+        dws = []
+        for idx in range(w_shape[0]):
+            tmp_tensor = singa.Tensor((1, w_shape[1]), dy.device())
+            tmp_tensor.SetFloatValue(0.0)
+            dws.append(tmp_tensor)
+        dy = singa.Reshape(dy, [dy_shape[0] * dy_shape[1], dy_shape[2]])
+        x = x.reshape(-1)
+        for idx, val in enumerate(x):
+            tmp_tensor = singa.SliceOn(dy, idx, idx + 1, 0)
+            dws[val] = singa.__add__(dws[val], tmp_tensor)
+        dws = singa.VecTensor(dws)
+        return dx.data, singa.ConcatOn(dws, 0)
+
+
+def embedding(x, w):
+    """
+    Produces an embedding operator.
+    Args:
+        x (Tensor): the indices into the embedding table; one row of
+            indices per sample.
+        w (Tensor): the embedding table (weight), of shape
+            (vocab_size, embed_dim).
+    Returns:
+        the output Tensor.
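+
+    Example usage (a sketch, assuming a 10-word vocabulary with
+    4-dimensional embeddings; all values are illustrative only)::
+
+        w = tensor.Tensor((10, 4))
+        w.gaussian(0.0, 0.1)
+        x = tensor.from_numpy(np.array([[0, 2, 5]], np.float32))
+        out = autograd.embedding(x, w)  # shape (1, 3, 4)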
+    """
+    return Embedding()(x, w)[0]
+
+
+class Erf(Operator):
+    """
+    Apply element-wise math.erf to the input
+    """
+
+    def __init__(self):
+        super(Erf, self).__init__()
+
+    def forward(self, x):
+        if training:
+            self.input = x
+        return singa.Erf(x)
+
+    def backward(self, dy):
+        # d(erf(x))/dx = 2/sqrt(pi) * exp(-x^2), evaluated at the cached
+        # input and multiplied with dy by the chain rule
+        dx = singa.MultFloat(singa.PowFloat(self.input, 2.0), -1.0)
+        dx = singa.MultFloat(singa.Exp(dx), 2. / np.pi ** 0.5)
+        dx = singa.__mul__(dx, dy)
+        return dx
+
+
+def erf(x):
+    """
+    Apply element-wise math.erf to the input
+    Args:
+        x (Tensor): input tensor.
+    Returns:
+        the output Tensor.
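+
+    Example usage (a sketch; erf(0) = 0 and erf saturates towards 1)::
+
+        x = tensor.from_numpy(np.array([0., 2.], np.float32))
+        y = autograd.erf(x)  # approximately [0., 0.9953]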
+    """
+    return Erf()(x)[0]
+
+
+''' alias for Operator and Layers
+'''
+Operation = Operator
+''' import layer at the end to resolve circular import
+'''
+from singa import layer
+Linear = layer.Linear
+Conv2d = layer.Conv2d
+SeparableConv2d = layer.SeparableConv2d
+BatchNorm2d = layer.BatchNorm2d
+Pooling2d = layer.Pooling2d
+MaxPool2d = layer.MaxPool2d
+AvgPool2d = layer.AvgPool2d
+MaxPool1d = layer.MaxPool1d
+AvgPool1d = layer.AvgPool1d
+RNN_Base = layer.RNN_Base
+RNN = layer.RNN
+LSTM = layer.LSTM
diff --git a/python/singa/command.py b/python/singa/command.py
deleted file mode 100644
index f14c8c5..0000000
--- a/python/singa/command.py
+++ /dev/null
@@ -1,240 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-
-'''
-This script is the main entrance for user to run singa inside a model workspace
-
-To use this script, user sudo install these dependencies: flask pillow and protobuf
-'''
-
-import sys, glob, os, random, shutil, time
-from flask import Flask, request, redirect, url_for
-import numpy as np
-import ConfigParser
-import urllib, traceback
-
-
-from argparse import ArgumentParser
-from argparse import RawDescriptionHelpFormatter
-sys.path.append(os.getcwd())
-
-__all__ = []
-__version__ = 0.1
-__date__ = '2016-07-20'
-__updated__ = '2016-07-20'
-__shortdesc__ = '''
-welcome to singa
-'''
-
-app = Flask(__name__)
-config = ConfigParser.RawConfigParser()
-service = {}
-data_path = "data_"
-parameter_path = "parameter_"
-
-debug = False
-
-class CLIError(Exception):
-    '''Generic exception to raise and log different fatal errors.'''
-    def __init__(self, msg):
-        super(CLIError).__init__(type(self))
-        self.msg = "E: %s" % msg
-    def __str__(self):
-        return self.msg
-    def __unicode__(self):
-        return self.msg
-
-def main(argv=None): # IGNORE:C0111
-    '''Command line options.'''
-
-    from . import device
-
-    if argv is None:
-        argv = sys.argv
-    else:
-        sys.argv.extend(argv)
-
-    program_name = os.path.basename(sys.argv[0])
-    program_version = "v%s" % __version__
-    program_build_date = str(__updated__)
-    program_version_message = '%%(prog)s %s (%s)' % (program_version, program_build_date)
-    program_shortdesc = __shortdesc__
-    program_license = '''%s
-
-  Created by dbsystem group on %s.
-  Copyright 2016 NUS School of Computing. All rights reserved.
-
-  Licensed under the Apache License 2.0
-  http://www.apache.org/licenses/LICENSE-2.0
-
-  Distributed on an "AS IS" basis without warranties
-  or conditions of any kind, either express or implied.
-
-USAGE
-''' % (program_shortdesc, str(__date__))
-
-    global debug
-
-    try:
-        # Setup argument parser
-        parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter)
-        parser.add_argument("-p", "--port", dest="port", default=5000, help="the port to listen to, default is 5000")
-        parser.add_argument("-param", "--parameter", dest="parameter",  help="the parameter file path to be loaded")
-        parser.add_argument("-D", "--debug", dest="debug", action="store_true", help="whether need to debug")
-        parser.add_argument("-R", "--reload", dest="reload_data", action="store_true", help="whether need to reload data")
-        parser.add_argument("-C", "--cpu", dest="use_cpu", action="store_true", help="Using cpu or not, default is using gpu")
-        parser.add_argument("-m", "--mode", dest="mode", choices=['train','test','serve'], default='serve', help="On Which mode (train,test,serve) to run singa")
-        parser.add_argument('-V', '--version', action='version', version=program_version_message)
-
-        # Process arguments
-        args = parser.parse_args()
-
-        port = args.port
-        parameter_file = args.parameter
-        mode = args.mode
-        need_reload = args.reload_data
-        use_cpu = args.use_cpu
-        debug = args.debug
-
-        #prepare data files
-        config.read('file.cfg')
-        file_prepare(need_reload)
-
-
-        import network as net
-        model = net.create()
-
-        #load parameter
-        parameter_file=get_parameter(parameter_file)
-
-        if parameter_file:
-            print "load parameter file: %s" % parameter_file
-            model.load(parameter_file)
-
-        if use_cpu:
-            raise CLIError("Currently cpu is not support!")
-        else:
-            print "runing with gpu"
-            d = device.create_cuda_gpu()
-
-        model.to_device(d)
-
-        if mode == "serve":
-            print "runing singa in serve mode, listen to  port: %s " % port
-            global service
-            from serve import Service
-            service =Service(model,d)
-
-            app.debug = debug
-            app.run(host='0.0.0.0', port= port)
-        elif mode == "train":
-            print "runing singa in train mode"
-            global trainer
-            from train import Trainer
-            trainer= Trainer(model,d)
-            if not parameter_file:
-                trainer.initialize()
-            trainer.train()
-        else:
-            raise CLIError("Currently only serve mode is surpported!")
-        return 0
-    except KeyboardInterrupt:
-        ### handle keyboard interrupt ###
-        return 0
-    except Exception, e:
-        if debug:
-            traceback.print_exc()
-            raise(e)
-        indent = len(program_name) * " "
-        sys.stderr.write(program_name + ": " + str(e) + "\n")
-        sys.stderr.write(indent + "  for help use --help \n\n")
-        return 2
-
-def file_prepare(reload_data=False):
-    '''
-        download all files and generate data.py
-    '''
-    if not reload_data and os.path.exists("data_.py"):
-        return
-
-    print "download file"
-    #clean data
-    shutil.rmtree("data_.py",ignore_errors=True)
-    shutil.rmtree("data_",ignore_errors=True)
-
-    data_py=open("data_.py",'w')
-    data_py.write("#%s" % "This file is Generated by SINGA, please don't edit\n\n")
-    if config.has_section("data"):
-        file_list = config.items("data")
-        #download files
-        for f in file_list:
-            name,path=download_file(f[0],f[1],data_path)
-            data_py.write("%s=\"%s\"\n" % (name,path))
-
-    data_py.flush()
-    data_py.close()
-
-    if config.has_section("parameter"):
-        parameter_list = config.items("parameter")
-        for p in parameter_list:
-            download_file(p[0],p[1],parameter_path)
-
-def download_file(name,path,dest):
-    '''
-    download one file to dest
-    '''
-    if not os.path.exists(dest):
-        os.makedirs(dest)
-    if (path.startswith('http')):
-        file_name = path.split('/')[-1]
-        target = os.path.join(dest,file_name)
-        urllib.urlretrieve(path,target)
-    return name,target
-
-
-def get_parameter(file_name=None):
-    '''
-    get the paticular file name or get the last parameter file
-    '''
-    if not os.path.exists(parameter_path):
-        os.makedirs(parameter_path)
-        return
-
-    if file_name:
-	return os.path.join(parameter_path,file_name)
-
-    parameter_list = [ os.path.join(parameter_path,f) for f in os.listdir(parameter_path)]
-    if len(parameter_list)==0:
-        return
-    parameter_list.sort()
-
-    return parameter_list[-1]
-
-@app.route("/")
-def index():
-    return "Hello SINGA User!"
-
-@app.route('/predict', methods=['POST'])
-def predict():
-    if request.method == 'POST':
-        try:
-            response=service.serve(request)
-        except Exception as e:
-            return e
-        return response
-    return "error, should be post request"
diff --git a/python/singa/converter.py b/python/singa/converter.py
deleted file mode 100644
index cc42ef0..0000000
--- a/python/singa/converter.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-
-from google.protobuf import text_format
-from singa import layer
-from singa import metric
-from singa import loss
-from singa import net as ffnet
-from .proto import model_pb2
-from .proto import caffe_pb2
-import numpy as np
-
-
-class CaffeConverter:
-
-    def __init__(self, net_proto, solver_proto = None, input_sample_shape =
-            None, param_path = None):
-        self.caffe_net_path = net_proto
-        self.caffe_solver_path = solver_proto
-        self.input_sample_shape = input_sample_shape
-        self.param_path = param_path
-
-    def read_net_proto(self):
-        net_config = caffe_pb2.NetParameter()
-        return self.read_proto(self.caffe_net_path, net_config)
-
-    def read_solver_proto(self):
-        solver_config = caffe_pb2.SolverParameter()
-        return self.read_proto(self.caffe_solver_path, solver_config)
-
-    def read_caffemodel(self):
-        f = open(self.param_path, 'rb')
-        contents = f.read()
-        net_param = caffe_pb2.NetParameter();
-        net_param.ParseFromString(contents)
-        return net_param
-
-    def read_proto(self, filepath, parser_object):
-        file = open(filepath, "r")
-        if not file:
-            raise self.ProcessException("ERROR (" + filepath + ")!")
-        # Merges an ASCII representation of a protocol message into a message.
-        text_format.Merge(str(file.read()), parser_object)
-        file.close()
-        return parser_object
-
-    def convert_engine(self, layer_conf, solver_mode):
-        '''
-        Convert caffe engine into singa engine
-        return:
-            a singa engine string
-        '''
-        caffe_engine = ''
-        singa_engine = ''
-
-        # if no 'engine' field in caffe proto, set engine to -1
-        if layer_conf.type == 'Convolution' or layer_conf.type == 4:
-            caffe_engine = layer_conf.convolution_param.engine
-        elif layer_conf.type == 'Pooling' or layer_conf.type == 17:
-            caffe_engine = layer_conf.pooling_param.engine
-        elif layer_conf.type == 'ReLU' or layer_conf.type == 18:
-            caffe_engine = layer_conf.relu_param.engine
-        elif layer_conf.type == 'Sigmoid' or layer_conf.type == 19:
-            caffe_engine = layer_conf.sigmoid_param.engine
-        elif layer_conf.type == 'TanH' or layer_conf.type == 23:
-            caffe_engine = layer_conf.tanh_param.engine
-        elif layer_conf.type == 'LRN' or layer_conf.type == 15:
-            caffe_engine = layer_conf.lrn_param.engine
-        elif layer_conf.type == 'Softmax' or layer_conf.type == 20:
-            caffe_engine = layer_conf.softmax_param.engine
-        elif layer_conf.type == 'InnerProduct' or layer_conf.type == 14:
-            caffe_engine = -1
-        elif layer_conf.type == 'Dropout' or layer_conf.type == 6:
-            caffe_engine = -1
-        elif layer_conf.type == 'Flatten' or layer_conf.type == 8:
-            caffe_engine = -1
-        else:
-            raise Exception('Unknown layer type: ' + layer_conf.type)
-
-        # caffe_engine: -1-no field;  0-DEFAULT; 1-CAFFE; 2-CUDNN
-        # solver_mode: 0-CPU; 1-GPU
-        if solver_mode == 1:
-            singa_engine = 'cudnn'
-        else:
-            if caffe_engine == 2:
-                raise Exception('engine and solver mode mismatch!')
-            else:
-                singa_engine = 'singacpp'
-
-        if ((layer_conf.type == 'InnerProduct' or layer_conf.type == 14) or \
-            (layer_conf.type == 'Flatten' or layer_conf.type == 8)) and \
-            singa_engine == 'cudnn':
-            singa_engine = 'singacuda'
-
-        return singa_engine
-
-    def create_net(self):
-        '''
-        Create singa net based on caffe proto files.
-            net_proto: caffe prototxt that describes net
-            solver_proto: caffe prototxt that describe solver
-            input_sample_shape: shape of input data tensor
-        return:
-            a FeedForwardNet object
-        '''
-        caffe_net = self.read_net_proto()
-        caffe_solver = None
-        if self.caffe_solver_path is not None:
-            caffe_solver = self.read_solver_proto()
-        layer_confs = ''
-        flatten_id = 0
-
-        # If the net proto has the input shape
-        if len(caffe_net.input_dim) > 0:
-            self.input_sample_shape = caffe_net.input_dim
-        if len(caffe_net.layer):
-            layer_confs = caffe_net.layer
-        elif len(caffe_net.layers):
-            layer_confs = caffe_net.layers
-        else:
-            raise Exception('Invalid proto file!')
-
-        net = ffnet.FeedForwardNet()
-        for i in range(len(layer_confs)):
-            if layer_confs[i].type == 'Data' or layer_confs[i].type == 5:
-                continue
-            elif layer_confs[i].type == 'Input':
-                self.input_sample_shape = layer_confs[i].input_param.shape[0].dim[1:]
-            elif layer_confs[i].type == 'SoftmaxWithLoss' or layer_confs[i].type == 21:
-                net.loss = loss.SoftmaxCrossEntropy()
-            elif layer_confs[i].type == 'EuclideanLoss' or layer_confs[i].type == 7:
-                net.loss = loss.SquareError()
-            elif layer_confs[i].type == 'Accuracy' or layer_confs[i].type == 1:
-                net.metric = metric.Accuracy()
-            else:
-                strConf = layer_confs[i].SerializeToString()
-                conf = model_pb2.LayerConf()
-                conf.ParseFromString(strConf)
-                if caffe_solver:
-                    layer.engine = self.convert_engine(
-                        layer_confs[i], caffe_solver.solver_mode)
-                else:
-                    # if caffe_solver is None,
-                    layer.engine = self.convert_engine(layer_confs[i], 0)
-                lyr = layer.Layer(conf.name, conf)
-                if len(net.layers) == 0:
-                    print 'input sample shape: ', self.input_sample_shape
-                    lyr.setup(self.input_sample_shape)
-                    print lyr.name, lyr.get_output_sample_shape()
-                if layer_confs[i].type == 'InnerProduct' or layer_confs[i].type == 14:
-                    net.add(layer.Flatten('flat' + str(flatten_id)))
-                    flatten_id += 1
-                net.add(lyr)
-
-        return net
-
-    def convert_params(self, net):
-        '''
-        Convert params in .caffemodel into singa model.
-        This method only supports current version of Caffe(24-Nov-2016).
-        '''
-
-        params = net.param_values()
-        caffe_model = self.read_caffemodel()
-        layers = None
-        if len(caffe_model.layer):
-            layers = caffe_model.layer
-        else:
-            raise Exception('Invalid proto file!')
-
-        i = 0
-        first_conv = True
-        for layer in layers:
-            if layer.type == 'Convolution' or layer.type == 'InnerProduct':
-                assert(len(layer.blobs) == 2), 'Either 2 params per layer or 0'
-                wmat_dim = []
-                if getattr(layer.blobs[0].shape, 'dim', None) is not None:
-                    if len(layer.blobs[0].shape.dim) > 0:
-                        wmat_dim = layer.blobs[0].shape.dim
-                    else:
-                        wmat_dim = [layer.blobs[0].num, \
-                                layer.blobs[0].channels, \
-                                layer.blobs[0].height, \
-                                layer.blobs[0].width]
-                else:
-                    wmat_dim = list(layer.blobs[0].shape)
-
-                wmat = np.array(layer.blobs[0].data, dtype=np.float32)
-                bias = np.array(layer.blobs[1].data, dtype=np.float32)
-                #print layer.name, ' wmat_dim: ', wmat_dim
-
-                wdim = []
-                if layer.type == 'InnerProduct':
-                    wdim = wmat_dim[-2:]
-                else:
-                    if wmat_dim[1] == 3 and first_conv:  # BGR -> RGB
-                        wmat = wmat.reshape(wmat_dim)
-                        wmat[:, [0, 1, 2], :, :] = wmat[:, [2, 1, 0], :, :]
-                        first_conv = False
-                    nb_filters = wmat_dim[0]
-                    chw = 1
-                    for k in range(1, len(wmat_dim)):
-                        chw *= wmat_dim[k]
-                    wdim.extend([nb_filters, chw])
-                #print layer.name, ' wdim: ', wdim
-                w = np.reshape(wmat, wdim)
-
-                # TODO(wangwei) transpose SINGA's weight following caffe
-                if layer.type == 'InnerProduct':
-                    w = np.transpose(w)
-                params[i].copy_from_numpy(w)
-                i += 1
-                params[i].copy_from_numpy(bias)
-                i += 1
-                print 'converting layer {0}, wmat shape = {1}, bias shape = {2}'.format(layer.name, w.shape, bias.shape)
diff --git a/python/singa/data.py b/python/singa/data.py
index 492d218..8fb2ecd 100644
--- a/python/singa/data.py
+++ b/python/singa/data.py
@@ -23,6 +23,7 @@
 
     import image_tool
     from PIL import Image
+    from singa.data import ImageBatchIter
 
     tool = image_tool.ImageTool()
 
@@ -48,7 +49,11 @@
         img.save('img%d.png' % idx)
     data.end()
 '''
+from __future__ import print_function
+from __future__ import absolute_import
 
+from builtins import range
+from builtins import object
 import os
 import random
 import time
@@ -56,7 +61,7 @@
 import numpy as np
 
 
-class ImageBatchIter:
+class ImageBatchIter(object):
     '''Utility for iterating over an image dataset to get mini-batches.
 
     Args:
@@ -78,8 +83,14 @@
         capacity(int): the max num of mini-batches in the internal queue.
     '''
 
-    def __init__(self, img_list_file, batch_size, image_transform,
-                 shuffle=True, delimiter=' ', image_folder=None, capacity=10):
+    def __init__(self,
+                 img_list_file,
+                 batch_size,
+                 image_transform,
+                 shuffle=True,
+                 delimiter=' ',
+                 image_folder=None,
+                 capacity=10):
         self.img_list_file = img_list_file
         self.queue = Queue(capacity)
         self.batch_size = batch_size
@@ -97,16 +108,13 @@
         self.p.start()
         return
 
-    def next(self):
+    def __next__(self):
         assert self.p is not None, 'call start before next'
         while self.queue.empty():
             time.sleep(0.1)
         x, y = self.queue.get()  # dequeue one mini-batch
         return x, y
 
-    def stop(self):
-        self.end();
-
     def end(self):
         if self.p is not None:
             self.stop = True
@@ -136,7 +144,7 @@
                 while i < self.batch_size:
                     img_path, img_meta = img_list[index]
                     aug_images = self.image_transform(
-                            os.path.join(self.image_folder, img_path))
+                        os.path.join(self.image_folder, img_path))
                     assert i + len(aug_images) <= self.batch_size, \
                         'too many images (%d) in a batch (%d)' % \
                         (i + len(aug_images), self.batch_size)
@@ -155,7 +163,8 @@
                             random.shuffle(img_list)
                 # enqueue one mini-batch
                 if is_label_index:
-                    self.queue.put((np.asarray(x), np.asarray(y, dtype=np.int32)))
+                    self.queue.put((np.asarray(x), np.asarray(y,
+                                                              dtype=np.int32)))
                 else:
                     self.queue.put((np.asarray(x), y))
             else:
@@ -164,23 +173,25 @@
 
 
 if __name__ == '__main__':
-    import image_tool
+    from . import image_tool
     from PIL import Image
     tool = image_tool.ImageTool()
 
     def image_transform(img_path):
         global tool
-        return tool.load(img_path).resize_by_range(
-            (112, 128)).random_crop(
+        return tool.load(img_path).resize_by_range((112, 128)).random_crop(
             (96, 96)).flip().get()
 
-    data = ImageBatchIter('train.txt', 3,
-                          image_transform, shuffle=False, delimiter=',',
+    data = ImageBatchIter('train.txt',
+                          3,
+                          image_transform,
+                          shuffle=False,
+                          delimiter=',',
                           image_folder='images/',
                           capacity=10)
     data.start()
-    imgs, labels = data.next()
-    print labels
+    imgs, labels = next(data)
+    print(labels)
     for idx in range(imgs.shape[0]):
         img = Image.fromarray(imgs[idx].astype(np.uint8).transpose(1, 2, 0),
                               'RGB')
diff --git a/python/singa/device.py b/python/singa/device.py
index fdd2a92..cfc3eb8 100644
--- a/python/singa/device.py
+++ b/python/singa/device.py
@@ -22,35 +22,10 @@
 TODO(wangwei) implement py CudaGPU class.
 '''
 
+# from builtins import object
 from . import singa_wrap as singa
 
 
-class Device(object):
-    """ Class and member functions for singa::Device.
-
-    Create Device instances using the CreateXXXDevice.
-    """
-
-    def __init__(self, id, device):
-        """Device constructor given device ID.
-
-        Args:
-            id (int): device ID.
-            device: swig shared_ptr<Device>
-        """
-        self.id = id
-        self.singa_device = device
-
-    def set_rand_seed(self, seed):
-        self.singa_device.SetRandSeed(seed)
-
-    def get_host(self):
-        return self.singa_device.host()
-
-    def get_id(self):
-        return self.singa_device.id()
-
-
 def get_num_gpus():
     assert singa.USE_CUDA, 'SINGA has not been compiled with CUDA enabled.'
     return singa.Platform.GetNumGPUs()
@@ -81,6 +56,15 @@
     return singa.Platform.DeviceQuery(id, verbose)
 
 
+def create_cpu_device():
+    '''Create the default CPU device.
+
+    Returns:
+        a swig converted CPU device.
+    '''
+    return singa.Platform.GetDefaultDevice()
+
+
 def create_cuda_gpus(num):
     '''Create a list of CudaGPU devices.
 
@@ -100,7 +84,7 @@
         a swig converted CudaGPU device.
     '''
     assert singa.USE_CUDA, 'SINGA has not been compiled with CUDA enabled.'
-    return singa.Platform.CreateCudaGPUs(1)[0]
+    return create_cuda_gpu_on(0)
 
 
 def create_cuda_gpus_on(device_ids):
@@ -146,3 +130,7 @@
 def get_default_device():
     '''Get the default host device which is a CppCPU device'''
     return default_device
+
+
+def enable_lazy_alloc(enable):
+    '''Enable or disable lazy memory allocation on devices.
+
+    Args:
+        enable(bool): True to allocate tensor memory lazily.
+    '''
+    singa.Device.EnableLazyAlloc(enable)
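+
+
+# A minimal usage sketch (assumed typical calls; illustrative only):
+#
+#     from singa import device
+#     dev = device.create_cpu_device()
+#     device.enable_lazy_alloc(False)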
diff --git a/python/singa/image_tool.py b/python/singa/image_tool.py
index cc38307..1011db8 100644
--- a/python/singa/image_tool.py
+++ b/python/singa/image_tool.py
@@ -28,7 +28,10 @@
         img.save('%d.png' % idx)
 
 '''
+from __future__ import division
 
+from builtins import range
+from builtins import object
 import random
 import numpy as np
 from PIL import Image, ImageEnhance
@@ -54,11 +57,11 @@
         and center.
     '''
     if img.size[0] < patch[0]:
-        raise Exception(
-            'img size[0] %d is smaller than patch[0]: %d' % (img[0], patch[0]))
+        raise Exception('img size[0] %d is smaller than patch[0]: %d' %
+                        (img.size[0], patch[0]))
     if img.size[1] < patch[1]:
-        raise Exception(
-            'img size[1] %d is smaller than patch[1]: %d' % (img[1], patch[1]))
+        raise Exception('img size[1] %d is smaller than patch[1]: %d' %
+                        (img.size[1], patch[1]))
 
     if position == 'left_top':
         left, upper = 0, 0
@@ -69,8 +72,8 @@
     elif position == 'right_bottom':
         left, upper = img.size[0] - patch[0], img.size[1] - patch[1]
     elif position == 'center':
-        left, upper = (img.size[0] - patch[0]) / \
-            2, (img.size[1] - patch[1]) / 2
+        left, upper = (img.size[0] - patch[0]) // 2, (img.size[1] -
+                                                      patch[1]) // 2
     else:
         raise Exception('position is wrong')
 
@@ -93,8 +96,8 @@
         left, upper = 0, 0
         right, bottom = size[1], size[1]
     elif position == 'center':
-        left, upper = (size[0] - size[1]) / 2, 0
-        right, bottom = (size[0] + size[1]) / 2, size[1]
+        left, upper = (size[0] - size[1]) // 2, 0
+        right, bottom = (size[0] + size[1]) // 2, size[1]
     elif position == 'right':
         left, upper = size[0] - size[1], 0
         right, bottom = size[0], size[1]
@@ -102,8 +105,8 @@
         left, upper = 0, 0
         right, bottom = size[0], size[0]
     elif position == 'middle':
-        left, upper = 0, (size[1] - size[0]) / 2
-        right, bottom = size[0], (size[1] + size[0]) / 2
+        left, upper = 0, (size[1] - size[0]) // 2
+        right, bottom = size[0], (size[1] + size[0]) // 2
     elif position == 'bottom':
         left, upper = 0, size[1] - size[0]
         right, bottom = size[0], size[1]
@@ -112,7 +115,7 @@
     box = (left, upper, right, bottom)
     new_img = img.crop(box)
 
-    new_img = img.resize(patch)
+    new_img = new_img.resize(patch, Image.BILINEAR)
     # print box+crop
     # print "crop to box %d,%d,%d,%d and scale to %d,%d" % (box+crop)
     return new_img
@@ -125,7 +128,7 @@
         new_size = (small_size, int(small_size * size[1] / size[0]))
     else:
         new_size = (int(small_size * size[0] / size[1]), small_size)
-    new_img = img.resize(new_size)
+    new_img = img.resize(new_size, Image.BILINEAR)
     # print 'resize to (%d,%d)' % new_size
     return new_img
 
@@ -205,10 +208,10 @@
 
 
 def get_list_sample(l, sample_size):
-    return [l[i] for i in sorted(random.sample(xrange(len(l)), sample_size))]
+    return [l[i] for i in sorted(random.sample(range(len(l)), sample_size))]
 
 
-class ImageTool():
+class ImageTool(object):
     '''A tool for image augmentation.
 
     For operations with inplace=True, the returned value is the ImageTool
@@ -267,7 +270,7 @@
             rng: a tuple (begin,end), include begin, exclude end
             inplace: inplace imgs or not ( return new_imgs)
         '''
-        size_list = range(rng[0], rng[1])
+        size_list = list(range(rng[0], rng[1]))
         return self.resize_by_list(size_list, 1, inplace)
 
     def resize_by_list(self, size_list, num_case=1, inplace=True):
@@ -341,7 +344,7 @@
             rng: a tuple (begin,end) in degree, include begin, exclude end
             inplace: inplace imgs or not ( return new_imgs)
         '''
-        angle_list = range(rng[0], rng[1])
+        angle_list = list(range(rng[0], rng[1]))
         return self.rotate_by_list(angle_list, 1, inplace)
 
     def rotate_by_list(self, angle_list, num_case=1, inplace=True):
@@ -382,16 +385,13 @@
         '''
         new_imgs = []
         positions = [
-            "left_top",
-            "left_bottom",
-            "right_top",
-            "right_bottom",
-            "center"]
+            "left_top", "left_bottom", "right_top", "right_bottom", "center"
+        ]
         if num_case > 5 or num_case < 1:
             raise Exception('num_case must be in [1,5]')
         for img in self.imgs:
 
-            if num_case > 0 and num_case < 5:
+            if num_case < 5:
                 positions = get_list_sample(positions, num_case)
 
             for position in positions:
@@ -426,12 +426,12 @@
         for img in self.imgs:
             size = img.size
             if size[0] > size[1]:
-                if num_case > 0 and num_case < 3:
+                if num_case < 3:
                     positions = get_list_sample(positions_horizental, num_case)
                 else:
                     positions = positions_horizental
             else:
-                if num_case > 0 and num_case < 3:
+                if num_case < 3:
                     positions = get_list_sample(positions_vertical, num_case)
                 else:
                     positions = positions_vertical
@@ -454,12 +454,11 @@
         patch5 = 5
         patch3 = 3
         if num_case < 1 or num_case > patch5 + patch3:
-            raise Exception(
-                'num_case must be in [0,%d]' % (patch5 + patch3))
+            raise Exception('num_case must be in [0,%d]' % (patch5 + patch3))
         if num_case == patch5 + patch3:
             count = patch5
         else:
-            sample_list = range(0, patch5 + patch3)
+            sample_list = list(range(0, patch5 + patch3))
             samples = get_list_sample(sample_list, num_case)
             count = 0
             for s in samples:
@@ -492,8 +491,8 @@
                 (img.size[0], img.size[1], patch[0], patch[1])
             left_offset = random.randint(0, img.size[0] - patch[0])
             top_offset = random.randint(0, img.size[1] - patch[1])
-            box = (left_offset, top_offset,
-                   left_offset + patch[0], top_offset + patch[1])
+            box = (left_offset, top_offset, left_offset + patch[0],
+                   top_offset + patch[1])
             new_imgs.append(img.crop(box))
 
         if inplace:
@@ -502,10 +501,11 @@
         else:
             return new_imgs
 
-    def random_crop_resize(self,patch,inplace=True):
-        ''' Crop of the image at a random size between 0.08 to 1 of input image size
-            and random aspect ratio between 3/4 to 4/3 of input image aspect ratio is made.
+    def random_crop_resize(self, patch, inplace=True):
+        ''' Crop the image to a random size between 0.08 and 1.0 of the
+            input area, with a random aspect ratio between 3/4 and 4/3,
+            then resize the crop to the given patch size.
+
         Args:
             patch(tuple): width and height of the patch
             inplace(Boolean): replace the internal images list with the patches
@@ -513,20 +513,23 @@
         '''
         new_imgs = []
         for img in self.imgs:
-            area=img.size[0]*img.size[1]
-            target_area = random.uniform(0.08, 1.0) * area
-            aspect_ratio = random.uniform(3. / 4, 4. / 3)
-            crop_x = int(round(math.sqrt(target_area * aspect_ratio)))
-            crop_y = int(round(math.sqrt(target_area / aspect_ratio)))
-            assert img.size[0] >= patch[0] and img.size[1] >= patch[1],\
-                'img size (%d, %d), patch size (%d, %d)' % \
-                (img.size[0], img.size[1], patch[0], patch[1])
-            left_offset = random.randint(0, img.size[0] - crop_x)
-            top_offset = random.randint(0, img.size[1] - crop_y)
-            box = (left_offset, top_offset,
-                   left_offset + crop_x, top_offset + crop_y)
-            img_croped=img.crop(box)
-            img_resized=img_croped.resize(patch)
+            area = img.size[0] * img.size[1]
+            img_resized = None
+            for attempt in range(10):
+                target_area = random.uniform(0.08, 1.0) * area
+                aspect_ratio = random.uniform(3. / 4, 4. / 3)
+                crop_x = int(round(math.sqrt(target_area * aspect_ratio)))
+                crop_y = int(round(math.sqrt(target_area / aspect_ratio)))
+                if img.size[0] > crop_x and img.size[1] > crop_y:
+                    left_offset = random.randint(0, img.size[0] - crop_x)
+                    top_offset = random.randint(0, img.size[1] - crop_y)
+                    box = (left_offset, top_offset, left_offset + crop_x,
+                           top_offset + crop_y)
+                    img_cropped = img.crop(box)
+                    img_resized = img_cropped.resize(patch, Image.BILINEAR)
+                    break
+            if img_resized is None:
+                img_resized = img.resize(patch, Image.BILINEAR)
             new_imgs.append(img_resized)
 
         if inplace:
diff --git a/python/singa/initializer.py b/python/singa/initializer.py
index fb99663..c907736 100644
--- a/python/singa/initializer.py
+++ b/python/singa/initializer.py
@@ -17,44 +17,170 @@
 # =============================================================================
 '''Popular initialization methods for parameter values (Tensor objects).
 
+credit: this module is adapted from keras
+https://github.com/keras-team/keras/blob/master/keras/initializers.py
+
+All functions in this module change the input tensor in-place.
+
 Example usages::
 
     from singa import tensor
     from singa import initializer
 
     x = tensor.Tensor((3, 5))
-    initializer.uniform(x, 3, 5) # use both fan_in and fan_out
-    initializer.uniform(x, 3, 0)  # use only fan_in
+    initializer.he_uniform(x)
+    initializer.glorot_normal(x)
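+
+    # a sketch: initialize a conv kernel of shape (out, in, kh, kw);
+    # the shape here is illustrative only
+    w = tensor.Tensor((32, 3, 3, 3))
+    initializer.he_normal(w)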
 '''
 
+from __future__ import division
 import math
+import numpy as np
+from deprecated import deprecated
 
 
-def uniform(t, fan_in=0, fan_out=0):
+def eye(t):
+    """Initialize the tensor with ones on the diagonal and zeros elsewhere.
+
+    Note: it is implemented by calling numpy. 
+    Do not call it within forward propagation when computation graph is enabled.
+
+    # Arguments
+        t(Tensor): the matrix to be filled in.
+    """
+    if len(t.shape) != 2:
+        raise ValueError("Only tensors with 2 dimensions are supported")
+    a = np.eye(t.shape[0], t.shape[1], dtype=np.float32)
+    t.copy_from(a)
+
+
+def orthogonal(t, gain=1.0):
+    """Initializer that generates a random orthogonal matrix.
+
+    Note: it is implemented by calling numpy. 
+    Do not call it within forward propagation when computation graph is enabled.
+
+    # Arguments
+        t(Tensor): the matrix to be filled in.
+        gain: Multiplicative factor to apply to the orthogonal matrix.
+
+    # References
+        - [Exact solutions to the nonlinear dynamics of learning in deep
+           linear neural networks](http://arxiv.org/abs/1312.6120)
+    """
+    if len(t.shape) != 2:
+        raise ValueError("Only tensors with 2 dimensions are supported")
+
+    a = np.random.normal(0.0, 1.0, t.shape).astype(np.float32)
+    u, _, v = np.linalg.svd(a, full_matrices=False)
+    # Pick the one with the correct shape.
+    q = u if u.shape == t.shape else v
+    q *= gain
+    t.copy_from(q)
+
+
+def lecun_uniform(t):
+    """LeCun uniform initializer.
+
+    It draws samples from a uniform distribution within [-limit, limit]
+    where `limit` is `sqrt(3 / fan_in)`
+    where `fan_in` is the number of input units in the weight tensor.
+
+    # Arguments
+        t(Tensor):the tensor to be filled in.
+
+    # References
+        - [Efficient BackProp](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)
+    """
+    _random_fill(t, scale=1., mode='fan_in', distribution='uniform')
+
+
+def glorot_normal(t):
+    """Glorot normal initializer, also called Xavier normal initializer.
+
+    It draws samples from a normal distribution centered on 0
+    with `stddev = sqrt(2 / (fan_in + fan_out))`
+    where `fan_in` is the number of input units in the weight tensor
+    and `fan_out` is the number of output units in the weight tensor.
+
+    # Arguments
+        t(Tensor):the tensor to be filled in.
+
+    # References
+        - [Understanding the difficulty of training deep feedforward neural
+           networks](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf)
+    """
+    _random_fill(t, scale=1., mode='fan_avg', distribution='normal')
+
+
+def glorot_uniform(t):
+    """Glorot uniform initializer, also called Xavier uniform initializer.
+
+    It draws samples from a uniform distribution within [-limit, limit]
+    where `limit` is `sqrt(6 / (fan_in + fan_out))`
+    where `fan_in` is the number of input units in the weight tensor
+    and `fan_out` is the number of output units in the weight tensor.
+
+    # Arguments
+        t(Tensor):the tensor to be filled in.
+    # References
+        - [Understanding the difficulty of training deep feedforward neural
+           networks](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf)
+    """
+    _random_fill(t, scale=1., mode='fan_avg', distribution='uniform')
+
+
+def he_normal(t):
+    """He normal initializer.
+
+    It draws samples from a truncated normal distribution centered on 0
+    with `stddev = sqrt(2 / fan_in)`
+    where `fan_in` is the number of input units in the weight tensor.
+
+    # Arguments
+        t(Tensor):the tensor to be filled in.
+
+    # References
+        - [Delving Deep into Rectifiers: Surpassing Human-Level Performance on
+           ImageNet Classification](http://arxiv.org/abs/1502.01852)
+    """
+    _random_fill(t, scale=2., mode='fan_in', distribution='normal')
+
+
+def lecun_normal(t):
+    """LeCun normal initializer.
+
+    It draws samples from a truncated normal distribution centered on 0
+    with `stddev = sqrt(1 / fan_in)`
+    where `fan_in` is the number of input units in the weight tensor.
+
+    # Arguments
+        t(Tensor):the tensor to be filled in.
+
+    # References
+        - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
+        - [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)
+    """
+    _random_fill(t, scale=1., mode='fan_in', distribution='normal')
+
+
+def he_uniform(t):
     '''Initialize the values of the input tensor following a uniform
     distribution with specific bounds.
 
-    Args:
-        fan_in(int): for the weight Tensor of a convolution layer,
-            fan_in = nb_channel * kh * kw; for dense layer,
-            fan_in = input_feature_length
-        fan_out(int): for the convolution layer weight Tensor,
-            fan_out = nb_filter * kh * kw; for the weight Tensor of a dense
-            layer, fan_out = output_feature_length
+    It draws samples from a uniform distribution within [-limit, limit]
+    where `limit` is `sqrt(6 / fan_in)`
+    where `fan_in` is the number of input units in the weight tensor.
 
-    Ref: [Bengio and Glorot 2010]: Understanding the difficulty of
-    training deep feedforward neuralnetworks.
+    # Arguments
+        t(Tensor): the tensor to be filled in.
 
+    # References
+        - [Delving Deep into Rectifiers: Surpassing Human-Level Performance on
+           ImageNet Classification](http://arxiv.org/abs/1502.01852)
     '''
-    assert fan_in > 0 or fan_out > 0, \
-        'fan_in and fan_out cannot be 0 at the same time'
-    avg = 2
-    if fan_in * fan_out == 0:
-        avg = 1
-    x = math.sqrt(3.0 * avg / (fan_in + fan_out))
-    t.uniform(-x, x)
+    _random_fill(t, scale=2., mode='fan_in', distribution='uniform')
 
 
+@deprecated(reason="Use he_normal or glorot_normal")
 def gaussian(t, fan_in=0, fan_out=0):
     '''Initialize the values of the input tensor following a Gaussian
     distribution with specific std.
@@ -79,12 +205,11 @@
     t.gaussian(0, std)
 
 
+@deprecated(reason="Use glorot_normal")
 def xavier(t):
     '''Initialize the matrix parameter follow a Uniform distribution from
     [-sqrt(6/(fan_in + fan_out)), sqrt(6/(fan_in + fan_out))].
 
-    Deprecated. Please use uniform()
-
     Args:
         t (Tensor): the parater tensor
     '''
@@ -93,12 +218,11 @@
     t.uniform(-scale, scale)
 
 
+@deprecated(reason="Use glorot_uniform")
 def glorot(t):
     '''Initialize the matrix parameter follow a Gaussian distribution with
     mean = 0 and std = sqrt(2.0 / (nb_row + nb_col))
 
-    Deprecated. Please use gaussian()
-
     Args:
         t (Tensor): the parater tensor
     '''
@@ -107,12 +231,11 @@
     t *= scale
 
 
+@deprecated(reason="Use he_normal")
 def msra(t):
     '''Initialize the matrix parameter follow a Guassian distribution with
     mean = 0, std = math.sqrt(2.0 / nb_row).
 
-    Deprecated. Please use gaussian()
-
     Ref [He, Zhang, Ren and Sun 2015]: Specifically accounts for ReLU
     nonlinearities.
 
@@ -120,3 +243,94 @@
         t (Tensor): the parater tensor
     '''
     t.gaussian(0, math.sqrt(2.0 / t.shape[0]))
+
+
+def _compute_fans(shape, data_format='channels_first'):
+    """Computes the number of input and output units for a weight shape.
+    # Arguments
+        shape: Integer shape tuple.
+        data_format: Image data format to use for convolution kernels.
+            Note that all kernels in Keras are standardized on the
+            `channels_last` ordering (even when inputs are set
+            to `channels_first`).
+    # Returns
+        A tuple of scalars, `(fan_in, fan_out)`.
+    # Raises
+        ValueError: in case of invalid `data_format` argument.
+    """
+    if len(shape) == 2:
+        fan_in = shape[0]
+        fan_out = shape[1]
+    elif len(shape) in {3, 4, 5}:
+        # Assuming convolution kernels (1D, 2D or 3D).
+        # TH kernel shape: (depth, input_depth, ...)
+        # TF kernel shape: (..., input_depth, depth)
+        if data_format == 'channels_first':
+            receptive_field_size = np.prod(shape[2:])
+            fan_in = shape[1] * receptive_field_size
+            fan_out = shape[0] * receptive_field_size
+        elif data_format == 'channels_last':
+            receptive_field_size = np.prod(shape[:-2])
+            fan_in = shape[-2] * receptive_field_size
+            fan_out = shape[-1] * receptive_field_size
+        else:
+            raise ValueError('Invalid data_format: ' + data_format)
+    else:
+        # No specific assumptions.
+        fan_in = np.sqrt(np.prod(shape))
+        fan_out = np.sqrt(np.prod(shape))
+    return fan_in, fan_out
+
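+# A sketch of the fan computation (shapes are illustrative only):
+#
+#     _compute_fans((20, 10))        # dense W (in, out)      -> (20, 10)
+#     _compute_fans((32, 3, 3, 3))   # conv (out, in, kh, kw) -> (27, 288)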
+
+def _random_fill(t, scale, mode, distribution):
+    """Fill the tensor with values sampled from a distribution.
+
+    With `distribution="normal"`, samples are drawn from a normal
+    distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:
+        - number of input units in the weight tensor, if mode = "fan_in"
+        - number of output units, if mode = "fan_out"
+        - average of the numbers of input and output units, if mode = "fan_avg"
+
+    With `distribution="uniform"`,
+    samples are drawn from a uniform distribution
+    within [-limit, limit], with `limit = sqrt(3 * scale / n)`.
+
+
+    Args:
+        t (Tensor): the tensor to be filled
+        scale (float): the scale factor
+        mode (str): one of "fan_in", "fan_out" or "fan_avg"
+        distribution (str): "normal" or "uniform"
+
+    Raises:
+        ValueError: in case of an invalid value for scale, mode or distribution
+    """
+    if scale <= 0.:
+        raise ValueError('`scale` must be a positive float. Got:', scale)
+    mode = mode.lower()
+    if mode not in {'fan_in', 'fan_out', 'fan_avg'}:
+        raise ValueError(
+            'Invalid `mode` argument: '
+            'expected one of {"fan_in", "fan_out", "fan_avg"} '
+            'but got', mode)
+    distribution = distribution.lower()
+    if distribution not in {'normal', 'uniform'}:
+        raise ValueError(
+            'Invalid `distribution` argument: '
+            'expected one of {"normal", "uniform"} '
+            'but got', distribution)
+
+    fan_in, fan_out = _compute_fans(t.shape)
+    if mode == 'fan_in':
+        scale /= max(1., fan_in)
+    elif mode == 'fan_out':
+        scale /= max(1., fan_out)
+    else:
+        scale /= max(1., float(fan_in + fan_out) / 2)
+    if distribution == 'normal':
+        # 0.879... = scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.)
+        # stddev = np.sqrt(scale) / .87962566103423978
+        t.gaussian(0., np.sqrt(scale))
+    else:
+        limit = np.sqrt(3. * scale)
+        t.uniform(-limit, limit)
\ No newline at end of file
diff --git a/python/singa/layer.py b/python/singa/layer.py
index 4fe9983..e5abea7 100644
--- a/python/singa/layer.py
+++ b/python/singa/layer.py
@@ -8,1286 +8,1617 @@
 #
 #   http://www.apache.org/licenses/LICENSE-2.0
 #
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
 # =============================================================================
-""" Python layers wrap the C++ layers to provide simpler construction APIs.
 
-Example usages::
+import math
+from functools import wraps
+from collections import OrderedDict
 
-    from singa import layer
-    from singa import tensor
-    from singa import device
-
-    layer.engine = 'cudnn'  # to use cudnn layers
-    dev = device.create_cuda_gpu()
-
-    # create a convolution layer
-    conv = layer.Conv2D('conv', 32, 3, 1, pad=1, input_sample_shape=(3, 32, 32))
-
-    # init param values
-    w, b = conv.param_values()
-    w.guassian(0, 0.01)
-    b.set_value(0)
-    conv.to_device(dev)  # move the layer data onto a CudaGPU device
-
-    x = tensor.Tensor((3, 32, 32), dev)
-    x.uniform(-1, 1)
-    y = conv.foward(True, x)
-
-    dy = tensor.Tensor()
-    dy.reset_like(y)
-    dy.set_value(0.1)
-    # dp is a list of tensors for parameter gradients
-    dx, dp = conv.backward(kTrain, dy)
-"""
-
-from sets import Set
-from . import singa_wrap
-from .proto import model_pb2
-import tensor
+from singa import utils
+from .tensor import Tensor
+from . import singa_wrap as singa
 
 
-engine = 'cudnn'
-'''engine is the prefix of layer identifier.
+class LayerMeta(type):
 
-The value could be one of [**'cudnn', 'singacpp', 'singacuda', 'singacl'**], for
-layers implemented using the cudnn library, Cpp, Cuda and OpenCL respectively.
-For example, CudnnConvolution layer is identified by 'cudnn_convolution';
-'singacpp_convolution' is for Convolution layer;
-Some layers' implementation use only Tensor functions, thererfore they are
-transparent to the underlying devices. For threse layers, they would have
-multiple identifiers, e.g., singacpp_dropout, singacuda_dropout and
-singacl_dropout are all for the Dropout layer. In addition, it has an extra
-identifier 'singa', i.e. 'singa_dropout' also stands for the Dropout layer.
+    def init_wrapper(func):
 
-engine is case insensitive. Each python layer would create the correct specific
-layer using the engine attribute.
+        @wraps(func)
+        def wrapper(self, *args, **kwargs):
+            if len(args) == 0:
+                return
+
+            if isinstance(args[0], list):
+                assert len(args[0]) > 0 and isinstance(args[0][0], Tensor), (
+                    'initialize function expects PlaceHolders or Tensors')
+                dev = args[0][0].device
+            else:
+                assert len(args) > 0 and isinstance(args[0], Tensor), (
+                    'initialize function expects PlaceHolders or Tensors')
+                dev = args[0].device
+
+            prev_state = dev.graph_enabled()
+            dev.EnableGraph(False)
+            func(self, *args, **kwargs)
+            self._initialized = True
+            dev.EnableGraph(prev_state)
+
+        return wrapper
+
+    def forward_wrapper(func):
+
+        @wraps(func)
+        def wrapper(self, *args, **kwargs):
+            if not self._initialized:
+                self.initialize(*args, **kwargs)
+                self._initialized = True
+            return func(self, *args, **kwargs)
+
+        return wrapper
+
+    def __new__(cls, name, bases, attr):
+        if 'initialize' in attr:
+            attr['initialize'] = LayerMeta.init_wrapper(attr['initialize'])
+        if 'forward' in attr:
+            attr['forward'] = LayerMeta.forward_wrapper(attr['forward'])
+
+        return super(LayerMeta, cls).__new__(cls, name, bases, attr)
+
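+# With LayerMeta, a layer's initialize() runs automatically on the first
+# forward() call, so parameter shapes can be inferred from the input.
+# A sketch (names and shapes are illustrative only):
+#
+#     fc = Linear(10)   # no input feature size is needed up front
+#     y = fc(x)         # initialize(x) runs here, then forward(x)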
+
+class Layer(object, metaclass=LayerMeta):
+
+    sep = '.'
+
+    def __init__(self):
+        self.name = None
+        self._initialized = False
+        self._parent = None
+        self._layers = dict()
+
+    def initialize(self, *input):
+        """ Initialize the layer
+
+        This function will be called before the forward function if this
+        layer hasn't been initialized. Those members that need to be
+        initialized according to the input will be initialized in this
+        function. e.g. parameters, states and handles.
+
+        Args:
+            *input: input args, should be consistent with the forward function
+        """
+        pass
+
+    def forward(self, *input):
+        """ Forward propagation
+
+        Args:
+            *input: input arguments consisting of only PyTensors
+        Returns:
+            PyTensor instance(s)
+        """
+        raise NotImplementedError
+
+    def __call__(self, *args, **kwargs):
+        return self.forward(*args, **kwargs)
+
+    def get_params(self):
+        """ Get parameters of this layer and all sublayers
+
+        Returns:
+            parameters(dict): a dictionary containing the parameter names
+            and values of this layer and all sublayers.
+        """
+        params = dict()
+        sublayers = self._layers
+        for name, sublayer in sublayers.items():
+            if sublayer._initialized:
+                params.update(sublayer.get_params())
+        return params
+
+    def set_params(self, parameters):
+        """ Set parameters for this layer and all sublayers
+
+        Args:
+            parameters(dict): A dictionary contains parameter names
+            and corresponding values. The value shoud be either a
+            PyTensor or numpy ndarray
+        """
+        names = parameters.keys()
+        sublayers = self._layers
+        for name, sublayer in sublayers.items():
+            if sublayer._initialized:
+                if self._has_layer_param(sublayer, names):
+                    sublayer.set_params(parameters)
+
+    def get_states(self):
+        """ Get states of this layer and all sublayers
+
+        Returns:
+            states(dict): a dictionary containing the state names and values
+            of this layer and all sublayers.
+        """
+        states = dict()
+        sublayers = self._layers
+        for name, sublayer in sublayers.items():
+            if sublayer._initialized:
+                states.update(sublayer.get_states())
+        states.update(self.get_params())
+        return states
+
+    def set_states(self, states):
+        """ Set states for this layer and all sublayers
+
+        Args:
+            states(dict): a dictionary of state names and corresponding
+            values; each value should be either a PyTensor or a
+            numpy ndarray
+        """
+        names = states.keys()
+        sublayers = self._layers
+        for name, sublayer in sublayers.items():
+            if sublayer._initialized:
+                if self._has_layer_param(sublayer, names):
+                    sublayer.set_states(states)
+        self.set_params(states)
+
+    def device_check(self, *inputs):
+        """ Check if the devices of the input tensor are the same.
+
+        Keep the device where each tensors is located the same as the
+        first tensor. Copy data to the device of the first tensor if
+        the device does not match.
+
+        Args:
+            *inputs: input args consisting of only PyTensors
+        """
+        # disable the graph to avoid buffering the data-transfer operations
+        x_device = inputs[0].device
+        prev_state = x_device.graph_enabled()
+        x_device.EnableGraph(False)
+        x_dev_id = x_device.id()
+        for var in inputs:
+            if var.device.id() != x_dev_id:
+                var.to_device(x_device)
+        x_device.EnableGraph(prev_state)
+
+    def _has_layer_param(self, layer, names):
+        """ Determine whether names contains parameter names in the layer
+
+        Args:
+            layer(Layer): the layer instance
+            names(list): the list of parameter names
+
+        Returns:
+            boolean: whether names contains parameter names in that layer
+        """
+        for name in names:
+            if name.startswith(layer.name):
+                return True
+        return False
+
+    def _get_name_prefix(self):
+        """ Get the name prefix
+
+        Returns:
+            prefix(str): the layer or param name prefix
+        """
+        if self.name and self._parent:
+            return self.name + Layer.sep
+        else:
+            return ''
+
+    def __getattr__(self, name):
+        if '_layers' in self.__dict__:
+            layers = self.__dict__['_layers']
+            if name in layers:
+                return layers[name]
+        raise AttributeError("'{}' object has no attribute '{}'".format(
+            type(self).__name__, name))
+
+    def __setattr__(self, name, value):
+        if isinstance(value, Layer):
+            # TODO: remove the attr from dict first
+            self.__dict__['_layers'][name] = value
+            value.__dict__['_parent'] = self
+            value.name = self._get_name_prefix() + name
+        else:
+            object.__setattr__(self, name, value)
+            if isinstance(value, Tensor) and value.is_dummy():
+                # WARN: If tensors are initialized in __init__ function
+                #       their names may be incorrect and should be reset
+                value.name = self._get_name_prefix() + name
+            elif name == 'name' and value:
+                # WARN: can't reset the name after the initialization
+                # update sublayer name
+                for name, sublayer in self._layers.items():
+                    sublayer.name = self._get_name_prefix() + name
+
+    def __delattr__(self, name):
+        if name in self._layers:
+            del self._layers[name]
+        else:
+            object.__delattr__(self, name)
+
+    def register_layers(self, *args):
+        """ Register a list of sublayers.
+
+        Can only be called once in each subclass.
+
+        Args:
+            *args: a list of sublayers, or a single OrderedDict mapping
+            the name of each sublayer to its instance
+        """
+        if len(args) == 1 and isinstance(args[0], OrderedDict):
+            items = args[0].items()
+        else:
+            items = [(v.__class__.__name__ + '_' + str(idx), v)
+                     for idx, v in enumerate(args)]
+
+        for name, value in items:
+            if isinstance(value, Layer):
+                self._layers[name] = value
+                value.__dict__['_parent'] = self
+                value.name = name
+
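+# A minimal sketch of sublayer registration (illustration only; MyBlock is
+# hypothetical). Assigning a Layer as an attribute goes through
+# __setattr__, which records it in self._layers and prefixes its name with
+# the parent's name (joined by Layer.sep):
+#
+#   class MyBlock(Layer):
+#       def __init__(self):
+#           super(MyBlock, self).__init__()
+#           self.fc = Linear(10)   # registered as sublayer 'fc'
+#
+# Parameter and state names are therefore hierarchical, which is what
+# _has_layer_param() relies on when routing set_params()/set_states().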
+
+class Linear(Layer):
+    """
+    Generate a Linear operator
+    """
+
+    # TODO: replace current with
+    #   def __init__(self, out_features, bias=True):
+    def __init__(self, out_features, *args, bias=True, **kwargs):
+        """
+        Args:
+            out_features: int, the number of output features
+            bias: bool
+        """
+        super(Linear, self).__init__()
+
+        self.out_features = out_features
+
+        # TODO: for backward compatibility, to remove
+        if len(args) > 0:
+            self.in_features = out_features
+            self.out_features = args[0]
+        if len(args) > 1:
+            self.bias = args[1]
+        else:
+            self.bias = bias
+
+    def initialize(self, x):
+        self.in_features = x.shape[1]
+        w_shape = (self.in_features, self.out_features)
+        b_shape = (self.out_features,)
+
+        self.W = Tensor(shape=w_shape, requires_grad=True, stores_grad=True)
+        std = math.sqrt(2.0 / (self.in_features + self.out_features))
+        self.W.gaussian(0.0, std)
+
+        if self.bias:
+            self.b = Tensor(shape=b_shape, requires_grad=True, stores_grad=True)
+            self.b.set_value(0.0)
+        else:
+            self.b = None
+
+    def forward(self, x):
+        if self.b:
+            self.device_check(x, self.W, self.b)
+        else:
+            self.device_check(x, self.W)
+
+        assert x.shape[1] == self.W.shape[0], (
+            "Linear layer expects input features size %d received %d" %
+            (self.W.shape[0], x.shape[1]))
+
+        y = autograd.matmul(x, self.W)
+        if self.bias:
+            y = autograd.add_bias(y, self.b, axis=0)
+        return y
+
+    def get_params(self):
+        if self.bias:
+            return {self.W.name: self.W, self.b.name: self.b}
+        else:
+            return {self.W.name: self.W}
+
+    def set_params(self, parameters):
+        self.W.copy_from(parameters[self.W.name])
+        if self.bias:
+            self.b.copy_from(parameters[self.b.name])
+
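+# A minimal usage sketch for Linear (illustration only, assuming the
+# lazy-initialization plumbing of the Layer base class and the singa
+# tensor/device modules):
+#
+#   from singa import tensor, device
+#   dev = device.get_default_device()
+#   x = tensor.Tensor((4, 20), dev)
+#   x.gaussian(0.0, 1.0)
+#   fc = Linear(10)
+#   y = fc(x)   # initialize() runs on the first call; y.shape == (4, 10)
+#   # fc.W has shape (20, 10); fc.b has shape (10,)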
+
+class Gemm(Layer):
+    """
+    Generate a Gemm operator
+    Y = alpha * A' * B' + beta * C, where A' (resp. B') is A (resp. B)
+    transposed if transA (resp. transB) is set; B is the weight and C is
+    the bias
+    """
+
+    def __init__(self,
+                 nb_kernels,
+                 alpha=1.0,
+                 beta=1.0,
+                 transA=False,
+                 transB=True,
+                 bias=True,
+                 bias_shape=None):
+        """
+        Args:
+            nb_kernels: int, the number of output features (the second
+                dimension of the output)
+            alpha (float): Scalar multiplier for the product of input tensors A * B.
+            beta (float): Scalar multiplier for input tensor C.
+            transA (bool): Whether A should be transposed
+            transB (bool): Whether B should be transposed
+            bias: bool
+            bias_shape (tuple, optional): the shape of the bias; defaults
+                to (1, nb_kernels)
+        """
+        super(Gemm, self).__init__()
+        self.nb_kernels = nb_kernels
+        self.alpha = alpha
+        self.beta = beta
+        self.transA = 1 if transA else 0
+        self.transB = 1 if transB else 0
+        self.bias = bias
+        self.bias_shape = bias_shape
+
+    def initialize(self, x):
+        if self.transA == 0:
+            self.in_features = x.shape[-1]
+        else:
+            self.in_features = x.shape[0]
+
+        if self.transB == 0:
+            w_shape = (self.in_features, self.nb_kernels)
+        else:
+            w_shape = (self.nb_kernels, self.in_features)
+
+        if self.bias_shape:
+            b_shape = self.bias_shape
+        else:
+            b_shape = (1, self.nb_kernels)
+
+        self.W = Tensor(shape=w_shape,
+                        requires_grad=True,
+                        stores_grad=True,
+                        device=x.device)
+        std = math.sqrt(2.0 / (self.in_features + self.nb_kernels))
+        self.W.gaussian(0.0, std)
+
+        if self.bias:
+            self.b = Tensor(shape=b_shape,
+                            requires_grad=True,
+                            stores_grad=True,
+                            device=x.device)
+            self.b.set_value(0.0)
+        else:
+            self.b = None
+
+    def forward(self, x):
+        if self.b:
+            self.device_check(x, self.W, self.b)
+        else:
+            self.device_check(x, self.W)
+
+        if self.transA == 0:
+            in_features = x.shape[-1]
+        else:
+            in_features = x.shape[0]
+
+        if self.transB == 0:
+            in_features_w = self.W.shape[0]
+        else:
+            in_features_w = self.W.shape[-1]
+
+        assert in_features == in_features_w, (
+            "Gemm layer expects input features size %d received %d" %
+            (in_features_w, in_features))
+        y = autograd.gemm(x, self.W, self.b, self.alpha, self.beta, self.transA,
+                          self.transB)
+
+        return y
+
+    def get_params(self):
+        if self.bias:
+            return {self.W.name: self.W, self.b.name: self.b}
+        else:
+            return {self.W.name: self.W}
+
+    def set_params(self, parameters):
+        self.W.copy_from(parameters[self.W.name])
+        if self.bias:
+            self.b.copy_from(parameters[self.b.name])
+
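+# A sketch of how transB affects the stored weight (illustration only).
+# With the default transB=True and input x of shape (4, 20):
+#
+#   gemm = Gemm(10)            # W is created as (10, 20) in initialize()
+#   y = gemm(x)                # y = alpha * x * W' + beta * b
+#   gemm2 = Gemm(10, transB=False)   # W would be stored as (20, 10)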
+
+class Embedding(Layer):
+    """
+    Generate an Embedding operator
+    """
+
+    def __init__(self, input_dim, output_dim, initializer="gaussian"):
+        """init the Embedding operator
+        Args:
+            input_dim (int): the number of different words in the dictionary
+            output_dim (int): the dimension of a word after the embedding
+            initializer (str, optional): weight initializer, can be [uniform, gaussian]. Defaults to "gaussian".
+        """
+        super(Embedding, self).__init__()
+        self.input_dim = input_dim
+        self.output_dim = output_dim
+        self.initializer = initializer
+
+    def initialize(self, x):
+        w_shape = (self.input_dim, self.output_dim)
+        self.W = Tensor(shape=w_shape,
+                        requires_grad=True,
+                        stores_grad=True,
+                        device=x.device)
+        if self.initializer == 'uniform':
+            self.W.uniform(-1., 1.)
+        else:
+            self.W.gaussian(0., 1.)
+
+    def from_pretrained(self, W, freeze=True):
+        self.set_params({self.W.name: W})
+        self.W.requires_grad = not freeze
+
+    def forward(self, x):
+        return autograd.embedding(x, self.W)
+
+    def get_params(self):
+        return {self.W.name: self.W}
+
+    def set_params(self, parameters):
+        self.W.copy_from(parameters[self.W.name])
+
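+# A sketch of loading pretrained embeddings (illustration only; `idx`
+# holds word indices and `pretrained` is a hypothetical array of shape
+# (input_dim, output_dim)):
+#
+#   emb = Embedding(input_dim=10000, output_dim=128)
+#   y = emb(idx)   # initializes W lazily, then looks up the rows of W
+#   emb.from_pretrained(pretrained, freeze=True)   # copies W, stops its gradient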
+
+class Conv2d(Layer):
+    """
+    Generate a Conv 2d operator
+    """
+
+    def __init__(self,
+                 nb_kernels,
+                 kernel_size,
+                 *args,
+                 stride=1,
+                 padding=0,
+                 dilation=1,
+                 group=1,
+                 bias=True,
+                 pad_mode="NOTSET",
+                 activation="NOTSET",
+                 **kwargs):
+        """
+        Args:
+            nb_kernels (int): the number of output channels, i.e., the
+                number of filters
+            kernel_size (int or tuple): kernel size for the two spatial
+                axes; if an int is given, the kernel size is expanded to
+                (int, int)
+            stride (int or tuple): stride; the logic is the same as for
+                kernel_size
+            padding (int, tuple, list or None): padding; the logic is the
+                same as for kernel_size. If pad_mode is set to "SAME_UPPER"
+                or "SAME_LOWER", padding can be None and is computed
+                automatically
+            dilation (int): only 1 is supported
+            group (int): group
+            bias (bool): bias
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+            activation (string): can be NOTSET or RELU; the default NOTSET
+                means no activation follows the conv2d layer, while RELU
+                appends a ReLU after the current conv2d layer
+        """
+        super(Conv2d, self).__init__()
+
+        # the old code created the layer like Conv2d(8, 16, 3) or
+        # Conv2d(8, 16, 3, stride=1); the following block keeps backward
+        # compatibility with that calling convention
+        if len(args) > 0:
+            nb_kernels = kernel_size
+            kernel_size = args[0]
+        if len(args) > 1:
+            stride = args[1]
+        if len(args) > 2:
+            padding = args[2]
+
+        self.nb_kernels = nb_kernels
+        self.kernel_size = kernel_size
+        self.stride = stride
+        self.padding = padding
+        self.dilation = dilation
+        self.group = group
+        self.bias = bias
+        self.pad_mode = pad_mode
+        self.activation = activation
+
+        if isinstance(kernel_size, int):
+            self.kernel_size = (kernel_size, kernel_size)
+        elif isinstance(kernel_size, tuple):
+            self.kernel_size = kernel_size
+        else:
+            raise TypeError("Wrong kernel_size type.")
+
+        if isinstance(stride, int):
+            self.stride = (stride, stride)
+        elif isinstance(stride, tuple):
+            self.stride = stride
+        else:
+            raise TypeError("Wrong stride type.")
+
+        self.odd_padding = (0, 0, 0, 0)
+        if isinstance(padding, int):
+            self.padding = (padding, padding)
+        elif isinstance(padding, tuple) or isinstance(padding, list):
+            if len(padding) == 2:
+                self.padding = padding
+            elif len(padding) == 4:
+                _h_mask = padding[0] - padding[1]
+                _w_mask = padding[2] - padding[3]
+                # the odd padding is the part that cannot be expressed by the
+                # symmetric (h, w) padding mode, so we pad the input explicitly
+                # first and then apply the normal padding method
+                self.odd_padding = (max(_h_mask, 0), max(-_h_mask, 0),
+                                    max(_w_mask, 0), max(-_w_mask, 0))
+                self.padding = (
+                    padding[0] - self.odd_padding[0],
+                    padding[2] - self.odd_padding[2],
+                )
+            else:
+                raise TypeError("Wrong padding value.")
+
+        if dilation != 1 and list(dilation) != [1, 1]:
+            raise ValueError("Not implemented yet")
+
+        self.inner_params = {
+            "cudnn_prefer": "fastest",
+            "workspace_MB_limit": 1024,
+        }
+        # TODO valid value of inner_params check
+
+        for kwarg in kwargs:
+            if kwarg not in self.inner_params:
+                raise TypeError("Keyword argument not understood:", kwarg)
+            else:
+                self.inner_params[kwarg] = kwargs[kwarg]
+
+    def initialize(self, x):
+        self.in_channels = x.shape[1]
+        w_shape = (
+            self.nb_kernels,
+            int(self.in_channels / self.group),
+            self.kernel_size[0],
+            self.kernel_size[1],
+        )
+
+        self.W = Tensor(shape=w_shape,
+                        requires_grad=True,
+                        stores_grad=True,
+                        device=x.device)
+        # std = math.sqrt(
+        # 2.0 / (self.in_channels * self.kernel_size[0] * self.kernel_size[1] +
+        # self.nb_kernels))
+        std = math.sqrt(
+            2.0 / (w_shape[1] * self.kernel_size[0] * self.kernel_size[1] +
+                   self.nb_kernels))
+        self.W.gaussian(0.0, std)
+
+        if self.bias:
+            b_shape = (self.nb_kernels,)
+            self.b = Tensor(shape=b_shape,
+                            requires_grad=True,
+                            stores_grad=True,
+                            device=x.device)
+            self.b.set_value(0.0)
+        else:
+            # keep self.b defined so that forward() stays consistent
+            self.b = None
+            # Tensor(data=CTensor([]), requires_grad=False, stores_grad=False)
+
+        # if same pad mode, re-compute the padding
+        if self.pad_mode in ("SAME_UPPER", "SAME_LOWER"):
+            self.padding, self.odd_padding = utils.get_padding_shape(
+                self.pad_mode, x.shape[2:], self.kernel_size, self.stride)
+            self.padding = [self.padding[0], self.padding[2]]
+
+        _x = x
+        if self.odd_padding != (0, 0, 0, 0):
+            x_shape = list(x.data.shape())
+            x_shape[2] += (self.odd_padding[0] + self.odd_padding[1])
+            x_shape[3] += (self.odd_padding[2] + self.odd_padding[3])
+            _x = Tensor(shape=x_shape, device=x.device)
+            _x.set_value(0.0)
+
+        if _x.device.id() == -1:
+            if self.group != 1:
+                raise ValueError("Not implemented yet")
+            else:
+                if not hasattr(self, "handle"):
+                    self.handle = singa.ConvHandle(
+                        _x.data,
+                        self.kernel_size,
+                        self.stride,
+                        self.padding,
+                        self.in_channels,
+                        self.nb_kernels,
+                        self.bias,
+                        self.group,
+                    )
+        else:
+            if not hasattr(self, "handle"):
+                self.handle = singa.CudnnConvHandle(
+                    _x.data,
+                    self.kernel_size,
+                    self.stride,
+                    self.padding,
+                    self.in_channels,
+                    self.nb_kernels,
+                    self.bias,
+                    self.group,
+                )
+
+    def forward(self, x):
+        # move params/states to the device of x; TODO: better to decorate forward()
+        self.device_check(x, *[s for k, s in self.get_states().items()])
+
+        assert (self.group >= 1 and self.in_channels % self.group
+                == 0), "please set reasonable group."
+
+        assert (self.nb_kernels >= self.group and self.nb_kernels % self.group
+                == 0), "nb_kernels and group dismatched."
+
+        y = autograd.conv2d(self.handle, x, self.W, self.b, self.odd_padding)
+
+        if self.activation != "NOTSET":
+            if self.activation == "RELU":
+                y = autograd.relu(y)
+
+        return y
+
+    def get_params(self):
+        if self.bias:
+            return {self.W.name: self.W, self.b.name: self.b}
+        else:
+            return {self.W.name: self.W}
+
+    def set_params(self, parameters):
+        self.W.copy_from(parameters[self.W.name])
+        if self.bias:
+            self.b.copy_from(parameters[self.b.name])
+
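+# A sketch of the SAME padding modes (illustration only). With
+# pad_mode="SAME_UPPER" the padding argument can be left as None and is
+# computed in initialize() so the output keeps the input spatial size:
+#
+#   conv = Conv2d(32, 3, stride=1, padding=None, pad_mode="SAME_UPPER",
+#                 activation="RELU")
+#   y = conv(x)   # x: (N, C, H, W) -> y: (N, 32, H, W), ReLU applied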
+
+class SeparableConv2d(Layer):
+    """
+    Generate a SeparableConv 2d operator
+    """
+
+    def __init__(self,
+                 nb_kernels,
+                 kernel_size,
+                 *args,
+                 stride=1,
+                 padding=0,
+                 bias=False):
+        """
+        Args:
+            nb_kernels (int): the number of output channels, i.e., the
+                number of filters
+            kernel_size (int or tuple): kernel size for the two spatial
+                axes; if an int is given, the kernel size is expanded to
+                (int, int)
+            stride (int or tuple): stride; the logic is the same as for
+                kernel_size
+            padding (int, tuple or list): padding; the logic is the same
+                as for kernel_size
+            bias (bool): bias
+        """
+        super(SeparableConv2d, self).__init__()
+
+        # the following code block is for backward compatibility
+        if len(args) > 0:
+            nb_kernels = kernel_size
+            kernel_size = args[0]
+        if len(args) > 1:
+            stride = args[1]
+        if len(args) > 2:
+            padding = args[2]
+
+        self.nb_kernels = nb_kernels
+        self.kernel_size = kernel_size
+        self.stride = stride
+        self.padding = padding
+        self.bias = bias
+
+    def initialize(self, x):
+        self.in_channels = x.shape[1]
+        self.depthwise_conv = Conv2d(
+            self.in_channels,
+            self.kernel_size,
+            stride=self.stride,
+            padding=self.padding,
+            group=self.in_channels,
+            bias=self.bias,
+        )
+
+        self.point_conv = Conv2d(self.nb_kernels, 1, bias=self.bias)
+
+    def forward(self, x):
+        y = self.depthwise_conv(x)
+        y = self.point_conv(y)
+        return y
+
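+# SeparableConv2d factorizes a standard convolution into a depthwise
+# Conv2d (group == in_channels) followed by a 1x1 pointwise Conv2d, which
+# cuts the parameter count from roughly C_in*C_out*k*k to
+# C_in*k*k + C_in*C_out. A sketch (illustration only):
+#
+#   sep = SeparableConv2d(64, 3, padding=1)
+#   y = sep(x)   # same output shape as Conv2d(64, 3, padding=1)(x)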
+
+class BatchNorm2d(Layer):
+    """
+    Generate a BatchNorm 2d operator
+    """
+
+    def __init__(self, *args, momentum=0.9):
+        """
+        Args:
+            momentum (float): Factor used in computing the running mean and
+                variance.
+        """
+        super(BatchNorm2d, self).__init__()
+
+        if len(args) > 0:
+            self.channels = args[0]
+        if len(args) > 1:
+            self.momentum = args[1]
+        self.momentum = momentum
+        assert 0 <= momentum <= 1.0, ("Illegal momentum")
+
+    def initialize(self, x):
+        self.channels = x.shape[1]
+        param_shape = (self.channels,)
+
+        self.scale = Tensor(shape=param_shape,
+                            requires_grad=True,
+                            stores_grad=True)
+        self.scale.set_value(1.0)
+
+        self.bias = Tensor(shape=param_shape,
+                           requires_grad=True,
+                           stores_grad=True)
+        self.bias.set_value(0.0)
+
+        self.running_mean = Tensor(shape=param_shape,
+                                   requires_grad=False,
+                                   stores_grad=False)
+        self.running_mean.set_value(0.0)
+
+        self.running_var = Tensor(shape=param_shape,
+                                  requires_grad=False,
+                                  stores_grad=False)
+        self.running_var.set_value(1.0)
+
+        if not hasattr(self, "handle"):
+            if x.device.id() == -1:
+                self.handle = singa.BatchNormHandle(self.momentum, x.data)
+            else:
+                self.handle = singa.CudnnBatchNormHandle(self.momentum, x.data)
+
+    def forward(self, x):
+        assert x.shape[1] == self.channels, (
+            "number of channels mismatched. %d vs %d" %
+            (x.shape[1], self.channels))
+
+        self.device_check(x, self.scale, self.bias, self.running_mean,
+                          self.running_var)
+
+        y = autograd.batchnorm_2d(
+            self.handle,
+            x,
+            self.scale,
+            self.bias,
+            self.running_mean,
+            self.running_var,
+        )
+        return y
+
+    def get_params(self):
+        return {self.scale.name: self.scale, self.bias.name: self.bias}
+
+    def set_params(self, parameters):
+        self.scale.copy_from(parameters[self.scale.name])
+        self.bias.copy_from(parameters[self.bias.name])
+
+    def get_states(self):
+        ret = self.get_params()
+        ret[self.running_mean.name] = self.running_mean
+        ret[self.running_var.name] = self.running_var
+        return ret
+
+    def set_states(self, states):
+        self.set_params(states)
+        self.running_mean.copy_from(states[self.running_mean.name])
+        self.running_var.copy_from(states[self.running_var.name])
+
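+# BatchNorm2d overrides get_states()/set_states() because, unlike the
+# learnable scale and bias, the running statistics are states rather than
+# parameters. A checkpointing sketch (illustration only):
+#
+#   bn = BatchNorm2d()
+#   y = bn(x)
+#   states = bn.get_states()   # scale, bias, running_mean, running_var
+#   bn.set_states(states)      # restores parameters and running stats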
+
+class Pooling2d(Layer):
+    """
+    Generate a Pooling 2d operator
+    """
+
+    def __init__(self,
+                 kernel_size,
+                 stride=None,
+                 padding=0,
+                 is_max=True,
+                 pad_mode="NOTSET"):
+        """
+        Args:
+            kernel_size (int or tuple): kernel size for the two spatial
+                axes; if an int is given, the kernel size is expanded to
+                (int, int)
+            stride (int or tuple): stride; the logic is the same as for
+                kernel_size
+            padding (int, tuple, list or None): padding; the logic is the
+                same as for kernel_size. If pad_mode is set to "SAME_UPPER"
+                or "SAME_LOWER", padding can be None and is computed
+                automatically
+            is_max (bool): max pooling if True, otherwise average pooling
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+        """
+        super(Pooling2d, self).__init__()
+
+        if isinstance(kernel_size, int):
+            self.kernel_size = (kernel_size, kernel_size)
+        elif isinstance(kernel_size, tuple):
+            self.kernel_size = kernel_size
+        else:
+            raise TypeError("Wrong kernel_size type.")
+
+        if stride is None:
+            self.stride = self.kernel_size
+        elif isinstance(stride, int):
+            self.stride = (stride, stride)
+        elif isinstance(stride, tuple):
+            self.stride = stride
+            assert stride[0] > 0 or (kernel_size[0] == 1 and padding[0] == 0), (
+                "stride[0]=0, but kernel_size[0]=%d, padding[0]=%d" %
+                (kernel_size[0], padding[0]))
+        else:
+            raise TypeError("Wrong stride type.")
+
+        self.odd_padding = (0, 0, 0, 0)
+        if isinstance(padding, int):
+            self.padding = (padding, padding)
+        elif isinstance(padding, tuple) or isinstance(padding, list):
+            if len(padding) == 2:
+                self.padding = padding
+            elif len(padding) == 4:
+                _h_mask = padding[0] - padding[1]
+                _w_mask = padding[2] - padding[3]
+                # the odd padding is the part that cannot be expressed by the
+                # symmetric (h, w) padding mode, so we pad the input explicitly
+                # first and then apply the normal padding method
+                self.odd_padding = (max(_h_mask, 0), max(-_h_mask, 0),
+                                    max(_w_mask, 0), max(-_w_mask, 0))
+                self.padding = (
+                    padding[0] - self.odd_padding[0],
+                    padding[2] - self.odd_padding[2],
+                )
+            else:
+                raise TypeError("Wrong padding value.")
+
+        self.is_max = is_max
+        self.pad_mode = pad_mode
+
+    def initialize(self, x):
+        # if same pad mode, re-compute the padding
+        if self.pad_mode in ("SAME_UPPER", "SAME_LOWER"):
+            self.padding, self.odd_padding = utils.get_padding_shape(
+                self.pad_mode, x.shape[2:], self.kernel_size, self.stride)
+            self.padding = [self.padding[0], self.padding[2]]
+
+        _x = x
+        if self.odd_padding != (0, 0, 0, 0):
+            x_shape = list(x.data.shape())
+            x_shape[2] += (self.odd_padding[0] + self.odd_padding[1])
+            x_shape[3] += (self.odd_padding[2] + self.odd_padding[3])
+            _x = Tensor(shape=x_shape, device=x.device)
+            _x.set_value(0.0)
+
+        if _x.device.id() == -1:
+            self.handle = singa.PoolingHandle(
+                _x.data,
+                self.kernel_size,
+                self.stride,
+                self.padding,
+                self.is_max,
+            )
+        else:
+            self.handle = singa.CudnnPoolingHandle(
+                _x.data,
+                self.kernel_size,
+                self.stride,
+                self.padding,
+                self.is_max,
+            )
+
+    def forward(self, x):
+        y = autograd.pooling_2d(self.handle, x, self.odd_padding)
+        return y
+
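+# A usage sketch for the pooling wrappers below (illustration only):
+#
+#   pool = MaxPool2d(2, stride=2)   # halves each spatial dimension
+#   y = pool(x)                     # x: (N, C, H, W) -> (N, C, H/2, W/2)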
+
+class MaxPool2d(Pooling2d):
+    """
+    Generate a Max Pooling 2d operator
+    """
+
+    def __init__(self, kernel_size, stride=None, padding=0, pad_mode="NOTSET"):
+        """
+        Args:
+            kernel_size (int or tuple): kernel size for the two spatial
+                axes; if an int is given, the kernel size is expanded to
+                (int, int)
+            stride (int or tuple): stride; the logic is the same as for
+                kernel_size
+            padding (int, tuple, list or None): padding; the logic is the
+                same as for kernel_size. If pad_mode is set to "SAME_UPPER"
+                or "SAME_LOWER", padding can be None and is computed
+                automatically
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+        """
+        super(MaxPool2d, self).__init__(kernel_size, stride, padding, True,
+                                        pad_mode)
+
+
+class AvgPool2d(Pooling2d):
+    """
+    Generate an Avg Pooling 2d operator
+    """
+
+    def __init__(self, kernel_size, stride=None, padding=0, pad_mode="NOTSET"):
+        """
+        Args:
+            kernel_size (int or tuple): kernel size for the two spatial
+                axes; if an int is given, the kernel size is expanded to
+                (int, int)
+            stride (int or tuple): stride; the logic is the same as for
+                kernel_size
+            padding (int, tuple, list or None): padding; the logic is the
+                same as for kernel_size. If pad_mode is set to "SAME_UPPER"
+                or "SAME_LOWER", padding can be None and is computed
+                automatically
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+        """
+        super(AvgPool2d, self).__init__(kernel_size, stride, padding, False,
+                                        pad_mode)
+
+
+class MaxPool1d(Pooling2d):
+    """
+    Generate a Max Pooling 1d operator
+    """
+
+    def __init__(self, kernel_size, stride=None, padding=0, pad_mode="NOTSET"):
+        """
+        Args:
+            kernel_size (int): kernel size for the single spatial axis
+            stride (int or None): stride; if None, it defaults to
+                kernel_size
+            padding (int): padding for the single spatial axis
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+        """
+        if stride is None:
+            stride = kernel_size
+        super(MaxPool1d, self).__init__((1, kernel_size), (1, stride),
+                                        (0, padding), True, pad_mode)
+
+
+class AvgPool1d(Pooling2d):
+    """
+    Generate an Avg Pooling 1d operator
+    """
+
+    def __init__(self, kernel_size, stride=None, padding=0, pad_mode="NOTSET"):
+        """
+        Args:
+            kernel_size (int): kernel size for the single spatial axis
+            stride (int or None): stride; if None, it defaults to
+                kernel_size
+            padding (int): padding for the single spatial axis
+            pad_mode (string): can be NOTSET, SAME_UPPER, or SAME_LOWER;
+                the default NOTSET means explicit padding is used.
+                SAME_UPPER and SAME_LOWER pad the input so that the output
+                spatial size matches the input; if the total padding is
+                odd, the extra padding goes at the end for SAME_UPPER and
+                at the beginning for SAME_LOWER
+        """
+        if stride is None:
+            stride = kernel_size
+        super(AvgPool1d, self).__init__((1, kernel_size), (1, stride),
+                                        (0, padding), False, pad_mode)
+
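+# The 1d pooling layers above reuse Pooling2d by treating the input as
+# having height 1; e.g. MaxPool1d(3) becomes
+# Pooling2d((1, 3), (1, 3), (0, 0), True), since stride defaults to the
+# kernel size (a sketch of the mapping, assuming the default padding=0).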
+
+class RNN_Base(Layer):
+
+    def step_forward(self,
+                     x=None,
+                     h=None,
+                     c=None,
+                     Wx=None,
+                     Wh=None,
+                     Bx=None,
+                     Bh=None,
+                     b=None):
+        raise NotImplementedError
+
+
+class RNN(RNN_Base):
+    """
+    Generate an RNN operator
+    """
+
+    def __init__(
+        self,
+        input_size,
+        hidden_size,
+        num_layers=1,
+        nonlinearity="tanh",
+        bias=True,
+        batch_first=False,
+        dropout=0,
+        bidirectional=False,
+    ):
+        """
+        Args:
+            input_size (int):  The number of expected features in the input x
+            hidden_size (int): The number of features in the hidden state h
+            num_layers (int):  Number of recurrent layers. Default: 1
+            nonlinearity (string): The non-linearity to use. Default: 'tanh'
+            bias (bool):  If False, then the layer does not use bias weights.
+                Default: True
+            batch_first (bool):  If True, then the input and output tensors
+                are provided as (batch, seq, feature). Default: False
+            dropout (float): If non-zero, introduces a Dropout layer on the
+                outputs of each RNN layer except the last layer, with dropout
+                probability equal to dropout. Default: 0
+            bidirectional (bool): If True, becomes a bidirectional RNN.
+                Default: False
+        """
+        super(RNN, self).__init__()
+
+        self.input_size = input_size
+        self.hidden_size = hidden_size
+        self.num_layers = num_layers
+        self.nonlinearity = nonlinearity
+        self.bias = bias
+        self.batch_first = batch_first
+        self.dropout = dropout
+        self.bidirectional = bidirectional
+
+    def initialize(self, xs, h0):
+        Wx_shape = (self.input_size, self.hidden_size)
+        self.Wx = Tensor(shape=Wx_shape, requires_grad=True, stores_grad=True)
+        self.Wx.gaussian(0.0, 1.0)
+
+        Wh_shape = (self.hidden_size, self.hidden_size)
+        self.Wh = Tensor(shape=Wh_shape, requires_grad=True, stores_grad=True)
+        self.Wh.gaussian(0.0, 1.0)
+
+        b_shape = (self.hidden_size,)
+        self.b = Tensor(shape=b_shape, requires_grad=True, stores_grad=True)
+        self.b.set_value(0.0)
+
+    def forward(self, xs, h0):
+        # xs: a tuple or list of input tensors
+        if not isinstance(xs, tuple):
+            xs = tuple(xs)
+        inputs = xs + (h0,)
+        self.device_check(*inputs)
+        # self.device_check(inputs[0], *self.params)
+        self.device_check(inputs[0], self.Wx, self.Wh, self.b)
+        batchsize = xs[0].shape[0]
+        out = []
+        h = self.step_forward(xs[0], h0, self.Wx, self.Wh, self.b)
+        out.append(h)
+        for x in xs[1:]:
+            assert x.shape[0] == batchsize
+            h = self.step_forward(x, h, self.Wx, self.Wh, self.b)
+            out.append(h)
+        return out, h
+
+    def step_forward(self, x, h, Wx, Wh, b):
+        y2 = autograd.matmul(h, Wh)
+        y1 = autograd.matmul(x, Wx)
+        y = autograd.add(y2, y1)
+        y = autograd.add_bias(y, b, axis=0)
+        if self.nonlinearity == "tanh":
+            y = autograd.tanh(y)
+        elif self.nonlinearity == "relu":
+            y = autograd.relu(y)
+        else:
+            raise ValueError("unknown nonlinearity: %s" % self.nonlinearity)
+        return y
+
+    def get_params(self):
+        return {
+            self.Wx.name: self.Wx,
+            self.Wh.name: self.Wh,
+            self.b.name: self.b
+        }
+
+    def set_params(self, parameters):
+        self.Wx.copy_from(parameters[self.Wx.name])
+        self.Wh.copy_from(parameters[self.Wh.name])
+        self.b.copy_from(parameters[self.b.name])
+
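+# A usage sketch for the vanilla RNN above (illustration only; xs is a
+# list with one tensor of shape (batch, input_size) per step, and h0 has
+# shape (batch, hidden_size)):
+#
+#   rnn = RNN(input_size=28, hidden_size=64)
+#   out, h = rnn(xs, h0)   # out: one hidden state per step; h: final state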
+
+class LSTM(RNN_Base):
+    """
+    Generate an LSTM operator
+    """
+
+    def __init__(
+        self,
+        input_size,
+        hidden_size,
+        nonlinearity="tanh",
+        num_layers=1,
+        bias=True,
+        batch_first=False,
+        dropout=0,
+        bidirectional=False,
+    ):
+        """
+        Args:
+            input_size (int):  The number of expected features in the input x
+            hidden_size (int): The number of features in the hidden state h
+            num_layers (int):  Number of recurrent layers. Default: 1
+            nonlinearity (string): The non-linearity to use. Default: 'tanh'
+            bias (bool):  If False, then the layer does not use bias weights.
+                Default: True
+            batch_first (bool):  If True, then the input and output tensors
+                are provided as (batch, seq, feature). Default: False
+            dropout (float): If non-zero, introduces a Dropout layer on the
+                outputs of each RNN layer except the last layer, with dropout
+                probability equal to dropout. Default: 0
+            bidirectional (bool): If True, becomes a bidirectional RNN.
+                Default: False
+        """
+        super(LSTM, self).__init__()
+
+        self.input_size = input_size
+        self.hidden_size = hidden_size
+        self.num_layers = num_layers
+        self.nonlinearity = nonlinearity
+        self.bias = bias
+        self.batch_first = batch_first
+        self.dropout = dropout
+        self.bidirectional = bidirectional
+
+    def initialize(self, xs, h0_c0):
+        # 1. Wx_i input,  Bx_i
+        # 2. Wx_f forget, Bx_f
+        # 3. Wx_o output, Bx_o
+        # 4. Wx_g candidate, Bx_g
+        Wx_shape = (self.input_size, self.hidden_size)
+        self.Wx_i = Tensor(shape=Wx_shape, requires_grad=True, stores_grad=True)
+        self.Wx_f = Tensor(shape=Wx_shape, requires_grad=True, stores_grad=True)
+        self.Wx_o = Tensor(shape=Wx_shape, requires_grad=True, stores_grad=True)
+        self.Wx_g = Tensor(shape=Wx_shape, requires_grad=True, stores_grad=True)
+
+        Wh_shape = (self.hidden_size, self.hidden_size)
+        self.Wh_i = Tensor(shape=Wh_shape, requires_grad=True, stores_grad=True)
+        self.Wh_f = Tensor(shape=Wh_shape, requires_grad=True, stores_grad=True)
+        self.Wh_o = Tensor(shape=Wh_shape, requires_grad=True, stores_grad=True)
+        self.Wh_g = Tensor(shape=Wh_shape, requires_grad=True, stores_grad=True)
+        [
+            w.gaussian(0.0, 0.01) for w in [
+                self.Wx_i, self.Wx_f, self.Wx_o, self.Wx_g, self.Wh_i,
+                self.Wh_f, self.Wh_o, self.Wh_g
+            ]
+        ]
+
+        Bx_shape = (self.hidden_size,)
+        self.Bx_i = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bx_f = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bx_o = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bx_g = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bh_i = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bh_f = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bh_o = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        self.Bh_g = Tensor(shape=Bx_shape, requires_grad=True, stores_grad=True)
+        [
+            b.set_value(0.0) for b in [
+                self.Bx_i, self.Bx_f, self.Bx_o, self.Bx_g, self.Bh_i,
+                self.Bh_f, self.Bh_o, self.Bh_g
+            ]
+        ]
+
+    def forward(self, xs, h0_c0):
+        # xs: a tuple or list of input tensors
+        # h0_c0: a tuple of (h0, c0)
+        h0, c0 = h0_c0
+        if not isinstance(xs, list):
+            xs = list(xs)
+        inputs = xs + list((h0, c0))
+        self.device_check(*inputs)
+        self.device_check(inputs[0], *[s for k, s in self.get_states().items()])
+        batchsize = xs[0].shape[0]
+        out = []
+        h, c = self.step_forward(xs[0], h0, c0)
+        out.append(h)
+        for x in xs[1:]:
+            assert x.shape[0] == batchsize
+            h, c = self.step_forward(x, h, c)
+            out.append(h)
+        return out, h, c
+
+    def step_forward(self, x, h, c):
+        # input
+        y1 = autograd.matmul(x, self.Wx_i)
+        y1 = autograd.add_bias(y1, self.Bx_i, axis=0)
+        y2 = autograd.matmul(h, self.Wh_i)
+        y2 = autograd.add_bias(y2, self.Bh_i, axis=0)
+        i = autograd.add(y1, y2)
+        i = autograd.sigmoid(i)
+
+        # forget
+        y1 = autograd.matmul(x, self.Wx_f)
+        y1 = autograd.add_bias(y1, self.Bx_f, axis=0)
+        y2 = autograd.matmul(h, self.Wh_f)
+        y2 = autograd.add_bias(y2, self.Bh_f, axis=0)
+        f = autograd.add(y1, y2)
+        f = autograd.sigmoid(f)
+
+        # output
+        y1 = autograd.matmul(x, self.Wx_o)
+        y1 = autograd.add_bias(y1, self.Bx_o, axis=0)
+        y2 = autograd.matmul(h, self.Wh_o)
+        y2 = autograd.add_bias(y2, self.Bh_o, axis=0)
+        o = autograd.add(y1, y2)
+        o = autograd.sigmoid(o)
+
+        y1 = autograd.matmul(x, self.Wx_g)
+        y1 = autograd.add_bias(y1, self.Bx_g, axis=0)
+        y2 = autograd.matmul(h, self.Wh_g)
+        y2 = autograd.add_bias(y2, self.Bh_g, axis=0)
+        g = autograd.add(y1, y2)
+        g = autograd.tanh(g)
+
+        cout1 = autograd.mul(f, c)
+        cout2 = autograd.mul(i, g)
+        cout = autograd.add(cout1, cout2)
+
+        hout = autograd.tanh(cout)
+        hout = autograd.mul(o, hout)
+        return hout, cout
+
+    def get_params(self):
+        ret = {}
+        for w in [
+                self.Wx_i, self.Wx_f, self.Wx_o, self.Wx_g, self.Wh_i,
+                self.Wh_f, self.Wh_o, self.Wh_g
+        ]:
+            ret[w.name] = w
+
+        for b in [
+                self.Bx_i, self.Bx_f, self.Bx_o, self.Bx_g, self.Bh_i,
+                self.Bh_f, self.Bh_o, self.Bh_g
+        ]:
+            ret[b.name] = b
+        return ret
+
+    def set_params(self, parameters):
+        for w in [
+                self.Wx_i, self.Wx_f, self.Wx_o, self.Wx_g, self.Wh_i,
+                self.Wh_f, self.Wh_o, self.Wh_g
+        ]:
+            w.copy_from(parameters[w.name])
+
+        for b in [
+                self.Bx_i, self.Bx_f, self.Bx_o, self.Bx_g, self.Bh_i,
+                self.Bh_f, self.Bh_o, self.Bh_g
+        ]:
+            b.copy_from(parameters[b.name])
+
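+# The LSTM above keeps separate weight/bias tensors for the input, forget,
+# output and candidate gates instead of one fused matrix. A usage sketch
+# (illustration only; h0 and c0 have shape (batch, hidden_size)):
+#
+#   lstm = LSTM(input_size=28, hidden_size=64)
+#   out, h, c = lstm(xs, (h0, c0))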
+
+''' layers without params or states
 '''
 
-if singa_wrap.USE_CUDNN:
-    cudnn_version = singa_wrap.CUDNN_VERSION
-else:
-    cudnn_version = 0
 
-
-class Layer(object):
-    '''Base Python layer class.
-
-    Typically, the life cycle of a layer instance includes:
-        1. construct layer without input_sample_shapes, goto 2;
-           construct layer with input_sample_shapes, goto 3;
-        2. call setup to create the parameters and setup other meta fields
-        3. call forward or access layer members
-        4. call backward and get parameters for update
-
-    Args:
-        name (str): layer name
-    '''
-    def __init__(self, name, conf=None, **kwargs):
-        if conf is None:
-            self.layer = None  # layer converted by swig
-            self.name = name  # TODO(wangwei) duplicate with self.conf.name
-            self.conf = model_pb2.LayerConf()
-            self.conf.name = name
-            self.param_specs = []
-        else:
-            self.conf = conf
-            self.name = str(conf.name)
-            self.caffe_layer()
-            self.param_specs = []
-
-            # convert caffe proto into singa proto format
-            #   case1: parameters of conv and dense layers
-            #   case2: type of activation layers
-            if (conf.type == 'Convolution' or conf.type == 4) or \
-                    (conf.type == 'InnerProduct' or conf.type == 14):
-                w, b = _construct_param_specs_from_caffe_proto(conf)
-                del conf.param[:]
-                conf.param.extend([w, b])
-                self.param_specs.append(w)
-                self.param_specs.append(b)
-                # print 'conf:\n', conf
-            if conf.type == 'Pooling':
-                conf.pooling_conf.ceil = True
-                # print 'conf:\n', conf
-            elif (conf.type == 'ReLU' or conf.type == 18 or
-                  conf.type == 'Sigmoid' or conf.type == 19 or
-                  conf.type == 'TanH' or conf.type == 23):
-                conf.type = (engine + '_' + conf.type).lower()
-            self.conf = conf
-
-        self.has_setup = False
-
-    def setup(self, in_shapes):
-        '''Call the C++ setup function to create params and set some meta data.
-
-        Args:
-            in_shapes: if the layer accepts a single input Tensor, in_shapes is
-                a single tuple specifying the inpute Tensor shape; if the layer
-                accepts multiple input Tensor (e.g., the concatenation layer),
-                in_shapes is a tuple of tuples, each for one input Tensor
-        '''
-        if self.has_setup:
-            return
-        if type(in_shapes[0]) is tuple:
-            self.layer.SetupWithMultInputs([list(s) for s in in_shapes],
-                                           self.conf.SerializeToString())
-        else:
-            self.layer.Setup(list(in_shapes), self.conf.SerializeToString())
-        self.has_setup = True
-
-    def caffe_layer(self):
-        '''
-        Create a singa layer based on caffe layer configuration.
-        '''
-        _check_engine(engine, ['cudnn', 'singacpp', 'singacuda', 'singacl'])
-        if self.conf.type == 'InnerProduct' or self.conf.type == 14:
-            self.layer = _create_layer(engine, 'Dense')
-        else:
-            self.layer = _create_layer(engine, str(self.conf.type))
-
-    def get_output_sample_shape(self):
-        '''Called after setup to get the shape of the output sample(s).
-
-        Returns:
-            a tuple for a single output Tensor or a list of tuples if this layer
-            has multiple outputs
-        '''
-        assert self.has_setup, \
-            'Must call setup() before get_output_sample_shape()'
-        return self.layer.GetOutputSampleShape()
-
-    def param_names(self):
-        '''
-        Returns:
-            a list of strings, one for the name of one parameter Tensor
-        '''
-        names = []
-        for x in self.param_specs:
-            names.append(x.name)
-        return names
-
-    def param_values(self):
-        '''Return param value tensors.
-
-        Parameter tensors are not stored as layer members because cpp Tensor
-        could be moved onto diff devices due to the change of layer device,
-        which would result in inconsistency.
-
-        Returns:
-            a list of tensors, one for each paramter
-        '''
-        if self.layer is None:
-            return []
-        else:
-            return tensor.from_raw_tensors(self.layer.param_values())
-
-    def forward(self, flag, x):
-        '''Forward propagate through this layer.
-
-        Args:
-            flag: True (kTrain) for training (kEval); False for evaluating;
-                other values for furture use.
-            x (Tensor or list<Tensor>): an input tensor if the layer is
-                connected from a single layer; a list of tensors if the layer
-                is connected from multiple layers.
-
-        Return:
-            a tensor if the layer is connected to a single layer; a list of
-            tensors if the layer is connected to multiple layers;
-        '''
-        assert self.has_setup, 'Must call setup() before forward()'
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-        if type(x) is list:
-            xs = [t.singa_tensor for t in x]
-            y = self.layer.ForwardWithMultInputs(flag, xs)
-        else:
-            assert isinstance(x, tensor.Tensor), \
-                    'input of %s (type:%s) must be a Tensor or Tensor list'\
-                    % (self.name, type(x).__name__)
-            y = self.layer.Forward(flag, x.singa_tensor)
-        if type(y) is tuple:
-            return tensor.from_raw_tensors(y)
-        else:
-            return tensor.from_raw_tensor(y)
-
-    def backward(self, flag, dy):
-        '''Backward propagate gradients through this layer.
-
-        Args:
-            flag (int): for future use.
-            dy (Tensor or list<Tensor>): the gradient tensor(s) y w.r.t the
-                objective loss
-        Return:
-            <dx, <dp1, dp2..>>, dx is a (set of) tensor(s) for the gradient of x
-            , dpi is the gradient of the i-th parameter
-        '''
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-
-        if type(dy) == list:
-            dys = [t.singa_tensor for t in dy]
-            ret = self.layer.BackwardWithMultInputs(flag, dys)
-        else:
-            assert isinstance(dy, tensor.Tensor), \
-                    'input of %s (type:%s) must be a Tensor or Tensor list'\
-                    % (self.name, type(dy).__name__)
-            dys = dy.singa_tensor
-            ret = self.layer.Backward(flag, dys)
-        if type(ret[0]) is tuple:
-            dxs = tensor.from_raw_tensors(ret[0])
-        else:
-            dxs = tensor.from_raw_tensor(ret[0])
-        return dxs, tensor.from_raw_tensors(ret[1])
-
-    def to_device(self, device):
-        '''Move layer state tensors onto the given device.
-
-        Args:
-            device: swig converted device, created using singa.device
-        '''
-        if self.layer is not None:
-            self.layer.ToDevice(device)
-
-    def as_type(self, dtype):
-        pass
-
-    def __copy__(self):
-        pass
-
-    def __deepcopy__(self):
-        pass
-
-
-class Dummy(Layer):
-    '''A dummy layer that does nothing but just forwards/backwards the data
-    (the input/output is a single tensor).
-    '''
-    def __init__(self, name, input_sample_shape=None):
-        super(Dummy, self).__init__(name)
-        self.output_sample_shape = input_sample_shape
-
-    def get_output_sample_shape(self):
-        return self.output_sample_shape
-
-    def setup(self, input_sample_shape):
-        self.output_sample_shape = input_sample_shape
-        self.has_setup = True
-
-    def forward(self, flag, x):
-        '''Return the input x'''
-        return x
-
-    def backward(self, falg, dy):
-        '''Return dy, []'''
-        return dy, []
-
-
-class Conv2D(Layer):
-    """Construct a layer for 2D convolution.
-
-    Args:
-        nb_kernels (int): num of the channels (kernels) of the input Tensor
-        kernel: an integer or a pair of integers for kernel height and width
-        stride: an integer or a pair of integers for stride height and width
-        border_mode (string): padding mode, case in-sensitive,
-            'valid' -> padding is 0 for height and width
-            'same' -> padding is half of the kernel (floor), the kernel must be
-            odd number.
-        cudnn_prefer (string): the preferred algorithm for cudnn convolution
-            which could be 'fastest', 'autotune', 'limited_workspace' and
-            'no_workspace'
-        workspace_byte_limit(int): max workspace size in MB (default is 512MB)
-        data_format (string): either 'NCHW' or 'NHWC'
-        use_bias (bool): True or False
-        pad: an integer or a pair of integers for padding height and width
-        W_specs (dict): used to specify the weight matrix specs, fields
-            include,
-            'name' for parameter name
-            'lr_mult' for learning rate multiplier
-            'decay_mult' for weight decay multiplier
-            'init' for init method, which could be 'gaussian', 'uniform',
-            'xavier' and ''
-            'std', 'mean', 'high', 'low' for corresponding init methods
-            TODO(wangwei) 'clamp' for gradient constraint, value is scalar
-            'regularizer' for regularization, currently support 'l2'
-        b_specs (dict): hyper-parameters for bias vector, similar as W_specs
-        name (string): layer name.
-        input_sample_shape: 3d tuple for the shape of the input Tensor
-            without the batchsize, e.g., (channel, height, width) or
-            (height, width, channel)
+class ReLU(Layer):
+    """
+    Generate a ReLU operator
     """
 
-    def __init__(self, name, nb_kernels, kernel=3, stride=1, border_mode='same',
-                 cudnn_prefer='fastest', workspace_byte_limit=1024,
-                 data_format='NCHW', use_bias=True, W_specs=None, b_specs=None,
-                 pad=None, input_sample_shape=None):
-        super(Conv2D, self).__init__(name)
-        assert data_format == 'NCHW', 'Not supported data format: %s ' \
-            'only "NCHW" is enabled currently' % (data_format)
-        conf = self.conf.convolution_conf
-        conf.num_output = nb_kernels
-        conf.prefer = cudnn_prefer
-        conf.workspace_byte_limit = workspace_byte_limit
-        conf = _set_kernel_stride_pad(conf, kernel, stride, border_mode, pad)
-        conf.bias_term = use_bias
-        # TODO(wangwei) enable data format for cpp code
-        # conf.data_format = data_format
-        if W_specs is None:
-            W_specs = {'init': 'xavier'}
-        if 'name' not in W_specs:
-            W_specs['name'] = name + '/weight'
-        wspecs = _construct_param_specs_from_dict(W_specs)
-        self.conf.param.extend([wspecs])
-        self.param_specs.append(wspecs)
-        if use_bias:
-            if b_specs is None:
-                b_specs = {'init': 'constant'}
-            if 'name' not in b_specs:
-                b_specs['name'] = name + '/bias'
-            bspecs = _construct_param_specs_from_dict(b_specs)
-            self.conf.param.extend([bspecs])
-            self.param_specs.append(bspecs)
+    def __init__(self):
+        super(ReLU, self).__init__()
 
-        _check_engine(engine, ['cudnn', 'singacpp', 'singacl'])
-        self.layer = _create_layer(engine, 'Convolution')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
+    def forward(self, x):
+        return autograd.relu(x)
 
 
-class Conv1D(Conv2D):
-    """Construct a layer for 1D convolution.
-
-    Most of the args are the same as those for Conv2D except the kernel,
-    stride, pad, which is a scalar instead of a tuple.
-    input_sample_shape is a tuple with a single value for the input feature
-    length
+class Sigmoid(Layer):
+    """
+    Generate a Sigmoid operator
     """
 
-    def __init__(self, name, nb_kernels, kernel=3, stride=1,
-                 border_mode='same', cudnn_prefer='fastest',
-                 workspace_byte_limit=1024,
-                 use_bias=True, W_specs={'init': 'Xavier'},
-                 b_specs={'init': 'Constant', 'value': 0}, pad=None,
-                 input_sample_shape=None):
-        pad = None
-        if pad is not None:
-            pad = (0, pad)
-        if input_sample_shape is not None:
-            input_sample_shape = (1, 1, input_sample_shape[0])
-        super(Conv1D, self).__init__(name, nb_kernels, (1, kernel), (0, stride),
-                                     border_mode, cudnn_prefer,
-                                     workspace_byte_limit,
-                                     use_bias=use_bias, pad=pad,
-                                     W_specs=W_specs, b_specs=b_specs,
-                                     input_sample_shape=input_sample_shape)
+    def __init__(self):
+        super(Sigmoid, self).__init__()
 
-    def get_output_sample_shape(self):
-        shape = self.layer.GetOutputSampleShape()
-        assert len(shape) == 3, 'The output sample shape should be 3D.'\
-            'But the length is %d' % len(shape)
-        return (shape[0], shape[2])
+    def forward(self, x):
+        return autograd.sigmoid(x)
 
 
-class Pooling2D(Layer):
-    '''2D pooling layer providing max/avg pooling.
-
-    All args are the same as those for Conv2D, except the following one
-
-    Args:
-        mode: pooling type, model_pb2.PoolingConf.MAX or
-            model_pb2.PoolingConf.AVE
-
-    '''
-
-    def __init__(self, name, mode, kernel=3, stride=2, border_mode='same',
-                 pad=None, data_format='NCHW', input_sample_shape=None):
-        super(Pooling2D, self).__init__(name)
-        assert data_format == 'NCHW', 'Not supported data format: %s ' \
-            'only "NCHW" is enabled currently' % (data_format)
-        conf = self.conf.pooling_conf
-        conf = _set_kernel_stride_pad(conf, kernel, stride, border_mode, pad)
-        conf.pool = mode
-        _check_engine(engine, ['cudnn', 'singacpp', 'singacl'])
-        self.layer = _create_layer(engine, 'Pooling')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class MaxPooling2D(Pooling2D):
-
-    def __init__(self, name, kernel=3, stride=2, border_mode='same', pad=None,
-                 data_format='NCHW', input_sample_shape=None):
-        super(MaxPooling2D, self).__init__(name, model_pb2.PoolingConf.MAX,
-                                           kernel, stride, border_mode,
-                                           pad, data_format, input_sample_shape)
-
-
-class AvgPooling2D(Pooling2D):
-
-    def __init__(self, name, kernel=3, stride=2, border_mode='same', pad=None,
-                 data_format='NCHW', input_sample_shape=None):
-        super(AvgPooling2D, self).__init__(name, model_pb2.PoolingConf.AVE,
-                                           kernel, stride, border_mode,
-                                           pad, data_format, input_sample_shape)
-
-
-class MaxPooling1D(MaxPooling2D):
-
-    def __init__(self, name, kernel=3, stride=2, border_mode='same', pad=None,
-                 data_format='NCHW', input_sample_shape=None):
-        """Max pooling for 1D feature.
-
-        Args:
-            input_sample_shape (tuple): 1D tuple for input feature length
-        """
-        pad = None
-        if pad is not None:
-            pad = (0, pad)
-        if input_sample_shape is not None:
-            assert len(input_sample_shape) == 1, \
-                'MaxPooling1D expects input sample to be 1D'
-            input_sample_shape = (1, 1, input_sample_shape[0])
-        else:
-            input_sample_shape = None
-        super(MaxPooling1D, self).__init__(name, (1, kernel), (0, stride),
-                                           border_mode, pad,
-                                           data_format, input_sample_shape)
-
-    def get_output_sample_shape(self):
-        shape = self.layer.GetOutputSampleShape()
-        return (shape[2],)
-
-
-class AvgPooling1D(AvgPooling2D):
-
-    def __init__(self, name, kernel=3, stride=2, border_mode='same', pad=None,
-                 data_format='NCHW', input_sample_shape=None):
-        """input_feature_length is a scalar value"""
-        pad2 = None
-        if pad is not None:
-            pad2 = (pad, 0)
-        if input_sample_shape is not None:
-            assert len(input_sample_shape) == 1, \
-                'AvgPooling1D expects input sample to be 1D'
-            input_sample_shape = (1, 1, input_sample_shape[0])
-        else:
-            input_sample_shape = None
-
-        super(AvgPooling1D, self).__init__(name, (kernel, 1), (0, stride),
-                                           border_mode, pad2,
-                                           data_format, input_sample_shape)
-
-    def get_output_sample_shape(self):
-        shape = self.layer.GetOutputSampleShape()
-        return (shape[2],)
-
-
-class BatchNormalization(Layer):
-    """Batch-normalization.
-
-    Args:
-        momentum (float): for running average mean and variance.
-        beta_specs (dict): dictionary includes the fields for the beta
-            param:
-            'name' for parameter name
-            'lr_mult' for learning rate multiplier
-            'decay_mult' for weight decay multiplier
-            'init' for init method, which could be 'gaussian', 'uniform',
-            'xavier' and ''
-            'std', 'mean', 'high', 'low' for corresponding init methods
-            'clamp' for gradient constraint, value is scalar
-            'regularizer' for regularization, currently support 'l2'
-        gamma_specs (dict): similar to beta_specs, but for the gamma param.
-        name (string): layer name
-        input_sample_shape (tuple): with at least one integer
+class Add(Layer):
+    """
+    Generate an Add operator
     """
 
-    def __init__(self, name, momentum=0.9,
-                 beta_specs=None, gamma_specs=None, input_sample_shape=None):
-        super(BatchNormalization, self).__init__(name)
-        conf = self.conf.batchnorm_conf
-        conf.factor = momentum
-        if beta_specs is None:
-            beta_specs = {'init': 'Xavier'}
-        if gamma_specs is None:
-            gamma_specs = {'init': 'Xavier'}
-        if 'name' not in beta_specs:
-            beta_specs['name'] = name + '/beta'
-        if 'name' not in gamma_specs:
-            gamma_specs['name'] = name + '/gamma'
-        mean_specs = {'init': 'constant', 'value': 0, 'name': name + '/mean'}
-        var_specs = {'init': 'constant', 'value': 1, 'name': name + '/var'}
-        self.conf.param.extend([_construct_param_specs_from_dict(gamma_specs)])
-        self.conf.param.extend([_construct_param_specs_from_dict(beta_specs)])
-        self.conf.param.extend([_construct_param_specs_from_dict(mean_specs)])
-        self.conf.param.extend([_construct_param_specs_from_dict(var_specs)])
-        self.param_specs.append(_construct_param_specs_from_dict(gamma_specs))
-        self.param_specs.append(_construct_param_specs_from_dict(beta_specs))
-        self.param_specs.append(_construct_param_specs_from_dict(mean_specs))
-        self.param_specs.append(_construct_param_specs_from_dict(var_specs))
-        _check_engine(engine, ['cudnn', 'singa', 'singacpp', 'singacuda',
-                               'singacl'])
-        self.layer = _create_layer(engine, 'BatchNorm')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
+    def __init__(self):
+        super(Add, self).__init__()
 
-
-class L2Norm(Layer):
-    '''Normalize each sample to have L2 norm = 1'''
-    def __init__(self, name, input_sample_shape, epsilon=1e-8):
-        super(L2Norm, self).__init__(name)
-        self.y = None
-        self.norm = None
-        self.name = name
-        self.epsilon = epsilon
-        self.out_sample_shape = input_sample_shape
-
-    def get_output_sample_shape(self):
-        return self.out_sample_shape
-
-    def forward(self, is_train, x):
-        norm = tensor.sum_columns(tensor.square(x))
-        norm += self.epsilon
-        norm = tensor.sqrt(norm)
-        self.y = x.clone()
-        self.y.div_column(norm)
-
-        if is_train:
-            self.norm = norm
-        return self.y
-
-    def backward(self, is_train, dy):
-        # (dy - y * k) / norm, k = sum(dy * y)
-        k = tensor.sum_columns(tensor.eltwise_mult(dy, self.y))
-        self.y.mult_column(k)
-        dx = dy - self.y
-        dx.div_column(self.norm)
-        return dx, []
-
-
-class LRN(Layer):
-    """Local response normalization.
-
-    Args:
-        size (int): # of channels to be crossed
-            normalization.
-        mode (string): 'cross_channel'
-        input_sample_shape (tuple): 3d tuple, (channel, height, width)
-    """
-
-    def __init__(self, name, size=5, alpha=1, beta=0.75, mode='cross_channel',
-                 k=1, input_sample_shape=None):
-        super(LRN, self).__init__(name)
-        conf = self.conf.lrn_conf
-        conf.local_size = size
-        conf.alpha = alpha
-        conf.beta = beta
-        conf.k = k
-        # TODO(wangwei) enable mode = 'within_channel'
-        assert mode == 'cross_channel', 'only support mode="cross_channel"'
-        conf.norm_region = model_pb2.LRNConf.ACROSS_CHANNELS
-        _check_engine(engine, ['cudnn', 'singa', 'singacpp', 'singacuda',
-                               'singacl'])
-        self.layer = _create_layer(engine, 'LRN')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class Dense(Layer):
-    """Apply linear/affine transformation, also called inner-product or
-    fully connected layer.
-
-    Args:
-        num_output (int): output feature length.
-        use_bias (bool): add a bias vector or not to the transformed feature
-        W_specs (dict): specs for the weight matrix
-            'name' for parameter name
-            'lr_mult' for learning rate multiplier
-            'decay_mult' for weight decay multiplier
-            'init' for init method, which could be 'gaussian', 'uniform',
-            'xavier' and ''
-            'std', 'mean', 'high', 'low' for corresponding init methods
-            'clamp' for gradient constraint, value is scalar
-            'regularizer' for regularization, currently support 'l2'
-        b_specs (dict): specs for the bias vector, same fields as W_specs.
-        W_transpose (bool): if true, output=x*W.T+b;
-        input_sample_shape (tuple): input feature length
-    """
-
-    def __init__(self, name, num_output, use_bias=True,
-                 W_specs=None, b_specs=None,
-                 W_transpose=False, input_sample_shape=None):
-        """Apply linear/affine transformation, also called inner-product or
-        fully connected layer.
-
-        Args:
-            num_output (int): output feature length.
-            use_bias (bool): add a bias vector or not to the transformed feature
-            W_specs (dict): specs for the weight matrix
-                'name' for parameter name
-                'lr_mult' for learning rate multiplier
-                'decay_mult' for weight decay multiplier
-                'init' for init method, which could be 'gaussian', 'uniform',
-                'xavier' and ''
-                'std', 'mean', 'high', 'low' for corresponding init methods
-                'clamp' for gradient constraint, value is scalar
-                'regularizer' for regularization, currently support 'l2'
-            b_specs (dict): specs for the bias vector, same fields as W_specs.
-            W_transpose (bool): if true, output=x*W.T+b;
-            input_sample_shape (tuple): input feature length
-        """
-        super(Dense, self).__init__(name)
-        conf = self.conf.dense_conf
-        conf.num_output = num_output
-        conf.bias_term = use_bias
-        conf.transpose = W_transpose
-        if W_specs is None:
-            W_specs = {'init': 'xavier'}
-        if 'name' not in W_specs:
-            W_specs['name'] = name + '/weight'
-        wspecs = _construct_param_specs_from_dict(W_specs)
-        self.conf.param.extend([wspecs])
-        self.param_specs.append(wspecs)
-        if use_bias:
-            if b_specs is None:
-                b_specs = {'init': 'constant', 'value': 0}
-            if 'name' not in b_specs:
-                b_specs['name'] = name + '/bias'
-            bspecs = _construct_param_specs_from_dict(b_specs)
-            self.conf.param.extend([bspecs])
-            self.param_specs.append(bspecs)
-        # dense layer is transparent to engine.
-        if engine == 'cudnn':
-            self.layer = _create_layer('singacuda', 'Dense')
-        else:
-            self.layer = _create_layer(engine, 'Dense')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class Dropout(Layer):
-    """Droput layer.
-
-    Args:
-        p (float): probability for dropping out the element, i.e., set to 0
-        name (string): layer name
-    """
-
-    def __init__(self, name, p=0.5, input_sample_shape=None):
-        super(Dropout, self).__init__(name)
-        conf = self.conf.dropout_conf
-        conf.dropout_ratio = p
-        # dropout is supported in cudnn since V5
-        if engine.lower() == 'cudnn' and cudnn_version < 5000:
-            myengine = 'singacuda'
-        else:
-            myengine = engine
-        _check_engine(myengine, ['cudnn', 'singa', 'singacpp', 'singacuda',
-                                 'singacl'])
-        self.layer = _create_layer(myengine, 'Dropout')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class Activation(Layer):
-    """Activation layers.
-
-    Args:
-        name (string): layer name
-        mode (string): 'relu', 'sigmoid', or 'tanh'
-        input_sample_shape (tuple): shape of a single sample
-    """
-
-    def __init__(self, name, mode='relu', input_sample_shape=None):
-        super(Activation, self).__init__(name)
-        _check_engine(engine, ['cudnn', 'singacpp', 'singacuda', 'singacl'])
-        self.conf.type = (engine + '_' + mode).lower()
-        self.layer = _create_layer(engine, mode)
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class Softmax(Layer):
-    """Apply softmax.
-
-    Args:
-        axis (int): reshape the input as a matrix with the dimension
-            [0,axis) as the row, the [axis, -1) as the column.
-        input_sample_shape (tuple): shape of a single sample
-    """
-
-    def __init__(self, name, axis=1, input_sample_shape=None):
-        super(Softmax, self).__init__(name)
-        # conf = self.conf.softmax_conf
-        # conf.axis = axis
-        _check_engine(engine, ['cudnn', 'singa', 'singacpp', 'singacl',
-                               'singacuda'])
-        self.layer = _create_layer(engine, 'Softmax')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
+    def forward(self, a, b):
+        return autograd.add(a, b)
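+
+# Illustrative sketch (an assumption, not original documentation): Add takes
+# two tensors of the same shape and returns their elementwise sum.
+#     z = Add()(a, b)      # same as autograd.add(a, b)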
 
 
 class Flatten(Layer):
-    """Reshape the input tensor into a matrix.
-
-    Args:
-        axis (int): reshape the input as a matrix with the dimension
-            [0,axis) as the row, the [axis, -1) as the column.
-        input_sample_shape (tuple): shape for a single sample
+    """
+    Generate a Flatten operator
     """
 
-    def __init__(self, name, axis=1, input_sample_shape=None):
-        super(Flatten, self).__init__(name)
-        conf = self.conf.flatten_conf
-        conf.axis = axis
-        # flatten layer is transparent to the engine
-        if engine == 'cudnn':
-            self.layer = _create_layer('singacuda', 'Flatten')
-        else:
-            self.layer = _create_layer(engine, 'Flatten')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-
-class Merge(Layer):
-    '''Sum all input tensors.
-
-    Args:
-        input_sample_shape: sample shape of the input. The sample shape of all
-            inputs should be the same.
-    '''
-
-    def __init__(self, name, input_sample_shape=None):
-        self.in_shape = input_sample_shape
-        self.num_input = 1
-        super(Merge, self).__init__(name)
-
-    def setup(self, in_shape):
-        self.in_shape = in_shape
-        self.has_setup = True
-
-    def get_output_sample_shape(self):
-        return self.in_shape
-
-    def forward(self, flag, inputs):
-        '''Merge all input tensors by summation.
-
-        TODO(wangwei) do element-wise merge operations, e.g., avg, count
-        Args:
-            flag: not used.
-            inputs (list): a list of tensors
-
-        Returns:
-            A single tensor as the sum of all input tensors
-        '''
-        assert len(inputs) > 1, 'There must be multiple input tensors'
-        self.num_input = len(inputs)
-        output = tensor.Tensor()
-        output.reset_like(inputs[0])
-        output.set_value(0)
-        for x in inputs:
-            output += x
-        return output
-
-    def backward(self, flag, grad):
-        '''Replicate the grad for each input source layer.
-
-        Args:
-            grad(Tensor), the gradient tensor of the merged result from forward
-
-        Returns:
-            A list of replicated grad, one per source layer
-        '''
-        assert isinstance(grad, tensor.Tensor), 'The input must be Tensor' \
-            ' instead of %s' % type(grad).__name__
-        return [grad] * self.num_input, []  # * self.num_input
-
-
-class Split(Layer):
-    '''Replicate the input tensor.
-
-    Args:
-        num_output (int): number of output tensors to generate.
-        input_sample_shape: includes a single integer for the input sample
-            feature size.
-    '''
-    def __init__(self, name, num_output, input_sample_shape=None):
-        self.num_output = num_output
-        self.in_shape = input_sample_shape
-        super(Split, self).__init__(name)
-
-    def setup(self, in_shape):
-        self.in_shape = in_shape
-        self.has_setup = True
-
-    def get_output_sample_shape(self):
-        return [self.in_shape] * self.num_output
-
-    def forward(self, flag, input):
-        '''Replicate the input tensor into multiple tensors.
-
-        Args:
-            flag: not used
-            input: a single input tensor
-
-        Returns:
-            a list of output tensors (each one is a copy of the input)
-        '''
-        assert isinstance(input, tensor.Tensor), 'The input must be Tensor'
-        outputs = [input] * self.num_output
-        return outputs
-
-    def backward(self, flag, grads):
-        '''Sum all grad tensors to generate a single output tensor.
-
-        Args:
-            grads(list of Tensor), one per dest layer
-
-        Returns:
-            a single tensor as the sum of all grads
-        '''
-        assert len(grads) > 1, 'There must be multiple gradients'
-        dx = tensor.Tensor()
-        dx.reset_like(grads[0])
-        dx.set_value(0)
-        for g in grads:
-            dx += g
-        return dx, []
-
-
-class Concat(Layer):
-    '''Concatenate tensors vertically (axis = 0) or horizontally (axis = 1).
-
-    Currently, only support tensors with 2 dimensions.
-
-    Args:
-        axis(int): 0 for concat row; 1 for concat columns;
-        input_sample_shapes: a list of sample shape tuples, one per input tensor
-    '''
-
-    def __init__(self, name, axis, input_sample_shapes=None):
-        super(Concat, self).__init__(name)
-        self.in_shapes = input_sample_shapes
+    def __init__(self, axis=1):
+        super(Flatten, self).__init__()
         self.axis = axis
-        self.conf.concat_conf.axis = axis
-        if engine == "cudnn":
-            self.layer = _create_layer('singacuda', 'Concat')
-        else:
-            self.layer = _create_layer(engine, 'Concat')
-        if input_sample_shapes is not None:
-            self.setup(input_sample_shapes)
 
-    def forward(self, flag, inputs):
-        '''Concatenate all input tensors.
-
-        Args:
-            flag: same as Layer::forward()
-            input: a list of tensors
-
-        Returns:
-            a single concatenated tensor
-        '''
-        assert type(inputs) is list, 'Must be a list of Tensors'
-        ys = super(Concat, self).forward(flag, inputs)
-        return ys[0]
-
-    def backward(self, flag, dy):
-        '''Backward propagate gradients through this layer.
-
-        Args:
-            flag: same as Layer::backward()
-            dy(Tensor): the gradient tensors of y w.r.t objective loss
-        Return:
-            <dx, []>, dx is a list tensors for the gradient of the inputs; []
-               is an empty list.
-        '''
-        if type(dy) is tensor.Tensor:
-            dy = [dy]
-        assert type(dy) is list, 'Must be a list(Tensor)'
-        return super(Concat, self).backward(flag, dy)
+    def forward(self, x):
+        return autograd.flatten(x, self.axis)
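+
+# Illustrative sketch: with the default axis=1, dimension 0 is kept and the
+# remaining dimensions are flattened, e.g. (32, 3, 8, 8) -> (32, 192).
+#     y = Flatten(axis=1)(x)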
 
 
-class Slice(Layer):
-    '''Slice the input tensor into multiple sub-tensors vertically (axis=0) or
-    horizontally (axis=1).
+class SoftMaxCrossEntropy(Layer):
+    """
+    Generate a SoftMaxCrossEntropy operator
+    """
 
-    Args:
-        axis (int): 0 for slice rows; 1 for slice columns;
-        slice_point(list): positions along the axis to do slice; there are n-1
-            points for n sub-tensors;
-        input_sample_shape: input tensor sample shape
-    '''
+    def __init__(self):
+        super(SoftMaxCrossEntropy, self).__init__()
 
-    def __init__(self, name, axis, slice_point, input_sample_shape=None):
-        super(Slice, self).__init__(name)
-        self.in_shape = input_sample_shape
+    def forward(self, x, t):
+        return autograd.softmax_cross_entropy(x, t)
+
+
+class SoftMax(Layer):
+    """
+    Generate a SoftMax operator
+    """
+
+    def __init__(self):
+        super(SoftMax, self).__init__()
+
+    def forward(self, x):
+        return autograd.softmax(x)
+
+
+class MeanSquareError(Layer):
+    """
+    Generate a MeanSquareError operator
+    """
+
+    def __init__(self):
+        super(MeanSquareError, self).__init__()
+
+    def forward(self, x, t):
+        return autograd.mse_loss(x, t)
+
+
+class CrossEntropy(Layer):
+    """
+    Generate a CrossEntropy operator
+    """
+
+    def __init__(self):
+        super(CrossEntropy, self).__init__()
+
+    def forward(self, x, t):
+        return autograd.cross_entropy(x, t)
+
+
+class BinaryCrossEntropy(Layer):
+    """
+    Generate a BinaryCrossEntropy operator
+    """
+
+    def __init__(self):
+        super(BinaryCrossEntropy, self).__init__()
+
+    def forward(self, x, t):
+        return autograd.binary_cross_entropy(x, t)
+
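+# Illustrative sketch for the loss layers above (assuming Layer.__call__
+# dispatches to forward()): each takes the prediction x and the target t.
+#     loss = SoftMaxCrossEntropy()(logits, target)
+#     loss = MeanSquareError()(pred, target)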
+
+class Dropout(Layer):
+    """
+    Generate a Dropout operator
+    """
+
+    def __init__(self, ratio=0.5):
+        super(Dropout, self).__init__()
+        self.ratio = ratio
+
+    def forward(self, x):
+        return autograd.dropout(x, self.ratio)
+
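+# Illustrative sketch: ratio is the probability of zeroing each element, so
+# Dropout(ratio=0.2)(x) drops roughly 20% of the activations during training.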
+
+class Cat(Layer):
+    """
+    Generate a Cat operator
+    """
+
+    def __init__(self, axis=0):
+        super(Cat, self).__init__()
         self.axis = axis
-        self.conf.slice_conf.axis = axis
-        self.conf.slice_conf.slice_point.extend(slice_point)
-        if engine == "cudnn":
-            self.layer = _create_layer('singacuda', 'Slice')
+
+    def forward(self, xs):
+        return autograd.cat(xs, self.axis)
+
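+# Illustrative sketch: Cat concatenates a list of tensors along self.axis,
+# e.g. Cat(axis=0)([x1, x2]) stacks x1 and x2 along the first dimension.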
+
+class Reshape(Layer):
+    """
+    Generate a Reshape operator
+    """
+
+    def __init__(self):
+        super(Reshape, self).__init__()
+
+    def forward(self, x, shape):
+        return autograd.reshape(x, shape)
+
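+# Illustrative sketch: the target shape is supplied at call time rather than
+# at construction, e.g.
+#     y = Reshape()(x, (batch_size, -1))
+# (whether -1 is accepted for an inferred dimension depends on
+# autograd.reshape; treat this as an assumption.)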
+
+class CudnnRNN(Layer):
+    """ `CudnnRNN` class implements with c++ backend and run the operation
+          directly on cuDNN
+        While `RNN` class implements with high level singa API
+    """
+
+    def __init__(self,
+                 hidden_size,
+                 activation="tanh",
+                 num_layers=1,
+                 bias=True,
+                 batch_first=True,
+                 dropout=0,
+                 bidirectional=False,
+                 rnn_mode="lstm",
+                 use_mask=False,
+                 return_sequences=True):
+        """
+            Args:
+                hidden_size: hidden feature dimension
+                rnn_mode: accepted values: "vanilla", "tanh", "relu", "lstm", "gru"
+        """
+        assert singa.USE_CUDA, "CudnnRNN requires CUDA to be enabled"
+        assert num_layers > 0, "num_layers should be > 0"
+        assert 0 <= dropout < 1, "dropout should be >= 0 and < 1"
+        super(CudnnRNN, self).__init__()
+
+        self.rnn_mode = rnn_mode
+        self.hidden_size = hidden_size
+        self.num_layers = num_layers
+        self.dropout = dropout
+        self.bidirectional = 1 if bidirectional else 0
+        self.return_sequences = return_sequences
+        self.batch_first = batch_first
+        self.use_mask = use_mask
+
+        # GPU parameter
+        # cudnn_rnn_mode: 0 - RNN RELU, 1 - RNN TANH, 2 - LSTM, 3 - GRU
+        if self.rnn_mode == "lstm":
+            self.cudnn_rnn_mode = 2
+        elif self.rnn_mode == "vanilla" or self.rnn_mode == "tanh":
+            self.cudnn_rnn_mode = 1
+        elif self.rnn_mode == "relu":
+            self.cudnn_rnn_mode = 0
+        elif self.rnn_mode == "gru":
+            self.cudnn_rnn_mode = 3
+
+    def initialize(self, x, hx=None, cx=None, seq_lengths=None):
+        if self.batch_first:
+            x = x.transpose((1, 0, 2))
+        self.input_size = x.shape[1]
+
+        # GPU handle
+        self.handle = singa.CudnnRNNHandle(x.data,
+                                           self.hidden_size,
+                                           mode=self.cudnn_rnn_mode,
+                                           num_layers=self.num_layers,
+                                           dropout=self.dropout,
+                                           bidirectional=self.bidirectional)
+
+        self.W = Tensor(shape=(self.handle.weights_size,),
+                        requires_grad=True,
+                        stores_grad=True,
+                        device=x.device)
+
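+        # initialize W uniformly in [-1/sqrt(hidden_size), 1/sqrt(hidden_size)],
+        # a common default for RNN weight initialization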
+        k = 1 / self.hidden_size
+        self.W.uniform(-math.sqrt(k), math.sqrt(k))
+
+    def forward(self, x, hx=None, cx=None, seq_lengths=None):
+
+        self.device_check(x, self.W)
+        if self.batch_first:  # (bs,seq,data) -> (seq,bs,data)
+            x = autograd.transpose(x, (1, 0, 2))
+
+        batch_size = x.shape[1]
+        directions = 2 if self.bidirectional else 1
+        if hx is None:
+            hx = Tensor(shape=(self.num_layers * directions, batch_size,
+                               self.hidden_size),
+                        requires_grad=False,
+                        stores_grad=False,
+                        device=x.device).set_value(0.0)
+        if cx is None:
+            cx = Tensor(shape=(self.num_layers * directions, batch_size,
+                               self.hidden_size),
+                        requires_grad=False,
+                        stores_grad=False,
+                        device=x.device).set_value(0.0)
+
+        # the underlying operator returns a list of outputs;
+        # its input has shape (seq_length, batch_size, feature_size)
+        if self.use_mask:
+            assert isinstance(seq_lengths, Tensor), "seq_lengths must be a Tensor"
+            y = autograd._RNN(self.handle,
+                              return_sequences=self.return_sequences,
+                              use_mask=self.use_mask,
+                              seq_lengths=seq_lengths)(x, hx, cx, self.W)[0]
         else:
-            self.layer = _create_layer(engine, 'Slice')
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
+            y = autograd._RNN(
+                self.handle,
+                return_sequences=self.return_sequences,
+            )(x, hx, cx, self.W)[0]
+        if self.return_sequences and self.batch_first:
+            # (seq, bs, hid) -> (bs, seq, hid)
+            y = autograd.transpose(y, (1, 0, 2))
+        return y
 
-    def get_output_sample_shape(self):
-        out = []
-        for i in range(len(self.conf.slice_conf.slice_point) + 1):
-            out.append(self.layer.GetOutputSampleShapeAt(i))
-        return out
+    def get_params(self):
+        return {self.W.name: self.W}
 
-    def forward(self, flag, x):
-        '''Slice the input tensor on the given axis.
-
-        Args:
-            flag: same as Layer::forward()
-            x: a single input tensor
-
-        Returns:
-            a list of output tensors
-        '''
-        if type(x) is tensor.Tensor:
-            x = [x]
-        assert type(x) is list, 'Must be a list of Tensor'
-        return super(Slice, self).forward(flag, x)
-
-    def backward(self, flag, grads):
-        '''Concatenate all grad tensors to generate a single output tensor
-
-        Args:
-            flag: same as Layer::backward()
-            grads: a list of tensors, one for the gradient of one sliced tensor
-
-        Returns:
-            a single tensor for the gradient of the original user, and an empty
-                list.
-        '''
-        assert len(grads) > 1, 'There must be multiple gradients'
-        dxs, _ = super(Slice, self).backward(flag, grads)
-        return dxs[0], []
+    def set_params(self, parameters):
+        self.set_attribute(self.W, parameters[self.W.name])
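+
+
+# Illustrative usage sketch (an assumption, requires a CUDA device; input is
+# batch-first with shape (batch, seq, feature)):
+#     rnn = CudnnRNN(hidden_size=64, rnn_mode="lstm")
+#     y = rnn(x)   # (batch, seq, 64) for a unidirectional LSTM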
 
 
-class RNN(Layer):
-    '''Recurrent layer with 4 types of units, namely lstm, gru, tanh and relu.
-
-    Args:
-        hidden_size: hidden feature size, the same for all stacks of layers.
-        rnn_mode: decides the rnn unit, which could be one of 'lstm', 'gru',
-            'tanh' and 'relu', refer to cudnn manual for each mode.
-        num_stacks: num of stacks of rnn layers. It is different from the
-            unrolling sequence length.
-        input_mode: 'linear' converts the input feature x by a linear
-            transformation to get a feature vector of size hidden_size;
-            'skip' does nothing but requires the input feature size equals
-            hidden_size
-        bidirection: True for bidirectional RNN
-        param_specs: config for initializing the RNN parameters.
-        input_sample_shape: includes a single integer for the input sample
-            feature size.
-    '''
-
-    def __init__(self, name, hidden_size, rnn_mode='lstm', dropout=0.0,
-                 num_stacks=1, input_mode='linear', bidirectional=False,
-                 param_specs=None, input_sample_shape=None):
-        assert cudnn_version >= 5005, 'RNN is supported since CUDNN V5.0.5; '\
-            'This version is %d' % cudnn_version
-        super(RNN, self).__init__(name)
-        conf = self.conf.rnn_conf
-        assert hidden_size > 0, 'Hidden feature size must > 0'
-        conf.hidden_size = hidden_size
-        assert rnn_mode in Set(['lstm', 'gru', 'tanh', 'relu']),  \
-            'rnn mode %s is not available' % (rnn_mode)
-        conf.rnn_mode = rnn_mode
-        conf.num_stacks = num_stacks
-        conf.dropout = dropout
-        conf.input_mode = input_mode
-        conf.direction = 'unidirectional'
-        if bidirectional:
-            conf.direction = 'bidirectional'
-        # currently only has rnn layer implemented using cudnn
-        _check_engine(engine, ['cudnn'])
-        if param_specs is None:
-            param_specs = {'name': name + '/weight',
-                           'init': 'uniform', 'low': 0, 'high': 1}
-        self.conf.param.extend([_construct_param_specs_from_dict(param_specs)])
-        self.param_specs.append(_construct_param_specs_from_dict(param_specs))
-
-        self.layer = singa_wrap.CudnnRNN()
-        if input_sample_shape is not None:
-            self.setup(input_sample_shape)
-
-    def forward(self, flag, inputs):
-        '''Forward inputs through the RNN.
-
-        Args:
-            flag: True(kTrain) for training; False(kEval) for evaluation;
-                other values for future use.
-            inputs, <x1, x2,...xn, hx, cx>, where xi is the input tensor for the
-                i-th position, its shape is (batch_size, input_feature_length);
-                the batch_size of xi must >= that of xi+1; hx is the initial
-                hidden state of shape (num_stacks * bidirection?2:1, batch_size,
-                hidden_size). cx is the initial cell state tensor of the same
-                shape as hy. cx is valid for only lstm. For other RNNs there is
-                no cx. Both hx and cx could be dummy tensors without shape and
-                data.
-
-        Returns:
-            <y1, y2, ... yn, hy, cy>, where yi is the output tensor for the i-th
-                position, its shape is (batch_size,
-                hidden_size * bidirection?2:1). hy is the final hidden state
-                tensor. cy is the final cell state tensor. cy is only used for
-                lstm.
-        '''
-        assert self.has_setup, 'Must call setup() before forward()'
-        assert len(inputs) > 1, 'The input to RNN must include at '\
-            'least one input tensor '\
-            'and one hidden state tensor (could be a dummy tensor)'
-        tensors = []
-        for t in inputs:
-            assert isinstance(t, tensor.Tensor), \
-                'input must be py Tensor %s' % (type(t))
-            tensors.append(t.singa_tensor)
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-        y = self.layer.ForwardWithMultInputs(flag, tensors)
-        return tensor.from_raw_tensors(y)
-
-    def backward(self, flag, grad):
-        '''Backward gradients through the RNN.
-
-        Args:
-            flag, for future use.
-            grad, <dy1, dy2,...dyn, dhy, dcy>, where dyi is the gradient for the
-            i-th output, its shape is (batch_size, hidden_size*bidirection?2:1);
-                dhy is the gradient for the final hidden state, its shape is
-                (num_stacks * bidirection?2:1, batch_size,
-                hidden_size). dcy is the gradient for the final cell state.
-                dcy is valid only for lstm. For other RNNs there is
-                no dcy. Both dhy and dcy could be dummy tensors without shape and
-                data.
-
-        Returns:
-            <dx1, dx2, ... dxn, dhx, dcx>, where dxi is the gradient tensor for
-                the i-th input, its shape is (batch_size,
-                input_feature_length). dhx is the gradient for the initial
-                hidden state. dcx is the gradient for the initial cell state,
-                which is valid only for lstm.
-        '''
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-
-        tensors = []
-        for t in grad:
-            assert isinstance(t, tensor.Tensor), 'grad must be py Tensor'
-            tensors.append(t.singa_tensor)
-        ret = self.layer.BackwardWithMultInputs(flag, tensors)
-        return tensor.from_raw_tensors(ret[0]), tensor.from_raw_tensors(ret[1])
-
-
-class LSTM(RNN):
-
-    def __init__(self, name, hidden_size, dropout=0.0, num_stacks=1,
-                 input_mode='linear', bidirectional=False,
-                 param_specs=None, input_sample_shape=None):
-        super(LSTM, self).__init__(name, hidden_size,  'lstm',  dropout,
-                                   num_stacks, input_mode, bidirectional,
-                                   param_specs, input_sample_shape)
-
-
-class GRU(RNN):
-
-    def __init__(self, name, hidden_size, dropout=0.0, num_stacks=1,
-                 input_mode='linear', bidirectional=False, param_specs=None,
-                 input_sample_shape=None):
-        super(GRU, self).__init__(name,  hidden_size, 'gru',  dropout,
-                                  num_stacks, input_mode, bidirectional,
-                                  param_specs, input_sample_shape)
-
-
-def _check_engine(engine, allowed_engines):
-    assert engine.lower() in Set(allowed_engines), \
-        '%s is not a supported engine. Please use one of %s' % \
-        (engine, ', '.join(allowed_engines))
-
-
-def _create_layer(eng, layer):
-    ''' create singa wrap layer.
-
-    Both arguments are case insensitive.
-    Args:
-        engine, implementation engine, either 'singa' or 'cudnn'
-        layer, layer type, e.g., 'convolution', 'pooling'; for activation
-        layers, use the specific activation mode, e.g. 'relu', 'tanh'.
-    '''
-    assert eng != 'cudnn' or cudnn_version > 0, 'CUDNN is not enabled, please '\
-        'change the engine, e.g., layer.engine=singacpp'
-    layer_type = eng + '_' + layer
-    return singa_wrap.CreateLayer(layer_type.lower())
-
-
-def _set_kernel_stride_pad(conf, kernel, stride, border_mode, pad):
-    """Private function called by Convolution2D and Pooling2D."""
-    if isinstance(kernel, tuple):
-        conf.kernel_h = kernel[0]
-        conf.kernel_w = kernel[1]
-    else:
-        conf.kernel_h = kernel
-        conf.kernel_w = kernel
-    if isinstance(stride, tuple):
-        conf.stride_h = stride[0]
-        conf.stride_w = stride[1]
-    else:
-        conf.stride_h = stride
-        conf.stride_w = stride
-    mode = border_mode.lower()
-    if pad is None:
-        # TODO(wangwei) check the border mode
-        if mode == 'same':
-            assert conf.kernel_h % 2 == 1 and conf.kernel_w % 2 == 1, \
-                'Must use odd kernel for mode="same", kernel is (%d, %d)' % (
-                    conf.kernel_h, conf.kernel_w)
-            pad = (conf.kernel_h / 2, conf.kernel_w / 2)
-        elif mode == 'valid':
-            pad = (0, 0)
-        else:
-            assert False, ('Unsupported border_mode: %s. '
-                           'Please use {"valid", "same"}' % border_mode)
-        assert isinstance(pad, tuple), 'pad should be a tuple'
-    if isinstance(pad, tuple):
-        conf.pad_h = pad[0]
-        conf.pad_w = pad[1]
-    else:
-        conf.pad_h = pad
-        conf.pad_w = pad
-    return conf
-
-
-def _construct_param_specs_from_dict(specs):
-    """Conver the param specs from a dict into ParamSpec protobuf object.
-
-    Args:
-        specs (dict): the fields inlcude
-            'name' for parameter name
-            'lr_mult' for learning rate multiplier;
-            'decay_mult' for weight decay multiplier;
-            'init' for init method, which could be 'gaussian', 'uniform',
-            'xavier' and 'msra';
-            'std', 'mean', 'high', 'low' are used by corresponding init methods;
-            'constraint' for gradient constraint, value is a float threshold for
-                clamping the gradient.
-            'regularizer' for regularization, currently support 'l2', value is a
-                float for the coefficient.
-
-    Returns:
-        a ParamSpec object
-    """
-    conf = model_pb2.ParamSpec()
-    if 'name' in specs:
-        conf.name = specs['name']
-    if 'lr_mult' in specs:
-        conf.lr_mult = specs['lr_mult']
-    if 'decay_mult' in specs:
-        conf.decay_mult = specs['decay_mult']
-    if 'init' in specs:
-        filler = conf.filler
-        filler.type = specs['init'].lower()
-        if specs['init'].lower() == 'uniform':
-            assert 'low' in specs and 'high' in specs, \
-                'low and high are required for "uniform" init method'
-            filler.min = specs['low']
-            filler.max = specs['high']
-        elif specs['init'].lower() == 'gaussian':
-            assert 'mean' in specs and 'std' in specs, \
-                'std and mean are required for "gaussian" init method'
-            filler.mean = specs['mean']
-            filler.std = specs['std']
-        elif specs['init'].lower() == 'constant' and 'value' in specs:
-            filler.value = specs['value']
-    if 'regularizer' in specs:
-        conf.regularizer.coefficient = specs['regularizer']
-    if 'constraint' in specs:
-        conf.constraint.threshold = specs['constraint']
-    return conf
-
-
-def _construct_param_specs_from_caffe_proto(lyr_conf):
-    """convert the param specs from a caffe layer proto into a singa paramspec
-    protobuf object.
-
-    args:
-        specs (dict): the fields include
-            'name' for parameter name
-            'lr_mult' for learning rate multiplier;
-            'decay_mult' for weight decay multiplier;
-            'init' for init method, which could be 'gaussian', 'uniform',
-            'xavier' and 'msra';
-            'std', 'mean', 'high', 'low' are used by corresponding init methods;
-            caffe model has no 'constraint' and 'regularizer'
-
-    returns:
-        a pair of paramspec objects(weight and bias)
-    """
-    wparam = model_pb2.ParamSpec()
-    bparam = model_pb2.ParamSpec()
-    if len(lyr_conf.param) > 0:
-        wparam.name = lyr_conf.param[0].name
-        wparam.lr_mult = lyr_conf.param[0].lr_mult
-        wparam.decay_mult = lyr_conf.param[0].decay_mult
-        if len(lyr_conf.param) > 1:
-            bparam.name = lyr_conf.param[1].name
-            bparam.lr_mult = lyr_conf.param[1].lr_mult
-            bparam.decay_mult = lyr_conf.param[1].decay_mult
-    if wparam.name == '' or wparam.name is None:
-        wparam.name = lyr_conf.name + '_weight'
-    if bparam.name == '' or bparam.name is None:
-        bparam.name = lyr_conf.name + '_bias'
-    wfiller = wparam.filler
-    bfiller = bparam.filler
-    param = ''
-    if lyr_conf.type == 'Convolution' or lyr_conf.type == 4:
-        param = lyr_conf.convolution_conf
-    elif lyr_conf.type == 'InnerProduct' or lyr_conf.type == 14:
-        param = lyr_conf.dense_conf
-
-    if param != '':
-        wfiller.type = param.weight_filler.type.lower()
-        wfiller.min = param.weight_filler.min
-        wfiller.max = param.weight_filler.max
-        wfiller.mean = param.weight_filler.mean
-        wfiller.std = param.weight_filler.std
-        wfiller.value = param.weight_filler.value
-
-        bfiller.type = param.bias_filler.type.lower()
-        bfiller.min = param.bias_filler.min
-        bfiller.max = param.bias_filler.max
-        bfiller.mean = param.bias_filler.mean
-        bfiller.std = param.bias_filler.std
-        bfiller.value = param.bias_filler.value
-
-    return (wparam, bparam)
-
-
-def get_layer_list():
-    """ Return a list of strings which include the identifiers (tags) of all
-    supported layers
-    """
-    return singa_wrap.GetRegisteredLayers()
+# import autograd at the end of this file to resolve the circular import
+from singa import autograd
diff --git a/python/singa/loss.py b/python/singa/loss.py
deleted file mode 100644
index 7fd3d77..0000000
--- a/python/singa/loss.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-
-'''
-Loss module includes a set of training loss implementations. Some are converted
-from the C++ implementation, and the rest are implemented directly using python
-Tensor.
-
-Example usage::
-
-    from singa import tensor
-    from singa import loss
-
-    x = tensor.Tensor((3, 5))
-    x.uniform(0, 1)  # randomly generate the prediction activation
-    y = tensor.from_numpy(np.array([0, 1, 3], dtype=np.int))  # set the truth
-
-    f = loss.SoftmaxCrossEntropy()
-    l = f.forward(True, x, y)  # l is tensor with 3 loss values
-    g = f.backward()  # g is a tensor containing all gradients of x w.r.t l
-'''
-
-
-from . import singa_wrap as singa
-from proto import model_pb2
-import tensor
-import numpy as np
-
-
-class Loss(object):
-    '''Base loss class.
-
-    Subclasses that wrap the C++ loss classes can use the inherited forward,
-    backward, and evaluate functions of this base class. Other subclasses need
-    to override these functions
-    '''
-
-    def __init__(self):
-        self.swig_loss = None
-
-    def forward(self, flag, x, y):
-        '''Compute the loss values.
-
-        Args:
-            flag: kTrain/kEval or bool. If it is kTrain/True, then the backward
-                function must be called before calling forward again.
-            x (Tensor): the prediction Tensor
-            y (Tensor): the ground truth Tensor, x.shape[0] must = y.shape[0]
-
-        Returns:
-            a tensor of floats for the loss values, one per sample
-        '''
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-        return tensor.from_raw_tensor(
-            self.swig_loss.Forward(flag, x.singa_tensor, y.singa_tensor))
-
-    def backward(self):
-        '''
-        Returns:
-            the grad of x w.r.t. the loss
-        '''
-        return tensor.from_raw_tensor(self.swig_loss.Backward())
-
-    def evaluate(self, flag, x, y):  # TODO(wangwei) remove flag
-        '''
-        Args:
-            flag (int): must be kEval, to be removed
-            x (Tensor): the prediction Tensor
-            y (Tensor): the ground truth Tensor
-
-        Returns:
-            the averaged loss for all samples in x.
-        '''
-        if type(flag) is bool:
-            if flag:
-                flag = model_pb2.kTrain
-            else:
-                flag = model_pb2.kEval
-
-        return self.swig_loss.Evaluate(flag, x.singa_tensor, y.singa_tensor)
-
-
-class SoftmaxCrossEntropy(Loss):
-    '''This loss function is a combination of SoftMax and Cross-Entropy loss.
-
-    It converts the inputs via SoftMax function and then
-    computes the cross-entropy loss against the ground truth values.
-
-    For each sample, the ground truth could be an integer as the label index;
-    or a binary array, indicating the label distribution. The ground truth
-    tensor thus could be a 1d or 2d tensor.
-    The data/feature tensor could be 1d (for a single sample) or 2d for a batch of
-    samples.
-    '''
-
-    def __init__(self):
-        super(SoftmaxCrossEntropy, self).__init__()
-        self.swig_loss = singa.SoftmaxCrossEntropy()
-
-
-class SigmoidCrossEntropy(Loss):
-    '''This loss evaluates the cross-entropy loss between the prediction and the
-    truth values with the prediction probability generated from Sigmoid.
-    '''
-    def __init__(self, epsilon=1e-8):
-        super(SigmoidCrossEntropy, self).__init__()
-        self.truth = None
-        self.prob = None
-        self.epsilon = epsilon  # to avoid log(x) with x being too small
-
-    def forward(self, flag, x, y):
-        '''loss is -yi * log pi - (1-yi) log (1-pi), where pi=sigmoid(xi)
-
-        Args:
-            flag (bool): true for training; false for evaluation
-            x (Tensor): the prediction Tensor
-            y (Tensor): the truth Tensor, a binary array value per sample
-
-        Returns:
-            a Tensor with one error value per sample
-        '''
-        p = tensor.sigmoid(x)
-        if flag:
-            self.truth = y
-            self.prob = p
-        np = 1 - p
-        p += (p < self.epsilon) * self.epsilon
-        np += (np < self.epsilon) * self.epsilon
-        l = (y-1) * tensor.log(np) - y * tensor.log(p)
-        # TODO(wangwei): add unary operation -Tensor
-        return tensor.average(l, axis=1)
-
-    def backward(self):
-        ''' Compute the gradient of loss w.r.t to x.
-
-        Returns:
-            dx = pi - yi.
-        '''
-        assert self.truth is not None, 'must call forward first'
-        dx = self.prob - self.truth
-        self.truth = None
-        return dx
-
-    def evaluate(self, flag, x, y):
-        '''Compute the averaged error.
-
-        Returns:
-            a float value as the averaged error
-        '''
-        l = self.forward(False, x, y)
-        return l.l1()
-
-
-class SquaredError(Loss):
-    '''This loss evaluates the squared error between the prediction and the
-    truth values.
-
-    It is implemented using Python Tensor operations.
-    '''
-    def __init__(self):
-        super(SquaredError, self).__init__()
-        self.err = None
-
-    def forward(self, flag, x, y):
-        '''Compute the error as 0.5 * ||x-y||^2.
-
-        Args:
-            flag (int): kTrain or kEval; if kTrain, then the backward must be
-                called before calling forward again.
-            x (Tensor): the prediction Tensor
-            y (Tensor): the truth Tensor, an integer value per sample, whose
-                value is [0, x.shape[1])
-
-        Returns:
-            a Tensor with one error value per sample
-        '''
-        self.err = x - y
-        return tensor.square(self.err) * 0.5
-
-    def backward(self):
-        '''Compute the gradient of x w.r.t the error.
-
-        Returns:
-            x - y
-        '''
-        return self.err
-
-    def evaluate(self, flag, x, y):
-        '''Compute the averaged error.
-
-        Returns:
-            a float value as the averaged error
-        '''
-        return tensor.sum(tensor.square(x - y) * 0.5) / x.size()
diff --git a/python/singa/metric.py b/python/singa/metric.py
deleted file mode 100644
index 64a1b72..0000000
--- a/python/singa/metric.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-'''This module includes a set of metric classes for evaluating the model's
-performance. The specific metric classes could be converted from C++
-implementation or implemented directly using Python.
-
-
-Example usage::
-
-    from singa import tensor
-    from singa import metric
-
-    x = tensor.Tensor((3, 5))
-    x.uniform(0, 1)  # randomly generate the prediction activation
-    x = tensor.SoftMax(x)  # normalize the prediction into probabilities
-    y = tensor.from_numpy(np.array([0, 1, 3], dtype=np.int))  # set the truth
-
-    f = metric.Accuracy()
-    acc = f.evaluate(x, y)  # averaged accuracy over all 3 samples in x
-
-'''
-
-
-from . import singa_wrap as singa
-import tensor
-import numpy as np
-
-
-class Metric(object):
-    '''Base metric class.
-
-    Subclasses that wrap the C++ metric classes can use the inherited forward,
-    and evaluate functions of this base class. Other subclasses need
-    to override these functions. Users need to feed in the **predictions** and
-    ground truth to get the metric values.
-    '''
-
-    def __init__(self):
-        self.swig_metric = None
-
-    def forward(self, x, y):
-        '''Compute the metric for each sample.
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth values, one row per sample
-
-        Returns:
-            a tensor of floats, one per sample
-        '''
-        return tensor.from_raw_tensor(
-            self.swig_metric.Forward(x.singa_tensor, y.singa_tensor))
-
-    def evaluate(self, x, y):
-        '''Compute the averaged metric over all samples.
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth values, one row per sample
-        Returns:
-            a float value for the averaged metric
-        '''
-        return self.swig_metric.Evaluate(x.singa_tensor, y.singa_tensor)
-
-
-class Accuracy(Metric):
-    '''Compute the top one accuracy for single label prediction tasks.
-
-    It calls the C++ functions to do the calculation.
-    '''
-    def __init__(self):
-        self.swig_metric = singa.Accuracy()
-
-
-
-
-class Precision(Metric):
-    '''Use the top-k labels with max probability as the prediction
-
-    Compute the precision against the groundtruth labels
-    '''
-    def __init__(self, top_k):
-        self.top_k = top_k
-
-
-
-    def forward(self, x, y):
-        '''Compute the precision for each sample.
-
-        Convert tensor to numpy for computation
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth labels, one row per sample
-
-        Returns:
-            a tensor of floats, one per sample
-        '''
-
-        dev = x.device
-        x.to_host()
-        y.to_host()
-
-        x_np = tensor.to_numpy(x)
-        y_np = tensor.to_numpy(y)
-
-        pred_np = np.argsort(-x_np)[:, 0:self.top_k] #Sort in descending order
-
-        prcs_np = np.zeros(pred_np.shape[0], dtype=np.float32)
-
-        for i in range(pred_np.shape[0]):
-            #groundtruth labels
-            label_np = np.argwhere(y_np[i])
-
-            #Num of common labels among prediction and groundtruth
-            num_intersect = np.intersect1d(pred_np[i], label_np).size
-            prcs_np[i] = num_intersect / float(self.top_k)
-
-        precision = tensor.from_numpy(prcs_np)
-
-        x.to_device(dev)
-        y.to_device(dev)
-        precision.to_device(dev)
-
-        return precision
-
-
-    def evaluate(self, x, y):
-        '''Compute the averaged precision over all samples.
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth values, one row per sample
-        Returns:
-            a float value for the averaged metric
-        '''
-
-        return tensor.average(self.forward(x, y))
-
-
-class Recall(Metric):
-    '''Use the top-k labels with max probability as the prediction
-
-    Compute the recall against the groundtruth labels
-    '''
-    def __init__(self, top_k):
-        self.top_k = top_k
-
-
-    def forward(self, x, y):
-        '''Compute the recall for each sample.
-
-        Convert tensor to numpy for computation
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth labels, one row per sample
-
-        Returns:
-            a tensor of floats, one per sample
-        '''
-
-        dev = x.device
-        x.to_host()
-        y.to_host()
-
-        x_np = tensor.to_numpy(x)
-        y_np = tensor.to_numpy(y)
-
-        pred_np = np.argsort(-x_np)[:, 0:self.top_k] #Sort in descending order
-
-        recall_np = np.zeros(pred_np.shape[0], dtype=np.float32)
-
-        for i in range(pred_np.shape[0]):
-            #Return the index of non-zero dimension of i-th sample
-            label_np = np.argwhere(y_np[i])
-
-            #Num of common labels among prediction and groundtruth
-            num_intersect = np.intersect1d(pred_np[i], label_np).size
-            recall_np[i] = float(num_intersect) / label_np.size
-
-        recall = tensor.from_numpy(recall_np)
-
-        x.to_device(dev)
-        y.to_device(dev)
-        recall.to_device(dev)
-
-        return recall
-
-
-    def evaluate(self, x, y):
-        '''Compute the averaged precision over all samples.
-
-        Args:
-            x (Tensor): predictions, one row per sample
-            y (Tensor): ground truth values, one row per sample
-        Returns:
-            a float value for the averaged metric
-        '''
-
-        return tensor.average(self.forward(x,y))
diff --git a/python/singa/model.py b/python/singa/model.py
index 38d9950..5f1ed2c 100644
--- a/python/singa/model.py
+++ b/python/singa/model.py
@@ -1,21 +1,354 @@
-#/**
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# =============================================================================
+'''
+This module provides the Model class, which lets Python users
+build models that run with the computational graph.
+'''
 
-class Model(Object):
-    pass
+import os
+import gc
+import time
+import json
+import zipfile
+import numpy as np
+from functools import wraps
+from collections.abc import Iterable  # Iterable lives in collections.abc in Python 3
 
+from singa import tensor
+from singa import autograd
+from singa import layer
+from .tensor import Tensor
+from . import singa_wrap as singa
+
+
+class ModelMeta(layer.LayerMeta):
+
+    def buffer_operation(func):
+
+        def remove_creator(tensors):
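+            # recursively detach each tensor from the autograd Operation that
+            # created it, so those Operations can be garbage collected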
+            if not tensors:
+                return
+
+            if isinstance(tensors, Iterable):
+                for item in tensors:
+                    if isinstance(item, Iterable):
+                        remove_creator(item)
+                    elif isinstance(item, tensor.Tensor):
+                        item.creator = None
+            elif isinstance(tensors, tensor.Tensor):
+                tensors.creator = None
+
+        @wraps(func)
+        def wrapper(self, *args, **kwargs):
+            if self.graph_mode and self.training:
+                if len(args) == 0:
+                    raise ValueError('expect at least one input tensor')
+
+                if isinstance(args[0], list):
+                    assert isinstance(
+                        args[0][0],
+                        Tensor), ('function expects PlaceHolders or Tensors')
+                    dev = args[0][0].device
+                else:
+                    assert isinstance(
+                        args[0],
+                        Tensor), ('function expects PlaceHolders or Tensors')
+                    dev = args[0].device
+
+                if not self._buffered:
+                    # buffer operations
+                    dev.EnableGraph(True)
+                    self._results = func(self, *args, **kwargs)
+                    dev.Sync()
+                    dev.EnableGraph(False)
+                    self._buffered = True
+
+                    # deconstruct Operations before running the entire graph
+                    remove_creator(self._results)
+
+                    # make sure all Operations are deallocated
+                    gc.collect()
+
+                # run graph
+                dev.RunGraph(self.sequential)
+                return self._results
+            else:
+                return func(self, *args, **kwargs)
+
+        return wrapper
+
+    def __new__(cls, name, bases, attr):
+        if 'train_one_batch' in attr:
+            attr['train_one_batch'] = ModelMeta.buffer_operation(
+                attr['train_one_batch'])
+
+        return super(ModelMeta, cls).__new__(cls, name, bases, attr)
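+
+# Net effect of ModelMeta: any subclass's train_one_batch is wrapped so that,
+# in graph mode, its operations are buffered into the device graph on the
+# first call and replayed with dev.RunGraph() on subsequent calls.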
+
+
+class Model(layer.Layer, metaclass=ModelMeta):
+    """ Base class for your neural network models.
+
+    Example usage::
+
+        import numpy as np
+        from singa import opt
+        from singa import tensor
+        from singa import device
+        from singa import autograd
+        from singa import layer
+        from singa import model
+
+        class MyModel(model.Model):
+            def __init__(self):
+                super(MyModel, self).__init__()
+
+                self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+                self.conv1 = layer.Conv2d(1, 20, 5, padding=0)
+                self.conv2 = layer.Conv2d(20, 50, 5, padding=0)
+                self.sgd = opt.SGD(lr=0.01)
+
+            def forward(self, x):
+                y = self.conv1(x)
+                y = self.conv2(y)
+                return y
+
+            def train_one_batch(self, x, y):
+                out = self.forward(x)
+                loss = self.softmax_cross_entropy(out, y)
+                self.sgd(loss)
+                return out, loss
+
+    """
+
+    # constants used for saving and loading states
+    TENSOR_DICT_FILENAME = '/tensor_dict.npz'
+    STATES_ATTR_FILENAME = '/states_attr.json'
+    MODEL_STATE_TYPE = 0
+    AUX_STATE_TYPE = 1
+
+    def __init__(self):
+        """
+        Initializes internal Model state
+        """
+        super(Model, self).__init__()
+
+        self.training = True
+        self.graph_mode = True
+        self.sequential = False
+        self._buffered = False
+        self._results = None
+
+    def compile(self, inputs, is_train=True, use_graph=False, sequential=False):
+        """ Compile and initialize the model
+
+        This function will automatically derive the shape of parameters
+        in each sublayer based on the shape of input placeholders. It will
+        also do some settings.
+
+        Args:
+            inputs(list): the list of input tensors(placeholders)
+            is_train(bool): when is_trainis True, this model will enter
+            training mode, otherwise it will enter the evaluation mode
+            use_graph(bool): when use_graph is True, computational graph
+            will be used to train this model
+            sequential(bool): when sequential is True, model will execute ops
+            in the graph follow the order of joining the graph
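+
+        Example (a minimal sketch; the shapes, batch size and device are
+        illustrative)::
+
+            from singa import device
+            dev = device.get_default_device()
+            x = tensor.Tensor((16, 1, 28, 28), dev)
+            m = MyModel()
+            m.compile([x], is_train=True, use_graph=True, sequential=False)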
+        """
+        assert len(inputs) > 0 and isinstance(inputs[0], Tensor), (
+            'compile function expects PlaceHolders or Tensors')
+
+        dev = inputs[0].device
+        dev.EnableGraph(True)
+        self.forward(*inputs)
+        dev.EnableGraph(False)
+        dev.ResetGraph()
+
+        autograd.training = is_train
+        self.training = is_train
+        self.graph_mode = use_graph
+        self.sequential = sequential
+
+    def forward(self, *input):
+        """Defines the computation performed in every forward propagation.
+
+        Should be overridden by all subclasses.
+
+        Args:
+            *input: the input training data for the model
+
+        Returns:
+            out: the outputs of the forward propagation.
+        """
+        raise NotImplementedError
+
+    def train_one_batch(self, *input, **kwargs):
+        """Defines the computation performed in every training iteration
+
+        Should be overridden by all subclasses.
+
+        Args:
+            *input: the arguments of train_one_batch
+            **kwargs: the keyword arguments of train_one_batch
+        """
+        raise NotImplementedError
+
+    def train(self, mode=True):
+        """Set the model in evaluation mode.
+
+        Args:
+            mode(bool): when mode is True, this model will enter training mode
+        """
+        self.training = mode
+        autograd.training = mode
+
+    def eval(self):
+        """Sets the model in evaluation mode.
+        """
+        self.train(mode=False)
+
+    def graph(self, mode=True, sequential=False):
+        """ Turn on the computational graph. Specify execution mode.
+
+        Args:
+            mode(bool): when mode is True, model will use computational graph
+            sequential(bool): when sequential is True, model will execute ops
+            in the graph follow the order of joining the graph
+        """
+        self.graph_mode = mode
+        self.sequential = sequential
+
+    def __get_name__(self):
+        return self.__class__.__name__
+
+    def __call__(self, *input, **kwargs):
+        if self.training:
+            return self.train_one_batch(*input, **kwargs)
+        else:
+            return self.forward(*input, **kwargs)
+
+    def save_states(self, fpath, aux_states={}):
+        """Save states.
+
+        Args:
+            fpath: output file path (without the extension)
+            aux_states(dict): values must be Tensor (e.g., the epoch ID,
+                              learning rate, or optimizer states as tensors)
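+
+        Example (illustrative; the path and the aux tensor are hypothetical)::
+
+            epoch = tensor.Tensor((1,))
+            epoch.set_value(10)
+            m.save_states('mymodel_states', aux_states={'epoch': epoch})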
+        """
+        assert not os.path.isfile(fpath), (
+            "Failed to save states, %s is already existed." % fpath)
+
+        states = self.get_states()
+
+        # save states data and attr
+        tensor_dict = {}
+        states_attr = {}
+        for k, v in states.items():
+            assert isinstance(v, tensor.Tensor), "Only tensor state is allowed"
+            tensor_dict[k] = tensor.to_numpy(v)
+            states_attr[k] = {
+                'state_type': self.MODEL_STATE_TYPE,
+                'shape': v.shape,
+                'dtype': v.dtype
+            }
+
+        for k, v in aux_states.items():
+            assert isinstance(v,
+                              tensor.Tensor), "Only tensor aux state is allowed"
+            tensor_dict[k] = tensor.to_numpy(v)
+            states_attr[k] = {
+                'state_type': self.AUX_STATE_TYPE,
+                'shape': v.shape,
+                'dtype': v.dtype
+            }
+
+        # save to files
+        timestamp = time.time()
+        tmp_dir = '/tmp/singa_save_states_%s' % timestamp
+        os.mkdir(tmp_dir)
+        tensor_dict_fp = tmp_dir + self.TENSOR_DICT_FILENAME
+        states_attr_fp = tmp_dir + self.STATES_ATTR_FILENAME
+
+        np.savez(tensor_dict_fp, **tensor_dict)
+
+        with open(states_attr_fp, 'w') as fp:
+            json.dump(states_attr, fp)
+
+        compression = zipfile.ZIP_DEFLATED
+        with zipfile.ZipFile(fpath, mode="w") as zf:
+            zf.write(tensor_dict_fp,
+                     os.path.basename(tensor_dict_fp),
+                     compress_type=compression)
+            zf.write(states_attr_fp,
+                     os.path.basename(states_attr_fp),
+                     compress_type=compression)
+
+        # clean up tmp files
+        os.remove(tensor_dict_fp)
+        os.remove(states_attr_fp)
+        os.rmdir(tmp_dir)
+
+    def load_states(self, fpath):
+        """Load the model states and auxiliary states from disk.
+
+        Usage:
+            m = MyModel()
+            m.compile(...)
+            aux_states = m.load_states('mymodel.zip')
+
+        Args:
+            fpath: input file path (without the extension)
+        Returns:
+            dict: the auxiliary states that were saved via save_states()
+        """
+
+        assert os.path.isfile(fpath), (
+            "Failed to load states, %s is not exist." % fpath)
+
+        timestamp = time.time()
+        tmp_dir = '/tmp/singa_load_states_%s' % timestamp
+        os.mkdir(tmp_dir)
+
+        with zipfile.ZipFile(fpath, 'r') as zf:
+            zf.extractall(tmp_dir)
+
+        tensor_dict_fp = tmp_dir + self.TENSOR_DICT_FILENAME
+        states_attr_fp = tmp_dir + self.STATES_ATTR_FILENAME
+
+        with open(states_attr_fp) as f:
+            states_attr = json.load(f)
+
+        tensor_dict = np.load(tensor_dict_fp)
+
+        # restore singa tensor from numpy
+        model_states = dict()
+        aux_states = dict()
+
+        for k in tensor_dict.files:
+            if states_attr[k]['state_type'] == self.MODEL_STATE_TYPE:
+                model_states[k] = tensor.from_numpy(tensor_dict[k])
+            elif states_attr[k]['state_type'] == self.AUX_STATE_TYPE:
+                aux_states[k] = tensor.from_numpy(tensor_dict[k])
+
+        # restore model_states
+        self.set_states(model_states)
+
+        # clean up tmp files
+        os.remove(tensor_dict_fp)
+        os.remove(states_attr_fp)
+        os.rmdir(tmp_dir)
+        return aux_states
diff --git a/python/singa/net.py b/python/singa/net.py
deleted file mode 100644
index 82933e1..0000000
--- a/python/singa/net.py
+++ /dev/null
@@ -1,485 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# =============================================================================
-"""
-Nerual net class for constructing the nets using layers and providing access
-functions for net info, e.g., parameters.
-
-Example usages::
-
-    from singa import net as ffnet
-    from singa import metric
-    from singa import loss
-    from singa import layer
-    from singa import device
-
-    # create net and add layers
-    net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())
-    net.add(layer.Conv2D('conv1', 32, 5, 1, input_sample_shape=(3,32,32,)))
-    net.add(layer.Activation('relu1'))
-    net.add(layer.MaxPooling2D('pool1', 3, 2))
-    net.add(layer.Flatten('flat'))
-    net.add(layer.Dense('dense', 10))
-
-    # init parameters
-    for p in net.param_values():
-        if len(p.shape) == 0:
-            p.set_value(0)
-        else:
-            p.gaussian(0, 0.01)
-
-    # move net onto gpu
-    dev = device.create_cuda_gpu()
-    net.to_device(dev)
-
-    # training (skipped)
-
-    # do prediction after training
-    x = tensor.Tensor((2, 3, 32, 32), dev)
-    x.uniform(-1, 1)
-    y = net.predict(x)
-    print tensor.to_numpy(y)
-"""
-
-from .proto.model_pb2 import kTrain, kEval
-from __init__ import __version__
-import tensor
-import layer
-import snapshot
-import cPickle as pickle
-
-import os
-
-'''For display training information, e.g L1 value of layer data'''
-verbose = False
-
-
-class FeedForwardNet(object):
-
-    def __init__(self, loss=None, metric=None):
-        '''Representing a feed-forward neural net.
-
-        Args:
-            loss, a Loss instance. Necessary training
-            metric, a Metric instance. Necessary for evaluation
-        '''
-        self.loss = loss
-        self.metric = metric
-        self.layers = []
-        self.src_of_layer = {}
-        self.dst_of_layer = None
-        self.ordered_layers = None
-        self.out_sample_shape_of_layer = {}
-
-    def to_device(self, dev):
-        '''Move the net onto the given device, including
-        all parameters and intermediate data.
-        '''
-        for lyr in self.layers:
-            lyr.to_device(dev)
-
-    def add(self, lyr, src=None):
-        """Append a layer into the layer list.
-
-        This function will get the sample shape from the src layers to setup the
-        newly added layer. For the first layer, it is setup outside. The calling
-        function should ensure the correctness of the layer order. If src is
-        None, the last layer is the src layer. If there are multiple src layers,
-        the src is a list of the src layers.
-
-        Args:
-            lyr (Layer): the layer to be added
-            src (Layer): the source layer of lyr
-        """
-        if src is not None:
-            if isinstance(src, layer.Layer):
-                assert src.has_setup is True, 'the source layer must be set up'
-                self.src_of_layer[lyr.name] = [src]
-            else:
-                assert type(src) == list, 'the src must be a list of layers'
-                self.src_of_layer[lyr.name] = src
-                # print 'merge------', len(src)
-        else:
-            assert len(self.layers) > 0 or lyr.has_setup, \
-                'Source layers are needed to set up this layer'
-            if len(self.layers) > 0:
-                self.src_of_layer[lyr.name] = [self.layers[-1]]
-            else:
-                self.src_of_layer[lyr.name] = []
-        if lyr.has_setup is False:
-            in_shape = []
-            for src in self.src_of_layer[lyr.name]:
-                shapes = self.out_sample_shape_of_layer[src.name]
-                assert len(shapes) > 0, \
-                    'Cannot get output shape of layer %s' % lyr.name
-                in_shape.append(shapes[0])
-                shapes.pop(0)
-            if len(in_shape) == 1:
-                lyr.setup(in_shape[0])
-            else:
-                lyr.setup(in_shape)
-        out_shape = lyr.get_output_sample_shape()
-        if type(out_shape[0]) is tuple:
-            self.out_sample_shape_of_layer[lyr.name] = out_shape
-        else:
-            self.out_sample_shape_of_layer[lyr.name] = [out_shape]
-        self.layers.append(lyr)
-        print(lyr.name, out_shape)
-        return lyr
-
-    def param_values(self):
-        '''Return a list of tensors for all parameters'''
-        values = []
-        layers = self.layers
-        if self.ordered_layers is not None:
-            layers = self.ordered_layers
-        for lyr in layers:
-            values.extend(lyr.param_values())
-        return values
-
-    def param_specs(self):
-        '''Return a list of ParamSpec for all parameters'''
-        specs = []
-        layers = self.layers
-        if self.ordered_layers is not None:
-            layers = self.ordered_layers
-        for lyr in layers:
-            specs.extend(lyr.param_specs)
-        return specs
-
-    def param_names(self):
-        '''Return a list for the names of all params'''
-        return [spec.name for spec in self.param_specs()]
-
-    def train(self, x, y):
-        '''Run BP for one iteration.
-
-        Currently only support nets with a single output layer, and a single
-        loss objective and metric.
-        For multiple outputs (with multiple loss/metric), please manually
-        call forward, compute loss/metric and call backward. backward() is also
-        more memory efficient than this function.
-
-        Args:
-            x: input data, a single input Tensor or a dict: layer name -> Tensor
-            y: label data, a single input Tensor.
-        Returns:
-            gradients of parameters and the loss and metric values.
-        '''
-        out = self.forward(kTrain, x)
-        l = self.loss.forward(kTrain, out, y)
-        g = self.loss.backward()
-        g /= x.shape[0]
-        m = None
-        if self.metric is not None:
-            m = self.metric.evaluate(out, y)
-        grads = []  # store all gradient tensors; memory inefficient
-        for _, _, grad, _ in self.backward(g):
-            grads.extend(grad[::-1])
-        return grads[::-1], (l.l1(), m)
-
-    def evaluate(self, x, y):
-        '''Evaluate the loss and metric of the given data.
-
-        Currently only support nets with a single output layer, and a single
-        loss objective and metric.
-        TODO(wangwei) consider multiple loss objectives and metrics.
-
-        Args:
-            x: input data, a single input Tensor or a dict: layer name -> Tensor
-            y: label data, a single input Tensor.
-        '''
-        out = self.forward(kEval, x)
-        l = None
-        m = None
-        assert self.loss is not None or self.metric is not None,\
-            'Cannot do evaluation, as neither loss nor metic is set'
-        if self.loss is not None:
-            l = self.loss.evaluate(kEval, out, y)
-        if self.metric is not None:
-            m = self.metric.evaluate(out, y)
-        return l, m
-
-    def predict(self, x):
-        '''Forward the input data through each layer to get the values of the
-        output layers.
-
-        Currently only support nets with a single output layer
-
-        Args:
-            x: input data, a single input Tensor or a dict: layer name -> Tensor
-
-        Returns:
-            a single output tensor as the prediction result.
-        '''
-        xx = self.forward(kEval, x)
-        return tensor.softmax(xx)
-
-    def topo_sort(self, layers, src_of_layer):
-        '''Topology sort of layers.
-
-        It would try to preserve the orders of the input layers.
-
-        Args:
-            layers: a list of layers; the layers from the output of the same
-                layer (e.g., slice layer) should be added by users in correct
-                order; This function would not change their order.
-            src_of_layer: a dictionary: src layer name -> a list of src layers
-
-        Returns:
-            A list of ordered layer
-        '''
-        order = []
-        while len(order) < len(layers):
-            for lyr in self.layers:
-                if lyr not in order:
-                    for src in src_of_layer[lyr.name]:
-                        if src not in order:
-                            break
-                    order.append(lyr)
-        return order
-
-    def forward(self, flag, x, output=[]):
-        '''Forward the input(s) through every layer.
-
-        Args:
-            flag: True for training; False for evaluation; could also be
-                model_pb2.kTrain or model_pb2.kEval, or other values for future
-                use.
-            x: a single SINGA tensor if there is a single input; otherwise, a
-                dictionary: layer name-> singa tensor, for each layer accepting
-                input data. Do not associate a layer with input tensor if it is
-                connected from another layer. For such case, use a Dummy() layer
-                to accept the input data and connect the dummy layer to this
-                layer.
-            output(list): a list of layer names whose output would be returned
-                in addition to the default output.
-
-        Returns:
-            if there is only one output layer and output arg is empty, return
-                the result from the single output layer; otherwise, return a
-                dictionary: layer name -> output tensor(s)
-        '''
-        if self.ordered_layers is None:
-            self.ordered_layers = self.topo_sort(self.layers, self.src_of_layer)
-        if type(x) is dict:
-            input_of_layer = x
-        else:
-            assert isinstance(x, tensor.Tensor), \
-                'The inputs of a net should be dict or a single tensor'
-            input_of_layer = {self.ordered_layers[0].name: x}
-        output_of_layer = {}  # outputs generated by each layer
-        ret = {}  # outputs to return
-        for cur in self.ordered_layers:
-            inputs = []
-            if cur.name in input_of_layer:
-                if type(input_of_layer[cur.name]) is list:
-                    inputs.extend(input_of_layer[cur.name])
-                else:
-                    inputs.append(input_of_layer[cur.name])
-            srcs = self.src_of_layer[cur.name]
-            disp_src = ''
-            for src in srcs:
-                outs = output_of_layer[src.name]
-                if type(outs) == list:
-                    assert len(outs) > 0, \
-                            'the output from layer %s is empty' % src.name
-                    inputs.append(outs[0])
-                    outs.pop(0)
-                    if len(outs) == 0:
-                        output_of_layer.pop(src.name)
-                else:
-                    inputs.append(outs)
-                    output_of_layer[cur.name] = []
-                    output_of_layer.pop(src.name)
-            if len(inputs) == 1:
-                inputs = inputs[0]
-            out = cur.forward(flag, inputs)
-            if verbose:
-                disp_src = '+'.join([src.name for src in srcs])
-                disp_src += '-->' + cur.name
-                if type(out) is list:
-                    print('%s: %s' % (disp_src,
-                                      ' '.join([str(o.l1()) for o in out])))
-                else:
-                    print('%s: %f' % (disp_src, out.l1()))
-            output_of_layer[cur.name] = out
-            if cur.name in output:
-                ret[cur.name] = out
-            # print lyr.name, x.l1()
-        # print output_of_layer
-        ret.update(output_of_layer)
-        if len(ret) == 1:
-            return ret.values()[0]
-        else:
-            return ret
-
-    def backward(self, dy, output=[]):
-        '''Run back-propagation after forward-propagation.
-
-        Args:
-            dy: a single tensor if there is a single loss function; otherwise,
-                a dictionary maps the name of the layer connecting to the loss
-                function -> gradient from the loss function. Do not associate a
-                layer with gradient tensor if it is connecting to another layer.
-                For such case, connect this layer to a Dummy() layer and use the
-                dummy layer to accept the gradient.
-            output(list): a list of layer names whose output gradient would be
-                returned in addition to the param gradient
-
-        Returns:
-                a geneartor iterator that generates
-                (param_names, param_values, param_grads, layer_grads) after
-                processing each layer h, where the first three lists are for h
-                and the last item is a dictionary which maps
-                layer name -> its output gradient tensor(s). At the end of this
-                function, the key set includes all layers in the output arg.
-        '''
-        if self.dst_of_layer is None:
-            self.dst_of_layer = {}
-            for cur in self.layers:
-                self.dst_of_layer[cur.name] = []
-            for cur in self.ordered_layers[1:]:
-                srcs = self.src_of_layer[cur.name]
-                for src in srcs:
-                    self.dst_of_layer[src.name].append(cur)
-        output_of_layer = {}  # outputs generated by each layer
-        ret = {}  # outputs to return
-        if type(dy) is dict:
-            input_of_layer = dy
-        else:
-            assert isinstance(dy, tensor.Tensor), \
-                'The inputs of a net should be dict or a single tensor'
-            input_of_layer = {self.ordered_layers[-1].name: dy}
-        for cur in reversed(self.ordered_layers):
-            inputs = []
-            if cur.name in input_of_layer:
-                if type(input_of_layer[cur.name]) is list:
-                    inputs.extend(input_of_layer[cur.name])
-                else:
-                    inputs.append(input_of_layer[cur.name])
-            for dst in self.dst_of_layer[cur.name]:
-                outputs = output_of_layer[dst.name]
-                if type(outputs) == list:
-                    assert len(outputs) > 0, \
-                            'the gradient from layer %s is empty' % dst.name
-                    inputs.append(outputs[0])
-                    outputs.pop(0)
-                else:
-                    inputs.append(outputs)
-                    output_of_layer[dst.name] = []
-                # del output_of_layer[dst.name]
-            if len(inputs) == 1:
-                inputs = inputs[0]
-            outs, pgrads = cur.backward(kTrain, inputs)
-            if verbose:
-                disp_src = '+'.join(
-                        [dst.name for dst in self.dst_of_layer[cur.name]])
-                disp_src += '-->' + cur.name
-                if type(outs) is list:
-                    print('%s: %s' % (disp_src,
-                                      ' '.join([str(o.l1()) for o in outs])))
-                else:
-                    print('%s: %f' % (disp_src, outs.l1()))
-            if type(outs) is list:
-                output_of_layer[cur.name] = outs[::-1]
-            else:
-                output_of_layer[cur.name] = outs
-            if cur.name in output:
-                ret[cur.name] = outs
-            # ret.update(output_of_layer)
-            yield (cur.param_names(), cur.param_values(), pgrads, ret)
-
-    def save(self, f, buffer_size=10, use_pickle=False):
-        '''Save model parameters using io/snapshot.
-
-        Args:
-            f: file name
-            buffer_size: size (MB) of the IO, default setting is 10MB; Please
-                make sure it is larger than any single parameter object.
-            use_pickle(Boolean): if true, it would use pickle for dumping;
-                otherwise, it would use protobuf for serialization, which uses
-                less space.
-        '''
-        if use_pickle:
-            params = {}
-            # since SINGA>=1.1.1  (1101)
-            params['SINGA_VERSION'] = __version__
-            for (name, val) in zip(self.param_names(), self.param_values()):
-                val.to_host()
-                params[name] = tensor.to_numpy(val)
-            if not f.endswith('.pickle'):
-                f = f + '.pickle'
-            with open(f, 'wb') as fd:
-                pickle.dump(params, fd)
-        else:
-            if f.endswith('.bin'):
-                f = f[0:-4]
-            sp = snapshot.Snapshot(f, True, buffer_size)
-            for (name, val) in zip(self.param_names(), self.param_values()):
-                val.to_host()
-                sp.write(name, val)
-
-    def load(self, f, buffer_size=10, use_pickle=False):
-        '''Load model parameters using io/snapshot.
-
-        Please refer to the argument description in save().
-        '''
-        version = 0
-
-        def get_name(name):
-            if version < 1101:
-                idx = name.rfind('/')
-                assert idx > 0, '/ must be in the parameter name'
-                name = name[:idx] + '_' + name[idx+1:]
-            return name
-
-        if use_pickle:
-            print('NOTE: If your model was saved using Snapshot, '
-                  'then set use_pickle=False for loading it')
-            if not os.path.exists(f):
-                # guess the correct path
-                if f.endswith('.pickle'):
-                    f = f[0:-7]
-                else:
-                    f = f + '.pickle'
-            assert os.path.exists(f), 'file not exists %s w/o .pickle' % f
-            with open(f, 'rb') as fd:
-                params = pickle.load(fd)
-        else:
-            print('NOTE: If your model was saved using pickle, '
-                  'then set use_pickle=True for loading it')
-            if f.endswith('.bin'):
-                f = f[0:-4]
-            sp = snapshot.Snapshot(f, False, buffer_size)
-            params = sp.read()
-        if 'SINGA_VERSION' in params:
-            version = params['SINGA_VERSION']
-        for name, val in zip(self.param_names(), self.param_values()):
-            name = get_name(name)
-            if name not in params:
-                print('Param: %s missing in the checkpoint file' % name)
-                continue
-            try:
-                if isinstance(params[name], tensor.Tensor):
-                    val.copy_data(params[name])
-                else:
-                    val.copy_from_numpy(params[name])
-            except AssertionError as err:
-                print('Error from copying values for param: %s' % name)
-                print('shape of param vs checkpoint',
-                      val.shape, params[name].shape)
-                raise err
diff --git a/python/singa/opt.py b/python/singa/opt.py
new file mode 100755
index 0000000..8eda563
--- /dev/null
+++ b/python/singa/opt.py
@@ -0,0 +1,1085 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+'''This module includes a set of optimizers for updating model parameters.
+It replaces the old optimizers from optimizer.py'''
+
+from singa import tensor
+from singa.tensor import Tensor
+from singa import autograd
+from . import singa_wrap as singa
+
+from deprecated import deprecated
+
+
+class DecayScheduler:
+    # to be used for decaying learning rate or regularization coefficient or momentum, etc.
+    def __init__(self, init_value):
+        self.init_value = init_value
+
+    def __call__(self, step):
+        assert isinstance(step, Tensor)
+        return self.call(step)
+
+    def call(self, step) -> Tensor:
+        # step is a Tensor with a single scalar value
+        # return the current value as a Tensor
+        raise NotImplementedError
+
+
+class Constant(DecayScheduler):
+
+    def call(self, step: Tensor) -> Tensor:
+        # TODO should be an in-place operator
+        ret = Tensor((1,), step.device)
+        ret.set_value(self.init_value)
+        return ret
+
+
+class ExponentialDecay(DecayScheduler):
+
+    def __init__(self, init_value, decay_steps, decay_rate, staircase=False):
+        super(ExponentialDecay, self).__init__(init_value)
+
+        self.decay_steps = decay_steps
+        self.decay_rate = decay_rate
+        self.staircase = staircase
+
+    def call(self, step):
+        if self.staircase:
+            s = step // self.decay_steps
+        else:
+            s = step / self.decay_steps
+        ret = Tensor((1,), s.device)
+        ret.set_value(self.decay_rate)
+        return self.init_value * tensor.pow(ret, s)
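+
+# A worked example of the schedule above (illustrative values): with
+# init_value=0.1, decay_steps=1000, decay_rate=0.5 and staircase=True,
+# step 2500 gives s = 2500 // 1000 = 2, so the value is 0.1 * 0.5**2 = 0.025;
+# without staircase, s = 2.5 and the value decays smoothly to about 0.0177.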
+
+
+class Optimizer(object):
+    """Base optimizer.
+
+    Args:
+        lr (float or DecayScheduler): the learning rate, either a constant
+            value or a scheduler that decays it over steps.
+    """
+
+    def __init__(self, lr):
+        # init lr(could be a constant scalar or a learning rate scheduler)
+        if type(lr) == float or type(lr) == int:
+            self.lr = Constant(lr)
+        elif isinstance(lr, DecayScheduler):
+            self.lr = lr
+        else:
+            raise TypeError("Wrong learning rate type")
+
+        # init step counter
+        # TODO change type to int32
+        self.step_counter = Tensor((1,), dtype=tensor.float32)
+        self.step_counter.set_value(0)
+        self.lr_value = self.lr(self.step_counter)
+
+    def get_states(self):
+        # skip DecayScheduler as it does not have persistent states
+        return {'step_counter': tensor.to_numpy(self.step_counter)[0]}
+
+    def set_states(self, states):
+        self.step_counter = Tensor((1,))
+        self.step_counter.set_value(states['step_counter'])
+        self.lr_value = self.lr(self.step_counter)
+
+    def __call__(self, loss):
+        self.call(loss)
+        self.step()
+
+    def call(self, loss):
+        for p, g in autograd.backward(loss):
+            if p.name is None:
+                p.name = id(p)
+            self.apply(p.name, p, g)
+
+    def step(self):
+        """To increment the step counter and update the lr"""
+        self.step_counter.data += 1
+        lr_value = self.lr(self.step_counter)
+        self.lr_value.copy_from(lr_value)
+
+    def apply(self, param_name, param_value, param_grad):
+        """Performs a single optimization step.
+
+        Args:
+                param_name(String): the name of the param
+                param_value(Tensor): param values to be updated in-place
+                param_grad(Tensor): param gradients; the values may be updated
+                        in this function; do not use them afterwards
+        """
+        raise NotImplementedError
+
+    @deprecated(
+        reason=
+        "Update is deprecated, use apply() to do update, refer to apply for more details."
+    )
+    def update(self, param, grad):
+        """Update the param values with given gradients.
+
+        Args:
+            param(Tensor): param values to be updated in-place
+            grad(Tensor): param gradients; the values may be updated
+                    in this function; do not use it anymore
+        """
+        if param.name is None:
+            param.name = id(param)
+        self.apply(param.name, param, grad)
+
+    def device_check(self, *inputs):
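+        # move every input tensor onto the device of the first input,
+        # temporarily disabling graph buffering so the copies run eagerly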
+        flag = inputs[0].device.graph_enabled()
+        inputs[0].device.EnableGraph(False)
+        x_device = inputs[0].device
+        x_dev_id = x_device.id()
+        for var in inputs:
+            if var.device.id() != x_dev_id:
+                var.to_device(x_device)
+        inputs[0].device.EnableGraph(flag)
+
+    @deprecated(
+        reason=
+        "backward_and_update is deprecated, use __call__() to do update, refer to __call__ for more details."
+    )
+    def backward_and_update(self, loss):
+        """Performs backward propagation from the loss and parameter update.
+
+        From the loss, it performs backward propagation to get the gradients
+        and do the parameter update.
+
+        Args:
+                loss(Tensor): loss is the objective function of the deep learning model
+                optimization, e.g. for classification problem it can be the output of the
+                softmax_cross_entropy function.
+        """
+        self.__call__(loss)
+
+
+class SGD(Optimizer):
+    """Implements stochastic gradient descent (optionally with momentum).
+
+    Nesterov momentum is based on the formula from `On the importance of initialization and momentum in deep learning`__.
+
+    Args:
+        lr(float): learning rate
+        momentum(float, optional): momentum factor(default: 0)
+        weight_decay(float, optional): weight decay(L2 penalty)(default: 0)
+        dampening(float, optional): dampening for momentum(default: 0)
+        nesterov(bool, optional): enables Nesterov momentum(default: False)
+
+    Typical usage example:
+        >>> from singa import opt
+        >>> optimizer = opt.SGD(lr=0.1, momentum=0.9)
+        >>> optimizer.update(param, grad)
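+        >>> # lr may also be given as a DecayScheduler (illustrative):
+        >>> optimizer = opt.SGD(lr=opt.ExponentialDecay(0.1, 1000, 0.5))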
+
+    __ http://www.cs.toronto.edu/~hinton/absps/momentum.pdf
+
+    .. note::
+        The implementation of SGD with Momentum / Nesterov subtly differs from
+        Sutskever et al. and implementations in some other frameworks.
+
+        Considering the specific case of Momentum, the update can be written as
+
+        .. math::
+                  v = \rho * v + g \\
+                  p = p - lr * v
+
+        where p, g, v and :math:`\rho` denote the parameters, gradient,
+        velocity, and momentum respectively.
+
+        This is in contrast to Sutskever et al. and
+        other frameworks which employ an update of the form
+
+        .. math::
+             v = \rho * v + lr * g \\
+             p = p - v
+
+        The Nesterov version is analogously modified.
+    """
+
+    def __init__(self,
+                 lr=0.1,
+                 momentum=0,
+                 dampening=0,
+                 weight_decay=0,
+                 nesterov=False):
+        super(SGD, self).__init__(lr)
+
+        # init momentum
+        if type(momentum) == float or type(momentum) == int:
+            if momentum < 0.0:
+                raise ValueError("Invalid momentum value: {}".format(momentum))
+            self.momentum = Constant(momentum)
+        elif isinstance(momentum, DecayScheduler):
+            self.momentum = momentum
+            momentum = momentum.init_value
+        else:
+            raise TypeError("Wrong momentum type")
+        self.mom_value = self.momentum(self.step_counter)
+
+        # init dampening
+        if type(dampening) == float or type(dampening) == int:
+            self.dampening = Constant(dampening)
+        elif isinstance(dampening, DecayScheduler):
+            self.dampening = dampening
+            dampening = dampening.init_value
+        else:
+            raise TypeError("Wrong dampening type")
+        self.dam_value = self.dampening(self.step_counter)
+
+        # init weight_decay
+        if type(weight_decay) == float or type(weight_decay) == int:
+            if weight_decay < 0.0:
+                raise ValueError(
+                    "Invalid weight_decay value: {}".format(weight_decay))
+            self.weight_decay = Constant(weight_decay)
+        elif isinstance(weight_decay, DecayScheduler):
+            self.weight_decay = weight_decay
+        else:
+            raise TypeError("Wrong weight_decay type")
+        self.decay_value = self.weight_decay(self.step_counter)
+
+        # init other params
+        self.nesterov = nesterov
+        self.moments = dict()
+
+        # check value
+        if nesterov and (momentum <= 0 or dampening != 0):
+            raise ValueError(
+                "Nesterov momentum requires a momentum and zero dampening")
+
+    def apply(self, param_name, param_value, param_grad):
+        """Performs a single optimization step.
+
+        Args:
+                param_name(String): the name of the param
+                param_value(Tensor): param values to be updated in-place
+                param_grad(Tensor): param gradients; the values may be updated
+                        in this function; do not use them afterwards
+        """
+        assert param_value.shape == param_grad.shape, ("shape mismatch",
+                                                       param_value.shape,
+                                                       param_grad.shape)
+        self.device_check(param_value, self.step_counter, self.lr_value,
+                          self.mom_value, self.dam_value, self.decay_value)
+
+        # TODO add branch operator
+        # if self.decay_value != 0:
+        if self.weight_decay.init_value != 0:
+            singa.Axpy(self.decay_value.data, param_value.data, param_grad.data)
+
+        if self.momentum.init_value != 0:
+            if param_name not in self.moments:
+                flag = param_value.device.graph_enabled()
+                param_value.device.EnableGraph(False)
+                self.moments[param_name] = tensor.zeros_like(param_value)
+                param_value.device.EnableGraph(flag)
+
+            buf = self.moments[param_name]
+            buf *= self.mom_value
+            alpha = 1.0 - self.dam_value
+            singa.Axpy(alpha.data, param_grad.data, buf.data)
+
+            if self.nesterov:
+                singa.Axpy(self.mom_value.data, buf.data, param_grad.data)
+            else:
+                param_grad = buf
+
+        minus_lr = 0.0 - self.lr_value
+        singa.Axpy(minus_lr.data, param_grad.data, param_value.data)
+
+    def step(self):
+        # increment step counter, lr and moment
+        super().step()
+        mom_value = self.momentum(self.step_counter)
+        dam_value = self.dampening(self.step_counter)
+        decay_value = self.weight_decay(self.step_counter)
+        self.mom_value.copy_from(mom_value)
+        self.dam_value.copy_from(dam_value)
+        self.decay_value.copy_from(decay_value)
+
+    def get_states(self):
+        states = super().get_states()
+        if self.mom_value > 0:
+            states[
+                'moments'] = self.moments  # a dict for 1st order moments tensors
+        return states
+
+    def set_states(self, states):
+        super().set_states(states)
+        if 'moments' in states:
+            self.moments = states['moments']
+            self.mom_value = self.momentum(self.step_counter)
+
+
+class RMSProp(Optimizer):
+    '''RMSProp optimizer.
+
+    See the base Optimizer for all constructor args.
+
+    Args:
+        rho (float): float within [0, 1]
+        epsilon (float): small value for preventing numeric error
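+
+    Typical usage example (illustrative hyperparameters):
+        >>> from singa import opt
+        >>> optimizer = opt.RMSProp(lr=0.01, rho=0.9, epsilon=1e-8)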
+    '''
+
+    def __init__(self, lr=0.1, rho=0.9, epsilon=1e-8, weight_decay=0):
+        super(RMSProp, self).__init__(lr)
+
+        # init weight_decay
+        if type(weight_decay) == float or type(weight_decay) == int:
+            if weight_decay < 0.0:
+                raise ValueError(
+                    "Invalid weight_decay value: {}".format(weight_decay))
+            self.weight_decay = Constant(weight_decay)
+        elif isinstance(weight_decay, DecayScheduler):
+            self.weight_decay = weight_decay
+        else:
+            raise TypeError("Wrong weight_decay type")
+        self.decay_value = self.weight_decay(self.step_counter)
+
+        # init rho
+        if type(rho) == float or type(rho) == int:
+            self.rho = Constant(rho)
+        elif isinstance(rho, DecayScheduler):
+            self.rho = rho
+        else:
+            raise TypeError("Wrong rho type")
+        self.rho_value = self.rho(self.step_counter)
+
+        # init epsilon
+        if type(epsilon) == float or type(epsilon) == int:
+            self.epsilon = Constant(epsilon)
+        elif isinstance(epsilon, DecayScheduler):
+            self.epsilon = epsilon
+        else:
+            raise TypeError("Wrong epsilon type")
+        self.epsilon_value = self.epsilon(self.step_counter)
+
+        # init running average
+        self.running_average = dict()
+
+    def apply(self, param_name, param_value, param_grad):
+        """Performs a single optimization step.
+
+        Args:
+                param_name(String): the name of the param
+                param_value(Tensor): param values to be updated in-place
+                param_grad(Tensor): param gradients; the values may be updated
+                        in this function; do not use them afterwards
+        """
+        assert param_value.shape == param_grad.shape, ("shape mismatch",
+                                                       param_value.shape,
+                                                       param_grad.shape)
+        self.device_check(param_value, self.step_counter, self.lr_value,
+                          self.rho_value, self.epsilon_value, self.decay_value)
+
+        # if self.decay_value != 0:
+        if self.weight_decay.init_value != 0:
+            singa.Axpy(self.decay_value.data, param_value.data, param_grad.data)
+
+        if param_name not in self.running_average:
+            flag = param_value.device.graph_enabled()
+            param_value.device.EnableGraph(False)
+            self.running_average[param_name] = tensor.zeros_like(param_value)
+            param_value.device.EnableGraph(flag)
+
+        # running_average = running_average * rho + param_grad * param_grad * (1 - rho)
+        # param_value = param_value - lr * param_grad / sqrt(running_average + epsilon)
+
+        self.running_average[param_name] *= self.rho_value
+
+        tmp1 = singa.Square(param_grad.data)
+        tmp2 = 1.0 - self.rho_value
+        singa.Axpy(tmp2.data, tmp1, self.running_average[param_name].data)
+
+        minus_lr = 0.0 - self.lr_value
+        tmp3 = self.running_average[param_name] + self.epsilon_value
+        tmp3 = singa.Sqrt(tmp3.data)
+        tmp3 = singa.__div__(param_grad.data, tmp3)
+
+        singa.Axpy(minus_lr.data, tmp3, param_value.data)
+
+    def step(self):
+        # increment step counter, lr and moment
+        super().step()
+        decay_value = self.weight_decay(self.step_counter)
+        rho_value = self.rho(self.step_counter)
+        epsilon_value = self.epsilon(self.step_counter)
+        self.decay_value.copy_from(decay_value)
+        self.rho_value.copy_from(rho_value)
+        self.epsilon_value.copy_from(epsilon_value)
+
+    def get_states(self):
+        states = super().get_states()
+        states['running_average'] = self.running_average
+        return states
+
+    def set_states(self, states):
+        super().set_states(states)
+        if 'running_average' in states:
+            self.running_average = states['running_average']
+
+
+class AdaGrad(Optimizer):
+    '''AdaGrad optimizer.
+
+    See the base Optimizer for all constructor args.
+
+    Args:
+        epsilon (float): small number for preventing numeric error.
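+
+    Typical usage example (illustrative hyperparameters):
+        >>> from singa import opt
+        >>> optimizer = opt.AdaGrad(lr=0.01, epsilon=1e-8)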
+    '''
+
+    def __init__(self, lr=0.1, epsilon=1e-8, weight_decay=0):
+        super(AdaGrad, self).__init__(lr)
+
+        # init weight_decay
+        if type(weight_decay) == float or type(weight_decay) == int:
+            if weight_decay < 0.0:
+                raise ValueError(
+                    "Invalid weight_decay value: {}".format(weight_decay))
+            self.weight_decay = Constant(weight_decay)
+        elif isinstance(weight_decay, DecayScheduler):
+            self.weight_decay = weight_decay
+        else:
+            raise TypeError("Wrong weight_decay type")
+        self.decay_value = self.weight_decay(self.step_counter)
+
+        # init epsilon
+        if type(epsilon) == float or type(epsilon) == int:
+            self.epsilon = Constant(epsilon)
+        elif isinstance(epsilon, DecayScheduler):
+            self.epsilon = epsilon
+        else:
+            raise TypeError("Wrong epsilon type")
+        self.epsilon_value = self.epsilon(self.step_counter)
+
+        # init history
+        self.history = dict()
+
+    def apply(self, param_name, param_value, param_grad):
+        """Performs a single optimization step.
+
+        Args:
+                param_name(String): the name of the param
+                param_value(Tensor): param values to be updated in-place
+                param_grad(Tensor): param gradients; the values may be updated
+                        in this function; do not use them afterwards
+        """
+        assert param_value.shape == param_grad.shape, ("shape mismatch",
+                                                       param_value.shape,
+                                                       param_grad.shape)
+        self.device_check(param_value, self.step_counter, self.lr_value,
+                          self.epsilon_value, self.decay_value)
+
+        # if self.decay_value != 0:
+        if self.weight_decay.init_value != 0:
+            singa.Axpy(self.decay_value.data, param_value.data, param_grad.data)
+
+        if param_name not in self.history:
+            flag = param_value.device.graph_enabled()
+            param_value.device.EnableGraph(False)
+            self.history[param_name] = tensor.zeros_like(param_value)
+            param_value.device.EnableGraph(flag)
+
+        # history = history + param_grad * param_grad
+        # param_value = param_value - lr * param_grad / sqrt(history + epsilon)
+
+        tmp = self.history[param_name].data
+        tmp += singa.Square(param_grad.data)
+
+        minus_lr = 0.0 - self.lr_value
+        tmp = self.history[param_name] + self.epsilon_value
+        tmp = singa.Sqrt(tmp.data)
+        tmp = singa.__div__(param_grad.data, tmp)
+        singa.Axpy(minus_lr.data, tmp, param_value.data)
+
+    def step(self):
+        # increment step counter, lr and moment
+        super().step()
+        decay_value = self.weight_decay(self.step_counter)
+        epsilon_value = self.epsilon(self.step_counter)
+        self.decay_value.copy_from(decay_value)
+        self.epsilon_value.copy_from(epsilon_value)
+
+    def get_states(self):
+        states = super().get_states()
+        states['history'] = self.history  # a dict for 1st order moments tensors
+        return states
+
+    def set_states(self, states):
+        super().set_states(states)
+        if 'history' in states:
+            self.history = states['history']
+
+
+class Adam(Optimizer):
+    '''Adam optimizer.
+
+    See the base Optimizer for all constructor args.
+
+    Args:
+        beta_1(float): coefficient of momentum
+        beta_2(float): coefficient of aggregated squared gradient
+        epsilon (float): small value for preventing numeric error
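+
+    Typical usage example (illustrative hyperparameters):
+        >>> from singa import opt
+        >>> optimizer = opt.Adam(lr=0.001, beta_1=0.9, beta_2=0.999)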
+    '''
+
+    def __init__(self,
+                 lr=0.1,
+                 beta_1=0.9,
+                 beta_2=0.999,
+                 epsilon=1e-8,
+                 weight_decay=0):
+        super(Adam, self).__init__(lr)
+
+        # init weight_decay
+        if type(weight_decay) == float or type(weight_decay) == int:
+            if weight_decay < 0.0:
+                raise ValueError(
+                    "Invalid weight_decay value: {}".format(weight_decay))
+            self.weight_decay = Constant(weight_decay)
+        elif isinstance(weight_decay, DecayScheduler):
+            self.weight_decay = weight_decay
+        else:
+            raise TypeError("Wrong weight_decay type")
+        self.decay_value = self.weight_decay(self.step_counter)
+
+        # init beta_1
+        if type(beta_1) == float or type(beta_1) == int:
+            self.beta_1 = Constant(beta_1)
+        elif isinstance(beta_1, DecayScheduler):
+            self.beta_1 = beta_1
+        else:
+            raise TypeError("Wrong beta_1 type")
+        self.beta_1_value = self.beta_1(self.step_counter)
+
+        # init beta_2
+        if type(beta_2) == float or type(beta_2) == int:
+            self.beta_2 = Constant(beta_2)
+        elif isinstance(beta_2, DecayScheduler):
+            self.beta_2 = beta_2
+        else:
+            raise TypeError("Wrong beta_2 type")
+        self.beta_2_value = self.beta_2(self.step_counter)
+
+        # init epsilon
+        if type(epsilon) == float or type(epsilon) == int:
+            self.epsilon = Constant(epsilon)
+        elif isinstance(epsilon, DecayScheduler):
+            self.epsilon = epsilon
+        else:
+            raise TypeError("Wrong epsilon type")
+        self.epsilon_value = self.epsilon(self.step_counter)
+
+        # init m and v
+        self.m = dict()
+        self.v = dict()
+
+    def apply(self, param_name, param_value, param_grad):
+        """Performs a single optimization step.
+
+        Args:
+                param_name(String): the name of the param
+                param_value(Tensor): param values to be updated in-place
+                param_grad(Tensor): param gradients; the values may be updated
+                        in this function; do not use them afterwards
+        """
+        assert param_value.shape == param_grad.shape, ("shape mismatch",
+                                                       param_value.shape,
+                                                       param_grad.shape)
+        self.device_check(param_value, self.step_counter, self.lr_value,
+                          self.beta_1_value, self.beta_2_value,
+                          self.epsilon_value, self.decay_value)
+
+        # if self.decay_value != 0:
+        if self.weight_decay.init_value != 0:
+            singa.Axpy(self.decay_value.data, param_value.data, param_grad.data)
+
+        if param_name not in self.m:
+            flag = param_value.device.graph_enabled()
+            param_value.device.EnableGraph(False)
+            self.m[param_name] = tensor.zeros_like(param_value)
+            self.v[param_name] = tensor.zeros_like(param_value)
+            param_value.device.EnableGraph(flag)
+
+        # overall steps
+        # m := beta_1 * m + (1 - beta_1) * grad
+        # v := beta_2 * v + (1 - beta_2) * grad * grad
+        # m_norm = m / (1 - beta_1 ^ step)
+        # v_norm = v / (1 - beta_2 ^ step)
+        # param := param - (lr * m_norm) / ( sqrt(v_norm) + epsilon) )
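+        #
+        # worked example of the bias correction (illustrative): at step 1 with
+        # beta_1 = 0.9, m = 0.1 * grad, and m_norm = m / (1 - 0.9^1) = grad,
+        # i.e. the zero-initialization bias of m is removed on the first step.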
+
+        step = self.step_counter + 1.0
+
+        # m := beta_1 * m + (1 - beta_1) * grad
+        tmp = 1.0 - self.beta_1_value
+        self.m[param_name] *= self.beta_1_value
+        singa.Axpy(tmp.data, param_grad.data, self.m[param_name].data)
+
+        # v := beta_2 * v + (1 - beta_2) * grad * grad
+        tmp = 1.0 - self.beta_2_value
+        self.v[param_name] *= self.beta_2_value
+        singa.Axpy(tmp.data, singa.Square(param_grad.data),
+                   self.v[param_name].data)
+
+        # m_norm = m / (1 - beta_1 ^ step)
+        tmp = tensor.pow(self.beta_1_value, step)
+        tmp = 1.0 - tmp
+        m_norm = self.m[param_name] / tmp
+
+        # v_norm = v / (1 - beta_2 ^ step)
+        tmp = tensor.pow(self.beta_2_value, step)
+        tmp = 1.0 - tmp
+        v_norm = self.v[param_name] / tmp
+
+        # param := param - (lr * m_norm) / ( sqrt(v_norm) + epsilon) )
+        a = tensor.sqrt(v_norm) + self.epsilon_value
+        tmp = m_norm / a
+
+        minus_lr = 0.0 - self.lr_value
+        singa.Axpy(minus_lr.data, tmp.data, param_value.data)
+
+    def step(self):
+        # increment step counter, lr and moment
+        super().step()
+        decay_value = self.weight_decay(self.step_counter)
+        beta_1_value = self.beta_1(self.step_counter)
+        beta_2_value = self.beta_2(self.step_counter)
+        self.decay_value.copy_from(decay_value)
+        self.beta_1_value.copy_from(beta_1_value)
+        self.beta_2_value.copy_from(beta_2_value)
+
+    def get_states(self):
+        states = super().get_states()
+        states['m'] = self.m  # a dict of 1st-order moment tensors
+        states['v'] = self.v  # a dict of 2nd-order moment tensors
+        return states
+
+    def set_states(self, states):
+        super().set_states(states)
+        if 'm' in states:
+            self.m = states['m']
+        if 'v' in states:
+            self.v = states['v']
+
+
+class DistOpt(object):
+    """The class is designed to wrap an optimizer to do distributed training.
+
+    This class is used to wrap an optimizer object to perform distributed training based
+    on multiprocessing. Each process has an individual rank, which indicates which
+    GPU that process is using. The training data is partitioned so that each process
+    can evaluate a sub-gradient based on its partition of the training data. Once the
+    sub-gradient is calculated on each process, the overall stochastic gradient is
+    obtained by all-reducing the sub-gradients evaluated by all processes. The
+    all-reduce operation is supported by the NVIDIA Collective Communications
+    Library (NCCL).
+
+    Args:
+        opt(Optimizer): The optimizer to be wrapped.
+        nccl_id(NcclIdHolder): an nccl id holder object for a unique communication id
+        local_rank(int): local rank of a process on the current node
+        world_size(int): total number of processes
+        buffSize(int): the buffer size, in number of elements, used by the nccl communicator
+
+    Attributes:
+        world_size(int): total number of processes
+        local_rank(int): local rank of a process on the current node
+        global_rank(int): global rank of a process
+
+    Typical usage example:
+        >>> from singa import opt
+        >>> sgd = opt.SGD(lr=0.1, momentum=0.9)
+        >>> optimizer = opt.DistOpt(sgd)
+
+    """
+
+    def __init__(self,
+                 opt=SGD(),
+                 nccl_id=None,
+                 local_rank=None,
+                 world_size=None,
+                 buffSize=4194304):
+        self.opt = opt
+        if nccl_id is None:
+            # constructor for applications using MPI
+            self.communicator = singa.Communicator(buffSize)
+        else:
+            # constructor for applications using the python multiprocessing module
+            self.communicator = singa.Communicator(local_rank, world_size,
+                                                   nccl_id, buffSize)
+
+        self.world_size = self.communicator.world_size
+        self.local_rank = self.communicator.local_rank
+        self.global_rank = self.communicator.global_rank
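+        # Construction sketch for the multiprocessing mode (illustrative;
+        # `sgd` and `rank` are names the caller would define, and the
+        # NcclIdHolder is the one mentioned in the class docstring):
+        #
+        #   nccl_id = singa.NcclIdHolder()
+        #   dist_opt = DistOpt(sgd, nccl_id=nccl_id, local_rank=rank,
+        #                      world_size=2)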
+
+    def __call__(self, loss):
+        self.backward_and_update(loss)
+
+    def update(self, param, grad):
+        """Performs a single optimization step.
+
+        Args:
+                param(Tensor): param values to be updated
+                grad(Tensor): param gradients
+        """
+        grad /= self.world_size
+        self.opt.update(param, grad)
+
+    def all_reduce(self, tensor):
+        """Performs all reduce of a tensor for distributed training.
+
+        Args:
+                tensor(Tensor): a tensor to be all-reduced
+        """
+        self.communicator.synch(tensor)
+
+    def fused_all_reduce(self, tensor, send=True):
+        """Performs all reduce of the tensors after fusing them in a buffer.
+
+        Args:
+                tensor(List of Tensors): a list of tensors to be all-reduced
+                send(bool): when send is False, the tensor won't be sent to the
+                target device immediately; it will be copied to the buffer first
+        """
+        tensor = singa.VecTensor(tensor)
+        self.communicator.fusedSynch(tensor, send)
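+    # Usage sketch (illustrative, mirroring backward_and_update below): small
+    # tensors are first copied into the fusion buffer with send=False, then
+    # flushed together in a single fused all-reduce:
+    #
+    #   dist_opt.fused_all_reduce([g.data], send=False)  # buffer only
+    #   dist_opt.fused_all_reduce(glist)                 # fuse and all-reduce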
+
+    def all_reduce_half(self, tensor):
+        """Performs all reduce of a tensor after converting to FP16.
+
+        Args:
+                tensor(Tensor): a tensor to be all-reduced
+        """
+        self.communicator.synchHalf(tensor)
+
+    def fused_all_reduce_half(self, tensor, send=True):
+        """Performs all reduce of the tensors after fusing and converting them to FP16.
+
+        Args:
+                tensor(List of Tensors): a list of tensors to be all-reduced
+                send(bool): when send is False, the tensor won't be sent to the
+                target device immediately; it will be copied to the buffer first
+        """
+        tensor = singa.VecTensor(tensor)
+        self.communicator.fusedSynchHalf(tensor, send)
+
+    def sparsification(self, tensor, accumulation, spars, topK):
+        """Performs all reduce of a tensor after sparsification.
+
+        Args:
+                tensor(Tensor): a tensor to be all-reduced
+                accumulation(Tensor): local gradient accumulation
+                spars(float): a parameter to control sparsity as defined below
+                topK(bool): when topK is False, it sparsifies the gradient to keep the
+                elements with absolute value >= spars; when topK is True, it keeps a
+                fraction of the gradient elements equal to spars, e.g. when spars = 0.01,
+                it keeps 1% of the total gradient elements
+        """
+        if accumulation is None:
+            self.communicator.sparsification(tensor, spars, topK)
+        else:
+            self.communicator.sparsification(tensor, accumulation, spars, topK)
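+    # Example of the spars semantics (per the docstring): with topK=True and
+    # spars=0.01, roughly the largest 1% of gradient elements by absolute
+    # value are transferred; with topK=False, only elements whose absolute
+    # value is >= spars are transferred.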
+
+    def fused_sparsification(self, tensor, accumulation, spars, topK):
+        """Performs all reduce of the tensors after fusing and sparsification.
+
+        Args:
+                tensor(List of Tensors): a list of tensors to be all-reduced
+                accumulation(Tensor): local gradient accumulation
+                spars(float): a parameter to control sparsity as defined below
+                topK(bool): when topK is False, it sparsifies the gradient to keep the
+                elements with absolute value >= spars; when topK is True, it keeps a
+                fraction of the gradient elements equal to spars, e.g. when spars = 0.01,
+                it keeps 1% of the total gradient elements
+        """
+        tensor = singa.VecTensor(tensor)
+        if accumulation is None:
+            self.communicator.fusedSparsification(tensor, spars, topK)
+        else:
+            self.communicator.fusedSparsification(tensor, accumulation, spars,
+                                                  topK)
+
+    def wait(self):
+        """Wait for the cuda streams used by the communicator to finish their operations."""
+        self.communicator.wait()
+
+    def backward_and_update(self, loss, threshold=2097152):
+        """Performs backward propagation from the loss and parameter update.
+
+        From the loss, it performs backward propagation to get the gradients and does
+        the parameter update. For gradient communication, it fuses all the tensors
+        smaller than the threshold value to reduce network latency.
+
+        Args:
+                loss(Tensor): loss is the objective function of the deep learning model
+                optimization, e.g. for a classification problem it can be the output of
+                the softmax_cross_entropy function.
+                threshold(int): a parameter to control performance when fusing the
+                tensors. Tensors smaller than the threshold are accumulated and fused
+                before the all-reduce operation, while tensors larger than the threshold
+                are reduced directly without fusion.
+        """
+        plist = []
+        acc = 0
+        glist = []
+        for p, g in autograd.backward(loss):
+            if g.size() > threshold:
+                # larger than threshold -> reduced directly
+                self.all_reduce(g.data)
+            else:
+                # smaller than threshold -> accumulate
+                glist.append(g.data)
+                self.fused_all_reduce([g.data], send=False)
+                acc += g.size()
+                if (acc > threshold):
+                    self.fused_all_reduce(glist)
+                    acc = 0
+                    glist = []
+            plist.append((p, g))
+        if glist:
+            self.fused_all_reduce(glist)
+        self.wait()
+        for p, g in plist:
+            self.update(p, g)
+        self.opt.step()
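+    # A minimal training-loop sketch for backward_and_update (illustrative;
+    # `model`, `x`, `y` and `partitioned_data` are hypothetical names, and
+    # each process is assumed to hold its own partition of the data):
+    #
+    #   sgd = SGD(lr=0.1, momentum=0.9)
+    #   dist_opt = DistOpt(sgd)
+    #   for x, y in partitioned_data:
+    #       out = model(x)
+    #       loss = autograd.softmax_cross_entropy(out, y)
+    #       dist_opt.backward_and_update(loss)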
+
+    def backward_and_update_half(self,
+                                 loss,
+                                 threshold=2097152,
+                                 clipping=False,
+                                 clip_value=100):
+        """Performs backward propagation and parameter update, with FP16 precision communication.
+
+        THIS IS AN EXPERIMENTAL FUNCTION FOR RESEARCH PURPOSES:
+        From the loss, it performs backward propagation to get the gradients and does
+        the parameter update. For gradient communication, it fuses all the tensors
+        smaller than the threshold value to reduce network latency, and converts them
+        to the FP16 half precision format before sending them out. To assist training,
+        this function provides an option to perform gradient clipping.
+
+        Args:
+                loss(Tensor): loss is the objective function of the deep learning model
+                optimization, e.g. for a classification problem it can be the output of
+                the softmax_cross_entropy function.
+                threshold(int): a parameter to control performance when fusing the
+                tensors. Tensors smaller than the threshold are accumulated and fused
+                before the all-reduce operation, while tensors larger than the threshold
+                are reduced directly without fusion.
+                clipping(bool): a boolean flag to choose whether to clip the gradient value
+                clip_value(float): the clip value to be used when clipping is True
+        """
+        plist = []
+        acc = 0
+        glist = []
+        for p, g in autograd.backward(loss):
+            if clipping:
+                g = autograd.clip(g, -clip_value, clip_value)
+            if g.size() > threshold:
+                # larger than threshold -> reduced directly
+                self.all_reduce_half(g.data)
+            else:
+                # smaller than threshold -> accumulate
+                glist.append(g.data)
+                self.fused_all_reduce_half([g.data], send=False)
+                acc += g.size()
+                if (acc > threshold):
+                    self.fused_all_reduce_half(glist)
+                    acc = 0
+                    glist = []
+            plist.append((p, g))
+        if glist:
+            self.fused_all_reduce_half(glist)
+        self.wait()
+        for p, g in plist:
+            self.update(p, g)
+        self.opt.step()
+
+    def backward_and_partial_update(self, loss, threshold=2097152):
+        """Performs backward propagation from the loss and parameter update using asychronous training.
+
+        THIS IS A EXPERIMENTAL FUNCTION FOR RESEARCH PURPOSE:
+        From the loss, it performs backward propagation to get the gradients and do the parameter
+        update. It fuses the tensors smaller than the threshold value to reduce network latency,
+        as well as performing asychronous training where one parameter partition is all-reduced
+        per iteration. The size of the parameter partition depends on the threshold value.
+
+        Args:
+                loss(Tensor): loss is the objective function of the deep learning model
+                optimization, e.g. for a classification problem it can be the output of
+                the softmax_cross_entropy function.
+                threshold(int): a parameter to control performance when fusing the
+                tensors. Tensors smaller than the threshold are accumulated and fused
+                before the all-reduce operation, while tensors larger than the threshold
+                are reduced directly without fusion.
+
+        Attributes:
+                self.partial(int): a counter to determine which partition to all-reduce.
+                This counter resets to zero automatically after an update cycle of the
+                full parameter set.
+        """
+        if not hasattr(self, "partial"):
+            self.partial = 0
+        self.partial += 1
+        k = 0
+        plist = []
+        acc = 0
+        tenlist = []
+        reduced = []
+        for p, g in autograd.backward(loss):
+            # every parameter is updated locally
+            self.opt.update(p, g)
+            # then do the partial parameter synchronization
+            if p.size() > threshold:
+                # larger than threshold -> reduced directly
+                # k is the partition number of the full gradient set
+                k += 1
+                if (k == self.partial):
+                    self.all_reduce(p.data)
+                    reduced.append(p)
+            else:
+                # smaller than threshold -> accumulate
+                plist.append(p.data)
+                tenlist.append(p)
+                acc += p.size()
+                if (acc > threshold):
+                    k += 1
+                    if (k == self.partial):
+                        self.fused_all_reduce(plist, send=False)
+                        self.fused_all_reduce(plist)
+                        reduced = tenlist
+                    acc = 0
+                    plist = []
+                    tenlist = []
+        if plist:
+            k += 1
+            if (k == self.partial):
+                self.fused_all_reduce(plist, send=False)
+                self.fused_all_reduce(plist)
+                reduced = tenlist
+        self.wait()
+        # the all-reduced parameters need to be averaged
+        for r in reduced:
+            r /= self.world_size
+        # the counter returns to zero after a cycle of partial update
+        if (k == self.partial):
+            self.partial = 0
+        self.opt.step()
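+    # Illustrative note: if the parameters fuse into, say, 4 partitions, then
+    # self.partial cycles through 1, 2, 3, 4 over successive iterations,
+    # all-reducing one partition per iteration, and resets to 0 after the
+    # full cycle.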
+
+    def backward_and_sparse_update(self,
+                                   loss,
+                                   threshold=2097152,
+                                   spars=0.05,
+                                   topK=False,
+                                   corr=True):
+        """ Performs backward propagation from the loss and parameter update with sparsification.
+
+        THIS IS A EXPERIMENTAL FUNCTION FOR RESEARCH PURPOSE:
+        From the loss, it performs backward propagation to get the gradients and do the parameter
+        update. It fuses the tensors with size smaller than the threshold value to reduce network
+        latency, as well as using sparsification schemes to transfer only the gradient elements which
+        are significant.
+
+        Args:
+                loss(Tensor): loss is the objective function of the deep learning model
+                optimization, e.g. for a classification problem it can be the output of
+                the softmax_cross_entropy function.
+                threshold(int): a parameter to control performance when fusing the
+                tensors. Tensors smaller than the threshold are accumulated and fused
+                before the all-reduce operation, while tensors larger than the threshold
+                are reduced directly without fusion.
+                spars(float): a parameter to control sparsity as defined below
+                topK(bool): when topK is False, it sparsifies the gradient to keep the
+                elements with absolute value >= spars; when topK is True, it keeps a
+                fraction of the gradient elements equal to spars, e.g. when spars = 0.01,
+                it keeps 1% of the total gradient elements
+                corr(bool): whether to use the local gradient accumulation for correction
+
+        Attributes:
+                self.sparsInit: a flag indicating whether the gradient accumulation
+                tensors have been initialized
+                self.gradAccumulation: local gradient accumulation
+        """
+        if ((not hasattr(self, "sparsInit")) and corr):
+            self.gradAccumulation = []
+            self.sparsInit = False
+        plist = []
+        acc = 0
+        k = -1
+        glist = []
+        for p, g in autograd.backward(loss):
+            if g.size() > threshold:
+                # larger than threshold -> reduced directly
+                k += 1
+                if (corr and (not self.sparsInit)):
+                    # create a tensor for the gradient accumulation
+                    flag = p.device.graph_enabled()
+                    p.device.EnableGraph(False)
+                    self.gradAccumulation.append(
+                        tensor.Tensor((g.size(),), p.device, p.dtype))
+                    self.gradAccumulation[k].set_value(0.0)
+                    p.device.EnableGraph(flag)
+                if corr:
+                    self.sparsification(g.data, self.gradAccumulation[k].data,
+                                        spars, topK)
+                else:
+                    self.sparsification(g.data, None, spars, topK)
+            else:
+                # smaller than threshold -> accumulate
+                glist.append(g.data)
+                acc += g.size()
+                if (acc > threshold):
+                    k += 1
+                    if (corr and (not self.sparsInit)):
+                        # create a tensor for the gradient accumulation
+                        flag = p.device.graph_enabled()
+                        p.device.EnableGraph(False)
+                        self.gradAccumulation.append(
+                            tensor.Tensor((acc,), p.device, p.dtype))
+                        self.gradAccumulation[k].set_value(0.0)
+                        p.device.EnableGraph(flag)
+                    if corr:
+                        self.fused_sparsification(glist,
+                                                  self.gradAccumulation[k].data,
+                                                  spars, topK)
+                    else:
+                        self.fused_sparsification(glist, None, spars, topK)
+                    acc = 0
+                    glist = []
+            plist.append((p, g))
+        if glist:
+            k += 1
+            if (corr and (not self.sparsInit)):
+                # create a tensor for the gradient accumulation
+                flag = p.device.graph_enabled()
+                p.device.EnableGraph(False)
+                self.gradAccumulation.append(
+                    tensor.Tensor((acc,), p.device, p.dtype))
+                self.gradAccumulation[k].set_value(0.0)
+                p.device.EnableGraph(flag)
+            if corr:
+                self.fused_sparsification(glist, self.gradAccumulation[k].data,
+                                          spars, topK)
+            else:
+                self.fused_sparsification(glist, None, spars, topK)
+        self.wait()
+        for p, g in plist:
+            self.update(p, g)
+        self.sparsInit = True
+        self.opt.step()
diff --git a/python/singa/optimizer.py b/python/singa/optimizer.py
deleted file mode 100644
index 614fe6d..0000000
--- a/python/singa/optimizer.py
+++ /dev/null
@@ -1,438 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-'''This module includes a set of optimizers for updating model parameters.
-
-Example usage::
-
-  from singa import optimizer
-  from singa import tensor
-
-  sgd = optimizer.SGD(lr=0.01, momentum=0.9, weight_decay=1e-4)
-  p = tensor.Tensor((3,5))
-  p.uniform(-1, 1)
-  g = tensor.Tensor((3,5))
-  g.gaussian(0, 0.01)
-
-  sgd.apply(1, g, p, 'param')  # use the global lr=0.1 for epoch 1
-  sgd.apply_with_lr(2, 0.03, g, p, 'param')  # use lr=0.03 for epoch 2
-'''
-
-import math
-from . import singa_wrap as singa
-import tensor
-from proto import model_pb2
-
-
-class Optimizer(object):
-    '''The base python optimizer class.
-
-    Typically, an optimizer is used as follows:
-
-    1. construct the optimizer
-    2. (optional) register each parameter with its specs.
-    3. use the optimizer to update parameter values given parameter gradients
-       and other optional info
-
-    The subclasses should override the apply_with_lr function to do the real
-    parameter udpate.
-
-    Args:
-        lr (float): a constant value for the learning rate
-        momentum (float): a constant value for the momentum value
-        weight_decay (float): the coefficent for L2 regularizer, which is
-            mutually exclusive with 'regularizer'.
-        regularizer: an instance of Regularizer or RegularizerConf; If set,
-            regularization would be applied in apply_with_lr().
-            Users can also do regularization outside.
-        constraint: an instance of Constraint or ConstraintConf; If set,
-            constraint would be applied inside apply_with_lr(). Users can
-            also apply constraint outside.
-    '''
-    def __init__(self, lr=None, momentum=None, weight_decay=None,
-                 regularizer=None, constraint=None):
-        self.lr = lr
-        self.momentum = momentum
-        if weight_decay is not None:
-            assert regularizer is None, \
-                'Cannot set weight_decay and regularizer at the same time'
-            regularizer = L2Regularizer(weight_decay)
-
-        if regularizer is not None:
-            if isinstance(regularizer, model_pb2.RegularizerConf):
-                self.regularizer = CppRegularizer(regularizer)
-            else:
-                self.regularizer = regularizer
-        else:
-            self.regularizer = None
-        if constraint is not None:
-            if isinstance(constraint, model_pb2.ConstraintConf):
-                self.constraint = CppConstraint(constraint)
-            else:
-                self.constraint = constraint
-        else:
-            self.constraint = None
-        self.regularizers = {}
-        self.constraints = {}
-        self.decay_multiplier = {}
-        self.learning_rate_multiplier = {}
-
-    def register(self, name, specs):
-        '''Register the param specs, including creating regularizer and
-        constraint per param object. Param specific regularizer and constraint
-        have higher priority than the global ones. If all parameters share the
-        same setting for learning rate, regularizer and constraint, then there
-        is no need to call this function.
-
-        Args:
-            name (str): parameter name
-            specs (ParamSpec): protobuf obj, including regularizer and
-                constraint, multipliers for learning rate and weight decay.
-        '''
-        assert isinstance(specs, model_pb2.ParamSpec), \
-            'specs should be model_pb2.ParamSpec instance'
-        if specs.HasField('regularizer'):
-            self.regularizers[name] = CppRegularizer(specs.regularizer)
-        elif specs.decay_mult != 1:
-            self.regularizers[name] = L2Regularizer(
-                specs.decay_mult * self.regularizer.coefficient)
-
-        if specs.HasField('constraint'):
-            self.constraints[name] = CppConstraint(specs.constraint)
-
-        if specs.lr_mult != 1:
-            self.learning_rate_multiplier[name] = specs.lr_mult
-
-    def apply_regularizer_constraint(self, epoch, value, grad, name=None,
-                                     step=-1):
-        '''Apply regularization and constraint if available.
-
-        If there are both global regularizer (constraint) and param specific
-        regularizer (constraint), it would use the param specific one.
-
-        Args:
-            epoch (int): training epoch ID
-            value (Tensor): parameter value Tensor
-            grad (Tensor): parameter gradient Tensor
-            name (string): to get parameter specific regularizer or constraint
-            step (int): iteration ID within one epoch
-
-        Returns:
-            the updated gradient Tensor
-        '''
-        if name is not None and name in self.constraints:
-            grad = self.constraints[name].apply(epoch, value, grad, step)
-        elif self.constraint is not None:
-            grad = self.constraint.apply(epoch, value, grad, step)
-
-        if name is not None and name in self.regularizers:
-            grad = self.regularizers[name].apply(epoch, value, grad, step)
-        elif self.regularizer is not None:
-            grad = self.regularizer.apply(epoch, value, grad, step)
-        return grad
-
-    def apply_with_lr(self, epoch, lr, grad, value, name=None, step=-1):
-        '''Do update of parameters with given learning rate if the grad is not
-        empty.
-
-        The subclass optimizer must override this function.
-        This function do nothing if the grad is empty.
-
-        Args:
-            epoch (int): training epoch ID
-            lr (float): learning rate
-            grad (Tensor): parameter gradient
-            value (Tesnor): parameter value
-            name (string): paramter name to index parameter specific
-                updating rules (including regularizer and constraint)
-            step (int): iteration ID within one epoch
-
-        Returns:
-            updated parameter value
-        '''
-        assert False, 'This is the base function, pls call the subclass func'
-        return value
-
-    def apply(self, epoch, grad, value, name=None, step=-1):
-        '''Do update assuming the learning rate generator is set.
-
-        The subclass optimizer does not need to override this function.
-
-        Args:
-            epoch (int): training epoch ID
-            grad (Tensor): parameter gradient
-            value (Tesnor): parameter value
-            name (string): paramter name to retrieval parameter specific
-                updating rules (including regularizer and constraint)
-            step (int): training iteration ID within one epoch
-
-        Return:
-            updated parameter value
-        '''
-        assert self.lr is not None, 'Must set the learning rate, i.e. "lr"'
-        return self.apply_with_lr(epoch, self.lr, grad, value, name, step)
-
-
-class SGD(Optimizer):
-    '''The vallina Stochasitc Gradient Descent algorithm with momentum.
-
-    See the base Optimizer for all arguments.
-    '''
-
-    def __init__(self, lr=None, momentum=None, weight_decay=None,
-                 regularizer=None, constraint=None):
-        super(SGD, self).__init__(lr, momentum, weight_decay, regularizer,
-                                  constraint)
-        conf = model_pb2.OptimizerConf()
-        if self.momentum is not None:
-            conf.momentum = self.momentum
-        conf.type = 'sgd'
-        self.opt = singa.CreateOptimizer('SGD')
-        self.opt.Setup(conf.SerializeToString())
-
-    def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
-        if grad.is_empty():
-            return value
-        grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
-        if name is not None and name in self.learning_rate_multiplier:
-            lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr, name, grad.singa_tensor, value.singa_tensor)
-        return value
-
-
-class Nesterov(Optimizer):
-    '''The SGD with Nesterov momentum.
-
-    See the base Optimizer for all arguments.
-    '''
-
-    def __init__(self, lr=None, momentum=0.9, weight_decay=None,
-                 regularizer=None, constraint=None):
-        super(Nesterov, self).__init__(lr, momentum, weight_decay,
-                                       regularizer, constraint)
-        conf = model_pb2.OptimizerConf()
-        if self.momentum is not None:
-            conf.momentum = momentum
-        conf.type = 'nesterov'
-        self.opt = singa.CreateOptimizer('Nesterov')
-        self.opt.Setup(conf.SerializeToString())
-
-    def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
-        if grad.is_empty():
-            return value
-
-        grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
-        if name is not None and name in self.learning_rate_multiplier:
-            lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr, name, grad.singa_tensor, value.singa_tensor)
-        return value
-
-
-class RMSProp(Optimizer):
-    '''RMSProp optimizer.
-
-    See the base Optimizer for all constructor args.
-
-    Args:
-        rho (float): float within [0, 1]
-        epsilon (float): small value for preventing numeric error
-    '''
-
-    def __init__(self, rho=0.9, epsilon=1e-8, lr=None, weight_decay=None,
-                 regularizer=None, constraint=None):
-        super(RMSProp, self).__init__(lr, None, weight_decay, regularizer,
-                                      constraint)
-        conf = model_pb2.OptimizerConf()
-        conf.rho = rho
-        conf.delta = epsilon
-        self.opt = singa.CreateOptimizer('RMSProp')
-        self.opt.Setup(conf.SerializeToString())
-
-    def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
-        if grad.is_empty():
-            return value
-
-        grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
-        if name is not None and name in self.learning_rate_multiplier:
-            lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(step, lr,  name, grad.singa_tensor, value.singa_tensor)
-        return value
-
-
-class AdaGrad(Optimizer):
-    '''AdaGrad optimizer.
-
-    See the base Optimizer for all constructor args.
-
-    Args:
-        epsilon (float): small number for preventing numeric error.
-    '''
-
-    def __init__(self, epsilon=1e-8, lr=None, weight_decay=None, lr_gen=None,
-                 regularizer=None, constraint=None):
-        super(AdaGrad, self).__init__(lr, None, weight_decay, regularizer,
-                                      constraint)
-        conf = model_pb2.OptimizerConf()
-        conf.delta = epsilon
-        conf.type = 'adagrad'
-        self.opt = singa.CreateOptimizer('AdaGrad')
-        self.opt.Setup(conf.SerializeToString())
-
-    def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
-        if grad.is_empty():
-            return value
-
-        grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
-        if name is not None and name in self.learning_rate_multiplier:
-            lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr,  name, grad.singa_tensor, value.singa_tensor)
-        return value
-
-
-class Adam(Optimizer):
-    '''Adam optimizer.
-
-    See the base Optimizer for all constructor args.
-
-    Args:
-        beta_1(float): coefficient of momentum
-        beta_2(float): coefficient of aggregated squared gradient
-        epsilon (float): small value for preventing numeric error
-    '''
-
-    def __init__(self, beta_1=0.9, beta_2=0.999, epsilon=1e-8, lr=None,
-                 weight_decay=None, regularizer=None, constraint=None):
-        super(Adam, self).__init__(lr, None, weight_decay, regularizer,
-                                   constraint)
-        self.beta_1 = beta_1
-        self.beta_2 = beta_2
-        self.epsilon = epsilon
-        self.m = {}
-        self.v = {}
-        self.t = 0
-        self.last_epoch = -1
-        self.last_step = -1
-
-    def apply_with_lr(self, epoch, lr, grad, value, name, step):
-        '''Update one parameter object.
-
-        Args:
-            step(int): the accumulated training iterations, not the iteration ID
-        '''
-        if grad.is_empty():
-            return value
-
-        assert step != -1, 'step should >= 0'
-        if epoch != self.last_epoch or step != self.last_step:
-            self.t += 1
-            self.last_step = step
-            self.last_epoch = epoch
-        grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
-        if name is not None and name in self.learning_rate_multiplier:
-            lr = lr * self.learning_rate_multiplier[name]
-        if name not in self.m or name not in self.v:
-            self.m[name] = tensor.Tensor(grad.shape, grad.device, grad.dtype)
-            self.m[name].set_value(0)
-            self.v[name] = tensor.Tensor(grad.shape, grad.device, grad.dtype)
-            self.v[name].set_value(0)
-
-        self.m[name] *= self.beta_1
-        tensor.axpy(1 - self.beta_1, grad, self.m[name])
-        self.v[name] *= self.beta_2
-        tensor.axpy(1 - self.beta_2, tensor.square(grad), self.v[name])
-        alpha = lr * math.sqrt(1 - math.pow(self.beta_2, self.t)) \
-            / (1 - math.pow(self.beta_1, self.t))
-        value -= alpha * self.m[name] / (tensor.sqrt(self.v[name]) +
-                                         self.epsilon)
-        return value
-
-
-class Regularizer(object):
-    '''Base Python regularizer for parameter gradients.'''
-
-    def apply(self, epoch, value, grad, step=-1):
-        assert False, 'Not Implemented. Call the subclass function.'
-        return grad
-
-
-class CppRegularizer(Regularizer):
-    '''Wrapper for regularizer implemented using C++.
-
-    Args:
-        conf (RegularizerConf): protobuf message for the configuration.
-    '''
-
-    def __init__(self, conf):
-        self.reg = singa.CreateRegularizer(conf.type)
-        self.reg.Setup(conf.SerializeToString())
-
-    def apply(self, epoch, value, grad, step=-1):
-        self.reg.Apply(epoch, value.singa_tensor, grad.singa_tensor)
-        return grad
-
-
-class L2Regularizer(Regularizer):
-    '''L2 regularization
-
-    Args:
-        coefficient (float): regularization coefficient.
-    '''
-
-    def __init__(self, coefficient):
-        self.coefficient = coefficient
-
-    def apply(self, epoch, value, grad, step=-1):
-        # print coefficient, value.l1(), grad.l1()
-        if self.coefficient != 0:
-            tensor.axpy(self.coefficient, value, grad)
-        return grad
-
-
-class Constraint(object):
-    '''Base Python constraint class for paramter gradients'''
-
-    def apply(self, epoch, value, grad, step=-1):
-        return grad
-
-
-class CppConstraint(Constraint):
-    '''Wrapper for constraints implemented using C++.
-
-    Args:
-        conf (ConstraintConf): protobuf message for the configuration.
-    '''
-
-    def __init__(self, conf):
-        self.constraint = singa.CreateConstraint(conf.type)
-        self.constraint.Setup(conf.SerializeToString())
-
-    def apply(self, epoch, value, grad, step=-1):
-        self.constraint.Apply(epoch, value.singa_tensor, grad.singa_tensor,
-                              step)
-        return grad
-
-
-class L2Constraint(Constraint):
-    '''Rescale the gradient to make the L2 norm <= a given threshold'''
-
-    def __init__(self, threshold=None):
-        self.threshold = threshold
-
-    def apply(self, epoch, value, grad, step=-1):
-        nrm = grad.l2()
-        grad *= self.threshold / nrm
-        return grad
diff --git a/python/singa/snapshot.py b/python/singa/snapshot.py
index 3e1298f..67f246b 100644
--- a/python/singa/snapshot.py
+++ b/python/singa/snapshot.py
@@ -18,6 +18,9 @@
 '''
 This script includes io::snapshot class and its methods.
 
+Note: This module is deprecated. Please use the model module for
+checkpointing and restore.
+
 Example usages::
 
     from singa import snapshot
@@ -29,15 +32,18 @@
     for k, v in params.iteritems():
         sn2.write(k, v)
 '''
+from __future__ import absolute_import
 
+from builtins import object
 from . import singa_wrap as singa
-import tensor
+from . import tensor
 
 
 class Snapshot(object):
     ''' Class and member functions for singa::Snapshot.
 
     '''
+
     def __init__(self, f, mode, buffer_size=10):
         '''Snapshot constructor given file name and R/W mode.
 
@@ -46,7 +52,7 @@
             mode (boolean): True for write, False for read
             buffer_size (int): Buffer size (in MB), default is 10
         '''
-        self.snapshot = singa.Snapshot(f, mode, buffer_size)
+        self.snapshot = singa.Snapshot(f.encode(), mode, buffer_size)
 
     def write(self, param_name, param_val):
         '''Call Write method to write a parameter
@@ -55,7 +61,7 @@
             param_name (string): name of the parameter
             param_val (Tensor): value tensor of the parameter
         '''
-        self.snapshot.Write(str(param_name), param_val.singa_tensor)
+        self.snapshot.Write(param_name.encode(), param_val.data)
 
     def read(self):
         '''Call read method to load all (param_name, param_val)
diff --git a/python/singa/sonnx.py b/python/singa/sonnx.py
new file mode 100755
index 0000000..6ff7cef
--- /dev/null
+++ b/python/singa/sonnx.py
@@ -0,0 +1,2229 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import division
+
+import numpy as np
+
+import onnx
+import onnx.utils
+from onnx.backend.base import Backend, BackendRep
+from onnx import (checker, helper, numpy_helper, GraphProto, NodeProto,
+                  TensorProto, OperatorSetIdProto, optimizer, mapping,
+                  shape_inference)
+import warnings
+
+from . import device
+from . import autograd
+from . import layer
+from . import tensor
+from . import model
+from . import utils
+from . import singa_wrap as singa
+
+import collections
+OrderedDict = collections.OrderedDict
+namedtuple = collections.namedtuple
+
+# singa only supports float32 and int32
+NP_TYPE_TO_SINGA_SUPPORT_TYPE = {
+    np.dtype('float32'): np.dtype('float32'),
+    np.dtype('uint8'): None,
+    np.dtype('int8'): np.dtype('int32'),
+    np.dtype('uint16'): None,
+    np.dtype('int16'): np.dtype('int32'),
+    np.dtype('int32'): np.dtype('int32'),
+    np.dtype('int64'): np.dtype('int32'),
+    np.dtype('bool'): np.dtype('float32'),
+    np.dtype('float16'): np.dtype('float32'),
+    np.dtype('float64'): np.dtype('float32'),
+    np.dtype('complex64'): None,
+    np.dtype('complex128'): None,
+    np.dtype('uint32'): None,
+    np.dtype('uint64'): None,
+    np.dtype(np.object): None
+}
+
+
+def onnx_type_to_singa_type(onnx_type):
+    np_type = mapping.TENSOR_TYPE_TO_NP_TYPE[onnx_type]
+    return NP_TYPE_TO_SINGA_SUPPORT_TYPE[np_type]
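+# Example (illustrative): since singa only supports float32 and int32,
+# integer ONNX types are narrowed and unsupported types map to None:
+#   onnx_type_to_singa_type(TensorProto.INT64)     -> np.dtype('int32')
+#   onnx_type_to_singa_type(TensorProto.COMPLEX64) -> None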
+
+
+gpu_dev = None
+if singa.USE_CUDA:
+    gpu_dev = device.create_cuda_gpu()
+cpu_dev = device.get_default_device()
+
+
+class SingaFrontend(object):
+    """
+    This class provides methods to convert models from singa to onnx.
+    """
+
+    # This number indicates the target onnx operator set version
+    _target_opset_version = 11
+
+    # because singa's operators are named differently from onnx's,
+    # we define a dict for the name projection
+    # "singa op name": "onnx op name"
+    _rename_operators = {
+        '_Conv2d': 'Conv',
+        'ReLU': 'Relu',
+        'MaxPool2d': 'MaxPool',
+        'AvgPool2d': 'AveragePool',
+        'SoftMax': 'Softmax',
+        'Sigmoid': 'Sigmoid',
+        'Add': 'Add',
+        'Matmul': 'MatMul',
+        '_BatchNorm2d': 'BatchNormalization',
+        'Concat': 'Concat',
+        'Flatten': 'Flatten',
+        'AddBias': 'Add',
+        'Gemm': 'Gemm',
+        'Reshape': 'Reshape',
+        'Sum': 'Sum',
+        'cos': 'Cos',
+        'cosh': 'Cosh',
+        'sin': 'Sin',
+        'sinh': 'Sinh',
+        'tan': 'Tan',
+        'tanh': 'Tanh',
+        'acos': 'Acos',
+        'acosh': 'Acosh',
+        'asin': 'Asin',
+        'asinh': 'Asinh',
+        'atan': 'Atan',
+        'atanh': 'Atanh',
+        'SeLU': 'Selu',
+        'Elu': 'Elu',
+        'Equal': 'Equal',
+        'Less': 'Less',
+        'Sign': 'Sign',
+        'Div': 'Div',
+        'Sub': 'Sub',
+        'Sqrt': 'Sqrt',
+        'Log': 'Log',
+        'Greater': 'Greater',
+        'HardSigmoid': 'HardSigmoid',
+        'Identity': 'Identity',
+        'SoftPlus': 'Softplus',
+        'SoftSign': 'Softsign',
+        'Mean': 'Mean',
+        'Pow': 'Pow',
+        'Clip': 'Clip',
+        'PRelu': 'PRelu',
+        'Mul': 'Mul',
+        'Transpose': 'Transpose',
+        'Max': 'Max',
+        'Min': 'Min',
+        'Shape': 'Shape',
+        'And': 'And',
+        'Or': 'Or',
+        'Xor': 'Xor',
+        'Not': 'Not',
+        'Negative': 'Neg',
+        'Reciprocal': 'Reciprocal',
+        'ConstantOfShape': 'ConstantOfShape',
+        'Dropout': 'Dropout',
+        'ReduceSum': 'ReduceSum',
+        'ReduceMean': 'ReduceMean',
+        'LeakyRelu': 'LeakyRelu',
+        'GlobalAveragePool': 'GlobalAveragePool',
+        'Squeeze': 'Squeeze',
+        'Unsqueeze': 'Unsqueeze',
+        'Slice': 'Slice',
+        'Ceil': 'Ceil',
+        'Split': 'Split',
+        'Gather': 'Gather',
+        'Tile': 'Tile',
+        'NonZero': 'NonZero',
+        'Cast': 'Cast',
+        'OneHot': 'OneHot',
+    }
+
+    # this dict indicates the operators that need extra handling;
+    # each entry maps to the name of the handler function
+    _special_operators = {
+        '_Conv2d': '_create_conv_pool',
+        '_Pooling2d': '_create_conv_pool',
+        '_BatchNorm2d': '_create_batchnorm',
+        'Concat': '_create_concat',
+        'Flatten': '_create_flatten',
+        'Gemm': '_create_gemm',
+        'Reshape': '_create_reshape',
+        'SoftMax': '_create_softmax',
+        'SeLU': '_create_selu',
+        'Elu': '_create_elu',
+        'HardSigmoid': '_create_hardsigmoid',
+        'Clip': '_create_clip',
+        'Transpose': '_create_transpose',
+        'ConstantOfShape': '_create_constantOfShape',
+        'Dropout': '_create_dropout',
+        'ReduceSum': '_create_reduceOp',
+        'ReduceMean': '_create_reduceOp',
+        'Squeeze': '_create_squeeze',
+        'Unsqueeze': '_create_squeeze',
+        'Slice': '_create_slice',
+        'Split': '_create_split',
+        'Gather': '_create_gather',
+        'Tile': '_create_tile',
+        'Cast': '_create_cast',
+        'OneHot': '_create_onehot',
+    }
+
+    # operators whose outputs are bool or int64 rather than float
+    _bool_operators = {
+        'Equal': TensorProto.BOOL,
+        'Greater': TensorProto.BOOL,
+        'Less': TensorProto.BOOL,
+        'And': TensorProto.BOOL,
+        'Not': TensorProto.BOOL,
+        'Or': TensorProto.BOOL,
+        'Xor': TensorProto.BOOL,
+        'Shape': TensorProto.INT64,
+        'NonZero': TensorProto.INT64,
+    }
+
+    # some ops (such as batchnorm) have inputs we cannot handle directly,
+    # so we record them here and handle them elsewhere.
+    _unhandled_operators = {
+        "_BatchNorm2d": "_special_handle_batchnorm",
+        "Reshape": "_special_handle_reshape",
+        "Clip": "_special_handle_clip",
+        "Slice": "_special_handle_slice",
+        "Gather": "_special_handle_gather",
+        "Tile": "_special_handle_tile",
+        "OneHot": "_special_handle_onehot",
+    }
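+    # For example, singa's Reshape stores its target shape as a python
+    # attribute rather than taking it as an input tensor, so
+    # _special_handle_reshape later emits that shape as an extra ONNX
+    # initializer named "<op.name>:shape".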
+
+    @classmethod
+    def _create_onehot(cls, op, op_t):
+        """
+        get an onnx node from singa onehot
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        # axis, indices, depth, values
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+        ])
+        for attr in ['depth', 'values']:
+            node.input.append(op.name + ":" + attr)
+        return node
+
+    @classmethod
+    def _create_cast(cls, op, op_t):
+        """
+        get an onnx node from singa cast
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        map_dict = {
+            tensor.float32: TensorProto.FLOAT,  # float32 -> FLOAT
+            tensor.int32: TensorProto.INT32,  # int32 -> INT32
+        }
+        node.attribute.extend([
+            helper.make_attribute('to', map_dict[op.to]),
+        ])
+        return node
+
+    @classmethod
+    def _create_tile(cls, op, op_t):
+        """
+        get an onnx node from singa tile
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.input.append(op.name + ":repeats")
+        return node
+
+    @classmethod
+    def _create_gather(cls, op, op_t):
+        """
+        get an onnx node from singa gather
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+        ])
+        node.input.append(op.name + ":indices")
+        return node
+
+    @classmethod
+    def _create_split(cls, op, op_t):
+        """
+        get an onnx node from singa split
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+            helper.make_attribute('split', op.parts),
+        ])
+        return node
+
+    @classmethod
+    def _create_slice(cls, op, op_t):
+        """
+        get an onnx node from singa slice
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        for attr in ['starts', 'ends', 'axes', 'steps']:
+            node.input.append(op.name + ":" + attr)
+        return node
+
+    @classmethod
+    def _create_squeeze(cls, op, op_t):
+        """
+        get an onnx node from singa squeeze and unsqueeze
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axes', list(op.axis)),
+        ])
+        return node
+
+    @classmethod
+    def _create_reduceOp(cls, op, op_t):
+        """
+        get an onnx node from singa ReduceSum, ReduceMean, ReduceMax, ReduceMin, etc.
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axes', list(op.axes)),
+            helper.make_attribute('keepdims', op.keepdims),
+        ])
+        return node
+
+    @classmethod
+    def _create_dropout(cls, op, op_t):
+        """
+        get an onnx node from the singa Dropout operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('ratio', op.ratio),
+        ])
+        return node
+
+    @classmethod
+    def _create_constantOfShape(cls, op, op_t):
+        """
+        get an onnx node from the singa ConstantOfShape operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        tensor_type = TensorProto.FLOAT if isinstance(
+            op.value, float) else TensorProto.INT32
+        tensor_value = helper.make_tensor("value", tensor_type, [1], [op.value])
+        node.attribute.extend([
+            helper.make_attribute('value', tensor_value),
+        ])
+        return node
+
+    @classmethod
+    def _create_transpose(cls, op, op_t):
+        """
+        get an onnx node from the singa Transpose operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('perm', op.perm),
+        ])
+        return node
+
+    @classmethod
+    def _create_clip(cls, op, op_t):
+        """
+        get an onnx node from the singa clip operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        if op.min is not None:
+            node.input.append(op.name + ":min")
+        else:
+            node.input.append("")
+        if op.max is not None:
+            node.input.append(op.name + ":max")
+        else:
+            node.input.append("")
+        return node
+
+    @classmethod
+    def _create_hardsigmoid(cls, op, op_t):
+        """
+        get an onnx node from the singa HardSigmoid operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('alpha', op.alpha),
+            helper.make_attribute('beta', op.gamma),
+        ])
+        return node
+
+    @classmethod
+    def _create_elu(cls, op, op_t):
+        """
+        get an onnx node from the singa elu operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('alpha', op.alpha),
+        ])
+        return node
+
+    @classmethod
+    def _create_selu(cls, op, op_t):
+        """
+        get an onnx node from the singa SeLU operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('alpha', op.alpha),
+            helper.make_attribute('gamma', op.gamma),
+        ])
+        return node
+
+    @classmethod
+    def _create_reshape(cls, op, op_t):
+        """
+        get an onnx node from the singa Reshape operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        # make the shape node
+        # because the reshape in singa does not provide its shape as an input tensor
+        shape_node_name = op.name + ":shape"
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        node.input.extend([shape_node_name])
+        return node
+
+    @classmethod
+    def _create_concat(cls, op, op_t):
+        """
+        get an onnx node from the singa Concat operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+        ])
+        return node
+
+    @classmethod
+    def _create_softmax(cls, op, op_t):
+        """
+        get an onnx node from the singa SoftMax operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+        ])
+        return node
+
+    @classmethod
+    def _create_flatten(cls, op, op_t):
+        """
+        get an onnx node from the singa flatten operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('axis', op.axis),
+        ])
+        return node
+
+    @classmethod
+    def _create_gemm(cls, op, op_t):
+        """
+        get an onnx node from the singa gemm operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        node.attribute.extend([
+            helper.make_attribute('alpha', float(op.alpha)),
+            helper.make_attribute('beta', float(op.beta)),
+            helper.make_attribute('transA', op.transA),
+            helper.make_attribute('transB', op.transB),
+        ])
+
+        return node
+
+    @classmethod
+    def _create_batchnorm(cls, op, op_t):
+        """
+        get an onnx node from the singa _BatchNorm2d operator
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        # first, we init batchnorm node
+        epsilon = 1e-5  # the epsilon value used in singa
+        bn_node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+        bn_node.attribute.extend([
+            helper.make_attribute('momentum', op.handle.factor),
+            helper.make_attribute('epsilon', epsilon),
+        ])
+        # then we add the extra input names for mean and var
+        nodes = []
+        running_values = {"mean": op.running_mean, "var": op.running_var}
+        for tmp_name, running_value in running_values.items():
+            node_name = op.name + ":" + tmp_name
+            bn_node.input.append(node_name)
+
+        nodes.append(bn_node)
+        return nodes
+
+    @classmethod
+    def _create_conv_pool(cls, op, op_t):
+        """
+        get an onnx node from the singa _Conv2d and _Pooling2d operators
+        Args:
+            op: a given operator
+            op_t: the tensor of the operator
+        Returns:
+            the onnx node
+        """
+        node = cls._common_singa_tensor_to_onnx_node(op, op_t)
+
+        k = [op.handle.kernel_h, op.handle.kernel_w]
+        s = [op.handle.stride_h, op.handle.stride_w]
+        oddp = op.odd_padding
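+        # ONNX lays out 'pads' as [x1_begin, x2_begin, x1_end, x2_end], so the
+        # symmetric singa padding plus the per-side odd padding is expanded
+        # into four values below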
+        p = [
+            op.handle.pad_h + oddp[0],
+            op.handle.pad_w + oddp[1],
+            op.handle.pad_w + oddp[2],
+            op.handle.pad_h + oddp[3],
+        ]
+
+        node.attribute.extend([
+            helper.make_attribute('kernel_shape', k),
+            helper.make_attribute('pads', p),
+            helper.make_attribute('strides', s),
+        ])
+        if cls._get_singa_op_type(op) == '_Conv2d':
+            node.op_type = cls._rename_operators.get('_Conv2d')
+            node.attribute.extend([
+                helper.make_attribute('group', op.handle.group),
+                helper.make_attribute('auto_pad', 'NOTSET'),
+            ])
+
+        elif op.handle.is_max_pooling:
+            node.op_type = cls._rename_operators.get('MaxPool2d')
+        else:
+            node.op_type = cls._rename_operators.get('AvgPool2d')
+        return node
+
+    @classmethod
+    def _get_singa_op_inputs_outputs(cls, op):
+        """
+        get inputs and outputs from a given operator
+        Args:
+            op: a given operator
+        Returns: 
+            inputs and outputs of the op
+        """
+        outputs = [op.output_name(idx) for _, idx in op.y_id2idx.items()]
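+        # an input name is the output name of the operator that produced it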
+        inputs = [
+            srcop.output_name(srcop.y_id2idx[yid])
+            for (srcop, yid, _, _) in op.src
+        ]
+        return inputs, outputs
+
+    @classmethod
+    def _get_singa_op_type(cls, op):
+        """
+        get the operator type from a given operator
+        Args:
+            op: a given operator
+        Returns: 
+            operator type
+        """
+        return type(op).__name__
+
+    @classmethod
+    def _special_handle_batchnorm(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        # in singa, x, scale and bias are inputs while mean and var are
+        # attributes, so mean and var have to be added to W for onnx
+        tensor_list = []
+        append_inputs = {"mean": op.running_mean, "var": op.running_var}
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            append_input = tensor.to_numpy(tensor.from_raw_tensor(append_input))
+            tensor_list.append(numpy_helper.from_array(append_input, node_name))
+        return tensor_list
+
+    @classmethod
+    def _special_handle_reshape(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        node_name = op.name + ":shape"
+        return [
+            numpy_helper.from_array(np.array(op.shape, dtype=np.int64),
+                                    node_name)
+        ]
+
+    @classmethod
+    def _special_handle_clip(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        tensor_list = []
+        # clip add min and max
+        append_inputs = {"min": op.min, "max": op.max}
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            tensor_list.append(
+                helper.make_tensor(node_name, TensorProto.FLOAT, [],
+                                   [append_input]))
+        return tensor_list
+
+    @classmethod
+    def _special_handle_slice(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        tensor_list = []
+        # slice add starts, ends, axes, steps
+        append_inputs = {
+            "starts": op.starts,
+            "ends": op.ends,
+            "axes": op.axes,
+            "steps": op.steps,
+        }
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            tensor_list.append(
+                numpy_helper.from_array(np.array(append_input), node_name))
+        return tensor_list
+
+    @classmethod
+    def _special_handle_gather(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        tensor_list = []
+        append_inputs = {
+            "indices": op.indices,
+        }
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            tensor_list.append(
+                numpy_helper.from_array(np.array(append_input), node_name))
+        return tensor_list
+
+    @classmethod
+    def _special_handle_tile(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        tensor_list = []
+        append_inputs = {
+            "repeats": op.repeats,
+        }
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            tensor_list.append(
+                numpy_helper.from_array(np.array(append_input), node_name))
+        return tensor_list
+
+    @classmethod
+    def _special_handle_onehot(cls, op, X, W):
+        """
+        hanlde the special operators
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            onnx tensor list
+        """
+        tensor_list = []
+        append_inputs = {
+            "depth": op.depth,
+            "values": op.values,
+        }
+        for tmp_name, append_input in append_inputs.items():
+            node_name = op.name + ":" + tmp_name
+            tensor_list.append(
+                numpy_helper.from_array(np.array(append_input), node_name))
+        return tensor_list
+
+    @classmethod
+    def handle_special_ops(cls, op, X, W):
+        """
+        hanlde the special operators, 
+        because the inputs of batchnorm and reshape are differnet with onnx
+        we need to add these inputs into onnx model mannully
+        Args:
+            op: a given operator
+        Args:
+            X: onnx input list
+        Args:
+            X: onnx weight list
+        Returns: the onnx node
+        """
+        optype = cls._get_singa_op_type(op)
+        translator = getattr(cls, cls._unhandled_operators[optype])
+        tensor_list = translator(op, X, W)
+        for tensor in tensor_list:
+            X.append(
+                helper.make_tensor_value_info(tensor.name, tensor.data_type,
+                                              tensor.dims))
+            W.append(tensor)
+        # return X, W
+
+    @classmethod
+    def _common_singa_tensor_to_onnx_node(cls, op, op_t):
+        """
+        get a onnx node from singa operator, prepare its type, inputs and outputs
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: the onnx node
+        """
+        node_def = NodeProto()
+        node_def.name = op.name
+
+        optype = cls._get_singa_op_type(op)
+        node_def.op_type = cls._rename_operators.get(optype, optype)
+
+        inputs, outputs = cls._get_singa_op_inputs_outputs(op)
+        node_def.input.extend(inputs)
+        node_def.output.extend(outputs)
+
+        return node_def
+
+    @classmethod
+    def singa_op_to_onnx_node(cls, op, op_t):
+        """
+        get a onnx node from singa operator
+        Args:
+            op: a given operator
+        Args:
+            op_t: the tensor of the operator
+        Returns: 
+            the onnx node
+        """
+        optype = cls._get_singa_op_type(op)
+        # whether the operator needs a special handler
+        if optype in cls._special_operators:
+            translator = getattr(cls, cls._special_operators[optype])
+        else:
+            translator = cls._common_singa_tensor_to_onnx_node
+        nodes = translator(op, op_t)
+        if not isinstance(nodes, collections.Iterable):
+            nodes = [nodes]
+        nodes = [node for node in nodes if node is not None]
+        return nodes
+
+    @classmethod
+    def singa_to_onnx_graph(cls, inputs, y, model_name="sonnx"):
+        """
+        get onnx model from singa computational graph
+        Args:
+            inputs: a list of input tensors (each is initialized with a name)
+        Args:
+            y: a list of tensors, usually the outputs of the graph
+        Returns: 
+            the onnx model
+        """
+        # assume there is only one output
+        assert len(y) == 1, "Multiple outputs are not supported yet."
+        y = y[0]
+
+        graph_def = GraphProto()
+        graph_def.name = model_name
+        topol, ws, ins = utils.post_order_recursive(y.creator, y)
+
+        # prepare the input
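+        # the graph traversal supplies the input names; dtypes and shapes are
+        # taken from the user-provided input tensors, consumed in order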
+        X = []
+        for op_name, op_t in ins.items():
+            op_t = inputs.pop(0)
+            dtype = TensorProto.INT32 if op_t.dtype == tensor.int32 else TensorProto.FLOAT
+            X.append(helper.make_tensor_value_info(op_name, dtype, op_t.shape))
+
+        # prepare the output
+        y_optype = cls._get_singa_op_type(y.creator)
+        if y_optype in cls._bool_operators:
+            y_dtype = cls._bool_operators[y_optype]
+        elif y.dtype == tensor.int32:
+            y_dtype = TensorProto.INT32
+        else:
+            y_dtype = TensorProto.FLOAT
+        Y = [helper.make_tensor_value_info(y.name, y_dtype, y.shape)]
+
+        # prepare the weight
+        W = []
+        for op_name, op_t in ws.items():
+            dtype = TensorProto.INT32 if op_t.dtype == tensor.int32 else TensorProto.FLOAT
+            wt = tensor.to_numpy(op_t)
+            wt = numpy_helper.from_array(wt)
+            wt.name = op_name
+            W.append(wt)
+            X.append(helper.make_tensor_value_info(op_name, dtype, op_t.shape))
+
+        # iterate the node graph
+        for op_name, op in topol.items():
+            optype = cls._get_singa_op_type(op)
+            if optype in cls._unhandled_operators:
+                cls.handle_special_ops(op, X, W)
+            graph_def.node.extend(cls.singa_op_to_onnx_node(op, op_t))
+
+        graph_def.input.extend(X)
+        graph_def.output.extend(Y)
+        graph_def.initializer.extend(W)
+        return graph_def
+
+    @classmethod
+    def singa_to_onnx_model(cls, inputs, y, model_name="sonnx"):
+        """
+        get onnx model from singa computational graph
+        Args:
+            inputs: a list of input tensors (each is initialized with a name)
+        Args:
+            y: a list of tensors, usually the outputs of the graph
+        Returns: 
+            the onnx model
+        """
+        opset_id = OperatorSetIdProto()
+        opset_id.version = cls._target_opset_version
+        model = helper.make_model(cls.singa_to_onnx_graph(inputs,
+                                                          y,
+                                                          model_name=model_name),
+                                  producer_name='sonnx',
+                                  opset_imports=[opset_id])
+        model = optimizer.optimize(model)
+        checker.check_model(model)
+        return model
+
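+# A minimal usage sketch of the frontend (hypothetical tensor names), assuming
+# x is a named SINGA input tensor and y was computed from x via autograd:
+#
+#   onnx_model = SingaFrontend.singa_to_onnx_model([x], [y], model_name='sonnx')
+#   onnx.save(onnx_model, 'model.onnx')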
+
+class OnnxNode(object):
+    """
+    Reimplementation of NodeProto from ONNX, but in a form
+    more convenient to work with from Python.
+    """
+
+    def __init__(self, node):
+        self.name = str(node.name).replace(".", "_")
+        self.op_type = str(node.op_type)
+        self.attrs = OnnxAttributes.from_onnx(node.attribute)
+        # inputs as attributes in singa
+        self.attr_inputs = {}
+        # inputs as weights in singa
+        self.weight_inputs = {}
+        self.inputs = list(node.input)
+        self.outputs = list(node.output)
+
+    def getattr(self, key, default=None):
+        return self.attrs[key] if key in self.attrs else default
+
+    def set_attr_inputs(self, key, name):
+        self.attr_inputs[key] = name
+
+    def del_attr_inputs(self, key):
+        del self.attr_inputs[key]
+
+    def set_weight_inputs(self, key, name):
+        self.weight_inputs[key] = name
+
+    def del_weight_inputs(self, key):
+        del self.weight_inputs[key]
+
+
+class OnnxAttributes(dict):
+    """
+    This is a more convenient way to work with ONNX attributes
+    that is not the protobuf representation.
+    """
+
+    @staticmethod
+    def from_onnx(args):
+        d = OnnxAttributes()
+        for arg in args:
+            d[arg.name] = helper.get_attribute_value(arg)
+        return d
+
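+# For example, a node with an 'axis' attribute becomes {'axis': <int>} here,
+# which backs OnnxNode.getattr('axis', default).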
+
+class SingaBackend(Backend):
+
+    # This number indicates the onnx operator set version
+    _opset_version = 11
+
+    _ir_version = 0x0000000000000006
+
+    # because singa's operator names differ from onnx's,
+    # we define a dict for the name projection
+    _rename_operators = {
+        # common op
+        'Relu': 'ReLU',
+        'Sigmoid': 'Sigmoid',
+        'Add': 'Add',
+        'MatMul': 'Matmul',
+        'Sum': 'Sum',
+        'Cos': 'Cos',
+        'Cosh': 'Cosh',
+        'Sin': 'Sin',
+        'Sinh': 'Sinh',
+        'Tan': 'Tan',
+        'Tanh': 'Tanh',
+        'Acos': 'Acos',
+        'Acosh': 'Acosh',
+        'Asin': 'Asin',
+        'Asinh': 'Asinh',
+        'Atan': 'Atan',
+        'Atanh': 'Atanh',
+        'Equal': 'Equal',
+        'Less': 'Less',
+        'Sign': 'Sign',
+        'Div': 'Div',
+        'Sub': 'Sub',
+        'Sqrt': 'Sqrt',
+        'Log': 'Log',
+        'Greater': 'Greater',
+        'Identity': 'Identity',
+        'Softplus': 'SoftPlus',
+        'Softsign': 'SoftSign',
+        'Mean': 'Mean',
+        'Pow': 'Pow',
+        'PRelu': 'PRelu',
+        'Mul': 'Mul',
+        'Max': 'Max',
+        'Min': 'Min',
+        'Shape': 'Shape',
+        'And': 'And',
+        'Or': 'Or',
+        'Xor': 'Xor',
+        'Not': 'Not',
+        'Neg': 'Negative',
+        'Reciprocal': 'Reciprocal',
+        'Unsqueeze': 'Unsqueeze',
+        'NonZero': 'NonZero',
+        'Ceil': 'Ceil',
+        'Floor': 'Floor',
+        'Abs': 'Abs',
+        # special op
+        'ScatterElements': 'ScatterElements',
+        'Cast': 'Cast',
+        'Split': 'Split',
+        'Squeeze': 'Squeeze',
+        'GlobalAveragePool': 'GlobalAveragePool',
+        'LeakyRelu': 'LeakyRelu',
+        'ReduceSum': 'ReduceSum',
+        'ReduceMean': 'ReduceMean',
+        'Dropout': 'Dropout',
+        'ConstantOfShape': 'ConstantOfShape',
+        'Transpose': 'Transpose',
+        'HardSigmoid': 'HardSigmoid',
+        'Elu': 'Elu',
+        'Selu': 'SeLU',
+        'Concat': 'Concat',
+        'Softmax': 'SoftMax',
+        'Flatten': 'Flatten',
+        'OneHot': 'OneHot',
+        'Tile': 'Tile',
+        'Gather': 'Gather',
+        'Reshape': 'Reshape',
+        'Slice': 'Slice',
+        'Clip': 'Clip',
+        'Expand': 'Expand',
+        'Pad': 'Pad',
+        'Upsample': 'UpSample',
+        'DepthToSpace': 'DepthToSpace',
+        'SpaceToDepth': 'SpaceToDepth',
+        'Where': 'Where',
+        'Erf': 'Erf',
+        'Gemm': 'layer.Gemm',  # layer
+        'BatchNormalization': 'layer.BatchNorm2d',  # layer
+        'Conv': 'layer.Conv2d',  # layer
+        'MaxPool': 'layer.Pooling2d',  # layer
+        'AveragePool': 'layer.Pooling2d',  # layer
+    }
+
+    # this dict maps the operators that need extra handling to the name of
+    # their handler function
+    _special_operators = {
+        'Cast': '_create_cast',
+        'Split': '_create_split',
+        'Squeeze': '_create_squeeze_unsqueeze',
+        'Unsqueeze': '_create_squeeze_unsqueeze',
+        'GlobalAveragePool': '_create_global_average_pool',
+        'LeakyRelu': '_create_leakyrelu',
+        'ReduceSum': '_create_reduce_ops',
+        'ReduceMean': '_create_reduce_ops',
+        'Dropout': '_create_dropout',
+        'ConstantOfShape': '_create_constant_of_shape',
+        'Transpose': '_create_transpose',
+        'HardSigmoid': '_create_hardsigmoid',
+        'Elu': '_create_elu',
+        'Selu': '_create_selu',
+        'Concat': '_create_concat',
+        'Softmax': '_create_softmax',
+        'Gemm': '_create_gemm',
+        'Flatten': '_create_flatten',
+        'OneHot': '_create_onehot',
+        'Tile': '_create_tile',
+        'Gather': '_create_gather',
+        'Reshape': '_create_reshape',
+        'Slice': '_create_slice',
+        'Clip': '_create_clip',
+        'BatchNormalization': '_create_batch_norm',
+        'Conv': '_create_conv',
+        'MaxPool': '_create_max_avg_pool',
+        'AveragePool': '_create_max_avg_pool',
+        'Expand': '_create_expand',
+        'Pad': '_create_pad',
+        'Upsample': '_create_upsample',
+        'DepthToSpace': '_create_depth_space',
+        'SpaceToDepth': '_create_depth_space',
+        'ScatterElements': '_create_scatter_elements',
+        'Where': '_create_where',
+    }
+
+    @classmethod
+    def _create_depth_space(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the DepthToSpace and SpaceToDepth operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        blocksize = onnx_node.getattr("blocksize")
+        mode = utils.force_unicode(onnx_node.getattr("mode", "DCR"))
+        return operator(blocksize, mode)
+
+    @classmethod
+    def _create_where(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Where operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_node.set_attr_inputs(onnx_node.inputs[0], 'condition')
+        return operator(None)
+
+    @classmethod
+    def _create_pad(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Pad operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        mode = onnx_node.getattr("mode", "constant")
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'pads')
+        if len(onnx_node.inputs) == 3:
+            onnx_node.set_attr_inputs(onnx_node.inputs[2], 'constant')
+        return operator(mode, None, None)
+
+    @classmethod
+    def _create_upsample(cls,
+                         onnx_node,
+                         operator,
+                         opset_version=_opset_version):
+        """
+        get the UpSample operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        mode = utils.force_unicode(onnx_node.getattr("mode", None))
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'scales')
+        return operator(mode, None)
+
+    @classmethod
+    def _create_expand(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Expand operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'shape')
+        return operator(None)
+
+    @classmethod
+    def _create_cast(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Cast operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        to_type = onnx_type_to_singa_type(onnx_node.getattr("to"))
+        assert to_type is not None, "unsupported cast type: {}".format(
+            onnx_node.getattr("to"))
+        if to_type == np.dtype('float32'):
+            return operator(tensor.float32)
+        else:
+            return operator(tensor.int32)
+
+    @classmethod
+    def _create_split(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Split operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        axis = onnx_node.getattr("axis", 0)
+        split = onnx_node.getattr("split", None)
+        num_output = len(onnx_node.outputs)
+        return operator(axis, split, num_output)
+
+    @classmethod
+    def _create_squeeze_unsqueeze(cls,
+                                  onnx_node,
+                                  operator,
+                                  opset_version=_opset_version):
+        """
+        get the Squeeze and Unsqueeze operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        axes = onnx_node.getattr("axes")
+        return operator(axes)
+
+    @classmethod
+    def _create_global_average_pool(cls,
+                                    onnx_node,
+                                    operator,
+                                    opset_version=_opset_version):
+        """
+        get the GlobalAveragePool operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        data_format = onnx_node.getattr("data_format", 'channels_first')
+        return operator(data_format)
+
+    @classmethod
+    def _create_leakyrelu(cls,
+                          onnx_node,
+                          operator,
+                          opset_version=_opset_version):
+        """
+        get the LeakyRelu operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        alpha = onnx_node.getattr("alpha", 0.01)
+        return operator(alpha)
+
+    @classmethod
+    def _create_reduce_ops(cls,
+                           onnx_node,
+                           operator,
+                           opset_version=_opset_version):
+        """
+        get the ReduceSum, ReduceMean, ReduceMax, ReduceMin, etc, operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        axes = onnx_node.getattr("axes", None)
+        keepdims = onnx_node.getattr("keepdims", 1)
+        return operator(axes, keepdims)
+
+    @classmethod
+    def _create_dropout(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Dropout operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        seed = onnx_node.getattr("seed", 0)
+        ratio = onnx_node.getattr("ratio", 0)
+        return operator(seed, ratio)
+
+    @classmethod
+    def _create_constant_of_shape(cls,
+                                  onnx_node,
+                                  operator,
+                                  opset_version=_opset_version):
+        """
+        get the ConstantOfShape operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        value = onnx_node.getattr("value", 0)
+        if isinstance(value, onnx.TensorProto):
+            value = numpy_helper.to_array(value)[0].item()
+        return operator(value)
+
+    @classmethod
+    def _create_transpose(cls,
+                          onnx_node,
+                          operator,
+                          opset_version=_opset_version):
+        """
+        get the Transpose operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        perm = onnx_node.getattr("perm")
+        return operator(perm)
+
+    @classmethod
+    def _create_hardsigmoid(cls,
+                            onnx_node,
+                            operator,
+                            opset_version=_opset_version):
+        """
+        get the hardsigmoid operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        alpha = onnx_node.getattr("alpha", 0.2)
+        beta = onnx_node.getattr("beta", 0.5)
+        return operator(alpha, beta)
+
+    @classmethod
+    def _create_elu(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the elu operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        alpha = onnx_node.getattr("alpha", 1.)
+        return operator(alpha)
+
+    @classmethod
+    def _create_selu(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the selu operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        alpha = onnx_node.getattr("alpha", 1.67326)
+        gamma = onnx_node.getattr("gamma", 1.0507)
+        return operator(alpha, gamma)
+
+    @classmethod
+    def _create_concat(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the concat operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        factor = onnx_node.getattr('axis')
+        return operator(axis=factor)
+
+    @classmethod
+    def _create_softmax(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the softmax operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        factor = onnx_node.getattr('axis', 1)
+        return operator(axis=factor)
+
+    @classmethod
+    def _create_gemm(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the gemm operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        alpha = onnx_node.getattr('alpha', 1.)
+        beta = onnx_node.getattr('beta', 1.)
+        transA = onnx_node.getattr('transA', 0)
+        transB = onnx_node.getattr('transB', 0)
+        onnx_node.set_weight_inputs(onnx_node.inputs[1], 'W')
+        bias = False
+        if len(onnx_node.inputs) == 3:
+            onnx_node.set_weight_inputs(onnx_node.inputs[2], 'b')
+            bias = True
+        return operator(None,
+                        alpha=alpha,
+                        beta=beta,
+                        transA=transA,
+                        transB=transB,
+                        bias=bias)
+
+    @classmethod
+    def _create_flatten(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the flatten operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        factor = onnx_node.getattr('axis', 1)
+        return operator(axis=factor)
+
+    @classmethod
+    def _create_onehot(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the OneHot operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        axis = onnx_node.getattr("axis", -1)
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'depth')
+        onnx_node.set_attr_inputs(onnx_node.inputs[2], 'values')
+        return operator(axis, None, None)
+
+    @classmethod
+    def _create_tile(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Tile operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'repeats')
+        return operator(None)
+
+    @classmethod
+    def _create_gather(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Gather operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        axis = onnx_node.getattr("axis", 0)
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'indices')
+        return operator(axis, None)
+
+    @classmethod
+    def _create_reshape(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the reshape operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'shape')
+        return operator(None)
+
+    @classmethod
+    def _create_slice(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the Slice operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'starts')
+        onnx_node.set_attr_inputs(onnx_node.inputs[2], 'ends')
+        if len(onnx_node.inputs) >= 4 and onnx_node.inputs[3] != '':
+            onnx_node.set_attr_inputs(onnx_node.inputs[3], 'axes')
+        if len(onnx_node.inputs) == 5 and onnx_node.inputs[4] != '':
+            onnx_node.set_attr_inputs(onnx_node.inputs[4], 'steps')
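+        # the None placeholders below are resolved at run time: SingaRep.run
+        # looks up the names recorded via set_attr_inputs and assigns the
+        # values onto the operator before calling it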
+        return operator(None, None, None, None)
+
+    @classmethod
+    def _create_clip(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the clip operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        if len(onnx_node.inputs) >= 2 and onnx_node.inputs[1] != '':
+            onnx_node.set_attr_inputs(onnx_node.inputs[1], 'min')
+        if len(onnx_node.inputs) == 3 and onnx_node.inputs[2] != '':
+            onnx_node.set_attr_inputs(onnx_node.inputs[2], 'max')
+        return operator(None, None)
+
+    @classmethod
+    def _create_batch_norm(cls,
+                           onnx_node,
+                           operator,
+                           opset_version=_opset_version):
+        """
+        get the batch normalization operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        factor = onnx_node.getattr('momentum', 0.9)
+        onnx_node.set_weight_inputs(onnx_node.inputs[1], 'scale')
+        onnx_node.set_weight_inputs(onnx_node.inputs[2], 'bias')
+        onnx_node.set_weight_inputs(onnx_node.inputs[3], 'running_mean')
+        onnx_node.set_weight_inputs(onnx_node.inputs[4], 'running_var')
+        return operator(factor)
+
+    @classmethod
+    def _create_conv(cls, onnx_node, operator, opset_version=_opset_version):
+        """
+        get the conv operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        kernel_size = tuple(onnx_node.getattr('kernel_shape'))
+        padding = tuple(onnx_node.getattr('pads', (0, 0)))
+        stride = tuple(onnx_node.getattr('strides', (1, 1)))
+        auto_pad = utils.force_unicode(onnx_node.getattr('auto_pad', 'NOTSET'))
+
+        # dilation is not supported
+        dilation = onnx_node.getattr('dilations', 1)
+        if dilation != 1 and list(dilation) != [1, 1]:
+            raise ValueError("Not implemented yet for dilation")
+        group = onnx_node.getattr('group', 1)
+
+        # only 1d or 2d convolution is supported
+        if len(kernel_size) > 2:
+            raise ValueError("Only implemented for 1d or 2d")
+
+        onnx_node.set_weight_inputs(onnx_node.inputs[1], 'W')
+        bias = False
+        if len(onnx_node.inputs) == 3:
+            onnx_node.set_weight_inputs(onnx_node.inputs[2], 'b')
+            bias = True
+        return operator(None,
+                        kernel_size,
+                        stride=stride,
+                        padding=padding,
+                        dilation=dilation,
+                        group=group,
+                        bias=bias,
+                        pad_mode=auto_pad)
+
+    @classmethod
+    def _create_max_avg_pool(cls,
+                             onnx_node,
+                             operator,
+                             opset_version=_opset_version):
+        """
+        get the max or avg pooling operator from onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        kernel_size = tuple(onnx_node.getattr('kernel_shape'))
+        padding = tuple(onnx_node.getattr('pads', (0, 0)))
+        stride = tuple(onnx_node.getattr('strides', (1, 1)))
+        auto_pad = utils.force_unicode(onnx_node.getattr('auto_pad', 'NOTSET'))
+
+        # count_include_pad and ceil_mode are not supported
+        ceil_mode = onnx_node.getattr('ceil_mode', 0)
+        count_include_pad = onnx_node.getattr('count_include_pad', 0)
+        if ceil_mode != 0 or count_include_pad != 0:
+            raise ValueError(
+                "Not implemented yet for count_include_pad or ceil_mode")
+
+        # only 1d or 2d pooling is supported
+        if len(kernel_size) > 2:
+            raise ValueError("Only implemented for 1d or 2d")
+
+        is_max = onnx_node.op_type == 'MaxPool'
+        return operator(kernel_size, stride, padding, is_max, auto_pad)
+
+    @classmethod
+    def _create_scatter_elements(cls,
+                                 onnx_node,
+                                 operator,
+                                 opset_version=_opset_version):
+        """
+        get the ScatterElements from the onnx node
+        Args:
+            onnx_node(OnnxNode): a given onnx node
+            operator (Operator Class): a singa operator class
+            opset_version(int): the opset version
+        Returns: 
+            singa operator instance      
+        """
+        axis = onnx_node.getattr("axis", 0)
+        onnx_node.set_attr_inputs(onnx_node.inputs[1], 'indices')
+        onnx_node.set_attr_inputs(onnx_node.inputs[2], 'updates')
+        return operator(None, None, axis)
+
+    @classmethod
+    def _onnx_constant_to_np(cls, onnx_node, opset_version=_opset_version):
+        """
+        parse an onnx constant node into a numpy array
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            opset_version (int): the opset version
+        Returns: 
+            a numpy ndarray
+        """
+        onnx_tensor = onnx_node.getattr('value')
+        np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[onnx_tensor.data_type]
+        return np.frombuffer(onnx_tensor.raw_data, dtype=np_dtype)
+
+    @classmethod
+    def _onnx_node_to_singa_op(cls, onnx_node, opset_version=_opset_version):
+        """
+        get a singa operator from an onnx node
+        Args:
+            onnx_node (OnnxNode): a given onnx node
+            opset_version (int): the opset version
+        Returns: 
+            singa operator instance
+        """
+        onnx_op_type = onnx_node.op_type
+        assert onnx_op_type in cls._rename_operators, "unsupported operator: {}".format(
+            onnx_op_type)
+        renamed_op = cls._rename_operators[onnx_op_type]
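+        # e.g. an onnx 'Conv' node maps to 'layer.Conv2d' and is taken from the
+        # layer module, while plain names such as 'Relu' -> 'ReLU' come from
+        # autograd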
+        if renamed_op.startswith('layer.'):
+            op_class = getattr(layer, renamed_op[6:])
+        else:
+            op_class = getattr(autograd, renamed_op)
+        if onnx_node.op_type in cls._special_operators:
+            translator = getattr(cls, cls._special_operators[onnx_node.op_type])
+            op = translator(onnx_node, op_class, opset_version)
+            op.name = onnx_node.name
+        else:
+            op = op_class()
+        # refine the OnnxNode: drop empty optional inputs
+        onnx_node.inputs = [inp for inp in onnx_node.inputs if inp != '']
+        return op
+
+    @classmethod
+    def run_node(cls, node, inputs, device='CPU', opset_version=_opset_version):
+        """
+        run a single singa operator from an onnx node
+        Args:
+            node (NodeProto): a given onnx node
+            inputs (ndarray[]): a list of numpy ndarray
+            device (string): CPU or CUDA
+            opset_version (int): the opset version
+        Returns:
+            list, the output
+        """
+        node = OnnxNode(node)
+        valid_inputs = [x for x in node.inputs if x != ""]
+        assert len(valid_inputs) == len(
+            inputs), "{}: expected {} inputs, but got {}. ".format(
+                node.op_type, len(valid_inputs), len(inputs))
+
+        operator = cls._onnx_node_to_singa_op(node, opset_version)
+        # separate weights from inputs, and init inputs as Tensor
+        weights = {}
+        _inputs = []
+        for (key, val) in zip(valid_inputs, inputs):
+            val = val.astype(onnx_type_to_singa_type(val.dtype))
+            if key in node.weight_inputs:
+                weights[key] = val
+            else:
+                x = tensor.from_numpy(val)
+                # the 'device' argument is a string and shadows the singa
+                # device module, so import the module under another name here
+                from . import device as device_module
+                if device != 'CPU':
+                    assert singa.USE_CUDA, "Your SINGA was not compiled with GPU support."
+                    dev = device_module.create_cuda_gpu()
+                else:
+                    dev = device_module.get_default_device()
+                x.to_device(dev)
+                _inputs.append(x)
+        inputs = _inputs
+        # set params
+        params = {}
+        for key, name in node.weight_inputs.items():
+            params[name] = weights[key]
+        operator.set_params(params)
+        outputs = cls._run_node(operator, inputs)
+        outputs_dict = OrderedDict()
+        for (key, val) in zip(node.outputs, outputs):
+            outputs_dict[key] = val
+        return outputs_dict
+
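+    # A minimal sketch of running a single node (hypothetical shapes), e.g. an
+    # elementwise Relu:
+    #
+    #   node = helper.make_node('Relu', inputs=['x'], outputs=['y'])
+    #   out = SingaBackend.run_node(node, [np.random.randn(2, 3).astype(np.float32)])
+    #   y = tensor.to_numpy(out['y'])
+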
+    @classmethod
+    def _run_node(cls, operator, inputs):
+        """
+        run a single singa operator from singa operator
+        Args:
+            operator (Operator): the Operator instance
+            inputs (Tensor[]): a list of SINGA Tensor
+        Returns:
+            list, the output
+        """
+        outputs = operator(*inputs)
+        if not isinstance(outputs, collections.Iterable):
+            outputs = [outputs]
+        return outputs
+
+    @classmethod
+    def _parse_graph_params(cls, graph, device):
+        """
+        parse the parameters from onnx graph
+        Args:
+            graph (Graph): a given onnx graph
+            device (string): CPU or CUDA
+        Returns:
+            a dict of numpy ndarray
+        """
+        params = {}
+        for tp in graph.initializer:
+            val = numpy_helper.to_array(tp)
+            val = val.astype(onnx_type_to_singa_type(tp.data_type))
+            params[tp.name] = val
+        return params
+
+    @classmethod
+    def _parse_graph_inputs_outputs(cls, graph, params, device):
+        """
+        parse the inputs and outputs from the onnx graph
+        Args:
+            graph (Graph): a given onnx graph
+            device (string): CPU or CUDA
+        Returns:
+            a list of input info tuples (name, dtype, shape)
+            a list of output info tuples (name, dtype, shape)
+        """
+        inputs = []
+        outputs = []
+        info_tuple = namedtuple('info_tuple', ['name', 'dtype', 'shape'])
+        for t in graph.input:
+            if t.name not in params:
+                dtype = t.type.tensor_type.elem_type
+                shape = [dim.dim_value for dim in t.type.tensor_type.shape.dim]
+                inputs.extend([info_tuple(t.name, dtype, shape)])
+        for t in graph.output:
+            dtype = t.type.tensor_type.elem_type
+            shape = [dim.dim_value for dim in t.type.tensor_type.shape.dim]
+            outputs.extend([info_tuple(t.name, dtype, shape)])
+        return inputs, outputs
+
+    @classmethod
+    def _onnx_model_to_singa_ops(cls,
+                                 graph,
+                                 device,
+                                 opset_version=_opset_version):
+        """
+        get all intermediate params, operators, and input info from onnx model
+        Args:
+            graph (Graph): the loaded ONNX graph
+            device (string): CPU or CUDA
+            opset_version (int): the opset version
+        Returns:
+            a dict of weights
+            a list of input info tuples
+            a list of output info tuples
+            a list of operator_tuple('node', 'operator')
+        """
+        # init all tensor input and params as a tensor map
+        params = cls._parse_graph_params(graph, device)
+        inputs, outputs = cls._parse_graph_inputs_outputs(graph, params, device)
+        # the parsed operators queue
+        operators = []
+        operator_tuple = namedtuple('operator_tuple', ['node', 'operator'])
+        for node in graph.node:
+            if not node.name:
+                node.name = "%s_%d" % (str(node.op_type), len(operators))
+            node = OnnxNode(node)
+            # convert Constant to param
+            if node.op_type == 'Constant':
+                params[node.outputs[0]] = cls._onnx_constant_to_np(node)
+            else:
+                op = cls._onnx_node_to_singa_op(node, opset_version)
+                operators.append(operator_tuple(node, op))
+        return params, inputs, outputs, operators
+
+    @classmethod
+    def prepare(cls, model, device='CPU', **kwargs):
+        """
+        parse the ONNX model and create the layers
+        Args:
+            model (ModelProto): the loaded ONNX model
+            device (string): CPU or CUDA
+        Returns:
+            a SingaRep instance that stores the layers and weights
+        """
+        super(SingaBackend, cls).prepare(model, device, **kwargs)
+        # optimize and infer the shape of the model
+        try:
+            model = onnx.utils.polish_model(model)
+        except IndexError:
+            model = shape_inference.infer_shapes(model)
+
+        # check the opset version and ir version
+        # SINGA supports opset version(11), ir version(1.6.0 -> 6)
+        opset_version = None
+        for imp in model.opset_import:
+            if not imp.HasField("domain") or imp.domain == "":
+                opset_version = imp.version
+                if imp.version > cls._opset_version:
+                    warnings.warn(
+                        "The imported operator set version {} is larger than the supported version {}."
+                        .format(imp.version, cls._opset_version))
+            else:
+                warnings.warn("Unrecognized operator set {}".format(imp.domain))
+
+        if model.ir_version > cls._ir_version:
+            warnings.warn(
+                "The imported ir version {} is larger than the supported version {}."
+                .format(model.ir_version, cls._ir_version))
+
+        graph = model.graph
+        params, inputs, outputs, layers = cls._onnx_model_to_singa_ops(
+            graph, device, opset_version)
+        return SingaRep(params, inputs, outputs, layers, device)
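+
+    # A minimal usage sketch (hypothetical file name):
+    #
+    #   model = onnx.load('model.onnx')
+    #   rep = SingaBackend.prepare(model, device='CPU')
+    #   y = rep.run([np.random.randn(1, 3, 224, 224).astype(np.float32)])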
+
+
+class SingaRep(BackendRep):
+
+    def __init__(self, params, inputs, outputs, layers, device):
+        """
+        https://github.com/onnx/onnx/blob/master/docs/ImplementingAnOnnxBackend.md
+        SingaRep provides the intermediate representation of Singa,
+        the user can run the forward of the singa model by run func,
+        or, the user can append more layers after the singa_ops to do
+        the transfer learning
+        Args:
+            params (dict{}): a dict of params, data type is numpy ndarray
+            inputs (ValueInfo): a dict of inputs
+            outputs (ValueInfo): a dict of outputs
+            layers (namedtuple('operator_tuple', ['node', 'operator'])[]): a list of singa operator
+            device (string): CPU or CUDA
+        """
+        super(SingaRep, self).__init__()
+        self.inputs = inputs
+        self.states = params
+        self.outputs = outputs
+        self.dev = cpu_dev if device == "CPU" else gpu_dev
+        self.layers = layers
+        self.tensor_count = {}
+        self.has_initialized = False
+        self.is_graph = False
+
+    def initialize(self):
+        """
+        Init the instance
+        """
+        self.outputs_info = {outp.name: outp for outp in self.outputs}
+        _layers = []  # layers by topo order
+        for node, operator in self.layers:
+            _del_keys = []
+            for key, name in node.weight_inputs.items():
+                if key not in self.states:
+                    # the weight is not among the states; treat it as an
+                    # attribute input instead
+                    node.set_attr_inputs(key, name)
+                    _del_keys.append(key)
+            for key in _del_keys:
+                node.del_weight_inputs(key)
+            self.__dict__[node.name] = operator
+            _layers.append(node)
+        self._layers = _layers
+
+    def init_tensor_count(self):
+        """
+        Init the tensor count dict
+        """
+        self.tensor_count = {}
+        for node, operator in self.layers:
+            # init the tensor count
+            all_possible_inputs = node.inputs + list(
+                node.attr_inputs.keys()) + list(node.weight_inputs.keys())
+            for inp in all_possible_inputs:
+                if inp not in self.tensor_count:
+                    self.tensor_count[inp] = 1
+                else:
+                    self.tensor_count[inp] += 1
+
+    def to_input_tensor(self, x):
+        """
+        convert the input to tensors
+        Args:
+            x (np.ndarray[]): a list of numpy ndarray as inputs
+        Returns: 
+            a dict of SINGA Tensors
+        """
+        tensor_dict = {}
+        # init inputs as Tensor
+        for (key, val) in zip(self.inputs, x):
+            if not self.is_graph:
+                val = val.astype(onnx_type_to_singa_type(key.dtype))
+                # todo, scalar
+                val = np.atleast_1d(val)
+                val = tensor.from_numpy(val)
+                val.to_device(self.dev)
+            tensor_dict[key.name] = val
+        return tensor_dict
+
+    def to_output_tensor(self, y, out_name):
+        """
+        convert the tensors to input
+        Args:
+            x (np.ndarray[]): a list of numpy ndarray as inputs
+        Returns: 
+            a dict of SINGA Tensors
+        """
+        if not self.is_graph:
+            y = tensor.to_numpy(y)
+            if out_name in self.outputs_info:
+                np_dtyp = mapping.TENSOR_TYPE_TO_NP_TYPE[
+                    self.outputs_info[out_name].dtype]
+                y = y.astype(np_dtyp)
+        return y
+
+    def get_s(self, name, node, tensor_dict):
+        """
+        get state from the node's weights or tensor_dict
+        Args:
+            name (str): name of the state
+            node (ONNXNode): ONNX node
+            tensor_dict ({}): tensor dict
+        Returns: 
+            the states
+        """
+        if name in node.attr_inputs:
+            return tensor_dict[name]
+        else:
+            return self.states[name]
+
+    def handle_special_ops(self, node, op, tensor_dict):
+        """
+        hanlde some special operations
+        Args:
+            name (str): name of the state
+            node (ONNXNode): ONNX node
+            tensor_dict ({}): tensor dict
+        Returns: 
+            the states
+        """
+        # todo, hard code
+        # Conv2d nb_kernels
+        if node.op_type == "Conv":
+            shape = self.get_s(node.inputs[1], node, tensor_dict).shape
+            op.nb_kernels = shape[0]
+        # Gemm nb_kernels and bias_shape
+        elif node.op_type == "Gemm":
+            nb_kernels_flag = 0 if op.transB == 1 else -1
+            shape = self.get_s(node.inputs[1], node, tensor_dict).shape
+            op.nb_kernels = shape[nb_kernels_flag]
+            if op.bias:
+                shape = self.get_s(node.inputs[2], node, tensor_dict).shape
+                op.bias_shape = shape
+
+    def run(self, x, **kwargs):
+        """
+        run the forward of singa model
+        Args:
+            x (np.ndarray[]): a list of numpy ndarray as inputs
+        Returns: 
+            a list of outputs
+        """
+        if not self.has_initialized:
+            self.initialize()
+            if isinstance(x[0], tensor.Tensor):
+                self.dev = x[0].device
+
+        outputs_dict = OrderedDict([])
+
+        # last_layers: run the model only up to (and including) this layer index
+        last_layers = kwargs.get('last_layers', len(self._layers) - 1)
+        last_layers = last_layers if last_layers >= 0 else (
+            last_layers + 1) % len(self._layers)
+        if last_layers != len(self._layers) - 1:
+            for outp in self._layers[last_layers].outputs:
+                outputs_dict[outp] = None
+        else:
+            for outp in self.outputs:
+                outputs_dict[outp.name] = None
+
+        aux_output = kwargs.get('aux_output', ())
+        for outp in aux_output:
+            outputs_dict[outp] = None
+
+        tensor_dict = self.to_input_tensor(x)
+        self.init_tensor_count()
+
+        # run the layer by the topo order
+        for node in self._layers[:last_layers + 1]:
+            op = self.__dict__[node.name]
+            self.handle_special_ops(node, op, tensor_dict)
+            # make input
+            inputs = []
+            for inp in node.inputs:
+                if inp not in node.weight_inputs and inp not in node.attr_inputs:
+                    if inp in tensor_dict:
+                        inputs.append(tensor_dict[inp])
+                    elif inp in self.states:
+                        # todo, scalar
+                        val = np.atleast_1d(self.states[inp])
+                        val = tensor.from_numpy(val)
+                        val.to_device(self.dev)
+                        inputs.append(val)
+                    else:
+                        raise KeyError(
+                            "Not found the input {} for operation {}".format(
+                                inp, node.name))
+            states = {}
+            if callable(getattr(op, "initialize",
+                                None)) and not op._initialized:
+                # init the operator
+                op.initialize(*inputs)
+                op._initialized = True
+                for key, name in node.weight_inputs.items():
+                    if key not in node.attr_inputs:
+                        # find the weights and not in the inputs
+                        states[name] = self.states[key]
+
+            # replace attrs by inputs
+            for key, name in node.attr_inputs.items():
+                if key in tensor_dict:
+                    ts = tensor_dict[key]
+                elif key in self.states:
+                    ts = self.states[key]
+                if isinstance(ts, tensor.Tensor):
+                    ts = tensor.to_numpy(ts)
+                states[name] = ts
+            # set states
+            if states:
+                if callable(getattr(op, "set_states", None)):
+                    # rename the layer's states
+                    states = {
+                        getattr(op, key).name: val
+                        for (key, val) in states.items()
+                    }
+                    if self.is_graph and not self.has_initialized:
+                        prev_state = self.dev.graph_enabled()
+                        self.dev.EnableGraph(False)
+                        op.set_states(states)
+                        self.dev.EnableGraph(prev_state)
+                    else:
+                        op.set_states(states)
+                else:
+                    for key, value in states.items():
+                        setattr(op, key, value)
+            # run the node
+            outputs = _run_node(op, inputs)
+            # release input tensors that are no longer needed
+            for inp in node.inputs:
+                if inp in self.tensor_count:
+                    self.tensor_count[inp] -= 1
+                    if self.tensor_count[inp] == 0:
+                        if inp in tensor_dict:
+                            del tensor_dict[inp]
+                        del self.tensor_count[inp]
+            # store the output
+            for (outp, val) in zip(node.outputs, outputs):
+                tensor_dict[outp] = val
+                if outp in outputs_dict:
+                    outputs_dict[outp] = self.to_output_tensor(val, outp)
+        self.has_initialized = True
+        return list(outputs_dict.values())
+
+
+class SONNXModel(model.Model):
+
+    def __init__(self, onnx_model):
+        """
+        Initialize a SINGA model from an ONNX model.
+        Args:
+            onnx_model (ModelProto): a loaded ONNX model
+        """
+        super(SONNXModel, self).__init__()
+        self.sg_ir = prepare(onnx_model)
+        for node, operator in self.sg_ir.layers:
+            self.__dict__[node.name] = operator
+        self.sg_ir.is_graph = True
+
+    def forward(self, *input, aux_output=(), **kwargs):
+        """
+        The forward pass of the SINGA model.
+        Args:
+            input (a list of Tensor): the input tensors
+            aux_output (tuple of str): the names of extra outputs to return
+
+        Returns:
+            a list of output Tensors
+        """
+        return self.sg_ir.run(input, aux_output=aux_output, **kwargs)
+
+
+run_node = SingaBackend.run_node
+_run_node = SingaBackend._run_node
+prepare = SingaBackend.prepare
+get_op = SingaBackend._onnx_node_to_singa_op
+to_onnx = SingaFrontend.singa_to_onnx_model
+save = onnx.save
+load = onnx.load
\ No newline at end of file
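To make the exported aliases above concrete, here is a minimal usage sketch. The module path singa.sonnx, the model file name, and the input shape are assumptions for illustration, not part of the patch:

import numpy as np
from singa import device, tensor
from singa import sonnx  # assumption: this backend is packaged as singa.sonnx

dev = device.get_default_device()
onnx_model = sonnx.load('model.onnx')   # sonnx.load is the onnx.load alias above
m = sonnx.SONNXModel(onnx_model)        # wraps prepare() and registers the layers
x = tensor.Tensor(shape=(1, 3, 224, 224), device=dev)  # hypothetical input shape
x.gaussian(0.0, 1.0)
outputs = m.forward(x)                  # the outputs produced by sg_ir.run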
diff --git a/python/singa/tensor.py b/python/singa/tensor.py
old mode 100644
new mode 100755
index d36abdc..4f62a31
--- a/python/singa/tensor.py
+++ b/python/singa/tensor.py
@@ -39,9 +39,7 @@
     dev = device.get_default_device()
     x.to_device(dev)  # move the data to a gpu device
 
-    r = tensor.relu(x)
-
-    s = tensor.to_numpy(r)  # tensor -> numpy array
+    s = tensor.to_numpy(x)  # tensor -> numpy array
 
 There are two sets of tensor functions,
 
@@ -53,51 +51,127 @@
 
 Every Tensor instance must be initialized before reading data from it.
 """
+from __future__ import division
+from __future__ import print_function
+from __future__ import absolute_import
 
+from deprecated import deprecated
+from builtins import object
 import numpy as np
 from functools import reduce
-from .proto import core_pb2
-from . import singa_wrap as singa
-import device as pydevice
+import re
 
-int32 = core_pb2.kInt
-float32 = core_pb2.kFloat32
+from . import singa_wrap as singa
+from .device import get_default_device
+
+int32 = 2  # core.proto.kInt32
+float32 = 0  # core.proto.kFloat32
+CTensor = singa.Tensor
+
 
 class Tensor(object):
-    '''Create a Py Tensor, which wraps a swig converted Tensor from CPP Tensor
-
-    The three arguments are three attributes of the Tensor.
+    '''Python Tensor, which wraps a swig converted Tensor from CPP Tensor.
 
     Args:
-        shape (list<int>): a list of integers for the tensor shape. If shape is
-            not specified, the created tensor is called a dummy tensor.
-        device: a swig converted Device instance using the device moduel . If it
-            is None, then the default host device would be used.
-        dtype: data type. currently, most operations only accept kFloat32.
+        shape (tuple<int>): a tuple of integers for the tensor shape. If shape
+            is not specified, the created tensor is called a dummy tensor.
+        device: a swig device. If None, the default host device is used.
+        dtype: data type. currently, most operations only accept float32.
+        data: a numpy array or swig tensor.
+        requires_grad: boolean indicator for computing the gradient.
+        stores_grad: boolean indicator for storing and returning the gradient.
+                     Some intermediate tensors' gradients can be released
+                     during backward propagation. A tensor may require
+                     grad but not store it; however, if a tensor stores
+                     grad then it must require grad.
     '''
+    tensor_count = 0
 
-    def __init__(self, shape=None, device=None, dtype=core_pb2.kFloat32):
-        if shape is None:
-            # call constructor of singa::Tensor
-            self.singa_tensor = singa.Tensor()
-            return
+    def __init__(self,
+                 shape=(),
+                 device=None,
+                 dtype=float32,
+                 data=None,
+                 requires_grad=True,
+                 stores_grad=False,
+                 creator=None,
+                 name=None):
+        if device is None:
+            device = get_default_device()
+        if isinstance(data, np.ndarray):
+            self.data = CTensor(list(data.shape), device, dtype)
+            copy_from_numpy(self.data, data)
+        elif isinstance(data, CTensor):
+            self.data = data
+            assert data.device().id() == device.id(), 'not the same device'
         else:
-            assert isinstance(shape, tuple), 'shape should be tuple'
-            if device is None:
-                device = pydevice.get_default_device()
-                self.singa_tensor = singa.Tensor(list(shape), device, dtype)
-            else:
-                self.singa_tensor = singa.Tensor(list(shape), device, dtype)
-        self.shape = shape
-        self.dtype = dtype
+            self.data = CTensor(list(shape), device, dtype)
+
+        self.shape = tuple(self.data.shape())
         self.device = device
+        self.dtype = self.data.data_type()
+        self.requires_grad = requires_grad
+        self.stores_grad = stores_grad
+        if name is None:
+            self.name = 'Dummy#{}'.format(Tensor.tensor_count)
+            Tensor.tensor_count += 1
+        else:
+            self.name = name
+        if creator is None:
+            from . import autograd
+            self.creator = autograd.Dummy(self, name)
+        else:
+            self.creator = creator
+
+    def __getitem__(self, keys):
+        if type(keys) != tuple:
+            keys = (keys,)
+
+        ret = self.clone()
+        axis_index = 0
+        for key in keys:
+            if type(key) == int:
+                key += self.shape[axis_index] if key < 0 else 0
+
+                if not (key >= 0 and key < self.shape[axis_index]):
+                    raise ValueError("Invalid Index")
+
+                ret.data = singa.SliceOn(ret.data, key, key + 1, axis_index)
+            elif type(key) == slice:
+                start = key.start if key.start else 0
+                end = key.stop if key.stop else self.shape[axis_index]
+
+                start += self.shape[axis_index] if start < 0 else 0
+                end += self.shape[axis_index] if end < 0 else 0
+
+                if not (start >= 0 and start < end and
+                        end <= self.shape[axis_index]):
+                    raise ValueError("Invalid Index")
+
+                ret.data = singa.SliceOn(ret.data, start, end, axis_index)
+            else:
+                raise ValueError("Invalid Index")
+            axis_index += 1
+
+        return ret
+
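A short sketch of the indexing semantics implemented above: an integer key slices a single position but keeps the axis (SliceOn takes [key, key + 1)), while a slice key takes the half-open range [start, stop):

import numpy as np
from singa import tensor

t = tensor.from_numpy(np.arange(12, dtype=np.float32).reshape(3, 4))
row = t[1]            # keeps axis 0 with size 1, like numpy's t[1:2]
block = t[0:2, 1:3]   # half-open ranges on both axes
print(tensor.to_numpy(block))  # [[1. 2.]
                               #  [5. 6.]]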
+    def is_dummy(self):
+        '''
+        Returns:
+            True if the tensor is a dummy tensor
+        '''
+        return re.match(r'Dummy#\d+', self.name) is not None
 
     def ndim(self):
         '''
         Returns:
             the number of dimensions of the tensor.
         '''
-        return self.singa_tensor.nDim()
+        return self.data.nDim()
 
     def is_empty(self):
         '''
@@ -111,53 +185,100 @@
         Returns:
             True if the internal data is transposed; otherwise False.
         '''
-        return self.singa_tensor.transpose()
+        return self.data.transpose()
+
+    def transpose(self, axes=None):
+        '''Transpose the tensor along the given axes.
+
+        Args:
+            axes: axes to transpose
+
+        Returns:
+            new transposed tensor
+        '''
+        t = Tensor(self.shape, self.device, self.dtype)
+        if axes is None:
+            # DefaultTranspose reverses the axis order, so reverse the shape too
+            t.shape = tuple(reversed(self.shape))
+            t.data = singa.DefaultTranspose(self.data)
+        else:
+            if (len(axes) != len(self.shape)):
+                raise ValueError('dimensions do not match')
+            tshape = [self.shape[x] for x in axes]
+            t.shape = tuple(tshape)
+            t.data = singa.Transpose(self.data, list(axes))
+        return t
 
     def size(self):  # TODO(wangwei) compute size
         '''
         Returns:
             the number of elements of the tensor.
         '''
-        return self.singa_tensor.Size()
+        return self.data.Size()
 
     def memsize(self):
         '''
         Returns:
             the number of Bytes allocated for this tensor.
         '''
-        return self.singa_tensor.MemSize()
+        return self.data.MemSize()
+
+    def contiguous(self):
+        t = Tensor(self.shape, self.device, self.dtype)
+        t.data = singa.Contiguous(self.data)
+        return t
 
     def reshape(self, shape):
-        '''Change the tensor shape.
+        '''Return a new tensor with the given shape; the original
+            tensor is unchanged.
 
         Args:
-            shape (list<int>): new shape, which should have the same volumn as
-                the original shape.
+            shape (list<int>): new shape, which should have the same
+                volume as the original shape.
+
+        Returns:
+            new tensor reshaped
         '''
+        t = Tensor(self.shape, self.device, self.dtype)
         assert product(self.shape) == product(shape), \
             'product of shape should be equal'
-        self.shape = shape
-        self.singa_tensor.Reshape(list(shape))
+        t.shape = shape
+        t.data = singa.Reshape(self.data, shape)
+        return t
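Since reshape now returns a new tensor instead of mutating the receiver, the changed contract looks like this (a minimal sketch):

import numpy as np
from singa import tensor

a = tensor.from_numpy(np.arange(6, dtype=np.float32).reshape(2, 3))
b = a.reshape((3, 2))       # new tensor with the new shape
assert a.shape == (2, 3)    # the original tensor is unchanged
assert b.shape == (3, 2)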
 
     def reset_like(self, t):
         '''Reset the shape, dtype and device as the given tensor.
 
         Args:
-            t (Tensor)
+            t (Tensor): a tensor
         '''
-        self.singa_tensor.ResetLike(t.singa_tensor)
+        self.data.ResetLike(t.data)
         self.shape = t.shape
         self.device = t.device
         self.dtype = t.dtype
 
-    '''
     def as_type(self, dtype):
-        Change the data type.
+        '''Change the data type.
 
         Args:
-            dtype:
-        self.singa_tensor.AsType(dtype)
-    '''
+            dtype: accepts 'int', 'float', singa.kFloat32, singa.kInt
+
+        Returns:
+            new tensor with new type
+        '''
+        if dtype == singa.kInt:
+            pass
+        elif dtype == singa.kFloat32:
+            pass
+        elif dtype == 'int':
+            dtype = singa.kInt
+        elif dtype == 'float':
+            dtype = singa.kFloat32
+        else:
+            raise TypeError("invalid data type %s" % dtype)
+        t = Tensor(self.shape, self.device, dtype)
+        t.data = self.data.AsType(dtype)
+        return t
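A small sketch of the string-to-enum mapping above; note that a new tensor is returned and the receiver keeps its original dtype:

import numpy as np
from singa import tensor

f = tensor.from_numpy(np.array([1.5, 2.5], dtype=np.float32))
i = f.as_type('int')            # 'int' maps to singa.kInt internally
assert i.dtype == tensor.int32  # f itself still holds float32 data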
 
     def to_device(self, device):
         '''Move the tensor data onto a given device.
@@ -165,38 +286,47 @@
         Args:
             device: a swig Device converted from CudaGPU or CppCPU or OpenclGPU
         '''
-        self.singa_tensor.ToDevice(device)
+        self.data.ToDevice(device)
         self.device = device
 
     def to_host(self):
         '''Move the tensor data onto the default host CppCPU device.
         '''
-        self.singa_tensor.ToHost()
-        self.device = pydevice.default_device
+        self.data.ToHost()
+        self.device = get_default_device()
 
     def l2(self):
         '''
         Returns:
             the L2 norm.
         '''
-        return self.singa_tensor.L2()
+        return self.data.L2()
 
     def l1(self):
         '''
         Returns:
             the L1 norm.
         '''
-        return self.singa_tensor.L1()
+        return self.data.L1()
 
-    def set_value(self, x):
+    def set_value(self, x, inplace=True):
         '''Set all elements of the tensor to be the given value.
 
         Args:
-            x (float), a float value to be set to all elements.
+            x (float): a float value to be set to all elements.
+            inplace: inplace flag
+
+        Returns:
+            this tensor
         '''
         # assert type(x) == float, 'set value only accepts float input'
         # if isinstance(x, float):
-        self.singa_tensor.SetFloatValue(float(x))
+        if not inplace:
+            # return new tensor filled with value
+            raise NotImplementedError
+
+        self.data.SetFloatValue(float(x))
+        return self
 
     def copy_from_numpy(self, np_array, offset=0):
         ''' Copy the data from the numpy array.
@@ -210,11 +340,11 @@
             np_array = np_array.flatten()
         dt = np_array.dtype
         if dt == np.float32:
-            self.singa_tensor.CopyFloatDataFromHostPtr(np_array)
+            self.data.CopyFloatDataFromHostPtr(np_array)
         elif dt == np.int or dt == np.int32:
-            self.singa_tensor.CopyIntDataFromHostPtr(np_array)
+            self.data.CopyIntDataFromHostPtr(np_array)
         else:
-            print 'Not implemented yet for ', dt
+            print('Not implemented yet for ', dt)
 
     def copy_data(self, t):
         '''Copy data from other Tensor instance.
@@ -222,29 +352,107 @@
         Args:
             t (Tensor): source Tensor.
         '''
+        assert (t.size() == self.size()), "tensor shape should be the same"
         assert isinstance(t, Tensor), 't must be a singa Tensor instance'
-        self.singa_tensor.CopyData(t.singa_tensor)
+        self.data.CopyData(t.data)
+
+    def copy_from(self, t, offset=0):
+        ''' Copy the data from the numpy array or other Tensor instance
+
+        Args:
+            t (Tensor or np array): source Tensor or numpy array
+            offset (int): destination offset
+        '''
+        if isinstance(t, Tensor):
+            self.copy_data(t)
+        elif isinstance(t, np.ndarray):
+            self.copy_from_numpy(t)
+        else:
+            raise ValueError("t should be Tensor or numpy array.")
 
     def clone(self):
         '''
         Returns:
             a new Tensor which does deep copy of this tensor
         '''
-        return _call_singa_func(self.singa_tensor.Clone)
+        return _call_singa_func(self.data.Clone)
 
-    def T(self):
-        ''' shallow copy, negate the transpose field.
+    def repeat(self, repeats, axis):
+        '''Repeat the data of a tensor along an axis.
+
+        Args:
+            repeats (int or sequence): the number of times to repeat each
+                element along the axis
+            axis (int): the axis along which to repeat. If it is None, the
+                repeated tensor is flattened. If it is not None, repeats may
+                be a sequence whose length matches the size of that axis.
 
         Returns:
-            a new Tensor which shares the underlying data memory (shallow copy)
-            but is marked as a transposed version of this tensor.
+            the repeated tensor
+
         '''
-        return _call_singa_func(self.singa_tensor.T)
+        t = Tensor()
+        t_ndim = self.ndim()
+        if isinstance(repeats, int) or isinstance(repeats, complex):
+            if repeats < 0:
+                raise ValueError(
+                    "'repeats' should not be negative: {}".format(repeats))
+            if axis is not None and axis < 0:
+                axis += t_ndim
+            if axis is None:
+                # axis = 9999 tells the backend to repeat the flattened tensor
+                axis = 9999
+                t.shape = (product(self.shape) * repeats,)
+                t.data = self.data.Repeat([repeats], axis)
+            elif axis >= 0:
+                t_shape = list(self.shape)
+                t_shape[axis] = self.shape[axis] * repeats
+                t.shape = tuple(t_shape)
+                t.data = self.data.Repeat([repeats], axis)
+
+        elif isinstance(repeats, tuple) or isinstance(repeats, list):
+            for rep in repeats:
+                if rep < 0:
+                    raise ValueError(
+                        "'repeats' should be int or sequence: {}".format(
+                            repeats))
+
+            if axis is not None and axis < 0:
+                axis += t_ndim
+            if axis is None:
+                raise ValueError(
+                    "when axis is None, 'repeats' should be int: {}".format(
+                        repeats))
+            elif axis >= 0:
+                t_shape = list(self.shape)
+                t_shape[axis] = sum(repeats)
+                t.shape = tuple(t_shape)
+                t.data = self.data.Repeat(list(repeats), axis)
+        else:
+            raise ValueError('repeats should be int or sequence')
+
+        return t
+
+    def T(self):
+        ''' shallow copy.
+
+        Returns:
+            a new Tensor which shares the underlying data memory (shallow copy).
+        '''
+        return _call_singa_func(singa.DefaultTranspose, self.data)
 
     def copy(self):
         '''shallow copy calls copy constructor of singa::Tensor
+
+        Returns:
+            new tensor copied
         '''
-        return _call_singa_func(singa.Tensor, self.singa_tensor)
+        return _call_singa_func(CTensor, self.data)
 
     def deepcopy(self):
         '''Same as clone().
@@ -254,79 +462,112 @@
         '''
         return self.clone()
 
-    def bernoulli(self, p):
+    def bernoulli(self, p, inplace=True):
         '''Sample 0/1 for each element according to the given probability.
 
         Args:
             p (float): with probability p, each element is sample to 1.
-        '''
-        singa.Bernoulli(float(p), self.singa_tensor)
+            inplace: inplace flag
 
-    def gaussian(self, mean, std):
+        Returns:
+            this tensor
+        '''
+        if not inplace:
+            # return new tensor
+            raise NotImplementedError
+
+        singa.Bernoulli(float(p), self.data)
+        return self
+
+    def gaussian(self, mean, std, inplace=True):
         '''Generate a value for each element following a Gaussian distribution.
 
         Args:
             mean (float): mean of the distribution
             std (float): standard variance of the distribution
-        '''
-        singa.Gaussian(float(mean), float(std), self.singa_tensor)
+            inplace: inplace flag
 
-    def uniform(self, low, high):
+        Returns:
+            this tensor
+        '''
+        if not inplace:
+            # return new tensor
+            raise NotImplementedError
+
+        singa.Gaussian(float(mean), float(std), self.data)
+        return self
+
+    def uniform(self, low, high, inplace=True):
         '''Generate a value for each element following a uniform distribution.
 
         Args:
             low (float): the lower bound
             high (float): the upper bound
-        '''
-        singa.Uniform(float(low), float(high), self.singa_tensor)
+            inplace: inplace flag
 
+        Returns:
+            this tensor
+        '''
+        if not inplace:
+            # return new tensor
+            raise NotImplementedError
+
+        singa.Uniform(float(low), float(high), self.data)
+        return self
+
+    @deprecated(reason="use broadcast instead")
     def add_column(self, v):
-        '''Add a tensor to each column of this tensor.
+        '''(DEPRECATED, use broadcast) Add a tensor to each column of this tensor.
 
         Args:
             v (Tensor): a Tensor to be added as a column to this tensor.
         '''
-        singa.AddColumn(v.singa_tensor, self.singa_tensor)
+        singa.AddColumn(v.data, self.data)
 
+    @deprecated(reason="use broadcast instead")
     def add_row(self, v):
-        '''Add a tensor to each row of this tensor.
+        '''(DEPRECATED, use broadcast) Add a tensor to each row of this tensor.
 
         Args:
             v (Tensor): a Tensor to be added as a row to this tensor.
         '''
-        singa.AddRow(v.singa_tensor, self.singa_tensor)
+        singa.AddRow(v.data, self.data)
 
+    @deprecated(reason="use broadcast instead")
     def div_column(self, v):
-        '''Divide each column of this tensor by v.
+        '''(DEPRECATED, use broadcast) Divide each column of this tensor by v.
 
         Args:
             v (Tensor): 1d tensor of the same length as the column of self.
         '''
-        singa.DivColumn(v.singa_tensor, self.singa_tensor)
+        singa.DivColumn(v.data, self.data)
 
+    @deprecated(reason="use broadcast instead")
     def div_row(self, v):
-        '''Divide each row of this tensor by v.
+        '''(DEPRECATED, use broadcast) Divide each row of this tensor by v.
 
         Args:
             v (Tensor): 1d tensor of the same length as the row of self.
         '''
-        singa.DivRow(v.singa_tensor, self.singa_tensor)
+        singa.DivRow(v.data, self.data)
 
+    @deprecated(reason="use broadcast instead")
     def mult_column(self, v):
-        '''Multiply each column of this tensor by v element-wisely.
+        '''(DEPRECATED, use broadcast) Multiply each column of this tensor by v element-wise.
 
         Args:
             v (Tensor): 1d tensor of the same length as the column of self.
         '''
-        singa.MultColumn(v.singa_tensor, self.singa_tensor)
+        singa.MultColumn(v.data, self.data)
 
+    @deprecated(reason="use broadcast instead")
     def mult_row(self, v):
-        '''Multiply each row of this tensor by v element-wisely.
+        '''(DEPRECATED, use broadcast) Multiply each row of this tensor by v element-wise.
 
         Args:
             v (Tensor): 1d tensor of the same length as the row of self.
         '''
-        singa.MultRow(v.singa_tensor, self.singa_tensor)
+        singa.MultRow(v.data, self.data)
 
     '''
     python operators (+=, -=, *=, /=) for singa::Tensor unary operators
@@ -336,49 +577,61 @@
         ''' inplace element-wise addition with a tensor or a float value.
 
         Args:
-            x (float or Tensor):
+            x (float or Tensor): input value
+
+        Returns:
+            this tensor
         '''
         if isinstance(x, Tensor):
-            self.singa_tensor += x.singa_tensor
+            self.data += x.data
         else:
-            self.singa_tensor += float(x)
+            self.data += float(x)
         return self
 
     def __isub__(self, x):
         ''' inplace element-wise subtraction with a tensor or a float value.
 
         Args:
-            x (float or Tensor):
+            x (float or Tensor): input value
+
+        Returns:
+            this tensor
         '''
 
         if isinstance(x, Tensor):
-            self.singa_tensor -= x.singa_tensor
+            self.data -= x.data
         else:
-            self.singa_tensor -= float(x)
+            self.data -= float(x)
         return self
 
     def __imul__(self, x):
         ''' inplace element-wise multiplication with a tensor or a float value.
 
         Args:
-            x (float or Tensor):
+            x (float or Tensor): input value
+
+        Returns:
+            this tensor
         '''
         if isinstance(x, Tensor):
-            self.singa_tensor *= x.singa_tensor
+            self.data *= x.data
         else:
-            self.singa_tensor *= float(x)
+            self.data *= float(x)
         return self
 
-    def __idiv__(self, x):
+    def __itruediv__(self, x):
         ''' inplace element-wise division by a tensor or a float value.
 
         Args:
-            x (float or Tensor):
+            x (float or Tensor): input value
+
+        Returns:
+            this tensor
         '''
         if isinstance(x, Tensor):
-            self.singa_tensor /= x.singa_tensor
+            self.data /= x.data
         else:
-            self.singa_tensor /= float(x)
+            self.data /= float(x)
         return self
 
     '''
@@ -388,63 +641,73 @@
 
     def __add__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__add__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__add__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.AddFloat,
-                                    self.singa_tensor, rhs)
+            return _call_singa_func(singa.AddFloat, self.data, rhs)
 
     def __sub__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__sub__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__sub__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.SubFloat,
-                                    self.singa_tensor, rhs)
+            return _call_singa_func(singa.SubFloat, self.data, rhs)
 
     def __mul__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__mul__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__mul__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.MultFloat,
-                                    self.singa_tensor, rhs)
+            return _call_singa_func(singa.MultFloat, self.data, rhs)
 
     def __div__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__div__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__div__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.DivFloat,
-                                    self.singa_tensor, rhs)
+            return _call_singa_func(singa.DivFloat, self.data, rhs)
+
+    def __truediv__(self, rhs):
+        if isinstance(rhs, Tensor):
+            return from_raw_tensor(singa.__div__(self.data, rhs.data))
+        else:
+            return _call_singa_func(singa.DivFloat, self.data, rhs)
+
+    def __floordiv__(self, rhs):
+        if isinstance(rhs, Tensor):
+            tmp = from_raw_tensor(singa.__div__(self.data, rhs.data))
+            return _call_singa_func(singa.Floor, tmp.data)
+        else:
+            tmp = _call_singa_func(singa.DivFloat, self.data, rhs)
+            return _call_singa_func(singa.Floor, tmp.data)
 
     def __lt__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__lt__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__lt__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.LTFloat, self.singa_tensor, rhs)
+            return _call_singa_func(singa.LTFloat, self.data, rhs)
 
     def __le__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__le__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__le__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.LEFloat, self.singa_tensor, rhs)
+            return _call_singa_func(singa.LEFloat, self.data, rhs)
 
     def __gt__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__gt__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__gt__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.GTFloat, self.singa_tensor, rhs)
+            return _call_singa_func(singa.GTFloat, self.data, rhs)
 
     def __ge__(self, rhs):
         if isinstance(rhs, Tensor):
-            return from_raw_tensor(
-                singa.__ge__(self.singa_tensor, rhs.singa_tensor))
+            return from_raw_tensor(singa.__ge__(self.data, rhs.data))
         else:
-            return _call_singa_func(singa.GEFloat, self.singa_tensor, rhs)
+            return _call_singa_func(singa.GEFloat, self.data, rhs)
+
+    def __eq__(self, rhs):
+        if isinstance(rhs, Tensor):
+            return from_raw_tensor(singa.__eq__(self.data, rhs.data))
+        elif rhs is None:
+            return False
+        else:
+            return _call_singa_func(singa.EQFloat, self.data, rhs)
 
     def __radd__(self, lhs):
         lhs = float(lhs)
@@ -474,13 +737,27 @@
         one /= self
         return one
 
+    def __rtruediv__(self, lhs):
+        lhs = float(lhs)
+        one = Tensor(self.shape, self.device, self.dtype)
+        one.set_value(lhs)
+        one /= self
+        return one
+
+    def __repr__(self):
+        return np.array2string(to_numpy(self))
+
+
+''' alias Tensor to PlaceHolder
+'''
+PlaceHolder = Tensor
 ''' python functions for global functions in Tensor.h
 '''
 
 
 def from_raw_tensor(t):
     x = Tensor(t.shape(), t.device(), t.data_type())
-    x.singa_tensor = t
+    x.data = t
     return x
 
 
@@ -491,30 +768,65 @@
     return ret
 
 
+def zeros_like(t):
+    ret = Tensor(t.shape, t.device, t.dtype)
+    ret.set_value(float(0))
+    return ret
+
+
+def ones_like(t):
+    ret = Tensor(t.shape, t.device, t.dtype)
+    ret.set_value(float(1))
+    return ret
+
+
 def product(shape):
     return reduce(lambda x, y: x * y, shape, 1)  # initializer 1 handles empty shapes
 
 
 def sizeof(dtype):
-    '''
+    '''Get size of datatype
+
+    Args:
+        dtype: singa datatype
+
     Returns:
         the number of bytes of the given SINGA data type defined in core.proto
     '''
     return singa.SizeOf(dtype)
 
 
-def reshape(t, s):
-    '''Reshape the input tensor with the given shape.
+def contiguous(tensor):
+    '''Return a tensor whose underlying data is guaranteed to be contiguous.'''
+    return _call_singa_func(singa.Contiguous, tensor.data)
+
+
+def reshape(tensor, shape):
+    '''Reshape the input tensor to the given shape; the
+    original tensor is unchanged.
 
     Args:
-        t (Tensor): the tensor to be changed
-        s (list<int>): the new shape, which should have the same volumn as the
+        tensor (Tensor): the tensor to be changed
+        shape (list<int>): the new shape, which should have the same volume as the
             old shape.
 
     Returns:
         the new Tensor
     '''
-    return _call_singa_func(singa.Reshape, t.singa_tensor, s)
+    return _call_singa_func(singa.Reshape, tensor.data, shape)
+
+
+def transpose(t, axes=None):
+    '''Transpose the input tensor along the given axes.
+
+    Args:
+        t: input tensor
+        axes: axes to transpose
+
+    Returns:
+        the transposed tensor
+    '''
+    ret = t.transpose(axes)
+    return ret
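A quick sketch of the axes argument, which follows numpy.transpose: entry i of axes names the input axis that becomes output axis i:

import numpy as np
from singa import tensor

x = tensor.from_numpy(np.zeros((2, 3, 4), dtype=np.float32))
y = tensor.transpose(x, (2, 0, 1))  # output axis 0 is input axis 2, etc.
assert y.shape == (4, 2, 3)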
 
 
 def copy_data_to_from(dst, src, size, dst_offset=0, src_offset=0):
@@ -528,11 +840,10 @@
         dst_offset (int): offset in terms of elements to the start of dst
         src_offset (int): offset in terms of elements to the start of src
     '''
-    singa.CopyDataToFrom(dst.singa_tensor, src.singa_tensor, size,
-                         dst_offset, src_offset)
+    singa.CopyDataToFrom(dst.data, src.data, size, dst_offset, src_offset)
 
 
-def from_numpy(np_array):
+def from_numpy(np_array, dev=None):
     '''Create a Tensor instance with the shape, dtype and values from the numpy
     array.
 
@@ -551,18 +862,26 @@
         np_array = np_array.astype(np.int32)
 
     if np_array.dtype == np.float32:
-        dtype = core_pb2.kFloat32
+        dtype = float32
     else:
         assert np_array.dtype == np.int32, \
             'Only float and int tensors are supported'
-        dtype = core_pb2.kInt
+        dtype = int32
     ret = Tensor(np_array.shape, dtype=dtype)
     ret.copy_from_numpy(np_array)
+    if dev:
+        ret.to_device(dev)
     return ret
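With the new dev parameter, the host-side copy can be moved to a device in a single call. A minimal sketch using the default device:

import numpy as np
from singa import device, tensor

dev = device.get_default_device()
x = tensor.from_numpy(np.ones((2, 2), dtype=np.float32), dev)  # copied on host, then moved to dev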
 
 
 def to_host(t):
     '''Copy the data to a host tensor.
+
+    Args:
+        t (Tensor): a Tensor
+
+    Returns:
+        new Tensor at host
     '''
     ret = t.clone()
     ret.to_host()
@@ -573,18 +892,18 @@
     '''Copy the tensor into a numpy array.
 
     Args:
-        t (Tensor), a Tensor
+        t (Tensor): a Tensor
 
     Returns:
         a numpy array
     '''
     th = to_host(t)
-    if th.dtype == core_pb2.kFloat32:
-        np_array = th.singa_tensor.GetFloatValue(int(th.size()))
-    elif th.dtype == core_pb2.kInt:
-        np_array = th.singa_tensor.GetIntValue(int(th.size()))
+    if th.dtype == float32:
+        np_array = th.data.GetFloatValue(int(th.size()))
+    elif th.dtype == int32:
+        np_array = th.data.GetIntValue(int(th.size()))
     else:
-        print 'Not implemented yet for ', th.dtype
+        print('Not implemented yet for ', th.dtype)
     return np_array.reshape(th.shape)
 
 
@@ -596,7 +915,7 @@
     Returns:
         a new Tensor whose element y = abs(x), x is an element of t
     '''
-    return _call_singa_func(singa.Abs, t.singa_tensor)
+    return _call_singa_func(singa.Abs, t.data)
 
 
 def exp(t):
@@ -607,7 +926,18 @@
     Returns:
         a new Tensor whose element y = exp(x), x is an element of t
     '''
-    return _call_singa_func(singa.Exp, t.singa_tensor)
+    return _call_singa_func(singa.Exp, t.data)
+
+
+def ceil(t):
+    '''
+    Args:
+        t (Tensor): input Tensor
+
+    Returns:
+        a new Tensor whose element y = ceil(x), x is an element of t
+    '''
+    return _call_singa_func(singa.Ceil, t.data)
 
 
 def log(t):
@@ -618,19 +948,7 @@
     Returns:
         a new Tensor whose element y = log(x), x is an element of t
     '''
-    return _call_singa_func(singa.Log, t.singa_tensor)
-
-
-def relu(t):
-    '''
-    Args:
-        t (Tensor): input Tensor
-
-    Returns:
-        a new Tensor whose element y = x if x >0; otherwise 0; x is an element
-        of t
-    '''
-    return _call_singa_func(singa.ReLU, t.singa_tensor)
+    return _call_singa_func(singa.Log, t.data)
 
 
 def sigmoid(t):
@@ -641,7 +959,7 @@
     Returns:
         a new Tensor whose element y = sigmoid(x); x is an element of t
     '''
-    return _call_singa_func(singa.Sigmoid, t.singa_tensor)
+    return _call_singa_func(singa.Sigmoid, t.data)
 
 
 def sign(t):
@@ -652,7 +970,7 @@
     Returns:
         a new Tensor whose element y = sign(x)
     '''
-    return _call_singa_func(singa.Sign, t.singa_tensor)
+    return _call_singa_func(singa.Sign, t.data)
 
 
 def sqrt(t):
@@ -663,7 +981,7 @@
     Returns:
         a new Tensor whose element y = sqrt(x), x is an element of t
     '''
-    return _call_singa_func(singa.Sqrt, t.singa_tensor)
+    return _call_singa_func(singa.Sqrt, t.data)
 
 
 def square(t):
@@ -674,7 +992,7 @@
     Returns:
         a new Tensor whose element y = x * x, x is an element of t
     '''
-    return _call_singa_func(singa.Square, t.singa_tensor)
+    return _call_singa_func(singa.Square, t.data)
 
 
 def tanh(t):
@@ -685,26 +1003,66 @@
     Returns:
         a new Tensor whose element y = tanh(x), x is an element of t
     '''
-    return _call_singa_func(singa.Tanh, t.singa_tensor)
+    return _call_singa_func(singa.Tanh, t.data)
 
 
-def sum(t, axis=None):
-    '''Sum elements of the input tensor long the given axis.
+def sum(t, axis=None, out=None):
+    '''Sum of tensor elements over the given axis.
 
     Args:
-        t (Tensor): input Tensor
-        axis (int, optional): if None, the summation is done over all elements;
-            if axis is provided, then it is calculated along the given axis,
-            e.g. 0 -- sum each column; 1 -- sum each row.
+        t: Singa.tensor
+            The tensor to be summed
+        axis: None or int or tuple of ints, optional
+            Axis or axes along which a sum is performed.
+            The default, axis=None, sums all of the elements of the input tensor.
+            If axis is negative it counts from the last to the first axis.
+            If axis is a tuple of ints, a sum is performed on all of the axes specified
+            in the tuple instead of a single axis or all the axes as before.
+        out: Singa.tensor, optional
+            Alternative output tensor in which to place the result.
+            It must have the same shape as the expected output,
+            but the type of the output values will be cast if necessary.
 
     Returns:
-        a float value as the sum of all elements, or a new Tensor
+        A tensor with the same shape as t, with the specified axis removed.
+        If t is a 0-d tensor, or if axis is None, a scalar-like tensor is returned.
+        If an output tensor is specified, a reference to out is returned.
     '''
 
+    t_shape = t.shape
+    t_ndim = t.ndim()
+
     if axis is None:
-        return singa.SumAsFloat(t.singa_tensor)
+        one = Tensor(t.shape, t.device)
+        one.set_value(1.0)
+        ret = tensordot(t, one, t_ndim)
+
+    if isinstance(axis, int):
+        if axis < 0:
+            axis += t_ndim
+
+        axis_shape = t_shape[axis]
+        axis_shape = int(axis_shape)
+        one = Tensor(shape=(axis_shape,), device=t.device)
+        one.set_value(1.0)
+        ret = tensordot(t, one, axes=([axis], [0]))
+
+    if isinstance(axis, tuple):
+        l_axis = list(axis)
+        axis_shape = [t_shape[x] for x in axis]
+        axisshape = tuple(axis_shape)
+        one = Tensor(axisshape, t.device)
+        one.set_value(1.0)
+        one_axis = [x for x in range(one.ndim())]
+        ret = tensordot(t, one, (l_axis, one_axis))
+
+    if out is not None:
+        if out.shape != ret.shape:
+            raise ValueError('dimensions do not match')
+        # Tensor does not define __setitem__, so copy into out explicitly
+        out.copy_data(ret)
+        return out
     else:
-        return _call_singa_func(singa.Sum, t.singa_tensor, axis)
+        return ret
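Because the reduction is implemented as a tensordot with an all-ones tensor, the axis semantics mirror numpy.sum. A small sketch:

import numpy as np
from singa import tensor

t = tensor.from_numpy(np.arange(6, dtype=np.float32).reshape(2, 3))
s0 = tensor.sum(t, axis=0)   # contracts axis 0 with a ones vector -> shape (3,)
print(tensor.to_numpy(s0))   # [3. 5. 7.]
s = tensor.sum(t)            # axis=None reduces everything to a scalar-like (1, 1) tensor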
 
 
 def pow(t, x, out=None):
@@ -721,14 +1079,14 @@
     '''
     if out is None:
         if isinstance(x, Tensor):
-            return _call_singa_func(singa.Pow, t.singa_tensor, x.singa_tensor)
+            return _call_singa_func(singa.Pow, t.data, x.data)
         else:
-            return _call_singa_func(singa.PowFloat, t.singa_tensor, x)
+            return _call_singa_func(singa.PowFloat, t.data, x)
     else:
         if isinstance(x, Tensor):
-            singa.PowWithRet(t.singa_tensor, x.singa_tensor, out.singa_tensor)
+            singa.PowWithRet(t.data, x.data, out.data)
         else:
-            singa.PowFloatWitRet(t.singa_tensor, x, out.singa_tensor)
+            singa.PowFloatWitRet(t.data, x, out.data)
         return out
 
 
@@ -744,9 +1102,9 @@
         a float value if axis is None; otherwise, a new Tensor for the result.
     '''
     if t.ndim() > 1:
-        return _call_singa_func(singa.Average, t.singa_tensor, axis)
+        return _call_singa_func(singa.Average, t.data, axis)
     else:
-        return singa.SumAsFloat(t.singa_tensor) / t.size()
+        return singa.SumAsFloat(t.data) / t.size()
 
 
 def softmax(t, out=None):
@@ -760,9 +1118,9 @@
         the result Tensor
     '''
     if out is None:
-        return _call_singa_func(singa.SoftMax, t.singa_tensor)
+        return _call_singa_func(singa.SoftMax, t.data)
     else:
-        singa.SoftMax(t.singa_tensor, out.singa_tensor)
+        singa.SoftMax(t.data, out.data)
         return out
 
 
@@ -822,12 +1180,26 @@
     return t >= x
 
 
+def eq(t, x):
+    '''Element-wise comparison for t == x.
+
+    Args:
+        t (Tensor): left hand side operand
+        x (Tensor or float): right hand side operand
+
+    Returns:
+        a Tensor with each element being t[i] == x ? 1.0f:0.0f,
+        or t[i] == x[i] ? 1.0f:0.0f
+    '''
+    return t == x
+
+
 def add(lhs, rhs, ret=None):
     '''Element-wise addition.
 
     Args:
-        lhs (Tensor)
-        rhs (Tensor)
+        lhs (Tensor): lhs tensor
+        rhs (Tensor): rhs tensor
         ret (Tensor, optional): if not None, the result is stored in it;
             otherwise, a new Tensor would be created for the result.
 
@@ -839,9 +1211,9 @@
         return lhs + rhs
     else:
         if isinstance(rhs, Tensor):
-            singa.Add(lhs.singa_tensor, rhs.singa_tensor, ret.singa_tensor)
+            singa.Add(lhs.data, rhs.data, ret.data)
         else:
-            singa.AddFloatWithRet(lhs.singa_tensor, rhs, ret.singa_tensor)
+            singa.AddFloatWithRet(lhs.data, rhs, ret.data)
         return ret
 
 
@@ -849,8 +1221,8 @@
     '''Element-wise subtraction.
 
     Args:
-        lhs (Tensor)
-        rhs (Tensor)
+        lhs (Tensor): lhs tensor
+        rhs (Tensor): rhs tensor
         ret (Tensor, optional): if not None, the result is stored in it;
             otherwise, a new Tensor would be created for the result.
 
@@ -862,9 +1234,9 @@
         return lhs - rhs
     else:
         if isinstance(rhs, Tensor):
-            singa.Sub(lhs.singa_tensor, rhs.singa_tensor, ret.singa_tensor)
+            singa.Sub(lhs.data, rhs.data, ret.data)
         else:
-            singa.SubFloatWithRet(lhs.singa_tensor, rhs, ret.singa_tensor)
+            singa.SubFloatWithRet(lhs.data, rhs, ret.data)
         return ret
 
 
@@ -872,8 +1244,8 @@
     '''Element-wise multiplication.
 
     Args:
-        lhs (Tensor)
-        rhs (Tensor)
+        lhs (Tensor): lhs tensor
+        rhs (Tensor): rhs tensor
         ret (Tensor, optional): if not None, the result is stored in it;
             otherwise, a new Tensor would be created for the result.
 
@@ -886,45 +1258,321 @@
         return lhs * rhs
     else:
         if isinstance(rhs, Tensor):
-            singa.EltwiseMult(lhs.singa_tensor, rhs.singa_tensor,
-                              ret.singa_tensor)
+            singa.EltwiseMult(lhs.data, rhs.data, ret.data)
         else:
-            singa.EltwiseMultFloatWithRet(lhs.singa_tensor, rhs,
-                                          ret.singa_tensor)
+            singa.EltwiseMultFloatWithRet(lhs.data, rhs, ret.data)
         return ret
 
 
 def mult(A, B, C=None, alpha=1.0, beta=0.0):
     '''Do matrix-matrix or matrix-vector multiplication.
-
     This function returns C = alpha * A * B + beta * C
+    Currently the following cases are supported:
+        case 1 - matrix * vector:
+            A (Tensor): 2d Tensor
+            B (Tensor): 1d Tensor, GEMV would be invoked
+        case 2 - matrix * matrix:
+            A (Tensor): 2d Tensor
+            B (Tensor): 2d Tensor, GEMM would be invoked
+        case 3 - batched matrix * batched matrix:
+            A (Tensor): 3/4d Tensor
+            B (Tensor): 3/4d Tensor, batched GEMM would be invoked
+            where the leading (batch) dimension(s) of A and B must match
+            exactly, e.g. C{2,3,4,6} = A{2,3,4,5} * B{2,3,5,6}
 
     Args:
-        A (Tensor): 2d Tensor
-        B (Tensor): If B is a 1d Tensor, GEMV would be invoked for matrix-vector
-            multiplication; otherwise GEMM would be invoked.
-        C (Tensor, optional): for storing the result; If None, a new Tensor
-            would be created.
-        alpha (float)
-        beta (float)
+        A: n-d tensor
+        B: n-d tensor
+        C (Tensor, optional): for storing the result; If None, a new Tensor would be created.
+        alpha (float): scaling factor
+        beta (float): scaling factor
 
     Returns:
         the result Tensor
     '''
     if C is None:
-        return _call_singa_func(singa.Mult, A.singa_tensor, B.singa_tensor)
+        return _call_singa_func(singa.Mult, A.data, B.data)
     else:
-        singa.MultWithScale(alpha, A.singa_tensor, B.singa_tensor,
-                            beta, C.singa_tensor)
+        singa.MultWithScale(alpha, A.data, B.data, beta, C.data)
         return C
 
 
+def einsum(ops, *args):
+    '''Do the matrix-to-matrix einsum calculation according to the operands.
+    TODO: implement the following in cpp (just like the numpy functions):
+    1. sum(A, axis=None)
+    2. repeat(A, repeats)
+    3. transpose(A, axes=None)
+    Warning: this function only supports einsum between two matrices.
+
+    Args:
+        ops(string): the string specifying the subscripts for summation, such
+            as 'ki,kj->kij'. All 26 lowercase letters can be used here.
+        args(list of array_like): the tensors for the operation;
+            only two tensors are supported here.
+
+    Returns:
+        Singa.Tensor: the output matrix of the einsum calculation
+
+    The best way to understand this function is to try the examples below:
+    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
+    A = A_.reshape(4,3)
+    B = A_.reshape(3,4)
+
+    Here this einsum calculation is the same as normal 'mult'
+    Res = einsum('ij,jk->ik',A,B)
+
+    >>> [[ 20  23  26  29]
+         [ 56  68  80  92]
+         [ 92 113 134 155]
+         [128 158 188 218]]
+
+    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
+    A = A_.reshape(4,3)
+    B = A_.reshape(4,3)
+
+    Here the einsum calculation is the same as normal 'eltwise_mult'
+    Res = einsum('ki,ki->ki',A,B)
+
+    >>> [[  0   1   4]
+         [  9  16  25]
+         [ 36  49  64]
+         [ 81 100 121]]
+
+    A = [0,1,2,3,4,5,6,7,8,9,10,11]
+    A = A.reshape(4,3)
+
+    Res = einsum('ki,kj->kij',A,A)
+    >>> [[[  0   0   0]
+          [  0   1   2]
+          [  0   2   4]]
+         [[  9  12  15]
+          [ 12  16  20]
+          [ 15  20  25]]
+         [[ 36  42  48]
+          [ 42  49  56]
+          [ 48  56  64]]
+         [[ 81  90  99]
+          [ 90 100 110]
+          [ 99 110 121]]]
+
+    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
+    A = A_.reshape(3,2,2)
+
+    Res = einsum('kia,kja->kij',A,A)
+    >>> [[[  1   3]
+          [  3  13]]
+         [[ 41  59]
+          [ 59  85]]
+         [[145 179]
+          [179 221]]]
+    '''
+
+    if len(ops) == 0:
+        raise ValueError("No input operands")
+
+    if len(args) != 2:
+        raise ValueError("Currently only two operands are supported")
+    # to get the input and output ops
+    inputops, outputops = ops.split('->')
+    inputops = inputops.split(',')
+
+    # to get the two input tensor
+    A = args[0]
+    B = args[1]
+
+    if A.ndim() != len(inputops[0]) or B.ndim() != len(inputops[1]):
+        raise ValueError("input dim doesn't match operands")
+
+    # to get the indices in input but not in output
+    sums = sorted(list((set(inputops[0]) | set(inputops[1])) - set(outputops)))
+
+    # to get the indices that A and B use to broadcast to each other
+    broadcast_A = sorted(list(set(inputops[1]) - set(inputops[0])))
+    broadcast_B = sorted(list(set(inputops[0]) - set(inputops[1])))
+    # to get all the indices in input
+    outputall = sorted(list(set(inputops[0]) | set(inputops[1])))
+
+    # Map indices to axis integers
+    sums = [outputall.index(x) for x in sums]
+    broadcast_idA = [inputops[1].find(x) for x in broadcast_A]
+    broadcast_idB = [inputops[0].find(x) for x in broadcast_B]
+
+    broadcast_a = [B.shape[x] for x in broadcast_idA]
+    broadcast_b = [A.shape[x] for x in broadcast_idB]
+
+    # get the transpose and reshape parameters used in the elementwise
+    # calculation
+    transpose_A = [(list(inputops[0]) + broadcast_A).index(x) for x in outputall]
+    transpose_B = [(list(inputops[1]) + broadcast_B).index(x) for x in outputall]
+
+    reshape_A = list(A.shape) + broadcast_a
+    reshape_B = list(B.shape) + broadcast_b
+
+    if len(broadcast_a) == 0:
+        broadcast_a = [1]
+    if len(broadcast_b) == 0:
+        broadcast_b = [1]
+    mult_A = repeat(A, product(broadcast_a))
+    mult_A = mult_A.reshape(reshape_A)
+    mult_A = transpose(mult_A, transpose_A)
+    mult_B = repeat(B, product(broadcast_b))
+    mult_B = mult_B.reshape(reshape_B)
+    mult_B = transpose(mult_B, transpose_B)
+
+    if mult_A.shape != mult_B.shape:
+        raise ValueError("Error: matrix dimension mismatch")
+    res = eltwise_mult(mult_A, mult_B)
+    sum_R = sorted(sums, reverse=True)
+    for i in sum_R:
+        res = sum(res, axis=i)
+    transpose_res = [sorted(list(outputops)).index(x) for x in list(outputops)]
+    res = transpose(res, transpose_res)
+
+    return res
+
+
+def repeat(t, repeats, axis=None):
+    '''Return the repeated tensor
+
+    Args:
+        t(tensor): the tensor to be repeated
+        repeats(int or a sequence): the number that the tensor need to repeat for
+        axis (int):the axis to do repeat
+                    If it is None, then the repeated tensor will be flattened.If it isn't None,
+                    the repeats could be sequence, but it's size should match the axis's shape
+
+    Returns:
+        the tensor which has been repeated
+    '''
+    ret = t.repeat(repeats, axis)
+    return ret
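A short sketch of both forms of repeats, assuming the C++ Repeat kernel follows numpy.repeat semantics:

import numpy as np
from singa import tensor

t = tensor.from_numpy(np.array([[1, 2], [3, 4]], dtype=np.float32))
r = tensor.repeat(t, 2, axis=0)       # every row twice -> shape (4, 2)
q = tensor.repeat(t, [1, 3], axis=1)  # per-position counts along axis 1 -> shape (2, 4)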
+
+
+def tensordot(A, B, axes=2):
+    """Returns the tensor multiplication of two tensors along specified axes.
+
+    This is equivalent to computing the dot product along the specified axes,
+    which are treated as one axis by reshaping.
+
+    Args:
+        A: Singa.Tensor
+        B: Singa.Tensor
+        axes:
+            - If it is an integer, then that many trailing axes of ''A'' and
+              leading axes of ''B'' are used.
+            - If it is a pair of sequences of integers, then these two
+              sequences specify the lists of axes for ''A'' and ''B''. The
+              corresponding axes are paired for sum-product.
+
+    Returns:
+        singa.tensor: the tensor product of ''A'' and ''B'' along the
+        axes specified by ''axes''.
+
+    Adapted from numpy.tensordot:
+    https://github.com/numpy/numpy/blob/v1.14.0/numpy/core/numeric.py#L1123-L1306
+    """
+    # when axes is an integer, axes_A and axes_B are the trailing axes of ''A''
+    # and the leading axes of ''B''. For example, when axes is 1 this is the
+    # normal multiplication: if A has shape (3,2,4) and B has shape (4,2,5),
+    # the result has shape (3,2,2,5); when axes is 2 and A, B have shapes
+    # (3,2,4) and (2,4,5), the result has shape (3,5)
+
+    if type(axes) == int:
+        axes_A = list(range(-axes, 0))
+        axes_B = list(range(0, axes))
+    else:
+        axes_A, axes_B = axes
+    # when axes is a pair of sequences of integers. For example, if A has shape
+    # (3,2,4) and B has shape (4,2,5) and we set axes to ([1,2],[1,0]), the
+    # result has shape (3,5)
+    if isinstance(axes_A, list):
+        na = len(axes_A)
+        axes_A = list(axes_A)
+    else:
+        axes_A = [axes_A]
+        na = 1
+    if isinstance(axes_B, list):
+        nb = len(axes_B)
+        axes_B = list(axes_B)
+    else:
+        axes_B = [axes_B]
+        nb = 1
+
+    # a_shape and b_shape are the shape of tensor A and B, while nda and ndb
+    # are the dim of A and B
+    a_shape = A.shape
+    nda = A.ndim()
+    b_shape = B.shape
+    ndb = B.ndim()
+    equal = True
+    # check whether the length of axes_A equals that of axes_B
+    if na != nb:
+        equal = False
+    else:
+        # to make the shape match
+        for k in range(na):
+            if a_shape[axes_A[k]] != b_shape[axes_B[k]]:
+                equal = False
+                break
+            if axes_A[k] < 0:
+                axes_A[k] += nda
+            if axes_B[k] < 0:
+                axes_B[k] += ndb
+    if not equal:
+        raise ValueError("shape-mismatch for sum")
+    # start the calculation according to the axes
+
+    notin = [k for k in range(nda) if k not in axes_A]
+    # nda is the number of dims of A, axes_A lists the contracted axes of A,
+    # and notin lists the axes of A that are not contracted
+    newaxes_a = notin + axes_A
+    N2 = 1
+    for axis in axes_A:
+        N2 *= a_shape[axis]
+    N1 = 1
+    for ax in notin:
+        N1 *= a_shape[ax]
+    # newshape_a is the 2d shape used for the multiplication. For example, if A
+    # has shape (3,2,4) and axes is ([1,2],[1,0]), then newshape_a is (3,8);
+    # olda is the part of A's shape that appears in the result.
+    newshape_a = (N1, N2)
+    olda = [a_shape[axis] for axis in notin]
+    notin = [k for k in range(ndb) if k not in axes_B]
+    newaxes_b = axes_B + notin
+    N2 = 1
+    for axis in axes_B:
+        N2 *= b_shape[axis]
+    N1 = 1
+    for bx in notin:
+        N1 *= b_shape[bx]
+    newshape_b = (N2, N1)
+    oldb = [b_shape[axis] for axis in notin]
+
+    A = transpose(A, newaxes_a)
+    B = transpose(B, newaxes_b)
+    at = reshape(A, newshape_a)
+    bt = reshape(B, newshape_b)
+
+    res = mult(at, bt)
+    if len(olda + oldb) == 0:
+        # full contraction: represent the scalar result as a (1, 1) tensor
+        olda = [1]
+        oldb = [1]
+    res = res.reshape(tuple(olda + oldb))
+
+    return res
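The docstring's pair-of-sequences form in a runnable sketch; the contracted axes must have matching lengths, and the remaining axes of A, then of B, form the result shape:

import numpy as np
from singa import tensor

A = tensor.from_numpy(np.ones((3, 2, 4), dtype=np.float32))
B = tensor.from_numpy(np.ones((4, 2, 5), dtype=np.float32))
C = tensor.tensordot(A, B, axes=([1, 2], [1, 0]))  # pair A's axes 1,2 with B's 1,0
assert C.shape == (3, 5)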
+
+
 def div(lhs, rhs, ret=None):
     '''Element-wise division.
 
     Args:
-        lhs (Tensor)
-        rhs (Tensor)
+        lhs (Tensor): lhs tensor
+        rhs (Tensor): rhs tensor
         ret (Tensor, optional): if not None, the result is stored in it;
             otherwise, a new Tensor would be created for the result.
 
@@ -936,9 +1584,9 @@
         return lhs / rhs
     else:
         if isinstance(rhs, Tensor):
-            singa.Div(lhs.singa_tensor, rhs.singa_tensor, ret.singa_tensor)
+            singa.Div(lhs.data, rhs.data, ret.data)
         else:
-            singa.DivFloatWithRet(lhs.singa_tensor, rhs, ret.singa_tensor)
+            singa.DivFloatWithRet(lhs.data, rhs, ret.data)
         return ret
 
 
@@ -946,14 +1594,14 @@
     '''Element-wise operation for y += alpha * x.
 
     Args:
-        alpha (float)
-        x (Tensor)
-        y (Tensor)
+        alpha (float): scaling factor
+        x (Tensor): a tensor
+        y (Tensor): a tensor
 
     Returns:
         y
     '''
-    singa.Axpy(float(alpha), x.singa_tensor, y.singa_tensor)
+    singa.Axpy(float(alpha), x.data, y.data)
     return y
 
 
@@ -967,7 +1615,7 @@
     Returns:
         t
     '''
-    singa.Bernoulli(float(p), t.singa_tensor)
+    singa.Bernoulli(float(p), t.data)
     return t
 
 
@@ -982,7 +1630,7 @@
     Returns:
         t
     '''
-    singa.Gaussian(float(mean), float(std), t.singa_tensor)
+    singa.Gaussian(float(mean), float(std), t.data)
     return t
 
 
@@ -991,13 +1639,13 @@
 
     Args:
         low (float): the lower bound
-        hight (float): the higher bound
+        high (float): the higher bound
         t (Tensor): the results are put into t
 
     Returns:
         t
     '''
-    singa.Uniform(float(low), float(high), t.singa_tensor)
+    singa.Uniform(float(low), float(high), t.data)
     return t
 
 
@@ -1007,15 +1655,15 @@
     Denote each column of M as m, m = alpha * v + beta * m
 
     Args:
-        alpha (float)
-        v (Tensor)
-        beta (float)
+        alpha (float): scalar factor
+        v (Tensor): a tensor
+        beta (float): scalar factor
         M (Tensor): 2d tensor
+
     Returns:
-        M
+        the resulting tensor M
     '''
-    singa.AddColumnWithScale(float(alpha), float(beta), v.singa_tensor,
-                             M.singa_tensor)
+    singa.AddColumnWithScale(float(alpha), float(beta), v.data, M.data)
     return M
 
 
@@ -1025,14 +1673,15 @@
     Denote each row of M as m, m = alpha * v + beta * m
 
     Args:
-        alpha (float)
-        v (Tensor)
-        beta (float)
+        alpha (float): scaling factor
+        v (Tensor): a tensor
+        beta (float): scaling factor
         M (Tensor): 2d tensor
+
     Returns:
-        M
+        the resulting tensor M
     '''
-    singa.AddRowWithScale(alpha, beta, v.singa_tensor, M.singa_tensor)
+    singa.AddRowWithScale(alpha, beta, v.data, M.data)
     return M
 
 
@@ -1046,8 +1695,8 @@
         a new Tensor as the resulted column.
     '''
     assert M.ndim() == 2, 'M.nDim() is supposed to be 2'
-    ret = Tensor((M.shape[0], 1), M.singa_tensor.device())
-    singa.SumColumns(M.singa_tensor, ret.singa_tensor)
+    ret = Tensor((M.shape[0], 1), M.data.device())
+    singa.SumColumns(M.data, ret.data)
     return ret
 
 
@@ -1061,8 +1710,8 @@
         a new Tensor as the resulted row.
     '''
     assert M.ndim() == 2, 'M.nDim() is supposed to be 2'
-    ret = Tensor((1, M.shape[1]), M.singa_tensor.device())
-    singa.SumRows(M.singa_tensor, ret.singa_tensor)
+    ret = Tensor((1, M.shape[1]), M.data.device())
+    singa.SumRows(M.data, ret.data)
     return ret
 
 
@@ -1074,10 +1723,82 @@
     ''' this function calls singa global functions that returns Tensor
         and create new python Tensor instance
         e.g., Tensor [singa_func](args...)
+
+    Args:
+        _singa_func: singa CPP API
+        args: args for singa CPP API
+
+    Returns:
+        new singa tensor
     '''
     new_t = Tensor()
-    new_t.singa_tensor = _singa_func(*args)
-    new_t.shape = tuple(new_t.singa_tensor.shape())
-    new_t.device = new_t.singa_tensor.device()
-    new_t.dtype = new_t.singa_tensor.data_type()
+    new_t.data = _singa_func(*args)
+    new_t.shape = tuple(new_t.data.shape())
+    new_t.device = new_t.data.device()
+    new_t.dtype = new_t.data.data_type()
     return new_t
+
+
+def copy_from_numpy(data, np_array):
+    ''' Copy data from a numpy array into a singa ctensor.
+        Used as a static method.
+
+    Args:
+        data: the destination singa ctensor
+        np_array: the source numpy array
+    '''
+    assert np_array.size == data.Size(), \
+        'the tensor and the array should have the same number of elements'
+    if not np_array.ndim == 1:
+        np_array = np_array.flatten()
+    dt = np_array.dtype
+    if dt == np.float32:
+        data.CopyFloatDataFromHostPtr(np_array)
+    elif dt == np.int or dt == np.int32:
+        data.CopyIntDataFromHostPtr(np_array)
+    else:
+        print('Not implemented yet for ', dt)
+
+
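A minimal sketch of this helper, assuming a float32 source array (int32 goes through the int branch; other dtypes only print the not-implemented message):

    import numpy as np
    from singa import tensor

    t = tensor.Tensor((2, 3))
    a = np.arange(6, dtype=np.float32).reshape(2, 3)
    tensor.copy_from_numpy(t.data, a)   # flattens a, then copies it into the ctensor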
+def concatenate(tensors, axis):
+    '''concatenate a list of tensors along the given axis
+
+    Args:
+        tensors: list of tensors.
+        axis: the axis to concatenate on; all dimensions must be the same
+            except the axis being concatenated.
+
+    Returns:
+        the new concatenated tensor
+    '''
+    ctensors = singa.VecTensor()
+    for t in tensors:
+        ctensors.append(t.data)
+    return _call_singa_func(singa.ConcatOn, ctensors, axis)
+
+
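Usage sketch: the inputs are python Tensor objects whose wrapped ctensors are gathered into a VecTensor before the C++ ConcatOn call (both are exposed in core_tensor.i later in this diff):

    from singa import tensor

    a = tensor.Tensor((2, 3))
    b = tensor.Tensor((4, 3))
    c = tensor.concatenate([a, b], 0)   # c.shape == (6, 3)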
+def random(shape, device=get_default_device()):
+    ''' return a random tensor of the given shape, with values drawn from U(0, 1)
+
+    Args:
+        shape: shape of the generated tensor
+        device: device of the generated tensor; defaults to the host device
+
+    Returns:
+        the generated tensor
+    '''
+    ret = Tensor(shape, device=device)
+    ret.uniform(0, 1)
+    return ret
+
+
+def zeros(shape, device=get_default_device()):
+    ret = Tensor(shape, device=device)
+    ret.set_value(0.0)
+    return ret
+
+
+def ones(shape, device=get_default_device()):
+    ret = Tensor(shape, device=device)
+    ret.set_value(1.0)
+    return ret
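Together these give numpy-style constructors; a quick sketch:

    from singa import tensor

    r = tensor.random((2, 2))   # values drawn from U(0, 1)
    z = tensor.zeros((2, 2))    # all elements 0.0
    o = tensor.ones((2, 2))     # all elements 1.0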
diff --git a/python/singa/utils.py b/python/singa/utils.py
index c446984..7dcf8fb 100644
--- a/python/singa/utils.py
+++ b/python/singa/utils.py
@@ -16,6 +16,13 @@
 # =============================================================================
 
 import sys
+import numpy as np
+import collections
+
+from . import singa_wrap as singa
+
+OrderedDict = collections.OrderedDict
+
 
 def update_progress(progress, info):
     """Display progress bar and user info.
@@ -38,10 +45,223 @@
         progress = 1
         status = "Done. "
     status = status + info
-    block = int(round(barLength*progress))
-    text = "[{0}] {1:3.1f}% {2}".format("."*block + " "*(barLength-block),
-                                        progress*100, status)
+    block = int(round(barLength * progress))
+    text = "[{0}] {1:3.1f}% {2}".format("." * block + " " * (barLength - block),
+                                        progress * 100, status)
     sys.stdout.write(text)
-    sys.stdout.write('\b'*(9 + barLength + len(status)))
+    sys.stdout.write('\b' * (9 + barLength + len(status)))
     sys.stdout.flush()
 
+
+def handle_odd_pad_fwd(x, odd_padding, is_pool=False):
+    """
+    handle odd padding mode forward
+    Args:
+        x, the input tensor
+        odd_padding, the odd_padding
+    Returns: 
+        tensor, the output
+    """
+    # (axis, left padding if True else right padding)
+    flags = [(2, True), (2, False), (3, True), (3, False)]
+    for (axis, left), pad in zip(flags, odd_padding):
+        if pad == 0:
+            continue
+        if is_pool:
+            if left:
+                padding = singa.SliceOn(x, 0, pad, axis)
+            else:
+                axis_shape = list(x.shape())[axis]
+                padding = singa.SliceOn(x, axis_shape - pad, axis_shape, axis)
+        else:
+            pad_shape = list(x.shape())
+            pad_shape[axis] = pad
+            padding = singa.Tensor(list(pad_shape), x.device())
+            padding.SetFloatValue(0.)
+        if left:
+            x = singa.ConcatOn(singa.VecTensor([padding, x]), axis)
+        else:
+            x = singa.ConcatOn(singa.VecTensor([x, padding]), axis)
+    return x
+
+
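To make the intent concrete, a sketch on an NCHW input where only the right edge of the width axis needs one extra element (odd_padding follows the flags order above: height-left, height-right, width-left, width-right); handle_odd_pad_bwd below slices the same positions back off the gradient:

    from singa import tensor, utils

    x = tensor.Tensor((1, 1, 4, 4))
    x.set_value(1.0)
    y = utils.handle_odd_pad_fwd(x.data, (0, 0, 0, 1))   # zero-pad one column on the right
    print(list(y.shape()))                               # [1, 1, 4, 5]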
+def handle_odd_pad_bwd(dx, odd_padding):
+    """
+    handle odd padding mode backward
+    Args:
+        dx, the backward tensor
+        odd_padding, the odd_padding
+    Returns: 
+        tensor, the output
+    """
+    # (axis, left padding if True else right padding)
+    flags = [(2, True), (2, False), (3, True), (3, False)]
+    for (axis, left), pad in zip(flags, odd_padding):
+        if pad == 0:
+            continue
+        axis_shape = list(dx.shape())[axis]
+        if left:
+            dx = singa.SliceOn(dx, pad, axis_shape, axis)
+        else:
+            dx = singa.SliceOn(dx, 0, axis_shape - pad, axis)
+    return dx
+
+
+def same_pad_shape_check(handle, pad_mode, x):
+    """
+    check the shape is correct for same padding mode
+    Args:
+        handle, the handle
+        pad_mode, pad_mode
+        x: input tensor
+    Returns: 
+        tuple, the correct padding(before divide 2)
+    """
+    _kernel = [handle.kernel_h, handle.kernel_w]
+    _stride = [handle.stride_h, handle.stride_w]
+    _padding = [handle.pad_h, handle.pad_w]
+    _padding_correct = get_padding_shape(pad_mode,
+                                         x.shape()[2:], _kernel, _stride)
+    _padding_crop, _ = [x // 2 for x in _padding_correct]
+    assert _padding == _padding_crop, (
+        'For a same mode, the given padding %s is wrong, the correct one should be %s.'
+        % (_padding, _padding_crop))
+    return _padding_correct
+
+
+def re_new_handle(handle, x, is_pool=False):
+    """
+    re-new a handle by useing the new input tensor
+    Args:
+        handle, the handle
+        x, input tensor
+    Returns: 
+        handle, a new handle
+    """
+    kernel_size = [handle.kernel_h, handle.kernel_w]
+    stride = [handle.stride_h, handle.stride_w]
+    padding = [handle.pad_h, handle.pad_w]
+    if is_pool:
+        params = (x, kernel_size, stride, padding, handle.is_max_pooling)
+    else:
+        params = (x, kernel_size, stride, padding, handle.channels,
+                  handle.num_filters, handle.bias_term, handle.group)
+    if (type(handle) == singa.ConvHandle or
+            type(handle) == singa.PoolingHandle):
+        handle = singa.PoolingHandle(*params) if is_pool else singa.ConvHandle(
+            *params)
+    else:
+        handle = singa.CudnnPoolingHandle(
+            *params) if is_pool else singa.CudnnConvHandle(*params)
+    return handle
+
+
+def get_padding_shape(pad_mode, input_spatial_shape, kernel_spatial_shape,
+                      strides_spatial):
+    """
+    return padding shape of conv2d or pooling,
+    Args:
+        pad_mode: string
+        kernel_spatial_shape: list[int]
+        strides_spatial: list[int]
+    Returns: 
+        list[int]
+    """
+    output_spatial_shape = get_output_shape(pad_mode, input_spatial_shape,
+                                            kernel_spatial_shape,
+                                            strides_spatial)
+    pad_shape = [0] * len(input_spatial_shape) * 2  # 2 means left and right
+    # the odd padding is the part that cannot be expressed by the tuple padding (w, h) mode,
+    # so we first handle the input separately, then use the normal padding method.
+    odd_padd_shape = [0] * len(input_spatial_shape) * 2
+    for i in range(len(input_spatial_shape)):
+        whole_pad = (output_spatial_shape[i] - 1) * strides_spatial[i] + \
+            kernel_spatial_shape[i] - input_spatial_shape[i]
+        pad_shape[2 * i] = pad_shape[2 * i + 1] = whole_pad // 2
+        if whole_pad % 2 != 0:
+            if pad_mode == "SAME_UPPER":
+                odd_padd_shape[2 * i + 1] += 1
+            else:
+                odd_padd_shape[2 * i] += 1
+    return pad_shape, odd_padd_shape
+
+
+def get_output_shape(auto_pad, input_spatial_shape, kernel_spatial_shape,
+                     strides_spatial):
+    """
+    return output shape of conv2d or pooling,
+    ! borrow from onnx
+    Args:
+        auto_pad: string
+        input_spatial_shape: list[int]
+        kernel_spatial_shape: list[int]
+        strides_spatial: list[int]
+        output_spatial_shape: list[int]
+    Returns: 
+        list[int
+    """
+    out_shape = [0] * len(input_spatial_shape)
+    if auto_pad in ('SAME_UPPER', 'SAME_LOWER'):
+        for i in range(len(input_spatial_shape)):
+            out_shape[i] = int(
+                np.ceil(
+                    float(input_spatial_shape[i]) / float(strides_spatial[i])))
+    elif auto_pad == 'VALID':
+        for i in range(len(input_spatial_shape)):
+            out_shape[i] = int(
+                np.ceil(
+                    float(input_spatial_shape[i] -
+                          (kernel_spatial_shape[i] - 1)) /
+                    float(strides_spatial[i])))
+    return out_shape
+
+
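A worked example of the two helpers together: for spatial size 4, kernel 3, stride 2 under SAME_UPPER, the output size is ceil(4 / 2) = 2, so the whole pad per axis is (2 - 1) * 2 + 3 - 4 = 1; the even part is 1 // 2 = 0 on both sides and the leftover element goes to the upper (right/bottom) side as odd padding:

    from singa import utils

    pad, odd = utils.get_padding_shape('SAME_UPPER', [4, 4], [3, 3], [2, 2])
    print(pad)   # [0, 0, 0, 0]
    print(odd)   # [0, 1, 0, 1]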
+def force_unicode(s):
+    """
+    return string of a bytes
+    ! borrow from onnx
+    Args:
+        s: string or bytes
+    Returns: 
+        string
+    """
+    try:
+        return s.decode('utf-8')
+    except AttributeError:
+        return s
+
+
+def post_order_recursive(root, root_t):
+    """
+    return a list by the topological ordering (postorder of Depth-first search)
+    Args:
+        root: singa operator
+        root_t: tensor
+    Returns: 
+        deque[int]
+    """
+
+    def recursive(root, yid, root_t):
+        if root:
+            # srcop: operator for an input of root
+            # yid: id(output of this operator)
+            # y: output of this operator
+            for srcop, yid, y, _ in root.src:
+                recursive(srcop, yid, y)
+
+            if type(root).__name__ == 'Dummy':
+                if root_t is not None:
+                    # constant within a node: weight
+                    weights[root.name] = root_t
+                else:
+                    # constant outside a node: input
+                    inputs[root.name] = root_t
+            else:
+                nodes[root.name] = root
+
+    nodes = OrderedDict()
+    weights = OrderedDict()
+    inputs = OrderedDict()
+
+    recursive(root, None, root_t)
+    return nodes, weights, inputs
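A minimal sketch with stand-in operator objects (real callers pass singa autograd operators, whose src entries are (srcop, yid, y, _) tuples as noted above; the names here are hypothetical):

    from singa import utils

    class Op:
        def __init__(self, name, src=()):
            self.name = name
            self.src = list(src)

    conv = Op('conv1')                          # leaf operator
    relu = Op('relu1', [(conv, 0, None, None)])
    nodes, weights, inputs = utils.post_order_recursive(relu, None)
    print(list(nodes))                          # ['conv1', 'relu1'], postorder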
diff --git a/rafiki b/rafiki
new file mode 160000
index 0000000..b027c58
--- /dev/null
+++ b/rafiki
@@ -0,0 +1 @@
+Subproject commit b027c588f27ed4e801e8e300785b0eca230b5167
diff --git a/rat-excludes b/rat-excludes
deleted file mode 100644
index 9dd92e6..0000000
--- a/rat-excludes
+++ /dev/null
@@ -1,13 +0,0 @@
-rat-excludes
-Doxyfile
-Makefile.*
-configure
-.gitignore
-doc/*
-config/*
-\.dirstamp
-config.*
-stamp-h1
-.*\.conf
-.*\.md
-control
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..f7a3f19
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,440 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+'''Script for building wheel package for installing singa via pip.
+
+This script must be launched at the root dir of the singa project 
+inside the docker container created via tool/docker/devel/centos/cudaxx/Dockerfile.manylinux2014.
+
+    # launch docker container
+    $ nvidia-docker run -v <local singa dir>:/root/singa -it apache/singa:manylinux2014-cuda10.2
+    # build the wheel package; replace cp36-cp36m with another tag to build singa for other py versions
+    $ /opt/python/cp36-cp36m/bin/python setup.py bdist_wheel
+    $ /opt/python/cp37-cp37m/bin/python setup.py bdist_wheel
+    $ /opt/python/cp38-cp38/bin/python setup.py bdist_wheel
+
+The generated wheel file should be repaired by the auditwheel tool to make it
+compatible with PEP513. Otherwise, the dependent libs will not be included in
+the wheel package and the wheel file will be rejected by the PyPI website during
+uploading due to a file name error.
+
+    # repair the wheel package and upload to pypi
+    $ /opt/python/cp36-cp36m/bin/python setup.py audit
+
+For the Dockerfile with CUDA and CUDNN installed, the CUDA version and
+CUDNN version are exported as environment variables: CUDA_VERSION, CUDNN_VERSION.
+You can make the script build the CUDA-enabled singa package by exporting
+SINGA_CUDA=ON; otherwise the CPU-only package will be built.
+
+
+Ref: 
+[1] https://github.com/bytedance/byteps/blob/master/setup.py
+[2] https://setuptools.readthedocs.io/en/latest/setuptools.html
+[3] https://packaging.python.org/tutorials/packaging-projects/ 
+'''
+
+from setuptools import find_packages, setup, Command, Extension
+from setuptools.command.build_ext import build_ext
+from distutils.errors import CompileError, DistutilsSetupError
+
+import os
+import io
+import sys
+import subprocess
+import shutil
+import shlex
+from pathlib import Path
+
+import numpy as np
+
+NAME = 'singa'
+'''
+Pypi does not allow you to overwrite the uploaded package;
+therefore, you have to bump the version.
+Pypi does not allow a [local version label](https://www.python.org/dev/peps/pep-0440/#local-version-segments)
+to appear in the version; therefore, you have to include only the public
+version label. Currently, due to the pypi size limit, the package
+uploaded to pypi is cpu only (without cuda and cudnn), which can be installed via
+    
+    $ pip install singa
+    $ pip install singa==3.0.0.dev1
+
+The cuda and cudnn enabled package's version consists of the public 
+version label + local version label, e.g., 3.0.0.dev1+cuda10.2, which
+can be installed via
+
+    $ pip install singa==3.0.0.dev1+cuda10.2 -f <url of the repo>
+
+'''
+from datetime import date
+
+# stable version
+VERSION = '3.1.0.rc1'
+# get the git hash
+# git_hash = subprocess.check_output(["git", "describe"]).strip().split('-')[-1][1:]
+# comment the next line to build wheel for stable version
+# VERSION += '.dev' + date.today().strftime('%y%m%d')
+
+SINGA_PY = Path('python')
+SINGA_SRC = Path('src')
+SINGA_HDR = Path('include')
+
+
+class AuditCommand(Command):
+    """Support setup.py upload."""
+
+    description = 'Repair the package via auditwheel tool.'
+    user_options = []
+
+    @staticmethod
+    def status(s):
+        """Prints things in bold."""
+        print('\033[1m{0}\033[0m'.format(s))
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self.status('Removing previous wheel files under wheelhouse')
+        shutil.rmtree('wheelhouse', ignore_errors=True)
+        for wheel in os.listdir('dist'):
+            self.status('Repair the dist/{} via auditwheel'.format(wheel))
+            os.system('auditwheel repair dist/{}'.format(wheel))
+
+        # self.status('Uploading the package to PyPI via Twine…')
+        # os.system('{} -m twine upload dist/*'.format(sys.executable))
+        sys.exit()
+
+
+def parse_compile_options():
+    '''Read the environment variables to parse the compile options.
+
+    Returns:
+        a tuple (with_cuda, with_nccl, with_test, with_debug); each item is the
+        raw environment value, or False if the variable is unset
+    '''
+    with_cuda = os.environ.get('SINGA_CUDA', False)
+    with_nccl = os.environ.get('SINGA_NCCL', False)
+    with_test = os.environ.get('SINGA_TEST', False)
+    with_debug = os.environ.get('SINGA_DEBUG', False)
+
+    return with_cuda, with_nccl, with_test, with_debug
+
+
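Note that the values come straight from os.environ, so any non-empty string (even 'OFF') counts as enabled; a quick sketch of the behaviour:

    import os

    os.environ['SINGA_CUDA'] = 'ON'
    with_cuda, with_nccl, _, _ = parse_compile_options()
    print(bool(with_cuda), bool(with_nccl))   # True False (SINGA_NCCL unset)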
+def generate_singa_config(with_cuda, with_nccl):
+    '''Generate singa_config.h file to define some macros for the cpp code.
+
+    Args:
+        with_cuda(bool): indicator for cudnn and cuda lib
+        with_nccl(bool): indicator for nccl lib
+    '''
+    config = ['#define USE_CBLAS', '#define USE_GLOG', '#define USE_DNNL']
+    if not with_cuda:
+        config.append('#define CPU_ONLY')
+    else:
+        config.append('#define USE_CUDA')
+        config.append('#define USE_CUDNN')
+
+    if with_nccl:
+        config.append('#define ENABLE_DIST')
+        config.append('#define USE_DIST')
+
+    # singa_config.h to be included by cpp code
+    cpp_conf_path = SINGA_HDR / 'singa/singa_config.h'
+    print('Writing configs to {}'.format(cpp_conf_path))
+    with cpp_conf_path.open('w') as fd:
+        for line in config:
+            fd.write(line + '\n')
+        versions = [int(x) for x in VERSION.split('+')[0].split('.')[:3]]
+        fd.write('#define SINGA_MAJOR_VERSION {}\n'.format(versions[0]))
+        fd.write('#define SINGA_MINOR_VERSION {}\n'.format(versions[1]))
+        fd.write('#define SINGA_PATCH_VERSION {}\n'.format(versions[2]))
+        fd.write('#define SINGA_VERSION "{}"\n'.format(VERSION))
+
+    # config.i to be included by swig files
+    swig_conf_path = SINGA_SRC / 'api/config.i'
+    with swig_conf_path.open('w') as fd:
+        for line in config:
+            fd.write(line + ' 1 \n')
+
+        fd.write('#define USE_PYTHON 1\n')
+        if not with_nccl:
+            fd.write('#define USE_DIST 0\n')
+        if not with_cuda:
+            fd.write('#define USE_CUDA 0\n')
+            fd.write('#define USE_CUDNN 0\n')
+        else:
+            fd.write('#define CUDNN_VERSION "{}"\n'.format(
+                os.environ.get('CUDNN_VERSION')))
+        versions = [int(x) for x in VERSION.split('+')[0].split('.')[:3]]
+        fd.write('#define SINGA_MAJOR_VERSION {}\n'.format(versions[0]))
+        fd.write('#define SINGA_MINOR_VERSION {}\n'.format(versions[1]))
+        fd.write('#define SINGA_PATCH_VERSION {}\n'.format(versions[2]))
+        fd.write('#define SINGA_VERSION "{}"\n'.format(VERSION))
+
+
+def get_cpp_flags():
+    default_flags = ['-std=c++11', '-fPIC', '-g', '-O2', '-Wall', '-pthread']
+    # avx_flags = [ '-mavx'] #'-mf16c',
+    if sys.platform == 'darwin':
+        # Darwin most likely will have Clang, which has libc++.
+        return default_flags + ['-stdlib=libc++']
+    else:
+        return default_flags
+
+
+def generate_proto_files():
+    print('----------------------')
+    print('Generating proto files')
+    print('----------------------')
+    proto_src = SINGA_SRC / 'proto'
+    cmd = "/usr/bin/protoc --proto_path={} --cpp_out={} {}".format(
+        proto_src, proto_src, proto_src / 'core.proto')
+    subprocess.run(cmd, shell=True, check=True)
+
+    proto_hdr_dir = SINGA_HDR / 'singa/proto'
+    proto_hdr_file = proto_hdr_dir / 'core.pb.h'
+    if proto_hdr_dir.exists():
+        if proto_hdr_file.exists():
+            proto_hdr_file.unlink()
+    else:
+        proto_hdr_dir.mkdir()
+
+    shutil.copyfile(Path(proto_src / 'core.pb.h'), proto_hdr_file)
+    return proto_hdr_file, proto_src / 'core.pb.cc'
+
+
+def path_to_str(path_list):
+    return [str(x) if not isinstance(x, str) else x for x in path_list]
+
+
+def prepare_extension_options():
+    with_cuda, with_nccl, with_test, with_debug = parse_compile_options()
+
+    generate_singa_config(with_cuda, with_nccl)
+    generate_proto_files()
+
+    link_libs = ['glog', 'protobuf', 'openblas', 'dnnl']
+
+    sources = path_to_str([
+        *list((SINGA_SRC / 'core').rglob('*.cc')), *list(
+            (SINGA_SRC / 'model/operation').glob('*.cc')), *list(
+                (SINGA_SRC / 'utils').glob('*.cc')),
+        SINGA_SRC / 'proto/core.pb.cc', SINGA_SRC / 'api/singa.i'
+    ])
+    include_dirs = path_to_str([
+        SINGA_HDR, SINGA_HDR / 'singa/proto',
+        np.get_include(), '/usr/include', '/usr/include/openblas',
+        '/usr/local/include'
+    ])
+
+    try:
+        np_include = np.get_include()
+    except AttributeError:
+        np_include = np.get_numpy_include()
+    include_dirs.append(np_include)
+
+    library_dirs = []  # path_to_str(['/usr/lib64', '/usr/local/lib'])
+
+    if with_cuda:
+        link_libs.extend(['cudart', 'cudnn', 'curand', 'cublas', 'cnmem'])
+        include_dirs.append('/usr/local/cuda/include')
+        library_dirs.append('/usr/local/cuda/lib64')
+        sources.append(str(SINGA_SRC / 'core/tensor/math_kernel.cu'))
+        if with_nccl:
+            link_libs.extend(['nccl', 'cusparse', 'mpicxx', 'mpi'])
+            sources.append(str(SINGA_SRC / 'io/communicator.cc'))
+    # print(link_libs, extra_libs)
+
+    libraries = link_libs
+    runtime_library_dirs = ['.'] + library_dirs
+    extra_compile_args = {'gcc': get_cpp_flags()}
+
+    if with_cuda:
+        cuda9_gencode = (' -gencode arch=compute_35,code=sm_35'
+                         ' -gencode arch=compute_50,code=sm_50'
+                         ' -gencode arch=compute_60,code=sm_60'
+                         ' -gencode arch=compute_70,code=sm_70')
+        cuda10_gencode = ' -gencode arch=compute_75,code=sm_75'
+        cuda11_gencode = ' -gencode arch=compute_80,code=sm_80'
+        cuda9_ptx = ' -gencode arch=compute_70,code=compute_70'
+        cuda10_ptx = ' -gencode arch=compute_75,code=compute_75'
+        cuda11_ptx = ' -gencode arch=compute_80,code=compute_80'
+        if cuda_major >= 11:
+            gencode = cuda9_gencode + cuda10_gencode + cuda11_gencode + cuda11_ptx
+        elif cuda_major >= 10:
+            gencode = cuda9_gencode + cuda10_gencode + cuda10_ptx
+        elif cuda_major >= 9:
+            gencode = cuda9_gencode + cuda9_ptx
+        else:
+            raise CompileError(
+                'CUDA version must be >=9.0, the current version is {}'.format(
+                    cuda_major))
+
+        extra_compile_args['nvcc'] = shlex.split(gencode) + [
+            '-Xcompiler', '-fPIC'
+        ]
+    options = {
+        'sources': sources,
+        'include_dirs': include_dirs,
+        'library_dirs': library_dirs,
+        'libraries': libraries,
+        'runtime_library_dirs': runtime_library_dirs,
+        'extra_compile_args': extra_compile_args
+    }
+
+    return options
+
+
+# credit: https://github.com/rmcgibbo/npcuda-example/blob/master/cython/setup.py#L55
+def customize_compiler_for_nvcc(self):
+    """Inject deep into distutils to customize how the dispatch
+    to gcc/nvcc works.
+    If you subclass UnixCCompiler, it's not trivial to get your subclass
+    injected in, and still have the right customizations (i.e.
+    distutils.sysconfig.customize_compiler) run on it. So instead of going
+    the OO route, I have this. Note, it's kind of like a weird functional
+    subclassing going on.
+    """
+
+    # Tell the compiler it can process .cu
+    self.src_extensions.append('.cu')
+
+    # Save references to the default compiler_so and _compile methods
+    default_compiler_so = self.compiler_so
+    super = self._compile
+
+    # Now redefine the _compile method. This gets executed for each
+    # object but distutils doesn't have the ability to change compilers
+    # based on source extension: we add it.
+    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
+        if os.path.splitext(src)[1] == '.cu':
+            # use the cuda for .cu files
+            self.set_executable('compiler_so', 'nvcc')
+            # use only a subset of the extra_postargs, which are 1-1
+            # translated from the extra_compile_args in the Extension class
+            postargs = extra_postargs['nvcc']
+        else:
+            postargs = extra_postargs['gcc']
+
+        super(obj, src, ext, cc_args, postargs, pp_opts)
+        # Reset the default compiler_so, which we might have changed for cuda
+        self.compiler_so = default_compiler_so
+
+    # Inject our redefined _compile method into the class
+    self._compile = _compile
+
+
+class custom_build_ext(build_ext):
+    '''Customize the process for building the extension by changing
+    the options for compiling swig files and cu files.
+
+    Ref: https://github.com/python/cpython/blob/master/Lib/distutils/command/build_ext.py
+    '''
+
+    def finalize_options(self):
+        self.swig_cpp = True
+        print('build temp', self.build_temp)
+        print('build lib', self.build_lib)
+        super(custom_build_ext, self).finalize_options()
+        self.swig_opts = '-py3 -outdir {}/singa/'.format(self.build_lib).split()
+        print('build temp', self.build_temp)
+        print('build lib', self.build_lib)
+
+    def build_extensions(self):
+        options = prepare_extension_options()
+        for key, val in options.items():
+            singa_wrap.__dict__[key] = val
+        customize_compiler_for_nvcc(self.compiler)
+        build_ext.build_extensions(self)
+
+
+try:
+    with io.open('README.md', encoding='utf-8') as f:
+        long_description = '\n' + f.read()
+except OSError:
+    long_description = ''
+
+classifiers = [
+    # Trove classifiers
+    # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
+    'License :: OSI Approved :: Apache Software License',
+    'Development Status :: 3 - Alpha',
+    'Intended Audience :: Developers',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
+    'Topic :: Scientific/Engineering :: Artificial Intelligence'
+]
+if sys.platform == 'darwin':
+    classifiers.append('Operating System :: MacOS :: MacOS X')
+elif sys.platform == 'linux':
+    classifiers.append('Operating System :: POSIX :: Linux')
+else:
+    raise DistutilsSetupError('Building on Windows is not supported currently.')
+
+keywords = 'deep learning, apache singa'
+with_cuda, with_nccl, _, _ = parse_compile_options()
+if with_cuda:
+    classifiers.append('Environment :: GPU :: NVIDIA CUDA')
+    cuda_version = os.environ.get('CUDA_VERSION')
+    cudnn_version = os.environ.get('CUDNN_VERSION')
+    keywords += ', cuda{}, cudnn{}'.format(cuda_version, cudnn_version)
+    cuda_major = int(cuda_version.split('.')[0])
+    cuda_minor = int(cuda_version.split('.')[1])
+    # local label '+cuda10.2'. Ref: https://www.python.org/dev/peps/pep-0440/
+    VERSION = VERSION + '+cuda{}.{}'.format(cuda_major, cuda_minor)
+    if with_nccl:
+        classifiers.append('Topic :: System :: Distributed Computing')
+        keywords += ', distributed'
+else:
+    keywords += ', cpu-only'
+
+singa_wrap = Extension('singa._singa_wrap', [])
+
+setup(
+    name=NAME,
+    version=VERSION,
+    description='A General Deep Learning System',
+    long_description=long_description,
+    long_description_content_type='text/markdown',
+    author='Apache SINGA Community',
+    author_email='dev@singa.apache.org',
+    url='http://singa.apache.org',
+    python_requires='>=3',
+    install_requires=[
+        'numpy >=1.16,<2.0',  #1.16
+        'onnx==1.6',
+        'deprecated',
+        'unittest-xml-reporting',
+        'future',
+        'pillow',
+        'tqdm',
+    ],
+    include_package_data=True,
+    license='Apache 2',
+    classifiers=classifiers,
+    keywords=keywords,
+    packages=find_packages('python'),
+    package_dir={'': 'python'},
+    ext_modules=[singa_wrap],
+    cmdclass={
+        'build_ext': custom_build_ext,
+        'audit': AuditCommand
+    })
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 709894b..5f30299 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -30,12 +30,18 @@
 AUX_SOURCE_DIRECTORY(io/network io_source)
 LIST(APPEND singa_sources ${io_source})
 
+AUX_SOURCE_DIRECTORY(core/common core_source)
 AUX_SOURCE_DIRECTORY(core/device core_source)
 AUX_SOURCE_DIRECTORY(core/memory core_source)
 AUX_SOURCE_DIRECTORY(core/scheduler core_source)
 AUX_SOURCE_DIRECTORY(core/tensor core_source)
 LIST(APPEND singa_sources ${core_source})
 
+IF (USE_DIST)
+    AUX_SOURCE_DIRECTORY(dist dist_source)
+    LIST(APPEND singa_sources ${dist_source})
+ENDIF (USE_DIST)
+
 IF (USE_CUDA)
     FILE(GLOB_RECURSE cuda_source core "*.cu")
     SET(FLAGS_BACKUP ${CMAKE_CXX_FLAGS})
@@ -58,6 +64,7 @@
 AUX_SOURCE_DIRECTORY(model/loss model_source)
 AUX_SOURCE_DIRECTORY(model/metric model_source)
 AUX_SOURCE_DIRECTORY(model/updater model_source)
+AUX_SOURCE_DIRECTORY(model/operation model_source)
 LIST(APPEND singa_sources ${model_source})
 
 
@@ -98,6 +105,11 @@
   SET_TARGET_PROPERTIES(singa PROPERTIES LINK_FLAGS "")
 ENDIF()
 
+IF(CODE_COVERAGE)
+    MESSAGE("-- Enabling Code Coverage")
+    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g --coverage")
+ENDIF(CODE_COVERAGE)
+
 #pass configure info to swig
 FILE(REMOVE "${CMAKE_CURRENT_SOURCE_DIR}/api/config.i")
 CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/api/config.i.in" "${CMAKE_CURRENT_SOURCE_DIR}/api/config.i")
diff --git a/src/api/.gitignore b/src/api/.gitignore
deleted file mode 100644
index adb5d03..0000000
--- a/src/api/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-config.i
diff --git a/src/api/config.i.in b/src/api/config.i.in
old mode 100644
new mode 100755
index 05ddf6e..396c292
--- a/src/api/config.i.in
+++ b/src/api/config.i.in
@@ -1,9 +1,30 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+
+
 // Pass in cmake configurations to swig
 #cmakedefine01 USE_CUDA
 #cmakedefine01 USE_CUDNN
 #cmakedefine01 USE_OPENCL
 #cmakedefine01 USE_PYTHON
+#cmakedefine01 USE_DNNL
 #cmakedefine01 USE_JAVA
+#cmakedefine01 USE_DIST
 #cmakedefine CUDNN_VERSION ${CUDNN_VERSION}
 
 // SINGA version
diff --git a/src/api/core_device.i b/src/api/core_device.i
index a5b7de6..a5a9644 100644
--- a/src/api/core_device.i
+++ b/src/api/core_device.i
@@ -47,7 +47,17 @@
  public:
   virtual void SetRandSeed(unsigned seed) = 0;
   std::shared_ptr<Device> host();
+  void Reset();
   int id() const;
+  virtual void Sync();
+  void ResetGraph();
+  void RunGraph(bool serial = false);
+  bool graph_enabled() const;
+  void EnableGraph(bool enable);
+  void PrintTimeProfiling();
+  void SetVerbosity(int verbosity);
+  void SetSkipIteration(int skip_iteration);
+  static void EnableLazyAlloc(bool enable);
 };
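From python these land on the swig Device wrapper; a hedged sketch of the graph-related calls, assuming a CUDA build and the usual singa.device.create_cuda_gpu factory:

    from singa import device

    dev = device.create_cuda_gpu()
    dev.EnableGraph(True)      # buffer submitted operations into a graph
    # ... build and run the model so that operations are recorded ...
    dev.RunGraph(True)         # execute the buffered graph serially
    dev.PrintTimeProfiling()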
 
 class Platform {
@@ -80,4 +90,3 @@
 };
 
 }
-
diff --git a/src/api/core_tensor.i b/src/api/core_tensor.i
old mode 100644
new mode 100755
index 31562c9..f7e3160
--- a/src/api/core_tensor.i
+++ b/src/api/core_tensor.i
@@ -32,7 +32,7 @@
 #include "singa/core/tensor.h"
 #include "singa/core/device.h"
 #include "singa/proto/core.pb.h"
-#include "singa/proto/model.pb.h"
+// #include "singa/proto/model.pb.h"
 using singa::DataType;
 %}
 %shared_ptr(singa::Device)
@@ -42,6 +42,9 @@
 %init %{
   import_array();
 %}
+// better to use (int DIM1, float* IN_ARRAY1)
+// otherwise, the generated py method will have the arg name src,
+// which in fact accepts num as the input
 %apply (float *IN_ARRAY1, int DIM1) {
        (const float *src, const size_t num)
 }
@@ -62,7 +65,10 @@
 %apply float[] {float *};
 #endif // USE_JAVA
 
-
+namespace std {
+  %template(VecTensor) vector<singa::Tensor>;
+  %template(VecVecSize) vector<vector<size_t>>;
+}
 
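These templates are what the python side relies on, e.g. tensor.concatenate earlier in this diff; a short sketch of the generated wrappers (t1 and t2 are assumed to be existing python Tensor objects):

    from singa import singa_wrap as singa

    vec = singa.VecTensor()
    vec.append(t1.data)
    vec.append(t2.data)
    out = singa.ConcatOn(vec, 0)   # concatenate along axis 0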
 %template(Shape) std::vector<size_t>;
 
@@ -90,7 +96,7 @@
 
     std::shared_ptr<singa::Device> device() const;
 
-    template <typename SType> void GetValue(SType* value, const size_t num);
+    template <typename SType> void GetValue(SType* value, const size_t num) const;
     %template(GetFloatValue) GetValue<float>;
     %template(GetIntValue) GetValue<int>;
 
@@ -100,13 +106,14 @@
     const DataType data_type() const;
     const std::vector<size_t> &shape() const;
     const size_t shape(size_t idx) const;
-    size_t nDim() const;
     bool transpose() const;
+    size_t nDim() const;
+
     size_t Size() const;
     size_t MemSize() const;
-    void Reshape(const std::vector<size_t> &shape);
+
     void ResetLike(const Tensor &t);
-    void AsType(DataType type);
+    Tensor AsType(DataType type);
     void ToDevice(std::shared_ptr<singa::Device> dev);
     void ToHost();
     float L2() const;
@@ -114,13 +121,16 @@
 
     template <typename DType> void CopyDataFromHostPtr(const DType *src,
                                                        const size_t num,
-                                                       const size_t offset = 0);
+                                                       const size_t offset = 0) const;
     %template(CopyFloatDataFromHostPtr) CopyDataFromHostPtr<float>;
     %template(CopyIntDataFromHostPtr) CopyDataFromHostPtr<int>;
 
     void CopyData(const Tensor &other);
+    void RepeatData(std::vector<size_t> repeats, int axis, int total_repeats, const Tensor &src);
+
     Tensor Clone() const;
-    Tensor T() const;
+    Tensor Repeat(std::vector<size_t> repeats, int axis);
+
 
 #if USE_JAVA
     %rename(iAdd) operator+=(const Tensor &t);
@@ -157,24 +167,53 @@
   void CopyDataToFrom(Tensor *dst, const Tensor &src, size_t num,
                       size_t src_offset = 0, size_t dst_offset = 0);
 
+  void RepeatDataToFrom(bool broadcast_flag, std::vector<size_t> repeats, int axis,
+                        Tensor *dst, const Tensor &src, const size_t num);
+
   Tensor Reshape(const Tensor &in, const std::vector<size_t> &s);
+  Tensor Contiguous(const Tensor &in);
+  Tensor Transpose(const Tensor &in, const std::vector<size_t> &axes);
+
+  %rename(DefaultTranspose) Transpose(const Tensor &in);
+  Tensor Transpose(const Tensor &in);
 
   Tensor Abs(const Tensor &t);
+  Tensor Ceil(const Tensor &t);
+  Tensor Floor(const Tensor &t);
+  Tensor Round(const Tensor &t);
+  Tensor RoundE(const Tensor &t);
   Tensor Exp(const Tensor &t);
+  Tensor Erf(const Tensor &t);
   Tensor Log(const Tensor &t);
   Tensor ReLU(const Tensor &t);
   Tensor Sigmoid(const Tensor &t);
   Tensor Sign(const Tensor &t);
   Tensor Sqrt(const Tensor &t);
   Tensor Square(const Tensor &t);
+  Tensor Cos(const Tensor &t);
+  Tensor Cosh(const Tensor &t);
+  Tensor Acos(const Tensor &t);
+  Tensor Acosh(const Tensor &t);
+  Tensor Sin(const Tensor &t);
+  Tensor Sinh(const Tensor &t);
+  Tensor Asin(const Tensor &t);
+  Tensor Asinh(const Tensor &t);
+  Tensor Tan(const Tensor &t);
   Tensor Tanh(const Tensor &t);
+  Tensor Atan(const Tensor &t);
+  Tensor Atanh(const Tensor &t);
+
+  Tensor ReLUBackward(const Tensor &in1, const Tensor& in2);
 
   Tensor Sum(const Tensor &t, int axis);
   template <typename SType> SType Sum(const Tensor &t);
   %template(SumAsFloat) Sum<float>;
+  Tensor SumAll(const Tensor &t);
 
   Tensor Average(const Tensor &t, int axis);
   Tensor SoftMax(const Tensor &t);
+  Tensor SoftMax(const Tensor &t, int axis);
+  Tensor SoftMaxBackward(const Tensor &t, int axis, const Tensor &fdout);
 
   Tensor Pow(const Tensor &base, const Tensor &exp);
 
@@ -193,10 +232,12 @@
   %rename(__le__) operator<=(const Tensor &lhs, const Tensor &rhs);
   %rename(__gt__) operator>(const Tensor &lhs, const Tensor &rhs);
   %rename(__ge__) operator>=(const Tensor &lhs, const Tensor &rhs);
+  %rename(__eq__) operator==(const Tensor &lhs, const Tensor &rhs);
   Tensor operator<(const Tensor &lhs, const Tensor &rhs);
   Tensor operator<=(const Tensor &lhs, const Tensor &rhs);
   Tensor operator>(const Tensor &lhs, const Tensor &rhs);
   Tensor operator>=(const Tensor &lhs, const Tensor &rhs);
+  Tensor operator==(const Tensor &lhs, const Tensor &rhs);
 
 
   %rename(LTFloat) operator<(const Tensor &t, const float x);
@@ -216,6 +257,13 @@
   template <typename DType> Tensor operator>=(const Tensor &t, const DType x);
   %template(opge) operator>= <float>;
 
+  %rename(EQFloat) operator==(const Tensor &t, const float x);
+  template <typename DType> Tensor operator==(const Tensor &t, const DType x);
+  %template(opeq) operator== <float>;
+
+  Tensor ConcatOn(const std::vector<Tensor> &in, int axis);
+  Tensor SliceOn(const Tensor&in, const size_t start, const size_t end, int axis);
+
 
   /* ========== Arithmetic operations ========== */
   %rename(__add__) operator+(const Tensor &lhs, const Tensor &rhs);
@@ -281,6 +329,7 @@
   template <typename SType>
   void Axpy(SType alpha, const Tensor &in, Tensor *out);
   %template(Axpy) Axpy<float>;
+  void Axpy(const Tensor &alpha, const Tensor &in, Tensor *out);
 
   Tensor Mult(const Tensor &A, const Tensor &B);
   %rename(MultWithRet) Mult(const Tensor &A, const Tensor &B, Tensor *C);
@@ -317,4 +366,9 @@
 
   Tensor SoftMax(const Tensor &in);
   void SoftMax(const Tensor &in, Tensor *out);
+  Tensor SoftMax(const Tensor &in, int axis);
+  void SoftMax(const Tensor &in, Tensor *out, int axis);
+
+  Tensor CrossEntropyFwd(const Tensor& p, const Tensor& t);
+  Tensor SoftmaxCrossEntropyBwd(const Tensor& p, const Tensor& t);
 }
diff --git a/src/api/dist_communicator.i b/src/api/dist_communicator.i
new file mode 100644
index 0000000..dc43692
--- /dev/null
+++ b/src/api/dist_communicator.i
@@ -0,0 +1,62 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
+/*interface file for swig */
+
+%module dist_communicator
+%include "std_vector.i"
+
+%{
+#include "singa/io/communicator.h"
+%}
+
+namespace singa{
+
+#if USE_DIST
+
+class NcclIdHolder {
+public:
+  ncclUniqueId id;
+  NcclIdHolder();
+};
+
+class Communicator {
+public:
+  int global_rank;
+  int world_size;
+  int local_rank;
+  Communicator(int limit);
+  Communicator(int local_rank, int world_size, const NcclIdHolder &holder, int limit);
+  void synch(Tensor &t);
+  void fusedSynch(std::vector<Tensor> &t, bool send = true);
+  void synchHalf(Tensor &t);
+  void fusedSynchHalf(std::vector<Tensor> &t, bool send = true);
+  void sparsification(Tensor &t, Tensor &accumulation, float sparsThreshold, bool topK);
+  void sparsification(Tensor &t, float sparsThreshold, bool topK);
+  void fusedSparsification(std::vector<Tensor> &, Tensor &accumulation, float sparsThreshold, bool topK);
+  void fusedSparsification(std::vector<Tensor> &, float sparsThreshold, bool topK);
+  void wait();
+};
+
+
+#endif  // USE_DIST
+
+}
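A hedged sketch of driving this interface from python (assumes a build with USE_DIST and an MPI launcher; treating the limit argument as the fusion-buffer bound is an assumption here):

    from singa import singa_wrap as singa

    comm = singa.Communicator(16777216)   # MPI-based constructor
    print(comm.global_rank, comm.world_size)
    comm.synch(t.data)                    # in-place all-reduce of tensor t (assumed defined)
    comm.wait()                           # block until the collective finishes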
diff --git a/src/api/model_layer.i b/src/api/model_layer.i
index 92919fd..dc04be0 100644
--- a/src/api/model_layer.i
+++ b/src/api/model_layer.i
@@ -29,6 +29,28 @@
 
 
 %{
+// To make the code compatible between py2 and py3, the following
+// macro is required; it forces the
+// interface (function) to accept byte strings (from python) and
+// return byte strings (in python) in py3. Otherwise the strings
+// should be unicode strings in py3.
+// Note that by default the strings in python3 are of type unicode.
+// You have to encode it with the correct encoding (default is utf-8)
+// to convert it into bytes. Sometimes, the string is already byte string
+// e.g. from protobuf SerializeToString, then there is no need to do
+// conversion. The output byte strings should be decoded into unicode.
+// For python2, the default type of string is byte string.
+//
+// Because protobuf::SerializeToString cannot be decoded into unicode
+// string, we cannot use SWIG_PYTHON_2_UNICODE which forces the
+// interface (function) to accept unicode strings as input args
+// and return unicode strings.
+//
+// TODO(wangwei) make strings compatible between py2 and py3.
+
+#define SWIG_PYTHON_STRICT_BYTE_CHAR
+
+
 #include "singa/model/layer.h"
 #include "../src/model/layer/rnn.h"
 #include "../src/model/layer/cudnn_rnn.h"
diff --git a/src/api/model_operation.i b/src/api/model_operation.i
new file mode 100755
index 0000000..b0d95a0
--- /dev/null
+++ b/src/api/model_operation.i
@@ -0,0 +1,232 @@
+/************************************************************
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*************************************************************/
+
+%module model_operation
+
+%include "config.i"
+%include "std_vector.i"
+%include "std_string.i"
+%{
+#include "../src/model/operation/convolution.h"
+#include "../src/model/operation/batchnorm.h"
+#include "../src/model/operation/pooling.h"
+#include "../src/model/operation/rnn.h"
+
+%}
+
+namespace singa {
+
+class ConvHandle {
+ public:
+  ConvHandle(const Tensor &input, const std::vector<size_t>& kernel_size,
+             const std::vector<size_t>& stride, const std::vector<size_t>& padding,
+             const size_t in_channels, const size_t out_channels,
+             const bool bias, const size_t groups);
+  bool bias_term;
+  size_t batchsize;
+  size_t pad_w;
+  size_t pad_h;
+  size_t stride_h;
+  size_t stride_w;
+  size_t kernel_h;
+  size_t kernel_w;
+  size_t channels;
+  size_t num_filters;
+  size_t group;
+};
+
+Tensor CpuConvForward(const Tensor &x, Tensor &W,  Tensor &b, const ConvHandle &ch);
+
+Tensor CpuConvBackwardx(const Tensor &dy, Tensor &W, const Tensor &x, const ConvHandle &ch);
+
+Tensor CpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W, const ConvHandle &ch);
+
+Tensor CpuConvBackwardb(const Tensor &dy, const Tensor &b, const ConvHandle &ch);
+
+
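A hedged usage sketch matching the declared signatures (shapes are illustrative; x, W, b and dy are python Tensor wrappers assumed to be initialized elsewhere):

    from singa import singa_wrap as singa

    # 3 input channels -> 16 filters, 3x3 kernel, stride 1, padding 1, with bias
    ch = singa.ConvHandle(x.data, [3, 3], [1, 1], [1, 1], 3, 16, True, 1)
    y = singa.CpuConvForward(x.data, W.data, b.data, ch)
    dx = singa.CpuConvBackwardx(dy.data, W.data, x.data, ch)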
+class BatchNormHandle{
+  public:
+    BatchNormHandle(const float momentum, const Tensor& input);
+
+    size_t batchsize;
+    float factor;
+};
+
+#if USE_DNNL
+Tensor CpuBatchNormForwardInference(const BatchNormHandle &bnh,
+                                    const Tensor &x,
+                                    const Tensor &bnScale,
+                                    const Tensor &bnBias,
+                                    Tensor &running_mean,
+                                    Tensor &running_var);
+
+const std::vector<Tensor> CpuBatchNormForwardTraining(const BatchNormHandle &bnh,
+                                                      const Tensor &x,
+                                                      const Tensor &bnScale,
+                                                      const Tensor &bnBias,
+                                                      Tensor &running_mean,
+                                                      Tensor &running_var);
+
+const std::vector<Tensor> CpuBatchNormBackwardx(const BatchNormHandle &bnh,
+                                                const Tensor &y, const Tensor &dy,
+                                                const Tensor &x,
+                                                const Tensor &bnScale, const Tensor &bnBias,
+                                                const Tensor &mean, const Tensor &var);
+#endif //USE_DNNL
+
+
+class PoolingHandle {
+ public:
+  PoolingHandle(const Tensor &input, const std::vector<int>& kernel_size,
+                const std::vector<int>& stride, const std::vector<int>& padding,
+                const bool is_max=true);
+
+  int batchsize;
+  int stride_h;
+  int stride_w;
+  int kernel_h;
+  int kernel_w;
+  int pad_h;
+  int pad_w;
+  int pooled_height;
+  int pooled_width;
+  bool is_max_pooling;
+};
+
+#if USE_DNNL
+Tensor CpuPoolingForward(const PoolingHandle &ph, const Tensor &x);
+Tensor CpuPoolingBackward(const PoolingHandle &ph, const Tensor &dy,
+                              const Tensor& x, const Tensor& y);
+#endif //USE_DNNL
+
+
+#if USE_CUDNN
+class CudnnConvHandle: public ConvHandle {
+ public:
+  CudnnConvHandle(const Tensor &input, const std::vector<size_t>& kernel_size,
+                  const std::vector<size_t>& stride, const std::vector<size_t>& padding,
+                  const size_t in_channels, const size_t out_channels,
+                  const bool bias, const size_t groups = 1, const size_t workspace_byte_limit = 1024 * 1024 * 1024,
+                  const std::string& prefer = "fastest");
+  bool bias_term;
+  size_t batchsize;
+  size_t pad_w;
+  size_t pad_h;
+  size_t stride_h;
+  size_t stride_w;
+  size_t kernel_h;
+  size_t kernel_w;
+  size_t channels;
+  size_t num_filters;
+  size_t group;
+};
+
+Tensor GpuConvForward(const Tensor &x, const Tensor &W, const Tensor &b, const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardx(const Tensor &dy, const Tensor &W, const Tensor &x, const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W, const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardb(const Tensor &dy, const Tensor &b, const CudnnConvHandle &cch);
+
+
+class CudnnBatchNormHandle: public BatchNormHandle{
+    public:
+      CudnnBatchNormHandle(const float momentum, const Tensor& input);
+    size_t channels;
+    size_t batchsize;
+    float factor;
+};
+
+const std::vector<Tensor> GpuBatchNormForwardTraining(const CudnnBatchNormHandle &cbnh,
+  const Tensor& x, const Tensor& bnScale, const Tensor& bnBias, Tensor& running_mean, Tensor& running_var);
+
+Tensor GpuBatchNormForwardInference(const CudnnBatchNormHandle &cbnh, const Tensor& x,
+  const Tensor& bnScale, const Tensor& bnBias,  const Tensor& running_mean, const Tensor& running_var);
+
+const std::vector<Tensor> GpuBatchNormBackward(const CudnnBatchNormHandle &cbnh,
+  const Tensor& dy, const Tensor& x, const Tensor& bnScale, const Tensor& mean, const Tensor& var);
+
+
+class CudnnPoolingHandle : public PoolingHandle {
+ public:
+  CudnnPoolingHandle(const Tensor &input, const std::vector<int>& kernel_size,
+                     const std::vector<int>& stride, const std::vector<int>& padding,
+                     const bool is_max=true);
+
+  int batchsize;
+
+  int pooled_height;
+  int pooled_width;
+  int kernel_h;
+  int kernel_w;
+  int pad_h;
+  int pad_w;
+
+  int stride_h;
+  int stride_w;
+
+};
+
+Tensor GpuPoolingForward(const CudnnPoolingHandle &cph, const Tensor &x);
+
+Tensor GpuPoolingBackward(const CudnnPoolingHandle &cph, const Tensor &dy, const Tensor& x, const Tensor& y);
+
+class CudnnRNNHandle {
+ public:
+  CudnnRNNHandle(const Tensor &x,
+                 const int hidden_size, const int mode = 0,
+                 const int num_layers = 1, const int bias = 1,
+                 const float dropout = 0.0f, const int bidirectional = 0);
+  int bias;
+  int mode;
+  float dropout;
+  int bidirectional;
+  size_t feature_size;
+  size_t hidden_size;
+  size_t weights_size;
+  int num_layers;
+  size_t batch_size;
+  size_t seq_length;
+  size_t workspace_size;
+  size_t reserve_size;
+  Tensor workspace;
+  Tensor reserve_space;
+  void *states;
+};
+
+std::vector<Tensor> GpuRNNForwardTraining(const Tensor &x, const Tensor &hx, const Tensor &cx, const Tensor &W, CudnnRNNHandle &h);
+std::vector<Tensor> GpuRNNForwardInference(const Tensor &x, const Tensor &hx, const Tensor &cx, const Tensor &W, CudnnRNNHandle &h);
+std::vector<Tensor> GpuRNNBackwardx(const Tensor &y, const Tensor &dy, const Tensor &dhy, const Tensor &dcy, const Tensor &W, const Tensor &hx, const Tensor &cx, CudnnRNNHandle &h);
+Tensor GpuRNNBackwardW(const Tensor &x, const Tensor &hx, const Tensor &y, CudnnRNNHandle &h);
+
+void GpuRNNSetParam(int linLayerID, int pseudoLayer, Tensor &weights, Tensor &paramValues, bool is_bias, CudnnRNNHandle &h);
+Tensor GpuRNNGetParamCopy(int linLayerID, int pseudoLayer, Tensor &weights, bool is_bias, CudnnRNNHandle &h);
+
+std::vector<Tensor> GpuRNNForwardTrainingEx(const Tensor &x, const Tensor &hx, const Tensor &cx, const Tensor &W, const Tensor &seq_lengths, CudnnRNNHandle &h);
+std::vector<Tensor> GpuRNNForwardInferenceEx(const Tensor &x, const Tensor &hx, const Tensor &cx, const Tensor &W, const Tensor &seq_lengths, CudnnRNNHandle &h);
+std::vector<Tensor> GpuRNNBackwardxEx(const Tensor &y, const Tensor &dy, const Tensor &dhy, const Tensor &dcy, const Tensor &W, const Tensor &hx, const Tensor &cx, const Tensor &seq_lengths, CudnnRNNHandle &h);
+Tensor GpuRNNBackwardWEx(const Tensor &x, const Tensor &hx, const Tensor &y, const Tensor &seq_lengths, CudnnRNNHandle &h);
+
+
+#endif  // USE_CUDNN
+
+}  //namespace singa
diff --git a/src/api/model_optimizer.i b/src/api/model_optimizer.i
deleted file mode 100644
index 793df28..0000000
--- a/src/api/model_optimizer.i
+++ /dev/null
@@ -1,70 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-/*interface file for swig */
-
-%module model_optimizer
-%include "std_vector.i"
-%include "std_string.i"
-%include "std_pair.i"
-%include "std_shared_ptr.i"
-
-%{
-#include "singa/model/optimizer.h"
-#include "singa/proto/model.pb.h"
-using singa::Tensor;
-using singa::ParamSpec;
-using singa::OptimizerConf;
-%}
-
-
-%shared_ptr(singa::Optimizer)
-%shared_ptr(singa::Regularizer)
-%shared_ptr(singa::Constraint)
-
-namespace singa {
-class Optimizer {
- public:
-  // Optimizer() = default;
-  virtual ~Optimizer() = default;
-  void Setup(const std::string& str);
-  virtual void Apply(int epoch, float lr, const std::string& name,
-      Tensor& grad, Tensor& value, int step = -1) = 0;
-};
-inline std::shared_ptr<Optimizer> CreateOptimizer(const std::string& type);
-
-class Constraint {
- public:
-  Constraint() = default;
-  void Setup(const std::string& conf_str);
-  void Apply(int epoch, const Tensor& value, Tensor& grad, int step = -1);
-};
-
-inline std::shared_ptr<Constraint> CreateConstraint(const std::string& type);
-
-class Regularizer {
- public:
-  Regularizer() = default;
-  void Setup(const std::string& conf_str);
-  void Apply(int epoch, const Tensor& value, Tensor& grad, int step = -1);
-};
-inline std::shared_ptr<Regularizer> CreateRegularizer(const std::string& type);
-}
diff --git a/src/api/singa.i b/src/api/singa.i
index 3fc3b47..98be9d2 100644
--- a/src/api/singa.i
+++ b/src/api/singa.i
@@ -25,8 +25,6 @@
 %include "config.i"
 %include "core_tensor.i"
 %include "core_device.i"
-%include "model_layer.i"
-%include "model_optimizer.i"
-%include "model_loss.i"
-%include "model_metric.i"
-%include "io_snapshot.i"
+%include "model_operation.i"
+%include "dist_communicator.i"
+ // %include "io_snapshot.i"
\ No newline at end of file
diff --git a/src/core/common/common.cc b/src/core/common/common.cc
new file mode 100644
index 0000000..dbd74ed
--- /dev/null
+++ b/src/core/common/common.cc
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "singa/core/common.h"
+
+#include "singa/core/device.h"
+
+namespace singa {
+
+void* Block::mutable_data() {
+  if (data_ == nullptr && size_ > 0) {
+    data_ = device_->Malloc((int)size_);
+  }
+  initialized_ = true;
+  return static_cast<char*>(data_) + offset_;
+}
+
+const void* Block::data() const {
+  CHECK(initialized_) << "Must initialize data before reading it";
+  return static_cast<char*>(data_) + offset_;
+}
+
+void Block::free_data() {
+  if (data_) {
+    device_->Free(data_);
+    data_ = nullptr;
+    initialized_ = false;
+  }
+}
+
+}  // namespace singa
diff --git a/src/core/device/cpp_cpu.cc b/src/core/device/cpp_cpu.cc
index 04209ab..af56b1b 100644
--- a/src/core/device/cpp_cpu.cc
+++ b/src/core/device/cpp_cpu.cc
@@ -20,28 +20,42 @@
 
 namespace singa {
 
-std::shared_ptr<Device> defaultDevice=std::make_shared<CppCPU>();
+std::shared_ptr<Device> defaultDevice = std::make_shared<CppCPU>();
 
 CppCPU::CppCPU() : Device(-1, 1) {
   lang_ = kCpp;
-  //host_ = nullptr;
+#ifdef USE_DNNL
+  ctx_.dnnl_engine = dnnl::engine(dnnl::engine::kind::cpu, 0);
+  ctx_.dnnl_stream = dnnl::stream(ctx_.dnnl_engine);
+#endif  // USE_DNNL
+  // host_ = nullptr;
 }
 
+CppCPU::~CppCPU() {}
 
-void CppCPU::SetRandSeed(unsigned seed) {
-  ctx_.random_generator.seed(seed);
-}
-
+void CppCPU::SetRandSeed(unsigned seed) { ctx_.random_generator.seed(seed); }
 
 void CppCPU::DoExec(function<void(Context*)>&& fn, int executor) {
   CHECK_EQ(executor, 0);
   fn(&ctx_);
 }
 
+void CppCPU::TimeProfilingDoExec(function<void(Context*)>&& fn, int executor,
+                                 Node* node) {
+  CHECK_EQ(executor, 0);
+
+  auto t_start = std::chrono::high_resolution_clock::now();
+  fn(&ctx_);
+  std::chrono::duration<float> duration =
+      std::chrono::high_resolution_clock::now() - t_start;
+  node->time_elapsed_inc(duration.count());
+}
+
+void CppCPU::EvaluateTimeElapsed(Node* node) {}
 
 void* CppCPU::Malloc(int size) {
   if (size > 0) {
-    void *ptr = malloc(size);
+    void* ptr = malloc(size);
     memset(ptr, 0, size);
     return ptr;
   } else {
@@ -49,15 +63,12 @@
   }
 }
 
-
 void CppCPU::Free(void* ptr) {
-  if (ptr != nullptr)
-    free(ptr);
+  if (ptr != nullptr) free(ptr);
 }
 
-
 void CppCPU::CopyToFrom(void* dst, const void* src, size_t nBytes,
-                           CopyDirection direction, Context* ctx) {
+                        CopyDirection direction, Context* ctx) {
   memcpy(dst, src, nBytes);
 }
 
diff --git a/src/core/device/cuda_gpu.cc b/src/core/device/cuda_gpu.cc
index f6603d3..6025a5e 100644
--- a/src/core/device/cuda_gpu.cc
+++ b/src/core/device/cuda_gpu.cc
@@ -21,8 +21,10 @@
 #include <cuda.h>
 #include <cuda_runtime.h>
 #include <curand.h>
+
 #include <chrono>
 #include <iostream>
+
 #include "singa/core/device.h"
 #include "singa/utils/cuda_utils.h"
 namespace singa {
@@ -41,6 +43,11 @@
     CHECK_EQ(status, CUDNN_STATUS_SUCCESS) << cudnnGetErrorString(status);
   }
 #endif
+
+  // Explicitly destroys and cleans up all resources associated with current
+  // device
+  cudaDeviceReset();
+  // the returned code indicates "driver shutting down" after reset
 }
 const int kNumCudaStream = 1;
 
@@ -61,21 +68,34 @@
 void CudaGPU::Setup() {
   lang_ = kCuda;
   ctx_.stream = NULL;  // use the default sync stream
+
   // TODO(wangwei) create one handle for each steam?
+  // Preserved for future use instead of the default sync stream, for
+  // concurrency
+  // cudaStreamCreate(&ctx_.stream);
+
+#ifdef USE_DIST
+  CUDA_CHECK(cudaStreamCreateWithFlags(&ctx_.s, cudaStreamNonBlocking));
+  CUDA_CHECK(cudaStreamCreateWithFlags(&ctx_.c1, cudaStreamNonBlocking));
+  CUDA_CHECK(cudaStreamCreateWithFlags(&ctx_.c2, cudaStreamNonBlocking));
+#endif  // USE_DIST
+
   CUDA_CHECK(cudaSetDevice(id_));
   // use curandCreateGeneratorHost for CudaHost device
   CURAND_CHECK(
       curandCreateGenerator(&ctx_.curand_generator, CURAND_RNG_PSEUDO_DEFAULT));
+  CURAND_CHECK(curandSetStream(ctx_.curand_generator, ctx_.stream));
   auto seed = std::chrono::system_clock::now().time_since_epoch().count();
   SetRandSeed(seed);
   // TODO(wangwei) if one generator per stream, then need diff offset per gen?
   CURAND_CHECK(curandSetGeneratorOffset(ctx_.curand_generator, 0));
   CUBLAS_CHECK(cublasCreate(&(ctx_.cublas_handle)));
+  CUBLAS_CHECK(cublasSetStream(ctx_.cublas_handle, ctx_.stream));
 
 #ifdef USE_CUDNN
   // TODO(wangwei) create one handle for each stream?
   auto status = cudnnCreate(&ctx_.cudnn_handle);
   CHECK_EQ(status, CUDNN_STATUS_SUCCESS) << cudnnGetErrorString(status);
+  cudnnSetStream(ctx_.cudnn_handle, ctx_.stream);
 #endif  // USE_CUDNN
 }
 
@@ -86,11 +106,72 @@
 
 void CudaGPU::DoExec(function<void(Context*)>&& fn, int executor) { fn(&ctx_); }
 
+void CudaGPU::SyncBeforeCountingTime() {
+  // synchronization before counting time
+  bool previous_state = graph_enabled();
+  graph_enabled_ = false;
+  Sync();
+  graph_enabled_ = previous_state;
+}
+
+void CudaGPU::EvaluateTimeElapsed(Node* node) {
+  float totalTime;
+
+  cudaEventElapsedTime(&totalTime, node->start_, node->end_);
+
+  cudaEventDestroy(node->start_);
+  cudaEventDestroy(node->end_);
+
+  node->time_elapsed_inc(totalTime * 0.001);
+}
+
+void CudaGPU::TimeProfilingDoExec(function<void(Context*)>&& fn, int executor,
+                                  Node* node) {
+  // time profiling using cudaEvent
+  cudaEventCreate(&(node->start_));
+  cudaEventCreate(&(node->end_));
+
+#ifdef USE_DIST
+  if (node->op_name().find("Dist") != std::string::npos) {
+    if (node->op_name().find("Dist_s") != std::string::npos)
+      cudaEventRecord(node->start_, ctx_.s);
+    else if (node->op_name().find("Dist_c1") != std::string::npos)
+      cudaEventRecord(node->start_, ctx_.c1);
+    else if (node->op_name().find("Dist_c2") != std::string::npos)
+      cudaEventRecord(node->start_, ctx_.c2);
+    else if (node->op_name().find("Dist_c1c2") != std::string::npos)
+      cudaEventRecord(node->start_, ctx_.c1);
+  } else {
+    cudaEventRecord(node->start_, ctx_.stream);
+  }
+#else
+  cudaEventRecord(node->start_, ctx_.stream);
+#endif  // USE_DIST
+
+  fn(&ctx_);
+
+#ifdef USE_DIST
+  if (node->op_name().find("Dist") != std::string::npos) {
+    if (node->op_name().find("Dist_s") != std::string::npos)
+      cudaEventRecord(node->end_, ctx_.s);
+    else if (node->op_name().find("Dist_c1") != std::string::npos)
+      cudaEventRecord(node->end_, ctx_.c1);
+    else if (node->op_name().find("Dist_c2") != std::string::npos)
+      cudaEventRecord(node->end_, ctx_.c2);
+    else if (node->op_name().find("Dist_c1c2") != std::string::npos)
+      cudaEventRecord(node->end_, ctx_.c2);
+  } else {
+    cudaEventRecord(node->end_, ctx_.stream);
+  }
+#else
+  cudaEventRecord(node->end_, ctx_.stream);
+#endif  // USE_DIST
+}
+
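
The GPU version cannot use host clocks because kernel launches are asynchronous, so it records a CUDA event on the operation's stream before and after the closure runs; `EvaluateTimeElapsed` later reads the pair with `cudaEventElapsedTime`, which reports milliseconds (hence the `* 0.001`), once the events have completed (hence `SyncBeforeCountingTime`). A minimal sketch of the event pattern on the default stream (error handling elided; compile with nvcc):

#include <cstdio>

#include <cuda_runtime.h>

__global__ void scale(float* x) { x[threadIdx.x] *= 2.0f; }

int main() {
  float* d = nullptr;
  cudaMalloc(&d, 256 * sizeof(float));

  cudaEvent_t start, end;
  cudaEventCreate(&start);
  cudaEventCreate(&end);

  cudaEventRecord(start, 0);  // before the profiled operation
  scale<<<1, 256>>>(d);       // the operation itself
  cudaEventRecord(end, 0);    // after it is enqueued

  cudaEventSynchronize(end);  // events must have fired before reading them
  float ms = 0.0f;
  cudaEventElapsedTime(&ms, start, end);  // milliseconds
  std::printf("elapsed: %f s\n", ms * 0.001f);

  cudaEventDestroy(start);
  cudaEventDestroy(end);
  cudaFree(d);
  return 0;
}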
 void CudaGPU::CopyToFrom(void* dst, const void* src, size_t nBytes,
                          CopyDirection direction, Context* ctx) {
-  cudaMemcpy(dst, src, nBytes, copyKind[direction]);
-  // TODO(wangwei) use async copy
-  // cudaMemcpyAsync(dst, src, nBytes,cudaMemcpyDefault, ctx_.stream);
+  // cudaMemcpy(dst, src, nBytes, copyKind[direction]);
+  cudaMemcpyAsync(dst, src, nBytes, copyKind[direction], ctx_.stream);
 }
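
`CopyToFrom` now enqueues `cudaMemcpyAsync` on the context stream instead of issuing a blocking `cudaMemcpy`, so a host buffer involved in a device-to-host copy must not be read or freed until the stream is drained; that is the role of the `CudaGPU::Sync()` added below. A small sketch of the ordering requirement (error handling elided):

#include <cuda_runtime.h>

int main() {
  const size_t n = 1024;
  float host[1024] = {0};
  float* dev = nullptr;
  cudaMalloc(&dev, n * sizeof(float));

  cudaStream_t s;
  cudaStreamCreate(&s);

  // Both copies are merely *enqueued* here...
  cudaMemcpyAsync(dev, host, n * sizeof(float), cudaMemcpyHostToDevice, s);
  cudaMemcpyAsync(host, dev, n * sizeof(float), cudaMemcpyDeviceToHost, s);

  // ...so the host must not touch `host` until the stream has drained.
  cudaStreamSynchronize(s);

  cudaStreamDestroy(s);
  cudaFree(dev);
  return 0;
}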
 
 size_t CudaGPU::GetAllocatedMem() {
@@ -108,8 +189,8 @@
   if (size > 0) {
     CUDA_CHECK(cudaSetDevice(id_));
     pool_->Malloc((void**)&ptr, size);
-    // TODO(wangwei) remove the memset.
-    CUDA_CHECK(cudaMemset(ptr, 0, size));
+    // Memset kept commented out for future analysis of behavior without
+    // cnmem
+    // CUDA_CHECK(cudaMemsetAsync(ptr, 0, size, ctx_.stream));
   }
   return ptr;
 }
@@ -122,5 +203,10 @@
   }
 }
 
+void CudaGPU::Sync() {
+  Exec([this](Context* ctx) { CUDA_CHECK(cudaDeviceSynchronize()); }, {}, {},
+       "Waiting");
+}
+
 }  // namespace singa
 #endif  // USE_CUDA
diff --git a/src/core/device/device.cc b/src/core/device/device.cc
index cda1b9f..15167e2 100644
--- a/src/core/device/device.cc
+++ b/src/core/device/device.cc
@@ -19,25 +19,82 @@
 #include "singa/core/device.h"
 
 namespace singa {
+
+bool Device::lazy_alloc_ = true;
+
 Device::Device(int id, int num_executors)
     : id_(id), num_executors_(num_executors) {
   // TODO(wangwei) create scheduler and vm.
   host_ = defaultDevice;
+  graph_ = new Graph(this);
 }
 
-void Device::Exec(function<void(Context*)>&& fn, const vector<Block*> read_blocks,
-                    const vector<Block*> write_blocks, bool use_rand_generator) {
-  // TODO(wangwei) execute operations scheduled by the scheduler.
-  DoExec(std::move(fn), 0);
+Device::~Device() {
+  if (graph_) {
+    delete graph_;
+  }
 }
 
-// TODO(wangwei) get Block from the memory manager
+void Device::Reset() {
+  // Sync the device to finish the current calculation
+  graph_enabled_ = false;
+  Sync();
+
+  // Reset Seed
+  // seed_ = std::chrono::system_clock::now().time_since_epoch().count();
+  // SetRandSeed(seed_);
+
+  // Reset Graph
+  graph_->Reset();
+
+  // Others
+  verbosity_ = 0;
+  skip_iteration_ = 5;
+}
+
+void Device::Exec(function<void(Context*)>&& fn,
+                  const vector<Block*> read_blocks,
+                  const vector<Block*> write_blocks, string op_name,
+                  bool use_rand_generator) {
+  if (graph_enabled_ == true) {
+    graph_->AddOperation(std::move(fn), read_blocks, write_blocks, op_name);
+  } else {
+    // printf("immediately ops\n");
+    DoExec(std::move(fn), 0);
+  }
+}
+
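
`Device::Exec` is now the single funnel for operations: with the graph enabled, the closure plus its read/write block sets and op name are recorded into the graph; otherwise it runs immediately, and `RunGraph` later replays what was buffered. The shape of that buffer-or-run dispatch, reduced to a sketch (`MiniDevice` and `Op` are illustrative, not SINGA's types):

#include <functional>
#include <string>
#include <utility>
#include <vector>

// Illustrative buffer-or-run dispatcher in the spirit of Device::Exec.
struct Op {
  std::function<void()> fn;
  std::string name;
};

class MiniDevice {
 public:
  bool graph_enabled = false;

  void Exec(std::function<void()> fn, std::string name) {
    if (graph_enabled)
      buffered_.push_back({std::move(fn), std::move(name)});  // record
    else
      fn();  // run eagerly
  }

  void RunGraph() {  // replay everything recorded so far, in order
    for (auto& op : buffered_) op.fn();
    buffered_.clear();
  }

 private:
  std::vector<Op> buffered_;
};

int main() {
  MiniDevice dev;
  int x = 0;
  dev.graph_enabled = true;
  dev.Exec([&] { x += 1; }, "inc");  // buffered, not yet executed
  dev.Exec([&] { x *= 10; }, "mul");
  dev.RunGraph();                    // executes in recorded order: x == 10
  return x == 10 ? 0 : 1;
}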
+void Device::RunGraph(bool serial) {
+  bool previous_state = graph_enabled_;
+  graph_enabled_ = false;
+
+  if (serial) {
+    // sequential execution
+    graph_->RunInSerial();
+  } else {
+    // execute according to dependencies
+    graph_->RunGraph();
+  }
+
+  // graph_->Debug();
+
+  graph_enabled_ = previous_state;
+}
+
+void Device::PrintTimeProfiling() { graph_->PrintTimeProfiling(); }
+
+// TODO(wangwei) get Block from the memory manager
 Block* Device::NewBlock(int size) {
-  CHECK_GE(size, 0) << "size is negative, could be caused by the type cast "
-    << "from size_t to int. In that case, the size is too large.";
+  CHECK_GE(size, 0)
+      << "size is negative, could be caused by the type cast "
+      << "from size_t to int. In that case, the size is too large.";
   if (size > 0) {
-    void* ptr = Malloc(size);
-    return new Block(ptr, size);
+    void* ptr = nullptr;
+    if (!lazy_alloc_) {
+      ptr = Malloc(size);
+    }
+
+    return new Block(ptr, size, this);
   } else {
     return nullptr;
   }
@@ -53,24 +110,19 @@
 
 void Device::CopyDataToFrom(Block* dst, Block* src, size_t nBytes,
                             CopyDirection direct, int dst_offset,
-                            int src_offset) {
-  this->Exec(
-      [this, dst, src, nBytes, direct, dst_offset, src_offset](Context* ctx) {
-        this->CopyToFrom(
-            reinterpret_cast<char*>(dst->mutable_data()) + dst_offset,
-            reinterpret_cast<const char*>(src->data()) + src_offset, nBytes,
-            direct, ctx);
-      },
-      {src}, {dst});
+                            int src_offset, Context* ctx) {
+  this->CopyToFrom(reinterpret_cast<char*>(dst->mutable_data()) + dst_offset,
+                   reinterpret_cast<const char*>(src->data()) + src_offset,
+                   nBytes, direct, ctx);
 }
 
 void Device::CopyDataFromHostPtr(Block* dst, const void* src, size_t nBytes,
-                                 size_t dst_offset) {
+                                 size_t dst_offset, Context* ctx) {
   auto direct = lang_ == kCpp ? kHostToHost : kHostToDevice;
   void* dstptr = reinterpret_cast<char*>(dst->mutable_data()) + dst_offset;
   Exec([this, dstptr, src, nBytes,
         direct](Context* ctx) { CopyToFrom(dstptr, src, nBytes, direct, ctx); },
-       {}, {dst});
+       {}, {dst}, "CopyDataFromHostPtr");
 }
 void Device::Sync() {}
 }  // namespace singa
diff --git a/src/core/device/opencl_device.cc b/src/core/device/opencl_device.cc
index b36a1da..8d0971d 100644
--- a/src/core/device/opencl_device.cc
+++ b/src/core/device/opencl_device.cc
@@ -17,15 +17,15 @@
  */
 #ifndef DISABLE_WARNINGS
 
-#include <iostream>
 #include <fstream>
+#include <iostream>
 #include <sstream>
 #include <string>
 
-#include "singa/core/device.h"
-#include "singa/utils/tinydir.h"
-#include "singa/utils/opencl_utils.h"
 #include "./opencl_func.h"
+#include "singa/core/device.h"
+#include "singa/utils/opencl_utils.h"
+#include "singa/utils/tinydir.h"
 
 #ifdef USE_OPENCL
 
@@ -35,120 +35,118 @@
 namespace singa {
 
 OpenclDevice::OpenclDevice(int id, int num_executors)
-	: Device(id, num_executors) {
+    : Device(id, num_executors) {
   CHECK_GE(id, 0);
   lang_ = kOpencl;
-  
+
   ocl::current_context().build_options("-cl-std=CL1.2");
-  
+
   ctx_.vcl_ctx_id = 0;
   this->this_device = ocl::current_device();
-  
+
   BuildPrograms();
 }
 
-
 OpenclDevice::~OpenclDevice() {
-
   // Flush and finish the command queue.
   auto cmdq = ocl::current_context().get_queue();
-  
+
   cmdq.flush();
   cmdq.finish();
 }
 
-
 void OpenclDevice::SetRandSeed(unsigned seed) { seed = seed; }
 
-
 void OpenclDevice::CopyDataToFrom(Block* dst, Block* src, size_t nBytes,
-                                  CopyDirection direction, int dst_offset, int src_offset) {
+                                  CopyDirection direction, int dst_offset,
+                                  int src_offset, Context* ctx) {
   // Pointers must be valid.
   if (!dst || !src) return;
-  
+
   auto ocl_ctx = viennacl::ocl::get_context(ctx_.vcl_ctx_id);
 
-  switch(direction) {
-  case kHostToDevice: {
-    auto dst_handle = WrapHandle(static_cast<cl_mem>(dst->mutable_data()), ocl_ctx);
-    memory_write(dst_handle, dst_offset, nBytes, src->data());
-    return;
-  }
-  case kDeviceToHost: {
-    auto src_handle = WrapHandle(static_cast<cl_mem>(src->mutable_data()), ocl_ctx);
-    memory_read(src_handle, src_offset, nBytes, dst->mutable_data());
-    return;
-  }
-  case kDeviceToDevice: {
-    auto src_handle = WrapHandle(static_cast<cl_mem>(src->mutable_data()), ocl_ctx);
-    auto dst_handle = WrapHandle(static_cast<cl_mem>(dst->mutable_data()), ocl_ctx);
-    memory_copy(src_handle, dst_handle, src_offset, dst_offset, nBytes);
-    return;
-  }
-  default:
-    return;
+  switch (direction) {
+    case kHostToDevice: {
+      auto dst_handle =
+          WrapHandle(static_cast<cl_mem>(dst->mutable_data()), ocl_ctx);
+      memory_write(dst_handle, dst_offset, nBytes, src->data());
+      return;
+    }
+    case kDeviceToHost: {
+      auto src_handle =
+          WrapHandle(static_cast<cl_mem>(src->mutable_data()), ocl_ctx);
+      memory_read(src_handle, src_offset, nBytes, dst->mutable_data());
+      return;
+    }
+    case kDeviceToDevice: {
+      auto src_handle =
+          WrapHandle(static_cast<cl_mem>(src->mutable_data()), ocl_ctx);
+      auto dst_handle =
+          WrapHandle(static_cast<cl_mem>(dst->mutable_data()), ocl_ctx);
+      memory_copy(src_handle, dst_handle, src_offset, dst_offset, nBytes);
+      return;
+    }
+    default:
+      return;
   }
 }
 
-
 void OpenclDevice::BuildPrograms() {
-  ocl::current_context().add_program(opencl::distribution_str, "opencl_distribution");
-  ocl::current_context().add_program(opencl::tensormath_str, "opencl_tensor_math");
+  ocl::current_context().add_program(opencl::distribution_str,
+                                     "opencl_distribution");
+  ocl::current_context().add_program(opencl::tensormath_str,
+                                     "opencl_tensor_math");
   ocl::current_context().add_program(opencl::im2col_str, "opencl_im2col");
   ocl::current_context().add_program(opencl::pooling_str, "opencl_pooling");
 }
 
-
 void OpenclDevice::DoExec(function<void(Context*)>&& fn, int executor) {
   fn(&ctx_);
 }
 
-
 void OpenclDevice::CopyToFrom(void* dst, const void* src, size_t nBytes,
-                  CopyDirection direction, Context* ctx) {
+                              CopyDirection direction, Context* ctx) {
   // Pointers must be valid.
   if (!dst || !src) return;
-  
+
   auto ocl_ctx = viennacl::ocl::get_context(ctx->vcl_ctx_id);
 
-  switch(direction) {
-  case kHostToDevice: {
-    auto dst_handle = WrapHandle(static_cast<cl_mem>(dst), ocl_ctx);
-    memory_write(dst_handle, 0, nBytes, src);
-    return;
-  }
-  case kDeviceToHost: {
-    auto src_handle = WrapHandle((const cl_mem)src, ocl_ctx);
-    memory_read(src_handle, 0, nBytes, dst);
-    return;
-  }
-  case kDeviceToDevice: {
-    auto src_handle = WrapHandle((const cl_mem)src, ocl_ctx);
-    auto dst_handle = WrapHandle(static_cast<cl_mem>(dst), ocl_ctx);
-    memory_copy(src_handle, dst_handle, 0, 0, nBytes);
-    return;
-  }
-  default:
-    return;
+  switch (direction) {
+    case kHostToDevice: {
+      auto dst_handle = WrapHandle(static_cast<cl_mem>(dst), ocl_ctx);
+      memory_write(dst_handle, 0, nBytes, src);
+      return;
+    }
+    case kDeviceToHost: {
+      auto src_handle = WrapHandle((const cl_mem)src, ocl_ctx);
+      memory_read(src_handle, 0, nBytes, dst);
+      return;
+    }
+    case kDeviceToDevice: {
+      auto src_handle = WrapHandle((const cl_mem)src, ocl_ctx);
+      auto dst_handle = WrapHandle(static_cast<cl_mem>(dst), ocl_ctx);
+      memory_copy(src_handle, dst_handle, 0, 0, nBytes);
+      return;
+    }
+    default:
+      return;
   }
 }
 
-
 void* OpenclDevice::Malloc(int size) {
   cl_mem buffer = memory_create(ocl::current_context(), size, nullptr);
 
   return static_cast<void*>(buffer);
 }
 
-
 void OpenclDevice::Free(void* p) {
   if (!p) return;
   cl_mem buffer = static_cast<cl_mem>(p);
   clReleaseMemObject(buffer);
 }
 
-} // namespace singa
+}  // namespace singa
 
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
 
 #endif
diff --git a/src/core/device/opencl_func.h b/src/core/device/opencl_func.h
index 97ef2ec..a005a3c 100644
--- a/src/core/device/opencl_func.h
+++ b/src/core/device/opencl_func.h
@@ -1,4 +1,5 @@
-// This file is auto-generated by tool/opencl/clsrc_to_str, do not edit manually.
+// This file is auto-generated by tool/opencl/clsrc_to_str, do not edit
+// manually.
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -17,10 +18,853 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#ifdef USE_OPENCL
+
 #include <string>
 
 namespace singa {
- namespace opencl {
-const std::string im2col_str = "// This file is modified from the file located at\n// https://github.com/BVLC/caffe/blob/opencl/src/caffe/greentea/cl_kernels/im2col.cl\n// and is covered under the BSD 2-Clause License, as indicated in the LICENSE\n// file at the root of this repository.\n\n__kernel void im2col(const int n, __global const float* data_im,\n                     const int data_im_off,\n                     const int height, const int width,\n                     const int kernel_h, const int kernel_w,\n                     const int pad_h, const int pad_w,\n                     const int stride_h, const int stride_w,\n                     const int dilation_h, const int dilation_w,\n                     const int height_col, const int width_col,\n                     __global float* data_col, const int data_col_off) {\n\n  for (int index = get_global_id(0); index < n;\n      index += get_global_size(0)) {\n    const int h_index = index / width_col;\n    const int h_col = h_index % height_col;\n    const int w_col = index % width_col;\n    const int c_im = h_index / height_col;\n    const int c_col = c_im * kernel_h * kernel_w;\n    const int h_offset = h_col * stride_h - pad_h;\n    const int w_offset = w_col * stride_w - pad_w;\n    \n    __global float* data_col_ptr = data_col + data_col_off;\n    data_col_ptr += (c_col * height_col + h_col) * width_col + w_col;\n    __global const float* data_im_ptr = data_im + data_im_off;\n    data_im_ptr += (c_im * height + h_offset) * width + w_offset;\n    \n    for (int i = 0; i < kernel_h; ++i) {\n      for (int j = 0; j < kernel_w; ++j) {\n        int h_im = h_offset + i * dilation_h;\n        int w_im = w_offset + j * dilation_w;\n        *data_col_ptr =\n            (h_im >= 0 && w_im >= 0 && h_im < height && w_im < width) ?\n                data_im_ptr[i * dilation_h * width + j * dilation_w] : 0;\n        data_col_ptr += height_col * width_col;\n      }\n    }\n  }\n}\n\n__kernel void col2im(const int n, __global const float* data_col,\n                     const int data_col_off, const int channels,\n                     const int height, const int width,\n                     const int kernel_h, const int kernel_w,\n                     const int pad_h, const int pad_w,\n                     const int stride_h, const int stride_w,\n                     const int dilation_h, const int dilation_w,\n                     const int height_col, const int width_col,\n                     __global float* data_im, const int data_im_off) {\n\n  for (int index = get_global_id(0); index < n; index += get_global_size(0)) {\n    float val = 0;\n    const int w_im = index % width + pad_w;\n    const int h_im = (index / width) % height + pad_h;\n    const int c_im = index / (width * height);\n    int kernel_extent_w = (kernel_w - 1) * dilation_w + 1;\n    int kernel_extent_h = (kernel_h - 1) * dilation_h + 1;\n    // compute the start and end of the output\n    const int w_col_start =\n        (w_im < kernel_extent_w) ? 0 : (w_im - kernel_extent_w) / stride_w + 1;\n    const int w_col_end = min(w_im / stride_w + 1, width_col);\n    const int h_col_start =\n        (h_im < kernel_extent_h) ? 
0 : (h_im - kernel_extent_h) / stride_h + 1;\n    const int h_col_end = min(h_im / stride_h + 1, height_col);\n    \n    // TODO: use LCM of stride and dilation to avoid unnecessary loops\n    for (int h_col = h_col_start; h_col < h_col_end; h_col += 1) {\n      for (int w_col = w_col_start; w_col < w_col_end; w_col += 1) {\n        int h_k = (h_im - h_col * stride_h);\n        int w_k = (w_im - w_col * stride_w);\n        if (h_k % dilation_h == 0 && w_k % dilation_w == 0) {\n          h_k /= dilation_h;\n          w_k /= dilation_w;\n          int data_col_index = (((c_im * kernel_h + h_k) * kernel_w + w_k) *\n                                height_col + h_col) * width_col + w_col;\n          val += data_col[data_col_off + data_col_index];\n        }\n      }\n    }\n    data_im[data_im_off + index] = val;\n  }\n}\n";const std::string pooling_str = "// This file is modified from the file located at\n// https://github.com/BVLC/caffe/blob/opencl/src/caffe/greentea/cl_kernels/pooling.cl\n// and is covered under the BSD 2-Clause License, as indicated in the LICENSE\n// file at the root of this repository.\n\n__kernel void max_pool_forward(\n    const int nthreads, __global const float* bottom, const int channels, \n    const int height, const int width,\n    const int pooled_h, const int pooled_w,\n    const int kernel_h, const int kernel_w,\n    const int stride_h, const int stride_w,\n    const int pad_h, const int pad_w,\n    __global float* top, __global float* mask) {\n\n//  printf(\"%d \", get_global_size(0));\n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph = (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) % channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    \n    int hstart = ph * stride_h - pad_h;\n    int wstart = pw * stride_w - pad_w;\n    const int hend = min(hstart + kernel_h, height);\n    const int wend = min(wstart + kernel_w, width);\n    hstart = max(hstart, (int)0);\n    wstart = max(wstart, (int)0);\n    \n    float maxval = -FLT_MAX;\n    int maxidx = -1;\n    __global const float* bottom_slice = bottom + (n * channels + c) * height * width;\n    for (int h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        const int index = h * width + w;\n        if (bottom_slice[index] > maxval) {\n          maxidx = index;\n          maxval = bottom_slice[maxidx];\n        }\n      }\n    }\n    top[i] = maxval;\n    mask[i] = (float)maxidx;\n  }\n}\n\n__kernel void ave_pool_forward(\n    const int nthreads, __global const float* const bottom, const int channels, \n    const int height, const int width,\n    const int pooled_h, const int pooled_w,\n    const int kernel_h, const int kernel_w,\n    const int stride_h, const int stride_w, \n    const int pad_h, const int pad_w, __global float* top) {\n    \n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph = (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) % channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    int hstart = ph * stride_h - pad_h;\n    int wstart = pw * stride_w - pad_w;\n    int hend = min(hstart + kernel_h, height + pad_h);\n    int wend = min(wstart + kernel_w, width + pad_w);\n    const int pool_size = (hend - hstart) * (wend - wstart);\n    hstart = max(hstart, (int)0);\n    wstart = max(wstart, (int)0);\n    hend = min(hend, height);\n    wend = 
min(wend, width);\n    float aveval = 0;\n    __global const float* bottom_slice = bottom + (n * channels + c) * height * width;\n    for (int h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        aveval += bottom_slice[h * width + w];\n      }\n    }\n    top[i] = aveval / pool_size;\n  }\n}\n\n__kernel void sto_pool_forward_train(\n    const int nthreads, __global const float* bottom,\n    const int channels, const int height, const int width,\n    const int pooled_h, const int pooled_w, const int kernel_h,\n    const int kernel_w, const int stride_h, const int stride_w,\n    __global float* rand_idx, __global float* top) {\n    \n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph = (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) % channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    \n    const int hstart = ph * stride_h;\n    const int hend = min(hstart + kernel_h, height);\n    const int wstart = pw * stride_w;\n    const int wend = min(wstart + kernel_w, width);\n    float cumsum = 0.;\n    __global const float* bottom_slice = bottom + (n * channels + c) * height * width;\n    // First pass: get sum\n    for (int h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        cumsum += bottom_slice[h * width + w];\n      }\n    }\n    const float thres = rand_idx[i] * cumsum;\n    // Second pass: get value, and set i.\n    cumsum = 0;\n    for (int h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        cumsum += bottom_slice[h * width + w];\n        if (cumsum >= thres) {\n          rand_idx[i] = ((n * channels + c) * height + h) * width + w;\n          top[i] = bottom_slice[h * width + w];\n          h = hend;\n          w = wend;\n        }\n      }\n    }\n  }\n}\n\n__kernel void sto_pool_forward_test(\n    const int nthreads, __global const float* const bottom, const int channels, \n    const int height, const int width,\n    const int pooled_h, const int pooled_w, \n    const int kernel_h, const int kernel_w, \n    const int stride_h, const int stride_w,\n    __global float* top) {\n    \n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph = (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) % channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    \n    const int hstart = ph * stride_h;\n    const int hend = min(hstart + kernel_h, height);\n    const int wstart = pw * stride_w;\n    const int wend = min(wstart + kernel_w, width);\n    // We set cumsum to be 0 to avoid divide-by-zero problems\n    float cumsum = FLT_MIN;\n    float cumvalues = 0.;\n    __global const float* bottom_slice = bottom + (n * channels + c) * height * width;\n    // First pass: get sum\n    for (int h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        cumsum += bottom_slice[h * width + w];\n        cumvalues += bottom_slice[h * width + w] * bottom_slice[h * width + w];\n      }\n    }\n    top[i] = cumvalues / cumsum;\n  }\n}\n\n__kernel void max_pool_backward(const int nthreads,\n                                __global const float* top_diff,\n                                __global const float* mask,\n                                const int channels,\n                                const int height, const int width,\n                                const int pooled_h, 
const int pooled_w,\n                                const int kernel_h, const int kernel_w,\n                                const int stride_h, const int stride_w,\n                                const int pad_h, const int pad_w,\n                                __global float* bottom_diff) {\n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    // find out the local i\n    // find out the local offset\n    const int w = i % width;\n    const int h = (i / width) % height;\n    const int c = (i / width / height) % channels;\n    const int n = i / width / height / channels;\n    \n    const int phstart =\n        (h + pad_h < kernel_h) ? 0 : (h + pad_h - kernel_h) / stride_h + 1;\n    const int phend = min((h + pad_h) / stride_h + 1, pooled_h);\n    const int pwstart =\n        (w + pad_w < kernel_w) ? 0 : (w + pad_w - kernel_w) / stride_w + 1;\n    const int pwend = min((w + pad_w) / stride_w + 1, pooled_w);\n    float gradient = 0.0f;\n    const int offset = (n * channels + c) * pooled_h * pooled_w;\n    __global const float* top_diff_slice = top_diff + offset;\n    __global const float* mask_slice = mask + offset;\n    for (int ph = phstart; ph < phend; ++ph) {\n      for (int pw = pwstart; pw < pwend; ++pw) {\n        if (mask_slice[ph * pooled_w + pw] == (float)(h * width + w)) {\n          gradient += top_diff_slice[ph * pooled_w + pw];\n        }\n      }\n    }\n    bottom_diff[i] = gradient;\n  }\n}\n\n__kernel void ave_pool_backward(const int nthreads,\n                                __global const float* top_diff,\n                                const int channels,\n                                const int height, const int width,\n                                const int pooled_h, const int pooled_w,\n                                const int kernel_h, const int kernel_w,\n                                const int stride_h, const int stride_w,\n                                const int pad_h, const int pad_w,\n                                __global float* bottom_diff) {\n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    // find out the local i\n    // find out the local offset\n    const int w = i % width + pad_w;\n    const int h = (i / width) % height + pad_h;\n    const int c = (i / width / height) % channels;\n    const int n = i / width / height / channels;\n    \n    const int phstart = (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;\n    const int phend = min(h / stride_h + 1, pooled_h);\n    const int pwstart = (w < kernel_w) ? 
0 : (w - kernel_w) / stride_w + 1;\n    const int pwend = min(w / stride_w + 1, pooled_w);\n    float gradient = 0.0;\n    __global const float* const top_diff_slice = top_diff + (n * channels + c) * pooled_h * pooled_w;\n    for (int ph = phstart; ph < phend; ++ph) {\n      for (int pw = pwstart; pw < pwend; ++pw) {\n        // figure out the pooling size\n        int hstart = ph * stride_h - pad_h;\n        int wstart = pw * stride_w - pad_w;\n        int hend = min(hstart + kernel_h, height + pad_h);\n        int wend = min(wstart + kernel_w, width + pad_w);\n        int pool_size = (hend - hstart) * (wend - wstart);\n        gradient += top_diff_slice[ph * pooled_w + pw] / pool_size;\n      }\n    }\n    bottom_diff[i] = gradient;\n  }\n}\n\n__kernel void sto_pool_backward(\n    const int nthreads, __global const float* rand_idx,\n    __global const float* const top_diff, const int channels,\n    const int height, const int width,\n    const int pooled_h, const int pooled_w,\n    const int kernel_h, const int kernel_w,\n    const int stride_h, const int stride_w,\n    __global float* bottom_diff) {\n\n  for (int i = get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    // find out the local i\n    // find out the local offset\n    const int w = i % width;\n    const int h = (i / width) % height;\n    const int c = (i / width / height) % channels;\n    const int n = i / width / height / channels;\n    \n    const int phstart = (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;\n    const int phend = min(h / stride_h + 1, pooled_h);\n    const int pwstart = (w < kernel_w) ? 0 : (w - kernel_w) / stride_w + 1;\n    const int pwend = min(w / stride_w + 1, pooled_w);\n    float gradient = 0.0;\n    __global const float* rand_idx_slice = rand_idx + (n * channels + c) * pooled_h * pooled_w;\n    __global const float* top_diff_slice = top_diff + (n * channels + c) * pooled_h * pooled_w;\n    for (int ph = phstart; ph < phend; ++ph) {\n      for (int pw = pwstart; pw < pwend; ++pw) {\n        gradient += top_diff_slice[ph * pooled_w + pw]\n            * (i == (int) (rand_idx_slice[ph * pooled_w + pw])?1.0:0.0);\n      }\n    }\n    bottom_diff[i] = gradient;\n  }\n}\n\n";const std::string distribution_str = "// This code is adapted from https://github.com/amd/OpenCL-caffe/blob/stable/src/caffe/ocl/random.cl\n\n//Note: random generator has two parts\n//first part: the open sourced threefy random generator kernel from DE Shaw Research\n//second part. 
we wrap the kernel up to generate uniform, bernoulli and gaussion distribution generators.\n\n//begin: the open sourced random generator from DE Shaw Research\n//https://www.deshawresearch.com/resources_random123.html\ntypedef uint uint32_t;\n\nstruct r123array4x32 {\n  uint32_t v[4];\n};\n\nenum r123_enum_threefry32x4 {\n  R_32x4_0_0 = 10,\n  R_32x4_0_1 = 26,\n  R_32x4_1_0 = 11,\n  R_32x4_1_1 = 21,\n  R_32x4_2_0 = 13,\n  R_32x4_2_1 = 27,\n  R_32x4_3_0 = 23,\n  R_32x4_3_1 = 5,\n  R_32x4_4_0 = 6,\n  R_32x4_4_1 = 20,\n  R_32x4_5_0 = 17,\n  R_32x4_5_1 = 11,\n  R_32x4_6_0 = 25,\n  R_32x4_6_1 = 10,\n  R_32x4_7_0 = 18,\n  R_32x4_7_1 = 20\n};\n\ninline uint32_t RotL_32(uint32_t x, unsigned int N) {\n  return (x << (N & 31)) | (x >> ((32 - N) & 31));\n}\n\ntypedef struct r123array4x32 threefry4x32_ctr_t;\ntypedef struct r123array4x32 threefry4x32_key_t;\ntypedef struct r123array4x32 threefry4x32_ukey_t;\n\ninline threefry4x32_ctr_t threefry4x32_R(unsigned int Nrounds, threefry4x32_ctr_t in, threefry4x32_key_t k) {\n  threefry4x32_ctr_t X;\n  uint32_t ks[4 + 1];\n  int i;\n  ks[4] = 0x1BD11BDA;\n\n  {\n    ks[0] = k.v[0];\n    X.v[0] = in.v[0];\n    ks[4] ^= k.v[0];\n\n    ks[1] = k.v[1];\n    X.v[1] = in.v[1];\n    ks[4] ^= k.v[1];\n\n    ks[2] = k.v[2];\n    X.v[2] = in.v[2];\n    ks[4] ^= k.v[2];\n\n    ks[3] = k.v[3];\n    X.v[3] = in.v[3];\n    ks[4] ^= k.v[3];\n  }\n\n  X.v[0] += ks[0];\n  X.v[1] += ks[1];\n  X.v[2] += ks[2];\n  X.v[3] += ks[3];\n\n  if (Nrounds > 0) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 1) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 2) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 3) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 3) {\n    X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] += ks[4];\n    X.v[4 - 1] += 1;\n  }\n\n  if (Nrounds > 4) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 5) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 6) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 7) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 7) {\n    X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] += ks[0];\n    X.v[4 - 1] += 2;\n  }\n\n  if (Nrounds > 8) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = 
RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 9) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 10) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 11) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 11) {\n    X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] += ks[1];\n    X.v[4 - 1] += 3;\n  }\n\n  if (Nrounds > 12) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 13) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 14) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 15) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 15) {\n    X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] += ks[2];\n    X.v[4 - 1] += 4;\n  }\n\n  if (Nrounds > 16) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 17) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 18) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 19) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 19) {\n    X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] += ks[3];\n    X.v[4 - 1] += 5;\n  }\n\n  if (Nrounds > 20) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 21) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 22) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 23) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], 
R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 23) {\n    X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] += ks[4];\n    X.v[4 - 1] += 6;\n  }\n\n  if (Nrounds > 24) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 25) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 26) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 27) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 27) {\n    X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] += ks[0];\n    X.v[4 - 1] += 7;\n  }\n\n  if (Nrounds > 28) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 29) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 30) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 31) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 31) {\n    X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] += ks[1];\n    X.v[4 - 1] += 8;\n  }\n\n  if (Nrounds > 32) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 33) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 34) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 35) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 35) {\n    X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] += ks[2];\n    X.v[4 - 1] += 9;\n  }\n\n  if (Nrounds > 36) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 37) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 38) {\n    X.v[0] += 
X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 39) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 39) {\n    X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] += ks[3];\n    X.v[4 - 1] += 10;\n  }\n\n  if (Nrounds > 40) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 41) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n  if (Nrounds > 42) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 43) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 43) {\n    X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] += ks[4];\n    X.v[4 - 1] += 11;\n  }\n\n  if (Nrounds > 44) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 45) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 46) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 47) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 47) {\n    X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] += ks[0];\n    X.v[4 - 1] += 12;\n  }\n\n  if (Nrounds > 48) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 49) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 50) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 51) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 51) {\n    X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] += ks[1];\n    X.v[4 - 1] += 13;\n  }\n\n  if (Nrounds > 52) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    
X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 53) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 54) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 55) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 55) {\n    X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] += ks[2];\n    X.v[4 - 1] += 14;\n  }\n\n  if (Nrounds > 56) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 57) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 58) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 59) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 59) {\n    X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] += ks[3];\n    X.v[4 - 1] += 15;\n  }\n\n  if (Nrounds > 60) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 61) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 62) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 63) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 63) {\n    X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] += ks[4];\n    X.v[4 - 1] += 16;\n  }\n\n  if (Nrounds > 64) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 65) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 66) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 67) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += 
X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 67) {\n    X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] += ks[0];\n    X.v[4 - 1] += 17;\n  }\n\n  if (Nrounds > 68) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 69) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 70) {\n    X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] ^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 71) {\n    X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] ^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 71) {\n    X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] += ks[1];\n    X.v[4 - 1] += 18;\n  }\n  return X;\n}\n//end: the open sourced random generator from DE Shaw Research\n\n// **************************\n// BERNOULLI DISTRIBUTION\n// **************************\n\n__kernel void PRNG_threefry4x32_bernoulli(\n__global float4 *randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat inf, float sup,\nfloat threshold,\nuint nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  uint maxUint = 0;\n  maxUint--;\n  float r = (float)maxUint;\n\n  threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey;\n\n  ukey.v[0] = ukey.v[1] = ukey.v[2] = ukey.v[3] = gdx;\n\n  threefry4x32_ctr_t random4;\n\n  if ( gdx < numrandom ) {\n    random4 = threefry4x32_R(nrounds, ctr, ukey);\n    float4 frnd;\n    frnd.x = ( (((float)random4.v[0]) / r) * (sup - inf) + inf ) < threshold ? 1.0f : 0.0f;\n    frnd.y = ( (((float)random4.v[1]) / r) * (sup - inf) + inf ) < threshold ? 1.0f : 0.0f;\n    frnd.z = ( (((float)random4.v[2]) / r) * (sup - inf) + inf ) < threshold ? 1.0f : 0.0f;\n    frnd.w = ( (((float)random4.v[3]) / r) * (sup - inf) + inf ) < threshold ? 
1.0f : 0.0f;\n    randomnumber[gdx] = frnd;\n  }\n}\n\n// **************************\n// UNIFORM DISTRIBUTION (float)\n// **************************\n\n__kernel void PRNG_threefry4x32_uniform(\n__global float4 *randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat inf, float sup,\nuint nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  uint maxUint = 0;\n  maxUint--;\n  float r = (float)maxUint;\n\n  threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey;\n\n  ukey.v[0] = ukey.v[1] = ukey.v[2] = ukey.v[3] = gdx;\n\n  threefry4x32_ctr_t random4;\n\n  if ( gdx < numrandom ) {\n    random4 = threefry4x32_R(nrounds, ctr, ukey);\n    float4 frnd;\n    frnd.x = ( (((float)random4.v[0]) / r) * (sup - inf) + inf );\n    frnd.y = ( (((float)random4.v[1]) / r) * (sup - inf) + inf );\n    frnd.z = ( (((float)random4.v[2]) / r) * (sup - inf) + inf );\n    frnd.w = ( (((float)random4.v[3]) / r) * (sup - inf) + inf );\n    randomnumber[gdx] = frnd;\n  }\n}\n\n// **************************\n// UNIFORM DISTRIBUTION (uint)\n// **************************\n\n__kernel void PRNG_threefry4x32_uint_uniform(\n__global uint4 *randomnumber,\nthreefry4x32_ctr_t ctr_i,\nuint inf, uint sup,\nuint nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey;\n\n  ukey.v[0] = ukey.v[1] = ukey.v[2] = ukey.v[3] = gdx;\n\n  threefry4x32_ctr_t random4;\n\n  if ( gdx < numrandom ) {\n    random4 = threefry4x32_R(nrounds, ctr, ukey);\n    uint4 frnd;\n    frnd.x = random4.v[0] % (sup - inf) + inf;\n    frnd.y = random4.v[1] % (sup - inf) + inf;\n    frnd.z = random4.v[2] % (sup - inf) + inf;\n    frnd.w = random4.v[3] % (sup - inf) + inf;\n    randomnumber[gdx] = frnd;\n  }\n}\n\n// **************************\n// GAUSSIAN DISTRIBUTION\n// **************************\n\n__kernel void PRNG_threefry4x32_gaussian(\n__global float4 *randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat E, float V,\nuint nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  uint maxUint = 0;\n  maxUint--;\n  float r = (float)maxUint;\n\n  threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey1, ukey2;\n\n  ukey1.v[0] = ukey2.v[1] = ukey1.v[2] = ukey2.v[3] = gdx;\n  ukey2.v[0] = ukey1.v[1] = ukey2.v[2] = ukey1.v[3] = 0;\n\n  threefry4x32_ctr_t random1, random2;\n\n  if ( gdx < numrandom ) {\n    random1 = threefry4x32_R(nrounds, ctr, ukey1);\n    random2 = threefry4x32_R(nrounds, ctr, ukey2);\n    float4 frnd1;\n\n    float r1 = (((float)random1.v[0]) / r); // generate a random sequence of uniform distribution\n    float r2 = (((float)random2.v[0]) / r);\n    float r3 = (((float)random1.v[1]) / r);\n    float r4 = (((float)random2.v[1]) / r);\n    float r5 = (((float)random1.v[2]) / r);\n    float r6 = (((float)random2.v[2]) / r);\n    float r7 = (((float)random1.v[3]) / r);\n    float r8 = (((float)random2.v[3]) / r);\n\n    if(r2 == 0 || r4 == 0 || r6 == 0 || r8 == 0) {\n      r2 += 0.0001;\n      r4 += 0.0001;\n      r6 += 0.0001;\n      r8 += 0.0001;\n    }\n\n    frnd1.x = cos(2*M_PI*r1)*sqrt(-2.0*log(r2)) * V + E;// return a pseudo sequence of normal distribution using two above uniform noise data\n    //frnd2.x = sin(2*M_PI*r1)*sqrt(-2.0*log(r2));      // return the quadrature counterpart of the foregoing pseudo normal distribution sequence\n    frnd1.y = cos(2*M_PI*r3)*sqrt(-2.0*log(r4)) * V + E;// return a pseudo sequence of normal distribution using two above uniform noise data\n    //frnd2.y = sin(2*M_PI*r3)*sqrt(-2.0*log(r4));      // return 
the quadrature counterpart of the foregoing pseudo normal distribution sequence\n    frnd1.z = cos(2*M_PI*r5)*sqrt(-2.0*log(r6)) * V + E;// return a pseudo sequence of normal distribution using two above uniform noise data\n    //frnd2.z = sin(2*M_PI*r5)*sqrt(-2.0*log(r6));      // return the quadrature counterpart of the foregoing pseudo normal distribution sequence\n    frnd1.w = cos(2*M_PI*r7)*sqrt(-2.0*log(r8)) * V + E;// return a pseudo sequence of normal distribution using two above uniform noise data\n    //frnd2.w = sin(2*M_PI*r7)*sqrt(-2.0*log(r8));      // return the quadrature counterpart of the foregoing pseudo normal distribution sequence\n\n    randomnumber[gdx] = frnd1;\n  }\n}\n";const std::string tensormath_str = "/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// **************************************\n// Element-wise functions\n// **************************************\n\n// Sum is basically reduction.\n// This reduction code is serial reduction modified from AMD\'s example.\n// http://developer.amd.com/resources/documentation-articles/articles-whitepapers/opencl-optimization-case-study-simple-reductions/\n__kernel\nvoid clkernel_fabs(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = fabs(in[i]);\n}\n\n__kernel\nvoid clkernel_add_scalar(const int num, float x, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in[i] + x;\n}\n\n__kernel\nvoid clkernel_add(const int num, __global const float* in1, __global const float* in2,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] + in2[i];\n}\n\n__kernel\nvoid clkernel_clamp(const int num, float low, float high, __global const float* in,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = clamp(in[i], low, high);\n}\n\n__kernel\nvoid clkernel_divide_scalar_matx(const int num, __global const float* in1, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] / x;\n}\n\n__kernel\nvoid clkernel_divide_scalar_xmat(const int num, const float x, __global const float* in1,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = x / in1[i];\n}\n\n__kernel\nvoid clkernel_divide(const int num, __global const float* in1, __global const float* in2,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] / in2[i];\n}\n\n__kernel\nvoid clkernel_eltmult_scalar(const int num, const float x, __global const float* in,\n  __global float* out) {\n  const int i = 
get_global_id(0);\n  if (i >= num) return;\n  out[i] = in[i] * x;\n}\n\n__kernel\nvoid clkernel_eltmult(const int num, __global const float* in1, __global const float* in2,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] * in2[i];\n}\n\n__kernel\nvoid clkernel_exp(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = exp(in[i]);\n}\n\n__kernel\nvoid clkernel_le(const int num, __global const float* in, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] <= x) ? 1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_log(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = log(in[i]);\n}\n\n__kernel\nvoid clkernel_lt(const int num, __global const float* in, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] < x) ? 1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_ge(const int num, __global const float* in, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] >= x) ? 1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_gt(const int num, __global const float* in, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] > x) ? 1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_pow_scalar(const int num, const float x, __global const float* in,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = pow(in[i], x);\n}\n\n__kernel\nvoid clkernel_pow(const int num, __global const float* in1, __global const float* in2,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = pow(in1[i], in2[i]);\n}\n\n__kernel\nvoid clkernel_relu(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] >= 0.0f) ? 
in[i] : 0.0f;\n}\n\n__kernel\nvoid clkernel_set(const int num, const float x, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = x;\n}\n\n__kernel\nvoid clkernel_sigmoid(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = 1 / (1 + exp(-(in[i])));\n}\n\n__kernel\nvoid clkernel_sign(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] > 0) - (in[i] < 0);\n}\n\n__kernel\nvoid clkernel_sqrt(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = sqrt(in[i]);\n}\n\n// kernel for square is called pow(2).\n\n__kernel\nvoid clkernel_subtract_scalar(const int num, __global const float* in, const float x,\n  __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in[i] - x;\n}\n\n__kernel\nvoid clkernel_subtract(const int num, __global const float* in1, __global const float* in2,\n   __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] - in2[i];\n}\n\n// reduce3 kernel from\n// https://github.com/sschaetz/nvidia-opencl-examples/blob/master/OpenCL/src/oclReduction/oclReduction_kernel.cl\n__kernel\nvoid clkernel_sum(const int num, __global const float* in, __global float* out,\n  __local float* sdata) {\n  const int i = get_group_id(0)*(get_local_size(0)*2) + get_local_id(0);\n  const int tid = get_local_id(0);\n  sdata[tid] = (i < num) ? in[i] : 0.0f;\n\n  // Perform the first level of reduction.\n  if (i + get_local_size(0) < num) {\nsdata[tid] += in[i + get_local_size(0)];\n  }\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for (int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid > s) {\n  sdata[tid] += sdata[tid + s];\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  if (tid == 0) {\nout[get_group_id(0)] = sdata[0];\n  }\n}\n\n__kernel\nvoid clkernel_tanh(const int num, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = tanh(in[i]);\n}\n\n// **************************************\n// Random functions\n// **************************************\n\n// See: distribution.cl\n\n// *********************************************************\n// BLAS functions, ref to http://docs.nvidia.com/cuda/cublas\n// *********************************************************\n\n__kernel\nvoid clkernel_amax(const int num, __global const float* in, __global int* ret,\n   __local uint* sdata, __local size_t* temp) {\n  const int gid = get_global_id(0);\n  const int tid = get_local_id(0);\n\n  for(int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid < s) {\n  sdata[tid] = (in[sdata[tid]] > in[tid+s]) ? sdata[tid] : tid;\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nret[0] = sdata[0];\n  }\n}\n\n\n/* TODO: Fix line 284:20.\n__kernel\nvoid clkernel_amin(const int num, __global const float* in, __global int* ret,\n   __local float* sdata, __local size_t* temp) {\n  const int gid = get_global_id(0);\n  const int tid = get_local_id(0);\n\n  // Initialize the values to pos infinity.\n  sdata[tid] = (gid < num) ? in[gid] : INFINITY;\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for(int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid < s) {\n  sdata[tid] = (in[sdata[tid]] < in[tid+s]) ? 
sdata[tid] : tid;\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nret[0] = sdata[0];\n  }\n}*/\n\n\n__kernel\nvoid clkernel_asum(const int num, __global const float* in, __global float* out,\n   __local float* sdata) {\n  const int tid = get_local_id(0);\n  const int i = get_global_id(0);\n\n  // Initialize\n  sdata[tid] = (i < num) ? in[i] : INFINITY;\n  // Perform the first level of reduction.\n  if (i + get_local_size(0) < num) {\nsdata[tid] += in[i + get_local_size(0)];\n  }\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for(int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid < s) {\n  sdata[tid] = fabs(sdata[tid + s]);\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nout[0] = sdata[0];\n  }\n}\n\n__kernel\nvoid clkernel_axpy(const int num, float alpha, __global const float* in,\n   __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = fma(alpha, in[i], out[i]);\n}\n\n// This kernel is essentially the same as Sum, except that during the process\n// of reading in data to the local memory, the value is also doubled.\n// Then, just before submitting the sum to out, we do a square-root on it.\n__kernel\nvoid clkernel_nrm2(const int num, __global const float* in, __global float* out,\n   __local float* sdata) {\n  const int i = get_group_id(0)*(get_local_size(0)*2) + get_local_id(0);\n  const int tid = get_local_id(0);\n  sdata[tid] = (i < num) ? (in[i] * in[i]) : 0.0f;\n\n  // Perform the first level of reduction.\n  if (i + get_local_size(0) < num) {\nsdata[tid] += in[i + get_local_size(0)];\n  }\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for (int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid > s) {\n  sdata[tid] += sdata[tid + s];\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  if (tid == 0) {\nout[get_group_id(0)] = sqrt(sdata[0]);\n  }\n}\n\n__kernel\nvoid clkernel_scale(const int num, float x, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = x * out[i];\n}\n\n__kernel\nvoid clkernel_dot(const int num, __global const float* in1, __global const float* in2,\n    __global float* out, __local float* scratch) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  int offset = i << 2;\n  scratch[i] = in1[offset] * in2[offset];\n\n}\n\n// First kernel from http://www.bealto.com/gpu-gemv_intro.html\n// y = \xce\xb1*A*v + \xce\xb2*y\n// fma(a, b, c) == (a * b) + c with infinite precision\n__kernel\nvoid clkernel_gemv(const int m, const int n, const float alpha,\n   __global const float* A, __global const float* v,\n   const float beta, __global float* out) {\n  const int i = get_global_id(0);\n  float sum  = 0.0f;\n  for (int k = 0; k < n; k++) {\n    sum += fma(beta, out[i + m * k], alpha * A[i + m * k] * v[k]);\n  }\n  out[i] = sum;\n}\n\n// http://docs.nvidia.com/cuda/cublas/#cublas-lt-t-gt-dgmm\n// X[j] = x[j*inc(x)] if inc(x) \xe2\x89\xa5 0\n//= x[(\xcf\x87 \xe2\x88\x92 1)*|inc(x)| \xe2\x88\x92 j*|inc(x)|] if inc(x) < 0\n\n// C = diag( X )*A\n__kernel\nvoid clkernel_dgmm_left(const int nrow, const int ncol,\n__global const float* M, __global const float* v,\n__global float* out) {\n  const uint gidx = get_global_id(0);\n\n  uint offset = gidx * ncol;\n  for (uint i = 0; i < ncol; i++) {\nout[offset + i] = M[offset + i] * v[i];\n  }\n}\n\n// C = A*diag( X )\n__kernel\nvoid clkernel_dgmm_right(const int nrow, const int ncol,\n __global const float* M, __global const float* v,\n __global float* out) {\n  const uint gidx = get_global_id(0);\n\n  uint offset = gidx * ncol;\n  for (uint 
i = 0; i < ncol; i++) {\nout[offset + i] = M[offset + i] * v[gidx];\n  }\n}\n\n// TODO: Optimize with Reference from http://www.cedricnugteren.nl/tutorial.php?page=1\n//  C = \xce\xb1*A*B + \xce\xb2*C\n__kernel\nvoid clkernel_gemm(const uint nrowA, const uint ncolB, const uint ncolA, const float alpha,\n    __global const float* A, __global const float* B, const float beta,\n     __global float* C, __local float* Asub, __local float* Bsub) {\n\n  const uint lidx = get_local_id(0);\n  const uint lidy = get_local_id(1);\n  const uint TS = get_local_size(0); // Tile size\n  const uint gidx = TS * get_group_id(0) + lidx; // Row ID of C (0..M)\n  const uint gidy = TS * get_group_id(1) + lidy; // Row ID of C (0..N)\n\n  // Initialise the accumulation register\n  float acc = 0.0f;\n\n  // Loop over all tiles\n  const int numtiles = ncolA / TS;\n  for (int t = 0; t < numtiles; t++) {\n    const int tiledRow = TS * t + lidx;\n    const int tiledCol = TS * t + lidy;\n    Asub[lidy * TS + lidx] = A[tiledCol * nrowA + gidx];\n    Bsub[lidy * TS + lidx] = B[gidy * ncolA + tiledRow];\n\n    barrier(CLK_LOCAL_MEM_FENCE);\n\n    for(int k = 0; k < TS; k++) {\n      acc += Asub[k * TS + lidx] * Bsub[lidy * TS + k] * alpha;\n    }\n\n    barrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  C[gidy * nrowA + gidx] = fma(beta, C[gidy * nrowA + gidx], acc);\n}\n\n\n__kernel\nvoid clkernel_crossentropy(const uint batchsize, const uint dim,\n   __global const float* p, __global const int* t,\n   __global float* loss) {\n  const uint gidx = get_global_id(0);\n  if (gidx >= batchsize) return;\n\n  int truth_idx = t[gidx];\n  if (truth_idx <= 0) return;\n  float prob_of_truth = p[gidx * dim + truth_idx];\n  loss[gidx] = -log(fmax(prob_of_truth, -FLT_MIN));\n}\n\n\n__kernel\nvoid clkernel_softmaxentropy(const uint batchsize, const uint dim,\n __global const float* p, __global const int* t,\n __global float* grad) {\n  const uint gidx = get_global_id(0);\n  if (gidx >= batchsize) return;\n\n  int truth_idx = t[gidx];\n  if (truth_idx <= 0) return;\n  grad[gidx * dim + truth_idx] -= 1.0;\n}\n\n\n__kernel\nvoid clkernel_rowmax(const uint nrow, const uint ncol,\n                     __global const float* in, __global float* out) {\n  const uint row_id = get_global_id(0);\n  if (row_id >= nrow) return;\n\n  float row_max_val = -FLT_MAX;\n  for (uint i = 0; i < ncol; i++) {\n    row_max_val = fmax(row_max_val, in[row_id * ncol + i]);\n  }\n\n  out[row_id] = row_max_val;\n}\n\n\n// **************************************\n// Matrix functions\n// **************************************\n/*\n__kernel\nvoid clkernel_addcol(int nrow, int ncol, __global const float* A, __global const float* v, __global float* out) {\n  const int i = get_global_id(0);\n  const int j = get_global_id(1);\n  if (i >= nrow) return;\n  if (j >= ncol) return;\n  ret[j] = A[j + nrow * i] + v[j];\n}\n\n__kernel\nvoid clkernel_addrow(int nrow, int ncol, __global const float* A, __global const float* v, __global float* out) {\n  const int i = get_global_id(0);\n  const int j = get_global_id(1);\n  if (i >= nrow) return;\n  if (j >= ncol) return;\n  out[i] = A[i + ncol * j] + v[i];\n}\n\n__kernel\nvoid clkernel_outerproduct(int m, const int n, __global const float* in1, __global const float* in2, __global float* out) {\n  const int col = get_global_id(0);\n  const int row = get_global_id(1);\n\n  // TODO: This\n}\n\n__kernel\nvoid clkernel_sumcol(int nrow, int ncol, __global const float* in, __global float* out) {\n  const int i = get_global_id(0);\n  if (i >= nrow) 
return;\n\n  float sum = 0.0f;\n  for (int j = 0; j < nrow; j++) {\nsum += input[nrow * i + j];\n  }\n  out[i] = sum;\n}\n*/\n__kernel\nvoid clkernel_sumrow(int nrow, int ncol, __global const float* in, __global float* out) {\n  const int idx = get_global_id(0);\n  if (idx >= nrow) return;\n\n  float sum = 0.0f;\n  for (int j = 0; j < ncol; j++) {\nsum += in[j + ncol * idx];\n  }\n  out[idx] = sum;\n}\n\n\n// Adapted from http://code.haskell.org/HsOpenCL/tests/bench/transpose.cl\n#define BLOCK_DIM 16\n__kernel\nvoid clkernel_transpose(uint nrow, uint ncol,\n__global const float* in, __global float* out,\n__local float* sdata) {\n  uint gidx = get_global_id(0);\n  uint gidy = get_global_id(1);\n\n  if ((gidx < ncol) && (gidy < nrow)) {\nuint id_in = gidy * ncol + gidx;\nsdata[get_local_id(1) * (BLOCK_DIM+1) + get_local_id(0)] = in[id_in];\n  }\n\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  gidx = get_group_id(1) * BLOCK_DIM + get_local_id(0);\n  gidy = get_group_id(0) * BLOCK_DIM + get_local_id(1);\n  if ((gidx < nrow) && (gidy < ncol)) {\nuint id_out = gidy * nrow + gidx;\nout[id_out] = sdata[get_local_id(0) * (BLOCK_DIM + 1) + get_local_id(1)];\n  }\n}\n/*\n__kernel\nvoid clkernel_transpose2(uint nrow, uint ncol, __global const float* in, __global float* out, __local float* sdata) {\n  const uint lidx = get_local_id(0);\n  const uint lidy = get_local_id(1);\n  const uint id0 = get_group_id(0) * ncol * lidx;\n  const uint id1 = get_group_id(1) * nrow * lidy;\n\n  if (id0 < nrow && id1 < ncol) {\nsdata[lidx][lidy] = in[id1 * nrow + id0];\n  }\n\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  const uint new_id0 = get_group_id(1) * nrow + lidx;\n  const uint new_id1 = get_group_id(0) * ncol + lidy;\n\n  if (new_id0 < ncol && new_id1 < nrow) {\nout[new_id1 * ncol + new_id0] = sdata[lidx][lidy];\n  }\n}*/\n\n__kernel\nvoid clkernel_diagvec_left(uint vsize, __global const float* vin, __global float* out) {\n  const uint gid = get_global_id(0);\n\n  for (uint i = 0; i < vsize; i++)\nout[gid * vsize + i] = (i == gid) ? vin[gid] : 0.0f;\n}\n\n\n__kernel\nvoid clkernel_diagvec_right(uint vsize, __global const float* vin, __global float* out) {\n  const uint gid = get_global_id(0);\n\n  for (uint i = 0; i < vsize; i++)\nout[gid * vsize + i] = (i == gid) ? vin[gid] : 0.0f;\n}\n";
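The serial-reduction comment in the listing above describes a work-group tree sum, though the guard there reads `if (tid > s)` where the standard pattern accumulates with `tid < s`. A minimal host-side C++ sketch of the intended reduction, assuming a power-of-two work-group size (the name `workgroup_sum_ref` is ours, for illustration only):

#include <vector>

// Host-side simulation of the work-group tree reduction the sum kernel
// performs: at each step the lower half of the work-items adds in the
// upper half, halving the active range until sdata[0] holds the total.
// Illustrative only; assumes the buffer length is a power of two, as a
// device local size would be.
static float workgroup_sum_ref(std::vector<float> sdata) {
  for (size_t s = sdata.size() / 2; s > 0; s >>= 1) {
    for (size_t tid = 0; tid < s; ++tid) {  // guard: tid < s
      sdata[tid] += sdata[tid + s];
    }
    // barrier(CLK_LOCAL_MEM_FENCE) would go here on the device
  }
  return sdata[0];  // work-item 0 writes this to out[get_group_id(0)]
}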
- } //  namespace opencl 
-} //  namespace singa
\ No newline at end of file
+namespace opencl {
+const std::string im2col_str =
+    "// This file is modified from the file located at\n// "
+    "https://github.com/BVLC/caffe/blob/opencl/src/caffe/greentea/cl_kernels/"
+    "im2col.cl\n// and is covered under the BSD 2-Clause License, as indicated "
+    "in the LICENSE\n// file at the root of this repository.\n\n__kernel void "
+    "im2col(const int n, __global const float* data_im,\n                     "
+    "const int data_im_off,\n                     const int height, const int "
+    "width,\n                     const int kernel_h, const int kernel_w,\n    "
+    "                 const int pad_h, const int pad_w,\n                     "
+    "const int stride_h, const int stride_w,\n                     const int "
+    "dilation_h, const int dilation_w,\n                     const int "
+    "height_col, const int width_col,\n                     __global float* "
+    "data_col, const int data_col_off) {\n\n  for (int index = "
+    "get_global_id(0); index < n;\n      index += get_global_size(0)) {\n    "
+    "const int h_index = index / width_col;\n    const int h_col = h_index % "
+    "height_col;\n    const int w_col = index % width_col;\n    const int c_im "
+    "= h_index / height_col;\n    const int c_col = c_im * kernel_h * "
+    "kernel_w;\n    const int h_offset = h_col * stride_h - pad_h;\n    const "
+    "int w_offset = w_col * stride_w - pad_w;\n    \n    __global float* "
+    "data_col_ptr = data_col + data_col_off;\n    data_col_ptr += (c_col * "
+    "height_col + h_col) * width_col + w_col;\n    __global const float* "
+    "data_im_ptr = data_im + data_im_off;\n    data_im_ptr += (c_im * height + "
+    "h_offset) * width + w_offset;\n    \n    for (int i = 0; i < kernel_h; "
+    "++i) {\n      for (int j = 0; j < kernel_w; ++j) {\n        int h_im = "
+    "h_offset + i * dilation_h;\n        int w_im = w_offset + j * "
+    "dilation_w;\n        *data_col_ptr =\n            (h_im >= 0 && w_im >= 0 "
+    "&& h_im < height && w_im < width) ?\n                data_im_ptr[i * "
+    "dilation_h * width + j * dilation_w] : 0;\n        data_col_ptr += "
+    "height_col * width_col;\n      }\n    }\n  }\n}\n\n__kernel void "
+    "col2im(const int n, __global const float* data_col,\n                     "
+    "const int data_col_off, const int channels,\n                     const "
+    "int height, const int width,\n                     const int kernel_h, "
+    "const int kernel_w,\n                     const int pad_h, const int "
+    "pad_w,\n                     const int stride_h, const int stride_w,\n    "
+    "                 const int dilation_h, const int dilation_w,\n            "
+    "         const int height_col, const int width_col,\n                     "
+    "__global float* data_im, const int data_im_off) {\n\n  for (int index = "
+    "get_global_id(0); index < n; index += get_global_size(0)) {\n    float "
+    "val = 0;\n    const int w_im = index % width + pad_w;\n    const int h_im "
+    "= (index / width) % height + pad_h;\n    const int c_im = index / (width "
+    "* height);\n    int kernel_extent_w = (kernel_w - 1) * dilation_w + 1;\n  "
+    "  int kernel_extent_h = (kernel_h - 1) * dilation_h + 1;\n    // compute "
+    "the start and end of the output\n    const int w_col_start =\n        "
+    "(w_im < kernel_extent_w) ? 0 : (w_im - kernel_extent_w) / stride_w + 1;\n "
+    "   const int w_col_end = min(w_im / stride_w + 1, width_col);\n    const "
+    "int h_col_start =\n        (h_im < kernel_extent_h) ? 0 : (h_im - "
+    "kernel_extent_h) / stride_h + 1;\n    const int h_col_end = min(h_im / "
+    "stride_h + 1, height_col);\n    \n    // TODO: use LCM of stride and "
+    "dilation to avoid unnecessary loops\n    for (int h_col = h_col_start; "
+    "h_col < h_col_end; h_col += 1) {\n      for (int w_col = w_col_start; "
+    "w_col < w_col_end; w_col += 1) {\n        int h_k = (h_im - h_col * "
+    "stride_h);\n        int w_k = (w_im - w_col * stride_w);\n        if (h_k "
+    "% dilation_h == 0 && w_k % dilation_w == 0) {\n          h_k /= "
+    "dilation_h;\n          w_k /= dilation_w;\n          int data_col_index = "
+    "(((c_im * kernel_h + h_k) * kernel_w + w_k) *\n                           "
+    "     height_col + h_col) * width_col + w_col;\n          val += "
+    "data_col[data_col_off + data_col_index];\n        }\n      }\n    }\n    "
+    "data_im[data_im_off + index] = val;\n  }\n}\n";
+const std::string pooling_str =
+    "// This file is modified from the file located at\n// "
+    "https://github.com/BVLC/caffe/blob/opencl/src/caffe/greentea/cl_kernels/"
+    "pooling.cl\n// and is covered under the BSD 2-Clause License, as "
+    "indicated in the LICENSE\n// file at the root of this "
+    "repository.\n\n__kernel void max_pool_forward(\n    const int nthreads, "
+    "__global const float* bottom, const int channels, \n    const int height, "
+    "const int width,\n    const int pooled_h, const int pooled_w,\n    const "
+    "int kernel_h, const int kernel_w,\n    const int stride_h, const int "
+    "stride_w,\n    const int pad_h, const int pad_w,\n    __global float* "
+    "top, __global float* mask) {\n\n//  printf(\"%d \", "
+    "get_global_size(0));\n  for (int i = get_global_id(0); i < nthreads; i += "
+    "get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph "
+    "= (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) "
+    "% channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    "
+    "\n    int hstart = ph * stride_h - pad_h;\n    int wstart = pw * stride_w "
+    "- pad_w;\n    const int hend = min(hstart + kernel_h, height);\n    const "
+    "int wend = min(wstart + kernel_w, width);\n    hstart = max(hstart, "
+    "(int)0);\n    wstart = max(wstart, (int)0);\n    \n    float maxval = "
+    "-FLT_MAX;\n    int maxidx = -1;\n    __global const float* bottom_slice = "
+    "bottom + (n * channels + c) * height * width;\n    for (int h = hstart; h "
+    "< hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        "
+    "const int index = h * width + w;\n        if (bottom_slice[index] > "
+    "maxval) {\n          maxidx = index;\n          maxval = "
+    "bottom_slice[maxidx];\n        }\n      }\n    }\n    top[i] = maxval;\n  "
+    "  mask[i] = (float)maxidx;\n  }\n}\n\n__kernel void ave_pool_forward(\n   "
+    " const int nthreads, __global const float* const bottom, const int "
+    "channels, \n    const int height, const int width,\n    const int "
+    "pooled_h, const int pooled_w,\n    const int kernel_h, const int "
+    "kernel_w,\n    const int stride_h, const int stride_w, \n    const int "
+    "pad_h, const int pad_w, __global float* top) {\n    \n  for (int i = "
+    "get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    const int "
+    "pw = i % pooled_w;\n    const int ph = (i / pooled_w) % pooled_h;\n    "
+    "const int c = (i / pooled_w / pooled_h) % channels;\n    const int n = i "
+    "/ pooled_w / pooled_h / channels;\n    int hstart = ph * stride_h - "
+    "pad_h;\n    int wstart = pw * stride_w - pad_w;\n    int hend = "
+    "min(hstart + kernel_h, height + pad_h);\n    int wend = min(wstart + "
+    "kernel_w, width + pad_w);\n    const int pool_size = (hend - hstart) * "
+    "(wend - wstart);\n    hstart = max(hstart, (int)0);\n    wstart = "
+    "max(wstart, (int)0);\n    hend = min(hend, height);\n    wend = min(wend, "
+    "width);\n    float aveval = 0;\n    __global const float* bottom_slice = "
+    "bottom + (n * channels + c) * height * width;\n    for (int h = hstart; h "
+    "< hend; ++h) {\n      for (int w = wstart; w < wend; ++w) {\n        "
+    "aveval += bottom_slice[h * width + w];\n      }\n    }\n    top[i] = "
+    "aveval / pool_size;\n  }\n}\n\n__kernel void sto_pool_forward_train(\n    "
+    "const int nthreads, __global const float* bottom,\n    const int "
+    "channels, const int height, const int width,\n    const int pooled_h, "
+    "const int pooled_w, const int kernel_h,\n    const int kernel_w, const "
+    "int stride_h, const int stride_w,\n    __global float* rand_idx, __global "
+    "float* top) {\n    \n  for (int i = get_global_id(0); i < nthreads; i += "
+    "get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph "
+    "= (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) "
+    "% channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    "
+    "\n    const int hstart = ph * stride_h;\n    const int hend = min(hstart "
+    "+ kernel_h, height);\n    const int wstart = pw * stride_w;\n    const "
+    "int wend = min(wstart + kernel_w, width);\n    float cumsum = 0.;\n    "
+    "__global const float* bottom_slice = bottom + (n * channels + c) * height "
+    "* width;\n    // First pass: get sum\n    for (int h = hstart; h < hend; "
+    "++h) {\n      for (int w = wstart; w < wend; ++w) {\n        cumsum += "
+    "bottom_slice[h * width + w];\n      }\n    }\n    const float thres = "
+    "rand_idx[i] * cumsum;\n    // Second pass: get value, and set i.\n    "
+    "cumsum = 0;\n    for (int h = hstart; h < hend; ++h) {\n      for (int w "
+    "= wstart; w < wend; ++w) {\n        cumsum += bottom_slice[h * width + "
+    "w];\n        if (cumsum >= thres) {\n          rand_idx[i] = ((n * "
+    "channels + c) * height + h) * width + w;\n          top[i] = "
+    "bottom_slice[h * width + w];\n          h = hend;\n          w = wend;\n  "
+    "      }\n      }\n    }\n  }\n}\n\n__kernel void sto_pool_forward_test(\n "
+    "   const int nthreads, __global const float* const bottom, const int "
+    "channels, \n    const int height, const int width,\n    const int "
+    "pooled_h, const int pooled_w, \n    const int kernel_h, const int "
+    "kernel_w, \n    const int stride_h, const int stride_w,\n    __global "
+    "float* top) {\n    \n  for (int i = get_global_id(0); i < nthreads; i += "
+    "get_global_size(0)) {\n    const int pw = i % pooled_w;\n    const int ph "
+    "= (i / pooled_w) % pooled_h;\n    const int c = (i / pooled_w / pooled_h) "
+    "% channels;\n    const int n = i / pooled_w / pooled_h / channels;\n    "
+    "\n    const int hstart = ph * stride_h;\n    const int hend = min(hstart "
+    "+ kernel_h, height);\n    const int wstart = pw * stride_w;\n    const "
+    "int wend = min(wstart + kernel_w, width);\n    // We set cumsum to be 0 "
+    "to avoid divide-by-zero problems\n    float cumsum = FLT_MIN;\n    float "
+    "cumvalues = 0.;\n    __global const float* bottom_slice = bottom + (n * "
+    "channels + c) * height * width;\n    // First pass: get sum\n    for (int "
+    "h = hstart; h < hend; ++h) {\n      for (int w = wstart; w < wend; ++w) "
+    "{\n        cumsum += bottom_slice[h * width + w];\n        cumvalues += "
+    "bottom_slice[h * width + w] * bottom_slice[h * width + w];\n      }\n    "
+    "}\n    top[i] = cumvalues / cumsum;\n  }\n}\n\n__kernel void "
+    "max_pool_backward(const int nthreads,\n                                "
+    "__global const float* top_diff,\n                                __global "
+    "const float* mask,\n                                const int channels,\n "
+    "                               const int height, const int width,\n       "
+    "                         const int pooled_h, const int pooled_w,\n        "
+    "                        const int kernel_h, const int kernel_w,\n         "
+    "                       const int stride_h, const int stride_w,\n          "
+    "                      const int pad_h, const int pad_w,\n                 "
+    "               __global float* bottom_diff) {\n  for (int i = "
+    "get_global_id(0); i < nthreads; i += get_global_size(0)) {\n    // find "
+    "out the local i\n    // find out the local offset\n    const int w = i % "
+    "width;\n    const int h = (i / width) % height;\n    const int c = (i / "
+    "width / height) % channels;\n    const int n = i / width / height / "
+    "channels;\n    \n    const int phstart =\n        (h + pad_h < kernel_h) "
+    "? 0 : (h + pad_h - kernel_h) / stride_h + 1;\n    const int phend = "
+    "min((h + pad_h) / stride_h + 1, pooled_h);\n    const int pwstart =\n     "
+    "   (w + pad_w < kernel_w) ? 0 : (w + pad_w - kernel_w) / stride_w + 1;\n  "
+    "  const int pwend = min((w + pad_w) / stride_w + 1, pooled_w);\n    float "
+    "gradient = 0.0f;\n    const int offset = (n * channels + c) * pooled_h * "
+    "pooled_w;\n    __global const float* top_diff_slice = top_diff + "
+    "offset;\n    __global const float* mask_slice = mask + offset;\n    for "
+    "(int ph = phstart; ph < phend; ++ph) {\n      for (int pw = pwstart; pw < "
+    "pwend; ++pw) {\n        if (mask_slice[ph * pooled_w + pw] == (float)(h * "
+    "width + w)) {\n          gradient += top_diff_slice[ph * pooled_w + "
+    "pw];\n        }\n      }\n    }\n    bottom_diff[i] = gradient;\n  "
+    "}\n}\n\n__kernel void ave_pool_backward(const int nthreads,\n             "
+    "                   __global const float* top_diff,\n                      "
+    "          const int channels,\n                                const int "
+    "height, const int width,\n                                const int "
+    "pooled_h, const int pooled_w,\n                                const int "
+    "kernel_h, const int kernel_w,\n                                const int "
+    "stride_h, const int stride_w,\n                                const int "
+    "pad_h, const int pad_w,\n                                __global float* "
+    "bottom_diff) {\n  for (int i = get_global_id(0); i < nthreads; i += "
+    "get_global_size(0)) {\n    // find out the local i\n    // find out the "
+    "local offset\n    const int w = i % width + pad_w;\n    const int h = (i "
+    "/ width) % height + pad_h;\n    const int c = (i / width / height) % "
+    "channels;\n    const int n = i / width / height / channels;\n    \n    "
+    "const int phstart = (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;\n "
+    "   const int phend = min(h / stride_h + 1, pooled_h);\n    const int "
+    "pwstart = (w < kernel_w) ? 0 : (w - kernel_w) / stride_w + 1;\n    const "
+    "int pwend = min(w / stride_w + 1, pooled_w);\n    float gradient = 0.0;\n "
+    "   __global const float* const top_diff_slice = top_diff + (n * channels "
+    "+ c) * pooled_h * pooled_w;\n    for (int ph = phstart; ph < phend; ++ph) "
+    "{\n      for (int pw = pwstart; pw < pwend; ++pw) {\n        // figure "
+    "out the pooling size\n        int hstart = ph * stride_h - pad_h;\n       "
+    " int wstart = pw * stride_w - pad_w;\n        int hend = min(hstart + "
+    "kernel_h, height + pad_h);\n        int wend = min(wstart + kernel_w, "
+    "width + pad_w);\n        int pool_size = (hend - hstart) * (wend - "
+    "wstart);\n        gradient += top_diff_slice[ph * pooled_w + pw] / "
+    "pool_size;\n      }\n    }\n    bottom_diff[i] = gradient;\n  "
+    "}\n}\n\n__kernel void sto_pool_backward(\n    const int nthreads, "
+    "__global const float* rand_idx,\n    __global const float* const "
+    "top_diff, const int channels,\n    const int height, const int width,\n   "
+    " const int pooled_h, const int pooled_w,\n    const int kernel_h, const "
+    "int kernel_w,\n    const int stride_h, const int stride_w,\n    __global "
+    "float* bottom_diff) {\n\n  for (int i = get_global_id(0); i < nthreads; i "
+    "+= get_global_size(0)) {\n    // find out the local i\n    // find out "
+    "the local offset\n    const int w = i % width;\n    const int h = (i / "
+    "width) % height;\n    const int c = (i / width / height) % channels;\n    "
+    "const int n = i / width / height / channels;\n    \n    const int phstart "
+    "= (h < kernel_h) ? 0 : (h - kernel_h) / stride_h + 1;\n    const int "
+    "phend = min(h / stride_h + 1, pooled_h);\n    const int pwstart = (w < "
+    "kernel_w) ? 0 : (w - kernel_w) / stride_w + 1;\n    const int pwend = "
+    "min(w / stride_w + 1, pooled_w);\n    float gradient = 0.0;\n    __global "
+    "const float* rand_idx_slice = rand_idx + (n * channels + c) * pooled_h * "
+    "pooled_w;\n    __global const float* top_diff_slice = top_diff + (n * "
+    "channels + c) * pooled_h * pooled_w;\n    for (int ph = phstart; ph < "
+    "phend; ++ph) {\n      for (int pw = pwstart; pw < pwend; ++pw) {\n        "
+    "gradient += top_diff_slice[ph * pooled_w + pw]\n            * (i == (int) "
+    "(rand_idx_slice[ph * pooled_w + pw])?1.0:0.0);\n      }\n    }\n    "
+    "bottom_diff[i] = gradient;\n  }\n}\n\n";
+const std::string distribution_str =
+    "// This code is adapted from "
+    "https://github.com/amd/OpenCL-caffe/blob/stable/src/caffe/ocl/"
+    "random.cl\n\n//Note: random generator has two parts\n//first part: the "
+    "open sourced threefy random generator kernel from DE Shaw "
+    "Research\n//second part. we wrap the kernel up to generate uniform, "
+    "bernoulli and gaussion distribution generators.\n\n//begin: the open "
+    "sourced random generator from DE Shaw "
+    "Research\n//https://www.deshawresearch.com/"
+    "resources_random123.html\ntypedef uint uint32_t;\n\nstruct r123array4x32 "
+    "{\n  uint32_t v[4];\n};\n\nenum r123_enum_threefry32x4 {\n  R_32x4_0_0 = "
+    "10,\n  R_32x4_0_1 = 26,\n  R_32x4_1_0 = 11,\n  R_32x4_1_1 = 21,\n  "
+    "R_32x4_2_0 = 13,\n  R_32x4_2_1 = 27,\n  R_32x4_3_0 = 23,\n  R_32x4_3_1 = "
+    "5,\n  R_32x4_4_0 = 6,\n  R_32x4_4_1 = 20,\n  R_32x4_5_0 = 17,\n  "
+    "R_32x4_5_1 = 11,\n  R_32x4_6_0 = 25,\n  R_32x4_6_1 = 10,\n  R_32x4_7_0 = "
+    "18,\n  R_32x4_7_1 = 20\n};\n\ninline uint32_t RotL_32(uint32_t x, "
+    "unsigned int N) {\n  return (x << (N & 31)) | (x >> ((32 - N) & "
+    "31));\n}\n\ntypedef struct r123array4x32 threefry4x32_ctr_t;\ntypedef "
+    "struct r123array4x32 threefry4x32_key_t;\ntypedef struct r123array4x32 "
+    "threefry4x32_ukey_t;\n\ninline threefry4x32_ctr_t threefry4x32_R(unsigned "
+    "int Nrounds, threefry4x32_ctr_t in, threefry4x32_key_t k) {\n  "
+    "threefry4x32_ctr_t X;\n  uint32_t ks[4 + 1];\n  int i;\n  ks[4] = "
+    "0x1BD11BDA;\n\n  {\n    ks[0] = k.v[0];\n    X.v[0] = in.v[0];\n    ks[4] "
+    "^= k.v[0];\n\n    ks[1] = k.v[1];\n    X.v[1] = in.v[1];\n    ks[4] ^= "
+    "k.v[1];\n\n    ks[2] = k.v[2];\n    X.v[2] = in.v[2];\n    ks[4] ^= "
+    "k.v[2];\n\n    ks[3] = k.v[3];\n    X.v[3] = in.v[3];\n    ks[4] ^= "
+    "k.v[3];\n  }\n\n  X.v[0] += ks[0];\n  X.v[1] += ks[1];\n  X.v[2] += "
+    "ks[2];\n  X.v[3] += ks[3];\n\n  if (Nrounds > 0) {\n    X.v[0] += "
+    "X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 1) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 2) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 3) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 3) {\n    "
+    "X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] "
+    "+= ks[4];\n    X.v[4 - 1] += 1;\n  }\n\n  if (Nrounds > 4) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 5) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 6) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 7) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 7) {\n    "
+    "X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] "
+    "+= ks[0];\n    X.v[4 - 1] += 2;\n  }\n\n  if (Nrounds > 8) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 9) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 10) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 11) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 11) {\n    "
+    "X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] "
+    "+= ks[1];\n    X.v[4 - 1] += 3;\n  }\n\n  if (Nrounds > 12) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 13) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 14) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 15) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 15) {\n    "
+    "X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] "
+    "+= ks[2];\n    X.v[4 - 1] += 4;\n  }\n\n  if (Nrounds > 16) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 17) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 18) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 19) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 19) {\n    "
+    "X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] "
+    "+= ks[3];\n    X.v[4 - 1] += 5;\n  }\n\n  if (Nrounds > 20) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 21) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 22) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 23) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 23) {\n    "
+    "X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] "
+    "+= ks[4];\n    X.v[4 - 1] += 6;\n  }\n\n  if (Nrounds > 24) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 25) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 26) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 27) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 27) {\n    "
+    "X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] "
+    "+= ks[0];\n    X.v[4 - 1] += 7;\n  }\n\n  if (Nrounds > 28) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 29) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 30) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 31) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 31) {\n    "
+    "X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] "
+    "+= ks[1];\n    X.v[4 - 1] += 8;\n  }\n\n  if (Nrounds > 32) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 33) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 34) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 35) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 35) {\n    "
+    "X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] "
+    "+= ks[2];\n    X.v[4 - 1] += 9;\n  }\n\n  if (Nrounds > 36) {\n    X.v[0] "
+    "+= X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] ^= "
+    "X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 37) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 38) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 39) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 39) {\n    "
+    "X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] "
+    "+= ks[3];\n    X.v[4 - 1] += 10;\n  }\n\n  if (Nrounds > 40) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 41) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n  if (Nrounds > 42) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 43) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 43) {\n    "
+    "X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] "
+    "+= ks[4];\n    X.v[4 - 1] += 11;\n  }\n\n  if (Nrounds > 44) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 45) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 46) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 47) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 47) {\n    "
+    "X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] "
+    "+= ks[0];\n    X.v[4 - 1] += 12;\n  }\n\n  if (Nrounds > 48) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 49) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 50) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 51) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 51) {\n    "
+    "X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] "
+    "+= ks[1];\n    X.v[4 - 1] += 13;\n  }\n\n  if (Nrounds > 52) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 53) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 54) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 55) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 55) {\n    "
+    "X.v[0] += ks[4];\n    X.v[1] += ks[0];\n    X.v[2] += ks[1];\n    X.v[3] "
+    "+= ks[2];\n    X.v[4 - 1] += 14;\n  }\n\n  if (Nrounds > 56) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 57) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 58) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 59) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 59) {\n    "
+    "X.v[0] += ks[0];\n    X.v[1] += ks[1];\n    X.v[2] += ks[2];\n    X.v[3] "
+    "+= ks[3];\n    X.v[4 - 1] += 15;\n  }\n\n  if (Nrounds > 60) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 61) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 62) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 63) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 63) {\n    "
+    "X.v[0] += ks[1];\n    X.v[1] += ks[2];\n    X.v[2] += ks[3];\n    X.v[3] "
+    "+= ks[4];\n    X.v[4 - 1] += 16;\n  }\n\n  if (Nrounds > 64) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_0_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_0_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 65) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_1_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_1_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 66) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_2_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_2_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 67) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_3_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_3_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 67) {\n    "
+    "X.v[0] += ks[2];\n    X.v[1] += ks[3];\n    X.v[2] += ks[4];\n    X.v[3] "
+    "+= ks[0];\n    X.v[4 - 1] += 17;\n  }\n\n  if (Nrounds > 68) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_4_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_4_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 69) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_5_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_5_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 70) {\n    "
+    "X.v[0] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], R_32x4_6_0);\n    X.v[1] "
+    "^= X.v[0];\n    X.v[2] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], "
+    "R_32x4_6_1);\n    X.v[3] ^= X.v[2];\n  }\n\n  if (Nrounds > 71) {\n    "
+    "X.v[0] += X.v[3];\n    X.v[3] = RotL_32(X.v[3], R_32x4_7_0);\n    X.v[3] "
+    "^= X.v[0];\n    X.v[2] += X.v[1];\n    X.v[1] = RotL_32(X.v[1], "
+    "R_32x4_7_1);\n    X.v[1] ^= X.v[2];\n  }\n\n  if (Nrounds > 71) {\n    "
+    "X.v[0] += ks[3];\n    X.v[1] += ks[4];\n    X.v[2] += ks[0];\n    X.v[3] "
+    "+= ks[1];\n    X.v[4 - 1] += 18;\n  }\n  return X;\n}\n//end: the open "
+    "sourced random generator from DE Shaw Research\n\n// "
+    "**************************\n// BERNOULLI DISTRIBUTION\n// "
+    "**************************\n\n__kernel void "
+    "PRNG_threefry4x32_bernoulli(\n__global float4 "
+    "*randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat inf, float sup,\nfloat "
+    "threshold,\nuint nrounds, uint numrandom) {\n\n  size_t gdx = "
+    "get_global_id(0);\n\n  uint maxUint = 0;\n  maxUint--;\n  float r = "
+    "(float)maxUint;\n\n  threefry4x32_ctr_t ctr = ctr_i;\n  "
+    "threefry4x32_ukey_t ukey;\n\n  ukey.v[0] = ukey.v[1] = ukey.v[2] = "
+    "ukey.v[3] = gdx;\n\n  threefry4x32_ctr_t random4;\n\n  if ( gdx < "
+    "numrandom ) {\n    random4 = threefry4x32_R(nrounds, ctr, ukey);\n    "
+    "float4 frnd;\n    frnd.x = ( (((float)random4.v[0]) / r) * (sup - inf) + "
+    "inf ) < threshold ? 1.0f : 0.0f;\n    frnd.y = ( (((float)random4.v[1]) / "
+    "r) * (sup - inf) + inf ) < threshold ? 1.0f : 0.0f;\n    frnd.z = ( "
+    "(((float)random4.v[2]) / r) * (sup - inf) + inf ) < threshold ? 1.0f : "
+    "0.0f;\n    frnd.w = ( (((float)random4.v[3]) / r) * (sup - inf) + inf ) < "
+    "threshold ? 1.0f : 0.0f;\n    randomnumber[gdx] = frnd;\n  }\n}\n\n// "
+    "**************************\n// UNIFORM DISTRIBUTION (float)\n// "
+    "**************************\n\n__kernel void "
+    "PRNG_threefry4x32_uniform(\n__global float4 "
+    "*randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat inf, float sup,\nuint "
+    "nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  uint "
+    "maxUint = 0;\n  maxUint--;\n  float r = (float)maxUint;\n\n  "
+    "threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey;\n\n  "
+    "ukey.v[0] = ukey.v[1] = ukey.v[2] = ukey.v[3] = gdx;\n\n  "
+    "threefry4x32_ctr_t random4;\n\n  if ( gdx < numrandom ) {\n    random4 = "
+    "threefry4x32_R(nrounds, ctr, ukey);\n    float4 frnd;\n    frnd.x = ( "
+    "(((float)random4.v[0]) / r) * (sup - inf) + inf );\n    frnd.y = ( "
+    "(((float)random4.v[1]) / r) * (sup - inf) + inf );\n    frnd.z = ( "
+    "(((float)random4.v[2]) / r) * (sup - inf) + inf );\n    frnd.w = ( "
+    "(((float)random4.v[3]) / r) * (sup - inf) + inf );\n    randomnumber[gdx] "
+    "= frnd;\n  }\n}\n\n// **************************\n// UNIFORM DISTRIBUTION "
+    "(uint)\n// **************************\n\n__kernel void "
+    "PRNG_threefry4x32_uint_uniform(\n__global uint4 "
+    "*randomnumber,\nthreefry4x32_ctr_t ctr_i,\nuint inf, uint sup,\nuint "
+    "nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  "
+    "threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey;\n\n  "
+    "ukey.v[0] = ukey.v[1] = ukey.v[2] = ukey.v[3] = gdx;\n\n  "
+    "threefry4x32_ctr_t random4;\n\n  if ( gdx < numrandom ) {\n    random4 = "
+    "threefry4x32_R(nrounds, ctr, ukey);\n    uint4 frnd;\n    frnd.x = "
+    "random4.v[0] % (sup - inf) + inf;\n    frnd.y = random4.v[1] % (sup - "
+    "inf) + inf;\n    frnd.z = random4.v[2] % (sup - inf) + inf;\n    frnd.w = "
+    "random4.v[3] % (sup - inf) + inf;\n    randomnumber[gdx] = frnd;\n  "
+    "}\n}\n\n// **************************\n// GAUSSIAN DISTRIBUTION\n// "
+    "**************************\n\n__kernel void "
+    "PRNG_threefry4x32_gaussian(\n__global float4 "
+    "*randomnumber,\nthreefry4x32_ctr_t ctr_i,\nfloat E, float V,\nuint "
+    "nrounds, uint numrandom) {\n\n  size_t gdx = get_global_id(0);\n\n  uint "
+    "maxUint = 0;\n  maxUint--;\n  float r = (float)maxUint;\n\n  "
+    "threefry4x32_ctr_t ctr = ctr_i;\n  threefry4x32_ukey_t ukey1, ukey2;\n\n  "
+    "ukey1.v[0] = ukey2.v[1] = ukey1.v[2] = ukey2.v[3] = gdx;\n  ukey2.v[0] = "
+    "ukey1.v[1] = ukey2.v[2] = ukey1.v[3] = 0;\n\n  threefry4x32_ctr_t "
+    "random1, random2;\n\n  if ( gdx < numrandom ) {\n    random1 = "
+    "threefry4x32_R(nrounds, ctr, ukey1);\n    random2 = "
+    "threefry4x32_R(nrounds, ctr, ukey2);\n    float4 frnd1;\n\n    float r1 = "
+    "(((float)random1.v[0]) / r); // generate a random sequence of uniform "
+    "distribution\n    float r2 = (((float)random2.v[0]) / r);\n    float r3 = "
+    "(((float)random1.v[1]) / r);\n    float r4 = (((float)random2.v[1]) / "
+    "r);\n    float r5 = (((float)random1.v[2]) / r);\n    float r6 = "
+    "(((float)random2.v[2]) / r);\n    float r7 = (((float)random1.v[3]) / "
+    "r);\n    float r8 = (((float)random2.v[3]) / r);\n\n    if(r2 == 0 || r4 "
+    "== 0 || r6 == 0 || r8 == 0) {\n      r2 += 0.0001;\n      r4 += 0.0001;\n "
+    "     r6 += 0.0001;\n      r8 += 0.0001;\n    }\n\n    frnd1.x = "
+    "cos(2*M_PI*r1)*sqrt(-2.0*log(r2)) * V + E;// return a pseudo sequence of "
+    "normal distribution using two above uniform noise data\n    //frnd2.x = "
+    "sin(2*M_PI*r1)*sqrt(-2.0*log(r2));      // return the quadrature "
+    "counterpart of the foregoing pseudo normal distribution sequence\n    "
+    "frnd1.y = cos(2*M_PI*r3)*sqrt(-2.0*log(r4)) * V + E;// return a pseudo "
+    "sequence of normal distribution using two above uniform noise data\n    "
+    "//frnd2.y = sin(2*M_PI*r3)*sqrt(-2.0*log(r4));      // return the "
+    "quadrature counterpart of the foregoing pseudo normal distribution "
+    "sequence\n    frnd1.z = cos(2*M_PI*r5)*sqrt(-2.0*log(r6)) * V + E;// "
+    "return a pseudo sequence of normal distribution using two above uniform "
+    "noise data\n    //frnd2.z = sin(2*M_PI*r5)*sqrt(-2.0*log(r6));      // "
+    "return the quadrature counterpart of the foregoing pseudo normal "
+    "distribution sequence\n    frnd1.w = cos(2*M_PI*r7)*sqrt(-2.0*log(r8)) * "
+    "V + E;// return a pseudo sequence of normal distribution using two above "
+    "uniform noise data\n    //frnd2.w = sin(2*M_PI*r7)*sqrt(-2.0*log(r8));    "
+    "  // return the quadrature counterpart of the foregoing pseudo normal "
+    "distribution sequence\n\n    randomnumber[gdx] = frnd1;\n  }\n}\n";
+const std::string tensormath_str =
+    "/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or "
+    "more contributor license agreements.  See the NOTICE file\n * distributed "
+    "with this work for additional information\n * regarding copyright "
+    "ownership.  The ASF licenses this file\n * to you under the Apache "
+    "License, Version 2.0 (the\n * \"License\"); you may not use this file "
+    "except in compliance\n * with the License.  You may obtain a copy of the "
+    "License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * "
+    "Unless required by applicable law or agreed to in writing, software\n * "
+    "distributed under the License is distributed on an \"AS IS\" BASIS,\n * "
+    "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or "
+    "implied.\n * See the License for the specific language governing "
+    "permissions and\n * limitations under the License.\n */\n\n// "
+    "**************************************\n// Element-wise functions\n// "
+    "**************************************\n\n// Sum is basically "
+    "reduction.\n// This reduction code is serial reduction modified from "
+    "AMD\'s example.\n// "
+    "http://developer.amd.com/resources/documentation-articles/"
+    "articles-whitepapers/opencl-optimization-case-study-simple-reductions/"
+    "\n__kernel\nvoid clkernel_fabs(const int num, __global const float* in, "
+    "__global float* out) {\n  const int i = get_global_id(0);\n  if (i >= "
+    "num) return;\n  out[i] = fabs(in[i]);\n}\n\n__kernel\nvoid "
+    "clkernel_add_scalar(const int num, float x, __global const float* in, "
+    "__global float* out) {\n  const int i = get_global_id(0);\n  if (i >= "
+    "num) return;\n  out[i] = in[i] + x;\n}\n\n__kernel\nvoid "
+    "clkernel_add(const int num, __global const float* in1, __global const "
+    "float* in2,\n  __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] + "
+    "in2[i];\n}\n\n__kernel\nvoid clkernel_clamp(const int num, float low, "
+    "float high, __global const float* in,\n  __global float* out) {\n  const "
+    "int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "clamp(in[i], low, high);\n}\n\n__kernel\nvoid "
+    "clkernel_divide_scalar_matx(const int num, __global const float* in1, "
+    "const float x,\n  __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  if (i >= num) return;\n  out[i] = in1[i] / "
+    "x;\n}\n\n__kernel\nvoid clkernel_divide_scalar_xmat(const int num, const "
+    "float x, __global const float* in1,\n  __global float* out) {\n  const "
+    "int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = x / "
+    "in1[i];\n}\n\n__kernel\nvoid clkernel_divide(const int num, __global "
+    "const float* in1, __global const float* in2,\n  __global float* out) {\n  "
+    "const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "in1[i] / in2[i];\n}\n\n__kernel\nvoid clkernel_eltmult_scalar(const int "
+    "num, const float x, __global const float* in,\n  __global float* out) {\n "
+    " const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "in[i] * x;\n}\n\n__kernel\nvoid clkernel_eltmult(const int num, __global "
+    "const float* in1, __global const float* in2,\n  __global float* out) {\n  "
+    "const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "in1[i] * in2[i];\n}\n\n__kernel\nvoid clkernel_exp(const int num, "
+    "__global const float* in, __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "exp(in[i]);\n}\n\n__kernel\nvoid clkernel_le(const int num, __global "
+    "const float* in, const float x,\n  __global float* out) {\n  const int i "
+    "= get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] <= x) ? "
+    "1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_log(const int num, __global "
+    "const float* in, __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "log(in[i]);\n}\n\n__kernel\nvoid clkernel_lt(const int num, __global "
+    "const float* in, const float x,\n  __global float* out) {\n  const int i "
+    "= get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] < x) ? "
+    "1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_ge(const int num, __global "
+    "const float* in, const float x,\n  __global float* out) {\n  const int i "
+    "= get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] >= x) ? "
+    "1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_gt(const int num, __global "
+    "const float* in, const float x,\n  __global float* out) {\n  const int i "
+    "= get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] > x) ? "
+    "1.0f : 0.0f;\n}\n\n__kernel\nvoid clkernel_pow_scalar(const int num, "
+    "const float x, __global const float* in,\n  __global float* out) {\n  "
+    "const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "pow(in[i], x);\n}\n\n__kernel\nvoid clkernel_pow(const int num, __global "
+    "const float* in1, __global const float* in2,\n  __global float* out) {\n  "
+    "const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "pow(in1[i], in2[i]);\n}\n\n__kernel\nvoid clkernel_relu(const int num, "
+    "__global const float* in, __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  if (i >= num) return;\n  out[i] = (in[i] >= 0.0f) ? "
+    "in[i] : 0.0f;\n}\n\n__kernel\nvoid clkernel_set(const int num, const "
+    "float x, __global float* out) {\n  const int i = get_global_id(0);\n  if "
+    "(i >= num) return;\n  out[i] = x;\n}\n\n__kernel\nvoid "
+    "clkernel_sigmoid(const int num, __global const float* in, __global float* "
+    "out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  "
+    "out[i] = 1 / (1 + exp(-(in[i])));\n}\n\n__kernel\nvoid "
+    "clkernel_sign(const int num, __global const float* in, __global float* "
+    "out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  "
+    "out[i] = (in[i] > 0) - (in[i] < 0);\n}\n\n__kernel\nvoid "
+    "clkernel_sqrt(const int num, __global const float* in, __global float* "
+    "out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  "
+    "out[i] = sqrt(in[i]);\n}\n\n// kernel for square is called "
+    "pow(2).\n\n__kernel\nvoid clkernel_subtract_scalar(const int num, "
+    "__global const float* in, const float x,\n  __global float* out) {\n  "
+    "const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "in[i] - x;\n}\n\n__kernel\nvoid clkernel_subtract(const int num, __global "
+    "const float* in1, __global const float* in2,\n   __global float* out) {\n "
+    " const int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = "
+    "in1[i] - in2[i];\n}\n\n// reduce3 kernel from\n// "
+    "https://github.com/sschaetz/nvidia-opencl-examples/blob/master/OpenCL/src/"
+    "oclReduction/oclReduction_kernel.cl\n__kernel\nvoid clkernel_sum(const "
+    "int num, __global const float* in, __global float* out,\n  __local float* "
+    "sdata) {\n  const int i = get_group_id(0)*(get_local_size(0)*2) + "
+    "get_local_id(0);\n  const int tid = get_local_id(0);\n  sdata[tid] = (i < "
+    "num) ? in[i] : 0.0f;\n\n  // Perform the first level of reduction.\n  if "
+    "(i + get_local_size(0) < num) {\nsdata[tid] += in[i + "
+    "get_local_size(0)];\n  }\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for (int s "
+    "= get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid > s) {\n  sdata[tid] += "
+    "sdata[tid + s];\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  if (tid == 0) "
+    "{\nout[get_group_id(0)] = sdata[0];\n  }\n}\n\n__kernel\nvoid "
+    "clkernel_tanh(const int num, __global const float* in, __global float* "
+    "out) {\n  const int i = get_global_id(0);\n  if (i >= num) return;\n  "
+    "out[i] = tanh(in[i]);\n}\n\n// **************************************\n// "
+    "Random functions\n// **************************************\n\n// See: "
+    "distribution.cl\n\n// "
+    "*********************************************************\n// BLAS "
+    "functions, ref to http://docs.nvidia.com/cuda/cublas\n// "
+    "*********************************************************\n\n__"
+    "kernel\nvoid clkernel_amax(const int num, __global const float* in, "
+    "__global int* ret,\n   __local uint* sdata, __local size_t* temp) {\n  "
+    "const int gid = get_global_id(0);\n  const int tid = get_local_id(0);\n\n "
+    " for(int s = get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid < s) {\n  "
+    "sdata[tid] = (in[sdata[tid]] > in[tid+s]) ? sdata[tid] : "
+    "tid;\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nret[0] = "
+    "sdata[0];\n  }\n}\n\n\n/* TODO: Fix line 284:20.\n__kernel\nvoid "
+    "clkernel_amin(const int num, __global const float* in, __global int* "
+    "ret,\n   __local float* sdata, __local size_t* temp) {\n  const int gid = "
+    "get_global_id(0);\n  const int tid = get_local_id(0);\n\n  // Initialize "
+    "the values to pos infinity.\n  sdata[tid] = (gid < num) ? in[gid] : "
+    "INFINITY;\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for(int s = "
+    "get_local_size(0)/2; s > 0; s >>= 1) {\nif (tid < s) {\n  sdata[tid] = "
+    "(in[sdata[tid]] < in[tid+s]) ? sdata[tid] : "
+    "tid;\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nret[0] = "
+    "sdata[0];\n  }\n}*/\n\n\n__kernel\nvoid clkernel_asum(const int num, "
+    "__global const float* in, __global float* out,\n   __local float* sdata) "
+    "{\n  const int tid = get_local_id(0);\n  const int i = "
+    "get_global_id(0);\n\n  // Initialize\n  sdata[tid] = (i < num) ? in[i] : "
+    "INFINITY;\n  // Perform the first level of reduction.\n  if (i + "
+    "get_local_size(0) < num) {\nsdata[tid] += in[i + get_local_size(0)];\n  "
+    "}\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for(int s = get_local_size(0)/2; "
+    "s > 0; s >>= 1) {\nif (tid < s) {\n  sdata[tid] = fabs(sdata[tid + "
+    "s]);\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n  if (tid == 0) {\nout[0] = "
+    "sdata[0];\n  }\n}\n\n__kernel\nvoid clkernel_axpy(const int num, float "
+    "alpha, __global const float* in,\n   __global float* out) {\n  const int "
+    "i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = fma(alpha, "
+    "in[i], out[i]);\n}\n\n// This kernel is essentially the same as Sum, "
+    "except that during the process\n// of reading in data to the local "
+    "memory, the value is also doubled.\n// Then, just before submitting the "
+    "sum to out, we do a square-root on it.\n__kernel\nvoid "
+    "clkernel_nrm2(const int num, __global const float* in, __global float* "
+    "out,\n   __local float* sdata) {\n  const int i = "
+    "get_group_id(0)*(get_local_size(0)*2) + get_local_id(0);\n  const int tid "
+    "= get_local_id(0);\n  sdata[tid] = (i < num) ? (in[i] * in[i]) : "
+    "0.0f;\n\n  // Perform the first level of reduction.\n  if (i + "
+    "get_local_size(0) < num) {\nsdata[tid] += in[i + get_local_size(0)];\n  "
+    "}\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n  for (int s = get_local_size(0)/2; "
+    "s > 0; s >>= 1) {\nif (tid > s) {\n  sdata[tid] += sdata[tid + "
+    "s];\n}\nbarrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  if (tid == 0) "
+    "{\nout[get_group_id(0)] = sqrt(sdata[0]);\n  }\n}\n\n__kernel\nvoid "
+    "clkernel_scale(const int num, float x, __global float* out) {\n  const "
+    "int i = get_global_id(0);\n  if (i >= num) return;\n  out[i] = x * "
+    "out[i];\n}\n\n__kernel\nvoid clkernel_dot(const int num, __global const "
+    "float* in1, __global const float* in2,\n    __global float* out, __local "
+    "float* scratch) {\n  const int i = get_global_id(0);\n  if (i >= num) "
+    "return;\n  int offset = i << 2;\n  scratch[i] = in1[offset] * "
+    "in2[offset];\n\n}\n\n// First kernel from "
+    "http://www.bealto.com/gpu-gemv_intro.html\n// y = \xce\xb1*A*v + "
+    "\xce\xb2*y\n// fma(a, b, c) == (a * b) + c with infinite "
+    "precision\n__kernel\nvoid clkernel_gemv(const int m, const int n, const "
+    "float alpha,\n   __global const float* A, __global const float* v,\n   "
+    "const float beta, __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  float sum  = 0.0f;\n  for (int k = 0; k < n; k++) "
+    "{\n    sum += fma(beta, out[i + m * k], alpha * A[i + m * k] * v[k]);\n  "
+    "}\n  out[i] = sum;\n}\n\n// "
+    "http://docs.nvidia.com/cuda/cublas/#cublas-lt-t-gt-dgmm\n// X[j] = "
+    "x[j*inc(x)] if inc(x) \xe2\x89\xa5 0\n//= x[(\xcf\x87 \xe2\x88\x92 "
+    "1)*|inc(x)| \xe2\x88\x92 j*|inc(x)|] if inc(x) < 0\n\n// C = diag( X "
+    ")*A\n__kernel\nvoid clkernel_dgmm_left(const int nrow, const int "
+    "ncol,\n__global const float* M, __global const float* v,\n__global float* "
+    "out) {\n  const uint gidx = get_global_id(0);\n\n  uint offset = gidx * "
+    "ncol;\n  for (uint i = 0; i < ncol; i++) {\nout[offset + i] = M[offset + "
+    "i] * v[i];\n  }\n}\n\n// C = A*diag( X )\n__kernel\nvoid "
+    "clkernel_dgmm_right(const int nrow, const int ncol,\n __global const "
+    "float* M, __global const float* v,\n __global float* out) {\n  const uint "
+    "gidx = get_global_id(0);\n\n  uint offset = gidx * ncol;\n  for (uint i = "
+    "0; i < ncol; i++) {\nout[offset + i] = M[offset + i] * v[gidx];\n  "
+    "}\n}\n\n// TODO: Optimize with Reference from "
+    "http://www.cedricnugteren.nl/tutorial.php?page=1\n//  C = \xce\xb1*A*B + "
+    "\xce\xb2*C\n__kernel\nvoid clkernel_gemm(const uint nrowA, const uint "
+    "ncolB, const uint ncolA, const float alpha,\n    __global const float* A, "
+    "__global const float* B, const float beta,\n     __global float* C, "
+    "__local float* Asub, __local float* Bsub) {\n\n  const uint lidx = "
+    "get_local_id(0);\n  const uint lidy = get_local_id(1);\n  const uint TS = "
+    "get_local_size(0); // Tile size\n  const uint gidx = TS * get_group_id(0) "
+    "+ lidx; // Row ID of C (0..M)\n  const uint gidy = TS * get_group_id(1) + "
+    "lidy; // Row ID of C (0..N)\n\n  // Initialise the accumulation "
+    "register\n  float acc = 0.0f;\n\n  // Loop over all tiles\n  const int "
+    "numtiles = ncolA / TS;\n  for (int t = 0; t < numtiles; t++) {\n    const "
+    "int tiledRow = TS * t + lidx;\n    const int tiledCol = TS * t + lidy;\n  "
+    "  Asub[lidy * TS + lidx] = A[tiledCol * nrowA + gidx];\n    Bsub[lidy * "
+    "TS + lidx] = B[gidy * ncolA + tiledRow];\n\n    "
+    "barrier(CLK_LOCAL_MEM_FENCE);\n\n    for(int k = 0; k < TS; k++) {\n      "
+    "acc += Asub[k * TS + lidx] * Bsub[lidy * TS + k] * alpha;\n    }\n\n    "
+    "barrier(CLK_LOCAL_MEM_FENCE);\n  }\n\n  C[gidy * nrowA + gidx] = "
+    "fma(beta, C[gidy * nrowA + gidx], acc);\n}\n\n\n__kernel\nvoid "
+    "clkernel_crossentropy(const uint batchsize, const uint dim,\n   __global "
+    "const float* p, __global const int* t,\n   __global float* loss) {\n  "
+    "const uint gidx = get_global_id(0);\n  if (gidx >= batchsize) return;\n\n "
+    " int truth_idx = t[gidx];\n  if (truth_idx <= 0) return;\n  float "
+    "prob_of_truth = p[gidx * dim + truth_idx];\n  loss[gidx] = "
+    "-log(fmax(prob_of_truth, -FLT_MIN));\n}\n\n\n__kernel\nvoid "
+    "clkernel_softmaxentropy(const uint batchsize, const uint dim,\n __global "
+    "const float* p, __global const int* t,\n __global float* grad) {\n  const "
+    "uint gidx = get_global_id(0);\n  if (gidx >= batchsize) return;\n\n  int "
+    "truth_idx = t[gidx];\n  if (truth_idx <= 0) return;\n  grad[gidx * dim + "
+    "truth_idx] -= 1.0;\n}\n\n\n__kernel\nvoid clkernel_rowmax(const uint "
+    "nrow, const uint ncol,\n                     __global const float* in, "
+    "__global float* out) {\n  const uint row_id = get_global_id(0);\n  if "
+    "(row_id >= nrow) return;\n\n  float row_max_val = -FLT_MAX;\n  for (uint "
+    "i = 0; i < ncol; i++) {\n    row_max_val = fmax(row_max_val, in[row_id * "
+    "ncol + i]);\n  }\n\n  out[row_id] = row_max_val;\n}\n\n\n// "
+    "**************************************\n// Matrix functions\n// "
+    "**************************************\n/*\n__kernel\nvoid "
+    "clkernel_addcol(int nrow, int ncol, __global const float* A, __global "
+    "const float* v, __global float* out) {\n  const int i = "
+    "get_global_id(0);\n  const int j = get_global_id(1);\n  if (i >= nrow) "
+    "return;\n  if (j >= ncol) return;\n  ret[j] = A[j + nrow * i] + "
+    "v[j];\n}\n\n__kernel\nvoid clkernel_addrow(int nrow, int ncol, __global "
+    "const float* A, __global const float* v, __global float* out) {\n  const "
+    "int i = get_global_id(0);\n  const int j = get_global_id(1);\n  if (i >= "
+    "nrow) return;\n  if (j >= ncol) return;\n  out[i] = A[i + ncol * j] + "
+    "v[i];\n}\n\n__kernel\nvoid clkernel_outerproduct(int m, const int n, "
+    "__global const float* in1, __global const float* in2, __global float* "
+    "out) {\n  const int col = get_global_id(0);\n  const int row = "
+    "get_global_id(1);\n\n  // TODO: This\n}\n\n__kernel\nvoid "
+    "clkernel_sumcol(int nrow, int ncol, __global const float* in, __global "
+    "float* out) {\n  const int i = get_global_id(0);\n  if (i >= nrow) "
+    "return;\n\n  float sum = 0.0f;\n  for (int j = 0; j < nrow; j++) {\nsum "
+    "+= input[nrow * i + j];\n  }\n  out[i] = sum;\n}\n*/\n__kernel\nvoid "
+    "clkernel_sumrow(int nrow, int ncol, __global const float* in, __global "
+    "float* out) {\n  const int idx = get_global_id(0);\n  if (idx >= nrow) "
+    "return;\n\n  float sum = 0.0f;\n  for (int j = 0; j < ncol; j++) {\nsum "
+    "+= in[j + ncol * idx];\n  }\n  out[idx] = sum;\n}\n\n\n// Adapted from "
+    "http://code.haskell.org/HsOpenCL/tests/bench/transpose.cl\n#define "
+    "BLOCK_DIM 16\n__kernel\nvoid clkernel_transpose(uint nrow, uint "
+    "ncol,\n__global const float* in, __global float* out,\n__local float* "
+    "sdata) {\n  uint gidx = get_global_id(0);\n  uint gidy = "
+    "get_global_id(1);\n\n  if ((gidx < ncol) && (gidy < nrow)) {\nuint id_in "
+    "= gidy * ncol + gidx;\nsdata[get_local_id(1) * (BLOCK_DIM+1) + "
+    "get_local_id(0)] = in[id_in];\n  }\n\n  barrier(CLK_LOCAL_MEM_FENCE);\n\n "
+    " gidx = get_group_id(1) * BLOCK_DIM + get_local_id(0);\n  gidy = "
+    "get_group_id(0) * BLOCK_DIM + get_local_id(1);\n  if ((gidx < nrow) && "
+    "(gidy < ncol)) {\nuint id_out = gidy * nrow + gidx;\nout[id_out] = "
+    "sdata[get_local_id(0) * (BLOCK_DIM + 1) + get_local_id(1)];\n  "
+    "}\n}\n/*\n__kernel\nvoid clkernel_transpose2(uint nrow, uint ncol, "
+    "__global const float* in, __global float* out, __local float* sdata) {\n  "
+    "const uint lidx = get_local_id(0);\n  const uint lidy = "
+    "get_local_id(1);\n  const uint id0 = get_group_id(0) * ncol * lidx;\n  "
+    "const uint id1 = get_group_id(1) * nrow * lidy;\n\n  if (id0 < nrow && "
+    "id1 < ncol) {\nsdata[lidx][lidy] = in[id1 * nrow + id0];\n  }\n\n  "
+    "barrier(CLK_LOCAL_MEM_FENCE);\n\n  const uint new_id0 = get_group_id(1) * "
+    "nrow + lidx;\n  const uint new_id1 = get_group_id(0) * ncol + lidy;\n\n  "
+    "if (new_id0 < ncol && new_id1 < nrow) {\nout[new_id1 * ncol + new_id0] = "
+    "sdata[lidx][lidy];\n  }\n}*/\n\n__kernel\nvoid clkernel_diagvec_left(uint "
+    "vsize, __global const float* vin, __global float* out) {\n  const uint "
+    "gid = get_global_id(0);\n\n  for (uint i = 0; i < vsize; i++)\nout[gid * "
+    "vsize + i] = (i == gid) ? vin[gid] : 0.0f;\n}\n\n\n__kernel\nvoid "
+    "clkernel_diagvec_right(uint vsize, __global const float* vin, __global "
+    "float* out) {\n  const uint gid = get_global_id(0);\n\n  for (uint i = 0; "
+    "i < vsize; i++)\nout[gid * vsize + i] = (i == gid) ? vin[gid] : "
+    "0.0f;\n}\n";
+}  //  namespace opencl
+}  //  namespace singa
+
+#endif
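
A note on the Gaussian kernel embedded above: PRNG_threefry4x32_gaussian maps
two Threefry-generated uniform samples to one normal sample with the
Box-Muller transform (the commented-out sin(...) lines are the quadrature
counterpart). Below is a minimal host-side sketch of that mapping, assuming
u1 and u2 are uniform in (0, 1] so that log(u2) is defined (the kernel nudges
zeros by 1e-4 for the same reason); the names here are illustrative, not part
of the SINGA API:

#include <cmath>
#include <utility>

static const float kTwoPi = 6.283185307f;

// Box-Muller: two independent uniforms in (0, 1] give two independent
// N(mean, stddev^2) samples. The OpenCL kernel keeps only the cosine branch.
std::pair<float, float> BoxMuller(float u1, float u2, float mean, float stddev) {
  float radius = std::sqrt(-2.0f * std::log(u2));
  float angle = kTwoPi * u1;
  return {std::cos(angle) * radius * stddev + mean,
          std::sin(angle) * radius * stddev + mean};
}
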
diff --git a/src/core/device/platform.cc b/src/core/device/platform.cc
index 8ae15f8..d07c67c 100644
--- a/src/core/device/platform.cc
+++ b/src/core/device/platform.cc
@@ -17,14 +17,18 @@
  */
 #ifndef DISABLE_WARNINGS
 
+#include <iostream>
+
 #include "singa/core/device.h"
 #include "singa/singa_config.h"
 #include "singa/utils/opencl_utils.h"
-
+using namespace std;
 namespace singa {
 
 #ifdef USE_CUDA
 
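+// Cache of devices created so far, one slot per GPU id; mtx_ serializes
+// CreateCudaGPUsOn so that concurrent callers share the same instances.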
+std::vector<std::shared_ptr<Device>> Platform::UsedDevice;
+std::mutex Platform::mtx_;
 int Platform::GetNumGPUs() {
   int count;
   CUDA_CHECK(cudaGetDeviceCount(&count));
@@ -52,7 +56,7 @@
 }
 
 const std::pair<size_t, size_t> Platform::GetGPUMemSize(const int device) {
-  std::pair<size_t, size_t> ret{ 0, 0 };
+  std::pair<size_t, size_t> ret{0, 0};
   if (Platform::CheckDevice(device)) {
     CUDA_CHECK(cudaSetDevice(device));
     size_t free = 0, total = 0;
@@ -75,20 +79,17 @@
 
 const string Platform::DeviceQuery(int device, bool verbose) {
   if (cudaSuccess != cudaGetDevice(&device)) {
-    return "The device (ID = " + std::to_string(device) + " is not available" ;
+    return "The device (ID = " + std::to_string(device) + " is not available";
   }
   cudaDeviceProp prop;
   CUDA_CHECK(cudaGetDeviceProperties(&prop, device));
   std::ostringstream out;
   out << "Device id:                     " << device << '\n';
   out << "Total global memory:           " << prop.totalGlobalMem << '\n';
-  out << "Total shared memory per block: " << prop.sharedMemPerBlock
-      << '\n';
-  out << "Maximum threads per block:     " << prop.maxThreadsPerBlock
-      << '\n';
-  out << "Maximum dimension of block:    "
-      << prop.maxThreadsDim[0 << '\n'] << ", " << prop.maxThreadsDim[1]
-      << ", " << prop.maxThreadsDim[2] << '\n';
+  out << "Total shared memory per block: " << prop.sharedMemPerBlock << '\n';
+  out << "Maximum threads per block:     " << prop.maxThreadsPerBlock << '\n';
+  out << "Maximum dimension of block:    " << prop.maxThreadsDim[0 << '\n']
+      << ", " << prop.maxThreadsDim[1] << ", " << prop.maxThreadsDim[2] << '\n';
   out << "Maximum dimension of grid:     " << prop.maxGridSize[0] << ", "
       << "Concurrent copy and execution: "
       << (prop.deviceOverlap ? "Yes" : "No") << '\n';
@@ -100,7 +101,7 @@
     out << "Total registers per block:     " << prop.regsPerBlock << '\n';
     out << "Maximum memory pitch:          " << prop.memPitch << '\n';
     out << "Warp size:                     " << prop.warpSize
-      << prop.maxGridSize[1] << ", " << prop.maxGridSize[2] << '\n';
+        << prop.maxGridSize[1] << ", " << prop.maxGridSize[2] << '\n';
     out << "Clock rate:                    " << prop.clockRate << '\n';
     out << "Number of multiprocessors:     " << prop.multiProcessorCount
         << '\n';
@@ -110,31 +111,38 @@
   return out.str();
 }
 
-const vector<shared_ptr<Device>>
-Platform::CreateCudaGPUs(const size_t num_devices, size_t init_size) {
+const vector<shared_ptr<Device>> Platform::CreateCudaGPUs(
+    const size_t num_devices, size_t init_size) {
   const vector<int> gpus = GetGPUIDs();
   CHECK_LE(num_devices, gpus.size());
   vector<int> use_gpus(gpus.begin(), gpus.begin() + num_devices);
   return CreateCudaGPUsOn(use_gpus, init_size);
 }
 
-const vector<shared_ptr<Device>>
-Platform::CreateCudaGPUsOn(const vector<int> &devices, size_t init_size) {
+const vector<shared_ptr<Device>> Platform::CreateCudaGPUsOn(
+    const vector<int>& devices, size_t init_size) {
   MemPoolConf conf;
-  if (init_size > 0)
-    conf.set_init_size(init_size);
+  if (init_size > 0) conf.set_init_size(init_size);
   size_t bytes = conf.init_size() << 20;
   for (auto device : devices) {
     conf.add_device(device);
     CHECK_LE(bytes, Platform::GetGPUMemSize(device).first);
   }
-  auto pool = std::make_shared<CnMemPool>(conf);
-
-  vector<shared_ptr<Device> > ret;
-  for (auto device : devices) {
-    auto dev = std::make_shared<CudaGPU>(device, pool);
-    ret.push_back(dev);
+  mtx_.lock();
+  if (UsedDevice.size() == 0) {
+    int count = Platform::GetNumGPUs();
+    for (int i = 0; i < count; i++) UsedDevice.push_back(nullptr);
   }
+  auto pool = std::make_shared<CnMemPool>(conf);
+  vector<shared_ptr<Device>> ret;
+  for (size_t i = 0; i < devices.size(); i++) {
+    if (UsedDevice[devices[i]] == nullptr)
+      UsedDevice[devices[i]] = std::make_shared<CudaGPU>(devices[i], pool);
+    else
+      UsedDevice[devices[i]]->Reset();
+    ret.push_back(UsedDevice[devices[i]]);
+  }
+  mtx_.unlock();
   return ret;
 }
 
@@ -176,7 +184,7 @@
 
 }
 */
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
 
 }  // namespace singa
 
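
The platform.cc hunk above changes CreateCudaGPUsOn from constructing a fresh
CudaGPU on every call to reusing a mutex-guarded static cache (UsedDevice), so
a second request for the same device id returns the same instance after a
Reset(). A minimal sketch of that lazily-initialized per-id cache, with
hypothetical names (Device_, DeviceCache) standing in for SINGA's types:

#include <memory>
#include <mutex>
#include <vector>

struct Device_ { void Reset() {} };  // stand-in for singa::Device

class DeviceCache {
 public:
  static std::shared_ptr<Device_> Get(size_t id, size_t num_devices) {
    std::lock_guard<std::mutex> lock(mtx_);          // exception-safe scoped lock
    if (cache_.empty()) cache_.resize(num_devices);  // lazy init: one slot per id
    if (!cache_[id])
      cache_[id] = std::make_shared<Device_>();      // first request: create
    else
      cache_[id]->Reset();                           // later requests: reuse
    return cache_[id];
  }

 private:
  static std::vector<std::shared_ptr<Device_>> cache_;
  static std::mutex mtx_;
};

std::vector<std::shared_ptr<Device_>> DeviceCache::cache_;
std::mutex DeviceCache::mtx_;

One design note: std::lock_guard releases the mutex even if an exception is
thrown inside the critical section, which the explicit mtx_.lock() /
mtx_.unlock() pair in CreateCudaGPUsOn does not guarantee.
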
diff --git a/src/core/memory/memory.cc b/src/core/memory/memory.cc
index 0fb8511..480c2c3 100644
--- a/src/core/memory/memory.cc
+++ b/src/core/memory/memory.cc
@@ -18,34 +18,38 @@
 #ifndef DISABLE_WARNINGS
 
 #include "singa/core/memory.h"
-#include "singa/utils/logging.h"
-#include "singa/proto/core.pb.h"
+
 #include <iostream>
 
+#include "singa/proto/core.pb.h"
+#include "singa/utils/logging.h"
+
 #ifdef USE_CUDA
 
 namespace singa {
-std::atomic<int> CnMemPool::pool_count(0);
 std::pair<size_t, size_t> CnMemPool::GetMemUsage() {
   size_t free, total;
   auto status = cnmemMemGetInfo(&free, &total, NULL);
   CHECK_EQ(status, cnmemStatus_t::CNMEM_STATUS_SUCCESS)
-    << cnmemGetErrorString(status);
+      << cnmemGetErrorString(status);
+  return std::make_pair(free, total);
+}
+std::pair<size_t, size_t> CnMemPool::GetMemUsage(int id) {
+  CHECK_EQ(cudaSetDevice(id), cudaError_t::cudaSuccess);
+  size_t free, total;
+  auto status = cnmemMemGetInfo(&free, &total, NULL);
+  CHECK_EQ(status, cnmemStatus_t::CNMEM_STATUS_SUCCESS)
+      << cnmemGetErrorString(status);
   return std::make_pair(free, total);
 }
 
 CnMemPool::CnMemPool(int numDevices, size_t init_size, size_t max_size) {
-  for (int i = 0; i < numDevices; i++)
-    conf_.add_device(i);
+  for (int i = 0; i < numDevices; i++) conf_.add_device(i);
   conf_.set_init_size(init_size);
   conf_.set_max_size(max_size);
-  CHECK_LT(++pool_count, 2) << "CnMemPool must be used as a singleton.";
 }
 
-CnMemPool::CnMemPool(const MemPoolConf &conf) {
-  conf_ = conf;
-  CHECK_LT(++pool_count, 2) << "CnMemPool must be used as a singleton.";
-}
+CnMemPool::CnMemPool(const MemPoolConf &conf) { conf_ = conf; }
 
 void CnMemPool::Init() {
   mtx_.lock();
@@ -79,21 +83,20 @@
     CHECK_EQ(status, cnmemStatus_t::CNMEM_STATUS_SUCCESS)
         << " " << cnmemGetErrorString(status);
     initialized_ = false;
-    --pool_count;
   }
   mtx_.unlock();
 }
 
 void CnMemPool::Malloc(void **ptr, const size_t size) {
-  if (!initialized_)
-    Init();
+  if (!initialized_) Init();
   cnmemStatus_t status = cnmemMalloc(ptr, size, NULL);
   CHECK_EQ(status, cnmemStatus_t::CNMEM_STATUS_SUCCESS)
       << " " << cnmemGetErrorString(status);
 }
 
 void CnMemPool::Free(void *ptr) {
-  CHECK(initialized_) << "Cannot free the memory as the pool is not initialzied";
+  CHECK(initialized_)
+      << "Cannot free the memory as the pool is not initialzied";
   cnmemStatus_t status = cnmemFree(ptr, NULL);
   CHECK_EQ(status, cnmemStatus_t::CNMEM_STATUS_SUCCESS)
       << " " << cnmemGetErrorString(status);
@@ -109,7 +112,7 @@
   cudaError_t status = cudaFree(ptr);
   CHECK_EQ(status, cudaError_t::cudaSuccess);
 }
-}
+}  // namespace singa
 #endif
 
 #endif
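
The new GetMemUsage(int id) overload above binds the calling thread to the
given device before querying cnmem. The same query pattern can be expressed
against the plain CUDA runtime (cudaMemGetInfo is the underlying free/total
query); this sketch is illustrative and omits error checking:

#include <cuda_runtime.h>
#include <utility>

// Returns {free, total} bytes on device `id`, mirroring the shape of
// CnMemPool::GetMemUsage(int id) without going through the cnmem pool.
std::pair<size_t, size_t> MemUsageOn(int id) {
  cudaSetDevice(id);  // bind this thread to device `id` first
  size_t free_bytes = 0, total_bytes = 0;
  cudaMemGetInfo(&free_bytes, &total_bytes);
  return {free_bytes, total_bytes};
}
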
diff --git a/src/core/scheduler/scheduler.cc b/src/core/scheduler/scheduler.cc
index 183674f..0172ee8 100644
--- a/src/core/scheduler/scheduler.cc
+++ b/src/core/scheduler/scheduler.cc
@@ -17,3 +17,698 @@
  */
 
 #include "singa/core/scheduler.h"
+
+#include <algorithm>
+#include <functional>
+#include <iomanip>
+#include <sstream>
+#include <thread>
+
+#include "singa/core/device.h"
+#include "singa/utils/safe_queue.h"
+
+namespace singa {
+
+void Node::AddInEdge(Edge *in_edge) { in_edges_.push_back(in_edge); }
+
+void Node::AddOutEdge(Edge *out_edge) { out_edges_.push_back(out_edge); }
+
+void Edge::SetBlock(Block *blk) { blk_ = blk; }
+
+void Edge::SetSrcNode(Node *src_node) { src_node_ = src_node; }
+
+void Edge::SetDstNode(Node *dst_node) { dst_node_ = dst_node; }
+
+Graph::Graph(Device *device) : device_(device) {}
+
+Graph::~Graph() { Reset(); }
+
+Node *BlkInfo::used_node(const size_t idx) const {
+  CHECK_LT(idx, used_nodes_.size());
+  return used_nodes_[idx];
+}
+
+Node *Graph::node(const size_t idx) const {
+  CHECK_LT(idx, nodes_.size());
+  return nodes_[idx];
+}
+
+Edge *Graph::edge(const size_t idx) const {
+  CHECK_LT(idx, edges_.size());
+  return edges_[idx];
+}
+
+BlkInfo *Graph::block(Block *blk) const {
+  auto it = blocks_.find(blk);
+  CHECK(it != blocks_.end());
+  return it->second;
+}
+
+Node *Graph::begin_node(const size_t idx) const {
+  CHECK_LT(idx, begin_nodes_.size());
+  return begin_nodes_[idx];
+}
+
+const NodeVec &Graph::next_nodes(const size_t idx) const {
+  CHECK_LT(idx, next_nodes_.size());
+  return next_nodes_[idx];
+}
+
+const BlockVec &Graph::free_blocks(const size_t idx) const {
+  CHECK_LT(idx, free_blocks_.size());
+  return free_blocks_[idx];
+}
+
+void Graph::Reset() {
+  for (auto it : nodes_) {
+    delete it;
+  }
+  nodes_.clear();
+
+  for (auto it : edges_) {
+    delete it;
+  }
+  edges_.clear();
+
+  for (auto it : blocks_) {
+    delete it.second;
+  }
+  blocks_.clear();
+
+  leaf_blocks_.clear();
+
+  iteration_ = 0;
+
+  time_elapsed_ = 0;
+
+  dirty_ = false;
+
+  in_serial_ = false;
+}
+
+void Graph::Debug() {
+  if (dirty_) Analyze();
+
+  int w = 0;
+  size_t max_in_num = 0, max_out_num = 0, max_next_num = 0, max_free_num = 0;
+  for (auto &it : nodes_) {
+    if (it->op_name_ == "Waiting") continue;
+    max_in_num = std::max(max_in_num, it->in_edges_.size());
+    max_out_num = std::max(max_out_num, it->out_edges_.size());
+  }
+
+  for (auto &it : next_nodes_) {
+    max_next_num = std::max(max_next_num, it.size());
+  }
+
+  for (auto &it : free_blocks_) {
+    max_free_num = std::max(max_free_num, it.size());
+  }
+
+  size_t max_size = std::max(nodes_.size(), blocks_.size());
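+  // w = number of decimal digits of the largest node/block id (column width)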
+  for (size_t i = max_size; i > 0; i /= 10, ++w) {
+  }
+
+  std::stringstream ss;
+  ss << "begin nodes:[";
+  for (size_t i = 0; i < begin_nodes_.size(); ++i) {
+    ss << begin_nodes_[i]->id_ << " ";
+  }
+  ss << "]" << std::endl;
+
+  size_t size = 0;
+  for (size_t i = 0; i < nodes_.size(); ++i) {
+    ss << "OP[" << std::setw(w) << i;
+    auto node = nodes_[i];
+
+    string name;
+    if (node->op_name_.size() > 16) {
+      name = node->op_name_.substr(0, 13) + "...";
+    } else {
+      name = node->op_name_;
+    }
+
+    ss << "] Inputs:[";
+    size = node->in_edges_.size();
+    for (size_t j = 0; j < std::max(max_in_num, size); ++j) {
+      if (j < size)
+        ss << std::setw(w) << blocks_[node->in_edges_[j]->blk_]->id_ << " ";
+      else
+        ss << std::setw(w + 1) << " ";
+    }
+
+    ss << "] Outputs:[";
+    size = node->out_edges_.size();
+    for (size_t j = 0; j < std::max(max_out_num, size); ++j) {
+      if (j < size)
+        ss << std::setw(w) << blocks_[node->out_edges_[j]->blk_]->id_ << " ";
+      else
+        ss << std::setw(w + 1) << " ";
+    }
+
+    ss << "] Next nodes:[";
+    size = next_nodes_[i].size();
+    for (size_t j = 0; j < max_next_num; ++j) {
+      if (j < size)
+        ss << std::setw(w) << next_nodes_[i][j]->id_ << " ";
+      else
+        ss << std::setw(w + 1) << " ";
+    }
+
+    ss << "] Free blocks:[";
+    size = free_blocks_[i].size();
+    for (size_t j = 0; j < max_free_num; ++j) {
+      if (j < size)
+        ss << std::setw(w) << blocks_[free_blocks_[i][j]]->id_ << " ";
+      else
+        ss << std::setw(w + 1) << " ";
+    }
+    ss << "]" << std::endl;
+  }
+
+  size_t max_used_num = 0;
+  std::vector<BlkInfo *> blkInfos;
+  blkInfos.resize(blocks_.size());
+
+  for (auto it : blocks_) {
+    blkInfos[it.second->id_] = it.second;
+    max_used_num = std::max(max_used_num, it.second->used_nodes_.size());
+  }
+
+  for (auto it : blkInfos) {
+    auto blkInfo = it;
+    ss << "Block[" << std::setw(w) << blkInfo->id_ << "] addr[" << std::setw(10)
+       << blkInfo->blk_ << "] size[" << std::setw(10) << blkInfo->blk_->size()
+       << "] graph_ref[" << std::setw(w) << blkInfo->graph_ref_
+       << "] ref_count[" << std::setw(w) << blkInfo->blk_->ref_count() << "] ";
+    switch (blkInfo->type_) {
+      case BlockType::kInput:
+        ss << "type[input] ";
+        break;
+      case BlockType::kParam:
+        ss << "type[param] ";
+        break;
+      case BlockType::kInter:
+        ss << "type[inter] ";
+        break;
+      case BlockType::kEnd:
+        ss << "type[_end_] ";
+        break;
+      default:
+        break;
+    }
+    int id = -1;
+    if (blkInfo->write_edge_) {
+      id = blkInfo->write_edge_->src_node_->id_;
+    }
+    ss << " write_node[" << std::setw(w) << id;
+
+    ss << "] used_nodes[";
+    size = blkInfo->used_nodes_.size();
+    for (size_t i = 0; i < max_used_num; ++i) {
+      if (i < size)
+        ss << std::setw(w) << blkInfo->used_nodes_[i]->id_ << " ";
+      else
+        ss << std::setw(w + 1) << " ";
+    }
+    ss << "]" << std::endl;
+  }
+
+  printf("%s", ss.str().c_str());
+}
+
+void Graph::PrintTimeProfiling() {
+  std::stringstream ss;
+
+  // verbosity level: 1 -> forward and backward propagation time
+  if (device_->verbosity() == 1) {
+    bool forward = true;
+    float forward_time = 0;
+    float backward_time = 0;
+    float time_elapsed;
+
+    for (size_t i = 0; i < nodes_.size(); ++i)
+      if (nodes_[i]->time_elapsed() > 0) {
+        if (forward == true)
+          // detect the first op whose name contains "Backward"; every op
+          // from that point on belongs to backward propagation. The split
+          // is most accurate when SoftmaxCrossEntropy or Softmax is used.
+          if (nodes_[i]->op_name().find("Backward") != std::string::npos)
+            forward = false;
+        // when forward becomes false, it starts the backward propagation
+
+        time_elapsed = (nodes_[i]->time_elapsed()) /
+                       (iteration_ - device_->skip_iteration());
+
+        if (forward == true) forward_time += time_elapsed;
+      }
+
+    backward_time = (time_elapsed_ / (iteration_ - device_->skip_iteration())) -
+                    forward_time;
+
+    ss << std::endl << "Time Profiling:" << std::endl;
+    ss << "Forward Propagation Time : " << forward_time << " sec" << std::endl;
+    ss << "Backward Propagation Time : " << backward_time << " sec"
+       << std::endl;
+  }
+
+  // verbosity level: 2 -> each operation time (OP_ID, operation name, time)
+  if (device_->verbosity() == 2) {
+    ss << std::endl << "Time Profiling:" << std::endl;
+    for (size_t i = 0; i < nodes_.size(); ++i)
+      if (nodes_[i]->time_elapsed() > 0)
+        ss << "OP_ID" << nodes_[i]->id_ << ". " << nodes_[i]->op_name() << " : "
+           << (nodes_[i]->time_elapsed()) / (iteration_) << " sec" << std::endl;
+  }
+
+  // verbosity level: 3 -> Distributed training operations
+  if (device_->verbosity() == 3) {
+    ss << std::endl << "Time Profiling:" << std::endl;
+    for (size_t i = 0; i < nodes_.size(); ++i)
+      if ((nodes_[i]->op_name().find("Dist") != std::string::npos) &&
+          (nodes_[i]->time_elapsed() > 0))
+        ss << "OP_ID" << nodes_[i]->id_ << ". " << nodes_[i]->op_name() << " : "
+           << (nodes_[i]->time_elapsed()) / (iteration_) << " sec" << std::endl;
+  }
+
+  printf("%s", ss.str().c_str());
+}
+
+void Graph::TimeProfilingDoExec(Node *curNode) {
+  if ((device_->verbosity() > 0) && (curNode->op_name_ != "Waiting") &&
+      (iteration_ >= device_->skip_iteration()))
+    device_->TimeProfilingDoExec(std::move(curNode->op_), 0, curNode);
+  else
+    device_->DoExec(std::move(curNode->op_), 0);
+}
+
+void Graph::EvaluateTimeElapsed(const TimePoint &start) {
+  if ((device_->verbosity() > 0) && (iteration_ > device_->skip_iteration())) {
+    device_->Sync();
+    std::chrono::duration<float> duration =
+        std::chrono::high_resolution_clock::now() - start;
+    time_elapsed_inc(duration.count());
+    for (size_t i = 0; i < nodes_.size(); ++i) {
+      Node *curNode = nodes_[i];
+      if (curNode->op_name_ != "Waiting") {
+        device_->EvaluateTimeElapsed(curNode);
+      }
+    }
+  }
+}
+
+void Graph::TakeStartTime(TimePoint &start) {
+  if ((device_->verbosity() > 0) && (iteration_ >= device_->skip_iteration())) {
+    device_->Sync();
+    start = std::chrono::high_resolution_clock::now();
+  }
+}
+
+void Graph::RunGraph() {
+  in_serial_ = false;
+  if (dirty_) Analyze();
+
+  TimePoint start;
+  SafeQueue<Node *> node_queue;
+
+  // activate nodes
+  for (auto it : begin_nodes_) {
+    node_queue.Push(it);
+  }
+
+  TakeStartTime(start);
+
+  // run graph
+  while (node_queue.Size()) {
+    // step 1: pop the first element, get the node corresponding to the index
+    Node *curNode = nullptr;
+    node_queue.Pop(curNode);
+    int curIndex = curNode->id_;
+
+    // step 2: execute the operation
+    TimeProfilingDoExec(curNode);
+
+    // step 3: release some blocks' data that won't be used later
+    for (auto it : free_blocks_[curIndex]) {
+      it->free_data();
+    }
+
+    /*
+    if (free_blocks_[curIndex].size()) {
+      CBData *cb_data = new CBData(this, curNode);
+      cudaStreamAddCallback(device_->ctx_.stream, Graph::Callback, (void
+    *)(cb_data), 0);
+    }
+    */
+
+    // step 4: activate the following nodes
+    for (auto it : next_nodes_[curIndex]) {
+      node_queue.Push(it);
+    }
+  }
+
+  // increment iteration counter
+  step();
+  EvaluateTimeElapsed(start);
+}
+
+void Graph::RunInSerial() {
+  in_serial_ = true;
+  if (dirty_) Analyze();
+
+  TimePoint start;
+  TakeStartTime(start);
+
+  for (size_t i = 0; i < nodes_.size(); ++i) {
+    Node *curNode = nodes_[i];
+
+    // step 1: execute the operation
+    TimeProfilingDoExec(curNode);
+
+    // step 2: release some blocks' data that won't be used later
+    for (auto it : free_blocks_[i]) {
+      it->free_data();
+    }
+
+    /*
+    // Wait for calculation to complete and then recyle the data
+    CBData *cb_data = new CBData(this, curNode);
+    CHECK(cudaStreamAddCallback(device_->ctx_.stream, Graph::Callback, (void
+    *)(cb_data), 0));
+    */
+  }
+
+  // increment iteration counter
+  step();
+  EvaluateTimeElapsed(start);
+}
+
+void Graph::AddOperation(OpFunc &&op, const BlockVec &read_blocks,
+                         const BlockVec &write_blocks, string op_name) {
+  dirty_ = true;
+
+  // if the size of both read_blocks and write_blocks is zero,
+  // this operation is used for synchronization
+  if (read_blocks.size() == 0 && write_blocks.size() == 0) {
+    AddSyncOp(std::move(op), op_name);
+    return;
+  }
+
+  // create new node
+  Node *node = new Node(nodes_.size(), std::move(op), op_name);
+
+  // create edges for read_blocks
+  for (size_t i = 0; i < read_blocks.size(); ++i) {
+    Block *blk = read_blocks[i];
+    Node *src_node = nullptr;
+    BlkInfo *blkInfo = nullptr;
+
+    // update leaf blocks
+    auto iter = leaf_blocks_.find(blk);
+    if (iter != leaf_blocks_.end()) {
+      leaf_blocks_.erase(iter);
+    }
+
+    // check if the block is already in the computational graph
+    auto it = blocks_.find(blk);
+    if (it == blocks_.end()) {
+      blkInfo = new BlkInfo(blocks_.size(), blk, BlockType::kInput);
+      blocks_[blk] = blkInfo;
+    } else {
+      blkInfo = it->second;
+      if (blkInfo->type_ == BlockType::kEnd) {
+        blkInfo->type_ = BlockType::kInter;
+      }
+
+      // update the existing edge, update dst node and create new edge
+      Edge *write_edge = blkInfo->write_edge_;
+      if (write_edge) {
+        if (!write_edge->dst_node_) {
+          // change the dst node of the write_edge
+          write_edge->dst_node_ = node;
+          node->AddInEdge(write_edge);
+          blkInfo->graph_ref_ += 1;
+          continue;
+        } else {
+          src_node = write_edge->src_node_;
+        }
+      }
+    }
+
+    // create new edge for new block
+    Edge *edge = new Edge(edges_.size(), blk, src_node, node);
+    blkInfo->graph_ref_ += 1;
+    if (src_node) {
+      src_node->AddOutEdge(edge);
+    }
+
+    node->AddInEdge(edge);
+    edges_.push_back(edge);
+  }
+
+  // update last node for write_blocks
+  for (size_t i = 0; i < write_blocks.size(); ++i) {
+    Block *blk = write_blocks[i];
+    BlkInfo *blkInfo = nullptr;
+
+    // update leaf blocks
+    leaf_blocks_.insert(blk);
+
+    auto it = blocks_.find(blk);
+    if (it == blocks_.end()) {
+      blkInfo = new BlkInfo(blocks_.size(), blk, BlockType::kEnd);
+      blocks_[blk] = blkInfo;
+    } else {
+      blkInfo = it->second;
+      if (blkInfo->type_ == BlockType::kInput) {
+        blkInfo->type_ = BlockType::kParam;
+      }
+
+      Edge *write_edge = blkInfo->write_edge_;
+      if (write_edge) {
+        if (!write_edge->dst_node_) {
+          write_edge->dst_node_ = node;
+          node->AddInEdge(write_edge);
+        } else {
+          Node *lastNode = write_edge->src_node_;
+          auto outEdges = lastNode->out_edges();
+          for (auto outEdge : outEdges) {
+            if (outEdge->blk_ == blk && outEdge->dst_node_ != node) {
+              Edge *edge =
+                  new Edge(edges_.size(), blk, outEdge->dst_node_, node);
+              outEdge->dst_node_->AddOutEdge(edge);
+              node->AddInEdge(edge);
+              edges_.push_back(edge);  // register so Reset() can free it
+            }
+          }
+        }
+      }
+    }
+
+    // create new edge for new block
+    Edge *edge = new Edge(edges_.size(), blk, node, nullptr);
+    blkInfo->write_edge_ = edge;
+    blkInfo->graph_ref_ += 1;
+
+    node->AddOutEdge(edge);
+    edges_.push_back(edge);
+  }
+
+  // add node into nodes
+  nodes_.push_back(node);
+}
+
+void Graph::AddSyncOp(function<void(Context *)> &&op, string op_name) {
+  // create new node
+  Node *node = new Node(nodes_.size(), std::move(op), op_name);
+
+  for (auto it : leaf_blocks_) {
+    Block *blk = it;
+    BlkInfo *blkInfo = blocks_[blk];
+    Edge *edge = nullptr;
+
+    if (blkInfo->type_ == BlockType::kEnd) {
+      blkInfo->type_ = BlockType::kInter;
+    }
+
+    Edge *write_edge = blkInfo->write_edge_;
+    if (!write_edge->dst_node_) {
+      // change the dst node of the write_edge
+      write_edge->dst_node_ = node;
+      edge = write_edge;
+    } else {
+      Node *src_node = write_edge->src_node_;
+      edge = new Edge(edges_.size(), blk, src_node, node);
+      src_node->AddOutEdge(edge);
+      edges_.push_back(edge);
+    }
+
+    node->AddInEdge(edge);
+
+    // fake edges, no need to add the graph ref
+    edge = new Edge(edges_.size(), blk, node, nullptr);
+    blkInfo->write_edge_ = edge;
+
+    node->AddOutEdge(edge);
+    edges_.push_back(edge);
+  }
+
+  // add node into nodes
+  nodes_.push_back(node);
+}
+
+void Graph::Analyze() {
+  begin_nodes_.clear();
+  next_nodes_.clear();
+  next_nodes_.resize(nodes_.size());
+  free_blocks_.clear();
+  free_blocks_.resize(nodes_.size());
+
+  for (auto &it : blocks_) {
+    it.second->used_nodes_.clear();
+  }
+
+  AnalyzeNodes();
+
+  AnalyzeEdges();
+
+  dirty_ = false;
+
+  // Debug();
+}
+
+void Graph::AnalyzeNodes() {
+  if (in_serial_) {
+    begin_nodes_.push_back(nodes_[0]);
+
+    for (size_t i = 0; i < nodes_.size(); ++i) {
+      Node *curNode = nodes_[i];
+
+      next_nodes_[i].clear();
+      if (i + 1 < nodes_.size()) {
+        next_nodes_[i].push_back(nodes_[i + 1]);
+      }
+
+      BlockSet blks;
+      for (size_t j = 0; j < curNode->in_edges_.size(); ++j) {
+        blks.insert(curNode->in_edges_[j]->blk_);
+      }
+      for (size_t j = 0; j < curNode->out_edges_.size(); ++j) {
+        blks.insert(curNode->out_edges_[j]->blk_);
+      }
+
+      for (auto &it : blks) {
+        blocks_[it]->used_nodes_.push_back(curNode);
+      }
+    }
+  } else {
+    // init node ref
+    std::vector<int> node_ref_;
+    node_ref_.resize(nodes_.size());
+    for (size_t i = 0; i < nodes_.size(); ++i) {
+      node_ref_[i] = nodes_[i]->in_edges_.size();
+    }
+
+    // find all input edges and decrease ref count of nodes
+    for (size_t i = 0; i < edges_.size(); ++i) {
+      Node *src_node = edges_[i]->src_node_;
+      if (!src_node) {
+        Node *node = edges_[i]->dst_node_;
+        int nodeId = node->id_;
+        node_ref_[nodeId] -= 1;
+      }
+    }
+
+    // activate nodes
+    SafeQueue<Node *> node_queue;
+    for (size_t i = 0; i < node_ref_.size(); ++i) {
+      if (node_ref_[i] == 0) {
+        begin_nodes_.push_back(nodes_[i]);
+        node_queue.Push(nodes_[i]);
+      }
+    }
+
+    // run graph
+    while (node_queue.Size()) {
+      // step 1: pop the first element, get the node corresponding to the index
+      Node *curNode = nullptr;
+      node_queue.Pop(curNode);
+      int curIndex = curNode->id_;
+
+      // step 2: decrease ref count of nodes and activate nodes
+      next_nodes_[curIndex].clear();
+      for (size_t i = 0; i < curNode->out_edges_.size(); ++i) {
+        Edge *edge = curNode->out_edges_[i];
+        Node *nextNode = edge->dst_node_;
+
+        if (!nextNode) {
+          continue;
+        }
+
+        int nodeId = nextNode->id_;
+        node_ref_[nodeId] -= 1;
+        if (node_ref_[nodeId] <= 0) {
+          node_queue.Push(nextNode);
+          next_nodes_[curIndex].push_back(nextNode);
+        }
+      }
+
+      // step 3: push_back curNode to the used_nodes_ of relevant blocks
+      BlockSet blks;
+      for (size_t j = 0; j < curNode->in_edges_.size(); ++j) {
+        blks.insert(curNode->in_edges_[j]->blk_);
+      }
+      for (size_t j = 0; j < curNode->out_edges_.size(); ++j) {
+        blks.insert(curNode->out_edges_[j]->blk_);
+      }
+
+      for (auto &it : blks) {
+        blocks_[it]->used_nodes_.push_back(curNode);
+      }
+    }
+  }
+}
+
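+// For each block, record the node after whose execution the block's data can
+// be freed: an intermediate/end block whose references all come from inside
+// the graph dies right after its last user node runs.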
+void Graph::AnalyzeEdges() {
+  for (auto &it : blocks_) {
+    Block *blk = it.first;
+    BlkInfo *blkInfo = it.second;
+
+    if (blkInfo->used_nodes_.size()) {
+      int node_id = blkInfo->used_nodes_.back()->id_;
+      BlockType type = blkInfo->type_;
+
+      // if the block belongs to an intermediate tensor
+      // and isn't referred to on the Python side
+      if ((type == BlockType::kInter || type == BlockType::kEnd) &&
+          blkInfo->graph_ref_ >= blk->ref_count()) {
+        free_blocks_[node_id].push_back(blk);
+      }
+    }
+  }
+}
+
+void Graph::FreeLoop() {
+  int id = 0;
+  for (;;) {
+    free_queue_.Pop(id);
+    if (id == -1) {
+      break;
+    } else {
+      for (auto it : free_blocks_[id]) {
+        it->free_data();
+      }
+    }
+  }
+}
+
+/*
+void CUDART_CB Graph::Callback(cudaStream_t stream, cudaError_t status,
+                               void *data) {
+  CBData *cb_data = (CBData *)data;
+  Graph *graph = cb_data->graph_;
+  graph->free_queue_.Push(cb_data->node_->id_);
+  delete cb_data;
+}
+*/
+
+}  // namespace singa
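
The non-serial branch of AnalyzeNodes and RunGraph above together implement a
reference-count driven topological traversal: each node starts with one
reference per in-edge, nodes whose count reaches zero become begin nodes, and
finishing a node decrements its successors. This is Kahn's algorithm; here is
a self-contained sketch over a plain adjacency list (illustrative types, not
SINGA's Node/Edge classes):

#include <queue>
#include <vector>

// Kahn-style scheduling: next[u] lists the nodes that depend on u.
// Returns one valid execution order; in RunGraph the "visit" step is where
// the node's operation runs and dead blocks are freed.
std::vector<int> ExecutionOrder(const std::vector<std::vector<int>>& next) {
  const size_t n = next.size();
  std::vector<int> in_deg(n, 0);
  for (const auto& outs : next)
    for (int v : outs) ++in_deg[v];  // one reference per incoming edge

  std::queue<int> ready;
  for (size_t i = 0; i < n; ++i)
    if (in_deg[i] == 0) ready.push(static_cast<int>(i));  // begin nodes

  std::vector<int> order;
  while (!ready.empty()) {
    int u = ready.front();
    ready.pop();
    order.push_back(u);  // "execute" u
    for (int v : next[u])
      if (--in_deg[v] == 0) ready.push(v);  // activate successors
  }
  return order;
}
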
diff --git a/src/core/tensor/math_kernel.cu b/src/core/tensor/math_kernel.cu
index 482f223..43be56d 100644
--- a/src/core/tensor/math_kernel.cu
+++ b/src/core/tensor/math_kernel.cu
@@ -69,6 +69,25 @@
 }
 */
 
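+// Gather a strided (e.g. transposed) view into a contiguous buffer: each
+// linear output index i is decomposed along `shape` and re-projected with
+// `stride` to find the matching element of `in`.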
+__global__ void KernelTraverseUnaryTransform(const size_t n, size_t nDim,
+                                             const float *in, const int *shape,
+                                             const int *stride, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    int shape_accu = n;
+    size_t offset = 0;
+    int remains = i;
+
+    for (int k = 0; k < nDim; k++) {
+      shape_accu = shape_accu / shape[k];
+      int idx = remains / shape_accu;
+      remains = remains % shape_accu;
+      offset = offset + idx * stride[k];
+    }
+    out[i] = in[offset];
+  }
+}
+
 __global__ void KernelAdd(const size_t n, const float *in1, const float *in2,
                           float *out) {
   for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
@@ -100,6 +119,46 @@
   }
 }
 
+__global__ void KernelErf(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = erff(in[i]);
+  }
+}
+
+__global__ void KernelCeil2(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = ceilf(in[i]);
+  }
+}
+__global__ void KernelFloor(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = floorf(in[i]);
+  }
+}
+
+__global__ void KernelRound(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = roundf(in[i]);
+  }
+}
+
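+// Round half to even ("banker's rounding"): values exactly halfway between
+// two integers (i.e. in[i]*2 is integral) round to the nearest even integer.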
+__global__ void KernelRoundE(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    float doub = in[i] * 2;
+    if (ceilf(doub) == doub) {
+      out[i] = roundf(in[i] / 2) * 2;
+    } else {
+      out[i] = roundf(in[i]);
+    }
+  }
+}
+
+
 __global__ void KernelLog(const size_t n, const float *in, float *out) {
   for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
        i += blockDim.x * gridDim.x) {
@@ -141,7 +200,15 @@
 __global__ void KernelRelu(const size_t n, const float *in, float *out) {
   for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
        i += blockDim.x * gridDim.x) {
-    out[i] = max(in[i], 0.0f);
+    out[i] = in[i] > 0 ? in[i] : 0.0f;
+  }
+}
+
+__global__ void KernelReLUBackward(const size_t n, const float *in1, const float *in2,
+                         float *out) {
+  for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = in2[i] > 0 ? in1[i] : 0.0f;
   }
 }
 
@@ -152,10 +219,17 @@
   }
 }
 
-__global__ void KernelTanh(const size_t n, const float *in, float *out) {
+__global__ void KernelCastFloat2Int(const size_t n, const float *in, int *out) {
   for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
        i += blockDim.x * gridDim.x) {
-    out[i] = tanhf(in[i]);
+    out[i] = int(in[i]);
+  }
+}
+
+__global__ void KernelCastInt2Float(const size_t n, const int *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = float(in[i]);
   }
 }
 
@@ -165,6 +239,14 @@
     out[i] = logf(1 + expf(in[i]));
   }
 }
+
+__global__ void KernelSoftsign(const size_t n, const float *in, float *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = in[i] / (max(in[i], -in[i]) + 1);
+  }
+}
+
 __global__ void KernelSquare(const size_t n, const float *in, float *out) {
   for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
        i += blockDim.x * gridDim.x) {
@@ -254,6 +336,23 @@
     out[idx] = in1[idx] >= in2[idx] ? 1.0f : 0.0f;
   }
 }
+
+__global__ void KernelEQ(const size_t num, const float *in, const float x,
+                         float *out) {
+  for (size_t idx = blockIdx.x * blockDim.x + threadIdx.x; idx < num;
+       idx += blockDim.x * gridDim.x) {
+    out[idx] = in[idx] == x ? 1.0f : 0.0f;
+  }
+}
+
+__global__ void KernelBEQ(const size_t num, const float *in1, const float *in2,
+                         float *out) {
+  for (size_t idx = blockIdx.x * blockDim.x + threadIdx.x; idx < num;
+       idx += blockDim.x * gridDim.x) {
+    out[idx] = in1[idx] == in2[idx] ? 1.0f : 0.0f;
+  }
+}
+
 __global__ void KernelGT(const size_t num, const float *in, const float x,
                          float *out) {
   for (size_t idx = blockIdx.x * blockDim.x + threadIdx.x; idx < num;
@@ -355,138 +454,295 @@
   }
 }
 
+__global__ void KernelFloat2Half(const size_t n, const float *in, __half *out) {
+  for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = __float2half_rn(in[i]);
+  }
+}
+
+__global__ void KernelHalf2Float(const size_t n, const __half *in, float *out) {
+  for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = __half2float(in[i]);
+  }
+}
+
+// kernel used by the threshold-based sparsification
+__global__ void KernelSparsAbs(const size_t n, const float threshold, const float *in, float *out) {
+  for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = fabs(in[i]) >= threshold ? in[i] : 0.0f;
+  }
+}
+
+// kernel used by the threshold-based sparsification
+__global__ void KernelSparsIndex(const size_t n, const float *in, int *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = in[i] == 0.0f ? 0 : i + 1;
+  }
+}
+
+// kernel used by the top-K based sparsification
+__global__ void KernelGenerateIndex(const size_t n, int *out) {
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+       i += blockDim.x * gridDim.x) {
+    out[i] = i + 1;
+  }
+}
+
+// CUDA unary elementwise op kernel template
+#define GenUnaryCudaKernel(fn,kernelfn,cudafn)                                \
+  __global__ void kernelfn(const size_t n, const float *in, float *out) {     \
+    for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n;                \
+         i += blockDim.x * gridDim.x) {                                       \
+      out[i] = cudafn(in[i]);                                                 \
+    }                                                                         \
+  }                                                                           \
+  void fn(const size_t n, const float *in, float *out, cudaStream_t s) {      \
+    kernelfn <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);             \
+  }
+
+GenUnaryCudaKernel(cos, KernelCos, cosf);
+GenUnaryCudaKernel(cosh, KernelCosh, coshf);
+GenUnaryCudaKernel(acos, KernelAcos, acosf);
+GenUnaryCudaKernel(acosh, KernelAcosh, acoshf);
+GenUnaryCudaKernel(sin, KernelSin, sinf);
+GenUnaryCudaKernel(sinh, KernelSinh, sinhf);
+GenUnaryCudaKernel(asin, KernelAsin, asinf);
+GenUnaryCudaKernel(asinh, KernelAsinh, asinhf);
+GenUnaryCudaKernel(tan, KernelTan, tanf);
+GenUnaryCudaKernel(tanh, KernelTanh, tanhf);
+GenUnaryCudaKernel(atan, KernelAtan, atanf);
+GenUnaryCudaKernel(atanh, KernelAtanh, atanhf);
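+
+// Illustrative expansion (added commentary, not part of the original patch):
+// GenUnaryCudaKernel(cos, KernelCos, cosf) above produces, roughly,
+//
+//   __global__ void KernelCos(const size_t n, const float *in, float *out) {
+//     for (size_t i = blockIdx.x * blockDim.x + threadIdx.x; i < n;
+//          i += blockDim.x * gridDim.x) {
+//       out[i] = cosf(in[i]);
+//     }
+//   }
+//   void cos(const size_t n, const float *in, float *out, cudaStream_t s) {
+//     KernelCos <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+//   }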
 
 
 // ********************************
 // Functions call kernels
 // ********************************
 
+void float2half(const size_t n, const float *in, __half *out, cudaStream_t s) {
+  KernelFloat2Half <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void half2float(const size_t n, const __half *in, float *out, cudaStream_t s) {
+  KernelHalf2Float <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void sparsabs(const size_t n, const float threshold, const float *in,
+              float *out, cudaStream_t s) {
+  KernelSparsAbs <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, threshold, in, out);
+}
+
+void sparsindex(const size_t n, const float *in, int *out, cudaStream_t s) {
+  KernelSparsIndex <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void generateindex(const size_t n, int *out, cudaStream_t s) {
+  KernelGenerateIndex <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, out);
+}
+
+// used by the threshold-based sparsification: compacts the nonzero values to
+// the front of the buffer (the tail beyond the new end is left unspecified)
+void removezeroval(const size_t n, float *in, cudaStream_t s) {
+  thrust::remove(thrust::cuda::par.on(s), in, in + n, float(0));
+}
+
+// used by the threshold-based sparsification: compacts the nonzero indices to
+// the front and reports the number of kept elements via *address
+void removezeroidx(const size_t n, int *in, cudaStream_t s, int *address) {
+  int *new_end = thrust::remove(thrust::cuda::par.on(s), in, in + n, int(0));
+  *address = (int)(new_end - in);
+}
+
+// comparator ordering floats by decreasing absolute value
+struct absgreater : public thrust::binary_function<float, float, bool> {
+  __host__ __device__ bool operator()(const float &lhs, const float &rhs) const {
+    return fabsf(lhs) > fabsf(rhs);
+  }
+};
+
+// used by the topK-based sparsification: sorts values by decreasing magnitude
+void sortbykey(const size_t n, float *key, int *value, cudaStream_t s) {
+  thrust::sort_by_key(thrust::cuda::par.on(s), key, key + n, value, absgreater());
+}
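+
+// Illustrative sketch (added commentary, not part of the original patch): one
+// plausible way the threshold-based sparsification helpers compose on the
+// host side; the helper name and the in-place reuse of `val` are assumptions.
+//
+//   void sparsify_by_threshold(const size_t n, const float thr, float *val,
+//                              int *idx, int *cnt, cudaStream_t s) {
+//     sparsabs(n, thr, val, val, s);   // zero out entries with |x| < thr
+//     sparsindex(n, val, idx, s);      // 1-based index where nonzero, else 0
+//     removezeroval(n, val, s);        // compact the surviving values
+//     removezeroidx(n, idx, s, cnt);   // compact indices; *cnt = #kept
+//   }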
+
 void set(const size_t n, const float v, float *out, cudaStream_t s) {
-  KernelSet <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, v, out);
+  KernelSet <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, v, out);
 }
 
 void abs(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelAbs <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelAbs <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void cast_float_2_int(const size_t n, const float *src, int *dst,
+                      cudaStream_t s) {
+  KernelCastFloat2Int <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, src, dst);
+}
+
+void cast_int_2_float(const size_t n, const int *src, float *dst,
+                      cudaStream_t s) {
+  KernelCastInt2Float <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, src, dst);
 }
 
 void sign(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelSign <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelSign <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 
 void exp(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelExp <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelExp <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void erf(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelErf <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void ceil2(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelCeil2 <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void floor(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelFloor <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void round(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelRound <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
+void rounde(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelRoundE <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 
 void log(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelLog <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelLog <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 
 void sqrt(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelSqrt <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelSqrt <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 
 void square(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelSquare <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
-}
-
-void tanh(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelTanh <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelSquare <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 
 void relu(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelRelu <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelRelu <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
 void sigmoid(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelSigmoid <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelSigmoid <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
+
 void softplus(const size_t n, const float *in, float *out, cudaStream_t s) {
-  KernelSoftplus <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, out);
+  KernelSoftplus <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
 }
+
+void softsign(const size_t n, const float *in, float *out, cudaStream_t s) {
+  KernelSoftsign <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, out);
+}
+
 void clamp(const size_t n, const float low, const float high, const float *in,
            float *out, cudaStream_t s) {
-  KernelClamp <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, low, high, in, out);
+  KernelClamp <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, low, high, in, out);
 }
 
 void pow(const size_t n, const float *in, const float x, float *out,
          cudaStream_t s) {
-  KernelPow <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, x, out);
+  KernelPow <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, x, out);
 }
 
 void add(const size_t n, const float *in, const float x, float *out,
          cudaStream_t s) {
-  KernelAdd <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, x, out);
+  KernelAdd <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, x, out);
+}
+
+void traverse_unary_transform(const size_t n, size_t nDim, const float *in,
+                              const int *shape, const int *stride, float *out,
+                              cudaStream_t s) {
+  KernelTraverseUnaryTransform<<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>>(
+      n, nDim, in, shape, stride, out);
 }
 
 void mult(const size_t n, const float *in, const float x, float *out,
           cudaStream_t s) {
-  KernelMult <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in, x, out);
+  KernelMult <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in, x, out);
 }
 
 void div(const size_t n, const float x, const float *in, float *out,
           cudaStream_t s) {
-  KernelDiv <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, x, in, out);
+  KernelDiv <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, x, in, out);
 }
 
 void threshold(const size_t n, const float x, const float *in, float *out,
                cudaStream_t s) {
-  KernelThreshold <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, x, in, out);
+  KernelThreshold <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, x, in, out);
+}
+
+void relubackward(const size_t num, const float *in1, const float *in2,
+                  float *out, cudaStream_t s) {
+  KernelReLUBackward <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
 }
 
 void gt(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s) {
-  KernelGT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in, x, out);
+  KernelGT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in, x, out);
 }
 void gt(const size_t num, const float *in1, const float *in2, float *out,
         cudaStream_t s) {
-  KernelBGT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in1, in2, out);
+  KernelBGT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
 }
 void ge(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s) {
-  KernelGE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in, x, out);
+  KernelGE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in, x, out);
 }
 void ge(const size_t num, const float *in1, const float *in2, float *out,
         cudaStream_t s) {
-  KernelBGE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in1, in2, out);
+  KernelBGE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
+}
+void eq(const size_t num, const float *in, const float x, float *out,
+        cudaStream_t s) {
+  KernelEQ <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in, x, out);
+}
+void eq(const size_t num, const float *in1, const float *in2, float *out,
+        cudaStream_t s) {
+  KernelBEQ <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
 }
 void lt(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s) {
-  KernelLT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in, x, out);
+  KernelLT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in, x, out);
 }
 void lt(const size_t num, const float *in1, const float *in2, float *out,
         cudaStream_t s) {
-  KernelBLT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in1, in2, out);
+  KernelBLT <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
 }
 void le(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s) {
-  KernelLE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in, x, out);
+  KernelLE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in, x, out);
 }
 void le(const size_t num, const float *in1, const float *in2, float *out,
         cudaStream_t s) {
-  KernelBLE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF>>> (num, in1, in2, out);
+  KernelBLE <<<ceil(num / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (num, in1, in2, out);
 }
 void pow(const size_t n, const float *in1, const float *in2, float *out,
          cudaStream_t s) {
-  KernelPow <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in1, in2, out);
+  KernelPow <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in1, in2, out);
 }
 
 void add(const size_t n, const float *in1, const float *in2, float *out,
          cudaStream_t s) {
-  KernelAdd <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in1, in2, out);
+  KernelAdd <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in1, in2, out);
 }
 
 void sub(const size_t n, const float *in1, const float *in2, float *out,
          cudaStream_t s) {
-  KernelSub <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in1, in2, out);
+  KernelSub <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in1, in2, out);
 }
 
 void mult(const size_t n, const float *in1, const float *in2, float *out,
           cudaStream_t s) {
-  KernelMult <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in1, in2, out);
+  KernelMult <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in1, in2, out);
 }
 
 void div(const size_t n, const float *in1, const float *in2, float *out,
          cudaStream_t s) {
-  KernelDiv <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (n, in1, in2, out);
+  KernelDiv <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (n, in1, in2, out);
 }
 
 /*
@@ -500,42 +756,42 @@
 
 void ComputeCrossEntropy(const bool int_target, size_t batchsize, const size_t dim, const float *p,
                          const int *t, float *loss, cudaStream_t stream) {
-  KernelComputeCrossEntropy <<<ceil(batchsize / CU1DBLOCKF), CU1DBLOCKF>>>
+  KernelComputeCrossEntropy <<<ceil(batchsize / CU1DBLOCKF), CU1DBLOCKF, 0, stream>>>
       (int_target, batchsize, dim, p, t, loss);
 }
 
 void SoftmaxCrossEntropyBwd(const bool int_target, size_t batchsize, const size_t dim, const float *p,
                             const int *t, float *grad, cudaStream_t stream) {
-  KernelSoftmaxCrossEntropyBwd <<<ceil(batchsize / CU1DBLOCKF), CU1DBLOCKF>>>
+  KernelSoftmaxCrossEntropyBwd <<<ceil(batchsize / CU1DBLOCKF), CU1DBLOCKF, 0, stream>>>
       (int_target, batchsize, dim, p, t, grad);
 }
 
 void RowMax(const size_t nrow, const size_t ncol, const float *inPtr,
     float *outPtr, cudaStream_t stream) {
-  KernelRowMax <<<ceil(nrow / CU1DBLOCKF), CU1DBLOCKF>>>(nrow, ncol, inPtr, outPtr);
+  KernelRowMax <<<ceil(nrow / CU1DBLOCKF), CU1DBLOCKF, 0, stream>>>(nrow, ncol, inPtr, outPtr);
 }
 
 /*
 void square_grad(int n, const float *in, float *out, cudaStream_t s) {
-  kernel_square_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (in, out, n);
+  kernel_square_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (in, out, n);
 }
 
 void tanh_grad(int n, const float *in, float *out, cudaStream_t s) {
-  kernel_tanh_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (in, out, n);
+  kernel_tanh_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (in, out, n);
 }
 
 
 void relu_grad(int n, const float *in, float *out, cudaStream_t s) {
-  kernel_relu_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (in, out, n);
+  kernel_relu_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (in, out, n);
 }
 
 
 void sigmoid_grad(int n, const float *in, float *out, cudaStream_t s) {
-  kernel_sigmoid_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (in, out, n);
+  kernel_sigmoid_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (in, out, n);
 }
 
 void softplus_grad(int n, const float *in, float *out, cudaStream_t s) {
-  kernel_softplus_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF>>> (in, out, n);
+  kernel_softplus_grad <<<ceil(n / CU1DBLOCKF), CU1DBLOCKF, 0, s>>> (in, out, n);
 }
 
 
@@ -630,7 +886,6 @@
   }
 }
 
-
 __global__ void kernel_softplus_grad(const float *src_data, float *des_data,
                                      int n) {
   int index = blockIdx.x * blockDim.x + threadIdx.x;
diff --git a/src/core/tensor/math_kernel.h b/src/core/tensor/math_kernel.h
index 7c7e84c..69e5047 100644
--- a/src/core/tensor/math_kernel.h
+++ b/src/core/tensor/math_kernel.h
@@ -1,30 +1,35 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #ifndef SRC_CORE_TENSOR__MATH_KERNEL_H_
 #define SRC_CORE_TENSOR__MATH_KERNEL_H_
 
-
 #include "singa/singa_config.h"
 #ifdef USE_CUDA
 
+#include <thrust/execution_policy.h>
+#include <thrust/remove.h>
+#include <thrust/sort.h>
+
+#include "cuda_fp16.h"
+
 /// TODO(wangwei) Clean the function APIs as commented in tensor_math.h
 ///  Add 'Context *ctx' as an argument of all cuda functions.
 namespace singa {
@@ -39,13 +44,34 @@
 void abs(const size_t n, const float *in, float *out, cudaStream_t s);
 void sign(const size_t n, const float *in, float *out, cudaStream_t s);
 void exp(const size_t n, const float *in, float *out, cudaStream_t s);
+void erf(const size_t n, const float *in, float *out, cudaStream_t s);
+void ceil2(const size_t n, const float *in, float *out, cudaStream_t s);
+void floor(const size_t n, const float *in, float *out, cudaStream_t s);
+void round(const size_t n, const float *in, float *out, cudaStream_t s);
+void rounde(const size_t n, const float *in, float *out, cudaStream_t s);
+void cast_float_2_int(const size_t n, const float *src, int *dst,
+                      cudaStream_t s);
+void cast_int_2_float(const size_t n, const int *src, float *dst,
+                      cudaStream_t s);
 void log(const size_t n, const float *in, float *out, cudaStream_t s);
 void sqrt(const size_t n, const float *in, float *out, cudaStream_t s);
 void square(const size_t n, const float *in, float *out, cudaStream_t s);
+void cos(const size_t n, const float *in, float *out, cudaStream_t s);
+void cosh(const size_t n, const float *in, float *out, cudaStream_t s);
+void acos(const size_t n, const float *in, float *out, cudaStream_t s);
+void acosh(const size_t n, const float *in, float *out, cudaStream_t s);
+void sin(const size_t n, const float *in, float *out, cudaStream_t s);
+void sinh(const size_t n, const float *in, float *out, cudaStream_t s);
+void asin(const size_t n, const float *in, float *out, cudaStream_t s);
+void asinh(const size_t n, const float *in, float *out, cudaStream_t s);
+void tan(const size_t n, const float *in, float *out, cudaStream_t s);
 void tanh(const size_t n, const float *in, float *out, cudaStream_t s);
+void atan(const size_t n, const float *in, float *out, cudaStream_t s);
+void atanh(const size_t n, const float *in, float *out, cudaStream_t s);
 void relu(const size_t n, const float *in, float *out, cudaStream_t s);
 void sigmoid(const size_t n, const float *in, float *out, cudaStream_t s);
 void softplus(const size_t n, const float *in, float *out, cudaStream_t s);
+void softsign(const size_t n, const float *in, float *out, cudaStream_t s);
 void clamp(const size_t n, const float low, const float high, const float *in,
            float *out, cudaStream_t s);
 
@@ -58,12 +84,19 @@
 void mult(const size_t n, const float *in, const float x, float *out,
           cudaStream_t s);
 
+void traverse_unary_transform(const size_t n, size_t nDim, const float *in,
+                              const int *shape, const int *stride, float *out,
+                              cudaStream_t s);
+
 void div(const size_t n, const float x, const float *in, float *out,
          cudaStream_t s);
 
 void threshold(const size_t n, const float x, const float *in, float *out,
                cudaStream_t s);
 
+void relubackward(const size_t num, const float *in1, const float *in2,
+                  float *out, cudaStream_t s);
+
 void gt(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s);
 void gt(const size_t num, const float *in1, const float *in2, float *out,
@@ -74,6 +107,10 @@
 void ge(const size_t num, const float *in1, const float *in2, float *out,
         cudaStream_t s);
 
+void eq(const size_t num, const float *in, const float x, float *out,
+        cudaStream_t s);
+void eq(const size_t num, const float *in1, const float *in2, float *out,
+        cudaStream_t s);
 
 void lt(const size_t num, const float *in, const float x, float *out,
         cudaStream_t s);
@@ -111,8 +148,26 @@
                             float *grad, cudaStream_t stream);
 
 void RowMax(const size_t nrow, const size_t ncol, const float *inPtr,
-    float *outPtr, cudaStream_t stream);
-}  // cuda
+            float *outPtr, cudaStream_t stream);
+
+void float2half(const size_t n, const float *in, __half *out, cudaStream_t s);
+
+void half2float(const size_t n, const __half *in, float *out, cudaStream_t s);
+
+void sparsabs(const size_t n, const float threshold, const float *in,
+              float *out, cudaStream_t s);
+
+void sparsindex(const size_t n, const float *in, int *out, cudaStream_t s);
+
+void generateindex(const size_t n, int *out, cudaStream_t s);
+
+void removezeroval(const size_t n, float *in, cudaStream_t s);
+
+void removezeroidx(const size_t n, int *in, cudaStream_t s, int *address);
+
+void sortbykey(const size_t n, float *key, int *value, cudaStream_t s);
+
+}  // namespace cuda
 
 }  // namespace singa
 
diff --git a/src/core/tensor/sparse_tensor.cc b/src/core/tensor/sparse_tensor.cc
index 46ea850..7eeca95 100644
--- a/src/core/tensor/sparse_tensor.cc
+++ b/src/core/tensor/sparse_tensor.cc
@@ -20,4 +20,4 @@
 #include "singa/core/tensor.h"
 namespace singa {}
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/core/tensor/tensor.cc b/src/core/tensor/tensor.cc
index ed4da96..99d9e2a 100644
--- a/src/core/tensor/tensor.cc
+++ b/src/core/tensor/tensor.cc
@@ -16,74 +16,73 @@
  * limitations under the License.
  */
 #include "singa/core/tensor.h"
+// #include "singa/utils/stacktrace.h"
+#include <algorithm>
+#include <utility>
+
 #include "./tensor_math.h"
 #include "./tensor_math_cpp.h"
 #include "./tensor_math_cuda.h"
 #include "./tensor_math_opencl.h"
-#include <utility>
+
+#define Noaxis 9999
 
 namespace singa {
 
 Tensor::~Tensor() {
-  if (block_ != nullptr && block_->DecRefCount() == 0)
+  if (block_ != nullptr && block_->DecRefCount() == 0) {
     device_->FreeBlock(block_);
+  }
   block_ = nullptr;
 }
 
-Tensor::Tensor() { device_ = defaultDevice; }
+Tensor::Tensor() {
+  device_ = defaultDevice;
+  stride_ = {1};
+}
 
+// non-strided constructors
 Tensor::Tensor(const Shape &shape, DataType dtype)
     : data_type_(dtype), device_(defaultDevice), shape_(shape) {
   size_t size = Product(shape_) * SizeOf(data_type_);
-  if (size)
+  if (size) {
     block_ = device_->NewBlock((int)size);
+  }
+  generate_stride();
 }
-Tensor::Tensor(Shape &&shape, DataType dtype)
-    : data_type_(dtype), device_(defaultDevice), shape_(shape) {
-  size_t size = Product(shape_) * SizeOf(data_type_);
-  if (size)
-    block_ = device_->NewBlock((int)size);
-}
+
+// non-strided constructors with device
 Tensor::Tensor(const Shape &shape, std::shared_ptr<Device> device,
                DataType dtype)
     : data_type_(dtype), device_(device), shape_(shape) {
   size_t size = Product(shape_) * SizeOf(data_type_);
-  if (size)
+  if (size) {
     block_ = device_->NewBlock((int)size);
+  }
+  generate_stride();
 }
-Tensor::Tensor(Shape &&shape, std::shared_ptr<Device> device, DataType dtype)
-    : data_type_(dtype), device_(device), shape_(shape) {
-  size_t size = Product(shape_) * SizeOf(data_type_);
-  if (size)
-    block_ = device_->NewBlock((int)size);
-}
+
 Tensor::Tensor(const Tensor &in)
-    : transpose_(in.transpose_),
-      data_type_(in.data_type_),
+    : data_type_(in.data_type_),
       device_(in.device_),
       block_(in.block()),
-      shape_(in.shape_) {
-  if (block_ != nullptr)
-    block_->IncRefCount();
+      shape_(in.shape_),
+      stride_(in.stride_) {
+  // printf("i am here in &in\n");
+  if (block_ != nullptr) block_->IncRefCount();
 }
 
 Tensor::Tensor(Tensor &&in)
-    : transpose_(in.transpose_),
-      data_type_(in.data_type_),
+    : data_type_(in.data_type_),
       device_(in.device_),
-      shape_(std::move(in.shape_)) {
+      shape_(std::move(in.shape_)),
+      stride_(std::move(in.stride_)) {
+  // printf("i am here in &&in\n");
   block_ = in.block_;
   in.block_ = nullptr;
 }
 
-void Tensor::SetBlock(Block *block) {
-  LOG(WARNING) << "Pls avoid using this function, which may have side-effect.";
-  if (block_ != nullptr)
-    if (block_->DecRefCount()) device_->FreeBlock(block_);
-  block_ = block;
-}
-
-void Tensor::ResetLike(const Tensor &in) {
+Tensor &Tensor::ResetLike(const Tensor &in) {
   if (block_ == nullptr || device_ != in.device_ || MemSize() != in.MemSize()) {
     if (block_ != nullptr && block_->DecRefCount() == 0)
       device_->FreeBlock(block_);
@@ -92,38 +91,101 @@
     block_ = device_->NewBlock((int)in.MemSize());
   }
   shape_ = in.shape_;
+  stride_ = in.stride_;
+  return *this;
 }
 
-void Tensor::Reshape(const Shape &shape) {
-  if (Product(shape_) != Product(shape)) {
+Tensor &Tensor::Resize(const Shape &shape) {
+  if (Size() != Product(shape)) {
     if (block_ != nullptr && block_->DecRefCount() == 0)
       device_->FreeBlock(block_);
     block_ = device_->NewBlock((int)(Product(shape) * SizeOf(data_type_)));
   }
   shape_ = shape;
+  generate_stride();
+  return *this;
 }
 
-void Tensor::Reshape(Shape &&shape) {
-  if (Product(shape_) != Product(shape)) {
-    if (block_ != nullptr && block_->DecRefCount() == 0)
-      device_->FreeBlock(block_);
-    block_ = device_->NewBlock((int)(Product(shape) * SizeOf(data_type_)));
-  }
-  shape_ = std::move(shape);
+Tensor Resize(const Tensor &in, const Shape &shape) {
+  Tensor out(in);
+  out.Resize(shape);
+  return out;
 }
 
-void Tensor::AsType(const DataType type) {
+#define TYPE_TYPE_LANG_SWITCH(ldtype, LDType, rdtype, RDType, ltype, Lang,     \
+                              ...)                                             \
+  do {                                                                         \
+    const int _SwitchShift = 3;                                                \
+    int _SwitchHash =                                                          \
+        ((ldtype) << _SwitchShift * 2) + ((rdtype) << _SwitchShift) + (ltype); \
+    switch (_SwitchHash) {                                                     \
+      case (((kFloat32) << _SwitchShift * 2) + (kInt << _SwitchShift) +        \
+            kCuda): {                                                          \
+        typedef float LDType;                                                  \
+        typedef int RDType;                                                    \
+        typedef lang::Cuda Lang;                                               \
+        { __VA_ARGS__ }                                                        \
+        break;                                                                 \
+      }                                                                        \
+      case (((kInt) << _SwitchShift * 2) + (kFloat32 << _SwitchShift) +        \
+            kCuda): {                                                          \
+        typedef int LDType;                                                    \
+        typedef float RDType;                                                  \
+        typedef lang::Cuda Lang;                                               \
+        { __VA_ARGS__ }                                                        \
+        break;                                                                 \
+      }                                                                        \
+      case (((kFloat32) << _SwitchShift * 2) + (kInt << _SwitchShift) +        \
+            kCpp): {                                                           \
+        typedef float LDType;                                                  \
+        typedef int RDType;                                                    \
+        typedef lang::Cpp Lang;                                                \
+        { __VA_ARGS__ }                                                        \
+        break;                                                                 \
+      }                                                                        \
+      case (((kInt) << _SwitchShift * 2) + (kFloat32 << _SwitchShift) +        \
+            kCpp): {                                                           \
+        typedef int LDType;                                                    \
+        typedef float RDType;                                                  \
+        typedef lang::Cpp Lang;                                                \
+        { __VA_ARGS__ }                                                        \
+        break;                                                                 \
+      }                                                                        \
+      default:                                                                 \
+        LOG(FATAL) << "Unknown combination of left data type "                 \
+                   << DataType_Name(ldtype) << " and right data type "         \
+                   << DataType_Name(rdtype) << " and language "                \
+                   << LangType_Name(ltype);                                    \
+    }                                                                          \
+  } while (0)
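+
+// Added commentary (not part of the original patch): the macro above folds
+// the (left type, right type, language) triple into a single switch key,
+// assuming each enum value fits in 3 bits:
+//   hash = (ldtype << 6) + (rdtype << 3) + ltype
+// so a case such as (kFloat32, kInt, kCuda) matches only when all three agree.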
+
+// return new tensor
+Tensor Tensor::AsType(const DataType type) {
+  CHECK(block() && block()->initialized() == true)
+      << "the data of the tensor needs be initialized before casting to "
+         "another type";
   if (data_type_ != type) {
-    if (block_ != nullptr && block_->DecRefCount() == 0)
-      device_->FreeBlock(block_);
-    block_ = device_->NewBlock((int)(Product(shape_) * SizeOf(type)));
-    data_type_ = type;
+    Tensor &thisRef = *this;
+    Tensor ret(shape_, device_, type);
+    TYPE_TYPE_LANG_SWITCH(
+        data_type_, LDType, type, RDType, device_->lang(), Lang, {
+          ret.device()->Exec(
+              [thisRef, ret](Context *ctx) mutable {
+                CastCopy<LDType, RDType, Lang>(&thisRef, &ret, ctx);
+              },
+              {this->block()}, {ret.block()}, "AsType");
+        });
+    return ret;
+  } else {
+    Tensor t = this->Clone();
+    return t;
   }
 }
 
-void Tensor::ToDevice(std::shared_ptr<Device> dst) {
+Tensor &Tensor::ToDevice(std::shared_ptr<Device> dst) {
   // TODO(wangwei) the comparison is restricted. May compare against device ID?
   if (device_ != dst) {
+    // WARNING: this function can't be buffered
     Tensor tmp(shape_, dst, data_type_);
     if (block_ != nullptr && Size() && block_->initialized())
       tmp.CopyData(*this);
@@ -133,32 +195,42 @@
     tmp.block_ = nullptr;
     device_ = dst;
   }
+  return *this;
 }
 
-void Tensor::ToHost() {
+Tensor &Tensor::ToHost() {
   if (device_ != defaultDevice) ToDevice(device_->host());
+  return *this;
 }
 
 template <typename DType>
 void Tensor::CopyDataFromHostPtr(const DType *src, const size_t num,
-                                 const size_t offset) {
+                                 const size_t offset) const {
   CHECK_EQ(sizeof(DType), SizeOf(data_type_))
       << "data_type is " << DataType_Name(data_type_)
       << " user given type is of size " << sizeof(DType);
   if (src != nullptr) {
-    device_->CopyDataFromHostPtr(block(), src, sizeof(DType) * num,
-                                 sizeof(DType) * offset);
+    Device *dev = device_.get();
+    const Tensor &thisRef = *this;
+    size_t nBytes = sizeof(DType) * num;
+    size_t dst_offset = sizeof(DType) * offset;
+    device_->Exec(
+        [dev, thisRef, src, nBytes, dst_offset](Context *ctx) mutable {
+          dev->CopyDataFromHostPtr(thisRef.block(), src, nBytes, dst_offset,
+                                   ctx);
+        },
+        {}, {block()}, "CopyDataFromHostPtr");
   } else {
     LOG(WARNING) << "Copy data from null host ptr";
   }
 }
 template void Tensor::CopyDataFromHostPtr(const unsigned char *src,
                                           const size_t num,
-                                          const size_t offset);
+                                          const size_t offset) const;
 template void Tensor::CopyDataFromHostPtr(const float *src, const size_t num,
-                                          const size_t offset);
+                                          const size_t offset) const;
 template void Tensor::CopyDataFromHostPtr(const int *src, const size_t num,
-                                          const size_t offset);
+                                          const size_t offset) const;
 
 void Tensor::CopyData(const Tensor &src) {
   CHECK_EQ(Size(), src.Size());
@@ -169,15 +241,31 @@
   }
 }
 
+void Tensor::RepeatData(const vector<size_t> &repeats, int axis,
+                        int total_repeats, const Tensor &src) {
+  if (repeats.size() == 1) {
+    CHECK_EQ(Size(), src.Size() * total_repeats);
+  } else {
+    CHECK_EQ(Size(), src.Size() * total_repeats / src.shape()[axis]);
+  }
+
+  CHECK(block_ != nullptr);
+  // Do repeat only if the src's block is already initialized.
+  if (src.block_ != nullptr) {
+    singa::RepeatDataToFrom(false, repeats, axis, this, src, Size());
+  }
+}
+
 void Tensor::FromProto(const singa::TensorProto &proto) {
   if (block_ != nullptr && block_->DecRefCount() == 0)
     device_->FreeBlock(block_);
   block_ = nullptr;
-  Shape shape;
-  for (uint32_t s : proto.shape()) shape.push_back(s);
+  for (uint32_t s : proto.shape()) shape_.push_back(s);
   data_type_ = proto.data_type();
-  Reshape(shape);
-  transpose_ = proto.transpose();
+  block_ = device_->NewBlock((int)(Product(shape()) * SizeOf(data_type_)));
+  // transpose_ = proto.transpose();
+  stride_.clear();
+  for (int32_t s : proto.stride()) stride_.push_back(s);
   switch (data_type_) {
     case kFloat32: {
       std::unique_ptr<float[]> data_ptr(new float[Product(shape_)]);
@@ -195,12 +283,15 @@
     }
     case kInt: {
       std::unique_ptr<int[]> data(new int[Product(shape_)]);
-      for (size_t i = 0; i < Product(shape_); ++i) data[i] = proto.int_data((int)i);
+      for (size_t i = 0; i < Product(shape_); ++i)
+        data[i] = proto.int_data((int)i);
       CopyDataFromHostPtr<int>(data.get(), Product(shape_));
       break;
     }
-    ///TODO(wangji): Implement to support C++ type char using bytes type in protobuf
-    /// which is equivalent to string type is different from the other cases. The kchar
+    /// TODO(wangji): Implement support for the C++ char type using the bytes
+    /// type in protobuf, which is equivalent to string and differs from the
+    /// other cases. The kChar and kUChar cases are to be implemented.
     /*
     case kChar: {
@@ -216,17 +307,23 @@
       break;
     }
     */
-    default: { LOG(FATAL) << "Unsupported Type" << DataType_Name(data_type_); }
+    default: {
+      LOG(FATAL) << "Unsupported Type" << DataType_Name(data_type_);
+    }
   }
 }
 
-void Tensor::ToProto(singa::TensorProto *proto) const {
+void Tensor::to_proto(singa::TensorProto *proto) const {
   proto->clear_shape();
   for (auto s : shape_) {
     proto->add_shape(s);
   }
   proto->set_data_type(data_type_);
-  proto->set_transpose(transpose_);
+  // proto->set_transpose(transpose_);
+  proto->clear_stride();
+  for (auto s : stride_) {
+    proto->add_stride(s);
+  }
   switch (data_type_) {
     case kFloat32: {
       proto->clear_float_data();
@@ -265,50 +362,164 @@
       break;
     }
     */
-    default: { LOG(FATAL) << "Unsupported Type" << DataType_Name(data_type_); }
+    default: {
+      LOG(FATAL) << "Unsupported Type" << DataType_Name(data_type_);
+    }
   }
 }
 
+void Tensor::ToProto(singa::TensorProto *proto) const { to_proto(proto); }
+
+Tensor Tensor::Repeat(const vector<size_t> &repeats, int axis,
+                      std::shared_ptr<Device> device) {
+  if (device == nullptr) device = device_;
+  vector<size_t> tshape;
+  int total_repeats = 0;
+  if (axis == Noaxis) {
+    total_repeats = repeats[0];
+    tshape.push_back(Product(shape_) * total_repeats);
+  } else {
+    if (repeats.size() == 1) {
+      total_repeats = repeats[0];
+      for (int i = 0; i < static_cast<int>(shape_.size()); i++) {
+        if (i == axis) {
+          tshape.push_back(shape_[i] * total_repeats);
+        } else {
+          tshape.push_back(shape_[i]);
+        }
+      }
+    } else {
+      if (repeats.size() != shape_[axis]) {
+        LOG(FATAL) << "the repeats number doesn't match the axis";
+      }
+      for (size_t i = 0; i < shape_[axis]; i++) {
+        if (repeats[i] < 0) {
+          LOG(FATAL) << "the repeats number is less than zero";
+        }
+        total_repeats += repeats[i];
+      }
+      for (int i = 0; i < static_cast<int>(shape_.size()); i++) {
+        if (i == axis) {
+          tshape.push_back(total_repeats);
+        } else {
+          tshape.push_back(shape_[i]);
+        }
+      }
+    }
+  }
+  Tensor t(tshape, device_);
+  // t.stride_.push_back(1);
+  t.RepeatData(repeats, axis, total_repeats, *this);
+  return t;
+}
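+
+// Added commentary (not part of the original patch): e.g. for a tensor of
+// shape (3, 4), Repeat({2}, 0) gives shape (6, 4) with every row duplicated,
+// while Repeat({1, 2, 3}, 0) gives shape (6, 4) with row i copied repeats[i]
+// times.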
+
 Tensor Tensor::Clone(std::shared_ptr<Device> device) const {
   if (device == nullptr) device = device_;
-  Tensor t(shape_, device_, data_type_);
-  t.transpose_ = transpose_;
+  Tensor t(shape_, device, data_type_);
+  // t.transpose_ = transpose_;
+  t.stride_ = stride_;
   t.CopyData(*this);
   return t;
 }
 
-Tensor Tensor::T() const {
+void Tensor::Clone(Tensor *&other, std::shared_ptr<Device> device) const {
+  if (device == nullptr) device = device_;
+  other = new Tensor(shape_, device, data_type_);
+  other->stride_ = stride_;
+  other->CopyData(*this);
+  return;
+}
+
+Tensor &Tensor::Broadcast(const Shape &shape, const int ignore_last_dim) {
+  // TODO(wangwei) do we need to transform the mem layout if the tensor was
+  // transposed?
+  auto m = shape_.size() - 1, n = shape.size() - 1;
+  // ignore_last_dim is useful for mult broadcast
+  // e.g. (2,3,4)x(4,5) to (2,3,4)x(2,4,5)
+  if (ignore_last_dim < std::min(m, n) + 1) {
+    for (size_t i = ignore_last_dim; i <= std::min(m, n); i++) {
+      if ((shape.at(n - i) != shape_.at(m - i)) && (shape.at(n - i) != 1)) {
+        CHECK_EQ(shape_.at(m - i), 1) << "i= " << i << "\n";  // << Backtrace();
+        shape_.at(m - i) = shape.at(n - i);
+        stride_.at(m - i) = 0;
+      }
+    }
+  }
+  if (m < n) {
+    for (size_t i = m + 1; i <= n; i++) {
+      shape_.emplace(shape_.begin(), shape.at(n - i));
+      stride_.emplace(stride_.begin(), 0);
+    }
+  }
+  return *this;
+}
+
+Tensor Broadcast(const Tensor &in, const Shape &shape,
+                 const int ignore_last_dim) {
+  Tensor out(in);
+  return out.Broadcast(shape, ignore_last_dim);
+}
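+
+// Added commentary (not part of the original patch): broadcasting is
+// zero-copy; it only rewrites shape_ and stride_. E.g. a (3, 1) tensor
+// broadcast against shape (3, 4) keeps its block but becomes shape (3, 4)
+// with stride 0 on the last axis, so the single column is read repeatedly.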
+
+Tensor &Tensor::T() {
+  // this function only works for 2d tensors
   CHECK_EQ(shape_.size(), 2u);
-  Tensor t;
-  t.device_ = device_;
-  t.data_type_ = data_type_;
-  t.transpose_ = !transpose_;
-  t.shape_.push_back(shape_[1]);
-  t.shape_.push_back(shape_[0]);
-  t.block_ = block_;
-  block_->IncRefCount();
-  return t;
+  Transpose();
+  return *this;
+}
+
+// normal transpose without axes
+Tensor &Tensor::Transpose() {
+  std::reverse(shape_.begin(), shape_.end());
+  std::reverse(stride_.begin(), stride_.end());
+  return *this;
+}
+
+// transpose with axes
+Tensor &Tensor::Transpose(const vector<size_t> &axes) {
+  CHECK_EQ(axes.size(), shape_.size())
+      << "Tranpose axes's length should be equal to shape";
+
+  auto shape = shape_;
+  auto stride = stride_;
+  shape_.clear();
+  stride_.clear();
+  for (size_t n = 0; n < axes.size(); ++n) {
+    shape_.push_back(shape[axes[n]]);
+    stride_.push_back(stride[axes[n]]);
+  }
+  return *this;
+}
+
+// normal transpose without axes
+Tensor Transpose(const Tensor &in) {
+  Tensor out(in);
+  out.Transpose();
+  return out;
+}
+
+// transpose with axes
+Tensor Transpose(const Tensor &in, const vector<size_t> &axes) {
+  Tensor out(in);
+  out.Transpose(axes);
+  return out;
 }
 
 Tensor &Tensor::operator=(const Tensor &in) {
-  // LOG(ERROR) << "= const &";
   if (block_ != nullptr && block_->DecRefCount() == 0)
     device_->FreeBlock(block_);
-  transpose_ = in.transpose_;
+  stride_ = in.stride_;
   data_type_ = in.data_type_;
   shape_ = in.shape_;
   device_ = in.device_;
   block_ = in.block();
-  if (block_ != nullptr)
-    block_->IncRefCount();
+  if (block_ != nullptr) block_->IncRefCount();
   return *this;
 }
 
 Tensor &Tensor::operator=(Tensor &&in) {
-  // LOG(ERROR) << "= &&";
   if (block_ != nullptr && block_->DecRefCount() == 0)
     device_->FreeBlock(block_);
-  transpose_ = in.transpose_;
+  stride_ = std::move(in.stride_);
   data_type_ = in.data_type_;
   shape_ = std::move(in.shape_);
   device_ = in.device_;
@@ -317,21 +528,10 @@
   return *this;
 }
 
-Tensor Reshape(const Tensor &in, const Shape &s) {
-  Tensor out(in);
-  out.Reshape(s);
-  return out;
-}
-
-Tensor Reshape(const Tensor &in, Shape &&s) {
-  Tensor out(in);
-  out.Reshape(std::move(s));
-  return out;
-}
-
 #define GenUnaryTensorArgMemberFn(op, fn) \
   Tensor &Tensor::op(const Tensor &in) {  \
-    fn(*this, in, this);                  \
+    Tensor out(*this);                    \
+    fn(*this, in, &out);                  \
     return *this;                         \
   }
 
@@ -343,7 +543,8 @@
 #define GenUnaryScalarArgMemberFn(op, fn) \
   template <typename DType>               \
   Tensor &Tensor::op(const DType x) {     \
-    fn(*this, x, this);                   \
+    Tensor out(*this);                    \
+    fn(*this, x, &out);                   \
     return *this;                         \
   }                                       \
   template Tensor &Tensor::op<float>(const float x)
@@ -364,24 +565,110 @@
   CHECK_GE(src.MemSize(), s_offset + nBytes);
   CHECK_GE(dst->MemSize(), d_offset + nBytes);
 
+  Device *dev = nullptr;
+  CopyDirection direct;
   std::shared_ptr<Device> src_dev = src.device(), dst_dev = dst->device();
-  Block *from = src.block(), *to = dst->block();
   if (dst_dev->lang() != src_dev->lang()) {
     // let the non-cpp device conduct the copy op
     if (dst_dev->lang() == kCpp) {
-      src_dev->CopyDataToFrom(to, from, nBytes, kDeviceToHost, (int)d_offset,
-                              (int)s_offset);
+      dev = src_dev.get();
+      direct = kDeviceToHost;
     } else if (src_dev->lang() == kCpp) {
-      dst_dev->CopyDataToFrom(to, from, nBytes, kHostToDevice, (int)d_offset,
-							  (int)s_offset);
+      dev = dst_dev.get();
+      direct = kHostToDevice;
     } else {
-      LOG(FATAL) << "Not support mem copy betwee Cuda and OpenCL device";
+      LOG(FATAL) << "Not support mem copy between Cuda and OpenCL device";
     }
   } else {
-    auto direct = src_dev->lang() == kCpp ? kHostToHost : kDeviceToDevice;
-    src_dev->CopyDataToFrom(to, from, nBytes, direct, (int)d_offset, (int)s_offset);
+    dev = src_dev.get();
+    direct = src_dev->lang() == kCpp ? kHostToHost : kDeviceToDevice;
+  }
+
+  Tensor &dstRef = *dst;
+  dev->Exec(
+      [dev, dstRef, src, nBytes, direct, d_offset,
+       s_offset](Context *ctx) mutable {
+        Block *from = src.block(), *to = dstRef.block();
+        dev->CopyDataToFrom(to, from, nBytes, direct, (int)d_offset,
+                            (int)s_offset, ctx);
+      },
+      {src.block()}, {dst->block()}, "CopyDataToFrom");
+}
+
+void RepeatDataToFrom(bool broadcast_flag, const vector<size_t> &repeats,
+                      int axis, Tensor *dst, const Tensor &src,
+                      const size_t num) {
+  if (repeats.size() == 1) {
+    broadcast_flag = true;
+  } else if (repeats.size() > 1) {
+    if (axis == Noaxis) {
+      LOG(FATAL) << "When repeats parameter is sequence, axis cannot be None";
+    }
+  }
+  for (size_t i = 0; i < repeats.size(); i++) {
+    CHECK_GE(repeats[i], 0);
+  }
+  auto width = SizeOf(src.data_type());
+  CHECK_EQ(width, SizeOf(dst->data_type()));
+  // size_t nBytes = num * width;
+  int chunk = width;
+  int axis_shape = 1;
+  int shape_outer = 1;
+  if (axis == Noaxis) {
+    axis_shape = 1;
+    shape_outer = Product(src.shape());
+  } else {
+    for (int i = 0; i < axis; i++) {
+      shape_outer *= src.shape()[i];
+    }
+    axis_shape = src.shape()[axis];
+    for (int i = axis + 1; i < static_cast<int>(src.nDim()); i++) {
+      chunk *= src.shape()[i];
+    }
+  }
+
+  Device *dev = nullptr;
+  CopyDirection direct;
+  std::shared_ptr<Device> src_dev = src.device(), dst_dev = dst->device();
+  if (dst_dev->lang() != src_dev->lang()) {
+    // let the non-cpp device conduct the copy op
+    if (dst_dev->lang() == kCpp) {
+      dev = src_dev.get();
+      direct = kDeviceToHost;
+    } else if (src_dev->lang() == kCpp) {
+      dev = dst_dev.get();
+      direct = kHostToDevice;
+    } else {
+      LOG(FATAL)
+          << "Mem repeat copy between Cuda and OpenCL devices is not supported";
+    }
+  } else {
+    dev = src_dev.get();
+    direct = src_dev->lang() == kCpp ? kHostToHost : kDeviceToDevice;
+  }
+
+  int dst_offset = 0;
+  int src_offset = 0;
+  Tensor &dstRef = *dst;
+  for (int i = 0; i < shape_outer; i++) {
+    for (int j = 0; j < axis_shape; j++) {
+      int temp = broadcast_flag ? repeats[0] : repeats[j];
+      for (int k = 0; k < temp; k++) {
+        dev->Exec(
+            [dev, dstRef, src, chunk, direct, dst_offset,
+             src_offset](Context *ctx) mutable {
+              Block *from = src.block(), *to = dstRef.block();
+              dev->CopyDataToFrom(to, from, chunk, direct, dst_offset,
+                                  src_offset, ctx);
+            },
+            {src.block()}, {dst->block()}, "CopyDataToFrom");
+        dst_offset += chunk;
+      }
+      src_offset += chunk;
+    }
   }
 }
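+
+// Added commentary (not part of the original patch): e.g. for src of shape
+// (2, 3, 4) with float data and axis = 1, the loops above walk
+// shape_outer = 2 times axis_shape = 3 chunks of 4 floats each, copying
+// chunk j repeats[j] times (or repeats[0] times when broadcasting).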
+
 //============================================================================
 /// typedef DType according to type value.
 /// DType would be used in the code block __VA_ARGS__.
@@ -428,12 +715,24 @@
         { __VA_ARGS__ }                                        \
         break;                                                 \
       }                                                        \
+      case ((kInt << _SwitchShift) + kCuda): {                 \
+        typedef int DType;                                     \
+        typedef lang::Cuda Lang;                               \
+        { __VA_ARGS__ }                                        \
+        break;                                                 \
+      }                                                        \
       case ((kFloat32 << _SwitchShift) + kCpp): {              \
         typedef float DType;                                   \
         typedef lang::Cpp Lang;                                \
         { __VA_ARGS__ }                                        \
         break;                                                 \
       }                                                        \
+      case ((kInt << _SwitchShift) + kCpp): {                  \
+        typedef int DType;                                     \
+        typedef lang::Cpp Lang;                                \
+        { __VA_ARGS__ }                                        \
+        break;                                                 \
+      }                                                        \
       case ((kFloat32 << _SwitchShift) + kOpencl): {           \
         typedef float DType;                                   \
         typedef lang::Opencl Lang;                             \
@@ -448,95 +747,283 @@
   } while (0)
 
 // =============Element-wise operations====================================
-float Tensor::L1() const {
+float Tensor::l1() const {
   float nrm = 0.0f;
   TYPE_LANG_SWITCH(data_type_, DType, device_->lang(), Lang, {
-    device_->Exec([&nrm, this](Context *ctx) {
-      DType ret = DType(0);
-      Asum<DType, Lang>(this->Size(), this->block(), &ret, ctx);
-      nrm = TypeCast<DType, float>(ret);
-    }, {this->block()}, {});
+    device_->Exec(
+        [&nrm, this](Context *ctx) {
+          DType ret = DType(0);
+          Asum<DType, Lang>(*this, &ret, ctx);
+          nrm = TypeCast<DType, float>(ret);
+        },
+        {this->block()}, {}, "l1");
   });
   return nrm / Size();
 }
 
+// DEPRECATED use l1()
+float Tensor::L1() const { return l1(); }
+
 /// L2 norm, Do not use Nrm2 (name conflict).
-float Tensor::L2() const {
+float Tensor::l2() const {
   float nrm = 0.0f;
   TYPE_LANG_SWITCH(data_type_, DType, device_->lang(), Lang, {
-    device_->Exec([&nrm, this](Context *ctx) {
-      DType ret = DType(0);
-      Nrm2<DType, Lang>(this->Size(), this->block(), &ret, ctx);
-      nrm = TypeCast<DType, float>(ret);
-    }, {this->block()}, {});
+    device_->Exec(
+        [&nrm, this](Context *ctx) {
+          Nrm2<DType, Lang>(*this, &nrm, ctx);
+        },
+        {this->block()}, {}, "L1");
   });
   return nrm / Size();
 }
 
+// DEPRECATED use l2()
+float Tensor::L2() const { return l2(); }
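+
+// Added commentary (not part of the original patch): note that both l1() and
+// l2() divide the norm by Size(), so they return per-element averages rather
+// than the raw norms.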
+
 template <typename SType>
 void Tensor::SetValue(const SType x) {
   CHECK_EQ(sizeof(SType), SizeOf(data_type_));
-  auto size = Size();
+  // auto size = Size();
   auto ptr = block_;
+
   TYPE_LANG_SWITCH(data_type_, DType, device_->lang(), Lang, {
     // TODO(wangwei) cast x to DType
-    device_->Exec([size, x, ptr](Context *ctx) {
-      Set<DType, Lang>(size, x, ptr, ctx);
-    }, {}, {ptr});
+    Tensor &thisRef = *this;
+    device_->Exec(
+        [thisRef, x](Context *ctx) mutable {
+          Set<DType, Lang>(x, &thisRef, ctx);
+        },
+        {}, {ptr}, "SetValue");
   });
 }
 template void Tensor::SetValue<float>(const float x);
 template void Tensor::SetValue<int>(const int x);
 
+template <typename SType>
+void Tensor::get_value(SType *value, const size_t num) const {
+  CHECK(device_ == defaultDevice);
+  Tensor t(shape_, device_, data_type_);
+  // the Transform function rearranges the data in memory according to stride
+  singa::Transform(*this, &t);
+  auto ptr = static_cast<const SType *>(t.block()->data());
+  for (size_t i = 0; i < num; i++) value[i] = ptr[i];
+}
+template void Tensor::get_value<float>(float *value, const size_t num) const;
+template void Tensor::get_value<int>(int *value, const size_t num) const;
+
+// DEPRECATED
+template <typename SType>
+void Tensor::GetValue(SType *value, const size_t num) const {
+  get_value(value, num);
+}
+template void Tensor::GetValue<float>(float *value, const size_t num) const;
+template void Tensor::GetValue<int>(int *value, const size_t num) const;
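+
+// Illustrative usage (added commentary, not part of the original patch),
+// assuming a tensor on the default (host) device:
+//
+//   Tensor t(Shape{2, 3});
+//   t.SetValue(1.5f);
+//   float buf[6];
+//   t.get_value(buf, 6);  // buf now holds six 1.5f entries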
+
 #define EltwiseUnaryTensorFn(fn, t, ret)                               \
   do {                                                                 \
     TYPE_LANG_SWITCH(t.data_type(), DType, t.device()->lang(), Lang, { \
-      ret->device()->Exec([t, ret](Context * ctx) {                    \
-        fn<DType, Lang>(t.Size(), t.block(), ret->block(), ctx);       \
-      }, {t.block()}, {ret->block()});                                 \
+      Tensor &retRef = *ret;                                           \
+      ret->device()->Exec(                                             \
+          [t, retRef](Context *ctx) mutable {                          \
+            fn<DType, Lang>(t, &retRef, ctx);                          \
+          },                                                           \
+          {t.block()}, {ret->block()}, #fn);                           \
     });                                                                \
   } while (0)
 
 #define GenUnaryTensorFn(fn)                             \
   Tensor fn(const Tensor &in) {                          \
     Tensor ret(in.shape(), in.device(), in.data_type()); \
-    auto *retptr = &ret;                                 \
+    Tensor *retptr = &ret;                               \
     EltwiseUnaryTensorFn(fn, in, retptr);                \
     return ret;                                          \
   }                                                      \
   void fn(const Tensor &in, Tensor *out) { EltwiseUnaryTensorFn(fn, in, out); }
 
 GenUnaryTensorFn(Abs);
+GenUnaryTensorFn(Erf);
+GenUnaryTensorFn(Ceil);
+GenUnaryTensorFn(Floor);
+GenUnaryTensorFn(Round);
+GenUnaryTensorFn(RoundE);
 GenUnaryTensorFn(Exp);
 GenUnaryTensorFn(Log);
 GenUnaryTensorFn(ReLU);
 GenUnaryTensorFn(Sigmoid);
+GenUnaryTensorFn(SoftPlus);
+GenUnaryTensorFn(SoftSign);
 GenUnaryTensorFn(Sign);
 GenUnaryTensorFn(Sqrt);
 GenUnaryTensorFn(Square);
+GenUnaryTensorFn(Transform);
+GenUnaryTensorFn(Cos);
+GenUnaryTensorFn(Cosh);
+GenUnaryTensorFn(Acos);
+GenUnaryTensorFn(Acosh);
+GenUnaryTensorFn(Sin);
+GenUnaryTensorFn(Sinh);
+GenUnaryTensorFn(Asin);
+GenUnaryTensorFn(Asinh);
+GenUnaryTensorFn(Tan);
 GenUnaryTensorFn(Tanh);
+GenUnaryTensorFn(Atan);
+GenUnaryTensorFn(Atanh);
+GenUnaryTensorFn(SoftMax);
 
-#define EltwiseBinaryTensorFn(fn, lhs, rhs, ret)                            \
-  do {                                                                      \
-    TYPE_LANG_SWITCH(lhs.data_type(), DType, lhs.device()->lang(), Lang, {  \
-      CHECK_EQ(sizeof(DType), SizeOf(rhs.data_type()));                     \
-      ret->device()->Exec([lhs, rhs, ret](Context * ctx) {                  \
-        fn<DType, Lang>(lhs.Size(), lhs.block(), rhs.block(), ret->block(), \
-                        ctx);                                               \
-      }, {lhs.block(), rhs.block()}, {ret->block()});                       \
-    });                                                                     \
-  } while (0)
+// add axis to softmax API according to ONNX specification
+// https://github.com/onnx/onnx/blob/master/docs/Operators.md#Softmax
+void SoftMax(const Tensor &in, Tensor *out, int axis) {
+  // {a_0, a_1, ..., a_k-1, a_k, ... a_n-1}
+  // reshape to
+  // { a_0 * a_1 * ... a_k-1, a_k * ... a_n-1 }
 
-#define GenBinaryTensorFn(op, fn)                              \
-  Tensor op(const Tensor &lhs, const Tensor &rhs) {            \
-    Tensor ret(lhs.shape(), lhs.device(), lhs.data_type());    \
-    fn(lhs, rhs, &ret);                                        \
-    return ret;                                                \
-  }                                                            \
-  void fn(const Tensor &lhs, const Tensor &rhs, Tensor *ret) { \
-    EltwiseBinaryTensorFn(fn, lhs, rhs, ret);                  \
+  // assert axis is in [-r, r-1]
+  CHECK_LE(axis, (int)in.shape().size() - 1);
+  CHECK_GE(axis, -1 * (int)in.nDim());
+
+  Shape original_shape = in.shape();
+  if (axis < 0) axis = in.shape().size() + axis;
+
+  Shape coerced_shape = {1, 1};
+  for (std::size_t i = 0, max = in.shape().size(); i != max; ++i) {
+    if (i < (size_t)axis)
+      coerced_shape[0] *= in.shape()[i];
+    else
+      coerced_shape[1] *= in.shape()[i];
+  }
+  Tensor in_reshaped = Reshape(in, coerced_shape);
+  out->Reshape(coerced_shape);
+
+  // optimise by minus x - x.max()
+  auto in_max = RowMax(in_reshaped);
+  in_max.Reshape({coerced_shape[0], 1});
+  in_reshaped = in_reshaped - in_max;
+
+  SoftMax(in_reshaped, out);
+
+  out->Reshape(original_shape);
+}
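+
+// Added commentary (not part of the original patch): e.g. for input shape
+// (2, 3, 4) and axis = 1, the coerced shape is (2, 12), so the softmax is
+// taken over the trailing 3 * 4 entries of each of the 2 rows before the
+// output is reshaped back to (2, 3, 4).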
+
+Tensor SoftMax(const Tensor &in, int axis) {
+  Tensor ret(in.shape(), in.device(), in.data_type());
+  auto *retptr = &ret;
+  SoftMax(in, retptr, axis);
+  return ret;
+}
+void SoftMaxBackward(const Tensor &in, Tensor *out, int axis,
+                     const Tensor &fdout) {
+  // {a_0, a_1, ..., a_k-1, a_k, ... a_n-1}
+  // reshape to
+  // { a_0 * a_1 * ... a_k-1, a_k * ... a_n-1 }
+
+  // assert axis is in [-r, r-1]
+  CHECK_LE(axis, (int)in.shape().size() - 1);
+  CHECK_GE(axis, -1 * (int)in.nDim());
+
+  Shape original_shape = in.shape();
+  if (axis < 0) axis = in.shape().size() + axis;
+
+  Shape coerced_shape = {1, 1};
+  for (std::size_t i = 0, max = in.shape().size(); i != max; ++i) {
+    if (i < axis)
+      coerced_shape[0] *= in.shape()[i];
+    else
+      coerced_shape[1] *= in.shape()[i];
   }
 
+  Tensor in_reshaped = Reshape(in, coerced_shape);
+  out->Reshape(coerced_shape);
+
+  do {
+    TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
+      Tensor &outRef = *out;
+      out->device()->Exec(
+          [in, outRef, fdout](Context *ctx) mutable {
+            SoftMaxBackward<DType, Lang>(in, &outRef, fdout, ctx);
+          },
+          {in.block(), fdout.block()}, {out->block()}, "SoftmaxBackward");
+    });
+  } while (0);
+
+  out->Reshape(original_shape);
+}
+
+Tensor SoftMaxBackward(const Tensor &in, int axis, const Tensor &fdout) {
+  Tensor ret(in.shape(), in.device(), in.data_type());
+  auto *retptr = &ret;
+  SoftMaxBackward(in, retptr, axis, fdout);
+  return ret;
+}
+
+#define EltwiseBinaryTensorFn(fn, lhs, rhs, ret)                           \
+  do {                                                                     \
+    TYPE_LANG_SWITCH(lhs.data_type(), DType, lhs.device()->lang(), Lang, { \
+      CHECK_EQ(sizeof(DType), SizeOf(rhs.data_type()));                    \
+      Tensor &retRef = *ret;                                               \
+      ret->device()->Exec(                                                 \
+          [lhs, rhs, retRef](Context *ctx) mutable {                       \
+            fn<DType, Lang>(lhs, rhs, &retRef, ctx);                       \
+          },                                                               \
+          {lhs.block(), rhs.block()}, {ret->block()}, #fn);                \
+    });                                                                    \
+  } while (0)
+
+#define GenBinaryTensorFn(op, fn)                                           \
+  Tensor op(const Tensor &lhs, const Tensor &rhs) {                         \
+    if (lhs.shape() != rhs.shape()) {                                       \
+      if (lhs.data_type() == kFloat32 && rhs.data_type() == kFloat32) {     \
+        auto lhs_ = Broadcast(lhs, rhs.shape());                            \
+        auto rhs_ = Broadcast(rhs, lhs.shape());                            \
+        Tensor ret(lhs_.shape(), lhs.device(), lhs.data_type());            \
+        fn(lhs_, rhs_, &ret);                                               \
+        return ret;                                                         \
+      } else {                                                              \
+        /* lhs tensor and rhs tensor are not both in float, cast to float */\
+        Tensor tmp_lhs = lhs.Clone().AsType(kFloat32);                      \
+        Tensor tmp_rhs = rhs.Clone().AsType(kFloat32);                      \
+        tmp_lhs = Broadcast(tmp_lhs, tmp_rhs.shape());                      \
+        tmp_rhs = Broadcast(tmp_rhs, tmp_lhs.shape());                      \
+        Tensor ret(tmp_lhs.shape(), tmp_lhs.device(), tmp_lhs.data_type()); \
+        fn(tmp_lhs, tmp_rhs, &ret);                                         \
+        /* if lhs and rhs are both int, cast back to int */                 \
+        if (lhs.data_type() == kInt && rhs.data_type() == kInt)             \
+          return ret.Clone().AsType(kInt);                                  \
+        return ret;                                                         \
+      }                                                                     \
+    } else {                                                                \
+      if (lhs.data_type() == kFloat32 && rhs.data_type() == kFloat32) {     \
+        Tensor ret(lhs.shape(), lhs.device(), lhs.data_type());             \
+        fn(lhs, rhs, &ret);                                                 \
+        return ret;                                                         \
+      } else {                                                              \
+        /* lhs tensor and rhs tensor are not both in float, cast to float */\
+        Tensor tmp_lhs = lhs.Clone().AsType(kFloat32);                      \
+        Tensor tmp_rhs = rhs.Clone().AsType(kFloat32);                      \
+        Tensor ret(tmp_lhs.shape(), tmp_lhs.device(), tmp_lhs.data_type()); \
+        fn(tmp_lhs, tmp_rhs, &ret);                                         \
+        /* if lhs and rhs are both int, cast back to int */                 \
+        if (lhs.data_type() == kInt && rhs.data_type() == kInt)             \
+          return ret.Clone().AsType(kInt);                                  \
+        return ret;                                                         \
+      }                                                                     \
+    }                                                                       \
+  }                                                                         \
+  void fn(const Tensor &lhs, const Tensor &rhs, Tensor *ret) {              \
+    CHECK_EQ(lhs.device(), ret->device());                                  \
+    CHECK_EQ(rhs.device(), ret->device());                                  \
+    if (lhs.shape() != rhs.shape()) {                                       \
+      auto lhs_ = Broadcast(lhs, rhs.shape());                              \
+      auto rhs_ = Broadcast(rhs, lhs.shape());                              \
+      CHECK(lhs_.shape() == ret->shape());                                  \
+      EltwiseBinaryTensorFn(fn, lhs_, rhs_, ret);                           \
+    } else {                                                                \
+      CHECK(lhs.shape() == ret->shape());                                   \
+      EltwiseBinaryTensorFn(fn, lhs, rhs, ret);                             \
+    }                                                                       \
+  }
+
+// broadcasting operations:
+// https://github.com/onnx/onnx/blob/master/docs/Broadcasting.md
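+// e.g., adding a {4, 3} tensor to a {3} tensor broadcasts the latter to
+// {4, 3} before the elementwise Add (illustrative shapes, following the
+// ONNX broadcasting rules linked above)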
 GenBinaryTensorFn(operator+, Add);
 GenBinaryTensorFn(operator-, Sub);
 GenBinaryTensorFn(operator*, EltwiseMult);
@@ -546,29 +1033,50 @@
 GenBinaryTensorFn(operator<=, LE);
 GenBinaryTensorFn(operator>, GT);
 GenBinaryTensorFn(operator>=, GE);
+GenBinaryTensorFn(operator==, EQ);
+GenBinaryTensorFn(ReLUBackward, ReLUBackward);
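+// e.g., with the operators above, `a == b` yields a float tensor of 1.f/0.f
+// flags, and ReLUBackward(dy, x) zeroes the entries of dy where x <= 0
+// (illustrative usage)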
+
 #define EltwiseTensorScalarFn(fn, t, x, ret)                            \
   do {                                                                  \
     TYPE_LANG_SWITCH(t.data_type(), DType, t.device()->lang(), Lang, {  \
-      static_assert(std::is_same<SType, DType>::value,                  \
-                    "The Scalar type must match the Tensor data type"); \
-      ret->device()->Exec([t, x, ret](Context * ctx) {                  \
-        fn<DType, Lang>(t.Size(), t.block(), x, ret->block(), ctx);     \
-      }, {t.block()}, {ret->block()});                                  \
+      Tensor &retRef = *ret;                                            \
+      ret->device()->Exec(                                              \
+          [t, x, retRef](Context *ctx) mutable {                        \
+            fn<DType, Lang>(t, x, &retRef, ctx);                        \
+          },                                                            \
+          {t.block()}, {ret->block()}, #fn);                            \
     });                                                                 \
   } while (0)
 
-#define GenTensorScalarFn(op, fn)                             \
-  template <typename SType>                                   \
-  Tensor op(const Tensor &in, const SType x) {                \
-    Tensor ret(in.shape(), in.device(), in.data_type());      \
-    fn(in, x, &ret);                                          \
-    return ret;                                               \
-  }                                                           \
-  template <typename SType>                                   \
-  void fn(const Tensor &in, const SType x, Tensor *ret) {     \
-    EltwiseTensorScalarFn(fn, in, x, ret);                    \
-  }                                                           \
-  template Tensor op <float>(const Tensor &in, const float x); \
+#define GenTensorScalarFn(op, fn)                                          \
+  template <typename SType>                                                \
+  Tensor op(const Tensor &in, const SType x) {                             \
+    if (in.data_type() == kFloat32 && std::is_same<SType, float>::value) { \
+      Tensor ret(in.shape(), in.device(), in.data_type());                 \
+      fn(in, x, &ret);                                                     \
+      return ret;                                                          \
+    } else if (in.data_type() == kFloat32) {                               \
+      Tensor ret(in.shape(), in.device(), in.data_type());                 \
+      float tmp_x = x;                                                     \
+      fn(in, tmp_x, &ret);                                                 \
+      return ret;                                                          \
+    } else {                                                               \
+      /* tensor and scalar are not both in float, cast to float */         \
+      Tensor tmp_in = in.Clone().AsType(kFloat32);                         \
+      float tmp_x = x;                                                     \
+      Tensor ret(tmp_in.shape(), tmp_in.device(), tmp_in.data_type());     \
+      fn(tmp_in, tmp_x, &ret);                                             \
+      /* if tensor and scalar are both int, cast back to int */            \
+      if (in.data_type() == kInt && std::is_same<SType, int>::value)       \
+        return ret.Clone().AsType(kInt);                                   \
+      return ret;                                                          \
+    }                                                                      \
+  }                                                                        \
+  template <typename SType>                                                \
+  void fn(const Tensor &in, const SType x, Tensor *ret) {                  \
+    EltwiseTensorScalarFn(fn, in, x, ret);                                 \
+  }                                                                        \
+  template Tensor op<float>(const Tensor &in, const float x);              \
   template void fn<float>(const Tensor &in, const float x, Tensor *ret)
 
 GenTensorScalarFn(operator+, Add);
@@ -580,6 +1088,8 @@
 GenTensorScalarFn(operator<=, LE);
 GenTensorScalarFn(operator>, GT);
 GenTensorScalarFn(operator>=, GE);
+GenTensorScalarFn(operator==, EQ);
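+// e.g., following the casts above, int_tensor + 2 computes in float and
+// casts the result back to kInt, while int_tensor + 2.5f returns a float
+// tensor (illustrative usage)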
+
 template <typename SType>
 Tensor Div(const SType alpha, const Tensor &in) {
   Tensor out(in.shape(), in.device(), in.data_type());
@@ -594,9 +1104,12 @@
   CHECK(in.shape() == out->shape());
   TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
     // TODO(wangwei) type cast SType to DType;
-    in.device()->Exec([alpha, in, out](Context *ctx) {
-      Div<DType, Lang>(in.Size(), alpha, in.block(), out->block(), ctx);
-    }, {in.block()}, {out->block()});
+    Tensor &outRef = *out;
+    in.device()->Exec(
+        [alpha, in, outRef](Context *ctx) mutable {
+          Div<DType, Lang>(alpha, in, &outRef, ctx);
+        },
+        {in.block()}, {out->block()}, "Div");
   });
 }
 template void Div<float>(const float, const Tensor &, Tensor *);
@@ -618,9 +1131,10 @@
   // }
   if (axis == 0) {
     return Sum(M, 0) / (1.0f * M.shape(0));
-  } else {
-    CHECK_EQ(axis, 1);
+  } else if (axis == 1) {
     return Sum(M, 1) / (1.0f * M.shape(1));
+  } else {
+    LOG(FATAL) << "Not currently support Sum over axis = " << axis;
   }
 }
 // TODO(wangwei) consider async exec
@@ -630,11 +1144,14 @@
   Tensor one(in.shape(), in.device(), in.data_type());
   one.SetValue(1.0f);
   TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
-    one.device()->Exec([in, one, &s](Context *ctx) {
-      DType ret = DType(0);
-      Dot<DType, Lang>(in.Size(), in.block(), one.block(), &ret, ctx);
-      s = ret;
-    }, {in.block(), one.block()}, {});
+    one.device()->Exec(
+        // cannot use this sum function in computational graph
+        [in, one, &s](Context *ctx) mutable {
+          DType ret = DType(0);
+          Dot<DType, Lang>(in, one, &ret, ctx);
+          s = ret;
+        },
+        {in.block(), one.block()}, {}, "Sum");
   });
   return s;
 }
@@ -646,56 +1163,48 @@
     return out;
   } else {
     CHECK_EQ(axis, 1) << "Not support Sum over axis = " << axis;
-    Tensor out(Shape{M.shape(0)}, M.device(), M.data_type());
+    Tensor out = Tensor(Shape{M.shape(0)}, M.device(), M.data_type());
     SumColumns(M, &out);
     return out;
   }
 }
 
-Tensor SoftMax(const Tensor &in) {
-  Tensor out(in.shape(), in.device(), in.data_type());
-  SoftMax(in, &out);
+Tensor SumAll(const Tensor &in) {
+  Tensor out({(size_t)1}, in.device(), in.data_type());
+  Tensor one(in.shape(), in.device(), in.data_type());
+  one.SetValue(1.0f);
+  TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
+    one.device()->Exec(
+        [in, one, out](Context *ctx) mutable {
+          Dot<DType, Lang>(in, one, &out, ctx);
+        },
+        {in.block(), one.block()}, {out.block()}, "SumAll");
+  });
   return out;
 }
 
 Tensor RowMax(const Tensor &in) {
   Tensor ret({in.shape(0)}, in.device(), in.data_type());
   TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
-    in.device()->Exec([in, ret](Context *ctx) {
-      size_t nrow = 1;
-      if (in.nDim() > 1) nrow = in.shape(0);
-      size_t ncol = in.Size() / nrow;
-      RowMax<DType, Lang>(nrow, ncol, in.block(), ret.block(), ctx);
-    }, {in.block()}, {ret.block()});
+    in.device()->Exec(
+        [in, ret](Context *ctx) mutable {
+          RowMax<DType, Lang>(in, &ret, ctx);
+        },
+        {in.block()}, {ret.block()}, "RowMax");
   });
   return ret;
 }
 
-void SoftMax(const Tensor &in, Tensor *out) {
-  CHECK_LE(in.nDim(), 2u);
-  out->CopyData(in);
-  size_t nrow = 1, ncol = in.Size(), size = ncol;
-  if (in.nDim() == 2u) {
-    nrow = in.shape(0);
-    ncol = size / nrow;
-    out->Reshape(Shape{nrow, ncol});
-  }
-  Tensor tmp = RowMax(*out);
-  SubColumn(tmp, out);
-  Exp(*out, out);
-
-  SumColumns(*out, &tmp);
-  DivColumn(tmp, out);
-  out->Reshape(in.shape());
-}
-
 void AddColumn(const Tensor &v, Tensor *M) { AddColumn(1, 1, v, M); }
 /// Add column 'v' onto each column of matrix M;
 template <typename SType>
 void AddColumn(const SType alpha, const SType beta, const Tensor &v,
                Tensor *M) {
   if (M->transpose()) {
-    Tensor X = M->T();
+    Tensor X(Transpose(*M));
     AddRow(v, &X);
   } else {
     CHECK_EQ(M->nDim(), 2u);
@@ -705,20 +1214,20 @@
 
     Tensor one(Shape{1, nb_col}, M->device(), M->data_type());
     one.SetValue(1.0f);  // TODO(wangwei) cast type
-    Tensor vmat = Reshape(v, Shape{nb_row, 1});
+    Tensor vmat(Reshape(v, Shape{nb_row, 1}));
     Mult(alpha, vmat, one, beta, M);
   }
 }
-template
-void AddColumn(const float alpha, const float beta, const Tensor &v, Tensor *M);
+template void AddColumn(const float alpha, const float beta, const Tensor &v,
+                        Tensor *M);
 
 void AddRow(const Tensor &v, Tensor *M) { AddRow(1, 1, v, M); }
 
-/// Sub column 'v' by each column of matrix M; write results into 'out'
+/// Add row 'v' onto each row of matrix M; write results into M
 template <typename SType>
 void AddRow(const SType alpha, const SType beta, const Tensor &v, Tensor *M) {
   if (M->transpose()) {
-    Tensor X = M->T();
+    Tensor X(Transpose(*M));
     AddColumn(v, &X);
   } else {
     CHECK_EQ(M->nDim(), 2u);
@@ -728,7 +1237,7 @@
 
     Tensor one(Shape{nb_row, 1}, M->device(), M->data_type());
     one.SetValue(1.0f);
-    Tensor vmat = Reshape(v, Shape{1, nb_col});
+    Tensor vmat(Reshape(v, Shape{1, nb_col}));
     Mult(alpha, one, vmat, beta, M);
   }
 }
@@ -742,23 +1251,24 @@
   MultColumn(inv, M);
 }
 
-Tensor ConcatOn(const vector<Tensor> &in, int axis) {
+Tensor ConcatOn(const std::vector<Tensor> &in, int axis) {
   vector<Tensor> tmp;
   Shape out_shape = in[0].shape();
   size_t dim = in[0].shape().size();
-  CHECK_GE(dim, 2u) << " Only work for tensor of dim >=2 ";
+  // CHECK_GE(dim, 2u) << " Only work for tensor of dim >=2 ";
   size_t size = in[0].Size() / in[0].shape(axis);
   size_t new_size = 0u;
-  for (const auto& t: in) {
+  for (const auto &t : in) {
     CHECK_EQ(dim, t.shape().size()) << "All tensors should have the same dim";
-    CHECK_EQ(size, t.Size() / t.shape(axis)) << "The size of all axis should "
-      <<" be the same except the concatenated axis";
+    CHECK_EQ(size, t.Size() / t.shape(axis))
+        << "The sizes of all axes should be the same "
+        << "except the concatenated axis";
     new_size += t.shape(axis);
   }
   out_shape[axis] = new_size;
   if (axis == 0) {
     size_t nrow = 0;
-    for (const auto& t: in) {
+    for (const auto &t : in) {
       nrow += t.shape(0);
       tmp.push_back(Reshape(t, {t.shape(0), t.Size() / t.shape(0)}));
     }
@@ -766,10 +1276,9 @@
     ret.Reshape(out_shape);
     return ret;
   } else {
-    for (const auto& t: in) {
+    for (const auto &t : in) {
       size_t nrow = 1;
-      for (int i = 0; i < axis; i++)
-        nrow *= t.shape(i);
+      for (int i = 0; i < axis; i++) nrow *= t.shape(i);
       tmp.push_back(Reshape(t, {nrow, t.Size() / nrow}));
     }
     auto ret = ConcatenateColumns(tmp);
@@ -798,9 +1307,7 @@
   }
   return out;
 }
-Tensor ConcatRows(const vector<Tensor> &in) {
-  return ConcatenateRows(in);
-}
+Tensor ConcatRows(const vector<Tensor> &in) { return ConcatenateRows(in); }
 // TODO(wangwei) add a copypatch function for improve the efficiency on GPU.
 Tensor ConcatenateColumns(const vector<Tensor> &in) {
   size_t nrow = 0, ncol = 0;
@@ -841,22 +1348,21 @@
   return out;
 }
 
-
-Tensor SliceOn(const Tensor&in, const size_t start, const size_t end, int axis) {
+Tensor SliceOn(const Tensor &in, const size_t start, const size_t end,
+               int axis) {
   Shape out_shape = in.shape();
   out_shape[axis] = end - start;
   if (axis == 0) {
     auto ret = SliceRows(Reshape(in, {in.shape(0), in.Size() / in.shape(0)}),
-        start, end);
+                         start, end);
     ret.Reshape(out_shape);
     return ret;
   } else {
     size_t nrow = 1;
-    for (int i = 0; i < axis; i++)
-      nrow *= in.shape(i);
+    for (int i = 0; i < axis; i++) nrow *= in.shape(i);
     auto suffix = in.Size() / nrow / in.shape(axis);
     auto ret = SliceColumns(Reshape(in, {nrow, in.Size() / nrow}),
-        start * suffix, end * suffix);
+                            start * suffix, end * suffix);
     ret.Reshape(out_shape);
     return ret;
   }
@@ -884,7 +1390,6 @@
   return CopyColumns(in, start, end);
 }
 
-
 /// Divide row 'v' by each row of matrix M; write results into 'out'
 void DivRow(const Tensor &v, Tensor *M) {
   Tensor inv;
@@ -894,31 +1399,35 @@
 
 /// Multiply column 'v' and each column of matrix M; write results into 'out'
 void MultColumn(const Tensor &v, Tensor *M) {
-  CHECK(!M->transpose()) << "Not supported yet";
+  // CHECK(!M->transpose()) << "Not supported yet";
   CHECK_EQ(M->nDim(), 2u);
   // CHECK_EQ(v.nDim(), 1u); (chonho) shape of v is 2-element tuple
   CHECK_EQ(v.Size(), M->shape(0));
   CheckDataTypeAndLang(*M, v);
   TYPE_LANG_SWITCH(v.data_type(), DType, v.device()->lang(), Lang, {
-    v.device()->Exec([M, v](Context *ctx) {
-      DGMM<DType, Lang>(false, M->shape(0), M->shape(1), M->block(), v.block(),
-                        M->block(), ctx);
-    }, {M->block(), v.block()}, {M->block()});
+    Tensor &MRef = *M;
+    v.device()->Exec(
+        [MRef, v](Context *ctx) mutable {
+          DGMM<DType, Lang>(false, MRef, v, &MRef, ctx);
+        },
+        {M->block(), v.block()}, {M->block()}, "MultColumn");
   });
 }
 
 /// Multiply row 'v' with each row of matrix M; write results into 'out'
 void MultRow(const Tensor &v, Tensor *M) {
-  CHECK(!M->transpose()) << "Not supported yet";
+  // CHECK(!M->transpose()) << "Not supported yet";
   CHECK_EQ(M->nDim(), 2u);
   // CHECK_EQ(v.nDim(), 1u); (chonho) shape of v is 2-element tuple
   CHECK_EQ(v.Size(), M->shape(1));
   CheckDataTypeAndLang(*M, v);
   TYPE_LANG_SWITCH(v.data_type(), DType, v.device()->lang(), Lang, {
-    v.device()->Exec([M, v](Context *ctx) {
-      DGMM<DType, Lang>(true, M->shape(0), M->shape(1), M->block(), v.block(),
-                        M->block(), ctx);
-    }, {M->block(), v.block()}, {M->block()});
+    Tensor &MRef = *M;
+    v.device()->Exec(
+        [MRef, v](Context *ctx) mutable {
+          DGMM<DType, Lang>(true, MRef, v, &MRef, ctx);
+        },
+        {M->block(), v.block()}, {M->block()}, "MultRow");
   });
 }
 
@@ -928,7 +1437,7 @@
 
 void SumColumns(const Tensor &M, Tensor *v) {
   if (M.transpose()) {
-    Tensor X = M.T();
+    Tensor X = Transpose(M);
     SumRows(X, v);
   } else {
     CHECK_EQ(M.nDim(), 2u);
@@ -943,7 +1452,7 @@
 }
 void SumRows(const Tensor &M, Tensor *v) {
   if (M.transpose()) {
-    Tensor X = M.T();
+    Tensor X = Transpose(M);
     SumColumns(X, v);
   } else {
     CHECK_EQ(M.nDim(), 2u);
@@ -953,7 +1462,7 @@
 
     Tensor one(Shape{nb_row}, M.device(), M.data_type());
     one.SetValue(1.0f);  // TODO(wangwei) cast type
-    Tensor X = M.T();
+    Tensor X = Transpose(M);
     Mult(X, one, v);
   }
 }
@@ -962,9 +1471,12 @@
 void Bernoulli(const SType p, Tensor *out) {
   TYPE_LANG_SWITCH(out->data_type(), DType, out->device()->lang(), Lang, {
     auto prob = TypeCast<SType, DType>(p);
-    out->device()->Exec([prob, out](Context *ctx) {
-      Bernoulli<DType, Lang>(out->Size(), prob, out->block(), ctx);
-    }, {}, {out->block()}, true);
+    Tensor &outRef = *out;
+    out->device()->Exec(
+        [prob, outRef](Context *ctx) mutable {
+          Bernoulli<DType, Lang>(prob, &outRef, ctx);
+        },
+        {}, {out->block()}, "Bernoulli", true);
   });
 }
 
@@ -975,9 +1487,12 @@
   TYPE_LANG_SWITCH(out->data_type(), DType, out->device()->lang(), Lang, {
     auto l = TypeCast<SType, DType>(low);
     auto h = TypeCast<SType, DType>(high);
-    out->device()->Exec([l, h, out](Context *ctx) {
-      Uniform<DType, Lang>(out->Size(), l, h, out->block(), ctx);
-    }, {}, {out->block()}, true);
+    Tensor &outRef = *out;
+    out->device()->Exec(
+        [l, h, outRef](Context *ctx) mutable {
+          Uniform<DType, Lang>(l, h, &outRef, ctx);
+        },
+        {}, {out->block()}, "Uniform", true);
   });
 }
 
@@ -988,9 +1503,12 @@
   TYPE_LANG_SWITCH(out->data_type(), DType, out->device()->lang(), Lang, {
     auto m = TypeCast<SType, DType>(mean);
     auto s = TypeCast<SType, DType>(std);
-    out->device()->Exec([m, s, out](Context *ctx) {
-      Gaussian<DType, Lang>(out->Size(), m, s, out->block(), ctx);
-    }, {}, {out->block()}, true);
+    Tensor &outRef = *out;
+    out->device()->Exec(
+        [m, s, outRef](Context *ctx) mutable {
+          Gaussian<DType, Lang>(m, s, &outRef, ctx);
+        },
+        {}, {out->block()}, "Gaussian", true);
   });
 }
 template void Gaussian<float>(const float mean, const float std, Tensor *out);
@@ -1001,21 +1519,48 @@
 void Axpy(const SType alpha, const Tensor &in, Tensor *out) {
   TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
     auto a = TypeCast<SType, DType>(alpha);
-    out->device()->Exec([a, in, out](Context *ctx) {
-      Axpy<DType, Lang>(in.Size(), a, in.block(), out->block(), ctx);
-    }, {in.block(), out->block()}, {out->block()});
+    Tensor &outRef = *out;
+    Tensor fake(*out);
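+    // 'fake' is captured by the lambda below only to hold an extra reference
+    // count on the output tensor (same trick as pFake later in this file)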
+    out->device()->Exec(
+        [a, in, outRef, fake](Context *ctx) mutable {
+          Axpy<DType, Lang>(a, in, &outRef, ctx);
+        },
+        {in.block(), out->block()}, {out->block()}, "Axpy");
   });
 }
 
-template
-void Axpy<float>(const float alpha, const Tensor &in, Tensor *out);
+template void Axpy<float>(const float alpha, const Tensor &in, Tensor *out);
+
+void Axpy(const Tensor &alpha, const Tensor &in, Tensor *out) {
+  TYPE_SWITCH(alpha.data_type(), SType, {
+    TYPE_LANG_SWITCH(in.data_type(), DType, in.device()->lang(), Lang, {
+      Tensor fake(*out);
+      Tensor &outRef = *out;
+      out->device()->Exec(
+          [alpha, in, outRef, fake](Context *ctx) mutable {
+            Tensor alphaHost = alpha.Clone(defaultDevice);
+            // synchronize the stream to wait for the data transfer to complete
+            alpha.device()->Sync();
+            const SType value =
+                static_cast<const SType *>(alphaHost.block()->data())[0];
+            auto a = TypeCast<SType, DType>(value);
+            Axpy<DType, Lang>(a, in, &outRef, ctx);
+          },
+          {alpha.block(), in.block(), out->block()}, {out->block()}, "Axpy");
+    });
+  });
+}
 
 Tensor Mult(const Tensor &A, const Tensor &B) {
-  Shape s;
-  s.push_back(A.shape(0));
-  if (B.nDim() == 2) s.push_back(B.shape(1));
+  auto A_ = Broadcast(A, B.shape(), 2);
+  auto B_ = Broadcast(B, A.shape(), 2);
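+  // e.g., A of shape {5, 2, 3} times B of shape {3, 4}: B is broadcast to
+  // {5, 3, 4} and the result has shape {5, 2, 4} (illustrative shapes,
+  // assuming Broadcast leaves the trailing two matrix dims untouched)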
+
+  Shape s = A_.shape();
+  s.pop_back();
+  s.push_back(B.shape(B.nDim() - 1));
+
   Tensor out(s, A.device(), A.data_type());
-  Mult(A, B, &out);
+  Mult(A_, B_, &out);
   return out;
 }
 
@@ -1026,33 +1571,93 @@
 template <typename SType>
 void Mult(const SType alpha, const Tensor &A, const Tensor &B, const SType beta,
           Tensor *C) {
-  CHECK_EQ(A.shape().size(), 2u);
+  Tensor fakeC;
+  vector<Block *> read_blocks = {A.block(), B.block()};
+  if (beta) {
+    fakeC = *C;
+    read_blocks.push_back(C->block());
+  }
   if (B.nDim() == 1u) {
+    CHECK_EQ(A.shape().size(), 2u);
     TYPE_LANG_SWITCH(A.data_type(), DType, A.device()->lang(), Lang, {
       auto a = TypeCast<SType, DType>(alpha);
       auto b = TypeCast<SType, DType>(beta);
-      C->device()->Exec([a, A, b, B, C](Context *ctx) {
-        GEMV<DType, Lang>(A.transpose(), A.shape(0), A.shape(1), a, A.block(),
-                          B.block(), b, C->block(), ctx);
-      }, {A.block(), B.block()}, {C->block()});
+      Tensor &CRef = *C;
+      C->device()->Exec(
+          [a, A, b, B, CRef, fakeC](Context *ctx) mutable {
+            GEMV<DType, Lang>(a, A, B, b, &CRef, ctx);
+          },
+          read_blocks, {C->block()}, "GEMV");
     });
-  } else {
+  } else if (B.nDim() == 2u) {
+    CHECK_EQ(A.shape().size(), 2u);
     CHECK(!C->transpose());
     TYPE_LANG_SWITCH(A.data_type(), DType, A.device()->lang(), Lang, {
       auto a = TypeCast<SType, DType>(alpha);
       auto b = TypeCast<SType, DType>(beta);
-      C->device()->Exec([a, A, b, B, C](Context *ctx) {
-        GEMM<DType, Lang>(A.transpose(), B.transpose(), A.shape(0), B.shape(1),
-                          A.shape(1), a, A.block(), B.block(), b, C->block(),
-                          ctx);
-      }, {A.block(), B.block()}, {C->block()});
+      Tensor &CRef = *C;
+      C->device()->Exec(
+          [a, A, b, B, CRef, fakeC](Context *ctx) mutable {
+            GEMM<DType, Lang>(a, A, B, b, &CRef, ctx);
+          },
+          read_blocks, {C->block()}, "GEMM");
     });
+  } else if (B.nDim() == 3u || B.nDim() == 4u) {
+    CHECK_EQ(A.shape().size(), B.shape().size());
+    CHECK(!C->transpose());
+    TYPE_LANG_SWITCH(A.data_type(), DType, A.device()->lang(), Lang, {
+      auto a = TypeCast<SType, DType>(alpha);
+      auto b = TypeCast<SType, DType>(beta);
+
+      Tensor A_tmp;
+      Tensor B_tmp;
+
+      if (A.transpose() || A.broadcasted()) {
+        A_tmp = Tensor(A.shape(), A.device(), A.data_type());
+        singa::Transform(A, &A_tmp);
+      } else {
+        A_tmp = A;
+      }
+
+      if (B.transpose() || B.broadcasted()) {
+        B_tmp = Tensor(B.shape(), B.device(), B.data_type());
+        singa::Transform(B, &B_tmp);
+      } else {
+        B_tmp = B;
+      }
+
+      // batch GEMM should have same batch size
+      CHECK_EQ(A_tmp.shape(0), B_tmp.shape(0));
+      if (B.nDim() == 4u) CHECK_EQ(A_tmp.shape(1), B_tmp.shape(1));
+
+      Tensor &CRef = *C;
+      C->device()->Exec(
+          [a, A_tmp, b, B_tmp, CRef, fakeC](Context *ctx) mutable {
+            GEMMBatched<DType, Lang>(a, A_tmp, B_tmp, b, &CRef, ctx);
+          },
+          read_blocks, {C->block()}, "GEMMBatched");
+    });
+  } else {
+    LOG(FATAL) << "Un-supported tensor dimentions " << A.nDim() << "d matmul "
+               << B.nDim() << "d\n";
   }
 }
 
 // ************************
 // Misc.
 // ************************
+Tensor CrossEntropyFwd(const Tensor &p, const Tensor &t) {
+  Tensor loss({p.shape(0)}, p.device(), p.data_type());
+  ComputeCrossEntropy(p, t, &loss);
+  return loss;
+}
+
+Tensor SoftmaxCrossEntropyBwd(const Tensor &p, const Tensor &t) {
+  Tensor g = p.Clone();
+  SoftmaxCrossEntropyBwd(t, &g);
+  return g;
+}
+
 void ComputeCrossEntropy(const Tensor &p, const Tensor &t, Tensor *loss) {
   CHECK_LE(p.nDim(), 2u);
   CHECK_LE(t.nDim(), 2u);
@@ -1060,11 +1665,15 @@
   if (p.nDim() == 2u) batchsize = p.shape(0);
   size_t dim = p.Size() / batchsize;
   TYPE_LANG_SWITCH(p.data_type(), DType, p.device()->lang(), Lang, {
-    p.device()->Exec([batchsize, dim, t, p, loss](Context *ctx) {
-        bool int_target = t.Size() == batchsize;
-        ComputeCrossEntropy<DType, Lang>(int_target, batchsize, dim, p.block(),
-            t.block(), loss->block(), ctx);
-    }, {p.block(), t.block()}, {loss->block()});
+    Tensor &lossRef = *loss;
+    p.device()->Exec(
+        [batchsize, dim, t, p, lossRef](Context *ctx) mutable {
+          bool int_target = t.Size() == batchsize;
+          ComputeCrossEntropy<DType, Lang>(int_target, batchsize, dim,
+                                           p.block(), t.block(),
+                                           lossRef.block(), ctx);
+        },
+        {p.block(), t.block()}, {loss->block()}, "ComputeCrossEntropy");
   });
 }
 
@@ -1075,12 +1684,57 @@
   if (p->nDim() == 2u) batchsize = p->shape(0);
   size_t dim = p->Size() / batchsize;
   TYPE_LANG_SWITCH(p->data_type(), DType, p->device()->lang(), Lang, {
-    p->device()->Exec([batchsize, dim, t, p](Context *ctx) {
-      bool int_target = t.Size() == batchsize;
-      SoftmaxCrossEntropyBwd<DType, Lang>(int_target, batchsize, dim,
-          p->block(), t.block(), p->block(), ctx);
-    }, {p->block(), t.block()}, {p->block()});
+    Tensor &pRef = *p;
+    Tensor pFake(*p);  // just add a ref count
+    p->device()->Exec(
+        [batchsize, dim, t, pRef, pFake](Context *ctx) mutable {
+          bool int_target = t.Size() == batchsize;
+          SoftmaxCrossEntropyBwd<DType, Lang>(int_target, batchsize, dim,
+                                              pRef.block(), t.block(),
+                                              pRef.block(), ctx);
+        },
+        {p->block(), t.block()}, {p->block()}, "SoftmaxCrossEntropyBackward");
   });
 }
 
+Tensor &Tensor::Contiguous() {
+  if (transpose()) {
+    Tensor t(shape_, device_, data_type_);
+    singa::Transform(*this, &t);
+    std::swap(t.block_, block_);
+  }
+  return *this;
+}
+
+Tensor Contiguous(const Tensor &in) {
+  Tensor out(in);
+  return out.Contiguous();
+}
+
+// if tensor is not transposed yet, we change the shape and generate new stride
+// if tensor is already transposed, we reallocate the memory and generate stride
+Tensor &Tensor::Reshape(const Shape &shape) {
+  // Check that the original volume matches the new one;
+  // do not use Product(shape_) due to stride 0 from broadcasting.
+  CHECK_EQ(Product(shape), Size());
+  if (transpose()) {
+    Tensor t(shape_, device_, data_type_);
+    singa::Transform(*this, &t);
+    std::swap(t.block_, block_);
+    shape_ = shape;
+  } else {
+    shape_ = shape;
+  }
+  generate_stride();
+  // printf("reshape loc c\n");
+  return *this;
+}
+
+Tensor Reshape(const Tensor &in, const Shape &s) {
+  // printf("reshape loc a\n");
+  Tensor out(in);
+  return out.Reshape(s);
+}
+
 }  // namespace singa
diff --git a/src/core/tensor/tensor_math.h b/src/core/tensor/tensor_math.h
index 6d42211..3236e7c 100644
--- a/src/core/tensor/tensor_math.h
+++ b/src/core/tensor/tensor_math.h
@@ -17,8 +17,16 @@
  */
 #ifndef SINGA_CORE_MATH_H_
 #define SINGA_CORE_MATH_H_
+#include <algorithm>
+#include <iostream>
+#include <iterator>
+#include <sstream>
+#include <string>
 #include <type_traits>
+#include <vector>
+
 #include "singa/core/common.h"
+#include "singa/core/tensor.h"
 #include "singa/utils/logging.h"
 
 namespace singa {
@@ -29,223 +37,309 @@
 /// device programming language, e.g., lang::Cpp, lang::Cuda
 ///
 /// TODO(wangwei) Clean the functions to make the function APIs consistent:
-/// 1. All function names should be like XxxYyy or XY, i.e., capitablize the
-/// first
-///    letter.
+/// 1. All function names should be like XxxYyy or XY, i.e., capitalize the
+/// first letter.
 /// 2. Order functions based on function name in alphabetical order.
-/// 3. Function arguments order is [const basic type] [const Block] [mutable
-/// Block].
+/// 3. Function arguments order is [const basic type] [const Tensor] [mutable
+/// Tensor].
 /// 4. Function argument names, use 'num' for total number of elements in
-///    elementwise operations; use 'in1' 'in2' for in blocks; use 'out' for
-///    output block or value. With exceptions for some functions, e.g.,
-///      Scale(const float alpha, const Block* in, Block* out);
+///    elementwise operations; use 'in1' 'in2' for in Tensors; use 'out' for
+///    output Tensor or value. With exceptions for some functions, e.g.,
+///      Scale(const float alpha, const Tensor &in, Tensor* out);
 ///    For such cases, use x, v, alpha, etc for scalar types.
 ///    For blas functions, follow the blas style for argument names.
 ///    Use 'M' and 'v' for matrix and vector tensors in functions involving both
 ///    matrix and vectors.
-/// 5. For Block argument xxx, name its raw pointer as xxxPtr.
+/// 5. For Tensor argument xxx, name its raw pointer as xxxPtr.
 /// 6. Pass the 'cudaStream_t s' to every function in math_kernel.h
 /// 7. Use size_t for the number of elements, rows or columns.
-/// 8. Use the same name for the Tensor and Block level math functions.
+/// 8. Use the same name for the Tensor-level and backend math functions.
+
+const std::string vec2str(const std::vector<int> &vec) {
+  std::ostringstream vts;
+  if (!vec.empty()) {
+    // Convert all but the last element to avoid a trailing ","
+    std::copy(vec.begin(), vec.end() - 1,
+              std::ostream_iterator<int>(vts, ", "));
+    // Append the last element without a delimiter
+    vts << vec.back();
+  }
+  return vts.str();
+}
+
+const std::string vec2str(const std::vector<size_t> &vec) {
+  std::ostringstream vts;
+  if (!vec.empty()) {
+    // Convert all but the last element to avoid a trailing ","
+    std::copy(vec.begin(), vec.end() - 1,
+              std::ostream_iterator<size_t>(vts, ", "));
+    // Append the last element without a delimiter
+    vts << vec.back();
+  }
+  return vts.str();
+}
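+// e.g., vec2str(std::vector<int>{1, 2, 3}) returns "1, 2, 3" given the
+// separator handling above (illustrative usage)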
 
 // **************************************
-// Element-wise functions
-// **************************************
+// Element-wise functions
+// Cpp tensors support multi-dimensional broadcasting;
+// Cuda supports unidirectional broadcasting,
+// i.e., the lhs and the output have the same shape
+// **************************************
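+// e.g., under multi-dimensional broadcasting a {4, 1} tensor and a {1, 3}
+// tensor combine elementwise into a {4, 3} result; under unidirectional
+// broadcasting only the rhs is expanded (illustrative shapes)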
 
 /// out[i] = |in[i]|
 template <typename DType, typename Lang>
-void Abs(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Abs(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Abs Not Implemented";
 }
 
+template <typename DType, typename Lang>
+void Erf(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Erf Not Implemented";
+}
+
+template <typename DTypeSrc, typename DTypeDst, typename Lang>
+void CastCopy(const Tensor *src, Tensor *dst, Context *ctx) {
+  LOG(FATAL) << "CastCopy Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void Ceil(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Ceil Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void Floor(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Floor Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void Round(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Round Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void RoundE(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Round Not Implemented";
+}
+
 /// out[i] = in[i] + x
 template <typename DType, typename Lang>
-void Add(const size_t num, const Block *in, const DType x, Block *out,
-         Context *ctx) {
+void Add(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Add Not Implemented";
 }
 
 /// out[i] = in1[i] + in2[i]
 template <typename DType, typename Lang>
-void Add(const size_t num, const Block *in1, const Block *in2, Block *out,
-         Context *ctx) {
+void Add(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Add-Pair Not Implemented";
 }
 /// Clamp every element into [low, high]
 /// if in[i]>high, then out[i]=high; if in[i]<low, then out[i]=low.
 template <typename DType, typename Lang>
-void Clamp(const size_t num, const DType low, const DType high, const Block *in,
-           Block *out, Context *ctx) {
+void Clamp(const DType low, const DType high, const Tensor &in, Tensor *out,
+           Context *ctx) {
   LOG(FATAL) << "Clamp Not Implemented";
 }
 
 /// out[i] = x / in[i]
 template <typename DType, typename Lang>
-void Div(const size_t num, const DType x, const Block *in, Block *out,
-         Context *ctx) {
+void Div(const DType x, const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Div Not Implemented";
 }
 
 /// out[i] = in[i] / x
 template <typename DType, typename Lang>
-void Div(const size_t num, const Block *in, const DType x, Block *out,
-         Context *ctx) {
+void Div(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   CHECK_NE(x, 0.f);
-  EltwiseMult<DType, Lang>(num, in, DType(1) / x, out, ctx);
+  EltwiseMult<DType, Lang>(in, DType(1) / x, out, ctx);
 }
 
 /// out[i] = in1[i] / in2[i]
 template <typename DType, typename Lang>
-void Div(const size_t num, const Block *in1, const Block *in2, Block *out,
-         Context *ctx) {
+void Div(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Div-Pair Not Implemented";
 }
 
 /// out[i] = in[i] * x
 template <typename DType, typename Lang>
-void EltwiseMult(const size_t num, const Block *in, const DType x, Block *out,
-                 Context *ctx) {
+void EltwiseMult(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "EltwiseMult Not Implemented";
 }
 
 /// out[i] = in1[i] * in2[i]
 template <typename DType, typename Lang>
-void EltwiseMult(const size_t num, const Block *in1, const Block *in2, Block *out,
+void EltwiseMult(const Tensor &in1, const Tensor &in2, Tensor *out,
                  Context *ctx) {
   LOG(FATAL) << "EltwiseMult-Pair Not Implemented";
 }
 
+/// out[i]=(in2[i]>0)?in1[i]:0.f
+template <typename DType, typename Lang>
+void ReLUBackward(const Tensor &in1, const Tensor &in2, Tensor *out,
+                  Context *ctx) {
+  LOG(FATAL) << "ReLUBackward Not Implemented";
+}
+
 /// Base is e, Neper number. out[i]=exp(in[i])
 template <typename DType, typename Lang>
-void Exp(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Exp(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Exp Not Implemented";
 }
 
 /// out[i]=(in[i]<=x)?1.f:0.f
 template <typename DType, typename Lang>
-void LE(const size_t num, const Block *in, const DType x, Block *out,
-        Context *ctx) {
+void LE(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "LE Not Implemented";
 }
 /// out[i]=(in1[i]<=in2[i])?1.f:0.f
 template <typename DType, typename Lang>
-void LE(const size_t num, const Block *in1, const Block *in2, Block *out,
-        Context *ctx) {
+void LE(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Tensor-Tensor LE Not Implemented";
 }
-/// Natual logarithm, the base is e, Neper number out[i]=log(in[i]).
+/// Natural logarithm, the base is e, Neper number out[i]=log(in[i]).
 template <typename DType, typename Lang>
-void Log(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Log(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Log Not Implemented";
 }
 /// out[i]=(in[i]<x)?1.f:0.f
 template <typename DType, typename Lang>
-void LT(const size_t num, const Block *in, const DType x, Block *out,
-        Context *ctx) {
+void LT(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "LT Not Implemented";
 }
 /// out[i]=(in1[i]<in2[i])?1.f:0.f
 template <typename DType, typename Lang>
-void LT(const size_t num, const Block *in1, const Block *in2, Block *out,
-        Context *ctx) {
+void LT(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Tensor-Tensor LT Not Implemented";
 }
 /// out[i]=(in[i]>=x)?1.f:0.f
 template <typename DType, typename Lang>
-void GE(const size_t num, const Block *in, const DType x, Block *out,
-        Context *ctx) {
+void GE(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "GE Not Implemented";
 }
 /// out[i]=(in1[i]>=in2[i])?1.f:0.f
 template <typename DType, typename Lang>
-void GE(const size_t num, const Block *in1, const Block *in2, Block *out,
-        Context *ctx) {
+void GE(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Tensor-Tensor GE Not Implemented";
 }
 /// out[i]=(in[i]>x)?1.f:0.f
 template <typename DType, typename Lang>
-void GT(const size_t num, const Block *in, const DType x, Block *out,
-        Context *ctx) {
+void GT(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "GT Not Implemented";
 }
 /// out[i]=(in[i]>in2[i])?1.f:0.f
 template <typename DType, typename Lang>
-void GT(const size_t num, const Block *in, const Block *in2, Block *out,
-        Context *ctx) {
+void GT(const Tensor &in, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Tensor-Tensor GT Not Implemented";
 }
+/// out[i]=(in[i]==x)?1.f:0.f
+template <typename DType, typename Lang>
+void EQ(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "EQ Not Implemented";
+}
+/// out[i]=(in[i]==in2[i])?1.f:0.f
+template <typename DType, typename Lang>
+void EQ(const Tensor &in, const Tensor &in2, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Tensor-Tensor EQ Not Implemented";
+}
 /// out[i] = pow(in[i], x)
 template <typename DType, typename Lang>
-void Pow(const size_t num, const Block *in, const DType x, Block *out,
-         Context *ctx) {
+void Pow(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Pow Not Implemented";
 }
 
 /// out[i]=pow(in1[i], in2[i])
 template <typename DType, typename Lang>
-void Pow(const size_t num, const Block *in1, const Block *in2, Block *out,
-         Context *ctx) {
+void Pow(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Pow-Pair Not Implemented";
 }
 
 /// out[i]=max(0, in[i])
 template <typename DType, typename Lang>
-void ReLU(const size_t num, const Block *in, Block *out, Context *ctx) {
+void ReLU(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "ReLU Not Implemented";
 }
 
 /// out[i] = x
 template <typename DType, typename Lang>
-void Set(const size_t num, const DType x, Block *out, Context *ctx) {
+void Set(const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Set Not Implemented";
 }
 /// out[i]=sigmoid(in[i])
 template <typename DType, typename Lang>
-void Sigmoid(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Sigmoid(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Sigmoid Not Implemented";
 }
 
+/// out[i] = log(exp(in[i]) + 1)
+template <typename DType, typename Lang>
+void SoftPlus(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "SoftPlus Not Implemented";
+}
+
+/// out[i] = in[i] / (abs(in[i]) + 1)
+template <typename DType, typename Lang>
+void SoftSign(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "SoftSign Not Implemented";
+}
+
 /// out[i] = sign(in[i])
 template <typename DType, typename Lang>
-void Sign(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Sign(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Sign Not Implemented";
 }
 /// out[i]=sqrt(in[i])
 template <typename DType, typename Lang>
-void Sqrt(const size_t num, const Block *in, Block *out, Context *ctx) {
+void Sqrt(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Sqrt Not Implemented";
 }
 
 /// out[i]=square(in[i])
 template <typename DType, typename Lang>
-void Square(const size_t num, const Block *in, Block *out, Context *ctx) {
-  EltwiseMult<DType, Lang>(num, in, in, out, ctx);
+void Square(const Tensor &in, Tensor *out, Context *ctx) {
+  EltwiseMult<DType, Lang>(in, in, out, ctx);
 }
 
 /// out[i] =  in[i] - x
 template <typename DType, typename Lang>
-void Sub(const size_t num, const Block *in, const DType x, Block *out,
-         Context *ctx) {
-  Add<DType, Lang>(num, in, -x, out, ctx);
+void Sub(const Tensor &in, const DType x, Tensor *out, Context *ctx) {
+  Add<DType, Lang>(in, -x, out, ctx);
 }
 
 /// out[i] = in1[i] - in2[i]
 template <typename DType, typename Lang>
-void Sub(const size_t num, const Block *in1, const Block *in2, Block *out,
-         Context *ctx) {
+void Sub(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Sub-Pair Not Implemented";
 }
 
 /// sum all elements of in into out
 template <typename DType, typename Lang>
-void Sum(const size_t num, const Block *in, DType *out, Context *ctx) {
+void Sum(const Tensor &in, DType *out, Context *ctx) {
   LOG(FATAL) << "Sum Not Implemented";
 }
 
-/// out[i]=tanh(in[i])
+/// out[i]=fn(in[i])
+#define GenUnaryNotImplemented(fn, stringfn)             \
+  template <typename DType, typename Lang>               \
+  void fn(const Tensor &in, Tensor *out, Context *ctx) { \
+    std::string str = stringfn;                          \
+    str += " Not Implemented";                           \
+    LOG(FATAL) << str;                                   \
+  }
+
+GenUnaryNotImplemented(Cos, "Cos");
+GenUnaryNotImplemented(Cosh, "Cosh");
+GenUnaryNotImplemented(Acos, "Acos");
+GenUnaryNotImplemented(Acosh, "Acosh");
+GenUnaryNotImplemented(Sin, "Sin");
+GenUnaryNotImplemented(Sinh, "Sinh");
+GenUnaryNotImplemented(Asin, "Asin");
+GenUnaryNotImplemented(Asinh, "Asinh");
+GenUnaryNotImplemented(Tan, "Tan");
+GenUnaryNotImplemented(Tanh, "Tanh");
+GenUnaryNotImplemented(Atan, "Atan");
+GenUnaryNotImplemented(Atanh, "Atanh");
+
+/// similar to cudnnTransformTensor
+/// copies the data from one tensor to another tensor with a different layout
+/// the tensors must have the same dimensions but not necessarily the same
+/// strides
 template <typename DType, typename Lang>
-void Tanh(const size_t num, const Block *in, Block *out, Context *ctx) {
-  LOG(FATAL) << "Tanh Not Implemented";
+void Transform(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Transform Not Implemented";
 }
 
 // **************************************
@@ -255,21 +349,19 @@
 // Get the random generator from 'ctx'
 // If DType is not float, then convert the threshold to DType
 template <typename DType, typename Lang>
-void Bernoulli(const size_t num, const float p, Block *out, Context *ctx) {
+void Bernoulli(const float p, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Bernoulli Not Implemented";
 }
 // The random generator should be extracted from ctx.
 // If DType is not float, then convert the mean and std to DType
 template <typename DType, typename Lang>
-void Gaussian(const size_t num, const float mean, const float std, Block *out,
-              Context *ctx) {
+void Gaussian(const float mean, const float std, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Gaussian Not Implemented";
 }
 // The random generator should be extracted from ctx.
 // If DType is not float, then convert the low and high to DType
 template <typename DType, typename Lang>
-void Uniform(const size_t num, const float low, const float high, Block *out,
-             Context *ctx) {
+void Uniform(const float low, const float high, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Uniform Not Implemented";
 }
 
@@ -279,75 +371,92 @@
 
 /// Return the index of the element with the max value.
 template <typename DType, typename Lang>
-void Amax(const size_t num, const Block *in, size_t *out, Context *ctx) {
+void Amax(const Tensor &in, size_t *out, Context *ctx) {
   LOG(FATAL) << "Amax Not Implemented";
 }
 
 /// Return the index of the element with the min value.
 template <typename DType, typename Lang>
-void Amin(const size_t num, const Block *in, size_t *out, Context *ctx) {
+void Amin(const Tensor &in, size_t *out, Context *ctx) {
   LOG(FATAL) << "Amin Not Implemented";
 }
 /// out = sum |x| for all x in in
 template <typename DType, typename Lang>
-void Asum(const size_t num, const Block *in, DType *out, Context *ctx) {
+void Asum(const Tensor &in, DType *out, Context *ctx) {
   LOG(FATAL) << "Asum Not Implemented";
 }
 
 /// out = alpha * in + out
 template <typename DType, typename Lang>
-void Axpy(const size_t num, const DType alpha, const Block *in, Block *out,
-          Context *ctx) {
+void Axpy(const DType alpha, const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Axpy Not Implemented";
 }
 
 /// out = ||in||_2^2, i.e, L2 norm.
 template <typename DType, typename Lang>
-void Nrm2(const size_t num, const Block *in, float *out, Context *ctx) {
+void Nrm2(const Tensor &in, float *out, Context *ctx) {
   LOG(FATAL) << "Nrm2 Not Implemented";
 }
 
 /// out *= x
 template <typename DType, typename Lang>
-void Scale(const size_t num, const DType x, Block *out, Context *ctx) {
+void Scale(const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Scale Not Implemented";
 }
 
 /// inner product of array in1 and in2
 template <typename DType, typename Lang>
-void Dot(const size_t num, const Block *in1, const Block *in2, DType *out,
-         Context *ctx) {
+void Dot(const Tensor &in1, const Tensor &in2, DType *out, Context *ctx) {
+  LOG(FATAL) << "Dot Not Implemented";
+}
+template <typename DType, typename Lang>
+void Dot(const Tensor &in1, const Tensor &in2, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Dot Not Implemented";
 }
 
 /// out = alpha * A * v + beta * out.
 /// A's transpose flag indicates if its internal data layout is transposed
 template <typename DType, typename Lang>
-void GEMV(bool trans, const size_t m, const size_t n, const DType alpha,
-          const Block *A, const Block *v, const DType beta, Block *out,
-          Context *ctx) {
+void GEMV(const DType alpha, const Tensor &A, const Tensor &v, const DType beta,
+          Tensor *out, Context *ctx) {
   LOG(FATAL) << "GEMV Not Implemented";
 }
 
 /// multiply a matrix with a diagonal matrix constructed using values from 'v'.
 /// if side_right is true, do M*diag(v); else do diag(v)*M
 template <typename DType, typename Lang>
-void DGMM(const bool side_right, const size_t nrow, const size_t ncol,
-          const Block *M, const Block *v, Block *out, Context *ctx) {
+void DGMM(const bool side_right, const Tensor &M, const Tensor &v, Tensor *out,
+          Context *ctx) {
   LOG(FATAL) << "DGMM Not Implemented";
 }
 
 /// C = alpha * A * B + beta * C.
 /// A's transpose flag indicates if its internal data layout is transposed
 template <typename DType, typename Lang>
-void GEMM(const bool transA, const bool transB, const size_t nrowA,
-          const size_t ncolB, const size_t ncolA, const DType alpha,
-          const Block *A, const Block *B, const DType beta, Block *C,
-          Context *ctx) {
+void GEMM(const DType alpha, const Tensor &A, const Tensor &B, const DType beta,
+          Tensor *C, Context *ctx) {
   LOG(FATAL) << "GEMM Not Implemented";
 }
 
 template <typename DType, typename Lang>
+void GEMMBatched(const DType alpha, const Tensor &A, const Tensor &B,
+                 const DType beta, Tensor *C, Context *ctx) {
+  LOG(FATAL) << "GEMM Batched Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void SoftMax(const Tensor &in, Tensor *out, Context *ctx) {
+  LOG(FATAL) << "Not Implemented";
+}
+
+template <typename DType, typename Lang>
+void SoftMaxBackward(const Tensor &in, Tensor *out, const Tensor &fdout,
+                     Context *ctx) {
+  LOG(FATAL) << "Not Implemented";
+}
+
+// TODO(yisen): the cross-entropy functions below still use the Block-based API
+template <typename DType, typename Lang>
 void ComputeCrossEntropy(bool int_target, const size_t batchsize,
                          const size_t dim, const Block *p, const Block *t,
                          Block *loss, Context *ctx) {
@@ -362,8 +471,7 @@
 }
 
 template <typename DType, typename Lang>
-void RowMax(const size_t nrow, const size_t ncol, const Block *in,
-    Block *ret, Context* ctx) {
+void RowMax(const Tensor &in, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Not Implemented";
 }
 // **************************************
@@ -372,43 +480,48 @@
 /*
 /// Add the vector v to every column of A as the column of out
 template <typename DType, typename Lang>
-void AddCol(const size_t nrow, const size_t ncol, const Block *A, const Block *v,
-            Block *out, Context *ctx) {
+void AddCol(const size_t nrow, const size_t ncol, const Tensor &A, const Tensor
+&v,
+            Tensor *out, Context *ctx) {
   LOG(FATAL) << "AddCol Not Implemented";
 }
 // TODO(wangwei) unify AddRow and AddCol.
 /// Add the vector v to every row of A as the row of out
 template <typename DType, typename Lang>
-void AddRow(const size_t nrow, const size_t ncol, const Block *A, const Block *v,
-            Block *out, Context *ctx) {
+void AddRow(const size_t nrow, const size_t ncol, const Tensor &A, const Tensor
+&v,
+            Tensor *out, Context *ctx) {
   LOG(FATAL) << "AddRow Not Implemented";
 }
 /// outer-product.
 /// in1 and in2 are vectors of len m and n. out is matrix of shape m * n
 template <typename DType, typename Lang>
-void Outer(const size_t m, const size_t n, const Block *in1, const Block *in2,
-           Block *out, Context *ctx) {
+void Outer(const size_t m, const size_t n, const Tensor &in1, const Tensor &in2,
+           Tensor *out, Context *ctx) {
   LOG(FATAL) << "Outer Not Implemented";
 }
 
 /// Sum the columns of the in matrix into a vector
 template <typename DType, typename Lang>
-void SumColumns(const size_t nrow, const size_t ncol, const Block *in, Block *out,
+void SumColumns(const size_t nrow, const size_t ncol, const Tensor &in, Tensor
+*out,
                 Context *ctx) {
   LOG(FATAL) << "SumColumns Not Implemented";
 }
 template <typename DType, typename Lang>
-void Set(const size_t num, const DType x, Block *out, Context *ctx) {
+void Set(const DType x, Tensor *out, Context *ctx) {
   LOG(FATAL) << "Not Implemented";
 }
 
 // TODO(wangwei) unify SumRow and SumCol.
 /// Sum the rows of the in matrix into a vector
 template <typename DType, typename Lang>
-void SumRows(const size_t nrow, const size_t ncol, const Block *in, Block *out,
+void SumRows(const size_t nrow, const size_t ncol, const Tensor &in, Tensor
+*out,
              Context *ctx) {
   LOG(FATAL) << "SumRows Not Implemented";
 }
 */
+
 }  // namespace singa
 #endif  // SINGA_CORE_MATH_H_
diff --git a/src/core/tensor/tensor_math_cpp.h b/src/core/tensor/tensor_math_cpp.h
index 4f510ed..5be46c6 100644
--- a/src/core/tensor/tensor_math_cpp.h
+++ b/src/core/tensor/tensor_math_cpp.h
@@ -19,377 +19,708 @@
 #define SINGA_CORE_TENSOR_TENSOR_MATH_CPP_H_
 
 #include "./tensor_math.h"
-#include <cfloat>
-#include "singa/core/common.h"
+//#include "./stacktrace.h"
 #include <math.h>
 
+#include <algorithm>
+#include <cfloat>
+#include <iostream>
+#include <iterator>
+#include <sstream>
+
+#include "singa/core/common.h"
+#include "singa/core/tensor.h"
+
 #ifdef USE_CBLAS
 #include <cblas.h>
 #endif
 
 namespace singa {
 
-template <>
-void Abs<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                           Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = fabs(inPtr[i]);
+// ===================== Helper Functions =============================
+
+// generate a traversal_info vector based on the tensor's shape for the
+// traverse_next function to work
+vector<int> generate_traversal_info(const Tensor &x) {
+  vector<int> traversal_info = {};
+  for (size_t n = 0; n < (x.shape().size() + 2); ++n) {
+    traversal_info.push_back(0);
   }
+  return traversal_info;
+}
+
+// generate shape multipliers
+// e.g., a tensor of shape (3,3) with stride (1,3) has shape multipliers
+// (3,1); a tensor of shape (3,3) with stride (3,1) also has shape
+// multipliers (3,1)
+// this means that the 3rd, 6th, and 9th elements of the array will always be
+// the starting elements of their respective rows,
+// so we need to use the inner stride when jumping from the 1st to the 2nd
+// element, and the outer stride when jumping from the 2nd to the 3rd
+vector<int> generate_shape_multipliers(const Tensor &x) {
+  Shape y_shape = x.shape();
+  if (y_shape.size() == 0) {
+    return {1};
+  }
+  vector<int> shape_multipliers = {1};
+  int cumulative_product = 1;
+
+  for (size_t n = 0; n < (y_shape.size() - 1); ++n) {
+    cumulative_product = cumulative_product * y_shape[y_shape.size() - 1 - n];
+    shape_multipliers.insert(shape_multipliers.begin(), cumulative_product);
+  }
+  return shape_multipliers;
+}
+
+// ******************************************************************************************
+// CPP traversal operations (works on const declarations without modifying
+// tensor variables)
+// ******************************************************************************************
+
+// this function checks whether the next index falls on a special multiplier of
+// the outer shape
+// so the algorithm knows when to jump over/back to a starting element of the
+// outer shape
+// for e.g. in [[1,4,7], [2,5,8], [3,6,9]], elements 1,2,3 are the starting
+// elements of their respective rows
+// for a 2d matrix this additional check is a single loop,
+// but for higher-dimensional tensors the extra scan may degrade runtime to
+// O(nlog(n))
+int determine_order(vector<int> &shape_multipliers, int counter) {
+  for (size_t n = 0; n < (shape_multipliers.size() - 1); ++n) {
+    if ((counter % shape_multipliers[n]) == 0) {
+      return ((shape_multipliers.size()) - 1 - n);
+    }
+  }
+  return 0;
+}
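+// e.g. with shape multipliers (3,1), counters that are multiples of 3 mark
+// the start of a new row, so determine_order returns 1 for them and 0
+// otherwise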
+
+// this function updates the base indexes with the current index after every
+// single traversal step,
+// can be generalized beyond 2d cases
+void update_base_index(const Tensor &x, vector<int> &traversal_info) {
+  for (int n = 0; n < (traversal_info[x.shape().size() + 1] + 1); ++n) {
+    traversal_info[n] = traversal_info[x.shape().size()];
+  }
+}
+
+// function to traverse a const strided tensor object
+// it requires an additional vector, traversal_info {0,0,0,0 ...}, comprising
+// (x.shape().size()+2) elements of 0
+// for e.g. 2d matrix:
+// index 0 and 1 store the base row and column index respectively
+// index 2 stores the current index of the traversal
+// index 3 stores the order of the traversal for e.g. if the order is 0,
+// it means the next element can be navigated to using the innermost stride
+void traverse_next(const Tensor &x, vector<int> &shape_multipliers,
+                   vector<int> &traversal_info, int counter) {
+  update_base_index(x, traversal_info);
+  traversal_info[x.shape().size() + 1] =
+      determine_order(shape_multipliers, counter);
+  traversal_info[x.shape().size()] =
+      traversal_info[traversal_info[x.shape().size() + 1]] +
+      x.stride()[x.stride().size() - traversal_info[x.shape().size() + 1] - 1];
+}
+
+inline int next_offset(int offset, const vector<size_t> &shape,
+                       const vector<int> &stride, vector<int> *index) {
+  for (int k = shape.size() - 1; k >= 0; k--) {
+    if (index->at(k) + 1 < int(shape.at(k))) {
+      offset += stride.at(k);
+      index->at(k) += 1;
+      break;
+    }
+    index->at(k) = 0;
+    offset -= stride.at(k) * (shape.at(k) - 1);
+  }
+  return offset;
 }
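+// e.g. a transposed view with shape {3, 2} and stride {1, 3} starts at
+// offset 0, and next_offset then yields 3, 1, 4, 2, 5, walking the
+// underlying row-major buffer column by column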
 
-template <>
-void Add<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                           Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = inPtr[i] + x;
-  }
-}
+template <typename DType>
+void traverse_unary(const Tensor &in, Tensor *out,
+                    std::function<DType(DType)> func) {
+  DType *outPtr = static_cast<DType *>(out->block()->mutable_data());
+  const DType *inPtr = static_cast<const DType *>(in.block()->data());
+  /*
+  vector<int> traversal_info = generate_traversal_info(in);
+  vector<int> shape_multipliers = generate_shape_multipliers(in);
 
-template <>
-void Add<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           Block *out, Context *ctx) {
-  // CHECK_EQ(ctx->stream, nullptr);
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = in1Ptr[i] + in2Ptr[i];
+  for (size_t i = 0; i < in.Size(); i++) {
+    outPtr[i] = func(inPtr[traversal_info[in.shape().size()]]);
+    traverse_next(in, shape_multipliers, traversal_info, i + 1);
   }
-}
-
-template <>
-void Clamp<float, lang::Cpp>(const size_t num, const float low,
-                             const float high, const Block *in, Block *out,
-                             Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    if (inPtr[i] > high) {
-      outPtr[i] = high;
-    } else if (inPtr[i] < low) {
-      outPtr[i] = low;
-    } else {
-      outPtr[i] = inPtr[i];
+  */
+  CHECK(in.shape() == out->shape());
+  if (in.stride() == out->stride()) {
+    for (size_t i = 0; i < in.Size(); i++) outPtr[i] = func(inPtr[i]);
+  } else {
+    // LOG(INFO) << "not equal stride";
+    size_t in_offset = 0, out_offset = 0;
+    vector<int> in_idx(in.nDim(), 0), out_idx(out->nDim(), 0);
+    for (size_t i = 0; i < Product(in.shape()); i++) {
+      outPtr[out_offset] = func(inPtr[in_offset]);
+      out_offset =
+          next_offset(out_offset, out->shape(), out->stride(), &out_idx);
+      in_offset = next_offset(in_offset, in.shape(), in.stride(), &in_idx);
     }
   }
 }
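+// note: traverse_unary takes the flat loop only when in and out share the
+// same stride; otherwise it advances a separate offset per tensor with
+// next_offset, so transposed/strided views are still handled element-wise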
 
-template <>
-void Div<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    CHECK_NE(in2Ptr[i], 0.f);
-    outPtr[i] = in1Ptr[i] / in2Ptr[i];
+template <typename DType>
+void traverse_binary(const Tensor &in1, const Tensor &in2, Tensor *out,
+                     std::function<DType(DType, DType)> func) {
+  DType *outPtr = static_cast<DType *>(out->block()->mutable_data());
+  const DType *in1Ptr = static_cast<const DType *>(in1.block()->data());
+  const DType *in2Ptr = static_cast<const DType *>(in2.block()->data());
+  /*
+  vector<int> traversal_info_in1 = generate_traversal_info(in1);
+  vector<int> traversal_info_in2 = generate_traversal_info(in2);
+  vector<int> shape_multipliers_in1 = generate_shape_multipliers(in1);
+  vector<int> shape_multipliers_in2 = generate_shape_multipliers(in2);
+
+  for (size_t i = 0; i < in1.Size(); i++) {
+    outPtr[i] = func(in1Ptr[traversal_info_in1[in1.shape().size()]],
+                     in2Ptr[traversal_info_in2[in2.shape().size()]]);
+    traverse_next(in1, shape_multipliers_in1, traversal_info_in1, i + 1);
+    traverse_next(in2, shape_multipliers_in2, traversal_info_in2, i + 1);
+  }
+  */
+  auto prod = Product(in1.shape());
+  CHECK(in1.shape() == out->shape());
+  CHECK(in2.shape() == out->shape());
+  if ((in1.stride() == out->stride()) && (in2.stride() == in1.stride())) {
+    for (size_t i = 0; i < prod; i++) outPtr[i] = func(in1Ptr[i], in2Ptr[i]);
+  } else {
+
+    size_t in1_offset = 0, in2_offset = 0, out_offset = 0;
+    vector<int> in1_idx(in1.nDim(), 0), in2_idx(in2.nDim(), 0),
+        out_idx(out->nDim(), 0);
+    for (size_t i = 0; i < prod; i++) {
+      outPtr[out_offset] = func(in1Ptr[in1_offset], in2Ptr[in2_offset]);
+      out_offset =
+          next_offset(out_offset, out->shape(), out->stride(), &out_idx);
+      in1_offset = next_offset(in1_offset, in1.shape(), in1.stride(), &in1_idx);
+      in2_offset = next_offset(in2_offset, in2.shape(), in2.stride(), &in2_idx);
+    }
   }
 }
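+// usage sketch (hypothetical): an element-wise maximum over two tensors of
+// the same shape could be written as
+//   traverse_binary<float>(a, b, &c,
+//                          [](float x, float y) { return std::max(x, y); });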
 
+// ******************************************************************************************
+// traversal operations end
+// ******************************************************************************************
+
+// ===================== CPP Functions ==============================
+
 template <>
-void Div<float, lang::Cpp>(const size_t num, const float x, const Block *in,
-                           Block *out, Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) {
-    CHECK_NE(inPtr[i], 0.f);
-    outPtr[i] = x / inPtr[i];
-  }
+void Abs<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return fabs(x); });
 }
 
 template <>
-void EltwiseMult<float, lang::Cpp>(const size_t num, const Block *in,
-                                   const float x, Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = inPtr[i] * x;
-  }
+void Erf<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return erff(x); });
 }
 
 template <>
-void EltwiseMult<float, lang::Cpp>(const size_t num, const Block *in1,
-                                   const Block *in2, Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = in1Ptr[i] * in2Ptr[i];
-  }
+void CastCopy<float, int, lang::Cpp>(const Tensor *src, Tensor *dst,
+                                     Context *ctx) {
+  int *dst_array = static_cast<int *>(dst->block()->mutable_data());
+  const float *src_array = static_cast<const float *>(src->block()->data());
+  for (size_t i = 0; i < dst->Size(); ++i)
+    dst_array[i] = static_cast<int>(src_array[i]);
 }
+
 template <>
-void Exp<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
+void CastCopy<int, float, lang::Cpp>(const Tensor *src, Tensor *dst,
+                                     Context *ctx) {
+  float *dst_array = static_cast<float *>(dst->block()->mutable_data());
+  const int *src_array = static_cast<const int *>(src->block()->data());
+  for (size_t i = 0; i < dst->Size(); ++i)
+    dst_array[i] = static_cast<float>(src_array[i]);
+}
+
+template <>
+void Ceil<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return std::ceil(x); });
+}
+
+template <>
+void Floor<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return std::floor(x); });
+}
+
+template <>
+void Round<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return std::round(x); });
+}
+
+template <>
+void RoundE<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) {
+    float doub = x * 2;
+    if (ceilf(doub) == doub) {
+      return std::round(x / 2) * 2;
+    } else {
+      return std::round(x);
+    }
+  });
+}
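+// RoundE rounds halfway cases to the nearest even integer, e.g.
+// 0.5 -> 0, 1.5 -> 2, 2.5 -> 2, while non-halfway values round as usual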
+
+#ifdef USE_DNNL
+template <>
+void SoftMax<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto md = dnnl::memory::desc({static_cast<long long>(in.shape()[0]),
+                                static_cast<long long>(in.shape()[1])},
+                               dnnl::memory::data_type::f32,
+                               dnnl::memory::format_tag::ab);
+  auto in_mem = dnnl::memory(md, ctx->dnnl_engine, in.block()->mutable_data());
+  auto out_mem =
+      dnnl::memory(md, ctx->dnnl_engine, out->block()->mutable_data());
+
+  auto softmax_desc =
+      dnnl::softmax_forward::desc(dnnl::prop_kind::forward_scoring, md, 1);
+  auto softmax_prim_desc =
+      dnnl::softmax_forward::primitive_desc(softmax_desc, ctx->dnnl_engine);
+  auto softmax = dnnl::softmax_forward(softmax_prim_desc);
+  softmax.execute(ctx->dnnl_stream,
+                  {{DNNL_ARG_SRC, in_mem}, {DNNL_ARG_DST, out_mem}});
+  ctx->dnnl_stream.wait();
+}
+
+template <>
+void SoftMaxBackward<float, lang::Cpp>(const Tensor &in, Tensor *out,
+                                       const Tensor &fdout, Context *ctx) {
+  auto md = dnnl::memory::desc({static_cast<long long>(in.shape()[0]),
+                                static_cast<long long>(in.shape()[1])},
+                               dnnl::memory::data_type::f32,
+                               dnnl::memory::format_tag::ab);
+  auto in_mem = dnnl::memory(md, ctx->dnnl_engine, in.block()->mutable_data());
+  auto fdout_mem =
+      dnnl::memory(md, ctx->dnnl_engine, fdout.block()->mutable_data());
+  auto out_mem =
+      dnnl::memory(md, ctx->dnnl_engine, out->block()->mutable_data());
+
+  auto softmax_desc =
+      dnnl::softmax_forward::desc(dnnl::prop_kind::forward_scoring, md, 1);
+  auto softmax_prim_desc =
+      dnnl::softmax_forward::primitive_desc(softmax_desc, ctx->dnnl_engine);
+
+  auto softmaxbwd_desc = dnnl::softmax_backward::desc(md, md, 1);
+  auto softmaxbwd_prim_desc = dnnl::softmax_backward::primitive_desc(
+      softmaxbwd_desc, ctx->dnnl_engine, softmax_prim_desc);
+  auto softmaxbwd = dnnl::softmax_backward(softmaxbwd_prim_desc);
+  softmaxbwd.execute(ctx->dnnl_stream, {{DNNL_ARG_DIFF_SRC, out_mem},
+                                        {DNNL_ARG_DIFF_DST, in_mem},
+                                        {DNNL_ARG_DST, fdout_mem}});
+  ctx->dnnl_stream.wait();
+}
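+// both DNNL kernels above run softmax over axis 1, matching the 2-d
+// (batch, dim) layout declared in the memory::desc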
+#else
+// native Softmax without DNNL
+template <>
+void SoftMax<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  CHECK_LE(in.nDim(), 2u)
+      << "Axis is required for SoftMax on a multi-dimensional tensor";
+  out->CopyData(in);
+  size_t nrow = 1, ncol = in.Size(), size = ncol;
+  if (in.nDim() == 2u) {
+    nrow = in.shape(0);
+    ncol = size / nrow;
+    out->Reshape(Shape{nrow, ncol});
+  }
+  Tensor tmp = RowMax(*out);
+  SubColumn(tmp, out);
+  Exp(*out, out);
+
+  SumColumns(*out, &tmp);
+  DivColumn(tmp, out);
+  out->Reshape(in.shape());
+}
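+// note: subtracting the per-row max before Exp is the usual log-sum-exp
+// stabilization; it keeps exp() from overflowing and leaves the final
+// per-row probabilities unchanged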
+#endif  // USE_DNNL
+
+template <>
+void Add<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = exp(inPtr[i]);
-  }
+  auto add_lambda = [&x](float a) { return (a + x); };
+  traverse_unary<float>(in, out, add_lambda);
 }
 
 template <>
-void GE<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] >= x) ? 1.f : 0.f;
-  }
-}
-
-template <>
-void GE<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr1 = static_cast<const float *>(in1->data());
-  const float *inPtr2 = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr1[i] >= inPtr2[i]) ? 1.f : 0.f;
-  }
-}
-template <>
-void GT<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] > x) ? 1.f : 0.f;
-  }
-}
-template <>
-void GT<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr1 = static_cast<const float *>(in1->data());
-  const float *inPtr2 = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr1[i] > inPtr2[i]) ? 1.f : 0.f;
-  }
-}
-
-template <>
-void LE<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] <= x) ? 1.f : 0.f;
-  }
-}
-template <>
-void LE<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr1 = static_cast<const float *>(in1->data());
-  const float *inPtr2 = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr1[i] <= inPtr2[i]) ? 1.f : 0.f;
-  }
-}
-template <>
-void Log<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
+void Add<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    CHECK_GT(inPtr[i], 0.f);
-    outPtr[i] = log(inPtr[i]);
-  }
-}
-template <>
-void LT<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] < x) ? 1.f : 0.f;
-  }
-}
-template <>
-void LT<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                          Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr1 = static_cast<const float *>(in1->data());
-  const float *inPtr2 = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr1[i] < inPtr2[i]) ? 1.f : 0.f;
-  }
-}
-
-template <>
-void Pow<float, lang::Cpp>(const size_t num, const Block *in, const float x,
-                           Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = pow(inPtr[i], x);
-  }
-}
-
-template <>
-void Pow<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = pow(in1Ptr[i], in2Ptr[i]);
-  }
-}
-template <>
-void ReLU<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] >= 0.f) ? inPtr[i] : 0.f;
-  }
-}
-template <>
-void Set<float, lang::Cpp>(const size_t num, const float x, Block *out,
-                           Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) outPtr[i] = x;
-}
-template <>
-void Set<int, lang::Cpp>(const size_t num, const int x, Block *out,
-                           Context *ctx) {
-  int *outPtr = static_cast<int *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) outPtr[i] = x;
-}
-
-template <>
-void Sigmoid<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                               Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = 1.f / (1.f + exp(-inPtr[i]));
-  }
-}
-
-template <>
-void Sign<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = (inPtr[i] > 0) - (inPtr[i] < 0);
-  }
-}
-
-template <>
-void Sqrt<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    CHECK_GE(inPtr[i], 0.f);
-    outPtr[i] = sqrt(inPtr[i]);
-  }
-}
-/*
-template <>
-void Square<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                              Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = inPtr[i] * inPtr[i];
-  }
-}
-*/
-
-template <>
-void Sub<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           Block *out, Context *ctx) {
   // CHECK_EQ(ctx->stream, nullptr);
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = in1Ptr[i] - in2Ptr[i];
-  }
+  auto add_lambda_binary = [](float a, float b) { return (a + b); };
+  traverse_binary<float>(in1, in2, out, add_lambda_binary);
+}
+
+template <>
+void Clamp<float, lang::Cpp>(const float low, const float high,
+                             const Tensor &in, Tensor *out, Context *ctx) {
+  auto clamp_lambda = [&low, &high](float a) {
+    if (a < low) {
+      return low;
+    } else if (a > high) {
+      return high;
+    } else {
+      return a;
+    }
+  };
+  traverse_unary<float>(in, out, clamp_lambda);
+}
+
+template <>
+void Div<float, lang::Cpp>(const float x, const Tensor &in, Tensor *out,
+                           Context *ctx) {
+  auto const_div = [&x](float a) {
+    CHECK_NE(a, 0.f);
+    return x / a;
+  };
+  traverse_unary<float>(in, out, const_div);
+}
+
+template <>
+void Div<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                           Context *ctx) {
+  auto binary_div = [](float a, float b) {
+    CHECK_NE(b, 0.f);
+    return a / b;
+  };
+  traverse_binary<float>(in1, in2, out, binary_div);
+}
+
+template <>
+void EltwiseMult<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                                   Context *ctx) {
+  auto eltwisemult_lambda = [&x](float a) { return (a * x); };
+  traverse_unary<float>(in, out, eltwisemult_lambda);
+}
+
+template <>
+void EltwiseMult<float, lang::Cpp>(const Tensor &in1, const Tensor &in2,
+                                   Tensor *out, Context *ctx) {
+  auto eltwisemult_lambda_binary = [](float a, float b) { return (a * b); };
+  traverse_binary<float>(in1, in2, out, eltwisemult_lambda_binary);
+}
+
+template <>
+void ReLUBackward<float, lang::Cpp>(const Tensor &in1, const Tensor &in2,
+                                    Tensor *out, Context *ctx) {
+  auto relubackward_lambda = [](float a, float b) { return (b > 0) ? a : 0.f; };
+  traverse_binary<float>(in1, in2, out, relubackward_lambda);
+}
+
+template <>
+void Exp<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  traverse_unary<float>(in, out, [](float x) { return exp(x); });
+}
+
+template <>
+void GE<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                          Context *ctx) {
+  auto ge_lambda = [&x](float a) { return (a >= x) ? 1.f : 0.f; };
+  traverse_unary<float>(in, out, ge_lambda);
+}
+
+template <>
+void GE<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                          Context *ctx) {
+  auto ge_lambda_binary = [](float a, float b) { return (a >= b) ? 1.f : 0.f; };
+  traverse_binary<float>(in1, in2, out, ge_lambda_binary);
+}
+
+template <>
+void GE<int, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                        Context *ctx) {
+  auto ge_lambda_binary = [](int a, int b) { return (a >= b) ? 1 : 0; };
+  traverse_binary<int>(in1, in2, out, ge_lambda_binary);
+}
+
+template <>
+void GT<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                          Context *ctx) {
+  auto gt_lambda = [&x](float a) { return (a > x) ? 1.f : 0.f; };
+  traverse_unary<float>(in, out, gt_lambda);
+}
+
+template <>
+void GT<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                          Context *ctx) {
+  auto gt_lambda_binary = [](float a, float b) { return (a > b) ? 1.f : 0.f; };
+  traverse_binary<float>(in1, in2, out, gt_lambda_binary);
+}
+
+template <>
+void GT<int, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                        Context *ctx) {
+  auto gt_lambda_binary = [](int a, int b) { return (a > b) ? 1 : 0; };
+  traverse_binary<int>(in1, in2, out, gt_lambda_binary);
+}
+
+template <>
+void LE<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                          Context *ctx) {
+  auto le_lambda = [&x](float a) { return (a <= x) ? 1.f : 0.f; };
+  traverse_unary<float>(in, out, le_lambda);
+}
+
+template <>
+void LE<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                          Context *ctx) {
+  auto le_lambda_binary = [](float a, float b) { return (a <= b) ? 1.f : 0.f; };
+  traverse_binary<float>(in1, in2, out, le_lambda_binary);
+}
+
+template <>
+void LE<int, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                        Context *ctx) {
+  auto le_lambda_binary = [](int a, int b) { return (a <= b) ? 1 : 0; };
+  traverse_binary<int>(in1, in2, out, le_lambda_binary);
+}
+
+template <>
+void Log<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto ulog = [](float a) {
+    CHECK_GT(a, 0.f);
+    return log(a);
+  };
+  traverse_unary<float>(in, out, ulog);
+}
+
+template <>
+void LT<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                          Context *ctx) {
+  auto lt_lambda = [&x](float a) { return (a < x) ? 1.f : 0.f; };
+  traverse_unary<float>(in, out, lt_lambda);
+}
+
+template <>
+void LT<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                          Context *ctx) {
+  auto lt_lambda_binary = [](float a, float b) { return (a < b) ? 1.f : 0.f; };
+  traverse_binary<float>(in1, in2, out, lt_lambda_binary);
+}
+
+template <>
+void LT<int, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                        Context *ctx) {
+  auto lt_lambda_binary = [](int a, int b) { return (a < b) ? 1 : 0; };
+  traverse_binary<int>(in1, in2, out, lt_lambda_binary);
+}
+
+template <>
+void EQ<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                          Context *ctx) {
+  auto eq_lambda = [&x](float a) { return (a == x) ? 1.f : 0.f; };
+  traverse_unary<float>(in, out, eq_lambda);
+}
+
+template <>
+void EQ<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                          Context *ctx) {
+  auto eq_lambda_binary = [](float a, float b) { return (a == b) ? 1.f : 0.f; };
+  traverse_binary<float>(in1, in2, out, eq_lambda_binary);
+}
+
+template <>
+void EQ<int, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                        Context *ctx) {
+  auto eq_lambda_binary = [](int a, int b) { return (a == b) ? 1 : 0; };
+  traverse_binary<int>(in1, in2, out, eq_lambda_binary);
+}
+
+template <>
+void Pow<float, lang::Cpp>(const Tensor &in, const float x, Tensor *out,
+                           Context *ctx) {
+  traverse_unary<float>(in, out, [x](float y) { return pow(y, x); });
+}
+
+template <>
+void Pow<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                           Context *ctx) {
+  auto pow_lambda_binary = [](float a, float b) { return pow(a, b); };
+  traverse_binary<float>(in1, in2, out, pow_lambda_binary);
+}
+
+template <>
+void ReLU<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto relu_lambda = [](float a) { return (a >= 0.f) ? a : 0.f; };
+  traverse_unary<float>(in, out, relu_lambda);
+}
+
+template <>
+void Set<float, lang::Cpp>(const float x, Tensor *out, Context *ctx) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) outPtr[i] = x;
+}
+
+template <>
+void Set<int, lang::Cpp>(const int x, Tensor *out, Context *ctx) {
+  int *outPtr = static_cast<int *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) outPtr[i] = x;
+}
+
+template <>
+void Sigmoid<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto sigmoid_lambda = [](float a) { return 1.f / (1.f + exp(-a)); };
+  traverse_unary<float>(in, out, sigmoid_lambda);
+}
+
+template <>
+void Sign<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto sign_lambda = [](float a) { return (a > 0) - (a < 0); };
+  traverse_unary<float>(in, out, sign_lambda);
+}
+
+template <>
+void SoftPlus<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto softplus_lambda = [](float a) { return log(1.f + exp(a)); };
+  traverse_unary<float>(in, out, softplus_lambda);
+}
+
+template <>
+void SoftSign<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto softsign_lambda = [](float a) { return a / (1.f + fabs(a)); };
+  traverse_unary<float>(in, out, softsign_lambda);
+}
+
+template <>
+void Sqrt<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto usqrt = [](float a) {
+    CHECK_GE(a, 0.f);
+    return sqrt(a);
+  };
+  traverse_unary<float>(in, out, usqrt);
+}
+
+template <>
+void Sub<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                           Context *ctx) {
+  // CHECK_EQ(ctx->stream, nullptr);
+  auto sub_lambda_binary = [](float a, float b) { return (a - b); };
+  traverse_binary<float>(in1, in2, out, sub_lambda_binary);
 }
 
 // sum all elements of input into out
 // TODO(wangwei) optimize using omp
 template <>
-void Sum<float, lang::Cpp>(const size_t num, const Block *in, float *out,
-                           Context *ctx) {
+void Sum<float, lang::Cpp>(const Tensor &in, float *out, Context *ctx) {
   float s = 0.f;
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  for (size_t i = 0; i < in.Size(); i++) {
     s += inPtr[i];
   }
   *out = s;
 }
 
-template <>
-void Tanh<float, lang::Cpp>(const size_t num, const Block *in, Block *out,
-                            Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] = tanh(inPtr[i]);
+#define GenUnaryTensorCppFn(fn, cppfn)                                     \
+  template <>                                                              \
+  void fn<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) { \
+    auto fn_lambda = [](float a) { return cppfn(a); };                     \
+    traverse_unary<float>(in, out, fn_lambda);                             \
   }
+
+GenUnaryTensorCppFn(Cos, cos);
+GenUnaryTensorCppFn(Cosh, cosh);
+GenUnaryTensorCppFn(Acos, acos);
+GenUnaryTensorCppFn(Acosh, acosh);
+GenUnaryTensorCppFn(Sin, sin);
+GenUnaryTensorCppFn(Sinh, sinh);
+GenUnaryTensorCppFn(Asin, asin);
+GenUnaryTensorCppFn(Asinh, asinh);
+GenUnaryTensorCppFn(Tan, tan);
+GenUnaryTensorCppFn(Tanh, tanh);
+GenUnaryTensorCppFn(Atan, atan);
+GenUnaryTensorCppFn(Atanh, atanh);
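+// each GenUnaryTensorCppFn(fn, cppfn) above expands to a specialization such
+// as Cos<float, lang::Cpp>(in, out, ctx), applying cppfn element-wise through
+// traverse_unary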
+
+template <>
+void Transform<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto identity = [](float a) { return a; };
+  traverse_unary<float>(in, out, identity);
 }
 
-// ===============Random operations==========================================
 template <>
-void Bernoulli<float, lang::Cpp>(const size_t num, const float p, Block *out,
-                                 Context *ctx) {
+void Transform<int, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  auto identity = [](int a) { return a; };
+  traverse_unary<int>(in, out, identity);
+}
+
+template <>
+void Bernoulli<float, lang::Cpp>(const float p, Tensor *out, Context *ctx) {
   std::bernoulli_distribution distribution(p);
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) {
     outPtr[i] = distribution(ctx->random_generator) ? 1.0f : 0.0f;
   }
 }
 
 template <>
-void Gaussian<float, lang::Cpp>(const size_t num, const float mean,
-                                const float std, Block *out, Context *ctx) {
+void Gaussian<float, lang::Cpp>(const float mean, const float std, Tensor *out,
+                                Context *ctx) {
   std::normal_distribution<float> distribution(mean, std);
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) {
     outPtr[i] = static_cast<float>(distribution(ctx->random_generator));
   }
 }
+
 template <>
-void Uniform<float, lang::Cpp>(const size_t num, const float low,
-                               const float high, Block *out, Context *ctx) {
+void Uniform<float, lang::Cpp>(const float low, const float high, Tensor *out,
+                               Context *ctx) {
   std::uniform_real_distribution<float> distribution(low, high);
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) {
     outPtr[i] = static_cast<float>(distribution(ctx->random_generator));
   }
 }
 
 // ====================Blas operations======================================
 
+// warning: out may alias block M, i.e. M can be overwritten in place
 template <>
-void DGMM<float, lang::Cpp>(const bool side_right, const size_t nrow,
-                            const size_t ncol, const Block *M, const Block *v,
-                            Block *out, Context *ctx) {
-  const float *MPtr = static_cast<const float *>(M->data());
-  const float *vPtr = static_cast<const float *>(v->data());
-  float *outPtr = static_cast<float *>(out->mutable_data());
+void DGMM<float, lang::Cpp>(const bool side_right, const Tensor &M,
+                            const Tensor &v, Tensor *out, Context *ctx) {
+  const float *MPtr = static_cast<const float *>(M.block()->data());
+  const float *vPtr = static_cast<const float *>(v.block()->data());
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  const size_t nrow = M.shape(0);
+  const size_t ncol = M.shape(1);
+
   if (side_right) {
     for (size_t r = 0; r < nrow; r++) {
-      size_t offset = r * ncol;
+      size_t in_offset = M.stride()[0] * r, out_offset = out->stride()[0] * r;
       for (size_t c = 0; c < ncol; c++) {
-        outPtr[offset + c] = MPtr[offset + c] * vPtr[c];
+        outPtr[out_offset] = MPtr[in_offset] * vPtr[c];
+        in_offset += M.stride()[1];
+        out_offset += out->stride()[1];
       }
     }
   } else {
     for (size_t r = 0; r < nrow; r++) {
-      size_t offset = r * ncol;
+      size_t in_offset = M.stride()[0] * r, out_offset = out->stride()[0] * r;
       for (size_t c = 0; c < ncol; c++) {
-        outPtr[offset + c] = MPtr[offset + c] * vPtr[r];
+        outPtr[out_offset] = MPtr[in_offset] * vPtr[r];
+        in_offset += M.stride()[1];
+        out_offset += out->stride()[1];
       }
     }
   }
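+// e.g. side_right == true scales each column c of M by v[c]
+// (out = M * diag(v)); side_right == false scales each row r by v[r]
+// (out = diag(v) * M)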
@@ -397,90 +728,194 @@
 
 #ifdef USE_CBLAS
 template <>
-void Amax<float, lang::Cpp>(const size_t num, const Block *in, size_t *out,
+void Amax<float, lang::Cpp>(const Tensor &in, size_t *out, Context *ctx) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  *out = cblas_isamax(in.Size(), inPtr, 1);  // not using strided traversal
+}
+
+template <>
+void Asum<float, lang::Cpp>(const Tensor &in, float *out, Context *ctx) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  *out = cblas_sasum(in.Size(), inPtr, 1);  // not using strided traversal
+}
+
+template <>
+void Axpy<float, lang::Cpp>(const float alpha, const Tensor &in, Tensor *out,
                             Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  *out = cblas_isamax(num, inPtr, 1);
-}
+  // check input tensor for strides first
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
 
-template <>
-void Asum<float, lang::Cpp>(const size_t num, const Block *in, float *out,
-                            Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  *out = cblas_sasum(num, inPtr, 1);
-}
-
-template <>
-void Axpy<float, lang::Cpp>(const size_t num, const float alpha,
-                            const Block *in, Block *out, Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  cblas_saxpy(num, alpha, inPtr, 1, outPtr, 1);
-}
-
-template <>
-void Dot<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           float *out, Context *ctx) {
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  *out = cblas_sdot(num, in1Ptr, 1, in2Ptr, 1);
-}
-template <>
-void Scale<float, lang::Cpp>(const size_t num, const float x, Block *out,
-                             Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  cblas_sscal(num, x, outPtr, 1);
-}
-template <>
-void Nrm2<float, lang::Cpp>(const size_t num, const Block *in, float *out,
-                            Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  *out = cblas_snrm2(num, inPtr, 1);
-}
-
-template <>
-void GEMV<float, lang::Cpp>(bool trans, const size_t m, const size_t n,
-                            const float alpha, const Block *A, const Block *v,
-                            const float beta, Block *out, Context *ctx) {
-  const float *APtr = static_cast<const float *>(A->data());
-  const float *vPtr = static_cast<const float *>(v->data());
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  if (!trans) {
-    cblas_sgemv(CblasRowMajor, CblasNoTrans, m, n, alpha, APtr, n, vPtr, 1,
-                beta, outPtr, 1);
+  if (in.stride() == out->stride()) {
+    cblas_saxpy(in.Size(), alpha, inPtr, 1, outPtr, 1);
   } else {
-    cblas_sgemv(CblasRowMajor, CblasTrans, n, m, alpha, APtr, m, vPtr, 1, beta,
-                outPtr, 1);
+    // LOG(FATAL) << "Axpy, input and output strides do not match." ;
+    Tensor t(in.shape(), in.device(), in.data_type());
+    EltwiseMult<float, lang::Cpp>(in, alpha, &t, ctx);
+    float *tPtr = static_cast<float *>(t.block()->mutable_data());
+    cblas_saxpy(in.Size(), 1, tPtr, 1, outPtr, 1);
+  }
+}
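+// note: the strided fallback above assumes out is contiguous; t is a dense
+// scratch copy holding alpha * in, accumulated into out by a unit-stride
+// saxpy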
+
+template <>
+void Dot<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, float *out,
+                           Context *ctx) {
+  // check input tensor for strides first
+  if (!(in1.transpose()) && !(in2.transpose())) {
+    const float *in1Ptr = static_cast<const float *>(in1.block()->data());
+    const float *in2Ptr = static_cast<const float *>(in2.block()->data());
+    *out = cblas_sdot(in1.Size(), in1Ptr, 1, in2Ptr, 1);
+  } else {
+    LOG(FATAL) << "Dot, one of the input is tranposed. Not implemented yet.";
+  }
+}
+template <>
+void Dot<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, Tensor *out,
+                           Context *ctx) {
+  // check input tensor for strides first
+  if (!(in1.transpose()) && !(in2.transpose())) {
+    const float *in1Ptr = static_cast<const float *>(in1.block()->data());
+    const float *in2Ptr = static_cast<const float *>(in2.block()->data());
+    float *outPtr = static_cast<float *>(out->block()->mutable_data());
+    *outPtr = cblas_sdot(in1.Size(), in1Ptr, 1, in2Ptr, 1);
+  } else {
+    LOG(FATAL) << "Dot, one of the input is tranposed. Not implemented yet.";
   }
 }
 
 template <>
-void GEMM<float, lang::Cpp>(const bool transA, const bool transB,
-                            const size_t nrowA, const size_t ncolB,
-                            const size_t ncolA, const float alpha,
-                            const Block *A, const Block *B, const float beta,
-                            Block *C, Context *ctx) {
+void Scale<float, lang::Cpp>(const float x, Tensor *out, Context *ctx) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  cblas_sscal(out->Size(), x, outPtr, 1);  // not using strided traversal
+}
+
+template <>
+void Nrm2<float, lang::Cpp>(const Tensor &in, float *out, Context *ctx) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  *out = cblas_snrm2(in.Size(), inPtr, 1);  // not using strided traversal
+}
+
+template <>
+void GEMV<float, lang::Cpp>(const float alpha, const Tensor &A, const Tensor &v,
+                            const float beta, Tensor *out, Context *ctx) {
+  const float *APtr = static_cast<const float *>(A.block()->data());
+  const float *vPtr = static_cast<const float *>(v.block()->data());
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  const size_t m = A.shape()[0];
+  const size_t n = A.shape()[1];
+  if (A.transpose()) {
+    cblas_sgemv(CblasRowMajor, CblasTrans, n, m, alpha, APtr, m, vPtr, 1, beta,
+                outPtr, 1);
+  } else {
+    cblas_sgemv(CblasRowMajor, CblasNoTrans, m, n, alpha, APtr, n, vPtr, 1,
+                beta, outPtr, 1);
+  }
+}
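+// when A is a transposed view, the underlying (n x m) buffer is passed to
+// cblas_sgemv with CblasTrans, so no explicit transpose copy is needed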
+
+template <>
+void GEMM<float, lang::Cpp>(const float alpha, const Tensor &A, const Tensor &B,
+                            const float beta, Tensor *C, Context *ctx) {
+  auto transA = A.transpose();
   auto transa = transA ? CblasTrans : CblasNoTrans;
+  auto transB = B.transpose();
   auto transb = transB ? CblasTrans : CblasNoTrans;
+  const size_t nrowA = A.shape()[0];
+  const size_t ncolA = A.shape()[1];
+  const size_t ncolB = B.shape()[1];
   auto lda = transA ? nrowA : ncolA;
   auto ldb = transB ? ncolA : ncolB;
   auto ldc = ncolB;
-  const float *APtr = static_cast<const float *>(A->data());
-  const float *BPtr = static_cast<const float *>(B->data());
-  float *CPtr = static_cast<float *>(C->mutable_data());
+  const float *APtr = static_cast<const float *>(A.block()->data());
+  const float *BPtr = static_cast<const float *>(B.block()->data());
+  float *CPtr = static_cast<float *>(C->block()->mutable_data());
   cblas_sgemm(CblasRowMajor, transa, transb, nrowA, ncolB, ncolA, alpha, APtr,
-	  lda, BPtr, ldb, beta, CPtr, ldc);
+              lda, BPtr, ldb, beta, CPtr, ldc);
+}
+
+/*
+ * implement matmul for 3d 4d tensor
+ *   simulate cblas_sgemm_batch();
+ *   which is only available in intel cblas
+ */
+template <>
+void GEMMBatched<float, lang::Cpp>(const float alpha, const Tensor &A,
+                                   const Tensor &B, const float beta, Tensor *C,
+                                   Context *ctx) {
+  const float *APtr = static_cast<const float *>(A.block()->data());
+  const float *BPtr = static_cast<const float *>(B.block()->data());
+  float *CPtr = static_cast<float *>(C->block()->mutable_data());
+
+  auto transA = A.transpose();
+  auto transa = transA ? CblasTrans : CblasNoTrans;
+  auto transB = B.transpose();
+  auto transb = transB ? CblasTrans : CblasNoTrans;
+
+  const size_t ncolB = B.shape().end()[-1];
+  const size_t nrowA = A.shape().end()[-2];
+  const size_t ncolA = A.shape().end()[-1];
+
+  auto lda = transA ? nrowA : ncolA;
+  auto ldb = transB ? ncolA : ncolB;
+  auto ldc = ncolB;
+  const int group_count = 1;
+
+  size_t group_size = A.shape()[0];                // 3d
+  if (A.nDim() == 4u) group_size *= A.shape()[1];  // 4d
+
+  auto matrix_stride_A = A.shape().end()[-1] * A.shape().end()[-2];
+  auto matrix_stride_B = B.shape().end()[-1] * B.shape().end()[-2];
+  auto matrix_stride_C = C->shape().end()[-1] * C->shape().end()[-2];
+  auto offset_A = 0;
+  auto offset_B = 0;
+  auto offset_C = 0;
+
+  for (size_t i = 0; i < group_size; i++) {
+    cblas_sgemm(CblasRowMajor, transa, transb, nrowA, ncolB, ncolA, alpha,
+                APtr + offset_A, lda, BPtr + offset_B, ldb, beta,
+                CPtr + offset_C, ldc);
+    offset_A += matrix_stride_A;
+    offset_B += matrix_stride_B;
+    offset_C += matrix_stride_C;
+  }
 }
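+// e.g. A of shape (b, m, k) with B of shape (b, k, n) gives C of shape
+// (b, m, n): group_size is b, and each iteration advances the pointers by
+// one m*k, k*n and m*n matrix respectively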
 
 #else
 
 template <>
-void Amax<float, lang::Cpp>(const size_t num, const Block *in, size_t *out,
-                            Context *ctx) {
+void Amax<float, lang::Cpp>(const Tensor &in, size_t *out, Context *ctx) {
   size_t maxPos = 0;
   float maxVal = 0;
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  for (size_t i = 0; i < in.Size(); i++) {  // not using strided traversal
     if (i == 0) {
       maxVal = inPtr[i];
     } else if (inPtr[i] > maxVal) {
@@ -490,13 +925,13 @@
   }
   *out = maxPos;
 }
+
 template <>
-void Amin<float, lang::Cpp>(const size_t num, const Block *in, size_t *out,
-                            Context *ctx) {
+void Amin<float, lang::Cpp>(const Tensor &in, size_t *out, Context *ctx) {
   size_t minPos = 0;
   float minVal = 0;
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  for (size_t i = 0; i < in.Size(); i++) {  // not using strided traversal
     if (i == 0) {
       minVal = inPtr[i];
-    } else if (inPtr[i] > minVal) {
+    } else if (inPtr[i] < minVal) {  // track the minimum, not the maximum
@@ -508,52 +943,70 @@
 }
 
 template <>
-void Asum<float, lang::Cpp>(const size_t num, const Block *in, float *out,
+void Asum<float, lang::Cpp>(const Tensor &in, float *out, Context *ctx) {
+  float sum = 0;
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  for (size_t i = 0; i < in.Size(); i++) {
+    sum += fabs(inPtr[i]);  // not using strided traversal
+  }
+  *out = sum;
+}
+
+template <>
+void Axpy<float, lang::Cpp>(const float alpha, const Tensor &in, Tensor *out,
                             Context *ctx) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  vector<int> traversal_info = generate_traversal_info(in);
+  vector<int> shape_multipliers = generate_shape_multipliers(in);
+
+  for (size_t i = 0; i < in.Size(); i++) {
+    outPtr[i] += alpha * inPtr[traversal_info[in.shape().size()]];
+    traverse_next(in, shape_multipliers, traversal_info, i + 1);
+  }
+}
+
+template <>
+void Scale<float, lang::Cpp>(const float x, Tensor *out, Context *ctx) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  for (size_t i = 0; i < out->Size(); i++) {
+    outPtr[i] *= x;  // not using strided traversal
+  }
+}
+
+template <>
+void Dot<float, lang::Cpp>(const Tensor &in1, const Tensor &in2, float *out,
+                           Context *ctx) {
   float sum = 0;
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    sum += fabs(inPtr[i]);
+  // const float *in1Ptr = static_cast<const float *>(in1.data());
+  // const float *in2Ptr = static_cast<const float *>(in2.data());
+  // for (size_t i = 0; i < in.Size(); i++) {
+  //   sum += in1Ptr[i] * in2Ptr[i];
+  // }
+  const float *in1Ptr = static_cast<const float *>(in1.block()->data());
+  const float *in2Ptr = static_cast<const float *>(in2.block()->data());
+  vector<int> traversal_info_in1 = generate_traversal_info(in1);
+  vector<int> traversal_info_in2 = generate_traversal_info(in2);
+  vector<int> shape_multipliers_in1 = generate_shape_multipliers(in1);
+  vector<int> shape_multipliers_in2 = generate_shape_multipliers(in2);
+
+  for (size_t i = 0; i < in1.Size(); i++) {
+    sum += in1Ptr[traversal_info_in1[in1.shape().size()]] *
+           in2Ptr[traversal_info_in2[in2.shape().size()]];
+    traverse_next(in1, shape_multipliers_in1, traversal_info_in1, i + 1);
+    traverse_next(in2, shape_multipliers_in2, traversal_info_in2, i + 1);
   }
+  *out = sum;
 }
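+// unlike the CBLAS Dot above, this fallback walks both inputs with
+// traverse_next, so transposed or otherwise strided operands are reduced
+// correctly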
 
 template <>
-void Axpy<float, lang::Cpp>(const size_t num, const float alpha,
-                            const Block *in, Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] += alpha * inPtr[i];
-  }
-}
-
-template <>
-void Scale<float, lang::Cpp>(const size_t num, const float x, Block *out,
-                             Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  for (size_t i = 0; i < num; i++) {
-    outPtr[i] *= x;
-  }
-}
-
-template <>
-void Dot<float, lang::Cpp>(const size_t num, const Block *in1, const Block *in2,
-                           float *out, Context *ctx) {
-  float sum = 0;
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
-  for (size_t i = 0; i < num; i++) {
-    sum += in1Ptr[i] * in2Ptr[i];
-  }
-}
-
-template <>
-void GEMV<float, lang::Cpp>(bool trans, const size_t m, const size_t n,
-                            const float alpha, const Block *A, const Block *v,
-                            const float beta, Block *out, Context *ctx) {
-  float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *APtr = static_cast<const float *>(A->data());
-  const float *vPtr = static_cast<const float *>(v->data());
+void GEMV<float, lang::Cpp>(const float alpha, const Tensor &A, const Tensor &v,
+                            const float beta, Tensor *out, Context *ctx) {
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  const float *APtr = static_cast<const float *>(A.block()->data());
+  const float *vPtr = static_cast<const float *>(v.block()->data());
+  bool trans = A.transpose();
+  const size_t m = A.shape(0);
+  const size_t n = A.shape(1);
   for (size_t r = 0; r < m; r++) {
     float sum = 0;
     for (size_t c = 0; c < n; c++) {
@@ -582,16 +1035,17 @@
       lossPtr[i] = -std::log((std::max)(prob_of_truth, FLT_MIN));
     }
   } else {
-    for (size_t i = 0;i < batchsize; i++) {
+    for (size_t i = 0; i < batchsize; i++) {
       float sum = 0.f;
       for (size_t j = 0; j < dim; j++) {
         sum += tPtr[i * dim + j];
       }
-      float loss = 0.f;
+      float loss_value = 0.f;
       for (size_t j = 0, offset = i * dim; j < dim; j++, offset++) {
-        loss -= tPtr[offset] / sum * std::log((std::max)(pPtr[offset], FLT_MIN));
+        loss_value -=
+            tPtr[offset] / sum * std::log((std::max)(pPtr[offset], FLT_MIN));
       }
-      lossPtr[i] = loss;
+      lossPtr[i] = loss_value;
     }
   }
 }
@@ -627,15 +1081,22 @@
 }
 
 template <>
-void RowMax<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                              const Block *in, Block *out, Context *ctx) {
-  const float *inPtr = static_cast<const float *>(in->data());
-  float *outPtr = static_cast<float *>(out->mutable_data());
+void RowMax<float, lang::Cpp>(const Tensor &in, Tensor *out, Context *ctx) {
+  const float *inPtr = static_cast<const float *>(in.block()->data());
+  float *outPtr = static_cast<float *>(out->block()->mutable_data());
+  const size_t nrow = in.shape()[0];
+  const size_t ncol = in.shape()[1];
+  vector<int> traversal_info = generate_traversal_info(in);
+  vector<int> shape_multipliers = generate_shape_multipliers(in);
+
   for (size_t r = 0; r < nrow; r++) {
-    int offset = (int)(r * ncol);
-    float maxval = inPtr[offset];
-    for (size_t c = 1; c < ncol; c++)
-      maxval = (std::max)(maxval, inPtr[offset + c]);
+    int counter_offset = static_cast<int>(r * ncol);
+    float maxval = -FLT_MAX;  // lowest float, so all-negative rows work too
+    for (size_t c = 0; c < ncol; c++) {
+      maxval = (std::max)(maxval, inPtr[traversal_info[in.shape().size()]]);
+      traverse_next(in, shape_multipliers, traversal_info,
+                    counter_offset + c + 1);
+    }
     outPtr[r] = maxval;
   }
 }
@@ -643,12 +1104,32 @@
 // =========Matrix operations ================================================
 /*
 template <>
 void AddCol<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                              const Block *A, const Block *v, Block *out,
+                              const Tensor& A, const Tensor& v, Tensor* out,
                               Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *APtr = static_cast<const float *>(A->data());
-  const float *vPtr = static_cast<const float *>(v->data());
+  const float *APtr = static_cast<const float *>(A.data());
+  const float *vPtr = static_cast<const float *>(v.data());
   for (size_t r = 0; r < nrow; r++) {
     size_t offset = r * ncol;
     for (size_t c = 0; c < ncol; c++) {
@@ -659,11 +1140,11 @@
 
 template <>
 void AddRow<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                              const Block *A, const Block *v, Block *out,
+                              const Tensor& A, const Tensor& v, Tensor* out,
                               Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *APtr = static_cast<const float *>(A->data());
-  const float *vPtr = static_cast<const float *>(v->data());
+  const float *APtr = static_cast<const float *>(A.data());
+  const float *vPtr = static_cast<const float *>(v.data());
   for (size_t r = 0; r < nrow; r++) {
     size_t offset = r * ncol;
     for (size_t c = 0; c < ncol; c++) {
@@ -672,11 +1153,11 @@
   }
 }
 template <>
-void Outer<float, lang::Cpp>(const size_t m, const size_t n, const Block *in1,
-                             const Block *in2, Block *out, Context *ctx) {
+void Outer<float, lang::Cpp>(const size_t m, const size_t n, const Tensor& in1,
+                             const Tensor& in2, Tensor* out, Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *in1Ptr = static_cast<const float *>(in1->data());
-  const float *in2Ptr = static_cast<const float *>(in2->data());
+  const float *in1Ptr = static_cast<const float *>(in1.data());
+  const float *in2Ptr = static_cast<const float *>(in2.data());
   for (size_t r = 0; r < m; r++) {
     size_t offset = r * n;
     for (size_t c = 0; c < n; c++) {
@@ -686,9 +1167,9 @@
 }
 template <>
 void Softmax<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                               const Block *in, Block *out, Context *ctx) {
+                               const Tensor& in, Tensor* out, Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
+  const float *inPtr = static_cast<const float *>(in.data());
   float *bPtr = new float[ncol];
   for (size_t r = 0; r < nrow; r++) {
     size_t offset = r * ncol;
@@ -707,9 +1188,9 @@
 
 template <>
 void SumColumns<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                                  const Block *in, Block *out, Context *ctx) {
+                                  const Tensor& in, Tensor* out, Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
+  const float *inPtr = static_cast<const float *>(in.data());
   for (size_t c = 0; c < ncol; c++) {
     outPtr[c] = 0.f;
   }
@@ -723,9 +1204,9 @@
 
 template <>
 void SumRows<float, lang::Cpp>(const size_t nrow, const size_t ncol,
-                               const Block *in, Block *out, Context *ctx) {
+                               const Tensor& in, Tensor* out, Context *ctx) {
   float *outPtr = static_cast<float *>(out->mutable_data());
-  const float *inPtr = static_cast<const float *>(in->data());
+  const float *inPtr = static_cast<const float *>(in.data());
   for (size_t r = 0; r < nrow; r++) {
     size_t offset = r * ncol;
     outPtr[r] = 0.f;
diff --git a/src/core/tensor/tensor_math_cuda.h b/src/core/tensor/tensor_math_cuda.h
index 8a9e47a..b3ff100 100644
--- a/src/core/tensor/tensor_math_cuda.h
+++ b/src/core/tensor/tensor_math_cuda.h
@@ -16,276 +16,837 @@
  * limitations under the License.
  */
 
-#ifndef  SINGA_CORE_TENSOR_TENSOR_MATH_CUDA_H_
-#define  SINGA_CORE_TENSOR_TENSOR_MATH_CUDA_H_
+#ifndef SINGA_CORE_TENSOR_TENSOR_MATH_CUDA_H_
+#define SINGA_CORE_TENSOR_TENSOR_MATH_CUDA_H_
 #include "singa/singa_config.h"
 #ifdef USE_CUDA
-#include "./tensor_math.h"
-#include "./math_kernel.h"
-#include "singa/utils/cuda_utils.h"
-#include "singa/core/common.h"
-#include <cuda_runtime.h>
 #include <cublas_v2.h>
+#include <cuda_runtime.h>
+#include <cudnn.h>
+
+#include "./math_kernel.h"
+#include "./tensor_math.h"
+#include "singa/core/common.h"
+#include "singa/core/tensor.h"
 #include "singa/utils/cuda_utils.h"
 
+#define check_cudnn(expression)                          \
+  {                                                      \
+    cudnnStatus_t status = (expression);                 \
+    if (status != CUDNN_STATUS_SUCCESS) {                \
+      LOG(FATAL) << "Error on line " << __LINE__ << ": " \
+                 << cudnnGetErrorString(status) << " ";  \
+    }                                                    \
+  }
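+// Usage sketch (illustrative, not part of the patch): wrap any call that
+// returns a cudnnStatus_t, e.g.
+//   cudnnTensorDescriptor_t desc;
+//   check_cudnn(cudnnCreateTensorDescriptor(&desc));
+// Any non-success status aborts with the line number and cudnn error string.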
+
 namespace singa {
 
+// ===================== Helper Functions =============================
+
+/*
+cudnn requires tensor shapes to satisfy one constraint:
+  1.) Shapes must be padded to a minimum of 4 dimensions for 4d and lower
+      dimensional tensors. A 5d input tensor is passed to cudnn as a 5d
+      tensor; beyond 5d, certain operations are not supported
+      (cudnnOp supports up to 5d, cudnnReduce supports up to 8d).
+
+  e.g. Tensor A with shape {3,3} must be given to cudnn as shape {1,1,3,3};
+       Tensor B with shape {2,3,4} must be given to cudnn as shape {1,2,3,4}.
+*/
+vector<int> generate_shape_cuda(const Tensor& x) {
+  Shape shape = x.shape();
+  // maximum dimension allowed defined in cudnn.h, variable CUDNN_DIM_MAX
+  // TODO: check other side effects
+  CHECK_LE(shape.size(), CUDNN_DIM_MAX)
+      << "Dimensions (shape) beyond " << CUDNN_DIM_MAX << " are currently not supported";
+  vector<int> shape_arr;
+  if (shape.size() < 4) {
+    for (int n = 0; n < 4 - int(shape.size()); ++n) {
+      shape_arr.push_back(1);
+    }
+  }
+  for (auto dim : shape) {
+    shape_arr.push_back(static_cast<int>(dim));
+  }
+  return shape_arr;
+}
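+// Illustrative expectation, assuming a 2d Tensor t with shape {3, 3}:
+//   generate_shape_cuda(t) pads on the left with 1s and returns {1, 1, 3, 3};
+// a shape with 4 or more dimensions is returned unchanged.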
+
+int generate_dim_cuda(const Tensor& x) {
+  // maximum dimension allowed defined in cudnn.h, variable CUDNN_DIM_MAX
+  CHECK_LE(x.nDim(), CUDNN_DIM_MAX)
+      << "Dimensions (shape) beyond " << CUDNN_DIM_MAX << " are currently not supported";
+  if (x.shape().size() <= 4) {
+    return 4;
+  } else {
+    return x.nDim();
+  }
+}
+
+/*
+  cudnn requires strides to conform to the same padded format as the shape:
+    1.) Strides must be padded to a minimum of 4 dimensions for 4d and lower
+        dimensional tensors. A 5d input tensor is passed to cudnn as a 5d
+        tensor; beyond 5d, certain operations are not supported
+        (cudnnOp supports up to 5d, cudnnReduce supports up to 8d).
+
+    e.g. Tensor A with shape {3,3} and stride {3,1} requires shape {1,1,3,3}
+         and stride {9, 9, 3, 1} or {9, 9, 1, 3} as the inputs.
+  */
+vector<int> generate_strides_cuda(const Tensor& x) {
+  Shape shape = x.shape();
+  auto& strides = x.stride();
+  vector<int> strides_arr;
+  int product = Product(shape);
+  if (shape.size() < 4) {
+    for (int n = 0; n < 4 - int(shape.size()); ++n) {
+      strides_arr.push_back(product);
+    }
+  }
+  for (auto s : strides) strides_arr.push_back(static_cast<int>(s));
+  return strides_arr;
+}
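+// Illustrative expectation, continuing the example above: for shape {3, 3}
+// with contiguous stride {3, 1}, Product(shape) is 9, so the padded strides
+// are {9, 9, 3, 1}, matching the padded shape {1, 1, 3, 3}.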
+
+cudnnTensorDescriptor_t generate_tensor_nd_desc(const Tensor& x) {
+  cudnnTensorDescriptor_t x_desc;
+  check_cudnn(cudnnCreateTensorDescriptor(&x_desc));
+  // LOG(INFO) << vec2str(x.shape());
+  // LOG(INFO) << vec2str(x.stride());
+  auto st = x.stride();
+  std::vector<size_t> sh;
+  bool reshape = false;
+  for (size_t i = 0; i < st.size(); i++) {
+    if (st[i] == 0) {
+      sh.push_back(1);
+      reshape = true;
+    } else {
+      sh.push_back(x.shape(i));
+    }
+  }
+  auto y = x;
+  if (reshape) y = Reshape(x, sh);
+
+  auto shape = generate_shape_cuda(y);
+  auto stride = generate_strides_cuda(y);
+
+  // LOG(INFO) << vec2str(shape);
+  // LOG(INFO) << vec2str(stride);
+  // LOG(INFO) << "";
+  check_cudnn(cudnnSetTensorNdDescriptor(x_desc, CUDNN_DATA_FLOAT,
+                                         generate_dim_cuda(y), shape.data(),
+                                         stride.data()));
+
+  return x_desc;
+}
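+// Note on the stride-0 branch above: a broadcast axis is stored with stride
+// 0, which cudnn descriptors generally reject, so the tensor is viewed with
+// each such axis collapsed to length 1 before the descriptor is filled in.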
+
+cudnnOpTensorDescriptor_t generate_op_desc(cudnnOpTensorOp_t op) {
+  cudnnOpTensorDescriptor_t op_desc;
+  check_cudnn(cudnnCreateOpTensorDescriptor(&op_desc));
+  check_cudnn(cudnnSetOpTensorDescriptor(op_desc, op, CUDNN_DATA_FLOAT,
+                                         CUDNN_PROPAGATE_NAN));
+
+  return op_desc;
+}
+
+// ===================== CUDA Functions =============================
+
 /// out[i] = |in[i]|
 template <>
-void Abs<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                            Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::abs(num, inPtr, outPtr, ctx->stream);
-}
-/// out = in + x
-template <>
-void Add<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                            Block* out, Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::add(num, inPtr, x, outPtr, ctx->stream);
-}
-/// out = in1 + in2
-template <>
-void Add<float, lang::Cuda>(const size_t num, const Block* in1,
-                            const Block* in2, Block* out, Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::add(num, inPtr1, inPtr2, outPtr, ctx->stream);
-}
-/// Element-wise operation, clamp every element into [low, high]
-/// if x>high, then x=high; if x<low, then x=low.
-template <>
-void Clamp<float, lang::Cuda>(const size_t num, const float low,
-                              const float high, const Block* in, Block* out,
-                              Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::clamp(num, low, high, inPtr, outPtr, ctx->stream);
-}
-/// out = in1 / in2
-template <>
-void Div<float, lang::Cuda>(const size_t num, const Block* in1,
-                            const Block* in2, Block* out, Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::div(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void Abs<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  float alpha1 = 1.0;
+  float alpha2 = -1.0;
+  float beta = 0.0;
+  cudnnTensorDescriptor_t in_desc = generate_tensor_nd_desc(in);
+  check_cudnn(cudnnOpTensor(
+      ctx->cudnn_handle, generate_op_desc(CUDNN_OP_TENSOR_MAX),
+      (void*)(&alpha1), in_desc, inPtr, (void*)(&alpha2), in_desc, inPtr,
+      (void*)(&beta), generate_tensor_nd_desc(*out), outPtr));
+  cudnnDestroyTensorDescriptor(in_desc);
 }
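+// The call above computes |x| as max(x, -x): with alpha1 = 1 and alpha2 = -1
+// the same input feeds both operands of CUDNN_OP_TENSOR_MAX, so no dedicated
+// abs kernel is needed.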
 
 template <>
-void Div<float, lang::Cuda>(const size_t num, const float x, const Block* in,
-                            Block* out, Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::div(num, x, inPtr, outPtr, ctx->stream);
+void CastCopy<float, int, lang::Cuda>(const Tensor* src, Tensor* dst,
+                                      Context* ctx) {
+  const float* srcPtr = static_cast<const float*>(src->block()->data());
+  int* dstPtr = static_cast<int*>(dst->block()->mutable_data());
+  cuda::cast_float_2_int(dst->Size(), srcPtr, dstPtr, ctx->stream);
+}
+
+template <>
+void CastCopy<int, float, lang::Cuda>(const Tensor* src, Tensor* dst,
+                                      Context* ctx) {
+  const int* srcPtr = static_cast<const int*>(src->block()->data());
+  float* dstPtr = static_cast<float*>(dst->block()->mutable_data());
+  cuda::cast_int_2_float(dst->Size(), srcPtr, dstPtr, ctx->stream);
+}
+
+template <>
+void Set<float, lang::Cuda>(const float x, Tensor* out, Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  check_cudnn(cudnnSetTensor(ctx->cudnn_handle, generate_tensor_nd_desc(*out),
+                             outPtr, (void*)(&x)));
+}
+
+template <>
+void Add<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                            Context* ctx) {
+  Set<float, lang::Cuda>(x, out, ctx);
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  float alpha = 1.0, beta = 1.0;
+  check_cudnn(cudnnAddTensor(ctx->cudnn_handle, (void*)(&alpha),
+                             generate_tensor_nd_desc(in), inPtr, (void*)(&beta),
+                             generate_tensor_nd_desc(*out), outPtr));
+}
+
+template <typename T>
+void TraverseUnaryTransformImpl(const Tensor& in1, Tensor* in1Bc,
+                                Context* ctx) {
+  Tensor shape(Shape{in1.nDim()}, in1.device(), kInt);
+  Tensor stride(Shape{in1.nDim()}, in1.device(), kInt);
+  const vector<int> strideVec(in1.stride().begin(), in1.stride().end());
+  const vector<int> shapeVec(in1.shape().begin(), in1.shape().end());
+  shape.CopyDataFromHostPtr(shapeVec.data(), in1.nDim());
+  stride.CopyDataFromHostPtr(strideVec.data(), in1.nDim());
+  const int* shapePtr = static_cast<const int*>(shape.block()->data());
+  const int* stridePtr = static_cast<const int*>(stride.block()->data());
+
+  const T* inPtr1 = static_cast<const T*>(in1.block()->data());
+  T* inBcPtr1 = static_cast<T*>(in1Bc->block()->mutable_data());
+
+  const size_t n = Product(in1Bc->shape());
+
+  cuda::traverse_unary_transform(n, in1.nDim(), inPtr1, shapePtr, stridePtr,
+                                 inBcPtr1, ctx->stream);
+}
+template void TraverseUnaryTransformImpl<float>(const Tensor& in1,
+                                                Tensor* in1Bc, Context* ctx);
+
+template <>
+void Transform<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  if (in.broadcasted()) {
+    TraverseUnaryTransformImpl<float>(in, out, ctx);
+    return;
+  }
+
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  float alpha = 1.0;
+  float beta = 0.0;
+
+  check_cudnn(cudnnTransformTensor(
+      ctx->cudnn_handle, (void*)(&alpha), generate_tensor_nd_desc(in), inPtr,
+      (void*)(&beta), generate_tensor_nd_desc(*out), outPtr));
+}
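+// Transform materializes a tensor into the destination layout: broadcasted
+// inputs (stride 0 on some axis) take the custom traversal kernel above,
+// while transposed or strided inputs go through cudnnTransformTensor.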
+
+/// Element-wise add/sub/mult/div/pow on two tensors, with handling for
+/// transposed and broadcasted operands.
+#define GenBinaryMathFn(fn, kernel)                                       \
+  template <>                                                             \
+  void fn<float, lang::Cuda>(const Tensor& in1, const Tensor& in2,        \
+                             Tensor* out, Context* ctx) {                 \
+    const float* inPtr1 = static_cast<const float*>(in1.block()->data()); \
+    const float* inPtr2 = static_cast<const float*>(in2.block()->data()); \
+    float* outPtr = static_cast<float*>(out->block()->mutable_data());    \
+    const size_t num = out->Size();                                       \
+                                                                          \
+    if (!in1.broadcasted() && !in2.broadcasted()) {                       \
+      if (!in1.transpose() && !in2.transpose() &&                         \
+          (in1.stride() == in2.stride())) {                               \
+        kernel(num, inPtr1, inPtr2, outPtr, ctx->stream);                 \
+      } else {                                                            \
+        if (in1.transpose() && in2.transpose()) {                         \
+          Tensor t(in1.shape(), in1.device(), in1.data_type());           \
+          Transform<float, lang::Cuda>(in1, &t, ctx);                     \
+          Transform<float, lang::Cuda>(in2, out, ctx);                    \
+                                                                          \
+          float* tPtr = static_cast<float*>(t.block()->mutable_data());   \
+          kernel(num, tPtr, outPtr, outPtr, ctx->stream);                 \
+        } else if (in1.transpose()) {                                     \
+          Transform<float, lang::Cuda>(in1, out, ctx);                    \
+          kernel(num, outPtr, inPtr2, outPtr, ctx->stream);               \
+        } else if (in2.transpose()) {                                     \
+          Transform<float, lang::Cuda>(in2, out, ctx);                    \
+          kernel(num, inPtr1, outPtr, outPtr, ctx->stream);               \
+        }                                                                 \
+      }                                                                   \
+    } else {                                                              \
+      Tensor in1bc;                                                       \
+      Tensor in2bc;                                                       \
+      if (in1.broadcasted()) {                                            \
+        in1bc = Tensor(in1.shape(), in1.device(), in1.data_type());       \
+        Transform<float, lang::Cuda>(in1, &in1bc, ctx);                   \
+        inPtr1 = static_cast<const float*>(in1bc.block()->data());        \
+      }                                                                   \
+                                                                          \
+      if (in2.broadcasted()) {                                            \
+        in2bc = Tensor(in2.shape(), in2.device(), in2.data_type());       \
+        Transform<float, lang::Cuda>(in2, &in2bc, ctx);                   \
+        inPtr2 = static_cast<const float*>(in2bc.block()->data());        \
+      }                                                                   \
+                                                                          \
+      kernel(num, inPtr1, inPtr2, outPtr, ctx->stream);                   \
+    }                                                                     \
+  }
+
+/// out = in1 * in2
+GenBinaryMathFn(EltwiseMult, cuda::mult);
+/// out = in1 + in2
+GenBinaryMathFn(Add, cuda::add);
+/// out = in1 - in2
+GenBinaryMathFn(Sub, cuda::sub);
+/// out = in1 / in2
+GenBinaryMathFn(Div, cuda::div);
+/// out = in1 ^ in2
+GenBinaryMathFn(Pow, cuda::pow);
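+// Expansion sketch: GenBinaryMathFn(Add, cuda::add) defines
+// Add<float, lang::Cuda>(in1, in2, out, ctx), which calls cuda::add directly
+// when both operands are plain contiguous tensors, and otherwise first
+// materializes transposed or broadcasted operands via Transform.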
+
+/// Element-wise operation, clamp every element into [low, high]
+/// if x>high, then x=high; if x<low, then x=low.
+template <>
+void Clamp<float, lang::Cuda>(const float low, const float high,
+                              const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+  // if both in and out strides are the same, we proceed to normal cuda::clamp
+  if (in.stride() == out->stride()) {
+    cuda::clamp(num, low, high, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::clamp(num, low, high, outPtr, outPtr, ctx->stream);
+  }
+}
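+// The stride check above is the pattern shared by most unary ops below: when
+// in and out have the same layout the kernel reads inPtr directly; otherwise
+// Transform first copies in into out, and the kernel then runs in place on
+// outPtr.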
+
+template <>
+void Div<float, lang::Cuda>(const float x, const Tensor& in, Tensor* out,
+                            Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::div(num, x, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::div(num, x, outPtr, outPtr, ctx->stream);
+  }
 }
 
 /// out = in * x
 template <>
-void EltwiseMult<float, lang::Cuda>(const size_t num, const Block* in,
-                                    const float x, Block* out, Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
+void EltwiseMult<float, lang::Cuda>(const Tensor& in, const float x,
+                                    Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
   cuda::mult(num, inPtr, x, outPtr, ctx->stream);
 }
-/// out = in1 * in2
-template <>
-void EltwiseMult<float, lang::Cuda>(const size_t num, const Block* in1,
-                                    const Block* in2, Block* out,
-                                    Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::mult(num, inPtr1, inPtr2, outPtr, ctx->stream);
-}
+
 /// Base is e. out[i]=e^in[i]
 template <>
-void Exp<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                            Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::exp(num, inPtr, outPtr, ctx->stream);
+void Exp<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::exp(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::exp(num, outPtr, outPtr, ctx->stream);
+  }
 }
 
 template <>
-void GE<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr = static_cast<const float*>(in->data());
-  cuda::ge(num, inPtr, x, outPtr, ctx->stream);
-}
-template <>
-void GE<float, lang::Cuda>(const size_t num, const Block* in1, const Block* in2,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  cuda::ge(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void Erf<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::erf(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::erf(num, outPtr, outPtr, ctx->stream);
+  }
 }
 
+template <>
+void Ceil<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::ceil2(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::ceil2(num, outPtr, outPtr, ctx->stream);
+  }
+}
 
 template <>
-void GT<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr = static_cast<const float*>(in->data());
-  cuda::gt(num, inPtr, x, outPtr, ctx->stream);
+void Floor<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::floor(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::floor(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+template <>
+void Round<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::round(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::round(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+template <>
+void RoundE<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::rounde(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::rounde(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+template <>
+void GE<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                           Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::ge(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::ge(num, outPtr, x, outPtr, ctx->stream);
+  }
 }
 template <>
-void GT<float, lang::Cuda>(const size_t num, const Block* in1, const Block* in2,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  cuda::gt(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void GE<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                           Context* ctx) {
+  Sub<float, lang::Cuda>(in1, in2, out, ctx);
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::ge(num, outPtr, 0.0, outPtr, ctx->stream);
+}
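+// Tensor-tensor comparisons are built on Sub: out = in1 - in2 is computed
+// first and the scalar kernel then compares the difference against 0, i.e.
+// GE(in1, in2) == ge(in1 - in2, 0). GT, LE, LT and EQ below do the same.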
+
+template <>
+void GT<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                           Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::gt(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::gt(num, outPtr, x, outPtr, ctx->stream);
+  }
 }
 template <>
-void LE<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr = static_cast<const float*>(in->data());
-  cuda::le(num, inPtr, x, outPtr, ctx->stream);
+void GT<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                           Context* ctx) {
+  Sub<float, lang::Cuda>(in1, in2, out, ctx);
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::gt(num, outPtr, 0.0, outPtr, ctx->stream);
+}
+
+template <>
+void LE<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                           Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::le(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::le(num, outPtr, x, outPtr, ctx->stream);
+  }
 }
 template <>
-void LE<float, lang::Cuda>(const size_t num, const Block* in1, const Block* in2,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  cuda::le(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void LE<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                           Context* ctx) {
+  Sub<float, lang::Cuda>(in1, in2, out, ctx);
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::le(num, outPtr, 0.0, outPtr, ctx->stream);
+}
+
+template <>
+void EQ<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                           Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::eq(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::eq(num, outPtr, x, outPtr, ctx->stream);
+  }
+}
+template <>
+void EQ<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                           Context* ctx) {
+  Sub<float, lang::Cuda>(in1, in2, out, ctx);
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::eq(num, outPtr, 0.0, outPtr, ctx->stream);
 }
 
 /// Natural logarithm, base e (Euler's number): out[i] = ln(in[i]).
 template <>
-void Log<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                            Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::log(num, inPtr, outPtr, ctx->stream);
+void Log<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::log(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::log(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+template <>
+void LT<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                           Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::lt(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::lt(num, outPtr, x, outPtr, ctx->stream);
+  }
 }
 template <>
-void LT<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr = static_cast<const float*>(in->data());
-  cuda::lt(num, inPtr, x, outPtr, ctx->stream);
+void LT<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                           Context* ctx) {
+  Sub<float, lang::Cuda>(in1, in2, out, ctx);
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::lt(num, outPtr, 0.0, outPtr, ctx->stream);
 }
-template <>
-void LT<float, lang::Cuda>(const size_t num, const Block* in1, const Block* in2,
-                           Block* out, Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  cuda::lt(num, inPtr1, inPtr2, outPtr, ctx->stream);
-}
+
 /// Element-wise operation, out[i] = in[i]^x
 template <>
-void Pow<float, lang::Cuda>(const size_t num, const Block* in, const float x,
-                            Block* out, Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::pow(num, inPtr, x, outPtr, ctx->stream);
+void Pow<float, lang::Cuda>(const Tensor& in, const float x, Tensor* out,
+                            Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::pow(num, inPtr, x, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::pow(num, outPtr, x, outPtr, ctx->stream);
+  }
 }
-/// Element-wise operation, out[i] = in1[i]^in2[i]
+
 template <>
-void Pow<float, lang::Cuda>(const size_t num, const Block* in1,
-                            const Block* in2, Block* out, Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::pow(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void ReLUBackward<float, lang::Cuda>(const Tensor& in1, const Tensor& in2,
+                                     Tensor* out, Context* ctx) {
+  const float* in1Ptr = static_cast<const float*>(in1.block()->data());
+  const float* in2Ptr = static_cast<const float*>(in2.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in1.Size();
+  cuda::relubackward(num, in1Ptr, in2Ptr, outPtr, ctx->stream);
 }
 
 /// Element-wise operation, out[i]=max(0, in[i])
+// template <>
+// void ReLU<float, lang::Cuda>(const Tensor& in, Tensor* out,
+//                              Context* ctx) {
+//   const float* inPtr = static_cast<const float*>(in.block()->data());
+//   float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+//   cudnnActivationDescriptor_t act_desc;
+//   cudnnActivationMode_t mode = CUDNN_ACTIVATION_RELU;
+//   cudnnNanPropagation_t cudnn_propagation = CUDNN_PROPAGATE_NAN;
+//   double coef = 0.0; //only used for CLIPPED_RELU or ELU
+//   cudnnCreateActivationDescriptor(&act_desc);
+//   cudnnSetActivationDescriptor(act_desc, mode, cudnn_propagation, coef);
+
+//   float alpha[1] = {1.0};
+//   float beta[1] = {0.0};
+//   cudnnDataType_t cudnn_dtype = CUDNN_DATA_FLOAT;
+//   cudnnTensorDescriptor_t in_desc, out_desc;
+//   cudnnCreateTensorDescriptor(&in_desc);
+//   cudnnCreateTensorDescriptor(&out_desc);
+//   cudnnSetTensorNdDescriptor(in_desc, cudnn_dtype, in.generate_dim_cuda(),
+// in.generate_shape_cuda().data(), in.generate_strides_cuda().data());
+//   cudnnSetTensorNdDescriptor(out_desc, cudnn_dtype, out->generate_dim_cuda(),
+// out->generate_shape_cuda().data(), out->generate_strides_cuda().data());
+//   cudnnActivationForward(ctx->cudnn_handle, act_desc, (void*)(&alpha),
+//   in_desc, inPtr,
+//                         (void*)(&beta), out_desc, outPtr);
+
+//   cudnnDestroyTensorDescriptor(in_desc);
+//   cudnnDestroyTensorDescriptor(out_desc);
+//   cudnnDestroyActivationDescriptor(act_desc);
+// }
+
 template <>
-void ReLU<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::relu(num, inPtr, outPtr, ctx->stream);
+void ReLU<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::relu(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::relu(num, outPtr, outPtr, ctx->stream);
+  }
 }
 
-/// out[i] = x
-template <>
-void Set<float, lang::Cuda>(const size_t num, const float x, Block* out,
-                            Context* ctx) {
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::set(num, x, outPtr, ctx->stream);
-}
+// /// Element-wise operation, out[i] = sigmoid(in[i])
+// template <>
+// void Sigmoid<float, lang::Cuda>(const Tensor& in, Tensor* out,
+//                                 Context* ctx) {
+//   const float* inPtr = static_cast<const float*>(in.block()->data());
+//   float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+//   cudnnActivationDescriptor_t act_desc;
+//   cudnnActivationMode_t mode = CUDNN_ACTIVATION_SIGMOID;
+//   cudnnNanPropagation_t cudnn_propagation = CUDNN_PROPAGATE_NAN;
+//   double coef = 0.0; //only used for CLIPPED_RELU or ELU
+//   cudnnCreateActivationDescriptor(&act_desc);
+//   cudnnSetActivationDescriptor(act_desc, mode, cudnn_propagation, coef);
+
+//   float alpha[1] = {1.0};
+//   float beta[1] = {0.0};
+//   cudnnDataType_t cudnn_dtype = CUDNN_DATA_FLOAT;
+//   cudnnTensorDescriptor_t in_desc, out_desc;
+//   cudnnCreateTensorDescriptor(&in_desc);
+//   cudnnCreateTensorDescriptor(&out_desc);
+//   cudnnSetTensorNdDescriptor(in_desc, cudnn_dtype, in.generate_dim_cuda(),
+// in.generate_shape_cuda().data(), in.generate_strides_cuda().data());
+//   cudnnSetTensorNdDescriptor(out_desc, cudnn_dtype, out->generate_dim_cuda(),
+// out->generate_shape_cuda().data(), out->generate_strides_cuda().data());
+//   cudnnActivationForward(ctx->cudnn_handle, act_desc, (void*)(&alpha),
+//   in_desc, inPtr,
+//                         (void*)(&beta), out_desc, outPtr);
+
+//   cudnnDestroyTensorDescriptor(in_desc);
+//   cudnnDestroyTensorDescriptor(out_desc);
+//   cudnnDestroyActivationDescriptor(act_desc);
+// }
+
 /// Element-wise operation, out[i] = sigmoid(in[i])
 template <>
-void Sigmoid<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                                Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::sigmoid(num, inPtr, outPtr, ctx->stream);
-}
-// out[i] = sign(in[i])
-template <>
-void Sign<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::sign(num, inPtr, outPtr, ctx->stream);
+void Sigmoid<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::sigmoid(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::sigmoid(num, outPtr, outPtr, ctx->stream);
+  }
 }
 
-/// Element-wise operation, out[i]=sqrt([in[i])
+// out[i] = sign(in[i])
 template <>
-void Sqrt<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::sqrt(num, inPtr, outPtr, ctx->stream);
+void Sign<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::sign(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::sign(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+// out[i] = softplus(in[i])
+template <>
+void SoftPlus<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::softplus(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::softplus(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+// out[i] = softsign(in[i])
+template <>
+void SoftSign<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::softsign(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::softsign(num, outPtr, outPtr, ctx->stream);
+  }
+}
+
+// Element-wise operation, out[i] = sqrt(in[i])
+template <>
+void Sqrt<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+#if CUDNN_MAJOR < 7
+  Transform<float, lang::Cuda>(in, out, ctx);
+  size_t num = in.Size();
+  cuda::sqrt(num, outPtr, outPtr, ctx->stream);
+#else
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float alpha1 = 1.0;
+  float alpha2 = 0.0;
+  float beta = 0.0;
+  cudnnTensorDescriptor_t in_desc = generate_tensor_nd_desc(in);
+  check_cudnn(cudnnOpTensor(
+      ctx->cudnn_handle, generate_op_desc(CUDNN_OP_TENSOR_SQRT),
+      (void*)(&alpha1), in_desc, inPtr, (void*)(&alpha2), in_desc, inPtr,
+      (void*)(&beta), generate_tensor_nd_desc(*out), outPtr));
+#endif  // CUDNN_MAJOR < 7
 }
 
 /// Element-wise operation, out[i]=in[i]^2
 template <>
-void Square<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                               Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::square(num, inPtr, outPtr, ctx->stream);
-}
-/// out = in1 - in2
-template <>
-void Sub<float, lang::Cuda>(const size_t num, const Block* in1,
-                            const Block* in2, Block* out, Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::sub(num, inPtr1, inPtr2, outPtr, ctx->stream);
+void Square<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = in.Size();
+
+  if (in.stride() == out->stride()) {
+    cuda::square(num, inPtr, outPtr, ctx->stream);
+  } else {  // else we transform in to out to store first
+    Transform<float, lang::Cuda>(in, out, ctx);
+    cuda::square(num, outPtr, outPtr, ctx->stream);
+  }
 }
 
-/// sum all elements of input into out
-template <>
-void Sum<float, lang::Cuda>(const size_t num, const Block* in, float* out,
-                            Context* ctx) {
-  LOG(FATAL) << "Cuda Sum is not implemented!";
-  // const float* inPtr = static_cast<const float*>(in->data());
-  // cuda::sum(num, inPtr, out, ctx->stream);
-}
+// template <>
+// void Sum<float, lang::Cuda>(const size_t num, const Block* in, float* out,
+//                             Context* ctx) {
+//   LOG(FATAL) << "Cuda Sum is not implemented!";
+//   // const float* inPtr = static_cast<const float*>(in.data());
+//   // cuda::sum(num, inPtr, out, ctx->stream);
+// }
 
 /// Element-wise operation, out[i] = tanh(in[i])
-template <>
-void Tanh<float, lang::Cuda>(const size_t num, const Block* in, Block* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::tanh(num, inPtr, outPtr, ctx->stream);
-}
+// template <>
+// void Tanh<float, lang::Cuda>(const Tensor& in, Tensor* out,
+//                              Context* ctx) {
+//   const float* inPtr = static_cast<const float*>(in.block()->data());
+//   float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+//   cudnnActivationDescriptor_t act_desc;
+//   cudnnActivationMode_t mode = CUDNN_ACTIVATION_TANH;
+//   cudnnNanPropagation_t cudnn_propagation = CUDNN_PROPAGATE_NAN;
+//   double coef = 0.0; //only used for CLIPPED_RELU or ELU
+//   cudnnCreateActivationDescriptor(&act_desc);
+//   cudnnSetActivationDescriptor(act_desc, mode, cudnn_propagation, coef);
+
+//   float alpha[1] = {1.0};
+//   float beta[1] = {0.0};
+//   cudnnDataType_t cudnn_dtype = CUDNN_DATA_FLOAT;
+//   cudnnTensorDescriptor_t in_desc, out_desc;
+//   cudnnCreateTensorDescriptor(&in_desc);
+//   cudnnCreateTensorDescriptor(&out_desc);
+//   cudnnSetTensorNdDescriptor(in_desc, cudnn_dtype, in.generate_dim_cuda(),
+// in.generate_shape_cuda().data(), in.generate_strides_cuda().data());
+//   cudnnSetTensorNdDescriptor(out_desc, cudnn_dtype, out->generate_dim_cuda(),
+// out->generate_shape_cuda().data(), out->generate_strides_cuda().data());
+//   cudnnActivationForward(ctx->cudnn_handle, act_desc, (void*)(&alpha),
+//   in_desc, inPtr,
+//                         (void*)(&beta), out_desc, outPtr);
+
+//   cudnnDestroyTensorDescriptor(in_desc);
+//   cudnnDestroyTensorDescriptor(out_desc);
+//   cudnnDestroyActivationDescriptor(act_desc);
+// }
+
+#define GenUnaryTensorCudaFn(fn, cudafn)                                    \
+  template <>                                                               \
+  void fn<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) { \
+    const float* inPtr = static_cast<const float*>(in.block()->data());     \
+    float* outPtr = static_cast<float*>(out->block()->mutable_data());      \
+    const size_t num = in.Size();                                           \
+    if (in.stride() == out->stride()) {                                     \
+      cuda::cudafn(num, inPtr, outPtr, ctx->stream);                        \
+    } else {                                                                \
+      Transform<float, lang::Cuda>(in, out, ctx);                           \
+      cuda::cudafn(num, outPtr, outPtr, ctx->stream);                       \
+    }                                                                       \
+  }
+
+GenUnaryTensorCudaFn(Cos, cos);
+GenUnaryTensorCudaFn(Cosh, cosh);
+GenUnaryTensorCudaFn(Acos, acos);
+GenUnaryTensorCudaFn(Acosh, acosh);
+GenUnaryTensorCudaFn(Sin, sin);
+GenUnaryTensorCudaFn(Sinh, sinh);
+GenUnaryTensorCudaFn(Asin, asin);
+GenUnaryTensorCudaFn(Asinh, asinh);
+GenUnaryTensorCudaFn(Tan, tan);
+GenUnaryTensorCudaFn(Tanh, tanh);
+GenUnaryTensorCudaFn(Atan, atan);
+GenUnaryTensorCudaFn(Atanh, atanh);
 
 // ================Random functions===========================================
 /// Each element of out is 1 with probability p and 0 with probability 1-p, 0 <= p <= 1
 // Get the random generator from 'ctx'
 // If DType is not float, then convert the threshold to DType
 template <>
-void Bernoulli<float, lang::Cuda>(const size_t num, const float p, Block* out,
-                                  Context* ctx) {
+void Bernoulli<float, lang::Cuda>(const float p, Tensor* out, Context* ctx) {
   auto rgen = ctx->curand_generator;
-  float* outPtr = static_cast<float*>(out->mutable_data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = out->Size();
   CURAND_CHECK(curandGenerateUniform(rgen, outPtr, num));
   cuda::threshold(num, p, outPtr, outPtr, ctx->stream);
 }
@@ -293,10 +854,11 @@
 // The random generator should be extracted from ctx.
 // If DType is not float, then convert the low and high to DType
 template <>
-void Uniform<float, lang::Cuda>(const size_t num, const float low,
-                                const float high, Block* out, Context* ctx) {
+void Uniform<float, lang::Cuda>(const float low, const float high, Tensor* out,
+                                Context* ctx) {
   auto rgen = ctx->curand_generator;
-  float* outPtr = static_cast<float*>(out->mutable_data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = out->Size();
   CURAND_CHECK(curandGenerateUniform(rgen, outPtr, num));
   cuda::mult(num, outPtr, high - low, outPtr, ctx->stream);
   cuda::add(num, outPtr, low, outPtr, ctx->stream);
@@ -305,88 +867,114 @@
 // The random generator should be extracted from ctx.
 // If DType is not float, then convert the mean and delta to DType
 template <>
-void Gaussian<float, lang::Cuda>(const size_t num, const float mean,
-                                 const float std, Block* out, Context* ctx) {
+void Gaussian<float, lang::Cuda>(const float mean, const float std, Tensor* out,
+                                 Context* ctx) {
   auto rgen = ctx->curand_generator;
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  CURAND_CHECK(curandGenerateNormal(rgen, outPtr, num, mean, std));
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = out->Size();
+
+  // curandGenerateNormal requires an even number of elements and otherwise
+  // fails with CURAND_STATUS_LENGTH_NOT_MULTIPLE, so odd sizes are padded.
+  if (num % 2 != 0) {
+    Tensor tmp(Shape{num + 1}, out->device());
+    float* outPtr_tmp = static_cast<float*>(tmp.block()->mutable_data());
+    CURAND_CHECK(curandGenerateNormal(rgen, outPtr_tmp, num + 1, mean, std));
+    CopyDataToFrom(out, tmp, num, 0, 0);
+  } else {
+    CURAND_CHECK(curandGenerateNormal(rgen, outPtr, num, mean, std));
+  }
 }
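+// Sketch of the workaround above, assuming out has 5 elements: 6 normal
+// variates are generated into the scratch tensor and the first 5 are copied
+// back, since curandGenerateNormal rejects odd lengths.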
 
 // =========================Blas operations==================================
 // ref to http://docs.nvidia.com/cuda/cublas
 template <>
-void Amax<float, lang::Cuda>(const size_t num, const Block* in, size_t* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
+void Amax<float, lang::Cuda>(const Tensor& in, size_t* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
   int idx = 1;
+  const size_t num = in.Size();
   CUBLAS_CHECK(cublasIsamax(handle, num, inPtr, 1, &idx));
   *out = idx - 1;  // cublas index starts from 1
 }
 
 /// return the index of the element with the min value.
 template <>
-void Amin<float, lang::Cuda>(const size_t num, const Block* in, size_t* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
+void Amin<float, lang::Cuda>(const Tensor& in, size_t* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
   int idx = 1;
+  const size_t num = in.Size();
   CUBLAS_CHECK(cublasIsamin(handle, num, inPtr, 1, &idx));
   *out = idx - 1;
 }
 
 /// out = sum |x| for all x in in
 template <>
-void Asum<float, lang::Cuda>(const size_t num, const Block* in, float* out,
-                             Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
+void Asum<float, lang::Cuda>(const Tensor& in, float* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
+  const size_t num = in.Size();
   CUBLAS_CHECK(cublasSasum(handle, num, inPtr, 1, out));
 }
 
 /// out = alpha * in + out
 template <>
-void Axpy<float, lang::Cuda>(const size_t num, const float alpha,
-                             const Block* in, Block* out, Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
+void Axpy<float, lang::Cuda>(const float alpha, const Tensor& in, Tensor* out,
+                             Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
+  const size_t num = in.Size();
   CUBLAS_CHECK(cublasSaxpy(handle, num, &alpha, inPtr, 1, outPtr, 1));
 }
 
 /// out = \sum_i in1[i] * in2[i]
 template <>
-void Dot<float, lang::Cuda>(const size_t num, const Block* in1,
-                            const Block* in2, float* out, Context* ctx) {
-  const float* inPtr1 = static_cast<const float*>(in1->data());
-  const float* inPtr2 = static_cast<const float*>(in2->data());
+void Dot<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, float* out,
+                            Context* ctx) {
+  const float* inPtr1 = static_cast<const float*>(in1.block()->data());
+  const float* inPtr2 = static_cast<const float*>(in2.block()->data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
+  const size_t num = in1.Size();
   CUBLAS_CHECK(cublasSdot(handle, num, inPtr1, 1, inPtr2, 1, out));
 }
 template <>
-void Nrm2<float, lang::Cuda>(const size_t num, const Block* in, float* out,
-                             Context* ctx) {
+void Dot<float, lang::Cuda>(const Tensor& in1, const Tensor& in2, Tensor* out,
+                            Context* ctx) {
+  const float* inPtr1 = static_cast<const float*>(in1.block()->data());
+  const float* inPtr2 = static_cast<const float*>(in2.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  auto handle = ctx->cublas_handle;
+  const size_t num = in1.Size();
+  CUBLAS_CHECK(cublasSetPointerMode(handle, CUBLAS_POINTER_MODE_DEVICE));
+  CUBLAS_CHECK(cublasSdot(handle, num, inPtr1, 1, inPtr2, 1, outPtr));
+  CUBLAS_CHECK(cublasSetPointerMode(handle, CUBLAS_POINTER_MODE_HOST));
+}
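+// The pointer-mode toggle above makes cublasSdot write its scalar result
+// directly to device memory (outPtr) rather than a host float, avoiding a
+// synchronizing device-to-host copy; host pointer mode is restored after.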
+
+template <>
+void Nrm2<float, lang::Cuda>(const Tensor& in, float* out, Context* ctx) {
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
-  const float* inPtr = static_cast<const float*>(in->data());
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const size_t num = in.Size();
   cublasSnrm2(handle, num, inPtr, 1, out);
 }
 template <>
-void Scale<float, lang::Cuda>(const size_t num, const float x, Block* out,
-                              Context* ctx) {
+void Scale<float, lang::Cuda>(const float x, Tensor* out, Context* ctx) {
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
-  float* outPtr = static_cast<float*>(out->mutable_data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t num = out->Size();
   CUBLAS_CHECK(cublasSscal(handle, num, &x, outPtr, 1));
 }
 // NOTE: cublas uses column major order.
 // http://peterwittek.com/cublas-matrix-c-style.html
 template <>
-void DGMM<float, lang::Cuda>(const bool side_right, const size_t nrow,
-                             const size_t ncol, const Block* M, const Block* v,
-                             Block* out, Context* ctx) {
+void DGMM<float, lang::Cuda>(const bool side_right, const Tensor& M,
+                             const Tensor& v, Tensor* out, Context* ctx) {
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
-  const float* MPtr = static_cast<const float*>(M->data());
-  const float* vPtr = static_cast<const float*>(v->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
+  const float* MPtr = static_cast<const float*>(M.block()->data());
+  const float* vPtr = static_cast<const float*>(v.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t nrow = M.shape(0);
+  const size_t ncol = M.shape(1);
   if (side_right) {
     CUBLAS_CHECK(cublasSdgmm(handle, CUBLAS_SIDE_LEFT, ncol, nrow, MPtr, ncol,
                              vPtr, 1, outPtr, ncol));
@@ -396,14 +984,17 @@
   }
 }
 template <>
-void GEMV<float, lang::Cuda>(bool trans, const size_t m, const size_t n,
-                             const float alpha, const Block* A, const Block* v,
-                             const float beta, Block* out, Context* ctx) {
-  const float* APtr = static_cast<const float*>(A->data());
-  const float* vPtr = static_cast<const float*>(v->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
+void GEMV<float, lang::Cuda>(const float alpha, const Tensor& A,
+                             const Tensor& v, const float beta, Tensor* out,
+                             Context* ctx) {
+  const float* APtr = static_cast<const float*>(A.block()->data());
+  const float* vPtr = static_cast<const float*>(v.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t m = A.shape()[0];
+  const size_t n = A.shape()[1];
+
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
-  if (!trans)
+  if (!(A.transpose()))
     CUBLAS_CHECK(cublasSgemv(handle, CUBLAS_OP_T, n, m, &alpha, APtr, n, vPtr,
                              1, &beta, outPtr, 1));
   else
@@ -413,24 +1004,142 @@
 
 // http://docs.nvidia.com/cuda/cublas/#cublas-lt-t-gt-gemm
 template <>
-void GEMM<float, lang::Cuda>(const bool transA, const bool transB,
-                             const size_t nrowA, const size_t ncolB,
-                             const size_t ncolA, const float alpha,
-                             const Block* A, const Block* B, const float beta,
-                             Block* C, Context* ctx) {
+void GEMM<float, lang::Cuda>(const float alpha, const Tensor& A,
+                             const Tensor& B, const float beta, Tensor* C,
+                             Context* ctx) {
+  auto transA = A.transpose();
   auto transa = transA ? CUBLAS_OP_T : CUBLAS_OP_N;
+  auto transB = B.transpose();
   auto transb = transB ? CUBLAS_OP_T : CUBLAS_OP_N;
+  const size_t nrowA = A.shape()[0];
+  const size_t ncolA = A.shape()[1];
+  const size_t ncolB = B.shape()[1];
   int lda = transA ? nrowA : ncolA;
   int ldb = transB ? ncolA : ncolB;
   int ldc = ncolB;
-  const float* APtr = static_cast<const float*>(A->data());
-  const float* BPtr = static_cast<const float*>(B->data());
-  float* CPtr = static_cast<float*>(C->mutable_data());
+  const float* APtr = static_cast<const float*>(A.block()->data());
+  const float* BPtr = static_cast<const float*>(B.block()->data());
+  float* CPtr = static_cast<float*>(C->block()->mutable_data());
   auto handle = ctx->cublas_handle;  // TODO(wangwei) set cudastream
   CUBLAS_CHECK(cublasSgemm(handle, transb, transa, ncolB, nrowA, ncolA, &alpha,
                            BPtr, ldb, APtr, lda, &beta, CPtr, ldc));
 }
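+// NOTE on the swapped operands above: a row-major m x n matrix has the
+// same memory layout as a column-major n x m matrix, i.e. cuBLAS sees our
+// tensors implicitly transposed. Since C^T = (A * B)^T = B^T * A^T,
+// computing B^T * A^T with B passed first (and the leading dimensions
+// adjusted accordingly) makes cuBLAS produce C^T in column-major order,
+// which is exactly C in row-major memory.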
 
+/* pseudocode for GEMM Strided Batched:
+ * for (int p = 0; p < batchCount; ++p) {
+ *   for (int m = 0; m < M; ++m) {
+ *     for (int n = 0; n < N; ++n) {
+ *       T c_mnp = 0;
+ *       for (int k = 0; k < K; ++k)
+ *         c_mnp += A[m + k*ldA + p*strideA] * B[k + n*ldB + p*strideB];
+ *       C[m + n*ldC + p*strideC] =
+ *         (*alpha)*c_mnp + (*beta)*C[m + n*ldC + p*strideC];
+ *     }
+ *   }
+ * }
+ */
+template <>
+void GEMMBatched<float, lang::Cuda>(const float alpha, const Tensor& A,
+                                    const Tensor& B, const float beta,
+                                    Tensor* C, Context* ctx) {
+  auto handle = ctx->cublas_handle;
+
+  auto transA = A.transpose();
+  auto transa = transA ? CUBLAS_OP_T : CUBLAS_OP_N;
+  auto transB = B.transpose();
+  auto transb = transB ? CUBLAS_OP_T : CUBLAS_OP_N;
+
+  const size_t ncolB = B.shape().end()[-1];
+  const size_t nrowA = A.shape().end()[-2];
+  const size_t ncolA = A.shape().end()[-1];
+
+  size_t batchCount = A.shape()[0];
+  if (A.nDim() == 4u) batchCount *= A.shape()[1];
+
+  const size_t strideA = A.shape().end()[-1] * A.shape().end()[-2];
+  const size_t strideB = B.shape().end()[-1] * B.shape().end()[-2];
+  const size_t strideC = C->shape().end()[-1] * C->shape().end()[-2];
+
+  int lda = transA ? nrowA : ncolA;
+  int ldb = transB ? ncolA : ncolB;
+  int ldc = ncolB;
+
+  const float* APtr = static_cast<const float*>(A.block()->data());
+  const float* BPtr = static_cast<const float*>(B.block()->data());
+  float* CPtr = static_cast<float*>(C->block()->mutable_data());
+  CUBLAS_CHECK(cublasSgemmStridedBatched(
+      handle, transa, transb, ncolB, nrowA, ncolA, &alpha, BPtr, ldb, strideB,
+      APtr, lda, strideA, &beta, CPtr, ldc, strideC, batchCount));
+}
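+// NOTE (illustrative example of the stride computation above): for A of
+// shape (8, 2, 3, 4) and B of shape (8, 2, 4, 5), batchCount = 8 * 2 = 16,
+// strideA = 3 * 4 = 12, strideB = 4 * 5 = 20 and strideC = 3 * 5 = 15, so
+// each of the 16 batches computes C[p] = alpha * A[p] * B[p] + beta * C[p]
+// on its own 3 x 4 and 4 x 5 slices.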
+
+template <>
+void SoftMax<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  cudnnSoftmaxAlgorithm_t algorithm = CUDNN_SOFTMAX_ACCURATE;
+  cudnnSoftmaxMode_t mode = CUDNN_SOFTMAX_MODE_INSTANCE;
+
+  /*
+   * tensor tmp is used to generate the cudnn descriptor:
+   *   cudnn softmax requires a shape of {N, C, 1, 1},
+   *   while the helper func `generate_shape_cuda` generates a shape of
+   *   {1, 1, N, C}; thus this block serves a similar purpose to
+   *   `generate_shape_cuda`, but in the reverse manner
+   */
+  CHECK_LE(in.shape().size(), 5)
+      << "Dimensions (shape) beyond 5 are currently not supported";
+  auto tmp = in;
+  while (tmp.shape().size() < 4) {
+    auto s = tmp.shape();
+    s.push_back(1);
+    tmp.Reshape(s);
+  }
+
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  float alpha = 1.0;
+  float beta = 0.0;
+
+  check_cudnn(cudnnSoftmaxForward(ctx->cudnn_handle, algorithm, mode,
+                                  (void*)(&alpha), generate_tensor_nd_desc(tmp),
+                                  inPtr, (void*)(&beta),
+                                  generate_tensor_nd_desc(tmp), outPtr));
+}
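+// NOTE (worked example of the reshape above): a 2-D input of shape
+// {32, 10} becomes tmp of shape {32, 10, 1, 1}; with
+// CUDNN_SOFTMAX_MODE_INSTANCE cudnn then normalizes the C * H * W = 10
+// values of each of the N = 32 instances, i.e. a row-wise softmax.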
+
+template <>
+void SoftMaxBackward<float, lang::Cuda>(const Tensor& in, Tensor* out,
+                                        const Tensor& fdout, Context* ctx) {
+  cudnnSoftmaxAlgorithm_t algorithm = CUDNN_SOFTMAX_ACCURATE;
+  cudnnSoftmaxMode_t mode = CUDNN_SOFTMAX_MODE_INSTANCE;
+
+  /*
+   * tensor tmp is used to generate the cudnn descriptor:
+   *   cudnn softmax requires a shape of {N, C, 1, 1},
+   *   while the helper func `generate_shape_cuda` generates a shape of
+   *   {1, 1, N, C}; thus this block serves a similar purpose to
+   *   `generate_shape_cuda`, but in the reverse manner
+   */
+  CHECK_LE(in.shape().size(), 5)
+      << "Dimensions (shape) beyond 5 are currently not supported";
+  auto tmp = in;
+  while (tmp.shape().size() < 4) {
+    auto s = tmp.shape();
+    s.push_back(1);
+    tmp.Reshape(s);
+  }
+
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  const float* fdoutPtr = static_cast<const float*>(fdout.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+
+  float alpha = 1.0;
+  float beta = 0.0;
+
+  check_cudnn(cudnnSoftmaxBackward(
+      ctx->cudnn_handle, algorithm, mode, (void*)(&alpha),
+      generate_tensor_nd_desc(tmp), fdoutPtr, generate_tensor_nd_desc(tmp),
+      inPtr, (void*)(&beta), generate_tensor_nd_desc(tmp), outPtr));
+}
+
 template <>
 void ComputeCrossEntropy<float, lang::Cuda>(bool int_target,
                                             const size_t batchsize,
@@ -441,7 +1150,7 @@
   const int* tPtr = static_cast<const int*>(t->data());
   float* lossPtr = static_cast<float*>(loss->mutable_data());
   cuda::ComputeCrossEntropy(int_target, batchsize, dim, pPtr, tPtr, lossPtr,
-      ctx->stream);
+                            ctx->stream);
 }
 template <>
 void SoftmaxCrossEntropyBwd<float, lang::Cuda>(bool int_target,
@@ -457,14 +1166,144 @@
                                ctx->stream);
 }
 
+// template <>
+// void RowMax<float, lang::Cuda>(const Tensor& in, Tensor* out,
+//                                Context* ctx) {
+//   const float* inPtr = static_cast<const float*>(in.block()->data());
+//   float* outPtr = static_cast<float*>(out->block()->mutable_data());
+//   // const size_t nrow = in.shape()[0];
+//   // const size_t ncol = in.shape()[1];
+//   // cuda::RowMax(nrow, ncol, inPtr, outPtr, ctx->stream);
+
+//   //vector<int> reduce_row_axes_shape = in.generate_shape_cuda();
+//   //reduce_row_axes_shape.back() = 1;
+//   //reduce axis 1, i.e. set the last element d in shape {a,b,c,d} to 1
+
+//   vector<int> reduce_row_axes_shape = {1,1,1,1};
+//   vector<int> reduced_strides = {1,1,1,1};
+
+//   //reduce_desc
+//   cudnnReduceTensorDescriptor_t reduce_desc;
+//   cudnnReduceTensorOp_t reduce_op = CUDNN_REDUCE_TENSOR_ADD;
+//   cudnnDataType_t cudnn_dtype = CUDNN_DATA_FLOAT;
+//   cudnnNanPropagation_t cudnn_propagation = CUDNN_PROPAGATE_NAN;
+//   cudnnReduceTensorIndices_t cudnn_indices = CUDNN_REDUCE_TENSOR_NO_INDICES;
+//   //cudnnReduceTensorIndices_t cudnn_indices =
+//   CUDNN_REDUCE_TENSOR_FLATTENED_INDICES;
+//   cudnnIndicesType_t cudnn_indices_type = CUDNN_32BIT_INDICES;
+//   cudnnCreateReduceTensorDescriptor(&reduce_desc);
+//   cudnnSetReduceTensorDescriptor(reduce_desc, reduce_op, cudnn_dtype,
+//                                  cudnn_propagation, cudnn_indices,
+//                                  cudnn_indices_type);
+
+//   //instantiate new tensor to use new blocks as memory instead of cudaMalloc
+//   //create 2 tensors of same size as input tensor
+//   Shape reduction_size = {1000};
+//   Tensor indices(reduction_size, in.device(), in.data_type());
+//   Tensor workspace(reduction_size, in.device(), in.data_type());
+//   size_t indices_bytes = indices.block()->size()*1000;
+//   size_t workspace_bytes = workspace.block()->size()*1000;
+//   size_t* indicesPtr = static_cast<size_t*>(indices.block()->mutable_data());
+//   float* workspacePtr =
+//   static_cast<float*>(workspace.block()->mutable_data());
+//   //void* indicesPtr{nullptr}; void* workspacePtr{nullptr};
+//   //cudaMalloc(&indicesPtr, indices_bytes); cudaMalloc(&workspacePtr,
+//   workspace_bytes);
+
+//   float alpha[1] = {1.0};
+//   float beta[1] = {0.0};
+//   cudnnTensorDescriptor_t in_desc, out_desc;
+//   cudnnCreateTensorDescriptor(&in_desc);
+//   cudnnCreateTensorDescriptor(&out_desc);
+//   cudnnSetTensorNdDescriptor(in_desc, cudnn_dtype, in.generate_dim_cuda(),
+// in.generate_shape_cuda().data(), in.generate_strides_cuda().data());
+//   //cudnnSetTensorNdDescriptor(out_desc, cudnn_dtype,
+//   out->generate_dim_cuda(),
+// out->generate_shape_cuda().data(), out->generate_strides_cuda().data());
+//   cudnnSetTensorNdDescriptor(out_desc, cudnn_dtype, out->generate_dim_cuda(),
+// reduce_row_axes_shape.data(), reduced_strides.data());
+//   cudnnReduceTensor(ctx->cudnn_handle, reduce_desc,
+//                     indicesPtr, indices_bytes, workspacePtr, workspace_bytes,
+//                     (void*)(&alpha), in_desc, inPtr, (void*)(&beta),
+//                     out_desc, outPtr);
+
+//   cudnnDestroyTensorDescriptor(in_desc);
+//   cudnnDestroyTensorDescriptor(out_desc);
+// }
+
 template <>
-void RowMax<float, lang::Cuda>(const size_t nrow, const size_t ncol,
-                               const Block* in, Block* out,
-                               Context* ctx) {
-  const float* inPtr = static_cast<const float*>(in->data());
-  float* outPtr = static_cast<float*>(out->mutable_data());
-  cuda::RowMax(nrow, ncol, inPtr, outPtr, ctx->stream);
+void RowMax<float, lang::Cuda>(const Tensor& in, Tensor* out, Context* ctx) {
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  float* outPtr = static_cast<float*>(out->block()->mutable_data());
+  const size_t nrow = in.shape()[0];
+  const size_t ncol = in.shape()[1];
+
+  if (in.transpose()) {
+    Tensor t(in.shape(), in.device(), in.data_type());
+    Transform<float, lang::Cuda>(in, &t, ctx);
+    const float* tPtr_const = static_cast<const float*>(t.block()->data());
+    cuda::RowMax(nrow, ncol, tPtr_const, outPtr, ctx->stream);
+  } else {
+    cuda::RowMax(nrow, ncol, inPtr, outPtr, ctx->stream);
+  }
 }
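+// NOTE on the branch above: a transposed input is first materialized into
+// a contiguous tensor via Transform, presumably because cuda::RowMax
+// assumes contiguous row-major input.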
+
+// this function must be placed after the Set and Dot functions to avoid
+// errors from instantiation before specialization
+template <>
+void Sum<float, lang::Cuda>(const Tensor& in, float* out, Context* ctx) {
+#if CUDNN_MAJOR < 7
+  Tensor one(in.shape(), in.device(), in.data_type());
+  Set<float, lang::Cuda>(float(1), &one, ctx);
+  Dot<float, lang::Cuda>(in, one, out, ctx);
+#else
+  const float* inPtr = static_cast<const float*>(in.block()->data());
+  // reduce all axes to 1 for cudnnReduce, e.g. Tensor A with shape (2,4) will
+  // be reduced to (1)
+  Shape reduced_shape = {1};
+  Tensor t(reduced_shape, in.device(), in.data_type());
+  float* tPtr = static_cast<float*>(t.block()->mutable_data());
+  vector<int> reduce_all_axes = generate_shape_cuda(in);
+  for (size_t n = 0; n < reduce_all_axes.size(); ++n) {
+    reduce_all_axes[n] = 1;
+  }
+
+  // reduce_desc
+  cudnnReduceTensorDescriptor_t reduce_desc;
+  cudnnReduceTensorOp_t reduce_op = CUDNN_REDUCE_TENSOR_ADD;
+  cudnnDataType_t cudnn_dtype = CUDNN_DATA_FLOAT;
+  cudnnNanPropagation_t cudnn_propagation = CUDNN_PROPAGATE_NAN;
+  cudnnReduceTensorIndices_t cudnn_indices = CUDNN_REDUCE_TENSOR_NO_INDICES;
+  cudnnIndicesType_t cudnn_indices_type = CUDNN_32BIT_INDICES;
+  check_cudnn(cudnnCreateReduceTensorDescriptor(&reduce_desc));
+  check_cudnn(cudnnSetReduceTensorDescriptor(
+      reduce_desc, reduce_op, cudnn_dtype, cudnn_propagation, cudnn_indices,
+      cudnn_indices_type));
+
+  // instantiate 2 new tensors whose blocks supply the memory, instead of
+  // calling cudaMalloc directly
+  size_t reduction_size_int = Product(in.shape());
+  Shape reduction_size = {reduction_size_int * 100};
+  Tensor indices(reduction_size, in.device(), in.data_type());
+  Tensor workspace(reduction_size, in.device(), in.data_type());
+  size_t indices_bytes = indices.block()->size() * 100;
+  size_t workspace_bytes = workspace.block()->size() * 100;
+  size_t* indicesPtr = static_cast<size_t*>(indices.block()->mutable_data());
+  float* workspacePtr = static_cast<float*>(workspace.block()->mutable_data());
+  // void* indicesPtr{nullptr}; void* workspacePtr{nullptr};
+  // cudaMalloc(&indicesPtr, indices_bytes); cudaMalloc(&workspacePtr,
+  // workspace_bytes);
+
+  float alpha = 1.0;
+  float beta = 0.0;
+  check_cudnn(cudnnReduceTensor(
+      ctx->cudnn_handle, reduce_desc, indicesPtr, indices_bytes, workspacePtr,
+      workspace_bytes, (void*)(&alpha), generate_tensor_nd_desc(in), inPtr,
+      (void*)(&beta), generate_tensor_nd_desc(t), tPtr));
+
+  *out = tPtr[0];
+#endif  // CUDNN_MAJOR < 7
+}
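+// NOTE: the CUDNN_MAJOR < 7 fallback uses the identity
+// sum(in) = dot(in, ones), which is also why this specialization must come
+// after Set and Dot. The cudnn path sizes the indices/workspace buffers
+// with large constant factors rather than querying
+// cudnnGetReductionWorkspaceSize, i.e. an over-allocation rather than an
+// exact bound.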
+
 }  // namespace singa
 
 #endif  // USE_CUDA
diff --git a/src/core/tensor/tensor_math_opencl.h b/src/core/tensor/tensor_math_opencl.h
index 6ab248b..f968f40 100644
--- a/src/core/tensor/tensor_math_opencl.h
+++ b/src/core/tensor/tensor_math_opencl.h
@@ -16,29 +16,27 @@
  * limitations under the License.
  */
 
-#ifndef  SINGA_CORE_TENSOR_TENSOR_MATH_OPENCL_H_
+#ifndef SINGA_CORE_TENSOR_TENSOR_MATH_OPENCL_H_
 
 #ifdef USE_OPENCL
 
-#include "tensor_math.h"
-#include "singa/utils/opencl_utils.h"
-
+#include <viennacl/linalg/inner_prod.hpp>
+#include <viennacl/linalg/matrix_operations.hpp>
+#include <viennacl/linalg/norm_2.hpp>
+#include <viennacl/linalg/prod.hpp>
+#include <viennacl/linalg/scalar_operations.hpp>
+#include <viennacl/linalg/sum.hpp>
+#include <viennacl/linalg/vector_operations.hpp>
+#include <viennacl/matrix.hpp>
+#include <viennacl/ocl/kernel.hpp>
 #include <viennacl/scalar.hpp>
 #include <viennacl/vector.hpp>
-#include <viennacl/matrix.hpp>
 
-#include <viennacl/linalg/prod.hpp>
-#include <viennacl/linalg/inner_prod.hpp>
-#include <viennacl/linalg/norm_2.hpp>
-#include <viennacl/linalg/sum.hpp>
-#include <viennacl/linalg/scalar_operations.hpp>
-#include <viennacl/linalg/vector_operations.hpp>
-#include <viennacl/linalg/matrix_operations.hpp>
+#include "singa/utils/opencl_utils.h"
+#include "tensor_math.h"
 
-#include <viennacl/ocl/kernel.hpp>
-
-using viennacl::ocl::get_context;
 using viennacl::ocl::enqueue;
+using viennacl::ocl::get_context;
 
 namespace singa {
 
@@ -46,8 +44,9 @@
 // Element-wise functions
 // **************************************
 
-template<>
-void Abs<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Abs<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                              Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_fabs");
 
@@ -58,21 +57,22 @@
   enqueue(kernel((cl_int)num, v_in, v_out));
 }
 
-
-template<>
-void Add<float, lang::Opencl>(const size_t num, const Block* in, const float x, Block* out, Context* ctx) {
+template <>
+void Add<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                              Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = v_in + x_in;
 }
 
-
-template<>
-void Add<float, lang::Opencl>(const size_t num, const Block* in1, const Block* in2, Block* out, Context* ctx) {
+template <>
+void Add<float, lang::Opencl>(const size_t num, const Block* in1,
+                              const Block* in2, Block* out, Context* ctx) {
   viennacl::vector<float> v_in1((const cl_mem)in1->data(), num);
   viennacl::vector<float> v_in2((const cl_mem)in2->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -80,10 +80,10 @@
   v_out = v_in1 + v_in2;
 }
 
-
-template<>
-void Clamp<float, lang::Opencl>(const size_t num, const float low, const float high,
-                                const Block* in, Block* out, Context* ctx) {
+template <>
+void Clamp<float, lang::Opencl>(const size_t num, const float low,
+                                const float high, const Block* in, Block* out,
+                                Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_clamp");
 
@@ -93,33 +93,35 @@
   enqueue(kernel((cl_int)num, low, high, v_in, v_out));
 }
 
-
-template<>
-void Div<float, lang::Opencl>(const size_t num, const Block* in, const float x, Block* out, Context* ctx) {
+template <>
+void Div<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                              Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_div(v_in, x_in);
 }
 
-
-template<>
-void Div<float, lang::Opencl>(const size_t num, const float x, const Block* in, Block* out, Context* ctx) {
+template <>
+void Div<float, lang::Opencl>(const size_t num, const float x, const Block* in,
+                              Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_div(x_in, v_in);
 }
 
-
-template<>
-void Div<float, lang::Opencl>(const size_t num, const Block* in1, const Block* in2, Block* out, Context* ctx) {
+template <>
+void Div<float, lang::Opencl>(const size_t num, const Block* in1,
+                              const Block* in2, Block* out, Context* ctx) {
   viennacl::vector<float> v_in1((const cl_mem)in1->data(), num);
   viennacl::vector<float> v_in2((const cl_mem)in2->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -127,21 +129,23 @@
   v_out = viennacl::linalg::element_div(v_in1, v_in2);
 }
 
-
-template<>
-void EltwiseMult<float, lang::Opencl>(const size_t num, const Block* in, const float x, Block* out, Context* ctx) {
+template <>
+void EltwiseMult<float, lang::Opencl>(const size_t num, const Block* in,
+                                      const float x, Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_prod(v_in, x_in);
 }
 
-
-template<>
-void EltwiseMult<float, lang::Opencl>(const size_t num, const Block* in1, const Block* in2, Block* out, Context* ctx) {
+template <>
+void EltwiseMult<float, lang::Opencl>(const size_t num, const Block* in1,
+                                      const Block* in2, Block* out,
+                                      Context* ctx) {
   viennacl::vector<float> v_in1((const cl_mem)in1->data(), num);
   viennacl::vector<float> v_in2((const cl_mem)in2->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -149,87 +153,92 @@
   v_out = viennacl::linalg::element_prod(v_in1, v_in2);
 }
 
-
-template<>
-void Exp<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Exp<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                              Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_exp(v_in);
 }
 
-
-template<>
-void LE<float, lang::Opencl>(const size_t num, const Block *in, const float x, Block *out, Context *ctx) {
+template <>
+void LE<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                             Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_le");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel((cl_int)num, in_buf, x, out_buf));
 }
 
-
-template<>
-void Log<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Log<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                              Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_log(v_in);
 }
 
-
-template<>
-void LT<float, lang::Opencl>(const size_t num, const Block *in, const float x, Block *out, Context *ctx) {
+template <>
+void LT<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                             Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_lt");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel((cl_int)num, in_buf, x, out_buf));
 }
 
-
-template<>
-void GE<float, lang::Opencl>(const size_t num, const Block *in, const float x, Block *out, Context *ctx) {
+template <>
+void GE<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                             Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_ge");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel((cl_int)num, in_buf, x, out_buf));
 }
 
-
-template<>
-void GT<float, lang::Opencl>(const size_t num, const Block *in, const float x, Block *out, Context *ctx) {
+template <>
+void GT<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                             Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_gt");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel((cl_int)num, in_buf, x, out_buf));
 }
 
-
-template<>
-void Pow<float, lang::Opencl>(const size_t num, const Block* in, float x, Block* out, Context* ctx) {
+template <>
+void Pow<float, lang::Opencl>(const size_t num, const Block* in, float x,
+                              Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_pow(v_in, x_in);
 }
 
-
-template<>
-void Pow<float, lang::Opencl>(const size_t num, const Block* in1, const Block* in2, Block* out, Context* ctx) {
+template <>
+void Pow<float, lang::Opencl>(const size_t num, const Block* in1,
+                              const Block* in2, Block* out, Context* ctx) {
   viennacl::vector<float> v_in1((const cl_mem)in1->data(), num);
   viennacl::vector<float> v_in2((const cl_mem)in2->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -237,21 +246,22 @@
   v_out = viennacl::linalg::element_pow(v_in1, v_in2);
 }
 
-
-template<>
-void ReLU<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void ReLU<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                               Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_relu");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel((cl_int)num, in_buf, out_buf));
 }
 
-
-template<>
-void Set<float, lang::Opencl>(const size_t num, const float x, Block* out, Context* ctx) {
+template <>
+void Set<float, lang::Opencl>(const size_t num, const float x, Block* out,
+                              Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -259,56 +269,60 @@
   v_out = viennacl::scalar_vector<float>(num, x, ocl_ctx);
 }
 
-
-template<>
-void Sigmoid<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Sigmoid<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                                  Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  const viennacl::vector<float> zero = viennacl::zero_vector<float>(num, ocl_ctx);
-  const viennacl::vector<float> one = viennacl::scalar_vector<float>(num, 1.0f, ocl_ctx);
+  const viennacl::vector<float> zero =
+      viennacl::zero_vector<float>(num, ocl_ctx);
+  const viennacl::vector<float> one =
+      viennacl::scalar_vector<float>(num, 1.0f, ocl_ctx);
 
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
-  v_out = viennacl::linalg::element_div(one, viennacl::linalg::element_exp(zero - v_in) + one);
+  v_out = viennacl::linalg::element_div(
+      one, viennacl::linalg::element_exp(zero - v_in) + one);
 }
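+// NOTE: the expression above evaluates the logistic function
+// sigmoid(x) = 1 / (1 + exp(-x)) element-wise, built from the ViennaCL
+// element_exp and element_div primitives.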
 
-
-template<>
-void Sign<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Sign<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                               Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_sign");
 
   viennacl::vector<float> in_buf((const cl_mem)in->data(), num);
-  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()), num);
+  viennacl::vector<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  num);
 
   enqueue(kernel(num, in_buf, out_buf));
 }
 
-
-template<>
-void Sqrt<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Sqrt<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                               Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_sqrt(v_in);
 }
 
-
-template<>
-void Square<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Square<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                                 Context* ctx) {
   Pow<float, lang::Opencl>(num, in, 2, out, ctx);
 }
 
-
-template<>
-void Sub<float, lang::Opencl>(const size_t num, const Block* in, const float x, Block* out, Context* ctx) {
+template <>
+void Sub<float, lang::Opencl>(const size_t num, const Block* in, const float x,
+                              Block* out, Context* ctx) {
   Add<float, lang::Opencl>(num, in, -x, out, ctx);
 }
 
-
-template<>
-void Sub<float, lang::Opencl>(const size_t num, const Block* in1, const Block* in2, Block* out, Context* ctx) {
+template <>
+void Sub<float, lang::Opencl>(const size_t num, const Block* in1,
+                              const Block* in2, Block* out, Context* ctx) {
   viennacl::vector<float> v_in1((const cl_mem)in1->data(), num);
   viennacl::vector<float> v_in2((const cl_mem)in2->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
@@ -316,17 +330,17 @@
   v_out = v_in1 - v_in2;
 }
 
-
-template<>
-void Sum<float, lang::Opencl>(const size_t num, const Block* in, float* out, Context* ctx) {
+template <>
+void Sum<float, lang::Opencl>(const size_t num, const Block* in, float* out,
+                              Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
 
   out[0] = viennacl::linalg::sum(v_in);
 }
 
-
-template<>
-void Tanh<float, lang::Opencl>(const size_t num, const Block* in, Block* out, Context* ctx) {
+template <>
+void Tanh<float, lang::Opencl>(const size_t num, const Block* in, Block* out,
+                               Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
@@ -340,10 +354,12 @@
 /// Number of generation rounds used in the current algorithm.
 static cl_uint rounds = 8;
 
-template<>
-void Bernoulli<float, lang::Opencl>(const size_t num, const float p, Block* out, Context *ctx) {
+template <>
+void Bernoulli<float, lang::Opencl>(const size_t num, const float p, Block* out,
+                                    Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
-  auto kernel = ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_bernoulli");
+  auto kernel =
+      ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_bernoulli");
 
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
@@ -352,30 +368,32 @@
   enqueue(kernel(v_out, seed, 0.0f, 1.0f, p, rounds, cl_uint(num / 4)));
 }
 
-
-template<>
-void Gaussian<float, lang::Opencl>(const size_t num, const float mean, const float std, Block* out, Context *ctx) {
+template <>
+void Gaussian<float, lang::Opencl>(const size_t num, const float mean,
+                                   const float std, Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
-  auto kernel = ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_gaussian");
+  auto kernel =
+      ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_gaussian");
 
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   viennacl::ocl::packed_cl_uint seed = {0, 32, 42, 888};
 
-  enqueue(kernel(v_out, seed, mean, std, rounds, cl_uint(num/4)));
+  enqueue(kernel(v_out, seed, mean, std, rounds, cl_uint(num / 4)));
 }
 
-
-template<>
-void Uniform<float, lang::Opencl>(const size_t num, const float low, const float high, Block* out, Context *ctx) {
+template <>
+void Uniform<float, lang::Opencl>(const size_t num, const float low,
+                                  const float high, Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
-  auto kernel = ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_uniform");
+  auto kernel =
+      ocl_ctx.get_kernel("opencl_distribution", "PRNG_threefry4x32_uniform");
 
   viennacl::ocl::packed_cl_uint seed = {0, 32, 42, 888};
 
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
-  enqueue(kernel(v_out, seed, low, high, rounds, cl_uint(num/4)));
+  enqueue(kernel(v_out, seed, low, high, rounds, cl_uint(num / 4)));
 }
 
 // *********************************************************
@@ -383,7 +401,8 @@
 // *********************************************************
 /*
 template<>
-void Amax<float, lang::Opencl>(const size_t num, const Block* in, size_t* out, Context* ctx) {
+void Amax<float, lang::Opencl>(const size_t num, const Block* in, size_t* out,
+Context* ctx) {
   cl_int status = CL_SUCCESS;
 
   std::string kname = "clkernel_amax";
@@ -401,7 +420,8 @@
   kernel.setArg(3, cl::Local(size));
   kernel.setArg(4, cl::Local(sizeof(size_t)));
 
-  status = ctx->ocl_cmdq.enqueueNDRangeKernel(kernel, cl::NDRange(0), cl::NDRange(num));
+  status = ctx->ocl_cmdq.enqueueNDRangeKernel(kernel, cl::NDRange(0),
+cl::NDRange(num));
   OCL_CHECK(status, "Failed to enqueue kernel function!");
 
   size_t* temp = new size_t[num];
@@ -413,7 +433,8 @@
 
 
 template<>
-void Amin<float, lang::Opencl>(const size_t num, const Block* in, size_t* out, Context* ctx) {
+void Amin<float, lang::Opencl>(const size_t num, const Block* in, size_t* out,
+Context* ctx) {
   cl_int status = CL_SUCCESS;
 
   std::string kname = "clkernel_amin";
@@ -431,7 +452,8 @@
   kernel.setArg(3, cl::Local(size));
   kernel.setArg(4, cl::Local(sizeof(size_t)));
 
-  status = ctx->ocl_cmdq.enqueueNDRangeKernel(kernel, cl::NDRange(0), cl::NDRange(num));
+  status = ctx->ocl_cmdq.enqueueNDRangeKernel(kernel, cl::NDRange(0),
+cl::NDRange(num));
   OCL_CHECK(status, "Failed to enqueue kernel function!");
 
   size_t* temp = new size_t[num];
@@ -441,9 +463,10 @@
   delete temp;
 }
 */
-	
-template<>
-void Asum<float, lang::Opencl>(const size_t num, const Block* in, float* out, Context* ctx) {
+
+template <>
+void Asum<float, lang::Opencl>(const size_t num, const Block* in, float* out,
+                               Context* ctx) {
   viennacl::vector<float> v_in((const cl_mem)in->data(), num);
 
   viennacl::vector<float> temp = viennacl::linalg::element_fabs(v_in);
@@ -452,8 +475,9 @@
 }
 
 /// out = alpha * in + out
-template<>
-void Axpy<float, lang::Opencl>(const size_t num, const float alpha, const Block* in, Block* out, Context* ctx) {
+template <>
+void Axpy<float, lang::Opencl>(const size_t num, const float alpha,
+                               const Block* in, Block* out, Context* ctx) {
   viennacl::vector<float> inbuf((const cl_mem)in->data(), num);
   viennacl::vector<float> outbuf(static_cast<cl_mem>(out->mutable_data()), num);
 
@@ -461,27 +485,29 @@
 }
 
 /// out = ||in||_2^2, i.e, L2 norm.
-template<>
-void Nrm2<float, lang::Opencl>(const size_t num, const Block* in, float* out, Context* ctx) {
+template <>
+void Nrm2<float, lang::Opencl>(const size_t num, const Block* in, float* out,
+                               Context* ctx) {
   viennacl::vector<float> inbuf((const cl_mem)in->data(), num);
 
   out[0] = viennacl::linalg::norm_2(inbuf);
 }
 
-
-template<>
-void Scale<float, lang::Opencl>(const size_t num, const float x, Block* out, Context* ctx) {
+template <>
+void Scale<float, lang::Opencl>(const size_t num, const float x, Block* out,
+                                Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
 
-  viennacl::vector<float> x_in = viennacl::scalar_vector<float>(num, x, ocl_ctx);
+  viennacl::vector<float> x_in =
+      viennacl::scalar_vector<float>(num, x, ocl_ctx);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), num);
 
   v_out = viennacl::linalg::element_prod(v_out, x_in);
 }
 
-
-template<>
-void Dot<float, lang::Opencl>(const size_t num, const Block *in1, const Block *in2, float *out, Context *ctx) {
+template <>
+void Dot<float, lang::Opencl>(const size_t num, const Block* in1,
+                              const Block* in2, float* out, Context* ctx) {
   viennacl::vector<float> in1_buf((const cl_mem)in1->data(), num);
   viennacl::vector<float> in2_buf((const cl_mem)in2->data(), num);
 
@@ -489,9 +515,11 @@
 }
 
 /// out = alpha * A * v + beta * out.
-template<>
-void GEMV<float, lang::Opencl>(bool trans, const size_t m, const size_t n, const float alpha,
-		  const Block *A, const Block *v, const float beta, Block* out, Context* ctx) {
+template <>
+void GEMV<float, lang::Opencl>(bool trans, const size_t m, const size_t n,
+                               const float alpha, const Block* A,
+                               const Block* v, const float beta, Block* out,
+                               Context* ctx) {
   viennacl::vector<float> v_buf((const cl_mem)v->data(), n);
   viennacl::vector<float> o_buf(static_cast<cl_mem>(out->mutable_data()), m);
 
@@ -510,14 +538,14 @@
 
 /// multiply a matrix with a diagonal matrix constructed using values from 'v'.
 /// if matrix_left_side is true, do M*v; else do v*M
-template<>
-void DGMM<float, lang::Opencl>(bool side_right,
-		  const size_t nrow, const size_t ncol,
-		  const Block *M, const Block *v, Block *out, Context *ctx) {
-
+template <>
+void DGMM<float, lang::Opencl>(bool side_right, const size_t nrow,
+                               const size_t ncol, const Block* M,
+                               const Block* v, Block* out, Context* ctx) {
   viennacl::matrix<float> M_buf((const cl_mem)M->data(), nrow, ncol);
   viennacl::vector<float> v_buf((const cl_mem)v->data(), nrow);
-  viennacl::matrix<float> out_buf(static_cast<cl_mem>(out->mutable_data()), nrow, ncol);
+  viennacl::matrix<float> out_buf(static_cast<cl_mem>(out->mutable_data()),
+                                  nrow, ncol);
 
   auto diag = viennacl::diag(v_buf);
 
@@ -529,14 +557,15 @@
 }
 
 /// C = alpha * A * B + beta * C.
-template<>
+template <>
 void GEMM<float, lang::Opencl>(const bool transA, const bool transB,
-		  const size_t nrowA, const size_t ncolB, const size_t ncolA,
-		  const float alpha, const Block *A, const Block *B, const float beta,
-		  Block *C, Context *ctx) {
-
+                               const size_t nrowA, const size_t ncolB,
+                               const size_t ncolA, const float alpha,
+                               const Block* A, const Block* B, const float beta,
+                               Block* C, Context* ctx) {
   viennacl::matrix<float> A_buf, B_buf;
-  viennacl::matrix<float> C_buf(static_cast<cl_mem>(C->mutable_data()), nrowA, ncolB);
+  viennacl::matrix<float> C_buf(static_cast<cl_mem>(C->mutable_data()), nrowA,
+                                ncolB);
 
   if (transA) {
     A_buf = viennacl::matrix<float>((const cl_mem)A->data(), ncolA, nrowA);
@@ -556,47 +585,53 @@
   C_buf += alpha * viennacl::linalg::prod(A_buf, B_buf);
 }
 
-
 template <>
-void ComputeCrossEntropy<float, lang::Opencl>(bool int_target, const size_t batchsize,
-                         const size_t dim, const Block *p, const Block *t,
-                         Block *loss, Context *ctx) {
+void ComputeCrossEntropy<float, lang::Opencl>(bool int_target,
+                                              const size_t batchsize,
+                                              const size_t dim, const Block* p,
+                                              const Block* t, Block* loss,
+                                              Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
-  auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_crossentropy");
+  auto kernel =
+      ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_crossentropy");
 
   viennacl::vector<float> p_buf((const cl_mem)p->data(), batchsize);
   viennacl::vector<float> t_buf((const cl_mem)t->data(), batchsize);
-  viennacl::vector<float> loss_buf(static_cast<cl_mem>(loss->mutable_data()), batchsize);
+  viennacl::vector<float> loss_buf(static_cast<cl_mem>(loss->mutable_data()),
+                                   batchsize);
 
   enqueue(kernel((cl_uint)batchsize, (cl_uint)dim, p_buf, t_buf, loss_buf));
 }
 
-
 template <>
-void SoftmaxCrossEntropyBwd<float, lang::Opencl>(bool int_target, const size_t batchsize, const size_t dim,
-                            const Block *p, const Block *t, Block *grad,
-                            Context *ctx) {
+void SoftmaxCrossEntropyBwd<float, lang::Opencl>(bool int_target,
+                                                 const size_t batchsize,
+                                                 const size_t dim,
+                                                 const Block* p, const Block* t,
+                                                 Block* grad, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
-  auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_softmaxentropy");
+  auto kernel =
+      ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_softmaxentropy");
 
   viennacl::vector<float> p_buf((const cl_mem)p->data(), batchsize);
   viennacl::vector<float> t_buf((const cl_mem)t->data(), batchsize);
-  viennacl::vector<float> grad_buf(static_cast<cl_mem>(grad->mutable_data()), batchsize);
+  viennacl::vector<float> grad_buf(static_cast<cl_mem>(grad->mutable_data()),
+                                   batchsize);
 
   enqueue(kernel((cl_uint)batchsize, (cl_uint)dim, p_buf, t_buf, grad_buf));
 }
 
-
-template<>
+template <>
 void RowMax<float, lang::Opencl>(const size_t nrow, const size_t ncol,
-                                 const Block *in, Block *out, Context *ctx) {
+                                 const Block* in, Block* out, Context* ctx) {
   auto ocl_ctx = get_context(ctx->vcl_ctx_id);
   auto kernel = ocl_ctx.get_kernel("opencl_tensor_math", "clkernel_rowmax");
 
-//  kernel.global_work_size(0, nrow);
+  //  kernel.global_work_size(0, nrow);
 
   viennacl::matrix<float> in_buf((const cl_mem)in->data(), nrow, ncol);
-  viennacl::vector<float> outbuf(static_cast<cl_mem>(out->mutable_data()), nrow);
+  viennacl::vector<float> outbuf(static_cast<cl_mem>(out->mutable_data()),
+                                 nrow);
 
   enqueue(kernel((cl_uint)nrow, (cl_uint)ncol, in_buf, outbuf));
 }
@@ -606,29 +641,34 @@
 // **************************************
 /*
 template<>
-void AddCol<float, lang::Opencl>(const size_t nrow, const size_t ncol, const Block* A, const Block* v, Block* out, Context* ctx) {
+void AddCol<float, lang::Opencl>(const size_t nrow, const size_t ncol, const
+Block* A, const Block* v, Block* out, Context* ctx) {
 
 }
 
 
 template<>
-void AddRow<float, lang::Opencl>(const size_t nrow, const size_t ncol, const Block* A, const Block* v, Block* out, Context* ctx) {
+void AddRow<float, lang::Opencl>(const size_t nrow, const size_t ncol, const
+Block* A, const Block* v, Block* out, Context* ctx) {
 
 }
 
 
 template<>
-void Outer<float, lang::Opencl>(const size_t m, const size_t n, const Block* lhs, const Block* rhs, Block* out, Context* ctx) {
+void Outer<float, lang::Opencl>(const size_t m, const size_t n, const Block*
+lhs, const Block* rhs, Block* out, Context* ctx) {
   viennacl::vector<float> lhs_in((const cl_mem)lhs->data(), m);
   viennacl::vector<float> rhs_in((const cl_mem)rhs->data(), n);
-  viennacl::matrix<float> out_buf(static_cast<cl_mem>(out->mutable_data()), m, n);
+  viennacl::matrix<float> out_buf(static_cast<cl_mem>(out->mutable_data()), m,
+n);
 
   out_buf = viennacl::linalg::outer_prod(lhs_in, rhs_in);
 }
 
 
 template<>
-void SumColumns<float, lang::Opencl>(const size_t nrow, const size_t ncol, const Block* in, Block* out, Context* ctx) {
+void SumColumns<float, lang::Opencl>(const size_t nrow, const size_t ncol, const
+Block* in, Block* out, Context* ctx) {
   viennacl::matrix<float> m_in((const cl_mem)in->data(), nrow, ncol);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), nrow);
 
@@ -637,7 +677,8 @@
 
 
 template<>
-void SumRows<float, lang::Opencl>(const size_t nrow, const size_t ncol, const Block* in, Block* out, Context* ctx) {
+void SumRows<float, lang::Opencl>(const size_t nrow, const size_t ncol, const
+Block* in, Block* out, Context* ctx) {
   viennacl::matrix<float> m_in((const cl_mem)in->data(), nrow, ncol);
   viennacl::vector<float> v_out(static_cast<cl_mem>(out->mutable_data()), ncol);
 
@@ -645,8 +686,8 @@
 }
 */
 
-} // namespace singa
+}  // namespace singa
 
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
 
 #endif  // SINGA_CORE_TENSOR_TENSOR_MATH_OPENCL_H_
diff --git a/src/io/communicator.cc b/src/io/communicator.cc
new file mode 100644
index 0000000..a64c79d
--- /dev/null
+++ b/src/io/communicator.cc
@@ -0,0 +1,719 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+
+#include "singa/utils/cuda_utils.h"
+
+#ifdef USE_DIST
+
+#include "../core/tensor/math_kernel.h"
+#include "singa/io/communicator.h"
+
+namespace singa {
+
+static uint64_t getHostHash(const char *string) {
+  // Based on DJB2, result = result * 33 + char
+  uint64_t result = 5381;
+  for (int c = 0; string[c] != '\0'; c++) {
+    result = ((result << 5) + result) + string[c];
+  }
+  return result;
+}
+
+static void getHostName(char *hostname, int maxlen) {
+  gethostname(hostname, maxlen);
+  for (int i = 0; i < maxlen; i++) {
+    if (hostname[i] == '.') {
+      hostname[i] = '\0';
+      return;
+    }
+  }
+}
+
+NcclIdHolder::NcclIdHolder() { ncclGetUniqueId(&id); }  // end of constructor
+
+NcclIdHolder::~NcclIdHolder() {}
+
+// constructor for applications using the python multiprocessing module
+Communicator::Communicator(int local_rank, int world_size,
+                           const NcclIdHolder &holder, int buffSize) {
+  maxSize = (size_t)buffSize;
+  // this constructor is for NCCL WITHOUT MPI
+  UseMPI = false;
+  // Determine the rank of the collective communication
+  this->world_size = world_size;
+  this->local_rank = local_rank;
+  this->global_rank = local_rank;
+
+  // copy the nccl unique id from the input id holder
+  id = holder.id;
+
+  // setup cuda stream and nccl communicator
+  setup();
+
+}  // end of constructor
+
+// constructor for applications using MPI
+Communicator::Communicator(int buffSize) {
+  maxSize = (size_t)buffSize;
+  // this constructor is for NCCL WITH MPI
+  UseMPI = true;
+
+  // MPI initialization
+  MPICHECK(MPI_Init(NULL, NULL));
+  MPICHECK(MPI_Comm_rank(MPI_COMM_WORLD, &global_rank));
+  MPICHECK(MPI_Comm_size(MPI_COMM_WORLD, &world_size));
+
+  // calculate local_rank, which is used to select a GPU
+  local_rank = 0;
+  uint64_t hostHashs[world_size];
+  char hostname[1024];
+  getHostName(hostname, 1024);
+  hostHashs[global_rank] = getHostHash(hostname);
+  MPICHECK(MPI_Allgather(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, hostHashs,
+                         sizeof(uint64_t), MPI_BYTE, MPI_COMM_WORLD));
+  for (int p = 0; p < world_size; p++) {
+    if (p == global_rank) break;
+    if (hostHashs[p] == hostHashs[global_rank]) local_rank++;
+  }
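+  // NOTE (illustrative): if 2 hosts each run 4 processes, the 4 ranks that
+  // share a host hash receive local_rank 0..3, so cudaSetDevice(local_rank)
+  // in setup() assigns each process its own GPU on that host.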
+
+  // generate the NCCL unique ID at one process and broadcast it to all
+  if (global_rank == 0) ncclGetUniqueId(&id);
+  MPICHECK(MPI_Bcast((void *)&id, sizeof(id), MPI_BYTE, 0, MPI_COMM_WORLD));
+
+  // setup cuda stream and nccl communicator
+  setup();
+
+}  // end of constructor
+
+void Communicator::setup() {
+  CUDA_CHECK(cudaSetDevice(local_rank));
+  NCCLCHECK(ncclCommInitRank(&comm, world_size, id, global_rank));
+  CUDA_CHECK(cudaMalloc(&fusedSendBuff, maxSize * sizeof(float)));
+  CUDA_CHECK(cudaMalloc(&fusedRecvBuff, maxSize * sizeof(float)));
+  CUDA_CHECK(cudaEventCreateWithFlags(
+      &event, cudaEventBlockingSync | cudaEventDisableTiming));
+  halfInitialized = false;
+  sparsInitialized = false;
+}
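+// NOTE: the event is created with cudaEventDisableTiming (no timestamp,
+// cheaper) and cudaEventBlockingSync; it is reused throughout this file
+// purely for cross-stream ordering via cudaEventRecord plus
+// cudaStreamWaitEvent.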
+
+void Communicator::halfInit() {
+  // initialize the half-precision buffers
+  CUDA_CHECK(cudaMalloc(&fusedSendBuffHalf, maxSize * sizeof(__half)));
+  CUDA_CHECK(cudaMalloc(&fusedRecvBuffHalf, maxSize * sizeof(__half)));
+  halfInitialized = true;
+}
+
+void Communicator::sparsInit() {
+  // initialize the sparsification environment
+  CUDA_CHECK(cudaSetDevice(local_rank));
+  CUDA_CHECK(
+      cudaMalloc(&sparsRecvBuff, (int)(maxSize * sizeof(float) * world_size)));
+  CUDA_CHECK(cudaMalloc(&sparsSendBuff, (int)(maxSize * sizeof(float))));
+  CUDA_CHECK(cudaMalloc(&backupBuff, maxSize * sizeof(float)));
+  CUDA_CHECK(cudaMalloc(&fusedIndex, maxSize * sizeof(int)));
+  CUDA_CHECK(cudaMalloc(&xInd, (int)(sizeof(int) * maxSize)));
+  CUDA_CHECK(cudaMalloc(&xVal, (int)(sizeof(float) * maxSize)));
+  CUSPARSE_CHECK(cusparseCreate(&cusparse_handle));
+  nnz = (int *)malloc(sizeof(int));
+  nnzAll = (int *)malloc(sizeof(int) * world_size);
+  CUDA_CHECK(cudaMalloc(&nnzGPU, sizeof(int) * world_size));
+  CUDA_CHECK(cudaMalloc(&nnzAllGPU, sizeof(int) * world_size));
+  sparsInitialized = true;
+}
+
+void Communicator::allReduce(int size, void *sendbuff, void *recvbuff,
+                             ncclDataType_t ncclType, Context *ctx) {
+  NCCLCHECK(ncclAllReduce((const void *)sendbuff, (void *)recvbuff, size,
+                          ncclType, ncclSum, comm, ctx->s));
+}
+
+void Communicator::generateBlocks(Tensor &t) {
+  device_ = t.device();
+
+  blocks_.clear();
+  blocks_.push_back(t.block());
+}
+
+void Communicator::generateBlocks(std::vector<Tensor> &t) {
+  device_ = t[0].device();
+
+  prev_blocks_ = blocks_;
+
+  blocks_.clear();
+  blocks_.reserve(t.size());
+  prev_blocks_.reserve(prev_blocks_.size() + t.size());
+
+  for (size_t i = 0; i < t.size(); ++i) {
+    blocks_.push_back(t[i].block());
+    prev_blocks_.push_back(t[i].block());
+  }
+}
+
+void Communicator::wait() {
+  if (!device_) {
+    // nothing to wait for if no synchronization has been issued yet
+    return;
+  }
+
+  device_->Exec(
+      [this](Context *ctx) mutable {
+        // synchronizing on all the CUDA streams used by communicator
+        CUDA_CHECK(cudaEventRecord(event, ctx->s));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->stream, event, 0));
+        CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->stream, event, 0));
+        CUDA_CHECK(cudaEventRecord(event, ctx->c2));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->stream, event, 0));
+      },
+      blocks_, blocks_, "Waiting");
+}
+
+Communicator::~Communicator() {
+  // finalizing NCCL
+  ncclCommDestroy(comm);
+  if (UseMPI == true) MPICHECK(MPI_Finalize());
+  CUDA_CHECK(cudaFree(fusedSendBuff));
+  CUDA_CHECK(cudaFree(fusedRecvBuff));
+
+  if (halfInitialized == true) {
+    CUDA_CHECK(cudaFree(fusedSendBuffHalf));
+    CUDA_CHECK(cudaFree(fusedRecvBuffHalf));
+  }
+
+  if (sparsInitialized == true) {
+    CUDA_CHECK(cudaFree(sparsRecvBuff));
+    CUDA_CHECK(cudaFree(sparsSendBuff));
+    CUDA_CHECK(cudaFree(backupBuff));
+    CUDA_CHECK(cudaFree(fusedIndex));
+    CUDA_CHECK(cudaFree(xInd));
+    CUDA_CHECK(cudaFree(xVal));
+    CUDA_CHECK(cudaFree(nnzGPU));
+    CUDA_CHECK(cudaFree(nnzAllGPU));
+  }
+}
+
+void Communicator::fusedSynch(vector<Tensor> &t, bool send) {
+  CHECK_GT(t.size(), 0);
+
+  generateBlocks(t);
+
+  if (!send) {
+    // buffer the tensors
+    device_->Exec(
+        [this, t](Context *ctx) mutable {
+          // record the event of the default cuda stream and follow it
+          CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+        },
+        prev_blocks_, prev_blocks_, "Waiting");
+
+    device_->Exec(
+        [this, t](Context *ctx) mutable {
+          // memory copy to fusedBuff
+          for (size_t i = 0; i < t.size(); i++) {
+            CUDA_CHECK(
+                cudaMemcpyAsync((void *)(fusedSendBuff + sendBuffOffset),
+                                (const void *)t[i].block()->mutable_data(),
+                                t[i].Size() * sizeof(float),
+                                cudaMemcpyDeviceToDevice, ctx->c1));
+            sendBuffOffset += t[i].Size();
+          }
+        },
+        prev_blocks_, blocks_, "Dist_c1_fusedSynch_filling");
+
+  } else {
+    // send the tensors in the buffer
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          // wait for the memcpy to complete
+          CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+        },
+        prev_blocks_, prev_blocks_, "Waiting");
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          allReduce((int)sendBuffOffset, (void *)fusedSendBuff,
+                    (void *)fusedRecvBuff, ncclFloat, ctx);
+          sendBuffOffset = 0;
+        },
+        prev_blocks_, blocks_, "Dist_s_fusedSynch_allreduce");
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          // wait for the allreduce to complete
+          CUDA_CHECK(cudaEventRecord(event, ctx->s));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+        },
+        blocks_, blocks_, "Waiting");
+    device_->Exec(
+        [this, t](Context *ctx) mutable {
+          // copy data back to tensors after allreduce
+          size_t offset = 0;
+          for (size_t i = 0; i < t.size(); i++) {
+            CUDA_CHECK(cudaMemcpyAsync((void *)t[i].block()->mutable_data(),
+                                       (const void *)(fusedRecvBuff + offset),
+                                       t[i].Size() * sizeof(float),
+                                       cudaMemcpyDeviceToDevice, ctx->c1));
+            offset += t[i].Size();
+          }
+        },
+        blocks_, blocks_, "Dist_c1_fusedSynch_copyBackToTensor");
+  }
+}
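+// NOTE (inferred from the control flow above): calls with send == false
+// only append tensors to fusedSendBuff; a later call with send == true
+// issues a single ncclAllReduce over everything buffered so far and
+// scatters the result back into its tensors, amortizing launch and
+// communication overhead across many small tensors.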
+
+void Communicator::synch(Tensor &t) {
+  // generateBlocks(t);
+  device_ = t.device();
+
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        // record the event of the default cuda stream and follow it
+        CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+      },
+      {t.block()}, {t.block()}, "Waiting");
+
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        void *addr = t.block()->mutable_data();
+        allReduce(t.Size(), addr, addr, ncclFloat, ctx);
+      },
+      {t.block()}, {t.block()}, "Dist_s_synch_allreduce");
+}
+
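+// fusedSynchHalf mirrors fusedSynch but converts the fused buffer to half
+// precision before communicating, halving the all-reduce volume:
+// float2half on stream c1, an ncclHalf all-reduce on stream s, then
+// half2float and the copy back to the tensors on stream c2.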
+void Communicator::fusedSynchHalf(vector<Tensor> &t, bool send) {
+  CHECK_GT(t.size(), 0);
+
+  generateBlocks(t);
+
+  if (halfInitialized == false) halfInit();
+
+  if (!send) {
+    // buffer the tensors; the conversion to half happens at send time
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          // record the event of the default cuda stream and follow it
+          CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+        },
+        prev_blocks_, prev_blocks_, "Waiting");
+    device_->Exec(
+        [this, t](Context *ctx) mutable {
+          // memory copy to fusedSendBuff
+          for (size_t i = 0; i < t.size(); i++) {
+            CUDA_CHECK(
+                cudaMemcpyAsync((void *)(fusedSendBuff + sendBuffOffset),
+                                (const void *)t[i].block()->mutable_data(),
+                                t[i].Size() * sizeof(float),
+                                cudaMemcpyDeviceToDevice, ctx->c1));
+            sendBuffOffset += t[i].Size();
+          }
+        },
+        prev_blocks_, blocks_, "Dist_c1_fusedSynchHalf_filling");
+  } else {
+    // send the tensors in the buffer
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          cuda::float2half(sendBuffOffset, fusedSendBuff, fusedSendBuffHalf,
+                           ctx->c1);
+        },
+        prev_blocks_, blocks_, "Dist_c1_fusedSynchHalf_float2half");
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          // wait for the memcpy to complete
+          CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+        },
+        blocks_, blocks_, "Waiting");
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          allReduce((int)sendBuffOffset, (void *)fusedSendBuffHalf,
+                    (void *)fusedRecvBuffHalf, ncclHalf, ctx);
+        },
+        blocks_, blocks_, "Dist_s_fusedSynchHalf_allreduce");
+    device_->Exec(
+        [this](Context *ctx) mutable {
+          // wait for the allreduce to complete
+          CUDA_CHECK(cudaEventRecord(event, ctx->s));
+          CUDA_CHECK(cudaStreamWaitEvent(ctx->c2, event, 0));
+        },
+        blocks_, blocks_, "Waiting");
+    device_->Exec(
+        [this, t](Context *ctx) mutable {
+          cuda::half2float(sendBuffOffset, fusedRecvBuffHalf, fusedRecvBuff,
+                           ctx->c2);
+
+          sendBuffOffset = 0;
+
+          // copy data back to tensors after allreduce
+          size_t offset = 0;
+          for (size_t i = 0; i < t.size(); i++) {
+            CUDA_CHECK(cudaMemcpyAsync((void *)t[i].block()->mutable_data(),
+                                       (const void *)(fusedRecvBuff + offset),
+                                       t[i].Size() * sizeof(float),
+                                       cudaMemcpyDeviceToDevice, ctx->c2));
+            offset += t[i].Size();
+          }
+        },
+        blocks_, blocks_, "Dist_c2_fusedSynchHalf_half2floatcopy");
+  }
+}
+
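+// synchHalf is the single-tensor variant of the half-precision path; it
+// reuses fusedSendBuffHalf/fusedRecvBuffHalf as staging buffers for the
+// convert / all-reduce / convert-back pipeline across streams c1, s, c2.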
+void Communicator::synchHalf(Tensor &t) {
+  generateBlocks(t);
+
+  if (halfInitialized == false) halfInit();
+
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        // record the event of the default cuda stream and follow it
+        CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+      },
+      blocks_, blocks_, "Waiting");
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        float *addr = static_cast<float *>(t.block()->mutable_data());
+        cuda::float2half(t.Size(), addr, fusedSendBuffHalf, ctx->c1);
+      },
+      blocks_, blocks_, "Dist_c1_synchHalf_float2half");
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        // wait for the conversion to half precision to complete
+        CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+      },
+      blocks_, blocks_, "Waiting");
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        allReduce(t.Size(), (void *)fusedSendBuffHalf,
+                  (void *)fusedRecvBuffHalf, ncclHalf, ctx);
+      },
+      blocks_, blocks_, "Dist_s_synchHalf_allreduce");
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        // wait for the allreduce to complete
+        CUDA_CHECK(cudaEventRecord(event, ctx->s));
+        CUDA_CHECK(cudaStreamWaitEvent(ctx->c2, event, 0));
+      },
+      blocks_, blocks_, "Waiting");
+  device_->Exec(
+      [this, t](Context *ctx) mutable {
+        float *addr = static_cast<float *>(t.block()->mutable_data());
+        cuda::half2float(t.Size(), fusedRecvBuffHalf, addr, ctx->c2);
+      },
+      blocks_, blocks_, "Dist_c2_synchHalf_half2float");
+}
+
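+// The sparsification entry points cut communication volume by exchanging
+// only significant gradient entries: with topK == false, entries whose
+// absolute value is below sparsThreshold are dropped; with topK == true,
+// only the ceil(sparsThreshold * num) largest entries are kept. The
+// optional accumulation tensor carries the dropped residual forward to
+// later iterations. E.g. (hypothetical tensors):
+//   comm.sparsification(grad, accum, /*sparsThreshold=*/0.01f, /*topK=*/false);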
+void Communicator::sparsification(Tensor &t, Tensor &accumulation,
+                                  float sparsThreshold, bool topK) {
+  generateBlocks(t);
+  blocks_.push_back(accumulation.block());
+
+  device_->Exec(
+      [=](Context *ctx) mutable {
+        _sparsification(t, &accumulation, sparsThreshold, topK, ctx);
+      },
+      blocks_, blocks_, "Dist_c1c2_sparsification");
+}
+
+void Communicator::sparsification(Tensor &t, float sparsThreshold, bool topK) {
+  generateBlocks(t);
+
+  t.device()->Exec(
+      [=](Context *ctx) mutable {
+        _sparsification(t, (Tensor *)NULL, sparsThreshold, topK, ctx);
+      },
+      blocks_, blocks_, "Dist_c1c2_sparsification");
+}
+
+void Communicator::_sparsification(Tensor &t, Tensor *accumulation,
+                                   float sparsThreshold, bool topK,
+                                   Context *ctx) {
+  // threshold for sparsification
+  threshold = sparsThreshold;
+
+  // record the event of the default cuda stream and follow it
+  CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+
+  // memory copy to fusedSendBuff
+  CUDA_CHECK(cudaMemcpyAsync(
+      (void *)fusedSendBuff, (const void *)t.block()->mutable_data(),
+      t.Size() * sizeof(float), cudaMemcpyDeviceToDevice, ctx->c1));
+
+  float *accumPtr;
+
+  if (accumulation != NULL)
+    accumPtr = (float *)accumulation->block()->mutable_data();
+  else
+    accumPtr = NULL;
+
+  if (topK == false)
+    valSparsAllReduce(t.Size(), accumPtr, ctx);
+  else
+    topKSparsAllReduce(t.Size(), accumPtr, ctx);
+
+  // copy data back to tensor after allreduce
+  CUDA_CHECK(cudaMemcpyAsync(
+      (void *)t.block()->mutable_data(), (const void *)fusedRecvBuff,
+      t.Size() * sizeof(float), cudaMemcpyDeviceToDevice, ctx->c2));
+}
+
+void Communicator::fusedSparsification(vector<Tensor> &t, Tensor &accumulation,
+                                       float sparsThreshold, bool topK) {
+  CHECK_GT(t.size(), 0);
+
+  generateBlocks(t);
+  blocks_.push_back(accumulation.block());
+
+  device_->Exec(
+      [=](Context *ctx) mutable {
+        _fusedSparsification(t, &accumulation, sparsThreshold, topK, ctx);
+      },
+      blocks_, blocks_, "Dist_c1c2_fusedSparsification");
+}
+
+void Communicator::fusedSparsification(vector<Tensor> &t, float sparsThreshold,
+                                       bool topK) {
+  CHECK_GT(t.size(), 0);
+
+  generateBlocks(t);
+
+  device_->Exec(
+      [=](Context *ctx) mutable {
+        _fusedSparsification(t, (Tensor *)NULL, sparsThreshold, topK, ctx);
+      },
+      blocks_, blocks_, "Dist_c1c2_fusedSparsification");
+}
+
+void Communicator::_fusedSparsification(vector<Tensor> &t, Tensor *accumulation,
+                                        float sparsThreshold, bool topK,
+                                        Context *ctx) {
+  // threshold for sparsification
+  threshold = sparsThreshold;
+
+  // record the event of the default cuda stream and follow it
+  CUDA_CHECK(cudaEventRecord(event, ctx->stream));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->c1, event, 0));
+
+  size_t offset = 0;
+
+  // memory copy to fusedSendBuff
+  for (size_t i = 0; i < t.size(); i++) {
+    CUDA_CHECK(cudaMemcpyAsync((void *)(fusedSendBuff + offset),
+                               (const void *)t[i].block()->mutable_data(),
+                               t[i].Size() * sizeof(float),
+                               cudaMemcpyDeviceToDevice, ctx->c1));
+    offset += t[i].Size();
+  }
+
+  float *accumPtr;
+
+  if (accumulation != NULL)
+    accumPtr = (float *)accumulation->block()->mutable_data();
+  else
+    accumPtr = NULL;
+
+  if (topK == false)
+    valSparsAllReduce(offset, accumPtr, ctx);
+  else
+    topKSparsAllReduce(offset, accumPtr, ctx);
+
+  // copy data back to tensors after allreduce
+  offset = 0;
+  for (size_t i = 0; i < t.size(); i++) {
+    CUDA_CHECK(cudaMemcpyAsync((void *)t[i].block()->mutable_data(),
+                               (const void *)(fusedRecvBuff + offset),
+                               t[i].Size() * sizeof(float),
+                               cudaMemcpyDeviceToDevice, ctx->c2));
+    offset += t[i].Size();
+  }
+}
+
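+// valSparsAllReduce exchanges threshold-sparsified gradients. Each rank
+// packs [indices | values] of its nnz surviving entries into
+// sparsSendBuff; the per-rank nnz counts are all-gathered first so every
+// rank can send a fixed 2 * nnzMax elements, and the gathered sparse
+// vectors are then summed into fusedRecvBuff with cusparseSaxpyi to
+// complete the all-reduce.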
+void Communicator::valSparsAllReduce(size_t num, float *accumulation,
+                                     Context *ctx) {
+  if (sparsInitialized == false) sparsInit();
+
+  if (accumulation != NULL) {
+    // add the previous accumulation
+    cuda::add(num, fusedSendBuff, accumulation, fusedSendBuff, ctx->c1);
+    // backup the fusedSendBuff
+    CUDA_CHECK(cudaMemcpyAsync((void *)backupBuff, (const void *)fusedSendBuff,
+                               sizeof(float) * num, cudaMemcpyDeviceToDevice,
+                               ctx->c1));
+  }
+
+  // sparsification based on threshold
+  cuda::sparsabs(num, threshold, fusedSendBuff, fusedSendBuff, ctx->c1);
+
+  // store the residual (the dropped gradient) back into the accumulation
+  if (accumulation != NULL)
+    cuda::sub(num, backupBuff, fusedSendBuff, accumulation, ctx->c1);
+
+  // produce the index of the sparse array
+  cuda::sparsindex(num, fusedSendBuff, fusedIndex, ctx->c1);
+
+  // remove zeros from the index to form a sparse array and get the number of non-zeros (nnz)
+  cuda::removezeroidx(num, fusedIndex, ctx->c1, nnz);
+
+  CUDA_CHECK(cudaMemcpyAsync((void *)nnzGPU, (const void *)nnz, sizeof(int),
+                             cudaMemcpyHostToDevice, ctx->c1));
+
+  // all-gather all the nnz from different ranks
+  NCCLCHECK(ncclAllGather((const void *)nnzGPU, (void *)nnzAllGPU, 1, ncclInt,
+                          comm, ctx->c1));
+
+  CUDA_CHECK(cudaMemcpyAsync((void *)nnzAll, (const void *)nnzAllGPU,
+                             sizeof(int) * world_size, cudaMemcpyDeviceToHost,
+                             ctx->c1));
+
+  CUDA_CHECK(cudaStreamSynchronize(ctx->c1));
+
+  int nnzMax = 0;
+  for (int i = 0; i < world_size; i++)
+    if (nnzAll[i] > nnzMax) nnzMax = nnzAll[i];
+
+  // remove zeros from the values to form a sparse array
+  cuda::removezeroval(num, fusedSendBuff, ctx->c1);
+
+  CUDA_CHECK(cudaMemcpyAsync((void *)(sparsSendBuff), (const void *)fusedIndex,
+                             sizeof(int) * (*nnz), cudaMemcpyDeviceToDevice,
+                             ctx->c1));
+  CUDA_CHECK(cudaMemcpyAsync(
+      (void *)(sparsSendBuff + (*nnz)), (const void *)fusedSendBuff,
+      sizeof(float) * (*nnz), cudaMemcpyDeviceToDevice, ctx->c1));
+
+  // wait for the memcpy to complete
+  CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+
+  // all-gather all the sparse gradients
+  NCCLCHECK(ncclAllGather((const void *)sparsSendBuff, (void *)sparsRecvBuff,
+                          2 * nnzMax, ncclFloat, comm, ctx->s));
+
+  // wait for the all-gather to complete
+  CUDA_CHECK(cudaEventRecord(event, ctx->s));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->c2, event, 0));
+
+  // reduce the sparse gradients, first zeroing the sum buffer
+  CUDA_CHECK(cudaMemsetAsync(fusedRecvBuff, 0, num * sizeof(float), ctx->c2));
+
+  size_t offset = 0;
+  float alpha = 1.0;
+
+  // add the sparse gradient from each rank to the sum buffer to finish the
+  // all-reduce process
+  CUSPARSE_CHECK(cusparseSetStream(cusparse_handle, ctx->c2));
+
+  for (int i = 0; i < world_size; i++) {
+    CUDA_CHECK(cudaMemcpyAsync(
+        (void *)xInd, (const void *)(sparsRecvBuff + offset),
+        sizeof(int) * nnzAll[i], cudaMemcpyDeviceToDevice, ctx->c2));
+    offset += nnzAll[i];
+    CUDA_CHECK(cudaMemcpyAsync(
+        (void *)xVal, (const void *)(sparsRecvBuff + offset),
+        sizeof(float) * nnzAll[i], cudaMemcpyDeviceToDevice, ctx->c2));
+    offset += (2 * nnzMax - nnzAll[i]);
+    CUSPARSE_CHECK(cusparseSaxpyi(cusparse_handle, nnzAll[i], &alpha, xVal,
+                                  xInd, fusedRecvBuff,
+                                  CUSPARSE_INDEX_BASE_ONE));
+  }
+}
+
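+// topKSparsAllReduce keeps a fixed number of entries per rank instead of
+// thresholding by magnitude: the fused buffer is sorted by value, the top
+// nnzMax = ceil(threshold * num) index/value pairs are all-gathered, and
+// each rank's sparse contribution is summed into fusedRecvBuff as above.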
+void Communicator::topKSparsAllReduce(size_t num, float *accumulation,
+                                      Context *ctx) {
+  if (sparsInitialized == false) sparsInit();
+
+  // use gradient accumulation
+  if (accumulation != NULL) {
+    // add the previous accumulation
+    cuda::add(num, fusedSendBuff, accumulation, fusedSendBuff, ctx->c1);
+    // backup the fusedSendBuff
+    CUDA_CHECK(cudaMemcpyAsync((void *)backupBuff, (const void *)fusedSendBuff,
+                               sizeof(float) * num, cudaMemcpyDeviceToDevice,
+                               ctx->c1));
+  }
+
+  // generate an index array and sort fusedSendBuff in descending order of value
+  cuda::generateindex(num, fusedIndex, ctx->c1);
+  cuda::sortbykey(num, fusedSendBuff, fusedIndex, ctx->c1);
+
+  // determine the number of top-K elements to communicate
+  int nnzMax = (int)ceil(threshold * num);
+
+  // store the residual (the dropped gradient) back into the accumulation
+  float alpha = 1.0;
+  if (accumulation != NULL) {
+    CUDA_CHECK(cudaMemsetAsync(accumulation, 0, num * sizeof(float), ctx->c1));
+    CUSPARSE_CHECK(cusparseSetStream(cusparse_handle, ctx->c1));
+    CUSPARSE_CHECK(cusparseSaxpyi(cusparse_handle, nnzMax, &alpha,
+                                  fusedSendBuff, fusedIndex, accumulation,
+                                  CUSPARSE_INDEX_BASE_ONE));
+    cuda::sub(num, backupBuff, accumulation, accumulation, ctx->c1);
+  }
+
+  // the top-K values and indices will be sent
+  CUDA_CHECK(cudaMemcpyAsync((void *)(sparsSendBuff), (const void *)fusedIndex,
+                             sizeof(int) * nnzMax, cudaMemcpyDeviceToDevice,
+                             ctx->c1));
+  CUDA_CHECK(cudaMemcpyAsync(
+      (void *)(sparsSendBuff + nnzMax), (const void *)fusedSendBuff,
+      sizeof(float) * nnzMax, cudaMemcpyDeviceToDevice, ctx->c1));
+
+  // wait for the memcpy to complete
+  CUDA_CHECK(cudaEventRecord(event, ctx->c1));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->s, event, 0));
+
+  // all-gather all the sparse gradients
+  NCCLCHECK(ncclAllGather((const void *)sparsSendBuff, (void *)sparsRecvBuff,
+                          2 * nnzMax, ncclFloat, comm, ctx->s));
+
+  // wait for the all-gather to complete
+  CUDA_CHECK(cudaEventRecord(event, ctx->s));
+  CUDA_CHECK(cudaStreamWaitEvent(ctx->c2, event, 0));
+
+  // reduce the sparse gradients, first zeroing the sum buffer
+  CUDA_CHECK(cudaMemsetAsync(fusedRecvBuff, 0, num * sizeof(float), ctx->c2));
+
+  size_t offset = 0;
+
+  CUSPARSE_CHECK(cusparseSetStream(cusparse_handle, ctx->c2));
+
+  // add the sparse gradient from each rank to the sum buffer to finish the
+  // all-reduce process
+  for (int i = 0; i < world_size; i++) {
+    CUDA_CHECK(cudaMemcpyAsync(
+        (void *)xInd, (const void *)(sparsRecvBuff + offset),
+        sizeof(int) * nnzMax, cudaMemcpyDeviceToDevice, ctx->c2));
+    offset += nnzMax;
+    CUDA_CHECK(cudaMemcpyAsync(
+        (void *)xVal, (const void *)(sparsRecvBuff + offset),
+        sizeof(float) * nnzMax, cudaMemcpyDeviceToDevice, ctx->c2));
+    offset += nnzMax;
+    CUSPARSE_CHECK(cusparseSaxpyi(cusparse_handle, nnzMax, &alpha, xVal, xInd,
+                                  fusedRecvBuff, CUSPARSE_INDEX_BASE_ONE));
+  }
+}
+}  // namespace singa
+
+#endif  // USE_DIST
diff --git a/src/io/image_transformer.cc b/src/io/image_transformer.cc
index 6e5567d..d9a8be1 100644
--- a/src/io/image_transformer.cc
+++ b/src/io/image_transformer.cc
@@ -26,331 +26,328 @@
 
 namespace singa {
 
-  Tensor ImageTransformer::Apply(int flag, Tensor& input) {
-    CHECK_LE(input.nDim(), 4u);
-    CHECK_GE(input.nDim(), 2u);
-    CHECK_EQ(input.data_type(), kFloat32) << "Data type " << input.data_type()
-      << " is invalid for an raw image";
-    srand((unsigned int)time(NULL));
-    /// TODO
-    /// currently only consider one sample each time
+Tensor ImageTransformer::Apply(int flag, Tensor& input) {
+  CHECK_LE(input.nDim(), 4u);
+  CHECK_GE(input.nDim(), 2u);
+  CHECK_EQ(input.data_type(), kFloat32) << "Data type " << input.data_type()
+                                        << " is invalid for an raw image";
+  srand((unsigned int)time(NULL));
+  /// TODO
+  /// currently we only consider one sample at a time
 
-    /// resize image using opencv resize
-    Tensor temp1;
+  /// resize image using opencv resize
+  Tensor temp1;
 #ifdef USE_OPENCV
-    temp1 = resize(input, resize_height_, resize_width_, image_dim_order_);
+  temp1 = resize(input, resize_height_, resize_width_, image_dim_order_);
 #else
-    temp1 = input;
+  temp1 = input;
 #endif
 
-    /// crop
-    Tensor temp2;
-    size_t height = 0, width = 0;
-    if (input.nDim() >= 3u) {
-      if (image_dim_order_ == "CHW")
-        height = temp1.shape(input.nDim() - 2), width = temp1.shape(input.nDim() - 1);
-      else if (image_dim_order_ == "HWC")
-        height = temp1.shape(input.nDim() - 3), width = temp1.shape(input.nDim() - 2);
-      else
-        LOG(FATAL) << "Unknow dimension order for images " << image_dim_order_
-               << " Only support 'HWC' and 'CHW'";
-    } else /// input is 2D gray image
-      height = temp1.shape(0), width = temp1.shape(1);
+  /// crop
+  Tensor temp2;
+  size_t height = 0, width = 0;
+  if (input.nDim() >= 3u) {
+    if (image_dim_order_ == "CHW")
+      height = temp1.shape(input.nDim() - 2), width = temp1.shape(input.nDim() - 1);
+    else if (image_dim_order_ == "HWC")
+      height = temp1.shape(input.nDim() - 3), width = temp1.shape(input.nDim() - 2);
+    else
+      LOG(FATAL) << "Unknow dimension order for images " << image_dim_order_
+                 << " Only support 'HWC' and 'CHW'";
+  } else /// input is 2D gray image
+    height = temp1.shape(0), width = temp1.shape(1);
 
-    if (crop_shape_.size() == 2) {
-      if (flag == kTrain) { 
-        /// random crop
-        if (crop_shape_[0] > height || crop_shape_[0] > width)
-          LOG(FATAL) << "Crop size larger than the size of raw image";
-        size_t crop_h_offset = rand() % ((height - crop_shape_[0]) / 2), 
-               crop_w_offset = rand() % ((width - crop_shape_[1]) / 2);
-        temp2 = crop(temp1, crop_shape_[0], crop_shape_[1], 
-                  crop_h_offset, crop_w_offset, image_dim_order_);
-      } else if (flag == kEval) {
-        /// central crop
-        size_t crop_h_offset = (height - crop_shape_[0]) / 2,
-               crop_w_offset = (width - crop_shape_[1]) / 2;
-        temp2 = crop(temp1, crop_shape_[0], crop_shape_[1], 
-                  crop_h_offset, crop_w_offset, image_dim_order_); 
-      }
+  if (crop_shape_.size() == 2) {
+    if (flag == kTrain) {
+      /// random crop
+      if (crop_shape_[0] > height || crop_shape_[0] > width)
+        LOG(FATAL) << "Crop size larger than the size of raw image";
+      size_t crop_h_offset = rand() % ((height - crop_shape_[0]) / 2),
+             crop_w_offset = rand() % ((width - crop_shape_[1]) / 2);
+      temp2 = crop(temp1, crop_shape_[0], crop_shape_[1],
+                   crop_h_offset, crop_w_offset, image_dim_order_);
+    } else if (flag == kEval) {
+      /// central crop
+      size_t crop_h_offset = (height - crop_shape_[0]) / 2,
+             crop_w_offset = (width - crop_shape_[1]) / 2;
+      temp2 = crop(temp1, crop_shape_[0], crop_shape_[1],
+                   crop_h_offset, crop_w_offset, image_dim_order_);
     }
-    else temp2 = temp1;
+  } else temp2 = temp1;
 
-    /// mirror
-    Tensor output;
-    if ((flag == kTrain) && (rand() % 2))
-        output = mirror(temp2, true, false, image_dim_order_);
-    else output = temp2;
-    return output;
-  }
+  /// mirror
+  Tensor output;
+  if ((flag == kTrain) && (rand() % 2))
+    output = mirror(temp2, true, false, image_dim_order_);
+  else output = temp2;
+  return output;
+}
 
 #ifdef USE_OPENCV
-  Tensor resize(Tensor& input, const size_t resize_height, 
-               const size_t resize_width, const string& image_dim_order) {
-    CHECK_LE(input.nDim(), 4u);
-    CHECK_GE(input.nDim(), 2u);
-    if (!resize_height || !resize_width) return input;
-    Tensor output;
-    cv::Mat mat;
-    const auto* in = input.data<float>();
-    if (input.nDim() == 4u) {
-      /// TODO
-      /// batch based resize
-      LOG(FATAL) << "Not implemented";
-    } else if (input.nDim() == 3u) {
-      if (image_dim_order == "CHW") {
-        size_t height = input.shape(1), width = input.shape(2),
-               channel = input.shape(0);
-        if (channel == 3u) {
-          mat = cv::Mat(height, width, CV_32FC3, cv::Scalar(0, 0, 0));
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              for (size_t k = 0; k < channel; k++)
-                mat.at<cv::Vec3f>(i, j)[k] = in[k * height * width + i * width + j];
-        }
-        else if (channel == 1u) {
-          mat = cv::Mat(height, width, CV_32FC1);
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-                mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
-        }
-        else LOG(FATAL) << "Invalid channel size: " << channel;
-      } else if (image_dim_order == "HWC") {
-        size_t height = input.shape(0), width = input.shape(1),
-               channel = input.shape(2);
-        if (channel == 3u) {
-          mat = cv::Mat(height, width, CV_32FC3, cv::Scalar(0, 0, 0));
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              for (size_t k = 0; k < channel; k++)
-                mat.at<cv::Vec3f>(i, j)[k] =
-                  in[i * width * channel + j * channel + k];
-        } else if (channel == 1u) { /// 2D gray image
-          mat = cv::Mat(height, width, CV_32FC1);
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
-        } else LOG(FATAL) << "Invalid channel size: " << channel;
-      } else {
-        LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
-                   << " Only support 'HWC' and 'CHW'";
-      }
-    } else { /// 2D gray image
-      size_t height = input.shape(0), width = input.shape(1);
-      mat = cv::Mat(height, width, CV_32FC1);
-      for (size_t i = 0; i < height; i++)
-        for (size_t j = 0; j < width; j++)
-          mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
+Tensor resize(Tensor& input, const size_t resize_height,
+              const size_t resize_width, const string& image_dim_order) {
+  CHECK_LE(input.nDim(), 4u);
+  CHECK_GE(input.nDim(), 2u);
+  if (!resize_height || !resize_width) return input;
+  Tensor output;
+  cv::Mat mat;
+  const auto* in = input.data<float>();
+  if (input.nDim() == 4u) {
+    /// TODO
+    /// batch based resize
+    LOG(FATAL) << "Not implemented";
+  } else if (input.nDim() == 3u) {
+    if (image_dim_order == "CHW") {
+      size_t height = input.shape(1), width = input.shape(2),
+             channel = input.shape(0);
+      if (channel == 3u) {
+        mat = cv::Mat(height, width, CV_32FC3, cv::Scalar(0, 0, 0));
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            for (size_t k = 0; k < channel; k++)
+              mat.at<cv::Vec3f>(i, j)[k] = in[k * height * width + i * width + j];
+      } else if (channel == 1u) {
+        mat = cv::Mat(height, width, CV_32FC1);
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
+      } else LOG(FATAL) << "Invalid channel size: " << channel;
+    } else if (image_dim_order == "HWC") {
+      size_t height = input.shape(0), width = input.shape(1),
+             channel = input.shape(2);
+      if (channel == 3u) {
+        mat = cv::Mat(height, width, CV_32FC3, cv::Scalar(0, 0, 0));
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            for (size_t k = 0; k < channel; k++)
+              mat.at<cv::Vec3f>(i, j)[k] =
+                in[i * width * channel + j * channel + k];
+      } else if (channel == 1u) { /// 2D gray image
+        mat = cv::Mat(height, width, CV_32FC1);
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
+      } else LOG(FATAL) << "Invalid channel size: " << channel;
+    } else {
+      LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
+                 << " Only support 'HWC' and 'CHW'";
     }
-    cv::Size size(resize_width, resize_height);
-    cv::Mat resized;
-    cv::resize(mat, resized, size);
-    CHECK_EQ(resized.size().height, resize_height);
-    CHECK_EQ(resized.size().width, resize_width);
-    size_t new_size = resize_height * resize_width * resized.channels();
-    float* out = new float[new_size];
-    if (input.nDim() == 4u) {
-      /// TODO
-      /// batch based resize
-      LOG(FATAL) << "Not implemented";
-    } else if (input.nDim() == 3u) {
-      if (image_dim_order == "CHW") {
-        size_t height = resize_height, width = resize_width,
-           channel = input.shape(0);
-        if (channel == 3u) {
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              for (size_t k = 0; k < channel; k++)
-                out[k * height * width + i * width + j] = resized.at<cv::Vec3f>(i, j)[k];
-        } else { /// 2D gray image
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
-        }
-        Tensor temp(Shape{channel, height, width});
-        temp.CopyDataFromHostPtr<float>(out, new_size);
-        output = temp;
-      } else {
-        size_t height = resize_height, width = resize_width,
-           channel = input.shape(2);
-        if (channel == 3u) {
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              for (size_t k = 0; k < channel; k++)
-                out[i * width * channel + j * channel + k] = resized.at<cv::Vec3f>(i, j)[k];
-        } else { /// 1 channel
-          for (size_t i = 0; i < height; i++)
-            for (size_t j = 0; j < width; j++)
-              out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
-        }
-        Tensor temp(Shape{height, width, channel}); 
-        temp.CopyDataFromHostPtr<float>(out, new_size);
-        output = temp;
+  } else { /// 2D gray image
+    size_t height = input.shape(0), width = input.shape(1);
+    mat = cv::Mat(height, width, CV_32FC1);
+    for (size_t i = 0; i < height; i++)
+      for (size_t j = 0; j < width; j++)
+        mat.at<cv::Vec<float, 1>>(i, j)[0] = in[i * width + j];
+  }
+  cv::Size size(resize_width, resize_height);
+  cv::Mat resized;
+  cv::resize(mat, resized, size);
+  CHECK_EQ(resized.size().height, resize_height);
+  CHECK_EQ(resized.size().width, resize_width);
+  size_t new_size = resize_height * resize_width * resized.channels();
+  float* out = new float[new_size];
+  if (input.nDim() == 4u) {
+    /// TODO
+    /// batch based resize
+    LOG(FATAL) << "Not implemented";
+  } else if (input.nDim() == 3u) {
+    if (image_dim_order == "CHW") {
+      size_t height = resize_height, width = resize_width,
+             channel = input.shape(0);
+      if (channel == 3u) {
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            for (size_t k = 0; k < channel; k++)
+              out[k * height * width + i * width + j] = resized.at<cv::Vec3f>(i, j)[k];
+      } else { /// 2D gray image
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
       }
-    } else { /// 2D gray image
-      size_t height = resize_height, width = resize_width;
-      for (size_t i = 0; i < height; i++)
-        for (size_t j = 0; j < width; j++)
-          out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
-      Tensor temp(Shape{height, width});
+      Tensor temp(Shape{channel, height, width});
+      temp.CopyDataFromHostPtr<float>(out, new_size);
+      output = temp;
+    } else {
+      size_t height = resize_height, width = resize_width,
+             channel = input.shape(2);
+      if (channel == 3u) {
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            for (size_t k = 0; k < channel; k++)
+              out[i * width * channel + j * channel + k] = resized.at<cv::Vec3f>(i, j)[k];
+      } else { /// 1 channel
+        for (size_t i = 0; i < height; i++)
+          for (size_t j = 0; j < width; j++)
+            out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
+      }
+      Tensor temp(Shape{height, width, channel});
       temp.CopyDataFromHostPtr<float>(out, new_size);
       output = temp;
     }
-    delete[] out;
-    return output;
+  } else { /// 2D gray image
+    size_t height = resize_height, width = resize_width;
+    for (size_t i = 0; i < height; i++)
+      for (size_t j = 0; j < width; j++)
+        out[i * width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
+    Tensor temp(Shape{height, width});
+    temp.CopyDataFromHostPtr<float>(out, new_size);
+    output = temp;
   }
+  delete[] out;
+  return output;
+}
 #endif
 
-  Tensor crop(Tensor& input, const size_t crop_height, const size_t crop_width, 
-             const size_t crop_h_offset, const size_t crop_w_offset, 
-             const string& image_dim_order) {
-    CHECK_LE(input.nDim(), 4u);
-    CHECK_GE(input.nDim(), 2u);
+Tensor crop(Tensor& input, const size_t crop_height, const size_t crop_width,
+            const size_t crop_h_offset, const size_t crop_w_offset,
+            const string& image_dim_order) {
+  CHECK_LE(input.nDim(), 4u);
+  CHECK_GE(input.nDim(), 2u);
 
-    Tensor output;
-    const float* in = input.data<float>();
-    size_t out_idx = 0, in_idx = 0;
-    if (input.nDim() == 4u) {
-      /// TODO
-      LOG(FATAL) << "Not implemented";
-    } else if (input.nDim() == 3u) {
-      if (image_dim_order == "CHW") {
-        size_t height = input.shape(1), width = input.shape(2),
-            channel = input.shape(0); 
-        CHECK_LE(crop_height + crop_h_offset, height);
-        CHECK_LE(crop_width + crop_w_offset, width);
-        float* out = new float[crop_height * crop_width * channel];
-        for (size_t c = 0; c < channel; c++) {
-          for (size_t h = 0; h < crop_height; h++) {
-            for (size_t w = 0; w < crop_width; w++) {
-              in_idx = (c * height + crop_h_offset + h) * width + crop_w_offset + w;
-              out_idx = (c * crop_height + h) * crop_width + w;
-              out[out_idx] = in[in_idx];
-            }
-          }
-        }
-        output.Reshape(Shape{channel, crop_height, crop_width});
-        output.CopyDataFromHostPtr<float>(out, crop_height * crop_width * channel);
-        delete[] out;
-      } else if (image_dim_order == "HWC") {
-        size_t height = input.shape(0), width = input.shape(1), 
-               channel = input.shape(2); 
-        CHECK_LE(crop_height + crop_h_offset, height);
-        CHECK_LE(crop_width + crop_w_offset, width);
-        float* out = new float[crop_height * crop_width * channel];
-        for (size_t c = 0; c < channel; c++) {
-          for (size_t h = 0; h < crop_height; h++) {
-            for (size_t w = 0; w < crop_width; w++) {
-              in_idx = ((crop_h_offset + h) * width + crop_w_offset + w) * channel + c;
-              out_idx = (h * crop_width + w) * channel + c;
-              out[out_idx] = in[in_idx];
-            }
-          }
-        }
-        output.Reshape(Shape{crop_height, crop_width, channel});
-        output.CopyDataFromHostPtr<float>(out, crop_height * crop_width * channel);
-        delete[] out;
-      } else {
-        LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
-                   << " Only support 'HWC' and 'CHW'";
-      }
-    } else { /// 2D gray image
-      size_t height = input.shape(0), width = input.shape(1); 
+  Tensor output;
+  const float* in = input.data<float>();
+  size_t out_idx = 0, in_idx = 0;
+  if (input.nDim() == 4u) {
+    /// TODO
+    LOG(FATAL) << "Not implemented";
+  } else if (input.nDim() == 3u) {
+    if (image_dim_order == "CHW") {
+      size_t height = input.shape(1), width = input.shape(2),
+             channel = input.shape(0);
       CHECK_LE(crop_height + crop_h_offset, height);
       CHECK_LE(crop_width + crop_w_offset, width);
-      float* out = new float[crop_height * crop_width];
-      for (size_t h = 0; h < crop_height; h++) {
-        for (size_t w = 0; w < crop_width; w++) {
-          in_idx = (crop_h_offset + h) * width + crop_w_offset + w;
-          out_idx = h * crop_width + w;
-          out[out_idx] = in[in_idx];
-        }
-      }
-      output.Reshape(Shape{crop_height, crop_width});
-      output.CopyDataFromHostPtr<float>(out, crop_height * crop_width);
-      delete[] out;
-    }
-    return output;
-  }
-
-  Tensor mirror(Tensor& input, const bool horizontal_mirror,
-             const bool vertical_mirror, const string& image_dim_order) {
-    CHECK_LE(input.nDim(), 4u);
-    CHECK_GE(input.nDim(), 2u);
-    if (!horizontal_mirror && !vertical_mirror) return input;
-
-    Tensor output;
-    const float* in = input.data<float>();
-    size_t out_idx = 0, in_idx = 0;
-    if (input.nDim() == 4u) {
-      /// TODO
-      LOG(FATAL) << "Not implemented";
-    } else if (input.nDim() == 3u) {
-      if (image_dim_order == "CHW") {
-        size_t height = input.shape(1), width = input.shape(2),
-            channel = input.shape(0);
-        float* out = new float[height * width * channel];
-        for (size_t c = 0; c < channel; c++) {
-          for (size_t h = 0; h < height; h++) {
-            for (size_t w = 0; w < width; w++) {
-              in_idx = (c * height + h) * width + w;
-              if (horizontal_mirror && vertical_mirror)
-                out_idx = (c * height + (height - 1 - h)) * width + (width - 1 - w);
-              else if (horizontal_mirror)
-                out_idx = (c * height + h) * width + (width - 1 - w);
-              else /// only do vertical mirror
-                out_idx = (c * height + (height - 1 - h)) * width + w;
-              out[out_idx] = in[in_idx];
-            }
+      float* out = new float[crop_height * crop_width * channel];
+      for (size_t c = 0; c < channel; c++) {
+        for (size_t h = 0; h < crop_height; h++) {
+          for (size_t w = 0; w < crop_width; w++) {
+            in_idx = (c * height + crop_h_offset + h) * width + crop_w_offset + w;
+            out_idx = (c * crop_height + h) * crop_width + w;
+            out[out_idx] = in[in_idx];
           }
         }
-        output.Reshape(Shape{channel, height, width});
-        output.CopyDataFromHostPtr<float>(out, height * width * channel);
-        delete[] out;
-      } else if (image_dim_order == "HWC") {
-        size_t height = input.shape(0), width = input.shape(1),
-            channel = input.shape(2);
-        float* out = new float[height * width * channel];
-        for (size_t c = 0; c < channel; c++) {
-          for (size_t h = 0; h < height; h++) {
-            for (size_t w = 0; w < width; w++) {
-              in_idx = (h * width + w) * channel + c;
-              if (horizontal_mirror && vertical_mirror)
-                out_idx = ((height - 1 - h) * width + (width - 1 - w)) * channel + c;
-              else if (horizontal_mirror)
-                out_idx = (h * width + (width - 1 - w)) * channel + c;
-              else /// only do vertical mirror
-                out_idx = ((height - 1 - h) * width + w) * channel + c;
-              out[out_idx] = in[in_idx];
-            }
+      }
+      output.Resize(Shape{channel, crop_height, crop_width});
+      output.CopyDataFromHostPtr<float>(out, crop_height * crop_width * channel);
+      delete[] out;
+    } else if (image_dim_order == "HWC") {
+      size_t height = input.shape(0), width = input.shape(1),
+             channel = input.shape(2);
+      CHECK_LE(crop_height + crop_h_offset, height);
+      CHECK_LE(crop_width + crop_w_offset, width);
+      float* out = new float[crop_height * crop_width * channel];
+      for (size_t c = 0; c < channel; c++) {
+        for (size_t h = 0; h < crop_height; h++) {
+          for (size_t w = 0; w < crop_width; w++) {
+            in_idx = ((crop_h_offset + h) * width + crop_w_offset + w) * channel + c;
+            out_idx = (h * crop_width + w) * channel + c;
+            out[out_idx] = in[in_idx];
           }
         }
-        output.Reshape(Shape{height, width, channel});
-        output.CopyDataFromHostPtr<float>(out, height * width * channel);
-        delete[] out;
-      } else {
-        LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
-                   << " Only support 'HWC' and 'CHW'";
       }
-    } else { /// 2D gray image
-      size_t height = input.shape(0), width = input.shape(1);
-      float* out = new float[height * width];
-      for (size_t h = 0; h < height; h++) {
-        for (size_t w = 0; w < width; w++) {
-          in_idx = h * width + w;
-          if (horizontal_mirror && vertical_mirror)
-            out_idx = (height - 1 - h) * width + (width - 1 - w);
-          else if (horizontal_mirror)
-            out_idx = h * width + (width - 1 - w);
-          else /// only do vertical mirror
-            out_idx = (height - 1 - h) * width + w;
-          out[out_idx] = in[in_idx];
+      output.Resize(Shape{crop_height, crop_width, channel});
+      output.CopyDataFromHostPtr<float>(out, crop_height * crop_width * channel);
+      delete[] out;
+    } else {
+      LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
+                 << " Only support 'HWC' and 'CHW'";
+    }
+  } else { /// 2D gray image
+    size_t height = input.shape(0), width = input.shape(1);
+    CHECK_LE(crop_height + crop_h_offset, height);
+    CHECK_LE(crop_width + crop_w_offset, width);
+    float* out = new float[crop_height * crop_width];
+    for (size_t h = 0; h < crop_height; h++) {
+      for (size_t w = 0; w < crop_width; w++) {
+        in_idx = (crop_h_offset + h) * width + crop_w_offset + w;
+        out_idx = h * crop_width + w;
+        out[out_idx] = in[in_idx];
+      }
+    }
+    output.Resize(Shape{crop_height, crop_width});
+    output.CopyDataFromHostPtr<float>(out, crop_height * crop_width);
+    delete[] out;
+  }
+  return output;
+}
+
+Tensor mirror(Tensor& input, const bool horizontal_mirror,
+              const bool vertical_mirror, const string& image_dim_order) {
+  CHECK_LE(input.nDim(), 4u);
+  CHECK_GE(input.nDim(), 2u);
+  if (!horizontal_mirror && !vertical_mirror) return input;
+
+  Tensor output;
+  const float* in = input.data<float>();
+  size_t out_idx = 0, in_idx = 0;
+  if (input.nDim() == 4u) {
+    /// TODO
+    LOG(FATAL) << "Not implemented";
+  } else if (input.nDim() == 3u) {
+    if (image_dim_order == "CHW") {
+      size_t height = input.shape(1), width = input.shape(2),
+             channel = input.shape(0);
+      float* out = new float[height * width * channel];
+      for (size_t c = 0; c < channel; c++) {
+        for (size_t h = 0; h < height; h++) {
+          for (size_t w = 0; w < width; w++) {
+            in_idx = (c * height + h) * width + w;
+            if (horizontal_mirror && vertical_mirror)
+              out_idx = (c * height + (height - 1 - h)) * width + (width - 1 - w);
+            else if (horizontal_mirror)
+              out_idx = (c * height + h) * width + (width - 1 - w);
+            else /// only do vertical mirror
+              out_idx = (c * height + (height - 1 - h)) * width + w;
+            out[out_idx] = in[in_idx];
+          }
         }
       }
-      output.Reshape(Shape{height, width});
-      output.CopyDataFromHostPtr<float>(out, height * width);
+      output.Resize(Shape{channel, height, width});
+      output.CopyDataFromHostPtr<float>(out, height * width * channel);
       delete[] out;
+    } else if (image_dim_order == "HWC") {
+      size_t height = input.shape(0), width = input.shape(1),
+             channel = input.shape(2);
+      float* out = new float[height * width * channel];
+      for (size_t c = 0; c < channel; c++) {
+        for (size_t h = 0; h < height; h++) {
+          for (size_t w = 0; w < width; w++) {
+            in_idx = (h * width + w) * channel + c;
+            if (horizontal_mirror && vertical_mirror)
+              out_idx = ((height - 1 - h) * width + (width - 1 - w)) * channel + c;
+            else if (horizontal_mirror)
+              out_idx = (h * width + (width - 1 - w)) * channel + c;
+            else /// only do vertical mirror
+              out_idx = ((height - 1 - h) * width + w) * channel + c;
+            out[out_idx] = in[in_idx];
+          }
+        }
+      }
+      output.Resize(Shape{height, width, channel});
+      output.CopyDataFromHostPtr<float>(out, height * width * channel);
+      delete[] out;
+    } else {
+      LOG(FATAL) << "Unknow dimension order for images " << image_dim_order
+                 << " Only support 'HWC' and 'CHW'";
     }
-    return output;
+  } else { /// 2D gray image
+    size_t height = input.shape(0), width = input.shape(1);
+    float* out = new float[height * width];
+    for (size_t h = 0; h < height; h++) {
+      for (size_t w = 0; w < width; w++) {
+        in_idx = h * width + w;
+        if (horizontal_mirror && vertical_mirror)
+          out_idx = (height - 1 - h) * width + (width - 1 - w);
+        else if (horizontal_mirror)
+          out_idx = h * width + (width - 1 - w);
+        else /// only do vertical mirror
+          out_idx = (height - 1 - h) * width + w;
+        out[out_idx] = in[in_idx];
+      }
+    }
+    output.Resize(Shape{height, width});
+    output.CopyDataFromHostPtr<float>(out, height * width);
+    delete[] out;
   }
+  return output;
+}
 } // namespace singa
diff --git a/src/io/snapshot.cc b/src/io/snapshot.cc
old mode 100644
new mode 100755
index d24513e..86c5ccd
--- a/src/io/snapshot.cc
+++ b/src/io/snapshot.cc
@@ -42,34 +42,41 @@
     text_writer_ptr_->Open(prefix + ".desc", io::kCreate);
 
     // write the current version ids
-    text_writer_ptr_->Write("SINGA_VERSION", std::to_string(SINGA_VERSION));
+    //text_writer_ptr_->Write("SINGA_VERSION", std::to_string(SINGA_VERSION));
+    text_writer_ptr_->Write("", "SINGA VERSION: " + std::to_string(SINGA_VERSION));
   } else if (mode == kRead) {
+
+    /*
     auto text_reader_ptr = new io::TextFileReader();
     text_reader_ptr->Open(prefix + ".desc");
     std::string key, val;
     while (text_reader_ptr->Read(&key, &val)) {
-      if (key == "SINGA_VERSION")
+      if (key == "0")
         version_ = std::stoi(val);
     }
     delete text_reader_ptr;
-
+    */
+    std::string key, val;
     if (!bin_reader_ptr_->Open(prefix + ".bin", max_param_size << 20))
       CHECK(bin_reader_ptr_->Open(prefix + ".model", max_param_size << 20))
         << "Cannot open the checkpoint bin file:" << prefix + ".bin (>=1.0.1) "
         <<" or " << prefix + " .model (used by 1.0.0)";
     singa::TensorProto tp;
     while (bin_reader_ptr_->Read(&key, &val)) {
+      /*
       if (key == "SINGA_VERSION") {
         CHECK(version_ == std::stoi(val)) << key << " in .bin and .desc mismatch: "
           << val << " (bin) vs " << version_ << " (desc)";
         continue;
       }
+      */
 
       CHECK(param_names_.count(key) == 0);
       param_names_.insert(key);
       CHECK(tp.ParseFromString(val));
       param_map_[key].FromProto(tp);
     }
+    // need to set version_ by getting data from param_map_["SINGA_VERSION"]?
   } else {
     LOG(FATAL)
         << "Mode for snapshot should be Snapshot::kWrite or Snapshot::kRead";
diff --git a/src/model/layer/activation.cc b/src/model/layer/activation.cc
index eb90d87..8e09f59 100644
--- a/src/model/layer/activation.cc
+++ b/src/model/layer/activation.cc
@@ -62,7 +62,7 @@
     output = ReLU(input);
     if (flag & kTrain) buf_.push(input);
   } else
-    LOG(FATAL) << "Unkown activation: " << mode_;
+    LOG(FATAL) << "Unknown activation: " << mode_;
   return output;
 }
 
diff --git a/src/model/layer/activation.h b/src/model/layer/activation.h
index 7d15979..a61e3d2 100644
--- a/src/model/layer/activation.h
+++ b/src/model/layer/activation.h
@@ -44,7 +44,7 @@
 
   const std::string Mode() const { return mode_; }
 
-  const float Negative_slope() const { return neg_slope_; }
+  float Negative_slope() const { return neg_slope_; }
 
  protected:
   std::string mode_;
diff --git a/src/model/layer/batchnorm.cc b/src/model/layer/batchnorm.cc
index 4e74a82..a4b9b24 100644
--- a/src/model/layer/batchnorm.cc
+++ b/src/model/layer/batchnorm.cc
@@ -44,7 +44,7 @@
   else
     is_2d_ = false;
 
-  bnScale_.Reshape(Shape{channels_});
+  bnScale_.Resize(Shape{channels_});
   bnBias_.ResetLike(bnScale_);
   runningMean_.ResetLike(bnScale_);
   runningVariance_.ResetLike(bnScale_);
@@ -68,19 +68,18 @@
 const Tensor BatchNorm::Forward(int flag, const Tensor& input) {
   Tensor x = input.Clone();
   x.Reshape(Shape{input.shape(0), input.Size() / input.shape(0)});
-  Tensor output, mean, var, xnorm;
+  Tensor output;
   output.ResetLike(x);
   // TODO(wangwei) input sample shape check
-
   if ((flag & kTrain) == kTrain) {  // forward for train
     if (is_2d_) {                   // batchnorm_per_activation mode
-      mean = Average(x, 0);
+      auto mean = Average(x, 0);
       runningMean_ *= 1.0f - factor_;
       Axpy(factor_, mean, &runningMean_);
-      xnorm = x.Clone();
+      auto xnorm = x.Clone();
       SubRow(mean, &xnorm);
       xnorm = Square(xnorm);
-      var = Average(xnorm, 0);
+      auto var = Average(xnorm, 0);
       runningVariance_ *= 1.0f - factor_;
       Axpy(factor_, var, &runningVariance_);
       Tensor tmp = var.Clone();
@@ -102,7 +101,7 @@
     }
   } else {         // forward for test
     if (is_2d_) {  // batchnorm_per_activation mode
-      xnorm = x.Clone();
+      auto xnorm = x.Clone();
       SubRow(runningMean_, &xnorm);
       Tensor tmp = runningVariance_.Clone();
       tmp = Sqrt(tmp);
@@ -134,7 +133,7 @@
       scale.Reshape(Shape{channels_ * height_ * width_});
       bias.Reshape(Shape{channels_ * height_ * width_});
 
-      xnorm = x.Clone();
+      auto xnorm = x.Clone();
       SubRow(mean, &xnorm);
       var = Sqrt(var);
       var += 1e-6f;
diff --git a/src/model/layer/batchnorm.h b/src/model/layer/batchnorm.h
index c2cfde9..8b2e295 100644
--- a/src/model/layer/batchnorm.h
+++ b/src/model/layer/batchnorm.h
@@ -48,27 +48,27 @@
     return std::vector<Tensor> { bnScale_, bnBias_, runningMean_,
                                  runningVariance_ };
   }
-  const float factor() const { return factor_; }
+  float factor() const { return factor_; }
   const Tensor& bnScale() const { return bnScale_; }
   const Tensor& bnBias() const { return bnBias_; }
   const Tensor& runningMean() const { return runningMean_; }
   const Tensor& runningVariance() const { return runningVariance_; }
-  const size_t channels() const { return channels_; }
-  const size_t height() const { return height_; }
-  const size_t width() const { return width_; }
-  void set_bnScale(Tensor x) {
+  size_t channels() const { return channels_; }
+  size_t height() const { return height_; }
+  size_t width() const { return width_; }
+  void set_bnScale(const Tensor& x) {
     bnScale_.ResetLike(x);
     bnScale_.CopyData(x);
   }
-  void set_bnBias(Tensor x) {
+  void set_bnBias(const Tensor& x) {
     bnBias_.ResetLike(x);
     bnBias_.CopyData(x);
   }
-  void set_runningMean(Tensor x) {
+  void set_runningMean(const Tensor& x) {
     runningMean_.ResetLike(x);
     runningMean_.CopyData(x);
   }
-  void set_runningVariance(Tensor x) {
+  void set_runningVariance(const Tensor& x) {
     runningVariance_.ResetLike(x);
     runningVariance_.CopyData(x);
   }
diff --git a/src/model/layer/convolution.cc b/src/model/layer/convolution.cc
old mode 100644
new mode 100755
index 3fc7afb..c91fd6a
--- a/src/model/layer/convolution.cc
+++ b/src/model/layer/convolution.cc
@@ -96,9 +96,9 @@
   col_width_ = conv_height_ * conv_width_;
 
   // Setup shape of weight_ and bias_
-  weight_.Reshape(Shape{num_filters_, col_height_});
+  weight_.Resize(Shape{num_filters_, col_height_});
   if (bias_term_)
-    bias_.Reshape(Shape{num_filters_});
+    bias_.Resize(Shape{num_filters_});
   // Assume the order of param is: weight, bias
   for (const auto &spec : conf.param()) param_specs_.push_back(spec);
 }
@@ -174,8 +174,8 @@
     col_data.CopyDataFromHostPtr(data_col, col_height_ * col_width_);
     Tensor grad_b(Shape{num_filters_, conv_height_ * conv_width_});
     CopyDataToFrom(&grad_b, grad, grad_b.Size(), 0, b * grad_b.Size());
-    dw += Mult(grad_b, col_data.T());
-    Tensor dcol_b = Mult(weight_.T(), grad_b);
+    dw += Mult(grad_b, Transpose(col_data));
+    Tensor dcol_b = Mult(Transpose(weight_), grad_b);
     auto dcol_data = dcol_b.data<float>();
     Col2im(dcol_data, channels_, height_, width_, kernel_h_, kernel_w_, pad_h_,
            pad_w_, stride_h_, stride_w_, dx_b);
@@ -194,7 +194,7 @@
   bias_.ToDevice(device);
 }
 
-void Convolution::Im2col(const float *data_im, const int channels,
+void Im2col(const float *data_im, const int channels,
                          const int height, const int width,
                          const int kernel_h, const int kernel_w,
                          const int pad_h, const int pad_w,
@@ -221,13 +221,13 @@
   }
 }
 
-void Convolution::Col2im(const float *data_col, const int channels,
+void Col2im(const float *data_col, const int channels,
                          const int height, const int width,
                          const int kernel_h, const int kernel_w,
                          const int pad_h, const int pad_w,
                          const int stride_h, const int stride_w,
                          float *data_im) {
-  memset(data_im, 0, height * width * channels * sizeof(float));
+  memset(data_im, 0, (unsigned long) height * width * channels * sizeof(float));
   int height_col = (height + 2 * pad_h - kernel_h) / stride_h + 1;
   int width_col  = ( width + 2 * pad_w - kernel_w) / stride_w + 1;
   int channels_col = channels * kernel_h * kernel_w;
diff --git a/src/model/layer/convolution.h b/src/model/layer/convolution.h
old mode 100644
new mode 100755
index 89b5319..cb1b1d3
--- a/src/model/layer/convolution.h
+++ b/src/model/layer/convolution.h
@@ -46,16 +46,6 @@
 
   void ToDevice(std::shared_ptr<Device> device) override;
 
-  void Im2col(const float* data_im, const int channels, const int height,
-              const int width, const int kernel_h, const int kernel_w,
-              const int pad_h, const int pad_w, const int stride_h,
-              const int stride_w, float* data_col);
-
-  void Col2im(const float* data_col, const int channels, const int height,
-              const int width, const int kernel_h, const int kernel_w,
-              const int pad_h, const int pad_w, const int stride_h,
-              const int stride_w, float* data_im);
-
   const std::vector<Tensor> param_values() override {
     if (bias_term_)
       return std::vector<Tensor>{weight_, bias_};
@@ -77,11 +67,11 @@
   const Tensor& weight() const { return weight_; }
   const Tensor& bias() const { return bias_; }
 
-  void set_weight(Tensor w) {
+  void set_weight(const Tensor& w) {
     weight_.ResetLike(w);
     weight_.CopyData(w);
   }
-  void set_bias(Tensor b) {
+  void set_bias(const Tensor& b) {
     bias_.ResetLike(b);
     bias_.CopyData(b);
   }
@@ -97,5 +87,16 @@
   bool bias_term_;
   vector<size_t> out_sample_shape_;
 };
+
+void Im2col(const float* data_im, const int channels, const int height,
+            const int width, const int kernel_h, const int kernel_w,
+            const int pad_h, const int pad_w, const int stride_h,
+            const int stride_w, float* data_col);
+
+void Col2im(const float* data_col, const int channels, const int height,
+            const int width, const int kernel_h, const int kernel_w,
+            const int pad_h, const int pad_w, const int stride_h,
+            const int stride_w, float* data_im);
+
 }  // namespace singa
 #endif  // SRC_MODEL_LAYER_CONVOLUTION_H_
diff --git a/src/model/layer/cudnn_activation.cc b/src/model/layer/cudnn_activation.cc
index ff520b8..806e714 100644
--- a/src/model/layer/cudnn_activation.cc
+++ b/src/model/layer/cudnn_activation.cc
@@ -78,16 +78,10 @@
   output.device()->Exec([input, output, this](Context* ctx) {
     Block* inblock = input.block(), * outblock = output.block();
     float alpha = 1.0f, beta = 0.0f;
-#if CUDNN_MAJOR == 5
     CUDNN_CHECK(cudnnActivationForward(
         ctx->cudnn_handle, this->acti_desc_, &alpha, this->desc_,
         inblock->data(), &beta, this->desc_, outblock->mutable_data()));
-#elif CUDNN_MAJOR == 4
-    CUDNN_CHECK(cudnnActivationForward_v4(
-        ctx->cudnn_handle, this->acti_desc_, &alpha, this->desc_,
-        inblock->data(), &beta, this->desc_, outblock->mutable_data()));
-#endif
-  }, {input.block()}, {output.block()});
+  }, {input.block()}, {output.block()}, "cudnnActivationForward");
   if (flag & kTrain) {
     if (cudnn_mode_ == CUDNN_ACTIVATION_SIGMOID ||
         cudnn_mode_ == CUDNN_ACTIVATION_TANH) {
@@ -113,18 +107,11 @@
     Block* dyblock = grad.block(), * dxblock = dx.block(),
            * yblock = inout.block(), * xblock = inout.block();
     float alpha = 1.0f, beta = 0.0f;
-#if CUDNN_MAJOR == 5
     CUDNN_CHECK(cudnnActivationBackward(
         ctx->cudnn_handle, this->acti_desc_, &alpha, this->desc_,
         yblock->data(), this->desc_, dyblock->data(), this->desc_,
         xblock->data(), &beta, this->desc_, dxblock->mutable_data()));
-#elif CUDNN_MAJOR == 4
-    CUDNN_CHECK(cudnnActivationBackward_v4(
-        ctx->cudnn_handle, this->acti_desc_, &alpha, this->desc_, yblock->data(),
-        this->desc_, dyblock->data(), this->desc_, xblock->data(), &beta,
-        this->desc_, dxblock->mutable_data()));
-#endif
-  }, {grad.block(), inout.block()}, {dx.block()});
+  }, {grad.block(), inout.block()}, {dx.block()}, "cudnnActivationBackward");
   return std::make_pair(dx, param_grad);
 }
 }  // namespace singa
diff --git a/src/model/layer/cudnn_batchnorm.cc b/src/model/layer/cudnn_batchnorm.cc
old mode 100644
new mode 100755
index 5c93a6b..c5baefb
--- a/src/model/layer/cudnn_batchnorm.cc
+++ b/src/model/layer/cudnn_batchnorm.cc
@@ -39,8 +39,8 @@
 
 void CudnnBatchNorm::Setup(const Shape& in_sample, const LayerConf& conf) {
   BatchNorm::Setup(in_sample, conf);
-  resultSaveMean_.Reshape(Shape{channels_});
-  resultSaveVariance_.Reshape(Shape{channels_});
+  resultSaveMean_.Resize(Shape{channels_});
+  resultSaveVariance_.Resize(Shape{channels_});
 }
 
 void CudnnBatchNorm::InitCudnn(const Shape& shape, DataType dtype) {
@@ -167,7 +167,7 @@
               saveVarBlock->data()));
 
         },
-        {dx.block(), grad.block(), bnScale_.block(), resultSaveMean_.block(),
+        {x.block(), grad.block(), bnScale_.block(), resultSaveMean_.block(),
          resultSaveVariance_.block()},
         {dx.block(), dbnScale_.block(), dbnBias_.block()});
   } else {
diff --git a/src/model/layer/cudnn_convolution.cc b/src/model/layer/cudnn_convolution.cc
index 7d5e554..44e1fef 100644
--- a/src/model/layer/cudnn_convolution.cc
+++ b/src/model/layer/cudnn_convolution.cc
@@ -44,7 +44,7 @@
   CHECK(prefer_ == "fastest" || prefer_ == "limited_workspace" ||
         prefer_ == "no_workspace" || prefer_ == "autotune")
       << "CudnnConvolution only supports four algorithm preferences: fastest, "
-         "limited_workspace, no_workspace and autotune";
+      "limited_workspace, no_workspace and autotune";
 }
 
 void CudnnConvolution::ToDevice(std::shared_ptr<Device> device) {
@@ -70,27 +70,22 @@
                                          GetCudnnDataType(dtype), batchsize,
                                          channels_, height_, width_));
   CUDNN_CHECK(cudnnSetTensor4dDescriptor(
-      y_desc_, CUDNN_TENSOR_NCHW, GetCudnnDataType(dtype), batchsize,
-      num_filters_, conv_height_, conv_width_));
+                y_desc_, CUDNN_TENSOR_NCHW, GetCudnnDataType(dtype), batchsize,
+                num_filters_, conv_height_, conv_width_));
   if (bias_term_)
     CUDNN_CHECK(cudnnSetTensor4dDescriptor(bias_desc_, CUDNN_TENSOR_NCHW,
                                            GetCudnnDataType(dtype), 1,
                                            num_filters_, 1, 1));
   CUDNN_CHECK(cudnnSetConvolution2dDescriptor(conv_desc_, pad_h_, pad_w_,
-                                              stride_h_, stride_w_, 1, 1,
-                                              CUDNN_CROSS_CORRELATION));
-#if CUDNN_MAJOR == 5
+              stride_h_, stride_w_, 1, 1,  // dilation x and y
+              CUDNN_CROSS_CORRELATION
+#if CUDNN_MAJOR >= 7
+              , GetCudnnDataType(dtype)
+#endif  // CUDNN_MAJOR
+                                             ));
   CUDNN_CHECK(cudnnSetFilter4dDescriptor(filter_desc_, GetCudnnDataType(dtype),
                                          CUDNN_TENSOR_NCHW, num_filters_,
                                          channels_, kernel_h_, kernel_w_));
-#elif CUDNN_MAJOR == 4
-  CUDNN_CHECK(cudnnSetFilter4dDescriptor_v4(
-      filter_desc_, GetCudnnDataType(dtype), CUDNN_TENSOR_NCHW, num_filters_,
-      channels_, kernel_h_, kernel_w_));
-#else
-  LOG(FATAL) << "Not supported CUDNN version = " << CUDNN_MAJOR;
-#endif
-
   if (prefer_ == "fastest" || prefer_ == "limited_workspace" ||
       prefer_ == "no_workspace") {
     cudnnConvolutionFwdPreference_t fwd_pref;
@@ -110,14 +105,15 @@
       bwd_data_pref = CUDNN_CONVOLUTION_BWD_DATA_SPECIFY_WORKSPACE_LIMIT;
     }
     CUDNN_CHECK(cudnnGetConvolutionForwardAlgorithm(
-        ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, fwd_pref,
-        workspace_byte_limit_, &fp_alg_));
+                  ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, fwd_pref,
+                  workspace_byte_limit_, &fp_alg_));
     CUDNN_CHECK(cudnnGetConvolutionBackwardFilterAlgorithm(
-        ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_,
-        bwd_filt_pref, workspace_byte_limit_, &bp_filter_alg_));
+                  ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_,
+                  bwd_filt_pref, workspace_byte_limit_, &bp_filter_alg_));
+    // deprecated in cudnn v7
     CUDNN_CHECK(cudnnGetConvolutionBackwardDataAlgorithm(
-        ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_,
-        bwd_data_pref, workspace_byte_limit_, &bp_data_alg_));
+                  ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_,
+                  bwd_data_pref, workspace_byte_limit_, &bp_data_alg_));
   } else if (prefer_ == "autotune") {
     const int topk = 1;
     int num_fp_alg, num_bp_filt_alg, num_bp_data_alg;
@@ -125,16 +121,16 @@
     cudnnConvolutionBwdFilterAlgoPerf_t bp_filt_perf[topk];
     cudnnConvolutionBwdDataAlgoPerf_t bp_data_perf[topk];
     CUDNN_CHECK(cudnnFindConvolutionForwardAlgorithm(
-        ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, topk,
-        &num_fp_alg, fp_alg_perf));
+                  ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, topk,
+                  &num_fp_alg, fp_alg_perf));
     fp_alg_ = fp_alg_perf[0].algo;
     CUDNN_CHECK(cudnnFindConvolutionBackwardFilterAlgorithm(
-        ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_, topk,
-        &num_bp_filt_alg, bp_filt_perf));
+                  ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_, topk,
+                  &num_bp_filt_alg, bp_filt_perf));
     bp_filter_alg_ = bp_filt_perf[0].algo;
     CUDNN_CHECK(cudnnFindConvolutionBackwardDataAlgorithm(
-        ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_, topk,
-        &num_bp_data_alg, bp_data_perf));
+                  ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_, topk,
+                  &num_bp_data_alg, bp_data_perf));
     bp_data_alg_ = bp_data_perf[0].algo;
   } else {
     LOG(FATAL) << "Preferred algorithm is not available!";
@@ -142,22 +138,22 @@
 
   size_t fp_byte, bp_data_byte, bp_filter_byte;
   CUDNN_CHECK(cudnnGetConvolutionForwardWorkspaceSize(
-      ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, fp_alg_,
-      &fp_byte));
+                ctx->cudnn_handle, x_desc_, filter_desc_, conv_desc_, y_desc_, fp_alg_,
+                &fp_byte));
   CUDNN_CHECK(cudnnGetConvolutionBackwardDataWorkspaceSize(
-      ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_,
-      bp_data_alg_, &bp_data_byte));
+                ctx->cudnn_handle, filter_desc_, y_desc_, conv_desc_, x_desc_,
+                bp_data_alg_, &bp_data_byte));
   CUDNN_CHECK(cudnnGetConvolutionBackwardFilterWorkspaceSize(
-      ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_,
-      bp_filter_alg_, &bp_filter_byte));
+                ctx->cudnn_handle, x_desc_, y_desc_, conv_desc_, filter_desc_,
+                bp_filter_alg_, &bp_filter_byte));
   workspace_count_ = std::max(std::max(fp_byte, bp_data_byte), bp_filter_byte) /
-                         sizeof(float) +
+                     sizeof(float) +
                      1;
   if (workspace_count_ * sizeof(float) > workspace_byte_limit_)
     LOG(WARNING) << "The required memory for workspace ("
-      << workspace_count_ * sizeof(float)
-      << ") is larger than the expected Bytes ("
-      << workspace_byte_limit_ << ")";
+                 << workspace_count_ * sizeof(float)
+                 << ") is larger than the expected Bytes ("
+                 << workspace_byte_limit_ << ")";
   workspace_ = Tensor(Shape{workspace_count_}, dev, dtype);
   has_init_cudnn_ = true;
 }
@@ -177,23 +173,23 @@
     int n, c, h, w, s;
     cudnnDataType_t type;
     CUDNN_CHECK(cudnnGetTensor4dDescriptor(x_desc_, &type, &n, &c, &h, &w,
-          &s, &s, &s, &s));
+                                           &s, &s, &s, &s));
     if (batchsize != static_cast<size_t>(n))
       InitCudnn(input);
     CHECK(input.shape(1) == static_cast<size_t>(c)
-        && input.shape(2) == static_cast<size_t>(h)
-        && input.shape(3) == static_cast<size_t>(w))
-      << "input sample shape should not change"
-      << "previous shape " << c << ", " << h << ", " << w
-      << "current shape " << input.shape(1) << ", " << input.shape(2) << ", "
-      << input.shape(3);
+          && input.shape(2) == static_cast<size_t>(h)
+          && input.shape(3) == static_cast<size_t>(w))
+        << "input sample shape should not change"
+        << "previous shape " << c << ", " << h << ", " << w
+        << "current shape " << input.shape(1) << ", " << input.shape(2) << ", "
+        << input.shape(3);
   }
 
   Shape shape{batchsize, num_filters_, conv_height_, conv_width_};
   Tensor output(shape, dev, dtype);
-  output.device()->Exec([input, output, this](Context *ctx) {
+  output.device()->Exec([input, output, this](Context * ctx) {
     Block *inblock = input.block(), *outblock = output.block(),
-          *wblock = this->weight_.block();
+           *wblock = this->weight_.block();
     float alpha = 1.f, beta = 0.f;
     cudnnConvolutionForward(ctx->cudnn_handle, &alpha, this->x_desc_,
                             inblock->data(), this->filter_desc_, wblock->data(),
@@ -201,10 +197,10 @@
                             this->workspace_.block()->mutable_data(),
                             this->workspace_count_ * sizeof(float), &beta,
                             this->y_desc_, outblock->mutable_data());
-  }, {input.block(), weight_.block()}, {output.block()}, workspace_.block());
+  }, {input.block(), weight_.block()}, {output.block(), workspace_.block()});
 
   if (bias_term_) {
-    output.device()->Exec([output, this](Context *ctx) {
+    output.device()->Exec([output, this](Context * ctx) {
       float beta = 1.f, alpha = 1.0f;
       Block *outblock = output.block(), *bblock = this->bias_.block();
       cudnnAddTensor(ctx->cudnn_handle, &alpha, this->bias_desc_,
@@ -216,7 +212,7 @@
 }
 
 const std::pair<Tensor, vector<Tensor>> CudnnConvolution::Backward(
-    int flag, const Tensor &grad) {
+int flag, const Tensor &grad) {
   CHECK(has_init_cudnn_);
   CHECK_EQ(grad.device()->lang(), kCuda);
   CHECK_EQ(grad.nDim(), 4u);
@@ -232,7 +228,7 @@
   // LOG(ERROR) << "backward bias";
   if (bias_term_) {
     db.ResetLike(bias_);
-    dx.device()->Exec([grad, db, this](Context *ctx) {
+    dx.device()->Exec([grad, db, this](Context * ctx) {
       Block *dyblock = grad.block(), *dbblock = db.block();
       float alpha = 1.f, beta = 0.f;
       cudnnConvolutionBackwardBias(ctx->cudnn_handle, &alpha, this->y_desc_,
@@ -241,22 +237,22 @@
     }, {grad.block()}, {db.block()});
   }
   // LOG(ERROR) << "backward w";
-  dx.device()->Exec([grad, dw, src_data, this](Context *ctx) {
+  dx.device()->Exec([grad, dw, src_data, this](Context * ctx) {
     Block *inblock = src_data.block(), *dyblock = grad.block(),
-          *dwblock = dw.block();
+           *dwblock = dw.block();
     float alpha = 1.f, beta = 0.f;
     cudnnConvolutionBackwardFilter(
-        ctx->cudnn_handle, &alpha, this->x_desc_, inblock->data(),
-        this->y_desc_, dyblock->data(), this->conv_desc_, this->bp_filter_alg_,
-        this->workspace_.block()->mutable_data(),
-        this->workspace_count_ * sizeof(float), &beta, this->filter_desc_,
-        dwblock->mutable_data());
+      ctx->cudnn_handle, &alpha, this->x_desc_, inblock->data(),
+      this->y_desc_, dyblock->data(), this->conv_desc_, this->bp_filter_alg_,
+      this->workspace_.block()->mutable_data(),
+      this->workspace_count_ * sizeof(float), &beta, this->filter_desc_,
+      dwblock->mutable_data());
   }, {grad.block(), src_data.block()}, {dw.block(), workspace_.block()});
 
   // LOG(ERROR) << "backward src";
-  dx.device()->Exec([dx, grad, this](Context *ctx) {
+  dx.device()->Exec([dx, grad, this](Context * ctx) {
     Block *wblock = this->weight_.block(), *dyblock = grad.block(),
-          *dxblock = dx.block();
+           *dxblock = dx.block();
     float alpha = 1.f, beta = 0.f;
     cudnnConvolutionBackwardData(ctx->cudnn_handle, &alpha, this->filter_desc_,
                                  wblock->data(), this->y_desc_, dyblock->data(),
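This file's hunks remove the cuDNN 4 fallbacks and guard the compute-type argument that newer cuDNN releases added to cudnnSetConvolution2dDescriptor behind `CUDNN_MAJOR >= 7`. The idiom in isolation, as a hypothetical wrapper (requires the cuDNN headers; SetConv2dDesc is not a SINGA function):

```cpp
#include <cudnn.h>

// Hypothetical helper showing the version-guarded descriptor setup used in
// the patch: newer cuDNN takes an explicit compute type.
inline cudnnStatus_t SetConv2dDesc(cudnnConvolutionDescriptor_t desc,
                                   int pad_h, int pad_w, int stride_h,
                                   int stride_w, cudnnDataType_t dtype) {
#if CUDNN_MAJOR >= 7
  return cudnnSetConvolution2dDescriptor(desc, pad_h, pad_w, stride_h,
                                         stride_w, /*dilation=*/1, 1,
                                         CUDNN_CROSS_CORRELATION, dtype);
#else
  return cudnnSetConvolution2dDescriptor(desc, pad_h, pad_w, stride_h,
                                         stride_w, /*dilation=*/1, 1,
                                         CUDNN_CROSS_CORRELATION);
#endif
}
```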
diff --git a/src/model/layer/cudnn_dropout.cc b/src/model/layer/cudnn_dropout.cc
index 65d7b42..e9c36ee 100644
--- a/src/model/layer/cudnn_dropout.cc
+++ b/src/model/layer/cudnn_dropout.cc
@@ -70,7 +70,7 @@
     if (!has_init_cudnn_) {
       input.device()->Exec([size, dtype, this, dev](Context* ctx) {
           this->InitCudnn(size, dtype, dev, ctx);
-          }, {}, {this->state_.block()});
+          }, {}, {this->state_.block()}, "InitCudnn");
     } else {
       int n, c, h, w, s;
       cudnnDataType_t type;
@@ -79,7 +79,7 @@
       if (size != static_cast<size_t>(w))
         input.device()->Exec([size, dtype, this, dev](Context* ctx) {
             this->InitCudnn(size, dtype, dev, ctx);
-            }, {}, {this->state_.block()});
+            }, {}, {this->state_.block()}, "InitCudnn");
     }
     Tensor output;
     output.ResetLike(input);
@@ -90,7 +90,7 @@
                           inblock->data(), this->y_desc_,
                           outblock->mutable_data(), mblock->mutable_data(),
                           this->reserve_size_);
-    }, {input.block()}, {output.block(), mask_.block()});
+    }, {input.block()}, {output.block(), mask_.block()}, "cudnnDropoutForward");
     return output;
   } else {
     return input;
@@ -110,7 +110,7 @@
                            dyblock->data(), this->x_desc_,
                            dxblock->mutable_data(), mblock->mutable_data(),
                            this->reserve_size_);
-    }, {grad.block(), mask_.block()}, {dx.block()});
+    }, {grad.block(), mask_.block()}, {dx.block()}, "cudnnDropoutBackward");
   } else {
     LOG(ERROR) << "Do not call backward for evaluation phase";
   }
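A recurring change in this patch is a new fourth argument to Device::Exec, e.g. "cudnnDropoutForward" above; relatedly, the cudnn_batchnorm.cc hunk earlier fixes a read list that named dx where the kernel actually reads x. Both changes fit a signature of the shape sketched below. This is an assumption read off the call sites, not the real declaration (which lives in singa/core/device.h):

```cpp
#include <functional>
#include <string>
#include <vector>

struct Context {};
struct Block {};

// Mock of the Exec call shape implied by the hunks in this patch: the read
// and write block lists drive the scheduler's dependency tracking, and the
// trailing name presumably labels the operation for profiling.
void Exec(std::function<void(Context*)> fn, const std::vector<Block*>& read,
          const std::vector<Block*>& write, const std::string& op_name = "") {
  (void)read; (void)write; (void)op_name;
  Context ctx;
  fn(&ctx);  // a real device would enqueue fn once its read blocks are ready
}

int main() {
  Block in, out;
  Exec([](Context*) { /* kernel call would go here */ }, {&in}, {&out},
       "cudnnDropoutForward");
}
```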
diff --git a/src/model/layer/cudnn_pooling.cc b/src/model/layer/cudnn_pooling.cc
index 364242e..be480d7 100644
--- a/src/model/layer/cudnn_pooling.cc
+++ b/src/model/layer/cudnn_pooling.cc
@@ -65,17 +65,9 @@
   else
     LOG(FATAL) << "Not implemented!";
 
-#if CUDNN_MAJOR == 5
   CUDNN_CHECK(cudnnSetPooling2dDescriptor(pool_desc_, pool_method, nan_prop_,
                                           kernel_h_, kernel_w_, pad_h_, pad_w_,
                                           stride_h_, stride_w_));
-#elif CUDNN_MAJOR == 4
-  CUDNN_CHECK(cudnnSetPooling2dDescriptor_v4(pool_desc_, pool_method, nan_prop_,
-                                             kernel_h_, kernel_w_, pad_h_,
-                                             pad_w_, stride_h_, stride_w_));
-#else
-  LOG(FATAL) << "Not supported CUDNN version = " << CUDNN_MAJOR;
-#endif
   has_init_cudnn_ = true;
 }
 
diff --git a/src/model/layer/cudnn_rnn.cc b/src/model/layer/cudnn_rnn.cc
old mode 100644
new mode 100755
index 62c6355..eb2bfd3
--- a/src/model/layer/cudnn_rnn.cc
+++ b/src/model/layer/cudnn_rnn.cc
@@ -125,8 +125,8 @@
   CUDNN_CHECK(cudnnDropoutGetStatesSize(ctx->cudnn_handle, &state_size));
   dropout_state_ = Tensor(Shape{state_size}, dev, kChar);
   CUDNN_CHECK(cudnnSetDropoutDescriptor(
-      dropout_desc_, ctx->cudnn_handle, 1 - dropout_,  // keep probability
-      dropout_state_.block()->mutable_data(), state_size, seed_));
+                dropout_desc_, ctx->cudnn_handle, 1 - dropout_,  // keep probability
+                dropout_state_.block()->mutable_data(), state_size, seed_));
 
   CUDNN_CHECK(cudnnCreateRNNDescriptor(&rnn_desc_));
   cudnnRNNInputMode_t input_mode = CUDNN_LINEAR_INPUT;
@@ -144,10 +144,15 @@
     rnn_mode = CUDNN_RNN_TANH;
   else if (rnn_mode_ == "gru")
     rnn_mode = CUDNN_GRU;
+#if CUDNN_MAJOR <= 5
   CUDNN_CHECK(cudnnSetRNNDescriptor(rnn_desc_, hidden_size_, num_stacks_,
                                     dropout_desc_, input_mode, direction,
                                     rnn_mode, dtype_));
-
+#else
+  CUDNN_CHECK(cudnnSetRNNDescriptor(ctx->cudnn_handle, rnn_desc_, hidden_size_, num_stacks_,
+                                    dropout_desc_, input_mode, direction,
+                                    rnn_mode, CUDNN_RNN_ALGO_STANDARD, dtype_));
+#endif
   size_t weight_size;
   CUDNN_CHECK(cudnnGetRNNParamsSize(ctx->cudnn_handle, rnn_desc_, x_descs_[0],
                                     &weight_size, dtype_));
@@ -199,7 +204,7 @@
   }
 
   CUDNN_CHECK(cudnnGetRNNTrainingReserveSize(ctx->cudnn_handle, rnn_desc_,
-                                             seq_length, x_descs_, &count));
+              seq_length, x_descs_, &count));
   if (reserve_space_.Size() != count) {
     reserve_space_ = Tensor(Shape{count}, dev, kChar);
     // reserve_space_.SetValue(0);
@@ -263,8 +268,8 @@
 
   if (rnn_desc_ != nullptr)
     CHECK_EQ(dtype_, GetCudnnDataType(dtype))
-      << "Cannot change cudnn data type during training from " << dtype_
-      << " to " << GetCudnnDataType(dtype);
+        << "Cannot change cudnn data type during training from " << dtype_
+        << " to " << GetCudnnDataType(dtype);
   else
     dtype_ = GetCudnnDataType(dtype);
 
@@ -303,57 +308,57 @@
   // LOG(INFO) << "hidden size " << hy.Size();
   // LOG(INFO) << "weight size " << weight_.Size() << " value " << weight_.L1();
   Block *inb = input.block(), *outb = output.block(),
-        *wb = this->weight_.block(), *hxb = hx.block(), *cxb = cx.block(),
-        *hyb = hy.block(), *cyb = cy.block(),
-        *wspace = this->workspace_.block(),
-        *rspace = this->reserve_space_.block();
+         *wb = this->weight_.block(), *hxb = hx.block(), *cxb = cx.block(),
+          *hyb = hy.block(), *cyb = cy.block(),
+           *wspace = this->workspace_.block(),
+            *rspace = this->reserve_space_.block();
   if (flag & kTrain) {
     CHECK_EQ(reserve_space_.device()->lang(), kCuda);
     CHECK_EQ(did, reserve_space_.device()->id());
     dev->Exec(
-        [inb, outb, wb, hxb, cxb, hyb, cyb, wspace, rspace, this](Context *ctx) {
-        // clang-format off
-        cudnnRNNForwardTraining(
-            ctx->cudnn_handle,
-            this->rnn_desc_,
-            this->seq_length_,
-            this->x_descs_, inb->data(),
-            this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
-            this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
-            this->weight_desc_, wb->data(),
-            this->y_descs_, outb->mutable_data(),
-            this->hy_desc_, hyb->mutable_data(),
-            this->cy_desc_, cyb == nullptr ? nullptr : cyb->mutable_data(),
-            wspace->mutable_data(),
-            this->workspace_.Size(), rspace->mutable_data(),
-            this->reserve_space_.Size());
-        // clang-format on
-        },
-        {inb, wb, hxb, cxb}, {outb, hyb, cyb, wspace, rspace});
+    [inb, outb, wb, hxb, cxb, hyb, cyb, wspace, rspace, this](Context * ctx) {
+      // clang-format off
+      cudnnRNNForwardTraining(
+        ctx->cudnn_handle,
+        this->rnn_desc_,
+        this->seq_length_,
+        this->x_descs_, inb->data(),
+        this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
+        this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
+        this->weight_desc_, wb->data(),
+        this->y_descs_, outb->mutable_data(),
+        this->hy_desc_, hyb->mutable_data(),
+        this->cy_desc_, cyb == nullptr ? nullptr : cyb->mutable_data(),
+        wspace->mutable_data(),
+        this->workspace_.Size(), rspace->mutable_data(),
+        this->reserve_space_.Size());
+      // clang-format on
+    },
+    {inb, wb, hxb, cxb}, {outb, hyb, cyb, wspace, rspace});
     buf_.push(input);
     buf_.push(output);
     buf_.push(hx);
     buf_.push(cx);
   } else {
-    dev->Exec([inb, outb, wb, hxb, cxb, hyb, cyb, wspace, this](Context *ctx) {
+    dev->Exec([inb, outb, wb, hxb, cxb, hyb, cyb, wspace, this](Context * ctx) {
       // clang-format off
       cudnnRNNForwardInference(
-          ctx->cudnn_handle,
-          this->rnn_desc_,
-          this->seq_length_,
-          this->x_descs_, inb->data(),
-          this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
-          this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
-          this->weight_desc_, wb->data(),
-          this->y_descs_, outb->mutable_data(),
-          this->hy_desc_, hyb->mutable_data(),
-          this->cy_desc_, cyb == nullptr ? nullptr : cyb->mutable_data(),
-          wspace->mutable_data(), this->workspace_.Size());
+        ctx->cudnn_handle,
+        this->rnn_desc_,
+        this->seq_length_,
+        this->x_descs_, inb->data(),
+        this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
+        this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
+        this->weight_desc_, wb->data(),
+        this->y_descs_, outb->mutable_data(),
+        this->hy_desc_, hyb->mutable_data(),
+        this->cy_desc_, cyb == nullptr ? nullptr : cyb->mutable_data(),
+        wspace->mutable_data(), this->workspace_.Size());
       // clang-format on
     }, {inb, wb, hxb, cxb}, {outb, hyb, cyb, wspace});
   }
   auto outputs =
-      SplitOutput(num_x, hidden_size_ * num_directions_, inputs, output);
+    SplitOutput(num_x, hidden_size_ * num_directions_, inputs, output);
   outputs.push_back(hy);
   if (has_cell_) outputs.push_back(cy);
   return outputs;
@@ -361,7 +366,7 @@
 
 // TODO(wangwei) check Tensor device to be on cuda?
 const std::pair<vector<Tensor>, vector<Tensor>> CudnnRNN::Backward(
-    int flag, const vector<Tensor> &grads) {
+int flag, const vector<Tensor> &grads) {
   // dhy (and dcy) is at last
   const Tensor cx = buf_.top();  // cannot use const Tensor& due to pop()
   buf_.pop();
@@ -395,45 +400,45 @@
     dcx.ResetLike(dhx);
   dw.SetValue(0.0f);
   Block *yb = y.block(), *dyb = dy.block(), *dhyb = dhy.block(),
-        *dcyb = dcy.block(), *xb = x.block(), *cxb = cx.block(),
-        *wb = weight_.block(), *dwb = dw.block(), *hxb = hx.block(),
-        *dxb = dx.block(), *dhxb = dhx.block(), *dcxb = dcx.block(),
-        *wspace = workspace_.block(), *rspace = reserve_space_.block();
+         *dcyb = dcy.block(), *xb = x.block(), *cxb = cx.block(),
+          *wb = weight_.block(), *dwb = dw.block(), *hxb = hx.block(),
+           *dxb = dx.block(), *dhxb = dhx.block(), *dcxb = dcx.block(),
+            *wspace = workspace_.block(), *rspace = reserve_space_.block();
 
   y.device()->Exec(
-      [yb, dyb, dhyb, dcyb, xb, cxb, wb, dwb, hxb, dxb, dhxb, dcxb, wspace,
-       rspace, this](Context *ctx) {
-        // clang-format off
-        cudnnRNNBackwardData(
-            ctx->cudnn_handle,
-            this->rnn_desc_,
-            this->seq_length_,
-            this->y_descs_, yb->data(),
-            this->dy_descs_, dyb->data(),
-            this->dhy_desc_, dhyb == nullptr ? nullptr : dhyb->data(),
-            this->dcy_desc_, dcyb == nullptr ? nullptr : dcyb->data(),
-            this->weight_desc_, wb->data(),
-            this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
-            this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
-            this->dx_descs_, dxb->mutable_data(),
-            this->dhx_desc_, dhxb->mutable_data(),
-            this->dcx_desc_, dcxb == nullptr ? nullptr : dcxb->mutable_data(),
-            wspace->mutable_data(), this->workspace_.Size(),
-            rspace->mutable_data(), this->reserve_space_.Size());
-        cudnnRNNBackwardWeights(
-            ctx->cudnn_handle,
-            this->rnn_desc_,
-            this->seq_length_,
-            this->x_descs_, xb->data(),
-            this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
-            this->y_descs_, yb->data(),
-            wspace->data(), this->workspace_.Size(),
-            this->dweight_desc_, dwb->mutable_data(),
-            rspace->data(), this->reserve_space_.Size());
-        // clang-format on
-      },
-      {yb, dyb, dhyb, dcyb, xb, wb, wspace, rspace},
-      {dxb, dwb, dhxb, dcxb, wspace, rspace});
+    [yb, dyb, dhyb, dcyb, xb, cxb, wb, dwb, hxb, dxb, dhxb, dcxb, wspace,
+  rspace, this](Context * ctx) {
+    // clang-format off
+    cudnnRNNBackwardData(
+      ctx->cudnn_handle,
+      this->rnn_desc_,
+      this->seq_length_,
+      this->y_descs_, yb->data(),
+      this->dy_descs_, dyb->data(),
+      this->dhy_desc_, dhyb == nullptr ? nullptr : dhyb->data(),
+      this->dcy_desc_, dcyb == nullptr ? nullptr : dcyb->data(),
+      this->weight_desc_, wb->data(),
+      this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
+      this->cx_desc_, cxb == nullptr ? nullptr : cxb->data(),
+      this->dx_descs_, dxb->mutable_data(),
+      this->dhx_desc_, dhxb->mutable_data(),
+      this->dcx_desc_, dcxb == nullptr ? nullptr : dcxb->mutable_data(),
+      wspace->mutable_data(), this->workspace_.Size(),
+      rspace->mutable_data(), this->reserve_space_.Size());
+    cudnnRNNBackwardWeights(
+      ctx->cudnn_handle,
+      this->rnn_desc_,
+      this->seq_length_,
+      this->x_descs_, xb->data(),
+      this->hx_desc_, hxb == nullptr ? nullptr : hxb->data(),
+      this->y_descs_, yb->data(),
+      wspace->data(), this->workspace_.Size(),
+      this->dweight_desc_, dwb->mutable_data(),
+      rspace->data(), this->reserve_space_.Size());
+    // clang-format on
+  },
+  {yb, dyb, dhyb, dcyb, xb, wb, wspace, rspace},
+  {dxb, dwb, dhxb, dcxb, wspace, rspace});
 
   vector <Tensor> param_grad{dw};
   auto data_grads = SplitOutput(num_dy, input_size_, grads, dx);
diff --git a/src/model/layer/dense.cc b/src/model/layer/dense.cc
index fac9130..385d5cd 100644
--- a/src/model/layer/dense.cc
+++ b/src/model/layer/dense.cc
@@ -40,11 +40,11 @@
   transpose_ = dense_conf.transpose();
   bias_term_ = dense_conf.bias_term();
   if (transpose_)  // was {vdim_, hdim} by zhaojing?
-    weight_.Reshape(Shape{hdim_, vdim_});
+    weight_.Resize(Shape{hdim_, vdim_});
   else
-    weight_.Reshape(Shape{vdim_, hdim_});
+    weight_.Resize(Shape{vdim_, hdim_});
   if (bias_term_)
-    bias_.Reshape(Shape{hdim_});
+    bias_.Resize(Shape{hdim_});
   for (auto specs: conf.param())
     param_specs_.push_back(specs);
 }
@@ -55,7 +55,7 @@
   Tensor output;
   CHECK_EQ(input.nDim(), 2u);
   if (transpose_)  // use the transposed version of weight_ for computing
-    output = Mult(input, weight_.T());
+    output = Mult(input, Transpose(weight_));
   else
     output = Mult(input, weight_);
   if (bias_term_)
@@ -81,10 +81,10 @@
   }
   if (transpose_) {
     dx = Mult(grad, weight_);
-    dw = Mult(grad.T(), src_data);
+    dw = Mult(Transpose(grad), src_data);
   } else {
-    dx = Mult(grad, weight_.T());
-    dw = Mult(src_data.T(), grad);
+    dx = Mult(grad, Transpose(weight_));
+    dw = Mult(Transpose(src_data), grad);
   }
   param_grad.push_back(dw);
   if (bias_term_)
diff --git a/src/model/layer/dense.h b/src/model/layer/dense.h
index 8f53699..e55a49a 100644
--- a/src/model/layer/dense.h
+++ b/src/model/layer/dense.h
@@ -57,11 +57,11 @@
   const Tensor& weight() const { return weight_; }
   const Tensor& bias() const { return bias_; }

-  void set_weight(Tensor w) {
+  void set_weight(const Tensor& w) {
     weight_.ResetLike(w);
     weight_.CopyData(w);
   }
-  void set_bias(Tensor b) {
+  void set_bias(const Tensor& b) {
     bias_.ResetLike(b);
     bias_.CopyData(b);
   }
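The dense hunks replace the weight_.T() member view with a free Transpose() function; the underlying math is unchanged. With y = x W (n×d times d×h), backward computes dx = dy Wᵀ and dW = xᵀ dy, exactly the products in the hunk. A plain-array check of those rules, independent of SINGA's Tensor API:

```cpp
#include <cstdio>
#include <vector>

// Plain-array Mult/Transpose to exercise the dense backward rules above:
// dx = dy * W^T and dW = x^T * dy.
using Mat = std::vector<float>;  // row-major r x c

Mat MatMul(const Mat& a, int ar, int ac, const Mat& b, int bc) {
  Mat out(ar * bc, 0.f);
  for (int i = 0; i < ar; ++i)
    for (int k = 0; k < ac; ++k)
      for (int j = 0; j < bc; ++j)
        out[i * bc + j] += a[i * ac + k] * b[k * bc + j];
  return out;
}

Mat TransposeMat(const Mat& a, int r, int c) {
  Mat out(r * c);
  for (int i = 0; i < r; ++i)
    for (int j = 0; j < c; ++j) out[j * r + i] = a[i * c + j];
  return out;
}

int main() {
  // x: 1x2, w: 2x1, so y = x*w is 1x1 and dy is 1x1.
  Mat x = {1.f, 2.f}, w = {3.f, 4.f}, dy = {1.f};
  Mat dx = MatMul(dy, 1, 1, TransposeMat(w, 2, 1), 2);  // 1x2, equals w^T
  Mat dw = MatMul(TransposeMat(x, 1, 2), 2, 1, dy, 1);  // 2x1, equals x^T
  std::printf("dx = [%g %g], dw = [%g %g]\n", dx[0], dx[1], dw[0], dw[1]);
}
```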
diff --git a/src/model/layer/flatten.cc b/src/model/layer/flatten.cc
index 561c310..592e892 100644
--- a/src/model/layer/flatten.cc
+++ b/src/model/layer/flatten.cc
@@ -49,8 +49,7 @@
 const std::pair<Tensor, vector<Tensor> > Flatten::Backward(int flag,
                                                            const Tensor &grad) {
   vector<Tensor> param_grad;
-  Tensor input_grad = grad;
-  input_grad.Reshape(input_shape_);
+  Tensor input_grad = Reshape(grad, input_shape_);
   return std::make_pair(input_grad, param_grad);
 }
 
diff --git a/src/model/layer/flatten.h b/src/model/layer/flatten.h
index 8bbf481..ad885ae 100644
--- a/src/model/layer/flatten.h
+++ b/src/model/layer/flatten.h
@@ -42,7 +42,7 @@
   const std::pair<Tensor, vector<Tensor> > Backward(
       int flag, const Tensor &grad) override;
 
-  const int Axis() const { return axis_; }
+  int Axis() const { return axis_; }
   const Shape input_shape() const { return input_shape_; }
 
  protected:
diff --git a/src/model/layer/lrn.cc b/src/model/layer/lrn.cc
index 4fdb5c9..18e5d06 100644
--- a/src/model/layer/lrn.cc
+++ b/src/model/layer/lrn.cc
@@ -52,8 +52,7 @@
                    std::min(input.shape(1), c + local_size_ / 2 + 1));
       window = Square(window);
 
-      Tensor tmp, ch;
-      tmp.Reshape(Shape{input.shape(2) * input.shape(3)});
+      Tensor ch, tmp(Shape{input.shape(2) * input.shape(3)});
       SumRows(window, &tmp);
 
       tmp *= alpha_;
@@ -61,6 +60,7 @@
       tmp = Pow(tmp, beta_);
 
       ch = CopyRows(image, c, c + 1);
+      ch.Reshape(tmp.shape());
       ch = ch / tmp;
       ch.Reshape(Shape{input.shape(2), input.shape(3)});
       channels.push_back(ch);
@@ -97,8 +97,7 @@
         Tensor window =
             CopyRows(image, std::max(0, static_cast<int>(c) - local_size_ / 2),
                      std::min(grad.shape(1), c + local_size_ / 2 + 1));
-        Tensor tmp;
-        tmp.Reshape(Shape{grad.shape(2) * grad.shape(3)});
+        Tensor tmp(Shape{grad.shape(2) * grad.shape(3)});
         window = Square(window);
         SumRows(window, &tmp);
         tmp *= alpha_;
@@ -126,8 +125,7 @@
         Tensor window =
             CopyRows(image, std::max(0, static_cast<int>(c) - local_size_ / 2),
                      std::min(grad.shape(1), c + local_size_ / 2 + 1));
-        Tensor tmpr;
-        tmpr.Reshape(Shape{grad.shape(2) * grad.shape(3)});
+        Tensor tmpr(Shape{grad.shape(2) * grad.shape(3)});
         SumRows(window, &tmpr);
         tmpr.Reshape(Shape{grad.shape(2), grad.shape(3)});
         channels.push_back(tmpr);
@@ -138,6 +136,7 @@
     }
     Tensor tmp2 = ConcatenateRows(images);
     tmp2 *= (-2.0f * beta_ * alpha_);
+    tmp2.Reshape(x.shape());
     tmp2 = tmp2 * x;
     dx = dx + tmp2;
     dx.Reshape(grad.shape());
diff --git a/src/model/layer/opencl_convolution.cc b/src/model/layer/opencl_convolution.cc
index 063c4c3..eb25f5e 100644
--- a/src/model/layer/opencl_convolution.cc
+++ b/src/model/layer/opencl_convolution.cc
@@ -37,9 +37,9 @@
   auto data_type = input.data_type();
   auto device = input.device();
 
-   // TODO(wangwei) update the layer config if the input sample shape changes
+  // TODO(wangwei) update the layer config if the input sample shape changes
   CHECK(input.shape(1) == channels_ && input.shape(2) == height_ &&
-      input.shape(3) == width_) << "input sample shape should not change";
+        input.shape(3) == width_) << "input sample shape should not change";
 
   Shape shape{batchsize, num_filters_, conv_height_, conv_width_};
   Tensor output(shape, device, data_type);
@@ -48,16 +48,16 @@
   for (size_t b = 0; b < batchsize; b++) {
     int offset = b * imagesize;
 
-    col_data.device()->Exec([input, offset, col_data, this](Context* ctx) mutable {
+    col_data.device()->Exec([input, offset, col_data, this](Context * ctx) mutable {
 
       this->Im2Col(input.block(), offset,
-                   height_, width_,
-                   kernel_h_, kernel_w_,
-                   pad_h_, pad_w_,
-                   stride_h_, stride_w_,
-                   conv_height_, conv_width_,
-                   0, channels_,
-                   col_data.block(), ctx);
+      height_, width_,
+      kernel_h_, kernel_w_,
+      pad_h_, pad_w_,
+      stride_h_, stride_w_,
+      conv_height_, conv_width_,
+      0, channels_,
+      col_data.block(), ctx);
     },
     {input.block()},
     {col_data.block()});
@@ -116,16 +116,17 @@
     int im_offset = b * imagesize;
     int col_offset = 0; // Always keep this to zero.
 
-    col_data.device()->Exec([src_data, col_data, im_offset, col_offset, this](Context* ctx) mutable {
+    col_data.device()->Exec([src_data, col_data, im_offset, col_offset,
+    this](Context * ctx) mutable {
 
       this->Im2Col(src_data.block(), im_offset,
-                   height_, width_,
-                   kernel_h_, kernel_w_,
-                   pad_h_, pad_w_,
-                   stride_h_, stride_w_,
-                   conv_height_, conv_width_,
-                   col_offset, channels_,
-                   col_data.block(), ctx);
+      height_, width_,
+      kernel_h_, kernel_w_,
+      pad_h_, pad_w_,
+      stride_h_, stride_w_,
+      conv_height_, conv_width_,
+      col_offset, channels_,
+      col_data.block(), ctx);
     },
     {src_data.block()},
     {col_data.block()});
@@ -134,19 +135,20 @@
                   grad.device(), grad.data_type());
     CopyDataToFrom(&grad_b, grad, grad_b.Size(), 0, b * grad_b.Size());
 
-    dw += Mult(grad_b, col_data.T());
-    Tensor dcol_b = Mult(weight_.T(), grad_b);
+    dw += Mult(grad_b, Transpose(col_data));
+    Tensor dcol_b = Mult(Transpose(weight_), grad_b);
 
-    dx.device()->Exec([dcol_b, dx, im_offset, col_offset, this](Context* ctx) mutable {
+    dx.device()->Exec([dcol_b, dx, im_offset, col_offset,
+    this](Context * ctx) mutable {
 
       this->Col2Im(dcol_b.block(), col_offset,
-                   height_, width_,
-                   kernel_h_, kernel_w_,
-                   pad_h_, pad_w_,
-                   stride_h_, stride_w_,
-                   conv_height_, conv_width_,
-                   im_offset, channels_,
-                   dx.block(), ctx);
+      height_, width_,
+      kernel_h_, kernel_w_,
+      pad_h_, pad_w_,
+      stride_h_, stride_w_,
+      conv_height_, conv_width_,
+      im_offset, channels_,
+      dx.block(), ctx);
     },
     {dcol_b.block()},
     {dx.block()});
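In this backward pass each sample is unrolled with Im2Col, after which the weight and data gradients are plain matrix products. A standalone shape check with made-up dimensions:

```cpp
#include <cstdio>

// Shape bookkeeping for the im2col-based backward above, with
// W: (F, C*kh*kw), col: (C*kh*kw, oh*ow), grad_b: (F, oh*ow).
int main() {
  const int F = 8, C = 3, kh = 3, kw = 3, oh = 16, ow = 16;
  const int K = C * kh * kw;
  // dw += grad_b * col^T   => (F, oh*ow) x (oh*ow, K) = (F, K)
  // dcol = W^T * grad_b    => (K, F) x (F, oh*ow)     = (K, oh*ow)
  std::printf("dw: %dx%d, dcol: %dx%d\n", F, K, K, oh * ow);
}
```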
diff --git a/src/model/layer/prelu.cc b/src/model/layer/prelu.cc
index e567172..fe6447f 100644
--- a/src/model/layer/prelu.cc
+++ b/src/model/layer/prelu.cc
@@ -64,6 +64,7 @@
     } else {
       LOG(FATAL) << "Incorrect input format for prelu layer.";
     }
+    temp.Reshape(input.shape());
     output = input * ((input > 0.f) + temp);
   } else {
     // share the first param of Tensor A along all channels
diff --git a/src/model/layer/prelu.h b/src/model/layer/prelu.h
index 3041d1e..eca6a23 100644
--- a/src/model/layer/prelu.h
+++ b/src/model/layer/prelu.h
@@ -46,11 +46,11 @@
 
   void ToDevice(std::shared_ptr<Device> device);
 
-  const bool Channel_shared() const { return channel_shared_; }
+  bool Channel_shared() const { return channel_shared_; }
   const Tensor A() const { return a_; }
   const std::string Format() const { return format_; }
 
-  void Set_a(Tensor a) {
+  void Set_a(const Tensor& a) {
     a_.ResetLike(a);
     a_.CopyData(a);
   }
diff --git a/src/model/layer/rnn.cc b/src/model/layer/rnn.cc
index b811f9d..0fb920c 100644
--- a/src/model/layer/rnn.cc
+++ b/src/model/layer/rnn.cc
@@ -79,7 +79,7 @@
       dim = hidden_size_ * (hidden_size_ +  hidden_size_ + 2);
     weight_size += mult * dim;
   }
-  weight_.Reshape(Shape{weight_size});
+  weight_.Resize(Shape{weight_size});
 }
 
 const vector<Tensor> RNN::Forward(int flag, const vector<Tensor>& inputs) {
diff --git a/src/model/layer/split.h b/src/model/layer/split.h
index d4fd58a..b454da3 100644
--- a/src/model/layer/split.h
+++ b/src/model/layer/split.h
@@ -44,7 +44,7 @@
   const std::pair<vector<Tensor>, vector<Tensor> >
   Backward(int flag, const vector<Tensor> &grads) override;
 
-  const size_t output_size() const { return output_size_; }
+  size_t output_size() const { return output_size_; }
 
  protected:
   // To store the input and output(of forward) tensors
diff --git a/src/model/operation/batchnorm.cc b/src/model/operation/batchnorm.cc
new file mode 100644
index 0000000..dffac1e
--- /dev/null
+++ b/src/model/operation/batchnorm.cc
@@ -0,0 +1,407 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+#include "batchnorm.h"
+
+#include <cctype>
+
+namespace singa {
+
+BatchNormHandle::BatchNormHandle(const float momentum, const Tensor& input) {
+  factor = momentum;
+  batchsize = input.shape(0);
+  channels = input.shape(1);
+  if (input.nDim() == 4u) {
+    height = input.shape().at(2);
+    width = input.shape().at(3);
+    is_2d = false;
+  } else if (input.nDim() == 2u) {
+    height = 1;
+    width = 1;
+    is_2d = true;
+  } else {
+    LOG(FATAL) << "The dimension of input should either be 4D or 2D.";
+  }
+
+#ifdef USE_DNNL
+  if (input.device()->lang() == kCpp) {
+    use_dnnl = true;
+    epsilon = 1e-5f;
+    x_dims = dnnl::memory::dims(input.shape().begin(), input.shape().end());
+
+    // support f32 only
+    auto dtype_ = memory::data_type::f32;
+    memory::format_tag format_tag_ = get_dnnl_format_tag(input);
+    x_md = dnnl::memory::desc({x_dims}, dtype_, format_tag_);
+
+    // add to
+    bn_fwd_training_d = new dnnl::batch_normalization_forward::desc(
+        dnnl::prop_kind::forward_training, x_md, epsilon,
+        dnnl::normalization_flags::use_scale_shift);
+
+    auto eng = input.device()->context(0)->dnnl_engine;
+    bn_fwd_training_pd = new dnnl::batch_normalization_forward::primitive_desc(
+        *bn_fwd_training_d, eng);
+  }
+#endif  // USE_DNNL
+};
+
+BatchNormHandle::~BatchNormHandle() {
+#ifdef USE_DNNL
+  if (use_dnnl) {
+    delete (bn_fwd_training_d);
+    delete (bn_fwd_training_pd);
+  }
+#endif  // USE_DNNL
+}
+
+#ifdef USE_DNNL
+
+Tensor CpuBatchNormForwardInference(const BatchNormHandle& bnh, const Tensor& x,
+                                    const Tensor& bnScale, const Tensor& bnBias,
+                                    Tensor& running_mean, Tensor& running_var) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+  Tensor y;
+  y.ResetLike(x);
+
+  Tensor w = get_bn_weight_from(bnScale, bnBias);
+
+  y.device()->Exec(
+      [y, w, x, &running_mean, &running_var, &bnh](Context* ctx) mutable {
+        auto eng = ctx->dnnl_engine;
+        using namespace dnnl;
+
+        auto x_mem = memory(bnh.x_md, eng, x.block()->mutable_data());
+        auto y_mem = memory(bnh.x_md, eng, y.block()->mutable_data());
+        // indicates using scale&bias and running mean&var
+        auto flags_ = normalization_flags::use_scale_shift |
+                      normalization_flags::use_global_stats;
+
+        auto bn_fwd_d = batch_normalization_forward::desc(
+            prop_kind::forward_inference, bnh.x_md, bnh.epsilon, flags_);
+        auto bn_fwd_pd =
+            batch_normalization_forward::primitive_desc(bn_fwd_d, eng);
+        auto m_mem = memory(bn_fwd_pd.mean_desc(), eng,
+                            running_mean.block()->mutable_data());
+        auto v_mem = memory(bn_fwd_pd.variance_desc(), eng,
+                            running_var.block()->mutable_data());
+        auto w_mem =
+            memory(bn_fwd_pd.weights_desc(), eng, w.block()->mutable_data());
+
+        // execution
+        batch_normalization_forward(bn_fwd_pd).execute(
+            ctx->dnnl_stream, {{DNNL_ARG_SRC, x_mem},
+                               {DNNL_ARG_DST, y_mem},
+                               {DNNL_ARG_SCALE_SHIFT, w_mem},
+                               {DNNL_ARG_MEAN, m_mem},
+                               {DNNL_ARG_VARIANCE, v_mem}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block(), w.block(), running_mean.block(), running_var.block()},
+      {y.block(), running_mean.block(), running_var.block()}, "CpuBatchNormForwardInference");
+
+  return y;
+}
+
+const std::vector<Tensor> CpuBatchNormForwardTraining(
+    const BatchNormHandle& bnh, const Tensor& x, const Tensor& bnScale,
+    const Tensor& bnBias, Tensor& running_mean, Tensor& running_var) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+  Tensor y;
+  y.ResetLike(x);
+
+  // mean and var for local batch
+  Tensor mean;
+  mean.ResetLike(running_mean);
+  Tensor var;
+  var.ResetLike(running_var);
+
+  // combine scale and bias to construct weight tensor in required format for
+  // backward
+  Tensor w = get_bn_weight_from(bnScale, bnBias);
+
+  y.device()->Exec(
+      [y, mean, var, w, x, &running_mean, &running_var,
+       &bnh](Context* ctx) mutable {
+        auto eng = ctx->dnnl_engine;
+        using namespace dnnl;
+
+        auto x_mem = memory(bnh.x_md, eng, x.block()->mutable_data());
+        auto y_mem = memory(bnh.x_md, eng, y.block()->mutable_data());
+        auto m_mem = memory(bnh.bn_fwd_training_pd->mean_desc(), eng,
+                            mean.block()->mutable_data());
+        auto v_mem = memory(bnh.bn_fwd_training_pd->variance_desc(), eng,
+                            var.block()->mutable_data());
+        auto w_mem = memory(bnh.bn_fwd_training_pd->weights_desc(), eng,
+                            w.block()->mutable_data());
+
+        batch_normalization_forward(*bnh.bn_fwd_training_pd)
+            .execute(ctx->dnnl_stream, {{DNNL_ARG_SRC, x_mem},
+                                        {DNNL_ARG_DST, y_mem},
+                                        {DNNL_ARG_SCALE_SHIFT, w_mem},
+                                        {DNNL_ARG_MEAN, m_mem},
+                                        {DNNL_ARG_VARIANCE, v_mem}});
+        ctx->dnnl_stream.wait();
+
+        // update the running mean/var locally, as mkldnn does not support this yet:
+        // https://github.com/intel/mkl-dnn/issues/371
+        // https://github.com/intel/mkl-dnn/issues/517
+        // https://arxiv.org/pdf/1502.03167.pdf
+        auto s = x.shape();
+        s[1] = 1;
+        float p = Product(s);  // for unbiased variance
+        running_mean = running_mean * (1 - bnh.factor) + mean * bnh.factor;
+        running_var =
+            running_var * (1 - bnh.factor) + var * (p / (p - 1)) * bnh.factor;
+      },
+      {x.block(), w.block(), running_mean.block(), running_var.block()},
+      {y.block(), running_mean.block(), running_var.block(), mean.block(),
+       var.block()}, "CpuBatchNormForwardTraining");
+
+  return {y, mean, var};
+}
+
+const std::vector<Tensor> CpuBatchNormBackwardx(
+    const BatchNormHandle& bnh, const Tensor& y, const Tensor& dy,
+    const Tensor& x, const Tensor& bnScale, const Tensor& bnBias,
+    const Tensor& mean, const Tensor& var) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+  CHECK_EQ(y.device()->lang(), kCpp);
+  CHECK_EQ(dy.device()->lang(), kCpp);
+  CHECK_EQ(mean.device()->lang(), kCpp);
+  CHECK_EQ(var.device()->lang(), kCpp);
+  CHECK_EQ(bnScale.device()->lang(), kCpp);
+  CHECK_EQ(bnBias.device()->lang(), kCpp);
+
+  Tensor dx;
+  dx.ResetLike(dy);
+
+  // combine scale and bias to construct weight tensor in required format for
+  // backward
+  Tensor w = get_bn_weight_from(bnScale, bnBias);
+
+  // Tensor dw(Shape{bnScale.Size(), 2});
+  Tensor dw;
+  dw.ResetLike(w);
+
+  dx.device()->Exec(
+      [w, dw, dx, dy, x, y, mean, var, &bnh](Context* ctx) mutable {
+        auto eng = ctx->dnnl_engine;
+        using namespace dnnl;
+
+        auto x_mem = memory(bnh.x_md, eng, x.block()->mutable_data());
+        auto dx_mem = memory(bnh.x_md, eng, dx.block()->mutable_data());
+        auto y_mem = memory(bnh.x_md, eng, y.block()->mutable_data());
+        auto dy_mem = memory(bnh.x_md, eng, dy.block()->mutable_data());
+
+        auto m_mem = memory(bnh.bn_fwd_training_pd->mean_desc(), eng,
+                            mean.block()->mutable_data());
+        auto v_mem = memory(bnh.bn_fwd_training_pd->variance_desc(), eng,
+                            var.block()->mutable_data());
+        auto w_mem = memory(bnh.bn_fwd_training_pd->weights_desc(), eng,
+                            w.block()->mutable_data());
+
+        auto bn_bwd_d = batch_normalization_backward::desc(
+            prop_kind::backward, bnh.x_md, bnh.x_md, bnh.epsilon,
+            normalization_flags::use_scale_shift);
+        auto bn_bwd_pd = batch_normalization_backward::primitive_desc(
+            bn_bwd_d, eng, *bnh.bn_fwd_training_pd);
+
+        auto dw_mem = memory(bn_bwd_pd.diff_weights_desc(), eng,
+                             dw.block()->mutable_data());
+
+        batch_normalization_backward(bn_bwd_pd).execute(
+            ctx->dnnl_stream, {{DNNL_ARG_SRC, x_mem},
+                               {DNNL_ARG_DIFF_SRC, dx_mem},
+                               {DNNL_ARG_DIFF_DST, dy_mem},
+                               {DNNL_ARG_MEAN, m_mem},
+                               {DNNL_ARG_VARIANCE, v_mem},
+                               {DNNL_ARG_DIFF_SCALE_SHIFT, dw_mem},
+                               {DNNL_ARG_SCALE_SHIFT, w_mem}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block(), dy.block(), mean.block(), var.block(), w.block(), y.block()},
+      {dx.block(), dw.block()}, "CpuBatchNormBackwardx");
+
+  singa::Tensor dbnScale(bnScale.shape());
+  CopyDataToFrom(&dbnScale, dw, bnScale.Size(), 0, 0);
+  singa::Tensor dbnBias(bnBias.shape());
+  CopyDataToFrom(&dbnBias, dw, bnBias.Size(), 0, bnScale.Size());
+
+  CHECK(dbnScale.nDim() == bnScale.nDim()) << "dbnScale ndim not match bnScale";
+  CHECK(dbnBias.nDim() == bnBias.nDim()) << "dbnBias ndim not match bnBias";
+  CHECK(dbnScale.shape()[0] == bnScale.shape()[0])
+      << "dbnScale shape not match bnScale";
+  CHECK(dbnBias.shape()[0] == bnBias.shape()[0])
+      << "dbnBias shape not match bnBias";
+
+  return {dx, dbnScale, dbnBias};
+}
+
+#endif  // USE_DNNL
+
+#ifdef USE_CUDNN
+CudnnBatchNormHandle::CudnnBatchNormHandle(const float momentum,
+                                           const Tensor& input)
+    : BatchNormHandle(momentum, input) {
+  if (is_2d) {
+    mode = CUDNN_BATCHNORM_PER_ACTIVATION;
+  } else {
+    mode = CUDNN_BATCHNORM_SPATIAL;
+    if (const char* env_p = std::getenv("CUDNN_BATCHNORM_ALG")) {
+      std::string alg = std::string(env_p);
+      std::transform(alg.begin(), alg.end(), alg.begin(), toupper);
+      if (alg == "CUDNN_BATCHNORM_SPATIAL_PERSISTENT")
+        mode = CUDNN_BATCHNORM_SPATIAL_PERSISTENT;
+      LOG(INFO) << " CUDNN_BATCHNORM_ALG: " << alg;
+    }
+  }
+  DataType dtype = input.data_type();
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&shape_desc));
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&param_desc));
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(shape_desc, CUDNN_TENSOR_NCHW,
+                                         GetCudnnDataType(dtype), batchsize,
+                                         channels, height, width));
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(param_desc, CUDNN_TENSOR_NCHW,
+                                         GetCudnnDataType(dtype), 1, channels,
+                                         1, 1));
+};
+
+const std::vector<Tensor> GpuBatchNormForwardTraining(
+    const CudnnBatchNormHandle& cbnh, const Tensor& x, const Tensor& bnScale,
+    const Tensor& bnBias, Tensor& running_mean, Tensor& running_var) {
+  CHECK_EQ(x.device()->lang(), kCuda);
+  CHECK_EQ(bnScale.device()->lang(), kCuda);
+  CHECK_EQ(bnBias.device()->lang(), kCuda);
+  CHECK_EQ(running_mean.device()->lang(), kCuda);
+  CHECK_EQ(running_var.device()->lang(), kCuda);
+
+  Tensor mean;
+  Tensor var;
+  mean.ResetLike(running_mean);
+  var.ResetLike(running_var);
+
+  Shape shape = x.shape();
+
+  Tensor input(x);  // for unification of 2d and 4d cases.
+  if (cbnh.is_2d) input.Reshape(Shape{shape.at(0), shape.at(1), 1, 1});
+
+  Tensor output;
+  output.ResetLike(x);
+
+  output.device()->Exec(
+      [=, &bnScale, &bnBias, &running_mean, &running_var,
+       &cbnh](Context* ctx) mutable {
+        const float alpha = 1.0f, beta = 0.0f;
+        double epsilon = CUDNN_BN_MIN_EPSILON;
+        CUDNN_CHECK(cudnnBatchNormalizationForwardTraining(
+            ctx->cudnn_handle, cbnh.mode, &alpha, &beta, cbnh.shape_desc,
+            input.block()->data(), cbnh.shape_desc,
+            output.block()->mutable_data(), cbnh.param_desc,
+            bnScale.block()->data(), bnBias.block()->data(), cbnh.factor,
+            running_mean.block()->mutable_data(),
+            running_var.block()->mutable_data(), epsilon,
+            mean.block()->mutable_data(), var.block()->mutable_data()));
+      },
+      {input.block(), bnScale.block(), bnBias.block(), running_mean.block(),
+       running_var.block()},
+      {output.block(), running_mean.block(), running_var.block(), mean.block(),
+       var.block()}, "GpuBatchNormForwardTraining");
+  if (cbnh.is_2d) output.Reshape(Shape{shape.at(0), shape.at(1)});
+  return {output, mean, var};
+}
+
+Tensor GpuBatchNormForwardInference(const CudnnBatchNormHandle& cbnh,
+                                    const Tensor& x, const Tensor& bnScale,
+                                    const Tensor& bnBias,
+                                    const Tensor& running_mean,
+                                    const Tensor& running_var) {
+  CHECK_EQ(x.device()->lang(), kCuda);
+  CHECK_EQ(bnScale.device()->lang(), kCuda);
+  CHECK_EQ(bnBias.device()->lang(), kCuda);
+  CHECK_EQ(running_mean.device()->lang(), kCuda);
+  CHECK_EQ(running_var.device()->lang(), kCuda);
+
+  Shape shape = x.shape();
+
+  Tensor input(x);  // for unification of 2d and 4d cases.
+  if (cbnh.is_2d) input.Reshape(Shape{shape.at(0), shape.at(1), 1, 1});
+
+  Tensor output;
+  output.ResetLike(x);
+  output.device()->Exec(
+      [=, &bnScale, &bnBias, &running_mean, &running_var,
+       &cbnh](Context* ctx) mutable {
+        const float alpha = 1.0f, beta = 0.0f;
+        double epsilon = CUDNN_BN_MIN_EPSILON;
+        CUDNN_CHECK(cudnnBatchNormalizationForwardInference(
+            ctx->cudnn_handle, cbnh.mode, &alpha, &beta, cbnh.shape_desc,
+            input.block()->data(), cbnh.shape_desc,
+            output.block()->mutable_data(), cbnh.param_desc,
+            bnScale.block()->data(), bnBias.block()->data(),
+            running_mean.block()->data(), running_var.block()->data(),
+            epsilon));
+      },
+      {input.block(), bnScale.block(), bnBias.block(), running_mean.block(),
+       running_var.block()},
+      {output.block()}, "GpuBatchNormForwardInference");
+  return output;
+}
+
+const std::vector<Tensor> GpuBatchNormBackward(
+    const CudnnBatchNormHandle& cbnh, const Tensor& dy, const Tensor& x,
+    const Tensor& bnScale, const Tensor& mean, const Tensor& var) {
+  CHECK_EQ(dy.device()->lang(), kCuda);
+  CHECK_EQ(x.device()->lang(), kCuda);
+  CHECK_EQ(bnScale.device()->lang(), kCuda);
+  CHECK_EQ(mean.device()->lang(), kCuda);
+  CHECK_EQ(var.device()->lang(), kCuda);
+
+  Tensor dx;
+  dx.ResetLike(dy);
+
+  Tensor dbnScale;
+  dbnScale.ResetLike(bnScale);
+
+  Tensor dbnBias;
+  dbnBias.ResetLike(bnScale);
+
+  dx.device()->Exec(
+      [=, &bnScale, &cbnh](Context* ctx) mutable {
+        const float alpha = 1.0f, beta = .0f;
+        double epsilon = CUDNN_BN_MIN_EPSILON;
+        CUDNN_CHECK(cudnnBatchNormalizationBackward(
+            ctx->cudnn_handle, cbnh.mode, &alpha, &beta, &alpha, &beta,
+            cbnh.shape_desc, x.block()->data(), cbnh.shape_desc,
+            dy.block()->data(), cbnh.shape_desc, dx.block()->mutable_data(),
+            cbnh.param_desc, bnScale.block()->data(),
+            dbnScale.block()->mutable_data(), dbnBias.block()->mutable_data(),
+            epsilon, mean.block()->data(), var.block()->data()));
+      },
+      {x.block(), dy.block(), bnScale.block(), mean.block(), var.block()},
+      {dx.block(), dbnScale.block(), dbnBias.block()}, "GpuBatchNormBackward");
+
+  if (cbnh.is_2d) dx.Reshape(Shape{dx.shape().at(0), dx.shape().at(1)});
+
+  return {dx, dbnScale, dbnBias};
+}
+
+#endif  // USE_CUDNN
+}  // namespace singa
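The in-code comment in CpuBatchNormForwardTraining explains that the running statistics are updated by hand because DNNL does not maintain them. The update is an exponential moving average with bnh.factor as the momentum, and the batch variance is rescaled by p/(p-1), where p = N·H·W is the number of elements reduced per channel, to make it unbiased. A standalone numeric sketch of that update:

```cpp
#include <cstdio>

// Standalone version of the running-stat update above:
// running = (1 - momentum) * running + momentum * batch_stat, with the batch
// variance rescaled by p/(p-1) (p = N*H*W elements per channel) to be unbiased.
int main() {
  const float momentum = 0.1f;                      // bnh.factor
  const float p = 4 * 8 * 8;                        // batch 4, 8x8 feature map
  float running_mean = 0.f, running_var = 1.f;
  const float batch_mean = 0.5f, batch_var = 0.9f;  // biased (divides by p)
  running_mean = running_mean * (1 - momentum) + batch_mean * momentum;
  running_var  = running_var  * (1 - momentum) +
                 batch_var * (p / (p - 1)) * momentum;
  std::printf("mean=%g var=%g\n", running_mean, running_var);
}
```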
diff --git a/src/model/operation/batchnorm.h b/src/model/operation/batchnorm.h
new file mode 100644
index 0000000..f2f4723
--- /dev/null
+++ b/src/model/operation/batchnorm.h
@@ -0,0 +1,121 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+#ifndef SINGA_MODEL_OPERATION_BATCHNORM_H_
+#define SINGA_MODEL_OPERATION_BATCHNORM_H_
+
+#include <vector>
+
+#include "singa/core/tensor.h"
+
+#ifdef USE_CUDNN
+#include <cudnn.h>
+
+#include "../layer/cudnn_utils.h"  // check_cudnn
+#endif                             // USE_CUDNN
+
+#ifdef USE_DNNL
+#include <singa/utils/dnnl_utils.h>
+
+// combine scale and bias into weight format required by dnnl
+static inline singa::Tensor get_bn_weight_from(const singa::Tensor &s,
+                                               const singa::Tensor &b) {
+  singa::Tensor w(singa::Shape{s.Size(), b.Size()});
+  CopyDataToFrom(&w, s, s.Size(), 0, 0);
+  CopyDataToFrom(&w, b, b.Size(), s.Size(), 0);
+  return w;
+}
+#endif  // USE_DNNL
+
+namespace singa {
+
+class BatchNormHandle {
+ public:
+  BatchNormHandle(const float momentum, const Tensor &input);
+  ~BatchNormHandle();
+
+  float factor;
+
+  size_t batchsize;
+  size_t channels;
+  size_t height;
+  size_t width;
+  bool is_2d;
+  // bool train = true;
+  bool use_dnnl =
+      false;  // useful flag if both USE_CUDNN and USE_DNNL are enabled
+
+#ifdef USE_DNNL
+  float epsilon;
+  dnnl::memory::dims x_dims;
+  dnnl::memory::desc x_md;
+  // as no default constructor, we need to declare it as pointer
+  dnnl::batch_normalization_forward::desc *bn_fwd_training_d;
+  dnnl::batch_normalization_forward::primitive_desc *bn_fwd_training_pd;
+#endif  // USE_DNNL
+};
+
+#ifdef USE_DNNL
+Tensor CpuBatchNormForwardInference(const BatchNormHandle &bnh, const Tensor &x,
+                                    const Tensor &bnScale, const Tensor &bnBias,
+                                    Tensor &running_mean, Tensor &running_var);
+
+const std::vector<Tensor> CpuBatchNormForwardTraining(
+    const BatchNormHandle &bnh, const Tensor &x, const Tensor &bnScale,
+    const Tensor &bnBias, Tensor &running_mean, Tensor &running_var);
+
+const std::vector<Tensor> CpuBatchNormBackwardx(
+    const BatchNormHandle &bnh, const Tensor &y, const Tensor &dy,
+    const Tensor &x, const Tensor &bnScale, const Tensor &bnBias,
+    const Tensor &mean, const Tensor &var);
+#endif  // USE_DNNL
+
+#ifdef USE_CUDNN
+
+class CudnnBatchNormHandle : public BatchNormHandle {
+ public:
+  CudnnBatchNormHandle(const float momentum, const Tensor &input);
+
+  //~CudnnBatchNormHandle();
+
+  cudnnBatchNormMode_t mode;
+  cudnnTensorDescriptor_t shape_desc = nullptr;
+  cudnnTensorDescriptor_t param_desc = nullptr;
+};
+
+const std::vector<Tensor> GpuBatchNormForwardTraining(
+    const CudnnBatchNormHandle &cbnh, const Tensor &x, const Tensor &bnScale,
+    const Tensor &bnBias, Tensor &running_mean, Tensor &running_var);
+
+Tensor GpuBatchNormForwardInference(const CudnnBatchNormHandle &cbnh,
+                                    const Tensor &x, const Tensor &bnScale,
+                                    const Tensor &bnBias,
+                                    const Tensor &running_mean,
+                                    const Tensor &running_var);
+
+const std::vector<Tensor> GpuBatchNormBackward(
+    const CudnnBatchNormHandle &cbnh, const Tensor &dy, const Tensor &x,
+    const Tensor &bnScale, const Tensor &mean, const Tensor &var);
+
+#endif  // USE_CUDNN
+
+}  // namespace singa
+
+#endif  // SINGA_MODEL_OPERATION_BATCHNORM_H_
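batchnorm.h exposes a handle-plus-free-function API instead of a Layer subclass. A hedged usage sketch for the CUDA path, assuming a CUDA build and SINGA's public Tensor headers (the two-argument Tensor constructor defaulting to kFloat32 is an assumption):

```cpp
// Hedged usage sketch of the handle-based API declared above (CUDA build).
#include "singa/core/device.h"
#include "singa/core/tensor.h"
#include "batchnorm.h"

void TrainStep(singa::Tensor& x /* NCHW tensor on a CUDA device */) {
  using namespace singa;
  const size_t c = x.shape(1);
  auto dev = x.device();
  Tensor scale(Shape{c}, dev), bias(Shape{c}, dev);
  Tensor running_mean(Shape{c}, dev), running_var(Shape{c}, dev);
  scale.SetValue(1.f); bias.SetValue(0.f);
  running_mean.SetValue(0.f); running_var.SetValue(1.f);

  CudnnBatchNormHandle bnh(0.9f /* momentum */, x);
  // returns {y, batch_mean, batch_var}; the latter two feed the backward pass
  auto outs = GpuBatchNormForwardTraining(bnh, x, scale, bias,
                                          running_mean, running_var);
}
```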
diff --git a/src/model/operation/convolution.cc b/src/model/operation/convolution.cc
new file mode 100644
index 0000000..052e521
--- /dev/null
+++ b/src/model/operation/convolution.cc
@@ -0,0 +1,698 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+// #include "../layer/convolution.h"
+
+#include "convolution.h"
+
+#include <cctype>
+
+namespace singa {
+
+ConvHandle::ConvHandle(const Tensor &input,
+                       const std::vector<size_t> &kernel_size,
+                       const std::vector<size_t> &stride,
+                       const std::vector<size_t> &padding,
+                       const size_t in_channels, const size_t out_channels,
+                       const bool bias, const size_t groups) {
+  kernel_h = kernel_size[0];
+  kernel_w = kernel_size[1];
+
+  pad_h = padding[0];
+  pad_w = padding[1];
+
+  stride_h = stride[0];
+  stride_w = stride[1];
+
+  channels = in_channels;
+  num_filters = out_channels;
+  group = groups;
+
+  bias_term = bias;
+
+  batchsize = input.shape(0);
+  CHECK(input.shape(1) == in_channels)
+      << "input channel count mismatch";
+  height = input.shape(2);
+  width = input.shape(3);
+
+  conv_height = 1;
+  if (stride_h > 0)
+    conv_height = (height + 2 * pad_h - kernel_h) / stride_h + 1;
+  conv_width = (width + 2 * pad_w - kernel_w) / stride_w + 1;
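+  // worked example (editor's note): height = 32, pad_h = 1, kernel_h = 3,
+  // stride_h = 1 gives conv_height = (32 + 2*1 - 3)/1 + 1 = 32, i.e. a
+  // shape-preserving "same" convolution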
+
+  col_height = in_channels * kernel_w * kernel_h;
+  col_width = conv_height * conv_width;
+  imagesize = input.Size() / batchsize;
+
+#ifdef USE_DNNL
+  if (input.device()->lang() == kCpp) {
+    use_dnnl = true;
+    const int groups = 1;  // shadows the ctor argument: this DNNL path
+                           // supports only groups == 1 for now
+    auto dtype_ = dnnl::memory::data_type::f32;
+
+    x_dims = dnnl::memory::dims{(int)input.shape(0), (int)in_channels,
+                                (int)input.shape(2), (int)input.shape(3)};
+    b_dims = dnnl::memory::dims{(int)out_channels};
+    s_dims = dnnl::memory::dims{(int)stride_h, (int)stride_w};
+    p_dims = dnnl::memory::dims{(int)pad_h, (int)pad_w};
+    o_dims = dnnl::memory::dims{(int)input.shape(0), (int)out_channels,
+                                (int)conv_height, (int)conv_width};
+    w_dims = dnnl::memory::dims{groups, (int)out_channels / groups,
+                                (int)in_channels / groups, (int)kernel_size[0],
+                                (int)kernel_size[1]};
+    // DNNL computes dW and db in one pass; db is cached here as a workaround
+    // to keep SINGA's per-gradient API
+    db = new Tensor(Shape{num_filters}, input.device(), input.data_type());
+  }
+#endif  // USE_DNNL
+}
+
+ConvHandle::~ConvHandle() {
+#ifdef USE_DNNL
+  if (use_dnnl) {
+    delete db;
+  }
+#endif  // USE_DNNL
+}
+
+Tensor CpuConvForward(const Tensor &x, Tensor &W, Tensor &b,
+                      const ConvHandle &ch) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+
+  CHECK(x.shape(1) == ch.channels && x.shape(2) == ch.height &&
+        x.shape(3) == ch.width)
+      << "input sample shape should not change";
+
+  CHECK(W.shape(0) == ch.num_filters && W.shape(1) == ch.channels &&
+        W.shape(2) == ch.kernel_h && W.shape(3) == ch.kernel_w)
+      << "weights shape should not change";
+
+#ifdef USE_DNNL
+  DataType dtype = x.data_type();
+  auto dev = x.device();
+
+  Shape shape{ch.batchsize, ch.num_filters, ch.conv_height, ch.conv_width};
+  Tensor output(shape, dev, dtype);
+
+  output.device()->Exec(
+      [output, x, &W, &b, &ch](Context *ctx) mutable {
+        using namespace dnnl;
+        using tag = memory::format_tag;
+        auto eng = ctx->dnnl_engine;
+        auto s = ctx->dnnl_stream;
+        auto dtype = dnnl::memory::data_type::f32;
+
+        // DNNL design pattern: the xxx_user_xxx_memory objects (and their
+        // format tags) are defined by the user and may need to be reordered
+        // into the primitive's preferred layout
+        auto conv_user_src_memory = memory({{ch.x_dims}, dtype, tag::nchw}, eng,
+                                           x.block()->mutable_data());
+        auto conv_user_weights_memory = memory({{ch.w_dims}, dtype, tag::goihw},
+                                               eng, W.block()->mutable_data());
+        auto conv_user_bias_memory = memory({{ch.b_dims}, dtype, tag::x}, eng,
+                                            b.block()->mutable_data());
+
+        // the xxx_md descriptors are only used to create conv_desc; their
+        // format tag is left as 'any' so DNNL can pick the optimal layout
+        auto conv_src_md = memory::desc({ch.x_dims}, dtype, tag::any);
+        auto conv_bias_md = memory::desc({ch.b_dims}, dtype, tag::any);
+        auto conv_weights_md = memory::desc({ch.w_dims}, dtype, tag::any);
+        auto conv_dst_md = memory::desc({ch.o_dims}, dtype,
+                                        tag::nchw);  // cannot be set to 'any'
+
+        auto conv_desc = convolution_forward::desc(
+            prop_kind::forward, algorithm::convolution_direct, conv_src_md,
+            conv_weights_md, conv_bias_md, conv_dst_md, ch.s_dims, ch.p_dims,
+            ch.p_dims);
+        auto conv_pd = convolution_forward::primitive_desc(conv_desc, eng);
+
+        // auto conv_pd = *ch.conv_pd;  // reusing a cached primitive_desc
+        // measured 1 ms to 70 ms slower
+
+        // memory placeholder for reorder
+        auto conv_src_memory = conv_user_src_memory;
+        auto conv_weights_memory = conv_user_weights_memory;
+
+        // output memory
+        auto conv_dst_memory =
+            memory(conv_pd.dst_desc(), eng, output.block()->mutable_data());
+
+        // Tensors for reorder: testing showed no significant performance
+        // improvement
+        Tensor x_reo;
+        x_reo.ResetLike(x);
+        Tensor W_reo;
+        W_reo.ResetLike(W);
+
+        if (conv_pd.src_desc() != conv_user_src_memory.get_desc()) {
+          conv_src_memory =
+              memory(conv_pd.src_desc(), eng, x_reo.block()->mutable_data());
+          reorder(conv_user_src_memory, conv_src_memory)
+              .execute(s, {{DNNL_ARG_FROM, conv_user_src_memory},
+                           {DNNL_ARG_TO, conv_src_memory}});
+        }
+        if (conv_pd.weights_desc() != conv_user_weights_memory.get_desc()) {
+          conv_weights_memory = memory(conv_pd.weights_desc(), eng,
+                                       W_reo.block()->mutable_data());
+          reorder(conv_user_weights_memory, conv_weights_memory)
+              .execute(s, {{DNNL_ARG_FROM, conv_user_weights_memory},
+                           {DNNL_ARG_TO, conv_weights_memory}});
+        }
+
+        // execute forward
+        convolution_forward(conv_pd).execute(
+            s, {{DNNL_ARG_SRC, conv_src_memory},
+                {DNNL_ARG_WEIGHTS, conv_weights_memory},
+                {DNNL_ARG_BIAS, conv_user_bias_memory},
+                {DNNL_ARG_DST, conv_dst_memory}});
+
+        // synchronize stream
+        s.wait();
+      },
+      {x.block(), W.block(), b.block()}, {output.block()}, "CpuConvForward");
+
+  return output;
+#else   // naive C++ path, disabled because Im2col cannot be imported here
+/*
+  Shape w_shape = W.shape();
+  Shape b_shape;
+  if (ch.bias_term) b_shape = b.shape();
+
+  W.Reshape(Shape{ch.num_filters, ch.col_height});
+  if (ch.bias_term) b.Reshape(Shape{ch.num_filters});
+
+  DataType dtype = x.data_type();
+  auto dev = x.device();
+  Shape shape{ch.batchsize, ch.num_filters, ch.conv_height, ch.conv_width};
+  Tensor output(shape, dev, dtype);
+  Tensor col_data(Shape{ch.col_height, ch.col_width});  // broadcasted image
+
+  float *data_col = new float[ch.col_height * ch.col_width];
+  auto in_data = x.data<float>();
+  for (size_t num = 0; num < ch.batchsize; num++) {
+    Im2col(in_data + num * ch.imagesize, ch.channels, ch.height, ch.width,
+           ch.kernel_h, ch.kernel_w, ch.pad_h, ch.pad_w, ch.stride_h,
+           ch.stride_w, data_col);
+
+    col_data.CopyDataFromHostPtr(data_col, ch.col_height * ch.col_width);
+    Tensor each = Mult(W, col_data);
+    if (ch.bias_term) {
+      AddColumn(b, &each);
+    }
+    CopyDataToFrom(&output, each, each.Size(), num * each.Size());
+  };
+  W.Reshape(w_shape);
+  if (ch.bias_term) b.Reshape(b_shape);
+  return output;
+*/
+  LOG(FATAL) << "CpuConvForward requires DNNL; rebuild with USE_DNNL";
+  return Tensor();  // unreachable, silences missing-return warnings
+#endif  // USE_DNNL
+}
+
+Tensor CpuConvBackwardx(const Tensor &dy, Tensor &W, const Tensor &x,
+                        const ConvHandle &ch) {
+  CHECK_EQ(dy.device()->lang(), kCpp);
+  CHECK_EQ(W.device()->lang(), kCpp);
+  CHECK_EQ(x.device()->lang(), kCpp);
+
+  CHECK(dy.shape(1) == ch.num_filters && dy.shape(2) == ch.conv_height &&
+        dy.shape(3) == ch.conv_width)
+      << "input gradients shape should not change";
+
+  CHECK(W.shape(0) == ch.num_filters && W.shape(1) == ch.channels &&
+        W.shape(2) == ch.kernel_h && W.shape(3) == ch.kernel_w)
+      << "weights shape should not change";
+
+#ifdef USE_DNNL
+  Tensor dx;
+  dx.ResetLike(x);
+
+  dy.device()->Exec(
+      [dx, dy, x, &W, &ch](Context *ctx) mutable {
+        using namespace dnnl;
+        auto eng = ctx->dnnl_engine;
+        auto s = ctx->dnnl_stream;
+        using tag = memory::format_tag;
+        auto dtype = dnnl::memory::data_type::f32;
+
+        auto conv_src_md = memory::desc({ch.x_dims}, dtype, tag::nchw);
+        auto conv_weights_md = memory::desc({ch.w_dims}, dtype, tag::goihw);
+        auto conv_bias_md = memory::desc({ch.b_dims}, dtype, tag::x);
+        auto conv_dst_md = memory::desc({ch.o_dims}, dtype, tag::nchw);
+
+        auto conv_user_src_memory =
+            memory(conv_src_md, eng, dx.block()->mutable_data());
+        auto conv_user_diff_dst_memory =
+            memory(conv_dst_md, eng, dy.block()->mutable_data());
+        auto conv_user_weights_memory =
+            memory(conv_weights_md, eng, W.block()->mutable_data());
+
+        auto conv_desc = convolution_forward::desc(
+            prop_kind::forward, algorithm::convolution_direct, conv_src_md,
+            conv_weights_md, conv_bias_md, conv_dst_md, ch.s_dims, ch.p_dims,
+            ch.p_dims);
+        auto conv_pd = convolution_forward::primitive_desc(conv_desc, eng);
+
+        auto conv_bwd_data_d = convolution_backward_data::desc(
+            algorithm::convolution_direct, conv_src_md, conv_weights_md,
+            conv_dst_md, ch.s_dims, ch.p_dims, ch.p_dims);
+        auto conv_bwd_data_pd = convolution_backward_data::primitive_desc(
+            conv_bwd_data_d, eng, conv_pd);
+
+        convolution_backward_data(conv_bwd_data_pd)
+            .execute(ctx->dnnl_stream,
+                     {{DNNL_ARG_DIFF_DST, conv_user_diff_dst_memory},
+                      {DNNL_ARG_WEIGHTS, conv_user_weights_memory},
+                      {DNNL_ARG_DIFF_SRC, conv_user_src_memory}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block(), dy.block(), W.block()}, {dx.block()}, "CpuConvBackwardx");
+
+  return dx;
+
+#else   // NOT USE_DNNL
+/*  // naive path disabled because Col2im cannot be imported here
+  Shape w_shape = W.shape();
+  W.Reshape(Shape{ch.num_filters, ch.col_height});
+
+  Tensor dx;
+  dx.ResetLike(x);
+
+  float *dx_b = new float[ch.imagesize];
+
+  for (size_t num = 0; num < ch.batchsize; num++) {
+    Tensor grad_b(Shape{ch.num_filters, ch.conv_height * ch.conv_width});
+    CopyDataToFrom(&grad_b, dy, grad_b.Size(), 0, num * grad_b.Size());
+    Tensor dcol_b = Mult(Transpose(W), grad_b);
+    auto dcol_data = dcol_b.data<float>();
+    Col2im(dcol_data, ch.channels, ch.height, ch.width, ch.kernel_h,
+           ch.kernel_w, ch.pad_h, ch.pad_w, ch.stride_h, ch.stride_w, dx_b);
+    dx.CopyDataFromHostPtr(dx_b, ch.imagesize, num * ch.imagesize);
+  }
+  W.Reshape(w_shape);
+  return dx;
+*/
+  LOG(FATAL) << "CpuConvBackwardx requires DNNL; rebuild with USE_DNNL";
+  return Tensor();  // unreachable
+#endif  // USE_DNNL
+}
+
+Tensor CpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W,
+                        const ConvHandle &ch) {
+  CHECK_EQ(dy.device()->lang(), kCpp);
+  CHECK_EQ(x.device()->lang(), kCpp);
+  CHECK_EQ(W.device()->lang(), kCpp);
+
+  CHECK(dy.shape(1) == ch.num_filters && dy.shape(2) == ch.conv_height &&
+        dy.shape(3) == ch.conv_width)
+      << "input gradients shape should not change";
+
+  CHECK(x.shape(1) == ch.channels && x.shape(2) == ch.height &&
+        x.shape(3) == ch.width)
+      << "input sample shape should not change";
+
+#ifdef USE_DNNL
+  Tensor dW;
+  dW.ResetLike(W);
+
+  dy.device()->Exec(
+      [dy, dW, x, &W, &ch](Context *ctx) mutable {
+        using namespace dnnl;
+        auto eng = ctx->dnnl_engine;
+        auto s = ctx->dnnl_stream;
+        using tag = memory::format_tag;
+        auto dtype = dnnl::memory::data_type::f32;
+
+        auto conv_src_md = memory::desc({ch.x_dims}, dtype, tag::nchw);
+        auto conv_weights_md = memory::desc({ch.w_dims}, dtype, tag::goihw);
+        auto conv_bias_md = memory::desc({ch.b_dims}, dtype, tag::x);
+        auto conv_dst_md = memory::desc({ch.o_dims}, dtype, tag::nchw);
+
+        auto conv_user_src_memory =
+            memory(conv_src_md, eng, x.block()->mutable_data());
+        auto conv_user_diff_weights_memory =
+            memory(conv_weights_md, eng, dW.block()->mutable_data());
+        auto conv_diff_bias_memory =
+            memory(conv_bias_md, eng, ch.db->block()->mutable_data());
+        auto conv_user_diff_dst_memory =
+            memory(conv_dst_md, eng, dy.block()->mutable_data());
+
+        auto conv_desc = convolution_forward::desc(
+            prop_kind::forward, algorithm::convolution_direct, conv_src_md,
+            conv_weights_md, conv_bias_md, conv_dst_md, ch.s_dims, ch.p_dims,
+            ch.p_dims);
+        auto conv_pd = convolution_forward::primitive_desc(conv_desc, eng);
+
+        // auto conv_pd = *ch.conv_pd; // very slow
+
+        auto conv_bwd_src_memory = conv_user_src_memory;
+        auto conv_diff_weights_memory = conv_user_diff_weights_memory;
+        auto conv_diff_dst_memory = conv_user_diff_dst_memory;
+
+        auto conv_bwd_weights_desc = convolution_backward_weights::desc(
+            algorithm::convolution_direct, conv_src_md, conv_weights_md,
+            conv_bias_md, conv_dst_md, ch.s_dims, ch.p_dims, ch.p_dims);
+        auto conv_bwd_weights_pd = convolution_backward_weights::primitive_desc(
+            conv_bwd_weights_desc, eng, conv_pd);
+
+        convolution_backward_weights(conv_bwd_weights_pd)
+            .execute(ctx->dnnl_stream,
+                     {{DNNL_ARG_DIFF_DST, conv_diff_dst_memory},
+                      {DNNL_ARG_SRC, conv_bwd_src_memory},
+                      {DNNL_ARG_DIFF_WEIGHTS, conv_diff_weights_memory},
+                      {DNNL_ARG_DIFF_BIAS, conv_diff_bias_memory}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block(), dy.block(), W.block()}, {dW.block(), ch.db->block()},
+      "CpuConvBackwardW");
+
+  return dW;
+#else   // naive C++ path
+/* // disabled because Im2col cannot be imported here
+  Tensor dW;
+  dW.ResetLike(W);
+  dW.SetValue(0.0f);
+
+  Shape w_shape = W.shape();
+  dW.Reshape(Shape{ch.num_filters, ch.col_height});
+
+  Tensor col_data(Shape{ch.col_height, ch.col_width});  // broadcasted image
+
+  float *data_col = new float[ch.col_height * ch.col_width];
+  auto in_data = dy.data<float>();
+  for (size_t num = 0; num < ch.batchsize; num++) {
+    Im2col(in_data + num * ch.imagesize, ch.channels, ch.height, ch.width,
+           ch.kernel_h, ch.kernel_w, ch.pad_h, ch.pad_w, ch.stride_h,
+           ch.stride_w, data_col);
+    col_data.CopyDataFromHostPtr(data_col, ch.col_height * ch.col_width);
+    Tensor grad_b(Shape{ch.num_filters, ch.conv_height * ch.conv_width});
+    CopyDataToFrom(&grad_b, dy, grad_b.Size(), 0, num * grad_b.Size());
+    dW += Mult(grad_b, Transpose(col_data));
+  }
+  dW.Reshape(w_shape);
+  return dW;
+*/
+  LOG(FATAL) << "CpuConvBackwardW requires DNNL; rebuild with USE_DNNL";
+  return Tensor();  // unreachable
+#endif  // USE_DNNL
+}
+
+Tensor CpuConvBackwardb(const Tensor &dy, const Tensor &b,
+                        const ConvHandle &ch) {
+  CHECK_EQ(dy.device()->lang(), kCpp);
+  CHECK_EQ(b.device()->lang(), kCpp);
+
+  CHECK(dy.shape(1) == ch.num_filters && dy.shape(2) == ch.conv_height &&
+        dy.shape(3) == ch.conv_width)
+      << "input gradients shape should not change";
+
+  CHECK(b.shape(0) == ch.num_filters) << "bias shape should not change";
+
+#ifdef USE_DNNL
+  Tensor db = ch.db->Clone();
+  return db;
+#else   // Native cpp
+  Tensor db;
+  db.ResetLike(b);
+
+  auto tmpshp = Shape{ch.batchsize * ch.num_filters,
+                      dy.Size() / (ch.batchsize * ch.num_filters)};
+  Tensor tmp1 = Reshape(dy, tmpshp);
+
+  Tensor tmp2(Shape{ch.batchsize * ch.num_filters});
+  SumColumns(tmp1, &tmp2);
+  Tensor tmp3 = Reshape(tmp2, Shape{ch.batchsize, ch.num_filters});
+
+  SumRows(tmp3, &db);
+
+  return db;
+#endif  // USE_DNNL
+}
+
+#ifdef USE_CUDNN
+CudnnConvHandle::CudnnConvHandle(
+    const Tensor &input, const std::vector<size_t> &kernel_size,
+    const std::vector<size_t> &stride, const std::vector<size_t> &padding,
+    const size_t in_channels, const size_t out_channels, const bool bias,
+    const size_t groups, const size_t workspace_byte_limit,
+    const std::string &prefer_)
+    : ConvHandle(input, kernel_size, stride, padding, in_channels, out_channels,
+                 bias, groups) {
+  std::string prefer = prefer_;
+  if (const char *env_p = std::getenv("CUDNN_CONV_ALG")) {
+    prefer = std::string(env_p);
+    std::transform(prefer.begin(), prefer.end(), prefer.begin(),
+                   [](unsigned char c) { return std::tolower(c); });
+    LOG(INFO) << "CUDNN_CONV_ALG: " << prefer;
+  }
+  DataType dtype = input.data_type();
+  auto dev = input.device();
+  Context *ctx = dev->context(0);
+  channels_per_filter = channels / groups;
+
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&x_desc));
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&y_desc));
+  if (bias_term) CUDNN_CHECK(cudnnCreateTensorDescriptor(&bias_desc));
+  CUDNN_CHECK(cudnnCreateFilterDescriptor(&filter_desc));
+  CUDNN_CHECK(cudnnCreateConvolutionDescriptor(&conv_desc));
+
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(x_desc, CUDNN_TENSOR_NCHW,
+                                         GetCudnnDataType(dtype), batchsize,
+                                         channels, height, width));
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(y_desc, CUDNN_TENSOR_NCHW,
+                                         GetCudnnDataType(dtype), batchsize,
+                                         num_filters, conv_height, conv_width));
+  if (bias_term)
+    CUDNN_CHECK(cudnnSetTensor4dDescriptor(bias_desc, CUDNN_TENSOR_NCHW,
+                                           GetCudnnDataType(dtype), 1,
+                                           num_filters, 1, 1));
+  CUDNN_CHECK(cudnnSetConvolution2dDescriptor(
+      conv_desc, pad_h, pad_w, stride_h, stride_w, 1, 1, CUDNN_CROSS_CORRELATION
+#if CUDNN_MAJOR >= 7
+      ,
+      GetCudnnDataType(dtype)
+#endif
+          ));
+  if (CUDNN_MAJOR >= 7 && groups > 1) {
+    CUDNN_CHECK(cudnnSetConvolutionGroupCount(conv_desc, groups));
+  } else if (groups > 1) {
+    LOG(FATAL)
+        << "The current version of cuDNN does not support grouped convolution.";
+  }
+
+  CUDNN_CHECK(cudnnSetFilter4dDescriptor(
+      filter_desc, GetCudnnDataType(dtype), CUDNN_TENSOR_NCHW, num_filters,
+      channels / groups, kernel_h, kernel_w));
+  if (prefer == "fastest" || prefer == "limited_workspace" ||
+      prefer == "no_workspace") {
+    cudnnConvolutionFwdPreference_t fwd_pref;
+    cudnnConvolutionBwdFilterPreference_t bwd_filt_pref;
+    cudnnConvolutionBwdDataPreference_t bwd_data_pref;
+    if (prefer == "fastest") {
+      fwd_pref = CUDNN_CONVOLUTION_FWD_PREFER_FASTEST;
+      bwd_filt_pref = CUDNN_CONVOLUTION_BWD_FILTER_PREFER_FASTEST;
+      bwd_data_pref = CUDNN_CONVOLUTION_BWD_DATA_PREFER_FASTEST;
+    } else if (prefer == "limited_workspace") {
+      fwd_pref = CUDNN_CONVOLUTION_FWD_SPECIFY_WORKSPACE_LIMIT;
+      bwd_filt_pref = CUDNN_CONVOLUTION_BWD_FILTER_SPECIFY_WORKSPACE_LIMIT;
+      bwd_data_pref = CUDNN_CONVOLUTION_BWD_DATA_SPECIFY_WORKSPACE_LIMIT;
+    } else {
+      fwd_pref = CUDNN_CONVOLUTION_FWD_NO_WORKSPACE;
+      bwd_filt_pref = CUDNN_CONVOLUTION_BWD_FILTER_NO_WORKSPACE;
+      bwd_data_pref = CUDNN_CONVOLUTION_BWD_DATA_NO_WORKSPACE;
+    }
+    CUDNN_CHECK(cudnnGetConvolutionForwardAlgorithm(
+        ctx->cudnn_handle, x_desc, filter_desc, conv_desc, y_desc, fwd_pref,
+        workspace_byte_limit, &fp_alg));
+    CUDNN_CHECK(cudnnGetConvolutionBackwardFilterAlgorithm(
+        ctx->cudnn_handle, x_desc, y_desc, conv_desc, filter_desc,
+        bwd_filt_pref, workspace_byte_limit, &bp_filter_alg));
+    // cudnnGetConvolutionBackwardDataAlgorithm is deprecated since cuDNN v7
+    CUDNN_CHECK(cudnnGetConvolutionBackwardDataAlgorithm(
+        ctx->cudnn_handle, filter_desc, y_desc, conv_desc, x_desc,
+        bwd_data_pref, workspace_byte_limit, &bp_data_alg));
+  } else if (prefer == "autotune") {
+    const int topk = 1;
+    int num_fp_alg, num_bp_filt_alg, num_bp_data_alg;
+    cudnnConvolutionFwdAlgoPerf_t fp_algperf[topk];
+    cudnnConvolutionBwdFilterAlgoPerf_t bp_filt_perf[topk];
+    cudnnConvolutionBwdDataAlgoPerf_t bp_data_perf[topk];
+    CUDNN_CHECK(cudnnFindConvolutionForwardAlgorithm(
+        ctx->cudnn_handle, x_desc, filter_desc, conv_desc, y_desc, topk,
+        &num_fp_alg, fp_algperf));
+    fp_alg = fp_algperf[0].algo;
+    CUDNN_CHECK(cudnnFindConvolutionBackwardFilterAlgorithm(
+        ctx->cudnn_handle, x_desc, y_desc, conv_desc, filter_desc, topk,
+        &num_bp_filt_alg, bp_filt_perf));
+    bp_filter_alg = bp_filt_perf[0].algo;
+    CUDNN_CHECK(cudnnFindConvolutionBackwardDataAlgorithm(
+        ctx->cudnn_handle, filter_desc, y_desc, conv_desc, x_desc, topk,
+        &num_bp_data_alg, bp_data_perf));
+    bp_data_alg = bp_data_perf[0].algo;
+  } else {
+    LOG(FATAL) << "Preferred algorithm is not available :" << prefer;
+  }
+
+  size_t fp_byte, bp_data_byte, bp_filter_byte;
+  CUDNN_CHECK(cudnnGetConvolutionForwardWorkspaceSize(
+      ctx->cudnn_handle, x_desc, filter_desc, conv_desc, y_desc, fp_alg,
+      &fp_byte));
+  CUDNN_CHECK(cudnnGetConvolutionBackwardDataWorkspaceSize(
+      ctx->cudnn_handle, filter_desc, y_desc, conv_desc, x_desc, bp_data_alg,
+      &bp_data_byte));
+  CUDNN_CHECK(cudnnGetConvolutionBackwardFilterWorkspaceSize(
+      ctx->cudnn_handle, x_desc, y_desc, conv_desc, filter_desc, bp_filter_alg,
+      &bp_filter_byte));
+  workspace_count = std::max(std::max(fp_byte, bp_data_byte), bp_filter_byte) /
+                        sizeof(float) +
+                    1;
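+  // editor's note: e.g. a largest workspace of 1,048,576 bytes yields
+  // workspace_count = 1,048,576/sizeof(float) + 1 = 262,145 floats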
+  if (workspace_count * sizeof(float) > workspace_byte_limit)
+    LOG(WARNING) << "The required workspace memory ("
+                 << workspace_count * sizeof(float)
+                 << " bytes) exceeds the given limit ("
+                 << workspace_byte_limit << " bytes)";
+  workspace = Tensor(Shape{workspace_count}, dev, dtype);
+}
+
+CudnnConvHandle::~CudnnConvHandle() {
+  if (bias_desc != nullptr)
+    CUDNN_CHECK(cudnnDestroyTensorDescriptor(bias_desc));
+  if (filter_desc != nullptr)
+    CUDNN_CHECK(cudnnDestroyFilterDescriptor(filter_desc));
+  if (conv_desc != nullptr)
+    CUDNN_CHECK(cudnnDestroyConvolutionDescriptor(conv_desc));
+  if (x_desc != nullptr) CUDNN_CHECK(cudnnDestroyTensorDescriptor(x_desc));
+  if (y_desc != nullptr) CUDNN_CHECK(cudnnDestroyTensorDescriptor(y_desc));
+}
+
+Tensor GpuConvForward(const Tensor &x, const Tensor &W, const Tensor &b,
+                      const CudnnConvHandle &cch) {
+  CHECK_EQ(x.device()->lang(), kCuda);
+  CHECK(x.shape(1) == cch.channels && x.shape(2) == cch.height &&
+        x.shape(3) == cch.width)
+      << "input sample shape should not change";
+
+  CHECK(W.shape(0) == cch.num_filters &&
+        W.shape(1) == cch.channels_per_filter && W.shape(2) == cch.kernel_h &&
+        W.shape(3) == cch.kernel_w)
+      << "weights shape should not change";
+
+  DataType dtype = x.data_type();
+  auto dev = x.device();
+
+  Shape shape{cch.batchsize, cch.num_filters, cch.conv_height, cch.conv_width};
+  Tensor output(shape, dev, dtype);
+
+  output.device()->Exec(
+      [output, x, &W, &cch](Context *ctx) mutable {
+        Block *inblock = x.block(), *outblock = output.block(),
+              *wblock = W.block();
+        float alpha = 1.f, beta = 0.f;
+        cudnnConvolutionForward(ctx->cudnn_handle, &alpha, cch.x_desc,
+                                inblock->data(), cch.filter_desc,
+                                wblock->data(), cch.conv_desc, cch.fp_alg,
+                                cch.workspace.block()->mutable_data(),
+                                cch.workspace_count * sizeof(float), &beta,
+                                cch.y_desc, outblock->mutable_data());
+      },
+      {x.block(), W.block()}, {output.block(), cch.workspace.block()},
+      "cudnnConvForward");
+
+  if (cch.bias_term) {
+    Tensor outputFake(output);
+    output.device()->Exec(
+        [output, outputFake, &b, &cch](Context *ctx) mutable {
+          float beta = 1.f, alpha = 1.0f;
+          Block *outblock = output.block(), *bblock = b.block();
+          cudnnAddTensor(ctx->cudnn_handle, &alpha, cch.bias_desc,
+                         bblock->data(), &beta, cch.y_desc,
+                         outblock->mutable_data());
+        },
+        {output.block(), b.block()}, {output.block()}, "cudnnAddTensor");
+  }
+
+  return output;
+}
+
+Tensor GpuConvBackwardx(const Tensor &dy, const Tensor &W, const Tensor &x,
+                        const CudnnConvHandle &cch) {
+  CHECK_EQ(dy.device()->lang(), kCuda);
+
+  Tensor dx;
+  dx.ResetLike(x);
+
+  dy.device()->Exec(
+      [dx, dy, &W, &cch](Context *ctx) mutable {
+        Block *wblock = W.block(), *dyblock = dy.block(), *dxblock = dx.block();
+        float alpha = 1.f, beta = 0.f;
+        cudnnConvolutionBackwardData(
+            ctx->cudnn_handle, &alpha, cch.filter_desc, wblock->data(),
+            cch.y_desc, dyblock->data(), cch.conv_desc, cch.bp_data_alg,
+            cch.workspace.block()->mutable_data(),
+            cch.workspace_count * sizeof(float), &beta, cch.x_desc,
+            dxblock->mutable_data());
+      },
+      {dy.block(), W.block()}, {dx.block(), cch.workspace.block()},
+      "cudnnConvolutionBackwardData");
+
+  return dx;
+}
+
+Tensor GpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W,
+                        const CudnnConvHandle &cch) {
+  CHECK_EQ(dy.device()->lang(), kCuda);
+
+  Tensor dW;
+  dW.ResetLike(W);
+
+  dy.device()->Exec(
+      [dW, dy, x, &cch](Context *ctx) {
+        Block *inblock = x.block(), *dyblock = dy.block(),
+              *dwblock = dW.block();
+        float alpha = 1.f, beta = 0.f;
+        cudnnConvolutionBackwardFilter(
+            ctx->cudnn_handle, &alpha, cch.x_desc, inblock->data(), cch.y_desc,
+            dyblock->data(), cch.conv_desc, cch.bp_filter_alg,
+            cch.workspace.block()->mutable_data(),
+            cch.workspace_count * sizeof(float), &beta, cch.filter_desc,
+            dwblock->mutable_data());
+      },
+      {dy.block(), x.block()}, {dW.block(), cch.workspace.block()},
+      "cudnnConvolutionBackwardFilter");
+
+  return dW;
+}
+
+// Tensor b is passed only so db can be sized/reset from it; this dependency
+// can be removed later.
+Tensor GpuConvBackwardb(const Tensor &dy, const Tensor &b,
+                        const CudnnConvHandle &cch) {
+  CHECK_EQ(dy.device()->lang(), kCuda);
+
+  Tensor db;
+  db.ResetLike(b);
+
+  dy.device()->Exec(
+      [dy, db, &cch](Context *ctx) mutable {
+        Block *dyblock = dy.block(), *dbblock = db.block();
+        float alpha = 1.f, beta = 0.f;
+        cudnnConvolutionBackwardBias(ctx->cudnn_handle, &alpha, cch.y_desc,
+                                     dyblock->data(), &beta, cch.bias_desc,
+                                     dbblock->mutable_data());
+      },
+      {dy.block()}, {db.block()}, "cudnnConvolutionBackwardBias");
+
+  return db;
+}
+#endif  // USE_CUDNN
+
+}  // namespace singa
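Editor's note: a hedged sketch (not part of the patch; the helper name is
hypothetical) of how the cuDNN path above fits together for one
forward/backward step. It assumes a 4-D NCHW input on a kCuda device; the 3x3
kernel, unit stride, and unit padding are illustrative, and dy is a stand-in
for a real upstream gradient.

#ifdef USE_CUDNN
void ConvStepExample(const Tensor &x, const Tensor &W, const Tensor &b) {
  CudnnConvHandle cch(x, {3, 3}, {1, 1}, {1, 1},
                      /*in_channels=*/x.shape(1),
                      /*out_channels=*/W.shape(0), /*bias=*/true);
  Tensor y = GpuConvForward(x, W, b, cch);
  Tensor dy = y;                               // illustrative gradient
  Tensor dx = GpuConvBackwardx(dy, W, x, cch);
  Tensor dW = GpuConvBackwardW(dy, x, W, cch);
  Tensor db = GpuConvBackwardb(dy, b, cch);
}
#endif  // USE_CUDNN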
diff --git a/src/model/operation/convolution.h b/src/model/operation/convolution.h
new file mode 100644
index 0000000..5ef6c03
--- /dev/null
+++ b/src/model/operation/convolution.h
@@ -0,0 +1,145 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+#ifndef SINGA_MODEL_OPERATION_CONVOLUTION_H_
+#define SINGA_MODEL_OPERATION_CONVOLUTION_H_
+
+#include <string>
+#include <vector>
+
+#include "singa/core/tensor.h"
+#include "singa/singa_config.h"
+#include "singa/utils/logging.h"
+
+#ifdef USE_CUDNN
+#include <cudnn.h>
+
+#include "../layer/cudnn_utils.h"
+#endif  // USE_CUDNN
+
+#ifdef USE_DNNL
+#include <singa/utils/dnnl_utils.h>
+#endif  // USE_DNNL
+
+namespace singa {
+
+class ConvHandle {
+ public:
+  ConvHandle(const Tensor &input, const std::vector<size_t> &kernel_size,
+             const std::vector<size_t> &stride,
+             const std::vector<size_t> &padding, const size_t in_channels,
+             const size_t out_channels, const bool bias,
+             const size_t groups = 1);
+
+  ~ConvHandle();
+
+  size_t kernel_w;
+  size_t pad_w;
+  size_t stride_w;
+  size_t kernel_h;
+  size_t pad_h;
+  size_t stride_h;
+
+  size_t channels;
+  size_t num_filters;
+  size_t group;
+
+  bool bias_term;
+
+  size_t height;
+  size_t width;
+  size_t conv_height;
+  size_t conv_width;
+  size_t batchsize;
+
+  size_t col_height;
+  size_t col_width;
+  size_t imagesize;
+
+  bool use_dnnl =
+      false;  // distinguishes the active backend when both USE_CUDNN and
+              // USE_DNNL are enabled
+
+#ifdef USE_DNNL
+  dnnl::memory::data_type dtype;
+  dnnl::memory::dims b_dims;
+  dnnl::memory::dims s_dims;
+  dnnl::memory::dims p_dims;
+  dnnl::memory::dims x_dims;
+  dnnl::memory::dims o_dims;
+  dnnl::memory::dims w_dims;
+
+  Tensor *db;
+#endif  // USE_DNNL
+};
+
+Tensor CpuConvForward(const Tensor &x, Tensor &W, Tensor &b,
+                      const ConvHandle &ch);
+
+Tensor CpuConvBackwardx(const Tensor &dy, Tensor &W, const Tensor &x,
+                        const ConvHandle &ch);
+
+Tensor CpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W,
+                        const ConvHandle &ch);
+
+Tensor CpuConvBackwardb(const Tensor &dy, const Tensor &b,
+                        const ConvHandle &ch);
+
+#ifdef USE_CUDNN
+class CudnnConvHandle : public ConvHandle {
+ public:
+  CudnnConvHandle(const Tensor &input, const std::vector<size_t> &kernel_size,
+                  const std::vector<size_t> &stride,
+                  const std::vector<size_t> &padding, const size_t in_channels,
+                  const size_t out_channels, const bool bias,
+                  const size_t groups = 1,
+                  const size_t workspace_byte_limit = 1024 * 1024 * 1024,
+                  const std::string &prefer = "fastest");
+  ~CudnnConvHandle();
+
+  cudnnTensorDescriptor_t x_desc = nullptr;
+  cudnnTensorDescriptor_t y_desc = nullptr;
+  cudnnTensorDescriptor_t bias_desc = nullptr;
+  cudnnFilterDescriptor_t filter_desc = nullptr;
+  cudnnConvolutionDescriptor_t conv_desc = nullptr;
+  cudnnConvolutionFwdAlgo_t fp_alg;
+  cudnnConvolutionBwdFilterAlgo_t bp_filter_alg;
+  cudnnConvolutionBwdDataAlgo_t bp_data_alg;
+
+  size_t workspace_count;
+  Tensor workspace;
+  size_t channels_per_filter;
+};
+
+Tensor GpuConvForward(const Tensor &x, const Tensor &W, const Tensor &b,
+                      const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardx(const Tensor &dy, const Tensor &W, const Tensor &x,
+                        const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardW(const Tensor &dy, const Tensor &x, const Tensor &W,
+                        const CudnnConvHandle &cch);
+
+Tensor GpuConvBackwardb(const Tensor &dy, const Tensor &b,
+                        const CudnnConvHandle &cch);
+#endif  // USE_CUDNN
+
+}  // namespace singa
+#endif  // SINGA_MODEL_OPERATION_CONVOLUTION_H_
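Editor's note: the matching CPU (DNNL) sketch under the same assumptions as
the cuDNN example above (helper name hypothetical, not part of the patch).
W and b are non-const because the naive fallback reshapes them in place, and
CpuConvBackwardb reads the db that CpuConvBackwardW caches in the handle, so
the call order matters.

#ifdef USE_DNNL
void CpuConvStepExample(const Tensor &x, Tensor &W, Tensor &b) {
  ConvHandle ch(x, {3, 3}, {1, 1}, {1, 1}, x.shape(1), W.shape(0),
                /*bias=*/true);
  Tensor y = CpuConvForward(x, W, b, ch);
  Tensor dy = y;                               // illustrative gradient
  Tensor dx = CpuConvBackwardx(dy, W, x, ch);
  Tensor dW = CpuConvBackwardW(dy, x, W, ch);  // also fills ch.db
  Tensor db = CpuConvBackwardb(dy, b, ch);
}
#endif  // USE_DNNL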
diff --git a/src/model/operation/pooling.cc b/src/model/operation/pooling.cc
new file mode 100644
index 0000000..b07ad4b
--- /dev/null
+++ b/src/model/operation/pooling.cc
@@ -0,0 +1,229 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+#include "pooling.h"
+
+#include <cmath>
+
+namespace singa {
+
+PoolingHandle::PoolingHandle(const Tensor &input,
+                             const std::vector<int> &kernel_size,
+                             const std::vector<int> &stride,
+                             const std::vector<int> &padding,
+                             const bool is_max) {
+  kernel_h = kernel_size[0];
+  kernel_w = kernel_size[1];
+
+  pad_h = padding[0];
+  pad_w = padding[1];
+
+  stride_h = stride[0];
+  stride_w = stride[1];
+
+  batchsize = input.shape(0);
+  channels = input.shape(1);
+  height = input.shape(2);
+  width = input.shape(3);
+
+  pooled_height = 1;
+
+  if (stride_h > 0)
+    pooled_height =
+        std::floor(((height + 2 * pad_h - kernel_h) / stride_h)) + 1;
+  pooled_width = std::floor(((width + 2 * pad_w - kernel_w) / stride_w)) + 1;
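+  // worked example (editor's note): width = 32, pad_w = 0, kernel_w = 2,
+  // stride_w = 2 gives pooled_width = floor((32 + 0 - 2)/2) + 1 = 16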
+  is_max_pooling = is_max;
+
+#ifdef USE_DNNL
+  if (input.device()->lang() == kCpp) {
+    auto x_dims =
+        dnnl::memory::dims(input.shape().begin(), input.shape().end());
+    auto y_dims =
+        dnnl::memory::dims({batchsize, channels, pooled_height, pooled_width});
+    auto s_dims = dnnl::memory::dims(stride.begin(), stride.end());
+    auto k_dims = dnnl::memory::dims(kernel_size.begin(), kernel_size.end());
+
+    auto p_dims = dnnl::memory::dims(padding.begin(), padding.end());
+
+    auto dtype_ = dnnl::memory::data_type::f32;
+    auto format_tag_ = get_dnnl_format_tag(input);
+    x_md = dnnl::memory::desc({x_dims}, dtype_, format_tag_);
+    y_md = dnnl::memory::desc({y_dims}, dtype_, format_tag_);
+
+    // support max or average pooling (following the cudnn convention)
+    auto pooling_algo = dnnl::algorithm::pooling_avg_exclude_padding;
+    if (is_max_pooling) pooling_algo = dnnl::algorithm::pooling_max;
+
+    auto pool_fwd_d = dnnl::pooling_forward::desc(
+        dnnl::prop_kind::forward_training, pooling_algo, x_md, y_md, s_dims,
+        k_dims, p_dims, p_dims);
+    auto pool_bwd_d = dnnl::pooling_backward::desc(
+        pooling_algo, x_md, y_md, s_dims, k_dims, p_dims, p_dims);
+
+    auto eng = input.device()->context(0)->dnnl_engine;
+    pool_fwd_pd = dnnl::pooling_forward::primitive_desc(pool_fwd_d, eng);
+    pool_bwd_pd =
+        dnnl::pooling_backward::primitive_desc(pool_bwd_d, eng, pool_fwd_pd);
+
+    auto ws_md = pool_fwd_pd.workspace_desc();
+    ws_mem = dnnl::memory(ws_md, eng);
+  }
+#endif  // USE_DNNL
+}
+
+PoolingHandle::~PoolingHandle() {}
+
+#ifdef USE_DNNL
+
+Tensor CpuPoolingForward(const PoolingHandle &ph, const Tensor &x) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+  Tensor y({(unsigned long)ph.batchsize, (unsigned long)ph.channels,
+            (unsigned long)ph.pooled_height, (unsigned long)ph.pooled_width},
+           x.device(), x.data_type());
+
+  y.device()->Exec(
+      [y, x, &ph](Context *ctx) mutable {
+        auto eng = ctx->dnnl_engine;
+        using namespace dnnl;
+
+        memory x_mem(ph.x_md, eng, x.block()->mutable_data());
+        memory y_mem(ph.y_md, eng, y.block()->mutable_data());
+
+        pooling_forward(ph.pool_fwd_pd)
+            .execute(ctx->dnnl_stream, {{DNNL_ARG_SRC, x_mem},
+                                        {DNNL_ARG_DST, y_mem},
+                                        {DNNL_ARG_WORKSPACE, ph.ws_mem}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block()}, {y.block()}, "CpuPoolingForward");
+
+  return y;
+}
+
+Tensor CpuPoolingBackward(const PoolingHandle &ph, const Tensor &grad,
+                          const Tensor &x, const Tensor &y) {
+  CHECK_EQ(x.device()->lang(), kCpp);
+  CHECK_EQ(grad.device()->lang(), kCpp);
+  CHECK_EQ(y.device()->lang(), kCpp);
+  Tensor in_grad;
+  in_grad.ResetLike(x);
+
+  in_grad.device()->Exec(
+      [x, y, in_grad, grad, &ph](Context *ctx) mutable {
+        auto eng = ctx->dnnl_engine;
+        using namespace dnnl;
+
+        memory dx_mem(ph.x_md, eng, in_grad.block()->mutable_data());
+        memory dy_mem(ph.y_md, eng, grad.block()->mutable_data());
+
+        pooling_backward(ph.pool_bwd_pd)
+            .execute(ctx->dnnl_stream, {{DNNL_ARG_DIFF_DST, dy_mem},
+                                        {DNNL_ARG_DIFF_SRC, dx_mem},
+                                        {DNNL_ARG_WORKSPACE, ph.ws_mem}});
+        ctx->dnnl_stream.wait();
+      },
+      {x.block(), y.block(), grad.block()}, {in_grad.block()},
+      "CpuPoolingBackward");
+
+  return in_grad;
+}
+#endif  // USE_DNNL
+
+#ifdef USE_CUDNN
+
+CudnnPoolingHandle::CudnnPoolingHandle(const Tensor &input,
+                                       const std::vector<int> &kernel_size,
+                                       const std::vector<int> &stride,
+                                       const std::vector<int> &padding,
+                                       const bool is_max)
+    : PoolingHandle(input, kernel_size, stride, padding, is_max) {
+  // nan_prop = CUDNN_NOT_PROPAGATE_NAN;
+
+  DataType dtype = input.data_type();
+
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&x_desc));
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&y_desc));
+  CUDNN_CHECK(cudnnCreatePoolingDescriptor(&pool_desc));
+
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(x_desc, CUDNN_TENSOR_NCHW,
+                                         GetCudnnDataType(dtype), batchsize,
+                                         channels, height, width));
+  // LOG(ERROR) << batchsize << " " << channels << " " << pooled_height << " "
+  // << pooled_width;
+  CUDNN_CHECK(cudnnSetTensor4dDescriptor(
+      y_desc, CUDNN_TENSOR_NCHW, GetCudnnDataType(dtype), batchsize, channels,
+      pooled_height, pooled_width));
+  auto pool_method = CUDNN_POOLING_AVERAGE_COUNT_EXCLUDE_PADDING;
+  if (is_max) pool_method = CUDNN_POOLING_MAX;
+
+  CUDNN_CHECK(cudnnSetPooling2dDescriptor(pool_desc, pool_method, nan_prop,
+                                          kernel_h, kernel_w, pad_h, pad_w,
+                                          stride_h, stride_w));
+}
+
+CudnnPoolingHandle::~CudnnPoolingHandle() {
+  if (pool_desc != nullptr)
+    CUDNN_CHECK(cudnnDestroyPoolingDescriptor(pool_desc));
+  if (x_desc != nullptr) CUDNN_CHECK(cudnnDestroyTensorDescriptor(x_desc));
+  if (y_desc != nullptr) CUDNN_CHECK(cudnnDestroyTensorDescriptor(y_desc));
+}
+
+Tensor GpuPoolingForward(const CudnnPoolingHandle &cph, const Tensor &x) {
+  CHECK_EQ(x.device()->lang(), kCuda);
+  CHECK_EQ(x.nDim(), 4u);
+
+  Tensor output = Tensor(
+      Shape({cph.batchsize, cph.channels, cph.pooled_height, cph.pooled_width}),
+      x.device(), x.data_type());
+
+  output.device()->Exec(
+      [output, x, &cph](Context *ctx) mutable {
+        float alpha = 1.0f, beta = 0.0f;
+        cudnnPoolingForward(ctx->cudnn_handle, cph.pool_desc, &alpha,
+                            cph.x_desc, x.block()->data(), &beta, cph.y_desc,
+                            output.block()->mutable_data());
+      },
+      {x.block()}, {output.block()}, "GpuPoolingForward");
+
+  return output;
+}
+
+Tensor GpuPoolingBackward(const CudnnPoolingHandle &cph, const Tensor &dy,
+                          const Tensor &x, const Tensor &y) {
+  CHECK_EQ(dy.device()->lang(), kCuda);
+  CHECK_EQ(dy.nDim(), 4u);
+
+  Tensor dx;
+  dx.ResetLike(x);
+
+  dx.device()->Exec(
+      [dx, dy, x, y, &cph](Context *ctx) mutable {
+        float alpha = 1.0f, beta = 0.0f;
+        cudnnPoolingBackward(ctx->cudnn_handle, cph.pool_desc, &alpha,
+                             cph.y_desc, y.block()->data(), cph.y_desc,
+                             dy.block()->data(), cph.x_desc, x.block()->data(),
+                             &beta, cph.x_desc, dx.block()->mutable_data());
+      },
+      {dy.block(), y.block(), x.block()}, {dx.block()}, "GpuPoolingBackward");
+
+  return dx;
+}
+#endif  // USE_CUDNN
+
+}  // namespace singa
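Editor's note: a hedged usage sketch (not part of the patch; helper name
hypothetical) of the DNNL pooling path above, assuming a 4-D NCHW input on a
kCpp device; the 2x2 window with stride 2 is illustrative, and dy stands in
for a real upstream gradient.

#ifdef USE_DNNL
Tensor MaxPoolExample(const Tensor &x, const Tensor &dy) {
  PoolingHandle ph(x, {2, 2}, {2, 2}, {0, 0}, /*is_max=*/true);
  Tensor y = CpuPoolingForward(ph, x);
  return CpuPoolingBackward(ph, dy, x, y);  // gradient w.r.t. x
}
#endif  // USE_DNNL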
diff --git a/src/model/operation/pooling.h b/src/model/operation/pooling.h
new file mode 100644
index 0000000..86a4852
--- /dev/null
+++ b/src/model/operation/pooling.h
@@ -0,0 +1,102 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+#ifndef SINGA_MODEL_OPERATION_POOLING_H_
+#define SINGA_MODEL_OPERATION_POOLING_H_
+
+#include <string>
+
+#include "singa/core/tensor.h"
+
+#ifdef USE_CUDNN
+#include <cudnn.h>
+
+#include "../layer/cudnn_utils.h"
+#endif
+
+#ifdef USE_DNNL
+#include <singa/utils/dnnl_utils.h>
+#endif  // USE_DNNL
+
+namespace singa {
+
+class PoolingHandle {
+ public:
+  PoolingHandle(const Tensor &input, const std::vector<int> &kernel_size,
+                const std::vector<int> &stride, const std::vector<int> &padding,
+                const bool is_max = true);
+  ~PoolingHandle();
+
+  int kernel_w;
+  int pad_w;
+  int stride_w;
+  int kernel_h;
+  int pad_h;
+  int stride_h;
+
+  int batchsize;
+  int channels;
+  int height;
+  int width;
+
+  int pooled_height;
+  int pooled_width;
+
+  bool is_max_pooling;
+
+#ifdef USE_DNNL
+  dnnl::memory::desc x_md;
+  dnnl::memory::desc y_md;
+  dnnl::memory ws_mem;
+  dnnl::pooling_forward::primitive_desc pool_fwd_pd;
+  dnnl::pooling_backward::primitive_desc pool_bwd_pd;
+#endif  // USE_DNNL
+};
+
+#ifdef USE_DNNL
+Tensor CpuPoolingForward(const PoolingHandle &ph, const Tensor &x);
+Tensor CpuPoolingBackward(const PoolingHandle &ph, const Tensor &dy,
+                          const Tensor &x, const Tensor &y);
+#endif  // USE_DNNL
+
+#ifdef USE_CUDNN
+class CudnnPoolingHandle : public PoolingHandle {
+ public:
+  CudnnPoolingHandle(const Tensor &input, const std::vector<int> &kernel_size,
+                     const std::vector<int> &stride,
+                     const std::vector<int> &padding, const bool is_max = true);
+  ~CudnnPoolingHandle();
+
+  cudnnTensorDescriptor_t x_desc = nullptr;
+  cudnnTensorDescriptor_t y_desc = nullptr;
+  cudnnPoolingDescriptor_t pool_desc = nullptr;
+  cudnnNanPropagation_t nan_prop = CUDNN_PROPAGATE_NAN;
+};
+
+Tensor GpuPoolingForward(const CudnnPoolingHandle &cph, const Tensor &x);
+
+Tensor GpuPoolingBackward(const CudnnPoolingHandle &cph, const Tensor &dy,
+                          const Tensor &x, const Tensor &y);
+
+#endif  // USE_CUDNN
+
+}  // namespace singa
+
+#endif  // SINGA_MODEL_OPERATION_POOLING_H_
diff --git a/src/model/operation/rnn.cc b/src/model/operation/rnn.cc
new file mode 100644
index 0000000..bc8edfd
--- /dev/null
+++ b/src/model/operation/rnn.cc
@@ -0,0 +1,808 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+
+#include "rnn.h"
+
+#include <map>
+namespace singa {
+#ifdef USE_CUDNN
+CudnnRNNHandle::CudnnRNNHandle(const Tensor &x, const int hidden_size,
+                               const int mode, const int num_layers,
+                               const int bias, const float dropout,
+                               const int bidirectional)
+    : bias(bias),
+      dropout(dropout),
+      bidirectional(bidirectional),
+      hidden_size(hidden_size),
+      mode(mode),
+      num_layers(num_layers) {
+  // the cudnn rnn bias flag is not available in cudnn v7.4.5 (not in cudnn.h)
+  CHECK_EQ(bias, 1) << "The current implementation always includes bias";
+  CHECK(bidirectional == 0 || bidirectional == 1)
+      << "bidirectional should be 0 or 1 not " << bidirectional;
+
+  dev = x.device();
+  ctx = x.device()->context(0);
+
+  // TODO: batch-first mode currently fails with cudnn
+  batch_first = 0;
+
+  // x shape {seq, bs, ..}
+  seq_length = x.shape(0);
+  batch_size = x.shape(1);
+  feature_size = x.shape(2);
+
+  cudnnRNNAlgo = CUDNN_RNN_ALGO_STANDARD;
+  cudnnDataType = CUDNN_DATA_FLOAT;
+
+  cudnnTensorDescriptor_t *xDesc = new cudnnTensorDescriptor_t[seq_length];
+  init_xDesc(xDesc, *this);
+
+  init_dropout_desc();
+  init_rnn_desc();
+  init_parameters_desc(xDesc);
+  init_workspace(xDesc);
+  init_param_mapping(xDesc);
+  delete[] xDesc;
+}
+
+void CudnnRNNHandle::init_workspace(cudnnTensorDescriptor_t *xDesc) {
+  /* workspace data */
+  // Need for every pass
+  CUDNN_CHECK(cudnnGetRNNWorkspaceSize(ctx->cudnn_handle, rnnDesc, seq_length,
+                                       xDesc, &workspace_size_bytes));
+  // Only needed in training, shouldn't be touched between passes.
+  CUDNN_CHECK(cudnnGetRNNTrainingReserveSize(
+      ctx->cudnn_handle, rnnDesc, seq_length, xDesc, &reserve_size_bytes));
+
+  workspace_size = workspace_size_bytes / sizeof(float);
+  reserve_size = reserve_size_bytes / sizeof(float);
+  workspace = Tensor(Shape{workspace_size}, dev);
+  reserve_space = Tensor(Shape{reserve_size}, dev);
+}
+
+void CudnnRNNHandle::init_parameters_desc(cudnnTensorDescriptor_t *xDesc) {
+  /* weights size
+   *   depends on rnn desc */
+  CUDNN_CHECK(cudnnGetRNNParamsSize(ctx->cudnn_handle, rnnDesc, xDesc[0],
+                                    &weights_size_bytes, cudnnDataType));
+  /* weights desc
+   *   depends on weights size */
+  CUDNN_CHECK(cudnnCreateFilterDescriptor(&wDesc));
+  CUDNN_CHECK(cudnnCreateFilterDescriptor(&dwDesc));
+
+  weights_size =
+      weights_size_bytes / sizeof(float);  // TODO: support non-float types
+  int dimW[3];
+  dimW[0] = weights_size;
+  dimW[1] = 1;
+  dimW[2] = 1;
+  CUDNN_CHECK(cudnnSetFilterNdDescriptor(wDesc, cudnnDataType,
+                                         CUDNN_TENSOR_NCHW, 3, dimW));
+  CUDNN_CHECK(cudnnSetFilterNdDescriptor(dwDesc, cudnnDataType,
+                                         CUDNN_TENSOR_NCHW, 3, dimW));
+}
+
+void CudnnRNNHandle::init_rnn_desc() {
+  /* rnn desc */
+  CUDNN_CHECK(cudnnCreateRNNDescriptor(&rnnDesc));
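+  // mode encoding (editor's note, read off the chain below):
+  //   0 = vanilla RNN with ReLU, 1 = vanilla RNN with tanh, 2 = LSTM, 3 = GRU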
+  if (mode == 0)
+    RNNMode = CUDNN_RNN_RELU;
+  else if (mode == 1)
+    RNNMode = CUDNN_RNN_TANH;
+  else if (mode == 2)
+    RNNMode = CUDNN_LSTM;
+  else if (mode == 3)
+    RNNMode = CUDNN_GRU;
+  else
+    LOG(FATAL) << "invalid RNN mode: " << mode;
+  CUDNN_CHECK(cudnnSetRNNDescriptor(
+      ctx->cudnn_handle, rnnDesc, hidden_size, num_layers, dropoutDesc,
+      CUDNN_LINEAR_INPUT,
+      bidirectional ? CUDNN_BIDIRECTIONAL : CUDNN_UNIDIRECTIONAL, RNNMode,
+      cudnnRNNAlgo,  // CUDNN_RNN_ALGO_STANDARD,
+      cudnnDataType));
+}
+void CudnnRNNHandle::init_dropout_desc() {
+  /* drop out */
+  size_t seed = 0x1234567;
+  CUDNN_CHECK(cudnnCreateDropoutDescriptor(&dropoutDesc));
+  size_t stateSize;
+  CUDNN_CHECK(cudnnDropoutGetStatesSize(ctx->cudnn_handle, &stateSize));
+  CUDA_CHECK(cudaMalloc(&states, stateSize));
+  CUDNN_CHECK(cudnnSetDropoutDescriptor(dropoutDesc, ctx->cudnn_handle, dropout,
+                                        states, stateSize, seed));
+}
+
+void init_yDesc(cudnnTensorDescriptor_t *yDesc, CudnnRNNHandle &h) {
+  int dimA[] = {h.batch_size,
+                h.bidirectional ? h.hidden_size * 2 : h.hidden_size, 1};
+  int strideA[] = {dimA[1] * dimA[2], dimA[2], 1};
+  for (int i = 0; i < h.seq_length; i++) {
+    CUDNN_CHECK(cudnnCreateTensorDescriptor(&yDesc[i]));
+    CUDNN_CHECK(cudnnSetTensorNdDescriptor(yDesc[i], h.cudnnDataType, 3, dimA,
+                                           strideA));
+  }
+}
+
+void init_xDesc(cudnnTensorDescriptor_t *xDesc, CudnnRNNHandle &h) {
+  int dimA[] = {h.batch_size, h.feature_size, 1};
+  int strideA[] = {dimA[1] * dimA[2], dimA[2], 1};
+  for (int i = 0; i < h.seq_length; i++) {
+    CUDNN_CHECK(cudnnCreateTensorDescriptor(&xDesc[i]));
+    CUDNN_CHECK(cudnnSetTensorNdDescriptor(xDesc[i], h.cudnnDataType, 3, dimA,
+                                           strideA));
+  }
+}
+
+void init_hc_Desc(cudnnTensorDescriptor_t &hxDesc, CudnnRNNHandle &h) {
+  /* If direction is CUDNN_BIDIRECTIONAL then the first dimension should match
+  double the numLayers argument passed to cudnnSetRNNDescriptor(). */
+  /* The second dimension must match the batchSize parameter in xDesc */
+  /* the third dimension must match the hiddenSize argument passed to the
+  cudnnSetRNNDescriptor() call used to initialize rnnDesc. */
+  int dimA[] = {h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                h.hidden_size};
+  int strideA[] = {dimA[2] * dimA[1], dimA[2], 1};
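+  // worked example (editor's note): num_layers = 2, bidirectional = 1,
+  // batch_size = 8, hidden_size = 64 gives dimA = {4, 8, 64}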
+  CUDNN_CHECK(cudnnCreateTensorDescriptor(&hxDesc));
+  CUDNN_CHECK(
+      cudnnSetTensorNdDescriptor(hxDesc, h.cudnnDataType, 3, dimA, strideA));
+}
+
+/*
+vector<Tensor> GpuRNNForwardTraining();
+vector<Tensor> GpuRNNForwardInference();
+vector<Tensor> GpuRNNBackwardx();
+Tensor GpuRNNBackwardW();
+*/
+
+vector<Tensor> GpuRNNForwardInference(const Tensor &x, const Tensor &hx,
+                                      const Tensor &cx, const Tensor &W,
+                                      CudnnRNNHandle &h) {
+  CHECK_EQ(h.feature_size, x.shape(2)) << "feature size should not change";
+
+  // in
+  // x in shape {seq, bs, ..}
+  // out
+  // y in shape {seq, bs, ..}
+
+  h.batch_size = x.shape(1);  // update batch size to accommodate bs changes
+  h.seq_length = x.shape(0);
+
+  Tensor y(Shape{h.seq_length, h.batch_size,
+                 h.hidden_size * (h.bidirectional ? 2 : 1)},
+           x.device());
+  Tensor hy(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                  h.hidden_size},
+            x.device());
+  Tensor cy(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                  h.hidden_size},
+            x.device());
+  y.SetValue(0.0f);
+  hy.SetValue(0.0f);
+  cy.SetValue(0.0f);
+  h.workspace.SetValue(0.0f);
+  y.device()->Exec(
+      [y, hy, cy, x, hx, cx, &W, &h](Context *ctx) {
+        // require desc, [x], hx, cx, w, y, hy, cy
+        cudnnTensorDescriptor_t *xDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        cudnnTensorDescriptor_t *yDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        init_xDesc(xDesc, h);
+        init_yDesc(yDesc, h);
+        cudnnTensorDescriptor_t hxDesc;
+        cudnnTensorDescriptor_t cxDesc;
+        cudnnTensorDescriptor_t hyDesc;
+        cudnnTensorDescriptor_t cyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(hyDesc, h);
+        init_hc_Desc(cyDesc, h);
+
+        auto x_con = Contiguous(x);
+
+        auto xptr = x_con.block()->data();
+        auto hxptr = hx.block()->data();
+        auto cxptr = cx.block()->data();
+        auto Wptr = W.block()->data();
+        auto yptr = y.block()->mutable_data();
+        auto hyptr = hy.block()->mutable_data();
+        auto cyptr = cy.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+
+        CUDNN_CHECK(cudnnRNNForwardInference(
+            ctx->cudnn_handle, h.rnnDesc, h.seq_length, xDesc, xptr, hxDesc,
+            hxptr, cxDesc, cxptr, h.wDesc, Wptr, yDesc, yptr, hyDesc, hyptr,
+            cyDesc, cyptr, wsptr, h.workspace_size_bytes));
+
+        delete[] xDesc;
+        delete[] yDesc;
+      },
+      {x.block(), hx.block(), cx.block(), W.block()},
+      {y.block(), hy.block(), cy.block(), h.workspace.block()},
+      "cudnnRNNForwardInterface");
+  return {y, hy, cy};
+}
+
+vector<Tensor> GpuRNNForwardTraining(const Tensor &x, const Tensor &hx,
+                                     const Tensor &cx, const Tensor &W,
+                                     CudnnRNNHandle &h) {
+  CHECK_EQ(h.feature_size, x.shape(2)) << "feature size should not change";
+
+  // in
+  // x in shape {seq, bs, ..}
+  // out
+  // y in shape {seq, bs, ..}
+
+  // update batch size to accommodate bs changes
+  h.batch_size = x.shape(1);
+  h.seq_length = x.shape(0);
+
+  Tensor y(Shape{h.seq_length, h.batch_size,
+                 h.hidden_size * (h.bidirectional ? 2 : 1)},
+           x.device());
+  Tensor hy(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                  h.hidden_size},
+            x.device());
+  Tensor cy(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                  h.hidden_size},
+            x.device());
+  y.SetValue(0.0f);
+  hy.SetValue(0.0f);
+  cy.SetValue(0.0f);
+  h.workspace.SetValue(0.0f);
+  h.reserve_space.SetValue(0.0f);
+
+  y.device()->Exec(
+      [y, hy, cy, x, hx, cx, &W, &h](Context *ctx) {
+        // require desc, [x], hx, cx, w, y, hy, cy
+        cudnnTensorDescriptor_t *xDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        cudnnTensorDescriptor_t *yDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        init_xDesc(xDesc, h);
+        init_yDesc(yDesc, h);
+        cudnnTensorDescriptor_t hxDesc;
+        cudnnTensorDescriptor_t cxDesc;
+        cudnnTensorDescriptor_t hyDesc;
+        cudnnTensorDescriptor_t cyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(hyDesc, h);
+        init_hc_Desc(cyDesc, h);
+
+        auto x_con = Contiguous(x);
+
+        auto xptr = x_con.block()->data();
+        auto hxptr = hx.block()->data();
+        auto cxptr = cx.block()->data();
+        auto Wptr = W.block()->data();
+        auto yptr = y.block()->mutable_data();
+        auto hyptr = hy.block()->mutable_data();
+        auto cyptr = cy.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+        CUDNN_CHECK(cudnnRNNForwardTraining(
+            ctx->cudnn_handle, h.rnnDesc, h.seq_length, xDesc, xptr, hxDesc,
+            hxptr, cxDesc, cxptr, h.wDesc, Wptr, yDesc, yptr, hyDesc, hyptr,
+            cyDesc, cyptr, wsptr, h.workspace_size_bytes, rsptr,
+            h.reserve_size_bytes));
+        delete[] xDesc;
+        delete[] yDesc;
+      },
+      {x.block(), hx.block(), cx.block(), W.block()},
+      {y.block(), hy.block(), cy.block(), h.workspace.block(),
+       h.reserve_space.block()},
+      "cudnnRNNForwardTraining");
+
+  return {y, hy, cy};
+}
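+// Editor's note, hedged call sketch: with x of shape {seq, bs, feature} and
+// zero-initialized hx/cx of shape {layers * dirs, bs, hidden},
+//   auto outs = GpuRNNForwardTraining(x, hx, cx, W, h);  // {y, hy, cy}
+// then GpuRNNBackwardx / GpuRNNBackwardW consume the cached reserve space.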
+vector<Tensor> GpuRNNBackwardx(const Tensor &y, const Tensor &dy,
+                               const Tensor &dhy, const Tensor &dcy,
+                               const Tensor &W, const Tensor &hx,
+                               const Tensor &cx, CudnnRNNHandle &h) {
+  // in
+  // y shape {seq, bs}
+  // dy shape {seq, bs}
+  Tensor dx(Shape{h.seq_length, h.batch_size, h.feature_size}, y.device());
+  Tensor dhx(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                   h.hidden_size},
+             y.device());
+  Tensor dcx(Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                   h.hidden_size},
+             y.device());
+  dx.SetValue(0.0f);
+  dhx.SetValue(0.0f);
+  dcx.SetValue(0.0f);
+  h.workspace.SetValue(0.0f);
+  dx.device()->Exec(
+      [dx, dhx, dcx, y, dy, dhy, dcy, &W, hx, cx, &h](Context *ctx) {
+        // required descriptors ([..] = one per time step):
+        //   [dx], hx, dhx, cx, dcx, w, [y], [dy], dhy, dcy
+        cudnnTensorDescriptor_t *dxDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        cudnnTensorDescriptor_t *yDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        cudnnTensorDescriptor_t *dyDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        init_yDesc(yDesc, h);
+        init_xDesc(dxDesc, h);
+        init_yDesc(dyDesc, h);
+        cudnnTensorDescriptor_t hxDesc;
+        cudnnTensorDescriptor_t cxDesc;
+        cudnnTensorDescriptor_t dhxDesc;
+        cudnnTensorDescriptor_t dcxDesc;
+        cudnnTensorDescriptor_t dhyDesc;
+        cudnnTensorDescriptor_t dcyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(dhxDesc, h);
+        init_hc_Desc(dcxDesc, h);
+        init_hc_Desc(dhyDesc, h);
+        init_hc_Desc(dcyDesc, h);
+
+        auto y_con = Contiguous(y);
+        auto dy_con = Contiguous(dy);
+
+        auto dxptr = dx.block()->mutable_data();
+        auto hxptr = hx.block()->data();
+        auto dhxptr = dhx.block()->mutable_data();
+        auto cxptr = cx.block()->data();
+        auto dcxptr = dcx.block()->mutable_data();
+        auto Wptr = W.block()->data();
+        auto yptr = y_con.block()->data();
+        auto dyptr = dy_con.block()->data();
+        auto dhyptr = dhy.block()->data();
+        auto dcyptr = dcy.block()->data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+
+        CUDNN_CHECK(cudnnRNNBackwardData(
+            ctx->cudnn_handle, h.rnnDesc, h.seq_length, yDesc, yptr, dyDesc,
+            dyptr, dhyDesc, dhyptr, dcyDesc, dcyptr, h.wDesc, Wptr, hxDesc,
+            hxptr, cxDesc, cxptr, dxDesc, dxptr, dhxDesc, dhxptr, dcxDesc,
+            dcxptr, wsptr, h.workspace_size_bytes, rsptr,
+            h.reserve_size_bytes));
+        delete[] dxDesc;
+        delete[] yDesc;
+        delete[] dyDesc;
+      },
+      {y.block(), dy.block(), dhy.block(), dcy.block(), hx.block(), cx.block(),
+       W.block()},
+      {dx.block(), dhx.block(), dcx.block(), h.workspace.block(),
+       h.reserve_space.block()},
+      "cudnnRNNBackwardx");
+  return {dx, dhx, dcx};
+}
+
+Tensor GpuRNNBackwardW(const Tensor &x, const Tensor &hx, const Tensor &y,
+                       CudnnRNNHandle &h) {
+  Tensor dW(Shape{h.weights_size}, x.device());
+  // in:  x in shape {seq, bs, feature}
+  //      y in shape {seq, bs, hidden * (bidirectional ? 2 : 1)}
+  dW.SetValue(0.0f);
+  h.workspace.SetValue(0.0f);
+  dW.device()->Exec(
+      [dW, x, hx, y, &h](Context *ctx) {
+        cudnnTensorDescriptor_t *xDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        cudnnTensorDescriptor_t *yDesc =
+            new cudnnTensorDescriptor_t[h.seq_length];
+        init_xDesc(xDesc, h);
+        init_yDesc(yDesc, h);
+        cudnnTensorDescriptor_t hxDesc;
+        init_hc_Desc(hxDesc, h);
+
+        auto y_con = Contiguous(y);
+        auto x_con = Contiguous(x);
+
+        auto xptr = x_con.block()->data();
+        auto hxptr = hx.block()->data();
+        auto yptr = y_con.block()->data();
+        auto dWptr = dW.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+
+        CUDNN_CHECK(cudnnRNNBackwardWeights(
+            ctx->cudnn_handle, h.rnnDesc, h.seq_length, xDesc, xptr, hxDesc,
+            hxptr, yDesc, yptr, wsptr, h.workspace_size_bytes, h.dwDesc, dWptr,
+            rsptr, h.reserve_size_bytes));
+        delete[] xDesc;
+        delete[] yDesc;
+      },
+      {x.block(), y.block(), hx.block()},
+      {dW.block(), h.workspace.block(), h.reserve_space.block()},
+      "cudnnRnnBackwardW");
+  return dW;
+}
+
+void CudnnRNNHandle::init_param_mapping(cudnnTensorDescriptor_t *xDesc) {
+  int linLayerIDRange = 2;
+  if (mode == 0 || mode == 1) {
+    // vanilla relu/tanh
+    linLayerIDRange = 2;
+  } else if (mode == 2) {
+    // lstm
+    linLayerIDRange = 8;
+  } else if (mode == 3) {
+    // gru
+    linLayerIDRange = 6;
+  }
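+  // Per the cuDNN docs, linLayerID indexes the per-gate GEMMs: for LSTM,
+  // IDs 0-3 are the input GEMMs and 4-7 the recurrent GEMMs
+  // (input/forget/new-memory/output gate order); for GRU, IDs 0-2 are the
+  // input GEMMs and 3-5 the recurrent GEMMs.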
+  int pseudoLayerRange = (bidirectional ? 2 : 1) * num_layers;
+
+  // dummy weights for getting the offset
+  Tensor weights(
+      Shape{
+          weights_size,
+      },
+      dev);
+  weights.SetValue(0.0f);
+  const void *W_ptr = weights.block()->data();
+
+  void *param_ptr = nullptr;
+  int dims[] = {1, 1, 1};
+  cudnnDataType_t data_type;
+  cudnnTensorFormat_t tensor_format;
+  int n_dims;
+  cudnnFilterDescriptor_t paramDesc;
+  CUDNN_CHECK(cudnnCreateFilterDescriptor(&paramDesc));
+
+  vector<bool> paramTypes{false, true};
+  for (int linLayerID = 0; linLayerID < linLayerIDRange; linLayerID++) {
+    for (int pseudoLayer = 0; pseudoLayer < pseudoLayerRange; pseudoLayer++) {
+      for (const bool &is_bias : paramTypes) {
+        // get param ptr
+        if (is_bias) {
+          CUDNN_CHECK(cudnnGetRNNLinLayerBiasParams(
+              ctx->cudnn_handle, rnnDesc, pseudoLayer, xDesc[0], wDesc, W_ptr,
+              linLayerID, paramDesc, &param_ptr));
+        } else {
+          CUDNN_CHECK(cudnnGetRNNLinLayerMatrixParams(
+              ctx->cudnn_handle, rnnDesc, pseudoLayer, xDesc[0], wDesc, W_ptr,
+              linLayerID, paramDesc, &param_ptr));
+        }
+
+        // get param dims
+        CUDNN_CHECK(cudnnGetFilterNdDescriptor(paramDesc, 3, &data_type,
+                                               &tensor_format, &n_dims, dims));
+
+        // offset (in elements) of this parameter within the packed weights
+        size_t offset = (float *)param_ptr - (float *)W_ptr;
+
+        // save in map
+        weights_mapping[std::make_tuple(linLayerID, pseudoLayer, is_bias)] =
+            std::make_tuple(offset, dims[0] * dims[1] * dims[2]);
+      }
+    }
+  }
+}
+
+void GpuRNNSetParam(int linLayerID, int pseudoLayer, Tensor &weights,
+                    Tensor &paramValues, bool is_bias, CudnnRNNHandle &h) {
+  size_t offset, size;
+  std::tie(offset, size) =
+      h.weights_mapping[std::make_tuple(linLayerID, pseudoLayer, is_bias)];
+  CHECK_EQ(size, paramValues.size()) << "unexpected param size";
+  CopyDataToFrom(&weights, paramValues, size, offset, 0);
+}
+
+Tensor GpuRNNGetParamCopy(int linLayerID, int pseudoLayer, Tensor &weights,
+                          bool is_bias, CudnnRNNHandle &h) {
+  size_t offset, size;
+  std::tie(offset, size) =
+      h.weights_mapping[std::make_tuple(linLayerID, pseudoLayer, is_bias)];
+  Tensor paramCopy(
+      Shape{
+          size,
+      },
+      weights.device());
+  CopyDataToFrom(&paramCopy, weights, size, 0, offset);
+  return paramCopy;
+}
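+
+// Usage sketch (illustrative): copying one gate matrix out of the packed
+// weights W and writing it back, assuming an initialized handle h:
+//   Tensor Wx = GpuRNNGetParamCopy(/*linLayerID=*/0, /*pseudoLayer=*/0, W,
+//                                  /*is_bias=*/false, h);
+//   GpuRNNSetParam(0, 0, W, Wx, false, h);  // round trip leaves W unchanged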
+
+void init_data_desc(cudnnRNNDataDescriptor_t &desc, int data_size,
+                    const Tensor seq_lengths, CudnnRNNHandle &h) {
+  /* cudnnRNNDataDescriptor_t is a pointer to an opaque structure holding
+  the description of an RNN data set. The function
+  cudnnCreateRNNDataDescriptor() is used to create one instance, and
+  cudnnSetRNNDataDescriptor() must be used to initialize this instance.
+  */
+  CUDNN_CHECK(cudnnCreateRNNDataDescriptor(&desc));
+  /* Supported layouts (from the cuDNN docs):
+     CUDNN_RNN_DATA_LAYOUT_SEQ_MAJOR_UNPACKED
+       Data layout is padded, with outer stride from one time-step to the next.
+     CUDNN_RNN_DATA_LAYOUT_SEQ_MAJOR_PACKED
+       The sequence lengths are sorted and packed as in the basic RNN API.
+     CUDNN_RNN_DATA_LAYOUT_BATCH_MAJOR_UNPACKED
+       Data layout is padded, with outer stride from one batch to the next.
+  */
+  cudnnRNNDataLayout_t layout;
+  if (h.batch_first) {
+    layout = CUDNN_RNN_DATA_LAYOUT_BATCH_MAJOR_UNPACKED;
+  } else {
+    layout = CUDNN_RNN_DATA_LAYOUT_SEQ_MAJOR_PACKED;
+  }
+
+  /* This is only effective when the descriptor is describing the RNN
+  output, and the unpacked layout is specified.*/
+  float paddingFill = 0.0f;
+
+  /* Input. An integer array with batchSize number of elements.
+  Describes the length (number of time-steps) of each sequence. Each
+  element in seqLengthArray must be greater than 0 but less than or
+  equal to maxSeqLength. */
+  Tensor tmp = seq_lengths.Clone();
+  tmp.ToHost();
+  tmp = tmp.AsType(singa::kInt);
+  const int *seq_lengths_ptr = static_cast<const int *>(tmp.block()->data());
+
+  CUDNN_CHECK(cudnnSetRNNDataDescriptor(desc, h.cudnnDataType, layout,
+                                        h.seq_length, h.batch_size, data_size,
+                                        seq_lengths_ptr, (void *)&paddingFill));
+}
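+
+// Example (illustrative): for a batch of 3 sequences with lengths {5, 3, 2},
+// seq_lengths holds {5, 3, 2}, h.batch_size is 3, and h.seq_length is the
+// maximum length (5).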
+
+vector<Tensor> GpuRNNForwardInferenceEx(const Tensor &x, const Tensor &hx,
+                                        const Tensor &cx, const Tensor &W,
+                                        const Tensor &seq_lengths,
+                                        CudnnRNNHandle &h) {
+  CHECK_EQ(h.feature_size, x.shape(2)) << "feature size should not change";
+
+  Tensor y, hy, cy;
+  Shape yshape, states_shape;
+
+  if (h.batch_first) {
+    LOG(FATAL) << "batch_first not implemented for GpuRNNForwardInferenceEx";
+  } else {
+    h.seq_length = x.shape(0);
+    h.batch_size = x.shape(1);
+    yshape = Shape{h.seq_length, h.batch_size,
+                   h.hidden_size * (h.bidirectional ? 2 : 1)};
+    states_shape = Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                         h.hidden_size};
+  }
+
+  y = Tensor(yshape, x.device());
+  hy = Tensor(states_shape, x.device());
+  cy = Tensor(states_shape, x.device());
+
+  y.device()->Exec(
+      [y, hy, cy, x, seq_lengths, hx, cx, &W, &h](Context *ctx) {
+        // data descriptor
+        cudnnRNNDataDescriptor_t xDesc, yDesc;
+        init_data_desc(xDesc, h.feature_size, seq_lengths, h);
+        init_data_desc(yDesc,
+                       h.bidirectional ? h.hidden_size * 2 : h.hidden_size,
+                       seq_lengths, h);
+
+        // hidden cell states descriptor
+        cudnnTensorDescriptor_t hxDesc, cxDesc, hyDesc, cyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(hyDesc, h);
+        init_hc_Desc(cyDesc, h);
+
+        auto xptr = x.block()->data();
+        auto hxptr = hx.block()->data();
+        auto cxptr = cx.block()->data();
+        auto Wptr = W.block()->data();
+        auto yptr = y.block()->mutable_data();
+        auto hyptr = hy.block()->mutable_data();
+        auto cyptr = cy.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+
+        /* This routine is the extended version of the cudnnRNNForwardInference()
+        function. The cudnnRNNForwardInferenceEx() allows the user to use an
+        unpacked (padded) layout for input x and output y.
+        */
+        CUDNN_CHECK(cudnnRNNForwardInferenceEx(
+            ctx->cudnn_handle, h.rnnDesc, xDesc, xptr, hxDesc, hxptr, cxDesc,
+            cxptr, h.wDesc, Wptr, yDesc, yptr, hyDesc, hyptr, cyDesc, cyptr,
+            NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, wsptr,
+            h.workspace_size_bytes));
+      },
+      {x.block(), hx.block(), cx.block(), W.block()},
+      {y.block(), hy.block(), cy.block(), h.workspace.block(),
+       h.reserve_space.block()});
+  return {y, hy, cy};
+}
+
+vector<Tensor> GpuRNNForwardTrainingEx(const Tensor &x, const Tensor &hx,
+                                       const Tensor &cx, const Tensor &W,
+                                       const Tensor &seq_lengths,
+                                       CudnnRNNHandle &h) {
+  CHECK_EQ(h.feature_size, x.shape(2)) << "feature size should not change";
+
+  Tensor y, hy, cy;
+  Shape yshape, states_shape;
+
+  if (h.batch_first) {
+    LOG(FATAL) << "batch_first not implemented for GpuRNNForwardTrainingEx";
+  } else {
+    h.seq_length = x.shape(0);
+    h.batch_size = x.shape(1);
+    yshape = Shape{h.seq_length, h.batch_size,
+                   h.hidden_size * (h.bidirectional ? 2 : 1)};
+    states_shape = Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                         h.hidden_size};
+  }
+
+  y = Tensor(yshape, x.device());
+  hy = Tensor(states_shape, x.device());
+  cy = Tensor(states_shape, x.device());
+
+  y.device()->Exec(
+      [y, hy, cy, x, seq_lengths, hx, cx, &W, &h](Context *ctx) {
+        // data descriptor
+        cudnnRNNDataDescriptor_t xDesc, yDesc;
+        init_data_desc(xDesc, h.feature_size, seq_lengths, h);
+        init_data_desc(yDesc,
+                       h.bidirectional ? h.hidden_size * 2 : h.hidden_size,
+                       seq_lengths, h);
+
+        // hidden cell states descriptor
+        cudnnTensorDescriptor_t hxDesc, cxDesc, hyDesc, cyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(hyDesc, h);
+        init_hc_Desc(cyDesc, h);
+
+        auto xptr = x.block()->data();
+        auto hxptr = hx.block()->data();
+        auto cxptr = cx.block()->data();
+        auto Wptr = W.block()->data();
+        auto yptr = y.block()->mutable_data();
+        auto hyptr = hy.block()->mutable_data();
+        auto cyptr = cy.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+
+        /* This routine is the extended version of the cudnnRNNForwardTraining()
+        function. The cudnnRNNForwardTrainingEx() allows the user to use
+        unpacked (padded) layout for input x and output y.
+        */
+        CUDNN_CHECK(cudnnRNNForwardTrainingEx(
+            ctx->cudnn_handle, h.rnnDesc, xDesc, xptr, hxDesc, hxptr, cxDesc,
+            cxptr, h.wDesc, Wptr, yDesc, yptr, hyDesc, hyptr, cyDesc, cyptr,
+            NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, wsptr,
+            h.workspace_size_bytes, rsptr, h.reserve_size_bytes));
+      },
+      {x.block(), hx.block(), cx.block(), W.block()},
+      {y.block(), hy.block(), cy.block(), h.workspace.block(),
+       h.reserve_space.block()});
+  return {y, hy, cy};
+}
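+
+// Usage sketch (illustrative): training on a padded seq-major batch, assuming
+// x of shape {max_seq, bs, feature} and per-sequence lengths e.g. {5, 3, 2}:
+//   Tensor seq_lengths(Shape{bs}, x.device());   // filled with the lengths
+//   auto out = GpuRNNForwardTrainingEx(x, hx, cx, W, seq_lengths, h);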
+
+vector<Tensor> GpuRNNBackwardxEx(const Tensor &y, const Tensor &dy,
+                                 const Tensor &dhy, const Tensor &dcy,
+                                 const Tensor &W, const Tensor &hx,
+                                 const Tensor &cx, const Tensor &seq_lengths,
+                                 CudnnRNNHandle &h) {
+  // y shape: {bs, seq}
+  // dy shape: {bs, seq}
+  // dx shape: {bs, seq}
+  Shape xshape, states_shape;
+  if (h.batch_first) {
+    LOG(FATAL) << "batch_first not implemented for GpuRNNBackwardxEx";
+  } else {
+    xshape = Shape{h.batch_size, h.seq_length, h.feature_size};
+    states_shape = Shape{h.num_layers * (h.bidirectional ? 2 : 1), h.batch_size,
+                         h.hidden_size};
+  }
+  Tensor dx(xshape, y.device());
+  Tensor dhx(states_shape, y.device());
+  Tensor dcx(states_shape, y.device());
+
+  dx.SetValue(0.0f);
+  dhx.SetValue(0.0f);
+  dcx.SetValue(0.0f);
+  h.workspace.SetValue(0.0f);
+
+  dx.device()->Exec(
+      [dx, dhx, dcx, y, dy, dhy, dcy, &W, hx, cx, seq_lengths,
+       &h](Context *ctx) {
+        cudnnRNNDataDescriptor_t yDesc, dyDesc, dxDesc;
+        init_data_desc(yDesc,
+                       h.bidirectional ? h.hidden_size * 2 : h.hidden_size,
+                       seq_lengths, h);
+        init_data_desc(dyDesc,
+                       h.bidirectional ? h.hidden_size * 2 : h.hidden_size,
+                       seq_lengths, h);
+        init_data_desc(dxDesc, h.feature_size, seq_lengths, h);
+
+        /* descriptors for the other tensors */
+        cudnnTensorDescriptor_t hxDesc, cxDesc, dhxDesc, dcxDesc, dhyDesc,
+            dcyDesc;
+        init_hc_Desc(hxDesc, h);
+        init_hc_Desc(cxDesc, h);
+        init_hc_Desc(dhxDesc, h);
+        init_hc_Desc(dcxDesc, h);
+        init_hc_Desc(dhyDesc, h);
+        init_hc_Desc(dcyDesc, h);
+
+        auto dxptr = dx.block()->mutable_data();
+        auto hxptr = hx.block()->data();
+        auto dhxptr = dhx.block()->mutable_data();
+        auto cxptr = cx.block()->data();
+        auto dcxptr = dcx.block()->mutable_data();
+        auto Wptr = W.block()->data();
+        auto yptr = y.block()->data();
+        auto dyptr = dy.block()->data();
+        auto dhyptr = dhy.block()->data();
+        auto dcyptr = dcy.block()->data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+
+        CUDNN_CHECK(cudnnRNNBackwardDataEx(
+            ctx->cudnn_handle, h.rnnDesc, yDesc, yptr, dyDesc, dyptr, NULL,
+            NULL, dhyDesc, dhyptr, dcyDesc, dcyptr, h.wDesc, Wptr, hxDesc,
+            hxptr, cxDesc, cxptr, dxDesc, dxptr, dhxDesc, dhxptr, dcxDesc,
+            dcxptr, NULL, NULL, wsptr, h.workspace_size_bytes, rsptr,
+            h.reserve_size_bytes));
+      },
+      {y.block(), dy.block(), dhy.block(), dcy.block(), hx.block(), cx.block(),
+       W.block()},
+      {dx.block(), dhx.block(), dcx.block(), h.workspace.block(),
+       h.reserve_space.block()});
+  return {dx, dhx, dcx};
+}
+
+Tensor GpuRNNBackwardWEx(const Tensor &x, const Tensor &hx, const Tensor &y,
+                         const Tensor &seq_lengths, CudnnRNNHandle &h) {
+  Tensor dW(Shape{h.weights_size}, x.device());
+  dW.SetValue(0.0f);
+
+  dW.device()->Exec(
+      [dW, x, hx, y, seq_lengths, &h](Context *ctx) {
+        cudnnRNNDataDescriptor_t xDesc, yDesc;
+        init_data_desc(xDesc, h.feature_size, seq_lengths, h);
+        init_data_desc(yDesc,
+                       h.bidirectional ? h.hidden_size * 2 : h.hidden_size,
+                       seq_lengths, h);
+
+        /* descriptors for the other tensors */
+        cudnnTensorDescriptor_t hxDesc;
+        init_hc_Desc(hxDesc, h);
+
+        auto xptr = x.block()->data();
+        auto hxptr = hx.block()->data();
+        auto yptr = y.block()->data();
+        auto dWptr = dW.block()->mutable_data();
+        auto wsptr = h.workspace.block()->mutable_data();
+        auto rsptr = h.reserve_space.block()->mutable_data();
+
+        CUDNN_CHECK(cudnnRNNBackwardWeightsEx(
+            ctx->cudnn_handle, h.rnnDesc, xDesc, xptr, hxDesc, hxptr, yDesc,
+            yptr, wsptr, h.workspace_size_bytes, h.dwDesc, dWptr, rsptr,
+            h.reserve_size_bytes));
+      },
+      {x.block(), y.block(), hx.block()},
+      {dW.block(), h.workspace.block(), h.reserve_space.block()});
+  return dW;
+}
+
+#endif  // USE_CUDNN
+}  // namespace singa
diff --git a/src/model/operation/rnn.h b/src/model/operation/rnn.h
new file mode 100644
index 0000000..bbc9266
--- /dev/null
+++ b/src/model/operation/rnn.h
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef SRC_MODEL_OPERATION_RNN_H_
+#define SRC_MODEL_OPERATION_RNN_H_
+
+#include <iostream>
+#include <tuple>
+#include <vector>
+
+#include "singa/core/tensor.h"
+#include "singa/singa_config.h"
+#include "singa/utils/logging.h"
+
+#ifdef USE_CUDNN
+#include <cudnn.h>
+
+#include "../layer/cudnn_utils.h"
+#endif  // USE_CUDNN
+
+namespace singa {
+
+#ifdef USE_CUDNN
+class CudnnRNNHandle {
+ public:
+  CudnnRNNHandle(const Tensor &x, const int hidden_size, const int mode = 0,
+                 const int num_layers = 1, const int bias = 1,
+                 const float dropout = 0.0f, const int bidirectional = 0);
+
+  Context *ctx;
+  std::shared_ptr<Device> dev;
+
+  // parameters
+  int bias;
+  int mode;
+  float dropout;
+  int bidirectional;
+  size_t feature_size;
+  size_t hidden_size;
+  size_t num_layers;
+  int batch_first;
+
+  size_t weights_size_bytes;
+  size_t weights_size;
+  size_t batch_size;
+  size_t seq_length;
+
+  /* workspace data */
+  size_t workspace_size;
+  size_t workspace_size_bytes;
+  size_t reserve_size;
+  size_t reserve_size_bytes;
+  Tensor workspace;
+  Tensor reserve_space;
+
+  /* dropout */
+  void *states;
+  cudnnDropoutDescriptor_t dropoutDesc;
+
+  /* rnn desc */
+  cudnnRNNDescriptor_t rnnDesc;
+  cudnnRNNMode_t RNNMode;
+  cudnnRNNAlgo_t cudnnRNNAlgo;
+  cudnnDataType_t cudnnDataType;
+
+  /* weights desc */
+  cudnnFilterDescriptor_t wDesc, dwDesc;
+
+  void init_dropout_desc();
+  void init_rnn_desc();
+  void init_parameters_desc(cudnnTensorDescriptor_t *xDesc);
+  void init_workspace(cudnnTensorDescriptor_t *xDesc);
+  void init_param_mapping(cudnnTensorDescriptor_t *xDesc);
+
+  // maps (linLayerID, pseudoLayer, is_bias) => (offset, size);
+  // e.g. Wx of the 1st layer is at <0,0,false> => (0, feature_size * hidden_size)
+  std::map<std::tuple<int, int, bool>, std::tuple<size_t, size_t>>
+      weights_mapping;
+};
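+
+// Usage sketch (illustrative): constructing a handle for a 2-layer
+// bidirectional LSTM over input x of shape {seq, bs, feature}:
+//   CudnnRNNHandle h(x, /*hidden_size=*/64, /*mode=*/2 /*lstm*/,
+//                    /*num_layers=*/2, /*bias=*/1, /*dropout=*/0.0f,
+//                    /*bidirectional=*/1);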
+
+void init_xDesc(cudnnTensorDescriptor_t *xDesc, CudnnRNNHandle &h);
+void init_yDesc(cudnnTensorDescriptor_t *yDesc, CudnnRNNHandle &h);
+void init_hc_Desc(cudnnTensorDescriptor_t &hDesc, CudnnRNNHandle &h);
+
+vector<Tensor> GpuRNNForwardTraining(const Tensor &x, const Tensor &hx,
+                                     const Tensor &cx, const Tensor &W,
+                                     CudnnRNNHandle &h);
+vector<Tensor> GpuRNNForwardInference(const Tensor &x, const Tensor &hx,
+                                      const Tensor &cx, const Tensor &W,
+                                      CudnnRNNHandle &h);
+vector<Tensor> GpuRNNBackwardx(const Tensor &y, const Tensor &dy,
+                               const Tensor &dhy, const Tensor &dcy,
+                               const Tensor &W, const Tensor &hx,
+                               const Tensor &cx, CudnnRNNHandle &h);
+Tensor GpuRNNBackwardW(const Tensor &x, const Tensor &hx, const Tensor &y,
+                       CudnnRNNHandle &h);
+
+void GpuRNNSetParam(int linLayerID, int pseudoLayer, Tensor &weights,
+                    Tensor &paramValues, bool is_bias, CudnnRNNHandle &h);
+Tensor GpuRNNGetParamCopy(int linLayerID, int pseudoLayer, Tensor &weights,
+                          bool is_bias, CudnnRNNHandle &h);
+
+vector<Tensor> GpuRNNForwardTrainingEx(const Tensor &x, const Tensor &hx,
+                                       const Tensor &cx, const Tensor &W,
+                                       const Tensor &seq_lengths,
+                                       CudnnRNNHandle &h);
+vector<Tensor> GpuRNNForwardInferenceEx(const Tensor &x, const Tensor &hx,
+                                        const Tensor &cx, const Tensor &W,
+                                        const Tensor &seq_lengths,
+                                        CudnnRNNHandle &h);
+vector<Tensor> GpuRNNBackwardxEx(const Tensor &y, const Tensor &dy,
+                                 const Tensor &dhy, const Tensor &dcy,
+                                 const Tensor &W, const Tensor &hx,
+                                 const Tensor &cx, const Tensor &seq_lengths,
+                                 CudnnRNNHandle &h);
+Tensor GpuRNNBackwardWEx(const Tensor &x, const Tensor &hx, const Tensor &y,
+                         const Tensor &seq_lengths, CudnnRNNHandle &h);
+
+#endif  // USE_CUDNN
+
+}  // namespace singa
+#endif  // SRC_MODEL_OPERATION_RNN_H_
diff --git a/src/model/updater/local_updater.cc b/src/model/updater/local_updater.cc
index c3c6793..eb3f83b 100644
--- a/src/model/updater/local_updater.cc
+++ b/src/model/updater/local_updater.cc
@@ -43,7 +43,7 @@
   int nth = dev_index_[name]++;
   auto key = std::make_pair(nth, name);
   if (grad_buffer_[key].Size() != grad.Size()) {
-    grad_buffer_[key].Reshape(grad.shape());
+    grad_buffer_[key].Resize(grad.shape());
     grad_buffer_[key].AsType(grad.data_type());
   }
   grad_buffer_[key].CopyData(grad);
@@ -56,7 +56,7 @@
     }
   } else {
     if (param_buffer_[name].Size() != value.Size()) {
-      param_buffer_[name].Reshape(value.shape());
+      param_buffer_[name].Resize(value.shape());
       param_buffer_[name].AsType(value.data_type());
       param_buffer_[name].CopyData(value);
       sum_[name].ResetLike(param_buffer_[name]);
@@ -74,4 +74,4 @@
   value.CopyData(param_buffer_[name]);
 }
 
-}  // namesapce singa
+}  // namespace singa
diff --git a/src/proto/core.proto b/src/proto/core.proto
index 9264e55..5c4d997 100644
--- a/src/proto/core.proto
+++ b/src/proto/core.proto
@@ -50,19 +50,19 @@
 
 // configuration for device memory pool
 message MemPoolConf {
-	optional string type = 1 [default = "cnmem"];
-	// allocation size for each device, default is 256 MB
-	optional uint32 init_size = 2 [default = 256];
+  optional string type = 1 [default = "cnmem"];
+  // allocation size for each device, default is 256 MB
+  optional uint32 init_size = 2 [default = 256];
   // size limit in MB; report error/warning if this limit is reached.
   // 0 for unlimited memory, i.e., use as much memory as the device has
   // not used currently.
-	optional uint32 max_size = 3 [default = 0];
+  optional uint32 max_size = 3 [default = 0];
 
-	// memory manager flag for cnmem
-	// flag = 0: default flag
-	// flag = 1: prevent the manager from growing its memory consumption
-	// flag = 2: prevent the manager from stealing memory
-	optional uint32 flag = 11 [default = 0];
+  // memory manager flag for cnmem
+  // flag = 0: default flag
+  // flag = 1: prevent the manager from growing its memory consumption
+  // flag = 2: prevent the manager from stealing memory
+  optional uint32 flag = 11 [default = 0];
   repeated uint32 device = 12;
 }
 
@@ -70,7 +70,8 @@
 message TensorProto {
   repeated uint32 shape = 1;
   optional DataType data_type = 2;
-  optional bool transpose = 3;
+  //optional bool transpose = 3;
+  repeated int32 stride = 3;
   repeated float float_data = 4 [packed = true];
   repeated double double_data = 5 [packed = true];
   repeated int32 int_data = 6 [packed = true];
diff --git a/src/utils/channel.cc b/src/utils/channel.cc
index 588a11a..dc02e10 100644
--- a/src/utils/channel.cc
+++ b/src/utils/channel.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "singa/utils/channel.h"
 
diff --git a/src/utils/logging.cc b/src/utils/logging.cc
index 304d431..8e621f6 100644
--- a/src/utils/logging.cc
+++ b/src/utils/logging.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-* 
-*   http://www.apache.org/licenses/LICENSE-2.0
-* 
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "singa/utils/logging.h"
 
@@ -34,7 +34,7 @@
 FILE* log_file[NUM_SEVERITIES] = {};
 bool not_log_stderr[NUM_SEVERITIES] = {};
 
-void InitLogging(const char *argv) {
+void InitLogging(const char* argv) {
 #ifdef USE_GLOG
   google::InitGoogleLogging(argv);
 #else
@@ -87,32 +87,22 @@
   localtime_r(&rw_time, &tm_time);
   // log to a file
   for (int i = severity_; i >= 0; --i)
-    if (log_file[i] )
-      DoLogging(log_file[i], tm_time);
+    if (log_file[i]) DoLogging(log_file[i], tm_time);
   // log to stderr
-  if (!not_log_stderr[severity_])
-    DoLogging(stderr, tm_time);
+  if (!not_log_stderr[severity_]) DoLogging(stderr, tm_time);
 }
 
 void LogMessage::DoLogging(FILE* file, const struct tm& tm_time) {
   fprintf(file, "[%c d%02d%02d t%02d:%02d:%02d p%05d:%03d %s:%d] %s\n",
-          "IWEF"[severity_],
-          1 + tm_time.tm_mon,
-          tm_time.tm_mday,
-          tm_time.tm_hour,
-          tm_time.tm_min,
-          tm_time.tm_sec,
-          GetPID(),
-          static_cast<unsigned>(GetTID()%1000),
-          fname_,
-          line_,
-          str().c_str());
+          "IWEF"[severity_], 1 + tm_time.tm_mon, tm_time.tm_mday,
+          tm_time.tm_hour, tm_time.tm_min, tm_time.tm_sec, GetPID(),
+          static_cast<unsigned>(GetTID() % 1000), fname_, line_, str().c_str());
 }
 
 LogMessage::~LogMessage() { GenerateLogMessage(); }
 
 LogMessageFatal::LogMessageFatal(const char* file, int line)
-  : LogMessage(file, line, FATAL) {}
+    : LogMessage(file, line, FATAL) {}
 LogMessageFatal::~LogMessageFatal() {
   // abort() ensures we don't return
   GenerateLogMessage();
@@ -148,7 +138,7 @@
 
 template <>
 void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& p) {
-    (*os) << "nullptr";
+  (*os) << "nullptr";
 }
 
 CheckOpMessageBuilder::CheckOpMessageBuilder(const char* exprtext)
diff --git a/python/rafiki/__init__.py b/test/python/cuda_helper.py
similarity index 79%
copy from python/rafiki/__init__.py
copy to test/python/cuda_helper.py
index 3aa745b..8f6bd4f 100644
--- a/python/rafiki/__init__.py
+++ b/test/python/cuda_helper.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +15,11 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+from singa import device
+from singa import singa_wrap
+
+# avoid singleton error
+gpu_dev = None
+if singa_wrap.USE_CUDA:
+    gpu_dev = device.create_cuda_gpu()
+cpu_dev = device.get_default_device()
diff --git a/test/python/run.py b/test/python/run.py
index 8f7a45f..b787a15 100644
--- a/test/python/run.py
+++ b/test/python/run.py
@@ -16,14 +16,15 @@
 # limitations under the License.
 #
 
+import sys
 import unittest
-import xmlrunner
 
-loader = unittest.TestLoader()
-tests = loader.discover('.')
-# testRunner = unittest.runner.TextTestRunner()
-with open('unittest.xml', 'wb') as output:
-    testRunner = xmlrunner.XMLTestRunner(output=output,
-                                         failfast=False,
-                                         buffer=False)
-    testRunner.run(tests)
+def main():
+    loader = unittest.TestLoader()
+    tests = loader.discover('.')
+    testRunner = unittest.runner.TextTestRunner()
+    ret = not testRunner.run(tests).wasSuccessful()
+    sys.exit(ret)
+
+if __name__ == "__main__":
+    main()
diff --git a/test/python/test_api.py b/test/python/test_api.py
new file mode 100644
index 0000000..e307dc9
--- /dev/null
+++ b/test/python/test_api.py
@@ -0,0 +1,923 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import division
+
+import unittest
+import math
+import numpy as np
+
+from singa import singa_wrap as singa_api
+from singa import tensor
+from cuda_helper import gpu_dev, cpu_dev
+
+
+def _np_bn_training(x, scale, bias, rm, rv, momentum=0.1, e=1e-5):
+    channel = x.shape[1]
+    np.testing.assert_array_almost_equal(scale.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(bias.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(rm.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(rv.shape, (1, channel, 1, 1))
+
+    batch_m = x.mean(axis=(0, 2, 3), keepdims=True)
+    batch_v = x.var(axis=(0, 2, 3), keepdims=True)
+
+    x_norm = (x - batch_m) / np.sqrt(batch_v + e)
+    y_norm = x_norm * scale + bias
+
+    # https://arxiv.org/pdf/1502.03167.pdf
+    s = list(x.shape)
+    s[1] = 1
+    batch_v_unbiased = np.prod(s) * batch_v / (np.prod(s) - 1)
+
+    rm = momentum * batch_m + (1 - momentum) * rm
+    rv = momentum * batch_v_unbiased + (1 - momentum) * rv
+
+    # https://docs.nvidia.com/deeplearning/sdk/cudnn-developer-guide/index.html#cudnnBatchNormalizationForwardTraining
+    resultSaveInvVariance = 1 / np.sqrt(batch_v)
+    return y_norm, rm, rv, batch_m, resultSaveInvVariance
+
+
+def _np_bn_testing(x, scale, bias, rm, rv, momentum=0.1, e=1e-5):
+    channel = x.shape[1]
+    np.testing.assert_array_almost_equal(scale.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(bias.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(rm.shape, (1, channel, 1, 1))
+    np.testing.assert_array_almost_equal(rv.shape, (1, channel, 1, 1))
+    return scale * (x - rm) / np.sqrt(rv + e) + bias
+
+
+def _cTensor_to_pyTensor(cTensor):
+    new_t = tensor.Tensor()
+    new_t.data = cTensor
+    new_t.shape = tuple(new_t.data.shape())
+    new_t.device = new_t.data.device()
+    new_t.dtype = new_t.data.data_type()
+    return new_t
+
+
+def _ctensor_eq_ndarray(t1, np1):
+    d = t1.device()
+    t1.ToHost()
+    if t1.data_type() == singa_api.kInt:
+        np.testing.assert_array_almost_equal(t1.GetIntValue(t1.Size()),
+                                             np1.flatten())
+    elif t1.data_type() == singa_api.kFloat32:
+        np.testing.assert_array_almost_equal(t1.GetFloatValue(t1.Size()),
+                                             np1.flatten())
+
+    if np1.dtype == np.float32:
+        np.testing.assert_equal(t1.data_type(), singa_api.kFloat32)
+    elif np1.dtype == np.int32:
+        np.testing.assert_equal(t1.data_type(), singa_api.kInt)
+
+    np.testing.assert_array_almost_equal(t1.shape(), np1.shape)
+    t1.ToDevice(d)
+
+
+def print_t(t1):
+    d = t1.device()
+    t1.ToHost()
+    if t1.data_type() == singa_api.kInt:
+        print(t1.GetIntValue(t1.Size()))
+    elif t1.data_type() == singa_api.kFloat32:
+        print(t1.GetFloatValue(t1.Size()))
+    t1.ToDevice(d)
+
+
+class TestAPI(unittest.TestCase):
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_batchnorm_training_gpu(self):
+        dev = gpu_dev
+
+        def _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.1):
+            # np api
+            (y_1, rm_1, rv_1, bm_1, bv_1) = _np_bn_training(x_0,
+                                                            s_0,
+                                                            b_0,
+                                                            rm_0,
+                                                            rv_0,
+                                                            momentum=m_0)
+
+            # singa api
+            rm_t = tensor.Tensor(device=dev, data=rm_0)
+            rv_t = tensor.Tensor(device=dev, data=rv_0)
+            hndl = singa_api.CudnnBatchNormHandle(
+                m_0,
+                tensor.Tensor(device=dev, data=x_0).data)
+            (y_2_c, bm_2_c, bv_2_c) = singa_api.GpuBatchNormForwardTraining(
+                hndl,
+                tensor.Tensor(device=dev, data=x_0).data,
+                tensor.Tensor(device=dev, data=s_0).data,
+                tensor.Tensor(device=dev, data=b_0).data, rm_t.data, rv_t.data)
+
+            np.testing.assert_array_almost_equal(
+                y_1, tensor.to_numpy(_cTensor_to_pyTensor(y_2_c)), decimal=4)
+            np.testing.assert_array_almost_equal(
+                bm_1, tensor.to_numpy(_cTensor_to_pyTensor(bm_2_c)))
+            np.testing.assert_array_almost_equal(rm_1, tensor.to_numpy(rm_t))
+            np.testing.assert_array_almost_equal(
+                bv_1, tensor.to_numpy(_cTensor_to_pyTensor(bv_2_c)), decimal=3)
+            np.testing.assert_array_almost_equal(rv_1,
+                                                 tensor.to_numpy(rv_t),
+                                                 decimal=4)
+            return
+
+        x_0 = np.array([1, 1, 1, 1, 2, 2, 2, 2, 10, 10, 10, 10, 20, 20, 20, 20],
+                       dtype=np.float32).reshape((2, 2, 2, 2))
+        s_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        b_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rm_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rv_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.0)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.2)
+
+        c = 10
+        x_0 = np.random.random((10, c, 20, 20)).astype(np.float32)
+        s_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        b_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rm_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rv_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.2)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_batchnorm_testing_gpu(self):
+        dev = gpu_dev
+
+        def _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=0.1):
+            # np api
+            y_1 = _np_bn_testing(x_0, s_0, b_0, rm_0, rv_0, momentum=m_0)
+
+            # singa api
+            hndl = singa_api.CudnnBatchNormHandle(
+                m_0,
+                tensor.Tensor(device=dev, data=x_0).data)
+            y_2_c = singa_api.GpuBatchNormForwardInference(
+                hndl,
+                tensor.Tensor(device=dev, data=x_0).data,
+                tensor.Tensor(device=dev, data=s_0).data,
+                tensor.Tensor(device=dev, data=b_0).data,
+                tensor.Tensor(device=dev, data=rm_0).data,
+                tensor.Tensor(device=dev, data=rv_0).data)
+
+            np.testing.assert_array_almost_equal(
+                y_1, tensor.to_numpy(_cTensor_to_pyTensor(y_2_c)), decimal=3)
+            return
+
+        x_0 = np.array([1, 1, 1, 1, 2, 2, 2, 2, 10, 10, 10, 10, 20, 20, 20, 20],
+                       dtype=np.float32).reshape((2, 2, 2, 2))
+        s_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        b_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rm_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rv_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+        c = 10
+        x_0 = np.random.random((10, c, 20, 20)).astype(np.float32)
+        s_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        b_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rm_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rv_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+
+    def _softmax_api_helper(self, dev):
+
+        def _run_test(dev, org_shape, axis, aft_shape):
+            x_0 = np.random.random(org_shape).astype(np.float32)
+            x_0 = x_0 + 1000
+            x0 = tensor.Tensor(device=dev, data=x_0)
+
+            # test with axis
+            y0 = tensor._call_singa_func(singa_api.SoftMax, x0.data, axis)
+
+            # test with numpy
+            x_0 = x_0.reshape(aft_shape)
+            x_0 = x_0 - np.max(x_0)
+            y1 = np.divide(np.exp(x_0),
+                           np.sum(np.exp(x_0), axis=1).reshape(x_0.shape[0],
+                                                               1))  # 2d softmax
+            y1 = y1.reshape(org_shape)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(y0), y1)
+
+        _run_test(dev, [2, 2], 1, [2, 2])
+        _run_test(dev, [2, 2], 0, [1, 4])
+        _run_test(dev, [2, 2], -1, [2, 2])
+        _run_test(dev, [2, 2], -2, [1, 4])
+        _run_test(dev, [2, 2, 2], 2, [4, 2])
+        _run_test(dev, [2, 2, 2], 1, [2, 4])
+        _run_test(dev, [2, 2, 2], 0, [1, 8])
+        _run_test(dev, [2, 2, 2], -1, [4, 2])
+        _run_test(dev, [2, 2, 2], -2, [2, 4])
+        _run_test(dev, [2, 2, 2], -3, [1, 8])
+        _run_test(dev, [2, 2, 2, 2], 3, [8, 2])
+        _run_test(dev, [2, 2, 2, 2], 2, [4, 4])
+        _run_test(dev, [2, 2, 2, 2], 1, [2, 8])
+        _run_test(dev, [2, 2, 2, 2], 0, [1, 16])
+        _run_test(dev, [2, 2, 2, 2], -1, [8, 2])
+        _run_test(dev, [2, 2, 2, 2], -2, [4, 4])
+        _run_test(dev, [2, 2, 2, 2], -3, [2, 8])
+        _run_test(dev, [2, 2, 2, 2], -4, [1, 16])
+
+    def test_softmax_api_cpu(self):
+        self._softmax_api_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_softmax_api_gpu(self):
+        self._softmax_api_helper(gpu_dev)
+
+    def _tensor_arithmetic_op_broadcast_helper(self, dev):
+
+        def _run_test(dev, singa_op, np_op, s1, s2):
+            x_0 = np.random.random(s1).astype(np.float32)
+            y_0 = np.random.random(s2).astype(np.float32)
+            x0 = tensor.Tensor(device=dev, data=x_0)
+            y0 = tensor.Tensor(device=dev, data=y_0)
+
+            z0 = tensor._call_singa_func(singa_op, x0.data, y0.data)
+            z0.to_host()
+            np.testing.assert_array_almost_equal(tensor.to_numpy(z0),
+                                                 np_op(x_0, y_0))
+            return
+
+        for s_op, n_op in zip([
+                singa_api.Pow,
+                singa_api.__add__,
+                singa_api.__div__,
+                singa_api.__sub__,
+                singa_api.__mul__,
+        ], [np.power, np.add, np.divide, np.subtract, np.multiply]):
+            _run_test(dev, s_op, n_op, [6], [1])
+            _run_test(dev, s_op, n_op, [2, 3], [2, 3])
+            _run_test(dev, s_op, n_op, [3, 2], [1])
+            _run_test(dev, s_op, n_op, [3, 1, 2], [3, 1, 1])
+            _run_test(dev, s_op, n_op, [2, 3, 4, 5], [5])
+            _run_test(dev, s_op, n_op, [2, 3, 4, 5], [1, 1, 1])
+            _run_test(dev, s_op, n_op, [2, 3, 4, 5], [1, 1, 1, 1])
+            _run_test(dev, s_op, n_op, [2, 3, 4, 5], [4, 5])  # 45+2345=2345
+            _run_test(dev, s_op, n_op, [3, 1, 2, 1], [3, 1, 2])
+            _run_test(dev, s_op, n_op, [4, 5], [2, 3, 4, 5])  # 45+2345=2345
+            _run_test(dev, s_op, n_op, [1, 4, 5], [2, 3, 1, 1])  # 145+2311=2345
+            _run_test(dev, s_op, n_op, [3, 4, 5], [2, 1, 1, 1])  # 345+2111=2345
+
+    def test_tensor_arithmetic_op_broadcast_cpu(self):
+        self._tensor_arithmetic_op_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_tensor_arithmetic_op_broadcast_gpu(self):
+        self._tensor_arithmetic_op_broadcast_helper(gpu_dev)
+
+    def _transpose_and_arithmetic_op_broadcast_helper(self, dev):
+
+        def _test(s1, s2, axis1, axis2, s3, s_op, n_op, dev):
+            x_0 = np.random.random(s1).astype(np.float32)
+            y_0 = np.random.random(s2).astype(np.float32)
+
+            x0 = tensor.Tensor(device=dev, data=x_0)
+            y0 = tensor.Tensor(device=dev, data=y_0)
+
+            x1 = x0.transpose(axis1)
+            y1 = y0.transpose(axis2)
+
+            z0 = tensor._call_singa_func(s_op, x1.data, y1.data)
+            z0.to_host()
+
+            np.testing.assert_array_almost_equal(
+                tensor.to_numpy(z0),
+                n_op(x_0.transpose(axis1), y_0.transpose(axis2)))
+            np.testing.assert_array_almost_equal(z0.shape, s3)
+            return
+
+        for s_op, n_op in zip([
+                singa_api.Pow,
+                singa_api.__add__,
+                singa_api.__div__,
+                singa_api.__sub__,
+                singa_api.__mul__,
+        ], [np.power, np.add, np.divide, np.subtract, np.multiply]):
+            s1 = [1, 5, 1, 3]
+            s2 = [3, 1, 1, 4]
+            axis1 = [3, 2, 1, 0]  # 3121
+            axis2 = [1, 0, 2, 3]  # 1314
+            s3 = [3, 3, 5, 4]
+            _test(s1, s2, axis1, axis2, s3, s_op, n_op, dev)
+
+            s1 = [1, 5, 1]
+            s2 = [1, 3, 2]
+            axis1 = [2, 1, 0]  # 151
+            axis2 = [1, 0, 2]  # 312
+            s3 = [3, 5, 2]
+            _test(s1, s2, axis1, axis2, s3, s_op, n_op, dev)
+
+            s1 = [5, 1]
+            s2 = [1, 3]
+            axis1 = [1, 0]  # 15
+            axis2 = [1, 0]  # 31
+            s3 = [3, 5]
+            _test(s1, s2, axis1, axis2, s3, s_op, n_op, dev)
+
+    def test_transpose_and_arithmetic_op_broadcast_cpu(self):
+        self._transpose_and_arithmetic_op_broadcast_helper(cpu_dev)
+
+    def _erf(self, dev=cpu_dev):
+        np1 = np.random.random((2, 3)).astype(np.float32)
+
+        x1 = tensor.from_numpy(np1)
+        x1.to_device(dev)
+        y1 = tensor.from_raw_tensor(singa_api.Erf(x1.data))
+
+        # verify against scipy when it is available; otherwise only check
+        # that Erf runs without error
+        try:
+            from scipy.special import erf
+            np.testing.assert_array_almost_equal(erf(np1), tensor.to_numpy(y1))
+        except ImportError:
+            pass
+
+    def test_erf_cpu(self):
+        self._erf(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_transpose_and_arithmetic_op_broadcast_gpu(self):
+        self._transpose_and_arithmetic_op_broadcast_helper(gpu_dev)
+
+    def test_batchnorm_training_dnnl(self):
+        dev = cpu_dev
+
+        def _np_bn_training(x, scale, bias, rm, rv, momentum=0.1, e=1e-5):
+            channel = x.shape[1]
+            np.testing.assert_array_almost_equal(scale.shape,
+                                                 (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(bias.shape, (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(rm.shape, (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(rv.shape, (1, channel, 1, 1))
+
+            batch_m = x.mean(axis=(0, 2, 3), keepdims=True)
+            batch_v = x.var(axis=(0, 2, 3), keepdims=True)
+
+            x_norm = (x - batch_m) / np.sqrt(batch_v + e)
+            y_norm = x_norm * scale + bias
+
+            # https://arxiv.org/pdf/1502.03167.pdf
+            s = list(x.shape)
+            s[1] = 1
+            batch_v_unbiased = np.prod(s) * batch_v / (np.prod(s) - 1)
+
+            rm = momentum * batch_m + (1 - momentum) * rm
+            rv = momentum * batch_v_unbiased + (1 - momentum) * rv
+
+            # https://docs.nvidia.com/deeplearning/sdk/cudnn-developer-guide/index.html#cudnnBatchNormalizationForwardTraining
+            # this value is useful for bwd computation
+            resultSaveInvVariance = 1 / np.sqrt(batch_v)
+            return y_norm, rm, rv, batch_m, resultSaveInvVariance
+
+        def _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.1):
+            # np api
+            (y_1, rm_1, rv_1, bm_1, bv_1) = _np_bn_training(x_0,
+                                                            s_0,
+                                                            b_0,
+                                                            rm_0,
+                                                            rv_0,
+                                                            momentum=m_0)
+
+            # singa api
+            hndl = singa_api.BatchNormHandle(
+                m_0,
+                tensor.Tensor(device=dev, data=x_0).data)
+            (y_2_c, bm_2_c, bv_2_c) = singa_api.CpuBatchNormForwardTraining(
+                hndl,
+                tensor.Tensor(device=dev, data=x_0).data,
+                tensor.Tensor(device=dev, data=s_0).data,
+                tensor.Tensor(device=dev, data=b_0).data,
+                tensor.Tensor(device=dev, data=rm_0).data,
+                tensor.Tensor(device=dev, data=rv_0).data)
+
+            np.testing.assert_array_almost_equal(
+                y_1, tensor.to_numpy(_cTensor_to_pyTensor(y_2_c)), decimal=5)
+            np.testing.assert_array_almost_equal(
+                bm_1, tensor.to_numpy(_cTensor_to_pyTensor(bm_2_c)), decimal=5)
+            #np.testing.assert_array_almost_equal(
+            #    bv_1, tensor.to_numpy(_cTensor_to_pyTensor(bv_2_c)), decimal=3)
+            return
+
+        x_0 = np.array([1, 1, 1, 1, 2, 2, 2, 2, 10, 10, 10, 10, 20, 20, 20, 20],
+                       dtype=np.float32).reshape((2, 2, 2, 2))
+        s_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        b_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rm_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rv_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.0)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.2)
+
+        c = 10
+        x_0 = np.random.random((10, c, 20, 20)).astype(np.float32)
+        s_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        b_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rm_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rv_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        _run_training(x_0, s_0, b_0, rm_0, rv_0, m_0=0.2)
+
+    def test_batchnorm_testing_dnnl(self):
+        dev = cpu_dev
+
+        def _np_bn_testing(x, scale, bias, rm, rv, momentum=0.1, e=1e-5):
+            channel = x.shape[1]
+            np.testing.assert_array_almost_equal(scale.shape,
+                                                 (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(bias.shape, (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(rm.shape, (1, channel, 1, 1))
+            np.testing.assert_array_almost_equal(rv.shape, (1, channel, 1, 1))
+            return scale * (x - rm) / np.sqrt(rv + e) + bias
+
+        def _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=0.1):
+            # np api
+            y_1 = _np_bn_testing(x_0, s_0, b_0, rm_0, rv_0, momentum=m_0)
+
+            # singa api
+            hndl = singa_api.BatchNormHandle(
+                m_0,
+                tensor.Tensor(device=dev, data=x_0).data)
+            y_2_c = singa_api.CpuBatchNormForwardInference(
+                hndl,
+                tensor.Tensor(device=dev, data=x_0).data,
+                tensor.Tensor(device=dev, data=s_0).data,
+                tensor.Tensor(device=dev, data=b_0).data,
+                tensor.Tensor(device=dev, data=rm_0).data,
+                tensor.Tensor(device=dev, data=rv_0).data)
+
+            np.testing.assert_array_almost_equal(
+                y_1, tensor.to_numpy(_cTensor_to_pyTensor(y_2_c)), decimal=5)
+            return
+
+        x_0 = np.array([1, 1, 1, 1, 2, 2, 2, 2, 10, 10, 10, 10, 20, 20, 20, 20],
+                       dtype=np.float32).reshape((2, 2, 2, 2))
+        s_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        b_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rm_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        rv_0 = np.array([1, 10], dtype=np.float32).reshape((1, 2, 1, 1))
+        _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+        c = 10
+        x_0 = np.random.random((10, c, 20, 20)).astype(np.float32)
+        s_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        b_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rm_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        rv_0 = np.random.random((1, c, 1, 1)).astype(np.float32)
+        _run_testing(x_0, s_0, b_0, rm_0, rv_0, m_0=1.0)
+
+    def test_batchnorm_backward_dnnl(self):
+        dev = cpu_dev
+        N = 1
+        C = 3
+        H = 2
+        W = 2
+
+        data_shape = [N, C, H, W]
+        param_shape = [1, C, 1, 1]
+        data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
+
+        x_0 = np.array(data, dtype=np.float32).reshape(data_shape)
+        y_0 = np.array(data, dtype=np.float32).reshape(data_shape)
+        dy_0 = np.array(data, dtype=np.float32).reshape(data_shape)
+        scale_0 = np.array([1] * C, dtype=np.float32).reshape(param_shape)
+        bias_0 = np.array([0] * C, dtype=np.float32).reshape(param_shape)
+
+        mean_0 = x_0.mean(axis=(0, 2, 3), keepdims=True)
+        var_0 = x_0.var(axis=(0, 2, 3), keepdims=True)
+
+        hndl = singa_api.BatchNormHandle(
+            0.1,
+            tensor.Tensor(device=dev, data=x_0).data)
+        (dx_2_c, _, _) = singa_api.CpuBatchNormBackwardx(
+            hndl,
+            tensor.Tensor(device=dev, data=y_0).data,
+            tensor.Tensor(device=dev, data=dy_0).data,
+            tensor.Tensor(device=dev, data=x_0).data,
+            tensor.Tensor(device=dev, data=scale_0).data,
+            tensor.Tensor(device=dev, data=bias_0).data,
+            tensor.Tensor(device=dev, data=mean_0).data,
+            tensor.Tensor(device=dev, data=var_0).data,
+        )
+
+        dx_truth = np.array([[[[-1.0769e-05, -3.5985e-06],
+                               [3.5985e-06, 1.0769e-05]],
+                              [[-1.0769e-05, -3.5985e-06],
+                               [3.5985e-06, 1.0769e-05]],
+                              [[-1.0769e-05, -3.5985e-06],
+                               [3.5985e-06, 1.0769e-05]]]])
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(dx_2_c)), dx_truth)
+
+        return
+
+    def test_softmax_api_dnnl_backend(self):
+        dev = cpu_dev
+
+        def _run_test(org_shape, axis, aft_shape):
+            x_0 = np.random.random(org_shape).astype(np.float32)
+            x_0 = x_0 + 1000
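+            # Shifting the inputs by 1000 makes a naive exp() overflow in
+            # float32, so this doubles as a numerical-stability check: the
+            # reference below subtracts the max before exponentiating.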
+            x0 = tensor.Tensor(device=dev, data=x_0)
+
+            # test with axis
+            y0 = tensor._call_singa_func(singa_api.SoftMax, x0.data, axis)
+
+            # test with numpy
+            x_0 = x_0.reshape(aft_shape)
+            x_0 = x_0 - np.max(x_0)
+            y1 = np.divide(np.exp(x_0),
+                           np.sum(np.exp(x_0), axis=1).reshape(x_0.shape[0],
+                                                               1))  # 2d softmax
+            y1 = y1.reshape(org_shape)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(y0), y1)
+
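+        # Each (org_shape, axis, aft_shape) triple encodes the usual
+        # reduction of n-d softmax to 2-d: flatten to
+        # [prod(shape[:axis]), prod(shape[axis:])], apply a row-wise 2-d
+        # softmax, and reshape back, which is what _run_test verifies.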
+        _run_test([2, 2], 1, [2, 2])
+        _run_test([2, 2], 0, [1, 4])
+        _run_test([2, 2], -1, [2, 2])
+        _run_test([2, 2], -2, [1, 4])
+
+        _run_test([2, 2, 2], 2, [4, 2])
+        _run_test([2, 2, 2], 1, [2, 4])
+        _run_test([2, 2, 2], 0, [1, 8])
+        _run_test([2, 2, 2], -1, [4, 2])
+        _run_test([2, 2, 2], -2, [2, 4])
+        _run_test([2, 2, 2], -3, [1, 8])
+
+        _run_test([2, 2, 2, 2], 3, [8, 2])
+        _run_test([2, 2, 2, 2], 2, [4, 4])
+        _run_test([2, 2, 2, 2], 1, [2, 8])
+        _run_test([2, 2, 2, 2], 0, [1, 16])
+        _run_test([2, 2, 2, 2], -1, [8, 2])
+        _run_test([2, 2, 2, 2], -2, [4, 4])
+        _run_test([2, 2, 2, 2], -3, [2, 8])
+        _run_test([2, 2, 2, 2], -4, [1, 16])
+
+    def test_dnnl_pooling_max(self):
+        dev = cpu_dev
+        N = 1
+        C = 3
+        H = 2
+        W = 2
+
+        data_shape = [N, C, H, W]
+        param_shape = [1, C, 1, 1]
+        data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
+
+        x0 = np.array(data, dtype=np.float32).reshape(data_shape)
+        x0_ct = tensor.Tensor(device=dev, data=x0).data
+
+        dy0 = np.array([1, 2, 3], dtype=np.float32).reshape([1, 3, 1, 1])
+        dy0_ct = tensor.Tensor(device=dev, data=dy0).data
+
+        hndl = singa_api.PoolingHandle(x0_ct, [2, 2], [1, 1], [0, 0], True)
+
+        y0_ct = singa_api.CpuPoolingForward(hndl, x0_ct)
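+        # 2x2 max pooling over each 2x2 channel keeps its largest entry:
+        # max(1..4) = 4, max(5..8) = 8, max(9..12) = 12.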
+        y1 = np.array([[[[4.]], [[8.]], [[12.]]]])
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(y0_ct)), y1)
+
+        dx0_ct = singa_api.CpuPoolingBackward(hndl, dy0_ct, x0_ct, y0_ct)
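+        # Max-pool backward routes each output gradient to the arg-max
+        # position, here the bottom-right element of every channel.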
+        dx1 = np.array([[[[0., 0.], [0., 1.]], [[0., 0.], [0., 2.]],
+                         [[0., 0.], [0., 3.]]]])
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(dx0_ct)), dx1)
+
+    def test_dnnl_pooling_avg(self):
+        dev = cpu_dev
+        N = 1
+        C = 3
+        H = 2
+        W = 2
+
+        data_shape = [N, C, H, W]
+        param_shape = [1, C, 1, 1]
+        data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
+
+        x0 = np.array(data, dtype=np.float32).reshape(data_shape)
+        x0_ct = tensor.Tensor(device=dev, data=x0).data
+
+        dy0 = np.array([1, 2, 3], dtype=np.float32).reshape([1, 3, 1, 1])
+        dy0_ct = tensor.Tensor(device=dev, data=dy0).data
+
+        hndl = singa_api.PoolingHandle(x0_ct, [2, 2], [1, 1], [0, 0], False)
+
+        y0_ct = singa_api.CpuPoolingForward(hndl, x0_ct)
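+        # Average pooling takes the mean of each 2x2 channel, e.g.
+        # (1 + 2 + 3 + 4) / 4 = 2.5; its backward spreads each output
+        # gradient uniformly, giving dy / 4 at every input position.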
+
+        y1 = np.array([[[[2.5000]], [[6.5000]], [[10.5000]]]])
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(y0_ct)), y1)
+        dx0_ct = singa_api.CpuPoolingBackward(hndl, dy0_ct, x0_ct, y0_ct)
+        dx1 = np.array([[[[0.2500, 0.2500], [0.2500, 0.2500]],
+                         [[0.5000, 0.5000], [0.5000, 0.5000]],
+                         [[0.7500, 0.7500], [0.7500, 0.7500]]]])
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(dx0_ct)), dx1)
+
+    def _concat_helper(self, dev):
+        np1 = np.random.random([5, 6, 7, 8]).astype(np.float32)
+        np2 = np.random.random([5, 6, 7, 1]).astype(np.float32)
+        np3 = np.concatenate((np1, np2), axis=3)
+
+        t1 = tensor.Tensor(device=dev, data=np1)
+        t2 = tensor.Tensor(device=dev, data=np2)
+
+        ctensors = singa_api.VecTensor()
+        ctensors.append(t1.data)
+        ctensors.append(t2.data)
+
+        t3_ct = singa_api.ConcatOn(ctensors, 3)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(t3_ct)), np3)
+
+    def test_concat_cpu(self):
+        self._concat_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_concat_gpu(self):
+        self._concat_helper(gpu_dev)
+
+    def _ceil_helper(self, dev):
+
+        np1 = np.random.random([5, 6, 7, 8]).astype(np.float32)
+        np1 = np1 * 10
+        np2 = np.ceil(np1)
+
+        t1 = tensor.Tensor(device=dev, data=np1)
+
+        t2_ct = singa_api.Ceil(t1.data)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(t2_ct)), np2)
+
+    def test_ceil_cpu(self):
+        self._ceil_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_ceil_gpu(self):
+        self._ceil_helper(gpu_dev)
+
+    def _floor_helper(self, dev):
+
+        np1 = np.random.random([5, 6, 7, 8]).astype(np.float32)
+        np1 = np1 * 10
+        np2 = np.floor(np1)
+
+        t1 = tensor.Tensor(device=dev, data=np1)
+
+        t2_ct = singa_api.Floor(t1.data)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(t2_ct)), np2)
+
+    def test_floor_cpu(self):
+        self._floor_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_floor_gpu(self):
+        self._floor_helper(gpu_dev)
+
+    def _as_type_helper(self, dev):
+
+        np1 = np.random.random([3]).astype(np.float32)
+        np1 = np1 * 10 - 5
+        np2 = np1.astype(np.int32)
+        np3 = np2.astype(np.float32)
+
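+        # float -> int casting in NumPy truncates toward zero; the
+        # assertions below expect Tensor.AsType to follow the same
+        # convention.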
+        t1 = tensor.Tensor(device=dev, data=np1)
+
+        t1_ct = t1.data
+
+        self.assertEqual(t1_ct.data_type(), singa_api.kFloat32)
+
+        t1_ct = t1_ct.AsType(singa_api.kInt)
+
+        self.assertEqual(t1_ct.data_type(), singa_api.kInt)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(t1_ct)), np2)
+
+        t1_ct = t1_ct.AsType(singa_api.kFloat32)
+
+        self.assertEqual(t1_ct.data_type(), singa_api.kFloat32)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(_cTensor_to_pyTensor(t1_ct)), np3)
+
+    def test_as_type_cpu(self):
+        self._as_type_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_as_type_gpu(self):
+        self._as_type_helper(gpu_dev)
+
+    def _as_type2_helper(self, dev):
+        shape1 = [1, 2, 3, 4]
+        shape2 = [4, 3, 2, 1]
+        np_int = np.random.randint(0, 10, shape1).astype(np.int32)
+        np_flt = np_int.astype(np.float32)
+
+        t1 = singa_api.Tensor(shape1, dev, singa_api.kInt)
+        t1.CopyIntDataFromHostPtr(np_int.flatten())
+        _ctensor_eq_ndarray(t1, np_int)
+
+        t1 = singa_api.Reshape(t1, shape2)
+        t2 = t1.AsType(singa_api.kFloat32)
+        _ctensor_eq_ndarray(t2, np_flt.reshape(shape2))
+
+        t3 = t2.AsType(singa_api.kInt)
+        _ctensor_eq_ndarray(t3, np_int.reshape(shape2))
+
+        t1 = singa_api.Reshape(t1, shape1)
+        t4 = t1.AsType(singa_api.kFloat32)
+        _ctensor_eq_ndarray(t4, np_flt.reshape(shape1))
+
+    def test_as_type2_cpu(self):
+        self._as_type2_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_as_type2_gpu(self):
+        self._as_type2_helper(gpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_relu(self):
+        self._rnn_helper(0)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_tanh(self):
+        self._rnn_helper(1)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_lstm(self):
+        self._rnn_helper(2)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_gru(self):
+        self._rnn_helper(3)
+
+    def _rnn_helper(self, mode):
+        dev = gpu_dev
+
+        hidden_size = 7
+        seq_length = 5
+        batch_size = 6
+        feature_size = 3
+        directions = 2
+        num_layers = 2
+
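+        # With a bidirectional stack, the cuDNN states carry
+        # num_layers * directions slices and the outputs carry
+        # directions * hidden_size features, which the shape checks
+        # below rely on.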
+        x = tensor.Tensor(shape=(seq_length, batch_size, feature_size),
+                          device=dev).gaussian(0, 1)
+        hx = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                  hidden_size),
+                           device=dev).gaussian(0, 1)
+        cx = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                  hidden_size),
+                           device=dev).gaussian(0, 1)
+
+        rnn_handle = singa_api.CudnnRNNHandle(x.data,
+                                              hidden_size,
+                                              mode,
+                                              num_layers=num_layers,
+                                              dropout=0.1,
+                                              bidirectional=1)
+
+        w = tensor.Tensor(shape=(rnn_handle.weights_size,),
+                          device=dev).gaussian(0, 1)
+
+        (y, hy, cy) = singa_api.GpuRNNForwardTraining(x.data, hx.data, cx.data,
+                                                      w.data, rnn_handle)
+        self.assertEqual(y.shape(),
+                         (seq_length, batch_size, directions * hidden_size))
+        self.assertEqual(hy.shape(), hx.shape)
+        self.assertEqual(cy.shape(), cx.shape)
+
+        (y2, hy2,
+         cy2) = singa_api.GpuRNNForwardInference(x.data, hx.data, cx.data,
+                                                 w.data, rnn_handle)
+        self.assertEqual(y2.shape(),
+                         (seq_length, batch_size, directions * hidden_size))
+        self.assertEqual(hy2.shape(), hx.shape)
+        self.assertEqual(cy2.shape(), cx.shape)
+
+        dy = tensor.Tensor(shape=(seq_length, batch_size,
+                                  directions * hidden_size),
+                           device=dev).gaussian(0, 1)
+        dhy = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                   hidden_size),
+                            device=dev).gaussian(0, 1)
+        dcy = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                   hidden_size),
+                            device=dev).gaussian(0, 1)
+
+        (dx, dhx, dcx) = singa_api.GpuRNNBackwardx(y, dy.data, dhy.data,
+                                                   dcy.data, w.data, hx.data,
+                                                   cx.data, rnn_handle)
+        self.assertEqual(dx.shape(), (seq_length, batch_size, feature_size))
+        self.assertEqual(dhx.shape(), hx.shape)
+        self.assertEqual(dcx.shape(), cx.shape)
+
+        dW = singa_api.GpuRNNBackwardW(x.data, hx.data, y, rnn_handle)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_with_seq_lengths(self):
+        dev = gpu_dev
+
+        # params
+        hidden_size = 7
+        seq_length = 5
+        batch_size = 6
+        feature_size = 3
+        directions = 2
+        num_layers = 2
+
+        # shapes
+        x_s = (seq_length, batch_size, feature_size)
+        y_s = (seq_length, batch_size, hidden_size)
+        states_s = (num_layers * directions, batch_size, hidden_size)
+
+        # tensors
+        x = tensor.Tensor(x_s, dev).gaussian(0, 1)
+        y = tensor.Tensor(y_s, dev).gaussian(0, 1)
+        dy = tensor.Tensor(y_s, dev).gaussian(0, 1)
+        dhy = tensor.Tensor(states_s, dev).gaussian(0, 1)
+        dcy = tensor.Tensor(states_s, dev).gaussian(0, 1)
+        hx = tensor.Tensor(states_s, dev).gaussian(0, 1)
+        cx = tensor.Tensor(states_s, dev).gaussian(0, 1)
+
+        # handle
+        rnn_handle = singa_api.CudnnRNNHandle(x.data, hidden_size, 2)
+        w = tensor.Tensor((rnn_handle.weights_size,), dev).gaussian(0, 1)
+
+        # seq lengths
+        seq_lengths = tensor.from_numpy(
+            np.array([seq_length] * batch_size, dtype=np.int32))
+
+        # operations
+        (dx, dhx, dcx) = singa_api.GpuRNNBackwardxEx(y.data, dy.data, dhy.data,
+                                                     dcy.data, w.data, hx.data,
+                                                     cx.data, seq_lengths.data,
+                                                     rnn_handle)
+
+    def test_round_cpu(self):
+        self._round(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_round_gpu(self):
+        self._round(gpu_dev)
+
+    def _round(self, dev=gpu_dev):
+        x = tensor.Tensor(shape=(3, 4, 5), device=dev).gaussian(0, 1)
+        y = tensor._call_singa_func(singa_api.Round, x.data)
+        np.testing.assert_array_almost_equal(np.round(tensor.to_numpy(x)),
+                                             tensor.to_numpy(y))
+
+    def test_round_even_cpu(self):
+        self._round_even(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_round_even_gpu(self):
+        self._round_even(gpu_dev)
+
+    def _round_even(self, dev=gpu_dev):
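+        # RoundE is round-half-to-even (banker's rounding): exact halves go
+        # to the nearest even integer (0.5 -> 0, 1.5 -> 2, 2.5 -> 2,
+        # -2.5 -> -2), matching the expected values below.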
+        q = np.array([0.1, 0.5, 0.9, 1.2, 1.5,
+                      1.8, 2.3, 2.5, 2.7, -1.1,
+                      -1.5, -1.9, -2.2, -2.5, -2.8]).astype(np.float32)
+        ans = np.array([0., 0., 1., 1., 2.,
+                        2., 2., 2., 3., -1.,
+                        -2., -2., -2., -2., -3.]).astype(np.float32)
+
+        x = tensor.Tensor(shape=q.shape, device=dev)
+        x.copy_from_numpy(q)
+        y = tensor._call_singa_func(singa_api.RoundE, x.data)
+        np.testing.assert_array_almost_equal(ans, tensor.to_numpy(y))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_dist.py b/test/python/test_dist.py
new file mode 100644
index 0000000..76c3404
--- /dev/null
+++ b/test/python/test_dist.py
@@ -0,0 +1,125 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import unittest
+import numpy as np
+from singa import tensor
+from singa import opt
+from singa import device
+from singa import singa_wrap
+
+if singa_wrap.USE_DIST:
+    sgd = opt.SGD(lr=0.1)
+    sgd = opt.DistOpt(sgd)
+    dev = device.create_cuda_gpu_on(sgd.local_rank)
+    param = tensor.Tensor((10, 10), dev, tensor.float32)
+    grad = tensor.Tensor((10, 10), dev, tensor.float32)
+    expected = np.ones((10, 10), dtype=np.float32) * (10 - 0.1)
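+    # Every rank contributes a gradient of 1, so the reduced gradient seen
+    # by each rank should still be 1 (these tests assume an averaging
+    # all-reduce); one SGD step then gives 10 - 0.1 * 1 = 9.9, which is
+    # what `expected` encodes.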
+
+@unittest.skipIf(not singa_wrap.USE_DIST, 'DIST is not enabled')
+class TestDistOptimizer(unittest.TestCase):
+
+    def test_dist_opt_fp32(self):
+        # Test the C++ all reduce operation in fp32
+
+        param.set_value(10)
+        grad.set_value(1)
+
+        sgd.all_reduce(grad.data)
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+    def test_dist_opt_fp32_fused(self):
+        # Test the C++ all reduce operation in fp32
+
+        param.set_value(10)
+        grad.set_value(1)
+
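+        # Two-call pattern: the first call with send=False appears to stage
+        # the tensors for fusion and the second performs the actual
+        # transfer (an assumption read off this test, not a documented
+        # contract); the fp16 variant below follows the same pattern.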
+        sgd.fused_all_reduce([grad.data], send=False)
+        sgd.fused_all_reduce([grad.data])
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+    def test_dist_opt_fp16(self):
+        # Test the C++ all reduce operation in fp16
+
+        param.set_value(10)
+        grad.set_value(1)
+
+        sgd.all_reduce_half(grad.data)
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+    def test_dist_opt_fp16_fused(self):
+        # Test the C++ all reduce operation in fp16
+
+        param.set_value(10)
+        grad.set_value(1)
+
+        sgd.fused_all_reduce_half([grad.data], send=False)
+        sgd.fused_all_reduce_half([grad.data])
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+    def test_dist_opt_spars_value(self):
+        # Test the C++ value based sparsification operation for all reduce
+
+        param.set_value(10)
+        grad.set_value(1)
+
+        sgd.sparsification(grad.data, accumulation=None, spars=0.05, topK=False)
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+    def test_dist_opt_spars_topk(self):
+        # Test the C++ TopK based sparsification operation for all reduce
+
+        param.set_value(10)
+        grad.set_value(1)
+
+        sgd.sparsification(grad.data, accumulation=None, spars=1, topK=True)
+        sgd.wait()
+        sgd.update(param, grad)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(param),
+                                             expected,
+                                             decimal=5)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_initializer.py b/test/python/test_initializer.py
new file mode 100644
index 0000000..cbd082e
--- /dev/null
+++ b/test/python/test_initializer.py
@@ -0,0 +1,123 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from singa import initializer
+from singa import tensor
+from singa import singa_wrap
+
+from cuda_helper import gpu_dev, cpu_dev
+
+import unittest
+import numpy as np
+
+
+class TestInitializer(unittest.TestCase):
+
+    def setUp(self):
+        self.t1 = tensor.Tensor((40, 90))
+        self.t2 = tensor.Tensor((30, 50, 8))
+        self.t3 = tensor.Tensor((30, 50, 4, 8))
+
+    def compute_fan(self, shape):
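+        # Fan computation follows the Kaiming/He convention: 2-d weights
+        # are (fan_in, fan_out); conv-style shapes of rank 3-5 are read as
+        # (out_channels, in_channels, *kernel_dims), so the kernel volume
+        # multiplies both fans.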
+        if len(shape) == 2:
+            fan_in = shape[0]
+            fan_out = shape[1]
+        elif len(shape) in {3, 4, 5}:
+            fan_in = shape[1] * np.prod(shape[2:])
+            fan_out = shape[0] * np.prod(shape[2:])
+        else:
+            fan_in = fan_out = np.sqrt(np.prod(shape))
+
+        return fan_in, fan_out
+
+    def he_uniform(self, dev):
+
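+        # He-uniform samples from U(-limit, limit) with
+        # limit = sqrt(6 / fan_in); the checks below verify the empirical
+        # extremes and mean against that bound, with a 10% tolerance.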
+        def init(shape):
+            fan_in, _ = self.compute_fan(shape)
+            limit = np.sqrt(6 / fan_in)
+            return limit
+
+        self.t1.to_device(dev)
+        initializer.he_uniform(self.t1)
+        np_t1 = tensor.to_numpy(self.t1)
+        limit = init(self.t1.shape)
+        self.assertAlmostEqual(np_t1.max(), limit, delta=limit/10)
+        self.assertAlmostEqual(np_t1.min(), -limit, delta=limit/10)
+        self.assertAlmostEqual(np_t1.mean(), 0, delta=limit/10)
+
+        self.t2.to_device(dev)
+        initializer.he_uniform(self.t2)
+        np_t2 = tensor.to_numpy(self.t2)
+        limit = init(self.t2.shape)
+        self.assertAlmostEqual(np_t2.max(), limit, delta=limit/10)
+        self.assertAlmostEqual(np_t2.min(), -limit, delta=limit/10)
+        self.assertAlmostEqual(np_t2.mean(), 0, delta=limit/10)
+
+        self.t3.to_device(dev)
+        initializer.he_uniform(self.t3)
+        np_t3 = tensor.to_numpy(self.t3)
+        limit = init(self.t3.shape)
+        self.assertAlmostEqual(np_t3.max(), limit, delta=limit/10)
+        self.assertAlmostEqual(np_t3.min(), -limit, delta=limit/10)
+        self.assertAlmostEqual(np_t3.mean(), 0, delta=limit/10)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_he_uniform_gpu(self):
+        self.he_uniform(gpu_dev)
+
+    def test_he_uniform_cpu(self):
+        self.he_uniform(cpu_dev)
+
+    def he_normal(self, dev):
+
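+        # He-normal samples from N(0, stddev^2) with
+        # stddev = sqrt(2 / fan_in); the checks below compare the empirical
+        # mean and std against it.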
+        def init(shape):
+            fan_in, _ = self.compute_fan(shape)
+            stddev = np.sqrt(2 / fan_in)
+            return stddev
+
+        self.t1.to_device(dev)
+        initializer.he_normal(self.t1)
+        np_t1 = tensor.to_numpy(self.t1)
+        stddev = init(self.t1.shape)
+        self.assertAlmostEqual(np_t1.mean(), 0, delta=stddev/10)
+        self.assertAlmostEqual(np_t1.std(), stddev, delta=stddev/10)
+
+        self.t2.to_device(dev)
+        initializer.he_normal(self.t2)
+        np_t2 = tensor.to_numpy(self.t2)
+        stddev = init(self.t2.shape)
+        self.assertAlmostEqual(np_t2.mean(), 0, delta=stddev/10)
+        self.assertAlmostEqual(np_t2.std(), stddev, delta=stddev/10)
+
+        self.t3.to_device(dev)
+        initializer.he_normal(self.t3)
+        np_t3 = tensor.to_numpy(self.t3)
+        stddev = init(self.t3.shape)
+        self.assertAlmostEqual(np_t3.mean(), 0, delta=stddev/10)
+        self.assertAlmostEqual(np_t3.std(), stddev, delta=stddev/10)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_he_normal_gpu(self):
+        self.he_normal(gpu_dev)
+
+    def test_he_normal_cpu(self):
+        self.he_normal(cpu_dev)
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/test/python/test_layer.py b/test/python/test_layer.py
deleted file mode 100644
index c0f19f3..0000000
--- a/test/python/test_layer.py
+++ /dev/null
@@ -1,242 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-import numpy as np
-
-
-from singa import layer
-from singa import tensor
-from singa.proto import model_pb2
-
-
-def _tuple_to_string(t):
-    lt = [str(x) for x in t]
-    return '(' + ', '.join(lt) + ')'
-
-
-class TestPythonLayer(unittest.TestCase):
-
-    def check_shape(self, actual, expect):
-        self.assertEqual(actual, expect, 'shape mismatch, actual shape is %s'
-                         ' exepcted is %s' % (_tuple_to_string(actual),
-                                              _tuple_to_string(expect))
-                         )
-
-    def setUp(self):
-        layer.engine = 'singacpp'
-        self.w = {'init': 'Xavier', 'regularizer': 1e-4}
-        self.b = {'init': 'Constant', 'value': 0}
-        self.sample_shape = None
-
-    def test_conv2D_shape(self):
-        in_sample_shape = (3, 224, 224)
-        conv = layer.Conv2D('conv', 64, 3, 1, W_specs=self.w, b_specs=self.b,
-                            input_sample_shape=in_sample_shape)
-        out_sample_shape = conv.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (64, 224, 224))
-
-    def test_conv2D_forward_backward(self):
-        in_sample_shape = (1, 3, 3)
-        conv = layer.Conv2D('conv', 1, 3, 2, W_specs=self.w, b_specs=self.b,
-                            pad=1, input_sample_shape=in_sample_shape)
-        # cuda = device.create_cuda_gpu()
-        # conv.to_device(cuda)
-        params = conv.param_values()
-
-        raw_x = np.arange(9, dtype=np.float32) + 1
-        x = tensor.from_numpy(raw_x)
-        x.reshape((1, 1, 3, 3))
-        w = np.array([1, 1, 0, 0, 0, -1, 0, 1, 0], dtype=np.float32)
-        params[0].copy_from_numpy(w)
-        params[1].set_value(1.0)
-
-        # x.to_device(cuda)
-        y = conv.forward(model_pb2.kTrain, x)
-        # y.to_host()
-        npy = tensor.to_numpy(y).flatten()
-
-        self.assertAlmostEqual(3.0, npy[0])
-        self.assertAlmostEqual(7.0, npy[1])
-        self.assertAlmostEqual(-3.0, npy[2])
-        self.assertAlmostEqual(12.0, npy[3])
-
-        dy = np.asarray([0.1, 0.2, 0.3, 0.4], dtype=np.float32).reshape(y.shape)
-        grad = tensor.from_numpy(dy)
-        # grad.to_device(cuda)
-        (dx, [dw, db]) = conv.backward(model_pb2.kTrain, grad)
-        dx.to_host()
-        dw.to_host()
-        dx = tensor.to_numpy(dx).flatten()
-        dw = tensor.to_numpy(dw).flatten()
-        dy = dy.flatten()
-        self.assertAlmostEquals(dy[0] * w[4], dx[0])
-        self.assertAlmostEquals(dy[0] * w[5] + dy[1] * w[3], dx[1])
-        self.assertAlmostEquals(dy[1] * w[4], dx[2])
-        self.assertAlmostEquals(dy[0] * w[7] + dy[2] * w[1], dx[3])
-        self.assertAlmostEquals(
-            dy[0] *
-            w[8] +
-            dy[1] *
-            w[6] +
-            dy[2] *
-            w[2] +
-            dy[3] *
-            w[0],
-            dx[4])
-        self.assertAlmostEquals(dy[1] * w[7] + dy[3] * w[1], dx[5])
-        self.assertAlmostEquals(dy[2] * w[4], dx[6])
-        self.assertAlmostEquals(dy[2] * w[5] + dy[3] * w[3], dx[7])
-        self.assertAlmostEquals(dy[3] * w[4], dx[8])
-
-        self.assertAlmostEquals(dy[3] * raw_x[4], dw[0])
-        self.assertAlmostEquals(dy[3] * raw_x[5] + dy[2] * raw_x[3], dw[1])
-        self.assertAlmostEquals(dy[2] * raw_x[4], dw[2])
-        self.assertAlmostEquals(dy[1] * raw_x[1] + dy[3] * raw_x[7], dw[3])
-        self.assertAlmostEquals(
-            dy[0] *
-            raw_x[0] +
-            dy[1] *
-            raw_x[2] +
-            dy[2] *
-            raw_x[6] +
-            dy[3] *
-            raw_x[8],
-            dw[4], 5)
-        self.assertAlmostEquals(dy[0] * raw_x[1] + dy[2] * raw_x[7], dw[5])
-        self.assertAlmostEquals(dy[1] * raw_x[4], dw[6])
-        self.assertAlmostEquals(dy[0] * raw_x[3] + dy[1] * raw_x[5], dw[7])
-        self.assertAlmostEquals(dy[0] * raw_x[4], dw[8])
-
-    def test_conv1D(self):
-        in_sample_shape = (224,)
-        conv = layer.Conv1D('conv', 64, 3, 1, W_specs=self.w, b_specs=self.b,
-                            pad=1, input_sample_shape=in_sample_shape)
-        out_sample_shape = conv.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (64, 224,))
-
-    def test_max_pooling2D(self):
-        in_sample_shape = (64, 224, 224)
-        pooling = layer.MaxPooling2D('pool', 3, 2,
-                                     input_sample_shape=in_sample_shape)
-        out_sample_shape = pooling.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (64, 112, 112))
-
-    def test_max_pooling1D(self):
-        in_sample_shape = (224,)
-        pooling = layer.MaxPooling1D('pool', 3, 2,
-                                     input_sample_shape=in_sample_shape)
-        out_sample_shape = pooling.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (112,))
-
-    def test_avg_pooling2D(self):
-        in_sample_shape = (64, 224, 224)
-        pooling = layer.AvgPooling2D('pool', 3, 2,
-                                     input_sample_shape=in_sample_shape)
-        out_sample_shape = pooling.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (64, 112, 112))
-
-    def test_avg_pooling1D(self):
-        in_sample_shape = (224,)
-        pooling = layer.AvgPooling1D('pool', 3, 2,
-                                     input_sample_shape=in_sample_shape)
-        out_sample_shape = pooling.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (112,))
-
-    def test_batch_normalization(self):
-        in_sample_shape = (3, 224, 224)
-        bn = layer.BatchNormalization('bn', input_sample_shape=in_sample_shape)
-        out_sample_shape = bn.get_output_sample_shape()
-        self.check_shape(out_sample_shape, in_sample_shape)
-
-    def test_lrn(self):
-        in_sample_shape = (3, 224, 224)
-        lrn = layer.LRN('lrn', input_sample_shape=in_sample_shape)
-        out_sample_shape = lrn.get_output_sample_shape()
-        self.check_shape(out_sample_shape, in_sample_shape)
-
-    def test_dense(self):
-        dense = layer.Dense('ip', 32, input_sample_shape=(64,))
-        out_sample_shape = dense.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (32,))
-
-    def test_dropout(self):
-        input_sample_shape = (64, 1, 12)
-        dropout = layer.Dropout('drop', input_sample_shape=input_sample_shape)
-        out_sample_shape = dropout.get_output_sample_shape()
-        self.check_shape(out_sample_shape, input_sample_shape)
-
-    def test_activation(self):
-        input_sample_shape = (64, 1, 12)
-        act = layer.Activation('act', input_sample_shape=input_sample_shape)
-        out_sample_shape = act.get_output_sample_shape()
-        self.check_shape(out_sample_shape, input_sample_shape)
-
-    def test_softmax(self):
-        input_sample_shape = (12,)
-        softmax = layer.Softmax('soft', input_sample_shape=input_sample_shape)
-        out_sample_shape = softmax.get_output_sample_shape()
-        self.check_shape(out_sample_shape, input_sample_shape)
-
-    def test_flatten(self):
-        input_sample_shape = (64, 1, 12)
-        flatten = layer.Flatten('flat', input_sample_shape=input_sample_shape)
-        out_sample_shape = flatten.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (64 * 1 * 12, ))
-
-        flatten = layer.Flatten('flat', axis=2,
-                                input_sample_shape=input_sample_shape)
-        out_sample_shape = flatten.get_output_sample_shape()
-        self.check_shape(out_sample_shape, (12,))
-
-    def test_concat(self):
-        t1 = tensor.Tensor((2, 3))
-        t2 = tensor.Tensor((1, 3))
-        t1.set_value(1)
-        t2.set_value(2)
-        lyr = layer.Concat('concat', 0, [(3,), (3,)])
-        t = lyr.forward(model_pb2.kTrain, [t1, t2])
-        tnp = tensor.to_numpy(t)
-        self.assertEquals(np.sum(tnp), 12)
-        t3 = tensor.Tensor((3, 3))
-        t3.set_value(1.5)
-        grads, _ = lyr.backward(model_pb2.kTrain, [t3])
-        gnp = tensor.to_numpy(grads[0])
-        self.assertEquals(np.sum(gnp), 6 * 1.5)
-
-    def test_slice(self):
-        t = np.zeros((3, 3))
-        t[:, :2] = float(2)
-        t[:, 2] = float(1)
-        lyr = layer.Slice('slice', 1, [2], (3,))
-        out = lyr.forward(model_pb2.kTrain, [tensor.from_numpy(t)])
-        t1 = tensor.to_numpy(out[0])
-        t2 = tensor.to_numpy(out[1])
-        self.assertEquals(np.average(t1), 2)
-        self.assertEquals(np.average(t2), 1)
-        t1 = tensor.Tensor((3, 2))
-        t2 = tensor.Tensor((3, 1))
-        t1.set_value(1)
-        t2.set_value(2)
-        grad,_ = lyr.backward(model_pb2.kTrain, [t1, t2])
-        gnp = tensor.to_numpy(grad)
-        self.assertEquals(np.sum(gnp), 12)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/python/test_loss.py b/test/python/test_loss.py
deleted file mode 100644
index 78356f2..0000000
--- a/test/python/test_loss.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-
-import numpy as np
-
-from singa import loss
-from singa import tensor
-
-
-class TestLoss(unittest.TestCase):
-    def setUp(self):
-        self.x_np = np.asarray([[0.9, 0.2, 0.1],
-                                [0.1, 0.4, 0.5],
-                                [0.2, 0.4, 0.4]],
-                               dtype=np.float32)
-
-        self.y_np = np.asarray([[1, 0, 1],
-                                [0, 1, 1],
-                                [1, 0, 0]],
-                               dtype=np.float32)
-
-        self.x = tensor.from_numpy(self.x_np)
-        self.y = tensor.from_numpy(self.y_np)
-
-    def test_sigmoid_cross_entropy(self):
-        sig = loss.SigmoidCrossEntropy()
-        l1 = sig.forward(True, self.x, self.y)
-        sig.backward()
-        l2 = sig.evaluate(True, self.x, self.y)
-
-        p = 1.0 / (1 + np.exp(-self.x_np))
-        l = - (self.y_np * np.log(p) + (1-self.y_np) * np.log(1-p))
-        self.assertAlmostEqual(l1.l1(), l2)
-        self.assertAlmostEqual(l1.l1(), np.average(l))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/python/test_metric.py b/test/python/test_metric.py
deleted file mode 100644
index e7a51c3..0000000
--- a/test/python/test_metric.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-
-import numpy as np
-
-from singa import metric
-from singa import tensor
-
-
-class TestPrecision(unittest.TestCase):
-    def setUp(self):
-        x_np = np.asarray([[0.7, 0.2, 0.1],
-                           [0.2, 0.4, 0.5],
-                           [0.2,0.4,0.4]],
-                          dtype=np.float32)
-
-        y_np = np.asarray([[1, 0, 1],
-                           [0, 1, 1],
-                           [1, 0, 0]],
-                           dtype=np.int32)
-
-        self.prcs = metric.Precision(top_k=2)
-        self.x = tensor.from_numpy(x_np)
-        self.y = tensor.from_numpy(y_np)
-
-
-    def test_forward(self):
-        p = self.prcs.forward(self.x,self.y)
-        self.assertAlmostEqual(tensor.to_numpy(p)[0], 0.5)
-        self.assertAlmostEqual(tensor.to_numpy(p)[1], 1)
-        self.assertAlmostEqual(tensor.to_numpy(p)[2], 0)
-
-
-    def test_evaluate(self):
-        e = self.prcs.evaluate(self.x,self.y)
-        self.assertAlmostEqual(e, (0.5 + 1 + 0) / 3)
-
-class TestRecall(unittest.TestCase):
-    def setUp(self):
-        x_np = np.asarray([[0.7, 0.2, 0.1],
-                           [0.2, 0.4, 0.5],
-                           [0.2,0.4,0.4]],
-                          dtype=np.float32)
-
-        y_np = np.asarray([[1, 0, 1],
-                           [1, 1, 1],
-                           [1, 0, 0]],
-                           dtype=np.int32)
-
-        self.recall = metric.Recall(top_k=2)
-        self.x = tensor.from_numpy(x_np)
-        self.y = tensor.from_numpy(y_np)
-
-
-    def test_forward(self):
-        r = self.recall.forward(self.x,self.y)
-        self.assertAlmostEqual(tensor.to_numpy(r)[0], 0.5)
-        self.assertAlmostEqual(tensor.to_numpy(r)[1], 2.0 / 3)
-        self.assertAlmostEqual(tensor.to_numpy(r)[2], 0)
-
-
-    def test_evaluate(self):
-        e = self.recall.evaluate(self.x,self.y)
-        self.assertAlmostEqual(e, (0.5 + 2.0 / 3 + 0) / 3)
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/python/test_mkldnn.py b/test/python/test_mkldnn.py
new file mode 100755
index 0000000..2ccadc7
--- /dev/null
+++ b/test/python/test_mkldnn.py
@@ -0,0 +1,190 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import unittest
+from singa import singa_wrap
+
+
+class TestPythonOperation(unittest.TestCase):
+
+    def test_conv2d(self):
+        print("TEST CONV2D FORWARD")
+        x_shape = [2, 1, 3, 3]
+        x = singa_wrap.Tensor(x_shape)
+        x.CopyFloatDataFromHostPtr(
+            [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9])
+
+        W_shape = [1, 1, 3, 3]
+        W = singa_wrap.Tensor(W_shape)
+        W.CopyFloatDataFromHostPtr([1, 1, 0, 0, 0, -1, 0, 1, 0])
+
+        b_shape = [1]
+        b = singa_wrap.Tensor(b_shape)
+        b.CopyFloatDataFromHostPtr([1])
+
+        dy_shape = [2, 1, 2, 2]
+        dy = singa_wrap.Tensor(dy_shape)
+        dy.CopyFloatDataFromHostPtr([0.1, 0.2, 0.3, 0.4, 0.1, 0.2, 0.3, 0.4])
+
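+        # Reading the positional ConvHandle arguments from the values used
+        # here: 3x3 kernel, stride 2, padding 1, one input and one output
+        # channel, bias enabled, group count 1 (an assumption inferred
+        # from this call, not from a documented signature).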
+        handle = singa_wrap.ConvHandle(x, (3, 3), (2, 2), (1, 1), 1, 1, True, 1)
+        y = singa_wrap.CpuConvForward(x, W, b, handle)
+
+        self.assertListEqual([2, 1, 2, 2], list(y.shape()))
+
+        _y = y.GetFloatValue(int(y.Size()))
+        self.assertAlmostEqual(3.0, _y[0])
+        self.assertAlmostEqual(7.0, _y[1])
+        self.assertAlmostEqual(-3.0, _y[2])
+        self.assertAlmostEqual(12.0, _y[3])
+        self.assertAlmostEqual(3.0, _y[4])
+        self.assertAlmostEqual(7.0, _y[5])
+        self.assertAlmostEqual(-3.0, _y[6])
+        self.assertAlmostEqual(12.0, _y[7])
+
+        print("TEST CONV2D DATA BACKWARD")
+
+        dx = singa_wrap.CpuConvBackwardx(dy, W, x, handle)
+        self.assertListEqual([2, 1, 3, 3], list(dx.shape()))
+
+        _dx = dx.GetFloatValue(int(dx.Size()))
+        self.assertAlmostEqual(0.0, _dx[0])
+        self.assertAlmostEqual(-0.1, _dx[1])
+        self.assertAlmostEqual(0.0, _dx[2])
+        self.assertAlmostEqual(0.4, _dx[3])
+        self.assertAlmostEqual(0.4, _dx[4])
+        self.assertAlmostEqual(0.6, _dx[5])
+        self.assertAlmostEqual(0.0, _dx[6])
+        self.assertAlmostEqual(-0.3, _dx[7])
+
+        print("TEST CONV2D WEIGHT BACKWARD")
+        dW = singa_wrap.CpuConvBackwardW(dy, x, W, handle)
+        self.assertListEqual([1, 1, 3, 3], list(dW.shape()))
+
+        _dW = dW.GetFloatValue(int(dW.Size()))
+        self.assertAlmostEqual(4.0, _dW[0], places=5)
+        self.assertAlmostEqual(7.2, _dW[1], places=5)
+        self.assertAlmostEqual(3.0, _dW[2], places=5)
+        self.assertAlmostEqual(7.2, _dW[3], places=5)
+        self.assertAlmostEqual(12.8, _dW[4], places=5)
+        self.assertAlmostEqual(5.2, _dW[5], places=5)
+        self.assertAlmostEqual(2.0, _dW[6], places=5)
+        self.assertAlmostEqual(3.2, _dW[7], places=5)
+        self.assertAlmostEqual(1.0, _dW[8], places=5)
+
+        print("TEST CONV2D DATA BACKWARD")
+        db = singa_wrap.CpuConvBackwardb(dy, b, handle)
+        self.assertEqual(1, dW.shape()[0])
+
+        _db = db.GetFloatValue(int(db.Size()))
+        self.assertAlmostEqual(2.0, _db[0], places=5)
+
+    def test_pooling(self):
+        x_shape = [2, 1, 3, 3]
+        x = singa_wrap.Tensor(x_shape)
+        x.CopyFloatDataFromHostPtr(
+            [1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9])
+
+        y_shape = [2, 1, 2, 2]
+        dy = singa_wrap.Tensor(y_shape)
+        dy.CopyFloatDataFromHostPtr([0.1, 0.2, 0.3, 0.4, 0.1, 0.2, 0.3, 0.4])
+
+        k_dim = [2, 2]
+        s_dim = [1, 1]
+        p_dim = [0, 0]
+
+        # max pooling
+        handle = singa_wrap.PoolingHandle(x, k_dim, s_dim, p_dim, True)
+        y = singa_wrap.CpuPoolingForward(handle, x)
+        self.assertListEqual([2, 1, 2, 2], list(y.shape()))
+        dx = singa_wrap.CpuPoolingBackward(handle, dy, x, y)
+        self.assertListEqual([2, 1, 3, 3], list(dx.shape()))
+
+        # avg pooling
+        handle = singa_wrap.PoolingHandle(x, k_dim, s_dim, p_dim, False)
+        y = singa_wrap.CpuPoolingForward(handle, x)
+        self.assertListEqual([2, 1, 2, 2], list(y.shape()))
+        dx = singa_wrap.CpuPoolingBackward(handle, dy, x, y)
+        self.assertListEqual([2, 1, 3, 3], list(dx.shape()))
+
+    def test_batch_norm(self):
+        x_shape = [2, 2]
+        x = singa_wrap.Tensor(x_shape)
+        x.CopyFloatDataFromHostPtr([1, 2, 3, 4])
+
+        dy_shape = [2, 2]
+        dy = singa_wrap.Tensor(dy_shape)
+        dy.CopyFloatDataFromHostPtr([4, 3, 2, 1])
+
+        scale_shape = [2]
+        scale = singa_wrap.Tensor(scale_shape)
+        scale.CopyFloatDataFromHostPtr([1, 1])
+
+        bias_shape = [2]
+        bias = singa_wrap.Tensor(bias_shape)
+        bias.CopyFloatDataFromHostPtr([0, 0])
+
+        mean_shape = [2]
+        mean = singa_wrap.Tensor(mean_shape)
+        mean.CopyFloatDataFromHostPtr([1, 2])
+        var = singa_wrap.Tensor(mean_shape)
+        var.CopyFloatDataFromHostPtr([1, 2])
+
+        handle = singa_wrap.BatchNormHandle(0.9, x)
+
+        # 2D Forward Inference
+        y = singa_wrap.CpuBatchNormForwardInference(handle, x, scale, bias,
+                                                    mean, var)
+        self.assertListEqual([2, 2], list(y.shape()))
+
+        # 2D Forward Training
+        (y, mean_updated, var_updated) = singa_wrap.CpuBatchNormForwardTraining(
+            handle, x, scale, bias, mean, var)
+        self.assertListEqual([2, 2], list(y.shape()))
+        self.assertListEqual([2], list(mean_updated.shape()))
+        self.assertListEqual([2], list(var_updated.shape()))
+
+        # 2D Backward dx
+        (dx, dscale,
+         dbias) = singa_wrap.CpuBatchNormBackwardx(handle, y, dy, x, scale,
+                                                   bias, mean_updated,
+                                                   var_updated)
+        self.assertListEqual([2, 2], list(dx.shape()))
+        self.assertListEqual([2], list(dscale.shape()))
+        self.assertListEqual([2], list(dbias.shape()))
+
+        # 4D Forward Inference
+
+        x2_shape = [1, 2, 4, 4]
+        x2 = singa_wrap.Tensor(x2_shape)
+        x2.CopyFloatDataFromHostPtr([
+            0.0736655, 0.0459045, 0.0779517, 0.0771059, 0.0586862, 0.0561263,
+            0.0708457, 0.0977273, 0.0405025, -0.170897, 0.0208982, 0.136865,
+            -0.0367905, -0.0618205, -0.0103908, -0.0522777, -0.122161,
+            -0.025427, -0.0718576, -0.185941, 0.0166533, 0.178679, -0.0576606,
+            -0.137817, 0.150676, 0.153442, -0.0929899, -0.148675, -0.112459,
+            -0.106284, -0.103074, -0.0668811
+        ])
+
+        handle = singa_wrap.BatchNormHandle(0.9, x2)  # handle shaped by the 4-d input
+        y2 = singa_wrap.CpuBatchNormForwardInference(handle, x2, scale, bias,
+                                                     mean, var)
+        self.assertListEqual([1, 2, 4, 4], list(y2.shape()))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_model.py b/test/python/test_model.py
new file mode 100644
index 0000000..aaf1023
--- /dev/null
+++ b/test/python/test_model.py
@@ -0,0 +1,499 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# =============================================================================
+
+from __future__ import division
+
+import os
+import math
+import unittest
+import numpy as np
+
+from singa import singa_wrap as singa_api
+from singa.tensor import Tensor
+from singa import autograd
+from singa import tensor
+from singa import device
+from singa import layer
+from singa import model
+from singa import opt
+
+from cuda_helper import gpu_dev, cpu_dev
+
+
+class DoubleLinear(layer.Layer):
+
+    def __init__(self, a, b, c):
+        super(DoubleLinear, self).__init__()
+        self.l1 = layer.Linear(a, b)
+        self.l2 = layer.Linear(b, c)
+
+    def forward(self, x):
+        y = self.l1(x)
+        y = self.l2(y)
+        return y
+
+
+class MyModel(model.Model):
+
+    def __init__(self):
+        super(MyModel, self).__init__()
+        self.conv1 = layer.Conv2d(2, 2)
+        self.bn1 = layer.BatchNorm2d(2)
+        self.doublelinear1 = DoubleLinear(2, 4, 2)
+        self.optimizer = opt.SGD()
+
+    def forward(self, x):
+        y = self.conv1(x)
+        y = self.bn1(y)
+        y = autograd.reshape(y, (y.shape[0], -1))
+        y = self.doublelinear1(y)
+        return y
+
+    def train_one_batch(self, x, y):
+        y_ = self.forward(x)
+        l = self.loss(y_, y)
+        self.optim(l)
+        return y_, l
+
+    def loss(self, out, ty):
+        return autograd.softmax_cross_entropy(out, ty)
+
+    def optim(self, loss):
+        self.optimizer(loss)
+
+
+class MLP(model.Model):
+
+    def __init__(self, data_size=10, perceptron_size=100, num_classes=10):
+        super(MLP, self).__init__()
+        self.num_classes = num_classes
+        self.dimension = 2
+
+        self.relu = layer.ReLU()
+        self.linear1 = layer.Linear(perceptron_size)
+        self.linear2 = layer.Linear(num_classes)
+        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
+
+    def forward(self, inputs):
+        y = self.linear1(inputs)
+        y = self.relu(y)
+        y = self.linear2(y)
+        return y
+
+    def train_one_batch(self, x, y):
+        out = self.forward(x)
+        loss = self.softmax_cross_entropy(out, y)
+        self.optimizer(loss)
+        return out, loss
+
+    def set_optimizer(self, optimizer):
+        self.optimizer = optimizer
+
+
+# LSTM testing
+class LSTMModel3(model.Model):
+
+    def __init__(self, hidden_size):
+        super(LSTMModel3, self).__init__()
+        self.lstm = layer.CudnnRNN(
+            hidden_size=hidden_size,
+            batch_first=True,
+            #    return_sequences=True,
+            use_mask=True)
+        self.l1 = layer.Linear(2)
+        self.optimizer = opt.SGD(0.1)
+
+    def forward(self, x, seq_lengths):
+        y = self.lstm(x, seq_lengths=seq_lengths)
+        y = autograd.reshape(y, (y.shape[0], -1))
+        y = self.l1(y)
+        return y
+
+
+class LSTMModel2(model.Model):
+
+    def __init__(self, hidden_size, bidirectional, num_layers):
+        super(LSTMModel2, self).__init__()
+        self.lstm = layer.CudnnRNN(hidden_size=hidden_size,
+                                   num_layers=num_layers,
+                                   bidirectional=bidirectional,
+                                   return_sequences=False,
+                                   rnn_mode='lstm',
+                                   batch_first=True)
+        self.optimizer = opt.SGD(0.1)
+
+    def forward(self, x):
+        return self.lstm(x)
+
+
+class LSTMModel(model.Model):
+
+    def __init__(self, hidden_size, seq_length, batch_size, bidirectional,
+                 num_layers, return_sequences, rnn_mode, batch_first):
+        super(LSTMModel, self).__init__()
+        self.hidden_size = hidden_size
+        self.seq_length = seq_length
+        self.return_sequences = return_sequences
+
+        self.lstm = layer.CudnnRNN(hidden_size=hidden_size,
+                                   num_layers=num_layers,
+                                   bidirectional=bidirectional,
+                                   return_sequences=return_sequences,
+                                   rnn_mode=rnn_mode,
+                                   batch_first=batch_first)
+        self.optimizer = opt.SGD(0.1)
+
+    def forward(self, x):
+        y = self.lstm(x)
+        if self.return_sequences:
+            y = autograd.reshape(y, (-1, self.seq_length * self.hidden_size))
+        return y
+
+
+class TestModelMethods(unittest.TestCase):
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_rnn_with_seq_lengths(self, dev=gpu_dev):
+        bs = 2
+        seq_length = 3
+        hidden_size = 2
+        em_size = 2
+        x_np = np.array([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
+                         [[0.3, 0.3], [0.4, 0.4], [0.0,
+                                                   0.0]]]).astype(np.float32)
+        y_np = np.array([[0.4, 0.4], [0.5, 0.5]]).astype(np.float32)
+        seq_lengths_np = np.array([3, 2]).astype(np.int32)
+
+        x = tensor.from_numpy(x_np)
+        x.to_device(dev)
+        y = tensor.from_numpy(y_np)
+        y.to_device(dev)
+        seq_lengths = tensor.from_numpy(seq_lengths_np)
+
+        m = LSTMModel3(hidden_size)
+        m.compile([x, seq_lengths],
+                  is_train=True,
+                  use_graph=False,
+                  sequential=False)
+        m.train()
+        for i in range(10):
+            out = m.forward(x, seq_lengths)
+            loss = autograd.mse_loss(out, y)
+            print("train l:", tensor.to_numpy(loss))
+            m.optimizer(loss)
+        m.eval()
+        out = m.forward(x, seq_lengths)
+        loss = autograd.mse_loss(out, y)
+        print(" eval l:", tensor.to_numpy(loss))
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_lstm_model(self, dev=gpu_dev):
+        hidden_size = 3
+        seq_length = 2
+        batch_size = 4
+        feature_size = 3
+        bidirectional = False
+        directions = 2 if bidirectional else 1
+        num_layers = 2
+        out_size = hidden_size
+        return_sequences = False
+        batch_first = True
+        rnn_mode = "lstm"
+
+        # manual test case
+        x_data = np.array([[[0, 0, 1], [0, 1, 0]], [[0, 1, 0], [1, 0, 0]],
+                           [[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 0, 1]]],
+                          dtype=np.float32).reshape(batch_size, seq_length,
+                                                    feature_size)  # bs, seq, fea
+        if return_sequences:
+            y_data = np.array(
+                [[[0, 1, 0], [1, 0, 0]], [[1, 0, 0], [0, 0, 1]],
+                 [[0, 1, 0], [1, 0, 0]], [[0, 0, 1], [0, 1, 0]]],
+                dtype=np.float32).reshape(batch_size, seq_length,
+                                          hidden_size)  # bs, hidden
+            y_data.reshape(batch_size, -1)
+        else:
+            y_data = np.array([[1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0]],
+                              dtype=np.float32).reshape(
+                                  batch_size, hidden_size)  # bs, hidden
+
+        x = tensor.Tensor(device=dev, data=x_data)
+        y_t = tensor.Tensor(device=dev, data=y_data)
+
+        m = LSTMModel(hidden_size, seq_length, batch_size, bidirectional,
+                      num_layers, return_sequences, rnn_mode, batch_first)
+        m.compile([x], is_train=True, use_graph=False, sequential=False)
+
+        m.train()
+        for i in range(1000):
+            y = m.forward(x)
+            assert y.shape == y_t.shape
+            loss = autograd.softmax_cross_entropy(y, y_t)
+            if i % 100 == 0:
+                print("loss", loss)
+            m.optimizer(loss)
+
+        m.eval()
+        y = m.forward(x)
+        loss = autograd.softmax_cross_entropy(y, y_t)
+        print("eval loss", loss)
+
+
+class TestModelSaveMethods(unittest.TestCase):
+
+    def _save_states_load_states_helper(self, dev, graph_flag="False"):
+        x_shape = (2, 2, 2, 2)
+        x = tensor.PlaceHolder(x_shape, device=dev)
+
+        m = MyModel()
+        m.compile([x], is_train=True, use_graph=graph_flag, sequential=False)
+
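+        # The state keys mirror the attribute paths of the model
+        # (e.g. "doublelinear1.l1.W"), which is how the compiled model
+        # appears to name its parameters and buffers here.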
+        states = {
+            "conv1.W":
+                tensor.Tensor((2, 2, 2, 2), device=dev).set_value(0.1),
+            "conv1.b":
+                tensor.Tensor((2,), device=dev).set_value(0.2),
+            "bn1.scale":
+                tensor.Tensor((2,), device=dev).set_value(0.3),
+            "bn1.bias":
+                tensor.Tensor((2,), device=dev).set_value(0.4),
+            "bn1.running_mean":
+                tensor.Tensor((2,), device=dev).set_value(0.5),
+            "bn1.running_var":
+                tensor.Tensor((2,), device=dev).set_value(0.6),
+            "doublelinear1.l1.W":
+                tensor.Tensor((2, 4), device=dev).set_value(0.7),
+            "doublelinear1.l1.b":
+                tensor.Tensor((4,), device=dev).set_value(0.8),
+            "doublelinear1.l2.W":
+                tensor.Tensor((4, 2), device=dev).set_value(0.9),
+            "doublelinear1.l2.b":
+                tensor.Tensor((2,), device=dev).set_value(1.0)
+        }
+
+        m.set_states(states)
+        states2 = m.get_states()
+        for k in states2.keys():
+            np.testing.assert_array_almost_equal(tensor.to_numpy(states[k]),
+                                                 tensor.to_numpy(states2[k]))
+
+        opt_state1 = tensor.Tensor((2, 10), device=dev).gaussian(1, 0.1)
+        opt_state2 = tensor.Tensor((20, 2), device=dev).gaussian(0.1, 1)
+        aux = {"opt1": opt_state1, "opt2": opt_state2}
+
+        # save snapshot1
+        zip_fp = 'snapshot1_%s.zip' % self._testMethodName
+        if os.path.exists(zip_fp):
+            os.remove(zip_fp)
+        m.save_states(zip_fp, aux)
+
+        # do some training, states changes
+        cx = tensor.Tensor(x_shape, device=dev).gaussian(1, 1)
+        cy = tensor.Tensor((2, 2), device=dev).gaussian(1, 1)
+        mini_batch_size = 10
+        for i in range(mini_batch_size):
+            m.train_one_batch(cx, cy)
+
+        # restore snapshot
+        aux2 = m.load_states(zip_fp)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(aux2["opt1"]),
+                                             tensor.to_numpy(aux["opt1"]))
+        np.testing.assert_array_almost_equal(tensor.to_numpy(aux2["opt2"]),
+                                             tensor.to_numpy(aux["opt2"]))
+
+        # snapshot states
+        states3 = m.get_states()
+        for k in states3.keys():
+            np.testing.assert_array_almost_equal(tensor.to_numpy(states[k]),
+                                                 tensor.to_numpy(states3[k]))
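+        # Contract exercised above: save_states(path, aux) writes both the
+        # model states and the aux dict into a single zip file, while
+        # load_states(path) restores the states in place and returns aux.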
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_save_states_load_states_gpu(self):
+        self._save_states_load_states_helper(gpu_dev, graph_flag=False)
+        self._save_states_load_states_helper(gpu_dev, graph_flag=True)
+
+    def test_save_states_load_states_cpu(self):
+        self._save_states_load_states_helper(cpu_dev, graph_flag=False)
+        self._save_states_load_states_helper(cpu_dev, graph_flag=True)
+
+
+class TestPythonModule(unittest.TestCase):
+
+    def to_categorical(self, y, num_classes):
+        y = np.array(y, dtype="int")
+        n = y.shape[0]
+        categorical = np.zeros((n, num_classes))
+        categorical[np.arange(n), y] = 1
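+        # e.g. y=[1, 0], num_classes=2 -> [[0., 1.], [1., 0.]]
+        # (row i is the one-hot encoding of label y[i])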
+        return categorical
+
+    def generate_data(self, dev, num=400):
+        f = lambda x: (5 * x + 1)
+
+        x = np.random.uniform(-1, 1, num)
+        y = f(x) + 2 * np.random.randn(len(x))
+
+        self.label = np.asarray([5 * a + 1 > b for (a, b) in zip(x, y)])
+        self.data = np.array([[a, b] for (a, b) in zip(x, y)], dtype=np.float32)
+        self.label = self.to_categorical(self.label, 2).astype(np.float32)
+
+        self.inputs = Tensor(data=self.data, device=dev)
+        self.target = Tensor(data=self.label, device=dev)
+
+    def get_params(self, model):
+        params = model.get_params()
+        self.w0 = params['linear1.W']
+        self.b0 = params['linear1.b']
+        self.w1 = params['linear2.W']
+        self.b1 = params['linear2.b']
+
+        self.W0 = tensor.to_numpy(self.w0)
+        self.B0 = tensor.to_numpy(self.b0)
+        self.W1 = tensor.to_numpy(self.w1)
+        self.B1 = tensor.to_numpy(self.b1)
+
+    def numpy_forward(self, inputs):
+        self.x1 = np.matmul(inputs, self.W0)
+        self.x2 = np.add(self.x1, self.B0)
+        self.x3 = np.maximum(self.x2, 0)
+        self.x4 = np.matmul(self.x3, self.W1)
+        self.x5 = np.add(self.x4, self.B1)
+        return self.x5
+
+    def numpy_train_one_batch(self, inputs, y):
+        # forward propagation
+        out = self.numpy_forward(inputs)
+
+        # softmax cross entropy loss
+        exp_out = np.exp(out - np.max(out, axis=-1, keepdims=True))
+        self.softmax = exp_out / np.sum(exp_out, axis=-1, keepdims=True)
+        loss = -np.sum(y * np.log(self.softmax)) / self.softmax.shape[0]
+
+        # optimize
+        # calculate gradients
+        label_sum = np.sum(self.label, axis=-1)
+        dloss = self.softmax - self.label / label_sum.reshape(
+            label_sum.shape[0], 1)
+        dloss /= self.softmax.shape[0]
+
+        dx5 = dloss
+        db1 = np.sum(dloss, 0)
+
+        dx4 = np.matmul(dx5, self.W1.T)
+        dw1 = np.matmul(self.x3.T, dx5)
+
+        dx3 = dx4 * (self.x3 > 0)
+
+        dx2 = dx3
+        db0 = np.sum(dx3, 0)
+
+        dx1 = np.matmul(dx2, self.W0.T)
+        dw0 = np.matmul(self.data.T, dx2)
+
+        # update all the params
+        self.W0 -= 0.05 * dw0
+        self.B0 -= 0.05 * db0
+        self.W1 -= 0.05 * dw1
+        self.B1 -= 0.05 * db1
+        return out, loss
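+    # Derivation behind `dloss` above (one-hot labels make label_sum == 1):
+    # for logits z, targets y and L = -sum(y * log(softmax(z))) / N, the
+    # gradient is dL/dz = (softmax(z) - y) / N; the remaining steps are the
+    # chain rule through each matmul/add/ReLU of numpy_forward.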
+
+    def setUp(self):
+        self.sgd = opt.SGD(lr=0.05)
+
+        cpu_dev.ResetGraph()
+        if singa_api.USE_CUDA:
+            gpu_dev.ResetGraph()
+
+    def tearDown(self):
+        cpu_dev.ResetGraph()
+        if singa_api.USE_CUDA:
+            gpu_dev.ResetGraph()
+
+    def _forward_helper(self, dev, is_train, use_graph, sequential):
+        self.generate_data(dev)
+        model = MLP(self.sgd)
+        model.compile([self.inputs],
+                      is_train=is_train,
+                      use_graph=use_graph,
+                      sequential=sequential)
+
+        self.get_params(model)
+
+        out = model(self.inputs)
+        np_out = self.numpy_forward(self.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(out), np_out)
+
+    def _train_one_batch_helper(self, dev, is_train, use_graph, sequential):
+        self.generate_data(dev)
+        model = MLP(num_classes=2)
+        model.set_optimizer(self.sgd)
+        model.compile([self.inputs],
+                      is_train=is_train,
+                      use_graph=use_graph,
+                      sequential=sequential)
+
+        self.get_params(model)
+
+        out, loss = model(self.inputs, self.target)
+        np_out, np_loss = self.numpy_train_one_batch(self.data, self.label)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(out), np_out)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(loss), np_loss)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(self.w0), self.W0)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(self.b0), self.B0)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(self.w1), self.W1)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(self.b1), self.B1)
+
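+    # The tests below sweep (is_train, use_graph, sequential) so the same
+    # numpy reference checks eager execution, the buffered computational
+    # graph, and serial graph scheduling on both CPU and (if built) GPU.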
+    def test_forward_cpu(self):
+        self._forward_helper(cpu_dev, False, True, False)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_forward_gpu(self):
+        self._forward_helper(gpu_dev, False, True, False)
+
+    def test_evaluate_cpu(self):
+        self._forward_helper(cpu_dev, False, False, False)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_evaluate_gpu(self):
+        self._forward_helper(gpu_dev, False, False, False)
+
+    def test_train_one_batch_cpu(self):
+        self._train_one_batch_helper(cpu_dev, True, True, False)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_train_one_batch_gpu(self):
+        self._train_one_batch_helper(gpu_dev, True, True, False)
+
+    def test_without_graph_cpu(self):
+        self._train_one_batch_helper(cpu_dev, True, False, False)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_without_graph_gpu(self):
+        self._train_one_batch_helper(gpu_dev, True, False, False)
+
+    def test_run_in_serial_cpu(self):
+        self._train_one_batch_helper(cpu_dev, True, True, True)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_run_in_serial_gpu(self):
+        self._train_one_batch_helper(gpu_dev, True, True, True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_net.py b/test/python/test_net.py
deleted file mode 100644
index b19d868..0000000
--- a/test/python/test_net.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-import math
-import numpy as np
-
-from singa import net
-from singa import layer
-from singa import tensor
-from singa import loss
-
-layer.engine = 'singacpp'
-# net.verbose = True
-
-
-class TestFeedForwardNet(unittest.TestCase):
-
-    def test_single_input_output(self):
-        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
-        ffn.add(layer.Activation('relu1', input_sample_shape=(2,)))
-        ffn.add(layer.Activation('relu2'))
-        x = np.array([[-1, 1], [1, 1], [-1, -2]], dtype=np.float32)
-        x = tensor.from_numpy(x)
-        y = tensor.Tensor((3,))
-        y.set_value(0)
-        out, _ = ffn.evaluate(x, y)
-        self.assertAlmostEqual(out * 3,
-                               - math.log(1.0/(1+math.exp(1))) -
-                               math.log(0.5) - math.log(0.5),
-                               5)
-
-    def test_mult_inputs(self):
-        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
-        s1 = ffn.add(layer.Activation('relu1', input_sample_shape=(2,)), [])
-        s2 = ffn.add(layer.Activation('relu2', input_sample_shape=(2,)), [])
-        ffn.add(layer.Merge('merge', input_sample_shape=(2,)), [s1, s2])
-        x1 = tensor.Tensor((2, 2))
-        x1.set_value(1.1)
-        x2 = tensor.Tensor((2, 2))
-        x2.set_value(0.9)
-        out = ffn.forward(False, {'relu1': x1, 'relu2': x2})
-        out = tensor.to_numpy(out)
-        self.assertAlmostEqual(np.average(out), 2)
-
-    def test_mult_outputs(self):
-        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
-        s1 = ffn.add(layer.Activation('relu1', input_sample_shape=(2,)), [])
-        s2 = ffn.add(layer.Activation('relu2', input_sample_shape=(2,)), [])
-        ffn.add(layer.Merge('merge', input_sample_shape=(2,)), [s1, s2])
-        split = ffn.add(layer.Split('split', 2))
-        ffn.add(layer.Dummy('split1'), split)
-        ffn.add(layer.Dummy('split2'), split)
-        x1 = tensor.Tensor((2, 2))
-        x1.set_value(1.1)
-        x2 = tensor.Tensor((2, 2))
-        x2.set_value(0.9)
-        out = ffn.forward(False, {'relu1': x1, 'relu2': x2})
-        out = tensor.to_numpy(out['split1'])
-        self.assertAlmostEqual(np.average(out), 2)
-
-    def test_save_load(self):
-        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
-        ffn.add(layer.Conv2D('conv', 4, 3, input_sample_shape=(3, 12, 12)))
-        ffn.add(layer.Flatten('flat'))
-        # ffn.add(layer.BatchNorm('bn'))
-        ffn.add(layer.Dense('dense', num_output=4))
-        for pname, pval in zip(ffn.param_names(), ffn.param_values()):
-            pval.set_value(0.1)
-        ffn.save('test_snaphost')
-        ffn.save('test_pickle', use_pickle=True)
-
-        ffn.load('test_snaphost')
-        ffn.load('test_pickle', use_pickle=True)
-
-    def test_train_one_batch(self):
-        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
-        ffn.add(layer.Conv2D('conv', 4, 3, input_sample_shape=(3, 12, 12)))
-        ffn.add(layer.Flatten('flat'))
-        ffn.add(layer.Dense('dense', num_output=4))
-        for pname, pval in zip(ffn.param_names(), ffn.param_values()):
-            pval.set_value(0.1)
-        x = tensor.Tensor((4, 3, 12, 12))
-        x.gaussian(0, 0.01)
-        y = np.asarray([[1, 0, 0],
-                        [0, 0, 1],
-                        [0, 0, 1],
-                        [0, 1, 0]], dtype=np.int32)
-        y = tensor.from_numpy(y)
-        o = ffn.forward(True, x)
-        ffn.loss.forward(True, o, y)
-        g = ffn.loss.backward()
-        for pname, pvalue, pgrad, _ in ffn.backward(g):
-            self.assertEqual(len(pvalue), len(pgrad))
-            for p, g in zip(pvalue, pgrad):
-                self.assertEqual(p.size(), g.size())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/python/test_onnx.py b/test/python/test_onnx.py
new file mode 100644
index 0000000..504fbcf
--- /dev/null
+++ b/test/python/test_onnx.py
@@ -0,0 +1,2127 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import unittest
+from builtins import str
+
+from singa import singa_wrap as singa_api
+from singa import tensor
+from singa import singa_wrap as singa
+from singa import autograd
+from singa import layer
+from singa import sonnx
+from singa import opt
+
+import onnx
+from onnx import (defs, checker, helper, numpy_helper, mapping, ModelProto,
+                  GraphProto, NodeProto, AttributeProto, TensorProto,
+                  OperatorSetIdProto)
+from onnx.helper import make_tensor, make_tensor_value_info, make_node, make_graph
+
+from cuda_helper import gpu_dev, cpu_dev
+
+import numpy as np
+
+autograd.training = True
+
+
+def _tuple_to_string(t):
+    lt = [str(x) for x in t]
+    return '(' + ', '.join(lt) + ')'
+
+
+class TestPythonOnnx(unittest.TestCase):
+
+    def check_shape(self, actual, expect):
+        self.assertEqual(
+            actual, expect, 'shape mismatch, actual shape is %s'
+            ' expected is %s' %
+            (_tuple_to_string(actual), _tuple_to_string(expect)))
+
+    def _conv2d_helper(self, dev):
+        x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        x.gaussian(0.0, 1.0)
+        y = layer.Conv2d(1, 2)(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
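+        # Every helper in this class follows the same round trip: run the
+        # SINGA op, export with sonnx.to_onnx, re-import with sonnx.prepare,
+        # then compare the two outputs element-wise to 5 decimal places.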
+
+    def test_conv2d_cpu(self):
+        self._conv2d_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_conv2d_gpu(self):
+        self._conv2d_helper(gpu_dev)
+
+    def _relu_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.array([0.8, 0, 3.3, 0, 0, 0.5]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.ReLU()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_relu_cpu(self):
+        self._relu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_relu_gpu(self):
+        self._relu_helper(gpu_dev)
+
+    def _avg_pool_helper(self, dev):
+        x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        x.gaussian(0.0, 1.0)
+        y = layer.AvgPool2d(3, 1, 2)(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_avg_pool_cpu(self):
+        self._avg_pool_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_avg_pool_gpu(self):
+        self._avg_pool_helper(gpu_dev)
+
+    def _softmax_helper(self, dev):
+        X = np.array([[-1, 0, 1]]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.SoftMax()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_softmax_cpu(self):
+        self._softmax_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_softmax_gpu(self):
+        self._softmax_helper(gpu_dev)
+
+    def _sigmoid_helper(self, dev):
+        X = np.array([[-1, 0, 1]]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.Sigmoid()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_sigmoid_cpu(self):
+        self._sigmoid_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_sigmoid_gpu(self):
+        self._sigmoid_helper(gpu_dev)
+
+    def _add_helper(self, dev):
+        X1 = np.random.randn(3, 4, 5).astype(np.float32)
+        X2 = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x1 = tensor.from_numpy(X1)
+        x2 = tensor.from_numpy(X2)
+        x1.to_device(dev)
+        x2.to_device(dev)
+        y = autograd.Add()(x1, x2)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x1, x2], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x1, x2])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_add_cpu(self):
+        self._add_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_add_gpu(self):
+        self._add_helper(gpu_dev)
+
+    def _concat_helper(self, dev):
+        X1 = np.random.randn(3, 4, 5).astype(np.float32)
+        X2 = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x1 = tensor.from_numpy(X1)
+        x2 = tensor.from_numpy(X2)
+        x1.to_device(dev)
+        x2.to_device(dev)
+        y = autograd.Concat()(x1, x2)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x1, x2], [y])
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x1, x2])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_concat_cpu(self):
+        self._concat_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_concat_gpu(self):
+        self._concat_helper(gpu_dev)
+
+    def _matmul_helper(self, dev):
+        X1 = np.random.randn(4, 5).astype(np.float32)
+        X2 = np.random.randn(5, 4).astype(np.float32)
+
+        x1 = tensor.from_numpy(X1)
+        x2 = tensor.from_numpy(X2)
+        x1.to_device(dev)
+        x2.to_device(dev)
+
+        y = autograd.Matmul()(x1, x2)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x1, x2], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x1, x2])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_matmul_cpu(self):
+        self._matmul_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_matmul_gpu(self):
+        self._matmul_helper(gpu_dev)
+
+    def _max_pool_helper(self, dev):
+        x = tensor.Tensor(shape=(2, 3, 4, 4), device=dev)
+        x.gaussian(0.0, 1.0)
+        y = layer.MaxPool2d(2, 2, 0)(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_max_pool_cpu(self):
+        self._max_pool_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_max_pool_gpu(self):
+        self._max_pool_helper(gpu_dev)
+
+    def _batch_norm_helper(self, dev):
+        x = np.array([[[[-1, 0, 1]], [[2, 3, 4]]]]).astype(np.float32)
+        s = np.array([1.0, 1.5]).astype(np.float32)
+        bias = np.array([0, 1]).astype(np.float32)
+        mean = np.array([0, 3]).astype(np.float32)
+        var = np.array([1, 1.5]).astype(np.float32)
+
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+        s = tensor.from_numpy(s)
+        s.to_device(dev)
+
+        bias = tensor.from_numpy(bias)
+        mean = tensor.from_numpy(mean)
+        var = tensor.from_numpy(var)
+
+        bias.to_device(dev)
+        mean.to_device(dev)
+        var.to_device(dev)
+        if dev == cpu_dev:
+            handle = singa.BatchNormHandle(0.9, x.data)
+        else:
+            handle = singa.CudnnBatchNormHandle(0.9, x.data)
+        y = autograd.batchnorm_2d(handle, x, s, bias, mean, var)
+
+        # frontend
+        model = sonnx.to_onnx([x, s, bias, mean, var], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x, s, bias])  # mean and var have been stored in the graph
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_batch_norm_cpu(self):
+        self._batch_norm_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_batch_norm_gpu(self):
+        self._batch_norm_helper(gpu_dev)
+
+    def _linear_helper(self, dev):
+        x = tensor.Tensor(shape=(2, 20), device=dev)
+        x.gaussian(0.0, 1.0)
+        x1 = x.clone()
+        y = layer.Linear(20, 1, bias=False)(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_linear_cpu(self):
+        self._linear_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_linear_gpu(self):
+        self._linear_helper(gpu_dev)
+
+    def _gemm_helper(self, dev):
+        A = np.random.randn(2, 3).astype(np.float32)
+        B = np.random.rand(3, 4).astype(np.float32)
+        C = np.random.rand(2, 4).astype(np.float32)
+        alpha = 1.0
+        beta = 2.0
+
+        tA = tensor.from_numpy(A)
+        tB = tensor.from_numpy(B)
+        tC = tensor.from_numpy(C)
+        tA.to_device(dev)
+        tB.to_device(dev)
+        tC.to_device(dev)
+        y = autograd.Gemm(alpha, beta, 0, 0)(tA, tB, tC)[0]
+
+        # frontend
+        model = sonnx.to_onnx([tA, tB, tC], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([tA, tB, tC])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_gemm_cpu(self):
+        self._gemm_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_gemm_gpu(self):
+        self._gemm_helper(gpu_dev)
+
+    def _reshape_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+        y = autograd.Reshape((2, 3))(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x, (2, 3)], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])  # shape has been stored in the graph
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_reshape_cpu(self):
+        self._reshape_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_reshape_gpu(self):
+        self._reshape_helper(gpu_dev)
+
+    def _sum_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x1 = tensor.from_numpy(x1)
+        y = autograd.Sum()(x, x1)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_sum_cpu(self):
+        self._sum_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_sum_gpu(self):
+        self._sum_helper(gpu_dev)
+
+    def _Cos_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Cos()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Cos_cpu(self):
+        self._Cos_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Cos_gpu(self):
+        self._Cos_helper(gpu_dev)
+
+    def _Cosh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Cosh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Cosh_cpu(self):
+        self._Cosh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Cosh_gpu(self):
+        self._Cosh_helper(gpu_dev)
+
+    def _Sin_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Sin()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Sin_cpu(self):
+        self._Sin_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Sin_gpu(self):
+        self._Sin_helper(gpu_dev)
+
+    def _Sinh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Sinh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Sinh_cpu(self):
+        self._Sinh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Sinh_gpu(self):
+        self._Sinh_helper(gpu_dev)
+
+    def _Tan_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Tan()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Tan_cpu(self):
+        self._Tan_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Tan_gpu(self):
+        self._Tan_helper(gpu_dev)
+
+    def _Tanh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Tanh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Tanh_cpu(self):
+        self._Tanh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Tanh_gpu(self):
+        self._Tanh_helper(gpu_dev)
+
+    def _Acos_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Acos()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Acos_cpu(self):
+        self._Acos_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Acos_gpu(self):
+        self._Acos_helper(gpu_dev)
+
+    def _Acosh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Acosh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Acosh_cpu(self):
+        self._Acosh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Acosh_gpu(self):
+        self._Acosh_helper(gpu_dev)
+
+    def _Asin_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Asin()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Asin_cpu(self):
+        self._Asin_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Asin_gpu(self):
+        self._Asin_helper(gpu_dev)
+
+    def _Asinh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Asinh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Asinh_cpu(self):
+        self._Asinh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Asinh_gpu(self):
+        self._Asinh_helper(gpu_dev)
+
+    def _Atan_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Atan()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Atan_cpu(self):
+        self._Atan_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Atan_gpu(self):
+        self._Atan_helper(gpu_dev)
+
+    def _Atanh_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.Atanh()(x)[0]
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Atanh_cpu(self):
+        self._Atanh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Atanh_gpu(self):
+        self._Atanh_helper(gpu_dev)
+
+    def _SeLu_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        #y = gamma * (alpha * e^x - alpha) for x <= 0, y = gamma * x for x > 0
+        a = 1.67326
+        g = 1.0507
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.selu(x, a, g)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_SeLu_cpu(self):
+        self._SeLu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_SeLu_gpu(self):
+        self._SeLu_helper(gpu_dev)
+
+    def _ELu_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        # y = alpha * (e^x - 1) for x <= 0, y = x for x > 0
+        a = 1.
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.elu(x, a)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_ELu_cpu(self):
+        self._ELu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_ELu_gpu(self):
+        self._ELu_helper(gpu_dev)
+
+    # No Op registered for equal with domain_version of 11
+    # def _Equal_helper(self, dev):
+    #     x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+    #                    0.9]).reshape(3, 2).astype(np.float32)
+    #     x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+    #                                                      2).astype(np.float32)
+    #     x0 = tensor.from_numpy(x0)
+    #     x1 = tensor.from_numpy(x1)
+    #     x0.to_device(dev)
+    #     x1.to_device(dev)
+
+    #     y = autograd.equal(x0, x1)
+
+    #     # frontend
+    #     model = sonnx.to_onnx([x0, x1], [y])
+    #     # print('The model is:\n{}'.format(model))
+
+    #     # backend
+    #     sg_ir = sonnx.prepare(model, device=dev)
+    #     sg_ir.is_graph = True
+    #     y_t = sg_ir.run([x0, x1])
+
+    #     np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+    #                                          tensor.to_numpy(y_t[0]),
+    #                                          decimal=5)
+
+    # def test_Equal_cpu(self):
+    #     self._Equal_helper(cpu_dev)
+
+    # @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    # def test_Equal_gpu(self):
+    #     self._Equal_helper(gpu_dev)
+
+    def _Less_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.less(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Less_cpu(self):
+        self._Less_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Less_gpu(self):
+        self._Less_helper(gpu_dev)
+
+    def _Sign_helper(self, dev):
+        x = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        y = autograd.sign(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Sign_cpu(self):
+        self._Sign_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Sign_gpu(self):
+        self._Sign_helper(gpu_dev)
+
+    def _Div_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.div(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Div_cpu(self):
+        self._Div_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Div_gpu(self):
+        self._Div_helper(gpu_dev)
+
+    def _Sub_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.sub(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Sub_cpu(self):
+        self._Sub_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Sub_gpu(self):
+        self._Sub_helper(gpu_dev)
+
+    def _Sqrt_helper(self, dev):
+        X = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.sqrt(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev, init_inputs=X)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Sqrt_cpu(self):
+        self._Sqrt_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Sqrt_gpu(self):
+        self._Sqrt_helper(gpu_dev)
+
+    def _Greater_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.greater(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_Greater_cpu(self):
+        self._Greater_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_Greater_gpu(self):
+        self._Greater_helper(gpu_dev)
+
+    def _HardSigmoid_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        a = 0.2
+        g = 0.5
+
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+        y = autograd.hardsigmoid(x, a, g)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_HardSigmoid_cpu(self):
+        self._HardSigmoid_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_HardSigmoid_gpu(self):
+        self._HardSigmoid_helper(gpu_dev)
+
+    def _identity_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.identity(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_identity_cpu(self):
+        self._identity_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_identity_gpu(self):
+        self._identity_helper(gpu_dev)
+
+    def _softplus_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.softplus(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_softplus_cpu(self):
+        self._softplus_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_softplus_gpu(self):
+        self._softplus_helper(gpu_dev)
+
+    def _softsign_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.softsign(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_softsign_cpu(self):
+        self._softsign_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_softsign_gpu(self):
+        self._softsign_helper(gpu_dev)
+
+    def _mean_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.mean(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_mean_cpu(self):
+        self._mean_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_mean_gpu(self):
+        self._mean_helper(gpu_dev)
+
+    def _pow_helper(self, dev):
+        x0 = np.array([7, 5, 0.2, 0.1, 0.3, 4]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([-1.0, 2.0, -1.0, -2.1, 1.0,
+                       -2.0]).reshape(3, 2).astype(np.float32)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.pow(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_pow_cpu(self):
+        self._pow_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_pow_gpu(self):
+        self._pow_helper(gpu_dev)
+
+    def _clip_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+
+        x = tensor.from_numpy(x)
+        min = -0.5
+        max = 0.5
+        x.to_device(dev)
+
+        y = autograd.clip(x, min, max)
+
+        # frontend
+        model = sonnx.to_onnx([x, min, max], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])  # min and max have been stored in the model
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_clip_cpu(self):
+        self._clip_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_clip_gpu(self):
+        self._clip_helper(gpu_dev)
+
+    def _prelu_helper(self, dev):
+        x = np.array([0.1, -1.0, -0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        slope = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                          9.0]).reshape(3, 2).astype(np.float32)
+
+        x = tensor.from_numpy(x)
+        slope = tensor.from_numpy(slope)
+        x.to_device(dev)
+        slope.to_device(dev)
+
+        y = autograd.prelu(x, slope)
+
+        # frontend
+        model = sonnx.to_onnx([x, slope], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x, slope])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_prelu_cpu(self):
+        self._prelu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_prelu_gpu(self):
+        self._prelu_helper(gpu_dev)
+
+    def _mul_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x1 = tensor.from_numpy(x1)
+        x.to_device(dev)
+        x1.to_device(dev)
+        y = autograd.mul(x, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_mul_cpu(self):
+        self._mul_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_mul_gpu(self):
+        self._mul_helper(gpu_dev)
+
+    def _transpose_helper(self, dev):
+        x = np.random.randn(3, 2, 1)
+
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.transpose(x, (1, 2, 0))
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_transpose_cpu(self):
+        self._transpose_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_transpose_gpu(self):
+        self._transpose_helper(gpu_dev)
+
+    def _max_helper(self, dev):
+        X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
+                       0.2]).reshape(3, 2).astype(np.float32)
+        X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
+                       2.0]).reshape(3, 2).astype(np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.max(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_max_cpu(self):
+        self._max_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_max_gpu(self):
+        self._max_helper(gpu_dev)
+
+    def _min_helper(self, dev):
+        X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
+                       0.2]).reshape(3, 2).astype(np.float32)
+        X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
+                       2.0]).reshape(3, 2).astype(np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd.min(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_min_cpu(self):
+        self._min_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_min_gpu(self):
+        self._min_helper(gpu_dev)
+
+    def _shape_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd.shape(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_shape_cpu(self):
+        self._shape_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_shape_gpu(self):
+        self._shape_helper(gpu_dev)
+
+    def _and_helper(self, dev):
+        x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0.5, 0.9]).reshape(3,
+                                                           2).astype(np.float32)
+
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd._and(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_and_cpu(self):
+        self._and_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_and_gpu(self):
+        self._and_helper(gpu_dev)
+
+    def _or_helper(self, dev):
+        x0 = np.array([1.0, 1.0, 2.0, -3.0, 0,
+                       -7.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([-1.0, 0, 2.0, 4.0, 0,
+                       -7.0]).reshape(3, 2).astype(np.float32)
+
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd._or(x0, x1)
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_or_cpu(self):
+        self._or_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_or_gpu(self):
+        self._or_helper(gpu_dev)
+
+    def _xor_helper(self, dev):
+        x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        y = autograd._xor(x0, x1)
+
+        # frontend
+        model = sonnx.to_onnx([x0, x1], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x0, x1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_xor_cpu(self):
+        self._xor_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_xor_gpu(self):
+        self._xor_helper(gpu_dev)
+
+    def _not_helper(self, dev):
+        x = np.array([1.0, -1.0, 0, -0.1, 0,
+                      -7.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        y = autograd._not(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_not_cpu(self):
+        self._not_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_not_gpu(self):
+        self._not_helper(gpu_dev)
+
+    def _negative_helper(self, dev):
+        X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
+                      -2.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        y = autograd.negative(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_negative_cpu(self):
+        self._negative_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_negative_gpu(self):
+        self._negative_helper(gpu_dev)
+
+    def _reciprocal_helper(self, dev):
+        X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
+                      -2.0]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        y = autograd.reciprocal(x)
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_reciprocal_cpu(self):
+        self._reciprocal_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_reciprocal_gpu(self):
+        self._reciprocal_helper(gpu_dev)
+
+    def _constantOfShape_helper(self, dev):
+        X = np.array([4, 3, 2]).astype(np.int64)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        y = autograd.constant_of_shape(x, 1.)
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev, init_inputs=[X])
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_constantOfShape_cpu(self):
+        self._constantOfShape_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_constantOfShape_gpu(self):
+        self._constantOfShape_helper(gpu_dev)
+
+    def _dropout_helper(self, dev):
+        X = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.dropout(x, 0.5)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
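+        # dropout is random, so only the output shape is checked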
+        self.check_shape(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_dropout_cpu(self):
+        self._dropout_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_dropout_gpu(self):
+        self._dropout_helper(gpu_dev)
+
+    def _reduceSum_helper(self, dev):
+        X = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.reduce_sum(x, None, 1)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_reduceSum_cpu(self):
+        self._reduceSum_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_reduceSum_gpu(self):
+        self._reduceSum_helper(gpu_dev)
+
+    def _reduceMean_helper(self, dev):
+        X = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.reduce_mean(x, None, 1)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_reduceMean_cpu(self):
+        self._reduceMean_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_reduceMean_gpu(self):
+        self._reduceMean_helper(gpu_dev)
+
+    def _squeeze_helper(self, dev):
+        X = np.random.randn(3, 1, 2, 1, 1)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.squeeze(x, [1, 3, 4])
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_squeeze_cpu(self):
+        self._squeeze_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_squeeze_gpu(self):
+        self._squeeze_helper(gpu_dev)
+
+    def _unsqueeze_helper(self, dev):
+        X = np.random.randn(3, 2)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.unsqueeze(x, [2, 4, 5])
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_unsqueeze_cpu(self):
+        self._unsqueeze_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_unsqueeze_gpu(self):
+        self._unsqueeze_helper(gpu_dev)
+
+    def _slice_helper(self, dev):
+        X = np.random.randn(20, 10, 5).astype(np.float32)
+        starts, ends, axes, steps = [0, 0], [3, 10], [0, 1], [1, 1]
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.slice(x, starts, ends, axes, steps)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_slice_cpu(self):
+        self._slice_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_slice_gpu(self):
+        self._slice_helper(gpu_dev)
+
+    # # todo, we don't support multiple outputs yet
+    # def _split_helper(self, dev):
+    #       X = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float32)
+    #       x = tensor.from_numpy(X)
+    #       x.to_device(dev)
+    #       y = autograd.split(x, 0, (2, 4))
+
+    #       # frontend
+    #       model = sonnx.to_onnx([x], [*y])
+    #       # print('The model is:\n{}'.format(model))
+
+    #       # backend
+    #       sg_ir = sonnx.prepare(model, device=dev)
+    #       sg_ir.is_graph = True
+    #       y_t = sg_ir.run([x])[0]
+
+    #       np.testing.assert_array_almost_equal(tensor.to_numpy(y).shape, tensor.to_numpy(y_t).shape)
+
+    # def test_split_cpu(self):
+    #     self._split_helper(cpu_dev)
+
+    # @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    # def test_split_gpu(self):
+    #     self._split_helper(gpu_dev)
+
+    def _gather_helper(self, dev):
+        X = np.array([0, 1, 2]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.gather(x, 0, [0, 1, 3])
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_gather_cpu(self):
+        self._gather_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_gather_gpu(self):
+        self._gather_helper(gpu_dev)
+
+    def _tile_helper(self, dev):
+        X = np.array([0, 1, 2]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.tile(x, [2, 2])
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_tile_cpu(self):
+        self._tile_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_tile_gpu(self):
+        self._tile_helper(gpu_dev)
+
+    def _nonzero_helper(self, dev):
+        X = np.array([[1, 0], [1, 1]]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.nonzero(x)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_nonzero_cpu(self):
+        self._nonzero_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_nonzero_gpu(self):
+        self._nonzero_helper(gpu_dev)
+
+    def _cast_helper(self, dev):
+        X = np.array([[1, 0], [1, 1]]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        y = autograd.cast(x, tensor.int32)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_cast_cpu(self):
+        self._cast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_cast_gpu(self):
+        self._cast_helper(gpu_dev)
+
+    def _onehot_helper(self, dev):
+        axisValue = 1
+        on_value = 3
+        off_value = 1
+        output_type = np.float32
+        indices = np.array([[1, 9], [2, 4]], dtype=np.float32)
+        depth = np.array([10], dtype=np.float32)
+        values = np.array([off_value, on_value], dtype=output_type)
+
+        x = tensor.from_numpy(indices)
+        x.to_device(dev)
+        y = autograd.onehot(axisValue, x, depth, values)
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        y_t = sg_ir.run([x])
+
+        self.check_shape(
+            tensor.to_numpy(y).shape,
+            tensor.to_numpy(y_t[0]).shape)
+
+    def test_onehot_cpu(self):
+        self._onehot_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_onehot_gpu(self):
+        self._onehot_helper(gpu_dev)
+
+    def _inference_helper(self, dev):
+        x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        x.gaussian(0.0, 1.0)
+
+        conv1 = layer.Conv2d(1, 2)
+        conv2 = layer.Conv2d(1, 2)
+
+        class MyLayer(layer.Layer):
+
+            def __init__(self, conv1, conv2):
+                super(MyLayer, self).__init__()
+                self.conv1 = conv1
+                self.conv2 = conv2
+
+            def forward(self, inputs):
+                x = self.conv1(inputs)
+                x = self.conv2(x)
+                return x
+
+        y = MyLayer(conv1, conv2)(x)
+        x1 = conv1(x)
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
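+        # last_layers=-1 stops before the final layer, so the output
+        # should match x1 = conv1(x) computed above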
+        y_t = sg_ir.run([x], last_layers=-1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(x1),
+                                             tensor.to_numpy(y_t[0]),
+                                             decimal=5)
+
+    def test_inference_cpu(self):
+        self._inference_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_inference_gpu(self):
+        self._inference_helper(gpu_dev)
+
+    def _retraining_helper(self, dev):
+        # forward
+        x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        x.gaussian(0.0, 1.0)
+
+        class MyLayer(layer.Layer):
+
+            def __init__(self):
+                super(MyLayer, self).__init__()
+                self.conv1 = layer.Conv2d(1, 2)
+                self.conv2 = layer.Conv2d(1, 2)
+
+            def forward(self, inputs):
+                x = self.conv1(inputs)
+                x = self.conv2(x)
+                x = autograd.flatten(x)
+                return x
+
+        y = MyLayer()(x)
+        y_t = tensor.Tensor(shape=(2, 1), device=dev)
+        y_t.gaussian(0.0, 1.0)
+        loss = autograd.MeanSquareError(y_t)(y)[0]
+        # backward
+        sgd = opt.SGD(lr=0.01)
+        for p, gp in autograd.backward(loss):
+            sgd.apply(p.name, p, gp)
+        sgd.step()
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        # forward
+        y_o = sg_ir.run([x])[0]
+        # backward
+        loss = autograd.MeanSquareError(y_t)(y_o)[0]
+        sgd = opt.SGD(lr=0.01)
+        for p, gp in autograd.backward(loss):
+            sgd.apply(p.name, p, gp)
+        sgd.step()
+
+    def test_retraining_cpu(self):
+        self._retraining_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_retraining_gpu(self):
+        self._retraining_helper(gpu_dev)
+
+    def _transfer_learning_helper(self, dev):
+        # forward
+        x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        x.gaussian(0.0, 1.0)
+
+        class MyLayer(layer.Layer):
+
+            def __init__(self):
+                super(MyLayer, self).__init__()
+                self.conv1 = layer.Conv2d(1, 2)
+
+            def forward(self, inputs):
+                x = self.conv1(inputs)
+                x = autograd.flatten(x)
+                return x
+
+        y = MyLayer()(x)
+        y_t = tensor.Tensor(shape=(2, 4), device=dev)
+        y_t.gaussian(0.0, 1.0)
+        loss = autograd.MeanSquareError(y_t)(y)[0]
+        # backward
+        sgd = opt.SGD(lr=0.01)
+        for p, gp in autograd.backward(loss):
+            sgd.apply(p.name, p, gp)
+        sgd.step()
+
+        # frontend
+        model = sonnx.to_onnx([x], [y])
+        # print('The model is:\n{}'.format(model))
+
+        # backend
+        sg_ir = sonnx.prepare(model, device=dev)
+        sg_ir.is_graph = True
+        # forward
+        class MyLayer2(layer.Layer):
+
+            def __init__(self, sg_ir):
+                super(MyLayer2, self).__init__()
+                self.sg_ir = sg_ir
+                for node, operator in self.sg_ir.layers:
+                    self.__dict__[node.name] = operator
+                self.conv2 = layer.Conv2d(1, 2)
+
+            def forward(self, inputs):
+                x = self.sg_ir.run([inputs], last_layers=-1)[0]
+                x = self.conv2(x)
+                x = autograd.flatten(x)
+                return x
+
+        y_o = MyLayer2(sg_ir)(x)
+        # backward
+        y_ot = tensor.Tensor(shape=(2, 1), device=dev)
+        y_ot.gaussian(0.0, 1.0)
+        loss = autograd.MeanSquareError(y_ot)(y_o)[0]
+        sgd = opt.SGD(lr=0.01)
+        for p, gp in autograd.backward(loss):
+            sgd.apply(p.name, p, gp)
+        sgd.step()
+
+    def test_transfer_learning_cpu(self):
+        self._transfer_learning_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_transfer_learning_gpu(self):
+        self._transfer_learning_helper(gpu_dev)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_onnx_backend.py b/test/python/test_onnx_backend.py
new file mode 100644
index 0000000..0e7bb65
--- /dev/null
+++ b/test/python/test_onnx_backend.py
@@ -0,0 +1,131 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from singa import tensor
+from singa import singa_wrap as singa
+from singa import autograd
+from singa import sonnx
+from singa import opt
+
+import os
+
+import unittest
+import onnx.backend.test
+
+# This is a pytest magic variable to load extra plugins
+pytest_plugins = ('onnx.backend.test.report',)
+
+backend_test = onnx.backend.test.BackendTest(sonnx.SingaBackend, __name__)
+
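+# onnx.backend.test ships a standard per-operator test suite; the include and
+# exclude patterns below select the subset that the Singa backend supports.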
+_include_nodes_patterns = {
+    # operators whose ONNX test-case names do not follow the default test_<name> pattern
+    'ReduceSum': r'(test_reduce_sum)',
+    'ReduceMean': r'(test_reduce_mean)',
+    'BatchNormalization': r'(test_batchnorm)',
+    'ScatterElements': r'(test_scatter_elements)',
+    'Conv': r'(test_basic_conv_|test_conv_with_|test_Conv2d)',
+    'MaxPool': r'(test_maxpool_2d)',
+    'AveragePool': r'(test_averagepool_2d)',
+}
+
+_exclude_nodes_patterns = [
+    # unsupported data types
+    r'(uint)',  # does not support uint
+    r'(scalar)',  # does not support scalar
+    r'(STRING)',  # does not support string
+    # unsupported features
+    r'(test_split_zero_size_splits|test_slice_start_out_of_bounds)',  # does not support empty tensors
+    r'(test_batchnorm_epsilon)',  # does not support epsilon
+    r'(dilations)',  # does not support dilations
+    r'(test_maxpool_2d_ceil|test_averagepool_2d_ceil)',  # does not support ceil mode for max or avg pool
+    r'(count_include_pad)',  # pooling does not support count_include_pad
+    # interrupt some include patterns
+    r'(test_matmulinteger)',  # interrupt matmulinteger
+    r'(test_less_equal)',  # interrupt less
+    r'(test_greater_equal)',  # interrupt greater
+    r'(test_negative_log)',  # interrupt negative
+    r'(test_softmax_cross_entropy)',  # interrupt softmax
+    r'(test_reduce_sum_square)',  # interrupt reduce sum square
+    r'(test_log_softmax)',  # interrupt log softmax
+    r'(test_maxunpool)',  # interrupt max unpool
+    r'(test_gather_elements)',  # interrupt gather elements
+    r'(test_logsoftmax)',  # interrupt log softmax
+    r'(test_gathernd)',  # interrupt gather nd
+    r'(test_maxpool_with_argmax)', # interrupt maxpool_with_argmax
+    # todo, some special error
+    r'test_transpose',  # the test cases are wrong
+    r'test_conv_with_strides_and_asymmetric_padding', # the test cases are wrong
+    r'(test_gemm_default_single_elem_vector_bias_cuda)',  # status == CURAND_STATUS_SUCCESS
+    r'(test_equal_bcast_cuda|test_equal_cuda)',  # Unknown combination of data type kInt and language kCuda
+    r'(test_maxpool_1d|test_averagepool_1d|test_maxpool_3d|test_averagepool_3d)',  # Check failed: idx < shape_.size() (3 vs. 3)
+    r'test_depthtospace.*cuda', # cuda cannot support transpose with more than 4 dims
+]
+
+_include_real_patterns = []  # todo
+
+_include_simple_patterns = []  # todo
+
+_include_pytorch_converted_patterns = []  # todo
+
+_include_pytorch_operator_patterns = []  # todo
+
+# add supported operators into include patterns
+for name in sonnx.SingaBackend._rename_operators.keys():
+    if name not in _include_nodes_patterns:
+        backend_test.include(r'(test_{})'.format(name.lower()))
+    else:
+        # todo, need to fix the conv2d
+        if name == 'Conv':
+            continue
+        backend_test.include(_include_nodes_patterns[name])
+
+# exclude the unsupported operators
+for pattern in _exclude_nodes_patterns:
+    backend_test.exclude(pattern)
+
+# exclude the cuda cases
+if not singa.USE_CUDA:
+    backend_test.exclude(r'(cuda)')
+
+OnnxBackendNodeModelTest = backend_test.enable_report().test_cases[
+    'OnnxBackendNodeModelTest']
+
+# disable and enable training before and after test cases
+def setUp(self):
+    # print("\nIn method", self._testMethodName)
+    autograd.training = False
+
+def tearDown(self):
+    autograd.training = True
+
+OnnxBackendNodeModelTest.setUp = setUp
+OnnxBackendNodeModelTest.tearDown = tearDown
+
+# import all test cases at global scope to make them visible to the unittest loader
+# print(backend_test.enable_report().test_cases)
+test_cases = {
+    'OnnxBackendNodeModelTest': OnnxBackendNodeModelTest
+}
+
+globals().update(test_cases)
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/test/python/test_operation.py b/test/python/test_operation.py
new file mode 100755
index 0000000..54d2513
--- /dev/null
+++ b/test/python/test_operation.py
@@ -0,0 +1,3656 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+
+import unittest
+from builtins import str
+
+from singa import tensor
+from singa import singa_wrap as singa
+from singa import autograd
+from singa import layer
+from singa import singa_wrap
+from cuda_helper import gpu_dev, cpu_dev
+
+import numpy as np
+
+autograd.training = True
+
+CTensor = singa.Tensor
+
+dy = CTensor([2, 1, 2, 2])
+singa.Gaussian(0.0, 1.0, dy)
+
+
+def _tuple_to_string(t):
+    lt = [str(x) for x in t]
+    return '(' + ', '.join(lt) + ')'
+
+
+def axis_helper(y_shape, x_shape):
+    """
+    check along which axes x has been broadcast
+    Args:
+        y_shape: the shape of the result
+        x_shape: the shape of x
+    Return:
+        a tuple of the broadcast axes
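+    Example:
+        axis_helper((2, 3, 4), (3, 1)) -> (0, 2)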
+    """
+    res = []
+    j = len(x_shape) - 1
+    for i in range(len(y_shape) - 1, -1, -1):
+        if j < 0 or x_shape[j] != y_shape[i]:
+            res.append(i)
+        j -= 1
+    return tuple(res[::-1])
+
+
+def prepare_inputs_targets_for_rnn_test(dev):
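+    # three time steps of batch-2 inputs (feature size 3), matching batch-2
+    # targets (size 2), and a zero initial hidden state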
+    x_0 = np.random.random((2, 3)).astype(np.float32)
+    x_1 = np.random.random((2, 3)).astype(np.float32)
+    x_2 = np.random.random((2, 3)).astype(np.float32)
+
+    h_0 = np.zeros((2, 2)).astype(np.float32)
+
+    t_0 = np.random.random((2, 2)).astype(np.float32)
+    t_1 = np.random.random((2, 2)).astype(np.float32)
+    t_2 = np.random.random((2, 2)).astype(np.float32)
+
+    x0 = tensor.Tensor(device=dev, data=x_0)
+    x1 = tensor.Tensor(device=dev, data=x_1)
+    x2 = tensor.Tensor(device=dev, data=x_2)
+
+    h0 = tensor.Tensor(device=dev, data=h_0)
+
+    t0 = tensor.Tensor(device=dev, data=t_0)
+    t1 = tensor.Tensor(device=dev, data=t_1)
+    t2 = tensor.Tensor(device=dev, data=t_2)
+
+    inputs = [x0, x1, x2]
+    targets = [t0, t1, t2]
+    return inputs, targets, h0
+
+
+class TestPythonOperation(unittest.TestCase):
+
+    def check_shape(self, actual, expect):
+        self.assertEqual(
+            actual, expect, 'shape mismatch, actual shape is %s'
+            ' expected is %s' %
+            (_tuple_to_string(actual), _tuple_to_string(expect)))
+
+    def _greater_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        y = np.greater(x0, x1)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        result = autograd.greater(x0, x1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_Greater_cpu(self):
+        self._greater_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Greater_gpu(self):
+        self._greater_helper(gpu_dev)
+
+    def _conv2d_helper(self, dev):
+        # (out_channels, kernel_size)
+        conv_0 = layer.Conv2d(1, 2)
+        conv_without_bias_0 = layer.Conv2d(1, 2, bias=False)
+
+        cpu_input_tensor = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        cpu_input_tensor.gaussian(0.0, 1.0)
+
+        dy = tensor.Tensor(shape=(2, 1, 2, 2), device=dev)
+        dy.gaussian(0.0, 1.0)
+
+        y = conv_0(cpu_input_tensor)  # PyTensor
+        dx, dW, db = y.creator.backward(dy.data)  # CTensor
+
+        self.check_shape(y.shape, (2, 1, 2, 2))
+        self.check_shape(dx.shape(), (2, 3, 3, 3))
+        self.check_shape(dW.shape(), (1, 3, 2, 2))
+        self.check_shape(db.shape(), (1,))
+
+        # forward without bias
+        y_without_bias = conv_without_bias_0(cpu_input_tensor)
+        self.check_shape(y_without_bias.shape, (2, 1, 2, 2))
+
+    def test_conv2d_cpu(self):
+        self._conv2d_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_conv2d_gpu(self):
+        self._conv2d_helper(gpu_dev)
+
+    def _conv_same_pad(self, dev, pad_mode, is_2d):
+        if is_2d:
+            x_h, w_h, k_h, p_h = 32, 4, 4, 1
+        else:
+            x_h, w_h, k_h, p_h = 1, 1, 1, 0
+
+        x = tensor.Tensor(shape=(3, 3, x_h, 32), device=dev)
+        x.gaussian(0.0, 1.0)
+
+        # with SAME padding, the total padding should be 3:
+        # for SAME_UPPER it is (1, 1) + (0, 1)
+        # for SAME_LOWER it is (1, 1) + (1, 0)
+
+        kernel = (k_h, 4)
+        padding = (p_h, 1)
+        stride = (1, 1)
+        group = 1
+        bias = False
+        out_channels = 3
+
+        conv_0 = layer.Conv2d(out_channels,
+                              kernel,
+                              stride=stride,
+                              group=group,
+                              bias=bias,
+                              pad_mode=pad_mode)
+
+        y = conv_0(x)
+        dy = np.ones((3, 3, x_h, 32), dtype=np.float32)
+        dy = tensor.from_numpy(dy)
+        dy.to_device(dev)
+
+        dx, dW = y.creator.backward(dy.data)
+        self.check_shape(y.shape, (3, 3, x_h, 32))
+        self.check_shape(dx.shape(), (3, 3, x_h, 32))
+        self.check_shape(dW.shape(), (3, 3, w_h, 4))
+
+    def test_conv2d_same_pad_cpu(self):
+        self._conv_same_pad(cpu_dev, "SAME_LOWER", True)
+        self._conv_same_pad(cpu_dev, "SAME_UPPER", True)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_conv2d_same_pad_gpu(self):
+        self._conv_same_pad(gpu_dev, "SAME_LOWER", True)
+        self._conv_same_pad(gpu_dev, "SAME_UPPER", True)
+
+    def test_conv1d_same_pad_cpu(self):
+        self._conv_same_pad(cpu_dev, "SAME_LOWER", False)
+        self._conv_same_pad(cpu_dev, "SAME_UPPER", False)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_conv1d_same_pad_gpu(self):
+        self._conv_same_pad(gpu_dev, "SAME_LOWER", False)
+        self._conv_same_pad(gpu_dev, "SAME_UPPER", False)
+
+    def _pooling_same_pad(self, dev, pad_mode, is_2d):
+        if is_2d:
+            x_h, k_h, p_h = 32, 4, 1
+        else:
+            x_h, k_h, p_h = 1, 1, 0
+
+        x = tensor.Tensor(shape=(3, 3, x_h, 32), device=dev)
+        x.gaussian(0.0, 1.0)
+
+        # with SAME padding, the total padding should be 3:
+        # for SAME_UPPER it is (1, 1) + (0, 1)
+        # for SAME_LOWER it is (1, 1) + (1, 0)
+
+        kernel = (k_h, 4)
+        # we pad (1, 1) on both sides (4 in total) and expect the op to trim the extra padding
+        padding = (p_h, 1)
+        stride = (1, 1)
+
+        pooling = layer.Pooling2d(kernel, stride=stride, pad_mode=pad_mode)
+
+        y = pooling(x)
+
+        dy = np.ones((3, 3, x_h, 32), dtype=np.float32)
+        dy = tensor.from_numpy(dy)
+        dy.to_device(dev)
+
+        dx = y.creator.backward(dy.data)
+        self.check_shape(y.shape, (3, 3, x_h, 32))
+        self.check_shape(dx.shape(), (3, 3, x_h, 32))
+
+    def test_pooling2d_same_pad_cpu(self):
+        self._pooling_same_pad(cpu_dev, "SAME_LOWER", True)
+        self._pooling_same_pad(cpu_dev, "SAME_UPPER", True)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_pooling2d_same_pad_gpu(self):
+        self._pooling_same_pad(gpu_dev, "SAME_LOWER", True)
+        self._pooling_same_pad(gpu_dev, "SAME_UPPER", True)
+
+    def test_pooling1d_same_pad_cpu(self):
+        self._pooling_same_pad(cpu_dev, "SAME_LOWER", False)
+        self._pooling_same_pad(cpu_dev, "SAME_UPPER", False)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_pooling1d_same_pad_gpu(self):
+        self._pooling_same_pad(gpu_dev, "SAME_LOWER", False)
+        self._pooling_same_pad(gpu_dev, "SAME_UPPER", False)
+
+    def _sum_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        y = x + x1
+        dy = np.ones((3, 2), dtype=np.float32)
+        grad0 = dy
+        grad1 = dy
+        x = tensor.from_numpy(x)
+        x1 = tensor.from_numpy(x1)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.sum(x, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             grad0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             grad1,
+                                             decimal=5)
+
+    def test_sum_cpu(self):
+        self._sum_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_sum_gpu(self):
+        self._sum_helper(gpu_dev)
+
+    def _SeparableConv2d_helper(self, dev):
+        # SeparableConv2d(out_channels, kernel_size)
+        if dev == cpu_dev:
+            in_channels = 1
+        else:
+            in_channels = 8
+        separ_conv = layer.SeparableConv2d(16, 3, padding=1)
+
+        x = np.random.random((10, in_channels, 28, 28)).astype(np.float32)
+        x = tensor.Tensor(device=dev, data=x)
+
+        y = separ_conv(x)
+        self.check_shape(y.shape, (10, 16, 28, 28))
+
+        y1 = separ_conv.depthwise_conv(x)
+        y2 = separ_conv.point_conv(y1)
+
+        dy1, dW_depth = y2.creator.backward(y2.data)
+        dx, dW_spatial = y1.creator.backward(dy1)
+
+        self.check_shape(y2.shape, (10, 16, 28, 28))
+
+        self.check_shape(dy1.shape(), (10, in_channels, 28, 28))
+        self.check_shape(dW_depth.shape(), (16, in_channels, 1, 1))
+
+        self.check_shape(dx.shape(), (10, in_channels, 28, 28))
+        self.check_shape(dW_spatial.shape(), (in_channels, 1, 3, 3))
+
+    def test_SeparableConv2d_cpu(self):
+        self._SeparableConv2d_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_SeparableConv2d_gpu(self):
+        self._SeparableConv2d_helper(gpu_dev)
+
+    def _batchnorm2d_helper(self, dev):
+        batchnorm_0 = layer.BatchNorm2d(3)
+
+        cpu_input_tensor = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+        cpu_input_tensor.gaussian(0.0, 1.0)
+
+        dy = cpu_input_tensor.clone().data
+
+        y = batchnorm_0(cpu_input_tensor)
+        dx, ds, db = y.creator.backward(dy)
+
+        self.check_shape(y.shape, (2, 3, 3, 3))
+        self.check_shape(dx.shape(), (2, 3, 3, 3))
+        self.check_shape(ds.shape(), (3,))
+        self.check_shape(db.shape(), (3,))
+
+    def test_batchnorm2d_cpu(self):
+        self._batchnorm2d_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_batchnorm2d_gpu(self):
+        self._batchnorm2d_helper(gpu_dev)
+
+    def gradients_check(self,
+                        func,
+                        param,
+                        autograds,
+                        h=0.0005,
+                        df=1,
+                        dev=cpu_dev):
+        # param: PyTensor
+        # autograds: numpy_tensor
+        p = tensor.to_numpy(param)
+        it = np.nditer(p, flags=['multi_index'], op_flags=['readwrite'])
+        while not it.finished:
+            idx = it.multi_index
+            diff = np.zeros_like(p)
+            diff[idx] += h
+            diff = tensor.from_numpy(diff)
+            diff.to_device(dev)
+
+            param += diff
+            pos = func()
+            pos = tensor.to_numpy(pos)
+
+            param -= diff
+            param -= diff
+            neg = func()
+            neg = tensor.to_numpy(neg)
+            # restore param to its original value before the next index
+            param += diff
+
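+            # central difference: df/dp ~ (f(p + h) - f(p - h)) / (2 * h)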
+            numerical_grad = np.sum((pos - neg) * df) / (2 * h)
+            #print((autograds[idx] - numerical_grad)/numerical_grad)
+            # threshold set as -5% to +5%
+            #self.assertAlmostEqual((autograds[idx] - numerical_grad)/(numerical_grad+0.0000001), 0., places=1)
+            self.assertAlmostEqual(autograds[idx] - numerical_grad,
+                                   0.,
+                                   places=2)
+
+            it.iternext()
+
+    def _vanillaRNN_gpu_tiny_ops_shape_check_helper(self, dev):
+        # gradients shape check.
+        inputs, target, h0 = prepare_inputs_targets_for_rnn_test(dev)
+        rnn = layer.RNN(3, 2)
+
+        hs, _ = rnn(inputs, h0)
+
+        loss = autograd.softmax_cross_entropy(hs[0], target[0])
+        for i in range(1, len(hs)):
+            l = autograd.softmax_cross_entropy(hs[i], target[i])
+            loss = autograd.add(loss, l)
+        # d=autograd.infer_dependency(loss.creator)
+        # print(d)
+        for t, dt in autograd.backward(loss):
+            self.check_shape(t.shape, dt.shape)
+
+    def test_vanillaRNN_gpu_tiny_ops_shape_check_cpu(self):
+        self._vanillaRNN_gpu_tiny_ops_shape_check_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_vanillaRNN_gpu_tiny_ops_shape_check_gpu(self):
+        self._vanillaRNN_gpu_tiny_ops_shape_check_helper(gpu_dev)
+
+    def _LSTM_gpu_tiny_ops_shape_check_helper(self, dev):
+        # gradients shape check.
+        inputs, target, h0 = prepare_inputs_targets_for_rnn_test(dev)
+        c_0 = np.random.random((2, 1)).astype(np.float32)
+        c0 = tensor.Tensor(device=dev, data=c_0)
+
+        rnn = layer.LSTM(3, 2)
+
+        hs, _, _ = rnn(inputs, (h0, c0))
+        loss = autograd.softmax_cross_entropy(hs[0], target[0])
+
+        for i in range(1, len(hs)):
+            l = autograd.softmax_cross_entropy(hs[i], target[i])
+            loss = autograd.add(loss, l)
+        # d=autograd.infer_dependency(loss.creator)
+        # print(d)
+        for t, dt in autograd.backward(loss):
+            self.check_shape(t.shape, dt.shape)
+
+    def test_LSTM_gpu_tiny_ops_shape_check_cpu(self):
+        self._LSTM_gpu_tiny_ops_shape_check_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_LSTM_gpu_tiny_ops_shape_check_gpu(self):
+        self._LSTM_gpu_tiny_ops_shape_check_helper(gpu_dev)
+
+    def _numerical_gradients_check_for_vanilla_rnn_helper(self, dev):
+        inputs, target, h0 = prepare_inputs_targets_for_rnn_test(dev)
+
+        rnn = layer.RNN(3, 2)
+
+        def vanilla_rnn_forward():
+            hs, _ = rnn(inputs, h0)
+
+            loss = autograd.softmax_cross_entropy(hs[0], target[0])
+            for i in range(1, len(hs)):
+                l = autograd.softmax_cross_entropy(hs[i], target[i])
+                loss = autograd.add(loss, l)
+            #grads = autograd.gradients(loss)
+            return loss
+
+        loss1 = vanilla_rnn_forward()
+        auto_grads = autograd.gradients(loss1)
+
+        params = rnn.get_params()
+        for key, param in params.items():
+            auto_grad = tensor.to_numpy(auto_grads[id(param)])
+
+            self.gradients_check(vanilla_rnn_forward, param, auto_grad, dev=dev)
+
+    def _gradient_check_cudnn_rnn(self, mode="vanilla", dev=gpu_dev):
+        seq = 10
+        bs = 2
+        fea = 10
+        hid = 10
+        x = np.random.random((seq, bs, fea)).astype(np.float32)
+        tx = tensor.Tensor(device=dev, data=x)
+        y = np.random.random((seq, bs, hid)).astype(np.float32)
+        y = np.reshape(y, (-1, hid))
+        ty = tensor.Tensor(device=dev, data=y)
+        rnn = layer.CudnnRNN(hid, rnn_mode=mode, return_sequences=True)
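+        # return_sequences=True gives one output per time step; it is
+        # reshaped to (-1, hid) below to match ty in the loss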
+
+        def vanilla_rnn_forward():
+            out = rnn(tx)
+            out = autograd.reshape(out, (-1, hid))
+            loss = autograd.softmax_cross_entropy(out, ty)
+            return loss
+
+        loss = vanilla_rnn_forward()
+        auto_grads = autograd.gradients(loss)
+
+        params = rnn.get_params()
+        for key, param in params.items():
+            auto_grad = tensor.to_numpy(auto_grads[id(param)])
+            self.gradients_check(vanilla_rnn_forward, param, auto_grad, dev=dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gradient_check_cudnn_rnn_vanilla(self):
+        self._gradient_check_cudnn_rnn(mode="vanilla", dev=gpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gradient_check_cudnn_rnn_lstm(self):
+        self._gradient_check_cudnn_rnn(mode="lstm", dev=gpu_dev)
+
+    # Cos Sim Gradient Check
+    def _gradient_check_cossim(self, dev=gpu_dev):
+        bs = 2
+        vec = 3
+        ta = tensor.random((bs, vec), dev)
+        tb = tensor.random((bs, vec), dev)
+        # treat ta, tb as params
+        ta.stores_grad = True
+        tb.stores_grad = True
+        ty = tensor.random((bs,), dev)
+
+        def _forward():
+            out = autograd.cossim(ta, tb)
+            loss = autograd.mse_loss(out, ty)
+            return loss
+
+        loss = _forward()
+        auto_grads = autograd.gradients(loss)
+
+        params = {id(ta): ta, id(tb): tb}
+
+        for key, param in params.items():
+            auto_grad = tensor.to_numpy(auto_grads[id(param)])
+            self.gradients_check(_forward, param, auto_grad, dev=dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gradient_check_cossim_gpu(self):
+        self._gradient_check_cossim(dev=gpu_dev)
+
+    def test_gradient_check_cossim_cpu(self):
+        self._gradient_check_cossim(dev=cpu_dev)
+
+    def test_numerical_gradients_check_for_vanilla_rnn_cpu(self):
+        self._numerical_gradients_check_for_vanilla_rnn_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_numerical_gradients_check_for_vanilla_rnn_gpu(self):
+        self._numerical_gradients_check_for_vanilla_rnn_helper(gpu_dev)
+
+    def _numerical_gradients_check_for_lstm_helper(self, dev):
+        inputs, target, h0 = prepare_inputs_targets_for_rnn_test(dev)
+        c_0 = np.zeros((2, 2)).astype(np.float32)
+        c0 = tensor.Tensor(device=dev, data=c_0)
+
+        rnn = layer.LSTM(3, 2)
+
+        def lstm_forward():
+            hs, _, _ = rnn(inputs, (h0, c0))
+
+            loss = autograd.softmax_cross_entropy(hs[0], target[0])
+            for i in range(1, len(hs)):
+                l = autograd.softmax_cross_entropy(hs[i], target[i])
+                loss = autograd.add(loss, l)
+            return loss
+
+        loss1 = lstm_forward()
+        auto_grads = autograd.gradients(loss1)
+
+        params = rnn.get_params()
+        for key, param in params.items():
+            auto_grad = tensor.to_numpy(auto_grads[id(param)])
+
+            self.gradients_check(lstm_forward, param, auto_grad, dev=dev)
+
+    def test_numerical_gradients_check_for_lstm_cpu(self):
+        self._numerical_gradients_check_for_lstm_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_numerical_gradients_check_for_lstm_gpu(self):
+        self._numerical_gradients_check_for_lstm_helper(gpu_dev)
+
+    def _MeanSquareError_helper(self, dev):
+        X = np.array([4.3, 5.4, 3.3, 3.6, 5.7,
+                      6.0]).reshape(3, 2).astype(np.float32)
+        T = np.array([4.4, 5.3, 3.2, 3.7, 5.4,
+                      6.3]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        t = tensor.from_numpy(T)
+        x.to_device(dev)
+        t.to_device(dev)
+
+        loss = autograd.mse_loss(x, t)
+        dx = loss.creator.backward()
+
+        loss_np = tensor.to_numpy(loss)[0]
+        self.assertAlmostEqual(loss_np, 0.0366666, places=4)
+        self.check_shape(dx.shape(), (3, 2))
+
+    def test_MeanSquareError_cpu(self):
+        self._MeanSquareError_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_MeanSquareError_gpu(self):
+        self._MeanSquareError_helper(gpu_dev)
+
+    def _Abs_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.array([0.8, 1.2, 3.3, 3.6, 0.5,
+                       0.5]).reshape(3, 2).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        result = autograd.abs(x)
+        dx = result.creator.backward(x.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result), XT)
+        self.check_shape(dx.shape(), (3, 2))
+
+    def test_Abs_cpu(self):
+        self._Abs_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Abs_gpu(self):
+        self._Abs_helper(gpu_dev)
+
+    def _Mean_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        y = (x0 + x1) / 2
+        grad = np.ones(x0.shape) / 2
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        result = autograd.mean(x0, x1)
+        dy = tensor.from_numpy(np.ones((3, 2)).astype(np.float32))
+        dy.to_device(dev)
+        dx0, dx1 = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             grad,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             grad,
+                                             decimal=5)
+
+    def test_Mean_cpu(self):
+        self._Mean_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Mean_gpu(self):
+        self._Mean_helper(gpu_dev)
+
+    def _Exp_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.exp(X)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        result = autograd.exp(x)
+        dx = result.creator.backward(x.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        self.check_shape(dx.shape(), (3, 2))
+
+    def test_Exp_cpu(self):
+        self._Exp_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Exp_gpu(self):
+        self._Exp_helper(gpu_dev)
+
+    def _Identity_helper(self, dev):
+        x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        y = x.copy()
+        grad = np.ones(x.shape)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        result = autograd.identity(x)
+        dy = tensor.from_numpy(np.ones((3, 2)).astype(np.float32))
+        dy.to_device(dev)
+        dx = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+        self.check_shape(dx.shape(), (3, 2))
+
+    def test_Identity_cpu(self):
+        self._Identity_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Identity_gpu(self):
+        self._Identity_helper(gpu_dev)
+
+    def _LeakyRelu_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.array([0.8, -0.012, 3.3, -0.036, -0.005,
+                       0.5]).reshape(3, 2).astype(np.float32)
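+        # expected output uses the default negative slope of 0.01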
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        result = autograd.leakyrelu(x)
+
+        dx = result.creator.backward(x.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result), XT)
+        self.check_shape(dx.shape(), (3, 2))
+
+    def test_LeakyRelu_cpu(self):
+        self._LeakyRelu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_LeakyRelu_gpu(self):
+        self._LeakyRelu_helper(gpu_dev)
+
+    def _Relu_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.maximum(X, 0)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.relu(x)
+        dx = result.creator.backward(dy.data)
+
+        G = (X > 0).astype(np.float32)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Relu_cpu(self):
+        self._Relu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Relu_gpu(self):
+        self._Relu_helper(gpu_dev)
+
+    def _Cos_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.cos(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.cos(x)
+        dx = result.creator.backward(dy.data)
+
+        G = -np.sin(X)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Cos_cpu(self):
+        self._Cos_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Cos_gpu(self):
+        self._Cos_helper(gpu_dev)
+
+    def _Cosh_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.cosh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.cosh(x)
+        dx = result.creator.backward(dy.data)
+
+        G = np.sinh(X)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Cosh_cpu(self):
+        self._Cosh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Cosh_gpu(self):
+        self._Cosh_helper(gpu_dev)
+
+    def _Acos_helper(self, dev):
+        X = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        XT = np.arccos(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.acos(x)
+        dx = result.creator.backward(dy.data)
+
+        G = -1.0 / np.sqrt(1.0 - np.square(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Acos_cpu(self):
+        self._Acos_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Acos_gpu(self):
+        self._Acos_helper(gpu_dev)
+
+    def _Acosh_helper(self, dev):
+        X = np.array([1.1, 1.5, 1.9, 2.2, 2.5,
+                      2.8]).reshape(3, 2).astype(np.float32)
+        XT = np.arccosh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.acosh(x)
+        dx = result.creator.backward(dy.data)
+
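+        # d(arccosh x)/dx = 1 / (sqrt(x - 1) * sqrt(x + 1)) = 1 / sqrt(x^2 - 1)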
+        G = 1.0 / np.multiply(np.sqrt(X - 1.0), np.sqrt(X + 1.0))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Acosh_cpu(self):
+        self._Acosh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Acosh_gpu(self):
+        self._Acosh_helper(gpu_dev)
+
+    def _Sin_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.sin(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.sin(x)
+        dx = result.creator.backward(dy.data)
+
+        G = np.cos(X)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Sin_cpu(self):
+        self._Sin_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Sin_gpu(self):
+        self._Sin_helper(gpu_dev)
+
+    def _Sinh_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.sinh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.sinh(x)
+        dx = result.creator.backward(dy.data)
+
+        G = np.cosh(X)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Sinh_cpu(self):
+        self._Sinh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Sinh_gpu(self):
+        self._Sinh_helper(gpu_dev)
+
+    def _Asin_helper(self, dev):
+        X = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        XT = np.arcsin(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.asin(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 1.0 / np.sqrt(1.0 - np.square(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Asin_cpu(self):
+        self._Asin_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Asin_gpu(self):
+        self._Asin_helper(gpu_dev)
+
+    def _Asinh_helper(self, dev):
+        X = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        XT = np.arcsinh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.asinh(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 1.0 / np.sqrt(np.square(X) + 1.0)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Asinh_cpu(self):
+        self._Asinh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Asinh_gpu(self):
+        self._Asinh_helper(gpu_dev)
+
+    def _Tan_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.tan(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.tan(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 1.0 / np.square(np.cos(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Tan_cpu(self):
+        self._Tan_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Tan_gpu(self):
+        self._Tan_helper(gpu_dev)
+
+    def _Tanh_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.tanh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.tanh(x)
+        dx = result.creator.backward(dy.data)
+
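+        # d(tanh x)/dx = sech^2 x = 1 / cosh^2 x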
+        G = 1.0 / np.square(np.cosh(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Tanh_cpu(self):
+        self._Tanh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Tanh_gpu(self):
+        self._Tanh_helper(gpu_dev)
+
+    def _Atan_helper(self, dev):
+        X = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        XT = np.arctan(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.atan(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 1.0 / (1.0 + np.square(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Atan_cpu(self):
+        self._Atan_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Atan_gpu(self):
+        self._Atan_helper(gpu_dev)
+
+    def _Atanh_helper(self, dev):
+        X = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                      0.9]).reshape(3, 2).astype(np.float32)
+        XT = np.arctanh(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.atanh(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 1.0 / (1.0 - np.square(X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Atanh_cpu(self):
+        self._Atanh_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Atanh_gpu(self):
+        self._Atanh_helper(gpu_dev)
+
+    def _Less_helper(self, dev):
+        x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+        y = np.less(x0, x1)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
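+        # less is not differentiable, so only the forward output is checked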
+        result = autograd.less(x0, x1)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_Less_cpu(self):
+        self._Less_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Less_gpu(self):
+        self._Less_helper(gpu_dev)
+
+    def _Sub_helper(self, dev):
+        X0 = np.array([7, -5, 0.2, -0.1, 0.3, 4]).reshape(3,
+                                                          2).astype(np.float32)
+        X1 = np.array([0.6, -1.3, 0.1, -0.1, 0.4,
+                       0.3]).reshape(3, 2).astype(np.float32)
+        XT = np.subtract(X0, X1)
+
+        DY = np.ones((3, 2), dtype=np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.sub(x0, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
+        DX0 = np.multiply(DY, 1.0)
+        DX1 = np.multiply(DY, -1.0)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+
+    def test_Sub_cpu(self):
+        self._Sub_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Sub_gpu(self):
+        self._Sub_helper(gpu_dev)
+
+    def _Pow_helper(self, dev):
+        X0 = np.array([7, 5, 0.2, 0.1, 0.3, 4]).reshape(3, 2).astype(np.float32)
+        X1 = np.array([-1.0, 2.0, -1.0, -2.1, 1.0,
+                       -2.0]).reshape(3, 2).astype(np.float32)
+        XT = np.power(X0, X1)
+
+        DY = np.ones((3, 2), dtype=np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.pow(x0, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
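+        # d(x0^x1)/dx0 = x1 * x0^(x1 - 1) and d(x0^x1)/dx1 = x0^x1 * ln(x0)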
+        G0 = np.multiply(X1, np.power(X0, (X1 - 1.0)))
+        DX0 = np.multiply(G0, DY)
+        G1 = np.multiply(np.power(X0, X1), np.log(X0))
+        DX1 = np.multiply(G1, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=4)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=4)
+
+    def test_Pow_cpu(self):
+        self._Pow_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Pow_gpu(self):
+        self._Pow_helper(gpu_dev)
+
+    def _SoftSign_helper(self, dev):
+        # y = x / (1 + np.abs(x))
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = X / (1 + np.absolute(X))
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.softsign(x)
+        dx = result.creator.backward(dy.data)
+
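+        # dy/dx = 1 / (1 + |x|)^2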
+        G = 1.0 / np.square(np.absolute(X) + 1.0)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_SoftSign_cpu(self):
+        self._SoftSign_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_SoftSign_gpu(self):
+        self._SoftSign_helper(gpu_dev)
+
+    def _SoftPlus_helper(self, dev):
+        # y = np.log(np.exp(x) + 1)
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.log(np.exp(X) + 1)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.softplus(x)
+        dx = result.creator.backward(dy.data)
+
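+        # dy/dx = 1 / (1 + exp(-x)), i.e. the logistic sigmoid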
+        G = 1.0 / (1.0 + np.exp(-X))
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_SoftPlus_cpu(self):
+        self._SoftPlus_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_SoftPlus_gpu(self):
+        self._SoftPlus_helper(gpu_dev)
+
+    def _unsqueeze_helper(self, dev):
+        data = [0.1, -1.0, 0.4, 4.0, -0.9, 9.0]
+
+        x = np.array(data).reshape(1, 2, 3).astype(np.float32)
+        y = x.reshape(1, 1, 2, 3, 1)
+        dy = np.ones((1, 1, 2, 3, 1), dtype=np.float32)
+        grad = dy.reshape(1, 2, 3)
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.unsqueeze(x, [0, 4])
+        dx = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_unsqueeze_cpu(self):
+        self._unsqueeze_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_unsqueeze_gpu(self):
+        self._unsqueeze_helper(gpu_dev)
+
+    def _Sqrt_helper(self, dev):
+        X = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        XT = np.sqrt(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.sqrt(x)
+        dx = result.creator.backward(dy.data)
+
+        G = 0.5 * np.power(X, -0.5)
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Sqrt_cpu(self):
+        self._Sqrt_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Sqrt_gpu(self):
+        self._Sqrt_helper(gpu_dev)
+
+    def _transpose_helper(self, dev):
+        x = np.random.randn(3, 2, 1)
+        y = x.transpose(1, 2, 0)
+        dy = np.random.randn(*(y.shape))
+        grad = dy.transpose((2, 0, 1))
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.transpose(x, (1, 2, 0))
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_transpose_cpu(self):
+        self._transpose_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_transpose_gpu(self):
+        self._transpose_helper(gpu_dev)
+
+    def _Sign_helper(self, dev):
+        X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
+                      0.5]).reshape(3, 2).astype(np.float32)
+        XT = np.sign(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+        result = autograd.sign(x)
+        dx = result.creator.backward(dy.data)
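+        # sign(x) is flat almost everywhere, so the gradient is zero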
+        DX = np.multiply(DY, 0)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Sign_cpu(self):
+        self._Sign_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Sign_gpu(self):
+        self._Sign_helper(gpu_dev)
+
+    def _Log_helper(self, dev):
+        X = np.array([0.1, 1.0, 0.4, 1.4, 0.9,
+                      2.0]).reshape(3, 2).astype(np.float32)
+        XT = np.log(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+        result = autograd.log(x)
+        dx = result.creator.backward(dy.data)
+        # dx = 1/x
+        G = 1.0 / X
+        DX = np.multiply(G, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_Log_cpu(self):
+        self._Log_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Log_gpu(self):
+        self._Log_helper(gpu_dev)
+
+    def _mul_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        y = x * x1
+        dy = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        grad0 = x1 * dy
+        grad1 = x * dy
+
+        x = tensor.from_numpy(x)
+        slope = tensor.from_numpy(x1)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        slope.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.mul(x, slope)
+        dx0, dx1 = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             grad0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             grad1,
+                                             decimal=5)
+
+    def test_mul_cpu(self):
+        self._mul_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_mul_gpu(self):
+        self._mul_helper(gpu_dev)
+
+    def _reshape_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        y = x.reshape(2, 3)
+        dy = np.array([1, 2, 3, 4, 5, 6]).reshape(2, 3).astype(np.float32)
+        grad = dy.reshape(3, 2)
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.reshape(x, (2, 3))
+        dx = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_reshape_cpu(self):
+        self._reshape_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_reshape_gpu(self):
+        self._reshape_helper(gpu_dev)
+
+    def _max_helper(self, dev):
+        X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
+                       0.2]).reshape(3, 2).astype(np.float32)
+        X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
+                       2.0]).reshape(3, 2).astype(np.float32)
+        XT = np.maximum(X0, X1)
+
+        DY = np.ones((3, 2), dtype=np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.max(x0, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
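+        # the gradient flows to whichever input is larger, elementwise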
+        G = np.subtract(X0, X1)
+        DX0 = np.where(G > 0, 1, G * 0)
+        DX1 = np.where(G < 0, 1, G * 0)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+
+    def test_max_cpu(self):
+        self._max_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_max_gpu(self):
+        self._max_helper(gpu_dev)
+
+    def _max_3inputs_helper(self, dev):
+        data_0 = np.array([3, 2, 1]).astype(np.float32)
+        data_1 = np.array([1, 4, 4]).astype(np.float32)
+        data_2 = np.array([2, 5, 3]).astype(np.float32)
+        XT = np.array([3, 5, 4]).astype(np.float32)
+
+        DY = np.array([1, 1, 1]).astype(np.float32)
+        x0 = tensor.from_numpy(data_0)
+        x1 = tensor.from_numpy(data_1)
+        x2 = tensor.from_numpy(data_2)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        x2.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.max(x0, x1, x2)
+        dx0, dx1, dx2 = result.creator.backward(dy.data)
+
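+        # elementwise argmax: x0 is largest at index 0, x2 at index 1, x1 at index 2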
+        DX0 = np.array([1, 0, 0]).astype(np.float32)
+        DX1 = np.array([0, 0, 1]).astype(np.float32)
+        DX2 = np.array([0, 1, 0]).astype(np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx2)),
+                                             DX2,
+                                             decimal=5)
+
+    def test_max_3inputs_cpu(self):
+        self._max_3inputs_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_max_3inputs_gpu(self):
+        self._max_3inputs_helper(gpu_dev)
+
+    def _max_1inputs_helper(self, dev):
+        data_0 = np.array([3, 2, 1]).astype(np.float32)
+        XT = np.array([3, 2, 1]).astype(np.float32)
+
+        DY = np.array([1, 1, 1]).astype(np.float32)
+        x0 = tensor.from_numpy(data_0)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.max(x0)
+        dx0 = result.creator.backward(dy.data)
+
+        DX0 = np.array([1, 1, 1]).astype(np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+
+    def test_max_1inputs_cpu(self):
+        self._max_1inputs_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_max_1inputs_gpu(self):
+        self._max_1inputs_helper(gpu_dev)
+
+    def _Div_helper(self, dev):
+        X0 = np.array([7, -5, 0.2, -0.1, 0.3, 4]).reshape(3,
+                                                          2).astype(np.float32)
+        X1 = np.array([0.6, -1.3, 0.1, -0.1, 0.4,
+                       0.3]).reshape(3, 2).astype(np.float32)
+        XT = np.divide(X0, X1)
+
+        DY = np.ones((3, 2), dtype=np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.div(x0, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
+        G0 = 1.0 / X1
+        DX0 = np.multiply(G0, DY)
+        G1 = np.divide(-X0, np.square(X1))
+        DX1 = np.multiply(G1, DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+
+    def test_Div_cpu(self):
+        self._Div_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_Div_gpu(self):
+        self._Div_helper(gpu_dev)
+
+    def _squeeze_helper(self, dev):
+        x = np.random.randn(3, 1, 2, 1, 1)
+        y = x.reshape(3, 2)
+        dy = np.random.randn(3, 2)
+        grad = dy.reshape(3, 1, 2, 1, 1)
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.squeeze(x, [1, 3, 4])
+        dx = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_squeeze_cpu(self):
+        self._squeeze_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_squeeze_gpu(self):
+        self._squeeze_helper(gpu_dev)
+
+    def _shape_helper(self, dev):
+        x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
+                      9.0]).reshape(3, 2).astype(np.float32)
+        y = list(x.shape)
+        dy = np.ones((3, 2), dtype=np.float32)
+        grad = list(dy.shape)
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.shape(x)
+        dx = result.creator.backward(dy.data)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(dx, grad, decimal=5)
+
+    def test_shape_cpu(self):
+        self._shape_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_shape_gpu(self):
+        self._shape_helper(gpu_dev)
+
+    def _min_helper(self, dev):
+        X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
+                       0.2]).reshape(3, 2).astype(np.float32)
+        X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
+                       2.0]).reshape(3, 2).astype(np.float32)
+        XT = np.minimum(X0, X1)
+
+        DY = np.ones((3, 2), dtype=np.float32)
+        x0 = tensor.from_numpy(X0)
+        x1 = tensor.from_numpy(X1)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.min(x0, x1)
+        dx0, dx1 = result.creator.backward(dy.data)
+
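+        # the gradient flows to whichever input is smaller, elementwise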
+        G = np.subtract(X0, X1)
+        DX0 = np.where(G < 0, 1, G * 0)
+        DX1 = np.where(G > 0, 1, G * 0)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+
+    def test_min_cpu(self):
+        self._min_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_min_gpu(self):
+        self._min_helper(gpu_dev)
+
+    def _min_3inputs_helper(self, dev):
+        data_0 = np.array([3, 2, 1]).astype(np.float32)
+        data_1 = np.array([1, 4, 4]).astype(np.float32)
+        data_2 = np.array([2, 5, 0]).astype(np.float32)
+        XT = np.array([1, 2, 0]).astype(np.float32)
+
+        DY = np.array([1, 1, 1]).astype(np.float32)
+        x0 = tensor.from_numpy(data_0)
+        x1 = tensor.from_numpy(data_1)
+        x2 = tensor.from_numpy(data_2)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        x1.to_device(dev)
+        x2.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.min(x0, x1, x2)
+        dx0, dx1, dx2 = result.creator.backward(dy.data)
+
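+        # elementwise argmin: x1 is smallest at index 0, x0 at index 1, x2 at index 2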
+        DX0 = np.array([0, 1, 0]).astype(np.float32)
+        DX1 = np.array([1, 0, 0]).astype(np.float32)
+        DX2 = np.array([0, 0, 1]).astype(np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             DX1,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx2)),
+                                             DX2,
+                                             decimal=5)
+
+    def test_min_3inputs_cpu(self):
+        self._min_3inputs_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_min_3inputs_gpu(self):
+        self._min_3inputs_helper(gpu_dev)
+
+    def _min_1inputs_helper(self, dev):
+        data_0 = np.array([3, 2, 1]).astype(np.float32)
+        XT = np.array([3, 2, 1]).astype(np.float32)
+
+        DY = np.array([1, 1, 1]).astype(np.float32)
+        x0 = tensor.from_numpy(data_0)
+        dy = tensor.from_numpy(DY)
+        x0.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.min(x0)
+        dx0 = result.creator.backward(dy.data)
+
+        DX0 = np.array([1, 1, 1]).astype(np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             DX0,
+                                             decimal=5)
+
+    def test_min_1inputs_cpu(self):
+        self._min_1inputs_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_min_1inputs_gpu(self):
+        self._min_1inputs_helper(gpu_dev)
+
+    def _HardSigmoid_helper(self, dev):
+        x = np.random.randn(3, 2)
+        # y = max(0, min(1, alpha * x + gamma))
+        a = 0.2
+        g = 0.5
+        y = np.clip(x * a + g, 0, 1)
+        dy = np.random.randn(3, 2)
+        # dy/dx = alpha wherever the output lies strictly between 0 and 1
+        grad = np.logical_and(y > 0, y < 1) * a * dy
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.hardsigmoid(x, a, g)
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_HardSigmoid_cpu(self):
+        self._HardSigmoid_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_HardSigmoid_gpu(self):
+        self._HardSigmoid_helper(gpu_dev)
+
+    def _prelu_helper(self, dev):
+        x = np.random.randn(3, 2)
+        slope = np.random.randn(3, 2)
+        y = np.clip(x, 0, np.inf) + np.clip(x, -np.inf, 0) * slope
+        dy = np.random.randn(3, 2)
+        # x0 is a 0/1 mask of the positive entries of x
+        x0 = (x > 0).astype(x.dtype)
+        grad0 = (x0 + (1 - x0) * slope) * dy
+        grad1 = (1 - x0) * x * dy
+        x = tensor.from_numpy(x)
+        slope = tensor.from_numpy(slope)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        slope.to_device(dev)
+        dy.to_device(dev)
+        result = autograd.prelu(x, slope)
+        dx0, dx1 = result.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx0)),
+                                             grad0,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx1)),
+                                             grad1,
+                                             decimal=5)
+
+    def test_prelu_cpu(self):
+        self._prelu_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_prelu_gpu(self):
+        self._prelu_helper(gpu_dev)
+
+    def _SeLU_helper(self, dev):
+        x = np.random.randn(3, 2)
+        a = 0.2
+        g = 0.3
+        y = np.clip(x, 0,
+                    np.inf) * g + (np.exp(np.clip(x, -np.inf, 0)) - 1) * a * g
+        dy = np.random.randn(3, 2)
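+        # dy/dx = g for x > 0 and a * g * exp(x) for x <= 0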
+        grad = (np.exp(np.clip(x, -np.inf, 0))) * g
+        grad[x <= 0] = grad[x <= 0] * a
+        grad *= dy
+
+        x = tensor.from_numpy(x)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+        result = autograd.selu(x, a, g)
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             grad,
+                                             decimal=5)
+
+    def test_SeLU_cpu(self):
+        self._SeLU_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_SeLU_gpu(self):
+        self._SeLU_helper(gpu_dev)
+
+    def _and_helper(self, dev):
+        x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
+                       0.9]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0.5, 0.9]).reshape(3,
+                                                           2).astype(np.float32)
+
+        y = np.logical_and(x0, x1)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        result = autograd._and(x0, x1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_and_cpu(self):
+        self._and_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_and_gpu(self):
+        self._and_helper(gpu_dev)
+
+    def _or_helper(self, dev):
+        x0 = np.array([1.0, 1.0, 2.0, -3.0, 0,
+                       -7.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([-1.0, 0, 2.0, 4.0, 0,
+                       -7.0]).reshape(3, 2).astype(np.float32)
+
+        y = np.logical_or(x0, x1)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        result = autograd._or(x0, x1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_or_cpu(self):
+        self._or_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_or_gpu(self):
+        self._or_helper(gpu_dev)
+
+    def _not_helper(self, dev):
+        x = np.array([1.0, -1.0, 0, -0.1, 0,
+                      -7.0]).reshape(3, 2).astype(np.float32)
+
+        y = np.logical_not(x)
+        x = tensor.from_numpy(x)
+        x.to_device(dev)
+
+        result = autograd._not(x)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_not_cpu(self):
+        self._not_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_not_gpu(self):
+        self._not_helper(gpu_dev)
+
+    def _xor_helper(self, dev):
+        x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
+                       9.0]).reshape(3, 2).astype(np.float32)
+        x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
+                                                         2).astype(np.float32)
+
+        y = np.logical_xor(x0, x1)
+        x0 = tensor.from_numpy(x0)
+        x1 = tensor.from_numpy(x1)
+        x0.to_device(dev)
+        x1.to_device(dev)
+
+        result = autograd._xor(x0, x1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_xor_cpu(self):
+        self._xor_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_xor_gpu(self):
+        self._xor_helper(gpu_dev)
+
+    def _negative_helper(self, dev):
+        X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
+                      -2.0]).reshape(3, 2).astype(np.float32)
+        XT = np.negative(X)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.negative(x)
+        dx = result.creator.backward(dy.data)
+        DX = np.negative(DY)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_negative_cpu(self):
+        self._negative_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_negative_gpu(self):
+        self._negative_helper(gpu_dev)
+
+    def _reciprocal_helper(self, dev):
+        X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
+                      -2.0]).reshape(3, 2).astype(np.float32)
+        DY = np.ones((3, 2), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.reciprocal(x)
+        dx = result.creator.backward(dy.data)
+        # dy/dx = -1/x**2; X contains a zero, so suppress the divide-by-zero warning
+        with np.errstate(divide='ignore'):
+            XT = np.reciprocal(X)
+            DX = -1 / np.square(X)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_reciprocal_cpu(self):
+        self._reciprocal_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_reciprocal_gpu(self):
+        self._reciprocal_helper(gpu_dev)
+
+    def _and_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = (np.random.randn(*in1) > 0).astype(np.float32)
+            x1 = (np.random.randn(*in2) > 0).astype(np.float32)
+            y = np.logical_and(x, x1)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            x.to_device(dev)
+            x1.to_device(dev)
+
+            result = autograd._and(x, x1)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+
+    def test_and_broadcast_cpu(self):
+        self._and_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_and_broadcast_gpu(self):
+        self._and_broadcast_helper(gpu_dev)
+
+    def _or_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = (np.random.randn(*in1) > 0).astype(np.float32)
+            x1 = (np.random.randn(*in2) > 0).astype(np.float32)
+            y = np.logical_or(x, x1)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            x.to_device(dev)
+            x1.to_device(dev)
+
+            result = autograd._or(x, x1)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+
+    def test_or_broadcast_cpu(self):
+        self._or_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_or_broadcast_gpu(self):
+        self._or_broadcast_helper(gpu_dev)
+
+    def _xor_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = (np.random.randn(*in1) > 0).astype(np.float32)
+            x1 = (np.random.randn(*in2) > 0).astype(np.float32)
+            y = np.logical_xor(x, x1)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            x.to_device(dev)
+            x1.to_device(dev)
+
+            result = autograd._xor(x, x1)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+
+    def test_xor_broadcast_cpu(self):
+        self._xor_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_xor_broadcast_gpu(self):
+        self._xor_broadcast_helper(gpu_dev)
+
+    def _greater_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32)
+            y = np.greater(x, x1)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            x.to_device(dev)
+            x1.to_device(dev)
+
+            result = autograd.greater(x, x1)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+
+    def test_greater_broadcast_cpu(self):
+        self._greater_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_greater_broadcast_gpu(self):
+        self._greater_broadcast_helper(gpu_dev)
+
+    def _less_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32)
+            y = np.less(x, x1)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            x.to_device(dev)
+            x1.to_device(dev)
+
+            result = autograd.less(x, x1)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+
+    def test_less_broadcast_cpu(self):
+        self._less_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_less_broadcast_gpu(self):
+        self._less_broadcast_helper(gpu_dev)
+
+    def _add_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32)
+            y = x + x1
+
+            dy = np.random.randn(*y.shape)
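+            # gradient of a broadcast add: reduce dy over the broadcast axes
+            # (found by axis_helper) and reshape back to each input's shape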
+            grad0 = np.sum(dy, axis=axis_helper(y.shape,
+                                                x.shape)).reshape(x.shape)
+            grad1 = np.sum(dy, axis=axis_helper(y.shape,
+                                                x1.shape)).reshape(x1.shape)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            x1.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.add(x, x1)
+            dx0, dx1 = result.creator.backward(dy.data)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                                 grad0,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                                 grad1,
+                                                 decimal=5)
+
+    def test_add_broadcast_cpu(self):
+        self._add_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_add_broadcast_gpu(self):
+        self._add_broadcast_helper(gpu_dev)
+
+    def _sub_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32)
+            y = x - x1
+
+            dy = np.random.randn(*y.shape)
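+            # sub backward: dy flows to x unchanged and to x1 negated, each
+            # reduced over its broadcast axes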
+            grad0 = np.sum(dy, axis=axis_helper(y.shape,
+                                                x.shape)).reshape(x.shape)
+            grad1 = np.sum(-dy, axis=axis_helper(y.shape,
+                                                 x1.shape)).reshape(x1.shape)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            x1.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.sub(x, x1)
+            dx0, dx1 = result.creator.backward(dy.data)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                                 grad0,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                                 grad1,
+                                                 decimal=5)
+
+    def test_sub_broadcast_cpu(self):
+        self._sub_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_sub_broadcast_gpu(self):
+        self._sub_broadcast_helper(gpu_dev)
+
+    def _mul_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32)
+            y = x * x1
+
+            dy = np.random.randn(*y.shape)
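+            # product rule: each input's gradient is dy scaled by the other
+            # operand, then reduced over the broadcast axes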
+            grad0 = np.sum(x1 * dy, axis=axis_helper(y.shape,
+                                                     x.shape)).reshape(x.shape)
+            grad1 = np.sum(x * dy, axis=axis_helper(y.shape,
+                                                    x1.shape)).reshape(x1.shape)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            x1.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.mul(x, x1)
+            dx0, dx1 = result.creator.backward(dy.data)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                                 grad0,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                                 grad1,
+                                                 decimal=5)
+
+    def test_mul_broadcast_cpu(self):
+        self._mul_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_mul_broadcast_gpu(self):
+        self._mul_broadcast_helper(gpu_dev)
+
+    def _div_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            x1 = np.random.randn(*in2).astype(np.float32) + 1.0
+            y = x / x1
+
+            dy = np.random.randn(*y.shape).astype(np.float32)
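+            # quotient rule: d(x/x1)/dx = 1/x1, d(x/x1)/dx1 = -x/x1^2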
+            grad0 = np.sum(np.power(x1, -1) * dy,
+                           axis=axis_helper(y.shape, x.shape)).reshape(x.shape)
+            grad1 = np.sum(x * -np.power(x1, -2) * dy,
+                           axis=axis_helper(y.shape,
+                                            x1.shape)).reshape(x1.shape)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            x1.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.div(x, x1)
+            dx0, dx1 = result.creator.backward(dy.data)
+            # use relative and absolute tolerances instead of a decimal count
+            np.testing.assert_allclose(tensor.to_numpy(result),
+                                       y,
+                                       rtol=1e-4,
+                                       atol=1e-4)
+            np.testing.assert_allclose(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                       grad0,
+                                       rtol=1e-4,
+                                       atol=1e-4)
+            np.testing.assert_allclose(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                       grad1,
+                                       rtol=1e-4,
+                                       atol=1e-4)
+
+    def test_div_broadcast_cpu(self):
+        self._div_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_div_broadcast_gpu(self):
+        self._div_broadcast_helper(gpu_dev)
+
+    def _pow_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randint(1, 10, size=in1).astype(np.float32)
+            x1 = np.random.randint(1, 5, size=in2).astype(np.float32)
+            y = np.power(x, x1).astype(np.float32)
+
+            dy = np.random.randn(*y.shape).astype(np.float32)
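+            # power rule: d(x^a)/dx = a*x^(a-1), d(x^a)/da = x^a*ln(x)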
+            grad0 = np.sum(x1 * np.power(x, x1 - 1) * dy,
+                           axis=axis_helper(y.shape, x.shape)).reshape(x.shape)
+            grad1 = np.sum(np.power(x, x1) * np.log(x) * dy,
+                           axis=axis_helper(y.shape,
+                                            x1.shape)).reshape(x1.shape)
+
+            x = tensor.from_numpy(x)
+            x1 = tensor.from_numpy(x1)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            x1.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.pow(x, x1)
+            dx0, dx1 = result.creator.backward(dy.data)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=2)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                                 grad0,
+                                                 decimal=2)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                                 grad1,
+                                                 decimal=2)
+
+    def test_pow_broadcast_cpu(self):
+        self._pow_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_pow_broadcast_gpu(self):
+        self._pow_broadcast_helper(gpu_dev)
+
+    def _prelu_broadcast_helper(self, dev):
+        cases = [
+            ([3, 4, 5], [5]),  # 3d vs 1d
+            ([3, 4, 5], [4, 5]),  # 3d vs 2d
+            ([3, 4, 5, 6], [5, 6]),  # 4d vs 2d
+            ([3, 4, 5, 6], [4, 5, 6]),  # 4d vs 3d
+            ([1, 4, 1, 6], [3, 1, 5, 6])  # 4d vs 4d
+        ]
+        for in1, in2 in cases:
+            x = np.random.randn(*in1).astype(np.float32)
+            slope = np.random.randn(*in2).astype(np.float32)
+            y = np.clip(x, 0, np.inf) + np.clip(x, -np.inf, 0) * slope
+
+            dy = np.random.randn(*y.shape).astype(np.float32)
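+            # x0 is a 0/1 mask (1 where x > 0): dL/dx is dy on the positive
+            # part and slope*dy elsewhere; dL/dslope is x*dy on the
+            # non-positive part only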
+            x0 = x.copy()
+            x0[x0 > 0] = 1
+            x0[x0 < 1] = 0
+            grad0 = np.sum((x0 + (1 - x0) * slope) * dy,
+                           axis=axis_helper(y.shape, x.shape)).reshape(x.shape)
+            grad1 = np.sum((1 - x0) * x * dy,
+                           axis=axis_helper(y.shape,
+                                            slope.shape)).reshape(slope.shape)
+
+            x = tensor.from_numpy(x)
+            slope = tensor.from_numpy(slope)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            slope.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.prelu(x, slope)
+            dx0, dx1 = result.creator.backward(dy.data)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx0)),
+                                                 grad0,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx1)),
+                                                 grad1,
+                                                 decimal=5)
+
+    def test_prelu_broadcast_cpu(self):
+        self._prelu_broadcast_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_prelu_broadcast_gpu(self):
+        self._prelu_broadcast_helper(gpu_dev)
+
+    def _gemm_helper(self, dev):
+        configs = [
+            # alpha, beta, transA, transB, shapeA, shapeB, shapeC, shapeY
+            [0.25, 0.35, 0, 0, (3, 4), (4, 5), (1, 5), (3, 5)],
+            [0.25, 0.35, 0, 1, (3, 4), (5, 4), (1, 5), (3, 5)],
+            [0.25, 0.35, 1, 0, (4, 3), (4, 5), (1, 5), (3, 5)],
+            [0.25, 0.35, 1, 1, (4, 3), (5, 4), (1, 5), (3, 5)],
+        ]
+        for config in configs:
+            alpha = config[0]
+            beta = config[1]
+            transA = config[2]
+            transB = config[3]
+            shapeA = config[4]
+            shapeB = config[5]
+            shapeC = config[6]
+            shapeY = config[7]
+
+            A = np.random.randn(*shapeA).astype(np.float32)
+            DY = np.ones(shapeY, dtype=np.float32)
+
+            if transB == 0:
+                out_features = shapeB[1]
+            else:
+                out_features = shapeB[0]
+
+            a = tensor.from_numpy(A)
+            a.to_device(dev)
+            dy = tensor.from_numpy(DY)
+            dy.to_device(dev)
+
+            gemm = layer.Gemm(out_features, alpha, beta, transA == 1,
+                              transB == 1)
+            result = gemm(a)
+
+            params = gemm.get_params()
+            B = tensor.to_numpy(params['W'])
+            C = tensor.to_numpy(params['b'])
+
+            da, db, dc = result.creator.backward(dy.data)
+
+            # Y = alpha * A' * B' + beta * C
+            _A = A if transA == 0 else A.T
+            _B = B if transB == 0 else B.T
+            C = C if C is not None else np.array(0)
+            Y = alpha * np.dot(_A, _B) + beta * C
+
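+            # gemm backward: DA = alpha*DY*B'^T, DB = alpha*A'^T*DY, and DC
+            # is beta*DY reduced over the axes C was broadcast along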
+            DA = alpha * np.matmul(DY, _B.T)
+            DA = DA if transA == 0 else DA.T
+            DB = alpha * np.matmul(_A.T, DY)
+            DB = DB if transB == 0 else DB.T
+            DC = beta * np.sum(DY, axis=axis_helper(Y.shape, C.shape)).reshape(
+                C.shape)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 Y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(da)),
+                                                 DA,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(db)),
+                                                 DB,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dc)),
+                                                 DC,
+                                                 decimal=5)
+
+    def test_gemm_cpu(self):
+        self._gemm_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gemm_gpu(self):
+        self._gemm_helper(gpu_dev)
+
+    def globalaveragepool_channel_first(self, dev):
+        X = np.array([[[
+            [1, 2, 3],
+            [4, 5, 6],
+            [7, 8, 9],
+        ]]]).astype(np.float32)
+        XT = np.array([[[[5]]]]).astype(np.float32)
+        DY = np.ones((1, 1, 1, 1), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        dy = tensor.from_numpy(DY)
+        dy.to_device(dev)
+
+        result = autograd.globalaveragepool(x)
+        dx = result.creator.backward(dy.data)
+
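+        # average-pool backward spreads dy uniformly: every input element
+        # receives dy divided by the number of pooled spatial elements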
+        DX = np.ones(X.shape, dtype=np.float32)
+        DX = np.multiply(DX, DY) / np.prod(X.shape[2:])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def globalaveragepool_channel_last(self, dev):
+        X = np.array([[
+            [[1], [2], [3]],
+            [[4], [5], [6]],
+            [[7], [8], [9]],
+        ]]).astype(np.float32)
+        XT = np.array([[[[5]]]]).astype(np.float32)
+        DY = np.ones((1, 1, 1, 1), dtype=np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        dy = tensor.from_numpy(DY)
+        dy.to_device(dev)
+
+        result = autograd.globalaveragepool(x, 'channel_last')
+        dx = result.creator.backward(dy.data)
+
+        DX = np.ones(X.shape, dtype=np.float32)
+        DX = np.multiply(DX, DY) / np.prod(X.shape[1:-1])
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             XT,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_globalaveragepool_cpu(self):
+        self.globalaveragepool_channel_first(cpu_dev)
+        self.globalaveragepool_channel_last(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_globalaveragepool_gpu(self):
+        self.globalaveragepool_channel_first(gpu_dev)
+        self.globalaveragepool_channel_last(gpu_dev)
+
+    def constantOfShape_test(self, dev):
+        # float_ones
+        X = np.array([4, 3, 2]).astype(np.int64)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        y = np.ones(X, dtype=np.float32)
+        result = autograd.constant_of_shape(x, 1.0)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        # int32_ones
+        X = np.array([10, 6]).astype(np.int64)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        y = np.ones(X, dtype=np.int32)
+        result = autograd.constant_of_shape(x, 1)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_constantOfShape_cpu(self):
+        self.constantOfShape_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_constantOfShape_gpu(self):
+        self.constantOfShape_test(gpu_dev)
+
+    def dropout_test(self, dev):
+        X = np.random.randn(3, 4, 5).astype(np.float32)
+        dy = np.random.randn(3, 4, 5).astype(np.float32)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(dy)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.dropout(x, 0.5)
+        dx = result.creator.backward(dy.data)
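+        # the dropout mask is random, so only output and gradient shapes are
+        # checked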
+        self.check_shape(result.shape, (3, 4, 5))
+        self.check_shape(dx.shape(), (3, 4, 5))
+
+    def test_dropout_cpu(self):
+        self.dropout_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_dropout_gpu(self):
+        self.dropout_test(gpu_dev)
+
+    def reduceSum_test(self, dev):
+        shape = [3, 2, 2]
+        cases = [(None, 1), ([1], 0), ([1], 1), ([-2], 1), ([1, 2], 1)]
+        for axes, keepdims in cases:
+            X = np.random.uniform(-10, 10, shape).astype(np.float32)
+            _axes = tuple(axes) if axes is not None else None
+            y = np.sum(X, axis=_axes, keepdims=keepdims == 1)
+            dy = np.random.randn(*y.shape).astype(np.float32)
+
+            x = tensor.from_numpy(X)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.reduce_sum(x, axes, keepdims)
+            dx = result.creator.backward(dy.data)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            self.check_shape(dx.shape(), tuple(shape))
+
+    def test_reduceSum_cpu(self):
+        self.reduceSum_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_reduceSum_gpu(self):
+        self.reduceSum_test(gpu_dev)
+
+    def reduceMean_test(self, dev):
+        shape = [3, 2, 2]
+        cases = [(None, 1), ([1], 0), ([1], 1), ([-2], 1), ([1, 2], 1)]
+        for axes, keepdims in cases:
+            X = np.random.uniform(-10, 10, shape).astype(np.float32)
+            _axes = tuple(axes) if axes is not None else None
+            y = np.mean(X, axis=_axes, keepdims=keepdims == 1)
+            dy = np.random.randn(*y.shape).astype(np.float32)
+
+            x = tensor.from_numpy(X)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.reduce_mean(x, axes, keepdims)
+            dx = result.creator.backward(dy.data)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            self.check_shape(dx.shape(), tuple(shape))
+
+    def test_reduceMean_cpu(self):
+        self.reduceMean_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_reduceMean_gpu(self):
+        self.reduceMean_test(gpu_dev)
+
+    def slice_test(self, dev):
+        X = np.random.randn(20, 10, 5).astype(np.float32)
+        indexes = np.array(range(20 * 10 * 5)).reshape(20, 10, 5)
+        configs = [
+            # starts, ends, axes, steps, y
+            [[0, 0], [3, 10], [0, 1], [1, 1], X[0:3, 0:10],
+             indexes[0:3, 0:10]],  # slice
+            [[0, 0, 3], [20, 10, 4], None, None, X[:, :, 3:4],
+             indexes[:, :, 3:4]],  # slice_default_axes
+            [[1], [1000], [1], [1], X[:, 1:1000],
+             indexes[:, 1:1000]],  # slice_end_out_of_bounds
+            [[0], [-1], [1], [1], X[:, 0:-1],
+             indexes[:, 0:-1]],  # slice_end_out_of_bounds
+            [[20, 10, 4], [0, 0, 1], [0, 1, 2], [-1, -3, -2],
+             X[20:0:-1, 10:0:-3, 4:1:-2], indexes[20:0:-1, 10:0:-3,
+                                                  4:1:-2]],  # slice_neg_steps
+            [[0, 0, 3], [20, 10, 4], [0, -2, -1], None, X[:, :, 3:4],
+             indexes[:, :, 3:4]],  # slice_negative_axes
+            # [[1000], [1000], [1], [1], X[:, 1000:1000], indexes[:, 1000:1000]], # slice_start_out_of_bounds # cannot support empty tensor
+        ]
+        for starts, ends, axes, steps, y, dx_idx in configs:
+            dy = np.ones(y.shape).astype(np.float32)
+
+            x = tensor.from_numpy(X)
+            dy = tensor.from_numpy(dy)
+            x.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.slice(x, starts, ends, axes, steps)
+            dx = result.creator.backward(dy.data)
+
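+            # expected dx is an indicator over the flattened input: 1 at the
+            # positions covered by the slice (dx_idx), 0 everywhere else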
+            dx_idx = tuple(dx_idx.flatten().tolist())
+            dX = np.array([
+                1. if i in dx_idx else 0. for i in range(20 * 10 * 5)
+            ]).reshape(X.shape)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx)),
+                                                 dX,
+                                                 decimal=5)
+
+    def test_slice_cpu(self):
+        self.slice_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_slice_gpu(self):
+        self.slice_test(gpu_dev)
+
+    def ceil_test(self, dev):
+        X = np.array([-1.5, 1.2]).astype(np.float32)
+        DY = np.ones((2), dtype=np.float32)
+        y = np.ceil(X)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.ceil(x)
+        dx = result.creator.backward(dy.data)
+        DX = np.zeros((2), dtype=np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_ceil_cpu(self):
+        self.ceil_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_ceil_gpu(self):
+        self.ceil_test(gpu_dev)
+
+    def floor_test(self, dev):
+        X = np.array([-1.9, 1.2]).astype(np.float32)
+        DY = np.ones((2), dtype=np.float32)
+        y = np.floor(X)
+
+        x = tensor.from_numpy(X)
+        dy = tensor.from_numpy(DY)
+        x.to_device(dev)
+        dy.to_device(dev)
+
+        result = autograd.floor(x)
+        dx = result.creator.backward(dy.data)
+        DX = np.zeros((2), dtype=np.float32)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_floor_cpu(self):
+        self.floor_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_floor_gpu(self):
+        self.floor_test(gpu_dev)
+
+    def _test_scatter_elements(self, dev):
+        # testing without axis
+        data = np.zeros((3, 3), dtype=np.float32)
+        indices = np.array([[1, 0, 2], [0, 2, 1]], dtype=np.int32)
+        updates = np.array([[1.0, 1.1, 1.2], [2.0, 2.1, 2.2]], dtype=np.float32)
+        output = np.array([[2.0, 1.1, 0.0], [1.0, 0.0, 2.2], [0.0, 2.1, 1.2]],
+                          dtype=np.float32)
+
+        data = tensor.from_numpy(data)
+        indices = tensor.from_numpy(indices)
+        updates = tensor.from_numpy(updates)
+        data.to_device(dev)
+        indices.to_device(dev)
+        updates.to_device(dev)
+
+        result = autograd.scatter_elements(data, indices, updates)
+        dy = tensor.from_numpy(np.ones(data.shape, dtype=np.float32))
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_almost_equal(tensor.to_numpy(result),
+                                       output,
+                                       decimal=5)
+        self.check_shape(dx.shape(), data.shape)
+
+        # testing with axis
+        data = np.array([[1.0, 2.0, 3.0, 4.0, 5.0]], dtype=np.float32)
+        indices = np.array([[1, 3]], dtype=np.int32)
+        updates = np.array([[1.1, 2.1]], dtype=np.float32)
+        output = np.array([[1.0, 1.1, 3.0, 2.1, 5.0]], dtype=np.float32)
+
+        data = tensor.from_numpy(data)
+        indices = tensor.from_numpy(indices)
+        updates = tensor.from_numpy(updates)
+        data.to_device(dev)
+        indices.to_device(dev)
+        updates.to_device(dev)
+
+        result = autograd.scatter_elements(data, indices, updates, axis=1)
+        dy = tensor.from_numpy(np.ones(data.shape, dtype=np.float32))
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_almost_equal(tensor.to_numpy(result),
+                                       output,
+                                       decimal=5)
+        self.check_shape(dx.shape(), data.shape)
+
+        # testing with negative indices:
+        data = np.array([[1.0, 2.0, 3.0, 4.0, 5.0]], dtype=np.float32)
+        indices = np.array([[1, -3]], dtype=np.int64)
+        updates = np.array([[1.1, 2.1]], dtype=np.float32)
+        output = np.array([[1.0, 1.1, 2.1, 4.0, 5.0]], dtype=np.float32)
+
+        data = tensor.from_numpy(data)
+        indices = tensor.from_numpy(indices)
+        updates = tensor.from_numpy(updates)
+        data.to_device(dev)
+        indices.to_device(dev)
+        updates.to_device(dev)
+
+        result = autograd.scatter_elements(data, indices, updates, axis=1)
+        dy = tensor.from_numpy(np.ones(data.shape, dtype=np.float32))
+        dx = result.creator.backward(dy.data)
+        np.testing.assert_almost_equal(tensor.to_numpy(result),
+                                       output,
+                                       decimal=5)
+        self.check_shape(dx.shape(), data.shape)
+
+    def test_cpu_scatter_elements(self):
+        self._test_scatter_elements(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gpu_scatter_elements(self):
+        self._test_scatter_elements(gpu_dev)
+
+    def split_test(self, dev):
+        X = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float32)
+        DY1 = np.ones((2), dtype=np.float32)
+        DY2 = np.ones((4), dtype=np.float32)
+        y = [
+            np.array([1., 2.]).astype(np.float32),
+            np.array([3., 4., 5., 6.]).astype(np.float32)
+        ]
+
+        x = tensor.from_numpy(X)
+        dy1 = tensor.from_numpy(DY1)
+        dy2 = tensor.from_numpy(DY2)
+        x.to_device(dev)
+        dy1.to_device(dev)
+        dy2.to_device(dev)
+
+        result = autograd.split(x, 0, (2, 4))
+        dx = result[0].creator.backward(dy1.data, dy2.data)
+        DX = np.ones((6), dtype=np.float32)
+
+        for idx, _r in enumerate(result):
+            np.testing.assert_array_almost_equal(tensor.to_numpy(_r),
+                                                 y[idx],
+                                                 decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(
+            tensor.from_raw_tensor(dx)),
+                                             DX,
+                                             decimal=5)
+
+    def test_split_cpu(self):
+        self.split_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_split_gpu(self):
+        self.split_test(gpu_dev)
+
+    def gather_test(self, dev):
+        config = [([0, 1, 3], 0), ([0, 1, 3], 1), ([[0, 1], [1, 2], [2, 3]], 1),
+                  ([0, -1, -2], 0)]  # (indices, axis)
+        for indices, _axis in config:
+            X = np.random.randn(5, 4, 3, 2).astype(np.float32)
+            y = np.take(X, indices, axis=_axis)
+            DY = np.ones(y.shape, dtype=np.float32)
+
+            x = tensor.from_numpy(X)
+            dy = tensor.from_numpy(DY)
+            x.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.gather(x, _axis, indices)
+            dx = result.creator.backward(dy.data)
+
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            self.check_shape(dx.shape(), tuple(X.shape))
+
+    def test_gather_cpu(self):
+        self.gather_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_gather_gpu(self):
+        self.gather_test(gpu_dev)
+
+    def tile_test(self, dev):
+        config_repeats = [
+            2,
+            [2, 2],
+            [2, 1, 2],
+        ]
+        for repeats in config_repeats:
+            X = np.array([0, 1, 2]).astype(np.float32)
+            y = np.tile(X, repeats)
+            DY = np.copy(y)
+
+            x = tensor.from_numpy(X)
+            dy = tensor.from_numpy(DY)
+            x.to_device(dev)
+            dy.to_device(dev)
+
+            result = autograd.tile(x, repeats)
+            dx = result.creator.backward(dy.data)
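+            # dy is a copy of y = tile(X), so backward sums the repeated
+            # copies: DX = X * prod(repeats)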
+            DX = np.multiply(X, np.prod(repeats))
+            np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                                 y,
+                                                 decimal=5)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(
+                tensor.from_raw_tensor(dx)),
+                                                 DX,
+                                                 decimal=5)
+
+    def test_tile_cpu(self):
+        self.tile_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_tile_gpu(self):
+        self.tile_test(gpu_dev)
+
+    def noneZero_test(self, dev):
+        X = np.array([[1, 0], [1, 1]]).astype(np.float32)
+        y = np.array((np.nonzero(X)))
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        result = autograd.nonzero(x)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_noneZero_cpu(self):
+        self.noneZero_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_noneZero_gpu(self):
+        self.noneZero_test(gpu_dev)
+
+    def cast_test(self, dev):
+        config = [
+            (np.float32, np.int32, tensor.int32),
+            (np.int32, np.float32, tensor.float32),
+        ]
+        for t1, t2, t3 in config:
+            X = np.array([[1, 0], [1, 1]]).astype(t1)
+            y = np.array([[1, 0], [1, 1]]).astype(t2)
+
+            x = tensor.from_numpy(X)
+            x.to_device(dev)
+
+            result = autograd.cast(x, t3)
+            result_np = tensor.to_numpy(result)
+            assert result_np.dtype == y.dtype, "type %s != %s." % (
+                result_np.dtype, y.dtype)
+            np.testing.assert_array_almost_equal(result_np, y, decimal=5)
+
+    def test_cast_cpu(self):
+        self.cast_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_cast_gpu(self):
+        self.cast_test(gpu_dev)
+
+    def onehot_test(self, dev):
+
+        def one_hot(indices, depth, axis=-1, dtype=np.float32):  # type: ignore
+            ''' Compute one hot from indices at a specific axis '''
+            values = np.asarray(indices)
+            rank = len(values.shape)
+            depth_range = np.arange(depth)
+            if axis < 0:
+                axis += (rank + 1)
+            ls = values.shape[0:axis]
+            rs = values.shape[axis:rank]
+            targets = np.reshape(depth_range, (1,) * len(ls) +
+                                 depth_range.shape + (1,) * len(rs))
+            values = np.reshape(np.mod(values, depth), ls + (1,) + rs)
+            return np.asarray(targets == values, dtype=dtype)
+
+        axisValue = 1
+        on_value = 3
+        off_value = 1
+        output_type = np.float32
+        indices = np.array([[1, 9], [2, 4]], dtype=np.float32)
+        depth = np.array([10], dtype=np.float32)
+        values = np.array([off_value, on_value], dtype=output_type)
+        y = one_hot(indices, depth, axis=axisValue, dtype=output_type)
+        y = y * (on_value - off_value) + off_value
+
+        x = tensor.from_numpy(indices)
+        x.to_device(dev)
+
+        result = autograd.onehot(axisValue, x, depth, values)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(result),
+                                             y,
+                                             decimal=5)
+
+    def test_onehot_cpu(self):
+        self.onehot_test(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_onehot_gpu(self):
+        self.onehot_test(gpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_cudnn_rnn_operation(self, dev=gpu_dev):
+        # init params, inputs
+        hidden_size = 7
+        seq_length = 5
+        batch_size = 6
+        feature_size = 3
+        directions = 2
+        num_layers = 2
+
+        for mode in [0, 1, 2, 3]:  # 0-relu, 1-tanh, 2-lstm, 3-gru
+            x = tensor.Tensor(shape=(seq_length, batch_size, feature_size),
+                              device=dev).gaussian(0, 1)
+            hx = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                      hidden_size),
+                               device=dev).gaussian(0, 1)
+            cx = tensor.Tensor(shape=(num_layers * directions, batch_size,
+                                      hidden_size),
+                               device=dev).gaussian(0, 1)
+            dy = tensor.Tensor(shape=(seq_length, batch_size,
+                                      directions * hidden_size),
+                               device=dev).gaussian(0, 1)
+
+            # init cudnn rnn op
+            rnn_handle = singa.CudnnRNNHandle(x.data,
+                                              hidden_size,
+                                              mode,
+                                              num_layers=num_layers,
+                                              dropout=0.1,
+                                              bidirectional=1)
+
+            w = tensor.Tensor(shape=(rnn_handle.weights_size,),
+                              device=dev).gaussian(0, 1)
+
+            # return sequence, y shape = {seq, bs, hidden}
+            # init operator/operation
+            _rnn = autograd._RNN(rnn_handle, return_sequences=True)
+
+            # forward
+            y = _rnn(x, hx, cx, w)[0]
+            assert y.shape == dy.shape
+
+            # backward
+            dx, dhx, dcx, dw = _rnn.backward(dy.data)
+
+            # return no sequence, y shape = {bs, hidden}
+            _rnn = autograd._RNN(rnn_handle, return_sequences=False)
+            dy = tensor.Tensor(shape=(batch_size, directions * hidden_size),
+                               device=dev).gaussian(0, 1)
+            y = _rnn(x, hx, cx, w)[0]
+
+            assert y.shape == dy.shape
+            # backward
+            dx, dhx, dcx, dw = _rnn.backward(dy.data)
+
+    def cossim_helper(self, dev):
+        A = np.random.randn(*[3, 10]).astype(np.float32)
+        B = np.random.randn(*[3, 10]).astype(np.float32)
+
+        a = tensor.from_numpy(A)
+        a.to_device(dev)
+        b = tensor.from_numpy(B)
+        b.to_device(dev)
+
+        DY = np.random.randn(3).astype(np.float32)
+        dy = tensor.from_numpy(DY)
+        dy.to_device(dev)
+
+        y = autograd.cossim(a, b)
+        da, db = y.creator.backward(dy.data)  # CTensor
+
+        self.check_shape(y.shape, (3,))
+        self.check_shape(da.shape(), (3, 10))
+        self.check_shape(db.shape(), (3, 10))
+
+    def test_cossim_cpu(self):
+        self.cossim_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_cossim_gpu(self):
+        self.cossim_helper(gpu_dev)
+
+    def expand_helper(self, dev):
+        shape = [3, 1]
+        X = np.reshape(np.arange(1, np.prod(shape) + 1, dtype=np.float32),
+                       shape)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        # dim_changed
+        new_shape = [2, 1, 6]
+        y_t = X * np.ones(new_shape, dtype=np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+        y = autograd.expand(x, new_shape)
+        dx = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        self.check_shape(dx.shape(), tuple(shape))
+
+        # dim_unchanged
+        new_shape_2 = [3, 4]
+        y_t2 = np.tile(X, 4)
+        dy2 = tensor.from_numpy(y_t2)
+        dy2.to_device(dev)
+        y2 = autograd.expand(x, new_shape_2)
+        dx2 = y2.creator.backward(dy2.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y2), y_t2)
+        self.check_shape(dx2.shape(), tuple(shape))
+
+    def test_expand_cpu(self):
+        self.expand_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_expand_gpu(self):
+        self.expand_helper(gpu_dev)
+
+    def pad_helper(self, dev):
+        X = np.array([
+            [1.0, 1.2],
+            [2.3, 3.4],
+            [4.5, 5.7],
+        ]).astype(np.float32)
+        Y1 = np.array([
+            [0.0, 0.0, 1.0, 1.2],
+            [0.0, 0.0, 2.3, 3.4],
+            [0.0, 0.0, 4.5, 5.7],
+        ],).astype(np.float32)
+        Y2 = np.array([
+            [1.0, 1.2, 1.0, 1.2],
+            [2.3, 3.4, 2.3, 3.4],
+            [4.5, 5.7, 4.5, 5.7],
+        ],).astype(np.float32)
+        Y3 = np.array([
+            [1.0, 1.0, 1.0, 1.2],
+            [2.3, 2.3, 2.3, 3.4],
+            [4.5, 4.5, 4.5, 5.7],
+        ],).astype(np.float32)
+
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+        pads = [0, 2, 0, 0]
+
+        DY = np.random.randn(3, 4).astype(np.float32)
+        dy = tensor.from_numpy(DY)
+        dy.to_device(dev)
+
+        y1 = autograd.pad(x, "constant", pads)
+        y2 = autograd.pad(x, "reflect", pads)
+        y3 = autograd.pad(x, "edge", pads)
+        dx1 = y1.creator.backward(dy.data)
+        dx2 = y2.creator.backward(dy.data)
+        dx3 = y3.creator.backward(dy.data)
+        pad_width = []
+        half_width = len(pads) // 2
+        for i in range(half_width):
+            pad_width += [[pads[i], pads[i + half_width]]]
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y1),
+                                             np.pad(
+                                                 X,
+                                                 pad_width=pad_width,
+                                                 mode="constant",
+                                                 constant_values=0.,
+                                             ),
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y2),
+                                             np.pad(
+                                                 X,
+                                                 pad_width=pad_width,
+                                                 mode="reflect",
+                                             ),
+                                             decimal=5)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y3),
+                                             np.pad(
+                                                 X,
+                                                 pad_width=pad_width,
+                                                 mode="edge",
+                                             ),
+                                             decimal=5)
+        self.check_shape(dx1.shape(), (3, 2))
+        self.check_shape(dx2.shape(), (3, 2))
+        self.check_shape(dx3.shape(), (3, 2))
+
+    def test_pad_cpu(self):
+        self.pad_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_pad_gpu(self):
+        self.pad_helper(gpu_dev)
+
+    def upsample_helper(self, dev):
+        X = np.array([[[
+            [1, 2],
+            [3, 4],
+        ]]], dtype=np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        scales = np.array([1.0, 1.0, 2.0, 3.0], dtype=np.float32)
+        y_t = np.array([[[
+            [1, 1, 1, 2, 2, 2],
+            [1, 1, 1, 2, 2, 2],
+            [3, 3, 3, 4, 4, 4],
+            [3, 3, 3, 4, 4, 4],
+        ]]],
+                       dtype=np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+
+        y = autograd.upsample(x, "nearest", scales)
+        dx = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        self.check_shape(dx.shape(), tuple(X.shape))
+
+    def test_upsample_cpu(self):
+        self.upsample_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_upsample_gpu(self):
+        self.upsample_helper(gpu_dev)
+
+    def depth_space_helper(self, dev):
+        # (1, 8, 2, 3) input tensor
+        X = np.array(
+            [[[[0., 1., 2.], [3., 4., 5.]], [[9., 10., 11.], [12., 13., 14.]],
+              [[18., 19., 20.], [21., 22., 23.]],
+              [[27., 28., 29.], [30., 31., 32.]],
+              [[36., 37., 38.], [39., 40., 41.]],
+              [[45., 46., 47.], [48., 49., 50.]],
+              [[54., 55., 56.], [57., 58., 59.]],
+              [[63., 64., 65.], [66., 67., 68.]]]],
+            dtype=np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
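+        # depth_to_space with block size 2 rearranges the 8 channels into
+        # 2 channels with 2x2 larger spatial dims; DCR and CRD modes differ
+        # only in how channels are grouped into blocks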
+        # (1, 2, 4, 6) output tensor
+        y_t = np.array(
+            [[[[0., 18., 1., 19., 2., 20.], [36., 54., 37., 55., 38., 56.],
+               [3., 21., 4., 22., 5., 23.], [39., 57., 40., 58., 41., 59.]],
+              [[9., 27., 10., 28., 11., 29.], [45., 63., 46., 64., 47., 65.],
+               [12., 30., 13., 31., 14., 32.], [48., 66., 49., 67., 50., 68.]]]
+            ],
+            dtype=np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+        y = autograd.depth_to_space(x, 2, "DCR")
+        dx = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)), X)
+
+        y = autograd.space_to_depth(dy, 2, "DCR")
+        dx = y.creator.backward(x.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), X)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)), y_t)
+
+        y_t = np.array(
+            [[[[0., 9., 1., 10., 2., 11.], [18., 27., 19., 28., 20., 29.],
+               [3., 12., 4., 13., 5., 14.], [21., 30., 22., 31., 23., 32.]],
+              [[36., 45., 37., 46., 38., 47.], [54., 63., 55., 64., 56., 65.],
+               [39., 48., 40., 49., 41., 50.], [57., 66., 58., 67., 59., 68.]]]
+            ],
+            dtype=np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+        y = autograd.depth_to_space(x, 2, "CRD")
+        dx = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)), X)
+
+        y = autograd.space_to_depth(dy, 2, "CRD")
+        dx = y.creator.backward(x.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), X)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)), y_t)
+
+    def test_depth_space_cpu(self):
+        self.depth_space_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_depth_space_gpu(self):
+        self.depth_space_helper(gpu_dev)
+
+    def test_invalid_inputs(self, dev=cpu_dev):
+        _1d = tensor.Tensor((10,), dev)
+        _2d = tensor.Tensor((10, 10), dev)
+        _3d = tensor.Tensor((10, 10, 10), dev)
+        self.assertRaises(AssertionError, autograd.softmax_cross_entropy, _2d,
+                          _3d)
+        self.assertRaises(AssertionError, autograd.mse_loss, _2d, _3d)
+        self.assertRaises(AssertionError, autograd.add_bias, _2d, _1d, 3)
+        self.assertRaises(AssertionError, autograd.ranking_loss, _2d, _1d)
+
+    def where_helper(self, dev):
+        X = np.array([[1, 2], [3, 4]], dtype=np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        X2 = np.array([[9, 8], [7, 6]], dtype=np.float32)
+        x2 = tensor.from_numpy(X2)
+        x2.to_device(dev)
+
+        condition = [[True, False], [True, True]]
+        y_t = np.where(condition, X, X2)
+        dx1_t = np.array([[1, 0], [3, 4]], dtype=np.float32)
+        dx2_t = np.array([[0, 8], [0, 0]], dtype=np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+
+        y = autograd.where(x, x2, condition)
+        dx1, dx2 = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(dx1)), dx1_t)
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(dx2)), dx2_t)
+
+    def test_where_cpu(self):
+        self.where_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_where_gpu(self):
+        self.where_helper(gpu_dev)
+
+    def rounde_helper(self, dev):
+        X = np.array([
+            0.1, 0.5, 0.9, 1.2, 1.5, 1.8, 2.3, 2.5, 2.7, -1.1, -1.5, -1.9, -2.2,
+            -2.5, -2.8
+        ]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
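+        # rounde rounds half to even (banker's rounding): 0.5 -> 0., 1.5 -> 2.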
+        y_t = np.array(
+            [0., 0., 1., 1., 2., 2., 2., 2., 3., -1., -2., -2., -2., -2.,
+             -3.]).astype(np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+
+        y = autograd.rounde(x)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+
+    def test_rounde_cpu(self):
+        self.rounde_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_rounde_gpu(self):
+        self.rounde_helper(gpu_dev)
+
+    def round_helper(self, dev):
+        X = np.array([
+            0.1, 0.5, 0.9, 1.2, 1.5, 1.8, 2.3, 2.5, 2.7, -1.1, -1.5, -1.9, -2.2,
+            -2.5, -2.8
+        ]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
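+        # round rounds half away from zero: 0.5 -> 1., -2.5 -> -3.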
+        y_t = np.array(
+            [0., 1., 1., 1., 2., 2., 2., 3., 3., -1., -2., -2., -2., -3.,
+             -3.]).astype(np.float32)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
+
+        y = autograd.round(x)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+
+    def test_round_cpu(self):
+        self.round_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_round_gpu(self):
+        self.round_helper(gpu_dev)
+
+    def embedding_helper(self, dev):
+        embedding = layer.Embedding(10, 3)
+
+        X = np.array([[0, 1, 2, 3], [9, 8, 7, 6]])
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        dy = tensor.Tensor(shape=(2, 4, 3), device=dev)
+        dy.gaussian(0.0, 1.0)
+
+        y = embedding(x)  # PyTensor
+        dx, dW = y.creator.backward(dy.data)  # CTensor
+
+        self.check_shape(y.shape, (2, 4, 3))
+        self.check_shape(dx.shape(), (2, 4))
+        self.check_shape(dW.shape(), (10, 3))
+
+    def test_embedding_cpu(self):
+        self.embedding_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_embedding_gpu(self):
+        self.embedding_helper(gpu_dev)
+
+    def _cossim_value(self, dev):
+        # numpy val
+        np.random.seed(0)
+        bs = 1000
+        vec_s = 1200
+        a = np.random.random((bs, vec_s)).astype(np.float32)
+        b = np.random.random((bs, vec_s)).astype(np.float32)
+        dy = np.random.random((bs,)).astype(np.float32)
+
+        # singa tensor
+        ta = tensor.from_numpy(a)
+        tb = tensor.from_numpy(b)
+        tdy = tensor.from_numpy(dy)
+        ta.to_device(dev)
+        tb.to_device(dev)
+        tdy.to_device(dev)
+
+        # singa forward and backward
+        ty = autograd.cossim(ta, tb)
+        tda, tdb = ty.creator.backward(tdy.data)
+
+        np_forward = list()
+        for i in range(len(a)):
+            a_norm = np.linalg.norm(a[i])
+            b_norm = np.linalg.norm(b[i])
+            ab_dot = np.dot(a[i], b[i])
+            out = ab_dot / (a_norm * b_norm)
+            np_forward.append(out)
+
+        np_backward_a = list()
+        np_backward_b = list()
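+        # reference gradient: d cos(a,b)/da = b/(|a||b|) - cos(a,b)*a/|a|^2,
+        # and symmetrically for b, each scaled by the upstream dy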
+        for i in range(len(a)):
+            a_norm = np.linalg.norm(a[i])
+            b_norm = np.linalg.norm(b[i])
+            da = dy[i] * (b[i] / (a_norm * b_norm) - (np_forward[i] * a[i]) /
+                          (a_norm * a_norm))
+            db = dy[i] * (a[i] / (a_norm * b_norm) - (np_forward[i] * b[i]) /
+                          (b_norm * b_norm))
+            np_backward_a.append(da)
+            np_backward_b.append(db)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(ty),
+                                             np.array(np_forward))
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(tda)), np_backward_a)
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(tdb)), np_backward_b)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_cossim_value_gpu(self):
+        self._cossim_value(gpu_dev)
+
+    def test_cossim_value_cpu(self):
+        self._cossim_value(cpu_dev)
+
+    def test_mse_loss_value(self, dev=cpu_dev):
+        y = np.random.random((1000, 1200)).astype(np.float32)
+        tar = np.random.random((1000, 1200)).astype(np.float32)
+        # get singa value
+        sy = tensor.from_numpy(y, dev)
+        starget = tensor.from_numpy(tar, dev)
+        sloss = autograd.mse_loss(sy, starget)
+        sgrad = sloss.creator.backward()
+        # get np value result
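+        # loss = mean((tar - y)^2), hence dL/dy = -2 * (tar - y) / N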
+        np_loss = np.mean(np.square(tar - y))
+        np_grad = -2 * (tar - y) / np.prod(tar.shape)
+        # value check
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(sgrad)), np_grad)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(sloss), np_loss)
+
+    def erf_helper(self, dev):
+        X = np.array([
+            0.1, 0.5, 0.9, 1.2, 1.5, 1.8, 2.3, 2.5, 2.7, -1.1, -1.5, -1.9, -2.2,
+            -2.5, -2.8
+        ]).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        import math
+
+        y_t = np.vectorize(math.erf)(X)
+        dy = tensor.from_numpy(y_t)
+        dy.to_device(dev)
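+        # erf'(x) = 2/sqrt(pi) * exp(-x^2); the expected gradient below
+        # assumes backward scales erf'(x) by the upstream dy (= y_t here)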
+        dx_t = y_t * 2. / np.pi**0.5 * np.exp(-np.power(X, 2))
+
+        y = autograd.erf(x)
+        dx = y.creator.backward(dy.data)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t)
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(tensor.from_raw_tensor(dx)), dx_t)
+
+    def test_erf_cpu(self):
+        self.erf_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
+    def test_erf_gpu(self):
+        self.erf_helper(gpu_dev)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_opt.py b/test/python/test_opt.py
new file mode 100644
index 0000000..8027d3a
--- /dev/null
+++ b/test/python/test_opt.py
@@ -0,0 +1,230 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# =============================================================================
+from __future__ import division
+
+import math
+import unittest
+import numpy as np
+import functools
+
+
+from singa import tensor
+from singa import singa_wrap as singa
+from singa import opt
+
+from cuda_helper import gpu_dev, cpu_dev
+
+def assertTensorEqual(x, y, decimal=6):
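+    """Check that two singa tensors agree in shape, dtype, device and,
+    up to the given number of decimal places, in values."""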
+    assert x.shape == y.shape
+    assert x.dtype == y.dtype
+    assert x.device.id() == y.device.id()
+    d = x.device
+    x.to_host()
+    y.to_host()
+    np.testing.assert_array_almost_equal(
+        x.data.GetFloatValue(int(x.size())),
+        y.data.GetFloatValue(int(y.size())), decimal)
+    x.to_device(d)
+    y.to_device(d)
+
+def on_cpu_gpu(func):
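+    """Run the wrapped test on the CPU device, and again on the GPU
+    device when singa was built with CUDA support."""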
+    @functools.wraps(func)
+    def wrapper_decorator(*args, **kwargs):
+        func(*args, dev=cpu_dev, **kwargs)
+        if singa.USE_CUDA:
+            func(*args, dev=gpu_dev, **kwargs)
+    return wrapper_decorator
+
+class TestDecayScheduler(unittest.TestCase):
+    def test_exponential_decay_cpu(self):
+        lr = opt.ExponentialDecay(0.1, 2, 0.5, True)
+        sgd1 = opt.SGD(lr=lr)
+        for i in range(5):
+            np.testing.assert_array_almost_equal(tensor.to_numpy(sgd1.lr_value),
+                                                 [0.1 * 0.5**(i // 2)])
+            sgd1.step()
+
+    def test_exponential_decay_no_staircase_cpu(self):
+        lr = opt.ExponentialDecay(0.1, 2, 0.5, False)
+        sgd1 = opt.SGD(lr=lr)
+        for i in range(5):
+            np.testing.assert_array_almost_equal(tensor.to_numpy(sgd1.lr_value),
+                                                 [0.1 * 0.5**(i / 2)])
+            sgd1.step()
+
+    @on_cpu_gpu
+    def test_const_decay_scheduler(self, dev):
+        c1 = opt.Constant(0.2)
+        step = tensor.Tensor((1,), device=dev).set_value(0)
+        lr_val = c1(step)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(lr_val), [0.2])
+        step += 1
+        np.testing.assert_array_almost_equal(tensor.to_numpy(c1(step)), [0.2])
+
+class TestOptimizer(unittest.TestCase):
+    @on_cpu_gpu
+    def test_optimizer(self, dev):
+        o1 = opt.Optimizer(0.1)
+
+        # test step
+        o1.step()
+        o1.step()
+
+        # test get states
+        s1 = o1.get_states()
+        self.assertAlmostEqual(s1['step_counter'], 2)
+
+        # test set states
+        s2 = {'step_counter': 5}
+        o1.set_states(s2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(o1.step_counter), [5])
+
+    @on_cpu_gpu
+    def test_sgd_const_lr(self, dev=cpu_dev):
+        dev.EnableGraph(False)
+        sgd1 = opt.SGD(lr=0.1)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+
+        w_step1 = w - 0.1 * g
+        sgd1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1)
+
+    @on_cpu_gpu
+    def test_RMSProp_const_lr(self, dev=cpu_dev):
+        dev.EnableGraph(False)
+        opt1 = opt.RMSProp(lr=0.1)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+
+        # running_average = running_average * rho + param_grad * param_grad * (1 - rho)
+        # param_value = param_value - lr * param_grad / sqrt(running_average + epsilon)
+
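+        # first step with a zero-initialised running average and the
+        # default rho = 0.9, i.e. running_average = (1 - rho) * g * g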
+        running_average = 0.1 * tensor.square(g)
+        tmp = running_average + 1e-8
+        tmp = tensor.sqrt(tmp)
+        tmp = g / tmp
+
+        w_step1 = w - 0.1 * tmp
+        opt1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1)
+
+    @on_cpu_gpu
+    def test_AdaGrad_const_lr(self, dev=cpu_dev):
+        dev.EnableGraph(False)
+        opt1 = opt.AdaGrad(lr=0.1)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+
+        # history = history + param_grad * param_grad
+        # param_value = param_value - lr * param_grad / sqrt(history + epsilon)
+
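+        # first step with a zero-initialised history, i.e. history = g * g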
+        history = tensor.square(g)
+        tmp = history + 1e-8
+        tmp = tensor.sqrt(tmp)
+        tmp = g / tmp
+
+        w_step1 = w - 0.1 * tmp
+        opt1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1)
+
+    @on_cpu_gpu
+    def test_Adam_const_lr(self, dev=cpu_dev):
+        dev.EnableGraph(False)
+        opt1 = opt.Adam(lr=0.1)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(1.0)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+
+        # m := beta_1 * m + (1 - beta_1) * grad
+        # v := beta_2 * v + (1 - beta_2) * grad * grad
+        # m_norm = m / (1 - beta_1 ^ step)
+        # v_norm = v / (1 - beta_2 ^ step)
+        # param := param - (lr * m_norm) / (sqrt(v_norm) + epsilon)
+
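+        # first step with zero-initialised m, v and the default
+        # beta_1 = 0.9, beta_2 = 0.999; the bias corrections below divide
+        # by (1 - beta_1) = 0.1 and (1 - beta_2) = 0.001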
+        m = 0.1 * g
+        tmp = tensor.square(g)
+        v = 0.001 * tmp
+
+        m_norm = m / 0.1
+        v_norm = v / 0.001
+
+        tmp = tensor.sqrt(v_norm) + 1e-8
+        tmp = m_norm / tmp
+
+        w_step1 = w - 0.1 * tmp
+        opt1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1, decimal=5)
+
+    @on_cpu_gpu
+    def test_sgd_const_lr_momentum(self, dev=cpu_dev):
+        sgd1 = opt.SGD(lr=0.1, momentum=0.9)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.01)
+
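+        # first step: buf = g, so the applied update is lr * buf = lr * g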
+        w_step1 = w - 0.1 * g
+        buf = g
+
+        sgd1.apply(w.name, w, g)
+        sgd1.step()
+
+        assertTensorEqual(w, w_step1)
+
+        buf = g + 0.9 * buf
+        w_step2 = w - 0.1 * buf
+
+        sgd1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step2)
+
+    @on_cpu_gpu
+    def test_sgd_const_lr_weight_decay(self, dev=cpu_dev):
+        sgd1 = opt.SGD(lr=0.1, weight_decay=0.2)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.01)
+
+        w_step1 = w - 0.1 * (g + 0.2 * w)
+
+        sgd1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1)
+
+    # @on_cpu_gpu
+    def test_sgd_const_lr_momentum_nesterov(self, dev=cpu_dev):
+        sgd1 = opt.SGD(lr=0.1, momentum=0.9, nesterov=True)
+        w_shape = (2, 3)
+        w = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+        g = tensor.Tensor(w_shape, device=dev).set_value(0.1)
+
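+        # first step: buf = g, and nesterov applies lr * (g + momentum * buf)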
+        buf = g
+        w_step1 = w - 0.1 * (g + 0.9 * buf)
+
+        sgd1.apply(w.name, w, g)
+
+        assertTensorEqual(w, w_step1)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/python/test_optimizer.py b/test/python/test_optimizer.py
deleted file mode 100644
index cfd13c0..0000000
--- a/test/python/test_optimizer.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# =============================================================================
-import unittest
-import math
-import numpy as np
-
-
-import singa.tensor as tensor
-import singa.optimizer as opt
-import singa.device as device
-from singa import singa_wrap
-
-if singa_wrap.USE_CUDA:
-    cuda = device.create_cuda_gpu()
-
-
-def np_adam(plist, glist, mlist, vlist, lr, t, b1=0.9, b2=0.999):
-    for p, g, m, v in zip(plist, glist, mlist, vlist):
-        m *=b1
-        m += (1-b1) * g
-        v *= b2
-        v += (1-b2) * g * g
-        alpha = lr * math.sqrt(1. - math.pow(b2, t)) / (1. - math.pow(b1, t))
-        p -= alpha * m / (np.sqrt(v) + 1e-8)
-
-
-class TestOptimizer(unittest.TestCase):
-
-    def setUp(self):
-        self.np_W = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32)
-        self.W = tensor.from_numpy(self.np_W)
-        self.np_g = np.array([0.1, 0.3, 0.1, 0.2], dtype=np.float32)
-        self.g = tensor.from_numpy(self.np_g)
-
-    def to_cuda(self):
-        self.W.to_device(cuda)
-        self.g.to_device(cuda)
-
-    def test_sgd(self):
-        lr = 0.1
-        sgd = opt.SGD(lr)
-        sgd.apply(0, self.g, self.W, 'w')
-        w = tensor.to_numpy(self.W)
-        for i in range(self.W.size()):
-            self.assertAlmostEqual(w[i], self.np_W[i] - lr * self.np_g[i])
-
-    def test_adam(self):
-        lr = 0.1
-        n, m = 4, 6
-        p1 = np.random.rand(n, m)
-        p2 = np.random.rand(n, m)
-        g1 = np.random.rand(n, m) * 0.01
-        g2 = np.random.rand(n, m) * 0.01
-        m1 = np.zeros((n, m))
-        m2 = np.zeros((n, m))
-        v1 = np.zeros((n, m))
-        v2 = np.zeros((n, m))
-        t1 = tensor.from_numpy(p1)
-        t2 = tensor.from_numpy(p2)
-        tg1 = tensor.from_numpy(g1)
-        tg2 = tensor.from_numpy(g2)
-
-        for t in range(1, 10):
-            np_adam([p1, p2], [g1, g2], [m1, m2], [v1, v2], lr, t)
-
-        adam = opt.Adam(lr=lr)
-        for t in range(1, 10):
-            adam.apply(0, tg1, t1, 'p1', t)
-            adam.apply(0, tg2, t2, 'p2', t)
-
-        t1 = tensor.to_numpy(t1)
-        t2 = tensor.to_numpy(t2)
-        for t, p in zip([t1, t2], [p1, p2]):
-            for i in range(n):
-                for j in range(m):
-                    self.assertAlmostEqual(t[i, j], p[i, j], 6)
-
-    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
-    def test_sgd_cuda(self):
-        lr = 0.1
-        sgd = opt.SGD(lr)
-        self.to_cuda()
-        sgd.apply(0, self.g, self.W, 'w')
-        self.W.to_host()
-        w = tensor.to_numpy(self.W)
-        for i in range(self.W.size()):
-            self.assertAlmostEqual(w[i], self.np_W[i] - lr * self.np_g[i])
-
-    def test_constraint(self):
-        threshold = 0.02
-        cons = opt.L2Constraint(threshold)
-        cons.apply(0, self.W, self.g)
-        g = tensor.to_numpy(self.g)
-        nrm = np.linalg.norm(self.np_g) / self.np_g.size
-        for i in range(g.size):
-            self.assertAlmostEqual(g[i], self.np_g[i] * threshold / nrm)
-
-    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
-    def test_constraint_cuda(self):
-        threshold = 0.02
-        self.to_cuda()
-        cons = opt.L2Constraint(threshold)
-        cons.apply(0, self.W, self.g)
-        self.g.to_host()
-        g = tensor.to_numpy(self.g)
-        nrm = np.linalg.norm(self.np_g) / self.np_g.size
-        for i in range(g.size):
-            self.assertAlmostEqual(g[i], self.np_g[i] * threshold / nrm)
-
-    def test_regularizer(self):
-        coefficient = 0.0001
-        reg = opt.L2Regularizer(coefficient)
-        reg.apply(0, self.W, self.g)
-        g = tensor.to_numpy(self.g)
-        for i in range(g.size):
-            self.assertAlmostEqual(g[i],
-                                   self.np_g[i] + coefficient * self.np_W[i])
-
-    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
-    def test_regularizer_cuda(self):
-        coefficient = 0.0001
-        reg = opt.L2Regularizer(coefficient)
-        self.to_cuda()
-        reg.apply(0, self.W, self.g)
-        self.g.to_host()
-        g = tensor.to_numpy(self.g)
-        for i in range(g.size):
-            self.assertAlmostEqual(g[i],
-                                   self.np_g[i] + coefficient * self.np_W[i])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/python/test_tensor.py b/test/python/test_tensor.py
index 9cd2411..82d6d5c 100644
--- a/test/python/test_tensor.py
+++ b/test/python/test_tensor.py
@@ -15,14 +15,17 @@
 # specific language governing permissions and limitations
 # under the License.
 # =============================================================================
+from __future__ import division
 
 import math
 import unittest
 import numpy as np
 
-
 from singa import tensor
-from singa.proto import core_pb2
+from singa import singa_wrap as singa_api
+from singa import autograd
+
+from cuda_helper import gpu_dev, cpu_dev
 
 
 class TestTensorMethods(unittest.TestCase):
@@ -40,10 +43,10 @@
         self.assertTupleEqual(t.shape, shape)
         self.assertEqual(t.shape[0], shape[0])
         self.assertEqual(t.shape[1], shape[1])
-        self.assertEqual(tensor.product(shape), 2*3)
+        self.assertEqual(tensor.product(shape), 2 * 3)
         self.assertEqual(t.ndim(), 2)
-        self.assertEqual(t.size(), 2*3)
-        self.assertEqual(t.memsize(), 2*3*tensor.sizeof(core_pb2.kFloat32))
+        self.assertEqual(t.size(), 2 * 3)
+        self.assertEqual(t.memsize(), 2 * 3 * tensor.sizeof(tensor.float32))
         self.assertFalse(t.is_transpose())
 
     def test_unary_operators(self):
@@ -52,11 +55,12 @@
         t += 1.23
         self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], 1.23)
         t -= 0.23
-        self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], 1.23-0.23)
+        self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], 1.23 - 0.23)
         t *= 2.5
-        self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], (1.23-0.23)*2.5)
+        self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], (1.23 - 0.23) * 2.5)
         t /= 2
-        self.assertAlmostEqual(tensor.to_numpy(t)[0, 0], (1.23-0.23)*2.5/2)
+        self.assertAlmostEqual(
+            tensor.to_numpy(t)[0, 0], (1.23 - 0.23) * 2.5 / 2)
 
     def test_binary_operators(self):
         t = self.t
@@ -64,11 +68,11 @@
         s = self.s
         s += 2.1
         a = t + s
-        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2+2.1, 5)
+        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2 + 2.1, 5)
         a = t - s
-        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2-2.1, 5)
+        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2 - 2.1, 5)
         a = t * s
-        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2*2.1, 5)
+        self.assertAlmostEqual(tensor.to_numpy(a)[0, 0], 3.2 * 2.1, 5)
         ''' not implemented yet
         a = t / s
         self.assertAlmostEqual(tensor.to_numpy(a)[0,0], 3.2/2.1, 5)
@@ -85,6 +89,8 @@
         self.assertEqual(tensor.to_numpy(a)[0, 0], 0)
         a = t >= 3.45
         self.assertEqual(tensor.to_numpy(a)[0, 0], 1)
+        a = t == 3.45
+        self.assertEqual(tensor.to_numpy(a)[0, 0], 1)
         a = tensor.lt(t, 3.45)
         self.assertEqual(tensor.to_numpy(a)[0, 0], 0)
         a = tensor.le(t, 3.45)
@@ -93,6 +99,8 @@
         self.assertEqual(tensor.to_numpy(a)[0, 0], 0)
         a = tensor.ge(t, 3.45)
         self.assertEqual(tensor.to_numpy(a)[0, 0], 1)
+        a = tensor.eq(t, 3.45)
+        self.assertEqual(tensor.to_numpy(a)[0, 0], 1)
 
     def test_tensor_copy(self):
         t = tensor.Tensor((2, 3))
@@ -153,16 +161,456 @@
         y = 2 / x
         self.assertEqual(tensor.average(y), 2.)
 
+    def matmul_high_dim_helper(self, dev):
+        configs = [
+            [(1, 12, 7, 64), (1, 12, 64, 7)],
+            [(1, 7, 768), (768, 768)],
+        ]
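+        # the shapes cover a batched 4-D x 4-D matmul and a 3-D x 2-D
+        # matmul that broadcasts the 2-D weight over the batch dimension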
+        for config in configs:
+            X = np.random.random(config[0]).astype(np.float32)
+            x = tensor.from_numpy(X)
+            x.to_device(dev)
+
+            W = np.random.random(config[1]).astype(np.float32)
+            w = tensor.from_numpy(W)
+            w.to_device(dev)
+
+            y_t = np.matmul(X, W)
+            y = autograd.matmul(x, w)
+            np.testing.assert_array_almost_equal(tensor.to_numpy(y), y_t, 3)
+
+    def test_matmul_high_dim_cpu(self):
+        self.matmul_high_dim_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_matmul_high_dim_gpu(self):
+        self.matmul_high_dim_helper(gpu_dev)
+
+    def test_tensor_inplace_api(self):
+        """ tensor inplace methods alter internal state and also return self
+        """
+        x = tensor.Tensor((3,))
+        y = x.set_value(1)
+        self.assertTrue(y is x)
+
+        x = tensor.Tensor((3,))
+        y = x.uniform(1, 2)
+        self.assertTrue(y is x)
+
+        x = tensor.Tensor((3,))
+        y = x.bernoulli(1)
+        self.assertTrue(y is x)
+
+        x = tensor.Tensor((3,))
+        y = x.gaussian(1, 2)
+        self.assertTrue(y is x)
+
     def test_numpy_convert(self):
         a = np.asarray([[1, 0, 0], [0, 1, 0]], dtype=np.int)
         t = tensor.from_numpy(a)
         b = tensor.to_numpy(t)
-        self.assertEqual(np.sum(a-b), 0)
+        self.assertEqual(np.sum(a - b), 0)
 
         a = np.asarray([[1, 0, 0], [0, 1, 0]], dtype=np.float32)
         t = tensor.from_numpy(a)
         b = tensor.to_numpy(t)
-        self.assertEqual(np.sum(a-b), 0.)
+        self.assertEqual(np.sum(a - b), 0.)
+
+    def test_transpose(self):
+        a = np.array(
+            [1.1, 1.1, 1.1, 1.1, 1.4, 1.3, 1.1, 1.6, 1.1, 1.1, 1.1, 1.2])
+        a = np.reshape(a, (2, 3, 2))
+        ta = tensor.from_numpy(a)
+
+        A1 = np.transpose(a)
+        tA1 = tensor.transpose(ta)
+        TA1 = tensor.to_numpy(tA1)
+        A2 = np.transpose(a, [0, 2, 1])
+        tA2 = tensor.transpose(ta, [0, 2, 1])
+        TA2 = tensor.to_numpy(tA2)
+
+        np.testing.assert_array_almost_equal(TA1, A1)
+        np.testing.assert_array_almost_equal(TA2, A2)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_gpu_6d_transpose(self, dev=gpu_dev):
+        axes1 = [5, 4, 3, 2, 1, 0]
+        s1 = (2, 7, 6, 5, 4, 3)
+        s2 = (2, 4, 3, 5, 7, 6)
+        a = np.random.random(s1)
+
+        ta = tensor.from_numpy(a)
+        ta.to_device(dev)
+
+        ta = tensor.reshape(ta, s1)
+        ta = tensor.transpose(ta, axes1)
+        ta = tensor.reshape(ta, s2)
+
+        a = np.reshape(a, s1)
+        a = np.transpose(a, axes1)
+        a = np.reshape(a, s2)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(ta), a)
+
+    def test_einsum(self):
+
+        a = np.array(
+            [1.1, 1.1, 1.1, 1.1, 1.4, 1.3, 1.1, 1.6, 1.1, 1.1, 1.1, 1.2])
+        a = np.reshape(a, (2, 3, 2))
+        ta = tensor.from_numpy(a)
+
+        res1 = np.einsum('kij,kij->kij', a, a)
+        tres1 = tensor.einsum('kij,kij->kij', ta, ta)
+        Tres1 = tensor.to_numpy(tres1)
+        res2 = np.einsum('kij,kih->kjh', a, a)
+        tres2 = tensor.einsum('kij,kih->kjh', ta, ta)
+        Tres2 = tensor.to_numpy(tres2)
+
+        self.assertAlmostEqual(np.sum(Tres1 - res1), 0., places=3)
+        self.assertAlmostEqual(np.sum(Tres2 - res2), 0., places=3)
+
+    def test_repeat(self):
+
+        a = np.array(
+            [1.1, 1.1, 1.1, 1.1, 1.4, 1.3, 1.1, 1.6, 1.1, 1.1, 1.1, 1.2])
+        a = np.reshape(a, (2, 3, 2))
+        ta = tensor.from_numpy(a)
+
+        ta_repeat1 = tensor.repeat(ta, 2, axis=None)
+        a_repeat1 = np.repeat(a, 2, axis=None)
+        Ta_repeat1 = tensor.to_numpy(ta_repeat1)
+        ta_repeat2 = tensor.repeat(ta, 4, axis=1)
+        a_repeat2 = np.repeat(a, 4, axis=1)
+        Ta_repeat2 = tensor.to_numpy(ta_repeat2)
+
+        self.assertAlmostEqual(np.sum(Ta_repeat1 - a_repeat1), 0., places=3)
+        self.assertAlmostEqual(np.sum(Ta_repeat2 - a_repeat2), 0., places=3)
+
+    def test_sum(self):
+        a = np.array(
+            [1.1, 1.1, 1.1, 1.1, 1.4, 1.3, 1.1, 1.6, 1.1, 1.1, 1.1, 1.2])
+        a = np.reshape(a, (2, 3, 2))
+        ta = tensor.from_numpy(a)
+
+        a_sum0 = np.sum(a)
+        ta_sum0 = tensor.sum(ta)
+        Ta_sum0 = tensor.to_numpy(ta_sum0)
+        a_sum1 = np.sum(a, axis=1)
+        ta_sum1 = tensor.sum(ta, axis=1)
+        Ta_sum1 = tensor.to_numpy(ta_sum1)
+        a_sum2 = np.sum(a, axis=2)
+        ta_sum2 = tensor.sum(ta, axis=2)
+        Ta_sum2 = tensor.to_numpy(ta_sum2)
+
+        self.assertAlmostEqual(np.sum(a_sum0 - Ta_sum0), 0., places=3)
+        self.assertAlmostEqual(np.sum(a_sum1 - Ta_sum1), 0., places=3)
+        self.assertAlmostEqual(np.sum(a_sum2 - Ta_sum2), 0., places=3)
+
+    def test_tensordot(self):
+        a = np.array(
+            [1.1, 1.1, 1.1, 1.1, 1.4, 1.3, 1.1, 1.6, 1.1, 1.1, 1.1, 1.2])
+        a = np.reshape(a, (2, 3, 2))
+
+        ta = tensor.from_numpy(a)
+
+        res1 = np.tensordot(a, a, axes=1)
+        tres1 = tensor.tensordot(ta, ta, axes=1)
+        Tres1 = tensor.to_numpy(tres1)
+        self.assertAlmostEqual(np.sum(Tres1 - res1), 0., places=3)
+        np.testing.assert_array_almost_equal(Tres1, res1)
+
+        res2 = np.tensordot(a, a, axes=([0, 1], [2, 1]))
+        tres2 = tensor.tensordot(ta, ta, axes=([0, 1], [2, 1]))
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tres2), res2)
+
+    def test_reshape(self):
+        a = np.array([[[1.1, 1.1, 1.4], [1.1, 1.1, 1.1]],
+                      [[1.1, 1.1, 1.3], [1.6, 1.1, 1.2]]])
+        ta = tensor.from_numpy(a)
+        tb = tensor.reshape(ta, [2, 6])
+        self.assertEqual(tb.shape[0], 2)
+        self.assertEqual(tb.shape[1], 6)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(tb),
+                                             a.reshape((2, 6)))
+
+    def test_transpose_then_reshape(self):
+        a = np.array([[[1.1, 1.1], [1.1, 1.1], [1.4, 1.3]],
+                      [[1.1, 1.6], [1.1, 1.1], [1.1, 1.2]]])
+        TRANSPOSE_AXES = (2, 0, 1)
+        RESHAPE_DIMS = (2, 6)
+
+        ta = tensor.from_numpy(a)
+        ta = ta.transpose(TRANSPOSE_AXES)
+        ta = ta.reshape(RESHAPE_DIMS)
+
+        np.testing.assert_array_almost_equal(
+            tensor.to_numpy(ta),
+            np.reshape(a.transpose(TRANSPOSE_AXES), RESHAPE_DIMS))
+
+    def _concatenate_helper(self, dev):
+        np1 = np.random.random([5, 6, 7, 8]).astype(np.float32)
+        np2 = np.random.random([5, 6, 7, 1]).astype(np.float32)
+        np3 = np.concatenate((np1, np2), axis=3)
+
+        t1 = tensor.Tensor(device=dev, data=np1)
+        t2 = tensor.Tensor(device=dev, data=np2)
+
+        t3 = tensor.concatenate((t1, t2), 3)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(t3), np3)
+
+    def test_concatenate_cpu(self):
+        self._concatenate_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_concatenate_gpu(self):
+        self._concatenate_helper(gpu_dev)
+
+    def _subscription_helper(self, dev):
+        np1 = np.random.random((5, 5, 5, 5)).astype(np.float32)
+        sg_tensor = tensor.Tensor(device=dev, data=np1)
+        sg_tensor_ret = sg_tensor[1:3, :, 1:, :-1]
+        np.testing.assert_array_almost_equal((tensor.to_numpy(sg_tensor_ret)),
+                                             np1[1:3, :, 1:, :-1])
+
+    def test_subscription_cpu(self):
+        self._subscription_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_subscription_gpu(self):
+        self._subscription_helper(gpu_dev)
+
+    def _ceil_helper(self, dev):
+
+        np1 = np.random.random([5, 6, 7, 8]).astype(np.float32)
+        np1 = np1 * 10
+        np2 = np.ceil(np1)
+
+        t1 = tensor.Tensor(device=dev, data=np1)
+
+        t2 = tensor.ceil(t1)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(t2), np2)
+
+    def test_ceil_cpu(self):
+        self._ceil_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_ceil_gpu(self):
+        self._ceil_helper(gpu_dev)
+
+    def _astype_helper(self, dev):
+        shape1 = [2, 3]
+        shape2 = [3, 2]
+
+        np_flt = np.random.random(shape1).astype(np.float32)
+        np_flt = np_flt * 10 - 5
+
+        np_int = np_flt.astype(np.int32)
+        np_flt2 = np_int.astype(np.float32)
+
+        t2 = tensor.Tensor(device=dev, data=np_flt)
+        t2 = t2.as_type('int')
+        np.testing.assert_array_almost_equal(tensor.to_numpy(t2), np_int)
+
+        t1 = t2.reshape(shape2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(t1),
+                                             np_int.reshape(shape2))
+
+        t1 = t1.as_type('float')
+        np.testing.assert_array_almost_equal(tensor.to_numpy(t1),
+                                             np_flt2.reshape(shape2))
+
+    def test_astype_cpu(self):
+        self._astype_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_astype_gpu(self):
+        self._astype_helper(gpu_dev)
+
+    def _3d_matmul_helper(self, dev):
+        np_x1 = np.random.randn(2, 3, 4).astype(np.float32)
+        np_x2 = np.random.randn(2, 4, 3).astype(np.float32)
+        x1 = tensor.from_numpy(np_x1)
+        x1.to_device(dev)
+        x2 = tensor.from_numpy(np_x2)
+        x2.to_device(dev)
+        y = autograd.matmul(x1, x2)
+        np_y = np.matmul(np_x1, np_x2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), np_y)
+
+        np_x1 = np.random.randn(2, 3, 4).astype(np.float32)
+        np_x2 = np.random.randn(2, 4, 5).astype(np.float32)
+        x1 = tensor.from_numpy(np_x1)
+        x1.to_device(dev)
+        x2 = tensor.from_numpy(np_x2)
+        x2.to_device(dev)
+        y = autograd.matmul(x1, x2)
+        np_y = np.matmul(np_x1, np_x2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), np_y)
+
+    def test_3d_matmul_cpu(self):
+        self._3d_matmul_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_3d_matmul_gpu(self):
+        self._3d_matmul_helper(gpu_dev)
+
+    def _4d_matmul_helper(self, dev):
+        np_x1 = np.random.randn(2, 12, 256, 64).astype(np.float32)
+        np_x2 = np.random.randn(2, 12, 64, 256).astype(np.float32)
+        x1 = tensor.from_numpy(np_x1)
+        x1.to_device(dev)
+        x2 = tensor.from_numpy(np_x2)
+        x2.to_device(dev)
+        y = autograd.matmul(x1, x2)
+        np_y = np.matmul(np_x1, np_x2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), np_y)
+
+        np_x1 = np.random.randn(2, 12, 256, 64).astype(np.float32)
+        np_x2 = np.random.randn(2, 12, 64, 1024).astype(np.float32)
+        x1 = tensor.from_numpy(np_x1)
+        x1.to_device(dev)
+        x2 = tensor.from_numpy(np_x2)
+        x2.to_device(dev)
+        y = autograd.matmul(x1, x2)
+        np_y = np.matmul(np_x1, np_x2)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), np_y)
+
+    def test_4d_matmul_cpu(self):
+        self._4d_matmul_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_4d_matmul_gpu(self):
+        self._4d_matmul_helper(gpu_dev)
+
+    def _matmul_transpose_helper(self, dev):
+
+        X = np.random.random((1, 256, 12, 64)).astype(np.float32)
+        x = tensor.from_numpy(X)
+        x.to_device(dev)
+
+        W = np.random.random((1, 256, 12, 64)).astype(np.float32)
+        w = tensor.from_numpy(W)
+        w.to_device(dev)
+
+        X = np.transpose(X, (0, 2, 1, 3))
+        W = np.transpose(W, (0, 2, 1, 3))
+        W = np.transpose(W, (0, 1, 3, 2))
+        Y = np.matmul(X, W)
+
+        x = autograd.transpose(x, (0, 2, 1, 3))
+        w = autograd.transpose(w, (0, 2, 1, 3))
+        w = autograd.transpose(w, (0, 1, 3, 2))
+        y = autograd.matmul(x, w)
+
+        np.testing.assert_array_almost_equal(tensor.to_numpy(x), X)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(w), W)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), Y)
+
+    def test_matmul_transpose_cpu(self):
+        self._matmul_transpose_helper(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_matmul_transpose_gpu(self):
+        self._matmul_transpose_helper(gpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_gaussian_gpu(self, dev=gpu_dev):
+        x = tensor.Tensor((3, 5, 3, 5), device=dev)
+        x.gaussian(0, 1)
+        x = tensor.Tensor((4, 5, 3, 2), device=dev)
+        x.gaussian(0, 1)
+
+    def _kfloat32_int(self, dev=gpu_dev):
+        np.random.seed(0)
+        x_val = np.random.random((2, 3)).astype(np.float32) * 10
+        x = tensor.from_numpy(x_val)
+        x.to_device(dev)
+        scalar = np.random.random((1,))[0] * 100
+        y = x + scalar
+        self.assertEqual(y.dtype, tensor.float32)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), x_val + scalar)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_kfloat32_int_gpu(self):
+        self._kfloat32_int(gpu_dev)
+
+    def test_kfloat32_int_cpu(self):
+        self._kfloat32_int(cpu_dev)
+
+    def _kint_float(self, dev=gpu_dev):
+        np.random.seed(0)
+        x_val = np.random.randint(0, 10, (2, 3))
+        x = tensor.from_numpy(x_val)
+        x.to_device(dev)
+        scalar = np.random.random((1,))[0] * 100
+        y = x + scalar
+        self.assertEqual(y.dtype, tensor.float32)
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), x_val + scalar)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_kint_float_gpu(self):
+        self._kint_float(gpu_dev)
+
+    def test_kint_float_cpu(self):
+        self._kint_float(cpu_dev)
+
+    def _kint_kint(self, dev=gpu_dev):
+        a_np = np.array([[[17, 4, 9, 22, 18], [-9, 9, -1, -1, 4],
+                          [1, 14, 7, 1, 4], [3, 14, -2, 3, -8]],
+                         [[-25, 6, 8, -7, 22], [-14, 0, -1, 15, 14],
+                          [1, 3, -8, -19, -3], [1, 12, 12, -3, -3]],
+                         [[-10, -14, -17, 19, -5], [-4, -12, 7, -16, -2],
+                          [-8, 3, -5, -11, 0], [4, 0, 3, -6, -3]]],
+                        dtype=np.int32)
+        b_np = np.array([[[-6, -3, -8, -17, 1], [-4, -16, 4, -9, 0],
+                          [7, 1, 11, -12, 4], [-6, -8, -5, -3, 0]],
+                         [[-11, 9, 4, -15, 14], [18, 11, -1, -10, 10],
+                          [-4, 12, 2, 9, 3], [7, 0, 17, 1, 4]],
+                         [[18, -13, -12, 9, -11], [19, -4, -7, 19, 14],
+                          [18, 9, -8, 19, -2], [8, 9, -1, 6, 9]]],
+                        dtype=np.int32)
+        ta = tensor.from_numpy(a_np)
+        tb = tensor.from_numpy(b_np)
+        ta.to_device(dev)
+        tb.to_device(dev)
+        y = ta - tb
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), a_np - b_np)
+
+    def test_kint_kint_cpu(self, dev=cpu_dev):
+        self._kint_kint(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_kint_kint_gpu(self, dev=gpu_dev):
+        self._kint_kint(gpu_dev)
+
+    def _kint_kint_bc(self, dev=gpu_dev):
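+        # same as _kint_kint, but b is 2-D, so the integer subtraction
+        # is also exercised with broadcasting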
+        a_np = np.array([[[17, 4, 9, 22, 18], [-9, 9, -1, -1, 4],
+                          [1, 14, 7, 1, 4], [3, 14, -2, 3, -8]],
+                         [[-25, 6, 8, -7, 22], [-14, 0, -1, 15, 14],
+                          [1, 3, -8, -19, -3], [1, 12, 12, -3, -3]],
+                         [[-10, -14, -17, 19, -5], [-4, -12, 7, -16, -2],
+                          [-8, 3, -5, -11, 0], [4, 0, 3, -6, -3]]],
+                        dtype=np.int32)
+        b_np = np.array([[-6, -3, -8, -17, 1], [-4, -16, 4, -9, 0],
+                         [7, 1, 11, -12, 4], [-6, -8, -5, -3, 0]],
+                        dtype=np.int32)
+        ta = tensor.from_numpy(a_np)
+        tb = tensor.from_numpy(b_np)
+        ta.to_device(dev)
+        tb.to_device(dev)
+        y = ta - tb
+        np.testing.assert_array_almost_equal(tensor.to_numpy(y), a_np - b_np)
+
+    def test_kint_kint_bc_cpu(self, dev=cpu_dev):
+        self._kint_kint_bc(cpu_dev)
+
+    @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
+    def test_kint_kint_bc_gpu(self, dev=gpu_dev):
+        self._kint_kint_bc(gpu_dev)
 
 
 if __name__ == '__main__':
diff --git a/test/singa/test_accuracy.cc b/test/singa/test_accuracy.cc
index 5b8067d..a8b8f0b 100644
--- a/test/singa/test_accuracy.cc
+++ b/test/singa/test_accuracy.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/model/metric.h"
diff --git a/test/singa/test_activation.cc b/test/singa/test_activation.cc
index ee7a44e..d320b14 100644
--- a/test/singa/test_activation.cc
+++ b/test/singa/test_activation.cc
@@ -1,27 +1,28 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <math.h>  // exp, tanh
 
 #include "../src/model/layer/activation.h"
 #include "gtest/gtest.h"
-#include <math.h> // exp, tanh
 
 using singa::Activation;
 using singa::Shape;
@@ -65,19 +66,14 @@
 
     float* y = new float[n];
     if (acti.Mode() == "sigmoid") {
-      for (size_t i = 0; i < n; i++)
-        y[i] = 1.f / (1.f + exp(-x[i]));
-    }
-    else if (acti.Mode() == "tanh") {
-      for (size_t i = 0; i < n; i++)
-        y[i] = tanh(x[i]);
-    }
-    else if (acti.Mode() == "relu") {
-      for (size_t i = 0; i < n; i++)
-        y[i] = (x[i] >= 0.f) ? x[i] : 0.f;
-    }
-    else
+      for (size_t i = 0; i < n; i++) y[i] = 1.f / (1.f + exp(-x[i]));
+    } else if (acti.Mode() == "tanh") {
+      for (size_t i = 0; i < n; i++) y[i] = tanh(x[i]);
+    } else if (acti.Mode() == "relu") {
+      for (size_t i = 0; i < n; i++) y[i] = (x[i] >= 0.f) ? x[i] : 0.f;
+    } else {
       LOG(FATAL) << "Unkown activation: " << acti.Mode();
+    }
     EXPECT_FLOAT_EQ(y[0], yptr[0]);
     EXPECT_FLOAT_EQ(y[4], yptr[4]);
     EXPECT_FLOAT_EQ(y[5], yptr[5]);
@@ -117,17 +113,14 @@
     if (acti.Mode() == "sigmoid") {
       for (size_t i = 0; i < n; i++)
         dx[i] = grad[i] * yptr[i] * (1.0f - yptr[i]);
-    }
-    else if (acti.Mode() == "tanh") {
-      for (size_t i = 0; i < n; i++)
-        dx[i] = grad[i] * (1 - yptr[i] * yptr[i]);
-    }
-    else if (acti.Mode() == "relu") {
+    } else if (acti.Mode() == "tanh") {
+      for (size_t i = 0; i < n; i++) dx[i] = grad[i] * (1 - yptr[i] * yptr[i]);
+    } else if (acti.Mode() == "relu") {
       for (size_t i = 0; i < n; i++)
         dx[i] = grad[i] * (x[i] > 0.f) + acti.Negative_slope() * (x[i] <= 0.f);
-    }
-    else
+    } else {
       LOG(FATAL) << "Unkown activation: " << acti.Mode();
+    }
     EXPECT_FLOAT_EQ(dx[0], xptr[0]);
     EXPECT_FLOAT_EQ(dx[4], xptr[4]);
     EXPECT_FLOAT_EQ(dx[5], xptr[5]);
diff --git a/test/singa/test_adagrad.cc b/test/singa/test_adagrad.cc
index 7408411..38830b0 100644
--- a/test/singa/test_adagrad.cc
+++ b/test/singa/test_adagrad.cc
@@ -1,28 +1,29 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <cmath>
 
 #include "gtest/gtest.h"
 #include "singa/model/optimizer.h"
 #include "singa/singa_config.h"
-#include <cmath>
 
 TEST(AdaGrad, ApplyCPU) {
   singa::AdaGrad adagrad;
diff --git a/test/singa/test_batchnorm.cc b/test/singa/test_batchnorm.cc
index fadba42..787250b 100644
--- a/test/singa/test_batchnorm.cc
+++ b/test/singa/test_batchnorm.cc
@@ -1,27 +1,28 @@
 /*********************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+
+#include <iostream>
 
 #include "../src/model/layer/batchnorm.h"
 #include "gtest/gtest.h"
-#include <iostream>
 
 using namespace singa;
 
@@ -98,7 +99,7 @@
   Tensor out = batchnorm.Forward(kTrain, in);
   auto ret = batchnorm.Backward(kTrain, dy_in);
   Tensor dx = ret.first;
-  const auto & shape = dx.shape();
+  const auto &shape = dx.shape();
   EXPECT_EQ(2u, shape.size());
   EXPECT_EQ(2u, shape[0]);
   EXPECT_EQ(2u, shape[1]);
@@ -110,7 +111,7 @@
 
   Tensor dbnScale = ret.second.at(0);
   const float *dbnScaleptr = dbnScale.data<float>();
-  const auto & dbnScaleShape = dbnScale.shape();
+  const auto &dbnScaleShape = dbnScale.shape();
   EXPECT_EQ(1u, dbnScaleShape.size());
   EXPECT_EQ(2u, dbnScaleShape[0]);
 
@@ -119,7 +120,7 @@
 
   Tensor dbnBias = ret.second.at(1);
   const float *dbnBiasptr = dbnBias.data<float>();
-  const auto & dbnBiasShape = dbnBias.shape();
+  const auto &dbnBiasShape = dbnBias.shape();
   EXPECT_EQ(1u, dbnBiasShape.size());
   EXPECT_EQ(2u, dbnBiasShape[0]);
 
diff --git a/test/singa/test_binfile_rw.cc b/test/singa/test_binfile_rw.cc
index 53c29fa..9da35f3 100644
--- a/test/singa/test_binfile_rw.cc
+++ b/test/singa/test_binfile_rw.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../include/singa/io/reader.h"
 #include "../include/singa/io/writer.h"
diff --git a/test/singa/test_channel.cc b/test/singa/test_channel.cc
index 68b0017..521394e 100644
--- a/test/singa/test_channel.cc
+++ b/test/singa/test_channel.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/utils/channel.h"
diff --git a/test/singa/test_concat.cc b/test/singa/test_concat.cc
index d7f1060..64713bb 100644
--- a/test/singa/test_concat.cc
+++ b/test/singa/test_concat.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/concat.h"
 #include "gtest/gtest.h"
@@ -25,8 +25,8 @@
 using singa::Shape;
 
 TEST(Concat, Setup) {
-  Shape s1 {2u, 3u};
-  Shape s2 {1u, 3u};
+  Shape s1{2u, 3u};
+  Shape s2{1u, 3u};
   singa::LayerConf conf;
   conf.set_type("singa_concat");
   conf.mutable_concat_conf()->set_axis(0);
@@ -53,16 +53,13 @@
   EXPECT_EQ(out.size(), 1u);
 
   out[0].ToHost();
-  const float * outptr = out[0].data<float>();
+  const float* outptr = out[0].data<float>();
   for (size_t i = 0; i < a; i++) {
-    for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outptr[i * c + j], 1.0f);
+    for (size_t j = 0; j < c; j++) EXPECT_FLOAT_EQ(outptr[i * c + j], 1.0f);
   }
   for (size_t i = a; i < a + b; i++) {
-    for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outptr[i  * c + j], 2.0f);
+    for (size_t j = 0; j < c; j++) EXPECT_FLOAT_EQ(outptr[i * c + j], 2.0f);
   }
-
 }
 
 void ForwardConcatColumnTest(std::shared_ptr<singa::Device> dev) {
@@ -81,16 +78,15 @@
   auto out = layer.Forward(singa::kTrain, {t1, t2});
   EXPECT_EQ(out.size(), 1u);
   out[0].ToHost();
-  const float * outptr = out[0].data<float>();
+  const float* outptr = out[0].data<float>();
   for (size_t i = 0; i < c; i++) {
     for (size_t j = 0; j < a; j++)
       EXPECT_FLOAT_EQ(outptr[i * (a + b) + j], 1.0f);
   }
   for (size_t i = 0; i < c; i++) {
     for (size_t j = a; j < a + b; j++)
-      EXPECT_FLOAT_EQ(outptr[i  * (a + b) + j], 2.0f);
+      EXPECT_FLOAT_EQ(outptr[i * (a + b) + j], 2.0f);
   }
-
 }
 TEST(Concat, ForwardConcatRowCpp) {
   ForwardConcatRowTest(singa::defaultDevice);
@@ -100,7 +96,6 @@
   ForwardConcatColumnTest(singa::defaultDevice);
 }
 
-
 #ifdef USE_CUDA
 TEST(Concat, ForwardConcatRowCuda) {
   ForwardConcatRowTest(std::make_shared<singa::CudaGPU>());
@@ -111,7 +106,6 @@
 }
 #endif  // USE_CUDA
 
-
 void BackwardConcatRowTest(std::shared_ptr<singa::Device> dev) {
   size_t a = 2u, b = 1u, c = 3u;
   singa::LayerConf conf;
@@ -137,15 +131,15 @@
   const float* tptr = t.data<float>();
 
   grads[0].ToHost();
-  const float * outa = grads[0].data<float>();
+  const float* outa = grads[0].data<float>();
   for (size_t i = 0; i < a; i++)
     for (size_t j = 0; j < c; j++)
       EXPECT_FLOAT_EQ(outa[i * c + j], tptr[i * c + j]);
   grads[1].ToHost();
-  const float * outb = grads[1].data<float>();
+  const float* outb = grads[1].data<float>();
   for (size_t i = 0; i < b; i++)
     for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outb[i  * c + j], tptr[(i + a) * c + j]);
+      EXPECT_FLOAT_EQ(outb[i * c + j], tptr[(i + a) * c + j]);
 }
 
 void BackwardConcatColumnTest(std::shared_ptr<singa::Device> dev) {
@@ -173,15 +167,15 @@
   const float* tptr = t.data<float>();
 
   grads[0].ToHost();
-  const float * outa = grads[0].data<float>();
+  const float* outa = grads[0].data<float>();
   for (size_t i = 0; i < c; i++)
     for (size_t j = 0; j < a; j++)
       EXPECT_FLOAT_EQ(outa[i * a + j], tptr[i * (a + b) + j]);
   grads[1].ToHost();
-  const float * outb = grads[1].data<float>();
+  const float* outb = grads[1].data<float>();
   for (size_t i = 0; i < c; i++)
     for (size_t j = 0; j < b; j++)
-      EXPECT_FLOAT_EQ(outb[i  * b + j], tptr[i * (a + b) + a + j]);
+      EXPECT_FLOAT_EQ(outb[i * b + j], tptr[i * (a + b) + a + j]);
 }
 
 TEST(Concat, BackwardConcatRowCpp) {
@@ -192,7 +186,6 @@
   BackwardConcatColumnTest(singa::defaultDevice);
 }
 
-
 #ifdef USE_CUDA
 TEST(Concat, BackwardConcatRowCuda) {
   BackwardConcatRowTest(std::make_shared<singa::CudaGPU>());
diff --git a/test/singa/test_convolution.cc b/test/singa/test_convolution.cc
index 4cfb38d..f74cf9b 100644
--- a/test/singa/test_convolution.cc
+++ b/test/singa/test_convolution.cc
@@ -1,28 +1,27 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "singa/singa_config.h"
 
 #ifdef USE_CBLAS
 #include "../src/model/layer/convolution.h"
-
 #include "gtest/gtest.h"
 
 using singa::Convolution;
diff --git a/test/singa/test_cpp_cpu.cc b/test/singa/test_cpp_cpu.cc
index 5f3308a..b8efe37 100644
--- a/test/singa/test_cpp_cpu.cc
+++ b/test/singa/test_cpp_cpu.cc
@@ -1,30 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
-#include  "singa/core/device.h"
+#include "singa/core/device.h"
 #include "singa/proto/core.pb.h"
 
-using singa::CppCPU;
 using singa::Block;
+using singa::CppCPU;
 TEST(CppCPU, Constructor) {
   CppCPU dev;
   EXPECT_EQ(-1, dev.id());
@@ -41,10 +41,8 @@
 TEST(CppCPU, Exec) {
   CppCPU dev;
   Block* b = dev.NewBlock(4);
-  int x = 1, y =3, z = 0;
-  dev.Exec([x, y, &z](singa::Context *ctx) {
-      z = x + y;
-      }, {b}, {b}, false);
+  int x = 1, y = 3, z = 0;
+  dev.Exec([x, y, &z](singa::Context* ctx) { z = x + y; }, {b}, {b});
   EXPECT_EQ(x + y, z);
   dev.FreeBlock(b);
 }
@@ -60,7 +58,7 @@
   EXPECT_EQ('x', bstr[3]);
 
   Block* c = dev.NewBlock(4);
-  dev.CopyDataToFrom(c, b, 4, singa::kHostToHost, 0, 0);
+  dev.CopyDataToFrom(c, b, 4, singa::kHostToHost, 0, 0, dev.context(0));
   const char* cstr = static_cast<const char*>(c->data());
 
   EXPECT_EQ('a', cstr[0]);
@@ -69,4 +67,3 @@
   dev.FreeBlock(b);
   dev.FreeBlock(c);
 }
-
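
The test_cpp_cpu.cc hunks above track two Device API changes, not just formatting: `Exec()` drops its trailing boolean argument (falling back to the parameter's default), and `CopyDataToFrom()` now takes the device context explicitly. A minimal standalone sketch of the updated calls, assuming only the types the test itself uses:

```cpp
#include "singa/core/device.h"

// Sketch (not part of the patch): exercise Exec() and CopyDataToFrom()
// exactly as the updated test does.
void ExecAndCopyDemo() {
  singa::CppCPU dev;
  singa::Block* b = dev.NewBlock(4);
  int x = 1, y = 3, z = 0;
  // The old call passed a trailing `false`; the new one relies on the
  // argument's default value.
  dev.Exec([x, y, &z](singa::Context* ctx) { z = x + y; }, {b}, {b});

  singa::Block* c = dev.NewBlock(4);
  // Size, direction, and offsets are unchanged; the device context at
  // index 0 is now named explicitly.
  dev.CopyDataToFrom(c, b, 4, singa::kHostToHost, 0, 0, dev.context(0));
  dev.FreeBlock(b);
  dev.FreeBlock(c);
}
```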
diff --git a/test/singa/test_cross_entropy.cc b/test/singa/test_cross_entropy.cc
index 3d704c8..aa48e6f 100644
--- a/test/singa/test_cross_entropy.cc
+++ b/test/singa/test_cross_entropy.cc
@@ -1,27 +1,27 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
-#include "singa/core/tensor.h"
 #include "singa/core/device.h"
+#include "singa/core/tensor.h"
 #include "singa/model/loss.h"
 #include "singa/singa_config.h"
 
@@ -29,11 +29,11 @@
 class TestSoftmaxCrossEntropy : public ::testing::Test {
  protected:
   virtual void SetUp() {
-    p.Reshape(singa::Shape{2, 4});
-    t.Reshape(singa::Shape{2, 1});
-    ta.Reshape(singa::Shape{2, 4});
+    p.Resize(singa::Shape{2, 4});
+    t.Resize(singa::Shape{2, 1});
+    ta.Resize(singa::Shape{2, 4});
   }
-  const float pdat[8] = {0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f };
+  const float pdat[8] = {0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f};
   const int tdat[2] = {0, 2};
   const int tary[8] = {1, 0, 0, 0, 0, 0, 1, 0};
 
@@ -42,36 +42,37 @@
 
 TEST_F(TestSoftmaxCrossEntropy, CppForward) {
   p.CopyDataFromHostPtr(pdat, 8);
-  t.AsType(singa::kInt);
+  EXPECT_TRUE(p.block()->initialized());
   t.CopyDataFromHostPtr(tdat, 2);
+  t.AsType(singa::kInt);
 
   singa::SoftmaxCrossEntropy cross_entropy;
   const Tensor& loss = cross_entropy.Forward(singa::kEval, p, t);
   auto ldat = loss.data<float>();
 
-  const float result_test = (float) -log(0.25);
+  const float result_test = (float)-log(0.25);
   EXPECT_FLOAT_EQ(ldat[0], result_test);
   EXPECT_FLOAT_EQ(ldat[1], result_test);
 }
 
 TEST_F(TestSoftmaxCrossEntropy, CppForwardAryTarget) {
   p.CopyDataFromHostPtr(pdat, 8);
-  ta.AsType(singa::kInt);
   ta.CopyDataFromHostPtr(tary, 8);
+  ta.AsType(singa::kInt);
 
   singa::SoftmaxCrossEntropy cross_entropy;
   const Tensor& loss = cross_entropy.Forward(singa::kEval, p, ta);
   auto ldat = loss.data<float>();
 
-  const float result_test = (float) -log(0.25);
+  const float result_test = (float)-log(0.25);
   EXPECT_FLOAT_EQ(ldat[0], result_test);
   EXPECT_FLOAT_EQ(ldat[1], result_test);
 }
 
 TEST_F(TestSoftmaxCrossEntropy, CppBackward) {
   p.CopyDataFromHostPtr(pdat, 8);
-  t.AsType(singa::kInt);
   t.CopyDataFromHostPtr(tdat, 2);
+  t.AsType(singa::kInt);
 
   singa::SoftmaxCrossEntropy cross_entropy;
   cross_entropy.Forward(singa::kTrain, p, t);
@@ -90,8 +92,8 @@
 
 TEST_F(TestSoftmaxCrossEntropy, CppBackwardAryTarget) {
   p.CopyDataFromHostPtr(pdat, 8);
-  ta.AsType(singa::kInt);
   ta.CopyDataFromHostPtr(tary, 8);
+  ta.AsType(singa::kInt);
 
   singa::SoftmaxCrossEntropy cross_entropy;
   cross_entropy.Forward(singa::kTrain, p, ta);
@@ -143,7 +145,6 @@
   EXPECT_FLOAT_EQ(ldat[1], result_test);
 }
 
-
 TEST_F(TestSoftmaxCrossEntropy, CudaBackward) {
   singa::SoftmaxCrossEntropy cross_entropy;
   auto dev = std::make_shared<singa::CudaGPU>();
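
Beyond formatting, the cross-entropy hunks make two substantive changes: `Reshape()` becomes `Resize()`, and every target tensor is now filled via `CopyDataFromHostPtr()` before being cast with `AsType(singa::kInt)`, rather than after. A minimal sketch of the new preparation order, mirroring the fixture above (`MakeTargets` is a hypothetical helper, not part of the patch):

```cpp
#include "singa/core/tensor.h"

// Hypothetical helper mirroring TestSoftmaxCrossEntropy: allocate, fill,
// then cast, in that order.
singa::Tensor MakeTargets() {
  singa::Tensor t;
  t.Resize(singa::Shape{2, 1});    // Resize() replaces the old Reshape()
  const int tdat[2] = {0, 2};
  t.CopyDataFromHostPtr(tdat, 2);  // write the host data first...
  t.AsType(singa::kInt);           // ...then cast, so the cast sees real data
  return t;
}
```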
diff --git a/test/singa/test_csv.cc b/test/singa/test_csv.cc
index 77f5baa..16322d1 100644
--- a/test/singa/test_csv.cc
+++ b/test/singa/test_csv.cc
@@ -1,29 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
-#include "singa/io/encoder.h"
-#include "singa/io/decoder.h"
-#include "gtest/gtest.h"
-#include <sstream>
 #include <algorithm>
+#include <sstream>
+
+#include "gtest/gtest.h"
+#include "singa/io/decoder.h"
+#include "singa/io/encoder.h"
 
 using singa::Shape;
 using singa::Tensor;
diff --git a/test/singa/test_cudnn_activation.cc b/test/singa/test_cudnn_activation.cc
index 6a989d1..fdf225b 100644
--- a/test/singa/test_cudnn_activation.cc
+++ b/test/singa/test_cudnn_activation.cc
@@ -1,31 +1,32 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "singa/singa_config.h"
 #ifdef USE_CUDNN
 
-#include "singa/proto/core.pb.h"
+#include <cudnn.h>
+#include <math.h>  // exp tanh
+
 #include "../src/model/layer/cudnn_activation.h"
 #include "gtest/gtest.h"
-#include <math.h>  // exp tanh
-#include <cudnn.h>
+#include "singa/proto/core.pb.h"
 
 using singa::CudnnActivation;
 using singa::Shape;
@@ -39,7 +40,7 @@
   reluconf->set_negative_slope(0.5f);
 
   acti.Setup(Shape{3}, conf);
-//  EXPECT_EQ(CUDNN_ACTIVATION_RELU, acti.CudnnMode());
+  //  EXPECT_EQ(CUDNN_ACTIVATION_RELU, acti.CudnnMode());
   EXPECT_EQ(0.5f, acti.Negative_slope());
 }
 
@@ -74,8 +75,9 @@
       for (size_t i = 0; i < n; i++) y[i] = tanh(x[i]);
     } else if (acti.Mode() == "relu") {
       for (size_t i = 0; i < n; i++) y[i] = (x[i] >= 0.f) ? x[i] : 0.f;
-    } else
+    } else {
       LOG(FATAL) << "Unkown activation: " << acti.Mode();
+    }
     EXPECT_FLOAT_EQ(y[0], yptr[0]);
     EXPECT_FLOAT_EQ(y[4], yptr[4]);
     EXPECT_FLOAT_EQ(y[5], yptr[5]);
@@ -123,8 +125,9 @@
       for (size_t i = 0; i < n; i++)
         dx[i] =
             grad[i] * (x[i] > 0.f);  //+ acti.Negative_slope() * (x[i] <= 0.f);
-    } else
+    } else {
       LOG(FATAL) << "Unkown activation: " << acti.Mode();
+    }
     for (size_t i = 0; i < n; i++) {
       EXPECT_NEAR(dx[i], xptr[i], 1e-7);
     }
diff --git a/test/singa/test_cudnn_batchnorm.cc b/test/singa/test_cudnn_batchnorm.cc
index b024c19..bb218eb 100644
--- a/test/singa/test_cudnn_batchnorm.cc
+++ b/test/singa/test_cudnn_batchnorm.cc
@@ -1,23 +1,23 @@
 /*********************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
 
 #include "../src/model/layer/cudnn_batchnorm.h"
 
@@ -43,26 +43,23 @@
 
 TEST(CudnnBatchNorm, Forward) {
   CudnnBatchNorm batchnorm;
-  const float x[] = {
-    0.0736655, 0.0459045, 0.0779517, 0.0771059,
-    0.0586862, 0.0561263, 0.0708457, 0.0977273,
-    0.0405025, -0.170897, 0.0208982, 0.136865,
-    -0.0367905, -0.0618205, -0.0103908, -0.0522777,
-    -0.122161, -0.025427, -0.0718576, -0.185941,
-    0.0166533, 0.178679, -0.0576606, -0.137817,
-    0.150676, 0.153442, -0.0929899, -0.148675,
-    -0.112459, -0.106284, -0.103074, -0.0668811
-  };
+  const float x[] = {0.0736655,  0.0459045,  0.0779517,  0.0771059,  0.0586862,
+                     0.0561263,  0.0708457,  0.0977273,  0.0405025,  -0.170897,
+                     0.0208982,  0.136865,   -0.0367905, -0.0618205, -0.0103908,
+                     -0.0522777, -0.122161,  -0.025427,  -0.0718576, -0.185941,
+                     0.0166533,  0.178679,   -0.0576606, -0.137817,  0.150676,
+                     0.153442,   -0.0929899, -0.148675,  -0.112459,  -0.106284,
+                     -0.103074,  -0.0668811};
   auto cuda = std::make_shared<singa::CudaGPU>();
-  singa::Tensor in(singa::Shape{1,2,4,4}, cuda);
-  in.CopyDataFromHostPtr(x, 1*2*4*4);
+  singa::Tensor in(singa::Shape{1, 2, 4, 4}, cuda);
+  in.CopyDataFromHostPtr(x, 1 * 2 * 4 * 4);
   const float alpha_[] = {1, 1};
-  singa::Tensor alpha(singa::Shape{1,2,1,1}, cuda);
-  alpha.CopyDataFromHostPtr(alpha_, 1*2*1*1);
+  singa::Tensor alpha(singa::Shape{1, 2, 1, 1}, cuda);
+  alpha.CopyDataFromHostPtr(alpha_, 1 * 2 * 1 * 1);
 
   const float beta_[] = {0, 0};
-  singa::Tensor beta(singa::Shape{1,2,1,1}, cuda);
-  beta.CopyDataFromHostPtr(beta_, 1*2*1*1);
+  singa::Tensor beta(singa::Shape{1, 2, 1, 1}, cuda);
+  beta.CopyDataFromHostPtr(beta_, 1 * 2 * 1 * 1);
 
   singa::LayerConf conf;
   singa::BatchNormConf *batchnorm_conf = conf.mutable_batchnorm_conf();
@@ -77,7 +74,7 @@
   singa::Tensor out = batchnorm.Forward(singa::kTrain, in);
   out.ToHost();
   const float *outptr = out.data<float>();
-  const auto & shape = out.shape();
+  const auto &shape = out.shape();
   EXPECT_EQ(4u, shape.size());
   EXPECT_EQ(1u, shape[0]);
   EXPECT_EQ(2u, shape[1]);
@@ -119,19 +116,16 @@
 
 TEST(CudnnBatchNorm, Backward) {
   CudnnBatchNorm batchnorm;
-  const float x[] = {
-    0.0736655, 0.0459045, 0.0779517, 0.0771059,
-    0.0586862, 0.0561263, 0.0708457, 0.0977273,
-    0.0405025, -0.170897, 0.0208982, 0.136865,
-    -0.0367905, -0.0618205, -0.0103908, -0.0522777,
-    -0.122161, -0.025427, -0.0718576, -0.185941,
-    0.0166533, 0.178679, -0.0576606, -0.137817,
-    0.150676, 0.153442, -0.0929899, -0.148675,
-    -0.112459, -0.106284, -0.103074, -0.0668811
-  };
+  const float x[] = {0.0736655,  0.0459045,  0.0779517,  0.0771059,  0.0586862,
+                     0.0561263,  0.0708457,  0.0977273,  0.0405025,  -0.170897,
+                     0.0208982,  0.136865,   -0.0367905, -0.0618205, -0.0103908,
+                     -0.0522777, -0.122161,  -0.025427,  -0.0718576, -0.185941,
+                     0.0166533,  0.178679,   -0.0576606, -0.137817,  0.150676,
+                     0.153442,   -0.0929899, -0.148675,  -0.112459,  -0.106284,
+                     -0.103074,  -0.0668811};
   auto cuda = std::make_shared<singa::CudaGPU>();
-  singa::Tensor x_tensor(singa::Shape{1,2,4,4}, cuda);
-  x_tensor.CopyDataFromHostPtr(x, 1*2*4*4);
+  singa::Tensor x_tensor(singa::Shape{1, 2, 4, 4}, cuda);
+  x_tensor.CopyDataFromHostPtr(x, 1 * 2 * 4 * 4);
 
   singa::LayerConf conf;
   singa::BatchNormConf *batchnorm_conf = conf.mutable_batchnorm_conf();
@@ -139,33 +133,30 @@
   batchnorm.Setup(Shape{2, 4, 4}, conf);
 
   const float dy[] = {
-    -0.0064714, 0, 0, 0,
-    0, -0.00297655, -0.0195729, 0,
-    0, 0, 0, 0,
-    0, 0, 0, -0.0032594,
-    0, 0, 0, 0,
-    0, 0, 0.0125562, 0,
-    0.00041933, 0.000386108, -0.0074611, 0.0015929,
-    0.00468428, 0.00735506, -0.00682525, 0.00342023
-  };
+      -0.0064714,  0,           0,          0,          0,          -0.00297655,
+      -0.0195729,  0,           0,          0,          0,          0,
+      0,           0,           0,          -0.0032594, 0,          0,
+      0,           0,           0,          0,          0.0125562,  0,
+      0.00041933,  0.000386108, -0.0074611, 0.0015929,  0.00468428, 0.00735506,
+      -0.00682525, 0.00342023};
 
-  singa::Tensor dy_tensor(singa::Shape{1,2,4,4}, cuda);
-  dy_tensor.CopyDataFromHostPtr(dy, 1*2*4*4);
+  singa::Tensor dy_tensor(singa::Shape{1, 2, 4, 4}, cuda);
+  dy_tensor.CopyDataFromHostPtr(dy, 1 * 2 * 4 * 4);
   const float alpha_[] = {1, 1};
   singa::Tensor alpha(singa::Shape{2}, cuda);
-  alpha.CopyDataFromHostPtr(alpha_, 1*2*1*1);
+  alpha.CopyDataFromHostPtr(alpha_, 1 * 2 * 1 * 1);
 
   const float beta_[] = {0, 0};
   singa::Tensor beta(singa::Shape{2}, cuda);
-  beta.CopyDataFromHostPtr(beta_, 1*2*1*1);
+  beta.CopyDataFromHostPtr(beta_, 1 * 2 * 1 * 1);
 
   const float mean_[] = {0.0123405, -0.0622333};
   singa::Tensor mean(singa::Shape{2}, cuda);
-  mean.CopyDataFromHostPtr(mean_, 1*2*1*1);
+  mean.CopyDataFromHostPtr(mean_, 1 * 2 * 1 * 1);
 
   const float var_[] = {15.9948, 8.68198};
   singa::Tensor var(singa::Shape{2}, cuda);
-  var.CopyDataFromHostPtr(var_, 1*2*1*1);
+  var.CopyDataFromHostPtr(var_, 1 * 2 * 1 * 1);
 
   batchnorm.ToDevice(cuda);
   batchnorm.set_bnScale(alpha);
@@ -177,7 +168,7 @@
   singa::Tensor dx = ret.first;
   dx.ToHost();
   const float *dxptr = dx.data<float>();
-  const auto & shape = dx.shape();
+  const auto &shape = dx.shape();
   EXPECT_EQ(4u, shape.size());
   EXPECT_EQ(1u, shape[0]);
   EXPECT_EQ(2u, shape[1]);
@@ -219,7 +210,7 @@
   singa::Tensor dbnScale = ret.second.at(0);
   dbnScale.ToHost();
   const float *dbnScaleptr = dbnScale.data<float>();
-  const auto & dbnScaleShape = dbnScale.shape();
+  const auto &dbnScaleShape = dbnScale.shape();
   EXPECT_EQ(1u, dbnScaleShape.size());
   EXPECT_EQ(2u, dbnScaleShape[0]);
 
@@ -229,7 +220,7 @@
   singa::Tensor dbnBias = ret.second.at(1);
   dbnBias.ToHost();
   const float *dbnBiasptr = dbnBias.data<float>();
-  const auto & dbnBiasShape = dbnBias.shape();
+  const auto &dbnBiasShape = dbnBias.shape();
   EXPECT_EQ(1u, dbnBiasShape.size());
   EXPECT_EQ(2u, dbnBiasShape[0]);
 
diff --git a/test/singa/test_cudnn_convolution.cc b/test/singa/test_cudnn_convolution.cc
index 8dbee63..9839d31 100644
--- a/test/singa/test_cudnn_convolution.cc
+++ b/test/singa/test_cudnn_convolution.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "../src/model/layer/cudnn_convolution.h"
 #ifdef USE_CUDNN
 
@@ -69,9 +69,9 @@
 
   // Set weight and bias manually
   const size_t num_filters = 1;
-  const size_t col_height = 1 * 3 * 3; // channels * kernel_w * kernel_h
-  const float we[num_filters * col_height] = {
-      1.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f};
+  const size_t col_height = 1 * 3 * 3;  // channels * kernel_w * kernel_h
+  const float we[num_filters * col_height] = {1.0f,  1.0f, 0.0f, 0.0f, 0.0f,
+                                              -1.0f, 0.0f, 1.0f, 0.0f};
   singa::Tensor weight(singa::Shape{num_filters, col_height}, cuda);
   weight.CopyDataFromHostPtr(we, col_height);
   const float b[num_filters] = {1.0f};
@@ -120,11 +120,10 @@
 
   // Set weight_ and bias_ manually
   const size_t num_filters = 1;
-  const size_t col_height = 1 * 3 * 3; // channels * kernel_w * kernel_h
-  const float we[num_filters * col_height] = {
-      1.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f};
-  singa::Tensor weight(singa::Shape{num_filters, col_height},
-                       cuda);
+  const size_t col_height = 1 * 3 * 3;  // channels * kernel_w * kernel_h
+  const float we[num_filters * col_height] = {1.0f,  1.0f, 0.0f, 0.0f, 0.0f,
+                                              -1.0f, 0.0f, 1.0f, 0.0f};
+  singa::Tensor weight(singa::Shape{num_filters, col_height}, cuda);
   weight.CopyDataFromHostPtr(we, col_height);
   const float b[num_filters] = {1.0f};
   singa::Tensor bias(singa::Shape{num_filters}, cuda);
diff --git a/test/singa/test_cudnn_dropout.cc b/test/singa/test_cudnn_dropout.cc
index f1b8437..a2b2569 100644
--- a/test/singa/test_cudnn_dropout.cc
+++ b/test/singa/test_cudnn_dropout.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "../src/model/layer/cudnn_dropout.h"
 #ifdef USE_CUDNN
 // cudnn dropout was added in cudnn 5
@@ -117,7 +117,6 @@
   mask.CopyData(drop.mask());
   const char* mptr = mask.data<char>();
 
-
   EXPECT_FLOAT_EQ(dx[0], dy[0] * GetBitValue(mptr, 0) * scale);
   EXPECT_FLOAT_EQ(dx[1], dy[1] * GetBitValue(mptr, 1) * scale);
   EXPECT_FLOAT_EQ(dx[7], dy[7] * GetBitValue(mptr, 7) * scale);
diff --git a/test/singa/test_cudnn_lrn.cc b/test/singa/test_cudnn_lrn.cc
index df251f5..4b1e884 100644
--- a/test/singa/test_cudnn_lrn.cc
+++ b/test/singa/test_cudnn_lrn.cc
@@ -1,29 +1,29 @@
 /*********************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
 
 #include "../src/model/layer/cudnn_lrn.h"
 
 #ifdef USE_CUDNN
 // cudnn lrn was added in cudnn 4
-#if CUDNN_MAJOR >=4
+#if CUDNN_MAJOR >= 4
 #include "gtest/gtest.h"
 
 using singa::CudnnLRN;
@@ -49,18 +49,15 @@
 TEST(CudnnLRN, Forward) {
   CudnnLRN lrn;
   const float x[] = {
-    0.00658502, -0.0496967, -0.0333733, -0.0263094,
-    -0.044298, 0.0211638, 0.0829358, -0.0172312,
-    -0.0665471, -0.10017, -0.0750333, -0.104551,
-    -0.00981208, -0.0583349, -0.0751652, 0.011747,
-    0.0151165, 0.0304321, 0.0736639, -0.00652653,
-    0.00962833, 0.169646, -0.044588, -0.00244141,
-    0.0597329, -0.0530868, 0.0124246, 0.108429,
-    0.0451175, 0.0247055, 0.0304345, 0.0179575
-  };
+      0.00658502,  -0.0496967,  -0.0333733, -0.0263094, -0.044298,  0.0211638,
+      0.0829358,   -0.0172312,  -0.0665471, -0.10017,   -0.0750333, -0.104551,
+      -0.00981208, -0.0583349,  -0.0751652, 0.011747,   0.0151165,  0.0304321,
+      0.0736639,   -0.00652653, 0.00962833, 0.169646,   -0.044588,  -0.00244141,
+      0.0597329,   -0.0530868,  0.0124246,  0.108429,   0.0451175,  0.0247055,
+      0.0304345,   0.0179575};
   auto cuda = std::make_shared<singa::CudaGPU>();
-  singa::Tensor in(singa::Shape{1,2,4,4}, cuda);
-  in.CopyDataFromHostPtr(x, 1*2*4*4);
+  singa::Tensor in(singa::Shape{1, 2, 4, 4}, cuda);
+  in.CopyDataFromHostPtr(x, 1 * 2 * 4 * 4);
 
   singa::LayerConf conf;
   singa::LRNConf *lrn_conf = conf.mutable_lrn_conf();
@@ -73,7 +70,7 @@
   singa::Tensor out = lrn.Forward(singa::kTrain, in);
   out.ToHost();
   const float *outptr = out.data<float>();
-  const auto & shape = out.shape();
+  const auto &shape = out.shape();
   EXPECT_EQ(4u, shape.size());
   EXPECT_EQ(1u, shape[0]);
   EXPECT_EQ(2u, shape[1]);
@@ -118,32 +115,26 @@
   CudnnLRN lrn;
 
   const float x[] = {
-    0.00658502, -0.0496967, -0.0333733, -0.0263094,
-    -0.044298, 0.0211638, 0.0829358, -0.0172312,
-    -0.0665471, -0.10017, -0.0750333, -0.104551,
-    -0.00981208, -0.0583349, -0.0751652, 0.011747,
-    0.0151165, 0.0304321, 0.0736639, -0.00652653,
-    0.00962833, 0.169646, -0.044588, -0.00244141,
-    0.0597329, -0.0530868, 0.0124246, 0.108429,
-    0.0451175, 0.0247055, 0.0304345, 0.0179575
-  };
+      0.00658502,  -0.0496967,  -0.0333733, -0.0263094, -0.044298,  0.0211638,
+      0.0829358,   -0.0172312,  -0.0665471, -0.10017,   -0.0750333, -0.104551,
+      -0.00981208, -0.0583349,  -0.0751652, 0.011747,   0.0151165,  0.0304321,
+      0.0736639,   -0.00652653, 0.00962833, 0.169646,   -0.044588,  -0.00244141,
+      0.0597329,   -0.0530868,  0.0124246,  0.108429,   0.0451175,  0.0247055,
+      0.0304345,   0.0179575};
   auto cuda = std::make_shared<singa::CudaGPU>();
-  singa::Tensor x_tensor(singa::Shape{1,2,4,4}, cuda);
-  x_tensor.CopyDataFromHostPtr(x, 1*2*4*4);
+  singa::Tensor x_tensor(singa::Shape{1, 2, 4, 4}, cuda);
+  x_tensor.CopyDataFromHostPtr(x, 1 * 2 * 4 * 4);
 
   const float dy[] = {
-    -0.103178, -0.0326904, 0.293932, 0.355288,
-    -0.0288079, -0.0543308, -0.0668226, 0.0462216,
-    -0.0448064, -0.068982, -0.0509133, -0.0721143,
-    0.0959078, -0.0389037, -0.0510071, -0.178793,
-    0.00428248, -0.001132, -0.19928, 0.011935,
-    0.00622313, 0.143793, 0.0253894, 0.0104906,
-    -0.170673, 0.0283919, 0.00523488, -0.0455003,
-    0.177807, 0.000892812, -0.00113197, 0.00327798
-  };
+      -0.103178,   -0.0326904, 0.293932,   0.355288,   -0.0288079, -0.0543308,
+      -0.0668226,  0.0462216,  -0.0448064, -0.068982,  -0.0509133, -0.0721143,
+      0.0959078,   -0.0389037, -0.0510071, -0.178793,  0.00428248, -0.001132,
+      -0.19928,    0.011935,   0.00622313, 0.143793,   0.0253894,  0.0104906,
+      -0.170673,   0.0283919,  0.00523488, -0.0455003, 0.177807,   0.000892812,
+      -0.00113197, 0.00327798};
 
-  singa::Tensor dy_tensor(singa::Shape{1,2,4,4}, cuda);
-  dy_tensor.CopyDataFromHostPtr(dy, 1*2*4*4);
+  singa::Tensor dy_tensor(singa::Shape{1, 2, 4, 4}, cuda);
+  dy_tensor.CopyDataFromHostPtr(dy, 1 * 2 * 4 * 4);
 
   singa::LayerConf conf;
   singa::LRNConf *lrn_conf = conf.mutable_lrn_conf();
@@ -158,7 +149,7 @@
   singa::Tensor dx = ret.first;
   dx.ToHost();
   const float *dxptr = dx.data<float>();
-  const auto & shape = dx.shape();
+  const auto &shape = dx.shape();
   EXPECT_EQ(4u, shape.size());
   EXPECT_EQ(1u, shape[0]);
   EXPECT_EQ(2u, shape[1]);
diff --git a/test/singa/test_cudnn_pooling.cc b/test/singa/test_cudnn_pooling.cc
index 0e3314e..c62d50b 100644
--- a/test/singa/test_cudnn_pooling.cc
+++ b/test/singa/test_cudnn_pooling.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "../src/model/layer/cudnn_pooling.h"
 #ifdef USE_CUDNN
 
diff --git a/test/singa/test_cudnn_rnn.cc b/test/singa/test_cudnn_rnn.cc
index 07336a2..0460b0f 100644
--- a/test/singa/test_cudnn_rnn.cc
+++ b/test/singa/test_cudnn_rnn.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/cudnn_rnn.h"
 #ifdef USE_CUDNN
@@ -29,18 +29,18 @@
 using singa::Shape;
 using singa::Tensor;
 class TestCudnnRNN : public ::testing::Test {
-  protected:
-    virtual void SetUp() {
-      singa::RNNConf *rnnconf = conf.mutable_rnn_conf();
-      rnnconf->set_hidden_size(hidden_size);
-      rnnconf->set_num_stacks(1);
-      rnnconf->set_dropout(0);
-      rnnconf->set_input_mode("linear");
-      rnnconf->set_direction("unidirectional");
-      rnnconf->set_rnn_mode("tanh");
-    }
-    singa::LayerConf conf;
-    size_t hidden_size = 4;
+ protected:
+  virtual void SetUp() {
+    singa::RNNConf *rnnconf = conf.mutable_rnn_conf();
+    rnnconf->set_hidden_size(hidden_size);
+    rnnconf->set_num_stacks(1);
+    rnnconf->set_dropout(0);
+    rnnconf->set_input_mode("linear");
+    rnnconf->set_direction("unidirectional");
+    rnnconf->set_rnn_mode("tanh");
+  }
+  singa::LayerConf conf;
+  size_t hidden_size = 4;
 };
 
 TEST_F(TestCudnnRNN, Setup) {
@@ -54,8 +54,8 @@
 TEST_F(TestCudnnRNN, Forward) {
   auto cuda = std::make_shared<singa::CudaGPU>();
   const size_t seqLength = 4, batchsize = 1, dim = 2;
-  const float x[seqLength * batchsize * dim] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f,
-                                          1.0f, 1.0f, 1.0f};
+  const float x[seqLength * batchsize * dim] = {1.0f, 1.0f, 1.0f, 1.0f,
+                                                1.0f, 1.0f, 1.0f, 1.0f};
 
   vector<Tensor> inputs;
   for (size_t i = 0; i < seqLength; i++) {
@@ -75,8 +75,7 @@
   size_t weightSize = weight.Size();
   float we[weightSize];
   float wvalue = 0.1f;
-  for (size_t i = 0; i < weightSize; i++)
-    we[i] = wvalue;
+  for (size_t i = 0; i < weightSize; i++) we[i] = wvalue;
   weight.CopyDataFromHostPtr(we, weightSize);
 
   const auto ret = rnn.Forward(singa::kEval, inputs);
@@ -108,8 +107,8 @@
 TEST_F(TestCudnnRNN, Backward) {
   auto cuda = std::make_shared<singa::CudaGPU>();
   const size_t seqLength = 4, batchsize = 1, dim = 2;
-  const float x[seqLength * batchsize * dim] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f,
-                                          1.0f, 1.0f, 1.0f};
+  const float x[seqLength * batchsize * dim] = {1.0f, 1.0f, 1.0f, 1.0f,
+                                                1.0f, 1.0f, 1.0f, 1.0f};
 
   vector<Tensor> inputs;
   for (size_t i = 0; i < seqLength; i++) {
@@ -129,8 +128,7 @@
   size_t weightSize = weight.Size();
   float we[weightSize];
   float wvalue = 0.1f;
-  for (size_t i = 0; i < weightSize; i++)
-    we[i] = wvalue;
+  for (size_t i = 0; i < weightSize; i++) we[i] = wvalue;
   weight.CopyDataFromHostPtr(we, weightSize);
 
   const auto outs = rnn.Forward(singa::kTrain, inputs);
@@ -148,7 +146,7 @@
   grads.push_back(dhy);
   vector<float> dhyptr(hidden_size, 0.0f);
   const auto ret = rnn.Backward(singa::kTrain, grads);
-  for (size_t i = seqLength - 1; i > 0 ; i --) {
+  for (size_t i = seqLength - 1; i > 0; i--) {
     auto dx = ret.first[i];
     auto y = outs[i].Clone();
     y.ToHost();
diff --git a/test/singa/test_cudnn_softmax.cc b/test/singa/test_cudnn_softmax.cc
index 6e0d5ab..061d9f5 100644
--- a/test/singa/test_cudnn_softmax.cc
+++ b/test/singa/test_cudnn_softmax.cc
@@ -1,30 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "singa/singa_config.h"
 #ifdef USE_CUDNN
 
+#include <cudnn.h>
+#include <math.h>  // exp
+
 #include "../src/model/layer/cudnn_softmax.h"
 #include "gtest/gtest.h"
-#include <math.h>  // exp
-#include <cudnn.h>
 
 // TODO(wangwei) add test for matrix input
 using singa::CudnnSoftmax;
diff --git a/test/singa/test_dense.cc b/test/singa/test_dense.cc
index 0410929..4d34f6d 100644
--- a/test/singa/test_dense.cc
+++ b/test/singa/test_dense.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "../src/model/layer/dense.h"
 #include "gtest/gtest.h"
 #include "singa/singa_config.h"
@@ -68,9 +68,9 @@
   EXPECT_EQ(9u, out1.Size());
   for (int i = 0; i < 3; i++)
     for (int j = 0; j < 3; j++)
-      EXPECT_FLOAT_EQ((x[i * 2 + 0] * we[j] +
-                       x[i * 2 + 1] * we[3 + j] + bia[j]),
-                      outptr1[i * 3 + j]);
+      EXPECT_FLOAT_EQ(
+          (x[i * 2 + 0] * we[j] + x[i * 2 + 1] * we[3 + j] + bia[j]),
+          outptr1[i * 3 + j]);
 }
 TEST(Dense, BackwardCpp) {
   Dense dense;
@@ -169,9 +169,9 @@
   EXPECT_EQ(9u, out1.Size());
   for (int i = 0; i < 3; i++)
     for (int j = 0; j < 3; j++)
-      EXPECT_FLOAT_EQ((x[i * 2 + 0] * we[j] +
-                       x[i * 2 + 1] * we[3 + j] + bia[j]),
-                      outptr1[i * 3 + j]);
+      EXPECT_FLOAT_EQ(
+          (x[i * 2 + 0] * we[j] + x[i * 2 + 1] * we[3 + j] + bia[j]),
+          outptr1[i * 3 + j]);
 }
 TEST(Dense, BackwardCuda) {
   Dense dense;
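
The reflowed assertion in both Dense tests encodes the reference computation for a fully connected layer: with `x` row-major of shape (3, 2) and `we` holding the weight row-major as (2, 3), `out[i][j] = x[i][0] * we[j] + x[i][1] * we[3 + j] + bia[j]`. A standalone sketch of that reference, under those layout assumptions:

```cpp
// Reference forward pass the tests assert against (sketch, not from the
// patch): out = x * W + bias, with every buffer row-major.
void DenseForwardRef(const float* x,    // shape (3, 2)
                     const float* we,   // shape (2, 3): row k is we[3*k .. 3*k+2]
                     const float* bia,  // shape (3)
                     float* out) {      // shape (3, 3)
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      out[i * 3 + j] =
          x[i * 2 + 0] * we[j] + x[i * 2 + 1] * we[3 + j] + bia[j];
}
```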
diff --git a/test/singa/test_dropout.cc b/test/singa/test_dropout.cc
index 047762e..c607d9d 100644
--- a/test/singa/test_dropout.cc
+++ b/test/singa/test_dropout.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/dropout.h"
 #include "gtest/gtest.h"
diff --git a/test/singa/test_ep.cc b/test/singa/test_ep.cc
index 0d862e5..73160c3 100644
--- a/test/singa/test_ep.cc
+++ b/test/singa/test_ep.cc
@@ -1,33 +1,34 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "singa/singa_config.h"
 #ifdef ENABLE_DIST
+#include <assert.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <memory>
+
 #include "singa/io/network.h"
 #include "singa/utils/integer.h"
 #include "singa/utils/logging.h"
-#include <assert.h>
-#include <unistd.h>
-#include <string.h>
-#include <memory>
-
 
 #define SIZE 10000000
 #define PORT 10000
@@ -71,20 +72,18 @@
 
   while (1) {
     for (int i = 0; i < ITER; ++i) {
-      if (ep->send(m[i]) < 0)
-        return 1;
+      if (ep->send(m[i]) < 0) return 1;
       delete m[i];
     }
 
     for (int i = 0; i < ITER; ++i) {
       m[i] = ep->recv();
-      if (!m[i])
-        return 1;
+      if (!m[i]) return 1;
       char *p;
-      CHECK(m[i]->getMetadata((void **)&p) == SIZE);
-      CHECK(0 == strncmp(p, md, SIZE));
-      CHECK(m[i]->getPayload((void **)&p) == SIZE);
-      CHECK(0 == strncmp(p, payload, SIZE));
+      CHECK_EQ(m[i]->getMetadata((void **)&p), SIZE);
+      CHECK_EQ(0, strncmp(p, md, SIZE));
+      CHECK_EQ(m[i]->getPayload((void **)&p), SIZE);
+      CHECK_EQ(0, strncmp(p, payload, SIZE));
     }
   }
 
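
Swapping `CHECK(a == b)` for `CHECK_EQ(a, b)` in test_ep.cc is a behavioral improvement rather than pure style: on failure the `_EQ` form also logs the two operand values, which matters when the mismatched quantity is a message size. A small sketch, assuming the glog-style macros from `singa/utils/logging.h` that the test already includes (the buffers and lengths here are hypothetical):

```cpp
#include <cstring>

#include "singa/utils/logging.h"

void CheckDemo(const char* got, const char* want, int got_len, int want_len) {
  // CHECK aborts with just the stringified expression on failure.
  CHECK(got_len == want_len);
  // CHECK_EQ additionally reports both values (e.g. "3 vs. 4"), which is
  // why the patch rewrites the metadata- and payload-size checks this way.
  CHECK_EQ(got_len, want_len);
  CHECK_EQ(0, strncmp(got, want, got_len));
}
```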
diff --git a/test/singa/test_flatten.cc b/test/singa/test_flatten.cc
index 65748f7..a88d6e5 100644
--- a/test/singa/test_flatten.cc
+++ b/test/singa/test_flatten.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/flatten.h"
 #include "gtest/gtest.h"
@@ -140,4 +140,4 @@
   EXPECT_EQ(2u, in_diff.shape(3));
   for (size_t i = 0; i < n; i++) EXPECT_FLOAT_EQ(dy[i], xptr[i]);
 }
-#endif // USE_CUDA
+#endif  // USE_CUDA
diff --git a/test/singa/test_image_transformer.cc b/test/singa/test_image_transformer.cc
index 016da72..cc22ada 100644
--- a/test/singa/test_image_transformer.cc
+++ b/test/singa/test_image_transformer.cc
@@ -1,29 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
-#include "singa/io/transformer.h"
-#include "gtest/gtest.h"
 #include <time.h>
+
 #include <iostream>
 
+#include "gtest/gtest.h"
+#include "singa/io/transformer.h"
+
 // decide whether to use opencv
 // #include "singa/singa_config.h"
 
@@ -93,27 +95,28 @@
   for (int i = 0; i < resize_height; i++)
     for (int j = 0; j < resize_width; j++)
       for (size_t k = 0; k < channel; k++)
-        xt[i * resize_width * channel + j * channel + k] = resized.at<cv::Vec3f>(i, j)[k];
+        xt[i * resize_width * channel + j * channel + k] =
+            resized.at<cv::Vec3f>(i, j)[k];
   for (size_t c = 0; c < 3; c++)
     for (size_t h = 0; h < 2; h++)
-      for (size_t w = 0; w < 3; w++){
-        //size_t in_idx = (c * height + 1 + h) * width + 1 + w,
+      for (size_t w = 0; w < 3; w++) {
+        // size_t in_idx = (c * height + 1 + h) * width + 1 + w,
         //    out_idx = (c * 2 + h) * 3 + w;
         // test for HWC
         size_t in_idx = ((h + 1) * resize_width + 1 + w) * channel + c,
-              out_idx = (h * 3 + w) * channel + c;
+               out_idx = (h * 3 + w) * channel + c;
         EXPECT_EQ(xt[in_idx], y[out_idx]);
       }
   delete[] xt;
 #else
   for (size_t c = 0; c < 3; c++)
     for (size_t h = 0; h < 2; h++)
-      for (size_t w = 0; w < 3; w++){
-        //size_t in_idx = (c * height + 2 + h) * width + 3 + w,
+      for (size_t w = 0; w < 3; w++) {
+        // size_t in_idx = (c * height + 2 + h) * width + 3 + w,
         //    out_idx = (c * 2 + h) * 3 + w;
         // test for HWC
         size_t in_idx = ((h + 2) * width + 3 + w) * channel + c,
-              out_idx = (h * 3 + w) * channel + c;
+               out_idx = (h * 3 + w) * channel + c;
         EXPECT_EQ(x[in_idx], y[out_idx]);
       }
 #endif
@@ -158,20 +161,18 @@
   float* xt = new float[new_size];
   for (int i = 0; i < resize_height; i++)
     for (int j = 0; j < resize_width; j++)
-        xt[i * resize_width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
+      xt[i * resize_width + j] = resized.at<cv::Vec<float, 1>>(i, j)[0];
 
   for (size_t h = 0; h < 2; h++)
-    for (size_t w = 0; w < 3; w++){
-      size_t in_idx = (h + 1) * resize_width + 1 + w,
-            out_idx = h * 3 + w;
+    for (size_t w = 0; w < 3; w++) {
+      size_t in_idx = (h + 1) * resize_width + 1 + w, out_idx = h * 3 + w;
       EXPECT_EQ(xt[in_idx], y[out_idx]);
     }
   delete[] xt;
 #else
   for (size_t h = 0; h < 2; h++)
-    for (size_t w = 0; w < 3; w++){
-      size_t in_idx = (h + 2) * width + 3 + w,
-            out_idx = h * 3 + w;
+    for (size_t w = 0; w < 3; w++) {
+      size_t in_idx = (h + 2) * width + 3 + w, out_idx = h * 3 + w;
       EXPECT_EQ(x[in_idx], y[out_idx]);
     }
 #endif
@@ -206,7 +207,8 @@
   for (int i = 0; i < resize_height; i++)
     for (int j = 0; j < resize_width; j++)
       for (size_t k = 0; k < channel; k++)
-        xt[i * resize_width * channel + j * channel + k] = resized.at<cv::Vec3f>(i, j)[k];
+        xt[i * resize_width * channel + j * channel + k] =
+            resized.at<cv::Vec3f>(i, j)[k];
 
   for (size_t i = 0; i < new_size; i++) EXPECT_EQ(xt[i], y[i]);
   delete[] x;
@@ -222,18 +224,17 @@
   srand((unsigned int)time(NULL));
   for (size_t i = 0; i < n; i++) x[i] = (float)(rand() % 256);
   in.CopyDataFromHostPtr<float>(x, n);
-  size_t crop_height = 3, crop_width = 4,
-         crop_h_offset = 2, crop_w_offset = 5;
-  singa::Tensor out = singa::crop(in, crop_height, crop_width,
-                         crop_h_offset, crop_w_offset, "CHW");
+  size_t crop_height = 3, crop_width = 4, crop_h_offset = 2, crop_w_offset = 5;
+  singa::Tensor out = singa::crop(in, crop_height, crop_width, crop_h_offset,
+                                  crop_w_offset, "CHW");
 
   const float* y = out.data<float>();
   for (size_t h = 0; h < crop_height; h++)
     for (size_t w = 0; w < crop_width; w++)
       for (size_t c = 0; c < channel; c++) {
         size_t out_idx = c * crop_height * crop_width + h * crop_width + w;
-        size_t in_idx = c * height * width + (h + crop_h_offset)
-                 * width + w + crop_w_offset;
+        size_t in_idx = c * height * width + (h + crop_h_offset) * width + w +
+                        crop_w_offset;
         EXPECT_EQ(x[in_idx], y[out_idx]);
       }
   delete[] x;
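
For reference, the crop assertions above all reduce to one piece of CHW index
arithmetic; a minimal standalone sketch (illustrative only, not part of the
patch; the helper name is hypothetical):

#include <cstddef>

// CHW layout: flat index = c * H * W + h * W + w.  A cropped output
// coordinate (c, h, w) maps back to this source offset.
inline std::size_t crop_src_index(std::size_t c, std::size_t h, std::size_t w,
                                  std::size_t height, std::size_t width,
                                  std::size_t h_off, std::size_t w_off) {
  return c * height * width + (h + h_off) * width + (w + w_off);
}
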
diff --git a/test/singa/test_initializer.cc b/test/singa/test_initializer.cc
index 74a30bb..94c2a8c 100644
--- a/test/singa/test_initializer.cc
+++ b/test/singa/test_initializer.cc
@@ -16,8 +16,8 @@
  * limitations under the License.
  */
 
-#include "singa/model/initializer.h"
 #include "gtest/gtest.h"
+#include "singa/model/initializer.h"
 
 TEST(Initializer, Constant) {
   singa::init::Constant x;
@@ -28,11 +28,9 @@
   x.Setup(conf);
   x.Fill(t);
   const float* xPtr = t.data<float>();
-  for (size_t i = 0; i < n; i++)
-    EXPECT_FLOAT_EQ(xPtr[i], 3.1f);
+  for (size_t i = 0; i < n; i++) EXPECT_FLOAT_EQ(xPtr[i], 3.1f);
 }
 
-
 TEST(Initializer, Gaussian) {
   singa::init::Gaussian x;
   size_t n = 1000;
@@ -44,12 +42,10 @@
   x.Fill(t);
   const float* xPtr = t.data<float>();
   float mean = 0.0f, std = 0.0f;
-  for (size_t i = 0; i < n; i++)
-    mean += xPtr[i];
+  for (size_t i = 0; i < n; i++) mean += xPtr[i];
   mean /= n;
   EXPECT_NEAR(mean, 0.11f, 1e-3);
-  for (size_t i = 0; i < n; i++)
-    std += (xPtr[i] - mean) * (xPtr[i] - mean);
+  for (size_t i = 0; i < n; i++) std += (xPtr[i] - mean) * (xPtr[i] - mean);
   std /= n;
   std = sqrt(std);
   EXPECT_NEAR(std, 0.01f, 1e-3);
@@ -67,20 +63,16 @@
   x.Fill(t);
   t.ToHost();
   const float* xPtr = t.data<float>();
-  for (size_t i = 0; i < n; i++)
-    EXPECT_FLOAT_EQ(xPtr[i], 3.1f);
-
+  for (size_t i = 0; i < n; i++) EXPECT_FLOAT_EQ(xPtr[i], 3.1f);
 
   singa::init::Constant y(-0.1f);
   singa::Tensor s(singa::Shape{n}, dev);
   y.Fill(s);
   s.ToHost();
   const float* sPtr = s.data<float>();
-  for (size_t i = 0; i < n; i++)
-    EXPECT_FLOAT_EQ(sPtr[i], -0.1f);
+  for (size_t i = 0; i < n; i++) EXPECT_FLOAT_EQ(sPtr[i], -0.1f);
 }
 
-
 TEST(Initializer, GaussianCUDA) {
   singa::init::Gaussian x;
   auto dev = std::make_shared<singa::CudaGPU>();
@@ -94,28 +86,23 @@
   t.ToHost();
   const float* tPtr = t.data<float>();
   float mean = 0.0f, std = 0.0f;
-  for (size_t i = 0; i < n; i++)
-    mean += tPtr[i];
+  for (size_t i = 0; i < n; i++) mean += tPtr[i];
   mean /= n;
   EXPECT_NEAR(mean, 0.11f, 1e-2);
-  for (size_t i = 0; i < n; i++)
-    std += (tPtr[i] - mean) * (tPtr[i] - mean);
+  for (size_t i = 0; i < n; i++) std += (tPtr[i] - mean) * (tPtr[i] - mean);
   std /= n;
   std = sqrt(std);
   EXPECT_NEAR(std, 0.01f, 1e-2);
 
-
   singa::init::Gaussian y(1.5f, 0.1f);
   singa::Tensor s(singa::Shape{n}, dev);
   y.Fill(s);
   s.ToHost();
   const float* sPtr = s.data<float>();
-  for (size_t i = 0; i < n; i++)
-    mean += sPtr[i];
+  mean = 0.0f;  // reset the accumulator reused from the first tensor
+  for (size_t i = 0; i < n; i++) mean += sPtr[i];
   mean /= n;
   EXPECT_NEAR(mean, 1.5f, 0.1f);
-  for (size_t i = 0; i < n; i++)
-    std += (sPtr[i] - mean) * (sPtr[i] - mean);
+  std = 0.0f;  // likewise reset before re-accumulating
+  for (size_t i = 0; i < n; i++) std += (sPtr[i] - mean) * (sPtr[i] - mean);
   std /= n;
   std = sqrt(std);
   EXPECT_NEAR(std, 0.1f, 0.1f);
@@ -124,7 +111,7 @@
 TEST(Initializer, XavierCUDA) {
   singa::init::Constant x;
   auto dev = std::make_shared<singa::CudaGPU>();
-  size_t m = 30, n=40;
+  size_t m = 30, n = 40;
   singa::Tensor t(singa::Shape{m, n}, dev);
   x.Fill(t);
   t.ToHost();
@@ -133,10 +120,8 @@
   float high = -100.0f, low = 100.0f;
   for (size_t i = 0; i < n; i++) {
     mean += xPtr[i];
-    if (high < xPtr[i])
-      high = xPtr[i];
-    if (low > xPtr[i])
-      low = xPtr[i];
+    if (high < xPtr[i]) high = xPtr[i];
+    if (low > xPtr[i]) low = xPtr[i];
   }
   mean /= m * n;
   EXPECT_NEAR(mean, 0, 1e-2);
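
The Gaussian checks above recompute sample statistics by hand; the same
two-pass estimate, pulled into a helper for clarity (a sketch, not part of
the patch):

#include <cmath>
#include <cstddef>
#include <utility>

// Two-pass sample mean and standard deviation, matching the test loops.
std::pair<float, float> mean_std(const float* x, std::size_t n) {
  float mean = 0.0f;
  for (std::size_t i = 0; i < n; i++) mean += x[i];
  mean /= n;
  float var = 0.0f;
  for (std::size_t i = 0; i < n; i++) var += (x[i] - mean) * (x[i] - mean);
  return {mean, std::sqrt(var / n)};
}
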
diff --git a/test/singa/test_jpg.cc b/test/singa/test_jpg.cc
index 95ee01d..38fd5fe 100644
--- a/test/singa/test_jpg.cc
+++ b/test/singa/test_jpg.cc
@@ -1,29 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
-#include "singa/io/encoder.h"
-#include "singa/io/decoder.h"
-#include "gtest/gtest.h"
 #include <time.h>
 
+#include "gtest/gtest.h"
+#include "singa/io/decoder.h"
+#include "singa/io/encoder.h"
+
 #ifdef USE_OPENCV
 #include <opencv2/highgui/highgui.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
@@ -35,7 +36,7 @@
 
   // initial random seed
   srand(time(NULL));
- 
+
   singa::EncoderConf encoder_conf;
   encoder_conf.set_image_dim_order("HWC");
   encoder.Setup(encoder_conf);
@@ -77,7 +78,7 @@
   const int* in_label = input[1].data<int>();
   EXPECT_EQ(2, in_label[0]);
   EXPECT_EQ(2u, input.size());
- 
+
   std::string tmp = encoder.Encode(input);
   std::vector<Tensor> output = decoder.Decode(tmp);
   EXPECT_EQ(2u, output.size());
@@ -92,7 +93,7 @@
   for (size_t i = 0; i < height; i++)
     for (size_t j = 0; j < width; j++)
       for (size_t k = 0; k < channel; k++)
-        out.at<cv::Vec3b>(i, j)[k] = 
+        out.at<cv::Vec3b>(i, j)[k] =
             out_pixel[i * width * channel + j * channel + k];
   for(size_t i = 0; i < total; i++)
     EXPECT_LE(fabs(in_pixel[i]-out_pixel[i]), 10.f);*/
diff --git a/test/singa/test_lmdb_rw.cc b/test/singa/test_lmdb_rw.cc
index 6d7b3d0..ec25720 100644
--- a/test/singa/test_lmdb_rw.cc
+++ b/test/singa/test_lmdb_rw.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../include/singa/io/reader.h"
 #include "../include/singa/io/writer.h"
diff --git a/test/singa/test_logging.cc b/test/singa/test_logging.cc
index 619e7e8..16efa8f 100644
--- a/test/singa/test_logging.cc
+++ b/test/singa/test_logging.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-* 
-*   http://www.apache.org/licenses/LICENSE-2.0
-* 
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/utils/logging.h"
diff --git a/test/singa/test_lrn.cc b/test/singa/test_lrn.cc
index 6a389e1..2a1ef82 100644
--- a/test/singa/test_lrn.cc
+++ b/test/singa/test_lrn.cc
@@ -1,23 +1,23 @@
 /*********************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
 
 #include "../src/model/layer/lrn.h"
 #include "gtest/gtest.h"
diff --git a/test/singa/test_memory.cc b/test/singa/test_memory.cc
index 33a3747..7375d39 100644
--- a/test/singa/test_memory.cc
+++ b/test/singa/test_memory.cc
@@ -1,30 +1,30 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
-#include "singa/utils/logging.h"
 #include "singa/core/memory.h"
 #include "singa/singa_config.h"
-#include "singa/utils/timer.h"
 #include "singa/utils/cuda_utils.h"
+#include "singa/utils/logging.h"
+#include "singa/utils/timer.h"
 
 #ifdef USE_CUDA
 /*
diff --git a/test/singa/test_mse.cc b/test/singa/test_mse.cc
index 7aa3326..08dca15 100644
--- a/test/singa/test_mse.cc
+++ b/test/singa/test_mse.cc
@@ -1,40 +1,40 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
-#include "singa/core/tensor.h"
 #include "singa/core/device.h"
+#include "singa/core/tensor.h"
 #include "singa/model/loss.h"
 
 using singa::Tensor;
 class TestMSE : public ::testing::Test {
  protected:
   virtual void SetUp() {
-    p.Reshape(singa::Shape{2, 3});
-    t.Reshape(singa::Shape{2, 3});
+    p.Resize(singa::Shape{2, 3});
+    t.Resize(singa::Shape{2, 3});
     p.CopyDataFromHostPtr(pdat, sizeof(pdat) / sizeof(float));
     t.CopyDataFromHostPtr(tdat, sizeof(pdat) / sizeof(float));
   }
-  const float pdat[6] = { 0.1f, 1.1f, 2.1f, 0.3f, 2.2f, 1.8f};
-  const float tdat[6] = { 0.1f, 1.1f, 2.0f, 0.3f, 2.2f, 1.8f};
+  const float pdat[6] = {0.1f, 1.1f, 2.1f, 0.3f, 2.2f, 1.8f};
+  const float tdat[6] = {0.1f, 1.1f, 2.0f, 0.3f, 2.2f, 1.8f};
 
   singa::Tensor p, t;
 };
@@ -85,8 +85,8 @@
     }
     EXPECT_FLOAT_EQ(ldat[i], 0.5 * l);
   }
-	p.ToHost();
-	t.ToHost();
+  p.ToHost();
+  t.ToHost();
   delete mse;
 }
 
@@ -102,8 +102,7 @@
 
   for (size_t i = 0; i < grad.Size(); i++)
     EXPECT_FLOAT_EQ(gdat[i], (1.0f / p.shape().at(0)) * (pdat[i] - tdat[i]));
-	p.ToHost();
-	t.ToHost();
-
+  p.ToHost();
+  t.ToHost();
 }
 #endif
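
The loss these MSE tests verify is the per-sample squared error
L_i = 0.5 * sum_j (p_ij - t_ij)^2, with gradient (p - t) / batch_size; a
scalar sketch of the forward term (illustrative, not part of the patch):

#include <cstddef>

// Per-sample squared-error loss, as asserted element by element above.
float mse_row(const float* p, const float* t, std::size_t dim) {
  float l = 0.0f;
  for (std::size_t j = 0; j < dim; j++) {
    float d = p[j] - t[j];
    l += d * d;
  }
  return 0.5f * l;
}
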
diff --git a/test/singa/test_nesterov.cc b/test/singa/test_nesterov.cc
index 3ae3181..ad5155d 100644
--- a/test/singa/test_nesterov.cc
+++ b/test/singa/test_nesterov.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/model/optimizer.h"
diff --git a/test/singa/test_opencl.cc b/test/singa/test_opencl.cc
index 705e89c..5602e5d 100644
--- a/test/singa/test_opencl.cc
+++ b/test/singa/test_opencl.cc
@@ -1,31 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/core/device.h"
 #include "singa/core/tensor.h"
 #include "singa/proto/core.pb.h"
 
-using singa::CppCPU;
 using singa::Block;
+using singa::CppCPU;
 using singa::Shape;
 using singa::Tensor;
 
@@ -34,35 +34,33 @@
 using singa::OpenclDevice;
 
 class OpenCL_TensorMath : public ::testing::Test {
-protected:
-
+ protected:
   OpenCL_TensorMath() {
     auto ocl_dev = std::make_shared<OpenclDevice>();
-    
+
     a = Tensor(Shape{6}, ocl_dev);
     b = Tensor(Shape{6}, ocl_dev);
     c = Tensor(Shape{6, 1}, ocl_dev);
     d = Tensor(Shape{3, 2}, ocl_dev);
     e = Tensor(Shape{3, 2}, ocl_dev);
     empty10k = Tensor(Shape{10000}, ocl_dev);
-    
+
     a.CopyDataFromHostPtr<float>(dat1, 6);
     b.CopyDataFromHostPtr<float>(dat2, 6);
     e.CopyDataFromHostPtr<float>(dat1, 6);
   }
-  
+
   Tensor a, b, c, d, e;
   Tensor empty10k;
   const float dat1[6] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
   const float dat2[6] = {1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f};
 };
 
-
 TEST_F(OpenCL_TensorMath, MemberAbs) {
   Tensor aa = a.Clone();
   Tensor bb = b.Clone();
   Tensor cc = aa - bb;
-  
+
   cc.ToHost();
   const float *dptr = cc.data<float>();
   EXPECT_NEAR(-0.1, dptr[0], 1e-5);
@@ -76,13 +74,11 @@
   EXPECT_NEAR(0.1, dptr1[2], 1e-5);
 }
 
-
-//TEST_F(OpenCL_TensorMath, MemberClamp) { }
-
+// TEST_F(OpenCL_TensorMath, MemberClamp) { }
 
 TEST_F(OpenCL_TensorMath, MemberExp) {
   Tensor p = Exp(a);
-  
+
   p.ToHost();
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(exp(1.0f), dptr1[0], 1e-5);
@@ -90,10 +86,9 @@
   EXPECT_NEAR(exp(3.0f), dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberLog) {
   Tensor p = Log(a);
-  
+
   p.ToHost();
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(log(1.0f), dptr1[0], 1e-5);
@@ -101,11 +96,10 @@
   EXPECT_NEAR(log(3.0f), dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberReLU) {
   Tensor aa = a.Clone();
   Tensor cc = aa - 2.0f;
-  
+
   cc.ToHost();
   const float *dptr = cc.data<float>();
   EXPECT_NEAR(-1.0f, dptr[0], 1e-5);
@@ -120,7 +114,6 @@
   EXPECT_NEAR(1.0f, dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberSigmoid) {
   Tensor p = Sigmoid(a);
   p.ToHost();
@@ -130,7 +123,6 @@
   EXPECT_NEAR(1.0f / (1.0f + exp(-3.0f)), dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberSign) {
   Tensor aa = a.Clone();
   Tensor cc = aa - 2.0f;
@@ -148,7 +140,6 @@
   EXPECT_EQ(1.0f, dptr1[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberSqrt) {
   Tensor p = Sqrt(a);
   p.ToHost();
@@ -158,7 +149,6 @@
   EXPECT_NEAR(sqrt(3.0), dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberSquare) {
   Tensor p = Square(a);
   p.ToHost();
@@ -168,7 +158,6 @@
   EXPECT_NEAR(9.0, dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberTanh) {
   Tensor p = Tanh(a);
   p.ToHost();
@@ -214,7 +203,6 @@
   EXPECT_FLOAT_EQ(0.0f, dptr1[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberLE) {
   Tensor p1 = a <= 2.0f;
   p1.ToHost();
@@ -224,7 +212,6 @@
   EXPECT_FLOAT_EQ(0.0f, dptr1[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberGT) {
   Tensor p1 = a > 2.0f;
   p1.ToHost();
@@ -234,7 +221,6 @@
   EXPECT_FLOAT_EQ(1.0f, dptr1[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberGE) {
   Tensor p1 = a >= 2.0f;
   p1.ToHost();
@@ -244,7 +230,6 @@
   EXPECT_FLOAT_EQ(1.0f, dptr1[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberPow) {
   Tensor p1 = Pow(b, 3.0f);
   p1.ToHost();
@@ -253,15 +238,14 @@
   EXPECT_FLOAT_EQ(pow(2.1f, 3.0f), dptr1[1]);
   EXPECT_FLOAT_EQ(pow(3.1f, 3.0f), dptr1[2]);
 
-  Tensor p2 = Pow(a,b);
+  Tensor p2 = Pow(a, b);
   p2.ToHost();
   const float *dptr2 = p2.data<float>();
-  EXPECT_FLOAT_EQ(pow(1.0f,1.1f), dptr2[0]);
-  EXPECT_FLOAT_EQ(pow(2.0f,2.1f), dptr2[1]);
-  EXPECT_FLOAT_EQ(pow(3.0f,3.1f), dptr2[2]);
+  EXPECT_FLOAT_EQ(pow(1.0f, 1.1f), dptr2[0]);
+  EXPECT_FLOAT_EQ(pow(2.0f, 2.1f), dptr2[1]);
+  EXPECT_FLOAT_EQ(pow(3.0f, 3.1f), dptr2[2]);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberSub) {
   Tensor p1 = a - b;
   p1.ToHost();
@@ -271,7 +255,6 @@
   EXPECT_NEAR(-0.1, dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberEltwiseMult) {
   Tensor p1 = a * b;
   p1.ToHost();
@@ -281,7 +264,6 @@
   EXPECT_NEAR(3.0 * 3.1, dptr1[2], 1e-5);
 }
 
-
 TEST_F(OpenCL_TensorMath, MemberDiv) {
   Tensor p1 = a / b;
   p1.ToHost();
@@ -313,7 +295,7 @@
   const float p = 0.3f;
   Bernoulli(p, &empty10k);
   empty10k.ToHost();
-  const float* out = empty10k.data<float>();
+  const float *out = empty10k.data<float>();
   float sum = 0.0f;
   for (int i = 0; i < 10000; i++) sum += out[i];
   float mean = sum / 10000;
@@ -325,11 +307,10 @@
   EXPECT_NEAR(variance, p * (1 - p), 1e-2);
 }
 
-
 TEST_F(OpenCL_TensorMath, Gaussian) {
   Gaussian(0.0f, 1.0f, &empty10k);
   empty10k.ToHost();
-  const float* out = empty10k.data<float>();
+  const float *out = empty10k.data<float>();
   float sum = 0.0f;
   for (int i = 0; i < 10000; i++) sum += out[i];
   float mean = sum / 10000;
@@ -341,11 +322,10 @@
   EXPECT_NEAR(variance, 1.0f, 1e-2);
 }
 
-
 TEST_F(OpenCL_TensorMath, Uniform) {
   Uniform(0.1f, 0.2f, &empty10k);
   empty10k.ToHost();
-  const float* out = empty10k.data<float>();
+  const float *out = empty10k.data<float>();
   float sum = 0.0f;
   for (int i = 0; i < 10000; i++) sum += out[i];
   float mean = sum / 10000;
@@ -361,7 +341,6 @@
 // BLAS functions, ref to http://docs.nvidia.com/cuda/cublas
 // *********************************************************
 
-
 TEST_F(OpenCL_TensorMath, MemberAddTensor) {
   Tensor aa = a.Clone();
   aa += a;
@@ -389,7 +368,6 @@
   EXPECT_FLOAT_EQ(12.1f, dptr2[5]);
 }
 
-
 TEST_F(OpenCL_TensorMath, AddTensors) {
   Tensor ret(a.shape(), a.device(), a.data_type());
   Add(a, b, &ret);
@@ -417,7 +395,6 @@
   EXPECT_FLOAT_EQ(12.1f, dptr1[5]);
 }
 
-
 TEST_F(OpenCL_TensorMath, SetValue) {
   Tensor t(Shape{4});
   t.SetValue(0.3f);
@@ -426,32 +403,29 @@
   for (int i = 0; i < 4; i++) EXPECT_FLOAT_EQ(ptr[i], 0.3f);
 }
 
-
 TEST_F(OpenCL_TensorMath, Axpy) {
   Tensor ret(b.shape(), b.device(), b.data_type());
   const float zero[6] = {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f};
   ret.CopyDataFromHostPtr<float>(zero, 6);
   Axpy(10.0f, b, &ret);
   ret.ToHost();
-  const float* out = ret.data<float>();
+  const float *out = ret.data<float>();
 
-  EXPECT_EQ(11.0f, out[0]); // 1.1 * 10 + 0 = 11
-  EXPECT_EQ(21.0f, out[1]); // 2.1 * 10 + 1 = 22
-  EXPECT_EQ(31.0f, out[2]); // 3.1 * 10 + 2 = 33
-  EXPECT_EQ(41.0f, out[3]); // 4.1 * 10 + 3 = 44
+  EXPECT_EQ(11.0f, out[0]);  // 1.1 * 10 + 0 = 11
+  EXPECT_EQ(21.0f, out[1]);  // 2.1 * 10 + 0 = 21
+  EXPECT_EQ(31.0f, out[2]);  // 3.1 * 10 + 0 = 31
+  EXPECT_EQ(41.0f, out[3]);  // 4.1 * 10 + 0 = 41
 }
 
-
 TEST_F(OpenCL_TensorMath, GEMM) {
   a.Reshape(Shape{6, 1});
   Tensor result = Mult(a.T(), a);
   result.ToHost();
-  const float* out = result.data<float>();
+  const float *out = result.data<float>();
 
   EXPECT_EQ(91.0f, out[0]);
 }
 
-
 // TODO: ComputeCrossEntropy, SoftmaxCrossEntropy
 //
 #endif  // USE_OPENCL
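
The corrected Axpy expectations follow from the usual BLAS semantics,
y[i] = alpha * x[i] + y[i], with y zero-initialized; a reference loop
(a sketch under that assumption, not part of the patch):

#include <cstddef>

// BLAS-style axpy; with y starting at zero, out[i] == alpha * b[i].
void axpy_ref(float alpha, const float* x, float* y, std::size_t n) {
  for (std::size_t i = 0; i < n; i++) y[i] += alpha * x[i];
}
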
diff --git a/test/singa/test_opencl_convolution.cc b/test/singa/test_opencl_convolution.cc
index 972756d..fc5fd07 100644
--- a/test/singa/test_opencl_convolution.cc
+++ b/test/singa/test_opencl_convolution.cc
@@ -1,27 +1,26 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/convolution.h"
 #include "../src/model/layer/opencl_convolution.h"
-
 #include "gtest/gtest.h"
 
 #ifdef USE_OPENCL
@@ -30,7 +29,6 @@
 using singa::OpenclDevice;
 using singa::Shape;
 
-
 TEST(OpenclConvolution, Setup) {
   OpenclConvolution conv;
   EXPECT_EQ("OpenclConvolution", conv.layer_type());
@@ -60,13 +58,12 @@
   EXPECT_EQ(3u, conv.width());
 }
 
-
 TEST(OpenclConvolution, Forward) {
   const size_t batchsize = 2, c = 1, h = 3, w = 3;
   const float x[batchsize * c * h * w] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f,
                                           7.0f, 8.0f, 9.0f, 1.0f, 2.0f, 3.0f,
                                           4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
-                                          
+
   auto ocl = std::make_shared<OpenclDevice>();
   singa::Tensor in(singa::Shape{batchsize, c, h, w}, ocl);
   in.CopyDataFromHostPtr(x, batchsize * c * h * w);
@@ -114,7 +111,6 @@
   EXPECT_EQ(12.0f, outptr1[7]);
 }
 
-
 TEST(OpenclConvolution, Backward) {
   // src_data
   const size_t batchsize = 2, c = 1, src_h = 3, src_w = 3;
@@ -219,5 +215,4 @@
   EXPECT_FLOAT_EQ(dy[0] * x[4] + dy[4] * x[13], dwptr[8]);
 }
 
-
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
diff --git a/test/singa/test_opencl_device.cc b/test/singa/test_opencl_device.cc
index 2797a40..68bdaeb 100644
--- a/test/singa/test_opencl_device.cc
+++ b/test/singa/test_opencl_device.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/core/device.h"
@@ -25,16 +25,15 @@
 
 #ifdef USE_OPENCL
 
+using singa::Block;
 using singa::CppCPU;
 using singa::OpenclDevice;
-using singa::Block;
 
 TEST(OpenclDevice, Constructor) {
   OpenclDevice dev;
   EXPECT_EQ(0, dev.id());
 }
 
-
 TEST(OpenclDevice, MemoryMallocFree) {
   OpenclDevice dev;
   Block* b = dev.NewBlock(4);
@@ -43,19 +42,17 @@
   dev.FreeBlock(b);
 }
 
-
 TEST(OpenclDevice, Exec) {
   OpenclDevice dev;
   Block* b = dev.NewBlock(4);
-  int x = 1, y =3, z = 0;
-  dev.Exec([x, y, &z](singa::Context *ctx) {
-      z = x + y;
-      }, {b}, {b}, false);
+  int x = 1, y = 3, z = 0;
+  dev.Exec([x, y, &z](singa::Context* ctx) { z = x + y; }, {b}, {b}, false);
   EXPECT_EQ(x + y, z);
   dev.FreeBlock(b);
 }
 
-// Tests for integrity of one round of data transfer to an OpenCL device and back.
+// Tests for integrity of one round of data transfer to an OpenCL device and
+// back.
 TEST(OpenclDevice, CopyDataToFrom) {
   OpenclDevice dev;
   CppCPU host;
@@ -79,7 +76,6 @@
   EXPECT_EQ('x', astr[3]);
 }
 
-
 TEST(OpenclDevice, DuplicateDataOnDevice) {
   OpenclDevice dev;
   CppCPU host;
@@ -105,4 +101,4 @@
   EXPECT_EQ('x', astr[3]);
 }
 
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
diff --git a/test/singa/test_opencl_pooling.cc b/test/singa/test_opencl_pooling.cc
index f015043..15200b7 100644
--- a/test/singa/test_opencl_pooling.cc
+++ b/test/singa/test_opencl_pooling.cc
@@ -1,27 +1,26 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
-#include "../src/model/layer/pooling.h"
 #include "../src/model/layer/opencl_pooling.h"
-
+#include "../src/model/layer/pooling.h"
 #include "gtest/gtest.h"
 
 #ifdef USE_OPENCL
@@ -57,7 +56,6 @@
   EXPECT_EQ(3u, pool.width());
 }
 
-
 TEST(OpenclPooling, Forward) {
   const size_t batchsize = 2, c = 1, h = 3, w = 3;
   const float x[batchsize * c * h * w] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f,
@@ -95,7 +93,6 @@
   EXPECT_EQ(9.0f, outptr1[7]);
 }
 
-
 TEST(OpenclPooling, Backward) {
   // src_data
   const size_t batchsize = 2, c = 1, src_h = 3, src_w = 3;
@@ -152,5 +149,4 @@
   EXPECT_EQ(0.4f, dx[17]);
 }
 
-
-#endif // USE_OPENCL
+#endif  // USE_OPENCL
diff --git a/test/singa/test_operation_batchnorm.cc b/test/singa/test_operation_batchnorm.cc
new file mode 100644
index 0000000..8da24b1
--- /dev/null
+++ b/test/singa/test_operation_batchnorm.cc
@@ -0,0 +1,88 @@
+/*********************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ ************************************************************/
+
+#include <iostream>
+
+#include "../src/model/operation/batchnorm.h"
+#include "gtest/gtest.h"
+
+using namespace singa;
+
+#ifdef USE_DNNL
+TEST(DNNLOperationBatchNorm, ForwardInference) {
+  Tensor x(Shape{2, 2});
+  Tensor alpha(Shape{2});
+  Tensor beta(Shape{2});
+  Tensor moving_mean(Shape{2});
+  Tensor moving_var(Shape{2});
+
+  Gaussian(0.0f, 1.0f, &x);
+  Gaussian(0.0f, 1.0f, &alpha);
+  Gaussian(0.0f, 1.0f, &beta);
+  Gaussian(0.0f, 1.0f, &moving_mean);
+  Gaussian(0.0f, 1.0f, &moving_var);
+
+  BatchNormHandle batch_norm_handle(0u, x);
+  Tensor y = CpuBatchNormForwardInference(batch_norm_handle, x, alpha, beta,
+                                          moving_mean, moving_var);
+}
+
+TEST(DNNLOperationBatchNorm, ForwardTraining) {
+  Tensor x(Shape{2, 2});
+  Tensor alpha(Shape{2});
+  Tensor beta(Shape{2});
+  Tensor moving_mean(Shape{2});
+  Tensor moving_var(Shape{2});
+
+  Gaussian(0.0f, 1.0f, &x);
+  Gaussian(0.0f, 1.0f, &alpha);
+  Gaussian(0.0f, 1.0f, &beta);
+  Gaussian(0.0f, 1.0f, &moving_mean);
+  Gaussian(0.0f, 1.0f, &moving_var);
+
+  BatchNormHandle batch_norm_handle(0u, x);
+  auto outputs = CpuBatchNormForwardTraining(batch_norm_handle, x, alpha, beta,
+                                             moving_mean, moving_var);
+}
+
+TEST(DNNLOperationBatchNorm, Backward) {
+  Tensor x(Shape{2, 2});
+  Tensor y(Shape{2, 2});
+  Tensor dy(Shape{2, 2});
+  Tensor alpha(Shape{2});
+  Tensor beta(Shape{2});
+  Tensor moving_mean(Shape{2});
+  Tensor moving_var(Shape{2});
+
+  Gaussian(0.0f, 1.0f, &x);
+  Gaussian(0.0f, 1.0f, &y);
+  Gaussian(0.0f, 1.0f, &dy);
+  Gaussian(0.0f, 1.0f, &alpha);
+  Gaussian(0.0f, 1.0f, &beta);
+  Gaussian(0.0f, 1.0f, &moving_mean);
+  Gaussian(0.0f, 1.0f, &moving_var);
+
+  BatchNormHandle batch_norm_handle(0u, x);
+  auto outputs = CpuBatchNormBackwardx(batch_norm_handle, y, dy, x, alpha, beta,
+                                       moving_mean, moving_var);
+}
+
+#endif  // USE_DNNL
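
These are smoke tests (no numeric assertions); what the inference path
computes per element is the standard batchnorm transform
y = alpha * (x - mean) / sqrt(var + eps) + beta.  A scalar sketch, with eps
an assumed constant rather than one taken from the patch:

#include <cmath>

// Inference-mode batch normalization for one element; channel statistics
// and the scale/shift (alpha, beta) are supplied by the caller.
inline float bn_infer(float x, float mean, float var, float alpha, float beta,
                      float eps = 1e-5f) {
  return alpha * (x - mean) / std::sqrt(var + eps) + beta;
}
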
diff --git a/test/singa/test_operation_convolution.cc b/test/singa/test_operation_convolution.cc
new file mode 100644
index 0000000..05d08c8
--- /dev/null
+++ b/test/singa/test_operation_convolution.cc
@@ -0,0 +1,245 @@
+/************************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+#include "singa/singa_config.h"
+
+#ifdef USE_CBLAS
+
+#include <chrono>
+#include <iostream>
+
+#include "../src/model/operation/convolution.h"
+#include "gtest/gtest.h"
+
+using namespace singa;
+#ifdef USE_DNNL
+
+#include <stdio.h>
+
+TEST(DNNLOperation_Convolution, Forward) {
+  const size_t batch_size = 2, c = 1, h = 3, w = 3;
+  const float x[batch_size * c * h * w] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f,
+                                           7.0f, 8.0f, 9.0f, 1.0f, 2.0f, 3.0f,
+                                           4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+  Tensor in(Shape{batch_size, c, h, w});
+  in.CopyDataFromHostPtr(x, batch_size * c * h * w);
+
+  const size_t num_filters = 1;
+  const size_t kernel_w = 3;
+  const size_t kernel_h = 3;
+  const std::vector<size_t> stride = {2, 2};
+  const std::vector<size_t> padding = {1, 1};
+  const bool bias_flag = true;
+
+  const float we[num_filters * kernel_w * kernel_h] = {
+      1.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f};
+  Tensor weight(Shape{num_filters, num_filters, 3, 3});
+  weight.CopyDataFromHostPtr(we,
+                             num_filters * num_filters * kernel_w * kernel_h);
+
+  const float b[num_filters] = {1.0f};
+  Tensor bias(Shape{num_filters});
+  bias.CopyDataFromHostPtr(b, num_filters);
+
+  ConvHandle conv_handle(in, {kernel_w, kernel_h}, stride, padding, c,
+                         num_filters, bias_flag);
+  Tensor out1 = CpuConvForward(in, weight, bias, conv_handle);
+
+  const float *out_ptr1 = out1.data<float>();
+  // Input: 3*3; kernel: 3*3; stride: 2*2; padding: 1*1.
+  EXPECT_EQ(8u, out1.Size());
+
+  EXPECT_EQ(3.0f, out_ptr1[0]);
+  EXPECT_EQ(7.0f, out_ptr1[1]);
+  EXPECT_EQ(-3.0f, out_ptr1[2]);
+  EXPECT_EQ(12.0f, out_ptr1[3]);
+  EXPECT_EQ(3.0f, out_ptr1[4]);
+  EXPECT_EQ(7.0f, out_ptr1[5]);
+  EXPECT_EQ(-3.0f, out_ptr1[6]);
+  EXPECT_EQ(12.0f, out_ptr1[7]);
+}
+
+TEST(DNNLOperation_Convolution, Performance) {
+  const int batch = 64;
+  const int image_h = 28;
+  const int in_chan = 1;
+  const int out_chan = 20;
+  const int ker = 5;
+  const int stride = 1;
+  const int out_size = 24;
+  const bool bias_flag = true;
+
+  Tensor grad(Shape{batch, out_chan, out_size, out_size});
+  Tensor in(Shape{batch, in_chan, image_h, image_h});
+  Tensor weight(Shape{out_chan, in_chan, ker, ker});
+  Tensor bias(Shape{out_chan});
+  Gaussian(0.0f, 1.0f, &grad);
+  Gaussian(0.0f, 1.0f, &in);
+  Gaussian(0.0f, 1.0f, &weight);
+  Gaussian(0.0f, 1.0f, &bias);
+  ConvHandle conv_handle(in, {ker, ker}, {stride, stride}, {0, 0}, in_chan,
+                         out_chan, bias_flag);
+
+  const int times = 100;
+
+  {
+    std::chrono::steady_clock::time_point begin =
+        std::chrono::steady_clock::now();
+    for (int i = 0; i < times; i++) {
+      Tensor out = CpuConvForward(in, weight, bias, conv_handle);
+    }
+    std::chrono::steady_clock::time_point end =
+        std::chrono::steady_clock::now();
+    std::cout << "[avg]forward Time difference = "
+              << (std::chrono::duration_cast<std::chrono::microseconds>(end -
+                                                                        begin)
+                      .count()) /
+                     times
+              << "[microsec]" << std::endl;
+  }
+
+  {
+    std::chrono::steady_clock::time_point begin =
+        std::chrono::steady_clock::now();
+    for (int i = 0; i < times; i++) {
+      Tensor in_grad = CpuConvBackwardx(grad, weight, in, conv_handle);
+    }
+    std::chrono::steady_clock::time_point end =
+        std::chrono::steady_clock::now();
+    std::cout << "[avg]backwardx Time difference = "
+              << (std::chrono::duration_cast<std::chrono::microseconds>(end -
+                                                                        begin)
+                      .count()) /
+                     times
+              << "[microsec]" << std::endl;
+  }
+
+  {
+    std::chrono::steady_clock::time_point begin =
+        std::chrono::steady_clock::now();
+    for (int i = 0; i < times; i++) {
+      Tensor dw = CpuConvBackwardW(grad, in, weight, conv_handle);
+    }
+    std::chrono::steady_clock::time_point end =
+        std::chrono::steady_clock::now();
+    std::cout << "[avg]backwardW Time difference = "
+              << (std::chrono::duration_cast<std::chrono::microseconds>(end -
+                                                                        begin)
+                      .count()) /
+                     times
+              << "[microsec]" << std::endl;
+  }
+}
+
+TEST(DNNLOperation_Convolution, Backward) {
+  const size_t batch_size = 2, c = 1, h = 3, w = 3;
+  const float x[batch_size * c * h * w] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f,
+                                           7.0f, 8.0f, 9.0f, 1.0f, 2.0f, 3.0f,
+                                           4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+  Tensor in(Shape{batch_size, c, h, w});
+  in.CopyDataFromHostPtr(x, batch_size * c * h * w);
+
+  const size_t num_filters = 1;
+  const size_t kernel_w = 3;
+  const size_t kernel_h = 3;
+  const std::vector<size_t> stride = {2, 2};
+  const std::vector<size_t> padding = {1, 1};
+  const bool bias_flag = true;
+
+  const float we[num_filters * kernel_w * kernel_h] = {
+      1.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f};
+  Tensor weight(Shape{num_filters, num_filters, 3, 3});
+  weight.CopyDataFromHostPtr(we,
+                             num_filters * num_filters * kernel_w * kernel_h);
+
+  const float b[num_filters] = {1.0f};
+  Tensor bias(Shape{num_filters});
+  bias.CopyDataFromHostPtr(b, num_filters);
+
+  ConvHandle conv_handle(in, {kernel_w, kernel_h}, stride, padding, c,
+                         num_filters, bias_flag);
+  Tensor out1 = CpuConvForward(in, weight, bias, conv_handle);
+
+  // grad
+  const size_t grad_h = 2, grad_w = 2;
+  const float dy[batch_size * num_filters * grad_h * grad_w] = {
+      0.1f, 0.2f, 0.3f, 0.4f, 0.1f, 0.2f, 0.3f, 0.4f};
+  Tensor grad(Shape{batch_size, num_filters, grad_h, grad_w});
+  grad.CopyDataFromHostPtr(dy, batch_size * num_filters * grad_h * grad_w);
+
+  Tensor in_grad = CpuConvBackwardx(grad, weight, in, conv_handle);
+
+  const float *dx = in_grad.data<float>();
+  const float *wptr = we;
+  EXPECT_EQ(18u, in_grad.Size());
+  EXPECT_EQ(dy[0] * wptr[4], dx[0]);
+  EXPECT_EQ(dy[0] * wptr[5] + dy[1] * wptr[3], dx[1]);
+  EXPECT_EQ(dy[1] * wptr[4], dx[2]);
+  EXPECT_EQ(dy[0] * wptr[7] + dy[2] * wptr[1], dx[3]);
+  EXPECT_EQ(
+      dy[0] * wptr[8] + dy[1] * wptr[6] + dy[2] * wptr[2] + dy[3] * wptr[0],
+      dx[4]);
+  EXPECT_EQ(dy[1] * wptr[7] + dy[3] * wptr[1], dx[5]);
+  EXPECT_EQ(dy[2] * wptr[4], dx[6]);
+  EXPECT_EQ(dy[2] * wptr[5] + dy[3] * wptr[3], dx[7]);
+  EXPECT_EQ(dy[3] * wptr[4], dx[8]);
+  EXPECT_EQ(dy[4] * wptr[4], dx[9]);
+  EXPECT_EQ(dy[4] * wptr[5] + dy[1] * wptr[3], dx[10]);
+  EXPECT_EQ(dy[5] * wptr[4], dx[11]);
+  EXPECT_EQ(dy[4] * wptr[7] + dy[2] * wptr[1], dx[12]);
+  EXPECT_EQ(
+      dy[4] * wptr[8] + dy[5] * wptr[6] + dy[6] * wptr[2] + dy[7] * wptr[0],
+      dx[13]);
+  EXPECT_EQ(dy[5] * wptr[7] + dy[7] * wptr[1], dx[14]);
+  EXPECT_EQ(dy[6] * wptr[4], dx[15]);
+  EXPECT_EQ(dy[6] * wptr[5] + dy[7] * wptr[3], dx[16]);
+  EXPECT_EQ(dy[7] * wptr[4], dx[17]);
+
+  Tensor dw = CpuConvBackwardW(grad, in, weight, conv_handle);
+
+  Tensor db = CpuConvBackwardb(grad, bias, conv_handle);
+
+  const float *dbptr = db.data<float>();
+  EXPECT_FLOAT_EQ(dy[0] + dy[1] + dy[2] + dy[3] + dy[4] + dy[5] + dy[6] + dy[7],
+                  dbptr[0]);
+
+  const float *dwptr = dw.data<float>();
+  EXPECT_EQ(9u, dw.Size());
+  EXPECT_FLOAT_EQ(dy[3] * x[4] + dy[7] * x[13], dwptr[0]);
+  EXPECT_FLOAT_EQ(dy[3] * x[5] + dy[7] * x[14] + dy[2] * x[3] + dy[6] * x[12],
+                  dwptr[1]);
+  EXPECT_FLOAT_EQ(dy[2] * x[4] + dy[6] * x[13], dwptr[2]);
+  EXPECT_FLOAT_EQ(dy[1] * x[1] + dy[5] * x[10] + dy[3] * x[7] + dy[7] * x[16],
+                  dwptr[3]);
+  EXPECT_FLOAT_EQ(dy[0] * x[0] + dy[4] * x[9] + dy[1] * x[2] + dy[5] * x[11] +
+                      dy[2] * x[6] + dy[6] * x[15] + dy[3] * x[8] +
+                      dy[7] * x[17],
+                  dwptr[4]);
+  EXPECT_FLOAT_EQ(dy[0] * x[1] + dy[4] * x[10] + dy[2] * x[7] + dy[6] * x[16],
+                  dwptr[5]);
+  EXPECT_FLOAT_EQ(dy[1] * x[4] + dy[5] * x[13], dwptr[6]);
+  EXPECT_FLOAT_EQ(dy[0] * x[3] + dy[4] * x[12] + dy[1] * x[5] + dy[5] * x[14],
+                  dwptr[7]);
+  EXPECT_FLOAT_EQ(dy[0] * x[4] + dy[4] * x[13], dwptr[8]);
+}
+
+#endif  // USE_DNNL
+
+#endif  // USE_CBLAS
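
The convolution expectations above (e.g. out_ptr1[0] == 3.0f: the padded
top-left window contributes -2 from the -1 tap and +4 from the +1 tap, plus
bias 1) can be reproduced with a naive single-channel reference; a sketch,
not the library's implementation:

#include <cstddef>
#include <vector>

// Naive zero-padded 2D cross-correlation (DNN convention, no kernel flip)
// over one channel; matches the hand-computed values in the test.
std::vector<float> conv2d_ref(const std::vector<float>& x, std::size_t h,
                              std::size_t w, const std::vector<float>& k,
                              std::size_t kh, std::size_t kw,
                              std::size_t stride, std::size_t pad, float bias) {
  std::size_t oh = (h + 2 * pad - kh) / stride + 1;
  std::size_t ow = (w + 2 * pad - kw) / stride + 1;
  std::vector<float> y(oh * ow, bias);  // start every output at the bias
  for (std::size_t i = 0; i < oh; i++)
    for (std::size_t j = 0; j < ow; j++)
      for (std::size_t a = 0; a < kh; a++)
        for (std::size_t b = 0; b < kw; b++) {
          long r = (long)(i * stride + a) - (long)pad;
          long c = (long)(j * stride + b) - (long)pad;
          if (r >= 0 && r < (long)h && c >= 0 && c < (long)w)
            y[i * ow + j] += x[r * w + c] * k[a * kw + b];
        }
  return y;
}
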
diff --git a/test/singa/test_operation_pooling.cc b/test/singa/test_operation_pooling.cc
new file mode 100644
index 0000000..4a0595d
--- /dev/null
+++ b/test/singa/test_operation_pooling.cc
@@ -0,0 +1,98 @@
+/************************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+#include "../src/model/operation/pooling.h"
+#include "gtest/gtest.h"
+
+using namespace singa;
+
+#ifdef USE_DNNL
+TEST(DNNLOperationPooling, Forward) {
+  const size_t batchsize = 2, c = 1, h = 3, w = 3;
+  const float x[batchsize * c * h * w] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f,
+                                          7.0f, 8.0f, 9.0f, 1.0f, 2.0f, 3.0f,
+                                          4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+
+  Tensor in(Shape{batchsize, c, h, w});
+  in.CopyDataFromHostPtr(x, batchsize * c * h * w);
+
+  PoolingHandle pool_handle(in, {2, 2}, {1, 1}, {0, 0}, true);
+  Tensor out1 = CpuPoolingForward(pool_handle, in);
+}
+TEST(DNNLOperationPooling, ForwardAverage) {
+  const size_t batchsize = 2, c = 1, h = 3, w = 3;
+  const float x[batchsize * c * h * w] = {
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f,
+
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+  Tensor in(Shape{batchsize, c, h, w});
+  in.CopyDataFromHostPtr(x, batchsize * c * h * w);
+
+  PoolingHandle pool_handle(in, {2, 2}, {1, 1}, {0, 0}, false);
+  Tensor out1 = CpuPoolingForward(pool_handle, in);
+}
+
+TEST(DNNLOperationPooling, Backward) {
+  // src_data
+  const size_t batchsize = 2, c = 1, src_h = 3, src_w = 3;
+  const float x[batchsize * c * src_h * src_w] = {
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f,
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+  Tensor in(Shape{batchsize, c, src_h, src_w});
+  in.CopyDataFromHostPtr(x, batchsize * c * src_h * src_w);
+
+  PoolingHandle pool_handle(in, {2, 2}, {1, 1}, {0, 0}, true);
+
+  Tensor out = CpuPoolingForward(pool_handle, in);
+
+  // grad - bwd
+  const size_t grad_h = 2, grad_w = 2;
+  const float dy[batchsize * c * grad_h * grad_w] = {0.1f, 0.2f, 0.3f, 0.4f,
+                                                     0.1f, 0.2f, 0.3f, 0.4f};
+  Tensor grad(Shape{batchsize, c, grad_h, grad_w});
+  grad.CopyDataFromHostPtr(dy, batchsize * c * grad_h * grad_w);
+
+  Tensor in_grad = CpuPoolingBackward(pool_handle, grad, in, out);
+}
+TEST(DNNLOperationPooling, BackwardAvg) {
+  // src_data
+  const size_t batchsize = 2, c = 1, src_h = 3, src_w = 3;
+  const float x[batchsize * c * src_h * src_w] = {
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f,
+
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
+  Tensor in(Shape{batchsize, c, src_h, src_w});
+  in.CopyDataFromHostPtr(x, batchsize * c * src_h * src_w);
+
+  PoolingHandle pool_handle(in, {2, 2}, {1, 1}, {0, 0}, false);
+
+  Tensor out = CpuPoolingForward(pool_handle, in);
+
+  // grad - bwd
+  const size_t grad_h = 2, grad_w = 2;
+  const float dy[batchsize * c * grad_h * grad_w] = {0.1f, 0.2f, 0.3f, 0.4f,
+                                                     0.1f, 0.2f, 0.3f, 0.4f};
+  Tensor grad(Shape{batchsize, c, grad_h, grad_w});
+  grad.CopyDataFromHostPtr(dy, batchsize * c * grad_h * grad_w);
+
+  Tensor in_grad = CpuPoolingBackward(pool_handle, grad, in, out);
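+  // Hedged value check (a sketch, assuming NCHW layout and standard
+  // average-pool backward semantics): each gradient spreads dy / 4 over its
+  // 2x2 window, so per image in_grad should be
+  // {0.025, 0.075, 0.05, 0.1, 0.25, 0.15, 0.075, 0.175, 0.1}.
+  in_grad.ToHost();
+  const float* dxptr = in_grad.data<float>();
+  const float expect_dx[9] = {0.025f, 0.075f, 0.05f,  0.1f, 0.25f,
+                              0.15f,  0.075f, 0.175f, 0.1f};
+  for (size_t i = 0; i < batchsize * c * src_h * src_w; i++)
+    EXPECT_FLOAT_EQ(expect_dx[i % 9], dxptr[i]);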
+}
+
+#endif  // USE_DNNL
diff --git a/test/singa/test_operation_rnn.cc b/test/singa/test_operation_rnn.cc
new file mode 100644
index 0000000..bf52975
--- /dev/null
+++ b/test/singa/test_operation_rnn.cc
@@ -0,0 +1,141 @@
+/************************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+#include "../src/model/operation/rnn.h"
+#include "gtest/gtest.h"
+#include "singa/core/tensor.h"
+#include "singa/singa_config.h"
+
+using namespace singa;
+
+#ifdef USE_CUDNN
+TEST(OperationRNN, training) {
+  auto cuda = std::make_shared<singa::CudaGPU>();
+
+  size_t hidden_size = 7;
+  int seq_length = 5;
+  size_t batch_size = 6;
+  size_t feature_size = 3;
+  size_t num_layers = 1;
+  int bdirect = 0;
+
+  Shape s_s{num_layers * (bdirect ? 2 : 1), batch_size, hidden_size};
+  Shape y_s{seq_length, batch_size, hidden_size * (bdirect ? 2 : 1)};
+
+  // x
+  Tensor x(Shape{seq_length, batch_size, feature_size}, cuda);
+  Gaussian(0.0f, 1.0f, &x);
+
+  // x hidden states and cell states
+  Tensor hx(s_s, cuda);
+  Tensor cx(s_s, cuda);
+  hx.SetValue(0.0f);
+  cx.SetValue(0.0f);
+
+  // y dy
+  Tensor y(y_s, cuda);
+  Tensor dy(y_s, cuda);
+  Gaussian(0.0f, 1.0f, &y);
+  Gaussian(0.0f, 1.0f, &dy);
+
+  // y hidden states and cell states
+  Tensor dhy(s_s, cuda);
+  Tensor dcy(s_s, cuda);
+  Gaussian(0.0f, 1.0f, &dhy);
+  Gaussian(0.0f, 1.0f, &dcy);
+
+  // init handle and weights
+  CudnnRNNHandle rnn_handle(x, hidden_size);
+  Tensor W(Shape{rnn_handle.weights_size}, cuda);
+  Gaussian(0.0f, 1.0f, &W);
+
+  // forward and backward passes
+  auto outputs = GpuRNNForwardTraining(x, hx, cx, W, rnn_handle);
+  auto outputs2 = GpuRNNForwardInference(x, hx, cx, W, rnn_handle);
+  auto output3 = GpuRNNBackwardx(y, dy, dhy, dcy, W, hx, cx, rnn_handle);
+  auto dW = GpuRNNBackwardW(x, hx, y, rnn_handle);
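+
+  // note: this is a smoke test for the cuDNN RNN calls above; it only checks
+  // that the forward/backward passes run without error, not the numerical
+  // outputs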
+}
+
+TEST(OperationRNNEx, training) {
+  auto cuda = std::make_shared<singa::CudaGPU>();
+
+  size_t hidden_size = 2;
+  size_t seq_length = 6;
+  size_t batch_size = 6;
+  size_t feature_size = 4;
+  int bdirect = 0;  // 0 or 1
+  size_t num_layers = 1;
+
+  Shape s_s{num_layers * (bdirect ? 2 : 1), batch_size, hidden_size};
+  Shape y_s{seq_length, batch_size, hidden_size * (bdirect ? 2 : 1)};
+  Shape x_s{seq_length, batch_size, feature_size};
+
+  // x
+  Tensor x(x_s, cuda);
+  Gaussian(0.0f, 1.0f, &x);
+
+  // x hidden states and cell states
+  Tensor hx(s_s, cuda);
+  Tensor cx(s_s, cuda);
+  hx.SetValue(0.0f);
+  cx.SetValue(0.0f);
+
+  // y hidden states and cell states
+  Tensor dhy(s_s, cuda);
+  Tensor dcy(s_s, cuda);
+  Gaussian(0.0f, 1.0f, &dhy);
+  Gaussian(0.0f, 1.0f, &dcy);
+
+  // y dy
+  Tensor y(y_s, cuda);
+  Tensor dy(y_s, cuda);
+  Gaussian(0.0f, 1.0f, &y);
+  Gaussian(0.0f, 1.0f, &dy);
+
+  // seq lengths
+  Tensor seq_lengths(
+      Shape{
+          batch_size,
+      },
+      cuda, singa::kInt);
+  vector<int> data(batch_size, seq_length);
+  seq_lengths.CopyDataFromHostPtr(data.data(), batch_size);
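+  // all samples use the full sequence length here; per-sample lengths shorter
+  // than seq_length would exercise the variable-length (Ex) path further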
+
+  // init handle and weights
+  CudnnRNNHandle rnn_handle(x, hidden_size, 0);
+  Tensor W(Shape{rnn_handle.weights_size}, cuda);
+  Gaussian(0.0f, 1.0f, &W);
+
+  // forward and backward passes for the batch-first format
+  /* TODO: these Ex calls have been observed to abort with:
+     WARNING: Logging before InitGoogleLogging() is written to STDERR
+     F0619 07:11:43.435175  1094 rnn.cc:658] Check failed: status ==
+     CUDNN_STATUS_SUCCESS (8 vs. 0)  CUDNN_STATUS_EXECUTION_FAILED
+     *** Check failure stack trace: ***
+     Aborted (core dumped)
+   */
+  auto outputs = GpuRNNForwardTrainingEx(x, hx, cx, W, seq_lengths, rnn_handle);
+  auto outputs2 =
+      GpuRNNForwardInferenceEx(x, hx, cx, W, seq_lengths, rnn_handle);
+  auto outputs3 =
+      GpuRNNBackwardxEx(y, dy, dhy, dcy, W, hx, cx, seq_lengths, rnn_handle);
+  auto dW = GpuRNNBackwardWEx(x, hx, y, seq_lengths, rnn_handle);
+}
+
+#endif  // USE_CUDNN
diff --git a/test/singa/test_platform.cc b/test/singa/test_platform.cc
index f50c978..fce5f34 100644
--- a/test/singa/test_platform.cc
+++ b/test/singa/test_platform.cc
@@ -16,13 +16,27 @@
  * limitations under the License.
  */
 
+#include <iostream>
 
 #include "gtest/gtest.h"
 #include "singa/core/device.h"
 #include "singa/core/tensor.h"
-
+using namespace std;
 #ifdef USE_CUDA
 using singa::Platform;
+
+TEST(Platform, CreateMultDevice) {
+  int n = Platform::GetNumGPUs();
+  auto devs = Platform::CreateCudaGPUs(n);
+  for (size_t i = 0; i < devs.size(); i++) {
+    // use a distinct size per device; note that 512 + 512 * (2 - i) would
+    // underflow the unsigned index for i > 2 on machines with more than
+    // three GPUs
+    const size_t size = 512 + 512 * i;
+    auto b = devs[i]->NewBlock(size);
+    // for lazy allocation
+    b->mutable_data();
+    EXPECT_EQ(size, devs[i]->GetAllocatedMem());
+    devs[i]->FreeBlock(b);
+  }
+}
+
 TEST(Platform, NumGPUs) {
   int n = Platform::GetNumGPUs();
   EXPECT_GE(n, 0);
@@ -34,15 +48,16 @@
   auto ids = Platform::GetGPUIDs();
   EXPECT_EQ(ids.size(), n);
   auto mem = Platform::GetGPUMemSize();
-  for (auto x : mem)
-    EXPECT_GT(x.second, x.first);
+  for (auto x : mem) EXPECT_GT(x.second, x.first);
 }
 
 TEST(Platform, CreateDevice) {
   auto dev = Platform::CreateCudaGPUs(1).at(0);
-  size_t size[] = { 128, 256, 3, 24 };
+  size_t size[] = {128, 256, 3, 24};
   {
     auto ptr = dev->NewBlock(size[0]);
+    // for lazy allocation
+    ptr->mutable_data();
     auto allocated = dev->GetAllocatedMem();
     EXPECT_LE(size[0], allocated);
     dev->FreeBlock(ptr);
@@ -52,46 +67,37 @@
     auto ptr0 = dev->NewBlock(size[0]);
     auto ptr1 = dev->NewBlock(size[1]);
     auto ptr2 = dev->NewBlock(size[2]);
+    ptr0->mutable_data();
+    ptr1->mutable_data();
+    ptr2->mutable_data();
     auto allocated = dev->GetAllocatedMem();
     EXPECT_LE(size[0] + size[1] + size[2], allocated);
     auto ptr3 = dev->NewBlock(size[3]);
+    ptr3->mutable_data();
     allocated = dev->GetAllocatedMem();
     EXPECT_LE(size[0] + size[1] + size[2] + size[3], allocated);
     dev->FreeBlock(ptr0);
     dev->FreeBlock(ptr1);
     dev->FreeBlock(ptr2);
-//    allocated = dev->GetAllocatedMem();
-//    EXPECT_EQ(size[3], allocated);
+    //    allocated = dev->GetAllocatedMem();
+    //    EXPECT_EQ(size[3], allocated);
     dev->FreeBlock(ptr3);
-//    allocated = dev->GetAllocatedMem();
-//    EXPECT_EQ(0, allocated);
-  }
-}
-
-TEST(Platform, CreateMultDevice) {
-  int n = Platform::GetNumGPUs();
-  auto devs = Platform::CreateCudaGPUs(n);
-  for (auto dev : devs) {
-    auto b = dev->NewBlock(32);
-    EXPECT_LE(32u, dev->GetAllocatedMem());
-    dev->FreeBlock(b);
+    //    allocated = dev->GetAllocatedMem();
+    //    EXPECT_EQ(0, allocated);
   }
 }
 
 TEST(Platform, CreatTensor) {
   auto cuda = Platform::CreateCudaGPUs(1)[0];
-  singa::Tensor t(singa::Shape{2,3,4}, cuda);
+  singa::Tensor t(singa::Shape{2, 3, 4}, cuda);
   t.SetValue(2.1f);
   t.ToHost();
   auto tPtr = t.data<float>();
-  for (size_t i = 0; i < t.Size(); i++)
-    EXPECT_FLOAT_EQ(tPtr[i], 2.1f);
+  for (size_t i = 0; i < t.Size(); i++) EXPECT_FLOAT_EQ(tPtr[i], 2.1f);
   t.ToDevice(cuda);
   t = t * 3.0f;
   t.ToHost();
   tPtr = t.data<float>();
-  for (size_t i = 0; i < t.Size(); i++)
-    EXPECT_FLOAT_EQ(tPtr[i], 2.1f * 3.0f);
+  for (size_t i = 0; i < t.Size(); i++) EXPECT_FLOAT_EQ(tPtr[i], 2.1f * 3.0f);
 }
 #endif
-
diff --git a/test/singa/test_pooling.cc b/test/singa/test_pooling.cc
index 7ba56d1..cf63c54 100644
--- a/test/singa/test_pooling.cc
+++ b/test/singa/test_pooling.cc
@@ -1,25 +1,24 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 #include "../src/model/layer/pooling.h"
-
 #include "gtest/gtest.h"
 
 using singa::Pooling;
diff --git a/test/singa/test_prelu.cc b/test/singa/test_prelu.cc
index 77b4b74..fc49cdf 100644
--- a/test/singa/test_prelu.cc
+++ b/test/singa/test_prelu.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/prelu.h"
 #include "gtest/gtest.h"
@@ -142,7 +142,7 @@
 #ifdef USE_CUDA
 TEST(PReLU, ForwardGPU) {
   const float x[] = {1.f,  2.f, 3.f,  -2.f, -3.f, -1.f,
-                         -1.f, 2.f, -1.f, -2.f, -2.f, -1.f};
+                     -1.f, 2.f, -1.f, -2.f, -2.f, -1.f};
   size_t n = sizeof(x) / sizeof(float);
   size_t batchsize = 2, c = 3, h = 2, w = 1;
   auto cuda = std::make_shared<singa::CudaGPU>();
@@ -185,7 +185,7 @@
 
 TEST(PReLU, BackwardGPU) {
   const float x[] = {1.f,  2.f, 3.f,  -2.f, -3.f, -1.f,
-                           -1.f, 2.f, -1.f, -2.f, -2.f, -1.f};
+                     -1.f, 2.f, -1.f, -2.f, -2.f, -1.f};
   size_t n = sizeof(x) / sizeof(float);
   size_t batchsize = 2, c = 3, h = 2, w = 1;
   auto cuda = std::make_shared<singa::CudaGPU>();
@@ -206,7 +206,7 @@
 
   singa::Tensor out = prelu.Forward(singa::kTrain, in);
   const float grad[] = {1.f, 2.f,  -2.f, -1.f, -1.f, -3.f,
-                          2.f, -2.f, 1.f,  1.f,  -2.f, 0.f};
+                        2.f, -2.f, 1.f,  1.f,  -2.f, 0.f};
   singa::Tensor out_diff(singa::Shape{batchsize, c, h, w}, cuda);
   out_diff.CopyDataFromHostPtr<float>(grad, n);
   const auto ret = prelu.Backward(singa::kTrain, out_diff);
@@ -225,7 +225,7 @@
     for (size_t i = 0; i < n; i++) {
       size_t pos = i / (h * w) % c / div_factor;
       dx[i] = grad[i] *
-                (std::max(x[i], 0.f) + neg_slope[pos] * std::min(x[i], 0.f));
+              (std::max(x[i], 0.f) + neg_slope[pos] * std::min(x[i], 0.f));
     }
     for (size_t i = 0; i < n; i++) {
       size_t pos = i / (h * w) % c / div_factor;
@@ -235,8 +235,8 @@
     for (size_t i = 0; i < n; i++) {
       size_t pos = i % c / div_factor;
       dx[i] = grad[i] *
-        (std::max(x[i], 0.f) + neg_slope[pos] * std::min(x[i], 0.f));
-  }
+              (std::max(x[i], 0.f) + neg_slope[pos] * std::min(x[i], 0.f));
+    }
     for (size_t i = 0; i < n; i++) {
       size_t pos = i % c / div_factor;
       da[pos] += grad[i] * std::min(x[i], 0.f);
diff --git a/test/singa/test_rmsprop.cc b/test/singa/test_rmsprop.cc
index f398355..a26bcde 100644
--- a/test/singa/test_rmsprop.cc
+++ b/test/singa/test_rmsprop.cc
@@ -1,27 +1,28 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <cmath>
 
 #include "gtest/gtest.h"
 #include "singa/model/optimizer.h"
-#include <cmath>
 
 TEST(RMSProp, ApplyCPU) {
   singa::RMSProp rmsprop;
diff --git a/test/singa/test_scheduler.cc b/test/singa/test_scheduler.cc
new file mode 100644
index 0000000..c94f8f7
--- /dev/null
+++ b/test/singa/test_scheduler.cc
@@ -0,0 +1,760 @@
+/************************************************************
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <sstream>
+#include <utility>
+
+#include "gtest/gtest.h"
+#include "singa/core/device.h"
+#include "singa/core/scheduler.h"
+#include "singa/core/tensor.h"
+#include "singa/singa_config.h"
+
+using IntVec = std::vector<int>;
+using singa::Blk2InfoMap;
+using singa::BlkInfo;
+using singa::Block;
+using singa::BlockSet;
+using singa::BlockType;
+using singa::BlockVec;
+using singa::Context;
+using singa::Device;
+using singa::Edge;
+using singa::EdgeVec;
+using singa::Graph;
+using singa::Node;
+using singa::NodeVec;
+using singa::Shape;
+using singa::Tensor;
+
+namespace testing {
+namespace internal {
+enum GTestColor { COLOR_DEFAULT, COLOR_RED, COLOR_GREEN, COLOR_YELLOW };
+extern void ColoredPrintf(GTestColor color, const char *fmt, ...);
+}  // namespace internal
+}  // namespace testing
+
+class Gout : public std::stringstream {
+ public:
+  ~Gout() {
+    testing::internal::ColoredPrintf(testing::internal::COLOR_GREEN,
+                                     "[          ] ");
+    // pass the message through "%s" so the buffered text is not interpreted
+    // as a printf format string
+    testing::internal::ColoredPrintf(testing::internal::COLOR_YELLOW, "%s",
+                                     str().c_str());
+  }
+};
+
+#define GOUT Gout()
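+// GOUT creates a temporary Gout; the buffered message is printed in gtest's
+// log style when the temporary is destroyed at the end of the statement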
+
+#define CheckNode(node, id_, in_edges_, out_edges_)         \
+  do {                                                      \
+    EXPECT_EQ(id_, node->id());                             \
+    EXPECT_EQ(in_edges_.size(), node->in_edges().size());   \
+    EXPECT_EQ(out_edges_.size(), node->out_edges().size()); \
+    for (size_t i = 0; i < in_edges_.size(); ++i) {         \
+      EXPECT_EQ(in_edges_[i], node->in_edges()[i])          \
+          << "in_edges is wrong at index [" << i << "]";    \
+    }                                                       \
+    for (size_t i = 0; i < out_edges_.size(); ++i) {        \
+      EXPECT_EQ(out_edges_[i], node->out_edges()[i])        \
+          << "out_edges is wrong at index [" << i << "]";   \
+    }                                                       \
+  } while (false)
+
+#define CheckEdge(edge, id_, block_, src_node_, dst_node_) \
+  do {                                                     \
+    EXPECT_EQ(id_, edge->id());                            \
+    EXPECT_EQ(block_, edge->block());                      \
+    EXPECT_EQ(src_node_, edge->src_node());                \
+    EXPECT_EQ(dst_node_, edge->dst_node());                \
+  } while (false)
+
+#define CheckBlock(blkInfo, id_, blk_, type_, ref_, write_edge_, used_nodes_) \
+  do {                                                                        \
+    EXPECT_EQ(id_, blkInfo->id());                                            \
+    EXPECT_EQ(blk_, blkInfo->block());                                        \
+    EXPECT_EQ(type_, blkInfo->type());                                        \
+    EXPECT_EQ(ref_, blkInfo->graph_ref());                                    \
+    EXPECT_EQ(write_edge_, blkInfo->write_edge());                            \
+    EXPECT_EQ(used_nodes_, blkInfo->used_nodes());                            \
+    for (size_t i = 0; i < used_nodes_.size(); ++i) {                         \
+      EXPECT_EQ(used_nodes_[i], blkInfo->used_node(i))                        \
+          << "used_nodes is different at index [" << i << "]";                \
+    }                                                                         \
+  } while (false)
+
+#define CheckLeafBlocks(leaf_blocks, correct_leaf_blocks)                   \
+  do {                                                                      \
+    EXPECT_EQ(correct_leaf_blocks.size(), leaf_blocks.size());              \
+    for (auto it : leaf_blocks) {                                           \
+      auto iter = correct_leaf_blocks.find(it);                             \
+      EXPECT_NE(iter, correct_leaf_blocks.end()) << "leaf blocks mismatch"; \
+    }                                                                       \
+  } while (false)
+
+#define CheckFreeBlocks(node_id, blocks, free_blocks, correct_free_blocks)   \
+  do {                                                                       \
+    EXPECT_EQ(correct_free_blocks.size(), free_blocks.size());               \
+    for (size_t i = 0; i < correct_free_blocks.size(); ++i) {                \
+      bool flag = false;                                                     \
+      for (size_t j = 0; j < free_blocks.size(); ++j) {                      \
+        if (blocks.find(free_blocks[j])->second->id() ==                     \
+            correct_free_blocks[i]) {                                        \
+          flag = true;                                                       \
+          break;                                                             \
+        }                                                                    \
+      }                                                                      \
+      EXPECT_TRUE(flag) << "block [" << correct_free_blocks[i]               \
+                        << "] is not recycled properly at node " << node_id; \
+    }                                                                        \
+  } while (false)
+
+class TestGraph : public testing::Test {
+ protected:
+  virtual void SetUp();
+  virtual void TearDown();
+
+ protected:
+  std::vector<std::pair<std::string, std::shared_ptr<Device> > > devices;
+};
+
+void TestGraph::SetUp() {
+  auto cpp_cpu = singa::Platform::GetDefaultDevice();
+  devices.push_back(std::make_pair("cpp_cpu", cpp_cpu));
+
+#ifdef USE_CUDA
+  auto cuda_gpu = std::make_shared<singa::CudaGPU>();
+  devices.push_back(std::make_pair("cuda_gpu", cuda_gpu));
+#endif
+}
+
+void TestGraph::TearDown() { devices.clear(); }
+
+TEST_F(TestGraph, AddOp) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {out.block()});
+
+    EXPECT_EQ(1u, nodes.size());
+    EXPECT_EQ(2u, edges.size());
+    EXPECT_EQ(2u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto node = nodes[0];
+    auto edge1 = edges[0];
+    auto edge2 = edges[1];
+    auto block1 = blocks.find(in.block())->second;
+    auto block2 = blocks.find(out.block())->second;
+
+    CheckNode(node, 0, EdgeVec({edge1}), EdgeVec({edge2}));
+    CheckEdge(edge1, 0, in.block(), nullptr, node);
+    CheckEdge(edge2, 1, out.block(), node, nullptr);
+    CheckBlock(block1, 0, in.block(), BlockType::kInput, 1, nullptr,
+               NodeVec({}));
+    CheckBlock(block2, 1, out.block(), BlockType::kEnd, 1, edge2, NodeVec({}));
+    CheckLeafBlocks(leaf_blocks, BlockSet({out.block()}));
+    EXPECT_TRUE(graph.dirty());
+  }
+}
+
+TEST_F(TestGraph, AddSyncOp) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {out.block()});
+    graph.AddOperation(op, {}, {});
+
+    EXPECT_EQ(2u, nodes.size());
+    EXPECT_EQ(3u, edges.size());
+    EXPECT_EQ(2u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto node1 = nodes[0];
+    auto node2 = nodes[1];
+    auto edge1 = edges[0];
+    auto edge2 = edges[1];
+    auto edge3 = edges[2];
+    auto block1 = blocks.find(in.block())->second;
+    auto block2 = blocks.find(out.block())->second;
+
+    CheckNode(node1, 0, EdgeVec({edge1}), EdgeVec({edge2}));
+    CheckNode(node2, 1, EdgeVec({edge2}), EdgeVec({edge3}));
+    CheckEdge(edge1, 0, in.block(), nullptr, node1);
+    CheckEdge(edge2, 1, out.block(), node1, node2);
+    CheckEdge(edge3, 2, out.block(), node2, nullptr);
+    CheckBlock(block1, 0, in.block(), BlockType::kInput, 1, nullptr,
+               NodeVec({}));
+    CheckBlock(block2, 1, out.block(), BlockType::kInter, 1, edge3,
+               NodeVec({}));
+    CheckLeafBlocks(leaf_blocks, BlockSet({out.block()}));
+    EXPECT_TRUE(graph.dirty());
+  }
+}
+
+TEST_F(TestGraph, AddInplaceOp) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {in.block()});
+
+    EXPECT_EQ(1u, nodes.size());
+    EXPECT_EQ(2u, edges.size());
+    EXPECT_EQ(1u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto node1 = nodes[0];
+    auto edge1 = edges[0];
+    auto edge2 = edges[1];
+    auto block1 = blocks.find(in.block())->second;
+
+    CheckNode(node1, 0, EdgeVec({edge1}), EdgeVec({edge2}));
+    CheckEdge(edge1, 0, in.block(), nullptr, node1);
+    CheckEdge(edge2, 1, in.block(), node1, nullptr);
+    CheckBlock(block1, 0, in.block(), BlockType::kParam, 2, edge2, NodeVec({}));
+    CheckLeafBlocks(leaf_blocks, BlockSet{in.block()});
+    EXPECT_TRUE(graph.dirty());
+
+    graph.AddOperation(op, {in.block(), out.block()}, {out.block()});
+
+    EXPECT_EQ(2u, nodes.size());
+    EXPECT_EQ(4u, edges.size());
+    EXPECT_EQ(2u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto node2 = nodes[1];
+    auto edge3 = edges[2];
+    auto edge4 = edges[3];
+    auto block2 = blocks.find(out.block())->second;
+
+    CheckNode(node1, 0, EdgeVec({edge1}), EdgeVec({edge2}));
+    CheckNode(node2, 1, EdgeVec({edge2, edge3}), EdgeVec({edge4}));
+    CheckEdge(edge2, 1, in.block(), node1, node2);
+    CheckEdge(edge3, 2, out.block(), nullptr, node2);
+    CheckEdge(edge4, 3, out.block(), node2, nullptr);
+    CheckBlock(block1, 0, in.block(), BlockType::kParam, 3, edge2, NodeVec({}));
+    CheckBlock(block2, 1, out.block(), BlockType::kParam, 2, edge4,
+               NodeVec({}));
+    CheckLeafBlocks(leaf_blocks, BlockSet({out.block()}));
+    EXPECT_TRUE(graph.dirty());
+  }
+}
+
+TEST_F(TestGraph, BlockTypeInput) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {out.block()});
+
+    EXPECT_EQ(1u, nodes.size());
+    EXPECT_EQ(2u, edges.size());
+    EXPECT_EQ(2u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto block1 = blocks.find(in.block())->second;
+
+    CheckBlock(block1, 0, in.block(), BlockType::kInput, 1, nullptr,
+               NodeVec({}));
+  }
+}
+
+TEST_F(TestGraph, BlockTypeParam) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor mid(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {in.block()});
+    graph.AddOperation(op, {in.block(), mid.block()}, {out.block()});
+    graph.AddOperation(op, {out.block()}, {mid.block()});
+
+    EXPECT_EQ(3u, nodes.size());
+    EXPECT_EQ(5u, edges.size());
+    EXPECT_EQ(3u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto edge2 = edges[1];
+    auto edge5 = edges[4];
+    auto block1 = blocks.find(in.block())->second;
+    auto block2 = blocks.find(mid.block())->second;
+
+    CheckBlock(block1, 0, in.block(), BlockType::kParam, 3, edge2, NodeVec({}));
+    CheckBlock(block2, 1, mid.block(), BlockType::kParam, 2, edge5,
+               NodeVec({}));
+  }
+}
+
+TEST_F(TestGraph, BlockTypeInter) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor mid(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {mid.block(), out.block()});
+    graph.AddOperation(op, {mid.block()}, {});
+    graph.AddOperation(op, {out.block()}, {out.block()});
+
+    EXPECT_EQ(3u, nodes.size());
+    EXPECT_EQ(4u, edges.size());
+    EXPECT_EQ(3u, blocks.size());
+    EXPECT_EQ(1u, leaf_blocks.size());
+
+    auto edge2 = edges[1];
+    auto edge4 = edges[3];
+    auto block2 = blocks.find(mid.block())->second;
+    auto block3 = blocks.find(out.block())->second;
+
+    CheckBlock(block2, 1, mid.block(), BlockType::kInter, 2, edge2,
+               NodeVec({}));
+    CheckBlock(block3, 2, out.block(), BlockType::kInter, 3, edge4,
+               NodeVec({}));
+  }
+}
+
+TEST_F(TestGraph, BlockTypeEnd) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor out1(Shape{1}, dev);
+    Tensor out2(Shape{1}, dev);
+    auto op = [](Context *ctx) mutable {};
+
+    graph.AddOperation(op, {in.block()}, {out1.block()});
+    graph.AddOperation(op, {}, {out2.block()});
+
+    EXPECT_EQ(2u, nodes.size());
+    EXPECT_EQ(3u, edges.size());
+    EXPECT_EQ(3u, blocks.size());
+    EXPECT_EQ(2u, leaf_blocks.size());
+
+    auto edge2 = edges[1];
+    auto edge3 = edges[2];
+    auto block2 = blocks.find(out1.block())->second;
+    auto block3 = blocks.find(out2.block())->second;
+
+    CheckBlock(block2, 1, out1.block(), BlockType::kEnd, 1, edge2, NodeVec({}));
+    CheckBlock(block3, 2, out2.block(), BlockType::kEnd, 1, edge3, NodeVec({}));
+  }
+}
+
+TEST_F(TestGraph, RunGraph) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor mid(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    Tensor b1(Shape{1}, dev);
+    Tensor b2(Shape{1}, dev);
+    Tensor dx(Shape{1}, dev);
+    Tensor dx1(Shape{1}, dev);
+    Tensor dx2(Shape{1}, dev);
+    Tensor dy1(Shape{1}, dev);
+    Tensor dy2(Shape{1}, dev);
+    Tensor db1(Shape{1}, dev);
+    Tensor db2(Shape{1}, dev);
+
+    // function: (in + b1) * in + b2
+    auto op1 = [in, b1, mid](Context *ctx) mutable {
+      singa::Add(in, b1, &mid);
+    };
+    auto op2 = [mid, in, out](Context *ctx) mutable {
+      singa::EltwiseMult(mid, in, &out);
+    };
+    auto op3 = [out, b2](Context *ctx) mutable { singa::Add(out, b2, &out); };
+    auto op4 = [out, dy1, db2](Context *ctx) mutable {
+      dy1.CopyData(out);
+      db2.CopyData(out);
+    };
+    auto op5 = [in, mid, dy1, dy2, dx1](Context *ctx) mutable {
+      singa::EltwiseMult(dy1, in, &dy2);
+      singa::EltwiseMult(dy1, mid, &dx1);
+    };
+    auto op6 = [dy2, dx2, db1](Context *ctx) mutable {
+      dx2.CopyData(dy2);
+      db1.CopyData(dy2);
+    };
+    auto op7 = [dx1, dx2, dx](Context *ctx) mutable {
+      singa::Add(dx1, dx2, &dx);
+    };
+
+    graph.AddOperation(op1, {in.block(), b1.block()}, {mid.block()});
+    graph.AddOperation(op2, {mid.block(), in.block()}, {out.block()});
+    graph.AddOperation(op3, {out.block(), b2.block()}, {out.block()});
+    graph.AddOperation(op4, {out.block()}, {dy1.block(), db2.block()});
+    graph.AddOperation(op5, {dy1.block()}, {dy2.block(), dx1.block()});
+    graph.AddOperation(op6, {dy2.block()}, {dx2.block(), db1.block()});
+    graph.AddOperation(op7, {dx1.block(), dx2.block()}, {dx.block()});
+
+    EXPECT_EQ(7u, nodes.size());
+    EXPECT_EQ(14u, edges.size());
+    EXPECT_EQ(12u, blocks.size());
+    EXPECT_EQ(3u, leaf_blocks.size());
+
+    in.SetValue(0);
+    b1.SetValue(-1);
+    b2.SetValue(2);
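+    // hand-computed expectation: with in = 0, b1 = -1, b2 = 2 we get
+    // mid = -1 and out = (-1) * 0 + 2 = 2, so dy1 = db2 = 2,
+    // dy2 = dy1 * in = 0 (hence dx2 = db1 = 0), and dx = dx1 = dy1 * mid = -2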
+    graph.RunGraph();
+
+    float dx_, db1_, db2_;
+    dx.ToHost().get_value(&dx_, 1);
+    db1.ToHost().get_value(&db1_, 1);
+    db2.ToHost().get_value(&db2_, 1);
+
+    EXPECT_EQ(-2, dx_);
+    EXPECT_EQ(0, db1_);
+    EXPECT_EQ(2, db2_);
+  }
+}
+
+TEST_F(TestGraph, MultipleIndependentOps) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+
+    Tensor workspace(Shape{1}, dev);
+    Tensor b1(Shape{1}, dev);
+    Tensor b2(Shape{1}, dev);
+    Tensor b3(Shape{1}, dev);
+    Tensor b4(Shape{1}, dev);
+
+    // emulate cleaning up the workspace, using the rnn design as a reference
+    auto clean1 = [workspace](Context *ctx) mutable {};
+    auto clean2 = [workspace](Context *ctx) mutable {};
+    auto clean3 = [workspace](Context *ctx) mutable {};
+    auto clean4 = [workspace](Context *ctx) mutable {};
+
+    // emulate uses of the workspace, following the rnn design
+    auto op1 = [workspace, b1](Context *ctx) mutable {};
+    auto op2 = [workspace, b2](Context *ctx) mutable {};
+    auto op3 = [workspace, b3](Context *ctx) mutable {};
+    auto op4 = [workspace, b4](Context *ctx) mutable {};
+
+    graph.AddOperation(clean1, {}, {workspace.block()});
+    graph.AddOperation(op1, {b1.block()}, {workspace.block(), b1.block()});
+    graph.AddOperation(clean2, {}, {workspace.block()});
+    graph.AddOperation(op2, {b2.block()}, {workspace.block(), b2.block()});
+    graph.AddOperation(clean3, {}, {workspace.block()});
+    graph.AddOperation(op3, {b3.block()}, {workspace.block(), b3.block()});
+    graph.AddOperation(clean4, {}, {workspace.block()});
+    graph.AddOperation(op4, {b4.block()}, {workspace.block(), b4.block()});
+
+    EXPECT_EQ(8u, nodes.size());
+    graph.RunGraph();
+  }
+}
+
+TEST_F(TestGraph, RunInSerial) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    Tensor in(Shape{1}, dev);
+    Tensor mid(Shape{1}, dev);
+    Tensor out(Shape{1}, dev);
+    Tensor b1(Shape{1}, dev);
+    Tensor b2(Shape{1}, dev);
+    Tensor dx(Shape{1}, dev);
+    Tensor dx1(Shape{1}, dev);
+    Tensor dx2(Shape{1}, dev);
+    Tensor dy1(Shape{1}, dev);
+    Tensor dy2(Shape{1}, dev);
+    Tensor db1(Shape{1}, dev);
+    Tensor db2(Shape{1}, dev);
+
+    auto op1 = [in, b1, mid](Context *ctx) mutable {
+      singa::Add(in, b1, &mid);
+    };
+    auto op2 = [mid, in, out](Context *ctx) mutable {
+      singa::EltwiseMult(mid, in, &out);
+    };
+    auto op3 = [out, b2](Context *ctx) mutable { singa::Add(out, b2, &out); };
+    auto op4 = [out, dy1, db2](Context *ctx) mutable {
+      dy1.CopyData(out);
+      db2.CopyData(out);
+    };
+    auto op5 = [in, mid, dy1, dy2, dx1](Context *ctx) mutable {
+      singa::EltwiseMult(dy1, in, &dy2);
+      singa::EltwiseMult(dy1, mid, &dx1);
+    };
+    auto op6 = [dy2, dx2, db1](Context *ctx) mutable {
+      dx2.CopyData(dy2);
+      db1.CopyData(dy2);
+    };
+    auto op7 = [dx1, dx2, dx](Context *ctx) mutable {
+      singa::Add(dx1, dx2, &dx);
+    };
+
+    graph.AddOperation(op1, {in.block(), b1.block()}, {mid.block()});
+    graph.AddOperation(op2, {mid.block(), in.block()}, {out.block()});
+    graph.AddOperation(op3, {out.block(), b2.block()}, {out.block()});
+    graph.AddOperation(op4, {out.block()}, {dy1.block(), db2.block()});
+    graph.AddOperation(op5, {dy1.block()}, {dy2.block(), dx1.block()});
+    graph.AddOperation(op6, {dy2.block()}, {dx2.block(), db1.block()});
+    graph.AddOperation(op7, {dx1.block(), dx2.block()}, {dx.block()});
+
+    EXPECT_EQ(7u, nodes.size());
+    EXPECT_EQ(14u, edges.size());
+    EXPECT_EQ(12u, blocks.size());
+    EXPECT_EQ(3u, leaf_blocks.size());
+
+    in.SetValue(0);
+    b1.SetValue(-1);
+    b2.SetValue(2);
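+    // same graph and inputs as the RunGraph test above, so the same
+    // hand-computed gradients (dx = -2, db1 = 0, db2 = 2) are expected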
+    graph.RunInSerial();
+
+    float dx_, db1_, db2_;
+    dx.ToHost().get_value(&dx_, 1);
+    db1.ToHost().get_value(&db1_, 1);
+    db2.ToHost().get_value(&db2_, 1);
+
+    EXPECT_EQ(-2, dx_);
+    EXPECT_EQ(0, db1_);
+    EXPECT_EQ(2, db2_);
+  }
+}
+
+TEST_F(TestGraph, AutoRecycle) {
+  for (auto &it : devices) {
+    GOUT << "Test graph on device [" << it.first << "]" << std::endl;
+
+    auto dev = it.second;
+    Graph graph(dev.get());
+
+    auto &nodes = graph.nodes();
+    auto &edges = graph.edges();
+    auto &blocks = graph.blocks();
+    auto &leaf_blocks = graph.leaf_blocks();
+
+    {
+      Tensor in(Shape{1}, dev);
+      Tensor mid1(Shape{1}, dev);
+      Tensor mid2(Shape{1}, dev);
+      Tensor out(Shape{1}, dev);
+      Tensor b1(Shape{1}, dev);
+      Tensor b2(Shape{1}, dev);
+      Tensor dx(Shape{1}, dev);
+      Tensor dx1(Shape{1}, dev);
+      Tensor dx2(Shape{1}, dev);
+      Tensor dx3(Shape{1}, dev);
+      Tensor dy1(Shape{1}, dev);
+      Tensor dy2(Shape{1}, dev);
+      Tensor dy3(Shape{1}, dev);
+      Tensor db1(Shape{1}, dev);
+      Tensor db2(Shape{1}, dev);
+
+      // function: (in + b1) * in + (in + b2)
+      auto op1 = [in, b1, mid1](Context *ctx) mutable {
+        singa::Add(in, b1, &mid1);
+      };
+      auto op2 = [mid1, in, out](Context *ctx) mutable {
+        singa::EltwiseMult(mid1, in, &out);
+      };
+      auto op3 = [in, b2, mid2](Context *ctx) mutable {
+        singa::Add(in, b2, &mid2);
+      };
+      auto op4 = [out, mid2](Context *ctx) mutable {
+        singa::Add(out, mid2, &out);
+      };
+      auto op5 = [out, dy1, dy2](Context *ctx) mutable {
+        dy1.CopyData(out);
+        dy2.CopyData(out);
+      };
+      auto op6 = [in, mid1, dy1, dy3, dx1](Context *ctx) mutable {
+        singa::EltwiseMult(dy1, in, &dy3);
+        singa::EltwiseMult(dy1, mid1, &dx1);
+      };
+      auto op7 = [dy3, dx2, db1](Context *ctx) mutable {
+        dx2.CopyData(dy3);
+        db1.CopyData(dy3);
+      };
+      auto op8 = [dy2, dx3, db2](Context *ctx) mutable {
+        dx3.CopyData(dy2);
+        db2.CopyData(dy2);
+      };
+      auto op9 = [dx1, dx2, dx](Context *ctx) mutable {
+        singa::Add(dx1, dx2, &dx);
+      };
+      auto op10 = [dx, dx3](Context *ctx) mutable { singa::Add(dx, dx3, &dx); };
+
+      graph.AddOperation(op1, {in.block(), b1.block()}, {mid1.block()});
+      graph.AddOperation(op2, {mid1.block(), in.block()}, {out.block()});
+      graph.AddOperation(op3, {in.block(), b2.block()}, {mid2.block()});
+      graph.AddOperation(op4, {out.block(), mid2.block()}, {out.block()});
+      graph.AddOperation(op5, {out.block()}, {dy1.block(), dy2.block()});
+      graph.AddOperation(op6, {in.block(), mid1.block(), dy1.block()},
+                         {dy3.block(), dx1.block()});
+      graph.AddOperation(op7, {dy3.block()}, {dx2.block(), db1.block()});
+      graph.AddOperation(op8, {dy2.block()}, {dx3.block(), db2.block()});
+      graph.AddOperation(op9, {dx1.block(), dx2.block()}, {dx.block()});
+      graph.AddOperation(op10, {dx.block(), dx3.block()}, {dx.block()});
+
+      in.SetValue(0);
+      b1.SetValue(-1);
+      b2.SetValue(2);
+    }
+
+    EXPECT_EQ(10u, nodes.size());
+    EXPECT_EQ(21u, edges.size());
+    EXPECT_EQ(15u, blocks.size());
+    EXPECT_EQ(3u, leaf_blocks.size());
+
+    graph.RunGraph();
+
+    auto &begin_nodes = graph.begin_nodes();
+    auto &next_nodes = graph.next_nodes();
+    auto &free_blocks = graph.free_blocks();
+
+    EXPECT_FALSE(graph.dirty());
+    EXPECT_EQ(nodes[0], begin_nodes[0]);
+    EXPECT_EQ(nodes[2], begin_nodes[1]);
+    EXPECT_EQ(nodes[1], next_nodes[0][0]);
+    EXPECT_EQ(nodes[3], next_nodes[1][0]);
+    EXPECT_EQ(nodes[4], next_nodes[3][0]);
+    EXPECT_EQ(nodes[5], next_nodes[4][0]);
+    EXPECT_EQ(nodes[7], next_nodes[4][1]);
+    EXPECT_EQ(nodes[6], next_nodes[5][0]);
+    EXPECT_EQ(nodes[8], next_nodes[6][0]);
+    EXPECT_EQ(nodes[9], next_nodes[8][0]);
+
+    CheckFreeBlocks(0, blocks, free_blocks[0], IntVec({}));
+    CheckFreeBlocks(1, blocks, free_blocks[1], IntVec({}));
+    CheckFreeBlocks(2, blocks, free_blocks[2], IntVec({}));
+    CheckFreeBlocks(3, blocks, free_blocks[3], IntVec({5}));
+    CheckFreeBlocks(4, blocks, free_blocks[4], IntVec({3}));
+    CheckFreeBlocks(5, blocks, free_blocks[5], IntVec({2, 6}));
+    CheckFreeBlocks(6, blocks, free_blocks[6], IntVec({8, 11}));
+    CheckFreeBlocks(7, blocks, free_blocks[7], IntVec({7, 13}));
+    CheckFreeBlocks(8, blocks, free_blocks[8], IntVec({9, 10}));
+    CheckFreeBlocks(9, blocks, free_blocks[9], IntVec({12, 14}));
+
+    // block ids: in=0, b1=1, mid1=2, out=3, b2=4,
+    //            mid2=5, dy1=6, dy2=7, dy3=8, dx1=9,
+    //            dx2=10, db1=11, dx3=12, db2=13, dx=14
+    bool state[15] = {true,  true,  false, false, true,  false, false, false,
+                      false, false, false, false, false, false, false};
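+
+    // expectation: only the input and parameter blocks (in, b1, b2) keep
+    // their memory after the run; every intermediate block is recycled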
+
+    for (auto it : blocks) {
+      int id = it.second->id();
+      EXPECT_EQ(state[id], it.first->initialized())
+          << "The memory of the block[" << id << "] is not properly recycled"
+          << std::endl;
+    }
+  }
+}
diff --git a/test/singa/test_sgd.cc b/test/singa/test_sgd.cc
index c25dfbd..2e8629f 100644
--- a/test/singa/test_sgd.cc
+++ b/test/singa/test_sgd.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "gtest/gtest.h"
 #include "singa/model/optimizer.h"
@@ -41,7 +41,6 @@
     EXPECT_FLOAT_EQ(newv1[i], v[i] - g[i] * lr);
   }
 
-
   lr /= 2;
   grad.CopyDataFromHostPtr(g, 4);
   sgd.Apply(1, lr, "xx", grad, value);
@@ -52,11 +51,10 @@
   }
 }
 
-
 TEST(SGD, ApplyWithMomentum) {
   singa::SGD sgd;
   float lr = 0.1f;
-  auto func = [](int step) { return step <=5 ? 0.5f: 0.9f;};
+  auto func = [](int step) { return step <= 5 ? 0.5f : 0.9f; };
   sgd.SetMomentumGenerator(func);
   const float v[4] = {0.1f, 0.2f, 0.3f, 0.4f};
   const float g[4] = {0.01f, 0.02f, 0.03f, 0.04f};
@@ -88,7 +86,7 @@
   const float v[4] = {0.1, 0.2, 0.3, 0.4};
   const float g[4] = {0.1, 0.1, 0.1, 0.1};
 
-	auto dev = std::make_shared<singa::CudaGPU>();
+  auto dev = std::make_shared<singa::CudaGPU>();
   singa::Tensor value(singa::Shape{4}, dev), grad(singa::Shape{4}, dev);
   value.CopyDataFromHostPtr(v, 4);
   grad.CopyDataFromHostPtr(g, 4);
@@ -103,7 +101,6 @@
     EXPECT_FLOAT_EQ(newv1[i], v[i] - g[i] * lr);
   }
 
-
   lr /= 2;
   grad.CopyDataFromHostPtr(g, 4);
   sgd.Apply(1, lr, "xx", grad, value);
@@ -115,16 +112,15 @@
   }
 }
 
-
 TEST(SGD, ApplyWithMomentumCuda) {
   singa::SGD sgd;
   float lr = 0.1f;
-  auto func = [](int step) { return step <=5 ? 0.5f: 0.9f;};
+  auto func = [](int step) { return step <= 5 ? 0.5f : 0.9f; };
   sgd.SetMomentumGenerator(func);
   const float v[4] = {0.1, 0.2, 0.3, 0.4};
   const float g[4] = {0.01, 0.02, 0.03, 0.04};
 
-	auto dev = std::make_shared<singa::CudaGPU>();
+  auto dev = std::make_shared<singa::CudaGPU>();
   singa::Tensor value(singa::Shape{4}, dev), grad(singa::Shape{4}, dev);
   value.CopyDataFromHostPtr(v, 4);
   grad.CopyDataFromHostPtr(g, 4);
diff --git a/test/singa/test_slice.cc b/test/singa/test_slice.cc
index f6b8997..974bf5a 100644
--- a/test/singa/test_slice.cc
+++ b/test/singa/test_slice.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../src/model/layer/slice.h"
 #include "gtest/gtest.h"
@@ -57,15 +57,15 @@
   const float* tptr = t.data<float>();
 
   grads[0].ToHost();
-  const float * outa = grads[0].data<float>();
+  const float* outa = grads[0].data<float>();
   for (size_t i = 0; i < a; i++)
     for (size_t j = 0; j < c; j++)
       EXPECT_FLOAT_EQ(outa[i * c + j], tptr[i * c + j]);
   grads[1].ToHost();
-  const float * outb = grads[1].data<float>();
+  const float* outb = grads[1].data<float>();
   for (size_t i = 0; i < b; i++)
     for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outb[i  * c + j], tptr[(i + a) * c + j]);
+      EXPECT_FLOAT_EQ(outb[i * c + j], tptr[(i + a) * c + j]);
 }
 
 void ForwardSliceColumnTest(std::shared_ptr<singa::Device> dev) {
@@ -88,27 +88,23 @@
   const float* tptr = t.data<float>();
 
   out[0].ToHost();
-  const float * outa = out[0].data<float>();
+  const float* outa = out[0].data<float>();
   for (size_t i = 0; i < c; i++)
     for (size_t j = 0; j < a; j++)
       EXPECT_FLOAT_EQ(outa[i * a + j], tptr[i * (a + b) + j]);
   out[1].ToHost();
-  const float * outb = out[1].data<float>();
+  const float* outb = out[1].data<float>();
   for (size_t i = 0; i < c; i++)
     for (size_t j = 0; j < b; j++)
-      EXPECT_FLOAT_EQ(outb[i  * b + j], tptr[i * (a + b) + a + j]);
+      EXPECT_FLOAT_EQ(outb[i * b + j], tptr[i * (a + b) + a + j]);
 }
 
-
-TEST(Slice, ForwardSliceRowCpp) {
-  ForwardSliceRowTest(singa::defaultDevice);
-}
+TEST(Slice, ForwardSliceRowCpp) { ForwardSliceRowTest(singa::defaultDevice); }
 
 TEST(Slice, ForwardSliceColumn) {
   ForwardSliceColumnTest(singa::defaultDevice);
 }
 
-
 #ifdef USE_CUDA
 TEST(Slice, ForwardSliceRowCuda) {
   ForwardSliceRowTest(std::make_shared<singa::CudaGPU>());
@@ -119,8 +115,6 @@
 }
 #endif  // USE_CUDA
 
-
-
 void BackwardSliceRowTest(std::shared_ptr<singa::Device> dev) {
   size_t a = 2u, b = 1u, c = 3u;
   singa::LayerConf conf;
@@ -140,14 +134,12 @@
   auto grad = out.first[0];
 
   grad.ToHost();
-  const float * outptr = grad.data<float>();
+  const float* outptr = grad.data<float>();
   for (size_t i = 0; i < a; i++) {
-    for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outptr[i * c + j], 1.0f);
+    for (size_t j = 0; j < c; j++) EXPECT_FLOAT_EQ(outptr[i * c + j], 1.0f);
   }
   for (size_t i = a; i < a + b; i++) {
-    for (size_t j = 0; j < c; j++)
-      EXPECT_FLOAT_EQ(outptr[i  * c + j], 2.0f);
+    for (size_t j = 0; j < c; j++) EXPECT_FLOAT_EQ(outptr[i * c + j], 2.0f);
   }
 }
 
@@ -169,27 +161,23 @@
   auto out = layer.Backward(singa::kTrain, {t1, t2});
   auto grad = out.first[0];
   grad.ToHost();
-  const float * outptr = grad.data<float>();
+  const float* outptr = grad.data<float>();
   for (size_t i = 0; i < c; i++) {
     for (size_t j = 0; j < a; j++)
       EXPECT_FLOAT_EQ(outptr[i * (a + b) + j], 1.0f);
   }
   for (size_t i = 0; i < c; i++) {
     for (size_t j = a; j < a + b; j++)
-      EXPECT_FLOAT_EQ(outptr[i  * (a + b) + j], 2.0f);
+      EXPECT_FLOAT_EQ(outptr[i * (a + b) + j], 2.0f);
   }
 }
 
-
-TEST(Slice, BackwardSliceRowCpp) {
-  BackwardSliceRowTest(singa::defaultDevice);
-}
+TEST(Slice, BackwardSliceRowCpp) { BackwardSliceRowTest(singa::defaultDevice); }
 
 TEST(Slice, BackwardSliceColumn) {
   BackwardSliceColumnTest(singa::defaultDevice);
 }
 
-
 #ifdef USE_CUDA
 TEST(Slice, BackwardSliceRowCuda) {
   BackwardSliceRowTest(std::make_shared<singa::CudaGPU>());
diff --git a/test/singa/test_snapshot.cc b/test/singa/test_snapshot.cc
index 6fb5f03..ab1a69f 100644
--- a/test/singa/test_snapshot.cc
+++ b/test/singa/test_snapshot.cc
@@ -1,31 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <fstream>
+#include <string>
 
 #include "gtest/gtest.h"
-#include "singa/io/snapshot.h"
-#include "singa/io/reader.h"
 #include "singa/core/tensor.h"
-
-#include <string>
-#include <fstream>
+#include "singa/io/reader.h"
+#include "singa/io/snapshot.h"
 
 const std::string prefix = "./snapshot_test";
 const float param_1_data[] = {0.1f, 0.2f, 0.3f, 0.4f};
@@ -79,8 +79,8 @@
     singa::Snapshot int_snapshot_write(prefix + ".int",
                                        singa::Snapshot::kWrite);
     singa::Tensor int_param(singa::Shape{4});
-    int_param.AsType(singa::kInt);
     int_param.CopyDataFromHostPtr(int_data, 4);
+    int_param.AsType(singa::kInt);
     int_snapshot_write.Write("IntParam", int_param);
   }
 
diff --git a/test/singa/test_softmax.cc b/test/singa/test_softmax.cc
index 8064b80..662b85b 100644
--- a/test/singa/test_softmax.cc
+++ b/test/singa/test_softmax.cc
@@ -1,30 +1,31 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
+
+#include <math.h>  // exp
 
 #include "../src/model/layer/softmax.h"
 #include "gtest/gtest.h"
-#include <math.h> // exp
 
-using singa::Softmax;
 using singa::Shape;
+using singa::Softmax;
 TEST(Softmax, Setup) {
   Softmax sft;
   // EXPECT_EQ("Softmax", sft.layer_type());
@@ -51,11 +52,9 @@
   EXPECT_EQ(n, out.Size());
 
   float* sigma = new float[row];
-  for (size_t i = 0; i < row; i++)
-    sigma[i] = 0.f;
-  for (size_t i = 0; i < n; i++)
-    sigma[i / col] += exp(x[i]);
-  //EXPECT_EQ(0, sigma[1]);
+  for (size_t i = 0; i < row; i++) sigma[i] = 0.f;
+  for (size_t i = 0; i < n; i++) sigma[i / col] += exp(x[i]);
+  // EXPECT_EQ(0, sigma[1]);
   for (size_t i = 0; i < row; i++)
     for (size_t j = 0; j < col; j++) {
       EXPECT_FLOAT_EQ(yptr[i * col + j], exp(x[i * col + j]) / sigma[i]);
@@ -85,15 +84,13 @@
 
   float* dx = new float[n];
   float* sigma = new float[row];
-  for (size_t i = 0; i < row; i++)
-    sigma[i] = 0.f;
-  for (size_t i = 0; i < n; i++)
-    sigma[i / col] += grad[i] * yptr[i];
+  for (size_t i = 0; i < row; i++) sigma[i] = 0.f;
+  for (size_t i = 0; i < n; i++) sigma[i / col] += grad[i] * yptr[i];
   // EXPECT_EQ(0, sigma[0]);
   // EXPECT_EQ(0, sigma[1]);
   for (size_t i = 0; i < row; i++)
     for (size_t j = 0; j < col; j++)
-      dx[i * col + j] = (grad[i * col + j] - sigma[i]) * yptr[i * col +j];
+      dx[i * col + j] = (grad[i * col + j] - sigma[i]) * yptr[i * col + j];
   EXPECT_FLOAT_EQ(dx[0], xptr[0]);
   EXPECT_FLOAT_EQ(dx[4], xptr[4]);
   EXPECT_FLOAT_EQ(dx[5], xptr[5]);
diff --git a/test/singa/test_tensor.cc b/test/singa/test_tensor.cc
index 316b996..3954d62 100644
--- a/test/singa/test_tensor.cc
+++ b/test/singa/test_tensor.cc
@@ -18,12 +18,12 @@
 
 #include "gtest/gtest.h"
 #include "singa/core/tensor.h"
-using singa::Tensor;
-using singa::Shape;
 using singa::Device;
+using singa::Shape;
+using singa::Tensor;
 
-TEST(TensorTest, TestConstructor) {
-  singa::Tensor float_t(singa::Shape{2,3});
+TEST(TensorClass, Constructor) {
+  singa::Tensor float_t(singa::Shape{2, 3});
   EXPECT_EQ(6u, float_t.Size());
   EXPECT_EQ(sizeof(float) * 6, float_t.MemSize());
   EXPECT_EQ(singa::kFloat32, float_t.data_type());
@@ -33,7 +33,7 @@
 
   EXPECT_NE(float_t.device(), nullptr);
 
-  singa::Tensor float16_t(Shape{2,3}, singa::kFloat16);
+  singa::Tensor float16_t(Shape{2, 3}, singa::kFloat16);
   EXPECT_EQ(singa::kFloat16, float16_t.data_type());
   EXPECT_EQ(6u, float16_t.Size());
   EXPECT_EQ(12u, float16_t.block()->size());
@@ -53,30 +53,120 @@
 
 TEST(TensorClass, Reshape) {
   Tensor t;
-  t.Reshape(Shape{2,3});
-  EXPECT_TRUE((Shape{2,3} == t.shape()));
+  t.Resize(Shape{2, 3});
+  EXPECT_TRUE((Shape{2, 3} == t.shape()));
 
-  t.Reshape(Shape{3,3, 4});
-  EXPECT_TRUE((Shape{3,3, 4} == t.shape()));
+  t.Resize(Shape{3, 3, 4});
+  EXPECT_TRUE((Shape{3, 3, 4} == t.shape()));
 
-  t.Reshape(Shape{12});
+  t.Resize(Shape{12});
   EXPECT_TRUE((Shape{12} == t.shape()));
 
   Tensor o;
   EXPECT_TRUE(o.shape() != t.shape());
-  o.Reshape(Shape{3, 3});
+  o.Resize(Shape{3, 3});
   EXPECT_TRUE(o.shape() != t.shape());
 }
 
-TEST(TensorClass, AsType) {
-  Tensor t;
+#ifdef USE_CUDA
+
+TEST(TensorClass, FloatAsTypeIntCuda) {
+  auto cuda = std::make_shared<singa::CudaGPU>();
+
+  Tensor t(Shape{3}, cuda);
+  float data[] = {1.0f, 2.0f, 3.0f};
+  t.CopyDataFromHostPtr(data, 3);
   EXPECT_EQ(singa::kFloat32, t.data_type());
-  t.AsType(singa::kFloat16);
-  EXPECT_EQ(singa::kFloat16, t.data_type());
+
+  t = t.AsType(singa::kInt);
+
+  EXPECT_EQ(singa::kInt, t.data_type());
+
+  t.ToHost();
+  const int* dptr2 = static_cast<const int*>(t.block()->data());
+  EXPECT_EQ(1, dptr2[0]);
+  EXPECT_EQ(2, dptr2[1]);
+  EXPECT_EQ(3, dptr2[2]);
+}
+
+TEST(TensorClass, IntAsTypeFloatCuda) {
+  auto cuda = std::make_shared<singa::CudaGPU>();
+
+  Tensor t(Shape{3}, cuda, singa::kInt);
+  int data[] = {1, 2, 3};
+  t.CopyDataFromHostPtr(data, 3);
+  EXPECT_EQ(singa::kInt, t.data_type());
+
+  t = t.AsType(singa::kFloat32);
+
+  EXPECT_EQ(singa::kFloat32, t.data_type());
+
+  t.ToHost();
+  const float* dptr2 = static_cast<const float*>(t.block()->data());
+  EXPECT_EQ(1.0f, dptr2[0]);
+  EXPECT_EQ(2.0f, dptr2[1]);
+  EXPECT_EQ(3.0f, dptr2[2]);
+}
+
+#endif  // USE_CUDA
+
+TEST(TensorClass, FloatAsTypeFloatCPU) {
+  Tensor t(Shape{3});
+  float data[] = {1.0f, 2.0f, 3.0f};
+  t.CopyDataFromHostPtr(data, 3);
+  EXPECT_EQ(singa::kFloat32, t.data_type());
+  const float* dptr = static_cast<const float*>(t.block()->data());
+  EXPECT_FLOAT_EQ(1.0f, dptr[0]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[1]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[2]);
+
+  Tensor t2 = t.AsType(singa::kFloat32);
+
+  EXPECT_EQ(singa::kFloat32, t2.data_type());
+
+  const float* dptr2 = static_cast<const float*>(t2.block()->data());
+  EXPECT_EQ(1.0f, dptr2[0]);
+  EXPECT_EQ(2.0f, dptr2[1]);
+  EXPECT_EQ(3.0f, dptr2[2]);
+}
+
+TEST(TensorClass, FloatAsTypeIntCPU) {
+  Tensor t(Shape{3});
+  float data[] = {1.0f, 2.0f, 3.0f};
+  t.CopyDataFromHostPtr(data, 3);
+  EXPECT_EQ(singa::kFloat32, t.data_type());
+  const float* dptr = static_cast<const float*>(t.block()->data());
+  EXPECT_FLOAT_EQ(1.0f, dptr[0]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[1]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[2]);
+
+  Tensor t2 = t.AsType(singa::kInt);
+
+  EXPECT_EQ(singa::kInt, t2.data_type());
+  const int* dptr2 = static_cast<const int*>(t2.block()->data());
+  EXPECT_EQ(1, dptr2[0]);
+  EXPECT_EQ(2, dptr2[1]);
+  EXPECT_EQ(3, dptr2[2]);
+}
+
+TEST(TensorClass, IntAsTypeFloatCPU) {
+  Tensor t(Shape{3}, singa::kInt);
+  int data[] = {1, 2, 3};
+  t.CopyDataFromHostPtr(data, 3);
+  EXPECT_EQ(singa::kInt, t.data_type());
+
+  auto t2 = t.AsType(singa::kFloat32);
+
+  EXPECT_EQ(singa::kFloat32, t2.data_type());
+
+  const float* dptr2 = static_cast<const float*>(t2.block()->data());
+  EXPECT_EQ(1.0f, dptr2[0]);
+  EXPECT_EQ(2.0f, dptr2[1]);
+  EXPECT_EQ(3.0f, dptr2[2]);
 }
 
 TEST(TensorClass, ToDevice) {
-  Tensor t(Shape{2,3});
+  Tensor t(Shape{2, 3});
   EXPECT_EQ(singa::defaultDevice, t.device());
   auto dev = std::make_shared<singa::CppCPU>();
   t.ToDevice(dev);
@@ -119,13 +209,105 @@
 }
 
 TEST(TensorClass, T) {
-  Tensor t(Shape{2,3});
+  Tensor t(Shape{2, 3});
   EXPECT_FALSE(t.transpose());
-  Tensor o = t.T();
+  Tensor o = t.T();  // o is a transposed view with shape {3, 2}
+  t.T();             // t itself keeps shape {2, 3}
   EXPECT_EQ(true, o.transpose());
   EXPECT_EQ(t.block(), o.block());
   EXPECT_EQ(t.data_type(), o.data_type());
-  EXPECT_EQ(t.shape()[0],  o.shape()[1]);
-  EXPECT_EQ(t.shape()[1],  o.shape()[0]);
+  EXPECT_EQ(t.shape()[0], o.shape()[1]);
+  EXPECT_EQ(t.shape()[1], o.shape()[0]);
 }
 
+TEST(TensorClass, Repeat) {
+  float data[] = {1.0f, 2.0f, 3.0f};
+  Tensor t(Shape{3});
+  t.CopyDataFromHostPtr(data, 3);
+
+  Tensor o = t.Repeat(vector<size_t>{2}, 9999);
+  const float* dptr = static_cast<const float*>(o.block()->data());
+  EXPECT_FLOAT_EQ(1.0f, dptr[0]);
+  EXPECT_FLOAT_EQ(1.0f, dptr[1]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[2]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[3]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[4]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[5]);
+}
+
+TEST(TensorClass, RepeatData) {
+  float data[] = {1.0f, 2.0f, 3.0f};
+  Tensor t(Shape{3});
+  t.CopyDataFromHostPtr(data, 3);
+
+  Tensor o(Shape{6});
+  o.RepeatData({2}, 9999, 2, t);
+  const float* dptr = static_cast<const float*>(o.block()->data());
+  EXPECT_FLOAT_EQ(1.0f, dptr[0]);
+  EXPECT_FLOAT_EQ(1.0f, dptr[1]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[2]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[3]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[4]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[5]);
+}
+
+TEST(TensorClass, Broadcast) {
+  {
+    Tensor a1(Shape{2, 3, 4, 5}), b1(Shape{5});
+    auto c1 = Broadcast(a1, b1.shape()).shape();
+    auto c2 = Broadcast(b1, a1.shape()).shape();
+    EXPECT_EQ(c1[0], 2);
+    EXPECT_EQ(c1[1], 3);
+    EXPECT_EQ(c1[2], 4);
+    EXPECT_EQ(c1[3], 5);
+
+    EXPECT_EQ(c2[0], 2);
+    EXPECT_EQ(c2[1], 3);
+    EXPECT_EQ(c2[2], 4);
+    EXPECT_EQ(c2[3], 5);
+  }
+  {
+    Tensor a1(Shape{4, 5}), b1(Shape{2, 3, 4, 5});
+    auto c1 = Broadcast(a1, b1.shape()).shape();
+    auto c2 = Broadcast(b1, a1.shape()).shape();
+    EXPECT_EQ(c1[0], 2);
+    EXPECT_EQ(c1[1], 3);
+    EXPECT_EQ(c1[2], 4);
+    EXPECT_EQ(c1[3], 5);
+
+    EXPECT_EQ(c2[0], 2);
+    EXPECT_EQ(c2[1], 3);
+    EXPECT_EQ(c2[2], 4);
+    EXPECT_EQ(c2[3], 5);
+  }
+  {
+    Tensor a1(Shape{1, 4, 5}), b1(Shape{2, 3, 1, 1});
+    auto c1 = Broadcast(a1, b1.shape()).shape();
+    auto c2 = Broadcast(b1, a1.shape()).shape();
+
+    EXPECT_EQ(c1[0], 2);
+    EXPECT_EQ(c1[1], 3);
+    EXPECT_EQ(c1[2], 4);
+    EXPECT_EQ(c1[3], 5);
+
+    EXPECT_EQ(c2[0], 2);
+    EXPECT_EQ(c2[1], 3);
+    EXPECT_EQ(c2[2], 4);
+    EXPECT_EQ(c2[3], 5);
+  }
+  {
+    Tensor a1(Shape{3, 4, 5}), b1(Shape{2, 1, 1, 1});
+    auto c1 = Broadcast(a1, b1.shape()).shape();
+    auto c2 = Broadcast(b1, a1.shape()).shape();
+
+    EXPECT_EQ(c1[0], 2);
+    EXPECT_EQ(c1[1], 3);
+    EXPECT_EQ(c1[2], 4);
+    EXPECT_EQ(c1[3], 5);
+
+    EXPECT_EQ(c2[0], 2);
+    EXPECT_EQ(c2[1], 3);
+    EXPECT_EQ(c2[2], 4);
+    EXPECT_EQ(c2[3], 5);
+  }
+}
diff --git a/test/singa/test_tensor_math.cc b/test/singa/test_tensor_math.cc
index 116262c..a980f22 100644
--- a/test/singa/test_tensor_math.cc
+++ b/test/singa/test_tensor_math.cc
@@ -16,20 +16,22 @@
  * limitations under the License.
  */
 
+#include <array>
+
 #include "gtest/gtest.h"
 #include "singa/core/tensor.h"
-using singa::Tensor;
-using singa::Shape;
 using singa::Device;
+using singa::Shape;
+using singa::Tensor;
 
-class TestTensorMath : public ::testing::Test {
+class TensorMath : public ::testing::Test {
  protected:
   virtual void SetUp() {
-    a.Reshape(singa::Shape{6});
-    b.Reshape(singa::Shape{6});
-    c.Reshape(singa::Shape{6, 1});
-    d.Reshape(singa::Shape{3, 2});
-    e.Reshape(singa::Shape{3, 2});
+    a.Resize(singa::Shape{6});
+    b.Resize(singa::Shape{6});
+    c.Resize(singa::Shape{6, 1});
+    d.Resize(singa::Shape{3, 2});
+    e.Resize(singa::Shape{3, 2});
 
     a.CopyDataFromHostPtr<float>(dat1, 6);
     b.CopyDataFromHostPtr<float>(dat2, 6);
@@ -40,7 +42,7 @@
   const float dat2[6] = {1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f};
 };
 
-TEST_F(TestTensorMath, MemberAbs) {
+TEST_F(TensorMath, AbsCpp) {
   Tensor aa = a.Clone();
   Tensor bb = b.Clone();
   Tensor cc = aa - bb;
@@ -56,7 +58,7 @@
   EXPECT_NEAR(0.1, dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberExp) {
+TEST_F(TensorMath, ExpCpp) {
   Tensor p = Exp(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(exp(1.0f), dptr1[0], 1e-5);
@@ -64,7 +66,17 @@
   EXPECT_NEAR(exp(3.0f), dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberLog) {
+TEST_F(TensorMath, ExpStrideCpp) {
+  auto x = singa::Tensor(singa::Shape{2, 1, 3});
+  auto y = singa::Transpose(x, {1, 2, 0});
+  Exp(singa::Reshape(a, singa::Shape{1, 3, 2}), &y);
+  const float *dptr1 = y.data<float>();
+  EXPECT_NEAR(exp(dat1[0]), dptr1[0], 1e-5);
+  EXPECT_NEAR(exp(dat1[4]), dptr1[2], 1e-5);
+  EXPECT_NEAR(exp(dat1[3]), dptr1[4], 1e-5);
+}
+
+TEST_F(TensorMath, LogCpp) {
   Tensor p = Log(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(log(1.0f), dptr1[0], 1e-5);
@@ -72,7 +84,7 @@
   EXPECT_NEAR(log(3.0f), dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberReLU) {
+TEST_F(TensorMath, ReLUCpp) {
   Tensor aa = a.Clone();
   Tensor cc = aa - 2.0f;
   const float *dptr = cc.data<float>();
@@ -87,7 +99,7 @@
   EXPECT_NEAR(1.0f, dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberSigmoid) {
+TEST_F(TensorMath, SigmoidCpp) {
   Tensor p = Sigmoid(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(1.0f / (1.0f + exp(-1.0f)), dptr1[0], 1e-5);
@@ -95,7 +107,7 @@
   EXPECT_NEAR(1.0f / (1.0f + exp(-3.0f)), dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberSign) {
+TEST_F(TensorMath, SignCpp) {
   Tensor aa = a.Clone();
   Tensor cc = aa - 2.0f;
   const float *dptr = cc.data<float>();
@@ -110,7 +122,33 @@
   EXPECT_EQ(1.0f, dptr1[2]);
 }
 
-TEST_F(TestTensorMath, MemberSqrt) {
+TEST_F(TensorMath, SoftPlusCpp) {
+  Tensor aa = a.Clone();
+  Tensor cc = aa - 1.0f;
+  const float *dptr = cc.data<float>();
+  EXPECT_NEAR(0.0f, dptr[0], 1e-5);
+  EXPECT_NEAR(1.0f, dptr[1], 1e-5);
+
+  Tensor p = SoftPlus(cc);
+  const float *dptr1 = p.data<float>();
+  EXPECT_NEAR(log(2.0f), dptr1[0], 1e-5);
+  EXPECT_NEAR(log(exp(1) + 1.0f), dptr1[1], 1e-5);
+}
+
+TEST_F(TensorMath, SoftSignCpp) {
+  Tensor aa = a.Clone();
+  Tensor cc = aa - 1.0f;
+  const float *dptr = cc.data<float>();
+  EXPECT_NEAR(0.0f, dptr[0], 1e-5);
+  EXPECT_NEAR(1.0f, dptr[1], 1e-5);
+
+  Tensor p = SoftSign(cc);
+  const float *dptr1 = p.data<float>();
+  EXPECT_EQ(0.0f, dptr1[0]);
+  EXPECT_EQ(0.5f, dptr1[1]);
+}
+
+TEST_F(TensorMath, SqrtCpp) {
   Tensor p = Sqrt(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(sqrt(1.0), dptr1[0], 1e-5);
@@ -118,7 +156,7 @@
   EXPECT_NEAR(sqrt(3.0), dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberSquare) {
+TEST_F(TensorMath, SquareCpp) {
   Tensor p = Square(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(1.0, dptr1[0], 1e-5);
@@ -126,7 +164,7 @@
   EXPECT_NEAR(9.0, dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberTanh) {
+TEST_F(TensorMath, TanhCpp) {
   Tensor p = Tanh(a);
   const float *dptr1 = p.data<float>();
   EXPECT_NEAR(tanh(1.0), dptr1[0], 1e-5);
@@ -134,7 +172,7 @@
   EXPECT_NEAR(tanh(3.0), dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, Sum) {
+TEST_F(TensorMath, SumCpp) {
   Tensor p1 = Sum(e, 0);
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(9.0f, dptr1[0]);
@@ -148,7 +186,7 @@
   EXPECT_FLOAT_EQ(11.0f, dptr2[2]);
 }
 
-TEST_F(TestTensorMath, SoftMax) {
+TEST_F(TensorMath, SoftMaxCpp) {
   Tensor p1 = SoftMax(Reshape(e, Shape{1, 6}));
   const float *dptr1 = p1.data<float>();
   float sum = 0;
@@ -166,7 +204,43 @@
   EXPECT_NEAR(exp(2) / (exp(1) + exp(2)), dptr2[1], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberLT) {
+#ifdef USE_CUDNN
+TEST_F(TensorMath, SoftMaxOnAxisCUDNN) {
+  Tensor in(Shape{2, 2, 2, 2}, std::make_shared<singa::CudaGPU>());
+  Gaussian(0.0f, 1.0f, &in);
+
+  // -4, -3, -2, -1, 0, 1, 2, 3
+  Tensor out = SoftMax(in, 1);
+  out = SoftMax(in, -4);
+  out = SoftMax(in, -3);
+  out = SoftMax(in, -2);
+  out = SoftMax(in, -1);
+  out = SoftMax(in, 0);
+  out = SoftMax(in, 1);
+  out = SoftMax(in, 2);
+  out = SoftMax(in, 3);
+}
+#endif  // USE_CUDNN
+
+#ifdef USE_DNNL
+TEST_F(TensorMath, SoftMaxOnAxisDNNL) {
+  Tensor in(Shape{2, 2, 2, 2});
+  Gaussian(0.0f, 1.0f, &in);
+
+  // -4, -3, -2, -1, 0, 1, 2, 3
+  Tensor out = SoftMax(in, 1);
+  out = SoftMax(in, -4);
+  out = SoftMax(in, -3);
+  out = SoftMax(in, -2);
+  out = SoftMax(in, -1);
+  out = SoftMax(in, 0);
+  out = SoftMax(in, 1);
+  out = SoftMax(in, 2);
+  out = SoftMax(in, 3);
+}
+#endif  // USE_DNNL
+
+TEST_F(TensorMath, LTCpp) {
   Tensor p1 = a < 2.0f;
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(1.0f, dptr1[0]);
@@ -174,7 +248,7 @@
   EXPECT_FLOAT_EQ(0.0f, dptr1[2]);
 }
 
-TEST_F(TestTensorMath, MemberLE) {
+TEST_F(TensorMath, LECpp) {
   Tensor p1 = a <= 2.0f;
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(1.0f, dptr1[0]);
@@ -182,7 +256,7 @@
   EXPECT_FLOAT_EQ(0.0f, dptr1[2]);
 }
 
-TEST_F(TestTensorMath, MemberGT) {
+TEST_F(TensorMath, GTCpp) {
   Tensor p1 = a > 2.0f;
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(0.0f, dptr1[0]);
@@ -190,7 +264,7 @@
   EXPECT_FLOAT_EQ(1.0f, dptr1[2]);
 }
 
-TEST_F(TestTensorMath, MemberGE) {
+TEST_F(TensorMath, GECpp) {
   Tensor p1 = a >= 2.0f;
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(0.0f, dptr1[0]);
@@ -198,7 +272,15 @@
   EXPECT_FLOAT_EQ(1.0f, dptr1[2]);
 }
 
-TEST_F(TestTensorMath, MemberPow) {
+TEST_F(TensorMath, EQCpp) {
+  Tensor p1 = a == 2.0f;
+  const float *dptr1 = p1.data<float>();
+  EXPECT_FLOAT_EQ(0.0f, dptr1[0]);
+  EXPECT_FLOAT_EQ(1.0f, dptr1[1]);
+  EXPECT_FLOAT_EQ(0.0f, dptr1[2]);
+}
+
+TEST_F(TensorMath, PowCpp) {
   Tensor p1 = Pow(b, 3.0f);
   const float *dptr1 = p1.data<float>();
   EXPECT_FLOAT_EQ(pow(1.1f, 3.0f), dptr1[0]);
@@ -214,7 +296,7 @@
   // EXPECT_FLOAT_EQ(pow(3.0f,3.1f), dptr2[2]);
 }
 
-TEST_F(TestTensorMath, MemberSub) {
+TEST_F(TensorMath, SubCpp) {
   Tensor p1 = a - b;
   const float *dptr1 = p1.data<float>();
   EXPECT_NEAR(-0.1, dptr1[0], 1e-5);
@@ -222,7 +304,7 @@
   EXPECT_NEAR(-0.1, dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberEltwiseMult) {
+TEST_F(TensorMath, EltwiseMultCpp) {
   Tensor p1 = a * b;
   const float *dptr1 = p1.data<float>();
   EXPECT_NEAR(1.0 * 1.1, dptr1[0], 1e-5);
@@ -230,7 +312,7 @@
   EXPECT_NEAR(3.0 * 3.1, dptr1[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberDiv) {
+TEST_F(TensorMath, DivCpp) {
   Tensor p1 = a / b;
   const float *dptr1 = p1.data<float>();
   EXPECT_NEAR(1.0 / 1.1, dptr1[0], 1e-5);
@@ -250,7 +332,7 @@
   EXPECT_NEAR(3.0 / 8.0, dptr3[2], 1e-5);
 }
 
-TEST_F(TestTensorMath, MemberBernoulli) {
+TEST_F(TensorMath, BernoulliCpp) {
   Tensor p1(Shape{10000});
   Bernoulli(0.3f, &p1);
   const float *dptr1 = p1.data<float>();
@@ -265,7 +347,7 @@
   EXPECT_NEAR(variance, 0.3 * 0.7, 1e-2);
 }
 
-TEST_F(TestTensorMath, MemberUniform) {
+TEST_F(TensorMath, UniformCpp) {
   Tensor p1(Shape{10000});
   Uniform(0.1f, 0.2f, &p1);
   const float *dptr1 = p1.data<float>();
@@ -280,7 +362,7 @@
   EXPECT_NEAR(variance, 0.01f / 12, 1e-3);
 }
 
-TEST_F(TestTensorMath, MemberGaussian) {
+TEST_F(TensorMath, GaussianCpp) {
   Tensor p1(Shape{50000});
   Gaussian(0.0f, 1.0f, &p1);
   const float *dptr1 = p1.data<float>();
@@ -295,7 +377,7 @@
   EXPECT_NEAR(variance, 1.0, 1e-2);
 }
 
-TEST_F(TestTensorMath, MemberAddTensor) {
+TEST_F(TensorMath, AddTensorCpp) {
   Tensor aa = a.Clone();
   aa += a;
   const float *dptr = aa.data<float>();
@@ -319,7 +401,7 @@
   EXPECT_FLOAT_EQ(12.1f, dptr2[5]);
 }
 
-TEST_F(TestTensorMath, AddTensors) {
+TEST_F(TensorMath, AddTensorsCpp) {
   Tensor ret(a.shape(), a.device(), a.data_type());
   Add(a, b, &ret);
   const float *dptr = ret.data<float>();
@@ -343,30 +425,196 @@
   EXPECT_FLOAT_EQ(12.1f, dptr1[5]);
 }
 
-TEST_F(TestTensorMath, SetValue) {
+TEST_F(TensorMath, SetValueCpp) {
   Tensor t(Shape{4});
   t.SetValue(0.3f);
   const float *ptr = t.data<float>();
   for (int i = 0; i < 4; i++) EXPECT_FLOAT_EQ(ptr[i], 0.3f);
 }
 
-TEST_F(TestTensorMath, Reshape) {
+TEST_F(TensorMath, ReshapeCpp) {
   Tensor t(Shape{4});
-  t.SetValue(0.3f);
-  Tensor p = Reshape(t, Shape{4, 1});
+  std::array<float, 4> dat = {1.1f, 2.1f, 3.1f, 4.1f};
+  t.CopyDataFromHostPtr(dat.data(), dat.size());
+  t.Reshape(Shape{4, 1});
   const float *ptr = t.data<float>();
-  EXPECT_EQ(p.shape(0), 4u);
-  EXPECT_EQ(p.shape(1), 1u);
-  for (int i = 0; i < 4; i++) EXPECT_FLOAT_EQ(ptr[i], 0.3f);
+  EXPECT_EQ(t.shape(0), 4u);
+  EXPECT_EQ(t.shape(1), 1u);
+  EXPECT_FLOAT_EQ(ptr[0], 1.1f);
+  EXPECT_FLOAT_EQ(ptr[1], 2.1f);
+  EXPECT_FLOAT_EQ(ptr[2], 3.1f);
+  EXPECT_FLOAT_EQ(ptr[3], 4.1f);
 }
+
+TEST_F(TensorMath, TransposeReshapeCpp) {
+  // test transpose then reshape
+  // {2,3,2} => {2,2,3} => {2,6}
+  Tensor t(Shape{2, 3, 2});
+  const float dat[12] = {1.1f, 2.1f, 3.1f, 4.1f,  5.1f,  6.1f,
+                         7.1f, 8.1f, 9.1f, 10.1f, 11.1f, 12.1f};
+  t.CopyDataFromHostPtr(dat, 12);
+
+  t.Transpose({2, 0, 1});
+  EXPECT_EQ(t.shape(0), 2u);
+  EXPECT_EQ(t.shape(1), 2u);
+  EXPECT_EQ(t.shape(2), 3u);
+
+  float dptr[12];
+  t.GetValue(dptr, 12);
+
+  EXPECT_FLOAT_EQ(1.1f, dptr[0]);
+  EXPECT_FLOAT_EQ(3.1f, dptr[1]);
+  EXPECT_FLOAT_EQ(5.1f, dptr[2]);
+  EXPECT_FLOAT_EQ(7.1f, dptr[3]);
+  EXPECT_FLOAT_EQ(9.1f, dptr[4]);
+  EXPECT_FLOAT_EQ(11.1f, dptr[5]);
+  EXPECT_FLOAT_EQ(2.1f, dptr[6]);
+  EXPECT_FLOAT_EQ(4.1f, dptr[7]);
+  EXPECT_FLOAT_EQ(6.1f, dptr[8]);
+  EXPECT_FLOAT_EQ(8.1f, dptr[9]);
+  EXPECT_FLOAT_EQ(10.1f, dptr[10]);
+  EXPECT_FLOAT_EQ(12.1f, dptr[11]);
+
+  t.Reshape(Shape{2, 6});
+  EXPECT_EQ(t.shape(0), 2u);
+  EXPECT_EQ(t.shape(1), 6u);
+
+  float dptr2[12];
+  t.GetValue(dptr2, 12);
+  EXPECT_FLOAT_EQ(1.1f, dptr2[0]);
+  EXPECT_FLOAT_EQ(3.1f, dptr2[1]);
+  EXPECT_FLOAT_EQ(5.1f, dptr2[2]);
+  EXPECT_FLOAT_EQ(7.1f, dptr2[3]);
+  EXPECT_FLOAT_EQ(9.1f, dptr2[4]);
+  EXPECT_FLOAT_EQ(11.1f, dptr2[5]);
+  EXPECT_FLOAT_EQ(2.1f, dptr2[6]);
+  EXPECT_FLOAT_EQ(4.1f, dptr2[7]);
+  EXPECT_FLOAT_EQ(6.1f, dptr2[8]);
+  EXPECT_FLOAT_EQ(8.1f, dptr2[9]);
+  EXPECT_FLOAT_EQ(10.1f, dptr2[10]);
+  EXPECT_FLOAT_EQ(12.1f, dptr2[11]);
+}
+
+TEST_F(TensorMath, TransposeFloatCpp) {
+  Tensor t(Shape{2, 3, 2});
+  const float dat1[12] = {1.0f, 2.0f, 3.0f, 4.0f,  5.0f,  6.0f,
+                          7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f};
+  t.CopyDataFromHostPtr(dat1, 12);
+
+  t.Transpose({2, 0, 1});
+  float dptr[12];
+  t.GetValue(dptr, 12);
+  EXPECT_FLOAT_EQ(1.0f, dptr[0]);
+  EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+  EXPECT_FLOAT_EQ(5.0f, dptr[2]);
+  EXPECT_FLOAT_EQ(7.0f, dptr[3]);
+  EXPECT_FLOAT_EQ(9.0f, dptr[4]);
+  EXPECT_FLOAT_EQ(11.0f, dptr[5]);
+  EXPECT_FLOAT_EQ(2.0f, dptr[6]);
+  EXPECT_FLOAT_EQ(4.0f, dptr[7]);
+  EXPECT_FLOAT_EQ(6.0f, dptr[8]);
+  EXPECT_FLOAT_EQ(8.0f, dptr[9]);
+  EXPECT_FLOAT_EQ(10.0f, dptr[10]);
+  EXPECT_FLOAT_EQ(12.0f, dptr[11]);
+}
+
+TEST_F(TensorMath, TransposeIntCpp) {
+  Tensor t(Shape{2, 3, 2});
+  const int dat1[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
+  t.CopyDataFromHostPtr(dat1, 12);
+
+  t.Transpose({2, 0, 1});
+  int dptr[12];
+  t.GetValue(dptr, 12);
+  EXPECT_EQ(1, dptr[0]);
+  EXPECT_EQ(3, dptr[1]);
+  EXPECT_EQ(5, dptr[2]);
+  EXPECT_EQ(7, dptr[3]);
+  EXPECT_EQ(9, dptr[4]);
+  EXPECT_EQ(11, dptr[5]);
+  EXPECT_EQ(2, dptr[6]);
+  EXPECT_EQ(4, dptr[7]);
+  EXPECT_EQ(6, dptr[8]);
+  EXPECT_EQ(8, dptr[9]);
+  EXPECT_EQ(10, dptr[10]);
+  EXPECT_EQ(12, dptr[11]);
+}
+
+TEST_F(TensorMath, BroadcastCpp) {
+  Tensor x(Shape{1});
+  x.SetValue(1.0f);
+  {
+    auto y = x + a;
+    const float *dptr = y.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+  }
+
+  {
+    auto y = x + e;
+    const float *dptr = y.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+  }
+
+  auto p = Reshape(e, Shape{3, 1, 2});
+  {
+    Tensor q(Shape{3, 1, 1});
+    q.CopyDataFromHostPtr(dat1, 3);
+    auto z = p + q;
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(5.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[4]);
+    EXPECT_FLOAT_EQ(9.0f, dptr[5]);
+  }
+
+  {
+    Tensor q(Shape{2});
+    q.CopyDataFromHostPtr(dat1, 2);
+    auto z = p + q;
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[4]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[5]);
+  }
+
+  {
+    Tensor q(Shape{3, 1, 2, 1});
+    q.CopyDataFromHostPtr(dat1, 6);
+    auto z = p + q;
+    EXPECT_EQ(z.shape().size(), 4);
+    EXPECT_EQ(z.shape(0), 3);
+    EXPECT_EQ(z.shape(1), 3);
+    EXPECT_EQ(z.shape(2), 2);
+    EXPECT_EQ(z.shape(3), 2);
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[16]);
+    EXPECT_FLOAT_EQ(7.0f, dptr[17]);
+    EXPECT_FLOAT_EQ(7.0f, dptr[18]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[19]);
+  }
+}
+
 #ifdef USE_CBLAS
-TEST_F(TestTensorMath, L2Cpp) {
+TEST_F(TensorMath, L2Cpp) {
   float l2 = a.L2();
   float target = 0.0f;
   for (size_t i = 0; i < a.Size(); i++) target += dat1[i] * dat1[i];
   EXPECT_FLOAT_EQ(l2, sqrt(target) / a.Size());
 }
-TEST_F(TestTensorMath, MultCpp) {
+TEST_F(TensorMath, MultCpp) {
   const float x[4] = {1.0f, 2.0f, 3.0f, 4.0f};
   Tensor t(Shape{2, 2});
   t.CopyDataFromHostPtr(x, 4);
@@ -413,7 +661,7 @@
   }
 }
 
-TEST_F(TestTensorMath, AddColumnCpp) {
+TEST_F(TensorMath, AddColumnCpp) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   Tensor t(Shape{3});
   t.CopyDataFromHostPtr(x, 3);
@@ -426,7 +674,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, SubColumnCpp) {
+TEST_F(TensorMath, SubColumnCpp) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   Tensor t(Shape{3});
   t.CopyDataFromHostPtr(x, 3);
@@ -440,7 +688,7 @@
   }
 }
 
-TEST_F(TestTensorMath, DivColumnCpp) {
+TEST_F(TensorMath, DivColumnCpp) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   Tensor t(Shape{3});
   t.CopyDataFromHostPtr(x, 3);
@@ -454,7 +702,7 @@
   }
 }
 
-TEST_F(TestTensorMath, AddRowCpp) {
+TEST_F(TensorMath, AddRowCpp) {
   const float x[2] = {1.1f, 2.1f};
   Tensor t(Shape{2});
   t.CopyDataFromHostPtr(x, 2);
@@ -468,7 +716,7 @@
   }
 }
 
-TEST_F(TestTensorMath, SubRowCpp) {
+TEST_F(TensorMath, SubRowCpp) {
   const float x[2] = {1.1f, 2.1f};
   Tensor t(Shape{2});
   t.CopyDataFromHostPtr(x, 2);
@@ -482,7 +730,7 @@
   }
 }
 
-TEST_F(TestTensorMath, MultRowCpp) {
+TEST_F(TensorMath, MultRowCpp) {
   const float x[2] = {1.1f, 2.1f};
   Tensor t(Shape{2});
   t.CopyDataFromHostPtr(x, 2);
@@ -496,11 +744,38 @@
   }
 }
 
-TEST_F(TestTensorMath, SumRowsCpp) {
+TEST_F(TensorMath, MultColumnCpp) {
+  const float x[3] = {1.0f, 2.0f, 3.0f};
+  Tensor t(Shape{3});
+  t.CopyDataFromHostPtr(x, 3);
+  d.CopyDataFromHostPtr(dat1, 6);
+  MultColumn(t, &d);
+  const float *xptr = d.data<float>();
+  for (int i = 0; i < 3; i++) {
+    for (int j = 0; j < 2; j++) {
+      EXPECT_FLOAT_EQ(xptr[i * 2 + j], dat1[i * 2 + j] * x[i]);
+    }
+  }
+}
+
+TEST_F(TensorMath, DivRowCpp) {
+  const float x[2] = {1.1f, 2.1f};
+  Tensor t(Shape{2});
+  t.CopyDataFromHostPtr(x, 2);
+  d.CopyDataFromHostPtr(dat1, 6);
+  DivRow(t, &d);
+  const float *xptr = d.data<float>();
+  for (int i = 0; i < 3; i++) {
+    for (int j = 0; j < 2; j++) {
+      EXPECT_FLOAT_EQ(xptr[i * 2 + j], dat1[i * 2 + j] / x[j]);
+    }
+  }
+}
+
+TEST_F(TensorMath, SumRowsCpp) {
   Tensor t(Shape{2});
   float dat[6];
-  for (int i = 0; i < 6; i ++)
-    dat[i] = (float)rand()/(float)(RAND_MAX/ 10);
+  for (int i = 0; i < 6; i++) dat[i] = (float)rand() / (float)(RAND_MAX / 10);
   d.CopyDataFromHostPtr(dat, 6);
   SumRows(d, &t);
   const float *tptr = t.data<float>();
@@ -513,7 +788,7 @@
   }
 }
 
-TEST_F(TestTensorMath, SumColumnsCpp) {
+TEST_F(TensorMath, SumColumnsCpp) {
   Tensor t(Shape{3});
   d.CopyDataFromHostPtr(dat1, 6);
   SumColumns(d, &t);
@@ -526,9 +801,59 @@
     EXPECT_FLOAT_EQ(tptr[i], tmp);
   }
 }
+
+TEST_F(TensorMath, ConcatenateRowsCpp) {
+  d.CopyDataFromHostPtr<float>(dat1, 6);
+  e.CopyDataFromHostPtr<float>(dat2, 6);
+  const auto ret = singa::ConcatenateRows(vector<Tensor>{d, e});
+  EXPECT_EQ(ret.shape(0), d.shape(0) + e.shape(0));
+  EXPECT_EQ(ret.shape(1), d.shape(1));
+  const float *retPtr = ret.data<float>();
+  for (int i = 0; i < 6; i++) EXPECT_FLOAT_EQ(retPtr[i], dat1[i]);
+  for (int i = 0; i < 6; i++) EXPECT_FLOAT_EQ(retPtr[i + 6], dat2[i]);
+}
+
+TEST_F(TensorMath, ConcatenateColumnsCpp) {
+  d.CopyDataFromHostPtr<float>(dat1, 6);
+  e.CopyDataFromHostPtr<float>(dat2, 6);
+  const auto ret = singa::ConcatenateColumns(vector<Tensor>{d, e});
+  EXPECT_EQ(ret.shape(0), d.shape(0));
+  EXPECT_EQ(ret.shape(1), d.shape(1) + e.shape(1));
+
+  const float *retPtr = ret.data<float>();
+  for (int i = 0; i < 3; i++) {
+    for (int j = 0; j < 2; j++)
+      EXPECT_FLOAT_EQ(retPtr[i * 4 + j], dat1[i * 2 + j]);
+    for (int j = 0; j < 2; j++)
+      EXPECT_FLOAT_EQ(retPtr[i * 4 + 2 + j], dat2[i * 2 + j]);
+  }
+}
+
+TEST_F(TensorMath, CopyRowsCpp) {
+  const auto ret = singa::CopyRows(e, 1, 2);
+  EXPECT_EQ(ret.shape(0), 1u);
+  EXPECT_EQ(ret.shape(1), e.shape(1));
+  const float *retPtr = ret.data<float>();
+  for (size_t i = 0; i < ret.Size(); i++)
+    EXPECT_FLOAT_EQ(retPtr[i], dat1[1 * 2 + i]);
+}
+
+TEST_F(TensorMath, CopyColumnsCpp) {
+  a.Reshape(Shape{2, 3});
+  const auto ret = singa::CopyColumns(a, 1, 3);
+  EXPECT_EQ(ret.shape(0), a.shape(0));
+  EXPECT_EQ(ret.shape(1), 2u);
+  const float *retPtr = ret.data<float>();
+  for (size_t i = 0; i < ret.shape(0); i++)
+    for (size_t j = 0; j < ret.shape(1); j++)
+      EXPECT_FLOAT_EQ(retPtr[i * ret.shape(1) + j],
+                      dat1[i * a.shape(1) + j + 1]);
+}
 #endif
+
+//////////////////////////////////////////////////////////
 #ifdef USE_CUDA
-TEST_F(TestTensorMath, L2Cuda) {
+TEST_F(TensorMath, L2Cuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3, 2}, dev);
   t.CopyDataFromHostPtr(dat1, 6);
@@ -537,7 +862,7 @@
   for (size_t i = 0; i < t.Size(); i++) target += dat1[i] * dat1[i];
   EXPECT_FLOAT_EQ(l2, sqrt(target) / t.Size());
 }
-TEST_F(TestTensorMath, MultCuda) {
+TEST_F(TensorMath, MultCuda) {
   const float x[4] = {1.0f, 2.0f, 3.0f, 4.0f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2, 2}, dev);
@@ -590,7 +915,7 @@
   p.ToHost();
 }
 
-TEST_F(TestTensorMath, AddColumnCuda) {
+TEST_F(TensorMath, AddColumnCuda) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3}, dev);
@@ -607,7 +932,7 @@
   }
 }
 
-TEST_F(TestTensorMath, SubColumnCuda) {
+TEST_F(TensorMath, SubColumnCuda) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3}, dev);
@@ -623,22 +948,8 @@
     }
   }
 }
-#endif
-TEST_F(TestTensorMath, MultColumnCpp) {
-  const float x[3] = {1.0f, 2.0f, 3.0f};
-  Tensor t(Shape{3});
-  t.CopyDataFromHostPtr(x, 3);
-  d.CopyDataFromHostPtr(dat1, 6);
-  MultColumn(t, &d);
-  const float *xptr = d.data<float>();
-  for (int i = 0; i < 3; i++) {
-    for (int j = 0; j < 2; j++) {
-      EXPECT_FLOAT_EQ(xptr[i * 2 + j], dat1[i * 2 + j] * x[i]);
-    }
-  }
-}
-#ifdef USE_CUDA
-TEST_F(TestTensorMath, MultColumnCuda) {
+
+TEST_F(TensorMath, MultColumnCuda) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3}, dev);
@@ -654,7 +965,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, DivColumnCuda) {
+TEST_F(TensorMath, DivColumnCuda) {
   const float x[3] = {1.0f, 2.0f, 3.0f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3}, dev);
@@ -670,7 +981,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, AddRowCuda) {
+TEST_F(TensorMath, AddRowCuda) {
   const float x[2] = {1.1f, 2.1f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2}, dev);
@@ -686,7 +997,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, SubRowCuda) {
+TEST_F(TensorMath, SubRowCuda) {
   const float x[2] = {1.1f, 2.1f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2}, dev);
@@ -702,7 +1013,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, MultRowCuda) {
+TEST_F(TensorMath, MultRowCuda) {
   const float x[2] = {1.1f, 2.1f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2}, dev);
@@ -718,22 +1029,8 @@
     }
   }
 }
-#endif
-TEST_F(TestTensorMath, DivRowCpp) {
-  const float x[2] = {1.1f, 2.1f};
-  Tensor t(Shape{2});
-  t.CopyDataFromHostPtr(x, 2);
-  d.CopyDataFromHostPtr(dat1, 6);
-  DivRow(t, &d);
-  const float *xptr = d.data<float>();
-  for (int i = 0; i < 3; i++) {
-    for (int j = 0; j < 2; j++) {
-      EXPECT_FLOAT_EQ(xptr[i * 2 + j], dat1[i * 2 + j] / x[j]);
-    }
-  }
-}
-#ifdef USE_CUDA
-TEST_F(TestTensorMath, DivRowCuda) {
+
+TEST_F(TensorMath, DivRowCuda) {
   const float x[2] = {1.1f, 2.1f};
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2}, dev);
@@ -749,7 +1046,7 @@
     }
   }
 }
-TEST_F(TestTensorMath, SumRowsCuda) {
+TEST_F(TensorMath, SumRowsCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{2}, dev);
   d.CopyDataFromHostPtr(dat1, 6);
@@ -766,7 +1063,7 @@
   }
   d.ToHost();
 }
-TEST_F(TestTensorMath, SumColumnCuda) {
+TEST_F(TensorMath, SumColumnCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   Tensor t(Shape{3}, dev);
   d.CopyDataFromHostPtr(dat1, 6);
@@ -784,59 +1081,22 @@
   d.ToHost();
 }
 
-#endif
-
-TEST_F(TestTensorMath, ConcatenateRowsCpp) {
+TEST_F(TensorMath, ExpStrideCuda) {
+  auto dev = std::make_shared<singa::CudaGPU>();
+  a.ToDevice(dev);
+  auto x = singa::Tensor(singa::Shape{2, 1, 3});
+  x.ToDevice(dev);
   d.CopyDataFromHostPtr<float>(dat1, 6);
-  e.CopyDataFromHostPtr<float>(dat2, 6);
-  const auto ret = singa::ConcatenateRows(vector<Tensor>{d, e});
-  EXPECT_EQ(ret.shape(0), d.shape(0) + e.shape(0));
-  EXPECT_EQ(ret.shape(1), d.shape(1));
-  const float *retPtr = ret.data<float>();
-  for (int i = 0; i < 6; i++) EXPECT_FLOAT_EQ(retPtr[i], dat1[i]);
-  for (int i = 0; i < 6; i++) EXPECT_FLOAT_EQ(retPtr[i + 6], dat2[i]);
+  auto y = singa::Transpose(x, {1, 2, 0});
+  Exp(singa::Reshape(a, singa::Shape{1, 3, 2}), &y);
+  y.ToHost();
+  const float *dptr1 = y.data<float>();
+  EXPECT_NEAR(exp(dat1[0]), dptr1[0], 1e-5);
+  EXPECT_NEAR(exp(dat1[4]), dptr1[2], 1e-5);
+  EXPECT_NEAR(exp(dat1[3]), dptr1[4], 1e-5);
 }
 
-TEST_F(TestTensorMath, ConcatenateColumnsCpp) {
-  d.CopyDataFromHostPtr<float>(dat1, 6);
-  e.CopyDataFromHostPtr<float>(dat2, 6);
-  const auto ret = singa::ConcatenateColumns(vector<Tensor>{d, e});
-  EXPECT_EQ(ret.shape(0), d.shape(0));
-  EXPECT_EQ(ret.shape(1), d.shape(1) + e.shape(1));
-
-  const float *retPtr = ret.data<float>();
-  for (int i = 0; i < 3; i++) {
-    for (int j = 0; j < 2; j++)
-      EXPECT_FLOAT_EQ(retPtr[i * 4 + j], dat1[i * 2 + j]);
-    for (int j = 0; j < 2; j++)
-      EXPECT_FLOAT_EQ(retPtr[i * 4 + 2 + j], dat2[i * 2 + j]);
-  }
-}
-
-TEST_F(TestTensorMath, CopyRowsCpp) {
-  const auto ret = singa::CopyRows(e, 1, 2);
-  EXPECT_EQ(ret.shape(0), 1u);
-  EXPECT_EQ(ret.shape(1), e.shape(1));
-  const float *retPtr = ret.data<float>();
-  for (size_t i = 0; i < ret.Size(); i++)
-    EXPECT_FLOAT_EQ(retPtr[i], dat1[1 * 2 + i]);
-}
-
-TEST_F(TestTensorMath, CopyColumnsCpp) {
-  a.Reshape(Shape{2, 3});
-  const auto ret = singa::CopyColumns(a, 1, 3);
-  EXPECT_EQ(ret.shape(0), a.shape(0));
-  EXPECT_EQ(ret.shape(1), 2u);
-  const float *retPtr = ret.data<float>();
-  for (size_t i = 0; i < ret.shape(0); i++)
-    for (size_t j = 0; j < ret.shape(1); j++)
-      EXPECT_FLOAT_EQ(retPtr[i * ret.shape(1) + j],
-                      dat1[i * a.shape(1) + j + 1]);
-}
-
-#ifdef USE_CUDA
-
-TEST_F(TestTensorMath, ConcatenateRowsCuda) {
+TEST_F(TensorMath, ConcatenateRowsCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   d.ToDevice(dev);
   e.ToDevice(dev);
@@ -851,7 +1111,7 @@
   for (int i = 0; i < 6; i++) EXPECT_FLOAT_EQ(retPtr[i + 6], dat2[i]);
 }
 
-TEST_F(TestTensorMath, ConcatenateColumnsCuda) {
+TEST_F(TensorMath, ConcatenateColumnsCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   d.ToDevice(dev);
   e.ToDevice(dev);
@@ -871,7 +1131,7 @@
   }
 }
 
-TEST_F(TestTensorMath, CopyRowsCuda) {
+TEST_F(TensorMath, CopyRowsCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   e.ToDevice(dev);
   auto ret = singa::CopyRows(e, 1, 2);
@@ -883,7 +1143,7 @@
     EXPECT_FLOAT_EQ(retPtr[i], dat1[1 * 2 + i]);
 }
 
-TEST_F(TestTensorMath, CopyColumnsCuda) {
+TEST_F(TensorMath, CopyColumnsCuda) {
   auto dev = std::make_shared<singa::CudaGPU>();
   a.Reshape(Shape{2, 3});
   a.ToDevice(dev);
@@ -898,4 +1158,127 @@
                       dat1[i * a.shape(1) + j + 1]);
 }
 
+TEST_F(TensorMath, RowMaxCuda) {
+  auto dev = std::make_shared<singa::CudaGPU>();
+  Tensor x1(Shape{2, 2}, dev);
+  const float data1[4] = {1.0f, 2.0f, 3.0f, 4.0f};
+  x1.CopyDataFromHostPtr<float>(data1, 4);
+
+  auto y2 = RowMax(x1);
+  y2.Reshape({2, 1});
+  y2.ToHost();
+  const float *dptr1 = y2.data<float>();
+  EXPECT_EQ(dptr1[0], 2);
+  EXPECT_EQ(dptr1[1], 4);
+}
+
+TEST_F(TensorMath, BroadcastCuda) {
+  auto dev = std::make_shared<singa::CudaGPU>();
+  Tensor x(Shape{1});
+  x.ToDevice(dev);
+  x.SetValue(1.0f);
+  a.ToDevice(dev);
+  {
+    auto y = a + x;
+    y.ToHost();
+    const float *dptr = y.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+  }
+
+  e.ToDevice(dev);
+  {
+    auto y = e + x;
+    y.ToHost();
+    const float *dptr = y.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+  }
+
+  auto p = Reshape(e, Shape{3, 1, 2});
+  {
+    Tensor q(Shape{3, 1, 1}, dev);
+    q.CopyDataFromHostPtr(dat1, 3);
+    auto z = p + q;
+    z.ToHost();
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(5.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[4]);
+    EXPECT_FLOAT_EQ(9.0f, dptr[5]);
+  }
+
+  {
+    Tensor q(Shape{2}, dev);
+    q.CopyDataFromHostPtr(dat1, 2);
+    auto z = p + q;
+    EXPECT_EQ(z.shape().size(), 3);
+    EXPECT_EQ(z.shape(0), 3);
+    EXPECT_EQ(z.shape(1), 1);
+    EXPECT_EQ(z.shape(2), 2);
+    z.ToHost();
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[4]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[5]);
+  }
+  {
+    Tensor q(Shape{3, 1, 2, 1}, dev);
+    q.CopyDataFromHostPtr(dat1, 6);
+    auto z = p + q;
+    z.ToHost();
+    EXPECT_EQ(z.shape().size(), 4);
+    EXPECT_EQ(z.shape(0), 3);
+    EXPECT_EQ(z.shape(1), 3);
+    EXPECT_EQ(z.shape(2), 2);
+    EXPECT_EQ(z.shape(3), 2);
+    const float *dptr = z.data<float>();
+    EXPECT_FLOAT_EQ(2.0f, dptr[0]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[1]);
+    EXPECT_FLOAT_EQ(3.0f, dptr[2]);
+    EXPECT_FLOAT_EQ(4.0f, dptr[3]);
+    EXPECT_FLOAT_EQ(6.0f, dptr[16]);
+    EXPECT_FLOAT_EQ(7.0f, dptr[17]);
+    EXPECT_FLOAT_EQ(7.0f, dptr[18]);
+    EXPECT_FLOAT_EQ(8.0f, dptr[19]);
+  }
+}
+
+TEST_F(TensorMath, SoftPlusCuda) {
+  auto dev = std::make_shared<singa::CudaGPU>();
+  Tensor x(Shape{2}, dev);
+  const float data[2] = {0.0f, 1.0f};
+  x.CopyDataFromHostPtr<float>(data, 2);
+
+  auto y = SoftPlus(x);
+  y.Reshape({2, 1});
+  y.ToHost();
+
+  const float *dptr = y.data<float>();
+  EXPECT_NEAR(dptr[0], log(2.0f), 1e-5);
+  EXPECT_NEAR(dptr[1], log(exp(1) + 1.0f), 1e-5);
+}
+
+TEST_F(TensorMath, SoftSignCuda) {
+  auto dev = std::make_shared<singa::CudaGPU>();
+  Tensor x(Shape{2}, dev);
+  const float data[2] = {0.0f, 1.0f};
+  x.CopyDataFromHostPtr<float>(data, 2);
+
+  auto y = SoftSign(x);
+  y.Reshape({2, 1});
+  y.ToHost();
+
+  const float *dptr = y.data<float>();
+  EXPECT_EQ(dptr[0], 0.0f);
+  EXPECT_EQ(dptr[1], 0.5f);
+}
+
 #endif
diff --git a/test/singa/test_textfile_rw.cc b/test/singa/test_textfile_rw.cc
index c436478..98befa3 100644
--- a/test/singa/test_textfile_rw.cc
+++ b/test/singa/test_textfile_rw.cc
@@ -1,23 +1,23 @@
 /************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *************************************************************/
 
 #include "../include/singa/io/reader.h"
 #include "../include/singa/io/writer.h"
diff --git a/test/singa/test_timer.cc b/test/singa/test_timer.cc
index 735b72d..de49534 100644
--- a/test/singa/test_timer.cc
+++ b/test/singa/test_timer.cc
@@ -16,12 +16,12 @@
  * limitations under the License.
  */
 
-#include "gtest/gtest.h"
-#include "singa/utils/timer.h"
-
 #include <chrono>
 #include <thread>
 
+#include "gtest/gtest.h"
+#include "singa/utils/timer.h"
+
 TEST(TimerTest, TestTick) {
   singa::Timer t;
   std::this_thread::sleep_for(std::chrono::milliseconds(10));
diff --git a/tool/code-format/README.md b/tool/code-format/README.md
new file mode 100644
index 0000000..758f79b
--- /dev/null
+++ b/tool/code-format/README.md
@@ -0,0 +1,49 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+# How to format code
+
+This guide is for SINGA developers, who should sanitize the code
+before merging it into the main branch.
+
+## Tools for auto-formatting code
+
+Install clang-format for C++:
+
+Ubuntu 16.04: `sudo apt install clang-format`
+
+Ubuntu 18.04: `sudo apt install clang-format-6.0`
+
+
+Install yapf for Python:
+
+`pip install yapf`
+
+## Formatting a single file
+
+- C++: `clang-format -i path/to/file`
+
+- Python: `yapf -i path/to/file`
+
+## Formatting the whole project
+
+Usage: `bash tool/code-format/format.sh`
+
+## Configuration
+
+The current configurations follow the Google style.
+They can be updated in `.clang-format` and `.style.yapf`.
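+
+For reference, a minimal `.style.yapf` based on the Google style might look
+like this (a sketch; the checked-in files are authoritative):
+
+    [style]
+    based_on_style = google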
diff --git a/.travis.yml b/tool/code-format/format.sh
similarity index 63%
rename from .travis.yml
rename to tool/code-format/format.sh
index eb9ff69..d30e0fc 100644
--- a/.travis.yml
+++ b/tool/code-format/format.sh
@@ -1,3 +1,5 @@
+#!/usr/bin/env bash
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,29 +17,19 @@
 # limitations under the License.
 #
 
-# to use container for building
-sudo: required
-language: cpp
+# format C++ code with clang-format
+find src/api/ \
+    src/core/ \
+    src/proto/ \
+    src/utils/ \
+    include/singa/core/ \
+    include/singa/utils/ \
+    src/model/operation/ \
+    include/singa/io/communicator.h \
+    src/io/communicator.cc \
+    test/singa/ -iname "*.h" -o -iname "*.cc" | xargs clang-format -i
 
-matrix:
-  include:
-  - os: osx
-    compiler: clang
-    osx_image: xcode8
-  - os: linux
-    dist: trusty
-    compiler: gcc
-
-#
-#addons:
-#  apt:
-#    packages:
-#      - libopenblas-dev
-#      - libprotobuf-dev
-#      - protobuf-compiler
-
-install:
-  - travis_wait bash -ex tool/travis/depends.sh
-
-script:
-  - bash -ex tool/travis/build.sh
+# format Python code with yapf
+find python/ \
+    examples/autograd \
+    test/python/ -iname "*.py" | xargs yapf -i
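+
+# Optional check mode (a sketch, not wired up in this script): run the
+# formatters on a clean tree, then fail if any file changed, e.g.
+#   bash tool/code-format/format.sh && git diff --exit-code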
diff --git a/tool/conda/cpu/README.md b/tool/conda/cpu/README.md
new file mode 100644
index 0000000..566ac09
--- /dev/null
+++ b/tool/conda/cpu/README.md
@@ -0,0 +1,48 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+A conda package specification includes the package name (i.e., singa), the
+version, and the build string (which can be very long).
+To install a specific SINGA package, we run
+
+    conda install -c nusdbsystem singa=<version>=<build string>
+
+It is inconvenient to type all three parts when running the installation
+command. The meta.yml file in this folder creates a conda package `singa-cpu`
+as an alias of one specific SINGA package. It does nothing except create a
+dummy conda package that depends on one real CPU-version SINGA package. For
+example, the following line in meta.yml indicates that singa-cpu depends on
+SINGA version 1.1.1 with Python 3.6:
+
+    - singa 1.1.1 py36_cpu
+
+Therefore, when we run
+
+    conda install -c nusdbsystem singa-cpu
+
+the dependent SINGA package will be installed.
+By default, singa-cpu depends on the latest SINGA (py3.6).
+Whenever a new SINGA version becomes available, the meta.yml file must be
+updated to change the dependency.
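+For example (an illustrative version number, not a real release pin), bumping
+to a newer release would change the requirements line in meta.yml to
+
+    - singa 2.0.0 py36_cpu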
+
+To build and upload this package, run
+
+    conda config --add channels nusdbsystem
+    conda build .
+    anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main <path to the singa-cpu package>
+
+where $ANACONDA_UPLOAD_TOKEN is the upload token associated with the nusdbsystem account on Anaconda Cloud.
diff --git a/tool/conda/cpu/meta.yaml b/tool/conda/cpu/meta.yaml
new file mode 100644
index 0000000..c9ba5a6
--- /dev/null
+++ b/tool/conda/cpu/meta.yaml
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
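+# The package version below comes from the latest git tag (conda-build sets
+# GIT_DESCRIBE_TAG from `git describe`); dashes are replaced with dots to
+# satisfy conda's version syntax.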
+package:
+  name: singa-cpu
+  version: {{ environ.get('GIT_DESCRIBE_TAG', '') | replace("-", ".") }}
+
+source:
+  path: ../../../
+
+requirements:
+  run:
+    - singa {{ environ.get('GIT_DESCRIBE_TAG', '') | replace("-", ".") }} cpu_py{{ py }}
+
+build:
+  number: 0
+  string: py{{ py }}
+
+about:
+  home: http://singa.apache.org/
+  license: Apache V2
+  summary: SINGA is an Apache Incubating project for providing distributed deep learning. Apache disclaimers http://singa.apache.org/en/index.html#disclaimers
diff --git a/tool/conda/dist/README.md b/tool/conda/dist/README.md
new file mode 100644
index 0000000..03d7050
--- /dev/null
+++ b/tool/conda/dist/README.md
@@ -0,0 +1,51 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+A conda package specification includes the package name (i.e., singa), the
+version, and the build string (which can be very long).
+To install a specific SINGA package, we run
+
+    conda install -c nusdbsystem singa=<version>=<build string>
+
+It is inconvenient to type all three parts when running the installation
+command. The meta.yml file in this folder creates a conda package `singa-dist`
+as an alias of one specific SINGA package. It does nothing except create a
+dummy conda package that depends on one real GPU-version SINGA package. For
+example, the following line in meta.yml indicates that singa-dist depends on
+SINGA version 1.1.1 with Python 3.6, CUDA 9.0, cuDNN 7.1.2, NCCL 2.4.8.1, and
+MPICH 3.3.2:
+
+    - singa 1.1.1 py36_cuda9.0_cudnn7.1.2_nccl2.4.8.1_mpich3.3.2
+
+
+Therefore, when we run
+
+    conda install -c nusdbsystem singa-dist
+
+the dependent SINGA package will be installed.
+By default, singa-dist depends on the latest SINGA (py3.6) built against the
+latest CUDA (and cuDNN), as well as the distributed-computing libraries NCCL
+and MPICH.
+Whenever a new SINGA version becomes available, the meta.yml file must be
+updated to change the dependency.
+
+To build and upload this package, run
+
+    conda config --add channels nusdbsystem
+    conda build .
+    anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main <path to the singa-dist package>
+
+where $ANACONDA_UPLOAD_TOKEN is the upload token associated with the nusdbsystem account on Anaconda Cloud.
diff --git a/tool/conda/dist/meta.yaml b/tool/conda/dist/meta.yaml
new file mode 100644
index 0000000..97cc0b3
--- /dev/null
+++ b/tool/conda/dist/meta.yaml
@@ -0,0 +1,40 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+package:
+  name: singa-dist
+  version: {{ environ.get('GIT_DESCRIBE_TAG') }}
+
+source:
+  path: ../../../
+  # git_url: https://github.com/apache/singa.git
+
+requirements:
+  run:
+    - singa {{ environ.get('GIT_DESCRIBE_TAG') }} cudnn7.6.5_cuda10.0_nccl2.4.8.1_mpich3.3.2_py{{ py }}
+
+build:
+  number: 0
+  string: py{{ py }}
+
+
+about:
+  home: http://singa.apache.org/
+  license: Apache V2
+  summary: SINGA is an Apache Incubating project for providing distributed deep learning. Apache disclaimers http://singa.apache.org/en/index.html#disclaimers
diff --git a/tool/conda/docker/cuda10.2/Dockerfile b/tool/conda/docker/cuda10.2/Dockerfile
new file mode 100644
index 0000000..7526b2f
--- /dev/null
+++ b/tool/conda/docker/cuda10.2/Dockerfile
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# 18.04 has errors in ssh
+FROM nvidia/cuda:10.2-devel-ubuntu16.04
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+        git \
+        build-essential \
+        cmake \
+        wget \
+        openssh-server \
+        ca-certificates \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    #
+    # install conda, conda-build and anaconda-client
+    #
+    && wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh \
+    && bash miniconda.sh -b -p /root/miniconda \
+    && /root/miniconda/bin/conda config --set always_yes yes --set changeps1 no \
+    && /root/miniconda/bin/conda update -q conda \
+    && /root/miniconda/bin/conda install -y \
+        conda-build \
+        anaconda-client \
+    && /root/miniconda/bin/conda clean -tipsy \
+    # config ssh service
+    && mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    # Ubuntu 16.04 defaults to 'PermitRootLogin prohibit-password'
+    && sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    # SSH login fix. Otherwise user is kicked off after login
+    && sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd \
+    # dump environment variables into a file so that ssh sessions can see them too
+    && env | grep _ >> /etc/environment
+
+# Add conda to PATH. Doing this here so other RUN steps can be grouped above
+ENV PATH /root/miniconda/bin:${PATH}
+
+# In nvidia/cuda:10.2-devel-ubuntu16.04, the cuBLAS headers moved to another directory
+RUN cp /usr/include/cublas* /usr/local/cuda/include/
+
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/conda/docker/cuda10/Dockerfile b/tool/conda/docker/cuda10/Dockerfile
new file mode 100644
index 0000000..5899aab
--- /dev/null
+++ b/tool/conda/docker/cuda10/Dockerfile
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Ubuntu 18.04 has errors in ssh, so we stay on 16.04
+FROM nvidia/cuda:10.0-devel-ubuntu16.04
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+        git \
+        build-essential \
+        cmake \
+        wget \
+        openssh-server \
+        ca-certificates \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    #
+    # install conda, conda-build and anaconda-client
+    #
+    && wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh \
+    && bash miniconda.sh -b -p /root/miniconda \
+    && /root/miniconda/bin/conda config --set always_yes yes --set changeps1 no \
+    && /root/miniconda/bin/conda update -q conda \
+    && /root/miniconda/bin/conda install -y \
+        conda-build \
+        anaconda-client \
+    && /root/miniconda/bin/conda clean -tipsy \
+    # config ssh service
+    && mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    # Ubuntu 16.04 defaults to PermitRootLogin prohibit-password; allow root login
+    && sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    # SSH login fix. Otherwise user is kicked off after login
+    && sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd \
+    # dump environment variables into /etc/environment so that ssh sessions can see them too
+    && env | grep _ >> /etc/environment
+
+# Add conda to PATH. Doing this here so other RUN steps can be grouped above
+ENV PATH /root/miniconda/bin:${PATH}
+
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/conda/docker/cuda9/Dockerfile b/tool/conda/docker/cuda9/Dockerfile
new file mode 100644
index 0000000..63140c4
--- /dev/null
+++ b/tool/conda/docker/cuda9/Dockerfile
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Change tags to build with different cuda/cudnn versions:
+FROM nvidia/cuda:9.0-devel-ubuntu16.04
+
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+        git \
+        build-essential \
+        cmake \
+        wget \
+        openssh-server \
+        ca-certificates \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    #
+    # install conda, conda-build and anaconda-client
+    #
+    && wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh \
+    && bash miniconda.sh -b -p /root/miniconda \
+    && /root/miniconda/bin/conda config --set always_yes yes --set changeps1 no \
+    && /root/miniconda/bin/conda update -q conda \
+    && /root/miniconda/bin/conda install -y \
+        conda-build \
+        anaconda-client \
+    && /root/miniconda/bin/conda clean -tipsy \
+    # config ssh service
+    && mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    # Ubuntu 16.04 defaults to PermitRootLogin prohibit-password; allow root login
+    && sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    # SSH login fix. Otherwise user is kicked off after login
+    && sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd \
+    # dump environment variables into /etc/environment so that ssh sessions can see them too
+    && env | grep _ >> /etc/environment
+
+# Add conda to PATH. Doing this here so other RUN steps can be grouped above
+ENV PATH /root/miniconda/bin:${PATH}
+
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/conda/gpu/README.md b/tool/conda/gpu/README.md
new file mode 100644
index 0000000..91be515
--- /dev/null
+++ b/tool/conda/gpu/README.md
@@ -0,0 +1,50 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+A conda package specification includes the package name (i.e., singa), the version and the build string (which can be very long).
+To install a specific SINGA package, we run
+
+    conda install -c nusdbsystem singa=<version>=<build string>
+
+It is inconvenient to type all three parts when running the installation command.
+The meta.yaml file in this folder creates a conda package `singa-gpu` as
+an alias of one specific SINGA package.
+It does nothing except create a dummy conda package that depends on one real
+GPU version of the SINGA package. For example, the following line in meta.yaml indicates
+that singa-gpu depends on SINGA version 1.1.1, built with Python 3.6, CUDA 9.0
+and cuDNN 7.1.2:
+
+    - singa 1.1.1 py36_cuda9.0_cudnn7.1.2
+
+
+Therefore, when we run
+
+    conda install -c nusdbsystem singa-gpu
+
+the dependent SINGA package will be installed.
+By default, singa-gpu depends on the latest SINGA (Python 3.6) built on the latest CUDA (and cuDNN).
+When a new SINGA version becomes available, we need to update the meta.yaml file to
+change the dependency.
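+
+Without the alias, the equivalent explicit command would look like the following
+(the version and build string shown are illustrative):
+
+    conda install -c nusdbsystem singa=1.1.1=py36_cuda9.0_cudnn7.1.2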
+
+To build this package and upload it
+
+    conda config --add channels nusdbsystem
+    conda build .
+    anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main <path to the singa-gpu package>
+
+where $ANACONDA_UPLOAD_TOKEN is the upload token associated with the nusdbsystem account on anaconda cloud.
diff --git a/tool/conda/gpu/meta.yaml b/tool/conda/gpu/meta.yaml
new file mode 100644
index 0000000..58ef499
--- /dev/null
+++ b/tool/conda/gpu/meta.yaml
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+package:
+  name: singa-gpu
+  version: {{ environ.get('GIT_DESCRIBE_TAG') | replace("-", ".") }}
+
+source:
+  path: ../../../
+
+requirements:
+  run:
+    - singa {{ environ.get('GIT_DESCRIBE_TAG') | replace("-", ".") }} cudnn7.6.5_cuda10.0_py{{ py }}
+
+build:
+  number: 0
+  string: py{{ py }}
+ 
+
+about:
+  home: http://singa.apache.org/
+  license: Apache V2
+  summary: SINGA is an Apache project for distributed deep learning. Apache disclaimers: http://singa.apache.org/en/index.html#disclaimers
diff --git a/tool/conda/meta.yaml b/tool/conda/meta.yaml
deleted file mode 100644
index 4d27ce7..0000000
--- a/tool/conda/meta.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-package:
-  name: {{ environ.get('SINGA_NAME', 'singa') }}
-  version: "{{ GIT_DESCRIBE_TAG }}"
-
-source:
-  git_url: https://github.com/apache/incubator-singa.git
-
-
-build:
-  number: {{ GIT_DESCRIBE_NUMBER }}
-  script_env:
-    - CONDA_BLD_PATH
-    - SINGA_INCLUDE_PATH
-    - SINGA_LIBRARY_PATH
-
-requirements:
-  build:
-    - python 2.7*
-    - numpy 1.12.0
-    - swig 3.0.2
-    - openblas 0.2.19
-    - protobuf 3.0.0
-    - glog 0.3.4
-    - libgfortran 3.0.0 # [osx]
-    - gcc 4.8.5 # [linux]
-
-  run:
-    - python 2.7*
-    - numpy >=1.12.0
-    - protobuf >=3.0.0
-    - glog >=0.3.4
-    - openblas >=0.2.19
-    - flask >=0.10.1
-    - flask-cors >=3.0.2
-    - pillow >=2.3.0
-    - libgfortran >=3.0.0 # [osx]
-    - libgcc 4.8.5 # [linux]
-
-test:
-  source_files:
-    - test/python/*.py
-  requires:
-    - unittest-xml-reporting
-  test:
-    - python run.py
-
-about:
-  home: http://singa.apache.org/
-  license: Apache V2
-  license_file: LICENSE
-  summary: SINGA is an Apache Incubating project for providing distributed deep learning. Apache disclaimers http://singa.apache.org/en/index.html#disclaimers
diff --git a/tool/conda/singa/README.md b/tool/conda/singa/README.md
new file mode 100644
index 0000000..c35a8b4
--- /dev/null
+++ b/tool/conda/singa/README.md
@@ -0,0 +1,53 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Package SINGA using conda-build
+
+[conda-build](https://conda.io/docs/user-guide/tasks/build-packages/index.html) is a packaging tool, similar in spirit to apt-get, that works with [anaconda cloud](https://anaconda.org/) to manage packages for both Python and C++ libraries.
+
+
+## Environment variables
+
+We export the CUDA version if SINGA is compiled with CUDA enabled. The cuDNN version is fixed by SINGA, and cuDNN is installed from [anaconda cloud](https://anaconda.org/anaconda/cudnn).
+
+    # for SINGA with GPU, e.g. cuda9.0-cudnn7.3.1
+    export CUDA=9.0
+
+Then, we export the flag DIST to indicate whether SINGA is compiled with distributed training enabled.
+
+    # to enable distributed training: DIST=ON, otherwise: DIST=OFF
+    export DIST=OFF
+
+We need to export both CUDA and DIST for the GPU version; for the CPU-only version, we export neither.
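+
+For example, a GPU build with distributed training enabled would export both
+(the CUDA version must be one listed in conda_build_config.yaml):
+
+    export CUDA=10.2
+    export DIST=ON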
+
+## Instructions
+
+After exporting the environment variables, we need to add the necessary conda channels
+
+    conda config --add channels conda-forge
+    conda config --add channels nusdbsystem
+
+Then, we can execute the following command to compile SINGA and package it
+
+    conda-build .  --python 3.6
+
+You will see the package path in the screen output, e.g., `xx/yy/singa-1.2.0-cpu.tar.bz2` or `xx/yy/singa-1.2.0-cudnn7.3.1_cuda9.0.tar.bz2`.
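+
+To upload the built package to anaconda cloud, the same command as in the singa-gpu
+README should apply (assuming you have an upload token):
+
+    anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main <package path>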
+
+To clean the cache
+
+    conda clean -ay
diff --git a/tool/conda/build.sh b/tool/conda/singa/build.sh
similarity index 60%
rename from tool/conda/build.sh
rename to tool/conda/singa/build.sh
index 2716452..7eb5287 100644
--- a/tool/conda/build.sh
+++ b/tool/conda/singa/build.sh
@@ -16,18 +16,32 @@
 #
 
 # to compile swig api files which depend on numpy.i
-export export CPLUS_INCLUDE_PATH=`python -c "import numpy; print numpy.get_include()"`:$CPLUS_INCLUDE_PATH
+# export CPLUS_INCLUDE_PATH=`python -c "from __future__ import print_function; import numpy; print(numpy.get_include())"`:$CPLUS_INCLUDE_PATH
 
 # to let cmake use the dependent libs installed by conda, including python
-export CMAKE_PREFIX_PATH=$PREFIX
-export CMAKE_INCLUDE_PATH=$SINGA_INCLUDE_PATH
-export CMAKE_LIBRARY_PATH=$SINGA_LIBRARY_PATH
+export CMAKE_PREFIX_PATH=$PREFIX:$CMAKE_PREFIX_PATH
+export CMAKE_INCLUDE_PATH=$PREFIX/include:$CMAKE_INCLUDE_PATH
+export CMAKE_LIBRARY_PATH=$PREFIX/lib:$CMAKE_LIBRARY_PATH
+
+
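+# Map the CUDA/DIST environment variables exported by the user (see the README in
+# this folder) to the cmake flags USE_CUDA and USE_DIST below.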
+# if [ -z ${CUDA+x} ]; then
+if [ -z "$CUDA" ]; then
+	USE_CUDA=OFF
+else
+	USE_CUDA=ON
+fi
+
+
+if [ "$DIST" == "ON" ]; then
+	USE_DIST=ON
+else
+	USE_DIST=OFF
+fi
 
 mkdir build
 cd build
-USE_CUDA=ON
-# singa with cuda and cudnn has the name as : singa-cudaxx-cudnnxx
-if  [ "$PKG_NAME" == "singa" ]; then USE_CUDA=OFF; fi
-cmake -DCMAKE_INSTALL_PREFIX=$PREFIX -DUSE_CUDA=$USE_CUDA ..
+cmake -DCMAKE_INSTALL_PREFIX=$PREFIX -DUSE_CUDA=$USE_CUDA \
+	-DUSE_PYTHON3=ON -DUSE_DNNL=ON -DUSE_DIST=$USE_DIST -DCMAKE_OSX_SYSROOT=${CONDA_BUILD_SYSROOT} ..
+
 make
 make install
diff --git a/tool/conda/singa/conda_build_config.yaml b/tool/conda/singa/conda_build_config.yaml
new file mode 100644
index 0000000..ddef9d0
--- /dev/null
+++ b/tool/conda/singa/conda_build_config.yaml
@@ -0,0 +1,49 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+c_compiler_version:         # [linux]
+    - 5.4                   # [linux]
+cxx_compiler_version:       # [linux]
+    - 5.4                   # [linux]
+# https://docs.conda.io/projects/conda-build/en/latest/resources/compiler-tools.html#macos-sdk
+CONDA_BUILD_SYSROOT:
+    - "/Applications/Xcode_11.7.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk" # [osx]
+cudnn:                      # [linux]
+    - "7.6.5 cuda10.2_0"    # [environ.get("CUDA")=="10.2"]
+    - "7.6.5 cuda10.0_0"    # [environ.get("CUDA")=="10.0"]
+    - "7.6.5 cuda9.0_0"     # [environ.get("CUDA")=="9.0"]
+dnnl:
+    - 1.1
+python:
+    - 3.6
+#    - 3.7
+nccl:
+    - 2.6.4.1               # [environ.get("CUDA")=="10.2"]
+    - 2.4.8.1               # [environ.get("CUDA")=="10.0"]
+    - 2.4.8.1               # [environ.get("CUDA")=="9.0"]
+mpich:
+    - 3.3.2
+build_str:
+    - "cudnn7.6.5_cuda10.2"   # [environ.get("CUDA")=="10.2"] && [environ.get("DIST")=="OFF"]
+    - "cudnn7.6.5_cuda10.0"   # [environ.get("CUDA")=="10.0"] && [environ.get("DIST")=="OFF"]
+    - "cudnn7.6.5_cuda9.0"    # [environ.get("CUDA")=="9.0"] && [environ.get("DIST")=="OFF"]
+    - "cpu"                   # [environ.get("CUDA", "")== ""]
+    - "cudnn7.6.5_cuda10.2_nccl2.6.4.1_mpich3.3.2"     # [environ.get("CUDA")=="10.2"] && [environ.get("DIST")=="ON"]
+    - "cudnn7.6.5_cuda10.0_nccl2.4.8.1_mpich3.3.2"     # [environ.get("CUDA")=="10.0"] && [environ.get("DIST")=="ON"]
+    - "cudnn7.6.5_cuda9.0_nccl2.4.8.1_mpich3.3.2"      # [environ.get("CUDA")=="9.0"] && [environ.get("DIST")=="ON"]
diff --git a/tool/conda/singa/meta.yaml b/tool/conda/singa/meta.yaml
new file mode 100644
index 0000000..cface0d
--- /dev/null
+++ b/tool/conda/singa/meta.yaml
@@ -0,0 +1,92 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#templating-with-jinja
+# {% set data = load_setup_py_data(setup_file='../../../python/singa/setup.py', from_recipe_dir=True) %}
+
+{% set version = "2.1.0.dev" %}
+
+package:
+  name: singa
+  version: {{ environ.get('GIT_DESCRIBE_TAG', version) | replace("-", ".") }}
+
+source:
+  path: ../../../
+  # git_url: https://github.com/apache/singa.git
+
+build:
+  number: 0
+  script_env:
+    - CUDA   # run `export CUDA=9.0` in the terminal
+    - DIST   # run `export DIST=ON` in the terminal
+  string: {{ build_str }}_py{{ py }}
+
+requirements:
+  build:
+    - {{ compiler('cxx') }}
+    - {{ compiler('c') }}
+    - cmake >=3.12.2
+    - make # [unix]
+
+  host:
+    - swig 3.0.12
+    - openblas 0.3.9
+    - protobuf 3.10.0         # [osx]
+    - protobuf 3.9.2          # [linux]
+    - glog 0.3.5
+    - numpy >=1.16,<2.0
+    - pytest
+    - deprecated 1.2.7
+    - cudnn {{ cudnn }}       # ['cudnn' in str(build_str)]
+    - dnnl {{ dnnl }}
+    - python {{ python }}
+    - nccl {{ nccl }}         # ['nccl' in str(build_str)]
+    - mpich {{ mpich }}       # ['mpich' in str(build_str)]
+
+  run:
+    - {{ pin_compatible('glog', max_pin='x.x') }}
+    - {{ pin_compatible('numpy', max_pin='x.x') }}
+    - {{ pin_compatible('dnnl', max_pin='x.x') }}
+    - cudnn {{ cudnn }}       # ['cudnn' in str(build_str)]
+    - python {{ python }}
+    - nccl {{ nccl }}         # ['nccl' in str(build_str)]
+    - mpich {{ mpich }}       # ['mpich' in str(build_str)]
+    - libprotobuf 3.10.0      # [osx]
+    - libprotobuf 3.9.2       # [linux]
+    - libopenblas 0.3.9
+    - pillow
+    - future
+    - tqdm
+    - onnx 1.6.0
+    - deprecated 1.2.7
+
+test:
+  requires:
+    - pytest-cov
+    - tabulate
+    - codecov
+  source_files:
+    - test/python/*.py
+  commands:
+    - {{ environ.get('TEST_COMMAND', 'cd test/python && python run.py') }}
+
+about:
+  home: http://singa.apache.org/
+  license: Apache V2
+  license_file: LICENSE
+  summary: SINGA is an Apache project for distributed deep learning.
diff --git a/tool/cpplint.py b/tool/cpplint.py
deleted file mode 100755
index 7b29e90..0000000
--- a/tool/cpplint.py
+++ /dev/null
@@ -1,6327 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2009 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#    * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#    * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#    * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Does google-lint on c++ files.
-
-The goal of this script is to identify places in the code that *may*
-be in non-compliance with google style.  It does not attempt to fix
-up these problems -- the point is to educate.  It does also not
-attempt to find all problems, or to ensure that everything it does
-find is legitimately a problem.
-
-In particular, we can get very confused by /* and // inside strings!
-We do a small hack, which is to ignore //'s with "'s after them on the
-same line, but it is far from perfect (in either direction).
-"""
-
-import codecs
-import copy
-import getopt
-import math  # for log
-import os
-import re
-import sre_compile
-import string
-import sys
-import unicodedata
-
-
-_USAGE = """
-Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
-                   [--counting=total|toplevel|detailed] [--root=subdir]
-                   [--linelength=digits]
-        <file> [file] ...
-
-  The style guidelines this tries to follow are those in
-    http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
-
-  Every problem is given a confidence score from 1-5, with 5 meaning we are
-  certain of the problem, and 1 meaning it could be a legitimate construct.
-  This will miss some errors, and is not a substitute for a code review.
-
-  To suppress false-positive errors of a certain category, add a
-  'NOLINT(category)' comment to the line.  NOLINT or NOLINT(*)
-  suppresses errors of all categories on that line.
-
-  The files passed in will be linted; at least one file must be provided.
-  Default linted extensions are .cc, .cpp, .cu, .cuh and .h.  Change the
-  extensions with the --extensions flag.
-
-  Flags:
-
-    output=vs7
-      By default, the output is formatted to ease emacs parsing.  Visual Studio
-      compatible output (vs7) may also be used.  Other formats are unsupported.
-
-    verbose=#
-      Specify a number 0-5 to restrict errors to certain verbosity levels.
-
-    filter=-x,+y,...
-      Specify a comma-separated list of category-filters to apply: only
-      error messages whose category names pass the filters will be printed.
-      (Category names are printed with the message and look like
-      "[whitespace/indent]".)  Filters are evaluated left to right.
-      "-FOO" and "FOO" means "do not print categories that start with FOO".
-      "+FOO" means "do print categories that start with FOO".
-
-      Examples: --filter=-whitespace,+whitespace/braces
-                --filter=whitespace,runtime/printf,+runtime/printf_format
-                --filter=-,+build/include_what_you_use
-
-      To see a list of all the categories used in cpplint, pass no arg:
-         --filter=
-
-    counting=total|toplevel|detailed
-      The total number of errors found is always printed. If
-      'toplevel' is provided, then the count of errors in each of
-      the top-level categories like 'build' and 'whitespace' will
-      also be printed. If 'detailed' is provided, then a count
-      is provided for each category like 'build/class'.
-
-    root=subdir
-      The root directory used for deriving header guard CPP variable.
-      By default, the header guard CPP variable is calculated as the relative
-      path to the directory that contains .git, .hg, or .svn.  When this flag
-      is specified, the relative path is calculated from the specified
-      directory. If the specified directory does not exist, this flag is
-      ignored.
-
-      Examples:
-        Assuming that src/.git exists, the header guard CPP variables for
-        src/chrome/browser/ui/browser.h are:
-
-        No flag => CHROME_BROWSER_UI_BROWSER_H_
-        --root=chrome => BROWSER_UI_BROWSER_H_
-        --root=chrome/browser => UI_BROWSER_H_
-
-    linelength=digits
-      This is the allowed line length for the project. The default value is
-      80 characters.
-
-      Examples:
-        --linelength=120
-
-    extensions=extension,extension,...
-      The allowed file extensions that cpplint will check
-
-      Examples:
-        --extensions=hpp,cpp
-
-    cpplint.py supports per-directory configurations specified in CPPLINT.cfg
-    files. CPPLINT.cfg file can contain a number of key=value pairs.
-    Currently the following options are supported:
-
-      set noparent
-      filter=+filter1,-filter2,...
-      exclude_files=regex
-      linelength=80
-
-    "set noparent" option prevents cpplint from traversing directory tree
-    upwards looking for more .cfg files in parent directories. This option
-    is usually placed in the top-level project directory.
-
-    The "filter" option is similar in function to --filter flag. It specifies
-    message filters in addition to the |_DEFAULT_FILTERS| and those specified
-    through --filter command-line flag.
-
-    "exclude_files" allows to specify a regular expression to be matched against
-    a file name. If the expression matches, the file is skipped and not run
-    through liner.
-
-    "linelength" allows to specify the allowed line length for the project.
-
-    CPPLINT.cfg has an effect on files in the same directory and all
-    sub-directories, unless overridden by a nested configuration file.
-
-      Example file:
-        filter=-build/include_order,+build/include_alpha
-        exclude_files=.*\.cc
-
-    The above example disables build/include_order warning and enables
-    build/include_alpha as well as excludes all .cc from being
-    processed by linter, in the current directory (where the .cfg
-    file is located) and all sub-directories.
-"""
-
-# We categorize each error message we print.  Here are the categories.
-# We want an explicit list so we can list them all in cpplint --filter=.
-# If you add a new error message with a new category, add it to the list
-# here!  cpplint_unittest.py should tell you if you forget to do this.
-_ERROR_CATEGORIES = [
-    'build/class',
-    'build/c++11',
-    'build/deprecated',
-    'build/endif_comment',
-    'build/explicit_make_pair',
-    'build/forward_decl',
-    'build/header_guard',
-    'build/include',
-    'build/include_alpha',
-    'build/include_order',
-    'build/include_what_you_use',
-    'build/namespaces',
-    'build/printf_format',
-    'build/storage_class',
-    'legal/copyright',
-    'readability/alt_tokens',
-    'readability/braces',
-    'readability/casting',
-    'readability/check',
-    'readability/constructors',
-    'readability/fn_size',
-    'readability/function',
-    'readability/inheritance',
-    'readability/multiline_comment',
-    'readability/multiline_string',
-    'readability/namespace',
-    'readability/nolint',
-    'readability/nul',
-    'readability/strings',
-    'readability/todo',
-    'readability/utf8',
-    'runtime/arrays',
-    'runtime/casting',
-    'runtime/explicit',
-    'runtime/int',
-    'runtime/init',
-    'runtime/invalid_increment',
-    'runtime/member_string_references',
-    'runtime/memset',
-    'runtime/indentation_namespace',
-    'runtime/operator',
-    'runtime/printf',
-    'runtime/printf_format',
-    'runtime/references',
-    'runtime/string',
-    'runtime/threadsafe_fn',
-    'runtime/vlog',
-    'whitespace/blank_line',
-    'whitespace/braces',
-    'whitespace/comma',
-    'whitespace/comments',
-    'whitespace/empty_conditional_body',
-    'whitespace/empty_loop_body',
-    'whitespace/end_of_line',
-    'whitespace/ending_newline',
-    'whitespace/forcolon',
-    'whitespace/indent',
-    'whitespace/line_length',
-    'whitespace/newline',
-    'whitespace/operators',
-    'whitespace/parens',
-    'whitespace/semicolon',
-    'whitespace/tab',
-    'whitespace/todo',
-    ]
-
-# These error categories are no longer enforced by cpplint, but for backwards-
-# compatibility they may still appear in NOLINT comments.
-_LEGACY_ERROR_CATEGORIES = [
-    'readability/streams',
-    ]
-
-# The default state of the category filter. This is overridden by the --filter=
-# flag. By default all errors are on, so only add here categories that should be
-# off by default (i.e., categories that must be enabled by the --filter= flags).
-# All entries here should start with a '-' or '+', as in the --filter= flag.
-_DEFAULT_FILTERS = ['-build/include_alpha']
-
-# We used to check for high-bit characters, but after much discussion we
-# decided those were OK, as long as they were in UTF-8 and didn't represent
-# hard-coded international strings, which belong in a separate i18n file.
-
-# C++ headers
-_CPP_HEADERS = frozenset([
-    # Legacy
-    'algobase.h',
-    'algo.h',
-    'alloc.h',
-    'builtinbuf.h',
-    'bvector.h',
-    'complex.h',
-    'defalloc.h',
-    'deque.h',
-    'editbuf.h',
-    'fstream.h',
-    'function.h',
-    'hash_map',
-    'hash_map.h',
-    'hash_set',
-    'hash_set.h',
-    'hashtable.h',
-    'heap.h',
-    'indstream.h',
-    'iomanip.h',
-    'iostream.h',
-    'istream.h',
-    'iterator.h',
-    'list.h',
-    'map.h',
-    'multimap.h',
-    'multiset.h',
-    'ostream.h',
-    'pair.h',
-    'parsestream.h',
-    'pfstream.h',
-    'procbuf.h',
-    'pthread_alloc',
-    'pthread_alloc.h',
-    'rope',
-    'rope.h',
-    'ropeimpl.h',
-    'set.h',
-    'slist',
-    'slist.h',
-    'stack.h',
-    'stdiostream.h',
-    'stl_alloc.h',
-    'stl_relops.h',
-    'streambuf.h',
-    'stream.h',
-    'strfile.h',
-    'strstream.h',
-    'tempbuf.h',
-    'tree.h',
-    'type_traits.h',
-    'vector.h',
-    # 17.6.1.2 C++ library headers
-    'algorithm',
-    'array',
-    'atomic',
-    'bitset',
-    'chrono',
-    'codecvt',
-    'complex',
-    'condition_variable',
-    'deque',
-    'exception',
-    'forward_list',
-    'fstream',
-    'functional',
-    'future',
-    'initializer_list',
-    'iomanip',
-    'ios',
-    'iosfwd',
-    'iostream',
-    'istream',
-    'iterator',
-    'limits',
-    'list',
-    'locale',
-    'map',
-    'memory',
-    'mutex',
-    'new',
-    'numeric',
-    'ostream',
-    'queue',
-    'random',
-    'ratio',
-    'regex',
-    'set',
-    'sstream',
-    'stack',
-    'stdexcept',
-    'streambuf',
-    'string',
-    'strstream',
-    'system_error',
-    'thread',
-    'tuple',
-    'typeindex',
-    'typeinfo',
-    'type_traits',
-    'unordered_map',
-    'unordered_set',
-    'utility',
-    'valarray',
-    'vector',
-    # 17.6.1.2 C++ headers for C library facilities
-    'cassert',
-    'ccomplex',
-    'cctype',
-    'cerrno',
-    'cfenv',
-    'cfloat',
-    'cinttypes',
-    'ciso646',
-    'climits',
-    'clocale',
-    'cmath',
-    'csetjmp',
-    'csignal',
-    'cstdalign',
-    'cstdarg',
-    'cstdbool',
-    'cstddef',
-    'cstdint',
-    'cstdio',
-    'cstdlib',
-    'cstring',
-    'ctgmath',
-    'ctime',
-    'cuchar',
-    'cwchar',
-    'cwctype',
-    ])
-
-
-# These headers are excluded from [build/include] and [build/include_order]
-# checks:
-# - Anything not following google file name conventions (containing an
-#   uppercase character, such as Python.h or nsStringAPI.h, for example).
-# - Lua headers.
-_THIRD_PARTY_HEADERS_PATTERN = re.compile(
-    r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
-
-
-# Assertion macros.  These are defined in base/logging.h and
-# testing/base/gunit.h.  Note that the _M versions need to come first
-# for substring matching to work.
-_CHECK_MACROS = [
-    'DCHECK', 'CHECK',
-    'EXPECT_TRUE_M', 'EXPECT_TRUE',
-    'ASSERT_TRUE_M', 'ASSERT_TRUE',
-    'EXPECT_FALSE_M', 'EXPECT_FALSE',
-    'ASSERT_FALSE_M', 'ASSERT_FALSE',
-    ]
-
-# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
-_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
-
-for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
-                        ('>=', 'GE'), ('>', 'GT'),
-                        ('<=', 'LE'), ('<', 'LT')]:
-  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
-  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
-  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
-  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
-  _CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_%s_M' % replacement
-  _CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_%s_M' % replacement
-
-for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
-                            ('>=', 'LT'), ('>', 'LE'),
-                            ('<=', 'GT'), ('<', 'GE')]:
-  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
-  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
-  _CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_%s_M' % inv_replacement
-  _CHECK_REPLACEMENT['ASSERT_FALSE_M'][op] = 'ASSERT_%s_M' % inv_replacement
-
-# Alternative tokens and their replacements.  For full list, see section 2.5
-# Alternative tokens [lex.digraph] in the C++ standard.
-#
-# Digraphs (such as '%:') are not included here since it's a mess to
-# match those on a word boundary.
-_ALT_TOKEN_REPLACEMENT = {
-    'and': '&&',
-    'bitor': '|',
-    'or': '||',
-    'xor': '^',
-    'compl': '~',
-    'bitand': '&',
-    'and_eq': '&=',
-    'or_eq': '|=',
-    'xor_eq': '^=',
-    'not': '!',
-    'not_eq': '!='
-    }
-
-# Compile regular expression that matches all the above keywords.  The "[ =()]"
-# bit is meant to avoid matching these keywords outside of boolean expressions.
-#
-# False positives include C-style multi-line comments and multi-line strings
-# but those have always been troublesome for cpplint.
-_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
-    r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
-
-
-# These constants define types of headers for use with
-# _IncludeState.CheckNextIncludeOrder().
-_C_SYS_HEADER = 1
-_CPP_SYS_HEADER = 2
-_LIKELY_MY_HEADER = 3
-_POSSIBLE_MY_HEADER = 4
-_OTHER_HEADER = 5
-
-# These constants define the current inline assembly state
-_NO_ASM = 0       # Outside of inline assembly block
-_INSIDE_ASM = 1   # Inside inline assembly block
-_END_ASM = 2      # Last line of inline assembly block
-_BLOCK_ASM = 3    # The whole block is an inline assembly block
-
-# Match start of assembly blocks
-_MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
-                        r'(?:\s+(volatile|__volatile__))?'
-                        r'\s*[{(]')
-
-
-_regexp_compile_cache = {}
-
-# {str, set(int)}: a map from error categories to sets of linenumbers
-# on which those errors are expected and should be suppressed.
-_error_suppressions = {}
-
-# The root directory used for deriving header guard CPP variable.
-# This is set by --root flag.
-_root = None
-
-# The allowed line length of files.
-# This is set by --linelength flag.
-_line_length = 80
-
-# The allowed extensions for file names
-# This is set by --extensions flag.
-_valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh'])
-
-def ParseNolintSuppressions(filename, raw_line, linenum, error):
-  """Updates the global list of error-suppressions.
-
-  Parses any NOLINT comments on the current line, updating the global
-  error_suppressions store.  Reports an error if the NOLINT comment
-  was malformed.
-
-  Args:
-    filename: str, the name of the input file.
-    raw_line: str, the line of input text, with comments.
-    linenum: int, the number of the current line.
-    error: function, an error handler.
-  """
-  matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
-  if matched:
-    if matched.group(1):
-      suppressed_line = linenum + 1
-    else:
-      suppressed_line = linenum
-    category = matched.group(2)
-    if category in (None, '(*)'):  # => "suppress all"
-      _error_suppressions.setdefault(None, set()).add(suppressed_line)
-    else:
-      if category.startswith('(') and category.endswith(')'):
-        category = category[1:-1]
-        if category in _ERROR_CATEGORIES:
-          _error_suppressions.setdefault(category, set()).add(suppressed_line)
-        elif category not in _LEGACY_ERROR_CATEGORIES:
-          error(filename, linenum, 'readability/nolint', 5,
-                'Unknown NOLINT error category: %s' % category)
-
-
-def ResetNolintSuppressions():
-  """Resets the set of NOLINT suppressions to empty."""
-  _error_suppressions.clear()
-
-
-def IsErrorSuppressedByNolint(category, linenum):
-  """Returns true if the specified error category is suppressed on this line.
-
-  Consults the global error_suppressions map populated by
-  ParseNolintSuppressions/ResetNolintSuppressions.
-
-  Args:
-    category: str, the category of the error.
-    linenum: int, the current line number.
-  Returns:
-    bool, True iff the error should be suppressed due to a NOLINT comment.
-  """
-  return (linenum in _error_suppressions.get(category, set()) or
-          linenum in _error_suppressions.get(None, set()))
-
-
-def Match(pattern, s):
-  """Matches the string with the pattern, caching the compiled regexp."""
-  # The regexp compilation caching is inlined in both Match and Search for
-  # performance reasons; factoring it out into a separate function turns out
-  # to be noticeably expensive.
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].match(s)
-
-
-def ReplaceAll(pattern, rep, s):
-  """Replaces instances of pattern in a string with a replacement.
-
-  The compiled regex is kept in a cache shared by Match and Search.
-
-  Args:
-    pattern: regex pattern
-    rep: replacement text
-    s: search string
-
-  Returns:
-    string with replacements made (or original string if no replacements)
-  """
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].sub(rep, s)
-
-
-def Search(pattern, s):
-  """Searches the string for the pattern, caching the compiled regexp."""
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].search(s)
-
-
-class _IncludeState(object):
-  """Tracks line numbers for includes, and the order in which includes appear.
-
-  include_list contains list of lists of (header, line number) pairs.
-  It's a lists of lists rather than just one flat list to make it
-  easier to update across preprocessor boundaries.
-
-  Call CheckNextIncludeOrder() once for each header in the file, passing
-  in the type constants defined above. Calls in an illegal order will
-  raise an _IncludeError with an appropriate error message.
-
-  """
-  # self._section will move monotonically through this set. If it ever
-  # needs to move backwards, CheckNextIncludeOrder will raise an error.
-  _INITIAL_SECTION = 0
-  _MY_H_SECTION = 1
-  _C_SECTION = 2
-  _CPP_SECTION = 3
-  _OTHER_H_SECTION = 4
-
-  _TYPE_NAMES = {
-      _C_SYS_HEADER: 'C system header',
-      _CPP_SYS_HEADER: 'C++ system header',
-      _LIKELY_MY_HEADER: 'header this file implements',
-      _POSSIBLE_MY_HEADER: 'header this file may implement',
-      _OTHER_HEADER: 'other header',
-      }
-  _SECTION_NAMES = {
-      _INITIAL_SECTION: "... nothing. (This can't be an error.)",
-      _MY_H_SECTION: 'a header this file implements',
-      _C_SECTION: 'C system header',
-      _CPP_SECTION: 'C++ system header',
-      _OTHER_H_SECTION: 'other header',
-      }
-
-  def __init__(self):
-    self.include_list = [[]]
-    self.ResetSection('')
-
-  def FindHeader(self, header):
-    """Check if a header has already been included.
-
-    Args:
-      header: header to check.
-    Returns:
-      Line number of previous occurrence, or -1 if the header has not
-      been seen before.
-    """
-    for section_list in self.include_list:
-      for f in section_list:
-        if f[0] == header:
-          return f[1]
-    return -1
-
-  def ResetSection(self, directive):
-    """Reset section checking for preprocessor directive.
-
-    Args:
-      directive: preprocessor directive (e.g. "if", "else").
-    """
-    # The name of the current section.
-    self._section = self._INITIAL_SECTION
-    # The path of last found header.
-    self._last_header = ''
-
-    # Update list of includes.  Note that we never pop from the
-    # include list.
-    if directive in ('if', 'ifdef', 'ifndef'):
-      self.include_list.append([])
-    elif directive in ('else', 'elif'):
-      self.include_list[-1] = []
-
-  def SetLastHeader(self, header_path):
-    self._last_header = header_path
-
-  def CanonicalizeAlphabeticalOrder(self, header_path):
-    """Returns a path canonicalized for alphabetical comparison.
-
-    - replaces "-" with "_" so they both cmp the same.
-    - removes '-inl' since we don't require them to be after the main header.
-    - lowercase everything, just in case.
-
-    Args:
-      header_path: Path to be canonicalized.
-
-    Returns:
-      Canonicalized path.
-    """
-    return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
-
-  def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
-    """Check if a header is in alphabetical order with the previous header.
-
-    Args:
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      header_path: Canonicalized header to be checked.
-
-    Returns:
-      Returns true if the header is in alphabetical order.
-    """
-    # If previous section is different from current section, _last_header will
-    # be reset to empty string, so it's always less than current header.
-    #
-    # If previous line was a blank line, assume that the headers are
-    # intentionally sorted the way they are.
-    if (self._last_header > header_path and
-        Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
-      return False
-    return True
-
-  def CheckNextIncludeOrder(self, header_type):
-    """Returns a non-empty error message if the next header is out of order.
-
-    This function also updates the internal state to be ready to check
-    the next include.
-
-    Args:
-      header_type: One of the _XXX_HEADER constants defined above.
-
-    Returns:
-      The empty string if the header is in the right order, or an
-      error message describing what's wrong.
-
-    """
-    error_message = ('Found %s after %s' %
-                     (self._TYPE_NAMES[header_type],
-                      self._SECTION_NAMES[self._section]))
-
-    last_section = self._section
-
-    if header_type == _C_SYS_HEADER:
-      if self._section <= self._C_SECTION:
-        self._section = self._C_SECTION
-      else:
-        self._last_header = ''
-        return error_message
-    elif header_type == _CPP_SYS_HEADER:
-      if self._section <= self._CPP_SECTION:
-        self._section = self._CPP_SECTION
-      else:
-        self._last_header = ''
-        return error_message
-    elif header_type == _LIKELY_MY_HEADER:
-      if self._section <= self._MY_H_SECTION:
-        self._section = self._MY_H_SECTION
-      else:
-        self._section = self._OTHER_H_SECTION
-    elif header_type == _POSSIBLE_MY_HEADER:
-      if self._section <= self._MY_H_SECTION:
-        self._section = self._MY_H_SECTION
-      else:
-        # This will always be the fallback because we're not sure
-        # enough that the header is associated with this file.
-        self._section = self._OTHER_H_SECTION
-    else:
-      assert header_type == _OTHER_HEADER
-      self._section = self._OTHER_H_SECTION
-
-    if last_section != self._section:
-      self._last_header = ''
-
-    return ''
-
-
-class _CppLintState(object):
-  """Maintains module-wide state.."""
-
-  def __init__(self):
-    self.verbose_level = 1  # global setting.
-    self.error_count = 0    # global count of reported errors
-    # filters to apply when emitting error messages
-    self.filters = _DEFAULT_FILTERS[:]
-    # backup of filter list. Used to restore the state after each file.
-    self._filters_backup = self.filters[:]
-    self.counting = 'total'  # In what way are we counting errors?
-    self.errors_by_category = {}  # string to int dict storing error counts
-
-    # output format:
-    # "emacs" - format that emacs can parse (default)
-    # "vs7" - format that Microsoft Visual Studio 7 can parse
-    self.output_format = 'emacs'
-
-  def SetOutputFormat(self, output_format):
-    """Sets the output format for errors."""
-    self.output_format = output_format
-
-  def SetVerboseLevel(self, level):
-    """Sets the module's verbosity, and returns the previous setting."""
-    last_verbose_level = self.verbose_level
-    self.verbose_level = level
-    return last_verbose_level
-
-  def SetCountingStyle(self, counting_style):
-    """Sets the module's counting options."""
-    self.counting = counting_style
-
-  def SetFilters(self, filters):
-    """Sets the error-message filters.
-
-    These filters are applied when deciding whether to emit a given
-    error message.
-
-    Args:
-      filters: A string of comma-separated filters (eg "+whitespace/indent").
-               Each filter should start with + or -; else we die.
-
-    Raises:
-      ValueError: The comma-separated filters did not all start with '+' or '-'.
-                  E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
-    """
-    # Default filters always have less priority than the flag ones.
-    self.filters = _DEFAULT_FILTERS[:]
-    self.AddFilters(filters)
-
-  def AddFilters(self, filters):
-    """ Adds more filters to the existing list of error-message filters. """
-    for filt in filters.split(','):
-      clean_filt = filt.strip()
-      if clean_filt:
-        self.filters.append(clean_filt)
-    for filt in self.filters:
-      if not (filt.startswith('+') or filt.startswith('-')):
-        raise ValueError('Every filter in --filters must start with + or -'
-                         ' (%s does not)' % filt)
-
-  def BackupFilters(self):
-    """ Saves the current filter list to backup storage."""
-    self._filters_backup = self.filters[:]
-
-  def RestoreFilters(self):
-    """ Restores filters previously backed up."""
-    self.filters = self._filters_backup[:]
-
-  def ResetErrorCounts(self):
-    """Sets the module's error statistic back to zero."""
-    self.error_count = 0
-    self.errors_by_category = {}
-
-  def IncrementErrorCount(self, category):
-    """Bumps the module's error statistic."""
-    self.error_count += 1
-    if self.counting in ('toplevel', 'detailed'):
-      if self.counting != 'detailed':
-        category = category.split('/')[0]
-      if category not in self.errors_by_category:
-        self.errors_by_category[category] = 0
-      self.errors_by_category[category] += 1
-
-  def PrintErrorCounts(self):
-    """Print a summary of errors by category, and the total."""
-    for category, count in self.errors_by_category.iteritems():
-      sys.stderr.write('Category \'%s\' errors found: %d\n' %
-                       (category, count))
-    sys.stderr.write('Total errors found: %d\n' % self.error_count)
-
-_cpplint_state = _CppLintState()
-
-
-def _OutputFormat():
-  """Gets the module's output format."""
-  return _cpplint_state.output_format
-
-
-def _SetOutputFormat(output_format):
-  """Sets the module's output format."""
-  _cpplint_state.SetOutputFormat(output_format)
-
-
-def _VerboseLevel():
-  """Returns the module's verbosity setting."""
-  return _cpplint_state.verbose_level
-
-
-def _SetVerboseLevel(level):
-  """Sets the module's verbosity, and returns the previous setting."""
-  return _cpplint_state.SetVerboseLevel(level)
-
-
-def _SetCountingStyle(level):
-  """Sets the module's counting options."""
-  _cpplint_state.SetCountingStyle(level)
-
-
-def _Filters():
-  """Returns the module's list of output filters, as a list."""
-  return _cpplint_state.filters
-
-
-def _SetFilters(filters):
-  """Sets the module's error-message filters.
-
-  These filters are applied when deciding whether to emit a given
-  error message.
-
-  Args:
-    filters: A string of comma-separated filters (eg "whitespace/indent").
-             Each filter should start with + or -; else we die.
-  """
-  _cpplint_state.SetFilters(filters)
-
-def _AddFilters(filters):
-  """Adds more filter overrides.
-
-  Unlike _SetFilters, this function does not reset the current list of filters
-  available.
-
-  Args:
-    filters: A string of comma-separated filters (eg "whitespace/indent").
-             Each filter should start with + or -; else we die.
-  """
-  _cpplint_state.AddFilters(filters)
-
-def _BackupFilters():
-  """ Saves the current filter list to backup storage."""
-  _cpplint_state.BackupFilters()
-
-def _RestoreFilters():
-  """ Restores filters previously backed up."""
-  _cpplint_state.RestoreFilters()
-
-class _FunctionState(object):
-  """Tracks current function name and the number of lines in its body."""
-
-  _NORMAL_TRIGGER = 250  # for --v=0, 500 for --v=1, etc.
-  _TEST_TRIGGER = 400    # about 50% more than _NORMAL_TRIGGER.
-
-  def __init__(self):
-    self.in_a_function = False
-    self.lines_in_function = 0
-    self.current_function = ''
-
-  def Begin(self, function_name):
-    """Start analyzing function body.
-
-    Args:
-      function_name: The name of the function being tracked.
-    """
-    self.in_a_function = True
-    self.lines_in_function = 0
-    self.current_function = function_name
-
-  def Count(self):
-    """Count line in current function body."""
-    if self.in_a_function:
-      self.lines_in_function += 1
-
-  def Check(self, error, filename, linenum):
-    """Report if too many lines in function body.
-
-    Args:
-      error: The function to call with any errors found.
-      filename: The name of the current file.
-      linenum: The number of the line to check.
-    """
-    if Match(r'T(EST|est)', self.current_function):
-      base_trigger = self._TEST_TRIGGER
-    else:
-      base_trigger = self._NORMAL_TRIGGER
-    trigger = base_trigger * 2**_VerboseLevel()
-
-    if self.lines_in_function > trigger:
-      error_level = int(math.log(self.lines_in_function / base_trigger, 2))
-      # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...
-      if error_level > 5:
-        error_level = 5
-      error(filename, linenum, 'readability/fn_size', error_level,
-            'Small and focused functions are preferred:'
-            ' %s has %d non-comment lines'
-            ' (error triggered by exceeding %d lines).'  % (
-                self.current_function, self.lines_in_function, trigger))
-
-  def End(self):
-    """Stop analyzing function body."""
-    self.in_a_function = False
-
-
-class _IncludeError(Exception):
-  """Indicates a problem with the include order in a file."""
-  pass
-
-
-class FileInfo(object):
-  """Provides utility functions for filenames.
-
-  FileInfo provides easy access to the components of a file's path
-  relative to the project root.
-  """
-
-  def __init__(self, filename):
-    self._filename = filename
-
-  def FullName(self):
-    """Make Windows paths like Unix."""
-    return os.path.abspath(self._filename).replace('\\', '/')
-
-  def RepositoryName(self):
-    """FullName after removing the local path to the repository.
-
-    If we have a real absolute path name here we can try to do something smart:
-    detecting the root of the checkout and truncating /path/to/checkout from
-    the name so that we get header guards that don't include things like
-    "C:\Documents and Settings\..." or "/home/username/..." in them and thus
-    people on different computers who have checked the source out to different
-    locations won't see bogus errors.
-    """
-    fullname = self.FullName()
-
-    if os.path.exists(fullname):
-      project_dir = os.path.dirname(fullname)
-
-      if os.path.exists(os.path.join(project_dir, ".svn")):
-        # If there's a .svn file in the current directory, we recursively look
-        # up the directory tree for the top of the SVN checkout
-        root_dir = project_dir
-        one_up_dir = os.path.dirname(root_dir)
-        while os.path.exists(os.path.join(one_up_dir, ".svn")):
-          root_dir = os.path.dirname(root_dir)
-          one_up_dir = os.path.dirname(one_up_dir)
-
-        prefix = os.path.commonprefix([root_dir, project_dir])
-        return fullname[len(prefix) + 1:]
-
-      # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
-      # searching up from the current path.
-      root_dir = os.path.dirname(fullname)
-      while (root_dir != os.path.dirname(root_dir) and
-             not os.path.exists(os.path.join(root_dir, ".git")) and
-             not os.path.exists(os.path.join(root_dir, ".hg")) and
-             not os.path.exists(os.path.join(root_dir, ".svn"))):
-        root_dir = os.path.dirname(root_dir)
-
-      if (os.path.exists(os.path.join(root_dir, ".git")) or
-          os.path.exists(os.path.join(root_dir, ".hg")) or
-          os.path.exists(os.path.join(root_dir, ".svn"))):
-        prefix = os.path.commonprefix([root_dir, project_dir])
-        return fullname[len(prefix) + 1:]
-
-    # Don't know what to do; header guard warnings may be wrong...
-    return fullname
-
-  def Split(self):
-    """Splits the file into the directory, basename, and extension.
-
-    For 'chrome/browser/browser.cc', Split() would
-    return ('chrome/browser', 'browser', '.cc')
-
-    Returns:
-      A tuple of (directory, basename, extension).
-    """
-
-    googlename = self.RepositoryName()
-    project, rest = os.path.split(googlename)
-    return (project,) + os.path.splitext(rest)
-
-  def BaseName(self):
-    """File base name - text after the final slash, before the final period."""
-    return self.Split()[1]
-
-  def Extension(self):
-    """File extension - text following the final period."""
-    return self.Split()[2]
-
-  def NoExtension(self):
-    """File has no source file extension."""
-    return '/'.join(self.Split()[0:2])
-
-  def IsSource(self):
-    """File has a source file extension."""
-    return self.Extension()[1:] in ('c', 'cc', 'cpp', 'cxx')
-
-
-def _ShouldPrintError(category, confidence, linenum):
-  """If confidence >= verbose, category passes filter and is not suppressed."""
-
-  # There are three ways we might decide not to print an error message:
-  # a "NOLINT(category)" comment appears in the source,
-  # the verbosity level isn't high enough, or the filters filter it out.
-  if IsErrorSuppressedByNolint(category, linenum):
-    return False
-
-  if confidence < _cpplint_state.verbose_level:
-    return False
-
-  is_filtered = False
-  for one_filter in _Filters():
-    if one_filter.startswith('-'):
-      if category.startswith(one_filter[1:]):
-        is_filtered = True
-    elif one_filter.startswith('+'):
-      if category.startswith(one_filter[1:]):
-        is_filtered = False
-    else:
-      assert False  # should have been checked for in SetFilter.
-  if is_filtered:
-    return False
-
-  return True
-
-
-def Error(filename, linenum, category, confidence, message):
-  """Logs the fact we've found a lint error.
-
-  We log where the error was found, and also our confidence in the error,
-  that is, how certain we are this is a legitimate style regression, and
-  not a misidentification or a use that's sometimes justified.
-
-  False positives can be suppressed by the use of
-  "cpplint(category)"  comments on the offending line.  These are
-  parsed into _error_suppressions.
-
-  Args:
-    filename: The name of the file containing the error.
-    linenum: The number of the line containing the error.
-    category: A string used to describe the "category" this bug
-      falls under: "whitespace", say, or "runtime".  Categories
-      may have a hierarchy separated by slashes: "whitespace/indent".
-    confidence: A number from 1-5 representing a confidence score for
-      the error, with 5 meaning that we are certain of the problem,
-      and 1 meaning that it could be a legitimate construct.
-    message: The error message.
-  """
-  if _ShouldPrintError(category, confidence, linenum):
-    _cpplint_state.IncrementErrorCount(category)
-    if _cpplint_state.output_format == 'vs7':
-      sys.stderr.write('%s(%s):  %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence))
-    elif _cpplint_state.output_format == 'eclipse':
-      sys.stderr.write('%s:%s: warning: %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence))
-    else:
-      sys.stderr.write('%s:%s:  %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence))
-
-
-# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
-_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
-    r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
-# Match a single C style comment on the same line.
-_RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
-# Matches multi-line C style comments.
-# This RE is a little bit more complicated than one might expect, because we
-# have to be careful about how spaces around comments are removed, so that
-# comments inside statements are handled well.
-# The current rule is: we only strip spaces from both sides of a comment when
-# the comment ends the line.  Otherwise, we try to remove the spaces on its
-# right; if that doesn't apply, we remove the spaces on its left, but only if
-# a non-word character follows on the right.
-_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
-    r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
-    _RE_PATTERN_C_COMMENTS + r'\s+|' +
-    r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
-    _RE_PATTERN_C_COMMENTS + r')')
-
-
-def IsCppString(line):
-  """Does line terminate so, that the next symbol is in string constant.
-
-  This function does not consider single-line nor multi-line comments.
-
-  Args:
-    line: a partial line of code (characters 0..n).
-
-  Returns:
-    True, if the next character appended to 'line' would be inside a
-    string constant.
-  """
-
-  line = line.replace(r'\\', 'XX')  # after this, \\" does not match to \"
-  return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
-
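-# Usage sketch (not part of cpplint): the parity of unescaped double quotes
-# decides whether the next character would continue a string.
-#
-#   >>> IsCppString('x = "abc')       # one unescaped quote: inside a string
-#   True
-#   >>> IsCppString('x = "abc"')      # quotes balanced: string is closed
-#   False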
-
-def CleanseRawStrings(raw_lines):
-  """Removes C++11 raw strings from lines.
-
-    Before:
-      static const char kData[] = R"(
-          multi-line string
-          )";
-
-    After:
-      static const char kData[] = ""
-          (replaced by blank line)
-          "";
-
-  Args:
-    raw_lines: list of raw lines.
-
-  Returns:
-    list of lines with C++11 raw strings replaced by empty strings.
-  """
-
-  delimiter = None
-  lines_without_raw_strings = []
-  for line in raw_lines:
-    if delimiter:
-      # Inside a raw string, look for the end
-      end = line.find(delimiter)
-      if end >= 0:
-        # Found the end of the string, match leading space for this
-        # line and resume copying the original lines, and also insert
-        # a "" on the last line.
-        leading_space = Match(r'^(\s*)\S', line)
-        line = leading_space.group(1) + '""' + line[end + len(delimiter):]
-        delimiter = None
-      else:
-        # Haven't found the end yet, append a blank line.
-        line = '""'
-
-    # Look for beginnings of raw strings, and replace them with
-    # empty strings.  This is done in a loop to handle multiple raw
-    # strings on the same line.
-    while delimiter is None:
-      # Look for beginning of a raw string.
-      # See 2.14.15 [lex.string] for syntax.
-      matched = Match(r'^(.*)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
-      if matched:
-        delimiter = ')' + matched.group(2) + '"'
-
-        end = matched.group(3).find(delimiter)
-        if end >= 0:
-          # Raw string ended on same line
-          line = (matched.group(1) + '""' +
-                  matched.group(3)[end + len(delimiter):])
-          delimiter = None
-        else:
-          # Start of a multi-line raw string
-          line = matched.group(1) + '""'
-      else:
-        break
-
-    lines_without_raw_strings.append(line)
-
-  # TODO(unknown): if delimiter is not None here, we might want to
-  # emit a warning for unterminated string.
-  return lines_without_raw_strings
-
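-# A minimal sketch (not part of cpplint) of the transformation performed:
-#
-#   >>> CleanseRawStrings(['s = R"xyz(raw', 'body', ')xyz";'])
-#   ['s = ""', '""', '"";']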
-
-def FindNextMultiLineCommentStart(lines, lineix):
-  """Find the beginning marker for a multiline comment."""
-  while lineix < len(lines):
-    if lines[lineix].strip().startswith('/*'):
-      # Only return this marker if the comment goes beyond this line
-      if lines[lineix].strip().find('*/', 2) < 0:
-        return lineix
-    lineix += 1
-  return len(lines)
-
-
-def FindNextMultiLineCommentEnd(lines, lineix):
-  """We are inside a comment, find the end marker."""
-  while lineix < len(lines):
-    if lines[lineix].strip().endswith('*/'):
-      return lineix
-    lineix += 1
-  return len(lines)
-
-
-def RemoveMultiLineCommentsFromRange(lines, begin, end):
-  """Clears a range of lines for multi-line comments."""
-  # Having // dummy comments makes the lines non-empty, so we will not get
-  # unnecessary blank line warnings later in the code.
-  for i in range(begin, end):
-    lines[i] = '/**/'
-
-
-def RemoveMultiLineComments(filename, lines, error):
-  """Removes multiline (c-style) comments from lines."""
-  lineix = 0
-  while lineix < len(lines):
-    lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
-    if lineix_begin >= len(lines):
-      return
-    lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
-    if lineix_end >= len(lines):
-      error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
-            'Could not find end of multi-line comment')
-      return
-    RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
-    lineix = lineix_end + 1
-
-
-def CleanseComments(line):
-  """Removes //-comments and single-line C-style /* */ comments.
-
-  Args:
-    line: A line of C++ source.
-
-  Returns:
-    The line with single-line comments removed.
-  """
-  commentpos = line.find('//')
-  if commentpos != -1 and not IsCppString(line[:commentpos]):
-    line = line[:commentpos].rstrip()
-  # get rid of /* ... */
-  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
-
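-# Usage sketch (not part of cpplint): single-line comments are stripped
-# while the code portion of the line is preserved.
-#
-#   >>> CleanseComments('a = b;  // trailing note')
-#   'a = b;'
-#   >>> CleanseComments('f(/* inline */ x);')
-#   'f(x);'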
-
-class CleansedLines(object):
-  """Holds 4 copies of all lines with different preprocessing applied to them.
-
-  1) elided member contains lines without strings and comments.
-  2) lines member contains lines without comments.
-  3) raw_lines member contains all the lines without processing.
-  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
-     strings removed.
-  All these members are of <type 'list'>, and of the same length.
-  """
-
-  def __init__(self, lines):
-    self.elided = []
-    self.lines = []
-    self.raw_lines = lines
-    self.num_lines = len(lines)
-    self.lines_without_raw_strings = CleanseRawStrings(lines)
-    for linenum in range(len(self.lines_without_raw_strings)):
-      self.lines.append(CleanseComments(
-          self.lines_without_raw_strings[linenum]))
-      elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
-      self.elided.append(CleanseComments(elided))
-
-  def NumLines(self):
-    """Returns the number of lines represented."""
-    return self.num_lines
-
-  @staticmethod
-  def _CollapseStrings(elided):
-    """Collapses strings and chars on a line to simple "" or '' blocks.
-
-    We nix strings first so we're not fooled by text like '"http://"'
-
-    Args:
-      elided: The line being processed.
-
-    Returns:
-      The line with collapsed strings.
-    """
-    if _RE_PATTERN_INCLUDE.match(elided):
-      return elided
-
-    # Remove escaped characters first to make quote/single quote collapsing
-    # basic.  Things that look like escaped characters shouldn't occur
-    # outside of strings and chars.
-    elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
-
-    # Replace quoted strings and digit separators.  Both single quotes
-    # and double quotes are processed in the same loop, otherwise
-    # nested quotes wouldn't work.
-    collapsed = ''
-    while True:
-      # Find the first quote character
-      match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
-      if not match:
-        collapsed += elided
-        break
-      head, quote, tail = match.groups()
-
-      if quote == '"':
-        # Collapse double quoted strings
-        second_quote = tail.find('"')
-        if second_quote >= 0:
-          collapsed += head + '""'
-          elided = tail[second_quote + 1:]
-        else:
-          # Unmatched double quote, don't bother processing the rest
-          # of the line since this is probably a multiline string.
-          collapsed += elided
-          break
-      else:
-        # Found single quote, check nearby text to eliminate digit separators.
-        #
-        # There is no special handling for floating point here, because
-        # the integer/fractional/exponent parts would all be parsed
-        # correctly as long as there are digits on both sides of the
-        # separator.  So we are fine as long as we don't see something
-        # like "0.'3" (gcc 4.9.0 will not allow this literal).
-        if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
-          match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
-          collapsed += head + match_literal.group(1).replace("'", '')
-          elided = match_literal.group(2)
-        else:
-          second_quote = tail.find('\'')
-          if second_quote >= 0:
-            collapsed += head + "''"
-            elided = tail[second_quote + 1:]
-          else:
-            # Unmatched single quote
-            collapsed += elided
-            break
-
-    return collapsed
-
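-# Sketch (not part of cpplint): string and character literals collapse to
-# empty "" / '' tokens and C++14 digit separators disappear, so later checks
-# are not confused by quotes or brackets inside literals.
-#
-#   >>> CleansedLines._CollapseStrings('printf("%d>%d", a, b);')
-#   'printf("", a, b);'
-#   >>> CleansedLines._CollapseStrings("int n = 1'000'000;")
-#   'int n = 1000000;'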
-
-def FindEndOfExpressionInLine(line, startpos, stack):
-  """Find the position just after the end of current parenthesized expression.
-
-  Args:
-    line: a CleansedLines line.
-    startpos: start searching at this position.
-    stack: nesting stack at startpos.
-
-  Returns:
-    On finding matching end: (index just after matching end, None)
-    On finding an unclosed expression: (-1, None)
-    Otherwise: (-1, new stack at end of this line)
-  """
-  for i in xrange(startpos, len(line)):
-    char = line[i]
-    if char in '([{':
-      # Found start of parenthesized expression, push to expression stack
-      stack.append(char)
-    elif char == '<':
-      # Found potential start of template argument list
-      if i > 0 and line[i - 1] == '<':
-        # Left shift operator
-        if stack and stack[-1] == '<':
-          stack.pop()
-          if not stack:
-            return (-1, None)
-      elif i > 0 and Search(r'\boperator\s*$', line[0:i]):
-        # operator<, don't add to stack
-        continue
-      else:
-        # Tentative start of template argument list
-        stack.append('<')
-    elif char in ')]}':
-      # Found end of parenthesized expression.
-      #
-      # If we are currently expecting a matching '>', the pending '<'
-      # must have been an operator.  Remove them from expression stack.
-      while stack and stack[-1] == '<':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-      if ((stack[-1] == '(' and char == ')') or
-          (stack[-1] == '[' and char == ']') or
-          (stack[-1] == '{' and char == '}')):
-        stack.pop()
-        if not stack:
-          return (i + 1, None)
-      else:
-        # Mismatched parentheses
-        return (-1, None)
-    elif char == '>':
-      # Found potential end of template argument list.
-
-      # Ignore "->" and operator functions
-      if (i > 0 and
-          (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
-        continue
-
-      # Pop the stack if there is a matching '<'.  Otherwise, ignore
-      # this '>' since it must be an operator.
-      if stack:
-        if stack[-1] == '<':
-          stack.pop()
-          if not stack:
-            return (i + 1, None)
-    elif char == ';':
-      # Found something that looks like the end of a statement.  If we are
-      # currently expecting a '>', the matching '<' must have been an
-      # operator, since a template argument list should not contain
-      # statements.
-      while stack and stack[-1] == '<':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-
-  # Did not find end of expression or unbalanced parentheses on this line
-  return (-1, stack)
-
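-# Sketch (not part of cpplint): the returned stack lets a caller resume
-# scanning on the next line when the expression does not close on this one.
-#
-#   >>> FindEndOfExpressionInLine('foo(a, b)', 3, [])
-#   (9, None)
-#   >>> FindEndOfExpressionInLine('foo(a, (b', 3, [])
-#   (-1, ['(', '('])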
-
-def CloseExpression(clean_lines, linenum, pos):
-  """If input points to ( or { or [ or <, finds the position that closes it.
-
-  If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
-  linenum/pos that correspond to the closing of the expression.
-
-  TODO(unknown): cpplint spends a fair bit of time matching parentheses.
-  Ideally we would want to index all opening and closing parentheses once
-  and have CloseExpression be just a simple lookup, but due to preprocessor
-  tricks, this is not so easy.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    pos: A position on the line.
-
-  Returns:
-    A tuple (line, linenum, pos) pointer *past* the closing brace, or
-    (line, len(lines), -1) if we never find a close.  Note we ignore
-    strings and comments when matching; and the line we return is the
-    'cleansed' line at linenum.
-  """
-
-  line = clean_lines.elided[linenum]
-  if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
-    return (line, clean_lines.NumLines(), -1)
-
-  # Check first line
-  (end_pos, stack) = FindEndOfExpressionInLine(line, pos, [])
-  if end_pos > -1:
-    return (line, linenum, end_pos)
-
-  # Continue scanning forward
-  while stack and linenum < clean_lines.NumLines() - 1:
-    linenum += 1
-    line = clean_lines.elided[linenum]
-    (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack)
-    if end_pos > -1:
-      return (line, linenum, end_pos)
-
-  # Did not find end of expression before end of file, give up
-  return (line, clean_lines.NumLines(), -1)
-
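-# Usage sketch (not part of cpplint; the input lines and 'm' are made up):
-# given the position of a '<', find where the template argument list closes,
-# possibly several lines later.
-#
-#   lines = CleansedLines(['std::map<int,', '    std::string> m;'])
-#   (line, linenum, pos) = CloseExpression(lines, 0, 8)
-#   # linenum == 1 and pos == 16, just past the '>' on the second line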
-
-def FindStartOfExpressionInLine(line, endpos, stack):
-  """Find position at the matching start of current expression.
-
-  This is almost the reverse of FindEndOfExpressionInLine, but note
-  that the input position and returned position differ by 1.
-
-  Args:
-    line: a CleansedLines line.
-    endpos: start searching at this position.
-    stack: nesting stack at endpos.
-
-  Returns:
-    On finding matching start: (index at matching start, None)
-    On finding an unclosed expression: (-1, None)
-    Otherwise: (-1, new stack at beginning of this line)
-  """
-  i = endpos
-  while i >= 0:
-    char = line[i]
-    if char in ')]}':
-      # Found end of expression, push to expression stack
-      stack.append(char)
-    elif char == '>':
-      # Found potential end of template argument list.
-      #
-      # Ignore it if it's a "->" or ">=" or "operator>"
-      if (i > 0 and
-          (line[i - 1] == '-' or
-           Match(r'\s>=\s', line[i - 1:]) or
-           Search(r'\boperator\s*$', line[0:i]))):
-        i -= 1
-      else:
-        stack.append('>')
-    elif char == '<':
-      # Found potential start of template argument list
-      if i > 0 and line[i - 1] == '<':
-        # Left shift operator
-        i -= 1
-      else:
-        # If there is a matching '>', we can pop the expression stack.
-        # Otherwise, ignore this '<' since it must be an operator.
-        if stack and stack[-1] == '>':
-          stack.pop()
-          if not stack:
-            return (i, None)
-    elif char in '([{':
-      # Found start of expression.
-      #
-      # If there are any unmatched '>' on the stack, they must be
-      # operators.  Remove those.
-      while stack and stack[-1] == '>':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-      if ((char == '(' and stack[-1] == ')') or
-          (char == '[' and stack[-1] == ']') or
-          (char == '{' and stack[-1] == '}')):
-        stack.pop()
-        if not stack:
-          return (i, None)
-      else:
-        # Mismatched parentheses
-        return (-1, None)
-    elif char == ';':
-      # Found something that looks like the end of a statement.  If we are
-      # currently expecting a '<', the matching '>' must have been an
-      # operator, since a template argument list should not contain
-      # statements.
-      while stack and stack[-1] == '>':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-
-    i -= 1
-
-  return (-1, stack)
-
-
-def ReverseCloseExpression(clean_lines, linenum, pos):
-  """If input points to ) or } or ] or >, finds the position that opens it.
-
-  If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
-  linenum/pos that correspond to the opening of the expression.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    pos: A position on the line.
-
-  Returns:
-    A tuple (line, linenum, pos) pointer *at* the opening brace, or
-    (line, 0, -1) if we never find the matching opening brace.  Note
-    we ignore strings and comments when matching; and the line we
-    return is the 'cleansed' line at linenum.
-  """
-  line = clean_lines.elided[linenum]
-  if line[pos] not in ')}]>':
-    return (line, 0, -1)
-
-  # Check last line
-  (start_pos, stack) = FindStartOfExpressionInLine(line, pos, [])
-  if start_pos > -1:
-    return (line, linenum, start_pos)
-
-  # Continue scanning backward
-  while stack and linenum > 0:
-    linenum -= 1
-    line = clean_lines.elided[linenum]
-    (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
-    if start_pos > -1:
-      return (line, linenum, start_pos)
-
-  # Did not find start of expression before beginning of file, give up
-  return (line, 0, -1)
-
-
-def CheckForCopyright(filename, lines, error):
-  """Logs an error if no Copyright message appears at the top of the file."""
-
-  # We'll say it should occur by line 10. Don't forget there's a
-  # dummy line at the front.
-  for line in xrange(1, min(len(lines), 11)):
-    if re.search(r'Copyright', lines[line], re.I): break
-  else:                       # means no copyright line was found
-    error(filename, 0, 'legal/copyright', 5,
-          'No copyright message found.  '
-          'You should have a line: "Copyright [year] <Copyright Owner>"')
-
-
-def GetIndentLevel(line):
-  """Return the number of leading spaces in line.
-
-  Args:
-    line: A string to check.
-
-  Returns:
-    An integer count of leading spaces, possibly zero.
-  """
-  indent = Match(r'^( *)\S', line)
-  if indent:
-    return len(indent.group(1))
-  else:
-    return 0
-
-
-def GetHeaderGuardCPPVariable(filename):
-  """Returns the CPP variable that should be used as a header guard.
-
-  Args:
-    filename: The name of a C++ header file.
-
-  Returns:
-    The CPP variable that should be used as a header guard in the
-    named file.
-
-  """
-
-  # Restores original filename in case that cpplint is invoked from Emacs's
-  # flymake.
-  filename = re.sub(r'_flymake\.h$', '.h', filename)
-  filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
-  # Replace 'c++' with 'cpp'.
-  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
-
-  fileinfo = FileInfo(filename)
-  file_path_from_root = fileinfo.RepositoryName()
-  if _root:
-    file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root)
-  # return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
-
-  # wangsheng@singa.apache: change INCLUDE to SINGA
-  singa_path = re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
-  return singa_path.replace("INCLUDE_", "")
-
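-# Sketch (not part of upstream cpplint): with the SINGA tweak above, a header
-# under include/ maps to a guard variable without the INCLUDE_ prefix.
-#
-#   include/singa/core/tensor.h  ->  SINGA_CORE_TENSOR_H_
-#   (upstream cpplint would produce INCLUDE_SINGA_CORE_TENSOR_H_)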
-
-def CheckForHeaderGuard(filename, clean_lines, error):
-  """Checks that the file contains a header guard.
-
-  Logs an error if no #ifndef header guard is present.  For other
-  headers, checks that the full pathname is used.
-
-  Args:
-    filename: The name of the C++ header file.
-    clean_lines: A CleansedLines instance containing the file.
-    error: The function to call with any errors found.
-  """
-
-  # Don't check for header guards if there are error suppression
-  # comments somewhere in this file.
-  #
-  # Because this is silencing a warning for a nonexistent line, we
-  # only support the very specific NOLINT(build/header_guard) syntax,
-  # and not the general NOLINT or NOLINT(*) syntax.
-  raw_lines = clean_lines.lines_without_raw_strings
-  for i in raw_lines:
-    if Search(r'//\s*NOLINT\(build/header_guard\)', i):
-      return
-
-  cppvar = GetHeaderGuardCPPVariable(filename)
-
-  ifndef = ''
-  ifndef_linenum = 0
-  define = ''
-  endif = ''
-  endif_linenum = 0
-  for linenum, line in enumerate(raw_lines):
-    linesplit = line.split()
-    if len(linesplit) >= 2:
-      # find the first occurrence of #ifndef and #define, save arg
-      if not ifndef and linesplit[0] == '#ifndef':
-        # set ifndef to the header guard presented on the #ifndef line.
-        ifndef = linesplit[1]
-        ifndef_linenum = linenum
-      if not define and linesplit[0] == '#define':
-        define = linesplit[1]
-    # find the last occurrence of #endif, save entire line
-    if line.startswith('#endif'):
-      endif = line
-      endif_linenum = linenum
-
-  if not ifndef or not define or ifndef != define:
-    error(filename, 0, 'build/header_guard', 5,
-          'No #ifndef header guard found, suggested CPP variable is: %s' %
-          cppvar)
-    return
-
-  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
-  # for backward compatibility.
-  if ifndef != cppvar:
-    error_level = 0
-    if ifndef != cppvar + '_':
-      error_level = 5
-
-    ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
-                            error)
-    error(filename, ifndef_linenum, 'build/header_guard', error_level,
-          '#ifndef header guard has wrong style, please use: %s' % cppvar)
-
-  # Check for "//" comments on endif line.
-  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
-                          error)
-  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
-  if match:
-    if match.group(1) == '_':
-      # Issue low severity warning for deprecated double trailing underscore
-      error(filename, endif_linenum, 'build/header_guard', 0,
-            '#endif line should be "#endif  // %s"' % cppvar)
-    return
-
-  # Didn't find the corresponding "//" comment.  If this file does not
-  # contain any "//" comments at all, it could be that the compiler
-  # only wants "/**/" comments, look for those instead.
-  no_single_line_comments = True
-  for i in xrange(1, len(raw_lines) - 1):
-    line = raw_lines[i]
-    if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
-      no_single_line_comments = False
-      break
-
-  if no_single_line_comments:
-    match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
-    if match:
-      if match.group(1) == '_':
-        # Low severity warning for double trailing underscore
-        error(filename, endif_linenum, 'build/header_guard', 0,
-              '#endif line should be "#endif  /* %s */"' % cppvar)
-      return
-
-  # Didn't find anything
-  error(filename, endif_linenum, 'build/header_guard', 5,
-        '#endif line should be "#endif  // %s"' % cppvar)
-
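-# Layout sketch (not part of cpplint) that the check above accepts, for a
-# file whose guard variable is SINGA_CORE_TENSOR_H_:
-#
-#   #ifndef SINGA_CORE_TENSOR_H_
-#   #define SINGA_CORE_TENSOR_H_
-#   ...
-#   #endif  // SINGA_CORE_TENSOR_H_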
-
-def CheckHeaderFileIncluded(filename, include_state, error):
-  """Logs an error if a .cc file does not include its header."""
-
-  # Do not check test files
-  if filename.endswith('_test.cc') or filename.endswith('_unittest.cc'):
-    return
-
-  fileinfo = FileInfo(filename)
-  headerfile = filename[0:len(filename) - 2] + 'h'
-  if not os.path.exists(headerfile):
-    return
-  headername = FileInfo(headerfile).RepositoryName()
-  first_include = 0
-  for section_list in include_state.include_list:
-    for f in section_list:
-      if headername in f[0] or f[0] in headername:
-        return
-      if not first_include:
-        first_include = f[1]
-
-  error(filename, first_include, 'build/include', 5,
-        '%s should include its header file %s' % (fileinfo.RepositoryName(),
-                                                  headername))
-
-
-def CheckForBadCharacters(filename, lines, error):
-  """Logs an error for each line containing bad characters.
-
-  Two kinds of bad characters:
-
-  1. Unicode replacement characters: These indicate that either the file
-  contained invalid UTF-8 (likely) or Unicode replacement characters (which
-  it shouldn't).  Note that it's possible for this to throw off line
-  numbering if the invalid UTF-8 occurred adjacent to a newline.
-
-  2. NUL bytes.  These are problematic for some tools.
-
-  Args:
-    filename: The name of the current file.
-    lines: An array of strings, each representing a line of the file.
-    error: The function to call with any errors found.
-  """
-  for linenum, line in enumerate(lines):
-    if u'\ufffd' in line:
-      error(filename, linenum, 'readability/utf8', 5,
-            'Line contains invalid UTF-8 (or Unicode replacement character).')
-    if '\0' in line:
-      error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
-
-
-def CheckForNewlineAtEOF(filename, lines, error):
-  """Logs an error if there is no newline char at the end of the file.
-
-  Args:
-    filename: The name of the current file.
-    lines: An array of strings, each representing a line of the file.
-    error: The function to call with any errors found.
-  """
-
-  # The array lines() was created by adding two newlines to the
-  # original file (go figure), then splitting on \n.
-  # To verify that the file ends in \n, we just have to make sure the
-  # last-but-two element of lines() exists and is empty.
-  if len(lines) < 3 or lines[-2]:
-    error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
-          'Could not find a newline character at the end of the file.')
-
-
-def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
-  """Logs an error if we see /* ... */ or "..." that extend past one line.
-
-  /* ... */ comments are legit inside macros, for one line.
-  Otherwise, we prefer // comments, so it's ok to warn about the
-  other.  Likewise, it's ok for strings to extend across multiple
-  lines, as long as a line continuation character (backslash)
-  terminates each line. Although not currently prohibited by the C++
-  style guide, it's ugly and unnecessary. We don't do well with either
-  in this lint program, so we warn about both.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
-  # second (escaped) slash may trigger later \" detection erroneously.
-  line = line.replace('\\\\', '')
-
-  if line.count('/*') > line.count('*/'):
-    error(filename, linenum, 'readability/multiline_comment', 5,
-          'Complex multi-line /*...*/-style comment found. '
-          'Lint may give bogus warnings.  '
-          'Consider replacing these with //-style comments, '
-          'with #if 0...#endif, '
-          'or with more clearly structured multi-line comments.')
-
-  if (line.count('"') - line.count('\\"')) % 2:
-    error(filename, linenum, 'readability/multiline_string', 5,
-          'Multi-line string ("...") found.  This lint script doesn\'t '
-          'do well with such strings, and may give bogus warnings.  '
-          'Use C++11 raw strings or concatenation instead.')
-
-
-# (non-threadsafe name, thread-safe alternative, validation pattern)
-#
-# The validation pattern is used to eliminate false positives such as:
-#  _rand();               // false positive due to substring match.
-#  ->rand();              // some member function rand().
-#  ACMRandom rand(seed);  // some variable named rand.
-#  ISAACRandom rand();    // another variable named rand.
-#
-# Basically we require the return value of these functions to be used
-# in some expression context on the same line by matching on some
-# operator before the function name.  This eliminates constructors and
-# member function calls.
-_UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)'
-_THREADING_LIST = (
-    ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'),
-    ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'),
-    ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'),
-    ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'),
-    ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'),
-    ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'),
-    ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'),
-    ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
-    ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
-    ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
-    ('strtok(', 'strtok_r(',
-     _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
-    ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
-    )
-
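-# Sketch (not part of cpplint): the prefix requires the call to appear in an
-# expression context, which weeds out declarations and member calls.
-#
-#   int r = rand();        // '=' before rand() -> flagged
-#   ACMRandom rand(seed);  // no operator before rand( -> not flagged
-#   ->rand();              // member call -> not flagged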
-
-def CheckPosixThreading(filename, clean_lines, linenum, error):
-  """Checks for calls to thread-unsafe functions.
-
-  Much code was originally written without multi-threading in mind.  In
-  addition, engineers often rely on experience gained before the POSIX
-  threading extensions were added.  These checks guide engineers toward the
-  thread-safe variants when calling POSIX directly.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST:
-    # Additional pattern matching check to confirm that this is the
-    # function we are looking for
-    if Search(pattern, line):
-      error(filename, linenum, 'runtime/threadsafe_fn', 2,
-            'Consider using ' + multithread_safe_func +
-            '...) instead of ' + single_thread_func +
-            '...) for improved thread safety.')
-
-
-def CheckVlogArguments(filename, clean_lines, linenum, error):
-  """Checks that VLOG() is only used for defining a logging level.
-
-  For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
-  VLOG(FATAL) are not.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
-    error(filename, linenum, 'runtime/vlog', 5,
-          'VLOG() should be used with numeric verbosity level.  '
-          'Use LOG() if you want symbolic severity levels.')
-
-# Matches invalid increment: *count++, which moves pointer instead of
-# incrementing a value.
-_RE_PATTERN_INVALID_INCREMENT = re.compile(
-    r'^\s*\*\w+(\+\+|--);')
-
-
-def CheckInvalidIncrement(filename, clean_lines, linenum, error):
-  """Checks for invalid increment *count++.
-
-  For example, the following function:
-  void increment_counter(int* count) {
-    *count++;
-  }
-  is invalid, because it effectively does count++, moving the pointer, and
-  should be replaced with ++*count, (*count)++ or *count += 1.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  if _RE_PATTERN_INVALID_INCREMENT.match(line):
-    error(filename, linenum, 'runtime/invalid_increment', 5,
-          'Changing pointer instead of value (or unused value of operator*).')
-
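-# Sketch (not part of cpplint): what the pattern above flags and accepts.
-#
-#   *count++;     // flagged: increments the pointer, then dereferences it
-#   (*count)++;   // ok
-#   *count += 1;  // ok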
-
-def IsMacroDefinition(clean_lines, linenum):
-  if Search(r'^#define', clean_lines[linenum]):
-    return True
-
-  if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]):
-    return True
-
-  return False
-
-
-def IsForwardClassDeclaration(clean_lines, linenum):
-  return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum])
-
-
-class _BlockInfo(object):
-  """Stores information about a generic block of code."""
-
-  def __init__(self, seen_open_brace):
-    self.seen_open_brace = seen_open_brace
-    self.open_parentheses = 0
-    self.inline_asm = _NO_ASM
-    self.check_namespace_indentation = False
-
-  def CheckBegin(self, filename, clean_lines, linenum, error):
-    """Run checks that applies to text up to the opening brace.
-
-    This is mostly for checking the text after the class identifier
-    and the "{", usually where the base class is specified.  For other
-    blocks, there isn't much to check, so we always pass.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    pass
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    """Run checks that applies to text after the closing brace.
-
-    This is mostly used for checking end of namespace comments.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    pass
-
-  def IsBlockInfo(self):
-    """Returns true if this block is a _BlockInfo.
-
-    This is convenient for verifying that an object is an instance of
-    a _BlockInfo, but not an instance of any of the derived classes.
-
-    Returns:
-      True for this class, False for derived classes.
-    """
-    return self.__class__ == _BlockInfo
-
-
-class _ExternCInfo(_BlockInfo):
-  """Stores information about an 'extern "C"' block."""
-
-  def __init__(self):
-    _BlockInfo.__init__(self, True)
-
-
-class _ClassInfo(_BlockInfo):
-  """Stores information about a class."""
-
-  def __init__(self, name, class_or_struct, clean_lines, linenum):
-    _BlockInfo.__init__(self, False)
-    self.name = name
-    self.starting_linenum = linenum
-    self.is_derived = False
-    self.check_namespace_indentation = True
-    if class_or_struct == 'struct':
-      self.access = 'public'
-      self.is_struct = True
-    else:
-      self.access = 'private'
-      self.is_struct = False
-
-    # Remember initial indentation level for this class.  Using raw_lines here
-    # instead of elided to account for leading comments.
-    self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum])
-
-    # Try to find the end of the class.  This will be confused by things like:
-    #   class A {
-    #   } *x = { ...
-    #
-    # But it's still good enough for CheckSectionSpacing.
-    self.last_line = 0
-    depth = 0
-    for i in range(linenum, clean_lines.NumLines()):
-      line = clean_lines.elided[i]
-      depth += line.count('{') - line.count('}')
-      if not depth:
-        self.last_line = i
-        break
-
-  def CheckBegin(self, filename, clean_lines, linenum, error):
-    # Look for a bare ':'
-    if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
-      self.is_derived = True
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    # If there is a DISALLOW macro, it should appear near the end of
-    # the class.
-    seen_last_thing_in_class = False
-    for i in xrange(linenum - 1, self.starting_linenum, -1):
-      match = Search(
-          r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
-          self.name + r'\)',
-          clean_lines.elided[i])
-      if match:
-        if seen_last_thing_in_class:
-          error(filename, i, 'readability/constructors', 3,
-                match.group(1) + ' should be the last thing in the class')
-        break
-
-      if not Match(r'^\s*$', clean_lines.elided[i]):
-        seen_last_thing_in_class = True
-
-    # Check that closing brace is aligned with beginning of the class.
-    # Only do this if the closing brace is indented by only whitespaces.
-    # This means we will not check single-line class definitions.
-    indent = Match(r'^( *)\}', clean_lines.elided[linenum])
-    if indent and len(indent.group(1)) != self.class_indent:
-      if self.is_struct:
-        parent = 'struct ' + self.name
-      else:
-        parent = 'class ' + self.name
-      error(filename, linenum, 'whitespace/indent', 3,
-            'Closing brace should be aligned with beginning of %s' % parent)
-
-
-class _NamespaceInfo(_BlockInfo):
-  """Stores information about a namespace."""
-
-  def __init__(self, name, linenum):
-    _BlockInfo.__init__(self, False)
-    self.name = name or ''
-    self.starting_linenum = linenum
-    self.check_namespace_indentation = True
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    """Check end of namespace comments."""
-    line = clean_lines.raw_lines[linenum]
-
-    # Check how many lines are enclosed in this namespace.  Don't issue
-    # warning for missing namespace comments if there aren't enough
-    # lines.  However, do apply checks if there is already an end of
-    # namespace comment and it's incorrect.
-    #
-    # TODO(unknown): We always want to check end of namespace comments
-    # if a namespace is large, but sometimes we also want to apply the
-    # check if a short namespace contained nontrivial things (something
-    # other than forward declarations).  There is currently no logic on
-    # deciding what these nontrivial things are, so this check is
-    # triggered by namespace size only, which works most of the time.
-    if (linenum - self.starting_linenum < 10
-        and not Match(r'};*\s*(//|/\*).*\bnamespace\b', line)):
-      return
-
-    # Look for matching comment at end of namespace.
-    #
-    # Note that we accept C style "/* */" comments for terminating
-    # namespaces, so that code that terminates namespaces inside
-    # preprocessor macros can be cpplint clean.
-    #
-    # We also accept stuff like "// end of namespace <name>." with the
-    # period at the end.
-    #
-    # Besides these, we don't accept anything else, otherwise we might
-    # get false negatives when existing comment is a substring of the
-    # expected namespace.
-    if self.name:
-      # Named namespace
-      if not Match((r'};*\s*(//|/\*).*\bnamespace\s+' + re.escape(self.name) +
-                    r'[\*/\.\\\s]*$'),
-                   line):
-        error(filename, linenum, 'readability/namespace', 5,
-              'Namespace should be terminated with "// namespace %s"' %
-              self.name)
-    else:
-      # Anonymous namespace
-      if not Match(r'};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
-        # If "// namespace anonymous" or "// anonymous namespace (more text)",
-        # mention "// anonymous namespace" as an acceptable form
-        if Match(r'}.*\b(namespace anonymous|anonymous namespace)\b', line):
-          error(filename, linenum, 'readability/namespace', 5,
-                'Anonymous namespace should be terminated with "// namespace"'
-                ' or "// anonymous namespace"')
-        else:
-          error(filename, linenum, 'readability/namespace', 5,
-                'Anonymous namespace should be terminated with "// namespace"')
-
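-# Sketch (not part of cpplint): terminating comments the check above accepts
-# for a namespace named 'mynamespace'.
-#
-#   }  // namespace mynamespace          -> accepted
-#   }  /* namespace mynamespace */       -> accepted
-#   }  // end of namespace mynamespace.  -> accepted
-#   }  // mynamespace                    -> flagged (when the namespace is
-#                                           long enough to be checked)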
-
-class _PreprocessorInfo(object):
-  """Stores checkpoints of nesting stacks when #if/#else is seen."""
-
-  def __init__(self, stack_before_if):
-    # The entire nesting stack before #if
-    self.stack_before_if = stack_before_if
-
-    # The entire nesting stack up to #else
-    self.stack_before_else = []
-
-    # Whether we have already seen #else or #elif
-    self.seen_else = False
-
-
-class NestingState(object):
-  """Holds states related to parsing braces."""
-
-  def __init__(self):
-    # Stack for tracking all braces.  An object is pushed whenever we
-    # see a "{", and popped when we see a "}".  Only 3 types of
-    # objects are possible:
-    # - _ClassInfo: a class or struct.
-    # - _NamespaceInfo: a namespace.
-    # - _BlockInfo: some other type of block.
-    self.stack = []
-
-    # Top of the previous stack before each Update().
-    #
-    # Because the nesting_stack is updated at the end of each line, we
-    # had to do some convoluted checks to find out what is the current
-    # scope at the beginning of the line.  This check is simplified by
-    # saving the previous top of nesting stack.
-    #
-    # We could save the full stack, but we only need the top.  Copying
-    # the full nesting stack would slow down cpplint by ~10%.
-    self.previous_stack_top = []
-
-    # Stack of _PreprocessorInfo objects.
-    self.pp_stack = []
-
-  def SeenOpenBrace(self):
-    """Check if we have seen the opening brace for the innermost block.
-
-    Returns:
-      True if we have seen the opening brace, False if the innermost
-      block is still expecting an opening brace.
-    """
-    return (not self.stack) or self.stack[-1].seen_open_brace
-
-  def InNamespaceBody(self):
-    """Check if we are currently one level inside a namespace body.
-
-    Returns:
-      True if top of the stack is a namespace block, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
-
-  def InExternC(self):
-    """Check if we are currently one level inside an 'extern "C"' block.
-
-    Returns:
-      True if top of the stack is an extern block, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _ExternCInfo)
-
-  def InClassDeclaration(self):
-    """Check if we are currently one level inside a class or struct declaration.
-
-    Returns:
-      True if top of the stack is a class/struct, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _ClassInfo)
-
-  def InAsmBlock(self):
-    """Check if we are currently one level inside an inline ASM block.
-
-    Returns:
-      True if the top of the stack is a block containing inline ASM.
-    """
-    return self.stack and self.stack[-1].inline_asm != _NO_ASM
-
-  def InTemplateArgumentList(self, clean_lines, linenum, pos):
-    """Check if current position is inside template argument list.
-
-    Args:
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      pos: position just after the suspected template argument.
-    Returns:
-      True if (linenum, pos) is inside template arguments.
-    """
-    while linenum < clean_lines.NumLines():
-      # Find the earliest character that might indicate a template argument
-      line = clean_lines.elided[linenum]
-      match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
-      if not match:
-        linenum += 1
-        pos = 0
-        continue
-      token = match.group(1)
-      pos += len(match.group(0))
-
-      # These things do not look like template argument list:
-      #   class Suspect {
-      #   class Suspect x; }
-      if token in ('{', '}', ';'): return False
-
-      # These things look like template argument list:
-      #   template <class Suspect>
-      #   template <class Suspect = default_value>
-      #   template <class Suspect[]>
-      #   template <class Suspect...>
-      if token in ('>', '=', '[', ']', '.'): return True
-
-      # Check if token is an unmatched '<'.
-      # If not, move on to the next character.
-      if token != '<':
-        pos += 1
-        if pos >= len(line):
-          linenum += 1
-          pos = 0
-        continue
-
-      # We can't be sure if we just find a single '<', and need to
-      # find the matching '>'.
-      (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
-      if end_pos < 0:
-        # Not sure if template argument list or syntax error in file
-        return False
-      linenum = end_line
-      pos = end_pos
-    return False
-
-  def UpdatePreprocessor(self, line):
-    """Update preprocessor stack.
-
-    We need to handle preprocessors due to classes like this:
-      #ifdef SWIG
-      struct ResultDetailsPageElementExtensionPoint {
-      #else
-      struct ResultDetailsPageElementExtensionPoint : public Extension {
-      #endif
-
-    We make the following assumptions (good enough for most files):
-    - Preprocessor condition evaluates to true from #if up to first
-      #else/#elif/#endif.
-
-    - Preprocessor condition evaluates to false from #else/#elif up
-      to #endif.  We still perform lint checks on these lines, but
-      these do not affect nesting stack.
-
-    Args:
-      line: current line to check.
-    """
-    if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
-      # Beginning of #if block, save the nesting stack here.  The saved
-      # stack will allow us to restore the parsing state in the #else case.
-      self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
-    elif Match(r'^\s*#\s*(else|elif)\b', line):
-      # Beginning of #else block
-      if self.pp_stack:
-        if not self.pp_stack[-1].seen_else:
-          # This is the first #else or #elif block.  Remember the
-          # whole nesting stack up to this point.  This is what we
-          # keep after the #endif.
-          self.pp_stack[-1].seen_else = True
-          self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
-
-        # Restore the stack to how it was before the #if
-        self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
-      else:
-        # TODO(unknown): unexpected #else, issue warning?
-        pass
-    elif Match(r'^\s*#\s*endif\b', line):
-      # End of #if or #else blocks.
-      if self.pp_stack:
-        # If we saw an #else, we will need to restore the nesting
-        # stack to its former state before the #else, otherwise we
-        # will just continue from where we left off.
-        if self.pp_stack[-1].seen_else:
-          # Here we can just use a shallow copy since we are the last
-          # reference to it.
-          self.stack = self.pp_stack[-1].stack_before_else
-        # Drop the corresponding #if
-        self.pp_stack.pop()
-      else:
-        # TODO(unknown): unexpected #endif, issue warning?
-        pass
-
-  # TODO(unknown): Update() is too long, but we will refactor later.
-  def Update(self, filename, clean_lines, linenum, error):
-    """Update nesting state with current line.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    line = clean_lines.elided[linenum]
-
-    # Remember top of the previous nesting stack.
-    #
-    # The stack is always pushed/popped and not modified in place, so
-    # we can just do a shallow copy instead of copy.deepcopy.  Using
-    # deepcopy would slow down cpplint by ~28%.
-    if self.stack:
-      self.previous_stack_top = self.stack[-1]
-    else:
-      self.previous_stack_top = None
-
-    # Update pp_stack
-    self.UpdatePreprocessor(line)
-
-    # Count parentheses.  This is to avoid adding struct arguments to
-    # the nesting stack.
-    if self.stack:
-      inner_block = self.stack[-1]
-      depth_change = line.count('(') - line.count(')')
-      inner_block.open_parentheses += depth_change
-
-      # Also check if we are starting or ending an inline assembly block.
-      if inner_block.inline_asm in (_NO_ASM, _END_ASM):
-        if (depth_change != 0 and
-            inner_block.open_parentheses == 1 and
-            _MATCH_ASM.match(line)):
-          # Enter assembly block
-          inner_block.inline_asm = _INSIDE_ASM
-        else:
-          # Not entering assembly block.  If previous line was _END_ASM,
-          # we will now shift to _NO_ASM state.
-          inner_block.inline_asm = _NO_ASM
-      elif (inner_block.inline_asm == _INSIDE_ASM and
-            inner_block.open_parentheses == 0):
-        # Exit assembly block
-        inner_block.inline_asm = _END_ASM
-
-    # Consume namespace declaration at the beginning of the line.  Do
-    # this in a loop so that we catch same line declarations like this:
-    #   namespace proto2 { namespace bridge { class MessageSet; } }
-    while True:
-      # Match start of namespace.  The "\b\s*" below catches namespace
-      # declarations even if they aren't followed by whitespace; this
-      # is so that we don't confuse our namespace checker.  The
-      # missing spaces will be flagged by CheckSpacing.
-      namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
-      if not namespace_decl_match:
-        break
-
-      new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
-      self.stack.append(new_namespace)
-
-      line = namespace_decl_match.group(2)
-      if line.find('{') != -1:
-        new_namespace.seen_open_brace = True
-        line = line[line.find('{') + 1:]
-
-    # Look for a class declaration in whatever is left of the line
-    # after parsing namespaces.  The regexp accounts for decorated classes
-    # such as in:
-    #   class LOCKABLE API Object {
-    #   };
-    class_decl_match = Match(
-        r'^(\s*(?:template\s*<[\w\s<>,:]*>\s*)?'
-        r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))'
-        r'(.*)$', line)
-    if (class_decl_match and
-        (not self.stack or self.stack[-1].open_parentheses == 0)):
-      # We do not want to accept classes that are actually template arguments:
-      #   template <class Ignore1,
-      #             class Ignore2 = Default<Args>,
-      #             template <Args> class Ignore3>
-      #   void Function() {};
-      #
-      # To avoid template argument cases, we scan forward and look for
-      # an unmatched '>'.  If we see one, assume we are inside a
-      # template argument list.
-      end_declaration = len(class_decl_match.group(1))
-      if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
-        self.stack.append(_ClassInfo(
-            class_decl_match.group(3), class_decl_match.group(2),
-            clean_lines, linenum))
-        line = class_decl_match.group(4)
-
-    # If we have not yet seen the opening brace for the innermost block,
-    # run checks here.
-    if not self.SeenOpenBrace():
-      self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
-
-    # Update access control if we are inside a class/struct
-    if self.stack and isinstance(self.stack[-1], _ClassInfo):
-      classinfo = self.stack[-1]
-      access_match = Match(
-          r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
-          r':(?:[^:]|$)',
-          line)
-      if access_match:
-        classinfo.access = access_match.group(2)
-
-        # Check that access keywords are indented +1 space.  Skip this
-        # check if the keywords are not preceded by whitespace.
-        indent = access_match.group(1)
-        if (len(indent) != classinfo.class_indent + 1 and
-            Match(r'^\s*$', indent)):
-          if classinfo.is_struct:
-            parent = 'struct ' + classinfo.name
-          else:
-            parent = 'class ' + classinfo.name
-          slots = ''
-          if access_match.group(3):
-            slots = access_match.group(3)
-          error(filename, linenum, 'whitespace/indent', 3,
-                '%s%s: should be indented +1 space inside %s' % (
-                    access_match.group(2), slots, parent))
-
-    # Consume braces or semicolons from what's left of the line
-    while True:
-      # Match first brace, semicolon, or closed parenthesis.
-      matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
-      if not matched:
-        break
-
-      token = matched.group(1)
-      if token == '{':
-        # If the namespace or class hasn't seen an opening brace yet, mark
-        # namespace/class head as complete.  Push a new block onto the
-        # stack otherwise.
-        if not self.SeenOpenBrace():
-          self.stack[-1].seen_open_brace = True
-        elif Match(r'^extern\s*"[^"]*"\s*\{', line):
-          self.stack.append(_ExternCInfo())
-        else:
-          self.stack.append(_BlockInfo(True))
-          if _MATCH_ASM.match(line):
-            self.stack[-1].inline_asm = _BLOCK_ASM
-
-      elif token == ';' or token == ')':
-        # If we haven't seen an opening brace yet, but we already saw
-        # a semicolon, this is probably a forward declaration.  Pop
-        # the stack for these.
-        #
-        # Similarly, if we haven't seen an opening brace yet, but we
-        # already saw a closing parenthesis, then these are probably
-        # function arguments with extra "class" or "struct" keywords.
-        # Also pop the stack for these.
-        if not self.SeenOpenBrace():
-          self.stack.pop()
-      else:  # token == '}'
-        # Perform end of block checks and pop the stack.
-        if self.stack:
-          self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
-          self.stack.pop()
-      line = matched.group(2)
-
-  def InnermostClass(self):
-    """Get class info on the top of the stack.
-
-    Returns:
-      A _ClassInfo object if we are inside a class, or None otherwise.
-    """
-    for i in range(len(self.stack), 0, -1):
-      classinfo = self.stack[i - 1]
-      if isinstance(classinfo, _ClassInfo):
-        return classinfo
-    return None
-
-  def CheckCompletedBlocks(self, filename, error):
-    """Checks that all classes and namespaces have been completely parsed.
-
-    Call this when all lines in a file have been processed.
-    Args:
-      filename: The name of the current file.
-      error: The function to call with any errors found.
-    """
-    # Note: This test can result in false positives if #ifdef constructs
-    # get in the way of brace matching. See the testBuildClass test in
-    # cpplint_unittest.py for an example of this.
-    for obj in self.stack:
-      if isinstance(obj, _ClassInfo):
-        error(filename, obj.starting_linenum, 'build/class', 5,
-              'Failed to find complete declaration of class %s' %
-              obj.name)
-      elif isinstance(obj, _NamespaceInfo):
-        error(filename, obj.starting_linenum, 'build/namespaces', 5,
-              'Failed to find complete declaration of namespace %s' %
-              obj.name)
-
-
-def CheckForNonStandardConstructs(filename, clean_lines, linenum,
-                                  nesting_state, error):
-  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
-
-  Complain about several constructs which gcc-2 accepts, but which are
-  not standard C++.  Warning about these in lint is one way to ease the
-  transition to new compilers.
-  - put storage class first (e.g. "static const" instead of "const static").
-  - "%lld" instead of %qd" in printf-type functions.
-  - "%1$d" is non-standard in printf-type functions.
-  - "\%" is an undefined character escape sequence.
-  - text after #endif is not allowed.
-  - invalid inner-style forward declaration.
-  - >? and <? operators, and their >?= and <?= cousins.
-
-  Additionally, check for constructor/destructor style violations and reference
-  members, as it is very convenient to do so while checking for
-  gcc-2 compliance.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: A callable to which errors are reported, which takes 4 arguments:
-           filename, line number, error level, and message
-  """
-
-  # Remove comments from the line, but leave in strings for now.
-  line = clean_lines.lines[linenum]
-
-  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
-    error(filename, linenum, 'runtime/printf_format', 3,
-          '%q in format strings is deprecated.  Use %ll instead.')
-
-  if Search(r'printf\s*\(.*".*%\d+\$', line):
-    error(filename, linenum, 'runtime/printf_format', 2,
-          '%N$ formats are unconventional.  Try rewriting to avoid them.')
-
-  # Remove escaped backslashes before looking for undefined escapes.
-  line = line.replace('\\\\', '')
-
-  if Search(r'("|\').*\\(%|\[|\(|{)', line):
-    error(filename, linenum, 'build/printf_format', 3,
-          '%, [, (, and { are undefined character escapes.  Unescape them.')
-
-  # For the rest, work with both comments and strings removed.
-  line = clean_lines.elided[linenum]
-
-  if Search(r'\b(const|volatile|void|char|short|int|long'
-            r'|float|double|signed|unsigned'
-            r'|schar|u?int8|u?int16|u?int32|u?int64)'
-            r'\s+(register|static|extern|typedef)\b',
-            line):
-    error(filename, linenum, 'build/storage_class', 5,
-          'Storage class (static, extern, typedef, etc) should be first.')
-
-  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
-    error(filename, linenum, 'build/endif_comment', 5,
-          'Uncommented text after #endif is non-standard.  Use a comment.')
-
-  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
-    error(filename, linenum, 'build/forward_decl', 5,
-          'Inner-style forward declarations are invalid.  Remove this line.')
-
-  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
-            line):
-    error(filename, linenum, 'build/deprecated', 3,
-          '>? and <? (max and min) operators are non-standard and deprecated.')
-
-  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
-    # TODO(unknown): Could it be expanded safely to arbitrary references,
-    # without triggering too many false positives? The first
-    # attempt triggered 5 warnings for mostly benign code in the regtest, hence
-    # the restriction.
-    # Here's the original regexp, for reference:
-    # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
-    # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
-    error(filename, linenum, 'runtime/member_string_references', 2,
-          'const string& members are dangerous. It is much better to use '
-          'alternatives, such as pointers or simple constants.')
-
-  # Everything else in this function operates on class declarations.
-  # Return early if the top of the nesting stack is not a class, or if
-  # the class head is not completed yet.
-  classinfo = nesting_state.InnermostClass()
-  if not classinfo or not classinfo.seen_open_brace:
-    return
-
-  # The class may have been declared with namespace or classname qualifiers.
-  # The constructor and destructor will not have those qualifiers.
-  base_classname = classinfo.name.split('::')[-1]
-
-  # Look for single-argument constructors that aren't marked explicit.
-  # Technically a valid construct, but against style. Also look for
-  # non-single-argument constructors which are also technically valid, but
-  # strongly suggest something is wrong.
-  explicit_constructor_match = Match(
-      r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
-      r'\(((?:[^()]|\([^()]*\))*)\)'
-      % re.escape(base_classname),
-      line)
-
-  if explicit_constructor_match:
-    is_marked_explicit = explicit_constructor_match.group(1)
-
-    if not explicit_constructor_match.group(2):
-      constructor_args = []
-    else:
-      constructor_args = explicit_constructor_match.group(2).split(',')
-
-    # Collapse arguments so that commas in template parameter lists and
-    # function argument parameter lists don't split arguments in two.
-    i = 0
-    while i < len(constructor_args):
-      constructor_arg = constructor_args[i]
-      while (constructor_arg.count('<') > constructor_arg.count('>') or
-             constructor_arg.count('(') > constructor_arg.count(')')):
-        constructor_arg += ',' + constructor_args[i + 1]
-        del constructor_args[i + 1]
-      constructor_args[i] = constructor_arg
-      i += 1
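-    # e.g. "Foo(std::map<int, int> m)" is first split into
-    # ['std::map<int', ' int> m'] and then re-joined by the loop above
-    # into the single argument 'std::map<int, int> m'.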
-
-    defaulted_args = [arg for arg in constructor_args if '=' in arg]
-    noarg_constructor = (not constructor_args or  # empty arg list
-                         # 'void' arg specifier
-                         (len(constructor_args) == 1 and
-                          constructor_args[0].strip() == 'void'))
-    onearg_constructor = ((len(constructor_args) == 1 and  # exactly one arg
-                           not noarg_constructor) or
-                          # all but at most one arg defaulted
-                          (len(constructor_args) >= 1 and
-                           not noarg_constructor and
-                           len(defaulted_args) >= len(constructor_args) - 1))
-    initializer_list_constructor = bool(
-        onearg_constructor and
-        Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
-    copy_constructor = bool(
-        onearg_constructor and
-        Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
-              % re.escape(base_classname), constructor_args[0].strip()))
-
-    if (not is_marked_explicit and
-        onearg_constructor and
-        not initializer_list_constructor and
-        not copy_constructor):
-      if defaulted_args:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Constructors callable with one argument '
-              'should be marked explicit.')
-      else:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Single-parameter constructors should be marked explicit.')
-    elif is_marked_explicit and not onearg_constructor:
-      if noarg_constructor:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Zero-parameter constructors should not be marked explicit.')
-      else:
-        error(filename, linenum, 'runtime/explicit', 0,
-              'Constructors that require multiple arguments '
-              'should not be marked explicit.')
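-    # Illustrative outcomes of the checks above (not exhaustive):
-    #   Foo(int x);                        -> mark explicit
-    #   explicit Foo();                    -> do not mark explicit
-    #   Foo(const Foo& other);             -> OK (copy constructor)
-    #   Foo(std::initializer_list<int> v); -> OK (initializer list)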
-
-
-def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
-  """Checks for the correctness of various spacing around function calls.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Since function calls often occur inside if/for/while/switch
-  # expressions - which have their own, more liberal conventions - we
-  # first see if we should be looking inside such an expression for a
-  # function call, to which we can apply more strict standards.
-  fncall = line    # if there's no control flow construct, look at whole line
-  for pattern in (r'\bif\s*\((.*)\)\s*{',
-                  r'\bfor\s*\((.*)\)\s*{',
-                  r'\bwhile\s*\((.*)\)\s*[{;]',
-                  r'\bswitch\s*\((.*)\)\s*{'):
-    match = Search(pattern, line)
-    if match:
-      fncall = match.group(1)    # look inside the parens for function calls
-      break
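-  # e.g. for the line "if (DoSomething( x )) {", fncall becomes
-  # "DoSomething( x )", and the stricter checks below apply to it.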
-
-  # Except in if/for/while/switch, there should never be space
-  # immediately inside parens (eg "f( 3, 4 )").  We make an exception
-  # for nested parens ( (a+b) + c ).  Likewise, there should never be
-  # a space before a ( when it's a function argument.  I assume it's a
-  # function argument when the char before the whitespace is legal in
-  # a function name (alnum + _) and we're not starting a macro. Also ignore
-  # pointers and references to arrays and functions because they're too
-  # tricky; we use a very simple way to recognize these:
-  # " (something)(maybe-something)" or
-  # " (something)(maybe-something," or
-  # " (something)[something]"
-  # Note that we assume the contents of [] to be short enough that
-  # they'll never need to wrap.
-  if (  # Ignore control structures.
-      not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
-                 fncall) and
-      # Ignore pointers/references to functions.
-      not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
-      # Ignore pointers/references to arrays.
-      not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
-    if Search(r'\w\s*\(\s(?!\s*\\$)', fncall):      # a ( used for a fn call
-      error(filename, linenum, 'whitespace/parens', 4,
-            'Extra space after ( in function call')
-    elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
-      error(filename, linenum, 'whitespace/parens', 2,
-            'Extra space after (')
-    if (Search(r'\w\s+\(', fncall) and
-        not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
-        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
-        not Search(r'\bcase\s+\(', fncall)):
-      # TODO(unknown): Space after an operator function seems to be a common
-      # error; silence those for now by restricting them to highest verbosity.
-      if Search(r'\boperator_*\b', line):
-        error(filename, linenum, 'whitespace/parens', 0,
-              'Extra space before ( in function call')
-      else:
-        error(filename, linenum, 'whitespace/parens', 4,
-              'Extra space before ( in function call')
-    # If the ) is followed only by a newline or a { + newline, assume it's
-    # part of a control statement (if/while/etc), and don't complain
-    if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
-      # If the closing parenthesis is preceded by only whitespaces,
-      # try to give a more descriptive error message.
-      if Search(r'^\s+\)', fncall):
-        error(filename, linenum, 'whitespace/parens', 2,
-              'Closing ) should be moved to the previous line')
-      else:
-        error(filename, linenum, 'whitespace/parens', 2,
-              'Extra space before )')
-
-
-def IsBlankLine(line):
-  """Returns true if the given line is blank.
-
-  We consider a line to be blank if the line is empty or consists of
-  only white spaces.
-
-  Args:
-    line: A single line of text.
-
-  Returns:
-    True if the given line is blank.
-  """
-  return not line or line.isspace()
-
-
-def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
-                                 error):
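-  # Checks that items directly inside a namespace body are not indented
-  # relative to the enclosing namespace; the actual indentation check is
-  # delegated to CheckItemIndentationInNamespace.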
-  is_namespace_indent_item = (
-      len(nesting_state.stack) > 1 and
-      nesting_state.stack[-1].check_namespace_indentation and
-      isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and
-      nesting_state.previous_stack_top == nesting_state.stack[-2])
-
-  if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
-                                     clean_lines.elided, line):
-    CheckItemIndentationInNamespace(filename, clean_lines.elided,
-                                    line, error)
-
-
-def CheckForFunctionLengths(filename, clean_lines, linenum,
-                            function_state, error):
-  """Reports for long function bodies.
-
-  For an overview of why this is done, see:
-  http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
-
-  Uses a simplistic algorithm assuming other style guidelines
-  (especially spacing) are followed.
-  Only checks unindented functions, so class members are unchecked.
-  Trivial bodies are unchecked, so constructors with huge initializer lists
-  may be missed.
-  Blank/comment lines are not counted so as to avoid encouraging the removal
-  of vertical space and comments just to get through a lint check.
-  NOLINT *on the last line of a function* disables this check.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    function_state: Current function name and lines in body so far.
-    error: The function to call with any errors found.
-  """
-  lines = clean_lines.lines
-  line = lines[linenum]
-  joined_line = ''
-
-  starting_func = False
-  regexp = r'(\w(\w|::|\*|\&|\s)*)\('  # decls * & space::name( ...
-  match_result = Match(regexp, line)
-  if match_result:
-    # If the name is all caps and underscores, figure it's a macro and
-    # ignore it, unless it's TEST or TEST_F.
-    function_name = match_result.group(1).split()[-1]
-    if function_name == 'TEST' or function_name == 'TEST_F' or (
-        not Match(r'[A-Z_]+$', function_name)):
-      starting_func = True
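-  # e.g. "void MyClass::Process(int x) {" starts a function here, while
-  # "MY_MACRO(x);" is assumed to be a macro and skipped (TEST and TEST_F
-  # are the exceptions).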
-
-  if starting_func:
-    body_found = False
-    for start_linenum in xrange(linenum, clean_lines.NumLines()):
-      start_line = lines[start_linenum]
-      joined_line += ' ' + start_line.lstrip()
-      if Search(r'(;|})', start_line):  # Declarations and trivial functions
-        body_found = True
-        break                              # ... ignore
-      elif Search(r'{', start_line):
-        body_found = True
-        function = Search(r'((\w|:)*)\(', line).group(1)
-        if Match(r'TEST', function):    # Handle TEST... macros
-          parameter_regexp = Search(r'(\(.*\))', joined_line)
-          if parameter_regexp:             # Ignore bad syntax
-            function += parameter_regexp.group(1)
-        else:
-          function += '()'
-        function_state.Begin(function)
-        break
-    if not body_found:
-      # No body for the function (or evidence of a non-function) was found.
-      error(filename, linenum, 'readability/fn_size', 5,
-            'Lint failed to find start of function body.')
-  elif Match(r'^\}\s*$', line):  # function end
-    function_state.Check(error, filename, linenum)
-    function_state.End()
-  elif not Match(r'^\s*$', line):
-    function_state.Count()  # Count non-blank/non-comment lines.
-
-
-_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
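-# e.g. "// TODO(alice): fix this" matches with username "(alice)", while
-# "// TODO: fix this" matches with no username and is flagged in
-# CheckComment below.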
-
-
-def CheckComment(line, filename, linenum, next_line_start, error):
-  """Checks for common mistakes in comments.
-
-  Args:
-    line: The line in question.
-    filename: The name of the current file.
-    linenum: The number of the line to check.
-    next_line_start: The first non-whitespace column of the next line.
-    error: The function to call with any errors found.
-  """
-  commentpos = line.find('//')
-  if commentpos != -1:
-    # Check if the // may be in quotes.  If so, ignore it
-    # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
-    if (line.count('"', 0, commentpos) -
-        line.count('\\"', 0, commentpos)) % 2 == 0:   # not in quotes
-      # Allow one space for new scopes, two spaces otherwise:
-      if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and
-          ((commentpos >= 1 and
-            line[commentpos-1] not in string.whitespace) or
-           (commentpos >= 2 and
-            line[commentpos-2] not in string.whitespace))):
-        error(filename, linenum, 'whitespace/comments', 2,
-              'At least two spaces is best between code and comments')
-
-      # Checks for common mistakes in TODO comments.
-      comment = line[commentpos:]
-      match = _RE_PATTERN_TODO.match(comment)
-      if match:
-        # One whitespace is correct; zero whitespace is handled elsewhere.
-        leading_whitespace = match.group(1)
-        if len(leading_whitespace) > 1:
-          error(filename, linenum, 'whitespace/todo', 2,
-                'Too many spaces before TODO')
-
-        username = match.group(2)
-        if not username:
-          error(filename, linenum, 'readability/todo', 2,
-                'Missing username in TODO; it should look like '
-                '"// TODO(my_username): Stuff."')
-
-        middle_whitespace = match.group(3)
-        # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison
-        if middle_whitespace != ' ' and middle_whitespace != '':
-          error(filename, linenum, 'whitespace/todo', 2,
-                'TODO(my_username) should be followed by a space')
-
-      # If the comment contains an alphanumeric character, there
-      # should be a space somewhere between it and the // unless
-      # it's a /// or //! Doxygen comment.
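-      # e.g. "//comment" is flagged, while "// comment" is not.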
-      if (Match(r'//[^ ]*\w', comment) and
-          not Match(r'(///|//\!)(\s+|$)', comment)):
-        error(filename, linenum, 'whitespace/comments', 4,
-              'Should have a space between // and comment')
-
-
-def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
-  """Checks for improper use of DISALLOW* macros.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]  # get rid of comments and strings
-
-  matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
-                   r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
-  if not matched:
-    return
-  if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
-    if nesting_state.stack[-1].access != 'private':
-      error(filename, linenum, 'readability/constructors', 3,
-            '%s must be in the private: section' % matched.group(1))
-
-  else:
-    # Found DISALLOW* macro outside a class declaration, or perhaps it
-    # was used inside a function when it should have been part of the
-    # class declaration.  We could issue a warning here, but it
-    # probably resulted in a compiler error already.
-    pass
-
-
-def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
-  """Checks for the correctness of various spacing issues in the code.
-
-  Things we check for: spaces around operators, spaces after
-  if/for/while/switch, no spaces around parens in function calls, two
-  spaces between code and comment, don't start a block with a blank
-  line, don't end a function with a blank line, don't add a blank line
-  after public/protected/private, don't have too many blank lines in a row.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-
-  # Don't use "elided" lines here, otherwise we can't check commented lines.
-  # Don't want to use "raw" either, because we don't want to check inside C++11
-  # raw strings.
-  raw = clean_lines.lines_without_raw_strings
-  line = raw[linenum]
-
-  # Before nixing comments, check if the line is blank for no good
-  # reason.  This includes the first line after a block is opened, and
-  # blank lines at the end of a function (i.e., right before a line like '}').
-  #
-  # Skip all the blank line checks if we are immediately inside a
-  # namespace body.  In other words, don't issue blank line warnings
-  # for this block:
-  #   namespace {
-  #
-  #   }
-  #
-  # A warning about missing end of namespace comments will be issued instead.
-  #
-  # Also skip blank line checks for 'extern "C"' blocks, which are formatted
-  # like namespaces.
-  if (IsBlankLine(line) and
-      not nesting_state.InNamespaceBody() and
-      not nesting_state.InExternC()):
-    elided = clean_lines.elided
-    prev_line = elided[linenum - 1]
-    prevbrace = prev_line.rfind('{')
-    # TODO(unknown): Don't complain if line before blank line, and line after,
-    #                both start with alnums and are indented the same amount.
-    #                This ignores whitespace at the start of a namespace block
-    #                because those are not usually indented.
-    if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1:
-      # OK, we have a blank line at the start of a code block.  Before we
-      # complain, we check if it is an exception to the rule: The previous
-      # non-empty line has the parameters of a function header that are indented
-      # 4 spaces (because they did not fit in an 80-column line when placed on
-      # the same line as the function name).  We also check for the case where
-      # the previous line is indented 6 spaces, which may happen when the
-      # initializers of a constructor do not fit into an 80-column line.
-      exception = False
-      if Match(r' {6}\w', prev_line):  # Initializer list?
-        # We are looking for the opening column of the initializer list, which
-        # should be indented 4 spaces to cause 6 space indentation afterwards.
-        search_position = linenum-2
-        while (search_position >= 0
-               and Match(r' {6}\w', elided[search_position])):
-          search_position -= 1
-        exception = (search_position >= 0
-                     and elided[search_position][:5] == '    :')
-      else:
-        # Search for the function arguments or an initializer list.  We use a
-        # simple heuristic here: If the line is indented 4 spaces; and we have a
-        # closing paren, without the opening paren, followed by an opening brace
-        # or colon (for initializer lists) we assume that it is the last line of
-        # a function header.  If we have a colon indented 4 spaces, it is an
-        # initializer list.
-        exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
-                           prev_line)
-                     or Match(r' {4}:', prev_line))
-
-      if not exception:
-        error(filename, linenum, 'whitespace/blank_line', 2,
-              'Redundant blank line at the start of a code block '
-              'should be deleted.')
-    # Ignore blank lines at the end of a block in a long if-else
-    # chain, like this:
-    #   if (condition1) {
-    #     // Something followed by a blank line
-    #
-    #   } else if (condition2) {
-    #     // Something else
-    #   }
-    if linenum + 1 < clean_lines.NumLines():
-      next_line = raw[linenum + 1]
-      if (next_line
-          and Match(r'\s*}', next_line)
-          and next_line.find('} else ') == -1):
-        error(filename, linenum, 'whitespace/blank_line', 3,
-              'Redundant blank line at the end of a code block '
-              'should be deleted.')
-
-    matched = Match(r'\s*(public|protected|private):', prev_line)
-    if matched:
-      error(filename, linenum, 'whitespace/blank_line', 3,
-            'Do not leave a blank line after "%s:"' % matched.group(1))
-
-  # Next, check comments
-  next_line_start = 0
-  if linenum + 1 < clean_lines.NumLines():
-    next_line = raw[linenum + 1]
-    next_line_start = len(next_line) - len(next_line.lstrip())
-  CheckComment(line, filename, linenum, next_line_start, error)
-
-  # get rid of comments and strings
-  line = clean_lines.elided[linenum]
-
-  # You shouldn't have spaces before your brackets, except maybe after
-  # 'delete []' or 'return []() {};'
-  if Search(r'\w\s+\[', line) and not Search(r'(?:delete|return)\s+\[', line):
-    error(filename, linenum, 'whitespace/braces', 5,
-          'Extra space before [')
-
-  # In a range-based for, we want spaces before and after the colon, but
-  # not around "::" tokens that might appear.
-  if (Search(r'for *\(.*[^:]:[^: ]', line) or
-      Search(r'for *\(.*[^: ]:[^:]', line)):
-    error(filename, linenum, 'whitespace/forcolon', 2,
-          'Missing space around colon in range-based for loop')
-
-
-def CheckOperatorSpacing(filename, clean_lines, linenum, error):
-  """Checks for horizontal spacing around operators.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Don't try to do spacing checks for operator methods.  Do this by
-  # replacing the troublesome characters with something else,
-  # preserving column position for all other characters.
-  #
-  # The replacement is done repeatedly to avoid false positives from
-  # operators that call operators.
-  while True:
-    match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
-    if match:
-      line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)
-    else:
-      break
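-  # e.g. "bool operator==(const Foo& a)" has been rewritten to
-  # "bool operator__(const Foo& a)" at this point, so the checks below
-  # do not flag the '==' itself.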
-
-  # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
-  # Otherwise not.  Note we only check for non-spaces on *both* sides;
-  # sometimes people put non-spaces on one side when aligning ='s among
-  # many lines (not that this is behavior that I approve of...)
-  if ((Search(r'[\w.]=', line) or
-       Search(r'=[\w.]', line))
-      and not Search(r'\b(if|while|for) ', line)
-      # Operators taken from [lex.operators] in C++11 standard.
-      and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
-      and not Search(r'operator=', line)):
-    error(filename, linenum, 'whitespace/operators', 4,
-          'Missing spaces around =')
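-  # e.g. "x=5;" is flagged above, while "x += 5;" and
-  # "while (x = Next()) ..." are not.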
-
-  # It's ok not to have spaces around binary operators like + - * /, but if
-  # there's too little whitespace, we get concerned.  It's hard to tell,
-  # though, so we punt on this one for now.  TODO.
-
-  # You should always have whitespace around binary operators.
-  #
-  # Check <= and >= first to avoid false positives with < and >, then
-  # check non-include lines for spacing around < and >.
-  #
-  # If the operator is followed by a comma, assume it's being used in a
-  # macro context and don't do any checks.  This avoids false
-  # positives.
-  #
-  # Note that && is not included here.  Those are checked separately
-  # in CheckRValueReference
-  match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line)
-  if match:
-    error(filename, linenum, 'whitespace/operators', 3,
-          'Missing spaces around %s' % match.group(1))
-  elif not Match(r'#.*include', line):
-    # Look for < that is not surrounded by spaces.  This is only
-    # triggered if both sides are missing spaces, even though
-    # technically we should flag it if at least one side is missing a
-    # space.  This is done to avoid some false positives with shifts.
-    match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
-    if match:
-      (_, _, end_pos) = CloseExpression(
-          clean_lines, linenum, len(match.group(1)))
-      if end_pos <= -1:
-        error(filename, linenum, 'whitespace/operators', 3,
-              'Missing spaces around <')
-
-    # Look for > that is not surrounded by spaces.  Similar to the
-    # above, we only trigger if both sides are missing spaces to avoid
-    # false positives with shifts.
-    match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
-    if match:
-      (_, _, start_pos) = ReverseCloseExpression(
-          clean_lines, linenum, len(match.group(1)))
-      if start_pos <= -1:
-        error(filename, linenum, 'whitespace/operators', 3,
-              'Missing spaces around >')
-
-  # We allow no-spaces around << when used like this: 10<<20, but
-  # not otherwise (particularly, not when used as streams)
-  #
-  # We also allow operators following an opening parenthesis, since
-  # those tend to be macros that deal with operators.
-  match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])', line)
-  if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
-      not (match.group(1) == 'operator' and match.group(2) == ';')):
-    error(filename, linenum, 'whitespace/operators', 3,
-          'Missing spaces around <<')
-
-  # We allow no-spaces around >> for almost anything.  This is because
-  # C++11 allows ">>" to close nested templates, which accounts for
-  # most cases when ">>" is not followed by a space.
-  #
-  # We still warn on ">>" followed by alpha character, because that is
-  # likely due to ">>" being used for right shifts, e.g.:
-  #   value >> alpha
-  #
-  # When ">>" is used to close templates, the alphanumeric letter that
-  # follows would be part of an identifier, and there should still be
-  # a space separating the template type and the identifier.
-  #   type<type<type>> alpha
-  match = Search(r'>>[a-zA-Z_]', line)
-  if match:
-    error(filename, linenum, 'whitespace/operators', 3,
-          'Missing spaces around >>')
-
-  # There shouldn't be space around unary operators
-  match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
-  if match:
-    error(filename, linenum, 'whitespace/operators', 4,
-          'Extra space for operator %s' % match.group(1))
-
-
-def CheckParenthesisSpacing(filename, clean_lines, linenum, error):
-  """Checks for horizontal spacing around parentheses.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # There should be a space between if/for/while/switch and the opening (.
-  match = Search(r' (if\(|for\(|while\(|switch\()', line)
-  if match:
-    error(filename, linenum, 'whitespace/parens', 5,
-          'Missing space before ( in %s' % match.group(1))
-
-  # For if/for/while/switch, the left and right parens should be
-  # consistent about how many spaces are inside the parens, and
-  # there should either be zero or one spaces inside the parens.
-  # We don't want: "if ( foo)" or "if ( foo   )".
-  # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
-  match = Search(r'\b(if|for|while|switch)\s*'
-                 r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
-                 line)
-  if match:
-    if len(match.group(2)) != len(match.group(4)):
-      if not (match.group(3) == ';' and
-              len(match.group(2)) == 1 + len(match.group(4)) or
-              not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
-        error(filename, linenum, 'whitespace/parens', 5,
-              'Mismatching spaces inside () in %s' % match.group(1))
-    if len(match.group(2)) not in [0, 1]:
-      error(filename, linenum, 'whitespace/parens', 5,
-            'Should have zero or one spaces inside ( and ) in %s' %
-            match.group(1))
-
-
-def CheckCommaSpacing(filename, clean_lines, linenum, error):
-  """Checks for horizontal spacing near commas and semicolons.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  raw = clean_lines.lines_without_raw_strings
-  line = clean_lines.elided[linenum]
-
-  # You should always have a space after a comma (either as fn arg or operator)
-  #
-  # This does not apply when the non-space character following the
-  # comma is another comma, since the only time when that happens is
-  # for empty macro arguments.
-  #
-  # We run this check in two passes: first pass on elided lines to
-  # verify that lines contain missing whitespaces, second pass on raw
-  # lines to confirm that those missing whitespaces are not due to
-  # elided comments.
-  if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and
-      Search(r',[^,\s]', raw[linenum])):
-    error(filename, linenum, 'whitespace/comma', 3,
-          'Missing space after ,')
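-  # e.g. "Foo(a,b);" is flagged, "Foo(a, b);" is not; the ReplaceAll
-  # above keeps "operator,(" from tripping the check.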
-
-  # You should always have a space after a semicolon
-  # except for few corner cases
-  # TODO(unknown): clarify whether 'if (1) { return 1;}' requires one more
-  # space after the ;
-  if Search(r';[^\s};\\)/]', line):
-    error(filename, linenum, 'whitespace/semicolon', 3,
-          'Missing space after ;')
-
-
-def CheckBracesSpacing(filename, clean_lines, linenum, error):
-  """Checks for horizontal spacing near commas.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Except after an opening paren, or after another opening brace (in case of
-  # an initializer list, for instance), you should have spaces before your
-  # braces. And since you should never have braces at the beginning of a line,
-  # this is an easy test.
-  match = Match(r'^(.*[^ ({>]){', line)
-  if match:
-    # Try a bit harder to check for brace initialization.  This
-    # happens in one of the following forms:
-    #   Constructor() : initializer_list_{} { ... }
-    #   Constructor{}.MemberFunction()
-    #   Type variable{};
-    #   FunctionCall(type{}, ...);
-    #   LastArgument(..., type{});
-    #   LOG(INFO) << type{} << " ...";
-    #   map_of_type[{...}] = ...;
-    #   ternary = expr ? new type{} : nullptr;
-    #   OuterTemplate<InnerTemplateConstructor<Type>{}>
-    #
-    # We check for the character following the closing brace, and
-    # silence the warning if it's one of those listed above, i.e.
-    # "{.;,)<>]:".
-    #
-    # To account for nested initializer lists, we allow any number of
-    # closing braces up to "{;,)<".  We can't simply silence the
-    # warning on first sight of closing brace, because that would
-    # cause false negatives for things that are not initializer lists.
-    #   Silence this:         But not this:
-    #     Outer{                if (...) {
-    #       Inner{...}            if (...){  // Missing space before {
-    #     };                    }
-    #
-    # There is a false negative with this approach if people inserted
-    # spurious semicolons, e.g. "if (cond){};", but we will catch the
-    # spurious semicolon with a separate check.
-    (endline, endlinenum, endpos) = CloseExpression(
-        clean_lines, linenum, len(match.group(1)))
-    trailing_text = ''
-    if endpos > -1:
-      trailing_text = endline[endpos:]
-    for offset in xrange(endlinenum + 1,
-                         min(endlinenum + 3, clean_lines.NumLines() - 1)):
-      trailing_text += clean_lines.elided[offset]
-    if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text):
-      error(filename, linenum, 'whitespace/braces', 5,
-            'Missing space before {')
-
-  # Make sure '} else {' has spaces.
-  if Search(r'}else', line):
-    error(filename, linenum, 'whitespace/braces', 5,
-          'Missing space before else')
-
-  # You shouldn't have a space before a semicolon at the end of the line.
-  # There's a special case for "for" since the style guide allows space before
-  # the semicolon there.
-  if Search(r':\s*;\s*$', line):
-    error(filename, linenum, 'whitespace/semicolon', 5,
-          'Semicolon defining empty statement. Use {} instead.')
-  elif Search(r'^\s*;\s*$', line):
-    error(filename, linenum, 'whitespace/semicolon', 5,
-          'Line contains only semicolon. If this should be an empty statement, '
-          'use {} instead.')
-  elif (Search(r'\s+;\s*$', line) and
-        not Search(r'\bfor\b', line)):
-    error(filename, linenum, 'whitespace/semicolon', 5,
-          'Extra space before last semicolon. If this should be an empty '
-          'statement, use {} instead.')
-
-
-def IsDecltype(clean_lines, linenum, column):
-  """Check if the token ending on (linenum, column) is decltype().
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: the number of the line to check.
-    column: end column of the token to check.
-  Returns:
-    True if this token is a decltype() expression, False otherwise.
-  """
-  (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column)
-  if start_col < 0:
-    return False
-  if Search(r'\bdecltype\s*$', text[0:start_col]):
-    return True
-  return False
-
-
-def IsTemplateParameterList(clean_lines, linenum, column):
-  """Check if the token ending on (linenum, column) is the end of template<>.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: the number of the line to check.
-    column: end column of the token to check.
-  Returns:
-    True if this token is the end of a template parameter list, False otherwise.
-  """
-  (_, startline, startpos) = ReverseCloseExpression(
-      clean_lines, linenum, column)
-  if (startpos > -1 and
-      Search(r'\btemplate\s*$', clean_lines.elided[startline][0:startpos])):
-    return True
-  return False
-
-
-def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
-  """Check if the token ending on (linenum, column) is a type.
-
-  Assumes that text to the right of the column is "&&" or a function
-  name.
-
-  Args:
-    typenames: set of type names from template-argument-list.
-    clean_lines: A CleansedLines instance containing the file.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    linenum: the number of the line to check.
-    column: end column of the token to check.
-  Returns:
-    True if this token is a type, False if we are not sure.
-  """
-  prefix = clean_lines.elided[linenum][0:column]
-
-  # Get one word to the left.  If we failed to do so, this is most
-  # likely not a type, since it's unlikely that the type name and "&&"
-  # would be split across multiple lines.
-  match = Match(r'^(.*)(\b\w+|[>*)&])\s*$', prefix)
-  if not match:
-    return False
-
-  # Check text following the token.  If it's "&&>" or "&&," or "&&...", it's
-  # most likely an rvalue reference used inside a template.
-  suffix = clean_lines.elided[linenum][column:]
-  if Match(r'&&\s*(?:[>,]|\.\.\.)', suffix):
-    return True
-
-  # Check for known types and end of templates:
-  #   int&& variable
-  #   vector<int>&& variable
-  #
-  # Because this function is called recursively, we also need to
-  # recognize pointer and reference types:
-  #   int* Function()
-  #   int& Function()
-  if (match.group(2) in typenames or
-      match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
-                         'short', 'int', 'long', 'signed', 'unsigned',
-                         'float', 'double', 'void', 'auto', '>', '*', '&']):
-    return True
-
-  # If we see a close parenthesis, look for decltype on the other side.
-  # decltype would unambiguously identify a type, anything else is
-  # probably a parenthesized expression and not a type.
-  if match.group(2) == ')':
-    return IsDecltype(
-        clean_lines, linenum, len(match.group(1)) + len(match.group(2)) - 1)
-
-  # Check for casts and cv-qualifiers.
-  #   match.group(1)  remainder
-  #   --------------  ---------
-  #   const_cast<     type&&
-  #   const           type&&
-  #   type            const&&
-  if Search(r'\b(?:const_cast\s*<|static_cast\s*<|dynamic_cast\s*<|'
-            r'reinterpret_cast\s*<|\w+\s)\s*$',
-            match.group(1)):
-    return True
-
-  # Look for a preceding symbol that might help differentiate the context.
-  # These are the cases that would be ambiguous:
-  #   match.group(1)  remainder
-  #   --------------  ---------
-  #   Call         (   expression &&
-  #   Declaration  (   type&&
-  #   sizeof       (   type&&
-  #   if           (   expression &&
-  #   while        (   expression &&
-  #   for          (   type&&
-  #   for(         ;   expression &&
-  #   statement    ;   type&&
-  #   block        {   type&&
-  #   constructor  {   expression &&
-  start = linenum
-  line = match.group(1)
-  match_symbol = None
-  while start >= 0:
-    # We want to skip over identifiers and commas to get to a symbol.
-    # Commas are skipped so that we can find the opening parenthesis
-    # for function parameter lists.
-    match_symbol = Match(r'^(.*)([^\w\s,])[\w\s,]*$', line)
-    if match_symbol:
-      break
-    start -= 1
-    line = clean_lines.elided[start]
-
-  if not match_symbol:
-    # Probably the first statement in the file is an rvalue reference
-    return True
-
-  if match_symbol.group(2) == '}':
-    # Found a closing brace, probably an indication of this:
-    #   block{} type&&
-    return True
-
-  if match_symbol.group(2) == ';':
-    # Found semicolon, probably one of these:
-    #   for(; expression &&
-    #   statement; type&&
-
-    # Look for the previous 'for(' in the previous lines.
-    before_text = match_symbol.group(1)
-    for i in xrange(start - 1, max(start - 6, 0), -1):
-      before_text = clean_lines.elided[i] + before_text
-    if Search(r'for\s*\([^{};]*$', before_text):
-      # This is the condition inside a for-loop
-      return False
-
-    # Did not find a for-init-statement before this semicolon, so this
-    # is probably a new statement and not a condition.
-    return True
-
-  if match_symbol.group(2) == '{':
-    # Found opening brace, probably one of these:
-    #   block{ type&& = ... ; }
-    #   constructor{ expression && expression }
-
-    # Look for a closing brace or a semicolon.  If we see a semicolon
-    # first, this is probably an rvalue reference.
-    line = clean_lines.elided[start][0:len(match_symbol.group(1)) + 1]
-    end = start
-    depth = 1
-    while True:
-      for ch in line:
-        if ch == ';':
-          return True
-        elif ch == '{':
-          depth += 1
-        elif ch == '}':
-          depth -= 1
-          if depth == 0:
-            return False
-      end += 1
-      if end >= clean_lines.NumLines():
-        break
-      line = clean_lines.elided[end]
-    # Incomplete program?
-    return False
-
-  if match_symbol.group(2) == '(':
-    # Opening parenthesis.  Need to check what's to the left of the
-    # parenthesis.  Look back one extra line for additional context.
-    before_text = match_symbol.group(1)
-    if linenum > 1:
-      before_text = clean_lines.elided[linenum - 1] + before_text
-
-    # Patterns that are likely to be types:
-    #   [](type&&
-    #   for (type&&
-    #   sizeof(type&&
-    #   operator=(type&&
-    #
-    if Search(r'(?:\]|\bfor|\bsizeof|\boperator\s*\S+\s*)\s*$', before_text):
-      return True
-
-    # Patterns that are likely to be expressions:
-    #   if (expression &&
-    #   while (expression &&
-    #   : initializer(expression &&
-    #   , initializer(expression &&
-    #   ( FunctionCall(expression &&
-    #   + FunctionCall(expression &&
-    #   + (expression &&
-    #
-    # The last '+' represents operators such as '+' and '-'.
-    if Search(r'(?:\bif|\bwhile|[-+=%^(<!?:,&*]\s*)$', before_text):
-      return False
-
-    # Something else.  Check that tokens to the left look like
-    #   return_type function_name
-    match_func = Match(r'^(.*\S.*)\s+\w(?:\w|::)*(?:<[^<>]*>)?\s*$',
-                       match_symbol.group(1))
-    if match_func:
-      # Check for constructors, which don't have return types.
-      if Search(r'\b(?:explicit|inline)$', match_func.group(1)):
-        return True
-      implicit_constructor = Match(r'\s*(\w+)\((?:const\s+)?(\w+)', prefix)
-      if (implicit_constructor and
-          implicit_constructor.group(1) == implicit_constructor.group(2)):
-        return True
-      return IsRValueType(typenames, clean_lines, nesting_state, linenum,
-                          len(match_func.group(1)))
-
-    # Nothing before the function name.  If this is inside a block scope,
-    # this is probably a function call.
-    return not (nesting_state.previous_stack_top and
-                nesting_state.previous_stack_top.IsBlockInfo())
-
-  if match_symbol.group(2) == '>':
-    # Possibly a closing bracket, check that what's on the other side
-    # looks like the start of a template.
-    return IsTemplateParameterList(
-        clean_lines, start, len(match_symbol.group(1)))
-
-  # Some other symbol, usually something like "a=b&&c".  This is most
-  # likely not a type.
-  return False
-
-
-def IsDeletedOrDefault(clean_lines, linenum):
-  """Check if current constructor or operator is deleted or default.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-  Returns:
-    True if this is a deleted or default constructor.
-  """
-  open_paren = clean_lines.elided[linenum].find('(')
-  if open_paren < 0:
-    return False
-  (close_line, _, close_paren) = CloseExpression(
-      clean_lines, linenum, open_paren)
-  if close_paren < 0:
-    return False
-  return Match(r'\s*=\s*(?:delete|default)\b', close_line[close_paren:])
-
-
-def IsRValueAllowed(clean_lines, linenum, typenames):
-  """Check if RValue reference is allowed on a particular line.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    typenames: set of type names from template-argument-list.
-  Returns:
-    True if line is within the region where RValue references are allowed.
-  """
-  # Allow region marked by PUSH/POP macros
-  for i in xrange(linenum, 0, -1):
-    line = clean_lines.elided[i]
-    if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
-      if not line.endswith('PUSH'):
-        return False
-      for j in xrange(linenum, clean_lines.NumLines(), 1):
-        line = clean_lines.elided[j]
-        if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
-          return line.endswith('POP')
-
-  # Allow operator=
-  line = clean_lines.elided[linenum]
-  if Search(r'\boperator\s*=\s*\(', line):
-    return IsDeletedOrDefault(clean_lines, linenum)
-
-  # Allow constructors
-  match = Match(r'\s*(?:[\w<>]+::)*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line)
-  if match and match.group(1) == match.group(2):
-    return IsDeletedOrDefault(clean_lines, linenum)
-  if Search(r'\b(?:explicit|inline)\s+[\w<>]+\s*\(', line):
-    return IsDeletedOrDefault(clean_lines, linenum)
-
-  if Match(r'\s*[\w<>]+\s*\(', line):
-    previous_line = 'ReturnType'
-    if linenum > 0:
-      previous_line = clean_lines.elided[linenum - 1]
-    if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$', previous_line):
-      return IsDeletedOrDefault(clean_lines, linenum)
-
-  # Reject types not mentioned in template-argument-list
-  while line:
-    match = Match(r'^.*?(\w+)\s*&&(.*)$', line)
-    if not match:
-      break
-    if match.group(1) not in typenames:
-      return False
-    line = match.group(2)
-
-  # All RValue types that were in template-argument-list should have
-  # been removed by now.  Those were allowed, assuming that they will
-  # be forwarded.
-  #
-  # If there are no remaining RValue types left (i.e. types that were
-  # not found in template-argument-list), flag those as not allowed.
-  return line.find('&&') < 0
-
-
-def GetTemplateArgs(clean_lines, linenum):
-  """Find list of template arguments associated with this function declaration.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: Line number containing the start of the function declaration,
-             usually one line after the end of the template-argument-list.
-  Returns:
-    Set of type names, or empty set if this does not appear to have
-    any template parameters.
-  """
-  # Find start of function
-  func_line = linenum
-  while func_line > 0:
-    line = clean_lines.elided[func_line]
-    if Match(r'^\s*$', line):
-      return set()
-    if line.find('(') >= 0:
-      break
-    func_line -= 1
-  if func_line == 0:
-    return set()
-
-  # Collapse template-argument-list into a single string
-  argument_list = ''
-  match = Match(r'^(\s*template\s*)<', clean_lines.elided[func_line])
-  if match:
-    # template-argument-list on the same line as function name
-    start_col = len(match.group(1))
-    _, end_line, end_col = CloseExpression(clean_lines, func_line, start_col)
-    if end_col > -1 and end_line == func_line:
-      start_col += 1  # Skip the opening bracket
-      argument_list = clean_lines.elided[func_line][start_col:end_col]
-
-  elif func_line > 1:
-    # template-argument-list one line before function name
-    match = Match(r'^(.*)>\s*$', clean_lines.elided[func_line - 1])
-    if match:
-      end_col = len(match.group(1))
-      _, start_line, start_col = ReverseCloseExpression(
-          clean_lines, func_line - 1, end_col)
-      if start_col > -1:
-        start_col += 1  # Skip the opening bracket
-        while start_line < func_line - 1:
-          argument_list += clean_lines.elided[start_line][start_col:]
-          start_col = 0
-          start_line += 1
-        argument_list += clean_lines.elided[func_line - 1][start_col:end_col]
-
-  if not argument_list:
-    return set()
-
-  # Extract type names
-  typenames = set()
-  while True:
-    match = Match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$',
-                  argument_list)
-    if not match:
-      break
-    typenames.add(match.group(1))
-    argument_list = match.group(2)
-  return typenames
-
-
-def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error):
-  """Check for rvalue references.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-  # Find lines missing spaces around &&.
-  # TODO(unknown): currently we don't check for rvalue references
-  # with spaces surrounding the && to avoid false positives with
-  # boolean expressions.
-  line = clean_lines.elided[linenum]
-  match = Match(r'^(.*\S)&&', line)
-  if not match:
-    match = Match(r'(.*)&&\S', line)
-  if (not match) or '(&&)' in line or Search(r'\boperator\s*$', match.group(1)):
-    return
-
-  # Either poorly formed && or an rvalue reference, check the context
-  # to get a more accurate error message.  Mostly we want to determine
-  # if what's to the left of "&&" is a type or not.
-  typenames = GetTemplateArgs(clean_lines, linenum)
-  and_pos = len(match.group(1))
-  if IsRValueType(typenames, clean_lines, nesting_state, linenum, and_pos):
-    if not IsRValueAllowed(clean_lines, linenum, typenames):
-      error(filename, linenum, 'build/c++11', 3,
-            'RValue references are an unapproved C++ feature.')
-  else:
-    error(filename, linenum, 'whitespace/operators', 3,
-          'Missing spaces around &&')
-
-
-def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
-  """Checks for additional blank line issues related to sections.
-
-  Currently the only thing checked here is blank line before protected/private.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    class_info: A _ClassInfo object.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  # Skip checks if the class is small, where small means 25 lines or less.
-  # 25 lines seems like a good cutoff since that's the usual height of
-  # terminals, and any class that can't fit in one screen can't really
-  # be considered "small".
-  #
-  # Also skip checks if we are on the first line.  This accounts for
-  # classes that look like
-  #   class Foo { public: ... };
-  #
-  # If we didn't find the end of the class, last_line would be zero,
-  # and the check would be skipped by the first condition.
-  if (class_info.last_line - class_info.starting_linenum <= 24 or
-      linenum <= class_info.starting_linenum):
-    return
-
-  matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
-  if matched:
-    # Issue warning if the line before public/protected/private was
-    # not a blank line, but don't do this if the previous line contains
-    # "class" or "struct".  This can happen two ways:
-    #  - We are at the beginning of the class.
-    #  - We are forward-declaring an inner class that is semantically
-    #    private, but needed to be public for implementation reasons.
-    # Also ignores cases where the previous line ends with a backslash as can be
-    # common when defining classes in C macros.
-    prev_line = clean_lines.lines[linenum - 1]
-    if (not IsBlankLine(prev_line) and
-        not Search(r'\b(class|struct)\b', prev_line) and
-        not Search(r'\\$', prev_line)):
-      # Try a bit harder to find the beginning of the class.  This is to
-      # account for multi-line base-specifier lists, e.g.:
-      #   class Derived
-      #       : public Base {
-      end_class_head = class_info.starting_linenum
-      for i in range(class_info.starting_linenum, linenum):
-        if Search(r'\{\s*$', clean_lines.lines[i]):
-          end_class_head = i
-          break
-      if end_class_head < linenum - 1:
-        error(filename, linenum, 'whitespace/blank_line', 3,
-              '"%s:" should be preceded by a blank line' % matched.group(1))
-
-
-def GetPreviousNonBlankLine(clean_lines, linenum):
-  """Return the most recent non-blank line and its line number.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file contents.
-    linenum: The number of the line to check.
-
-  Returns:
-    A tuple with two elements.  The first element is the contents of the last
-    non-blank line before the current line, or the empty string if this is the
-    first non-blank line.  The second is the line number of that line, or -1
-    if this is the first non-blank line.
-  """
-
-  prevlinenum = linenum - 1
-  while prevlinenum >= 0:
-    prevline = clean_lines.elided[prevlinenum]
-    if not IsBlankLine(prevline):     # if not a blank line...
-      return (prevline, prevlinenum)
-    prevlinenum -= 1
-  return ('', -1)
-
-
-def CheckBraces(filename, clean_lines, linenum, error):
-  """Looks for misplaced braces (e.g. at the end of line).
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-
-  line = clean_lines.elided[linenum]        # get rid of comments and strings
-
-  if Match(r'\s*{\s*$', line):
-    # We allow an open brace to start a line in the case where someone is using
-    # braces in a block to explicitly create a new scope, which is commonly used
-    # to control the lifetime of stack-allocated variables.  Braces are also
-    # used for brace initializers inside function calls.  We don't detect this
-    # perfectly: we just don't complain if the last non-whitespace character on
-    # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the
-    # previous line starts a preprocessor block.
-    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
-    if (not Search(r'[,;:}{(]\s*$', prevline) and
-        not Match(r'\s*#', prevline)):
-      error(filename, linenum, 'whitespace/braces', 4,
-            '{ should almost always be at the end of the previous line')
-
-  # An else clause should be on the same line as the preceding closing brace.
-  if Match(r'\s*else\b\s*(?:if\b|\{|$)', line):
-    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
-    if Match(r'\s*}\s*$', prevline):
-      error(filename, linenum, 'whitespace/newline', 4,
-            'An else should appear on the same line as the preceding }')
-
-  # If braces come on one side of an else, they should be on both.
-  # However, we have to worry about "else if" that spans multiple lines!
-  if Search(r'else if\s*\(', line):       # could be multi-line if
-    brace_on_left = bool(Search(r'}\s*else if\s*\(', line))
-    # find the ( after the if
-    pos = line.find('else if')
-    pos = line.find('(', pos)
-    if pos > 0:
-      (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
-      brace_on_right = endline[endpos:].find('{') != -1
-      if brace_on_left != brace_on_right:    # must be brace after if
-        error(filename, linenum, 'readability/braces', 5,
-              'If an else has a brace on one side, it should have it on both')
-  elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
-    error(filename, linenum, 'readability/braces', 5,
-          'If an else has a brace on one side, it should have it on both')
-
-  # Likewise, an else should never have the else clause on the same line
-  if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
-    error(filename, linenum, 'whitespace/newline', 4,
-          'Else clause should never be on same line as else (use 2 lines)')
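-  # For example (illustrative):
-  #   }
-  #   else {               // flagged: else should be on the line with }
-  #   } else return;       // flagged: brace on only one side of else
-  #   else DoSomething();  // flagged: body on the same line as else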
-
-  # In the same way, a do/while should never be on one line
-  if Match(r'\s*do [^\s{]', line):
-    error(filename, linenum, 'whitespace/newline', 4,
-          'do/while clauses should not be on a single line')
-
-  # Check single-line if/else bodies. The style guide says 'curly braces are not
-  # required for single-line statements'. We additionally allow multi-line,
-  # single statements, but we reject anything with more than one semicolon in
-  # it. This means that the first semicolon after the if should be at the end of
-  # its line, and the line after that should have an indent level equal to or
-  # lower than the if. We also check for ambiguous if/else nesting without
-  # braces.
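-  # For example (illustrative):
-  #   if (x)
-  #     DoThis();
-  #     DoThat();  // flagged: multiple statements require braces
-  # An 'else' whose indent matches neither 'if' in a braceless nested
-  # chain is flagged as ambiguous.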
-  if_else_match = Search(r'\b(if\s*\(|else\b)', line)
-  if if_else_match and not Match(r'\s*#', line):
-    if_indent = GetIndentLevel(line)
-    endline, endlinenum, endpos = line, linenum, if_else_match.end()
-    if_match = Search(r'\bif\s*\(', line)
-    if if_match:
-      # This could be a multiline if condition, so find the end first.
-      pos = if_match.end() - 1
-      (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos)
-    # Check for an opening brace, either directly after the if or on the next
-    # line. If found, this isn't a single-statement conditional.
-    if (not Match(r'\s*{', endline[endpos:])
-        and not (Match(r'\s*$', endline[endpos:])
-                 and endlinenum < (len(clean_lines.elided) - 1)
-                 and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
-      while (endlinenum < len(clean_lines.elided)
-             and ';' not in clean_lines.elided[endlinenum][endpos:]):
-        endlinenum += 1
-        endpos = 0
-      if endlinenum < len(clean_lines.elided):
-        endline = clean_lines.elided[endlinenum]
-        # We allow a mix of whitespace and closing braces (e.g. for one-liner
-        # methods) and a single \ after the semicolon (for macros)
-        endpos = endline.find(';')
-        if not Match(r';[\s}]*(\\?)$', endline[endpos:]):
-          # Semicolon isn't the last character; there's something trailing.
-          # Output a warning if the semicolon is not contained inside
-          # a lambda expression.
-          if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$',
-                       endline):
-            error(filename, linenum, 'readability/braces', 4,
-                  'If/else bodies with multiple statements require braces')
-        elif endlinenum < len(clean_lines.elided) - 1:
-          # Make sure the next line is dedented
-          next_line = clean_lines.elided[endlinenum + 1]
-          next_indent = GetIndentLevel(next_line)
-          # With ambiguous nested if statements, this will error out on the
-          # if that *doesn't* match the else, regardless of whether it's the
-          # inner one or outer one.
-          if (if_match and Match(r'\s*else\b', next_line)
-              and next_indent != if_indent):
-            error(filename, linenum, 'readability/braces', 4,
-                  'Else clause should be indented at the same level as if. '
-                  'Ambiguous nested if/else chains require braces.')
-          elif next_indent > if_indent:
-            error(filename, linenum, 'readability/braces', 4,
-                  'If/else bodies with multiple statements require braces')
-
-
-def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
-  """Looks for redundant trailing semicolon.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-
-  line = clean_lines.elided[linenum]
-
-  # Block bodies should not be followed by a semicolon.  Due to C++11
-  # brace initialization, there are more places where semicolons are
-  # required than not, so we use a whitelist approach to check these
-  # rather than a blacklist.  These are the places where "};" should
-  # be replaced by just "}":
-  # 1. Some flavor of block following closing parenthesis:
-  #    for (;;) {};
-  #    while (...) {};
-  #    switch (...) {};
-  #    Function(...) {};
-  #    if (...) {};
-  #    if (...) else if (...) {};
-  #
-  # 2. else block:
-  #    if (...) else {};
-  #
-  # 3. const member function:
-  #    Function(...) const {};
-  #
-  # 4. Block following some statement:
-  #    x = 42;
-  #    {};
-  #
-  # 5. Block at the beginning of a function:
-  #    Function(...) {
-  #      {};
-  #    }
-  #
-  #    Note that naively checking for the preceding "{" will also match
-  #    braces inside multi-dimensional arrays, but this is fine since
-  #    that expression will not contain semicolons.
-  #
-  # 6. Block following another block:
-  #    while (true) {}
-  #    {};
-  #
-  # 7. End of namespaces:
-  #    namespace {};
-  #
-    #    These semicolons seem far more common than other kinds of
-  #    redundant semicolons, possibly due to people converting classes
-  #    to namespaces.  For now we do not warn for this case.
-  #
-  # Try matching case 1 first.
-  match = Match(r'^(.*\)\s*)\{', line)
-  if match:
-    # Matched closing parenthesis (case 1).  Check the token before the
-    # matching opening parenthesis, and don't warn if it looks like a
-    # macro.  This avoids these false positives:
-    #  - macro that defines a base class
-    #  - multi-line macro that defines a base class
-    #  - macro that defines the whole class-head
-    #
-    # But we still issue warnings for macros that we know are safe to
-    # warn, specifically:
-    #  - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P
-    #  - TYPED_TEST
-    #  - INTERFACE_DEF
-    #  - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:
-    #
-    # We implement a whitelist of safe macros instead of a blacklist of
-    # unsafe macros, even though the latter appears less frequently in
-    # google code and would have been easier to implement.  This is because
-    # the downside of getting the whitelist wrong is only some extra
-    # semicolons, while the downside of getting the blacklist wrong
-    # would be suggested changes that result in compile errors.
-    #
-    # In addition to macros, we also don't want to warn on
-    #  - Compound literals
-    #  - Lambdas
-    #  - alignas specifier with anonymous structs:
-    closing_brace_pos = match.group(1).rfind(')')
-    opening_parenthesis = ReverseCloseExpression(
-        clean_lines, linenum, closing_brace_pos)
-    if opening_parenthesis[2] > -1:
-      line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
-      macro = Search(r'\b([A-Z_]+)\s*$', line_prefix)
-      func = Match(r'^(.*\])\s*$', line_prefix)
-      if ((macro and
-           macro.group(1) not in (
-               'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
-               'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
-               'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
-          (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
-          Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
-          Search(r'\s+=\s*$', line_prefix)):
-        match = None
-    if (match and
-        opening_parenthesis[1] > 1 and
-        Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
-      # Multi-line lambda-expression
-      match = None
-
-  else:
-    # Try matching cases 2-3.
-    match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line)
-    if not match:
-      # Try matching cases 4-6.  These are always matched on separate lines.
-      #
-      # Note that we can't simply concatenate the previous line to the
-      # current line and do a single match, otherwise we may output
-      # duplicate warnings for the blank line case:
-      #   if (cond) {
-      #     // blank line
-      #   }
-      prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
-      if prevline and Search(r'[;{}]\s*$', prevline):
-        match = Match(r'^(\s*)\{', line)
-
-  # Check matching closing brace
-  if match:
-    (endline, endlinenum, endpos) = CloseExpression(
-        clean_lines, linenum, len(match.group(1)))
-    if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
-      # Current {} pair is eligible for semicolon check, and we have found
-      # the redundant semicolon, output warning here.
-      #
-      # Note: because we are scanning forward for opening braces, and
-      # outputting warnings for the matching closing brace, if there are
-      # nested blocks with trailing semicolons, we will get the error
-      # messages in reversed order.
-      error(filename, endlinenum, 'readability/braces', 4,
-            "You don't need a ; after a }")
-
-
-def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
-  """Look for empty loop/conditional body with only a single semicolon.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-
-  # Search for loop keywords at the beginning of the line.  Because only
-  # whitespace is allowed before the keywords, this will also ignore most
-  # do-while-loops, since those lines should start with a closing brace.
-  #
-  # We also check "if" blocks here, since an empty conditional block
-  # is likely an error.
-  line = clean_lines.elided[linenum]
-  matched = Match(r'\s*(for|while|if)\s*\(', line)
-  if matched:
-    # Find the end of the conditional expression
-    (end_line, end_linenum, end_pos) = CloseExpression(
-        clean_lines, linenum, line.find('('))
-
-    # Output warning if what follows the condition expression is a semicolon.
-    # No warning for all other cases, including whitespace or newline, since we
-    # have a separate check for semicolons preceded by whitespace.
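-    # e.g. 'while (ShouldRetry());' draws the empty-loop-body warning and
-    # 'if (done);' the empty-conditional-body warning, while
-    # 'while (Poll()) {}' draws neither (illustrative).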
-    if end_pos >= 0 and Match(r';', end_line[end_pos:]):
-      if matched.group(1) == 'if':
-        error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
-              'Empty conditional bodies should use {}')
-      else:
-        error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
-              'Empty loop bodies should use {} or continue')
-
-
-def FindCheckMacro(line):
-  """Find a replaceable CHECK-like macro.
-
-  Args:
-    line: line to search on.
-  Returns:
-    (macro name, start position), or (None, -1) if no replaceable
-    macro is found.
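-
-  For example (illustrative, assuming 'CHECK' is in _CHECK_MACROS):
-    FindCheckMacro('  CHECK(x == 3);') returns ('CHECK', 7), where 7 is
-    the position of the opening parenthesis.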
-  """
-  for macro in _CHECK_MACROS:
-    i = line.find(macro)
-    if i >= 0:
-      # Find opening parenthesis.  Do a regular expression match here
-      # to make sure that we are matching the expected CHECK macro, as
-      # opposed to some other macro that happens to contain the CHECK
-      # substring.
-      matched = Match(r'^(.*\b' + macro + r'\s*)\(', line)
-      if not matched:
-        continue
-      return (macro, len(matched.group(1)))
-  return (None, -1)
-
-
-def CheckCheck(filename, clean_lines, linenum, error):
-  """Checks the use of CHECK and EXPECT macros.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-
-  # Decide the set of replacement macros that should be suggested
-  lines = clean_lines.elided
-  (check_macro, start_pos) = FindCheckMacro(lines[linenum])
-  if not check_macro:
-    return
-
-  # Find end of the boolean expression by matching parentheses
-  (last_line, end_line, end_pos) = CloseExpression(
-      clean_lines, linenum, start_pos)
-  if end_pos < 0:
-    return
-
-  # If the check macro is followed by something other than a
-  # semicolon, assume users will log their own custom error messages
-  # and don't suggest any replacements.
-  if not Match(r'\s*;', last_line[end_pos:]):
-    return
-
-  if linenum == end_line:
-    expression = lines[linenum][start_pos + 1:end_pos - 1]
-  else:
-    expression = lines[linenum][start_pos + 1:]
-    for i in xrange(linenum + 1, end_line):
-      expression += lines[i]
-    expression += last_line[0:end_pos - 1]
-
-  # Parse expression so that we can take parentheses into account.
-  # This avoids false positives for inputs like "CHECK((a < 4) == b)",
-  # which is not replaceable by CHECK_LE.
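-  # For example (illustrative):
-  #   CHECK(x == 42)         -> suggests CHECK_EQ
-  #   CHECK((a < 4) == b)    -> parsed as one parenthesized operand,
-  #                             no suggestion
-  #   CHECK(a < b && b < c)  -> contains '&&', no suggestion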
-  lhs = ''
-  rhs = ''
-  operator = None
-  while expression:
-    matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
-                    r'==|!=|>=|>|<=|<|\()(.*)$', expression)
-    if matched:
-      token = matched.group(1)
-      if token == '(':
-        # Parenthesized operand
-        expression = matched.group(2)
-        (end, _) = FindEndOfExpressionInLine(expression, 0, ['('])
-        if end < 0:
-          return  # Unmatched parenthesis
-        lhs += '(' + expression[0:end]
-        expression = expression[end:]
-      elif token in ('&&', '||'):
-        # Logical and/or operators.  This means the expression
-        # contains more than one term, for example:
-        #   CHECK(42 < a && a < b);
-        #
-        # These are not replaceable with CHECK_LE, so bail out early.
-        return
-      elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
-        # Non-relational operator
-        lhs += token
-        expression = matched.group(2)
-      else:
-        # Relational operator
-        operator = token
-        rhs = matched.group(2)
-        break
-    else:
-      # Unparenthesized operand.  Instead of appending to lhs one character
-      # at a time, we do another regular expression match to consume several
-      # characters at once if possible.  A trivial benchmark shows that this
-      # is more efficient when the operands are longer than a single
-      # character, which is generally the case.
-      matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
-      if not matched:
-        matched = Match(r'^(\s*\S)(.*)$', expression)
-        if not matched:
-          break
-      lhs += matched.group(1)
-      expression = matched.group(2)
-
-  # Only apply checks if we got all parts of the boolean expression
-  if not (lhs and operator and rhs):
-    return
-
-  # Check that rhs does not contain logical operators.  We already know
-  # that lhs is fine since the loop above parses out && and ||.
-  if rhs.find('&&') > -1 or rhs.find('||') > -1:
-    return
-
-  # At least one of the operands must be a constant literal.  This is
-  # to avoid suggesting replacements for unprintable things like
-  # CHECK(variable != iterator)
-  #
-  # The following pattern matches decimal, hex integers, strings, and
-  # characters (in that order).
-  lhs = lhs.strip()
-  rhs = rhs.strip()
-  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
-  if Match(match_constant, lhs) or Match(match_constant, rhs):
-    # Note: since we know both lhs and rhs, we can provide a more
-    # descriptive error message like:
-    #   Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
-    # Instead of:
-    #   Consider using CHECK_EQ instead of CHECK(a == b)
-    #
-    # We are still keeping the less descriptive message because if lhs
-    # or rhs gets long, the error message might become unreadable.
-    error(filename, linenum, 'readability/check', 2,
-          'Consider using %s instead of %s(a %s b)' % (
-              _CHECK_REPLACEMENT[check_macro][operator],
-              check_macro, operator))
-
-
-def CheckAltTokens(filename, clean_lines, linenum, error):
-  """Check alternative keywords being used in boolean expressions.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Avoid preprocessor lines
-  if Match(r'^\s*#', line):
-    return
-
-  # Last ditch effort to avoid multi-line comments.  This will not help
-  # if the comment started before the current line or ended after the
-  # current line, but it catches most of the false positives.  At least,
-  # it provides a way to work around this warning for people who use
-  # multi-line comments in preprocessor macros.
-  #
-  # TODO(unknown): remove this once cpplint has better support for
-  # multi-line comments.
-  if line.find('/*') >= 0 or line.find('*/') >= 0:
-    return
-
-  for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
-    error(filename, linenum, 'readability/alt_tokens', 2,
-          'Use operator %s instead of %s' % (
-              _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
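-  # e.g. 'if (a and not b)' would draw two warnings (illustrative): use
-  # operator && instead of 'and', and operator ! instead of 'not'.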
-
-
-def GetLineWidth(line):
-  """Determines the width of the line in column positions.
-
-  Args:
-    line: A string, which may be a Unicode string.
-
-  Returns:
-    The width of the line in column positions, accounting for Unicode
-    combining characters and wide characters.
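-
-  For example (illustrative): GetLineWidth(u'abc') is 3, while
-  GetLineWidth(u'\u4e2d\u6587') is 4 because each wide CJK character
-  occupies two columns.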
-  """
-  if isinstance(line, unicode):
-    width = 0
-    for uc in unicodedata.normalize('NFC', line):
-      if unicodedata.east_asian_width(uc) in ('W', 'F'):
-        width += 2
-      elif not unicodedata.combining(uc):
-        width += 1
-    return width
-  else:
-    return len(line)
-
-
-def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
-               error):
-  """Checks rules from the 'C++ style rules' section of cppguide.html.
-
-  Most of these rules are hard to test (naming, comment style), but we
-  do what we can.  In particular we check for 2-space indents, line lengths,
-  tab usage, spaces inside code, etc.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    file_extension: The extension (without the dot) of the filename.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-
-  # Don't use "elided" lines here, otherwise we can't check commented lines.
-  # Don't want to use "raw" either, because we don't want to check inside C++11
-  # raw strings.
-  raw_lines = clean_lines.lines_without_raw_strings
-  line = raw_lines[linenum]
-
-  if line.find('\t') != -1:
-    error(filename, linenum, 'whitespace/tab', 1,
-          'Tab found; better to use spaces')
-
-  # One or three blank spaces at the beginning of the line are weird; it's
-  # hard to reconcile that with 2-space indents.
-  # NOTE: here are the conditions Rob Pike used for his tests.  Mine aren't
-  # as sophisticated, but it may be worth becoming so:  RLENGTH==initial_spaces
-  # if(RLENGTH > 20) complain = 0;
-  # if(match($0, " +(error|private|public|protected):")) complain = 0;
-  # if(match(prev, "&& *$")) complain = 0;
-  # if(match(prev, "\\|\\| *$")) complain = 0;
-  # if(match(prev, "[\",=><] *$")) complain = 0;
-  # if(match($0, " <<")) complain = 0;
-  # if(match(prev, " +for \\(")) complain = 0;
-  # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
-  scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$'
-  classinfo = nesting_state.InnermostClass()
-  initial_spaces = 0
-  cleansed_line = clean_lines.elided[linenum]
-  while initial_spaces < len(line) and line[initial_spaces] == ' ':
-    initial_spaces += 1
-  if line and line[-1].isspace():
-    error(filename, linenum, 'whitespace/end_of_line', 4,
-          'Line ends in whitespace.  Consider deleting these extra spaces.')
-  # There are certain situations in which we allow one space, notably for
-  # section labels, and also lines containing multi-line raw strings.
-  elif ((initial_spaces == 1 or initial_spaces == 3) and
-        not Match(scope_or_label_pattern, cleansed_line) and
-        not (clean_lines.raw_lines[linenum] != line and
-             Match(r'^\s*""', line))):
-    error(filename, linenum, 'whitespace/indent', 3,
-          'Weird number of spaces at line-start.  '
-          'Are you using a 2-space indent?')
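-  # For example (illustrative):
-  #    int x;     // three leading spaces: whitespace/indent warning
-  #   public:     // one leading space before a label: allowed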
-
-  # Check if the line is a header guard.
-  is_header_guard = False
-  if file_extension == 'h':
-    cppvar = GetHeaderGuardCPPVariable(filename)
-    if (line.startswith('#ifndef %s' % cppvar) or
-        line.startswith('#define %s' % cppvar) or
-        line.startswith('#endif  // %s' % cppvar)):
-      is_header_guard = True
-  # #include lines and header guards can be long, since there's no clean way to
-  # split them.
-  #
-  # URLs can be long too.  It's possible to split these, but it makes them
-  # harder to cut&paste.
-  #
-  # The "$Id:...$" comment may also get very long without it being the
-  # developer's fault.
-  if (not line.startswith('#include') and not is_header_guard and
-      not Match(r'^\s*//.*http(s?)://\S*$', line) and
-      not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
-    line_width = GetLineWidth(line)
-    extended_length = int((_line_length * 1.25))
-    if line_width > extended_length:
-      error(filename, linenum, 'whitespace/line_length', 4,
-            'Lines should very rarely be longer than %i characters' %
-            extended_length)
-    elif line_width > _line_length:
-      error(filename, linenum, 'whitespace/line_length', 2,
-            'Lines should be <= %i characters long' % _line_length)
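-  # e.g. assuming the default _line_length of 80 (illustrative), an
-  # 85-column line draws the level-2 warning and a 105-column line
-  # (beyond int(80 * 1.25) == 100) draws the level-4 warning.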
-
-  if (cleansed_line.count(';') > 1 and
-      # for loops are allowed two ;'s (and may run over two lines).
-      cleansed_line.find('for') == -1 and
-      (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
-       GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
-      # It's okay to have many commands in a switch case that fits on one line
-      not ((cleansed_line.find('case ') != -1 or
-            cleansed_line.find('default:') != -1) and
-           cleansed_line.find('break;') != -1)):
-    error(filename, linenum, 'whitespace/newline', 0,
-          'More than one command on the same line')
-
-  # Some more style checks
-  CheckBraces(filename, clean_lines, linenum, error)
-  CheckTrailingSemicolon(filename, clean_lines, linenum, error)
-  CheckEmptyBlockBody(filename, clean_lines, linenum, error)
-  CheckAccess(filename, clean_lines, linenum, nesting_state, error)
-  CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
-  CheckOperatorSpacing(filename, clean_lines, linenum, error)
-  CheckParenthesisSpacing(filename, clean_lines, linenum, error)
-  CheckCommaSpacing(filename, clean_lines, linenum, error)
-  CheckBracesSpacing(filename, clean_lines, linenum, error)
-  CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
-  CheckRValueReference(filename, clean_lines, linenum, nesting_state, error)
-  CheckCheck(filename, clean_lines, linenum, error)
-  CheckAltTokens(filename, clean_lines, linenum, error)
-  classinfo = nesting_state.InnermostClass()
-  if classinfo:
-    CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
-
-
-_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
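-# For example (illustrative):
-#   _RE_PATTERN_INCLUDE.match('#include <vector>').groups()
-#       evaluates to ('<', 'vector')
-#   _RE_PATTERN_INCLUDE.match('#include "foo/bar.h"').groups()
-#       evaluates to ('"', 'foo/bar.h')
-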
-# Matches the first component of a filename delimited by -s and _s. That is:
-#  _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
-#  _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
-#  _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
-#  _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
-_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
-
-
-def _DropCommonSuffixes(filename):
-  """Drops common suffixes like _test.cc or -inl.h from filename.
-
-  For example:
-    >>> _DropCommonSuffixes('foo/foo-inl.h')
-    'foo/foo'
-    >>> _DropCommonSuffixes('foo/bar/foo.cc')
-    'foo/bar/foo'
-    >>> _DropCommonSuffixes('foo/foo_internal.h')
-    'foo/foo'
-    >>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
-    'foo/foo_unusualinternal'
-
-  Args:
-    filename: The input filename.
-
-  Returns:
-    The filename with the common suffix removed.
-  """
-  for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
-                 'inl.h', 'impl.h', 'internal.h'):
-    if (filename.endswith(suffix) and len(filename) > len(suffix) and
-        filename[-len(suffix) - 1] in ('-', '_')):
-      return filename[:-len(suffix) - 1]
-  return os.path.splitext(filename)[0]
-
-
-def _IsTestFilename(filename):
-  """Determines if the given filename has a suffix that identifies it as a test.
-
-  Args:
-    filename: The input filename.
-
-  Returns:
-    True if 'filename' looks like a test, False otherwise.
-  """
-  return (filename.endswith('_test.cc') or
-          filename.endswith('_unittest.cc') or
-          filename.endswith('_regtest.cc'))
-
-
-def _ClassifyInclude(fileinfo, include, is_system):
-  """Figures out what kind of header 'include' is.
-
-  Args:
-    fileinfo: The current file cpplint is running over. A FileInfo instance.
-    include: The path to a #included file.
-    is_system: True if the #include used <> rather than "".
-
-  Returns:
-    One of the _XXX_HEADER constants.
-
-  For example:
-    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
-    _C_SYS_HEADER
-    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
-    _CPP_SYS_HEADER
-    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
-    _LIKELY_MY_HEADER
-    >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
-    ...                  'bar/foo_other_ext.h', False)
-    _POSSIBLE_MY_HEADER
-    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
-    _OTHER_HEADER
-  """
-  # This is a list of all standard c++ header files, except
-  # those already checked for above.
-  is_cpp_h = include in _CPP_HEADERS
-
-  if is_system:
-    if is_cpp_h:
-      return _CPP_SYS_HEADER
-    else:
-      return _C_SYS_HEADER
-
-  # If the target file and the include we're checking share a
-  # basename when we drop common extensions, and the include
-  # lives in . , then it's likely to be owned by the target file.
-  target_dir, target_base = (
-      os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
-  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
-  if target_base == include_base and (
-      include_dir == target_dir or
-      include_dir == os.path.normpath(target_dir + '/../public')):
-    return _LIKELY_MY_HEADER
-
-  # If the target and include share some initial basename
-  # component, it's possible the target is implementing the
-  # include, so it's allowed to be first, but we'll never
-  # complain if it's not there.
-  target_first_component = _RE_FIRST_COMPONENT.match(target_base)
-  include_first_component = _RE_FIRST_COMPONENT.match(include_base)
-  if (target_first_component and include_first_component and
-      target_first_component.group(0) ==
-      include_first_component.group(0)):
-    return _POSSIBLE_MY_HEADER
-
-  return _OTHER_HEADER
-
-
-def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
-  """Check rules that are applicable to #include lines.
-
-  Strings on #include lines are NOT removed from the elided line, to make
-  certain tasks easier. However, to prevent false positives, checks
-  applicable to #include lines in CheckLanguage must be put here.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    include_state: An _IncludeState instance in which the headers are inserted.
-    error: The function to call with any errors found.
-  """
-  fileinfo = FileInfo(filename)
-  line = clean_lines.lines[linenum]
-
-  # "include" should use the new style "foo/bar.h" instead of just "bar.h"
-  # Only do this check if the included header follows google naming
-  # conventions.  If not, assume that it's a 3rd party API that
-  # requires special include conventions.
-  #
-  # We also make an exception for Lua headers, which follow google
-  # naming convention but not the include convention.
-  match = Match(r'#include\s*"([^/]+\.h)"', line)
-  if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)):
-    error(filename, linenum, 'build/include', 4,
-          'Include the directory when naming .h files')
-
-  # We shouldn't include a file more than once.  Actually, there are a
-  # handful of instances where doing so is okay, but in general it's
-  # not.
-  match = _RE_PATTERN_INCLUDE.search(line)
-  if match:
-    include = match.group(2)
-    is_system = (match.group(1) == '<')
-    duplicate_line = include_state.FindHeader(include)
-    if duplicate_line >= 0:
-      error(filename, linenum, 'build/include', 4,
-            '"%s" already included at %s:%s' %
-            (include, filename, duplicate_line))
-    elif (include.endswith('.cc') and
-          os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
-      error(filename, linenum, 'build/include', 4,
-            'Do not include .cc files from other packages')
-    elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
-      include_state.include_list[-1].append((include, linenum))
-
-      # We want to ensure that headers appear in the right order:
-      # 1) for foo.cc, foo.h  (preferred location)
-      # 2) c system files
-      # 3) cpp system files
-      # 4) for foo.cc, foo.h  (deprecated location)
-      # 5) other google headers
-      #
-      # We classify each include statement as one of those 5 types
-      # using a number of techniques. The include_state object keeps
-      # track of the highest type seen, and complains if we see a
-      # lower type after that.
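-      # e.g. in foo/foo.cc the preferred order would be (illustrative):
-      #   #include "foo/foo.h"
-      #   #include <stdio.h>
-      #   #include <string>
-      #   #include "bar/baz.h"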
-      error_message = include_state.CheckNextIncludeOrder(
-          _ClassifyInclude(fileinfo, include, is_system))
-      if error_message:
-        error(filename, linenum, 'build/include_order', 4,
-              '%s. Should be: %s.h, c system, c++ system, other.' %
-              (error_message, fileinfo.BaseName()))
-      canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
-      if not include_state.IsInAlphabeticalOrder(
-          clean_lines, linenum, canonical_include):
-        error(filename, linenum, 'build/include_alpha', 4,
-              'Include "%s" not in alphabetical order' % include)
-      include_state.SetLastHeader(canonical_include)
-
-
-def _GetTextInside(text, start_pattern):
-  r"""Retrieves all the text between matching open and close parentheses.
-
-  Given a string of lines and a regular expression string, retrieve all the text
-  following the expression and between opening punctuation symbols like
-  (, [, or {, and the matching close-punctuation symbol. This properly handles
-  nested occurrences of the punctuation, so for text like
-    printf(a(), b(c()));
-  a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
-  start_pattern must match a string that ends with an opening punctuation symbol.
-
-  Args:
-    text: The lines to extract text from. Its comments and strings must be
-           elided. It can be a single line or span multiple lines.
-    start_pattern: The regexp string indicating where to start extracting
-                   the text.
-  Returns:
-    The extracted text.
-    None if either the opening string or ending punctuation could not be found.
-  """
-  # TODO(unknown): Audit cpplint.py to see what places could be profitably
-  # rewritten to use _GetTextInside (and currently use inferior regexp matching).
-
-  # Map each opening punctuation symbol to its matching closing symbol.
-  matching_punctuation = {'(': ')', '{': '}', '[': ']'}
-  closing_punctuation = set(matching_punctuation.itervalues())
-
-  # Find the position to start extracting text.
-  match = re.search(start_pattern, text, re.M)
-  if not match:  # start_pattern not found in text.
-    return None
-  start_position = match.end(0)
-
-  assert start_position > 0, (
-      'start_pattern must end with an opening punctuation.')
-  assert text[start_position - 1] in matching_punctuation, (
-      'start_pattern must end with an opening punctuation.')
-  # Stack of closing punctuations we expect to have in text after position.
-  punctuation_stack = [matching_punctuation[text[start_position - 1]]]
-  position = start_position
-  while punctuation_stack and position < len(text):
-    if text[position] == punctuation_stack[-1]:
-      punctuation_stack.pop()
-    elif text[position] in closing_punctuation:
-      # A closing punctuation without matching opening punctuations.
-      return None
-    elif text[position] in matching_punctuation:
-      punctuation_stack.append(matching_punctuation[text[position]])
-    position += 1
-  if punctuation_stack:
-    # Opening punctuations left without matching close-punctuations.
-    return None
-  # All punctuation matched.
-  return text[start_position:position - 1]
-
-
-# Patterns for matching call-by-reference parameters.
-#
-# Supports nested templates up to 2 levels deep using this messy pattern:
-#   < (?: < (?: < [^<>]*
-#               >
-#           |   [^<>] )*
-#         >
-#     |   [^<>] )*
-#   >
-_RE_PATTERN_IDENT = r'[_a-zA-Z]\w*'  # =~ [[:alpha:]][[:alnum:]]*
-_RE_PATTERN_TYPE = (
-    r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
-    r'(?:\w|'
-    r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
-    r'::)+')
-# A call-by-reference parameter ends with '& identifier'.
-_RE_PATTERN_REF_PARAM = re.compile(
-    r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
-    r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
-# A call-by-const-reference parameter either ends with 'const& identifier'
-# or looks like 'const type& identifier' when 'type' is atomic.
-_RE_PATTERN_CONST_REF_PARAM = (
-    r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
-    r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
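-# For example (illustrative): in the declaration
-#   void Swap(string& a, const string& b);
-# _RE_PATTERN_REF_PARAM captures 'string& a' and 'const string& b'; only the
-# latter also matches _RE_PATTERN_CONST_REF_PARAM, so the non-const reference
-# check below reports only 'string& a'.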
-
-
-def CheckLanguage(filename, clean_lines, linenum, file_extension,
-                  include_state, nesting_state, error):
-  """Checks rules from the 'C++ language rules' section of cppguide.html.
-
-  Some of these rules are hard to test (function overloading, using
-  uint32 inappropriately), but we do the best we can.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    file_extension: The extension (without the dot) of the filename.
-    include_state: An _IncludeState instance in which the headers are inserted.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-  # If the line is empty or consists entirely of a comment, no need to
-  # check it.
-  line = clean_lines.elided[linenum]
-  if not line:
-    return
-
-  match = _RE_PATTERN_INCLUDE.search(line)
-  if match:
-    CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
-    return
-
-  # Reset include state across preprocessor directives.  This is meant
-  # to silence warnings for conditional includes.
-  match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line)
-  if match:
-    include_state.ResetSection(match.group(1))
-
-  # Make Windows paths like Unix.
-  fullname = os.path.abspath(filename).replace('\\', '/')
-
-  # Perform other checks now that we are sure that this is not an include line
-  CheckCasts(filename, clean_lines, linenum, error)
-  CheckGlobalStatic(filename, clean_lines, linenum, error)
-  CheckPrintf(filename, clean_lines, linenum, error)
-
-  if file_extension == 'h':
-    # TODO(unknown): check that 1-arg constructors are explicit.
-    #                How to tell it's a constructor?
-    #                (handled in CheckForNonStandardConstructs for now)
-    # TODO(unknown): check that classes declare or disable copy/assign
-    #                (level 1 error)
-    pass
-
-  # Check if people are using the verboten C basic types.  The only exception
-  # we regularly allow is "unsigned short port" for port.
-  if Search(r'\bshort port\b', line):
-    if not Search(r'\bunsigned short port\b', line):
-      error(filename, linenum, 'runtime/int', 4,
-            'Use "unsigned short" for ports, not "short"')
-  else:
-    match = Search(r'\b(short|long(?! +double)|long long)\b', line)
-    if match:
-      error(filename, linenum, 'runtime/int', 4,
-            'Use int16/int64/etc, rather than the C type %s' % match.group(1))
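-  # For example (illustrative):
-  #   short count;          // flagged: use int16
-  #   long long total;      // flagged: use int64
-  #   unsigned short port;  // allowed for ports
-  #   long double x;        // allowed via the negative lookahead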
-
-  # Check if some verboten operator overloading is going on
-  # TODO(unknown): catch out-of-line unary operator&:
-  #   class X {};
-  #   int operator&(const X& x) { return 42; }  // unary operator&
-  # The trick is it's hard to tell apart from binary operator&:
-  #   class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
-  if Search(r'\boperator\s*&\s*\(\s*\)', line):
-    error(filename, linenum, 'runtime/operator', 4,
-          'Unary operator& is dangerous.  Do not use it.')
-
-  # Check for suspicious usage of "if" like
-  # } if (a == b) {
-  if Search(r'\}\s*if\s*\(', line):
-    error(filename, linenum, 'readability/braces', 4,
-          'Did you mean "else if"? If not, start a new line for "if".')
-
-  # Check for potential format string bugs like printf(foo).
-  # We constrain the pattern not to pick things like DocidForPrintf(foo).
-  # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
-  # TODO(unknown): Catch the following case. Need to change the calling
-  # convention of the whole function to process multiple lines to handle it.
-  #   printf(
-  #       boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
-  printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
-  if printf_args:
-    match = Match(r'([\w.\->()]+)$', printf_args)
-    if match and match.group(1) != '__VA_ARGS__':
-      function_name = re.search(r'\b((?:string)?printf)\s*\(',
-                                line, re.I).group(1)
-      error(filename, linenum, 'runtime/printf', 4,
-            'Potential format string bug. Do %s("%%s", %s) instead.'
-            % (function_name, match.group(1)))
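-  # e.g. 'printf(msg.c_str());' would be flagged with the suggestion
-  # 'printf("%s", msg.c_str())' (illustrative).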
-
-  # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
-  match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
-  if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
-    error(filename, linenum, 'runtime/memset', 4,
-          'Did you mean "memset(%s, 0, %s)"?'
-          % (match.group(1), match.group(2)))
-
-  if Search(r'\busing namespace\b', line):
-    error(filename, linenum, 'build/namespaces', 5,
-          'Do not use namespace using-directives.  '
-          'Use using-declarations instead.')
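-  # e.g. 'using namespace std;' is flagged; a using-declaration such as
-  # 'using std::string;' is not (illustrative).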
-
-  # Detect variable-length arrays.
-  match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
-  if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
-      match.group(3).find(']') == -1):
-    # Split the size using space and arithmetic operators as delimiters.
-    # If any of the resulting tokens are not compile time constants then
-    # report the error.
-    tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>', match.group(3))
-    is_const = True
-    skip_next = False
-    for tok in tokens:
-      if skip_next:
-        skip_next = False
-        continue
-
-      if Search(r'sizeof\(.+\)', tok): continue
-      if Search(r'arraysize\(\w+\)', tok): continue
-
-      tok = tok.lstrip('(')
-      tok = tok.rstrip(')')
-      if not tok: continue
-      if Match(r'\d+', tok): continue
-      if Match(r'0[xX][0-9a-fA-F]+', tok): continue
-      if Match(r'k[A-Z0-9]\w*', tok): continue
-      if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
-      if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
-      # A catch all for tricky sizeof cases, including 'sizeof expression',
-      # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
-      # requires skipping the next token because we split on ' ' and '*'.
-      if tok.startswith('sizeof'):
-        skip_next = True
-        continue
-      is_const = False
-      break
-    if not is_const:
-      error(filename, linenum, 'runtime/arrays', 1,
-            'Do not use variable-length arrays.  Use an appropriately named '
-            "('k' followed by CamelCase) compile-time constant for the size.")
-
-  # Check for use of unnamed namespaces in header files.  Registration
-  # macros are typically OK, so we allow use of "namespace {" on lines
-  # that end with backslashes.
-  if (file_extension == 'h'
-      and Search(r'\bnamespace\s*{', line)
-      and line[-1] != '\\'):
-    error(filename, linenum, 'build/namespaces', 4,
-          'Do not use unnamed namespaces in header files.  See '
-          'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
-          ' for more information.')
-
-
-def CheckGlobalStatic(filename, clean_lines, linenum, error):
-  """Check for unsafe global or static objects.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Match two lines at a time to support multiline declarations
-  if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line):
-    line += clean_lines.elided[linenum + 1].strip()
-
-  # Check for people declaring static/global STL strings at the top level.
-  # This is dangerous because the C++ language does not guarantee that
-  # globals with constructors are initialized before the first access.
-  match = Match(
-      r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
-      line)
-
-  # Remove false positives:
-  # - String pointers (as opposed to values).
-  #    string *pointer
-  #    const string *pointer
-  #    string const *pointer
-  #    string *const pointer
-  #
-  # - Functions and template specializations.
-  #    string Function<Type>(...
-  #    string Class<Type>::Method(...
-  #
-  # - Operators.  These are matched separately because operator names
-  #   cross non-word boundaries, and trying to match both operators
-  #   and functions at the same time would decrease accuracy of
-  #   matching identifiers.
-  #    string Class::operator*()
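-  #
-  # e.g. 'static const string kName = "foo";' would be flagged below with
-  # the suggestion 'static const char kName[]' (illustrative).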
-  if (match and
-      not Search(r'\bstring\b(\s+const)?\s*\*\s*(const\s+)?\w', line) and
-      not Search(r'\boperator\W', line) and
-      not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(3))):
-    error(filename, linenum, 'runtime/string', 4,
-          'For a static/global string constant, use a C style string instead: '
-          '"%schar %s[]".' %
-          (match.group(1), match.group(2)))
-
-  if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line):
-    error(filename, linenum, 'runtime/init', 4,
-          'You seem to be initializing a member variable with itself.')
-
-
-def CheckPrintf(filename, clean_lines, linenum, error):
-  """Check for printf related issues.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # When snprintf is used, the second argument shouldn't be a literal.
-  match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
-  if match and match.group(2) != '0':
-    # If 2nd arg is zero, snprintf is used to calculate size.
-    error(filename, linenum, 'runtime/printf', 3,
-          'If you can, use sizeof(%s) instead of %s as the 2nd arg '
-          'to snprintf.' % (match.group(1), match.group(2)))
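-  # e.g. 'snprintf(buf, 10, "%s", s)' draws a suggestion to use
-  # sizeof(buf); 'snprintf(buf, 0, ...)' is the size-calculation idiom and
-  # is not flagged (illustrative).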
-
-  # Check if some verboten C functions are being used.
-  if Search(r'\bsprintf\s*\(', line):
-    error(filename, linenum, 'runtime/printf', 5,
-          'Never use sprintf. Use snprintf instead.')
-  match = Search(r'\b(strcpy|strcat)\s*\(', line)
-  if match:
-    error(filename, linenum, 'runtime/printf', 4,
-          'Almost always, snprintf is better than %s' % match.group(1))
-
-
-def IsDerivedFunction(clean_lines, linenum):
-  """Check if current line contains an inherited function.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-  Returns:
-    True if current line contains a function with the "override"
-    virt-specifier.
-  """
-  # Scan back a few lines for start of current function
-  for i in xrange(linenum, max(-1, linenum - 10), -1):
-    match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
-    if match:
-      # Look for "override" after the matching closing parenthesis
-      line, _, closing_paren = CloseExpression(
-          clean_lines, i, len(match.group(1)))
-      return (closing_paren >= 0 and
-              Search(r'\boverride\b', line[closing_paren:]))
-  return False
-
-
-def IsOutOfLineMethodDefinition(clean_lines, linenum):
-  """Check if current line contains an out-of-line method definition.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-  Returns:
-    True if current line contains an out-of-line method definition.
-  """
-  # Scan back a few lines for start of current function
-  for i in xrange(linenum, max(-1, linenum - 10), -1):
-    if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
-      return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
-  return False
-
-
-def IsInitializerList(clean_lines, linenum):
-  """Check if current line is inside constructor initializer list.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-  Returns:
-    True if current line appears to be inside constructor initializer
-    list, False otherwise.
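-
-  For example (illustrative): if the current line is
-      : x_(x) {
-  the ' : x_(' pattern matches after the function body brace is stripped,
-  so this returns True.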
-  """
-  for i in xrange(linenum, 1, -1):
-    line = clean_lines.elided[i]
-    if i == linenum:
-      remove_function_body = Match(r'^(.*)\{\s*$', line)
-      if remove_function_body:
-        line = remove_function_body.group(1)
-
-    if Search(r'\s:\s*\w+[({]', line):
-      # A lone colon tends to indicate the start of a constructor
-      # initializer list.  It could also be a ternary operator, which
-      # also tends to appear in constructor initializer lists as
-      # opposed to parameter lists.
-      return True
-    if Search(r'\}\s*,\s*$', line):
-      # A closing brace followed by a comma is probably the end of a
-      # brace-initialized member in constructor initializer list.
-      return True
-    if Search(r'[{};]\s*$', line):
-      # Found one of the following:
-      # - A closing brace or semicolon, probably the end of the previous
-      #   function.
-      # - An opening brace, probably the start of current class or namespace.
-      #
-      # Current line is probably not inside an initializer list since
-      # we saw one of those things without seeing the starting colon.
-      return False
-
-  # Got to the beginning of the file without seeing the start of
-  # constructor initializer list.
-  return False
-
-
-def CheckForNonConstReference(filename, clean_lines, linenum,
-                              nesting_state, error):
-  """Check for non-const references.
-
-  Separate from CheckLanguage since it scans backwards from current
-  line, instead of scanning forward.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-  # Do nothing if there is no '&' on current line.
-  line = clean_lines.elided[linenum]
-  if '&' not in line:
-    return
-
-  # If a function is inherited, the current function doesn't have much of
-  # a choice, so any non-const references should not be blamed on the
-  # derived function.
-  if IsDerivedFunction(clean_lines, linenum):
-    return
-
-  # Don't warn on out-of-line method definitions, as we would warn on the
-  # in-line declaration, if it isn't marked with 'override'.
-  if IsOutOfLineMethodDefinition(clean_lines, linenum):
-    return
-
-  # Long type names may be broken across multiple lines, usually in one
-  # of these forms:
-  #   LongType
-  #       ::LongTypeContinued &identifier
-  #   LongType::
-  #       LongTypeContinued &identifier
-  #   LongType<
-  #       ...>::LongTypeContinued &identifier
-  #
-  # If we detected a type split across two lines, join the previous
-  # line to current line so that we can match const references
-  # accordingly.
-  #
-  # Note that this only scans back one line, since scanning back
-  # arbitrary number of lines would be expensive.  If you have a type
-  # that spans more than 2 lines, please use a typedef.
-  if linenum > 1:
-    previous = None
-    if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
-      # previous_line\n + ::current_line
-      previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
-                        clean_lines.elided[linenum - 1])
-    elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
-      # previous_line::\n + current_line
-      previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
-                        clean_lines.elided[linenum - 1])
-    if previous:
-      line = previous.group(1) + line.lstrip()
-    else:
-      # Check for templated parameter that is split across multiple lines
-      endpos = line.rfind('>')
-      if endpos > -1:
-        (_, startline, startpos) = ReverseCloseExpression(
-            clean_lines, linenum, endpos)
-        if startpos > -1 and startline < linenum:
-          # Found the matching < on an earlier line, collect all
-          # pieces up to current line.
-          line = ''
-          for i in xrange(startline, linenum + 1):
-            line += clean_lines.elided[i].strip()
-
-  # Check for non-const references in function parameters.  A single '&' may
-  # be found in the following places:
-  #   inside expression: binary & for bitwise AND
-  #   inside expression: unary & for taking the address of something
-  #   inside declarators: reference parameter
-  # We will exclude the first two cases by checking that we are not inside a
-  # function body, including one that was just introduced by a trailing '{'.
-  # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
-  if (nesting_state.previous_stack_top and
-      not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or
-           isinstance(nesting_state.previous_stack_top, _NamespaceInfo))):
-    # Not at toplevel, not within a class, and not within a namespace
-    return
-
-  # Avoid initializer lists.  We only need to scan back from the
-  # current line for something that starts with ':'.
-  #
-  # We don't need to check the current line, since the '&' would
-  # appear inside the second set of parentheses on the current line as
-  # opposed to the first set.
-  if linenum > 0:
-    for i in xrange(linenum - 1, max(0, linenum - 10), -1):
-      previous_line = clean_lines.elided[i]
-      if not Search(r'[),]\s*$', previous_line):
-        break
-      if Match(r'^\s*:\s+\S', previous_line):
-        return
-
-  # Avoid preprocessors
-  if Search(r'\\\s*$', line):
-    return
-
-  # Avoid constructor initializer lists
-  if IsInitializerList(clean_lines, linenum):
-    return
-
-  # We allow non-const references in a few standard places, like functions
-  # called "swap()" or iostream operators like "<<" or ">>".  Do not check
-  # those function parameters.
-  #
-  # We also accept & in static_assert, which looks like a function but
-  # it's actually a declaration expression.
-  whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
-                           r'operator\s*[<>][<>]|'
-                           r'static_assert|COMPILE_ASSERT'
-                           r')\s*\(')
-  if Search(whitelisted_functions, line):
-    return
-  elif not Search(r'\S+\([^)]*$', line):
-    # Don't see a whitelisted function on this line.  Actually we
-    # didn't see any function name on this line, so this is likely a
-    # multi-line parameter list.  Try a bit harder to catch this case.
-    for i in xrange(2):
-      if (linenum > i and
-          Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
-        return
-
-  decls = ReplaceAll(r'{[^}]*}', ' ', line)  # exclude function body
-  for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
-    if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter):
-      error(filename, linenum, 'runtime/references', 2,
-            'Is this a non-const reference? '
-            'If so, make const or use a pointer: ' +
-            ReplaceAll(' *<', '<', parameter))
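-  # e.g. 'void Update(Foo& foo)' would be flagged here, while
-  # 'void Update(const Foo& foo)' and 'void Update(Foo* foo)' would not
-  # (illustrative).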
-
-
-def CheckCasts(filename, clean_lines, linenum, error):
-  """Various cast related checks.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Check to see if they're using a conversion function cast.
-  # I just try to capture the most common basic types, though there are more.
-  # Parameterless conversion functions, such as bool(), are allowed as they are
-  # probably a member operator declaration or default constructor.
-  match = Search(
-      r'(\bnew\s+|\S<\s*(?:const\s+)?)?\b'
-      r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
-      r'(\([^)].*)', line)
-  expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
-  if match and not expecting_function:
-    matched_type = match.group(2)
-
-    # matched_new_or_template is used to silence two false positives:
-    # - New operators
-    # - Template arguments with function types
-    #
-    # For template arguments, we match on types immediately following
-    # an opening bracket without any spaces.  This is a fast way to
-    # silence the common case where the function type is the first
-    # template argument.  False negative with less-than comparison is
-    # avoided because those operators are usually followed by a space.
-    #
-    #   function<double(double)>   // bracket + no space = false positive
-    #   value < double(42)         // bracket + space = true positive
-    matched_new_or_template = match.group(1)
-
-    # Avoid arrays by looking for brackets that come after the closing
-    # parenthesis.
-    if Match(r'\([^()]+\)\s*\[', match.group(3)):
-      return
-
-    # Other things to ignore:
-    # - Function pointers
-    # - Casts to pointer types
-    # - Placement new
-    # - Alias declarations
-    matched_funcptr = match.group(3)
-    if (matched_new_or_template is None and
-        not (matched_funcptr and
-             (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
-                    matched_funcptr) or
-              matched_funcptr.startswith('(*)'))) and
-        not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
-        not Search(r'new\(\S+\)\s*' + matched_type, line)):
-      error(filename, linenum, 'readability/casting', 4,
-            'Using deprecated casting style.  '
-            'Use static_cast<%s>(...) instead' %
-            matched_type)
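-  # e.g. 'int x = int(f);' would be flagged above with a static_cast
-  # suggestion, while 'function<double(double)> g;' would not, thanks to
-  # the bracket heuristic (illustrative).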
-
-  if not expecting_function:
-    CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
-                    r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
-
-  # This doesn't catch all cases. Consider (const char * const)"hello".
-  #
-  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
-  # compile).
-  if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast',
-                     r'\((char\s?\*+\s?)\)\s*"', error):
-    pass
-  else:
-    # Check pointer casts for other than string constants
-    CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast',
-                    r'\((\w+\s?\*+\s?)\)', error)
-
-  # In addition, we look for people taking the address of a cast.  This
-  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
-  # point where you think.
-  #
-  # Some non-identifier character is required before the '&' for the
-  # expression to be recognized as a cast.  These are casts:
-  #   expression = &static_cast<int*>(temporary());
-  #   function(&(int*)(temporary()));
-  #
-  # This is not a cast:
-  #   reference_type&(int* function_param);
-  match = Search(
-      r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
-      r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
-  if match:
-    # Try a better error message when the & is bound to something
-    # dereferenced by the casted pointer, as opposed to the casted
-    # pointer itself.
-    parenthesis_error = False
-    match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
-    if match:
-      _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
-      if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
-        _, y2, x2 = CloseExpression(clean_lines, y1, x1)
-        if x2 >= 0:
-          extended_line = clean_lines.elided[y2][x2:]
-          if y2 < clean_lines.NumLines() - 1:
-            extended_line += clean_lines.elided[y2 + 1]
-          if Match(r'\s*(?:->|\[)', extended_line):
-            parenthesis_error = True
-
-    if parenthesis_error:
-      error(filename, linenum, 'readability/casting', 4,
-            ('Are you taking an address of something dereferenced '
-             'from a cast?  Wrapping the dereferenced expression in '
-             'parentheses will make the binding more obvious'))
-    else:
-      error(filename, linenum, 'runtime/casting', 4,
-            ('Are you taking an address of a cast?  '
-             'This is dangerous: could be a temp var.  '
-             'Take the address before doing the cast, rather than after'))
-
-
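As a standalone illustration of the pattern above, here is a minimal sketch using plain `re` in place of the `Search` helper (in cpplint the warning additionally requires `ExpectingFunctionArgs` to be False):

    import re

    # The conversion-cast pattern from CheckCasts.
    pattern = (r'(\bnew\s+|\S<\s*(?:const\s+)?)?\b'
               r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
               r'(\([^)].*)')

    for snippet in ['function<double(double)> f;',  # bracket + no space: silenced
                    'value < double(42);',          # bracket + space: real cast
                    'int x = int(3.5);']:           # conversion-function cast
        m = re.search(pattern, snippet)
        if m:
            # group(1) is matched_new_or_template; non-None means silenced.
            print(snippet, '->', 'silenced' if m.group(1) else 'would warn')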
-def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
-  """Checks for a C-style cast by looking for the pattern.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    cast_type: The string for the C++ cast to recommend.  This is either
-      reinterpret_cast, static_cast, or const_cast, depending.
-    pattern: The regular expression used to find C-style casts.
-    error: The function to call with any errors found.
-
-  Returns:
-    True if an error was emitted.
-    False otherwise.
-  """
-  line = clean_lines.elided[linenum]
-  match = Search(pattern, line)
-  if not match:
-    return False
-
-  # Exclude lines with keywords that tend to look like casts
-  context = line[0:match.start(1) - 1]
-  if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context):
-    return False
-
-  # Try expanding current context to see if we are one level of
-  # parentheses inside a macro.
-  if linenum > 0:
-    for i in xrange(linenum - 1, max(0, linenum - 5), -1):
-      context = clean_lines.elided[i] + context
-  if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
-    return False
-
-  # operator++(int) and operator--(int)
-  if context.endswith(' operator++') or context.endswith(' operator--'):
-    return False
-
-  # A single unnamed argument for a function tends to look like old
-  # style cast.  If we see those, don't issue warnings for deprecated
-  # casts, instead issue warnings for unnamed arguments where
-  # appropriate.
-  #
-  # These are things that we want warnings for, since the style guide
-  # explicitly requires all parameters to be named:
-  #   Function(int);
-  #   Function(int) {
-  #   ConstMember(int) const;
-  #   ConstMember(int) const {
-  #   ExceptionMember(int) throw (...);
-  #   ExceptionMember(int) throw (...) {
-  #   PureVirtual(int) = 0;
-  #   [](int) -> bool {
-  #
-  # These are functions of some sort, where the compiler would be fine
-  # if they had named parameters, but people often omit those
-  # identifiers to reduce clutter:
-  #   (FunctionPointer)(int);
-  #   (FunctionPointer)(int) = value;
-  #   Function((function_pointer_arg)(int))
-  #   Function((function_pointer_arg)(int), int param)
-  #   <TemplateArgument(int)>;
-  #   <(FunctionPointerTemplateArgument)(int)>;
-  remainder = line[match.end(0):]
-  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
-           remainder):
-    # Looks like an unnamed parameter.
-
-    # Don't warn on any kind of template arguments.
-    if Match(r'^\s*>', remainder):
-      return False
-
-    # Don't warn on assignments to function pointers, but keep warnings for
-    # unnamed parameters to pure virtual functions.  Note that this pattern
-    # will also pass on assignments of "0" to function pointers, but the
-    # preferred values for those would be "nullptr" or "NULL".
-    matched_zero = Match(r'^\s=\s*(\S+)\s*;', remainder)
-    if matched_zero and matched_zero.group(1) != '0':
-      return False
-
-    # Don't warn on function pointer declarations.  For this we need
-    # to check what came before the "(type)" string.
-    if Match(r'.*\)\s*$', line[0:match.start(0)]):
-      return False
-
-    # Don't warn if the parameter is named with block comments, e.g.:
-    #  Function(int /*unused_param*/);
-    raw_line = clean_lines.raw_lines[linenum]
-    if '/*' in raw_line:
-      return False
-
-    # Passed all filters, issue warning here.
-    error(filename, linenum, 'readability/function', 3,
-          'All parameters should be named in a function')
-    return True
-
-  # At this point, all that should be left is actual casts.
-  error(filename, linenum, 'readability/casting', 4,
-        'Using C-style cast.  Use %s<%s>(...) instead' %
-        (cast_type, match.group(1)))
-
-  return True
-
-
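To make the two branches concrete, a simplified sketch with plain `re` (the real check applies several more filters first, e.g. for `sizeof` contexts and function-pointer declarations):

    import re

    cast_re = r'\((int|float|double|bool|char|u?int(16|32|64))\)'
    for snippet in ['int x = (int)3.5;',                 # followed by a value: C-style cast
                    'virtual void Function(int) = 0;']:  # followed by '= 0;': unnamed param
        m = re.search(cast_re, snippet)
        remainder = snippet[m.end(0):]
        unnamed = re.match(
            r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)', remainder)
        print(snippet, '->',
              'readability/function' if unnamed else 'readability/casting')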
-def ExpectingFunctionArgs(clean_lines, linenum):
-  """Checks whether where function type arguments are expected.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-
-  Returns:
-    True if the line at 'linenum' is inside something that expects arguments
-    of function types.
-  """
-  line = clean_lines.elided[linenum]
-  return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
-          (linenum >= 2 and
-           (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
-                  clean_lines.elided[linenum - 1]) or
-            Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
-                  clean_lines.elided[linenum - 2]) or
-            Search(r'\bstd::m?function\s*\<\s*$',
-                   clean_lines.elided[linenum - 1]))))
-
-
-_HEADERS_CONTAINING_TEMPLATES = (
-    ('<deque>', ('deque',)),
-    ('<functional>', ('unary_function', 'binary_function',
-                      'plus', 'minus', 'multiplies', 'divides', 'modulus',
-                      'negate',
-                      'equal_to', 'not_equal_to', 'greater', 'less',
-                      'greater_equal', 'less_equal',
-                      'logical_and', 'logical_or', 'logical_not',
-                      'unary_negate', 'not1', 'binary_negate', 'not2',
-                      'bind1st', 'bind2nd',
-                      'pointer_to_unary_function',
-                      'pointer_to_binary_function',
-                      'ptr_fun',
-                      'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
-                      'mem_fun_ref_t',
-                      'const_mem_fun_t', 'const_mem_fun1_t',
-                      'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
-                      'mem_fun_ref',
-                     )),
-    ('<limits>', ('numeric_limits',)),
-    ('<list>', ('list',)),
-    ('<map>', ('map', 'multimap',)),
-    ('<memory>', ('allocator',)),
-    ('<queue>', ('queue', 'priority_queue',)),
-    ('<set>', ('set', 'multiset',)),
-    ('<stack>', ('stack',)),
-    ('<string>', ('char_traits', 'basic_string',)),
-    ('<tuple>', ('tuple',)),
-    ('<utility>', ('pair',)),
-    ('<vector>', ('vector',)),
-
-    # gcc extensions.
-    # Note: std::hash is their hash, ::hash is our hash
-    ('<hash_map>', ('hash_map', 'hash_multimap',)),
-    ('<hash_set>', ('hash_set', 'hash_multiset',)),
-    ('<slist>', ('slist',)),
-    )
-
-_RE_PATTERN_STRING = re.compile(r'\bstring\b')
-
-_re_pattern_algorithm_header = []
-for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap',
-                  'transform'):
-  # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
-  # type::max().
-  _re_pattern_algorithm_header.append(
-      (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
-       _template,
-       '<algorithm>'))
-
-_re_pattern_templates = []
-for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
-  for _template in _templates:
-    _re_pattern_templates.append(
-        (re.compile(r'(\<|\b)' + _template + r'\s*\<'),
-         _template + '<>',
-         _header))
-
-
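For instance, the `<vector>` entry above produces a pattern that fires only on a `vector<` token, not on identifiers that merely contain the word; a quick sketch:

    import re

    vector_re = re.compile(r'(\<|\b)' + 'vector' + r'\s*\<')
    print(bool(vector_re.search('std::vector<int> v;')))  # True: suggest #include <vector>
    print(bool(vector_re.search('my_vector v;')))         # False: no 'vector<' token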
-def FilesBelongToSameModule(filename_cc, filename_h):
-  """Check if these two filenames belong to the same module.
-
-  The concept of a 'module' here is as follows:
-  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
-  same 'module' if they are in the same directory.
-  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
-  to belong to the same module here.
-
-  If the filename_cc contains a longer path than the filename_h, for example,
-  '/absolute/path/to/base/sysinfo.cc', and this file would include
-  'base/sysinfo.h', this function also produces the prefix needed to open the
-  header. This is used by the caller of this function to more robustly open the
-  header file. We don't have access to the real include paths in this context,
-  so we need this guesswork here.
-
-  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
-  according to this implementation. Because of this, this function gives
-  some false positives. This should be sufficiently rare in practice.
-
-  Args:
-    filename_cc: The path of the .cc file.
-    filename_h: The path of the header file.
-
-  Returns:
-    Tuple with a bool and a string:
-    bool: True if filename_cc and filename_h belong to the same module.
-    string: the additional prefix needed to open the header file.
-  """
-
-  if not filename_cc.endswith('.cc'):
-    return (False, '')
-  filename_cc = filename_cc[:-len('.cc')]
-  if filename_cc.endswith('_unittest'):
-    filename_cc = filename_cc[:-len('_unittest')]
-  elif filename_cc.endswith('_test'):
-    filename_cc = filename_cc[:-len('_test')]
-  filename_cc = filename_cc.replace('/public/', '/')
-  filename_cc = filename_cc.replace('/internal/', '/')
-
-  if not filename_h.endswith('.h'):
-    return (False, '')
-  filename_h = filename_h[:-len('.h')]
-  if filename_h.endswith('-inl'):
-    filename_h = filename_h[:-len('-inl')]
-  filename_h = filename_h.replace('/public/', '/')
-  filename_h = filename_h.replace('/internal/', '/')
-
-  files_belong_to_same_module = filename_cc.endswith(filename_h)
-  common_path = ''
-  if files_belong_to_same_module:
-    common_path = filename_cc[:-len(filename_h)]
-  return files_belong_to_same_module, common_path
-
-
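Two worked examples of the return value, using hypothetical paths (the second is the false positive documented in the docstring):

    >>> FilesBelongToSameModule('/src/base/sysinfo_test.cc', 'base/sysinfo.h')
    (True, '/src/')
    >>> FilesBelongToSameModule('tools/base/bar.cc', 'base/bar.h')
    (True, 'tools/')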
-def UpdateIncludeState(filename, include_dict, io=codecs):
-  """Fill up the include_dict with new includes found from the file.
-
-  Args:
-    filename: the name of the header to read.
-    include_dict: a dictionary in which the headers are inserted.
-    io: The io factory to use to read the file. Provided for testability.
-
-  Returns:
-    True if the header file was successfully opened and scanned for includes.
-    False otherwise.
-  """
-  headerfile = None
-  try:
-    headerfile = io.open(filename, 'r', 'utf8', 'replace')
-  except IOError:
-    return False
-  linenum = 0
-  for line in headerfile:
-    linenum += 1
-    clean_line = CleanseComments(line)
-    match = _RE_PATTERN_INCLUDE.search(clean_line)
-    if match:
-      include = match.group(2)
-      include_dict.setdefault(include, linenum)
-  return True
-
-
-def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
-                              io=codecs):
-  """Reports for missing stl includes.
-
-  This function will output warnings to make sure you are including the headers
-  necessary for the STL containers and functions that you use. We only give one
-  reason to include a header. For example, if you use both equal_to<> and
-  less<> in a .h file, only the one appearing later in the file will be
-  reported as a reason to include <functional>.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    include_state: An _IncludeState instance.
-    error: The function to call with any errors found.
-    io: The IO factory to use to read the header file. Provided for unittest
-        injection.
-  """
-  required = {}  # A map of header name to linenumber and the template entity.
-                 # Example of required: { '<functional>': (1219, 'less<>') }
-
-  for linenum in xrange(clean_lines.NumLines()):
-    line = clean_lines.elided[linenum]
-    if not line or line[0] == '#':
-      continue
-
-    # String is special -- it is a non-templatized type in STL.
-    matched = _RE_PATTERN_STRING.search(line)
-    if matched:
-      # Don't warn about strings in non-STL namespaces:
-      # (We check only the first match per line; good enough.)
-      prefix = line[:matched.start()]
-      if prefix.endswith('std::') or not prefix.endswith('::'):
-        required['<string>'] = (linenum, 'string')
-
-    for pattern, template, header in _re_pattern_algorithm_header:
-      if pattern.search(line):
-        required[header] = (linenum, template)
-
-    # The following check is just a speed-up; no semantics are changed.
-    if '<' not in line:  # Skip lines without '<' to reduce CPU time.
-      continue
-
-    for pattern, template, header in _re_pattern_templates:
-      if pattern.search(line):
-        required[header] = (linenum, template)
-
-  # The policy is that if you #include something in foo.h you don't need to
-  # include it again in foo.cc. Here, we will look at possible includes.
-  # Let's flatten the include_state include_list and copy it into a dictionary.
-  include_dict = dict([item for sublist in include_state.include_list
-                       for item in sublist])
-
-  # Did we find the header for this file (if any) and successfully load it?
-  header_found = False
-
-  # Use the absolute path so that matching works properly.
-  abs_filename = FileInfo(filename).FullName()
-
-  # For Emacs's flymake.
-  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
-  # by flymake and that file name might end with '_flymake.cc'. In that case,
-  # restore original file name here so that the corresponding header file can be
-  # found.
-  # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
-  # instead of 'foo_flymake.h'
-  abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
-
-  # include_dict is modified during iteration, so we iterate over a copy of
-  # the keys.
-  header_keys = include_dict.keys()
-  for header in header_keys:
-    (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
-    fullpath = common_path + header
-    if same_module and UpdateIncludeState(fullpath, include_dict, io):
-      header_found = True
-
-  # If we can't find the header file for a .cc, assume it's because we don't
-  # know where to look. In that case we'll give up as we're not sure they
-  # didn't include it in the .h file.
-  # TODO(unknown): Do a better job of finding .h files so we are confident that
-  # not having the .h file means there isn't one.
-  if filename.endswith('.cc') and not header_found:
-    return
-
-  # All the lines have been processed, report the errors found.
-  for required_header_unstripped in required:
-    template = required[required_header_unstripped][1]
-    if required_header_unstripped.strip('<>"') not in include_dict:
-      error(filename, required[required_header_unstripped][0],
-            'build/include_what_you_use', 4,
-            'Add #include ' + required_header_unstripped + ' for ' + template)
-
-
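A minimal sketch of the prefix test above: `std::string` and unqualified `string` are flagged, while other namespaces are not:

    import re

    _RE_PATTERN_STRING = re.compile(r'\bstring\b')
    for line in ['std::string name;',    # flagged: needs <string>
                 'other::string name;',  # skipped: non-STL namespace
                 'string name;']:        # flagged: unqualified
        m = _RE_PATTERN_STRING.search(line)
        prefix = line[:m.start()]
        print(line, '->', prefix.endswith('std::') or not prefix.endswith('::'))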
-_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
-
-
-def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
-  """Check that make_pair's template arguments are deduced.
-
-  G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
-  specified explicitly, and such use isn't intended in any case.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
-  if match:
-    error(filename, linenum, 'build/explicit_make_pair',
-          4,  # 4 = high confidence
-          'For C++11-compatibility, omit template arguments from make_pair'
-          ' OR use pair directly OR if appropriate, construct a pair directly')
-
-
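The pattern is deliberately narrow: only an explicit template argument list after `make_pair` triggers it. A quick sketch:

    import re

    _RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
    print(bool(_RE_PATTERN_EXPLICIT_MAKEPAIR.search(
        'auto p = make_pair<int, int>(1, 2);')))  # True: flagged
    print(bool(_RE_PATTERN_EXPLICIT_MAKEPAIR.search(
        'auto p = make_pair(1, 2);')))            # False: arguments deduced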
-def CheckDefaultLambdaCaptures(filename, clean_lines, linenum, error):
-  """Check that default lambda captures are not used.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # A lambda introducer specifies a default capture if it starts with "[="
-  # or if it starts with "[&" _not_ followed by an identifier.
-  match = Match(r'^(.*)\[\s*(?:=|&[^\w])', line)
-  if match:
-    # Found a potential error, check what comes after the lambda-introducer.
-    # If it's not open parenthesis (for lambda-declarator) or open brace
-    # (for compound-statement), it's not a lambda.
-    line, _, pos = CloseExpression(clean_lines, linenum, len(match.group(1)))
-    if pos >= 0 and Match(r'^\s*[{(]', line[pos:]):
-      error(filename, linenum, 'build/c++11',
-            4,  # 4 = high confidence
-            'Default lambda captures are an unapproved C++ feature.')
-
-
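A sketch of the introducer regex alone; the real check additionally uses CloseExpression to confirm that `(` or `{` follows the introducer:

    import re

    for line in ['auto f = [=](int x) { return x; };',        # default capture: flagged
                 'auto g = [&](int x) { return x; };',        # default capture: flagged
                 'auto h = [&x](int y) { return x + y; };']:  # explicit capture: OK
        print(line, '->', bool(re.match(r'^(.*)\[\s*(?:=|&[^\w])', line)))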
-def CheckRedundantVirtual(filename, clean_lines, linenum, error):
-  """Check if line contains a redundant "virtual" function-specifier.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  # Look for "virtual" on current line.
-  line = clean_lines.elided[linenum]
-  virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
-  if not virtual: return
-
-  # Ignore "virtual" keywords that are near access-specifiers.  These
-  # are only used in class base-specifier and do not apply to member
-  # functions.
-  if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
-      Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
-    return
-
-  # Ignore the "virtual" keyword from virtual base classes.  Usually
-  # there is a colon on the same line in these cases (virtual base
-  if Match(r'^.*[^:]:[^:].*$', line): return
-
-  # Look for the next opening parenthesis.  This is the start of the
-  # parameter list (possibly on the next line shortly after virtual).
-  # TODO(unknown): doesn't work if there are virtual functions with
-  # decltype() or other things that use parentheses, but csearch suggests
-  # that this is rare.
-  end_col = -1
-  end_line = -1
-  start_col = len(virtual.group(2))
-  for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
-    line = clean_lines.elided[start_line][start_col:]
-    parameter_list = Match(r'^([^(]*)\(', line)
-    if parameter_list:
-      # Match parentheses to find the end of the parameter list
-      (_, end_line, end_col) = CloseExpression(
-          clean_lines, start_line, start_col + len(parameter_list.group(1)))
-      break
-    start_col = 0
-
-  if end_col < 0:
-    return  # Couldn't find end of parameter list, give up
-
-  # Look for "override" or "final" after the parameter list
-  # (possibly on the next few lines).
-  for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
-    line = clean_lines.elided[i][end_col:]
-    match = Search(r'\b(override|final)\b', line)
-    if match:
-      error(filename, linenum, 'readability/inheritance', 4,
-            ('"virtual" is redundant since function is '
-             'already declared as "%s"' % match.group(1)))
-
-    # Set end_col to check whole lines after we are done with the
-    # first line.
-    end_col = 0
-    if Search(r'[^\w]\s*$', line):
-      break
-
-
-def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
-  """Check if line contains a redundant "override" or "final" virt-specifier.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  # Look for closing parenthesis nearby.  We need one to confirm where
-  # the declarator ends and where the virt-specifier starts to avoid
-  # false positives.
-  line = clean_lines.elided[linenum]
-  declarator_end = line.rfind(')')
-  if declarator_end >= 0:
-    fragment = line[declarator_end:]
-  else:
-    if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
-      fragment = line
-    else:
-      return
-
-  # Check that at most one of "override" or "final" is present, not both
-  if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
-    error(filename, linenum, 'readability/inheritance', 4,
-          ('"override" is redundant since function is '
-           'already declared as "final"'))
-
-
-
-
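A minimal reproduction of the redundancy test, assuming the declarator's closing parenthesis is on the same line:

    import re

    line = 'void Draw() const final override;'
    fragment = line[line.rfind(')'):]
    redundant = (re.search(r'\boverride\b', fragment) and
                 re.search(r'\bfinal\b', fragment))
    print(bool(redundant))  # True: "override" is redundant given "final"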
-# Returns true if we are at a new block, and it is directly
-# inside of a namespace.
-def IsBlockInNameSpace(nesting_state, is_forward_declaration):
-  """Checks that the new block is directly in a namespace.
-
-  Args:
-    nesting_state: The _NestingState object that contains info about our state.
-    is_forward_declaration: Whether the line is a forward class declaration.
-  Returns:
-    Whether or not the new block is directly in a namespace.
-  """
-  if is_forward_declaration:
-    return (len(nesting_state.stack) >= 1 and
-            isinstance(nesting_state.stack[-1], _NamespaceInfo))
-
-  return (len(nesting_state.stack) > 1 and
-          nesting_state.stack[-1].check_namespace_indentation and
-          isinstance(nesting_state.stack[-2], _NamespaceInfo))
-
-
-def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
-                                    raw_lines_no_comments, linenum):
-  """This method determines if we should apply our namespace indentation check.
-
-  Args:
-    nesting_state: The current nesting state.
-    is_namespace_indent_item: If we just put a new class on the stack, True.
-      If the top of the stack is not a class, or we did not recently
-      add the class, False.
-    raw_lines_no_comments: The lines without the comments.
-    linenum: The current line number we are processing.
-
-  Returns:
-    True if we should apply our namespace indentation check. Currently, it
-    only works for classes and namespaces inside of a namespace.
-  """
-
-  is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments,
-                                                     linenum)
-
-  if not (is_namespace_indent_item or is_forward_declaration):
-    return False
-
-  # If we are in a macro, we do not want to check the namespace indentation.
-  if IsMacroDefinition(raw_lines_no_comments, linenum):
-    return False
-
-  return IsBlockInNameSpace(nesting_state, is_forward_declaration)
-
-
-# Call this method if the line is directly inside of a namespace.
-# If the line above is blank (excluding comments) or the start of
-# an inner namespace, it cannot be indented.
-def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum,
-                                    error):
-  line = raw_lines_no_comments[linenum]
-  if Match(r'^\s+', line):
-    error(filename, linenum, 'runtime/indentation_namespace', 4,
-          'Do not indent within a namespace')
-
-
-def ProcessLine(filename, file_extension, clean_lines, line,
-                include_state, function_state, nesting_state, error,
-                extra_check_functions=[]):
-  """Processes a single line in the file.
-
-  Args:
-    filename: Filename of the file that is being processed.
-    file_extension: The extension (dot not included) of the file.
-    clean_lines: An array of strings, each representing a line of the file,
-                 with comments stripped.
-    line: Number of line being processed.
-    include_state: An _IncludeState instance in which the headers are inserted.
-    function_state: A _FunctionState instance which counts function lines, etc.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: A callable to which errors are reported, which takes 4 arguments:
-           filename, line number, error level, and message
-    extra_check_functions: An array of additional check functions that will be
-                           run on each source line. Each function takes 4
-                           arguments: filename, clean_lines, line, error
-  """
-  raw_lines = clean_lines.raw_lines
-  ParseNolintSuppressions(filename, raw_lines[line], line, error)
-  nesting_state.Update(filename, clean_lines, line, error)
-  CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
-                               error)
-  if nesting_state.InAsmBlock(): return
-  CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
-  CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
-  CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
-  CheckLanguage(filename, clean_lines, line, file_extension, include_state,
-                nesting_state, error)
-  CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
-  CheckForNonStandardConstructs(filename, clean_lines, line,
-                                nesting_state, error)
-  CheckVlogArguments(filename, clean_lines, line, error)
-  CheckPosixThreading(filename, clean_lines, line, error)
-  CheckInvalidIncrement(filename, clean_lines, line, error)
-  CheckMakePairUsesDeduction(filename, clean_lines, line, error)
-  CheckDefaultLambdaCaptures(filename, clean_lines, line, error)
-  CheckRedundantVirtual(filename, clean_lines, line, error)
-  CheckRedundantOverrideOrFinal(filename, clean_lines, line, error)
-  for check_fn in extra_check_functions:
-    check_fn(filename, clean_lines, line, error)
-
-def FlagCxx11Features(filename, clean_lines, linenum, error):
-  """Flag those c++11 features that we only allow in certain places.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Flag unapproved C++11 headers.
-  include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
-  if include and include.group(1) in ('cfenv',
-                                      'condition_variable',
-                                      'fenv.h',
-                                      'future',
-                                      'mutex',
-                                      'thread',
-                                      'chrono',
-                                      'ratio',
-                                      'regex',
-                                      'system_error',
-                                     ):
-    error(filename, linenum, 'build/c++11', 5,
-          ('<%s> is an unapproved C++11 header.') % include.group(1))
-
-  # The only place where we need to worry about C++11 keywords and library
-  # features in preprocessor directives is in macro definitions.
-  if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return
-
-  # These are classes and free functions.  The classes are always
-  # mentioned as std::*, but we only catch the free functions if
-  # they're not found by ADL.  They're alphabetical by header.
-  for top_name in (
-      # type_traits
-      'alignment_of',
-      'aligned_union',
-      ):
-    if Search(r'\bstd::%s\b' % top_name, line):
-      error(filename, linenum, 'build/c++11', 5,
-            ('std::%s is an unapproved C++11 class or function.  Send the '
-             'style reviewers an example of where it would make your code '
-             'more readable, and they may let you use it.') % top_name)
-
-
-def ProcessFileData(filename, file_extension, lines, error,
-                    extra_check_functions=[]):
-  """Performs lint checks and reports any errors to the given error function.
-
-  Args:
-    filename: Filename of the file that is being processed.
-    file_extension: The extension (dot not included) of the file.
-    lines: An array of strings, each representing a line of the file, with the
-           last element being empty if the file is terminated with a newline.
-    error: A callable to which errors are reported, which takes 4 arguments:
-           filename, line number, error level, and message
-    extra_check_functions: An array of additional check functions that will be
-                           run on each source line. Each function takes 4
-                           arguments: filename, clean_lines, line, error
-  """
-  lines = (['// marker so line numbers and indices both start at 1'] + lines +
-           ['// marker so line numbers end in a known way'])
-
-  include_state = _IncludeState()
-  function_state = _FunctionState()
-  nesting_state = NestingState()
-
-  ResetNolintSuppressions()
-
-  CheckForCopyright(filename, lines, error)
-
-  RemoveMultiLineComments(filename, lines, error)
-  clean_lines = CleansedLines(lines)
-
-  if file_extension == 'h':
-    CheckForHeaderGuard(filename, clean_lines, error)
-
-  for line in xrange(clean_lines.NumLines()):
-    ProcessLine(filename, file_extension, clean_lines, line,
-                include_state, function_state, nesting_state, error,
-                extra_check_functions)
-    FlagCxx11Features(filename, clean_lines, line, error)
-  nesting_state.CheckCompletedBlocks(filename, error)
-
-  CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
-
-  # Check that the .cc file has included its header if it exists.
-  if file_extension == 'cc':
-    CheckHeaderFileIncluded(filename, include_state, error)
-
-  # We check here rather than inside ProcessLine so that we see raw
-  # lines rather than "cleaned" lines.
-  CheckForBadCharacters(filename, lines, error)
-
-  CheckForNewlineAtEOF(filename, lines, error)
-
-def ProcessConfigOverrides(filename):
-  """ Loads the configuration files and processes the config overrides.
-
-  Args:
-    filename: The name of the file being processed by the linter.
-
-  Returns:
-    False if the current |filename| should not be processed further.
-  """
-
-  abs_filename = os.path.abspath(filename)
-  cfg_filters = []
-  keep_looking = True
-  while keep_looking:
-    abs_path, base_name = os.path.split(abs_filename)
-    if not base_name:
-      break  # Reached the root directory.
-
-    cfg_file = os.path.join(abs_path, "CPPLINT.cfg")
-    abs_filename = abs_path
-    if not os.path.isfile(cfg_file):
-      continue
-
-    try:
-      with open(cfg_file) as file_handle:
-        for line in file_handle:
-          line, _, _ = line.partition('#')  # Remove comments.
-          if not line.strip():
-            continue
-
-          name, _, val = line.partition('=')
-          name = name.strip()
-          val = val.strip()
-          if name == 'set noparent':
-            keep_looking = False
-          elif name == 'filter':
-            cfg_filters.append(val)
-          elif name == 'exclude_files':
-            # When matching exclude_files pattern, use the base_name of
-            # the current file name or the directory name we are processing.
-            # For example, if we are checking for lint errors in /foo/bar/baz.cc
-            # and we found the .cfg file at /foo/CPPLINT.cfg, then the config
-            # file's "exclude_files" filter is meant to be checked against "bar"
-            # and not "baz" nor "bar/baz.cc".
-            if base_name:
-              pattern = re.compile(val)
-              if pattern.match(base_name):
-                sys.stderr.write('Ignoring "%s": file excluded by "%s". '
-                                 'File path component "%s" matches '
-                                 'pattern "%s"\n' %
-                                 (filename, cfg_file, base_name, val))
-                return False
-          elif name == 'linelength':
-            global _line_length
-            try:
-              _line_length = int(val)
-            except ValueError:
-              sys.stderr.write('Line length must be numeric.\n')
-          else:
-            sys.stderr.write(
-                'Invalid configuration option (%s) in file %s\n' %
-                (name, cfg_file))
-
-    except IOError:
-      sys.stderr.write(
-          "Skipping config file '%s': Can't open for reading\n" % cfg_file)
-      keep_looking = False
-
-  # Apply all the accumulated filters in reverse order (top-level directory
-  # config options having the least priority).
-  for cfg_filter in reversed(cfg_filters):
-    _AddFilters(cfg_filter)
-
-  return True
-
-
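For reference, a small hypothetical CPPLINT.cfg that exercises every option the parsing loop above understands:

    # Hypothetical example; text after '#' is stripped by the parser.
    set noparent
    filter=-build/include,+readability/casting
    exclude_files=third_party
    linelength=100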
-def ProcessFile(filename, vlevel, extra_check_functions=[]):
-  """Does google-lint on a single file.
-
-  Args:
-    filename: The name of the file to parse.
-
-    vlevel: The level of errors to report.  Every error of confidence
-    >= verbose_level will be reported.  0 is a good default.
-
-    extra_check_functions: An array of additional check functions that will be
-                           run on each source line. Each function takes 4
-                           arguments: filename, clean_lines, line, error
-  """
-
-  _SetVerboseLevel(vlevel)
-  _BackupFilters()
-
-  if not ProcessConfigOverrides(filename):
-    _RestoreFilters()
-    return
-
-  lf_lines = []
-  crlf_lines = []
-  try:
-    # Support the UNIX convention of using "-" for stdin.  Note that
-    # we are not opening the file with universal newline support
-    # (which codecs doesn't support anyway), so the resulting lines do
-    # contain trailing '\r' characters if we are reading a file that
-    # has CRLF endings.
-    # If after the split a trailing '\r' is present, it is removed
-    # below.
-    if filename == '-':
-      lines = codecs.StreamReaderWriter(sys.stdin,
-                                        codecs.getreader('utf8'),
-                                        codecs.getwriter('utf8'),
-                                        'replace').read().split('\n')
-    else:
-      lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
-
-    # Remove trailing '\r'.
-    # The -1 accounts for the extra trailing blank line we get from split()
-    for linenum in range(len(lines) - 1):
-      if lines[linenum].endswith('\r'):
-        lines[linenum] = lines[linenum].rstrip('\r')
-        crlf_lines.append(linenum + 1)
-      else:
-        lf_lines.append(linenum + 1)
-
-  except IOError:
-    sys.stderr.write(
-        "Skipping input '%s': Can't open for reading\n" % filename)
-    _RestoreFilters()
-    return
-
-  # Note: if no dot is found, this will give the entire filename as the extension.
-  file_extension = filename[filename.rfind('.') + 1:]
-
-  # When reading from stdin, the extension is unknown, so no cpplint tests
-  # should rely on the extension.
-  if filename != '-' and file_extension not in _valid_extensions:
-    sys.stderr.write('Ignoring %s; not a valid file name '
-                     '(%s)\n' % (filename, ', '.join(_valid_extensions)))
-  else:
-    ProcessFileData(filename, file_extension, lines, Error,
-                    extra_check_functions)
-
-    # If end-of-line sequences are a mix of LF and CR-LF, issue
-    # warnings on the lines with CR.
-    #
-    # Don't issue any warnings if all lines are uniformly LF or CR-LF,
-    # since critique can handle these just fine, and the style guide
-    # doesn't dictate a particular end of line sequence.
-    #
-    # We can't depend on os.linesep to determine what the desired
-    # end-of-line sequence should be, since that will return the
-    # server-side end-of-line sequence.
-    if lf_lines and crlf_lines:
-      # Warn on every line with CR.  An alternative approach might be to
-      # check whether the file is mostly CRLF or just LF, and warn on the
-      # minority; we bias toward LF here since most tools prefer LF.
-      for linenum in crlf_lines:
-        Error(filename, linenum, 'whitespace/newline', 1,
-              'Unexpected \\r (^M) found; better to use only \\n')
-
-  sys.stderr.write('Done processing %s\n' % filename)
-  _RestoreFilters()
-
-
-def PrintUsage(message):
-  """Prints a brief usage string and exits, optionally with an error message.
-
-  Args:
-    message: The optional error message.
-  """
-  sys.stderr.write(_USAGE)
-  if message:
-    sys.exit('\nFATAL ERROR: ' + message)
-  else:
-    sys.exit(1)
-
-
-def PrintCategories():
-  """Prints a list of all the error-categories used by error messages.
-
-  These are the categories used to filter messages via --filter.
-  """
-  sys.stderr.write(''.join('  %s\n' % cat for cat in _ERROR_CATEGORIES))
-  sys.exit(0)
-
-
-def ParseArguments(args):
-  """Parses the command line arguments.
-
-  This may set the output format and verbosity level as side-effects.
-
-  Args:
-    args: The command line arguments.
-
-  Returns:
-    The list of filenames to lint.
-  """
-  try:
-    (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
-                                                 'counting=',
-                                                 'filter=',
-                                                 'root=',
-                                                 'linelength=',
-                                                 'extensions='])
-  except getopt.GetoptError:
-    PrintUsage('Invalid arguments.')
-
-  verbosity = _VerboseLevel()
-  output_format = _OutputFormat()
-  filters = ''
-  counting_style = ''
-
-  for (opt, val) in opts:
-    if opt == '--help':
-      PrintUsage(None)
-    elif opt == '--output':
-      if val not in ('emacs', 'vs7', 'eclipse'):
-        PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.')
-      output_format = val
-    elif opt == '--verbose':
-      verbosity = int(val)
-    elif opt == '--filter':
-      filters = val
-      if not filters:
-        PrintCategories()
-    elif opt == '--counting':
-      if val not in ('total', 'toplevel', 'detailed'):
-        PrintUsage('Valid counting options are total, toplevel, and detailed')
-      counting_style = val
-    elif opt == '--root':
-      global _root
-      _root = val
-    elif opt == '--linelength':
-      global _line_length
-      try:
-        _line_length = int(val)
-      except ValueError:
-        PrintUsage('Line length must be numeric.')
-    elif opt == '--extensions':
-      global _valid_extensions
-      try:
-        _valid_extensions = set(val.split(','))
-      except ValueError:
-        PrintUsage('Extensions must be a comma-separated list.')
-
-  if not filenames:
-    PrintUsage('No files were specified.')
-
-  _SetOutputFormat(output_format)
-  _SetVerboseLevel(verbosity)
-  _SetFilters(filters)
-  _SetCountingStyle(counting_style)
-
-  return filenames
-
-
-def main():
-  filenames = ParseArguments(sys.argv[1:])
-
-  # Change stderr to write with replacement characters so we don't die
-  # if we try to print something containing non-ASCII characters.
-  sys.stderr = codecs.StreamReaderWriter(sys.stderr,
-                                         codecs.getreader('utf8'),
-                                         codecs.getwriter('utf8'),
-                                         'replace')
-
-  _cpplint_state.ResetErrorCounts()
-  for filename in filenames:
-    ProcessFile(filename, _cpplint_state.verbose_level)
-  _cpplint_state.PrintErrorCounts()
-
-  sys.exit(_cpplint_state.error_count > 0)
-
-
-if __name__ == '__main__':
-  main()
diff --git a/python/rafiki/__init__.py b/tool/debian-python2/postinst
similarity index 89%
copy from python/rafiki/__init__.py
copy to tool/debian-python2/postinst
index 3aa745b..2d63734 100644
--- a/python/rafiki/__init__.py
+++ b/tool/debian-python2/postinst
@@ -1,4 +1,4 @@
-#
+#!/bin/bash
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +16,6 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+pip install /usr/local/lib/singa/python
+rm -r /usr/local/lib/singa
+
diff --git a/tool/debian/postinst b/tool/debian/postinst
index 2d63734..433ca49 100644
--- a/tool/debian/postinst
+++ b/tool/debian/postinst
@@ -16,6 +16,6 @@
 # limitations under the License.
 #
 
-pip install /usr/local/lib/singa/python
+pip3 install /usr/local/lib/singa/python
 rm -r /usr/local/lib/singa
 
diff --git a/tool/docker/README.md b/tool/docker/README.md
index d766cbc..740735e 100644
--- a/tool/docker/README.md
+++ b/tool/docker/README.md
@@ -1,35 +1,76 @@
-# SINGA Docker Images
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
 
-## Availabe images
+      http://www.apache.org/licenses/LICENSE-2.0
 
-| Tag | OS version | devel/runtime | Device|CUDA/CUDNN|
-|:----|:-----------|:--------------|:------|:---------|
-|runtime| Ubuntu16.04|runtime|CPU|-|
-|runtime| Ubuntu16.04|runtime|CPU|-|
-|runtime-cuda| Ubuntu16.04|runtime|GPU|CUDA8.0+CUDNN5|
-|devel| Ubuntu16.04|devel|CPU|-|
-|devel-cuda| Ubuntu16.04|devel|GPU|CUDA8.0+CUDNN5|
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+# Docker Images
 
-## Usage
 
-    docker pull nusdbsystem/singa:<Tag>
-    docker run -it nusdbsystem/singa:<Tag> /bin/bash
+## Available tags
 
-* For the *devel* images, the container has a `incubator-singa` folder in the root directory,
-which has the latest SINGA code. The code has been compiled into `incubator-singa/build` directory and PySINGA has been installed.
-* For the *runtime* images, the container has only installed the PySINGA.
+* `devel`, with SINGA and the development packages installed on Ubuntu16.04 (no GPU)
+* `devel-cuda`, with SINGA, CUDA8.0, CUDNN5, and other development packages installed on Ubuntu16.04
 
-## Tag naming style
+## Use the existing Docker images
 
-    singa:devel|runtime[-OS][-CUDA|OPENCL][-CUDNN]
+Users can pull the Docker images from Dockerhub via
 
-* devel: development images with all dependent libs' header files installed and SINGA's source code;
-* runtime: the minimal images which can run SINGA programs.
-* OS: ubuntu, ubuntu14.04, centos, centos6
-* CUDA: cuda, cuda8.0, cuda7.0
-* CUDNN: cudnn, cudnn5, cudnn4
-* OPENCL: opencl, opencl1.2
+    docker pull apache/singa:1.2.0-cpu-devel-ubuntu18.04
+    # or
+    docker pull apache/singa:1.2.0-cuda10.0-cudnn7.4.2-devel-ubuntu18.04
+
+For more available tags, see the [apache/singa tags page](https://hub.docker.com/r/apache/singa/tags).
 
-By default, if the version is not included in the tag, the latest stable version is used.
-The default OS is ubuntu. The version is the latest stable version (e.g., 16.04 for now).
-For -cuda version, the **cudnn** is included by default. Their versions are also the latest stable version, i.e., cuda-8.0 and cudnn-5 for now.
+Run the docker container using
+
+    docker run -it apache/singa:1.2.0-cpu-devel-ubuntu18.04 /bin/bash
+    # or
+    nvidia-docker run -it apache/singa:1.2.0-cuda10.0-cudnn7.4.2-devel-ubuntu18.04 /bin/bash
+
+The latest SINGA code is under the `singa` folder.
+
+***Warning*** For the `1.2.0-cpu-devel-ubuntu18.04` image, the code is under `/root/singa`.
+
+## Create new Docker images from Dockerfile
+
+New Docker images can be created by executing the following command within the
+folder that contains the Dockerfile, e.g., tool/docker/devel/
+
+    docker build -t apache/singa:<TAG> -f Dockerfile .
+
+The `<TAG>` is named as
+
+    VERSION-devel|runtime[-CUDA|CPU][-CUDNN]
+
+* VERSION: e.g., 3.0.0
+* devel: development images with all dependent libs' header files installed and SINGA's source code
+* runtime: the minimal images which can run SINGA programs
+* CUDA: cuda10.0, cuda9.0
+* CUDNN: cudnn7
+
+Here are some example tags:
+
+`devel-cuda9-cudnn7`, `devel-cuda10-cudnn7`, `devel-cpu`, `runtime-gpu` and `runtime-cpu`
+
+
+Please follow the existing Dockerfiles under tool/docker/ to create other Dockerfiles.
+The folder structure is like
+
+    level1: devel|runtime
+    level2: Dockerfile, OS
+    level3: Dockerfile, CUDA|MKLDNN
+
+
+For example, the path of the Dockerfile for `devel-cuda9-cudnn7` is `tool/docker/devel/ubuntu/cuda9/Dockerfile`.
\ No newline at end of file
diff --git a/tool/docker/build.sh b/tool/docker/build.sh
index bc4ec40..3f0ab05 100755
--- a/tool/docker/build.sh
+++ b/tool/docker/build.sh
@@ -18,29 +18,19 @@
 # * limitations under the License.
 # */
 
-# build all docker images, must be exected under the root directory, i.e., incubator-singa/
+# build all docker images, must be executed under the root directory, i.e., singa/
 # ./build.sh PUSH would push the images to dockerhub/nusdbsystem and then delete the local image
 #   (used by Jenkins to avoid dangling images from multiple building)
 
 echo "###################"
-echo "build singa:runtime"
+echo "build singa:conda-cudax.y"
 echo "###################"
-docker build tool/docker/runtime/ --force-rm -t nusdbsystem/singa:runtime -t nusdbsystem/singa:latest
+# docker build tool/docker/devel/conda/cuda/ --force-rm -t nusdbsystem/singa:conda-cuda9.0-cudnn7.1.2
 
 echo "###################"
-echo "build singa:runtime-cuda"
+echo "build singa:cudax.y"
 echo "###################"
-docker build tool/docker/runtime/cuda --force-rm -t nusdbsystem/singa:runtime-cuda
-
-echo "###################"
-echo "build singa:devel"
-echo "###################"
-docker build tool/docker/devel/ --force-rm -t nusdbsystem/singa:devel
-
-echo "###################"
-echo "build singa:devel-cuda"
-echo "###################"
-docker build tool/docker/devel/cuda --force-rm -t nusdbsystem/singa:devel-cuda
+docker build tool/docker/devel/native/ubuntu/cuda9 --force-rm -t nusdbsystem/singa:cuda9-cudnn7
 
 if [ $1 = "PUSH" ]; then
   echo "##########################################"
diff --git a/tool/docker/devel/Dockerfile b/tool/docker/devel/Dockerfile
deleted file mode 100644
index c19738d..0000000
--- a/tool/docker/devel/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base ubuntu 16.04 image
-FROM ubuntu:latest
-
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
-
-# install dependencies
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends build-essential git cmake libprotobuf-dev libopenblas-dev protobuf-compiler python-dev python-pip swig wget\
-    && apt-get clean && apt-get autoremove && apt-get autoclean \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip --no-cache-dir install -U pip numpy setuptools
-
-# set environment
-ENV CPLUS_INCLUDE_PATH /usr/local/lib/python2.7/dist-packages/numpy/core/include:${CPLUS_INCLUDE_PATH}
-
-# download singa source
-RUN git clone https://github.com/apache/incubator-singa.git
-
-# compile singa and install pysinga
-RUN cd incubator-singa && mkdir build && cd build && cmake .. && make && cd python && pip install . && cd ../../ && rm -rf build
diff --git a/tool/docker/devel/centos6/Dockerfile b/tool/docker/devel/centos6/Dockerfile
new file mode 100644
index 0000000..530b9e9
--- /dev/null
+++ b/tool/docker/devel/centos6/Dockerfile
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO(wangwei) install other libs and test. It has bugs now.
+
+# Change tags to build with different cuda/cudnn versions:
+FROM nvidia/cuda:7.5-cudnn5-devel-centos6
+
+# install dependencies
+RUN yum -y update \
+    && yum install -y \
+    git \
+    wget \
+    openssh-server \
+    cmake \
+    && yum clean all \
+    # download singa source
+    # RUN git clone https://github.com/apache/singa.git
+    # config ssh service
+    && mkdir /var/run/sshd \
+    && ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key \
+    && ssh-keygen -t dsa -f /etc/ssh/ssh_host_dsa_key \
+    && sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config \
+    && sed -ri 's/#UsePAM no/UsePAM no/g' /etc/ssh/sshd_config \
+    && echo 'root:singa' | chpasswd \
+    #RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
+    # SSH login fix. Otherwise user is kicked off after login
+    #RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
+    # dump environment variables into files, so that ssh can see also
+    && env | grep _ >> /etc/environment
+
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/docker/devel/centos6/cuda10/Dockerfile.manylinux2014 b/tool/docker/devel/centos6/cuda10/Dockerfile.manylinux2014
new file mode 100644
index 0000000..d3aeaff
--- /dev/null
+++ b/tool/docker/devel/centos6/cuda10/Dockerfile.manylinux2014
@@ -0,0 +1,132 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The latest tag uses gcc 9, which is too high for nvcc.
+# The following tag uses gcc 8, which works with nvcc.
+FROM quay.io/pypa/manylinux2014_x86_64:2020-05-01-b37d76b
+
+# install dependencies
+RUN yum install -y \
+    protobuf-devel \
+    openblas-devel \
+    # git \
+    wget \
+    openssh-server \
+    pcre-devel \
+    cmake \
+    && yum clean all \
+    && rm -rf /var/cache/yum/*
+
+# install glog into /usr/local/include/glog /usr/local/lib
+RUN wget https://github.com/google/glog/archive/v0.3.5.tar.gz -P /tmp/\
+    && tar zxf /tmp/v0.3.5.tar.gz  -C /tmp/ \
+    && cd /tmp/glog-0.3.5 \
+    && ./configure && make && make install && cd .. && rm -rf glog-0.3.5
+
+# install dnnl into /usr/local/include  /usr/local/lib
+RUN wget https://github.com/intel/mkl-dnn/releases/download/v1.1/dnnl_lnx_1.1.0_cpu_gomp.tgz -P /tmp/ \
+    && tar zxf /tmp/dnnl_lnx_1.1.0_cpu_gomp.tgz  -C /tmp/ \
+    && cp -r -H /tmp/dnnl_lnx_1.1.0_cpu_gomp/lib/lib* /usr/local/lib/ \
+    && cp -r -H /tmp/dnnl_lnx_1.1.0_cpu_gomp/include/* /usr/local/include/  \
+    && rm -rf /tmp/dnnl_lnx_1.1.0_cpu_gomp
+# ENV DNNL_ROOT /root/dnnl_lnx_1.1.0_cpu_gomp/
+
+# install swig into /usr/local/bin
+RUN wget http://prdownloads.sourceforge.net/swig/swig-3.0.12.tar.gz -P /tmp/ \
+    && tar zxf /tmp/swig-3.0.12.tar.gz -C /tmp/ \
+    && cd /tmp/swig-3.0.12 && ./configure && make && make install && cd .. && rm -rf swig-3.0.12
+
+# numpy and python versions should be matched; 
+# twine works for all python versions
+RUN /opt/python/cp36-cp36m/bin/pip install numpy twine
+RUN /opt/python/cp37-cp37m/bin/pip install numpy 
+RUN /opt/python/cp38-cp38/bin/pip install numpy
+
+# install cuda and cudnn
+# Refer to https://gitlab.com/nvidia/container-images/cuda/-/tree/master/dist for other cuda and cudnn versions
+# 10.2-base-centos7
+RUN NVIDIA_GPGKEY_SUM=d1be581509378368edeec8c1eb2958702feedf3bc3d17011adbf24efacce4ab5 && \
+    curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/rhel7/x86_64/7fa2af80.pub | sed '/^Version/d' > /etc/pki/rpm-gpg/RPM-GPG-KEY-NVIDIA && \
+    echo "$NVIDIA_GPGKEY_SUM  /etc/pki/rpm-gpg/RPM-GPG-KEY-NVIDIA" | sha256sum -c --strict  -
+COPY cuda.repo /etc/yum.repos.d/cuda.repo
+ENV CUDA_VERSION 10.2.89
+ENV CUDA_PKG_VERSION 10-2-$CUDA_VERSION-1
+# For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
+RUN yum install -y \
+    cuda-cudart-$CUDA_PKG_VERSION \
+    cuda-compat-10-2 \
+    && ln -s cuda-10.2 /usr/local/cuda && \
+    rm -rf /var/cache/yum/*
+
+# nvidia-docker 1.0
+RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
+    echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf
+ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}
+ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64:$LD_LIBRARY_PATH
+
+# nvidia-container-runtime
+ENV NVIDIA_VISIBLE_DEVICES all
+ENV NVIDIA_DRIVER_CAPABILITIES compute,utility
+ENV NVIDIA_REQUIRE_CUDA "cuda>=10.2 brand=tesla,driver>=396,driver<397 brand=tesla,driver>=410,driver<411 brand=tesla,driver>=418,driver<419 brand=tesla,driver>=440,driver<441"
+
+# 10.2-runtime-centos7
+RUN yum install -y \
+    cuda-libraries-$CUDA_PKG_VERSION \
+    cuda-nvtx-$CUDA_PKG_VERSION \
+    libcublas10-10.2.2.89-1 \
+    && rm -rf /var/cache/yum/*
+
+
+# 10.2-devel-centos7
+RUN yum install -y \
+    cuda-nvml-dev-$CUDA_PKG_VERSION \
+    cuda-command-line-tools-$CUDA_PKG_VERSION \
+    cuda-cudart-dev-$CUDA_PKG_VERSION \
+    cuda-libraries-dev-$CUDA_PKG_VERSION \
+    cuda-minimal-build-$CUDA_PKG_VERSION \
+    && rm -rf /var/cache/yum/*
+RUN yum install -y xz && NCCL_DOWNLOAD_SUM=a9ee790c3fc64b0ecbb00db92eddc1525552eda10a8656ff4b7380f66d81bda1 && \
+    curl -fsSL https://developer.download.nvidia.com/compute/redist/nccl/v2.7/nccl_2.7.3-1+cuda10.2_x86_64.txz -O && \
+    echo "$NCCL_DOWNLOAD_SUM  nccl_2.7.3-1+cuda10.2_x86_64.txz" | sha256sum -c - && \
+    unxz nccl_2.7.3-1+cuda10.2_x86_64.txz && \
+    tar --no-same-owner --keep-old-files --no-overwrite-dir -xvf  nccl_2.7.3-1+cuda10.2_x86_64.tar -C /usr/local/cuda/include/ --strip-components=2 --wildcards '*/include/*' && \
+    tar --no-same-owner --keep-old-files --no-overwrite-dir -xvf  nccl_2.7.3-1+cuda10.2_x86_64.tar -C /usr/local/cuda/lib64/ --strip-components=2 --wildcards '*/lib/libnccl.so' && \
+    rm -f nccl_2.7.3-1+cuda10.2_x86_64.tar && \
+    ldconfig
+ENV LIBRARY_PATH /usr/local/cuda/lib64/stubs
+
+# 10.2-cudnn7-devel-centos7
+ENV CUDNN_VERSION 7.6.5.32
+# cuDNN license: https://developer.nvidia.com/cudnn/license_agreement
+RUN CUDNN_DOWNLOAD_SUM=600267f2caaed2fd58eb214ba669d8ea35f396a7d19b94822e6b36f9f7088c20 && \
+    curl -fsSL http://developer.download.nvidia.com/compute/redist/cudnn/v7.6.5/cudnn-10.2-linux-x64-v7.6.5.32.tgz -O && \
+    echo "$CUDNN_DOWNLOAD_SUM  cudnn-10.2-linux-x64-v7.6.5.32.tgz" | sha256sum -c - && \
+    tar --no-same-owner -xzf cudnn-10.2-linux-x64-v7.6.5.32.tgz -C /usr/local && \
+    rm cudnn-10.2-linux-x64-v7.6.5.32.tgz && \
+    ldconfig
+
+# install cnmem into /usr/local/include and /usr/local/lib
+RUN git clone https://github.com/NVIDIA/cnmem.git cnmem \
+    && cd cnmem && mkdir build && cd build && cmake .. && make && make install && cd ../.. && rm -rf cnmem
+
+# install mpich into /usr/local/include and /usr/local/lib
+RUN wget http://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz -P $HOME \
+    && cd $HOME \
+    && tar xfz mpich-3.3.2.tar.gz \
+    && cd mpich-3.3.2 \
+    && ./configure --prefix=/usr/local --disable-fortran \
+    && make && make install  && cd .. && rm -rf mpich-3.3.2
\ No newline at end of file
diff --git a/tool/docker/devel/centos6/cuda10/cuda.repo b/tool/docker/devel/centos6/cuda10/cuda.repo
new file mode 100644
index 0000000..990ac25
--- /dev/null
+++ b/tool/docker/devel/centos6/cuda10/cuda.repo
@@ -0,0 +1,6 @@
+[cuda]
+name=cuda
+baseurl=https://developer.download.nvidia.com/compute/cuda/repos/rhel7/x86_64
+enabled=1
+gpgcheck=1
+gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-NVIDIA
\ No newline at end of file
diff --git a/tool/docker/devel/cuda/Dockerfile b/tool/docker/devel/cuda/Dockerfile
deleted file mode 100644
index d4b84b2..0000000
--- a/tool/docker/devel/cuda/Dockerfile
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base unbuntu 16.04 image from nvidia/cuda
-# Change tags to build with different cuda/cudnn versions:
-#   FROM nvidia/cuda:8.0-cudnn5-devel-ubuntu16.04
-FROM nvidia/cuda:8.0-cudnn5-devel-ubuntu16.04
-
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
-
-# install dependencies
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends git g++ cmake libprotobuf-dev libopenblas-dev protobuf-compiler python-dev python-pip swig wget\
-    && apt-get clean && apt-get autoremove && apt-get autoclean \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip --no-cache-dir install -U pip numpy setuptools
-
-# set environment
-ENV CPLUS_INCLUDE_PATH /usr/local/lib/python2.7/dist-packages/numpy/core/include:${CPLUS_INCLUDE_PATH}
-ENV CMAKE_INCLUDE_PATH /usr/local/cuda/include:${CMAKE_INCLUDE_PATH}
-ENV CMAKE_LIBRARY_PATH /usr/local/cuda/lib64:${CMAKE_LIBRARY_PATH}
-
-# download singa source
-RUN git clone https://github.com/apache/incubator-singa.git
-
-RUN cd incubator-singa && mkdir build && cd build && cmake -DUSE_CUDA=ON .. && make && cd python && pip install . && cd ../../ && rm -rf build
diff --git a/tool/docker/devel/ubuntu/cpu/Dockerfile b/tool/docker/devel/ubuntu/cpu/Dockerfile
new file mode 100644
index 0000000..5bf6440
--- /dev/null
+++ b/tool/docker/devel/ubuntu/cpu/Dockerfile
@@ -0,0 +1,74 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+FROM ubuntu:18.04
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+    git \
+    build-essential \
+    autoconf \
+    libtool \
+    libprotobuf-dev \
+    libopenblas-dev \
+    libpcre3-dev \
+    protobuf-compiler \
+    wget \
+    swig \
+    openssh-server \
+    python3-dev \
+    python3-pip \
+    python3-setuptools \
+    libgoogle-glog-dev \
+    cmake \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    && pip3 install -U --no-cache \
+    wheel \
+    numpy \
+    setuptools \
+    protobuf \
+    Deprecated \
+    future
+
+# install dnnl
+RUN wget https://github.com/intel/mkl-dnn/releases/download/v1.1/dnnl_lnx_1.1.0_cpu_gomp.tgz -P /tmp/ \
+    && tar zxf /tmp/dnnl_lnx_1.1.0_cpu_gomp.tgz -C /root
+ENV DNNL_ROOT /root/dnnl_lnx_1.1.0_cpu_gomp/
+
+# config ssh service
+RUN mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    && sed -ri 's/^#?PermitRootLogin\s+.*/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    && sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config \
+    && mkdir /root/.ssh
+
+# build singa
+RUN git clone https://github.com/apache/singa.git $HOME/singa \
+    && cd $HOME/singa \
+    && mkdir build && cd build \
+    && cmake -DENABLE_TEST=ON -DUSE_PYTHON3=ON -DUSE_DNNL=ON ..
+RUN cd $HOME/singa/build && make && make install
+
+WORKDIR $HOME/singa
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
+
diff --git a/tool/docker/devel/ubuntu/cuda10/Dockerfile b/tool/docker/devel/ubuntu/cuda10/Dockerfile
new file mode 100644
index 0000000..5560f8a
--- /dev/null
+++ b/tool/docker/devel/ubuntu/cuda10/Dockerfile
@@ -0,0 +1,111 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# 
+# Change tags to build with different cuda/cudnn versions:
+FROM nvidia/cuda:10.0-devel-ubuntu18.04
+
+ENV CUDNN_VERSION 7.6.5.32
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    libcudnn7=$CUDNN_VERSION-1+cuda10.0 \
+    libcudnn7-dev=$CUDNN_VERSION-1+cuda10.0 && \
+    apt-mark hold libcudnn7 && \
+    rm -rf /var/lib/apt/lists/*
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+    git \
+    build-essential \
+    autoconf \
+    libtool \
+    libprotobuf-dev \
+    libopenblas-dev \
+    libpcre3-dev \
+    protobuf-compiler \
+    wget \
+    swig \
+    openssh-server \
+    python3-dev \
+    python3-pip \
+    python3-setuptools \
+    libgoogle-glog-dev \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    && pip3 install -U --no-cache \
+    tqdm \
+    wheel \
+    numpy \
+    setuptools \
+    protobuf \
+    Deprecated \
+    future \
+    onnx==1.6.0
+
+# install cmake to correctly find CUDA 10
+RUN wget https://github.com/Kitware/CMake/releases/download/v3.12.2/cmake-3.12.2.tar.gz -P /tmp/ \
+    && tar zxf /tmp/cmake-3.12.2.tar.gz -C /tmp/ \
+    && cd /tmp/cmake-3.12.2/ && ./bootstrap && make -j4 && make install
+
+# install dnnl
+RUN wget https://github.com/intel/mkl-dnn/releases/download/v1.1/dnnl_lnx_1.1.0_cpu_gomp.tgz -P /tmp/ \
+    && tar zxf /tmp/dnnl_lnx_1.1.0_cpu_gomp.tgz -C /root
+ENV DNNL_ROOT /root/dnnl_lnx_1.1.0_cpu_gomp/
+
+# config ssh service
+RUN mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    && sed -ri 's/^#?PermitRootLogin\s+.*/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    && sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config \
+    && mkdir /root/.ssh
+
+# build nccl
+RUN git clone https://github.com/NVIDIA/nccl.git $HOME/nccl \
+    && cd $HOME/nccl \
+    && git checkout v2.4.8-1 \
+    && make -j src.build \
+    && apt-get update \
+    && apt install build-essential devscripts debhelper fakeroot -y \
+    && make pkg.debian.build \
+    && dpkg -i build/pkg/deb/libnccl2_2.4.8-1+cuda10.0_amd64.deb \
+    && dpkg -i build/pkg/deb/libnccl-dev_2.4.8-1+cuda10.0_amd64.deb
+
+# build mpiexec
+RUN wget http://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz -P $HOME \
+    && cd $HOME \
+    && tar xfz mpich-3.3.2.tar.gz \
+    && cd mpich-3.3.2 \
+    && ./configure --prefix=$HOME/mpich-3.3.2/build --disable-fortran 2>&1 | tee c.txt \
+    && make 2>&1 | tee m.txt \
+    && make install 2>&1 | tee mi.txt
+ENV PATH=/root/mpich-3.3.2/build/bin:$PATH
+
+# build singa
+RUN git clone https://github.com/apache/singa.git $HOME/singa \
+    && cd $HOME/singa \
+    && git checkout dev \
+    && mkdir build && cd build \
+    && /usr/local/bin/cmake -DENABLE_TEST=ON -DUSE_CUDA=ON -DUSE_PYTHON3=ON -DUSE_DNNL=ON -DUSE_DIST=ON ..
+RUN cd $HOME/singa/build && make && make install
+ENV PYTHONPATH="/root/singa/build/python/"
+
+WORKDIR /root/singa
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/docker/devel/ubuntu/cuda9/Dockerfile b/tool/docker/devel/ubuntu/cuda9/Dockerfile
new file mode 100644
index 0000000..50e1279
--- /dev/null
+++ b/tool/docker/devel/ubuntu/cuda9/Dockerfile
@@ -0,0 +1,114 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# 
+# Change tags to build with different cuda/cudnn versions:
+FROM nvidia/cuda:9.0-devel-ubuntu16.04
+
+ENV CUDNN_VERSION 7.6.5.32
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    libcudnn7=$CUDNN_VERSION-1+cuda9.0 \
+    libcudnn7-dev=$CUDNN_VERSION-1+cuda9.0 && \
+    apt-mark hold libcudnn7 && \
+    rm -rf /var/lib/apt/lists/*
+
+# install dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+    git \
+    build-essential \
+    autoconf \
+    libtool \
+    libprotobuf-dev \
+    libopenblas-dev \
+    libpcre3-dev \
+    protobuf-compiler \
+    wget \
+    openssh-server \
+    python3-dev \
+    python3-pip \
+    python3-setuptools \
+    libgoogle-glog-dev \
+    cmake \
+    && apt-get clean \
+    && apt-get autoremove \
+    && apt-get autoclean \
+    && rm -rf /var/lib/apt/lists/* \
+    && pip3 install -U --no-cache \
+    tqdm \
+    wheel \
+    numpy \
+    setuptools \
+    protobuf \
+    Deprecated \
+    future \
+    onnx==1.6.0
+
+# install swig > 3.0.10
+RUN wget http://prdownloads.sourceforge.net/swig/swig-3.0.10.tar.gz -P /tmp/ \
+    && tar zxf /tmp/swig-3.0.10.tar.gz -C /tmp/ \
+    && cd /tmp/swig-3.0.10 && ./configure && make && make install
+
+# install dnnl
+RUN wget https://github.com/intel/mkl-dnn/releases/download/v1.1/dnnl_lnx_1.1.0_cpu_gomp.tgz -P /tmp/ \
+    && tar zxf /tmp/dnnl_lnx_1.1.0_cpu_gomp.tgz -C /root
+ENV DNNL_ROOT /root/dnnl_lnx_1.1.0_cpu_gomp/
+
+# config ssh service
+RUN mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    # ubuntu 16.04 defaults to 'PermitRootLogin prohibit-password'
+    && sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    # SSH login fix. Otherwise user is kicked off after login
+    && sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd \
+    # dump environment variables into a file so that ssh sessions can also see them
+    && env | grep _ >> /etc/environment
+
+# build nccl
+RUN git clone https://github.com/NVIDIA/nccl.git $HOME/nccl \
+    && cd $HOME/nccl \
+    && git checkout v2.4.8-1 \
+    && make -j src.build \
+    && apt-get update \
+    && apt install build-essential devscripts debhelper fakeroot -y \
+    && make pkg.debian.build \
+    && dpkg -i build/pkg/deb/libnccl2_2.4.8-1+cuda9.0_amd64.deb \
+    && dpkg -i build/pkg/deb/libnccl-dev_2.4.8-1+cuda9.0_amd64.deb
+
+# build mpiexec
+RUN wget http://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz -P $HOME \
+    && cd $HOME \
+    && tar xfz mpich-3.3.2.tar.gz \
+    && cd mpich-3.3.2 \
+    && ./configure --prefix=$HOME/mpich-3.3.2/build --disable-fortran 2>&1 | tee c.txt \
+    && make 2>&1 | tee m.txt \
+    && make install 2>&1 | tee mi.txt
+ENV PATH=/root/mpich-3.3.2/build/bin:$PATH
+
+# build singa
+RUN git clone https://github.com/apache/singa.git $HOME/singa \
+    && cd $HOME/singa \
+    && git checkout dev \
+    && mkdir build && cd build \
+    && cmake -DENABLE_TEST=ON -DUSE_CUDA=ON -DUSE_PYTHON3=ON -DUSE_DNNL=ON -DUSE_DIST=ON ..
+RUN cd $HOME/singa/build && make && make install
+ENV PYTHONPATH="/root/singa/build/python/"
+
+WORKDIR /root/singa
+EXPOSE 22
+
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/docker/runtime/Dockerfile b/tool/docker/runtime/Dockerfile
deleted file mode 100644
index 40c78b5..0000000
--- a/tool/docker/runtime/Dockerfile
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base unbuntu 16.04 image
-FROM ubuntu:latest
-
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
-
-# install dependencies
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends git python python-pip \
-    && apt-get clean && apt-get autoremove && apt-get autoclean \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip --no-cache-dir install -U pip setuptools
-
-
-# install pysinga TODO(wangwei) install debian package
-RUN pip install --upgrade http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/wheel/linux/latest/ubuntu16.04-cpp/singa-1.1.0-py2-none-any.whl
diff --git a/tool/jenkins/docker/runtime/Dockerfile b/tool/docker/runtime/cpu/Dockerfile
similarity index 66%
rename from tool/jenkins/docker/runtime/Dockerfile
rename to tool/docker/runtime/cpu/Dockerfile
index 9f4aea3..d418e62 100644
--- a/tool/jenkins/docker/runtime/Dockerfile
+++ b/tool/docker/runtime/cpu/Dockerfile
@@ -17,34 +17,38 @@
 # limitations under the License.
 #
 # Base ubuntu 18.04 image
-FROM ubuntu:latest
+FROM ubuntu:18.04
 
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
+MAINTAINER singa dev@singa.apache.org
 
 # install dependencies
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends subversion git wget openssh-server bzip2\
+    && apt-get install -y --no-install-recommends subversion git wget openssh-server bzip2 \
     && apt-get clean && apt-get autoremove && apt-get autoclean \
     && rm -rf /var/lib/apt/lists/*
 
-RUN wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
+# install conda
+RUN wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh -O miniconda.sh
 RUN bash miniconda.sh -b -p /root/miniconda
 ENV PATH /root/miniconda/bin:${PATH}
 RUN conda config --set always_yes yes --set changeps1 no
-RUN conda update -q conda
+RUN conda config --add channels conda-forge
+RUN conda config --add channels nusdbsystem
+RUN conda install -c nusdbsystem singa-cpu
 RUN conda install -c conda-forge sphinx
 RUN conda install -c conda-forge sphinx_rtd_theme
-RUN conda install -c clinicalgraphics recommonmark=0.2.0
-RUN conda install -c nusdbsystem singa
+RUN conda install -c conda-forge recommonmark
 
-RUN mkdir /var/run/sshd
-RUN echo 'root:singa' | chpasswd
-RUN sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config
-# SSH login fix. Otherwise user is kicked off after login
-RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
 
-# dump environment variables into files, so that ssh can see also
-# RUN env | grep _ >> /etc/environment
+# config ssh service
+RUN mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    && sed -ri 's/^#?PermitRootLogin\s+.*/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    && sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config \
+    && mkdir /root/.ssh
+
+# add the conda bin path for both login and non-login shells
+RUN echo PATH=$PATH:/root/miniconda/bin >> /etc/profile
 
 EXPOSE 22
 
diff --git a/tool/docker/runtime/cuda/Dockerfile b/tool/docker/runtime/cuda/Dockerfile
deleted file mode 100644
index baf9bfd..0000000
--- a/tool/docker/runtime/cuda/Dockerfile
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base unbuntu 16.04, cuda8.0, cudnn5
-FROM nvidia/cuda:8.0-cudnn5-runtime-ubuntu16.04
-
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
-
-# install dependencies
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends git python python-pip \
-    && apt-get clean && apt-get autoremove && apt-get autoclean \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip --no-cache-dir install -U pip setuptools
-
-
-# install pysinga TODO(wangwei) install debian package
-RUN pip install --upgrade http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/wheel/linux/latest/ubuntu16.04-cuda8.0-cudnn5/singa-1.1.0-py2-none-any.whl
diff --git a/tool/jenkins/docker/runtime/Dockerfile b/tool/docker/runtime/gpu/Dockerfile
similarity index 64%
copy from tool/jenkins/docker/runtime/Dockerfile
copy to tool/docker/runtime/gpu/Dockerfile
index 9f4aea3..009efc3 100644
--- a/tool/jenkins/docker/runtime/Dockerfile
+++ b/tool/docker/runtime/gpu/Dockerfile
@@ -17,34 +17,39 @@
 # limitations under the License.
 #
 # Base ubuntu 16.04 image
-FROM ubuntu:latest
+FROM nvidia/cuda:9.0-devel-ubuntu16.04
 
-MAINTAINER incubator-singa dev@singa.incubator.apache.org
+MAINTAINER singa dev@singa.apache.org
 
 # install dependencies
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends subversion git wget openssh-server bzip2\
+    && apt-get install -y --no-install-recommends subversion git wget openssh-server bzip2 \
     && apt-get clean && apt-get autoremove && apt-get autoclean \
     && rm -rf /var/lib/apt/lists/*
 
-RUN wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
+# install conda
+RUN wget --no-check-certificate https://repo.continuum.io/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh -O miniconda.sh
 RUN bash miniconda.sh -b -p /root/miniconda
 ENV PATH /root/miniconda/bin:${PATH}
 RUN conda config --set always_yes yes --set changeps1 no
-RUN conda update -q conda
+RUN conda config --add channels conda-forge
+RUN conda config --add channels nusdbsystem
+RUN conda install -c conda-forge -c nusdbsystem singa-gpu
 RUN conda install -c conda-forge sphinx
 RUN conda install -c conda-forge sphinx_rtd_theme
-RUN conda install -c clinicalgraphics recommonmark=0.2.0
-RUN conda install -c nusdbsystem singa
+RUN conda install -c conda-forge recommonmark
 
-RUN mkdir /var/run/sshd
-RUN echo 'root:singa' | chpasswd
-RUN sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config
-# SSH login fix. Otherwise user is kicked off after login
-RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
 
-# dump environment variables into files, so that ssh can see also
-# RUN env | grep _ >> /etc/environment
+# config ssh service
+RUN mkdir /var/run/sshd \
+    && echo 'root:singa' | chpasswd \
+    && sed -ri 's/^#?PermitRootLogin\s+.*/PermitRootLogin yes/' /etc/ssh/sshd_config \
+    && sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config \
+    && mkdir /root/.ssh
+
+# add the conda bin path for both login and non-login shells
+RUN echo PATH=$PATH:/root/miniconda/bin >> /etc/profile
+
 
 EXPOSE 22
 
diff --git a/tool/jenkins/README.md b/tool/jenkins/README.md
index b734b8f..1bb737c 100644
--- a/tool/jenkins/README.md
+++ b/tool/jenkins/README.md
@@ -1,41 +1,56 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
 # Jenkins CI Support
 
 ## Introduction
-This documentation is to guide SINGA developers to setup Jenkins service to support continuous integration on GPU systems. After each commit,
-1. SINGA should be compiled and tested automatically under different settings (e.g., OS and hardware).
-2. Convenient binaries should be generated automatically and archived.
+This document guides Singa developers in setting up a Jenkins service for continuous integration of Singa. After each commit,
+1. Singa should be compiled and tested automatically under different settings (e.g., OS, Python version and hardware).
+2. Binary packages should be generated automatically and archived.
 
 Continuous integration for CPU systems is enabled via [Travis](../travis).
+Hence, Jenkins is mainly used for CI on GPUs.
 
 ## Install Jenkins
 [Jenkins Official Wiki](https://wiki.jenkins-ci.org/display/JENKINS/Installing+Jenkins)
 The slave nodes for running different building environments are configured under 'Manage Jenkins'->'Manage nodes'.
 
+Change the Jenkins time zone by executing the following code in 'Manage Jenkins' -> 'Script Console':
+
+    System.setProperty('org.apache.commons.jelly.tags.fmt.timeZone', 'Asia/Singapore')
+
 ## Configure Jenkins for Unit Testing and Binary Package Generation
 Create a multi-configuration project and configure project as follows:
 
 ### Description
-This job automatically pulls latest commits from Apache incubator-singa github repository, then for different environments
+This job automatically pulls the latest commits from the Apache Singa GitHub repository; then, for different environments, it will
 
-* compile and test SINGA on GPUs
-* create Debian GPU packages
-* create anaconda GPU packages
-
-The working nodes (or Docker containers) are configured in Jenkins-Manage Jenkins-Mange Nodes.
-Each node should configure the following environment variable
-1. CUDA_VERSION, e.g., 7.5
-2. CUDNN_VERSION e.g, 5
-3. ANACONDA_UPLOAD_TOKEN
-4. SINGA_NAME=singa-cuda${CUDA_VERSION}-cudnn${CUDNN_VERSION}
-5. OS_VERSION, e.g., ubuntu14.04
-6. SINGA_INCLUDE_PATH and SINGA_LIBRARY_PATH for the cudnn.h and libcudnn.so folder respectively
+* compile and test Singa on GPUs
+* generate conda package of Singa with CUDA enabled
+* invoke the CPU test and packaging on Travis
+* (optional) create Debian GPU packages
 
 ### General
   * Discard old builds - Max # of builds to keep - 50
-  * GitHub project - ``https://github.com/apache/incubator-singa``
+  * GitHub project - ``https://github.com/apache/singa``
 
 ### Source Code Management
-  * Git - Repository URL - ``https://github.com/apache/incubator-singa``
+  * Git - Repository URL - ``https://github.com/apache/singa``
   * Git - Branch Specifier - ``*/master``
 
 ### Build Triggers
@@ -46,28 +61,23 @@
   * Slave - name ``env`` Node/label: tick available nodes
 
 ### Build
+The build scripts can perform the following tasks:
+
   * compile and do unit test on GPU
-    Execute shell - command - ``bash -ex tool/jenkins/test.sh $lang``
-    `$lang` is set in **Configuration Matrix* section
+    Execute shell - command - ``bash -ex tool/jenkins/test.sh``
 
-  * create Debian package
+  * update a mirror GitHub repo with the new commits to trigger Travis (for the CPU test and conda package generation)
+    Execute shell - command - ``git push https://<username:token>@github.com/nusdbsystem/singa.git -f``
+
+  * create the conda package and upload it to Anaconda Cloud
+    Execute shell - command
+
+        /root/miniconda/bin/conda-build tool/conda/singa
+        /root/miniconda/bin/anaconda -t <ANACONDA_UPLOAD_TOKEN> upload -u nusdbsystem -l main /root/miniconda/linux-64/singa-*.so.*.tar.bz2 --force
+
+  * (optional) create Debian package
     Execute shell - command - ``bash -ex tool/debian/build.sh --python --$lang``
 
-  * create conda package
-    Execute shell - command -
-
-        git push https://username:token@github.com/nusdbsystem/incubator-singa.git -f
-        bash -ex tool/jenkins/jenkins_test.sh $lang
-        export CONDA_BLD_PATH=/root/conda-bld-$BUILD_NUMBER
-        mkdir $CONDA_BLD_PATH
-        /root/miniconda/bin/conda-build tool/conda
-        /root/miniconda/bin/anaconda -t ANACONDA_UPLOAD_TOKEN upload -u nusdbsystem -l main $CONDA_BLD_PATH/linux-64/singa-*.tar.bz2 --force
-
-
-    It first pushes to a mirror site to invoke travis-ci for CPU package creation;
-    Then it compiles and runs unit tests;
-    Finally it creates the conda package for GPU and upload it.
-
 ### Post-build Actions
   * Publish JUnit test result report - Test report XMLs - ``**/gtest.xml, **/unittest.xml``
   * (optional) Archive the artifacts - ``build/debian/**.deb``
@@ -87,43 +97,71 @@
             debian/32/84d56b7/ubuntu14.04-cpp/singa-1.0.1.deb
             debian/32/84d56b7/ubuntu14.04-cuda8.0-cudnn5/singa-1.0.1.deb
 
-### Docker Images
-We provide in a number of singa docker [images](./docker) for Jenkins to use as slaves.
+### Jenkins Nodes
+
+We provide different Singa [Dockerfiles](../docker/README.md) for Jenkins to use as working nodes.
+
 To run the docker images,
 
-    nvidia-docker run --name <jenkins-slaveXX> -d <Image ID>
+    nvidia-docker run --name <node name> -P -d <Image ID>
 
-## Configure Jenkins for SINGA Website Updates
+To add the container to a network for easy access:
+
+    docker network create <network name>
+    docker network connect <network name> <node name>
+
+After connecting both the Jenkins and node containers to the same network, we can ssh to the node from the Jenkins container:
+
+
+    # inside jenkins container
+    ssh root@<node name>
+
+You need to execute the above command manually for the first ssh login.
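+
+To avoid typing the password on every connection, you could cache an ssh key
+first (a sketch; it assumes the node's root password is `singa`, as set in the
+Dockerfiles above):
+
+    # inside the jenkins container
+    ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa
+    ssh-copy-id root@<node name>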
+
+On the Jenkins node configuration page, the container name is used to fill the `Host` field.
+Note that an Oracle username and password are required for Jenkins to launch the node.
+
+The working nodes (or Docker containers) are configured in Jenkins under 'Manage Jenkins' -> 'Manage Nodes'.
+Each node should set the following environment variable:
+
+    export CUDA=<cuda version, e.g., 9.0>
+
+[Dockerfiles](../conda/docker) are provided to create the working nodes.
+
+## Configure Jenkins for Singa Website Updates
 
 ### Description and Configuration
 
 This job is triggered upon any changes to the files of the `doc/` folder.
 It does the following tasks,
 
-1. installs the latest PySINGA
+1. install the latest Singa
 2. pull the latest source code
 3. generate the html files for the documentation
-4. update the SINGA website
+4. update the Singa website
 
 The Jenkins job configuration is similar as above except the following fields,
 
 * Source Code Management - Git - Additional Behaviors - Include Region `doc/*`
-* Build - Execute Shell - Command `bash -ex tool/jenkins/jenkins_doc.sh`
+* Build - Execute Shell - Command
+
+      bash -ex tool/jenkins/gen_doc.sh
+
 * No `Post-build Actions`
 
-### Docker Images
+### Jenkins Node
 
-The Docker image for the Jenkins slave node is at `docker/ubuntu16.04/runtime/Dockerfile`.
-To build the docker image,
+The docker images used for testing can also be used for document generation.
+We have to manually configure a few things inside the docker container.
+First, we start the container:
 
-    # under the docker/ubuntu16.04/runtime/ folder
-    $ docker built -t singa:doc .
-
-To start the slave node
-
-    $ docker run --name singa-doc -d singa:doc
+    $ docker run --name singa-doc -d <docker image>
+    # docker network connect jenkins singa-doc
     $ docker exec -it singa-doc /bin/bash
-    $ svn co https://svn.apache.org/repos/asf/incubator/singa/site/trunk
+
+Next, we do the first commit to the svn repo.
+
+    $ svn co https://svn.apache.org/repos/asf/singa/site/trunk
     # update ~/.subversion/config to set 'store-password=yes'
     # to set password free commit, we have to do a manual commit at first.
     # change any file (add spaces) inside trunk/ to commit a message
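+
+For illustration, that first manual commit could look like this (a sketch; which
+file is touched does not matter):
+
+    $ cd trunk
+    $ echo " " >> index.html     # any trivial change
+    $ svn commit -m "cache the svn password"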
diff --git a/tool/jenkins/docker/devel/centos6/Dockerfile b/tool/jenkins/docker/devel/centos6/Dockerfile
deleted file mode 100644
index 61b30f6..0000000
--- a/tool/jenkins/docker/devel/centos6/Dockerfile
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base unbuntu 14.04 image from nvidia/cuda
-# Change tags to build with different cuda/cudnn versions:
-#   FROM nvidia/cuda:8.0-cudnn5-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.5-cudnn5-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.5-cudnn4-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.0-cudnn4-devel-ubuntu14.04
-FROM nvidia/cuda:7.5-cudnn5-devel-centos6
-
-# install dependencies
-RUN yum -y update && yum -y install git wget openssh-server cmake
-
-
-# install conda, conda-build and anaconda-client
-RUN wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
-RUN bash miniconda.sh -b -p /root/miniconda
-ENV PATH /root/miniconda/bin:${PATH}
-RUN /root/miniconda/bin/conda config --set always_yes yes --set changeps1 no
-RUN /root/miniconda/bin/conda update -q conda
-RUN /root/miniconda/bin/conda install conda-build
-RUN /root/miniconda/bin/conda install anaconda-client
-
-# set environment
-ENV CPLUS_INCLUDE_PATH /usr/local/lib/python2.7/dist-packages/numpy/core/include:${CPLUS_INCLUDE_PATH}
-# ENV CMAKE_INCLUDE_PATH /usr/local/cuda/include:${CMAKE_INCLUDE_PATH}
-# ENV CMAKE_LIBRARY_PATH /usr/local/cuda/lib64:${CMAKE_LIBRARY_PATH}
-
-# download singa source
-# RUN git clone https://github.com/apache/incubator-singa.git
-
-# config ssh service
-RUN mkdir /var/run/sshd
-RUN ssh-keygen -t rsa -f /etc/ssh/ssh_host_rsa_key
-RUN ssh-keygen -t dsa -f /etc/ssh/ssh_host_dsa_key
-
-RUN sed -ri 's/UsePAM yes/#UsePAM yes/g' /etc/ssh/sshd_config
-RUN sed -ri 's/#UsePAM no/UsePAM no/g' /etc/ssh/sshd_config
-RUN echo 'root:singa' | chpasswd
-
-#RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
-# SSH login fix. Otherwise user is kicked off after login
-#RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
-
-# dump environment variables into files, so that ssh can see also
-RUN env | grep _ >> /etc/environment
-
-EXPOSE 22
-
-CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/jenkins/docker/devel/ubuntu14.04/Dockerfile b/tool/jenkins/docker/devel/ubuntu14.04/Dockerfile
deleted file mode 100644
index f907e07..0000000
--- a/tool/jenkins/docker/devel/ubuntu14.04/Dockerfile
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Base unbuntu 14.04 image from nvidia/cuda
-# Change tags to build with different cuda/cudnn versions:
-#   FROM nvidia/cuda:8.0-cudnn5-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.5-cudnn5-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.5-cudnn4-devel-ubuntu14.04
-#   FROM nvidia/cuda:7.0-cudnn4-devel-ubuntu14.04
-FROM nvidia/cuda:7.5-cudnn5-devel-ubuntu14.04
-
-# install dependencies
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends git build-essential autoconf libtool cmake libprotobuf-dev libopenblas-dev libpcre3-dev python-dev python-pip protobuf-compiler wget openssh-server \
-    && apt-get clean && apt-get autoremove && apt-get autoclean \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip install -U pip wheel numpy setuptools unittest-xml-reporting protobuf
-
-
-# install swig 3.0
-RUN wget http://prdownloads.sourceforge.net/swig/swig-3.0.10.tar.gz && \
-    tar zxf swig-3.0.10.tar.gz && cd swig-3.0.10 && \
-    ./configure && make && make install
-
-# install conda, conda-build and anaconda-client
-RUN wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
-RUN bash miniconda.sh -b -p /root/miniconda
-RUN /root/miniconda/bin/conda config --set always_yes yes --set changeps1 no
-RUN /root/miniconda/bin/conda update -q conda
-RUN /root/miniconda/bin/conda install conda-build
-RUN /root/miniconda/bin/conda install anaconda-client
-ENV PATH /root/miniconda/bin:${PATH}
-
-# set environment
-ENV CPLUS_INCLUDE_PATH /usr/local/lib/python2.7/dist-packages/numpy/core/include:${CPLUS_INCLUDE_PATH}
-# ENV CMAKE_INCLUDE_PATH /usr/local/cuda/include:${CMAKE_INCLUDE_PATH}
-# ENV CMAKE_LIBRARY_PATH /usr/local/cuda/lib64:${CMAKE_LIBRARY_PATH}
-
-# download singa source
-# RUN git clone https://github.com/apache/incubator-singa.git
-
-# config ssh service
-RUN mkdir /var/run/sshd
-RUN echo 'root:singa' | chpasswd
-RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
-# SSH login fix. Otherwise user is kicked off after login
-RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
-
-# dump environment variables into files, so that ssh can see also
-RUN env | grep _ >> /etc/environment
-
-EXPOSE 22
-
-CMD ["/usr/sbin/sshd", "-D"]
diff --git a/tool/jenkins/jenkins_doc.sh b/tool/jenkins/gen_doc.sh
similarity index 81%
rename from tool/jenkins/jenkins_doc.sh
rename to tool/jenkins/gen_doc.sh
index e88daf2..c102306 100644
--- a/tool/jenkins/jenkins_doc.sh
+++ b/tool/jenkins/gen_doc.sh
@@ -18,15 +18,17 @@
 # * limitations under the License.
 # */
 
-# This script is used by Jenkins to update SINGA website
 
-echo Install PySINGA, generate HTML files and update SINGA website
-# pip install --upgrade http://www.comp.nus.edu.sg/~dbsystem/singa/assets/file/wheel/linux/latest/ubuntu16.04-cpp/singa-1.1.0-py2-none-any.whl
+# This script is used by Jenkins to update the Singa website.
+# Run this script in runtime docker container.
+
+echo Install PySinga, generate HTML files and update Singa website
+
 conda update singa
 COMMIT=`git rev-parse --short HEAD`
 cd doc
 # generate the html files
-./build.sh html
+bash build.sh html
 # checkout the current website files
 svn co https://svn.apache.org/repos/asf/incubator/singa/site/trunk
 # overwrite the existing files
diff --git a/tool/jenkins/jenkins_test.sh b/tool/jenkins/test.sh
similarity index 64%
rename from tool/jenkins/jenkins_test.sh
rename to tool/jenkins/test.sh
index 0c16e2a..9b223f2 100644
--- a/tool/jenkins/jenkins_test.sh
+++ b/tool/jenkins/test.sh
@@ -18,40 +18,46 @@
 # * limitations under the License.
 # */
 
-# This script is used by Jenkins to compile and test SINGA
+# This script is used by Jenkins to compile and test Singa
 
-echo Compile and test SINGA...
-echo parameters: $1
+echo Compile and test Singa...
 echo workspace: `pwd`
 echo OS version: `cat /etc/issue`
 echo kernel version: `uname -a`
-echo CUDA version: $CUDA_VERSION
-echo CUDNN version: $CUDNN_VERSION
+echo parameters: $1
+echo parameters: $2
 COMMIT=`git rev-parse --short HEAD`
 echo COMMIT HASH: $COMMIT
+
 # set parameters
 CUDA="OFF"
-CUDNN="OFF"
 if [ $1 = "CUDA" ]; then
-  CUDA="ON"
-  CUDNN="ON"
+  CUDA="ON"  
 fi
 
-# setup env
+# TODO(wangwei) test python 3 according to env variable PY3K
+
+#if [ `uname` = "Darwin" ]; then
+#  EXTRA_ARGS="-DPYTHON_LIBRARY=`python-config --prefix`/lib/libpython2.7.dylib -DPYTHON_INCLUDE_DIR=`python-config --prefix`/include/python2.7/"
+#fi
+
 rm -rf build
 mkdir build
-
-if [ `uname` = "Darwin" ]; then
-  EXTRA_ARGS="-DPYTHON_LIBRARY=`python-config --prefix`/lib/libpython2.7.dylib -DPYTHON_INCLUDE_DIR=`python-config --prefix`/include/python2.7/"
-fi
-
-# compile singa c++
+# compile c++ code
 cd build
-cmake -DUSE_CUDA=$CUDA -DENABLE_TEST=ON $EXTRA_ARGS ../
+if [ "$2" = "PYTHON3" ]; then
+    cmake -DUSE_CUDA=$CUDA -DENABLE_TEST=ON -DUSE_PYTHON3=ON $EXTRA_ARGS ../
+else
+    cmake -DUSE_CUDA=$CUDA -DENABLE_TEST=ON $EXTRA_ARGS ../
+fi
 make
 # unit test cpp code
 ./bin/test_singa --gtest_output=xml:./gtest.xml
 # unit test python code
 cd ../test/python
-PYTHONPATH=../../build/python/ python run.py
+if [ "$2" = "PYTHON3" ]; then
+    PYTHONPATH=../../build/python/ python3 run.py
+else
+    PYTHONPATH=../../build/python/ python run.py
+fi
 echo Job finished...
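+
+# Example invocations (a sketch; $1 toggles CUDA, $2 toggles python3):
+#   bash -ex tool/jenkins/test.sh CUDA PYTHON3   # GPU build, tests run with python3
+#   bash -ex tool/jenkins/test.sh CPU            # CPU build, tests run with python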
diff --git a/tool/linting/README.md b/tool/linting/README.md
new file mode 100644
index 0000000..f36f859
--- /dev/null
+++ b/tool/linting/README.md
@@ -0,0 +1,48 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+# Linting check
+
+This guide is for Singa developers, who should sanitize the code
+before merging it into the main branch.
+
+## Linting tools
+
+Install cpplint for C++:
+`pip install cpplint`
+
+Install pylint for Python:
+`pip install pylint`
+
+## Linting a single file
+
+For C++ code:
+`cpplint path/to/file`
+
+For Python code:
+`pylint path/to/file`
+
+## Linting the whole project
+
+Usage:
+
+- `bash tool/linting/py.sh`
+- `bash tool/linting/cpp.sh`
+
+## Configuration
+Currently the configurations are customized to follow the Google style.
+The configuration can be updated in `.pylintrc` and `CPPLINT.cfg`.
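+
+For example, to point the linters at these configurations explicitly (a sketch;
+both tools also pick the files up automatically when run from the repo root):
+
+`pylint --rcfile=.pylintrc path/to/file`
+
+`cpplint path/to/file` (cpplint searches for `CPPLINT.cfg` upwards from the file's directory)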
diff --git a/.travis.yml b/tool/linting/cpp.sh
similarity index 60%
copy from .travis.yml
copy to tool/linting/cpp.sh
index eb9ff69..ec36139 100644
--- a/.travis.yml
+++ b/tool/linting/cpp.sh
@@ -1,3 +1,5 @@
+#!/usr/bin/env bash
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,29 +17,29 @@
 # limitations under the License.
 #
 
-# to use container for building
-sudo: required
-language: cpp
+export PATH="$HOME/miniconda/bin:$PATH"
 
-matrix:
-  include:
-  - os: osx
-    compiler: clang
-    osx_image: xcode8
-  - os: linux
-    dist: trusty
-    compiler: gcc
+# cpplint
+find src/api/ \
+    src/core/ \
+    src/proto/ \
+    src/utils/ \
+    include/singa/core/ \
+    include/singa/utils/ \
+    src/model/operation/ \
+    include/singa/io/communicator.h \
+    src/io/communicator.cc \
+    test/singa/ -iname "*.h" -o -iname "*.cc" | xargs cpplint --quiet --verbose=5
 
-#
-#addons:
-#  apt:
-#    packages:
-#      - libopenblas-dev
-#      - libprotobuf-dev
-#      - protobuf-compiler
+CPPLINTRESULT=$?
 
-install:
-  - travis_wait bash -ex tool/travis/depends.sh
+if [ $CPPLINTRESULT -ne 0 ]; then
+  echo $CPPLINTRESULT
+  echo "cpplint not passed"
+  exit 1
+else
+  echo "cpplint passed"
+fi
 
-script:
-  - bash -ex tool/travis/build.sh
+exit 0
+
diff --git a/python/rafiki/__init__.py b/tool/linting/py.sh
similarity index 72%
copy from python/rafiki/__init__.py
copy to tool/linting/py.sh
index 3aa745b..ac83822 100644
--- a/python/rafiki/__init__.py
+++ b/tool/linting/py.sh
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -16,4 +17,20 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+export PATH="$HOME/miniconda/bin:$PATH"
+
+# pylint
+find python/singa/ \
+    examples/ \
+    test/python/ -iname "*.py" | xargs pylint
+
+LINTRESULT=$?
+if [ $LINTRESULT == 0 ]; then
+  echo "pylint passed"
+else
+  echo "pylint not passed"
+  exit $LINTRESULT
+fi
+
+exit 0
+
diff --git a/tool/opencl/clsrc_to_str.py b/tool/opencl/clsrc_to_str.py
index 24400f7..760e441 100755
--- a/tool/opencl/clsrc_to_str.py
+++ b/tool/opencl/clsrc_to_str.py
@@ -22,19 +22,21 @@
 This file is executed only if .cl files are updated.
 It is executed in the ROOT folder of SINGA source repo.
 '''
-
+from future.utils import iteritems
 
 distribution = "./src/core/tensor/distribution.cl"
 tensormath = "./src/core/tensor/tensor_math_opencl.cl"
 im2col = "./src/model/layer/im2col.cl"
 pooling = "./src/model/layer/pooling.cl"
+files = {"distribution_str": distribution, "tensormath_str": tensormath,
+         "im2col_str": im2col, "pooling_str": pooling}
 
-files = {"distribution_str" : distribution, "tensormath_str" : tensormath, "im2col_str" : im2col, "pooling_str" : pooling}
 
 if __name__ == "__main__":
     fullpath = './src/core/device/opencl_func.h'
     with open(fullpath, 'w') as fout:
-        fout.write("// This file is auto-generated by tool/opencl/clsrc_to_str, do not edit manually.\n")
+        fout.write("// This file is auto-generated by tool/opencl/clsrc_to_str."
+                   " do not edit manually.\n")
         license = """
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -55,9 +57,10 @@
  */
 """
         fout.write(license)
+        fout.write("#ifdef USE_OPENCL\n\n")
         fout.write("#include <string>\n\n")
         fout.write("namespace singa {\n namespace opencl {\n")
-        for name, path in files.items():
+        for name, path in iteritems(files):
             with open(path, 'r') as fin:
                 src = fin.read()
                 src = repr(src)
@@ -67,5 +70,6 @@
                 fout.write("const std::string " + name + " = \"")
                 fout.write(src)
                 fout.write("\";")
-        fout.write("\n } //  namespace opencl \n} //  namespace singa")
+        fout.write("\n } //  namespace opencl \n} //  namespace singa\n\n")
+        fout.write("#endif")
         fout.close()
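+
+# Example invocation (per the module docstring, run from the repo root):
+#   python tool/opencl/clsrc_to_str.py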
diff --git a/python/rafiki/__init__.py b/tool/rat.sh
similarity index 91%
copy from python/rafiki/__init__.py
copy to tool/rat.sh
index 3aa745b..df28f69 100644
--- a/python/rafiki/__init__.py
+++ b/tool/rat.sh
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,4 +15,5 @@
 # limitations under the License.
 #
 
-__version__ = "0.1.1"
+cd java
+mvn apache-rat:check -Pcheck-licence -Drat.basedir=..
diff --git a/tool/release/README.md b/tool/release/README.md
new file mode 100644
index 0000000..024b739
--- /dev/null
+++ b/tool/release/README.md
@@ -0,0 +1,62 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+
+# Auto tagging for release
+
+### Usage: `$ ./tool/release/release.py [-h] [-y] <type>`
+
+### Option:
+  - `[-y]`
+    - Skips the interactive confirmation, so the release can be scripted.
+
+### Argument:
+  - `<type>`
+    Allowed release types are `major`, `minor`, `patch`, `rc`, `stable`.
+    - `major` increments major version by 1.
+    - `minor` increments minor version by 1.
+    - `patch` increments patch version by 1.
+    - `rc` increments rc version by 1.
+    - `stable` removes rc version.
+
+
+### Example:
+
+  1. Pre-releasing major will update from 2.1.1 to 3.0.0-rc0
+
+    run `$ ./tool/release/release.py major`
+
+  2. The release candidate needs revision, from 3.0.0-rc0 to 3.0.0-rc1
+
+    run `$ ./tool/release/release.py rc`
+
+  3. The current version is released as stable, from 3.0.0-rc1 to 3.0.0
+
+    run `$ ./tool/release/release.py stable`
+
+
+## Inside release.py
+
+Internally, the script retrieves the latest git tag via `git describe`,
+increments the version according to semantic versioning,
+and then pushes the new tag to the remote master.
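+
+For illustration, the manual equivalent of what the script automates is roughly
+(a sketch of the same `git` commands used in `release.py`):
+
+    $ git describe --abbrev=0 --tags       # read the current version, e.g. 3.0.0
+    $ git tag -a 3.0.1 -m "new version"    # attach the bumped tag
+    $ git push --tags                      # publish it to the remote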
+
+## Next step
+
+CI will automatically detect the update on master
+and build and release the conda packages.
diff --git a/tool/release/release.py b/tool/release/release.py
new file mode 100755
index 0000000..28a53db
--- /dev/null
+++ b/tool/release/release.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import subprocess
+import re
+import argparse
+import sys
+
+
+def tag_string(tags):
+    if len(tags) == 4:
+        return ".".join(str(i) for i in tags[:3]) + "-rc" + str(tags[3])
+    elif len(tags) == 3:
+        return ".".join(str(i) for i in tags)
+    else:
+        raise ValueError("malformed tags %s" % ".".join(str(i) for i in tags))
+
+
+def main(args):
+    # current version
+    last_tag = subprocess.run(
+        ['git', 'describe', '--abbrev=0', '--tags'],
+        stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
+    # last_tag="4.2.3-rc1"
+    # last_tag="3.2.1"
+    tags = re.split(r"\.|-rc", last_tag)
+    new_tags = [int(i) for i in tags]
+
+    # parse args
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-y',
+        default=False,
+        dest='confirmed',
+        action='store_true',
+        help="In interactive mode, for user to confirm. Could be used in script")
+    parser.add_argument('type',
+                        choices=['major', 'minor', 'patch', 'rc', 'stable'],
+                        help="Release types")
+    args = parser.parse_args(args)
+
+    # check eligibility of arg
+    if args.type in ['major', 'minor', 'patch']:
+        if len(tags) == 4:
+            exit("Current type \"%s\" is not allowed in pre release(version %s)"
+                 % (args.type, last_tag))
+    if args.type in ['stable', 'rc']:
+        if len(tags) == 3:
+            exit(
+                "Current type \"%s\" is not allowed in stable release(version %s)"
+                % (args.type, last_tag))
+
+    # new version
+    if args.type == 'major':
+        new_tags[0] += 1
+        new_tags[1] = 0
+        new_tags[2] = 0
+        new_tags.append(0)
+    elif args.type == 'minor':
+        new_tags[1] += 1
+        new_tags[2] = 0
+        new_tags.append(0)
+    elif args.type == 'patch':
+        new_tags[2] += 1
+        new_tags.append(0)
+    elif args.type == 'stable':
+        new_tags.pop(-1)
+    elif args.type == 'rc':
+        new_tags[3] += 1
+
+    # ask for confirmation
+    print("Please confirm bumping version from %s to %s" %
+          (last_tag, tag_string(new_tags)))
+    ans = "y" if args.confirmed else ""
+    while ans not in ['y', 'n']:
+        ans = input("OK to continue [Y/N]? ").lower()
+    if ans == "y":
+        print("Confirmed bumping version from %s to %s" %
+              (last_tag, tag_string(new_tags)))
+    else:
+        exit("Aborted")
+
+    # do the rest of the work
+    # git tag -a $NEW_VERSION -m "Version: $NEW_VERSION"
+    print(
+        subprocess.run(
+            ['git', 'tag', '-a',
+             tag_string(new_tags), '-m', 'new version'],
+            stdout=subprocess.PIPE).stdout)
+    # git push dcslin -f --tags
+    # print( subprocess.run(['git', 'push', 'dcslin', '-f', '--tags'], stdout=subprocess.PIPE).stdout) # test
+    print(subprocess.run(['git', 'push', '--tags'], stdout=subprocess.PIPE).stdout)
+    print("Done. Pushed to remote")
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
+    # main(["-y","major"])
+    # main(["-y","patch"])
+    # main(["-y","minor"])
+    # main(["-y","stable"])
+    # main(["stable"])
+    # main(["-y","rc"])
diff --git a/tool/travis/build.sh b/tool/travis/build.sh
deleted file mode 100644
index e59a1cc..0000000
--- a/tool/travis/build.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-if [[ "$TRAVIS_SECURE_ENV_VARS" == "false" ]];
-then
-  if [[ "$TRAVIS_OS_NAME" == "osx" ]];
-  then
-    export CMAKE_LIBRARY_PATH=/usr/local/opt/openblas/lib:/usr/local/opt/protobuf/lib:$CMAKE_LIBRARY_PATH;
-    export CMAKE_INCLUDE_PATH=/usr/local/opt/openblas/include:/usr/local/opt/protobuf/include:$CMAKE_INCLUDE_PATH;
-    mkdir build && cd build;
-    cmake -DUSE_CUDA=OFF -DUSE_PYTHON=OFF -DENABLE_TEST=ON -DProtobuf_PROTOC_EXECUTABLE=/usr/local/opt/protobuf/bin/protoc ..;
-  else
-    mkdir build && cd build;
-    cmake -DUSE_CUDA=OFF -DUSE_PYTHON=OFF -DENABLE_TEST=ON ..
-  fi
-  make;
-  ./bin/test_singa --gtest_output=xml:./../gtest.xml;
-else
-  bash -e tool/travis/conda.sh;
-fi
diff --git a/tool/travis/conda.sh b/tool/travis/conda.sh
deleted file mode 100644
index 10def56..0000000
--- a/tool/travis/conda.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# to build SINGA package and upload it to anaconda
-
-set -x
-
-# anaconda login user name
-USER=nusdbsystem
-OS=$TRAVIS_OS_NAME-64
-
-export PATH="$HOME/miniconda/bin:$PATH"
-conda config --set anaconda_upload no
-
-# save the package at given folder, then we can upload using singa-*.tar.bz2
-suffix=$TRAVIS_JOB_NUMBER  #`TZ=Asia/Singapore date +%Y-%m-%d-%H-%M-%S`
-export CONDA_BLD_PATH=~/conda-bld-$suffix
-mkdir $CONDA_BLD_PATH
-
-conda build tool/conda/
-
-# turn off debug to hide the token in travis log
-set +x
-anaconda -t $ANACONDA_UPLOAD_TOKEN upload -u $USER -l main $CONDA_BLD_PATH/$OS/singa-*.tar.bz2 --force
diff --git a/tool/travis/depends.sh b/tool/travis/depends.sh
deleted file mode 100644
index a90cdd4..0000000
--- a/tool/travis/depends.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# if no env var (i.e., token), then do normal build and test;
-# otherwise use conda to build and package
-if [[ "$TRAVIS_SECURE_ENV_VARS" == "false" ]];
-then
-  if [[ "$TRAVIS_OS_NAME" == "linux" ]];
-  then
-    sudo apt-get -qq update;
-    sudo apt-get -qq -y install libopenblas-dev libprotobuf-dev protobuf-compiler;
-  else
-    brew update;
-    brew tap homebrew/science;
-    brew install openblas protobuf;
-  fi
-else
-  # install miniconda
-  if [[ "$TRAVIS_OS_NAME" == "linux" ]];
-  then
-    wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
-  else
-    wget https://repo.continuum.io/miniconda/Miniconda2-latest-MacOSX-x86_64.sh -O miniconda.sh;
-  fi
-  bash miniconda.sh -b -p $HOME/miniconda
-  export PATH="$HOME/miniconda/bin:$PATH"
-  hash -r
-  conda config --set always_yes yes --set changeps1 no
-  conda update -q conda
-  conda install conda-build
-  conda install anaconda-client
-  conda config --add channels conda-forge
-fi
diff --git a/tool/wheel.sh b/tool/wheel.sh
new file mode 100644
index 0000000..10e419a
--- /dev/null
+++ b/tool/wheel.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script should be launched from the root of the singa source folder.
+# It builds the CPU-only and CUDA-enabled wheel packages for Python 3.6, 3.7 and 3.8.
+
+rm -rf dist
+
+# build CPU-only wheel packages, one clean build per interpreter
+for PY in cp36-cp36m cp37-cp37m cp38-cp38; do
+  rm -rf build
+  /opt/python/$PY/bin/python setup.py bdist_wheel
+done
+
+# build CUDA-enabled wheel packages
+export SINGA_CUDA=ON
+for PY in cp36-cp36m cp37-cp37m cp38-cp38; do
+  rm -rf build
+  /opt/python/$PY/bin/python setup.py bdist_wheel
+done
+
+# repair the wheel files in dist/*.whl and store the results into wheelhouse/
+/opt/python/cp38-cp38/bin/python setup.py audit
\ No newline at end of file
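The build invocations in wheel.sh form a simple matrix: {CPU, CUDA} x {Python 3.6, 3.7, 3.8}, with a clean build tree before each run. A hedged Python driver expressing the same matrix (the interpreter paths and the SINGA_CUDA switch are taken from the script above; treating this as a drop-in replacement is an assumption):

import os
import shutil
import subprocess

PYTHONS = ["cp36-cp36m", "cp37-cp37m", "cp38-cp38"]

def build_wheels(cuda=False):
    env = os.environ.copy()
    if cuda:
        env["SINGA_CUDA"] = "ON"  # picked up by setup.py, per wheel.sh
    for py in PYTHONS:
        # each interpreter gets a clean build tree, as in the script
        shutil.rmtree("build", ignore_errors=True)
        subprocess.run(["/opt/python/%s/bin/python" % py,
                        "setup.py", "bdist_wheel"],
                       check=True, env=env)

if __name__ == "__main__":
    build_wheels(cuda=False)  # CPU-only wheels land in dist/
    build_wheels(cuda=True)   # CUDA-enabled wheels land in dist/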