Add Sphinx-based docs minisite
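
The site itself is built with Jekyll from the new _config.yml below; the Sphinx-generated
API docs are expected to live under docs/ (hence the keep_files entry). A minimal sketch for
previewing the site locally, assuming Ruby is available and the plugins listed under gems:
are installed (these commands are illustrative only and not part of this change):

    gem install jekyll jekyll-sitemap jekyll-gist jekyll-feed
    jekyll serve
    # reads _config.yml and serves the site at http://127.0.0.1:4000/ by default
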
diff --git a/_config.yml b/_config.yml
new file mode 100644
index 0000000..623a3ff
--- /dev/null
+++ b/_config.yml
@@ -0,0 +1,68 @@
+# Site wide configuration
+
+title: "Apache ARIA TOSCA"
+description: "ARIA is an agile reference implementation of automation based on the OASIS TOSCA Specification. It is a framework for implementing orchestration software and a command-line tool for executing TOSCA-based application blueprints."
+logo: aria-logo.png
+# 120x120 px default image used for Twitter summary card
+teaser: aria-logo.png
+# 400x250 px default teaser image used in image archive grid
+locale: en
+#url: ariatosca.apache.org
+feed:
+  path: atom.xml
+
+# Jekyll configuration
+
+sass:
+  sass_dir: _sass
+  style: compressed
+permalink: /:categories/:title/
+kramdown:
+  toc_levels: 1..2
+highlighter: rouge
+gems:
+  - jekyll-sitemap
+  - jekyll-gist
+  - jekyll-feed
+
+# Site owner
+owner:
+  name:
+  email:
+  twitter:
+  google:
+    ad-client:
+    ad-slot:
+  bio:
+  avatar: bio-photo.jpg # 160x160 px image for author byline
+  disqus-shortname:
+
+keep_files:
+  - docs
+
+include:
+  - .htaccess
+
+exclude:
+  - "*.less"
+  - "*.sublime-project"
+  - "*.sublime-workspace"
+  - .asset-cache
+  - .bundle
+  - .jekyll-assets-cache
+  - .sass-cache
+  - CHANGELOG
+  - Capfile
+  - Gemfile
+  - Gruntfile.js
+  - LICENSE
+  - README
+  - Rakefile
+  - config
+  - gulpfile.js
+  - lib
+  - log
+  - node_modules
+  - package.json
+  - spec
+  - tmp
diff --git a/apache-ariatosca-0.1.1.tar.gz.1 b/apache-ariatosca-0.1.1.tar.gz.1
deleted file mode 100644
index 4a1ef7d..0000000
--- a/apache-ariatosca-0.1.1.tar.gz.1
+++ /dev/null
Binary files differ
diff --git a/apache-ariatosca-0.1.1/CHANGELOG.rst b/apache-ariatosca-0.1.1/CHANGELOG.rst
deleted file mode 100644
index a0ca089..0000000
--- a/apache-ariatosca-0.1.1/CHANGELOG.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-0.1.1
------
-
-[ARIA-312] Validation of workflow and operation kwargs raise false alarms
-[ARIA-301] Environment-marked dependencies are installed regardless of environment when installing from wheel
-[ARIA-299] Resuming canceled execution with non-finished tasks fails
-[ARIA-298] Test suite sometimes fails or freezes despite all tests passing
-[ARIA-296] Process termination test fails on windows
-[ARIA-287] New tox suite to make sure that Sphinx documentation generation isn't broken
-[ARIA-202] Execution plugin assumes '/tmp' for temp directory on the local/remote machine
-
-
-0.1.0
------
-
- * Initial release.
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/CONTRIBUTING b/apache-ariatosca-0.1.1/CONTRIBUTING
deleted file mode 100644
index 4124003..0000000
--- a/apache-ariatosca-0.1.1/CONTRIBUTING
+++ /dev/null
@@ -1,3 +0,0 @@
-Contribution guide is available on our Confluence:
-
-https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/DISCLAIMER b/apache-ariatosca-0.1.1/DISCLAIMER
deleted file mode 100644
index 358d8e1..0000000
--- a/apache-ariatosca-0.1.1/DISCLAIMER
+++ /dev/null
@@ -1,10 +0,0 @@
-Apache AriaTosca is an effort undergoing incubation at the Apache Software
-Foundation (ASF), sponsored by the Apache Incubator.
-
-Incubation is required of all newly accepted projects until a further review
-indicates that the infrastructure, communications, and decision making process
-have stabilized in a manner consistent with other successful ASF projects.
-
-While incubation status is not necessarily a reflection of the completeness
-or stability of the code, it does indicate that the project has yet to be
-fully endorsed by the ASF.
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/LICENSE b/apache-ariatosca-0.1.1/LICENSE
deleted file mode 100644
index 37ec93a..0000000
--- a/apache-ariatosca-0.1.1/LICENSE
+++ /dev/null
@@ -1,191 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and
-distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright
-owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities
-that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, "control" means (i) the power, direct or
-indirect, to cause the direction or management of such entity, whether by
-contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
-outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising
-permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including
-but not limited to software source code, documentation source, and configuration
-files.
-
-"Object" form shall mean any form resulting from mechanical transformation or
-translation of a Source form, including but not limited to compiled object code,
-generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made
-available under the License, as indicated by a copyright notice that is included
-in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that
-is based on (or derived from) the Work and for which the editorial revisions,
-annotations, elaborations, or other modifications represent, as a whole, an
-original work of authorship. For the purposes of this License, Derivative Works
-shall not include works that remain separable from, or merely link (or bind by
-name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version
-of the Work and any modifications or additions to that Work or Derivative Works
-thereof, that is intentionally submitted to Licensor for inclusion in the Work
-by the copyright owner or by an individual or Legal Entity authorized to submit
-on behalf of the copyright owner. For the purposes of this definition,
-"submitted" means any form of electronic, verbal, or written communication sent
-to the Licensor or its representatives, including but not limited to
-communication on electronic mailing lists, source code control systems, and
-issue tracking systems that are managed by, or on behalf of, the Licensor for
-the purpose of discussing and improving the Work, but excluding communication
-that is conspicuously marked or otherwise designated in writing by the copyright
-owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
-of whom a Contribution has been received by Licensor and subsequently
-incorporated within the Work.
-
-2. Grant of Copyright License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable copyright license to reproduce, prepare Derivative Works of,
-publicly display, publicly perform, sublicense, and distribute the Work and such
-Derivative Works in Source or Object form.
-
-3. Grant of Patent License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable (except as stated in this section) patent license to make, have
-made, use, offer to sell, sell, import, and otherwise transfer the Work, where
-such license applies only to those patent claims licensable by such Contributor
-that are necessarily infringed by their Contribution(s) alone or by combination
-of their Contribution(s) with the Work to which such Contribution(s) was
-submitted. If You institute patent litigation against any entity (including a
-cross-claim or counterclaim in a lawsuit) alleging that the Work or a
-Contribution incorporated within the Work constitutes direct or contributory
-patent infringement, then any patent licenses granted to You under this License
-for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution.
-
-You may reproduce and distribute copies of the Work or Derivative Works thereof
-in any medium, with or without modifications, and in Source or Object form,
-provided that You meet the following conditions:
-
-You must give any other recipients of the Work or Derivative Works a copy of
-this License; and
-You must cause any modified files to carry prominent notices stating that You
-changed the files; and
-You must retain, in the Source form of any Derivative Works that You distribute,
-all copyright, patent, trademark, and attribution notices from the Source form
-of the Work, excluding those notices that do not pertain to any part of the
-Derivative Works; and
-If the Work includes a "NOTICE" text file as part of its distribution, then any
-Derivative Works that You distribute must include a readable copy of the
-attribution notices contained within such NOTICE file, excluding those notices
-that do not pertain to any part of the Derivative Works, in at least one of the
-following places: within a NOTICE text file distributed as part of the
-Derivative Works; within the Source form or documentation, if provided along
-with the Derivative Works; or, within a display generated by the Derivative
-Works, if and wherever such third-party notices normally appear. The contents of
-the NOTICE file are for informational purposes only and do not modify the
-License. You may add Your own attribution notices within Derivative Works that
-You distribute, alongside or as an addendum to the NOTICE text from the Work,
-provided that such additional attribution notices cannot be construed as
-modifying the License.
-You may add Your own copyright statement to Your modifications and may provide
-additional or different license terms and conditions for use, reproduction, or
-distribution of Your modifications, or for any such Derivative Works as a whole,
-provided Your use, reproduction, and distribution of the Work otherwise complies
-with the conditions stated in this License.
-
-5. Submission of Contributions.
-
-Unless You explicitly state otherwise, any Contribution intentionally submitted
-for inclusion in the Work by You to the Licensor shall be under the terms and
-conditions of this License, without any additional terms or conditions.
-Notwithstanding the above, nothing herein shall supersede or modify the terms of
-any separate license agreement you may have executed with Licensor regarding
-such Contributions.
-
-6. Trademarks.
-
-This License does not grant permission to use the trade names, trademarks,
-service marks, or product names of the Licensor, except as required for
-reasonable and customary use in describing the origin of the Work and
-reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty.
-
-Unless required by applicable law or agreed to in writing, Licensor provides the
-Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
-including, without limitation, any warranties or conditions of TITLE,
-NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
-solely responsible for determining the appropriateness of using or
-redistributing the Work and assume any risks associated with Your exercise of
-permissions under this License.
-
-8. Limitation of Liability.
-
-In no event and under no legal theory, whether in tort (including negligence),
-contract, or otherwise, unless required by applicable law (such as deliberate
-and grossly negligent acts) or agreed to in writing, shall any Contributor be
-liable to You for damages, including any direct, indirect, special, incidental,
-or consequential damages of any character arising as a result of this License or
-out of the use or inability to use the Work (including but not limited to
-damages for loss of goodwill, work stoppage, computer failure or malfunction, or
-any and all other commercial damages or losses), even if such Contributor has
-been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability.
-
-While redistributing the Work or Derivative Works thereof, You may choose to
-offer, and charge a fee for, acceptance of support, warranty, indemnity, or
-other liability obligations and/or rights consistent with this License. However,
-in accepting such obligations, You may act only on Your own behalf and on Your
-sole responsibility, not on behalf of any other Contributor, and only if You
-agree to indemnify, defend, and hold each Contributor harmless for any liability
-incurred by, or claims asserted against, such Contributor by reason of your
-accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work
-
-To apply the Apache License to your work, attach the following boilerplate
-notice, with the fields enclosed by brackets "[]" replaced with your own
-identifying information. (Don't include the brackets!) The text should be
-enclosed in the appropriate comment syntax for the file format. We also
-recommend that a file or class name and description of purpose be included on
-the same "printed page" as the copyright notice for easier identification within
-third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/apache-ariatosca-0.1.1/MANIFEST.in b/apache-ariatosca-0.1.1/MANIFEST.in
deleted file mode 100644
index 6b67894..0000000
--- a/apache-ariatosca-0.1.1/MANIFEST.in
+++ /dev/null
@@ -1,12 +0,0 @@
-include CONTRIBUTING
-include DISCLAIMER
-include LICENSE
-include NOTICE
-include VERSION
-include CHANGELOG.rst
-include README.rst
-include requirements.in
-include requirements.txt
-recursive-include docs/html *
-recursive-include examples *
-prune docs/html/.doctrees
diff --git a/apache-ariatosca-0.1.1/NOTICE b/apache-ariatosca-0.1.1/NOTICE
deleted file mode 100644
index bf03ab5..0000000
--- a/apache-ariatosca-0.1.1/NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-Apache AriaTosca
-Copyright 2016-2017 The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/PKG-INFO b/apache-ariatosca-0.1.1/PKG-INFO
deleted file mode 100644
index 46a0050..0000000
--- a/apache-ariatosca-0.1.1/PKG-INFO
+++ /dev/null
@@ -1,189 +0,0 @@
-Metadata-Version: 1.1
-Name: apache-ariatosca
-Version: 0.1.1
-Summary: ARIA
-Home-page: http://ariatosca.incubator.apache.org/
-Author: ARIA
-Author-email: dev@ariatosca.incubator.apache.org
-License: Apache License 2.0
-Download-URL: https://dist.apache.org/repos/dist/release/incubator/ariatosca/0.1.1-incubating
-Description: ARIA
-        ====
-        
-        |Build Status| |Appveyor Build Status| |License| |PyPI release| |Python Versions| |Wheel|
-        |Contributors| |Open Pull Requests| |Closed Pull Requests|
-        
-        
-        What is ARIA?
-        -------------
-        
-        `ARIA <http://ariatosca.incubator.apache.org/>`__ is a an open-source,
-        `TOSCA <https://www.oasis-open.org/committees/tosca/>`__-based, lightweight library and CLI for
-        orchestration and for consumption by projects building TOSCA-based solutions for resources and
-        services orchestration.
-        
-        ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its
-        solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple
-        virtual infrastructure managers.
-        
-        With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your
-        applications, from template to deployment.
-        
-        ARIA is an incubation project under the `Apache Software Foundation <https://www.apache.org/>`__.
-        
-        
-        Installation
-        ------------
-        
-        ARIA is `available on PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__.
-        
-        ARIA requires Python 2.6/2.7. Python 3 is currently not supported.
-        
-        To install ARIA directly from PyPI (using a ``wheel``), use::
-        
-            pip install --upgrade pip setuptools
-            pip install apache-ariatosca
-        
-        To install ARIA from source, download the source tarball from
-        `PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__, extract and ``cd`` into the extract dir, and run::
-        
-            pip install --upgrade pip setuptools
-            pip install .
-        
-        | The source package comes along with relevant examples, documentation, ``requirements.txt`` (for installing specifically the frozen dependencies' versions with which ARIA was tested) and more.
-        |
-        |
-        | ARIA has additional optional dependencies. These are required for running operations over SSH.
-        | Below are instructions on how to install these dependencies, including required system dependencies per OS.
-        |
-        | Note: These dependencies may have varying licenses which may not be compatible with Apache license 2.0.
-        |
-        
-        **Ubuntu/Debian** (tested on Ubuntu14.04, Ubuntu16.04)::
-        
-            apt-get install -y python-dev gcc libffi-dev libssl-dev
-            pip install aria[ssh]
-        
-        **Centos** (tested on Centos6.6, Centos7)::
-        
-            yum install -y python-devel gcc libffi-devel openssl-devel
-            pip install aria[ssh]
-        
-        **Archlinux**::
-        
-            pacman -Syu --noconfirm python2 gcc libffi openssl
-            pip2 install aria[ssh]
-        
-        **Windows** (tested on Win10)::
-        
-            # no additional system requirements are needed
-            pip install aria[ssh]
-        
-        **MacOS**::
-        
-            # TODO
-        
-        
-        
-        To install ``pip``, either use your distro's package management system, or run::
-        
-            wget http://bootstrap.pypa.io/get-pip.py
-            python get-pip.py
-        
-        
-        
-        Getting Started
-        ---------------
-        
-        This section will describe how to run a simple "Hello World" example.
-        
-        First, provide ARIA with the ARIA "hello world" service-template and name it (e.g.
-        ``my-service-template``)::
-        
-            aria service-templates store examples/hello-world/helloworld.yaml my-service-template
-        
-        Now create a service based on this service-template and name it (e.g. ``my-service``)::
-        
-            aria services create my-service -t my-service-template
-        
-        Finally, start an ``install`` workflow execution on ``my-service`` like so::
-        
-            aria executions start install -s my-service
-        
-        You should now have a simple web-server running on your local machine. You can try visiting
-        ``http://localhost:9090`` to view your deployed application.
-        
-        To uninstall and clean your environment, follow these steps::
-        
-            aria executions start uninstall -s my-service
-            aria services delete my-service
-            aria service-templates delete my-service-template
-        
-        
-        Contribution
-        ------------
-        
-        You are welcome and encouraged to participate and contribute to the ARIA project.
-        
-        Please see our guide to
-        `Contributing to ARIA <https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__.
-        
-        Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__
-        section).
-        
-        
-        Resources
-        ---------
-        
-        -  `ARIA homepage <http://ariatosca.incubator.apache.org/>`__
-        -  `ARIA wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__
-        -  `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__
-        
-        -  Dev mailing list: dev@ariatosca.incubator.apache.org
-        -  User mailing list: user@ariatosca.incubator.apache.org
-        
-        Subscribe by sending a mail to ``<group>-subscribe@ariatosca.incubator.apache.org`` (e.g.
-        ``dev-subscribe@ariatosca.incubator.apache.org``). See information on how to subscribe to mailing
-        lists `here <https://www.apache.org/foundation/mailinglists.html>`__.
-        
-        For past correspondence, see the
-        `dev mailing list archive <https://lists.apache.org/list.html?dev@ariatosca.apache.org>`__.
-        
-        
-        License
-        -------
-        
-        ARIA is licensed under the
-        `Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__.
-        
-        .. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg
-           :target: https://travis-ci.org/apache/incubator-ariatosca
-        .. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg
-           :target: https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history
-        .. |License| image:: https://img.shields.io/github/license/apache/incubator-ariatosca.svg
-           :target: http://www.apache.org/licenses/LICENSE-2.0
-        .. |PyPI release| image:: https://img.shields.io/pypi/v/apache-ariatosca.svg
-           :target: https://pypi.python.org/pypi/apache-ariatosca
-        .. |Python Versions| image:: https://img.shields.io/pypi/pyversions/apache-ariatosca.svg
-        .. |Wheel| image:: https://img.shields.io/pypi/wheel/apache-ariatosca.svg
-        .. |Contributors| image:: https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg
-        .. |Open Pull Requests| image:: https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg
-           :target: https://github.com/apache/incubator-ariatosca/pulls
-        .. |Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg
-           :target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed
-        
-Platform: any
-Classifier: Development Status :: 4 - Beta
-Classifier: Environment :: Console
-Classifier: Environment :: Web Environment
-Classifier: Intended Audience :: Developers
-Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Networking
-Classifier: Topic :: System :: Systems Administration
diff --git a/apache-ariatosca-0.1.1/README.rst b/apache-ariatosca-0.1.1/README.rst
deleted file mode 100644
index b9a8213..0000000
--- a/apache-ariatosca-0.1.1/README.rst
+++ /dev/null
@@ -1,164 +0,0 @@
-ARIA
-====
-
-|Build Status| |Appveyor Build Status| |License| |PyPI release| |Python Versions| |Wheel|
-|Contributors| |Open Pull Requests| |Closed Pull Requests|
-
-
-What is ARIA?
--------------
-
-`ARIA <http://ariatosca.incubator.apache.org/>`__ is a an open-source,
-`TOSCA <https://www.oasis-open.org/committees/tosca/>`__-based, lightweight library and CLI for
-orchestration and for consumption by projects building TOSCA-based solutions for resources and
-services orchestration.
-
-ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its
-solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple
-virtual infrastructure managers.
-
-With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your
-applications, from template to deployment.
-
-ARIA is an incubation project under the `Apache Software Foundation <https://www.apache.org/>`__.
-
-
-Installation
-------------
-
-ARIA is `available on PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__.
-
-ARIA requires Python 2.6/2.7. Python 3 is currently not supported.
-
-To install ARIA directly from PyPI (using a ``wheel``), use::
-
-    pip install --upgrade pip setuptools
-    pip install apache-ariatosca
-
-To install ARIA from source, download the source tarball from
-`PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__, extract and ``cd`` into the extract dir, and run::
-
-    pip install --upgrade pip setuptools
-    pip install .
-
-| The source package comes along with relevant examples, documentation, ``requirements.txt`` (for installing specifically the frozen dependencies' versions with which ARIA was tested) and more.
-|
-|
-| ARIA has additional optional dependencies. These are required for running operations over SSH.
-| Below are instructions on how to install these dependencies, including required system dependencies per OS.
-|
-| Note: These dependencies may have varying licenses which may not be compatible with Apache license 2.0.
-|
-
-**Ubuntu/Debian** (tested on Ubuntu14.04, Ubuntu16.04)::
-
-    apt-get install -y python-dev gcc libffi-dev libssl-dev
-    pip install aria[ssh]
-
-**Centos** (tested on Centos6.6, Centos7)::
-
-    yum install -y python-devel gcc libffi-devel openssl-devel
-    pip install aria[ssh]
-
-**Archlinux**::
-
-    pacman -Syu --noconfirm python2 gcc libffi openssl
-    pip2 install aria[ssh]
-
-**Windows** (tested on Win10)::
-
-    # no additional system requirements are needed
-    pip install aria[ssh]
-
-**MacOS**::
-
-    # TODO
-
-
-
-To install ``pip``, either use your distro's package management system, or run::
-
-    wget http://bootstrap.pypa.io/get-pip.py
-    python get-pip.py
-
-
-
-Getting Started
----------------
-
-This section will describe how to run a simple "Hello World" example.
-
-First, provide ARIA with the ARIA "hello world" service-template and name it (e.g.
-``my-service-template``)::
-
-    aria service-templates store examples/hello-world/helloworld.yaml my-service-template
-
-Now create a service based on this service-template and name it (e.g. ``my-service``)::
-
-    aria services create my-service -t my-service-template
-
-Finally, start an ``install`` workflow execution on ``my-service`` like so::
-
-    aria executions start install -s my-service
-
-You should now have a simple web-server running on your local machine. You can try visiting
-``http://localhost:9090`` to view your deployed application.
-
-To uninstall and clean your environment, follow these steps::
-
-    aria executions start uninstall -s my-service
-    aria services delete my-service
-    aria service-templates delete my-service-template
-
-
-Contribution
-------------
-
-You are welcome and encouraged to participate and contribute to the ARIA project.
-
-Please see our guide to
-`Contributing to ARIA <https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__.
-
-Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__
-section).
-
-
-Resources
----------
-
--  `ARIA homepage <http://ariatosca.incubator.apache.org/>`__
--  `ARIA wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__
--  `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__
-
--  Dev mailing list: dev@ariatosca.incubator.apache.org
--  User mailing list: user@ariatosca.incubator.apache.org
-
-Subscribe by sending a mail to ``<group>-subscribe@ariatosca.incubator.apache.org`` (e.g.
-``dev-subscribe@ariatosca.incubator.apache.org``). See information on how to subscribe to mailing
-lists `here <https://www.apache.org/foundation/mailinglists.html>`__.
-
-For past correspondence, see the
-`dev mailing list archive <https://lists.apache.org/list.html?dev@ariatosca.apache.org>`__.
-
-
-License
--------
-
-ARIA is licensed under the
-`Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__.
-
-.. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg
-   :target: https://travis-ci.org/apache/incubator-ariatosca
-.. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg
-   :target: https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history
-.. |License| image:: https://img.shields.io/github/license/apache/incubator-ariatosca.svg
-   :target: http://www.apache.org/licenses/LICENSE-2.0
-.. |PyPI release| image:: https://img.shields.io/pypi/v/apache-ariatosca.svg
-   :target: https://pypi.python.org/pypi/apache-ariatosca
-.. |Python Versions| image:: https://img.shields.io/pypi/pyversions/apache-ariatosca.svg
-.. |Wheel| image:: https://img.shields.io/pypi/wheel/apache-ariatosca.svg
-.. |Contributors| image:: https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg
-.. |Open Pull Requests| image:: https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg
-   :target: https://github.com/apache/incubator-ariatosca/pulls
-.. |Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg
-   :target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed
diff --git a/apache-ariatosca-0.1.1/VERSION b/apache-ariatosca-0.1.1/VERSION
deleted file mode 100644
index 6da28dd..0000000
--- a/apache-ariatosca-0.1.1/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.1.1
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/PKG-INFO b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/PKG-INFO
deleted file mode 100644
index 46a0050..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/PKG-INFO
+++ /dev/null
@@ -1,189 +0,0 @@
-Metadata-Version: 1.1
-Name: apache-ariatosca
-Version: 0.1.1
-Summary: ARIA
-Home-page: http://ariatosca.incubator.apache.org/
-Author: ARIA
-Author-email: dev@ariatosca.incubator.apache.org
-License: Apache License 2.0
-Download-URL: https://dist.apache.org/repos/dist/release/incubator/ariatosca/0.1.1-incubating
-Description: ARIA
-        ====
-        
-        |Build Status| |Appveyor Build Status| |License| |PyPI release| |Python Versions| |Wheel|
-        |Contributors| |Open Pull Requests| |Closed Pull Requests|
-        
-        
-        What is ARIA?
-        -------------
-        
-        `ARIA <http://ariatosca.incubator.apache.org/>`__ is a an open-source,
-        `TOSCA <https://www.oasis-open.org/committees/tosca/>`__-based, lightweight library and CLI for
-        orchestration and for consumption by projects building TOSCA-based solutions for resources and
-        services orchestration.
-        
-        ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its
-        solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple
-        virtual infrastructure managers.
-        
-        With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your
-        applications, from template to deployment.
-        
-        ARIA is an incubation project under the `Apache Software Foundation <https://www.apache.org/>`__.
-        
-        
-        Installation
-        ------------
-        
-        ARIA is `available on PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__.
-        
-        ARIA requires Python 2.6/2.7. Python 3 is currently not supported.
-        
-        To install ARIA directly from PyPI (using a ``wheel``), use::
-        
-            pip install --upgrade pip setuptools
-            pip install apache-ariatosca
-        
-        To install ARIA from source, download the source tarball from
-        `PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__, extract and ``cd`` into the extract dir, and run::
-        
-            pip install --upgrade pip setuptools
-            pip install .
-        
-        | The source package comes along with relevant examples, documentation, ``requirements.txt`` (for installing specifically the frozen dependencies' versions with which ARIA was tested) and more.
-        |
-        |
-        | ARIA has additional optional dependencies. These are required for running operations over SSH.
-        | Below are instructions on how to install these dependencies, including required system dependencies per OS.
-        |
-        | Note: These dependencies may have varying licenses which may not be compatible with Apache license 2.0.
-        |
-        
-        **Ubuntu/Debian** (tested on Ubuntu14.04, Ubuntu16.04)::
-        
-            apt-get install -y python-dev gcc libffi-dev libssl-dev
-            pip install aria[ssh]
-        
-        **Centos** (tested on Centos6.6, Centos7)::
-        
-            yum install -y python-devel gcc libffi-devel openssl-devel
-            pip install aria[ssh]
-        
-        **Archlinux**::
-        
-            pacman -Syu --noconfirm python2 gcc libffi openssl
-            pip2 install aria[ssh]
-        
-        **Windows** (tested on Win10)::
-        
-            # no additional system requirements are needed
-            pip install aria[ssh]
-        
-        **MacOS**::
-        
-            # TODO
-        
-        
-        
-        To install ``pip``, either use your distro's package management system, or run::
-        
-            wget http://bootstrap.pypa.io/get-pip.py
-            python get-pip.py
-        
-        
-        
-        Getting Started
-        ---------------
-        
-        This section will describe how to run a simple "Hello World" example.
-        
-        First, provide ARIA with the ARIA "hello world" service-template and name it (e.g.
-        ``my-service-template``)::
-        
-            aria service-templates store examples/hello-world/helloworld.yaml my-service-template
-        
-        Now create a service based on this service-template and name it (e.g. ``my-service``)::
-        
-            aria services create my-service -t my-service-template
-        
-        Finally, start an ``install`` workflow execution on ``my-service`` like so::
-        
-            aria executions start install -s my-service
-        
-        You should now have a simple web-server running on your local machine. You can try visiting
-        ``http://localhost:9090`` to view your deployed application.
-        
-        To uninstall and clean your environment, follow these steps::
-        
-            aria executions start uninstall -s my-service
-            aria services delete my-service
-            aria service-templates delete my-service-template
-        
-        
-        Contribution
-        ------------
-        
-        You are welcome and encouraged to participate and contribute to the ARIA project.
-        
-        Please see our guide to
-        `Contributing to ARIA <https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__.
-        
-        Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__
-        section).
-        
-        
-        Resources
-        ---------
-        
-        -  `ARIA homepage <http://ariatosca.incubator.apache.org/>`__
-        -  `ARIA wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__
-        -  `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__
-        
-        -  Dev mailing list: dev@ariatosca.incubator.apache.org
-        -  User mailing list: user@ariatosca.incubator.apache.org
-        
-        Subscribe by sending a mail to ``<group>-subscribe@ariatosca.incubator.apache.org`` (e.g.
-        ``dev-subscribe@ariatosca.incubator.apache.org``). See information on how to subscribe to mailing
-        lists `here <https://www.apache.org/foundation/mailinglists.html>`__.
-        
-        For past correspondence, see the
-        `dev mailing list archive <https://lists.apache.org/list.html?dev@ariatosca.apache.org>`__.
-        
-        
-        License
-        -------
-        
-        ARIA is licensed under the
-        `Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__.
-        
-        .. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg
-           :target: https://travis-ci.org/apache/incubator-ariatosca
-        .. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg
-           :target: https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history
-        .. |License| image:: https://img.shields.io/github/license/apache/incubator-ariatosca.svg
-           :target: http://www.apache.org/licenses/LICENSE-2.0
-        .. |PyPI release| image:: https://img.shields.io/pypi/v/apache-ariatosca.svg
-           :target: https://pypi.python.org/pypi/apache-ariatosca
-        .. |Python Versions| image:: https://img.shields.io/pypi/pyversions/apache-ariatosca.svg
-        .. |Wheel| image:: https://img.shields.io/pypi/wheel/apache-ariatosca.svg
-        .. |Contributors| image:: https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg
-        .. |Open Pull Requests| image:: https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg
-           :target: https://github.com/apache/incubator-ariatosca/pulls
-        .. |Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg
-           :target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed
-        
-Platform: any
-Classifier: Development Status :: 4 - Beta
-Classifier: Environment :: Console
-Classifier: Environment :: Web Environment
-Classifier: Intended Audience :: Developers
-Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Networking
-Classifier: Topic :: System :: Systems Administration
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/SOURCES.txt b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/SOURCES.txt
deleted file mode 100644
index 30bcd9e..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,378 +0,0 @@
-CHANGELOG.rst
-CONTRIBUTING
-DISCLAIMER
-LICENSE
-MANIFEST.in
-NOTICE
-README.rst
-VERSION
-requirements.in
-requirements.txt
-setup.py
-apache_ariatosca.egg-info/PKG-INFO
-apache_ariatosca.egg-info/SOURCES.txt
-apache_ariatosca.egg-info/dependency_links.txt
-apache_ariatosca.egg-info/entry_points.txt
-apache_ariatosca.egg-info/not-zip-safe
-apache_ariatosca.egg-info/requires.txt
-apache_ariatosca.egg-info/top_level.txt
-aria/__init__.py
-aria/core.py
-aria/exceptions.py
-aria/extension.py
-aria/logger.py
-aria/cli/__init__.py
-aria/cli/ascii_art.py
-aria/cli/color.py
-aria/cli/csar.py
-aria/cli/defaults.py
-aria/cli/env.py
-aria/cli/exceptions.py
-aria/cli/execution_logging.py
-aria/cli/helptexts.py
-aria/cli/inputs.py
-aria/cli/logger.py
-aria/cli/main.py
-aria/cli/service_template_utils.py
-aria/cli/table.py
-aria/cli/utils.py
-aria/cli/commands/__init__.py
-aria/cli/commands/executions.py
-aria/cli/commands/logs.py
-aria/cli/commands/node_templates.py
-aria/cli/commands/nodes.py
-aria/cli/commands/plugins.py
-aria/cli/commands/reset.py
-aria/cli/commands/service_templates.py
-aria/cli/commands/services.py
-aria/cli/commands/workflows.py
-aria/cli/config/__init__.py
-aria/cli/config/config.py
-aria/cli/config/config_template.yaml
-aria/cli/core/__init__.py
-aria/cli/core/aria.py
-aria/modeling/__init__.py
-aria/modeling/constraints.py
-aria/modeling/exceptions.py
-aria/modeling/functions.py
-aria/modeling/mixins.py
-aria/modeling/models.py
-aria/modeling/orchestration.py
-aria/modeling/relationship.py
-aria/modeling/service_changes.py
-aria/modeling/service_common.py
-aria/modeling/service_instance.py
-aria/modeling/service_template.py
-aria/modeling/types.py
-aria/modeling/utils.py
-aria/orchestrator/__init__.py
-aria/orchestrator/decorators.py
-aria/orchestrator/events.py
-aria/orchestrator/exceptions.py
-aria/orchestrator/plugin.py
-aria/orchestrator/workflow_runner.py
-aria/orchestrator/context/__init__.py
-aria/orchestrator/context/common.py
-aria/orchestrator/context/exceptions.py
-aria/orchestrator/context/operation.py
-aria/orchestrator/context/toolbelt.py
-aria/orchestrator/context/workflow.py
-aria/orchestrator/execution_plugin/__init__.py
-aria/orchestrator/execution_plugin/common.py
-aria/orchestrator/execution_plugin/constants.py
-aria/orchestrator/execution_plugin/environment_globals.py
-aria/orchestrator/execution_plugin/exceptions.py
-aria/orchestrator/execution_plugin/instantiation.py
-aria/orchestrator/execution_plugin/local.py
-aria/orchestrator/execution_plugin/operations.py
-aria/orchestrator/execution_plugin/ctx_proxy/__init__.py
-aria/orchestrator/execution_plugin/ctx_proxy/client.py
-aria/orchestrator/execution_plugin/ctx_proxy/server.py
-aria/orchestrator/execution_plugin/ssh/__init__.py
-aria/orchestrator/execution_plugin/ssh/operations.py
-aria/orchestrator/execution_plugin/ssh/tunnel.py
-aria/orchestrator/workflows/__init__.py
-aria/orchestrator/workflows/events_logging.py
-aria/orchestrator/workflows/exceptions.py
-aria/orchestrator/workflows/api/__init__.py
-aria/orchestrator/workflows/api/task.py
-aria/orchestrator/workflows/api/task_graph.py
-aria/orchestrator/workflows/builtin/__init__.py
-aria/orchestrator/workflows/builtin/execute_operation.py
-aria/orchestrator/workflows/builtin/heal.py
-aria/orchestrator/workflows/builtin/install.py
-aria/orchestrator/workflows/builtin/start.py
-aria/orchestrator/workflows/builtin/stop.py
-aria/orchestrator/workflows/builtin/uninstall.py
-aria/orchestrator/workflows/builtin/workflows.py
-aria/orchestrator/workflows/core/__init__.py
-aria/orchestrator/workflows/core/engine.py
-aria/orchestrator/workflows/core/events_handler.py
-aria/orchestrator/workflows/core/graph_compiler.py
-aria/orchestrator/workflows/executor/__init__.py
-aria/orchestrator/workflows/executor/base.py
-aria/orchestrator/workflows/executor/celery.py
-aria/orchestrator/workflows/executor/dry.py
-aria/orchestrator/workflows/executor/process.py
-aria/orchestrator/workflows/executor/thread.py
-aria/parser/__init__.py
-aria/parser/exceptions.py
-aria/parser/specification.py
-aria/parser/consumption/__init__.py
-aria/parser/consumption/consumer.py
-aria/parser/consumption/context.py
-aria/parser/consumption/exceptions.py
-aria/parser/consumption/inputs.py
-aria/parser/consumption/modeling.py
-aria/parser/consumption/presentation.py
-aria/parser/consumption/style.py
-aria/parser/consumption/validation.py
-aria/parser/loading/__init__.py
-aria/parser/loading/context.py
-aria/parser/loading/exceptions.py
-aria/parser/loading/file.py
-aria/parser/loading/literal.py
-aria/parser/loading/loader.py
-aria/parser/loading/location.py
-aria/parser/loading/request.py
-aria/parser/loading/source.py
-aria/parser/loading/uri.py
-aria/parser/modeling/__init__.py
-aria/parser/modeling/context.py
-aria/parser/presentation/__init__.py
-aria/parser/presentation/context.py
-aria/parser/presentation/exceptions.py
-aria/parser/presentation/field_validators.py
-aria/parser/presentation/fields.py
-aria/parser/presentation/null.py
-aria/parser/presentation/presentation.py
-aria/parser/presentation/presenter.py
-aria/parser/presentation/source.py
-aria/parser/presentation/utils.py
-aria/parser/reading/__init__.py
-aria/parser/reading/context.py
-aria/parser/reading/exceptions.py
-aria/parser/reading/jinja.py
-aria/parser/reading/json.py
-aria/parser/reading/locator.py
-aria/parser/reading/raw.py
-aria/parser/reading/reader.py
-aria/parser/reading/source.py
-aria/parser/reading/yaml.py
-aria/parser/validation/__init__.py
-aria/parser/validation/context.py
-aria/parser/validation/issue.py
-aria/storage/__init__.py
-aria/storage/api.py
-aria/storage/collection_instrumentation.py
-aria/storage/core.py
-aria/storage/exceptions.py
-aria/storage/filesystem_rapi.py
-aria/storage/sql_mapi.py
-aria/utils/__init__.py
-aria/utils/archive.py
-aria/utils/argparse.py
-aria/utils/caching.py
-aria/utils/collections.py
-aria/utils/console.py
-aria/utils/exceptions.py
-aria/utils/file.py
-aria/utils/formatting.py
-aria/utils/http.py
-aria/utils/imports.py
-aria/utils/openclose.py
-aria/utils/plugin.py
-aria/utils/process.py
-aria/utils/specification.py
-aria/utils/threading.py
-aria/utils/type.py
-aria/utils/uris.py
-aria/utils/uuid.py
-aria/utils/validation.py
-aria/utils/versions.py
-docs/html/.buildinfo
-docs/html/aria.cli.html
-docs/html/aria.html
-docs/html/aria.modeling.html
-docs/html/aria.modeling.models.html
-docs/html/aria.orchestrator.context.html
-docs/html/aria.orchestrator.execution_plugin.ctx_proxy.html
-docs/html/aria.orchestrator.execution_plugin.html
-docs/html/aria.orchestrator.execution_plugin.ssh.html
-docs/html/aria.orchestrator.html
-docs/html/aria.orchestrator.workflows.api.html
-docs/html/aria.orchestrator.workflows.builtin.html
-docs/html/aria.orchestrator.workflows.executor.html
-docs/html/aria.orchestrator.workflows.html
-docs/html/aria.parser.consumption.html
-docs/html/aria.parser.html
-docs/html/aria.parser.loading.html
-docs/html/aria.parser.modeling.html
-docs/html/aria.parser.presentation.html
-docs/html/aria.parser.reading.html
-docs/html/aria.parser.validation.html
-docs/html/aria.storage.html
-docs/html/aria.utils.html
-docs/html/aria_extension_tosca.simple_nfv_v1_0.html
-docs/html/aria_extension_tosca.simple_v1_0.html
-docs/html/aria_extension_tosca.simple_v1_0.modeling.html
-docs/html/aria_extension_tosca.simple_v1_0.presentation.html
-docs/html/cli.html
-docs/html/genindex.html
-docs/html/index.html
-docs/html/objects.inv
-docs/html/py-modindex.html
-docs/html/rest.html
-docs/html/search.html
-docs/html/searchindex.js
-docs/html/_sources/aria.cli.rst.txt
-docs/html/_sources/aria.modeling.models.rst.txt
-docs/html/_sources/aria.modeling.rst.txt
-docs/html/_sources/aria.orchestrator.context.rst.txt
-docs/html/_sources/aria.orchestrator.execution_plugin.ctx_proxy.rst.txt
-docs/html/_sources/aria.orchestrator.execution_plugin.rst.txt
-docs/html/_sources/aria.orchestrator.execution_plugin.ssh.rst.txt
-docs/html/_sources/aria.orchestrator.rst.txt
-docs/html/_sources/aria.orchestrator.workflows.api.rst.txt
-docs/html/_sources/aria.orchestrator.workflows.builtin.rst.txt
-docs/html/_sources/aria.orchestrator.workflows.executor.rst.txt
-docs/html/_sources/aria.orchestrator.workflows.rst.txt
-docs/html/_sources/aria.parser.consumption.rst.txt
-docs/html/_sources/aria.parser.loading.rst.txt
-docs/html/_sources/aria.parser.modeling.rst.txt
-docs/html/_sources/aria.parser.presentation.rst.txt
-docs/html/_sources/aria.parser.reading.rst.txt
-docs/html/_sources/aria.parser.rst.txt
-docs/html/_sources/aria.parser.validation.rst.txt
-docs/html/_sources/aria.rst.txt
-docs/html/_sources/aria.storage.rst.txt
-docs/html/_sources/aria.utils.rst.txt
-docs/html/_sources/aria_extension_tosca.simple_nfv_v1_0.rst.txt
-docs/html/_sources/aria_extension_tosca.simple_v1_0.modeling.rst.txt
-docs/html/_sources/aria_extension_tosca.simple_v1_0.presentation.rst.txt
-docs/html/_sources/aria_extension_tosca.simple_v1_0.rst.txt
-docs/html/_sources/cli.rst.txt
-docs/html/_sources/index.rst.txt
-docs/html/_sources/rest.rst.txt
-docs/html/_static/ajax-loader.gif
-docs/html/_static/basic.css
-docs/html/_static/comment-bright.png
-docs/html/_static/comment-close.png
-docs/html/_static/comment.png
-docs/html/_static/doctools.js
-docs/html/_static/down-pressed.png
-docs/html/_static/down.png
-docs/html/_static/file.png
-docs/html/_static/jquery-3.1.0.js
-docs/html/_static/jquery.js
-docs/html/_static/minus.png
-docs/html/_static/plus.png
-docs/html/_static/pygments.css
-docs/html/_static/searchtools.js
-docs/html/_static/underscore-1.3.1.js
-docs/html/_static/underscore.js
-docs/html/_static/up-pressed.png
-docs/html/_static/up.png
-docs/html/_static/websupport.js
-docs/html/_static/css/badge_only.css
-docs/html/_static/css/theme.css
-docs/html/_static/fonts/Inconsolata-Bold.ttf
-docs/html/_static/fonts/Inconsolata-Regular.ttf
-docs/html/_static/fonts/Lato-Bold.ttf
-docs/html/_static/fonts/Lato-Regular.ttf
-docs/html/_static/fonts/RobotoSlab-Bold.ttf
-docs/html/_static/fonts/RobotoSlab-Regular.ttf
-docs/html/_static/fonts/fontawesome-webfont.eot
-docs/html/_static/fonts/fontawesome-webfont.svg
-docs/html/_static/fonts/fontawesome-webfont.ttf
-docs/html/_static/fonts/fontawesome-webfont.woff
-docs/html/_static/js/modernizr.min.js
-docs/html/_static/js/theme.js
-examples/hello-world/helloworld.yaml
-examples/hello-world/index.html
-examples/hello-world/images/aria-logo.png
-examples/hello-world/scripts/configure.sh
-examples/hello-world/scripts/start.sh
-examples/hello-world/scripts/stop.sh
-examples/tosca-simple-1.0/use-cases/non-normative-types.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-1/block-storage-1.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-2/block-storage-2.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-3/block-storage-3.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-4/block-storage-4.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-5/block-storage-5.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-6/block-storage-6.yaml
-examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
-examples/tosca-simple-1.0/use-cases/compute-1/compute-1.yaml
-examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/container-1/container-1.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/multi-tier-1.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/collectd.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
-examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/rsyslog.yaml
-examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/network-1/network-1.yaml
-examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
-examples/tosca-simple-1.0/use-cases/network-2/network-2.yaml
-examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
-examples/tosca-simple-1.0/use-cases/network-3/network-3.yaml
-examples/tosca-simple-1.0/use-cases/network-4/network-4.yaml
-examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/object-storage-1/object-storage-1.yaml
-examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
-examples/tosca-simple-1.0/use-cases/software-component-1/software-component-1.yaml
-examples/tosca-simple-1.0/use-cases/webserver-dbms-1/webserver-dbms-1.yaml
-examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
-examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
-extensions/aria_extension_tosca/__init__.py
-extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/artifacts.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/data.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/policies.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/relationships.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-1.0/tosca-simple-1.0.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/artifacts.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/capabilities.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/data.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/nodes.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/relationships.yaml
-extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/tosca-simple-nfv-1.0.yaml
-extensions/aria_extension_tosca/simple_nfv_v1_0/__init__.py
-extensions/aria_extension_tosca/simple_nfv_v1_0/presenter.py
-extensions/aria_extension_tosca/simple_v1_0/__init__.py
-extensions/aria_extension_tosca/simple_v1_0/assignments.py
-extensions/aria_extension_tosca/simple_v1_0/data_types.py
-extensions/aria_extension_tosca/simple_v1_0/definitions.py
-extensions/aria_extension_tosca/simple_v1_0/filters.py
-extensions/aria_extension_tosca/simple_v1_0/misc.py
-extensions/aria_extension_tosca/simple_v1_0/presenter.py
-extensions/aria_extension_tosca/simple_v1_0/templates.py
-extensions/aria_extension_tosca/simple_v1_0/types.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/copy.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
-extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
-extensions/aria_extension_tosca/simple_v1_0/presentation/__init__.py
-extensions/aria_extension_tosca/simple_v1_0/presentation/extensible.py
-extensions/aria_extension_tosca/simple_v1_0/presentation/field_getters.py
-extensions/aria_extension_tosca/simple_v1_0/presentation/field_validators.py
-extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/dependency_links.txt b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/entry_points.txt b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/entry_points.txt
deleted file mode 100644
index 392448e..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/entry_points.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-[console_scripts]
-aria = aria.cli.main:main
-
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/not-zip-safe b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/requires.txt b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/requires.txt
deleted file mode 100644
index 8686349..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/requires.txt
+++ /dev/null
@@ -1,32 +0,0 @@
-requests>=2.3.0, <2.14.0
-networkx>=1.9, <1.10
-retrying>=1.3.0, <1.4.0
-blinker>1.3, <1.5
-jsonpickle>0.9.0, <=0.9.4
-ruamel.yaml>=0.11.12, <0.12.0
-Jinja2>=2.8, <2.9
-shortuuid>=0.5, <0.6
-CacheControl[filecache]>=0.11.0, <0.13
-clint>=0.5.0, <0.6
-SQLAlchemy>=1.1.0, <1.2
-wagon==0.6.0
-bottle>=0.12.0, <0.13
-setuptools>=35.0.0, <36.0.0
-click>=6.0, < 7.0
-colorama>=0.3.7, <=0.3.9
-PrettyTable>=0.7,<0.8
-click_didyoumean==0.0.3
-backports.shutil_get_terminal_size==1.0.0
-logutils==0.3.4.1
-psutil>=5.2.2, < 6.0.0
-
-[:python_version < '2.7']
-importlib
-ordereddict
-total-ordering
-
-[ssh]
-Fabric>=1.13.0, <1.14
-
-[ssh:sys_platform=="win32"]
-pypiwin32==219
diff --git a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/top_level.txt b/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/top_level.txt
deleted file mode 100644
index 58fb564..0000000
--- a/apache-ariatosca-0.1.1/apache_ariatosca.egg-info/top_level.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-aria
-aria_extension_tosca
diff --git a/apache-ariatosca-0.1.1/aria/cli/ascii_art.py b/apache-ariatosca-0.1.1/aria/cli/ascii_art.py
deleted file mode 100644
index 8a8b79f..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/ascii_art.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf8 -*-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-ARIA_ASCII_ART = r"""
-     █████╗ ██████╗ ██╗ █████╗ 
-    ██╔══██╗██╔══██╗██║██╔══██╗
-    ███████║██████╔╝██║███████║
-    ██╔══██║██╔══██╗██║██╔══██║
-    ██║  ██║██║  ██║██║██║  ██║
-    ╚═╝  ╚═╝╚═╝  ╚═╝╚═╝╚═╝  ╚═╝"""
diff --git a/apache-ariatosca-0.1.1/aria/cli/color.py b/apache-ariatosca-0.1.1/aria/cli/color.py
deleted file mode 100644
index 03381ba..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/color.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Terminal colorization utilities.
-"""
-
-from StringIO import StringIO
-import re
-
-import colorama
-
-colorama.init()
-
-
-class StringStylizer(object):
-    def __init__(self, str_, color_spec=None):
-        self._str = str_
-        self._color_spec = color_spec
-
-    def __repr__(self):
-        if self._color_spec:
-            return '{schema}{str}{reset}'.format(
-                schema=self._color_spec, str=str(self._str), reset=Colors.Style.RESET_ALL)
-        return self._str
-
-    def __add__(self, other):
-        return str(self) + other
-
-    def __radd__(self, other):
-        return other + str(self)
-
-    def color(self, color_spec):
-        self._color_spec = color_spec
-
-    def replace(self, old, new, **kwargs):
-        self._str = self._str.replace(str(old), str(new), **kwargs)
-
-    def format(self, *args, **kwargs):
-        self._str = self._str.format(*args, **kwargs)
-
-    def highlight(self, pattern, schema):
-        if pattern is None:
-            return
-        for match in set(re.findall(re.compile(pattern), self._str)):
-            self.replace(match, schema + match + Colors.Style.RESET_ALL + self._color_spec)
-
-
-def _get_colors(color_type):
-    for name in dir(color_type):
-        if not name.startswith('_'):
-            yield (name.lower(), getattr(color_type, name))
-
-
-class Colors(object):
-    Fore = colorama.Fore
-    Back = colorama.Back
-    Style = colorama.Style
-
-    _colors = {
-        'fore': dict(_get_colors(Fore)),
-        'back': dict(_get_colors(Back)),
-        'style': dict(_get_colors(Style))
-    }
-
-
-class ColorSpec(object):
-    def __init__(self, fore=None, back=None, style=None):
-        """
-        It is possible to provide fore, back and style arguments. Each can be either
-        the color name in lower case letters, or the actual color object from colorama.
-        """
-        self._kwargs = dict(fore=fore, back=back, style=style)
-        self._str = StringIO()
-        for type_, colors in Colors._colors.items():
-            value = self._kwargs.get(type_, None)
-            # the former case handles a string color name; the latter handles a colorama object.
-            self._str.write(colors.get(value) or value)
-
-    def __str__(self):
-        return self._str.getvalue()
-
-    def __add__(self, other):
-        return str(self) + str(other)
-
-    def __radd__(self, other):
-        return str(other) + str(self)
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/executions.py b/apache-ariatosca-0.1.1/aria/cli/commands/executions.py
deleted file mode 100644
index ea70af5..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/executions.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``executions`` sub-commands.
-"""
-
-import os
-
-from .. import helptexts
-from .. import table
-from .. import utils
-from .. import logger as cli_logger
-from .. import execution_logging
-from ..core import aria
-from ...modeling.models import Execution
-from ...orchestrator.workflow_runner import WorkflowRunner
-from ...orchestrator.workflows.executor.dry import DryExecutor
-from ...utils import formatting
-from ...utils import threading
-
-EXECUTION_COLUMNS = ('id', 'workflow_name', 'status', 'service_name',
-                     'created_at', 'error')
-
-
-@aria.group(name='executions')
-@aria.options.verbose()
-def executions():
-    """
-    Manage executions
-    """
-    pass
-
-
-@executions.command(name='show',
-                    short_help='Show information for an execution')
-@aria.argument('execution-id')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def show(execution_id, model_storage, logger):
-    """
-    Show information for an execution
-
-    EXECUTION_ID is the unique ID of the execution.
-    """
-    logger.info('Showing execution {0}'.format(execution_id))
-    execution = model_storage.execution.get(execution_id)
-
-    table.print_data(EXECUTION_COLUMNS, execution, 'Execution:', col_max_width=50)
-
-    # print execution parameters
-    logger.info('Execution Inputs:')
-    if execution.inputs:
-        # TODO: check this section; it hasn't been tested
-        execution_inputs = [ei.to_dict() for ei in execution.inputs]
-        for input_name, input_value in formatting.decode_dict(
-                execution_inputs).iteritems():
-            logger.info('\t{0}: \t{1}'.format(input_name, input_value))
-    else:
-        logger.info('\tNo inputs')
-
-
-@executions.command(name='list',
-                    short_help='List executions')
-@aria.options.service_name(required=False)
-@aria.options.sort_by()
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(service_name,
-         sort_by,
-         descending,
-         model_storage,
-         logger):
-    """
-    List executions
-
-    If SERVICE_NAME is provided, list executions on that service. Otherwise, list executions on all
-    services.
-    """
-    if service_name:
-        logger.info('Listing executions for service {0}...'.format(
-            service_name))
-        service = model_storage.service.get_by_name(service_name)
-        filters = dict(service=service)
-    else:
-        logger.info('Listing all executions...')
-        filters = {}
-
-    executions_list = model_storage.execution.list(
-        filters=filters,
-        sort=utils.storage_sort_param(sort_by, descending)).items
-
-    table.print_data(EXECUTION_COLUMNS, executions_list, 'Executions:')
-
-
-@executions.command(name='start',
-                    short_help='Start a workflow on a service')
-@aria.argument('workflow-name')
-@aria.options.service_name(required=True)
-@aria.options.inputs(help=helptexts.EXECUTION_INPUTS)
-@aria.options.dry_execution
-@aria.options.task_max_attempts()
-@aria.options.task_retry_interval()
-@aria.options.mark_pattern()
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def start(workflow_name,
-          service_name,
-          inputs,
-          dry,
-          task_max_attempts,
-          task_retry_interval,
-          mark_pattern,
-          model_storage,
-          resource_storage,
-          plugin_manager,
-          logger):
-    """
-    Start a workflow on a service
-
-    SERVICE_NAME is the unique name of the service.
-
-    WORKFLOW_NAME is the unique name of the workflow within the service (e.g. "uninstall").
-    """
-    service = model_storage.service.get_by_name(service_name)
-    executor = DryExecutor() if dry else None  # use WorkflowRunner's default executor
-
-    workflow_runner = \
-        WorkflowRunner(
-            model_storage, resource_storage, plugin_manager,
-            service_id=service.id, workflow_name=workflow_name, inputs=inputs, executor=executor,
-            task_max_attempts=task_max_attempts, task_retry_interval=task_retry_interval
-        )
-    logger.info('Starting {0}execution. Press Ctrl+C to cancel'.format('dry ' if dry else ''))
-
-    _run_execution(workflow_runner, logger, model_storage, dry, mark_pattern)
-
-
-@executions.command(name='resume',
-                    short_help='Resume a stopped execution')
-@aria.argument('execution-id')
-@aria.options.inputs(help=helptexts.EXECUTION_INPUTS)
-@aria.options.dry_execution
-@aria.options.task_max_attempts()
-@aria.options.task_retry_interval()
-@aria.options.mark_pattern()
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def resume(execution_id,
-           dry,
-           task_max_attempts,
-           task_retry_interval,
-           mark_pattern,
-           model_storage,
-           resource_storage,
-           plugin_manager,
-           logger):
-    """
-    Resume a stopped execution
-
-    EXECUTION_ID is the unique ID of the execution.
-    """
-    executor = DryExecutor() if dry else None  # use WorkflowRunner's default executor
-
-    execution = model_storage.execution.get(execution_id)
-    if execution.status != execution.status.CANCELLED:
-        logger.info("Can't resume execution {execution.id} - "
-                    "execution is in status {execution.status}. "
-                    "Can only resume executions in status {valid_status}"
-                    .format(execution=execution, valid_status=execution.status.CANCELLED))
-        return
-
-    workflow_runner = \
-        WorkflowRunner(
-            model_storage, resource_storage, plugin_manager,
-            execution_id=execution_id, executor=executor,
-            task_max_attempts=task_max_attempts, task_retry_interval=task_retry_interval
-        )
-
-    logger.info('Resuming {0}execution. Press Ctrl+C to cancel'.format('dry ' if dry else ''))
-    _run_execution(workflow_runner, logger, model_storage, dry, mark_pattern)
-
-
-def _run_execution(workflow_runner, logger, model_storage, dry, mark_pattern):
-    execution_thread_name = '{0}_{1}'.format(workflow_runner.service.name,
-                                             workflow_runner.execution.workflow_name)
-    execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
-                                                 name=execution_thread_name)
-
-    execution_thread.start()
-
-    last_task_id = workflow_runner.execution.logs[-1].id if workflow_runner.execution.logs else 0
-    log_iterator = cli_logger.ModelLogIterator(model_storage,
-                                               workflow_runner.execution_id,
-                                               offset=last_task_id)
-    try:
-        while execution_thread.is_alive():
-            execution_logging.log_list(log_iterator, mark_pattern=mark_pattern)
-            execution_thread.join(1)
-
-    except KeyboardInterrupt:
-        _cancel_execution(workflow_runner, execution_thread, logger, log_iterator)
-
-    # Some logs may have been written before the execution terminated but not yet printed,
-    # so we need to drain the remaining logs.
-    execution_logging.log_list(log_iterator, mark_pattern=mark_pattern)
-
-    # raise any errors from the execution thread (note these are not workflow execution errors)
-    execution_thread.raise_error_if_exists()
-
-    execution = workflow_runner.execution
-    logger.info('Execution has ended with "{0}" status'.format(execution.status))
-    if execution.status == Execution.FAILED and execution.error:
-        logger.info('Execution error:{0}{1}'.format(os.linesep, execution.error))
-
-    if dry:
-        # remove traces of the dry execution (including tasks, logs, inputs..)
-        model_storage.execution.delete(execution)
-
-
-def _cancel_execution(workflow_runner, execution_thread, logger, log_iterator):
-    logger.info('Cancelling execution. Press Ctrl+C again to force-cancel.')
-    workflow_runner.cancel()
-    while execution_thread.is_alive():
-        try:
-            execution_logging.log_list(log_iterator)
-            execution_thread.join(1)
-        except KeyboardInterrupt:
-            pass
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/logs.py b/apache-ariatosca-0.1.1/aria/cli/commands/logs.py
deleted file mode 100644
index b751b97..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/logs.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``logs`` sub-commands.
-"""
-
-from .. import execution_logging
-from ..logger import ModelLogIterator
-from ..core import aria
-
-
-@aria.group(name='logs')
-@aria.options.verbose()
-def logs():
-    """
-    Manage logs of workflow executions
-    """
-    pass
-
-
-@logs.command(name='list',
-              short_help='List logs for an execution')
-@aria.argument('execution-id')
-@aria.options.verbose()
-@aria.options.mark_pattern()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(execution_id, mark_pattern, model_storage, logger):
-    """
-    List logs for an execution
-
-    EXECUTION_ID is the unique ID of the execution.
-    """
-    logger.info('Listing logs for execution id {0}'.format(execution_id))
-    log_iterator = ModelLogIterator(model_storage, execution_id)
-
-    any_logs = execution_logging.log_list(log_iterator, mark_pattern=mark_pattern)
-
-    if not any_logs:
-        logger.info('\tNo logs')
-
-
-@logs.command(name='delete',
-              short_help='Delete logs of an execution')
-@aria.argument('execution-id')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def delete(execution_id, model_storage, logger):
-    """
-    Delete logs of an execution
-
-    EXECUTION_ID is the unique ID of the execution.
-    """
-    logger.info('Deleting logs for execution id {0}'.format(execution_id))
-    logs_list = model_storage.log.list(filters=dict(execution_fk=execution_id))
-    for log in logs_list:
-        model_storage.log.delete(log)
-    logger.info('Deleted logs for execution id {0}'.format(execution_id))
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/node_templates.py b/apache-ariatosca-0.1.1/aria/cli/commands/node_templates.py
deleted file mode 100644
index ec160d2..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/node_templates.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``node-templates`` sub-commands.
-"""
-
-from .. import table
-from .. import utils
-from ..core import aria
-
-
-NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
-
-
-@aria.group(name='node-templates')
-@aria.options.verbose()
-def node_templates():
-    """
-    Manage stored service templates' node templates
-    """
-    pass
-
-
-@node_templates.command(name='show',
-                        short_help='Show information for a stored node template')
-@aria.argument('node-template-id')
-# @aria.options.service_template_name(required=True)
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def show(node_template_id, model_storage, logger):
-    """
-    Show information for a stored node template
-
-    NODE_TEMPLATE_ID is the unique node template ID.
-    """
-    logger.info('Showing node template {0}'.format(node_template_id))
-    node_template = model_storage.node_template.get(node_template_id)
-
-    table.print_data(NODE_TEMPLATE_COLUMNS, node_template, 'Node template:', col_max_width=50)
-
-    # print node template properties
-    logger.info('Node template properties:')
-    if node_template.properties:
-        logger.info(utils.get_parameter_templates_as_string(node_template.properties))
-    else:
-        logger.info('\tNo properties')
-
-    # print node IDs
-    nodes = node_template.nodes
-    logger.info('Nodes:')
-    if nodes:
-        for node in nodes:
-            logger.info('\t{0}'.format(node.name))
-    else:
-        logger.info('\tNo nodes')
-
-
-@node_templates.command(name='list',
-                        short_help='List stored node templates')
-@aria.options.service_template_name()
-@aria.options.sort_by('service_template_name')
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(service_template_name, sort_by, descending, model_storage, logger):
-    """
-    List stored node templates
-
-    If SERVICE_TEMPLATE_NAME is provided, list node templates for that stored service template.
-    Otherwise, list node templates for all service templates.
-    """
-    if service_template_name:
-        logger.info('Listing node templates for service template {0}...'.format(
-            service_template_name))
-        service_template = model_storage.service_template.get_by_name(service_template_name)
-        filters = dict(service_template=service_template)
-    else:
-        logger.info('Listing all node templates...')
-        filters = {}
-
-    node_templates_list = model_storage.node_template.list(
-        filters=filters,
-        sort=utils.storage_sort_param(sort_by, descending))
-
-    table.print_data(NODE_TEMPLATE_COLUMNS, node_templates_list, 'Node templates:')
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/nodes.py b/apache-ariatosca-0.1.1/aria/cli/commands/nodes.py
deleted file mode 100644
index 30f1dd4..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/nodes.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``nodes`` sub-commands.
-"""
-
-from .. import table
-from .. import utils
-from ..core import aria
-
-
-NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
-
-
-@aria.group(name='nodes')
-@aria.options.verbose()
-def nodes():
-    """
-    Manage services' nodes
-    """
-    pass
-
-
-@nodes.command(name='show',
-               short_help='Show information for a node')
-@aria.argument('node_id')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def show(node_id, model_storage, logger):
-    """
-    Show information for a node
-
-    NODE_ID is the unique node ID.
-    """
-    logger.info('Showing node {0}'.format(node_id))
-    node = model_storage.node.get(node_id)
-
-    table.print_data(NODE_COLUMNS, node, 'Node:', col_max_width=50)
-
-    # print node attributes
-    logger.info('Node attributes:')
-    if node.attributes:
-        for param_name, param in node.attributes.iteritems():
-            logger.info('\t{0}: {1}'.format(param_name, param.value))
-    else:
-        logger.info('\tNo attributes')
-
-
-@nodes.command(name='list',
-               short_help='List nodes')
-@aria.options.service_name(required=False)
-@aria.options.sort_by('service_name')
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(service_name,
-         sort_by,
-         descending,
-         model_storage,
-         logger):
-    """
-    List nodes
-
-    If SERVICE_NAME is provided, list nodes for that service. Otherwise, list nodes for all
-    services.
-    """
-    if service_name:
-        logger.info('Listing nodes for service {0}...'.format(service_name))
-        service = model_storage.service.get_by_name(service_name)
-        filters = dict(service=service)
-    else:
-        logger.info('Listing all nodes...')
-        filters = {}
-
-    nodes_list = model_storage.node.list(
-        filters=filters,
-        sort=utils.storage_sort_param(sort_by, descending))
-
-    table.print_data(NODE_COLUMNS, nodes_list, 'Nodes:')
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/plugins.py b/apache-ariatosca-0.1.1/aria/cli/commands/plugins.py
deleted file mode 100644
index b5d68a2..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/plugins.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``plugins`` sub-commands.
-"""
-
-from .. import table
-from .. import utils
-from ..core import aria
-
-
-PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'supported_platform',
-                  'distribution', 'distribution_release', 'uploaded_at']
-
-
-@aria.group(name='plugins')
-@aria.options.verbose()
-def plugins():
-    """
-    Manage plugins
-    """
-    pass
-
-
-@plugins.command(name='validate',
-                 short_help='Validate a plugin archive')
-@aria.argument('plugin-path')
-@aria.options.verbose()
-@aria.pass_plugin_manager
-@aria.pass_logger
-def validate(plugin_path, plugin_manager, logger):
-    """
-    Validate a plugin archive
-
-    A valid plugin is a wagon (`http://github.com/cloudify-cosmo/wagon`) in the ZIP format (suffix
-    may also be `.wgn`).
-
-    PLUGIN_PATH is the path to the wagon archive.
-    """
-    logger.info('Validating plugin {0}...'.format(plugin_path))
-    plugin_manager.validate_plugin(plugin_path)
-    logger.info('Plugin validated successfully')
-
-
-@plugins.command(name='install',
-                 short_help='Install a plugin')
-@aria.argument('plugin-path')
-@aria.options.verbose()
-@aria.pass_context
-@aria.pass_plugin_manager
-@aria.pass_logger
-def install(ctx, plugin_path, plugin_manager, logger):
-    """
-    Install a plugin
-
-    A valid plugin is a wagon (`http://github.com/cloudify-cosmo/wagon`) in the ZIP format (suffix
-    may also be `.wgn`).
-
-    PLUGIN_PATH is the path to the wagon archive.
-    """
-    ctx.invoke(validate, plugin_path=plugin_path)
-    logger.info('Installing plugin {0}...'.format(plugin_path))
-    plugin = plugin_manager.install(plugin_path)
-    logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
-
-
-@plugins.command(name='show',
-                 short_help='Show information for an installed plugin')
-@aria.argument('plugin-id')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def show(plugin_id, model_storage, logger):
-    """
-    Show information for an installed plugin
-
-    PLUGIN_ID is the unique installed plugin ID in this ARIA instance.
-    """
-    logger.info('Showing plugin {0}...'.format(plugin_id))
-    plugin = model_storage.plugin.get(plugin_id)
-    table.print_data(PLUGIN_COLUMNS, plugin, 'Plugin:')
-
-
-@plugins.command(name='list',
-                 short_help='List all installed plugins')
-@aria.options.sort_by('uploaded_at')
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(sort_by, descending, model_storage, logger):
-    """
-    List all installed plugins
-    """
-    logger.info('Listing all plugins...')
-    plugins_list = model_storage.plugin.list(
-        sort=utils.storage_sort_param(sort_by, descending)).items
-    table.print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/reset.py b/apache-ariatosca-0.1.1/aria/cli/commands/reset.py
deleted file mode 100644
index c82c707..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/reset.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``reset`` command.
-"""
-
-from .. import helptexts
-from ..core import aria
-from ..env import env
-from ..exceptions import AriaCliError
-
-
-@aria.command(name='reset',
-              short_help="Reset ARIA working directory")
-@aria.options.force(help=helptexts.FORCE_RESET)
-@aria.options.reset_config
-@aria.pass_logger
-@aria.options.verbose()
-def reset(force, reset_config, logger):
-    """
-    Reset ARIA working directory
-
-    Deletes installed plugins, service templates, services, executions, and logs. The user
-    configuration will remain intact unless the `--reset_config` flag has been set as well, in
-    which case the entire ARIA working directory shall be removed.
-    """
-    if not force:
-        raise AriaCliError("To reset ARIA's working directory, you must also provide the force"
-                           " flag ('-f'/'--force').")
-
-    env.reset(reset_config=reset_config)
-    logger.info("ARIA's working directory has been reset")
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/service_templates.py b/apache-ariatosca-0.1.1/aria/cli/commands/service_templates.py
deleted file mode 100644
index f567aa8..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/service_templates.py
+++ /dev/null
@@ -1,245 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``service-templates`` sub-commands.
-"""
-
-import os
-
-from .. import csar
-from .. import service_template_utils
-from .. import table
-from .. import utils
-from ..core import aria
-from ...core import Core
-from ...storage import exceptions as storage_exceptions
-from ...parser import consumption
-from ...utils import (formatting, collections, console)
-
-
-DESCRIPTION_FIELD_LENGTH_LIMIT = 20
-SERVICE_TEMPLATE_COLUMNS = \
-    ('id', 'name', 'description', 'main_file_name', 'created_at', 'updated_at')
-
-
-@aria.group(name='service-templates')
-@aria.options.verbose()
-def service_templates():
-    """
-    Manage service templates
-    """
-    pass
-
-
-@service_templates.command(name='show',
-                           short_help='Show information for a stored service template')
-@aria.argument('service-template-name')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.options.service_template_mode_full
-@aria.options.mode_types
-@aria.options.format_json
-@aria.options.format_yaml
-@aria.pass_logger
-def show(service_template_name, model_storage, mode_full, mode_types, format_json, format_yaml,
-         logger):
-    """
-    Show information for a stored service template
-
-    SERVICE_TEMPLATE_NAME is the unique name of the stored service template.
-    """
-    service_template = model_storage.service_template.get_by_name(service_template_name)
-
-    if format_json or format_yaml:
-        mode_full = True
-
-    if mode_full:
-        consumption.ConsumptionContext()
-        if format_json:
-            console.puts(formatting.json_dumps(collections.prune(service_template.as_raw)))
-        elif format_yaml:
-            console.puts(formatting.yaml_dumps(collections.prune(service_template.as_raw)))
-        else:
-            service_template.dump()
-    elif mode_types:
-        consumption.ConsumptionContext()
-        service_template.dump_types()
-    else:
-        logger.info('Showing service template {0}...'.format(service_template_name))
-        service_template_dict = service_template.to_dict()
-        service_template_dict['#services'] = len(service_template.services)
-        columns = SERVICE_TEMPLATE_COLUMNS + ('#services',)
-        column_formatters = \
-            dict(description=table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT))
-        table.print_data(columns, service_template_dict, 'Service-template:',
-                         column_formatters=column_formatters, col_max_width=50)
-
-        if service_template_dict['description'] is not None:
-            logger.info('Description:')
-            logger.info('{0}{1}'.format(service_template_dict['description'].encode('UTF-8') or '',
-                                        os.linesep))
-
-        if service_template.services:
-            logger.info('Existing services:')
-            for service_name in service_template.services:
-                logger.info('\t{0}'.format(service_name))
-
-
-@service_templates.command(name='list',
-                           short_help='List all stored service templates')
-@aria.options.sort_by()
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(sort_by, descending, model_storage, logger):
-    """
-    List all stored service templates
-    """
-
-    logger.info('Listing all service templates...')
-    service_templates_list = model_storage.service_template.list(
-        sort=utils.storage_sort_param(sort_by, descending))
-
-    column_formatters = \
-        dict(description=table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT))
-    table.print_data(SERVICE_TEMPLATE_COLUMNS, service_templates_list, 'Service templates:',
-                     column_formatters=column_formatters)
-
-
-@service_templates.command(name='store',
-                           short_help='Parse and store a service template archive')
-@aria.argument('service-template-path')
-@aria.argument('service-template-name')
-@aria.options.service_template_filename
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def store(service_template_path, service_template_name, service_template_filename,
-          model_storage, resource_storage, plugin_manager, logger):
-    """
-    Parse and store a service template archive
-
-    SERVICE_TEMPLATE_PATH is the path to the service template archive.
-
-    SERVICE_TEMPLATE_NAME is the unique name to give to the service template in storage.
-    """
-    logger.info('Storing service template {0}...'.format(service_template_name))
-
-    service_template_path = service_template_utils.get(service_template_path,
-                                                       service_template_filename)
-    core = Core(model_storage, resource_storage, plugin_manager)
-    try:
-        core.create_service_template(service_template_path,
-                                     os.path.dirname(service_template_path),
-                                     service_template_name)
-    except storage_exceptions.StorageError as e:
-        utils.check_overriding_storage_exceptions(e, 'service template', service_template_name)
-        raise
-    logger.info('Service template {0} stored'.format(service_template_name))
-
-
-@service_templates.command(name='delete',
-                           short_help='Delete a stored service template')
-@aria.argument('service-template-name')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def delete(service_template_name, model_storage, resource_storage, plugin_manager, logger):
-    """
-    Delete a stored service template
-
-    SERVICE_TEMPLATE_NAME is the unique name of the stored service template.
-    """
-    logger.info('Deleting service template {0}...'.format(service_template_name))
-    service_template = model_storage.service_template.get_by_name(service_template_name)
-    core = Core(model_storage, resource_storage, plugin_manager)
-    core.delete_service_template(service_template.id)
-    logger.info('Service template {0} deleted'.format(service_template_name))
-
-
-@service_templates.command(name='inputs',
-                           short_help='Show stored service template inputs')
-@aria.argument('service-template-name')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def inputs(service_template_name, model_storage, logger):
-    """
-    Show stored service template inputs
-
-    SERVICE_TEMPLATE_NAME is the unique name of the stored service template.
-    """
-    logger.info('Showing inputs for service template {0}...'.format(service_template_name))
-    print_service_template_inputs(model_storage, service_template_name, logger)
-
-
-@service_templates.command(name='validate',
-                           short_help='Validate a service template archive')
-@aria.argument('service-template')
-@aria.options.service_template_filename
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def validate(service_template, service_template_filename,
-             model_storage, resource_storage, plugin_manager, logger):
-    """
-    Validate a service template archive
-
-    SERVICE_TEMPLATE is the path to the service template archive.
-    """
-    logger.info('Validating service template: {0}'.format(service_template))
-    service_template_path = service_template_utils.get(service_template, service_template_filename)
-    core = Core(model_storage, resource_storage, plugin_manager)
-    core.validate_service_template(service_template_path)
-    logger.info('Service template validated successfully')
-
-
-@service_templates.command(name='create-archive',
-                           short_help='Create a CSAR archive from a service template source')
-@aria.argument('service-template-path')
-@aria.argument('destination')
-@aria.options.verbose()
-@aria.pass_logger
-def create_archive(service_template_path, destination, logger):
-    """
-    Create a CSAR archive from a service template source
-
-    SERVICE_TEMPLATE_PATH is the path to the service template source.
-
-    DESTINATION is the path to the created CSAR archive.
-    """
-    logger.info('Creating a CSAR archive')
-    if not destination.endswith(csar.CSAR_FILE_EXTENSION):
-        destination += csar.CSAR_FILE_EXTENSION
-    csar.write(service_template_path, destination, logger)
-    logger.info('CSAR archive created at {0}'.format(destination))
-
-
-def print_service_template_inputs(model_storage, service_template_name, logger):
-    service_template = model_storage.service_template.get_by_name(service_template_name)
-
-    logger.info('Service template inputs:')
-    if service_template.inputs:
-        logger.info(utils.get_parameter_templates_as_string(service_template.inputs))
-    else:
-        logger.info('\tNo inputs')
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/services.py b/apache-ariatosca-0.1.1/aria/cli/commands/services.py
deleted file mode 100644
index a99f5b3..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/services.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``services`` sub-commands.
-"""
-
-import os
-from StringIO import StringIO
-
-from . import service_templates
-from .. import helptexts
-from .. import table
-from .. import utils
-from ..core import aria
-from ...core import Core
-from ...modeling import exceptions as modeling_exceptions
-from ...storage import exceptions as storage_exceptions
-from ...parser import consumption
-from ...utils import (formatting, collections, console)
-
-
-DESCRIPTION_FIELD_LENGTH_LIMIT = 20
-SERVICE_COLUMNS = ('id', 'name', 'description', 'service_template_name', 'created_at', 'updated_at')
-
-
-@aria.group(name='services')
-@aria.options.verbose()
-def services():
-    """
-    Manage services
-    """
-    pass
-
-
-@services.command(name='show',
-                  short_help='Show information for a service')
-@aria.argument('service-name')
-@aria.options.verbose()
-@aria.options.service_mode_full
-@aria.options.mode_graph
-@aria.options.format_json
-@aria.options.format_yaml
-@aria.pass_model_storage
-@aria.pass_logger
-def show(service_name, model_storage, mode_full, mode_graph, format_json, format_yaml, logger):
-    """
-    Show information for a service
-
-    SERVICE_NAME is the unique name of the service.
-    """
-    service = model_storage.service.get_by_name(service_name)
-
-    if format_json or format_yaml:
-        mode_full = True
-
-    if mode_full:
-        consumption.ConsumptionContext()
-        if format_json:
-            console.puts(formatting.json_dumps(collections.prune(service.as_raw)))
-        elif format_yaml:
-            console.puts(formatting.yaml_dumps(collections.prune(service.as_raw)))
-        else:
-            service.dump()
-    elif mode_graph:
-        consumption.ConsumptionContext()
-        service.dump_graph()
-    else:
-        logger.info('Showing service {0}...'.format(service_name))
-        service_dict = service.to_dict()
-        columns = SERVICE_COLUMNS
-        column_formatters = \
-            dict(description=table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT))
-        table.print_data(columns, service_dict, 'Service:',
-                         column_formatters=column_formatters, col_max_width=50)
-
-        if service_dict['description'] is not None:
-            logger.info('Description:')
-            logger.info('{0}{1}'.format(service_dict['description'].encode('UTF-8') or '',
-                                        os.linesep))
-
-
-@services.command(name='list', short_help='List services')
-@aria.options.service_template_name()
-@aria.options.sort_by()
-@aria.options.descending
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(service_template_name,
-         sort_by,
-         descending,
-         model_storage,
-         logger):
-    """
-    List services
-
-    If `--service-template-name` is provided, list services based on that service template.
-    Otherwise, list all services.
-    """
-    if service_template_name:
-        logger.info('Listing services for service template {0}...'.format(
-            service_template_name))
-        service_template = model_storage.service_template.get_by_name(service_template_name)
-        filters = dict(service_template=service_template)
-    else:
-        logger.info('Listing all services...')
-        filters = {}
-
-    services_list = model_storage.service.list(
-        sort=utils.storage_sort_param(sort_by=sort_by, descending=descending),
-        filters=filters)
-    table.print_data(SERVICE_COLUMNS, services_list, 'Services:')
-
-
-@services.command(name='create',
-                  short_help='Create a service')
-@aria.argument('service-name', required=False)
-@aria.options.service_template_name(required=True)
-@aria.options.inputs(help=helptexts.SERVICE_INPUTS)
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def create(service_template_name,
-           service_name,
-           inputs,  # pylint: disable=redefined-outer-name
-           model_storage,
-           resource_storage,
-           plugin_manager,
-           logger):
-    """
-    Create a service
-
-    SERVICE_NAME is the unique name to give to the service.
-    """
-    logger.info('Creating new service from service template {0}...'.format(
-        service_template_name))
-    core = Core(model_storage, resource_storage, plugin_manager)
-    service_template = model_storage.service_template.get_by_name(service_template_name)
-
-    try:
-        service = core.create_service(service_template.id, inputs, service_name)
-    except storage_exceptions.StorageError as e:
-        utils.check_overriding_storage_exceptions(e, 'service', service_name)
-        raise
-    except modeling_exceptions.ParameterException:
-        service_templates.print_service_template_inputs(model_storage, service_template_name,
-                                                        logger)
-        raise
-    logger.info("Service created. The service's name is {0}".format(service.name))
-
-
-@services.command(name='delete',
-                  short_help='Delete a service')
-@aria.argument('service-name')
-@aria.options.force(help=helptexts.IGNORE_AVAILABLE_NODES)
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_resource_storage
-@aria.pass_plugin_manager
-@aria.pass_logger
-def delete(service_name, force, model_storage, resource_storage, plugin_manager, logger):
-    """
-    Delete a service
-
-    SERVICE_NAME is the unique name of the service.
-    """
-    logger.info('Deleting service {0}...'.format(service_name))
-    service = model_storage.service.get_by_name(service_name)
-    core = Core(model_storage, resource_storage, plugin_manager)
-    core.delete_service(service.id, force=force)
-    logger.info('Service {0} deleted'.format(service_name))
-
-
-@services.command(name='outputs',
-                  short_help='Show service outputs')
-@aria.argument('service-name')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def outputs(service_name, model_storage, logger):
-    """
-    Show service outputs
-
-    SERVICE_NAME is the unique name of the service.
-    """
-    logger.info('Showing outputs for service {0}...'.format(service_name))
-    service = model_storage.service.get_by_name(service_name)
-
-    if service.outputs:
-        outputs_string = StringIO()
-        for output_name, output in service.outputs.iteritems():
-            outputs_string.write(' - "{0}":{1}'.format(output_name, os.linesep))
-            outputs_string.write('     Description: {0}{1}'.format(output.description, os.linesep))
-            outputs_string.write('     Value: {0}{1}'.format(output.value, os.linesep))
-        logger.info(outputs_string.getvalue())
-    else:
-        logger.info('\tNo outputs')
-
-
-@services.command(name='inputs',
-                  short_help='Show service inputs')
-@aria.argument('service-name')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def inputs(service_name, model_storage, logger):
-    """
-    Show service inputs
-
-    SERVICE_NAME is the unique name of the service.
-    """
-    logger.info('Showing inputs for service {0}...'.format(service_name))
-    service = model_storage.service.get_by_name(service_name)
-
-    if service.inputs:
-        inputs_string = StringIO()
-        for input_name, input_ in service.inputs.iteritems():
-            inputs_string.write(' - "{0}":{1}'.format(input_name, os.linesep))
-            inputs_string.write('     Description: {0}{1}'.format(input_.description, os.linesep))
-            inputs_string.write('     Value: {0}{1}'.format(input_.value, os.linesep))
-        logger.info(inputs_string.getvalue())
-    else:
-        logger.info('\tNo inputs')
diff --git a/apache-ariatosca-0.1.1/aria/cli/commands/workflows.py b/apache-ariatosca-0.1.1/aria/cli/commands/workflows.py
deleted file mode 100644
index 03cf00e..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/commands/workflows.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI ``workflows`` sub-commands.
-"""
-
-from .. import table
-from ..core import aria
-from ..exceptions import AriaCliError
-
-WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
-
-
-@aria.group(name='workflows')
-def workflows():
-    """
-    Manage service workflows
-    """
-    pass
-
-
-@workflows.command(name='show',
-                   short_help='Show information for a service workflow')
-@aria.argument('workflow-name')
-@aria.options.service_name(required=True)
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def show(workflow_name, service_name, model_storage, logger):
-    """
-    Show information for a service workflow
-
-    SERVICE_NAME is the unique name of the service.
-
-    WORKFLOW_NAME is the unique name of the workflow within the service (e.g. "uninstall").
-    """
-    logger.info('Retrieving workflow {0} for service {1}'.format(
-        workflow_name, service_name))
-    service = model_storage.service.get_by_name(service_name)
-    workflow = next((wf for wf in service.workflows.values() if
-                     wf.name == workflow_name), None)
-    if not workflow:
-        raise AriaCliError(
-            'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
-
-    defaults = {
-        'service_template_name': service.service_template_name,
-        'service_name': service.name
-    }
-    table.print_data(WORKFLOW_COLUMNS, workflow, 'Workflows:', defaults=defaults)
-
-    # print workflow inputs
-    required_inputs = dict()
-    optional_inputs = dict()
-    for input_name, input in workflow.inputs.iteritems():
-        inputs_group = optional_inputs if input.value is not None else required_inputs
-        inputs_group[input_name] = input
-
-    logger.info('Workflow Inputs:')
-    logger.info('\tMandatory Inputs:')
-    for input_name, input in required_inputs.iteritems():
-        if input.description is not None:
-            logger.info('\t\t{0}\t({1})'.format(input_name,
-                                                input.description))
-        else:
-            logger.info('\t\t{0}'.format(input_name))
-
-    logger.info('\tOptional Inputs:')
-    for input_name, input in optional_inputs.iteritems():
-        if input.description is not None:
-            logger.info('\t\t{0}: \t{1}\t({2})'.format(
-                input_name, input.value, input.description))
-        else:
-            logger.info('\t\t{0}: \t{1}'.format(input_name,
-                                                input.value))
-
-
-@workflows.command(name='list',
-                   short_help='List service workflows')
-@aria.options.service_name(required=True)
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def list(service_name, model_storage, logger):
-    """
-    List service workflows
-
-    SERVICE_NAME is the unique name of the service.
-    """
-    logger.info('Listing workflows for service {0}...'.format(service_name))
-    service = model_storage.service.get_by_name(service_name)
-    workflows_list = sorted(service.workflows.values(), key=lambda w: w.name)
-
-    defaults = {
-        'service_template_name': service.service_template_name,
-        'service_name': service.name
-    }
-    table.print_data(WORKFLOW_COLUMNS, workflows_list, 'Workflows:', defaults=defaults)
diff --git a/apache-ariatosca-0.1.1/aria/cli/config/config.py b/apache-ariatosca-0.1.1/aria/cli/config/config.py
deleted file mode 100644
index bbece80..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/config/config.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI configuration mechanism.
-"""
-
-import os
-import pkg_resources
-from ruamel import yaml
-
-from jinja2.environment import Template
-
-
-CONFIG_FILE_NAME = 'config.yaml'
-
-
-class CliConfig(object):
-
-    def __init__(self, config_path):
-        with open(config_path) as f:
-            self._config = yaml.safe_load(f.read())
-
-    @classmethod
-    def create_config(cls, workdir):
-        config_path = os.path.join(workdir, CONFIG_FILE_NAME)
-        if not os.path.isfile(config_path):
-            config_template = pkg_resources.resource_string(
-                __package__,
-                'config_template.yaml')
-
-            default_values = {
-                'log_path': os.path.join(workdir, 'cli.log'),
-                'enable_colors': True
-            }
-
-            template = Template(config_template)
-            rendered = template.render(**default_values)
-            with open(config_path, 'w') as f:
-                f.write(rendered)
-                f.write(os.linesep)
-
-        return cls(config_path)
-
-    @property
-    def logging(self):
-        return self.Logging(self._config.get('logging'))
-
-    class Logging(object):
-
-        def __init__(self, logging):
-            self._logging = logging or {}
-
-        @property
-        def filename(self):
-            return self._logging.get('filename')
-
-        @property
-        def loggers(self):
-            return self._logging.get('loggers', {})
-
-        @property
-        def execution(self):
-            return self.Execution(self._logging.get('execution'))
-
-        class Execution(object):
-
-            def __init__(self, execution_logging):
-                self._execution_logging = execution_logging
-
-            @property
-            def colors_enabled(self):
-                return self.colors.get('enabled', False)
-
-            @property
-            def colors(self):
-                return self._execution_logging.get('colors', {})
-
-            @property
-            def formats(self):
-                return self._execution_logging.get('formats', {})
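
On first run, ``create_config`` above renders the packaged ``config_template.yaml`` with Jinja2 and writes the result into the working directory. A rough, self-contained sketch of that step (the template text below is a simplified stand-in, not the real template):

    import os

    from jinja2 import Template
    from ruamel import yaml

    TEMPLATE = (
        'logging:\n'
        '  filename: {{ log_path }}\n'
        '  execution:\n'
        '    colors:\n'
        '      enabled: {{ enable_colors }}\n'
    )

    defaults = {'log_path': os.path.join('/tmp', 'cli.log'), 'enable_colors': True}
    rendered = Template(TEMPLATE).render(**defaults)

    config = yaml.safe_load(rendered)
    print(config['logging']['filename'])              # /tmp/cli.log
    print(config['logging']['execution']['colors'])   # {'enabled': True}
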
diff --git a/apache-ariatosca-0.1.1/aria/cli/config/config_template.yaml b/apache-ariatosca-0.1.1/aria/cli/config/config_template.yaml
deleted file mode 100644
index 94fcac3..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/config/config_template.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-
-logging:
-
-  # path to a file where cli logs will be saved.
-  filename: {{ log_path }}
-
-  # configuring level per logger
-  loggers:
-
-    # Main logger of the CLI; provides basic descriptions of executed operations.
-    aria.cli.main: info
-
-  execution:
-    formats:
-      # Format per verbosity level: 0 = not verbose, 3 = most verbose
-      0: '{message}'
-      1: '{timestamp:%H:%M:%S} | {level[0]} | {message}'
-      2: '{timestamp:%H:%M:%S} | {level[0]} | {implementation} | {message}'
-      3: '{timestamp:%H:%M:%S} | {level[0]} | {implementation} | {inputs} | {message}'
-
-    colors:
-      enabled: true
-
-      level:
-        default: {'fore': 'lightmagenta_ex'}
-        error: {'fore': 'red', 'style': 'bright'}
-      timestamp:
-        default: {'fore': 'lightmagenta_ex'}
-        error: {'fore': 'red', 'style': 'bright'}
-      message:
-        default: {'fore': 'lightblue_ex'}
-        error: {'fore': 'red', 'style': 'bright'}
-      implementation:
-        default: {'fore': 'lightblack_ex'}
-        error: {'fore': 'red', 'style': 'bright'}
-      inputs:
-        default: {'fore': 'blue'}
-        error: {'fore': 'red', 'style': 'bright'}
-      traceback:
-        default: {'fore': 'red'}
-
-      marker: 'lightyellow_ex'
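
The ``formats`` entries above are plain ``str.format`` templates that the CLI fills in per log record. With made-up values, the verbosity-1 format expands like this (``{level[0]}`` picks the first character of the level name, and ``{timestamp:%H:%M:%S}`` applies datetime formatting):

    from datetime import datetime

    fmt = '{timestamp:%H:%M:%S} | {level[0]} | {message}'
    line = fmt.format(timestamp=datetime(2017, 7, 1, 12, 30, 5),
                      level='info',
                      message="Starting 'install' workflow execution")
    print(line)  # 12:30:05 | i | Starting 'install' workflow execution
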
diff --git a/apache-ariatosca-0.1.1/aria/cli/core/aria.py b/apache-ariatosca-0.1.1/aria/cli/core/aria.py
deleted file mode 100644
index 515c06a..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/core/aria.py
+++ /dev/null
@@ -1,501 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Enhancements and ARIA-specific conveniences for `Click <http://click.pocoo.org>`__.
-"""
-
-import os
-import sys
-import difflib
-import traceback
-import inspect
-from functools import wraps
-
-import click
-
-from ..env import (
-    env,
-    logger
-)
-from .. import defaults
-from .. import helptexts
-from ..ascii_art import ARIA_ASCII_ART
-from ..inputs import inputs_to_dict
-from ... import __version__
-from ...utils.exceptions import get_exception_as_string
-
-
-CLICK_CONTEXT_SETTINGS = dict(
-    help_option_names=['-h', '--help'],
-    token_normalize_func=lambda param: param.lower())
-
-
-class MutuallyExclusiveOption(click.Option):
-    def __init__(self, *args, **kwargs):
-        self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', tuple()))
-        self.mutuality_description = kwargs.pop('mutuality_description',
-                                                ', '.join(self.mutually_exclusive))
-        self.mutuality_error = kwargs.pop('mutuality_error',
-                                          helptexts.DEFAULT_MUTUALITY_ERROR_MESSAGE)
-        if self.mutually_exclusive:
-            help = kwargs.get('help', '')
-            kwargs['help'] = '{0}. {1}'.format(help, self._message)
-        super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
-
-    def handle_parse_result(self, ctx, opts, args):
-        if (self.name in opts) and self.mutually_exclusive.intersection(opts):
-            raise click.UsageError('Illegal usage: {0}'.format(self._message))
-        return super(MutuallyExclusiveOption, self).handle_parse_result(ctx, opts, args)
-
-    @property
-    def _message(self):
-        return '{0} be used together with {1} ({2}).'.format(
-            '{0} cannot'.format(', '.join(self.opts)) if hasattr(self, 'opts') else 'Cannot',
-            self.mutuality_description,
-            self.mutuality_error)
-
-
-def mutually_exclusive_option(*param_decls, **attrs):
-    """
-    Decorator for mutually exclusive options.
-
-    This decorator works similarly to `click.option`, but supports an extra ``mutually_exclusive``
-    argument, which is a list of argument names with which the option is mutually exclusive.
-
-    You can optionally also supply ``mutuality_description`` and ``mutuality_error`` to override the
-    default messages.
-
-    NOTE: All mutually exclusive options must use this. It's not enough to use it in just one of the
-    options.
-    """
-
-    # NOTE: This code is copied and slightly modified from click.decorators.option and
-    # click.decorators._param_memo. Unfortunately, using click's ``cls`` parameter support does not
-    # work as is with extra decorator arguments.
-
-    def decorator(func):
-        if 'help' in attrs:
-            attrs['help'] = inspect.cleandoc(attrs['help'])
-        param = MutuallyExclusiveOption(param_decls, **attrs)
-        if not hasattr(func, '__click_params__'):
-            func.__click_params__ = []
-        func.__click_params__.append(param)
-        return func
-    return decorator
-
-
-def show_version(ctx, param, value):
-    if not value:
-        return
-
-    logger.info('{0} v{1}'.format(ARIA_ASCII_ART, __version__))
-    ctx.exit()
-
-
-def inputs_callback(ctx, param, value):
-    """
-    Allow inputs provided to a command to be passed in as a processed dictionary, instead of
-    having to call ``inputs_to_dict`` inside the command.
-
-    ``@aria.options.inputs`` already calls this callback so that every time you use the option it
-    returns the inputs as a dictionary.
-    """
-    if not value:
-        return {}
-
-    return inputs_to_dict(value)
-
-
-def set_verbosity_level(ctx, param, value):
-    if not value:
-        return
-
-    env.logging.verbosity_level = value
-
-
-def set_cli_except_hook():
-    def recommend(possible_solutions):
-        logger.info('Possible solutions:')
-        for solution in possible_solutions:
-            logger.info('  - {0}'.format(solution))
-
-    def new_excepthook(tpe, value, trace):
-        if env.logging.is_high_verbose_level():
-            # log error including traceback
-            logger.error(get_exception_as_string(tpe, value, trace))
-        else:
-            # write the full error to the log file
-            with open(env.logging.log_file, 'a') as log_file:
-                traceback.print_exception(
-                    etype=tpe,
-                    value=value,
-                    tb=trace,
-                    file=log_file)
-            # print only the error message
-            print value
-
-        if hasattr(value, 'possible_solutions'):
-            recommend(getattr(value, 'possible_solutions'))
-
-    sys.excepthook = new_excepthook
-
-
-def pass_logger(func):
-    """
-    Simply passes the logger to a command.
-    """
-    # Wraps here makes sure the original docstring propagates to click
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return func(logger=logger, *args, **kwargs)
-
-    return wrapper
-
-
-def pass_plugin_manager(func):
-    """
-    Simply passes the plugin manager to a command.
-    """
-    # Wraps here makes sure the original docstring propagates to click
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return func(plugin_manager=env.plugin_manager, *args, **kwargs)
-
-    return wrapper
-
-
-def pass_model_storage(func):
-    """
-    Simply passes the model storage to a command.
-    """
-    # Wraps here makes sure the original docstring propagates to click
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return func(model_storage=env.model_storage, *args, **kwargs)
-
-    return wrapper
-
-
-def pass_resource_storage(func):
-    """
-    Simply passes the resource storage to a command.
-    """
-    # Wraps here makes sure the original docstring propagates to click
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return func(resource_storage=env.resource_storage, *args, **kwargs)
-
-    return wrapper
-
-
-def pass_context(func):
-    """
-    Make click context ARIA specific.
-
-    This exists purely for aesthetic reasons; otherwise some decorators would be called
-    ``@click.something`` instead of ``@aria.something``.
-    """
-    return click.pass_context(func)
-
-
-class AliasedGroup(click.Group):
-    def __init__(self, *args, **kwargs):
-        self.max_suggestions = kwargs.pop("max_suggestions", 3)
-        self.cutoff = kwargs.pop("cutoff", 0.5)
-        super(AliasedGroup, self).__init__(*args, **kwargs)
-
-    def get_command(self, ctx, cmd_name):
-        cmd = click.Group.get_command(self, ctx, cmd_name)
-        if cmd is not None:
-            return cmd
-        matches = \
-            [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
-        if not matches:
-            return None
-        elif len(matches) == 1:
-            return click.Group.get_command(self, ctx, matches[0])
-        ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches))))
-
-    def resolve_command(self, ctx, args):
-        """
-        Override Click's ``resolve_command`` method and append *Did you mean ...* suggestions to
-        the raised exception message.
-        """
-        try:
-            return super(AliasedGroup, self).resolve_command(ctx, args)
-        except click.exceptions.UsageError as error:
-            error_msg = str(error)
-            original_cmd_name = click.utils.make_str(args[0])
-            matches = difflib.get_close_matches(
-                original_cmd_name,
-                self.list_commands(ctx),
-                self.max_suggestions,
-                self.cutoff)
-            if matches:
-                error_msg += '{0}{0}Did you mean one of these?{0}    {1}'.format(
-                    os.linesep,
-                    '{0}    '.format(os.linesep).join(matches, ))
-            raise click.exceptions.UsageError(error_msg, error.ctx)
-
-
-def group(name):
-    """
-    Allow creating a group with a default Click context and a class for Click's *did you mean*
-    feature, without having to repeat them for every group.
-    """
-    return click.group(
-        name=name,
-        context_settings=CLICK_CONTEXT_SETTINGS,
-        cls=AliasedGroup)
-
-
-def command(*args, **kwargs):
-    """
-    Make Click commands ARIA specific.
-
-    This exists purely for aesthetic reasons; otherwise some decorators would be called
-    ``@click.something`` instead of ``@aria.something``.
-    """
-    return click.command(*args, **kwargs)
-
-
-def argument(*args, **kwargs):
-    """
-    Make Click arguments specific to ARIA.
-
-    This exists purely for aesthetic reasons; otherwise some decorators would be called
-    ``@click.something`` instead of ``@aria.something``.
-    """
-    return click.argument(*args, **kwargs)
-
-
-class Options(object):
-    def __init__(self):
-        """
-        The options API is nicer when you use each option by calling ``@aria.options.some_option``
-        instead of ``@aria.some_option``.
-
-        Note that some options are attributes and some are static methods, because we want to be
-        explicit about how a developer sees an option: if it can receive arguments it is a
-        method, otherwise it is an attribute.
-        """
-        self.version = click.option(
-            '--version',
-            is_flag=True,
-            callback=show_version,
-            expose_value=False,
-            is_eager=True,
-            help=helptexts.VERSION)
-
-        self.json_output = click.option(
-            '--json-output',
-            is_flag=True,
-            help=helptexts.JSON_OUTPUT)
-
-        self.dry_execution = click.option(
-            '--dry',
-            is_flag=True,
-            help=helptexts.DRY_EXECUTION)
-
-        self.reset_config = click.option(
-            '--reset-config',
-            is_flag=True,
-            help=helptexts.RESET_CONFIG)
-
-        self.descending = click.option(
-            '--descending',
-            required=False,
-            is_flag=True,
-            default=defaults.SORT_DESCENDING,
-            help=helptexts.DESCENDING)
-
-        self.service_template_filename = click.option(
-            '-n',
-            '--service-template-filename',
-            default=defaults.SERVICE_TEMPLATE_FILENAME,
-            help=helptexts.SERVICE_TEMPLATE_FILENAME)
-
-        self.service_template_mode_full = mutually_exclusive_option(
-            '-f',
-            '--full',
-            'mode_full',
-            mutually_exclusive=('mode_types',),
-            is_flag=True,
-            help=helptexts.SHOW_FULL,
-            mutuality_description='-t, --types',
-            mutuality_error=helptexts.MODE_MUTUALITY_ERROR_MESSAGE)
-
-        self.service_mode_full = mutually_exclusive_option(
-            '-f',
-            '--full',
-            'mode_full',
-            mutually_exclusive=('mode_graph',),
-            is_flag=True,
-            help=helptexts.SHOW_FULL,
-            mutuality_description='-g, --graph',
-            mutuality_error=helptexts.MODE_MUTUALITY_ERROR_MESSAGE)
-
-        self.mode_types = mutually_exclusive_option(
-            '-t',
-            '--types',
-            'mode_types',
-            mutually_exclusive=('mode_full',),
-            is_flag=True,
-            help=helptexts.SHOW_TYPES,
-            mutuality_description='-f, --full',
-            mutuality_error=helptexts.MODE_MUTUALITY_ERROR_MESSAGE)
-
-        self.mode_graph = mutually_exclusive_option(
-            '-g',
-            '--graph',
-            'mode_graph',
-            mutually_exclusive=('mode_full',),
-            is_flag=True,
-            help=helptexts.SHOW_GRAPH,
-            mutuality_description='-f, --full',
-            mutuality_error=helptexts.MODE_MUTUALITY_ERROR_MESSAGE)
-
-        self.format_json = mutually_exclusive_option(
-            '-j',
-            '--json',
-            'format_json',
-            mutually_exclusive=('format_yaml',),
-            is_flag=True,
-            help=helptexts.SHOW_JSON,
-            mutuality_description='-y, --yaml',
-            mutuality_error=helptexts.FORMAT_MUTUALITY_ERROR_MESSAGE)
-
-        self.format_yaml = mutually_exclusive_option(
-            '-y',
-            '--yaml',
-            'format_yaml',
-            mutually_exclusive=('format_json',),
-            is_flag=True,
-            help=helptexts.SHOW_YAML,
-            mutuality_description='-j, --json',
-            mutuality_error=helptexts.FORMAT_MUTUALITY_ERROR_MESSAGE)
-
-    @staticmethod
-    def verbose(expose_value=False):
-        return click.option(
-            '-v',
-            '--verbose',
-            count=True,
-            callback=set_verbosity_level,
-            expose_value=expose_value,
-            is_eager=True,
-            help=helptexts.VERBOSE)
-
-    @staticmethod
-    def inputs(help):
-        return click.option(
-            '-i',
-            '--inputs',
-            multiple=True,
-            callback=inputs_callback,
-            help=help)
-
-    @staticmethod
-    def force(help):
-        return click.option(
-            '-f',
-            '--force',
-            is_flag=True,
-            help=help)
-
-    @staticmethod
-    def task_max_attempts(default=defaults.TASK_MAX_ATTEMPTS):
-        return click.option(
-            '--task-max-attempts',
-            type=int,
-            default=default,
-            help=helptexts.TASK_MAX_ATTEMPTS.format(default))
-
-    @staticmethod
-    def sort_by(default='created_at'):
-        return click.option(
-            '--sort-by',
-            required=False,
-            default=default,
-            help=helptexts.SORT_BY)
-
-    @staticmethod
-    def task_retry_interval(default=defaults.TASK_RETRY_INTERVAL):
-        return click.option(
-            '--task-retry-interval',
-            type=int,
-            default=default,
-            help=helptexts.TASK_RETRY_INTERVAL.format(default))
-
-    @staticmethod
-    def service_id(required=False):
-        return click.option(
-            '-s',
-            '--service-id',
-            required=required,
-            help=helptexts.SERVICE_ID)
-
-    @staticmethod
-    def execution_id(required=False):
-        return click.option(
-            '-e',
-            '--execution-id',
-            required=required,
-            help=helptexts.EXECUTION_ID)
-
-    @staticmethod
-    def service_template_id(required=False):
-        return click.option(
-            '-t',
-            '--service-template-id',
-            required=required,
-            help=helptexts.SERVICE_TEMPLATE_ID)
-
-    @staticmethod
-    def service_template_path(required=False):
-        return click.option(
-            '-p',
-            '--service-template-path',
-            required=required,
-            type=click.Path(exists=True))
-
-    @staticmethod
-    def service_name(required=False):
-        return click.option(
-            '-s',
-            '--service-name',
-            required=required,
-            help=helptexts.SERVICE_ID)
-
-    @staticmethod
-    def service_template_name(required=False):
-        return click.option(
-            '-t',
-            '--service-template-name',
-            required=required,
-            help=helptexts.SERVICE_ID)
-
-    @staticmethod
-    def mark_pattern():
-        return click.option(
-            '-m',
-            '--mark-pattern',
-            help=helptexts.MARK_PATTERN,
-            type=str,
-            required=False
-        )
-
-options = Options()
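
To make the mutual-exclusion machinery above concrete, here is a minimal, standalone sketch in the same spirit as ``MutuallyExclusiveOption`` (it leaves out ARIA's help texts, error customization and the decorator wrapper, and the ``--json``/``--yaml`` flags are just an example):

    import click

    class ExclusiveOption(click.Option):
        """A Click option that refuses to be combined with the named options."""

        def __init__(self, *args, **kwargs):
            self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', ()))
            super(ExclusiveOption, self).__init__(*args, **kwargs)

        def handle_parse_result(self, ctx, opts, args):
            if self.name in opts and self.mutually_exclusive.intersection(opts):
                raise click.UsageError('--{0} cannot be used together with {1}'.format(
                    self.name, ', '.join(sorted(self.mutually_exclusive))))
            return super(ExclusiveOption, self).handle_parse_result(ctx, opts, args)

    @click.command()
    @click.option('--json', 'format_json', is_flag=True, cls=ExclusiveOption,
                  mutually_exclusive=('format_yaml',))
    @click.option('--yaml', 'format_yaml', is_flag=True, cls=ExclusiveOption,
                  mutually_exclusive=('format_json',))
    def show(format_json, format_yaml):
        click.echo('json={0}, yaml={1}'.format(format_json, format_yaml))

    # Invoking `show` with both --json and --yaml raises a usage error;
    # with only --json it prints "json=True, yaml=False".
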
diff --git a/apache-ariatosca-0.1.1/aria/cli/csar.py b/apache-ariatosca-0.1.1/aria/cli/csar.py
deleted file mode 100644
index 40b1699..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/csar.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Support for the CSAR (Cloud Service ARchive) packaging specification.
-
-See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc461787381>`__
-"""
-
-import os
-import logging
-import pprint
-import tempfile
-import zipfile
-
-import requests
-from ruamel import yaml
-
-CSAR_FILE_EXTENSION = '.csar'
-META_FILE = 'TOSCA-Metadata/TOSCA.meta'
-META_FILE_VERSION_KEY = 'TOSCA-Meta-File-Version'
-META_FILE_VERSION_VALUE = '1.0'
-META_CSAR_VERSION_KEY = 'CSAR-Version'
-META_CSAR_VERSION_VALUE = '1.1'
-META_CREATED_BY_KEY = 'Created-By'
-META_CREATED_BY_VALUE = 'ARIA'
-META_ENTRY_DEFINITIONS_KEY = 'Entry-Definitions'
-BASE_METADATA = {
-    META_FILE_VERSION_KEY: META_FILE_VERSION_VALUE,
-    META_CSAR_VERSION_KEY: META_CSAR_VERSION_VALUE,
-    META_CREATED_BY_KEY: META_CREATED_BY_VALUE,
-}
-
-
-def write(service_template_path, destination, logger):
-
-    service_template_path = os.path.abspath(os.path.expanduser(service_template_path))
-    source = os.path.dirname(service_template_path)
-    entry = os.path.basename(service_template_path)
-
-    meta_file = os.path.join(source, META_FILE)
-    if not os.path.isdir(source):
-        raise ValueError('{0} is not a directory. Please specify the service template '
-                         'directory.'.format(source))
-    if not os.path.isfile(service_template_path):
-        raise ValueError('{0} does not exist. Please specify a valid entry point.'
-                         .format(service_template_path))
-    if os.path.exists(destination):
-        raise ValueError('{0} already exists. Please provide a path to where the CSAR should be '
-                         'created.'.format(destination))
-    if os.path.exists(meta_file):
-        raise ValueError('{0} already exists. This command generates a meta file for you. Please '
-                         'remove the existing metafile.'.format(meta_file))
-    metadata = BASE_METADATA.copy()
-    metadata[META_ENTRY_DEFINITIONS_KEY] = entry
-    logger.debug('Compressing root directory to ZIP')
-    with zipfile.ZipFile(destination, 'w', zipfile.ZIP_DEFLATED) as f:
-        for root, _, files in os.walk(source):
-            for file in files:
-                file_full_path = os.path.join(root, file)
-                file_relative_path = os.path.relpath(file_full_path, source)
-                logger.debug('Writing to archive: {0}'.format(file_relative_path))
-                f.write(file_full_path, file_relative_path)
-        logger.debug('Writing new metadata file to {0}'.format(META_FILE))
-        f.writestr(META_FILE, yaml.dump(metadata, default_flow_style=False))
-
-
-class _CSARReader(object):
-
-    def __init__(self, source, destination, logger):
-        self.logger = logger
-        if os.path.isdir(destination) and os.listdir(destination):
-            raise ValueError('{0} already exists and is not empty. '
-                             'Please specify the location where the CSAR '
-                             'should be extracted.'.format(destination))
-        downloaded_csar = '://' in source
-        if downloaded_csar:
-            file_descriptor, download_target = tempfile.mkstemp()
-            os.close(file_descriptor)
-            self._download(source, download_target)
-            source = download_target
-        self.source = os.path.expanduser(source)
-        self.destination = os.path.expanduser(destination)
-        self.metadata = {}
-        try:
-            if not os.path.exists(self.source):
-                raise ValueError('{0} does not exist. Please specify a valid CSAR path.'
-                                 .format(self.source))
-            if not zipfile.is_zipfile(self.source):
-                raise ValueError('{0} is not a valid CSAR.'.format(self.source))
-            self._extract()
-            self._read_metadata()
-            self._validate()
-        finally:
-            if downloaded_csar:
-                os.remove(self.source)
-
-    @property
-    def created_by(self):
-        return self.metadata.get(META_CREATED_BY_KEY)
-
-    @property
-    def csar_version(self):
-        return self.metadata.get(META_CSAR_VERSION_KEY)
-
-    @property
-    def meta_file_version(self):
-        return self.metadata.get(META_FILE_VERSION_KEY)
-
-    @property
-    def entry_definitions(self):
-        return self.metadata.get(META_ENTRY_DEFINITIONS_KEY)
-
-    @property
-    def entry_definitions_yaml(self):
-        with open(os.path.join(self.destination, self.entry_definitions)) as f:
-            return yaml.load(f)
-
-    def _extract(self):
-        self.logger.debug('Extracting CSAR contents')
-        if not os.path.exists(self.destination):
-            os.mkdir(self.destination)
-        with zipfile.ZipFile(self.source) as f:
-            f.extractall(self.destination)
-        self.logger.debug('CSAR contents successfully extracted')
-
-    def _read_metadata(self):
-        csar_metafile = os.path.join(self.destination, META_FILE)
-        if not os.path.exists(csar_metafile):
-            raise ValueError('Metadata file {0} is missing from the CSAR'.format(csar_metafile))
-        self.logger.debug('CSAR metadata file: {0}'.format(csar_metafile))
-        self.logger.debug('Attempting to parse CSAR metadata YAML')
-        with open(csar_metafile) as f:
-            self.metadata.update(yaml.load(f))
-        self.logger.debug('CSAR metadata:{0}{1}'.format(os.linesep, pprint.pformat(self.metadata)))
-
-    def _validate(self):
-        def validate_key(key, expected=None):
-            if not self.metadata.get(key):
-                raise ValueError('{0} is missing from the metadata file.'.format(key))
-            actual = str(self.metadata[key])
-            if expected and actual != expected:
-                raise ValueError('{0} is expected to be {1} in the metadata file while it is in '
-                                 'fact {2}.'.format(key, expected, actual))
-        validate_key(META_FILE_VERSION_KEY, expected=META_FILE_VERSION_VALUE)
-        validate_key(META_CSAR_VERSION_KEY, expected=META_CSAR_VERSION_VALUE)
-        validate_key(META_CREATED_BY_KEY)
-        validate_key(META_ENTRY_DEFINITIONS_KEY)
-        self.logger.debug('CSAR entry definitions: {0}'.format(self.entry_definitions))
-        entry_definitions_path = os.path.join(self.destination, self.entry_definitions)
-        if not os.path.isfile(entry_definitions_path):
-            raise ValueError('The entry definitions file {0} referenced by the metadata file '
-                             'does not exist.'.format(entry_definitions_path))
-
-    def _download(self, url, target):
-        response = requests.get(url, stream=True)
-        if response.status_code != 200:
-            raise ValueError('Server at {0} returned a {1} status code'
-                             .format(url, response.status_code))
-        self.logger.info('Downloading {0} to {1}'.format(url, target))
-        with open(target, 'wb') as f:
-            for chunk in response.iter_content(chunk_size=8192):
-                if chunk:
-                    f.write(chunk)
-
-
-def read(source, destination=None, logger=None):
-    destination = destination or tempfile.mkdtemp()
-    logger = logger or logging.getLogger('dummy')
-    return _CSARReader(source=source, destination=destination, logger=logger)
-
-
-def is_csar_archive(source):
-    return source.endswith(CSAR_FILE_EXTENSION)
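
As a rough, self-contained illustration of the CSAR layout handled above (file names and contents below are invented), this sketch writes a minimal archive containing a service template plus a ``TOSCA-Metadata/TOSCA.meta`` entry and then reads the metadata back:

    import io
    import zipfile

    from ruamel import yaml

    metadata = {
        'TOSCA-Meta-File-Version': '1.0',
        'CSAR-Version': '1.1',
        'Created-By': 'example',
        'Entry-Definitions': 'service_template.yaml',
    }

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as archive:
        archive.writestr('service_template.yaml',
                         'tosca_definitions_version: tosca_simple_yaml_1_0\n')
        archive.writestr('TOSCA-Metadata/TOSCA.meta',
                         yaml.dump(metadata, default_flow_style=False))

    buf.seek(0)
    with zipfile.ZipFile(buf) as archive:
        meta = yaml.safe_load(archive.read('TOSCA-Metadata/TOSCA.meta').decode('utf-8'))
    print(meta['Entry-Definitions'])  # service_template.yaml
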
diff --git a/apache-ariatosca-0.1.1/aria/cli/defaults.py b/apache-ariatosca-0.1.1/aria/cli/defaults.py
deleted file mode 100644
index e84abc0..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/defaults.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Various CLI default values.
-"""
-
-#: Default service template filename
-SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
-
-#: Default task max attempts
-TASK_MAX_ATTEMPTS = 30
-
-#: Default task retry interval
-TASK_RETRY_INTERVAL = 30
-
-#: Default sort descending
-SORT_DESCENDING = False
diff --git a/apache-ariatosca-0.1.1/aria/cli/env.py b/apache-ariatosca-0.1.1/aria/cli/env.py
deleted file mode 100644
index 84bdebe..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/env.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Environment (private)
-"""
-
-import os
-import shutil
-
-from .config import config
-from .logger import Logging
-from .. import (application_model_storage, application_resource_storage)
-from ..orchestrator.plugin import PluginManager
-from ..storage.sql_mapi import SQLAlchemyModelAPI
-from ..storage.filesystem_rapi import FileSystemResourceAPI
-
-
-ARIA_DEFAULT_WORKDIR_NAME = '.aria'
-
-
-class _Environment(object):
-
-    def __init__(self, workdir):
-
-        self._workdir = workdir
-        self._init_workdir()
-
-        self._config = config.CliConfig.create_config(workdir)
-        self._logging = Logging(self._config)
-
-        self._model_storage_dir = os.path.join(workdir, 'models')
-        self._resource_storage_dir = os.path.join(workdir, 'resources')
-        self._plugins_dir = os.path.join(workdir, 'plugins')
-
-        # initialized lazily
-        self._model_storage = None
-        self._resource_storage = None
-        self._plugin_manager = None
-
-    @property
-    def workdir(self):
-        return self._workdir
-
-    @property
-    def config(self):
-        return self._config
-
-    @property
-    def logging(self):
-        return self._logging
-
-    @property
-    def model_storage(self):
-        if not self._model_storage:
-            self._model_storage = self._init_sqlite_model_storage()
-        return self._model_storage
-
-    @property
-    def resource_storage(self):
-        if not self._resource_storage:
-            self._resource_storage = self._init_fs_resource_storage()
-        return self._resource_storage
-
-    @property
-    def plugin_manager(self):
-        if not self._plugin_manager:
-            self._plugin_manager = self._init_plugin_manager()
-        return self._plugin_manager
-
-    def reset(self, reset_config):
-        if reset_config:
-            shutil.rmtree(self._workdir)
-        else:
-            _, dirs, files = next(os.walk(self._workdir))
-            files.remove(config.CONFIG_FILE_NAME)
-
-            for dir_ in dirs:
-                shutil.rmtree(os.path.join(self._workdir, dir_))
-            for file_ in files:
-                os.remove(os.path.join(self._workdir, file_))
-
-    def _init_workdir(self):
-        if not os.path.exists(self._workdir):
-            os.makedirs(self._workdir)
-
-    def _init_sqlite_model_storage(self):
-        if not os.path.exists(self._model_storage_dir):
-            os.makedirs(self._model_storage_dir)
-
-        initiator_kwargs = dict(base_dir=self._model_storage_dir)
-        return application_model_storage(
-            SQLAlchemyModelAPI,
-            initiator_kwargs=initiator_kwargs)
-
-    def _init_fs_resource_storage(self):
-        if not os.path.exists(self._resource_storage_dir):
-            os.makedirs(self._resource_storage_dir)
-
-        fs_kwargs = dict(directory=self._resource_storage_dir)
-        return application_resource_storage(
-            FileSystemResourceAPI,
-            api_kwargs=fs_kwargs)
-
-    def _init_plugin_manager(self):
-        if not os.path.exists(self._plugins_dir):
-            os.makedirs(self._plugins_dir)
-
-        return PluginManager(self.model_storage, self._plugins_dir)
-
-
-env = _Environment(os.path.join(
-    os.environ.get('ARIA_WORKDIR', os.path.expanduser('~')), ARIA_DEFAULT_WORKDIR_NAME))
-
-logger = env.logging.logger
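
A point worth noting in the environment above is that the storages and the plugin manager are built lazily, only on first access. A stripped-down sketch of that pattern (the dictionary "storage" is a dummy standing in for the real SQLAlchemy model storage):

    import os
    import tempfile

    class Environment(object):

        def __init__(self, workdir):
            self._workdir = workdir
            self._model_storage = None  # initialized lazily

        @property
        def model_storage(self):
            if self._model_storage is None:
                # Stand-in for application_model_storage(SQLAlchemyModelAPI, ...)
                self._model_storage = {'base_dir': os.path.join(self._workdir, 'models')}
            return self._model_storage

    env = Environment(os.environ.get('ARIA_WORKDIR', tempfile.gettempdir()))
    print(env.model_storage['base_dir'])  # built only on first access
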
diff --git a/apache-ariatosca-0.1.1/aria/cli/exceptions.py b/apache-ariatosca-0.1.1/aria/cli/exceptions.py
deleted file mode 100644
index 7da9836..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/exceptions.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI exceptions.
-"""
-
-from ..exceptions import AriaError
-
-
-class AriaCliError(AriaError):
-    pass
diff --git a/apache-ariatosca-0.1.1/aria/cli/execution_logging.py b/apache-ariatosca-0.1.1/aria/cli/execution_logging.py
deleted file mode 100644
index af40e01..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/execution_logging.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Formatting for ``executions`` sub-commands.
-"""
-
-import os
-import re
-from StringIO import StringIO
-from functools import partial
-
-from . import (
-    logger,
-    color
-)
-from .env import env
-
-
-FIELD_TYPE = 'field_type'
-LEVEL = 'level'
-TIMESTAMP = 'timestamp'
-MESSAGE = 'message'
-IMPLEMENTATION = 'implementation'
-INPUTS = 'inputs'
-TRACEBACK = 'traceback'
-MARKER = 'marker'
-
-FINAL_STATES = 'final_states'
-SUCCESS_STATE = 'succeeded'
-CANCEL_STATE = 'canceled'
-FAIL_STATE = 'failed'
-
-_EXECUTION_PATTERN = "\'.*\' workflow execution {0}".format
-# The literal curly braces in this regex pattern would normally have to be escaped by doubling
-# them, which clashes with str.format itself, so instead the braces are injected into the pattern
-# via format arguments.
-_FIELD_TYPE_PATTERN = partial('.*({starting}{0}{closing}).*'.format, starting='{', closing='.*?}')
-
-_PATTERNS = {
-    FINAL_STATES: {
-        SUCCESS_STATE: re.compile(_EXECUTION_PATTERN(SUCCESS_STATE)),
-        CANCEL_STATE: re.compile(_EXECUTION_PATTERN(CANCEL_STATE)),
-        FAIL_STATE: re.compile(_EXECUTION_PATTERN(FAIL_STATE)),
-    },
-    FIELD_TYPE: {
-        IMPLEMENTATION: re.compile(_FIELD_TYPE_PATTERN(IMPLEMENTATION)),
-        LEVEL: re.compile(_FIELD_TYPE_PATTERN(LEVEL)),
-        MESSAGE: re.compile(_FIELD_TYPE_PATTERN(MESSAGE)),
-        INPUTS: re.compile(_FIELD_TYPE_PATTERN(INPUTS)),
-        TIMESTAMP: re.compile(_FIELD_TYPE_PATTERN(TIMESTAMP))
-    }
-}
-
-_FINAL_STATES = {
-    SUCCESS_STATE: color.Colors.Fore.GREEN,
-    CANCEL_STATE: color.Colors.Fore.YELLOW,
-    FAIL_STATE: color.Colors.Fore.RED
-}
-
-_DEFAULT_COLORS = {
-    LEVEL: {
-        'default': {'fore': 'lightmagenta_ex'},
-        'error': {'fore': 'red', 'style': 'bright'},
-    },
-    TIMESTAMP: {
-        'default': {'fore': 'lightmagenta_ex'},
-        'error': {'fore': 'red', 'style': 'bright'},
-    },
-    MESSAGE: {
-        'default': {'fore': 'lightblue_ex'},
-        'error': {'fore': 'red', 'style': 'bright'},
-    },
-    IMPLEMENTATION:{
-        'default': {'fore': 'lightblack_ex'},
-        'error': {'fore': 'red', 'style': 'bright'},
-    },
-    INPUTS: {
-        'default': {'fore': 'blue'},
-        'error': {'fore': 'red', 'style': 'bright'},
-    },
-    TRACEBACK: {'default': {'fore': 'red'}},
-
-    MARKER: 'lightyellow_ex'
-}
-
-_DEFAULT_FORMATS = {
-    logger.NO_VERBOSE: '{message}',
-    logger.LOW_VERBOSE: '{timestamp:%H:%M:%S} | {level[0]} | {message}',
-    logger.MEDIUM_VERBOSE: '{timestamp:%H:%M:%S} | {level[0]} | {implementation} | {message}',
-    logger.HIGH_VERBOSE:
-        '{timestamp:%H:%M:%S} | {level[0]} | {implementation} | {inputs} | {message}'
-}
-
-
-def stylize_log(item, mark_pattern):
-
-    # implementation
-    if item.task:
-        # operation task
-        implementation = item.task.function
-        inputs = dict(arg.unwrapped for arg in item.task.arguments.values())
-    else:
-        # execution task
-        implementation = item.execution.workflow_name
-        inputs = dict(inp.unwrapped for inp in item.execution.inputs.values())
-
-    stylized_str = color.StringStylizer(_get_format())
-    _populate_level(stylized_str, item)
-    _populate_timestamp(stylized_str, item)
-    _populate_message(stylized_str, item, mark_pattern)
-    _populate_inputs(stylized_str, inputs, item, mark_pattern)
-    _populate_implementation(stylized_str, implementation, item, mark_pattern)
-
-    msg = StringIO()
-    msg.write(str(stylized_str))
-    # Add the exception and the error msg.
-    if item.traceback and env.logging.verbosity_level >= logger.MEDIUM_VERBOSE:
-        msg.write(os.linesep)
-        msg.writelines(_color_traceback('\t' + '|' + line, item, mark_pattern)
-                       for line in item.traceback.splitlines(True))
-
-    return msg.getvalue()
-
-
-def log(item, mark_pattern=None, *args, **kwargs):
-    leveled_log = getattr(env.logging.logger, item.level.lower())
-    return leveled_log(stylize_log(item, mark_pattern), *args, **kwargs)
-
-
-def log_list(iterator, mark_pattern=None):
-    any_logs = False
-    for item in iterator:
-        log(item, mark_pattern)
-        any_logs = True
-    return any_logs
-
-
-def _get_format():
-    return (env.config.logging.execution.formats.get(env.logging.verbosity_level) or
-            _DEFAULT_FORMATS.get(env.logging.verbosity_level))
-
-
-def _get_styles(field_type):
-    return env.config.logging.execution.colors[field_type]
-
-
-def _is_color_enabled():
-    # Styling is applied only when colors are enabled in the user configuration
-    return env.config.logging.execution.colors_enabled
-
-
-def _get_marker_schema():
-    return color.ColorSpec(back=_get_styles(MARKER))
-
-
-def _populate_implementation(str_, implementation, log_item, mark_pattern=None):
-    _stylize(str_, implementation, log_item, IMPLEMENTATION, mark_pattern)
-
-
-def _populate_inputs(str_, inputs, log_item, mark_pattern=None):
-    _stylize(str_, inputs, log_item, INPUTS, mark_pattern)
-
-
-def _populate_timestamp(str_, log_item):
-    _stylize(str_, log_item.created_at, log_item, TIMESTAMP)
-
-
-def _populate_message(str_, log_item, mark_pattern=None):
-    _stylize(str_, log_item.msg, log_item, MESSAGE, mark_pattern)
-
-
-def _populate_level(str_, log_item):
-    _stylize(str_, log_item.level[0], log_item, LEVEL)
-
-
-def _stylize(stylized_str, msg, log_item, msg_type, mark_pattern=None):
-    match = re.match(_PATTERNS[FIELD_TYPE][msg_type], stylized_str._str)
-    if not match:
-        return
-    matched_substr = match.group(1)
-
-    substring = color.StringStylizer(matched_substr)
-
-    # handle format
-    substring.format(**{msg_type: msg})
-
-    if _is_color_enabled():
-        # handle color
-        substring.color(_resolve_schema(msg_type, log_item))
-        if not _is_end_execution_log(log_item):
-            # handle highlighting
-            substring.highlight(mark_pattern, _get_marker_schema())
-
-    stylized_str.replace(matched_substr, substring)
-
-
-def _color_traceback(traceback, log_item, mark_pattern):
-    if _is_color_enabled():
-        stylized_string = color.StringStylizer(traceback, _resolve_schema(TRACEBACK, log_item))
-        stylized_string.highlight(mark_pattern, _get_marker_schema())
-        return stylized_string
-    return traceback
-
-
-def _is_end_execution_log(log_item):
-    return not log_item.task and bool(_end_execution_schema(log_item))
-
-
-def _end_execution_schema(log_item):
-    for state, pattern in _PATTERNS[FINAL_STATES].items():
-        if re.match(pattern, log_item.msg):
-            return _FINAL_STATES[state]
-
-
-def _resolve_schema(msg_type, log_item):
-    if _is_end_execution_log(log_item):
-        return _end_execution_schema(log_item)
-    else:
-        return color.ColorSpec(
-            **(
-                # retrieve the schema from the user config according to the level
-                _get_styles(msg_type).get(log_item.level.lower()) or
-                # retrieve the default schema from the user config
-                _get_styles(msg_type).get('default') or
-                # retrieve the schema from the aria default config according to the level
-                _DEFAULT_COLORS[msg_type].get(log_item.level.lower()) or
-                # retrieve the default schema from the aria default config
-                _DEFAULT_COLORS[msg_type].get('default')
-            )
-        )
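
The stylizing above works field by field: each ``{field...}`` placeholder is located in the configured format string with a ``.*({field.*?}).*`` pattern, formatted in isolation, and substituted back. Stripped of the color handling, the idea looks roughly like this:

    import re

    def fill_field(format_string, field, value):
        """Format only the placeholder of `field`, leaving the rest of the string untouched."""
        match = re.match('.*({' + field + '.*?}).*', format_string)
        if not match:
            return format_string
        placeholder = match.group(1)  # e.g. '{message}' or '{timestamp:%H:%M:%S}'
        return format_string.replace(placeholder, placeholder.format(**{field: value}))

    fmt = '{timestamp:%H:%M:%S} | {level[0]} | {message}'
    print(fill_field(fmt, 'message', 'node started'))
    # {timestamp:%H:%M:%S} | {level[0]} | node started
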
diff --git a/apache-ariatosca-0.1.1/aria/cli/helptexts.py b/apache-ariatosca-0.1.1/aria/cli/helptexts.py
deleted file mode 100644
index a5d41e8..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/helptexts.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Gathers all CLI command help texts in one place.
-"""
-
-DEFAULT_MUTUALITY_ERROR_MESSAGE = 'mutually exclusive'
-VERBOSE = \
-    "Show verbose output; you can supply this up to three times (i.e. -vvv)"
-
-VERSION = "Display the version and exit"
-FORCE_RESET = "Confirmation for resetting ARIA's working directory"
-RESET_CONFIG = "Reset ARIA's user configuration"
-
-SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
-SERVICE_ID = "The unique identifier for the service"
-EXECUTION_ID = "The unique identifier for the execution"
-
-SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
-SERVICE_TEMPLATE_FILENAME = (
-    "The name of the archive's main service template file "
-    "(only relevant if uploading a non-CSAR archive)")
-INPUTS_PARAMS_USAGE = (
-    '(can be provided as wildcard based paths '
-    '("inp?.yaml", "/my_inputs/", etc.) to YAML files, a JSON string or as '
-    '"key1=value1;key2=value2"); this argument can be used multiple times')
-SERVICE_INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
-EXECUTION_INPUTS = "Inputs for the execution {0}".format(INPUTS_PARAMS_USAGE)
-
-TASK_RETRY_INTERVAL = \
-    "How long of a minimal interval should occur between task retry attempts [default: {0}]"
-TASK_MAX_ATTEMPTS = \
-    "How many times should a task be attempted in case of failures [default: {0}]"
-DRY_EXECUTION = "Execute a workflow dry run (prints operations information without causing side " \
-                "effects)"
-IGNORE_AVAILABLE_NODES = "Delete the service even if it has available nodes"
-SORT_BY = "Key for sorting the list"
-DESCENDING = "Sort list in descending order [default: False]"
-JSON_OUTPUT = "Output logs in JSON format"
-MARK_PATTERN = "Mark a regular expression pattern in the logs"
-
-SHOW_FULL = "Show full information"
-SHOW_JSON = "Show in JSON format (implies --full)"
-SHOW_YAML = "Show in YAML format (implies --full)"
-SHOW_TYPES = "Show only the type hierarchies"
-SHOW_GRAPH = "Show only the node graph"
-MODE_MUTUALITY_ERROR_MESSAGE = 'only one mode is possible'
-FORMAT_MUTUALITY_ERROR_MESSAGE = 'only one format is possible'
diff --git a/apache-ariatosca-0.1.1/aria/cli/inputs.py b/apache-ariatosca-0.1.1/aria/cli/inputs.py
deleted file mode 100644
index bea3e1a..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/inputs.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Helpers for validating and coercing service template inputs.
-"""
-
-import os
-import glob
-from ruamel import yaml
-
-from .env import logger
-from .exceptions import AriaCliError
-
-
-def inputs_to_dict(resources):
-    """
-    Returns a dictionary of inputs
-
-    :param resources: can be:
-
-     * list of files
-     * single file
-     * directory containing multiple input files
-     * ``key1=value1;key2=value2`` pairs string.
-     * string formatted as JSON/YAML
-     * wildcard based string (e.g. ``*-inputs.yaml``)
-    """
-    if not resources:
-        return dict()
-
-    parsed_dict = {}
-
-    for resource in resources:
-        logger.debug('Processing inputs source: {0}'.format(resource))
-        # Workflow parameters always pass an empty dictionary. We ignore it
-        if isinstance(resource, basestring):
-            try:
-                parsed_dict.update(_parse_single_input(resource))
-            except AriaCliError:
-                raise AriaCliError(
-                    "Invalid input: {0}. It must represent a dictionary. "
-                    "Valid values can be one of:{1} "
-                    "- A path to a YAML file{1} "
-                    "- A path to a directory containing YAML files{1} "
-                    "- A single quoted wildcard based path "
-                    "(e.g. '*-inputs.yaml'){1} "
-                    "- A string formatted as JSON/YAML{1} "
-                    "- A string formatted as key1=value1;key2=value2".format(
-                        resource, os.linesep))
-    return parsed_dict
-
-
-def _parse_single_input(resource):
-    try:
-        # parse resource as string representation of a dictionary
-        return _plain_string_to_dict(resource)
-    except AriaCliError:
-        input_files = glob.glob(resource)
-        parsed_dict = dict()
-        if os.path.isdir(resource):
-            for input_file in os.listdir(resource):
-                parsed_dict.update(
-                    _parse_yaml_path(os.path.join(resource, input_file)))
-        elif input_files:
-            for input_file in input_files:
-                parsed_dict.update(_parse_yaml_path(input_file))
-        else:
-            parsed_dict.update(_parse_yaml_path(resource))
-    return parsed_dict
-
-
-def _parse_yaml_path(resource):
-
-    try:
-        # if resource is a path - parse as a yaml file
-        if os.path.isfile(resource):
-            with open(resource) as f:
-                content = yaml.load(f.read())
-        else:
-            # parse resource content as yaml
-            content = yaml.load(resource)
-    except yaml.error.YAMLError as e:
-        raise AriaCliError("'{0}' is not a valid YAML. {1}".format(
-            resource, str(e)))
-
-    # Empty files return None
-    content = content or dict()
-    if not isinstance(content, dict):
-        raise AriaCliError()
-
-    return content
-
-
-def _plain_string_to_dict(input_string):
-    input_string = input_string.strip()
-    input_dict = {}
-    mapped_inputs = input_string.split(';')
-    for mapped_input in mapped_inputs:
-        mapped_input = mapped_input.strip()
-        if not mapped_input:
-            continue
-        split_mapping = mapped_input.split('=')
-        try:
-            key = split_mapping[0].strip()
-            value = split_mapping[1].strip()
-        except IndexError:
-            raise AriaCliError(
-                "Invalid input format: {0}, the expected format is: "
-                "key1=value1;key2=value2".format(input_string))
-        input_dict[key] = value
-    return input_dict
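
As a quick illustration of the ``key1=value1;key2=value2`` form accepted above, here is a self-contained version of just the plain-string parsing step (YAML files and wildcard paths aside):

    def plain_string_to_dict(input_string):
        """Parse 'key1=value1;key2=value2' into a dictionary, ignoring empty segments."""
        result = {}
        for pair in input_string.split(';'):
            pair = pair.strip()
            if not pair:
                continue
            key, sep, value = pair.partition('=')
            if not sep:
                raise ValueError('expected key1=value1;key2=value2, got: {0}'.format(pair))
            result[key.strip()] = value.strip()
        return result

    print(plain_string_to_dict('image=ubuntu-16.04; flavor=m1.small'))
    # {'image': 'ubuntu-16.04', 'flavor': 'm1.small'}
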
diff --git a/apache-ariatosca-0.1.1/aria/cli/logger.py b/apache-ariatosca-0.1.1/aria/cli/logger.py
deleted file mode 100644
index 14baae0..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/logger.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Centralized logging configuration and formatting.
-"""
-
-import os
-import copy
-import logging
-from logutils import dictconfig
-
-HIGH_VERBOSE = 3
-MEDIUM_VERBOSE = 2
-LOW_VERBOSE = 1
-NO_VERBOSE = 0
-
-LOGGER_CONFIG_TEMPLATE = {
-    "version": 1,
-    "formatters": {
-        "file": {
-            "format": "%(asctime)s [%(levelname)s] %(message)s"
-        },
-        "console": {
-            "format": "%(message)s"
-        }
-    },
-    "handlers": {
-        "file": {
-            "class": "logging.handlers.RotatingFileHandler",
-            "formatter": "file",
-            "maxBytes": "5000000",
-            "backupCount": "20"
-        },
-        "console": {
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stdout",
-            "formatter": "console"
-        }
-    },
-    "disable_existing_loggers": False
-}
-
-
-class Logging(object):
-
-    def __init__(self, config):
-        self._log_file = None
-        self._verbosity_level = NO_VERBOSE
-        self._all_loggers_names = []
-        self._configure_loggers(config)
-        self._lgr = logging.getLogger('aria.cli.main')
-
-    @property
-    def logger(self):
-        return self._lgr
-
-    @property
-    def log_file(self):
-        return self._log_file
-
-    @property
-    def verbosity_level(self):
-        return self._verbosity_level
-
-    @verbosity_level.setter
-    def verbosity_level(self, level):
-        self._verbosity_level = level
-        if self.is_high_verbose_level():
-            for logger_name in self._all_loggers_names:
-                logging.getLogger(logger_name).setLevel(logging.DEBUG)
-
-    def is_high_verbose_level(self):
-        return self.verbosity_level == HIGH_VERBOSE
-
-    def _configure_loggers(self, config):
-        loggers_config = config.logging.loggers
-        logfile = config.logging.filename
-
-        logger_dict = copy.deepcopy(LOGGER_CONFIG_TEMPLATE)
-        if logfile:
-            # set filename on file handler
-            logger_dict['handlers']['file']['filename'] = logfile
-            logfile_dir = os.path.dirname(logfile)
-            if not os.path.exists(logfile_dir):
-                os.makedirs(logfile_dir)
-            self._log_file = logfile
-        else:
-            del logger_dict['handlers']['file']
-
-        # add handlers to all loggers
-        loggers = {}
-        for logger_name in loggers_config:
-            loggers[logger_name] = dict(handlers=list(logger_dict['handlers'].keys()))
-            self._all_loggers_names.append(logger_name)
-        logger_dict['loggers'] = loggers
-
-        # set level for all loggers
-        for logger_name, logging_level in loggers_config.iteritems():
-            log = logging.getLogger(logger_name)
-            level = logging._levelNames[logging_level.upper()]
-            log.setLevel(level)
-
-        dictconfig.dictConfig(logger_dict)
-
-
-class ModelLogIterator(object):
-
-    def __init__(self, model_storage, execution_id, filters=None, sort=None, offset=0):
-        self._last_visited_id = offset
-        self._model_storage = model_storage
-        self._execution_id = execution_id
-        self._additional_filters = filters or {}
-        self._sort = sort or {}
-
-    def __iter__(self):
-        filters = dict(execution_fk=self._execution_id, id=dict(gt=self._last_visited_id))
-        filters.update(self._additional_filters)
-
-        for log in self._model_storage.log.iter(filters=filters, sort=self._sort):
-            self._last_visited_id = log.id
-            yield log
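
``ModelLogIterator`` above implements incremental tailing: every pass yields only logs with an id greater than the last one seen, so repeated iterations pick up where the previous one stopped. The same idea over a plain in-memory list of ``(id, message)`` tuples:

    class TailIterator(object):
        """Yield only items whose id is greater than the last id seen so far."""

        def __init__(self, source, offset=0):
            self._source = source
            self._last_visited_id = offset

        def __iter__(self):
            for log_id, message in self._source:
                if log_id > self._last_visited_id:
                    self._last_visited_id = log_id
                    yield (log_id, message)

    logs = [(1, 'starting'), (2, 'running')]
    tail = TailIterator(logs)
    print(list(tail))   # [(1, 'starting'), (2, 'running')]
    logs.append((3, 'done'))
    print(list(tail))   # [(3, 'done')]
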
diff --git a/apache-ariatosca-0.1.1/aria/cli/main.py b/apache-ariatosca-0.1.1/aria/cli/main.py
deleted file mode 100644
index 640360b..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/main.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Executable entry point into the CLI.
-"""
-
-from aria import install_aria_extensions
-from aria.cli import commands
-from aria.cli.core import aria
-
-
-@aria.group(name='aria')
-@aria.options.verbose()
-@aria.options.version
-def _aria():
-    """
-    ARIA's Command Line Interface.
-
-    To activate bash-completion run::
-
-        eval "$(_ARIA_COMPLETE=source aria)"
-
-    ARIA's working directory resides by default in "~/.aria". To change it, set the environment
-    variable ARIA_WORKDIR to something else (e.g. "/tmp/").
-    """
-    aria.set_cli_except_hook()
-
-
-def _register_commands():
-    """
-    Register the CLI's commands.
-    """
-
-    _aria.add_command(commands.service_templates.service_templates)
-    _aria.add_command(commands.node_templates.node_templates)
-    _aria.add_command(commands.services.services)
-    _aria.add_command(commands.nodes.nodes)
-    _aria.add_command(commands.workflows.workflows)
-    _aria.add_command(commands.executions.executions)
-    _aria.add_command(commands.plugins.plugins)
-    _aria.add_command(commands.logs.logs)
-    _aria.add_command(commands.reset.reset)
-
-
-def main():
-    install_aria_extensions()
-    _register_commands()
-    _aria()
-
-
-if __name__ == '__main__':
-    main()
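Since the completion hook above (``_ARIA_COMPLETE``) is Click's, ``_aria`` appears to be a Click-based group, so an extra command could in principle be attached the same way ``_register_commands()`` does. The ``hello`` command below is purely illustrative and not part of ARIA::

    import click

    @click.command(name='hello')
    def hello():
        """Illustrative extra command (not part of ARIA)."""
        click.echo('hello from a custom command')

    _aria.add_command(hello)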
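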
diff --git a/apache-ariatosca-0.1.1/aria/cli/service_template_utils.py b/apache-ariatosca-0.1.1/aria/cli/service_template_utils.py
deleted file mode 100644
index 5312522..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/service_template_utils.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Loading mechanism for service templates.
-"""
-
-import os
-from urlparse import urlparse
-
-from . import csar
-from . import utils
-from .exceptions import AriaCliError
-from ..utils import archive as archive_utils
-
-
-def get(source, service_template_filename):
-    """
-    Get a source and return a path to the main service template file.
-
-    The behavior, based on the source argument's content, is:
-
-    * local ``.yaml`` file: return the file
-    * local archive (``.csar``, ``.zip``, ``.tar``, ``.tar.gz``, and ``.tar.bz2``): extract it
-      locally and return the path to the service template file
-    * URL: download and get service template from downloaded archive
-    * GitHub repo: download and get service template from downloaded archive
-
-    :param source: path/URL/GitHub repo to archive/service-template file
-    :type source: basestring
-    :param service_template_filename: path to the service template file if the source is a
-     non-CSAR archive (with CSAR archives, this is read from the metadata file)
-    :type service_template_filename: basestring
-    :return: path to main service template file
-    :rtype: basestring
-    """
-    if urlparse(source).scheme:
-        downloaded_file = utils.download_file(source)
-        return _get_service_template_file_from_archive(
-            downloaded_file, service_template_filename)
-    elif os.path.isfile(source):
-        if _is_archive(source):
-            return _get_service_template_file_from_archive(source, service_template_filename)
-        else:
-            # Maybe check if yaml.
-            return os.path.abspath(source)
-    elif len(source.split('/')) == 2:
-        url = _map_to_github_url(source)
-        downloaded_file = utils.download_file(url)
-        return _get_service_template_file_from_archive(
-            downloaded_file, service_template_filename)
-    else:
-        raise AriaCliError(
-            'You must provide either a path to a local file, a remote URL '
-            'or a GitHub `organization/repository[:tag/branch]`')
-
-
-def _get_service_template_file_from_archive(archive, service_template_filename):
-    """
-    Extract archive to temporary location and get path to service template file.
-
-    :param archive: path to archive file
-    :type archive: basestring
-    :param service_template_filename: path to service template file relative to archive
-    :type service_template_filename: basestring
-    :return: absolute path to service template file
-    :rtype: basestring
-
-    """
-    if csar.is_csar_archive(archive):
-        service_template_file = _extract_csar_archive(archive)
-    else:
-        extract_directory = archive_utils.extract_archive(archive)
-        service_template_dir = os.path.join(
-            extract_directory,
-            os.listdir(extract_directory)[0],
-        )
-        service_template_file = os.path.join(service_template_dir, service_template_filename)
-
-    if not os.path.isfile(service_template_file):
-        raise AriaCliError(
-            'Could not find `{0}`. Please provide the name of the main '
-            'service template file by using the `-n/--service-template-filename` flag'
-            .format(service_template_filename))
-    return service_template_file
-
-
-def _map_to_github_url(source):
-    """
-    Returns a path to a downloaded GitHub archive.
-
-    :param source: GitHub repo: ``org/repo[:tag/branch]``
-    :type source: basestring
-    :return: URL to the archive file for the given repo in GitHub
-    :rtype: basestring
-
-    """
-    source_parts = source.split(':', 1)
-    repo = source_parts[0]
-    tag = source_parts[1] if len(source_parts) == 2 else 'master'
-    url = 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag)
-    return url
-
-
-def _is_archive(source):
-    return archive_utils.is_archive(source) or csar.is_csar_archive(source)
-
-
-def _extract_csar_archive(archive):
-    reader = csar.read(source=archive)
-    main_service_template_file_name = os.path.basename(reader.entry_definitions)
-    return os.path.join(reader.destination,
-                        main_service_template_file_name)
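To illustrate the GitHub shorthand handled by ``_map_to_github_url`` above, an ``org/repo[:tag/branch]`` reference is mapped to a plain archive URL before being downloaded (the repository reference shown is illustrative)::

    >>> _map_to_github_url('apache/incubator-ariatosca')
    'https://github.com/apache/incubator-ariatosca/archive/master.tar.gz'
    >>> _map_to_github_url('apache/incubator-ariatosca:0.1.1')
    'https://github.com/apache/incubator-ariatosca/archive/0.1.1.tar.gz'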
diff --git a/apache-ariatosca-0.1.1/aria/cli/table.py b/apache-ariatosca-0.1.1/aria/cli/table.py
deleted file mode 100644
index 74487ae..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/table.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Tabular formatting utilities.
-"""
-
-import os
-from datetime import datetime
-
-from prettytable import PrettyTable
-
-from .env import logger
-
-
-def print_data(columns, items, header_text,
-               column_formatters=None, col_max_width=None, defaults=None):
-    """
-    Prints data in a tabular form.
-
-    :param columns: columns of the table, e.g. ``['id','name']``
-    :type columns: iterable of basestring
-    :param items: each element must have keys or attributes corresponding to the ``columns`` items,
-     e.g. ``[{'id':'123', 'name':'Pete'}]``
-    :type items: [{:obj:`basestring`: :obj:`basestring`}]
-    :param column_formatters: maps column name to formatter, a function that may manipulate the
-     string values printed for this column, e.g. ``{'created_at': timestamp_formatter}``
-    :type column_formatters: {:obj:`basestring`: :obj:`function`}
-    :param col_max_width: maximum allowed width of each column
-    :type col_max_width: int
-    :param defaults: default values for keys that don't exist in the data itself, e.g.
-     ``{'serviceId':'123'}``
-    :type defaults: {:obj:`basestring`: :obj:`basestring`}
-    """
-    if items is None:
-        items = []
-    elif not isinstance(items, list):
-        items = [items]
-
-    pretty_table = _generate(columns, data=items, column_formatters=column_formatters,
-                             defaults=defaults)
-    if col_max_width:
-        pretty_table.max_width = col_max_width
-    _log(header_text, pretty_table)
-
-
-def _log(title, table):
-    logger.info('{0}{1}{0}{2}{0}'.format(os.linesep, title, table))
-
-
-def _generate(cols, data, column_formatters=None, defaults=None):
-    """
-    Return a new PrettyTable instance representing the list.
-
-    :param cols: columns of the table, e.g. ``['id','name']``
-    :type cols: iterable of :obj:`basestring`
-    :param data: each element must have keys or attributes corresponding to the ``cols`` items,
-     e.g. ``[{'id':'123', 'name':'Pete'}]``
-    :type data: [{:obj:`basestring`: :obj:`basestring`}]
-    :param column_formatters: maps column name to formatter, a function that may manipulate the
-     string values printed for this column, e.g. ``{'created_at': timestamp_formatter}``
-    :type column_formatters: {:obj:`basestring`: :obj:`function`}
-    :param defaults: default values for keys that don't exist in the data itself, e.g.
-     ``{'serviceId':'123'}``
-    :type defaults: {:obj:`basestring`: :obj:`basestring`}
-    """
-    def get_values_per_column(column, row_data):
-        if hasattr(row_data, column) or (isinstance(row_data, dict) and column in row_data):
-            val = row_data[column] if isinstance(row_data, dict) else getattr(row_data, column)
-
-            if val and isinstance(val, list):
-                val = [str(element) for element in val]
-                val = ','.join(val)
-            elif val is None or isinstance(val, list):
-                # don't print `[]` or `None` (but do print `0`, `False`, etc.)
-                val = ''
-
-            if column in column_formatters:
-                # calling the user's column formatter to manipulate the value
-                val = column_formatters[column](val)
-
-            return val
-        else:
-            return defaults.get(column)
-
-    column_formatters = column_formatters or dict()
-    defaults = defaults or dict()
-    pretty_table = PrettyTable(list(cols))
-
-    for datum in data:
-        values_row = []
-        for col in cols:
-            values_row.append(get_values_per_column(col, datum))
-        pretty_table.add_row(values_row)
-
-    return pretty_table
-
-
-def timestamp_formatter(value):
-    try:
-        datetime.strptime(value[:10], '%Y-%m-%d')
-        return value.replace('T', ' ').replace('Z', ' ')
-    except ValueError:
-        # not a timestamp
-        return value
-
-
-def trim_formatter_generator(max_length):
-    def trim_formatter(value):
-        if len(value) >= max_length:
-            value = '{0}..'.format(value[:max_length - 2])
-        return value
-    return trim_formatter
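A short usage sketch of the helpers above; the columns and data are made up, and the output goes through the CLI logger configured in ``env``::

    columns = ['id', 'name', 'created_at']
    items = [{'id': '1',
              'name': 'a-very-long-service-name',
              'created_at': '2017-01-01T12:00:00Z'}]

    print_data(columns, items, 'Services:',
               column_formatters={'created_at': timestamp_formatter,
                                  'name': trim_formatter_generator(16)},
               col_max_width=50)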
diff --git a/apache-ariatosca-0.1.1/aria/cli/utils.py b/apache-ariatosca-0.1.1/aria/cli/utils.py
deleted file mode 100644
index 697ff37..0000000
--- a/apache-ariatosca-0.1.1/aria/cli/utils.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Miscellaneous CLI utilities.
-"""
-
-import os
-import sys
-from StringIO import StringIO
-
-from backports.shutil_get_terminal_size import get_terminal_size
-
-from .env import logger
-from .exceptions import AriaCliError
-from ..utils import http
-
-
-def storage_sort_param(sort_by, descending):
-    return {sort_by: 'desc' if descending else 'asc'}
-
-
-def get_parameter_templates_as_string(parameter_templates):
-    params_string = StringIO()
-
-    for param_name, param_template in parameter_templates.iteritems():
-        params_string.write('\t{0}:{1}'.format(param_name, os.linesep))
-        param_dict = param_template.to_dict()
-        del param_dict['id']  # not interested in printing the id
-        for k, v in param_dict.iteritems():
-            params_string.write('\t\t{0}: {1}{2}'.format(k, v, os.linesep))
-
-    params_string.write(os.linesep)
-    return params_string.getvalue()
-
-
-def check_overriding_storage_exceptions(e, model_class, name):
-    """
-    Checks whether the storage exception is a known type whose message we'd like to override;
-    if so, raises a new error with the overridden message, otherwise simply returns.
-    """
-    assert isinstance(e, BaseException)
-    if 'UNIQUE constraint failed' in e.message:
-        new_message = \
-            'Could not store {model_class} `{name}`{linesep}' \
-            'There already exists a {model_class} with the same name' \
-                .format(model_class=model_class, name=name, linesep=os.linesep)
-        trace = sys.exc_info()[2]
-        raise type(e), type(e)(new_message), trace  # pylint: disable=raising-non-exception
-
-
-def download_file(url):
-    progress_bar = generate_progress_handler(url, 'Downloading')
-    try:
-        destination = http.download_file(url, logger=logger, progress_handler=progress_bar)
-    except Exception as e:
-        raise AriaCliError(
-            'Failed to download {0}. ({1})'.format(url, str(e)))
-    return destination
-
-
-def generate_progress_handler(file_path, action='', max_bar_length=80):
-    """
-    Returns a function that prints a progress bar in the terminal.
-
-    :param file_path: the name of the file being transferred
-    :param action: uploading/downloading
-    :param max_bar_length: maximum allowed length of the bar
-    :return: configured ``print_progress`` function
-    """
-    # We want to limit the maximum line length to 80, but allow for a smaller terminal size. We
-    # also account for the action string and some extra characters
-    terminal_width = get_terminal_size().columns
-
-    # This takes care of the case where there is no terminal (e.g. unittest)
-    terminal_width = terminal_width or max_bar_length
-    bar_length = min(max_bar_length, terminal_width) - len(action) - 12
-
-    # Shorten the file name if it's too long
-    file_name = os.path.basename(file_path)
-    if len(file_name) > (bar_length / 4) + 3:
-        file_name = file_name[:bar_length / 4] + '...'
-
-    bar_length -= len(file_name)
-
-    def print_progress(read_bytes, total_bytes):
-        """
-        Print upload/download progress on a single line.
-
-        Call this function in a loop to create a progress bar in the terminal.
-
-        :param read_bytes: number of bytes already processed
-        :param total_bytes: total number of bytes in the file
-        """
-
-        filled_length = min(bar_length, int(round(bar_length * read_bytes / float(total_bytes))))
-        percents = min(100.00, round(100.00 * (read_bytes / float(total_bytes)), 2))
-        bar = '#' * filled_length + '-' * (bar_length - filled_length)  # pylint: disable=blacklisted-name
-
-        # The \r carriage return moves the cursor back to the beginning of the line
-        sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(action, file_name, bar, percents))
-        if read_bytes >= total_bytes:
-            sys.stdout.write(os.linesep)
-
-    return print_progress
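The handler returned above is meant to be called repeatedly with the running byte count, for example from an upload/download loop (the file name and sizes below are illustrative)::

    progress = generate_progress_handler('my-template.csar', action='Uploading')
    total = 4 * 1024 * 1024
    for done in range(0, total + 1, 1024 * 1024):
        progress(done, total)    # redraws the bar in place; emits a newline once done == total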
diff --git a/apache-ariatosca-0.1.1/aria/core.py b/apache-ariatosca-0.1.1/aria/core.py
deleted file mode 100644
index a8d5245..0000000
--- a/apache-ariatosca-0.1.1/aria/core.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA core module.
-"""
-
-from . import exceptions
-from .parser import consumption
-from .parser.loading.location import UriLocation
-
-
-class Core(object):
-
-    def __init__(self,
-                 model_storage,
-                 resource_storage,
-                 plugin_manager):
-        self._model_storage = model_storage
-        self._resource_storage = resource_storage
-        self._plugin_manager = plugin_manager
-
-    @property
-    def model_storage(self):
-        return self._model_storage
-
-    @property
-    def resource_storage(self):
-        return self._resource_storage
-
-    @property
-    def plugin_manager(self):
-        return self._plugin_manager
-
-    def validate_service_template(self, service_template_path):
-        self._parse_service_template(service_template_path)
-
-    def create_service_template(self, service_template_path, service_template_dir,
-                                service_template_name):
-        context = self._parse_service_template(service_template_path)
-        service_template = context.modeling.template
-        service_template.name = service_template_name
-        self.model_storage.service_template.put(service_template)
-        self.resource_storage.service_template.upload(
-            entry_id=str(service_template.id), source=service_template_dir)
-
-    def delete_service_template(self, service_template_id):
-        service_template = self.model_storage.service_template.get(service_template_id)
-        if service_template.services:
-            raise exceptions.DependentServicesError(
-                'Can\'t delete service template `{0}` - service template has existing services'
-                .format(service_template.name))
-
-        self.model_storage.service_template.delete(service_template)
-        self.resource_storage.service_template.delete(entry_id=str(service_template.id))
-
-    def create_service(self, service_template_id, inputs, service_name=None):
-
-        service_template = self.model_storage.service_template.get(service_template_id)
-
-        # creating an empty ConsumptionContext, initiating a threadlocal context
-        context = consumption.ConsumptionContext()
-
-        storage_session = self.model_storage._all_api_kwargs['session']
-        # setting no autoflush for the duration of instantiation - this helps avoid dependency
-        # constraints as they're being set up
-        with storage_session.no_autoflush:
-            service = service_template.instantiate(None, self.model_storage, inputs=inputs)
-
-            consumption.ConsumerChain(
-                context,
-                (
-                    consumption.CoerceServiceInstanceValues,
-                    consumption.ValidateServiceInstance,
-                    consumption.SatisfyRequirements,
-                    consumption.CoerceServiceInstanceValues,
-                    consumption.ValidateCapabilities,
-                    consumption.FindHosts,
-                    consumption.ConfigureOperations,
-                    consumption.CoerceServiceInstanceValues
-                )).consume()
-            if context.validation.dump_issues():
-                raise exceptions.InstantiationError('Failed to instantiate service template `{0}`'
-                                                    .format(service_template.name))
-
-        storage_session.flush()  # flushing so service.id would auto-populate
-        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
-        self.model_storage.service.put(service)
-        return service
-
-    def delete_service(self, service_id, force=False):
-        service = self.model_storage.service.get(service_id)
-
-        active_executions = [e for e in service.executions if e.is_active()]
-        if active_executions:
-            raise exceptions.DependentActiveExecutionsError(
-                'Can\'t delete service `{0}` - there is an active execution for this service. '
-                'Active execution ID: {1}'.format(service.name, active_executions[0].id))
-
-        if not force:
-            available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
-            if available_nodes:
-                raise exceptions.DependentAvailableNodesError(
-                    'Can\'t delete service `{0}` - there are available nodes for this service. '
-                    'Available node IDs: {1}'.format(service.name, ', '.join(available_nodes)))
-
-        self.model_storage.service.delete(service)
-
-    @staticmethod
-    def _parse_service_template(service_template_path):
-        context = consumption.ConsumptionContext()
-        context.presentation.location = UriLocation(service_template_path)
-        consumption.ConsumerChain(
-            context,
-            (
-                consumption.Read,
-                consumption.Validate,
-                consumption.ServiceTemplate
-            )).consume()
-        if context.validation.dump_issues():
-            raise exceptions.ParsingError('Failed to parse service template')
-        return context
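A hedged sketch of driving ``Core`` directly; how ``model_storage``, ``resource_storage`` and ``plugin_manager`` are constructed is not shown here and is assumed to follow the CLI environment setup, and ``service_template_id`` is a placeholder::

    core = Core(model_storage, resource_storage, plugin_manager)

    # parse-only check of a local service template
    core.validate_service_template('/path/to/service-template.yaml')

    # store the template, then instantiate a service from it
    core.create_service_template('/path/to/service-template.yaml',
                                 service_template_dir='/path/to',
                                 service_template_name='my-template')

    # the new template's ID would then be looked up from model storage (lookup not shown)
    service = core.create_service(service_template_id, inputs={}, service_name='my-service')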
diff --git a/apache-ariatosca-0.1.1/aria/exceptions.py b/apache-ariatosca-0.1.1/aria/exceptions.py
deleted file mode 100644
index 5d3e21d..0000000
--- a/apache-ariatosca-0.1.1/aria/exceptions.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Base exception classes and other common exceptions used throughout ARIA.
-"""
-
-import sys
-
-
-class AriaError(Exception):
-    """
-    Base class for ARIA errors.
-    """
-    pass
-
-
-class AriaException(Exception):
-    """
-    Base class for ARIA exceptions.
-    """
-
-    def __init__(self, message=None, cause=None, cause_traceback=None):
-        super(AriaException, self).__init__(message)
-        self.cause = cause
-        self.issue = None
-        if cause_traceback is None:
-            _, e, traceback = sys.exc_info()
-            if cause == e:
-                # Make sure it's our traceback
-                cause_traceback = traceback
-        self.cause_traceback = cause_traceback
-
-
-class DependentServicesError(AriaError):
-    """
-    Raised when attempting to delete a service template which has existing services.
-    """
-    pass
-
-
-class DependentActiveExecutionsError(AriaError):
-    """
-    Raised when attempting to delete a service which has active executions.
-    """
-    pass
-
-
-class DependentAvailableNodesError(AriaError):
-    """
-    Raised when attempting to delete a service which has available nodes.
-    """
-    pass
-
-
-class ParsingError(AriaError):
-    pass
-
-
-class InstantiationError(AriaError):
-    pass
diff --git a/apache-ariatosca-0.1.1/aria/extension.py b/apache-ariatosca-0.1.1/aria/extension.py
deleted file mode 100644
index e90750d..0000000
--- a/apache-ariatosca-0.1.1/aria/extension.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Mechanism for registering and loading ARIA extensions.
-"""
-
-# pylint: disable=no-self-use
-
-from .utils import collections
-
-
-class _Registrar(object):
-
-    def __init__(self, registry):
-        if not isinstance(registry, (dict, list)):
-            raise RuntimeError('Unsupported registry type')
-        self._registry = registry
-
-    def register(self, function):
-        result = function()
-        if isinstance(self._registry, dict):
-            for key in result:
-                if key in self._registry:
-                    raise RuntimeError('Re-definition of {0} in {1}'.format(key, function.__name__))
-            self._registry.update(result)
-        elif isinstance(self._registry, list):
-            if not isinstance(result, (list, tuple, set)):
-                result = [result]
-            self._registry += list(result)
-        else:
-            raise RuntimeError('Illegal state')
-
-    def __call__(self):
-        return self._registry
-
-
-def _registrar(function):
-    function._registrar_function = True
-    return function
-
-
-class _ExtensionRegistration(object):
-    """
-    Base class for extension class decorators.
-    """
-
-    def __init__(self):
-        self._registrars = {}
-        self._registered_classes = []
-        for attr, value in vars(self.__class__).items():
-            try:
-                is_registrar_function = value._registrar_function
-            except AttributeError:
-                is_registrar_function = False
-            if is_registrar_function:
-                registrar = _Registrar(registry=getattr(self, attr)())
-                setattr(self, attr, registrar)
-                self._registrars[attr] = registrar
-
-    def __call__(self, cls):
-        self._registered_classes.append(cls)
-        return cls
-
-    def init(self):
-        """
-        Initialize all registrars by calling all registered functions.
-        """
-        registered_instances = [cls() for cls in self._registered_classes]
-        for name, registrar in self._registrars.items():
-            for instance in registered_instances:
-                registrating_function = getattr(instance, name, None)
-                if registrating_function:
-                    registrar.register(registrating_function)
-
-
-class _ParserExtensionRegistration(_ExtensionRegistration):
-    """
-    Parser extensions class decorator.
-    """
-
-    @_registrar
-    def presenter_class(self):
-        """
-        Presentation class registration.
-
-        Implementing functions can return a single class or a list/tuple of classes.
-        """
-        return []
-
-    @_registrar
-    def specification_package(self):
-        """
-        Specification package registration.
-
-        Implementing functions can return a package name or a list/tuple of names.
-        """
-        return []
-
-    @_registrar
-    def specification_url(self):
-        """
-        Specification URL registration.
-
-        Implementing functions should return a dictionary from names to URLs.
-        """
-        return {}
-
-    @_registrar
-    def uri_loader_prefix(self):
-        """
-        URI loader prefix registration.
-
-        Implementing functions can return a single prefix or a list/tuple of prefixes.
-        """
-        return collections.StrictList(value_class=basestring)
-
-parser = _ParserExtensionRegistration()
-
-
-class _ProcessExecutorExtensionRegistration(_ExtensionRegistration):
-    """
-    Process executor extension class decorator.
-    """
-
-    @_registrar
-    def decorate(self):
-        """
-        The operation function executed by the process executor will be decorated with the function
-        returned from ``decorate()``.
-        """
-        return []
-
-process_executor = _ProcessExecutorExtensionRegistration()
-
-
-def init():
-    """
-    Initialize all registrars by calling all registered functions.
-    """
-    parser.init()
-    process_executor.init()
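For example, a parser extension that registers an extra URI loader prefix could be declared as follows; the class name and prefix are illustrative::

    from aria import extension

    @extension.parser
    class MyParserExtension(object):

        def uri_loader_prefix(self):
            # an additional directory to search when resolving imports
            return ['/opt/my-tosca-profiles']

    # extension.init() is normally invoked for you via install_aria_extensions()
    extension.init()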
diff --git a/apache-ariatosca-0.1.1/aria/logger.py b/apache-ariatosca-0.1.1/aria/logger.py
deleted file mode 100644
index f4f6ec9..0000000
--- a/apache-ariatosca-0.1.1/aria/logger.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Mix-ins and functions for logging, supporting multiple backends (such as SQL) and consistent
-formatting.
-"""
-
-import logging
-from logging import handlers as logging_handlers
-# NullHandler doesn't exist in Python < 2.7. This workaround is from
-# http://docs.python.org/release/2.6/library/logging.html#configuring-logging-for-a-library
-try:
-    from logging import NullHandler                                                                 # pylint: disable=unused-import
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-from datetime import datetime
-
-
-TASK_LOGGER_NAME = 'aria.executions.task'
-
-
-_base_logger = logging.getLogger('aria')
-
-
-class LoggerMixin(object):
-    """
-    Provides logging functionality to a class.
-
-    :ivar logger_name: logger name; defaults to the class name
-    :ivar logger_level: logger level; defaults to ``logging.DEBUG``
-    :ivar base_logger: child loggers are created from this; defaults to the root logger
-    """
-    logger_name = None
-    logger_level = logging.DEBUG
-
-    def __init__(self, *args, **kwargs):
-        self.logger_name = self.logger_name or self.__class__.__name__
-        self.logger = logging.getLogger('{0}.{1}'.format(_base_logger.name, self.logger_name))
-        # Set the logger handler of any object derived from LoggerMixin to NullHandler.
-        # This is because the absence of a handler would otherwise show up while using the CLI as:
-        # `No handlers could be found for logger "..."`.
-        self.logger.addHandler(NullHandler())
-        self.logger.setLevel(self.logger_level)
-        super(LoggerMixin, self).__init__(*args, **kwargs)
-
-    @classmethod
-    def with_logger(
-            cls,
-            logger_name=None,
-            logger_level=logging.DEBUG,
-            base_logger=logging.getLogger(),
-            **kwargs):
-        """
-        Set the logger used by the consuming class.
-        """
-        cls.logger_name = logger_name
-        cls.logger_level = logger_level
-        cls.base_logger = base_logger
-        return cls(**kwargs)
-
-    def __getstate__(self):
-        obj_dict = vars(self).copy()
-        del obj_dict['logger']
-        return obj_dict
-
-    def __setstate__(self, obj_dict):
-        vars(self).update(
-            logger=logging.getLogger('{0}.{1}'.format(_base_logger.name, obj_dict['logger_name'])),
-            **obj_dict)
-
-
-def create_logger(logger=_base_logger, handlers=(), **configs):
-    """
-    :param logger: logger to configure; defaults to the base ARIA logger
-    :type logger: logging.Logger
-    :param handlers: logger handlers
-    :type handlers: []
-    :param configs: logger configurations
-    :type configs: []
-    :return: logger
-    """
-    logger.handlers = []
-    for handler in handlers:
-        logger.addHandler(handler)
-
-    logger.setLevel(configs.get('level', logging.DEBUG))
-    logger.debug('Logger {0} configured'.format(logger.name))
-    return logger
-
-
-def create_console_log_handler(level=logging.DEBUG, formatter=None):
-    """
-    :param level: logging level for the console handler
-    :param formatter: formatter to use; defaults to :class:`_DefaultConsoleFormat`
-    """
-    console = logging.StreamHandler()
-    console.setLevel(level)
-    console.formatter = formatter or _DefaultConsoleFormat()
-    return console
-
-
-def create_sqla_log_handler(model, log_cls, execution_id, level=logging.DEBUG):
-
-    # Since the engine and session are entirely new, we need to reflect the DB schema of the
-    # logging model into the engine and session.
-    return _SQLAlchemyHandler(model=model, log_cls=log_cls, execution_id=execution_id, level=level)
-
-
-class _DefaultConsoleFormat(logging.Formatter):
-    """
-    Formats records as ``<%(asctime)s: [%(levelname)s]> %(message)s``, adding ``@%(prefix)s``
-    when the record carries a ``prefix`` attribute.
-    """
-    def format(self, record):
-        try:
-            if hasattr(record, 'prefix'):
-                self._fmt = '<%(asctime)s: [%(levelname)s] @%(prefix)s> %(message)s'
-            else:
-                self._fmt = '<%(asctime)s: [%(levelname)s]> %(message)s'
-
-        except AttributeError:
-            return record.message
-        return logging.Formatter.format(self, record)
-
-
-def create_file_log_handler(
-        file_path,
-        level=logging.DEBUG,
-        max_bytes=5 * 1000 * 1024,
-        backup_count=10,
-        formatter=None):
-    """
-    Create a :class:`logging.handlers.RotatingFileHandler`.
-    """
-    rotating_file = logging_handlers.RotatingFileHandler(
-        filename=file_path,
-        maxBytes=max_bytes,
-        backupCount=backup_count,
-        delay=True,
-    )
-    rotating_file.setLevel(level)
-    rotating_file.formatter = formatter or _default_file_formatter
-    return rotating_file
-
-
-class _SQLAlchemyHandler(logging.Handler):
-    def __init__(self, model, log_cls, execution_id, **kwargs):
-        logging.Handler.__init__(self, **kwargs)
-        self._model = model
-        self._cls = log_cls
-        self._execution_id = execution_id
-
-    def emit(self, record):
-        created_at = datetime.strptime(logging.Formatter('%(asctime)s').formatTime(record),
-                                       '%Y-%m-%d %H:%M:%S,%f')
-        log = self._cls(
-            execution_fk=self._execution_id,
-            task_fk=record.task_id,
-            level=record.levelname,
-            msg=str(record.msg),
-            created_at=created_at,
-
-            # Not mandatory.
-            traceback=getattr(record, 'traceback', None)
-        )
-        self._model.log.put(log)
-
-
-_default_file_formatter = logging.Formatter(
-    '%(asctime)s [%(name)s:%(levelname)s] %(message)s <%(pathname)s:%(lineno)d>')
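A small sketch combining the helpers above: attach a console handler to the base ARIA logger and let any class obtain a child logger through the mix-in (the ``Worker`` class is illustrative)::

    import logging

    from aria.logger import LoggerMixin, create_logger, create_console_log_handler

    create_logger(handlers=[create_console_log_handler(level=logging.INFO)], level=logging.INFO)

    class Worker(LoggerMixin):
        def run(self):
            self.logger.info('working...')    # emitted via the 'aria.Worker' child logger

    Worker().run()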
diff --git a/apache-ariatosca-0.1.1/aria/modeling/constraints.py b/apache-ariatosca-0.1.1/aria/modeling/constraints.py
deleted file mode 100644
index 8ed33d5..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/constraints.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Constraints for the requirements-and-capabilities matching mechanism.
-"""
-
-class NodeTemplateConstraint(object):
-    """
-    Used to constrain requirements for node templates.
-
-    Must be serializable.
-    """
-
-    def matches(self, source_node_template, target_node_template):
-        """
-        Returns ``True`` if the target matches the constraint for the source.
-        """
-        raise NotImplementedError
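A constraint implementation only needs to provide ``matches``; for instance, a purely illustrative constraint that accepts only target node templates whose name starts with a given prefix::

    class NameStartsWith(NodeTemplateConstraint):

        def __init__(self, prefix):
            self.prefix = prefix

        def matches(self, source_node_template, target_node_template):
            # True only when the target's template name carries the expected prefix
            return target_node_template.name.startswith(self.prefix)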
diff --git a/apache-ariatosca-0.1.1/aria/modeling/exceptions.py b/apache-ariatosca-0.1.1/aria/modeling/exceptions.py
deleted file mode 100644
index 573efaf..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/exceptions.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Modeling exceptions.
-"""
-
-from ..exceptions import AriaException
-
-
-class ModelingException(AriaException):
-    """
-    ARIA modeling exception.
-    """
-
-
-class ParameterException(ModelingException):
-    """
-    ARIA parameter exception.
-    """
-    pass
-
-
-class ValueFormatException(ModelingException):
-    """
-    ARIA modeling exception: the value is in the wrong format.
-    """
-
-
-class CannotEvaluateFunctionException(ModelingException):
-    """
-    ARIA modeling exception: cannot evaluate the function at this time.
-    """
-
-
-class MissingRequiredParametersException(ParameterException):
-    """
-    ARIA modeling exception: Required parameters have been omitted.
-    """
-
-
-class ParametersOfWrongTypeException(ParameterException):
-    """
-    ARIA modeling exception: Parameters of the wrong types have been provided.
-    """
-
-
-class UndeclaredParametersException(ParameterException):
-    """
-    ARIA modeling exception: Undeclared parameters have been provided.
-    """
diff --git a/apache-ariatosca-0.1.1/aria/modeling/functions.py b/apache-ariatosca-0.1.1/aria/modeling/functions.py
deleted file mode 100644
index 6544adf..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/functions.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Mechanism for evaluating intrinsic functions.
-"""
-
-from ..parser.consumption import ConsumptionContext
-from ..parser.exceptions import InvalidValueError
-from ..utils.collections import OrderedDict
-from . import exceptions
-
-
-class Function(object):
-    """
-    Base class for intrinsic functions. Serves as a placeholder for a value that should eventually
-    be derived by "evaluating" (calling) the function.
-
-    Note that this base class is provided as a convenience and you do not have to inherit it: any
-    object with an ``__evaluate__`` method would be treated similarly.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def __evaluate__(self, container_holder):
-        """
-        Evaluates the function if possible.
-
-        :rtype: :class:`Evaluation` (or any object with ``value`` and ``final`` properties)
-        :raises CannotEvaluateFunctionException: if the function cannot be evaluated at this time
-         (do *not* just return ``None``)
-        """
-
-        raise NotImplementedError
-
-    def __deepcopy__(self, memo):
-        # Circumvent cloning in order to maintain our state
-        return self
-
-
-class Evaluation(object):
-    """
-    An evaluated :class:`Function` return value.
-
-    :ivar value: evaluated value
-    :ivar final: whether the value is final
-    :vartype final: boolean
-    """
-
-    def __init__(self, value, final=False):
-        self.value = value
-        self.final = final
-
-
-def evaluate(value, container_holder, report_issues=False): # pylint: disable=too-many-branches
-    """
-    Recursively attempts to call ``__evaluate__``. If an evaluation occurred, returns an
-    :class:`Evaluation`, otherwise returns ``None``. If any evaluation is non-final, then the
-    entire evaluation will also be non-final.
-
-    The ``container_holder`` argument should have three properties: ``container`` should return
-    the model that contains the value, ``service`` should return the containing
-    :class:`~aria.modeling.models.Service` model or ``None``, and ``service_template`` should return the
-    containing :class:`~aria.modeling.models.ServiceTemplate` model or ``None``.
-    """
-
-    evaluated = False
-    final = True
-
-    if hasattr(value, '__evaluate__'):
-        try:
-            evaluation = value.__evaluate__(container_holder)
-
-            # Verify evaluation structure
-            if (evaluation is None) \
-                or (not hasattr(evaluation, 'value')) \
-                or (not hasattr(evaluation, 'final')):
-                raise InvalidValueError('bad __evaluate__ implementation')
-
-            evaluated = True
-            value = evaluation.value
-            final = evaluation.final
-
-            # The evaluated value might itself be evaluable
-            evaluation = evaluate(value, container_holder, report_issues)
-            if evaluation is not None:
-                value = evaluation.value
-                if not evaluation.final:
-                    final = False
-        except exceptions.CannotEvaluateFunctionException:
-            pass
-        except InvalidValueError as e:
-            if report_issues:
-                context = ConsumptionContext.get_thread_local()
-                context.validation.report(e.issue)
-
-    elif isinstance(value, list):
-        evaluated_list = []
-        for v in value:
-            evaluation = evaluate(v, container_holder, report_issues)
-            if evaluation is not None:
-                evaluated_list.append(evaluation.value)
-                evaluated = True
-                if not evaluation.final:
-                    final = False
-            else:
-                evaluated_list.append(v)
-        if evaluated:
-            value = evaluated_list
-
-    elif isinstance(value, dict):
-        evaluated_dict = OrderedDict()
-        for k, v in value.iteritems():
-            evaluation = evaluate(v, container_holder, report_issues)
-            if evaluation is not None:
-                evaluated_dict[k] = evaluation.value
-                evaluated = True
-                if not evaluation.final:
-                    final = False
-            else:
-                evaluated_dict[k] = v
-        if evaluated:
-            value = evaluated_dict
-
-    return Evaluation(value, final) if evaluated else None
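As an illustrative instance of the contract above, an intrinsic function that resolves to the name of its containing service could look like this (the function name and behavior are made up)::

    from aria.modeling.functions import Function, Evaluation
    from aria.modeling import exceptions

    class GetServiceName(Function):

        @property
        def as_raw(self):
            return {'get_service_name': []}

        def __evaluate__(self, container_holder):
            service = container_holder.service
            if service is None:
                # not inside a service instance yet -- ask to be re-evaluated later
                raise exceptions.CannotEvaluateFunctionException()
            return Evaluation(service.name, final=True)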
diff --git a/apache-ariatosca-0.1.1/aria/modeling/mixins.py b/apache-ariatosca-0.1.1/aria/modeling/mixins.py
deleted file mode 100644
index 883ff4a..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/mixins.py
+++ /dev/null
@@ -1,373 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling mix-ins module
-"""
-
-from sqlalchemy.ext import associationproxy
-from sqlalchemy import (
-    Column,
-    Integer,
-    Text,
-    PickleType
-)
-
-from ..parser.consumption import ConsumptionContext
-from ..utils import console, collections, caching, formatting
-from ..utils.type import canonical_type_name, full_type_name
-from . import utils, functions
-
-
-class ModelMixin(object):
-
-    @utils.classproperty
-    def __modelname__(cls):                                                                         # pylint: disable=no-self-argument
-        return getattr(cls, '__mapiname__', cls.__tablename__)
-
-    @classmethod
-    def id_column_name(cls):
-        raise NotImplementedError
-
-    @classmethod
-    def name_column_name(cls):
-        raise NotImplementedError
-
-    def to_dict(self, fields=None, suppress_error=False):
-        """
-        Create a dict representation of the model.
-
-        :param suppress_error: if set to ``True``, attributes that cannot be retrieved are set to
-         ``None`` (e.g., if a relationship wasn't established yet, and so it's impossible to access
-         a property through it)
-        """
-
-        res = dict()
-        fields = fields or self.fields()
-        for field in fields:
-            try:
-                field_value = getattr(self, field)
-            except AttributeError:
-                if suppress_error:
-                    field_value = None
-                else:
-                    raise
-            if isinstance(field_value, list):
-                field_value = list(field_value)
-            elif isinstance(field_value, dict):
-                field_value = dict(field_value)
-            elif isinstance(field_value, ModelMixin):
-                field_value = field_value.to_dict()
-            res[field] = field_value
-
-        return res
-
-    @classmethod
-    def fields(cls):
-        """
-        List of field names for this table.
-
-        Mostly for backwards compatibility in the code (that uses ``fields``).
-        """
-
-        fields = set(cls._iter_association_proxies())
-        fields.update(cls.__table__.columns.keys())
-        return fields - set(getattr(cls, '__private_fields__', ()))
-
-    @classmethod
-    def _iter_association_proxies(cls):
-        for col, value in vars(cls).items():
-            if isinstance(value, associationproxy.AssociationProxy):
-                yield col
-
-    def __repr__(self):
-        return '<{cls} id=`{id}`>'.format(
-            cls=self.__class__.__name__,
-            id=getattr(self, self.name_column_name()))
-
-
-class ModelIDMixin(object):
-    id = Column(Integer, primary_key=True, autoincrement=True, doc="""
-    Unique ID.
-    
-    :type: :obj:`int`
-    """)
-
-    name = Column(Text, index=True, doc="""
-    Model name.
-    
-    :type: :obj:`basestring`
-    """)
-
-    @classmethod
-    def id_column_name(cls):
-        return 'id'
-
-    @classmethod
-    def name_column_name(cls):
-        return 'name'
-
-
-class InstanceModelMixin(ModelMixin):
-    """
-    Mix-in for service instance models.
-
-    All models support validation, diagnostic dumping, and representation as raw data (which can be
-    translated into JSON or YAML) via :meth:`as_raw`.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def validate(self):
-        pass
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def dump(self):
-        pass
-
-
-class TemplateModelMixin(InstanceModelMixin):
-    """
-    Mix-in for service template models.
-
-    All model models can be instantiated into service instance models.
-    """
-
-    def instantiate(self, container):
-        raise NotImplementedError
-
-
-class ParameterMixin(TemplateModelMixin, caching.HasCachedMethods):                                 #pylint: disable=abstract-method
-    """
-    Mix-in for typed values. The value can contain nested intrinsic functions.
-
-    This model can be used as the ``container_holder`` argument for
-    :func:`~aria.modeling.functions.evaluate`.
-    """
-
-    __tablename__ = 'parameter'
-
-    type_name = Column(Text, doc="""
-    Type name.
-    
-    :type: :obj:`basestring`
-    """)
-
-    description = Column(Text, doc="""
-    Human-readable description.
-    
-    :type: :obj:`basestring`
-    """)
-
-    _value = Column(PickleType)
-
-    @property
-    def value(self):
-        value = self._value
-        if value is not None:
-            evaluation = functions.evaluate(value, self)
-            if evaluation is not None:
-                value = evaluation.value
-        return value
-
-    @value.setter
-    def value(self, value):
-        self._value = value
-
-    @property
-    @caching.cachedmethod
-    def owner(self):
-        """
-        The sole owner of this parameter, which is another model that relates to it.
-
-        *All* parameters should have an owner model.
-
-        :raises ~exceptions.ValueError: if failed to find an owner, which signifies an abnormal,
-         orphaned parameter
-        """
-
-        # Find first non-null relationship
-        for the_relationship in self.__mapper__.relationships:
-            v = getattr(self, the_relationship.key)
-            if v:
-                return v
-
-        raise ValueError('orphaned {class_name}: does not have an owner: {name}'.format(
-            class_name=type(self).__name__, name=self.name))
-
-    @property
-    @caching.cachedmethod
-    def container(self): # pylint: disable=too-many-return-statements,too-many-branches
-        """
-        The logical container for this parameter, which would be another model: service, node,
-        group, or policy (or their templates).
-
-        The logical container is equivalent to the ``SELF`` keyword used by intrinsic functions in
-        TOSCA.
-
-        *All* parameters should have a container model.
-
-        :raises ~exceptions.ValueError: if failed to find a container model, which signifies an
-         abnormal, orphaned parameter
-        """
-
-        from . import models
-
-        container = self.owner
-
-        # Extract interface from operation
-        if isinstance(container, models.Operation):
-            container = container.interface
-        elif isinstance(container, models.OperationTemplate):
-            container = container.interface_template
-
-        # Extract from other models
-        if isinstance(container, models.Interface):
-            container = container.node or container.group or container.relationship
-        elif isinstance(container, models.InterfaceTemplate):
-            container = container.node_template or container.group_template \
-                        or container.relationship_template
-        elif isinstance(container, models.Capability) or isinstance(container, models.Artifact):
-            container = container.node
-        elif isinstance(container, models.CapabilityTemplate) \
-                or isinstance(container, models.ArtifactTemplate):
-            container = container.node_template
-        elif isinstance(container, models.Task):
-            container = container.actor
-
-        # Extract node from relationship
-        if isinstance(container, models.Relationship):
-            container = container.source_node
-        elif isinstance(container, models.RelationshipTemplate):
-            container = container.requirement_template.node_template
-
-        if container is not None:
-            return container
-
-        raise ValueError('orphaned parameter: does not have a container: {0}'.format(self.name))
-
-    @property
-    @caching.cachedmethod
-    def service(self):
-        """
-        The :class:`~aria.modeling.models.Service` model containing this parameter, or ``None`` if
-        not contained in a service.
-
-        :raises ~exceptions.ValueError: if failed to find a container model, which signifies an
-         abnormal, orphaned parameter
-        """
-
-        from . import models
-        container = self.container
-        if isinstance(container, models.Service):
-            return container
-        elif hasattr(container, 'service'):
-            return container.service
-        return None
-
-    @property
-    @caching.cachedmethod
-    def service_template(self):
-        """
-        The :class:`~aria.modeling.models.ServiceTemplate` model containing this parameter, or
-        ``None`` if not contained in a service template.
-
-        :raises ~exceptions.ValueError: if failed to find a container model, which signifies an
-         abnormal, orphaned parameter
-        """
-
-        from . import models
-        container = self.container
-        if isinstance(container, models.ServiceTemplate):
-            return container
-        elif hasattr(container, 'service_template'):
-            return container.service_template
-        return None
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type_name),
-            ('value', self.value),
-            ('description', self.description)))
-
-    def instantiate(self, container):
-        return self.__class__(name=self.name,  # pylint: disable=unexpected-keyword-arg
-                              type_name=self.type_name,
-                              _value=self._value,
-                              description=self.description)
-
-    def coerce_values(self, report_issues):
-        value = self._value
-        if value is not None:
-            evaluation = functions.evaluate(value, self, report_issues)
-            if (evaluation is not None) and evaluation.final:
-                # A final evaluation can safely replace the existing value
-                self._value = evaluation.value
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.type_name is not None:
-            console.puts('{0}: {1} ({2})'.format(
-                context.style.property(self.name),
-                context.style.literal(formatting.as_raw(self.value)),
-                context.style.type(self.type_name)))
-        else:
-            console.puts('{0}: {1}'.format(
-                context.style.property(self.name),
-                context.style.literal(formatting.as_raw(self.value))))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-
-    @property
-    def unwrapped(self):
-        return self.name, self.value
-
-    @classmethod
-    def wrap(cls, name, value, description=None):
-        """
-        Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
-
-        For primitive types, we will prefer their TOSCA aliases. See the `TOSCA Simple Profile v1.0
-        cos01 specification <http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01
-        /TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc373867862>`__
-
-        :param name: parameter name
-        :type name: basestring
-        :param value: parameter value
-        :param description: human-readable description (optional)
-        :type description: basestring
-        """
-
-        type_name = canonical_type_name(value)
-        if type_name is None:
-            type_name = full_type_name(value)
-        return cls(name=name,  # pylint: disable=unexpected-keyword-arg
-                   type_name=type_name,
-                   value=value,
-                   description=description)
-
-    def as_other_parameter_model(self, other_model_cls):
-        name, value = self.unwrapped
-        return other_model_cls.wrap(name, value)
-
-    def as_argument(self):
-        from . import models
-        return self.as_other_parameter_model(models.Argument)
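
The ``wrap()``/``unwrapped`` pair above is how ARIA stores arbitrary values as typed parameters: the type name is guessed by introspection, preferring a TOSCA alias for primitives and falling back to the full Python type name. A minimal standalone sketch of that guessing step (the alias table is an assumed stand-in for the ``canonical_type_name``/``full_type_name`` helpers, not their actual implementation)::

    def guess_type_name(value):
        # Assumed TOSCA aliases for primitive types; the real helper may differ.
        tosca_aliases = {str: 'string', int: 'integer', float: 'float',
                         bool: 'boolean', list: 'list', dict: 'map'}
        alias = tosca_aliases.get(type(value))
        if alias is not None:
            return alias
        # Fallback: the full Python type name, mirroring the full_type_name() fallback above
        return '{0}.{1}'.format(type(value).__module__, type(value).__name__)

    print(guess_type_name(8080))      # integer
    print(guess_type_name(object()))  # __builtin__.object (builtins.object on Python 3)
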
diff --git a/apache-ariatosca-0.1.1/aria/modeling/models.py b/apache-ariatosca-0.1.1/aria/modeling/models.py
deleted file mode 100644
index cf84fdb..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/models.py
+++ /dev/null
@@ -1,427 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Data models.
-
-Service template models
------------------------
-
-.. autosummary::
-   :nosignatures:
-
-   aria.modeling.models.ServiceTemplate
-   aria.modeling.models.NodeTemplate
-   aria.modeling.models.GroupTemplate
-   aria.modeling.models.PolicyTemplate
-   aria.modeling.models.SubstitutionTemplate
-   aria.modeling.models.SubstitutionTemplateMapping
-   aria.modeling.models.RequirementTemplate
-   aria.modeling.models.RelationshipTemplate
-   aria.modeling.models.CapabilityTemplate
-   aria.modeling.models.InterfaceTemplate
-   aria.modeling.models.OperationTemplate
-   aria.modeling.models.ArtifactTemplate
-   aria.modeling.models.PluginSpecification
-
-Service instance models
------------------------
-
-.. autosummary::
-   :nosignatures:
-
-   aria.modeling.models.Service
-   aria.modeling.models.Node
-   aria.modeling.models.Group
-   aria.modeling.models.Policy
-   aria.modeling.models.Substitution
-   aria.modeling.models.SubstitutionMapping
-   aria.modeling.models.Relationship
-   aria.modeling.models.Capability
-   aria.modeling.models.Interface
-   aria.modeling.models.Operation
-   aria.modeling.models.Artifact
-
-Common models
--------------
-
-.. autosummary::
-   :nosignatures:
-
-   aria.modeling.models.Output
-   aria.modeling.models.Input
-   aria.modeling.models.Configuration
-   aria.modeling.models.Property
-   aria.modeling.models.Attribute
-   aria.modeling.models.Type
-   aria.modeling.models.Metadata
-
-Orchestration models
---------------------
-
-.. autosummary::
-   :nosignatures:
-
-   aria.modeling.models.Execution
-   aria.modeling.models.Task
-   aria.modeling.models.Log
-   aria.modeling.models.Plugin
-   aria.modeling.models.Argument
-"""
-
-# pylint: disable=abstract-method
-
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import (
-    Column,
-    Text
-)
-
-from . import (
-    service_template,
-    service_instance,
-    service_changes,
-    service_common,
-    orchestration,
-    mixins,
-    utils
-)
-
-
-aria_declarative_base = declarative_base(cls=mixins.ModelIDMixin)
-
-
-# See also models_to_register at the bottom of this file
-__all__ = (
-    'models_to_register',
-
-    # Service template models
-    'ServiceTemplate',
-    'NodeTemplate',
-    'GroupTemplate',
-    'PolicyTemplate',
-    'SubstitutionTemplate',
-    'SubstitutionTemplateMapping',
-    'RequirementTemplate',
-    'RelationshipTemplate',
-    'CapabilityTemplate',
-    'InterfaceTemplate',
-    'OperationTemplate',
-    'ArtifactTemplate',
-    'PluginSpecification',
-
-    # Service instance models
-    'Service',
-    'Node',
-    'Group',
-    'Policy',
-    'Substitution',
-    'SubstitutionMapping',
-    'Relationship',
-    'Capability',
-    'Interface',
-    'Operation',
-    'Artifact',
-
-    # Service changes models
-    'ServiceUpdate',
-    'ServiceUpdateStep',
-    'ServiceModification',
-
-    # Common service models
-    'Input',
-    'Configuration',
-    'Output',
-    'Property',
-    'Attribute',
-    'Type',
-    'Metadata',
-
-    # Orchestration models
-    'Execution',
-    'Plugin',
-    'Task',
-    'Log',
-    'Argument'
-)
-
-
-# region service template models
-
-@utils.fix_doc
-class ServiceTemplate(aria_declarative_base, service_template.ServiceTemplateBase):
-    name = Column(Text, index=True, unique=True)
-
-
-@utils.fix_doc
-class NodeTemplate(aria_declarative_base, service_template.NodeTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class GroupTemplate(aria_declarative_base, service_template.GroupTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class PolicyTemplate(aria_declarative_base, service_template.PolicyTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class SubstitutionTemplate(aria_declarative_base, service_template.SubstitutionTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class SubstitutionTemplateMapping(aria_declarative_base,
-                                  service_template.SubstitutionTemplateMappingBase):
-    pass
-
-
-@utils.fix_doc
-class RequirementTemplate(aria_declarative_base, service_template.RequirementTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class RelationshipTemplate(aria_declarative_base, service_template.RelationshipTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class CapabilityTemplate(aria_declarative_base, service_template.CapabilityTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class InterfaceTemplate(aria_declarative_base, service_template.InterfaceTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class OperationTemplate(aria_declarative_base, service_template.OperationTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class ArtifactTemplate(aria_declarative_base, service_template.ArtifactTemplateBase):
-    pass
-
-
-@utils.fix_doc
-class PluginSpecification(aria_declarative_base, service_template.PluginSpecificationBase):
-    pass
-
-# endregion
-
-
-# region service instance models
-
-@utils.fix_doc
-class Service(aria_declarative_base, service_instance.ServiceBase):
-    name = Column(Text, index=True, unique=True)
-
-
-@utils.fix_doc
-class Node(aria_declarative_base, service_instance.NodeBase):
-    pass
-
-
-@utils.fix_doc
-class Group(aria_declarative_base, service_instance.GroupBase):
-    pass
-
-
-@utils.fix_doc
-class Policy(aria_declarative_base, service_instance.PolicyBase):
-    pass
-
-
-@utils.fix_doc
-class Substitution(aria_declarative_base, service_instance.SubstitutionBase):
-    pass
-
-
-@utils.fix_doc
-class SubstitutionMapping(aria_declarative_base, service_instance.SubstitutionMappingBase):
-    pass
-
-
-@utils.fix_doc
-class Relationship(aria_declarative_base, service_instance.RelationshipBase):
-    pass
-
-
-@utils.fix_doc
-class Capability(aria_declarative_base, service_instance.CapabilityBase):
-    pass
-
-
-@utils.fix_doc
-class Interface(aria_declarative_base, service_instance.InterfaceBase):
-    pass
-
-
-@utils.fix_doc
-class Operation(aria_declarative_base, service_instance.OperationBase):
-    pass
-
-
-@utils.fix_doc
-class Artifact(aria_declarative_base, service_instance.ArtifactBase):
-    pass
-
-# endregion
-
-
-# region service changes models
-
-@utils.fix_doc
-class ServiceUpdate(aria_declarative_base, service_changes.ServiceUpdateBase):
-    pass
-
-
-@utils.fix_doc
-class ServiceUpdateStep(aria_declarative_base, service_changes.ServiceUpdateStepBase):
-    pass
-
-
-@utils.fix_doc
-class ServiceModification(aria_declarative_base, service_changes.ServiceModificationBase):
-    pass
-
-# endregion
-
-
-# region common service models
-
-@utils.fix_doc
-class Input(aria_declarative_base, service_common.InputBase):
-    pass
-
-
-@utils.fix_doc
-class Configuration(aria_declarative_base, service_common.ConfigurationBase):
-    pass
-
-
-@utils.fix_doc
-class Output(aria_declarative_base, service_common.OutputBase):
-    pass
-
-
-@utils.fix_doc
-class Property(aria_declarative_base, service_common.PropertyBase):
-    pass
-
-
-@utils.fix_doc
-class Attribute(aria_declarative_base, service_common.AttributeBase):
-    pass
-
-
-@utils.fix_doc
-class Type(aria_declarative_base, service_common.TypeBase):
-    pass
-
-
-@utils.fix_doc
-class Metadata(aria_declarative_base, service_common.MetadataBase):
-    pass
-
-# endregion
-
-
-# region orchestration models
-
-@utils.fix_doc
-class Execution(aria_declarative_base, orchestration.ExecutionBase):
-    pass
-
-
-@utils.fix_doc
-class Plugin(aria_declarative_base, orchestration.PluginBase):
-    pass
-
-
-@utils.fix_doc
-class Task(aria_declarative_base, orchestration.TaskBase):
-    pass
-
-
-@utils.fix_doc
-class Log(aria_declarative_base, orchestration.LogBase):
-    pass
-
-
-@utils.fix_doc
-class Argument(aria_declarative_base, orchestration.ArgumentBase):
-    pass
-
-# endregion
-
-
-# See also __all__ at the top of this file
-models_to_register = (
-    # Service template models
-    ServiceTemplate,
-    NodeTemplate,
-    GroupTemplate,
-    PolicyTemplate,
-    SubstitutionTemplate,
-    SubstitutionTemplateMapping,
-    RequirementTemplate,
-    RelationshipTemplate,
-    CapabilityTemplate,
-    InterfaceTemplate,
-    OperationTemplate,
-    ArtifactTemplate,
-    PluginSpecification,
-
-    # Service instance models
-    Service,
-    Node,
-    Group,
-    Policy,
-    SubstitutionMapping,
-    Substitution,
-    Relationship,
-    Capability,
-    Interface,
-    Operation,
-    Artifact,
-
-    # Service changes models
-    ServiceUpdate,
-    ServiceUpdateStep,
-    ServiceModification,
-
-    # Common service models
-    Input,
-    Configuration,
-    Output,
-    Property,
-    Attribute,
-    Type,
-    Metadata,
-
-    # Orchestration models
-    Execution,
-    Plugin,
-    Task,
-    Log,
-    Argument
-)
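
Every concrete model above follows the same pattern: the shared declarative base (built from the ID mixin) is combined with a behaviour base class, and the concrete class only adds table-specific columns such as a unique ``name``. A minimal sketch of that pattern in plain SQLAlchemy, not using ARIA itself (``IDMixinSketch`` and ``ServiceTemplateSketch`` are illustrative names)::

    from sqlalchemy import Column, Integer, Text, create_engine
    from sqlalchemy.ext.declarative import declarative_base

    class IDMixinSketch(object):
        # Stand-in for mixins.ModelIDMixin: a surrogate primary key shared by all tables
        id = Column(Integer, primary_key=True, autoincrement=True)

    Base = declarative_base(cls=IDMixinSketch)

    class ServiceTemplateSketch(Base):
        # Analogous to ServiceTemplate above: only the unique name column is added here
        __tablename__ = 'service_template'
        name = Column(Text, index=True, unique=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)  # roughly what registering models_to_register amounts to
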
diff --git a/apache-ariatosca-0.1.1/aria/modeling/orchestration.py b/apache-ariatosca-0.1.1/aria/modeling/orchestration.py
deleted file mode 100644
index 7068557..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/orchestration.py
+++ /dev/null
@@ -1,711 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling orchestration module
-"""
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-from datetime import datetime
-
-from sqlalchemy import (
-    Column,
-    Integer,
-    Text,
-    DateTime,
-    Boolean,
-    Enum,
-    String,
-    Float,
-    orm,
-    PickleType)
-from sqlalchemy.ext.declarative import declared_attr
-
-from ..orchestrator.exceptions import (TaskAbortException, TaskRetryException)
-from . import mixins
-from . import (
-    relationship,
-    types as modeling_types
-)
-
-
-class ExecutionBase(mixins.ModelMixin):
-    """
-    Workflow execution.
-    """
-
-    __tablename__ = 'execution'
-
-    __private_fields__ = ('service_fk',
-                          'service_template')
-
-    SUCCEEDED = 'succeeded'
-    FAILED = 'failed'
-    CANCELLED = 'cancelled'
-    PENDING = 'pending'
-    STARTED = 'started'
-    CANCELLING = 'cancelling'
-
-    STATES = (SUCCEEDED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING)
-    END_STATES = (SUCCEEDED, FAILED, CANCELLED)
-
-    VALID_TRANSITIONS = {
-        PENDING: (STARTED, CANCELLED),
-        STARTED: END_STATES + (CANCELLING,),
-        CANCELLING: END_STATES,
-        CANCELLED: (PENDING,)
-    }
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Execution parameters.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def tasks(cls):
-        """
-        Tasks.
-
-        :type: [:class:`Task`]
-        """
-        return relationship.one_to_many(cls, 'task')
-
-    @declared_attr
-    def logs(cls):
-        """
-        Log messages for the execution (including log messages for its tasks).
-
-        :type: [:class:`Log`]
-        """
-        return relationship.one_to_many(cls, 'log')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Associated service.
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    # endregion
-
-    # region association proxies
-
-    @declared_attr
-    def service_name(cls):
-        return relationship.association_proxy('service', cls.name_column_name())
-
-    @declared_attr
-    def service_template(cls):
-        return relationship.association_proxy('service', 'service_template')
-
-    @declared_attr
-    def service_template_name(cls):
-        return relationship.association_proxy('service', 'service_template_name')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_fk(cls):
-        return relationship.foreign_key('service')
-
-    # endregion
-
-    created_at = Column(DateTime, index=True, doc="""
-    Creation timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    started_at = Column(DateTime, nullable=True, index=True, doc="""
-    Started timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    ended_at = Column(DateTime, nullable=True, index=True, doc="""
-    Ended timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    error = Column(Text, nullable=True, doc="""
-    Error message.
-
-    :type: :obj:`basestring`
-    """)
-
-    status = Column(Enum(*STATES, name='execution_status'), default=PENDING, doc="""
-    Status.
-
-    :type: :obj:`basestring`
-    """)
-
-    workflow_name = Column(Text, doc="""
-    Workflow name.
-
-    :type: :obj:`basestring`
-    """)
-
-    @orm.validates('status')
-    def validate_status(self, key, value):
-        """Validation function that verifies execution status transitions are OK"""
-        try:
-            current_status = getattr(self, key)
-        except AttributeError:
-            return
-        valid_transitions = self.VALID_TRANSITIONS.get(current_status, [])
-        if all([current_status is not None,
-                current_status != value,
-                value not in valid_transitions]):
-            raise ValueError('Cannot change execution status from {current} to {new}'.format(
-                current=current_status,
-                new=value))
-        return value
-
-    def has_ended(self):
-        return self.status in self.END_STATES
-
-    def is_active(self):
-        return not self.has_ended() and self.status != self.PENDING
-
-    def __str__(self):
-        return '<{0} id=`{1}` (status={2})>'.format(
-            self.__class__.__name__,
-            getattr(self, self.name_column_name()),
-            self.status
-        )
-
-
-class TaskBase(mixins.ModelMixin):
-    """
-    Represents the smallest unit of stateful execution in ARIA. The task state includes inputs,
-    outputs, as well as an atomic status, ensuring that the task can only be running once at any
-    given time.
-
-    The Python :attr:`function` is usually provided by an associated :class:`Plugin`. The
-    :attr:`arguments` of the function should be set according to the specific signature of the
-    function.
-
-    Tasks may be "one shot" or may be configured to run repeatedly in the case of failure.
-
-    Tasks are often based on :class:`Operation`, and thus act on either a :class:`Node` or a
-    :class:`Relationship`; however, this is not required.
-    """
-
-    __tablename__ = 'task'
-
-    __private_fields__ = ('dependency_operation_task_fk', 'dependency_stub_task_fk', 'node_fk',
-                          'relationship_fk', 'plugin_fk', 'execution_fk')
-
-    START_WORKFLOW = 'start_workflow'
-    END_WORKFLOW = 'end_workflow'
-    START_SUBWROFKLOW = 'start_subworkflow'
-    END_SUBWORKFLOW = 'end_subworkflow'
-    STUB = 'stub'
-    CONDITIONAL = 'conditional'
-
-    STUB_TYPES = (
-        START_WORKFLOW,
-        START_SUBWROFKLOW,
-        END_WORKFLOW,
-        END_SUBWORKFLOW,
-        STUB,
-        CONDITIONAL,
-    )
-
-    PENDING = 'pending'
-    RETRYING = 'retrying'
-    SENT = 'sent'
-    STARTED = 'started'
-    SUCCESS = 'success'
-    FAILED = 'failed'
-    STATES = (
-        PENDING,
-        RETRYING,
-        SENT,
-        STARTED,
-        SUCCESS,
-        FAILED,
-    )
-    INFINITE_RETRIES = -1
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def logs(cls):
-        """
-        Log messages.
-
-        :type: [:class:`Log`]
-        """
-        return relationship.one_to_many(cls, 'log')
-
-    @declared_attr
-    def arguments(cls):
-        """
-        Arguments sent to the Python :attr:`function`.
-
-        :type: {:obj:`basestring`: :class:`Argument`}
-        """
-        return relationship.one_to_many(cls, 'argument', dict_key='name')
-
-    # endregion
-
-    # region many_one relationships
-
-    @declared_attr
-    def execution(cls):
-        """
-        Containing execution.
-
-        :type: :class:`Execution`
-        """
-        return relationship.many_to_one(cls, 'execution')
-
-    @declared_attr
-    def node(cls):
-        """
-        Node actor (can be ``None``).
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    @declared_attr
-    def relationship(cls):
-        """
-        Relationship actor (can be ``None``).
-
-        :type: :class:`Relationship`
-        """
-        return relationship.many_to_one(cls, 'relationship')
-
-    @declared_attr
-    def plugin(cls):
-        """
-        Associated plugin.
-
-        :type: :class:`Plugin`
-        """
-        return relationship.many_to_one(cls, 'plugin')
-
-    # endregion
-
-    # region association proxies
-
-    @declared_attr
-    def node_name(cls):
-        return relationship.association_proxy('node', cls.name_column_name())
-
-    @declared_attr
-    def relationship_name(cls):
-        return relationship.association_proxy('relationship', cls.name_column_name())
-
-    @declared_attr
-    def execution_name(cls):
-        return relationship.association_proxy('execution', cls.name_column_name())
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def execution_fk(cls):
-        return relationship.foreign_key('execution', nullable=True)
-
-    @declared_attr
-    def node_fk(cls):
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def relationship_fk(cls):
-        return relationship.foreign_key('relationship', nullable=True)
-
-    @declared_attr
-    def plugin_fk(cls):
-        return relationship.foreign_key('plugin', nullable=True)
-
-    # endregion
-
-    status = Column(Enum(*STATES, name='status'), default=PENDING, doc="""
-    Current atomic status ('pending', 'retrying', 'sent', 'started', 'success', 'failed').
-
-    :type: :obj:`basestring`
-    """)
-
-    due_at = Column(DateTime, nullable=False, index=True, default=datetime.utcnow, doc="""
-    Timestamp to start the task.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    started_at = Column(DateTime, default=None, doc="""
-    Started timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    ended_at = Column(DateTime, default=None, doc="""
-    Ended timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    attempts_count = Column(Integer, default=1, doc="""
-    How many attempts occurred.
-
-    :type: :obj:`int`
-    """)
-
-    function = Column(String, doc="""
-    Full path to Python function.
-
-    :type: :obj:`basestring`
-    """)
-
-    max_attempts = Column(Integer, default=1, doc="""
-    Maximum number of attempts allowed in case of task failure.
-
-    :type: :obj:`int`
-    """)
-
-    retry_interval = Column(Float, default=0, doc="""
-    Interval between task retry attempts (in seconds).
-
-    :type: :obj:`float`
-    """)
-
-    ignore_failure = Column(Boolean, default=False, doc="""
-    Set to ``True`` to ignore failures.
-
-    :type: :obj:`bool`
-    """)
-
-    interface_name = Column(String, doc="""
-    Name of interface on node or relationship.
-
-    :type: :obj:`basestring`
-    """)
-
-    operation_name = Column(String, doc="""
-    Name of operation in interface on node or relationship.
-
-    :type: :obj:`basestring`
-    """)
-
-    _api_id = Column(String)
-    _executor = Column(PickleType)
-    _context_cls = Column(PickleType)
-    _stub_type = Column(Enum(*STUB_TYPES))
-
-    @property
-    def actor(self):
-        """
-        Actor of the task (node or relationship).
-        """
-        return self.node or self.relationship
-
-    @orm.validates('max_attempts')
-    def validate_max_attempts(self, _, value):                                  # pylint: disable=no-self-use
-        """
-        Validates that max attempts is either -1 or a positive number.
-        """
-        if value < 1 and value != TaskBase.INFINITE_RETRIES:
-            raise ValueError('Max attempts can be either -1 (infinite) or any positive number. '
-                             'Got {value}'.format(value=value))
-        return value
-
-    @staticmethod
-    def abort(message=None):
-        raise TaskAbortException(message)
-
-    @staticmethod
-    def retry(message=None, retry_interval=None):
-        raise TaskRetryException(message, retry_interval=retry_interval)
-
-    @declared_attr
-    def dependencies(cls):
-        return relationship.many_to_many(cls, self=True)
-
-    def has_ended(self):
-        return self.status in (self.SUCCESS, self.FAILED)
-
-    def is_waiting(self):
-        if self._stub_type:
-            return not self.has_ended()
-        else:
-            return self.status in (self.PENDING, self.RETRYING)
-
-    @classmethod
-    def from_api_task(cls, api_task, executor, **kwargs):
-        instantiation_kwargs = {}
-
-        if hasattr(api_task.actor, 'outbound_relationships'):
-            instantiation_kwargs['node'] = api_task.actor
-        elif hasattr(api_task.actor, 'source_node'):
-            instantiation_kwargs['relationship'] = api_task.actor
-        else:
-            raise RuntimeError('No operation context could be created for {actor.model_cls}'
-                               .format(actor=api_task.actor))
-
-        instantiation_kwargs.update(
-            {
-                'name': api_task.name,
-                'status': cls.PENDING,
-                'max_attempts': api_task.max_attempts,
-                'retry_interval': api_task.retry_interval,
-                'ignore_failure': api_task.ignore_failure,
-                'execution': api_task._workflow_context.execution,
-                'interface_name': api_task.interface_name,
-                'operation_name': api_task.operation_name,
-
-                # Only non-stub tasks have these fields
-                'plugin': api_task.plugin,
-                'function': api_task.function,
-                'arguments': api_task.arguments,
-                '_context_cls': api_task._context_cls,
-                '_executor': executor,
-            }
-        )
-
-        instantiation_kwargs.update(**kwargs)
-
-        return cls(**instantiation_kwargs)
-
-
-class LogBase(mixins.ModelMixin):
-    """
-    Single log message.
-    """
-
-    __tablename__ = 'log'
-
-    __private_fields__ = ('execution_fk',
-                          'task_fk')
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def execution(cls):
-        """
-        Containing execution.
-
-        :type: :class:`Execution`
-        """
-        return relationship.many_to_one(cls, 'execution')
-
-    @declared_attr
-    def task(cls):
-        """
-        Containing task (can be ``None``).
-
-        :type: :class:`Task`
-        """
-        return relationship.many_to_one(cls, 'task')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def execution_fk(cls):
-        return relationship.foreign_key('execution')
-
-    @declared_attr
-    def task_fk(cls):
-        return relationship.foreign_key('task', nullable=True)
-
-    # endregion
-
-    level = Column(String, doc="""
-    Log level.
-
-    :type: :obj:`basestring`
-    """)
-
-    msg = Column(String, doc="""
-    Log message.
-
-    :type: :obj:`basestring`
-    """)
-
-    created_at = Column(DateTime, index=True, doc="""
-    Creation timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    traceback = Column(Text, doc="""
-    Error traceback in case of failure.
-
-    :type: :obj:`basestring`
-    """)
-
-    def __str__(self):
-        return self.msg
-
-    def __repr__(self):
-        name = (self.task.actor if self.task else self.execution).name
-        return '{name}: {self.msg}'.format(name=name, self=self)
-
-
-class PluginBase(mixins.ModelMixin):
-    """
-    Installed plugin.
-
-    Plugins are usually packaged as `wagons <https://github.com/cloudify-cosmo/wagon>`__, which
-    are archives of one or more `wheels <https://packaging.python.org/distributing/#wheels>`__.
-    Most of these fields are indeed extracted from the installed wagon's metadata.
-    """
-
-    __tablename__ = 'plugin'
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def tasks(cls):
-        """
-        Associated Tasks.
-
-        :type: [:class:`Task`]
-        """
-        return relationship.one_to_many(cls, 'task')
-
-    # endregion
-
-    archive_name = Column(Text, nullable=False, index=True, doc="""
-    Filename (not the full path) of the wagon's archive, often with a ``.wgn`` extension.
-
-    :type: :obj:`basestring`
-    """)
-
-    distribution = Column(Text, doc="""
-    Name of the operating system on which the wagon was installed (e.g. ``ubuntu``).
-
-    :type: :obj:`basestring`
-    """)
-
-    distribution_release = Column(Text, doc="""
-    Release of the operating system on which the wagon was installed (e.g. ``trusty``).
-
-    :type: :obj:`basestring`
-    """)
-
-    distribution_version = Column(Text, doc="""
-    Version of the operating system on which the wagon was installed (e.g. ``14.04``).
-
-    :type: :obj:`basestring`
-    """)
-
-    package_name = Column(Text, nullable=False, index=True, doc="""
-    Primary Python package name used when the wagon was installed, which is one of the wheels in the
-    wagon (e.g. ``cloudify-script-plugin``).
-
-    :type: :obj:`basestring`
-    """)
-
-    package_source = Column(Text, doc="""
-    Full install string for the primary Python package name used when the wagon was installed (e.g.
-    ``cloudify-script-plugin==1.2``).
-
-    :type: :obj:`basestring`
-    """)
-
-    package_version = Column(Text, doc="""
-    Version for the primary Python package name used when the wagon was installed (e.g. ``1.2``).
-
-    :type: :obj:`basestring`
-    """)
-
-    supported_platform = Column(Text, doc="""
-    If the wheels are *all* pure Python then this would be "any", otherwise it would be the
-    installed platform name (e.g. ``linux_x86_64``).
-
-    :type: :obj:`basestring`
-    """)
-
-    supported_py_versions = Column(modeling_types.StrictList(basestring), doc="""
-    Python versions supported by all the wheels (e.g. ``["py26", "py27"]``)
-
-    :type: [:obj:`basestring`]
-    """)
-
-    wheels = Column(modeling_types.StrictList(basestring), nullable=False, doc="""
-    Filenames of the wheels archived in the wagon, often with a ``.whl`` extension.
-
-    :type: [:obj:`basestring`]
-    """)
-
-    uploaded_at = Column(DateTime, nullable=False, index=True, doc="""
-    Timestamp for when the wagon was installed.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-
-class ArgumentBase(mixins.ParameterMixin):
-    """
-    Python function argument parameter.
-    """
-
-    __tablename__ = 'argument'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def task(cls):
-        """
-        Containing task (can be ``None``).
-
-        :type: :class:`Task`
-        """
-        return relationship.many_to_one(cls, 'task')
-
-    @declared_attr
-    def operation(cls):
-        """
-        Containing operation (can be ``None``).
-
-        :type: :class:`Operation`
-        """
-        return relationship.many_to_one(cls, 'operation')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def task_fk(cls):
-        return relationship.foreign_key('task', nullable=True)
-
-    @declared_attr
-    def operation_fk(cls):
-        return relationship.foreign_key('operation', nullable=True)
-
-    # endregion
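
``ExecutionBase`` above enforces a small state machine through ``VALID_TRANSITIONS`` and ``validate_status``. A standalone sketch of the same check, with the transition table copied from the model (``can_transition`` is a hypothetical helper, not part of ARIA)::

    END_STATES = ('succeeded', 'failed', 'cancelled')
    VALID_TRANSITIONS = {
        'pending': ('started', 'cancelled'),
        'started': END_STATES + ('cancelling',),
        'cancelling': END_STATES,
        'cancelled': ('pending',),     # presumably to allow resuming a cancelled execution
    }

    def can_transition(current, new):
        # Mirrors validate_status(): unchanged status and brand-new executions always pass
        if current is None or current == new:
            return True
        return new in VALID_TRANSITIONS.get(current, ())

    assert can_transition('pending', 'started')
    assert can_transition('started', 'cancelling')
    assert not can_transition('succeeded', 'started')   # end states are terminal
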
diff --git a/apache-ariatosca-0.1.1/aria/modeling/relationship.py b/apache-ariatosca-0.1.1/aria/modeling/relationship.py
deleted file mode 100644
index 8b6028f..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/relationship.py
+++ /dev/null
@@ -1,395 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling relationship module
-"""
-
-# pylint: disable=invalid-name, redefined-outer-name
-
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy.orm.collections import attribute_mapped_collection
-from sqlalchemy.ext.associationproxy import association_proxy as original_association_proxy
-from sqlalchemy import (
-    Column,
-    ForeignKey,
-    Integer,
-    Table
-)
-
-from ..utils import formatting
-
-NO_BACK_POP = 'NO_BACK_POP'
-
-
-def foreign_key(other_table, nullable=False):
-    """
-    Declare a foreign key property, which will also create a foreign key column in the table with
-    the name of the property. By convention the property name should end in "_fk".
-
-    You are required to explicitly create foreign keys in order to allow for one-to-one,
-    one-to-many, and many-to-one relationships (but not for many-to-many relationships). If you do
-    not do so, SQLAlchemy will fail to create the relationship property and raise an exception with
-    a clear error message.
-
-    You should normally not have to access this property directly, but instead use the associated
-    relationship properties.
-
-    *This utility method should only be used during class creation.*
-
-    :param other_table: other table name
-    :type other_table: basestring
-    :param nullable: ``True`` to allow null values (meaning that there is no relationship)
-    :type nullable: bool
-    """
-
-    return Column(Integer,
-                  ForeignKey('{table}.id'.format(table=other_table), ondelete='CASCADE'),
-                  nullable=nullable)
-
-
-def one_to_one_self(model_class, fk):
-    """
-    Declare a one-to-one relationship property. The property value would be an instance of the same
-    model.
-
-    You will need an associated foreign key to our own table.
-
-    *This utility method should only be used during class creation.*
-
-    :param model_class: class in which this relationship will be declared
-    :type model_class: type
-    :param fk: foreign key name
-    :type fk: basestring
-    """
-
-    remote_side = '{model_class}.{remote_column}'.format(
-        model_class=model_class.__name__,
-        remote_column=model_class.id_column_name()
-    )
-
-    primaryjoin = '{remote_side} == {model_class}.{column}'.format(
-        remote_side=remote_side,
-        model_class=model_class.__name__,
-        column=fk
-    )
-    return _relationship(
-        model_class,
-        model_class.__tablename__,
-        relationship_kwargs={
-            'primaryjoin': primaryjoin,
-            'remote_side': remote_side,
-            'post_update': True
-        }
-    )
-
-
-def one_to_one(model_class,
-               other_table,
-               fk=None,
-               other_fk=None,
-               back_populates=None):
-    """
-    Declare a one-to-one relationship property. The property value would be an instance of the other
-    table's model.
-
-    You have two options for the foreign key. Either this table can have an associated foreign key
-    to the other table (use the ``fk`` argument) or the other table can have an associated foreign
-    key to our table (use the ``other_fk`` argument).
-
-    *This utility method should only be used during class creation.*
-
-    :param model_class: class in which this relationship will be declared
-    :type model_class: type
-    :param other_table: other table name
-    :type other_table: basestring
-    :param fk: foreign key name at our table (no need to specify if unambiguous)
-    :type fk: basestring
-    :param other_fk: foreign key name at the other table (no need to specify if unambiguous)
-    :type other_fk: basestring
-    :param back_populates: override name of matching one-to-one property at other table; set to
-     ``False`` to disable
-    :type back_populates: basestring or bool
-    """
-    backref_kwargs = None
-    if back_populates is not NO_BACK_POP:
-        if back_populates is None:
-            back_populates = model_class.__tablename__
-        backref_kwargs = {'name': back_populates, 'uselist': False}
-        back_populates = None
-
-    return _relationship(model_class,
-                         other_table,
-                         fk=fk,
-                         back_populates=back_populates,
-                         backref_kwargs=backref_kwargs,
-                         other_fk=other_fk)
-
-
-def one_to_many(model_class,
-                other_table=None,
-                other_fk=None,
-                dict_key=None,
-                back_populates=None,
-                rel_kwargs=None,
-                self=False):
-    """
-    Declare a one-to-many relationship property. The property value would be a list or dict of
-    instances of the child table's model.
-
-    The child table will need an associated foreign key to our table.
-
-    The declaration will automatically create a matching many-to-one property at the child model,
-    named after our table name. Use the ``back_populates`` argument to override this name.
-
-    *This utility method should only be used during class creation.*
-
-    :param model_class: class in which this relationship will be declared
-    :type model_class: type
-    :param other_table: other table name
-    :type other_table: basestring
-    :param other_fk: foreign key name at the other table (no need to specify if unambiguous)
-    :type other_fk: basestring
-    :param dict_key: if set the value will be a dict with this key as the dict key; otherwise will
-     be a list
-    :type dict_key: basestring
-    :param back_populates: override name of matching many-to-one property at other table; set to
-     ``False`` to disable
-    :type back_populates: basestring or bool
-    :param rel_kwargs: additional relationship kwargs to be used by SQLAlchemy
-    :type rel_kwargs: dict
-    :param self: used for relationships between a table and itself; if set, ``other_table``
-     becomes the same as the source table
-    :type self: bool
-    """
-    relationship_kwargs = rel_kwargs or {}
-    if self:
-        assert other_fk
-        other_table_name = model_class.__tablename__
-        back_populates = False
-        relationship_kwargs['remote_side'] = '{model}.{column}'.format(model=model_class.__name__,
-                                                                       column=other_fk)
-
-    else:
-        assert other_table
-        other_table_name = other_table
-        if back_populates is None:
-            back_populates = model_class.__tablename__
-        relationship_kwargs.setdefault('cascade', 'all')
-
-    return _relationship(
-        model_class,
-        other_table_name,
-        back_populates=back_populates,
-        other_fk=other_fk,
-        dict_key=dict_key,
-        relationship_kwargs=relationship_kwargs)
-
-
-def many_to_one(model_class,
-                parent_table,
-                fk=None,
-                parent_fk=None,
-                back_populates=None):
-    """
-    Declare a many-to-one relationship property. The property value would be an instance of the
-    parent table's model.
-
-    You will need an associated foreign key to the parent table.
-
-    The declaration will automatically create a matching one-to-many property at the parent model,
-    named after the plural form of our table name. Use the ``back_populates`` argument to override
-    this name. Note: the automatic property will always be a SQLAlchemy query object; if you need a
-    Python collection then use :func:`one_to_many` at that model.
-
-    *This utility method should only be used during class creation.*
-
-    :param model_class: class in which this relationship will be declared
-    :type model_class: type
-    :param parent_table: parent table name
-    :type parent_table: basestring
-    :param fk: foreign key name at our table (no need to specify if unambiguous)
-    :type fk: basestring
-    :param back_populates: override name of matching one-to-many property at parent table; set to
-     ``False`` to disable
-    :type back_populates: basestring or bool
-    """
-    if back_populates is None:
-        back_populates = formatting.pluralize(model_class.__tablename__)
-
-    return _relationship(model_class,
-                         parent_table,
-                         back_populates=back_populates,
-                         fk=fk,
-                         other_fk=parent_fk)
-
-
-def many_to_many(model_class,
-                 other_table=None,
-                 prefix=None,
-                 dict_key=None,
-                 other_property=None,
-                 self=False):
-    """
-    Declare a many-to-many relationship property. The property value would be a list or dict of
-    instances of the other table's model.
-
-    You do not need associated foreign keys for this relationship. Instead, an extra table will be
-    created for you.
-
-    The declaration will automatically create a matching many-to-many property at the other model,
-    named after the plural form of our table name. Use the ``other_property`` argument to override
-    this name. Note: the automatic property will always be a SQLAlchemy query object; if you need a
-    Python collection then use :func:`many_to_many` again at that model.
-
-    *This utility method should only be used during class creation.*
-
-    :param model_class: class in which this relationship will be declared
-    :type model_class: type
-    :param other_table: other table name
-    :type other_table: basestring
-    :param prefix: optional prefix for extra table name as well as for ``other_property``
-    :type prefix: basestring
-    :param dict_key: if set the value will be a dict with this key as the dict key; otherwise will
-     be a list
-    :type dict_key: basestring
-    :param other_property: override name of matching many-to-many property at other table; set to
-     ``False`` to disable
-    :type other_property: basestring or bool
-    :param self: used for relationships between a table and itself; if set, ``other_table``
-     becomes the same as the source table
-    :type self: bool
-    """
-
-    this_table = model_class.__tablename__
-    this_column_name = '{0}_id'.format(this_table)
-    this_foreign_key = '{0}.id'.format(this_table)
-
-    if self:
-        other_table = this_table
-
-    other_column_name = '{0}_{1}'.format(other_table, 'self_ref_id' if self else 'id')
-    other_foreign_key = '{0}.{1}'.format(other_table, 'id')
-
-    secondary_table_name = '{0}_{1}'.format(this_table, other_table)
-
-    if prefix is not None:
-        secondary_table_name = '{0}_{1}'.format(prefix, secondary_table_name)
-        if other_property is None:
-            other_property = '{0}_{1}'.format(prefix, formatting.pluralize(this_table))
-
-    secondary_table = _get_secondary_table(
-        model_class.metadata,
-        secondary_table_name,
-        this_column_name,
-        other_column_name,
-        this_foreign_key,
-        other_foreign_key
-    )
-
-    kwargs = {'relationship_kwargs': {'secondary': secondary_table}}
-
-    if self:
-        kwargs['back_populates'] = NO_BACK_POP
-        kwargs['relationship_kwargs']['primaryjoin'] = \
-                    getattr(model_class, 'id') == getattr(secondary_table.c, this_column_name)
-        kwargs['relationship_kwargs']['secondaryjoin'] = \
-            getattr(model_class, 'id') == getattr(secondary_table.c, other_column_name)
-    else:
-        kwargs['backref_kwargs'] = \
-            {'name': other_property, 'uselist': True} if other_property else None
-        kwargs['dict_key'] = dict_key
-
-    return _relationship(model_class, other_table, **kwargs)
-
-
-def association_proxy(*args, **kwargs):
-    type_ = kwargs.pop('type', ':obj:`basestring`')
-    proxy = original_association_proxy(*args, **kwargs)
-    proxy.__doc__ = """
-    Internal. For use in SQLAlchemy queries.
-
-    :type: {0}
-    """.format(type_)
-    return proxy
-
-
-def _relationship(model_class,
-                  other_table_name,
-                  back_populates=None,
-                  backref_kwargs=None,
-                  relationship_kwargs=None,
-                  fk=None,
-                  other_fk=None,
-                  dict_key=None):
-    relationship_kwargs = relationship_kwargs or {}
-
-    if fk:
-        relationship_kwargs.setdefault(
-            'foreign_keys',
-            lambda: getattr(_get_class_for_table(model_class, model_class.__tablename__), fk)
-        )
-
-    elif other_fk:
-        relationship_kwargs.setdefault(
-            'foreign_keys',
-            lambda: getattr(_get_class_for_table(model_class, other_table_name), other_fk)
-        )
-
-    if dict_key:
-        relationship_kwargs.setdefault('collection_class',
-                                       attribute_mapped_collection(dict_key))
-
-    if backref_kwargs:
-        assert back_populates is None
-        return relationship(
-            lambda: _get_class_for_table(model_class, other_table_name),
-            backref=backref(**backref_kwargs),
-            **relationship_kwargs
-        )
-    else:
-        if back_populates is not NO_BACK_POP:
-            relationship_kwargs['back_populates'] = back_populates
-        return relationship(lambda: _get_class_for_table(model_class, other_table_name),
-                            **relationship_kwargs)
-
-
-def _get_class_for_table(model_class, tablename):
-    if tablename in (model_class.__name__, model_class.__tablename__):
-        return model_class
-
-    for table_cls in model_class._decl_class_registry.values():
-        if tablename == getattr(table_cls, '__tablename__', None):
-            return table_cls
-
-    raise ValueError('unknown table: {0}'.format(tablename))
-
-
-def _get_secondary_table(metadata,
-                         name,
-                         first_column,
-                         second_column,
-                         first_foreign_key,
-                         second_foreign_key):
-    return Table(
-        name,
-        metadata,
-        Column(first_column, Integer, ForeignKey(first_foreign_key)),
-        Column(second_column, Integer, ForeignKey(second_foreign_key))
-    )
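
The helpers in this module are thin wrappers around SQLAlchemy's ``Column``, ``ForeignKey`` and ``relationship()``. Roughly what ``foreign_key()`` plus a ``one_to_many()``/``many_to_one()`` pair expand to, sketched in plain SQLAlchemy and ignoring the lazy class resolution, dict collections and association proxies the real helpers add::

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Execution(Base):
        __tablename__ = 'execution'
        id = Column(Integer, primary_key=True)
        # roughly what one_to_many(cls, 'task') boils down to
        tasks = relationship('Task', back_populates='execution', cascade='all')

    class Task(Base):
        __tablename__ = 'task'
        id = Column(Integer, primary_key=True)
        # roughly what foreign_key('execution', nullable=True) produces
        execution_fk = Column(Integer, ForeignKey('execution.id', ondelete='CASCADE'),
                              nullable=True)
        # roughly what many_to_one(cls, 'execution') boils down to
        execution = relationship('Execution', back_populates='tasks')

    Base.metadata.create_all(create_engine('sqlite://'))
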
diff --git a/apache-ariatosca-0.1.1/aria/modeling/service_changes.py b/apache-ariatosca-0.1.1/aria/modeling/service_changes.py
deleted file mode 100644
index 061262a..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/service_changes.py
+++ /dev/null
@@ -1,253 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling service changes module
-"""
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-from collections import namedtuple
-
-from sqlalchemy import (
-    Column,
-    Text,
-    DateTime,
-    Enum,
-)
-from sqlalchemy.ext.declarative import declared_attr
-
-from .types import (List, Dict)
-from .mixins import ModelMixin
-from . import relationship
-
-
-class ServiceUpdateBase(ModelMixin):
-    """
-    Deployment update model representation.
-    """
-    __tablename__ = 'service_update'
-
-    __private_fields__ = ('service_fk',
-                          'execution_fk')
-
-    created_at = Column(DateTime, nullable=False, index=True)
-    service_plan = Column(Dict, nullable=False)
-    service_update_nodes = Column(Dict)
-    service_update_service = Column(Dict)
-    service_update_node_templates = Column(List)
-    modified_entity_ids = Column(Dict)
-    state = Column(Text)
-
-    # region association proxies
-
-    @declared_attr
-    def execution_name(cls):
-        return relationship.association_proxy('execution', cls.name_column_name())
-
-    @declared_attr
-    def service_name(cls):
-        return relationship.association_proxy('service', cls.name_column_name())
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def steps(cls):
-        return relationship.one_to_many(cls, 'service_update_step')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def execution(cls):
-        return relationship.one_to_one(cls, 'execution', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def service(cls):
-        return relationship.many_to_one(cls, 'service', back_populates='updates')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def execution_fk(cls):
-        return relationship.foreign_key('execution', nullable=True)
-
-    @declared_attr
-    def service_fk(cls):
-        return relationship.foreign_key('service')
-
-    # endregion
-
-    def to_dict(self, suppress_error=False, **kwargs):
-        dep_update_dict = super(ServiceUpdateBase, self).to_dict(suppress_error)     #pylint: disable=no-member
-        # The steps are models themselves, so serialize each of them explicitly
-        dep_update_dict['steps'] = [step.to_dict() for step in self.steps]
-        return dep_update_dict
-
-
-class ServiceUpdateStepBase(ModelMixin):
-    """
-    Deployment update step model representation.
-    """
-
-    __tablename__ = 'service_update_step'
-
-    __private_fields__ = ('service_update_fk',)
-
-    _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
-    ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
-
-    _entity_types = namedtuple(
-        'ENTITY_TYPES',
-        'NODE, RELATIONSHIP, PROPERTY, OPERATION, WORKFLOW, OUTPUT, DESCRIPTION, GROUP, PLUGIN')
-    ENTITY_TYPES = _entity_types(
-        NODE='node',
-        RELATIONSHIP='relationship',
-        PROPERTY='property',
-        OPERATION='operation',
-        WORKFLOW='workflow',
-        OUTPUT='output',
-        DESCRIPTION='description',
-        GROUP='group',
-        PLUGIN='plugin'
-    )
-
-    action = Column(Enum(*ACTION_TYPES, name='action_type'), nullable=False)
-    entity_id = Column(Text, nullable=False)
-    entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
-
-    # region association proxies
-
-    @declared_attr
-    def service_update_name(cls):
-        return relationship.association_proxy('service_update', cls.name_column_name())
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_update(cls):
-        return relationship.many_to_one(cls, 'service_update', back_populates='steps')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_update_fk(cls):
-        return relationship.foreign_key('service_update')
-
-    # endregion
-
-    def __hash__(self):
-        return hash((getattr(self, self.id_column_name()), self.entity_id))
-
-    def __lt__(self, other):
-        """
-        the order is 'remove' < 'modify' < 'add'
-        :param other:
-        :return:
-        """
-        if not isinstance(other, self.__class__):
-            return not self >= other
-
-        if self.action != other.action:
-            if self.action == 'remove':
-                return_value = True
-            elif self.action == 'add':
-                return_value = False
-            else:
-                return_value = other.action == 'add'
-            return return_value
-
-        if self.action == 'add':
-            return self.entity_type == 'node' and other.entity_type == 'relationship'
-        if self.action == 'remove':
-            return self.entity_type == 'relationship' and other.entity_type == 'node'
-        return False
-
-
-class ServiceModificationBase(ModelMixin):
-    """
-    Deployment modification model representation.
-    """
-
-    __tablename__ = 'service_modification'
-
-    __private_fields__ = ('service_fk',)
-
-    STARTED = 'started'
-    FINISHED = 'finished'
-    ROLLEDBACK = 'rolledback'
-
-    STATES = [STARTED, FINISHED, ROLLEDBACK]
-    END_STATES = [FINISHED, ROLLEDBACK]
-
-    context = Column(Dict)
-    created_at = Column(DateTime, nullable=False, index=True)
-    ended_at = Column(DateTime, index=True)
-    modified_node_templates = Column(Dict)
-    nodes = Column(Dict)
-    status = Column(Enum(*STATES, name='service_modification_status'))
-
-    # region association proxies
-
-    @declared_attr
-    def service_name(cls):
-        return relationship.association_proxy('service', cls.name_column_name())
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        return relationship.many_to_one(cls, 'service', back_populates='modifications')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_fk(cls):
-        return relationship.foreign_key('service')
-
-    # endregion
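
``ServiceUpdateStepBase.__lt__`` above encodes the order in which update steps should be applied: removals before modifications before additions, with relationships removed before nodes and nodes added before relationships. A standalone sketch of that ordering (``StepSketch`` is a hypothetical class, not an ARIA model)::

    from functools import total_ordering

    @total_ordering
    class StepSketch(object):
        _rank = {'remove': 0, 'modify': 1, 'add': 2}

        def __init__(self, action, entity_type):
            self.action, self.entity_type = action, entity_type

        def __eq__(self, other):
            return (self.action, self.entity_type) == (other.action, other.entity_type)

        def __lt__(self, other):
            # Equivalent to the comparison in ServiceUpdateStepBase.__lt__, expressed via ranks
            if self.action != other.action:
                return self._rank[self.action] < self._rank[other.action]
            if self.action == 'add':
                return self.entity_type == 'node' and other.entity_type == 'relationship'
            if self.action == 'remove':
                return self.entity_type == 'relationship' and other.entity_type == 'node'
            return False

    steps = [StepSketch('add', 'relationship'), StepSketch('remove', 'node'),
             StepSketch('add', 'node'), StepSketch('remove', 'relationship')]
    print([(s.action, s.entity_type) for s in sorted(steps)])
    # [('remove', 'relationship'), ('remove', 'node'), ('add', 'node'), ('add', 'relationship')]
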
diff --git a/apache-ariatosca-0.1.1/aria/modeling/service_common.py b/apache-ariatosca-0.1.1/aria/modeling/service_common.py
deleted file mode 100644
index b533a88..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/service_common.py
+++ /dev/null
@@ -1,615 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling service common module
-"""
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-from sqlalchemy import (
-    Column,
-    Text,
-)
-from sqlalchemy.ext.declarative import declared_attr
-
-from ..parser.consumption import ConsumptionContext
-from ..utils import (
-    collections,
-    formatting,
-    console,
-)
-from .mixins import InstanceModelMixin, TemplateModelMixin, ParameterMixin
-from . import relationship
-
-
-class OutputBase(ParameterMixin):
-    """
-    Output parameter or declaration for an output parameter.
-    """
-
-    __tablename__ = 'output'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template (can be ``None``).
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service (can be ``None``).
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return relationship.foreign_key('service_template', nullable=True)
-
-    @declared_attr
-    def service_fk(cls):
-        return relationship.foreign_key('service', nullable=True)
-
-    # endregion
-
-
-class InputBase(ParameterMixin):
-    """
-    Input parameter or declaration for an input parameter.
-    """
-
-    __tablename__ = 'input'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template (can be ``None``).
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service (can be ``None``).
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    @declared_attr
-    def interface(cls):
-        """
-        Containing interface (can be ``None``).
-
-        :type: :class:`Interface`
-        """
-        return relationship.many_to_one(cls, 'interface')
-
-    @declared_attr
-    def operation(cls):
-        """
-        Containing operation (can be ``None``).
-
-        :type: :class:`Operation`
-        """
-        return relationship.many_to_one(cls, 'operation')
-
-    @declared_attr
-    def interface_template(cls):
-        """
-        Containing interface template (can be ``None``).
-
-        :type: :class:`InterfaceTemplate`
-        """
-        return relationship.many_to_one(cls, 'interface_template')
-
-    @declared_attr
-    def operation_template(cls):
-        """
-        Containing operation template (can be ``None``).
-
-        :type: :class:`OperationTemplate`
-        """
-        return relationship.many_to_one(cls, 'operation_template')
-
-    @declared_attr
-    def execution(cls):
-        """
-        Containing execution (can be ``None``).
-
-        :type: :class:`Execution`
-        """
-        return relationship.many_to_one(cls, 'execution')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return relationship.foreign_key('service_template', nullable=True)
-
-    @declared_attr
-    def service_fk(cls):
-        return relationship.foreign_key('service', nullable=True)
-
-    @declared_attr
-    def interface_fk(cls):
-        return relationship.foreign_key('interface', nullable=True)
-
-    @declared_attr
-    def operation_fk(cls):
-        return relationship.foreign_key('operation', nullable=True)
-
-    @declared_attr
-    def interface_template_fk(cls):
-        return relationship.foreign_key('interface_template', nullable=True)
-
-    @declared_attr
-    def operation_template_fk(cls):
-        return relationship.foreign_key('operation_template', nullable=True)
-
-    @declared_attr
-    def execution_fk(cls):
-        return relationship.foreign_key('execution', nullable=True)
-
-    @declared_attr
-    def task_fk(cls):
-        return relationship.foreign_key('task', nullable=True)
-
-    # endregion
-
-
-class ConfigurationBase(ParameterMixin):
-    """
-    Configuration parameter.
-    """
-
-    __tablename__ = 'configuration'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def operation_template(cls):
-        """
-        Containing operation template (can be ``None``).
-
-        :type: :class:`OperationTemplate`
-        """
-        return relationship.many_to_one(cls, 'operation_template')
-
-    @declared_attr
-    def operation(cls):
-        """
-        Containing operation (can be ``None``).
-
-        :type: :class:`Operation`
-        """
-        return relationship.many_to_one(cls, 'operation')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def operation_template_fk(cls):
-        return relationship.foreign_key('operation_template', nullable=True)
-
-    @declared_attr
-    def operation_fk(cls):
-        return relationship.foreign_key('operation', nullable=True)
-
-    # endregion
-
-
-class PropertyBase(ParameterMixin):
-    """
-    Property parameter or declaration for a property parameter.
-    """
-
-    __tablename__ = 'property'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template (can be ``None``).
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def group_template(cls):
-        """
-        Containing group template (can be ``None``).
-
-        :type: :class:`GroupTemplate`
-        """
-        return relationship.many_to_one(cls, 'group_template')
-
-    @declared_attr
-    def policy_template(cls):
-        """
-        Containing policy template (can be ``None``).
-
-        :type: :class:`PolicyTemplate`
-        """
-        return relationship.many_to_one(cls, 'policy_template')
-
-    @declared_attr
-    def relationship_template(cls):
-        """
-        Containing relationship template (can be ``None``).
-
-        :type: :class:`RelationshipTemplate`
-        """
-        return relationship.many_to_one(cls, 'relationship_template')
-
-    @declared_attr
-    def capability_template(cls):
-        """
-        Containing capability template (can be ``None``).
-
-        :type: :class:`CapabilityTemplate`
-        """
-        return relationship.many_to_one(cls, 'capability_template')
-
-    @declared_attr
-    def artifact_template(cls):
-        """
-        Containing artifact template (can be ``None``).
-
-        :type: :class:`ArtifactTemplate`
-        """
-        return relationship.many_to_one(cls, 'artifact_template')
-
-    @declared_attr
-    def node(cls):
-        """
-        Containing node (can be ``None``).
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    @declared_attr
-    def group(cls):
-        """
-        Containing group (can be ``None``).
-
-        :type: :class:`Group`
-        """
-        return relationship.many_to_one(cls, 'group')
-
-    @declared_attr
-    def policy(cls):
-        """
-        Containing policy (can be ``None``).
-
-        :type: :class:`Policy`
-        """
-        return relationship.many_to_one(cls, 'policy')
-
-    @declared_attr
-    def relationship(cls):
-        """
-        Containing relationship (can be ``None``).
-
-        :type: :class:`Relationship`
-        """
-        return relationship.many_to_one(cls, 'relationship')
-
-    @declared_attr
-    def capability(cls):
-        """
-        Containing capability (can be ``None``).
-
-        :type: :class:`Capability`
-        """
-        return relationship.many_to_one(cls, 'capability')
-
-    @declared_attr
-    def artifact(cls):
-        """
-        Containing artifact (can be ``None``).
-
-        :type: :class:`Artifact`
-        """
-        return relationship.many_to_one(cls, 'artifact')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        return relationship.foreign_key('node_template', nullable=True)
-
-    @declared_attr
-    def group_template_fk(cls):
-        return relationship.foreign_key('group_template', nullable=True)
-
-    @declared_attr
-    def policy_template_fk(cls):
-        return relationship.foreign_key('policy_template', nullable=True)
-
-    @declared_attr
-    def relationship_template_fk(cls):
-        return relationship.foreign_key('relationship_template', nullable=True)
-
-    @declared_attr
-    def capability_template_fk(cls):
-        return relationship.foreign_key('capability_template', nullable=True)
-
-    @declared_attr
-    def artifact_template_fk(cls):
-        return relationship.foreign_key('artifact_template', nullable=True)
-
-    @declared_attr
-    def node_fk(cls):
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def group_fk(cls):
-        return relationship.foreign_key('group', nullable=True)
-
-    @declared_attr
-    def policy_fk(cls):
-        return relationship.foreign_key('policy', nullable=True)
-
-    @declared_attr
-    def relationship_fk(cls):
-        return relationship.foreign_key('relationship', nullable=True)
-
-    @declared_attr
-    def capability_fk(cls):
-        return relationship.foreign_key('capability', nullable=True)
-
-    @declared_attr
-    def artifact_fk(cls):
-        return relationship.foreign_key('artifact', nullable=True)
-
-    # endregion
-
-
-class AttributeBase(ParameterMixin):
-    """
-    Attribute parameter or declaration for an attribute parameter.
-    """
-
-    __tablename__ = 'attribute'
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template (can be ``None``).
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def node(cls):
-        """
-        Containing node (can be ``None``).
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For Attribute many-to-one to NodeTemplate"""
-        return relationship.foreign_key('node_template', nullable=True)
-
-    @declared_attr
-    def node_fk(cls):
-        """For Attribute many-to-one to Node"""
-        return relationship.foreign_key('node', nullable=True)
-
-    # endregion
-
-
-class TypeBase(InstanceModelMixin):
-    """
-    Type and its children. Can serve as the root for a type hierarchy.
-    """
-
-    __tablename__ = 'type'
-
-    __private_fields__ = ('parent_type_fk',)
-
-    variant = Column(Text, nullable=False)
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    _role = Column(Text, name='role')
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def parent(cls):
-        """
-        Parent type (will be ``None`` for the root of a type hierarchy).
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one_self(cls, 'parent_type_fk')
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def children(cls):
-        """
-        Children.
-
-        :type: [:class:`Type`]
-        """
-        return relationship.one_to_many(cls, other_fk='parent_type_fk', self=True)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def parent_type_fk(cls):
-        """For Type one-to-many to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    # endregion
-
-    @property
-    def role(self):
-        def get_role(the_type):
-            if the_type is None:
-                return None
-            elif the_type._role is None:
-                return get_role(the_type.parent)
-            return the_type._role
-
-        return get_role(self)
-
-    @role.setter
-    def role(self, value):
-        self._role = value
-
-    def is_descendant(self, base_name, name):
-        base = self.get_descendant(base_name)
-        if base is not None:
-            if base.get_descendant(name) is not None:
-                return True
-        return False
-
-    def get_descendant(self, name):
-        if self.name == name:
-            return self
-        for child in self.children:
-            found = child.get_descendant(name)
-            if found is not None:
-                return found
-        return None
-
-    def iter_descendants(self):
-        for child in self.children:
-            yield child
-            for descendant in child.iter_descendants():
-                yield descendant
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('role', self.role)))
-
-    @property
-    def as_raw_all(self):
-        types = []
-        self._append_raw_children(types)
-        return types
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.name:
-            console.puts(context.style.type(self.name))
-        with context.style.indent:
-            for child in self.children:
-                child.dump()
-
-    def _append_raw_children(self, types):
-        for child in self.children:
-            raw_child = formatting.as_raw(child)
-            raw_child['parent'] = self.name
-            types.append(raw_child)
-            child._append_raw_children(types)
-
-    @property
-    def hierarchy(self):
-        """
-        Type hierarchy as a list beginning with this type and ending in the root.
-
-        :type: [:class:`Type`]
-        """
-        return [self] + (self.parent.hierarchy if self.parent else [])
-
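
The traversal logic above has two parts: 'role' is inherited from the closest ancestor that defines it, and get_descendant() searches the subtree by name. A minimal, dependency-free sketch of that behavior (MiniType and the sample type names are illustrative only, not part of ARIA):

class MiniType(object):
    """Illustrative stand-in for TypeBase's parent/children traversal."""

    def __init__(self, name, role=None, parent=None):
        self.name = name
        self._role = role
        self.parent = parent
        self.children = []
        if parent is not None:
            parent.children.append(self)

    @property
    def role(self):
        # Walk up the parent chain until a role is found (same idea as TypeBase.role)
        the_type = self
        while the_type is not None:
            if the_type._role is not None:
                return the_type._role
            the_type = the_type.parent
        return None

    def get_descendant(self, name):
        # Depth-first search by name, as in TypeBase.get_descendant
        if self.name == name:
            return self
        for child in self.children:
            found = child.get_descendant(name)
            if found is not None:
                return found
        return None

root = MiniType('Root')
compute = MiniType('Compute', role='host', parent=root)
my_vm = MiniType('my.nodes.VM', parent=compute)

assert root.get_descendant('my.nodes.VM') is my_vm
assert my_vm.role == 'host'    # inherited from 'Compute'
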
-
-class MetadataBase(TemplateModelMixin):
-    """
-    Custom values associated with the service.
-
-    This model is used by both service template and service instance elements.
-
-    :ivar name: name
-    :vartype name: basestring
-    :ivar value: value
-    :vartype value: basestring
-    """
-
-    __tablename__ = 'metadata'
-
-    value = Column(Text)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('value', self.value)))
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def instantiate(self, container):
-        from . import models
-        return models.Metadata(name=self.name,
-                               value=self.value)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('{0}: {1}'.format(
-            context.style.property(self.name),
-            context.style.literal(self.value)))
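
Every parameter class in this module follows the same declaration pattern: a many_to_one relationship per possible container plus a matching nullable foreign key, both wrapped in declared_attr. As a hedged sketch of that pattern only (SecretBase and its 'secret' table are hypothetical, and a real model would also need to be wired into aria.modeling.models), a new parameter type would look like:

from sqlalchemy.ext.declarative import declared_attr

from aria.modeling.mixins import ParameterMixin
from aria.modeling import relationship


class SecretBase(ParameterMixin):
    """
    Hypothetical parameter attached to a service; shown only to illustrate the pattern.
    """

    __tablename__ = 'secret'

    # region many_to_one relationships

    @declared_attr
    def service(cls):
        """
        Containing service (can be ``None``).

        :type: :class:`Service`
        """
        return relationship.many_to_one(cls, 'service')

    # endregion

    # region foreign keys

    @declared_attr
    def service_fk(cls):
        return relationship.foreign_key('service', nullable=True)

    # endregion
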
diff --git a/apache-ariatosca-0.1.1/aria/modeling/service_instance.py b/apache-ariatosca-0.1.1/aria/modeling/service_instance.py
deleted file mode 100644
index 002a87c..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/service_instance.py
+++ /dev/null
@@ -1,2240 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling service instance module
-"""
-
-# pylint: disable=too-many-lines, no-self-argument, no-member, abstract-method
-
-from sqlalchemy import (
-    Column,
-    Text,
-    Integer,
-    Enum,
-    Boolean
-)
-from sqlalchemy import DateTime
-from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.ext.orderinglist import ordering_list
-
-from .mixins import InstanceModelMixin
-from ..orchestrator import execution_plugin
-from ..parser import validation
-from ..parser.consumption import ConsumptionContext
-from ..utils import (
-    collections,
-    formatting,
-    console
-)
-from . import (
-    relationship,
-    utils,
-    types as modeling_types
-)
-
-
-class ServiceBase(InstanceModelMixin):
-    """
-    Usually an instance of a :class:`ServiceTemplate` and its many associated templates (node
-    templates, group templates, policy templates, etc.). However, it can also be created
-    programmatically.
-    """
-
-    __tablename__ = 'service'
-
-    __private_fields__ = ('substitution_fk',
-                          'service_template_fk')
-
-    # region association proxies
-
-    @declared_attr
-    def service_template_name(cls):
-        return relationship.association_proxy('service_template', 'name', type=':obj:`basestring`')
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def substitution(cls):
-        """
-        Exposes the entire service as a single node.
-
-        :type: :class:`Substitution`
-        """
-        return relationship.one_to_one(cls, 'substitution', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def outputs(cls):
-        """
-        Output parameters.
-
-        :type: {:obj:`basestring`: :class:`Output`}
-        """
-        return relationship.one_to_many(cls, 'output', dict_key='name')
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Externally provided parameters.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def updates(cls):
-        """
-        Service updates.
-
-        :type: [:class:`ServiceUpdate`]
-        """
-        return relationship.one_to_many(cls, 'service_update')
-
-    @declared_attr
-    def modifications(cls):
-        """
-        Service modifications.
-
-        :type: [:class:`ServiceModification`]
-        """
-        return relationship.one_to_many(cls, 'service_modification')
-
-    @declared_attr
-    def executions(cls):
-        """
-        Executions.
-
-        :type: [:class:`Execution`]
-        """
-        return relationship.one_to_many(cls, 'execution')
-
-    @declared_attr
-    def nodes(cls):
-        """
-        Nodes.
-
-        :type: {:obj:`basestring`, :class:`Node`}
-        """
-        return relationship.one_to_many(cls, 'node', dict_key='name')
-
-    @declared_attr
-    def groups(cls):
-        """
-        Groups.
-
-        :type: {:obj:`basestring`, :class:`Group`}
-        """
-        return relationship.one_to_many(cls, 'group', dict_key='name')
-
-    @declared_attr
-    def policies(cls):
-        """
-        Policies.
-
-        :type: {:obj:`basestring`, :class:`Policy`}
-        """
-        return relationship.one_to_many(cls, 'policy', dict_key='name')
-
-    @declared_attr
-    def workflows(cls):
-        """
-        Workflows.
-
-        :type: {:obj:`basestring`, :class:`Operation`}
-        """
-        return relationship.one_to_many(cls, 'operation', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Source service template (can be ``None``).
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def meta_data(cls):
-        """
-        Associated metadata.
-
-        :type: {:obj:`basestring`, :class:`Metadata`}
-        """
-        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
-        return relationship.many_to_many(cls, 'metadata', dict_key='name')
-
-    @declared_attr
-    def plugins(cls):
-        """
-        Associated plugins.
-
-        :type: {:obj:`basestring`, :class:`Plugin`}
-        """
-        return relationship.many_to_many(cls, 'plugin', dict_key='name')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def substitution_fk(cls):
-        """Service one-to-one to Substitution"""
-        return relationship.foreign_key('substitution', nullable=True)
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For Service many-to-one to ServiceTemplate"""
-        return relationship.foreign_key('service_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    created_at = Column(DateTime, nullable=False, index=True, doc="""
-    Creation timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    updated_at = Column(DateTime, doc="""
-    Update timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    def satisfy_requirements(self):
-        satisfied = True
-        for node in self.nodes.itervalues():
-            if not node.satisfy_requirements():
-                satisfied = False
-        return satisfied
-
-    def validate_capabilities(self):
-        satisfied = True
-        for node in self.nodes.itervalues():
-            if not node.validate_capabilities():
-                satisfied = False
-        return satisfied
-
-    def find_hosts(self):
-        for node in self.nodes.itervalues():
-            node.find_host()
-
-    def configure_operations(self):
-        for node in self.nodes.itervalues():
-            node.configure_operations()
-        for group in self.groups.itervalues():
-            group.configure_operations()
-        for operation in self.workflows.itervalues():
-            operation.configure()
-
-    def is_node_a_target(self, target_node):
-        for node in self.nodes.itervalues():
-            if self._is_node_a_target(node, target_node):
-                return True
-        return False
-
-    def _is_node_a_target(self, source_node, target_node):
-        if source_node.outbound_relationships:
-            for relationship_model in source_node.outbound_relationships:
-                if relationship_model.target_node.name == target_node.name:
-                    return True
-                else:
-                    node = relationship_model.target_node
-                    if node is not None:
-                        if self._is_node_a_target(node, target_node):
-                            return True
-        return False
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('description', self.description),
-            ('metadata', formatting.as_raw_dict(self.meta_data)),
-            ('nodes', formatting.as_raw_list(self.nodes)),
-            ('groups', formatting.as_raw_list(self.groups)),
-            ('policies', formatting.as_raw_list(self.policies)),
-            ('substitution', formatting.as_raw(self.substitution)),
-            ('inputs', formatting.as_raw_dict(self.inputs)),
-            ('outputs', formatting.as_raw_dict(self.outputs)),
-            ('workflows', formatting.as_raw_list(self.workflows))))
-
-    def validate(self):
-        utils.validate_dict_values(self.meta_data)
-        utils.validate_dict_values(self.nodes)
-        utils.validate_dict_values(self.groups)
-        utils.validate_dict_values(self.policies)
-        if self.substitution is not None:
-            self.substitution.validate()
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.outputs)
-        utils.validate_dict_values(self.workflows)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.meta_data, report_issues)
-        utils.coerce_dict_values(self.nodes, report_issues)
-        utils.coerce_dict_values(self.groups, report_issues)
-        utils.coerce_dict_values(self.policies, report_issues)
-        if self.substitution is not None:
-            self.substitution.coerce_values(report_issues)
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.outputs, report_issues)
-        utils.coerce_dict_values(self.workflows, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.description is not None:
-            console.puts(context.style.meta(self.description))
-        utils.dump_dict_values(self.meta_data, 'Metadata')
-        for node in self.nodes.itervalues():
-            node.dump()
-        for group in self.groups.itervalues():
-            group.dump()
-        for policy in self.policies.itervalues():
-            policy.dump()
-        if self.substitution is not None:
-            self.substitution.dump()
-        utils.dump_dict_values(self.inputs, 'Inputs')
-        utils.dump_dict_values(self.outputs, 'Outputs')
-        utils.dump_dict_values(self.workflows, 'Workflows')
-
-    def dump_graph(self):
-        for node in self.nodes.itervalues():
-            if not self.is_node_a_target(node):
-                self._dump_graph_node(node)
-
-    def _dump_graph_node(self, node, capability=None):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(node.name))
-        if capability is not None:
-            console.puts('{0} ({1})'.format(context.style.property(capability.name),
-                                            context.style.type(capability.type.name)))
-        if node.outbound_relationships:
-            with context.style.indent:
-                for relationship_model in node.outbound_relationships:
-                    relationship_name = context.style.property(relationship_model.name)
-                    if relationship_model.type is not None:
-                        console.puts('-> {0} ({1})'.format(relationship_name,
-                                                           context.style.type(
-                                                               relationship_model.type.name)))
-                    else:
-                        console.puts('-> {0}'.format(relationship_name))
-                    with console.indent(3):
-                        self._dump_graph_node(relationship_model.target_node,
-                                              relationship_model.target_capability)
-
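
Since the collections declared on ServiceBase use dict_key='name', a service instance reads like a set of name-keyed mappings. A brief usage sketch (the service object and the node/input names are hypothetical; loading the instance from ARIA's model storage is assumed rather than shown):

# 'service' is assumed to be a concrete Service model instance already loaded from storage.
web = service.nodes['web_server_1']              # name-keyed, per dict_key='name'
port = service.inputs['port'].value              # Input is a ParameterMixin subclass

for workflow in service.workflows.itervalues():  # Python 2 codebase, hence itervalues()
    print(workflow.name)

if not service.satisfy_requirements():           # wires Relationship models between nodes
    print('some requirements could not be satisfied')
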
-
-class NodeBase(InstanceModelMixin):
-    """
-    Typed vertex in the service topology.
-
-    Nodes may have zero or more :class:`Relationship` instances to other nodes, together forming
-    a many-to-many node graph.
-
-    Usually an instance of a :class:`NodeTemplate`.
-    """
-
-    __tablename__ = 'node'
-
-    __private_fields__ = ('type_fk',
-                          'host_fk',
-                          'service_fk',
-                          'node_template_fk')
-
-    INITIAL = 'initial'
-    CREATING = 'creating'
-    CREATED = 'created'
-    CONFIGURING = 'configuring'
-    CONFIGURED = 'configured'
-    STARTING = 'starting'
-    STARTED = 'started'
-    STOPPING = 'stopping'
-    DELETING = 'deleting'
-    DELETED = 'deleted'
-    ERROR = 'error'
-
-    # 'deleted' isn't actually part of the TOSCA spec: according to the description of the
-    # 'deleting' state, "Node is transitioning from its current state to one where it is deleted and
-    # its state is no longer tracked by the instance model." However, we prefer to be able to
-    # retrieve information about deleted nodes, so we added the 'deleted' state to make that
-    # possible.
-
-    STATES = (INITIAL, CREATING, CREATED, CONFIGURING, CONFIGURED, STARTING, STARTED, STOPPING,
-              DELETING, DELETED, ERROR)
-
-    _OP_TO_STATE = {'create': {'transitional': CREATING, 'finished': CREATED},
-                    'configure': {'transitional': CONFIGURING, 'finished': CONFIGURED},
-                    'start': {'transitional': STARTING, 'finished': STARTED},
-                    'stop': {'transitional': STOPPING, 'finished': CONFIGURED},
-                    'delete': {'transitional': DELETING, 'finished': DELETED}}
-
-    # region association proxies
-
-    @declared_attr
-    def service_name(cls):
-        return relationship.association_proxy('service', 'name', type=':obj:`basestring`')
-
-    @declared_attr
-    def node_template_name(cls):
-        return relationship.association_proxy('node_template', 'name', type=':obj:`basestring`')
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def host(cls): # pylint: disable=method-hidden
-        """
-        Node in which we are hosted (can be ``None``).
-
-        Normally the host node is found by following the relationship graph (relationships with
-        ``host`` roles) to final nodes (with ``host`` roles).
-
-        :type: :class:`Node`
-        """
-        return relationship.one_to_one_self(cls, 'host_fk')
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def tasks(cls):
-        """
-        Associated tasks.
-
-        :type: [:class:`Task`]
-        """
-        return relationship.one_to_many(cls, 'task')
-
-    @declared_attr
-    def interfaces(cls):
-        """
-        Associated interfaces.
-
-        :type: {:obj:`basestring`: :class:`Interface`}
-        """
-        return relationship.one_to_many(cls, 'interface', dict_key='name')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    @declared_attr
-    def attributes(cls):
-        """
-        Associated mutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Attribute`}
-        """
-        return relationship.one_to_many(cls, 'attribute', dict_key='name')
-
-    @declared_attr
-    def artifacts(cls):
-        """
-        Associated artifacts.
-
-        :type: {:obj:`basestring`: :class:`Artifact`}
-        """
-        return relationship.one_to_many(cls, 'artifact', dict_key='name')
-
-    @declared_attr
-    def capabilities(cls):
-        """
-        Associated exposed capabilities.
-
-        :type: {:obj:`basestring`: :class:`Capability`}
-        """
-        return relationship.one_to_many(cls, 'capability', dict_key='name')
-
-    @declared_attr
-    def outbound_relationships(cls):
-        """
-        Relationships to other nodes.
-
-        :type: [:class:`Relationship`]
-        """
-        return relationship.one_to_many(
-            cls, 'relationship', other_fk='source_node_fk', back_populates='source_node',
-            rel_kwargs=dict(
-                order_by='Relationship.source_position',
-                collection_class=ordering_list('source_position', count_from=0)
-            )
-        )
-
-    @declared_attr
-    def inbound_relationships(cls):
-        """
-        Relationships from other nodes.
-
-        :type: [:class:`Relationship`]
-        """
-        return relationship.one_to_many(
-            cls, 'relationship', other_fk='target_node_fk', back_populates='target_node',
-            rel_kwargs=dict(
-                order_by='Relationship.target_position',
-                collection_class=ordering_list('target_position', count_from=0)
-            )
-        )
-
-    # endregion
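
The two relationship collections above rely on SQLAlchemy's ordering_list to keep source_position/target_position in sync with the Python list order. A standalone sketch of that mechanism (plain SQLAlchemy 1.x rather than ARIA's relationship helpers; MiniNode/MiniRelationship are made-up names used for illustration only):

from sqlalchemy import Column, ForeignKey, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()


class MiniNode(Base):
    __tablename__ = 'mini_node'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    outbound = relationship(
        'MiniRelationship',
        foreign_keys='MiniRelationship.source_id',
        order_by='MiniRelationship.source_position',
        collection_class=ordering_list('source_position', count_from=0))


class MiniRelationship(Base):
    __tablename__ = 'mini_relationship'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    source_position = Column(Integer)
    source_id = Column(Integer, ForeignKey('mini_node.id'))
    target_id = Column(Integer, ForeignKey('mini_node.id'))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

node = MiniNode(name='web_server')
node.outbound.append(MiniRelationship(name='hosted_on'))
node.outbound.append(MiniRelationship(name='connects_to'))
session.add(node)
session.commit()

# ordering_list assigned source_position automatically, starting from 0:
print([(r.name, r.source_position) for r in node.outbound])
# [('hosted_on', 0), ('connects_to', 1)]
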
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service.
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Source node template (can be ``None``).
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Node type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Node many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def host_fk(cls):
-        """For Node one-to-one to Node"""
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Node"""
-        return relationship.foreign_key('service')
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For Node many-to-one to NodeTemplate"""
-        return relationship.foreign_key('node_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    state = Column(Enum(*STATES, name='node_state'), nullable=False, default=INITIAL, doc="""
-    TOSCA state.
-
-    :type: :obj:`basestring`
-    """)
-
-    version = Column(Integer, default=1, doc="""
-    Used by :mod:`aria.storage.instrumentation`.
-
-    :type: :obj:`int`
-    """)
-
-    __mapper_args__ = {'version_id_col': version} # Enable SQLAlchemy automatic version counting
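
version_id_col enables SQLAlchemy's optimistic concurrency counter: each flushed UPDATE increments 'version' and is emitted with a WHERE guard on the expected value, so a stale concurrent writer raises StaleDataError instead of silently overwriting. A standalone sketch (plain SQLAlchemy; the Thing model is hypothetical):

from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Thing(Base):
    __tablename__ = 'thing'
    id = Column(Integer, primary_key=True)
    state = Column(Text)
    version = Column(Integer, nullable=False)
    __mapper_args__ = {'version_id_col': version}


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

thing = Thing(state='initial')
session.add(thing)
session.commit()
print(thing.version)      # 1 -- set automatically on INSERT

thing.state = 'started'
session.commit()
print(thing.version)      # 2 -- incremented on UPDATE; a flush from a stale
                          # session would raise sqlalchemy.orm.exc.StaleDataError
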
-
-    @classmethod
-    def determine_state(cls, op_name, is_transitional):
-        """ :returns the state the node should be in as a result of running the
-            operation on this node.
-
-            e.g. if we are running tosca.interfaces.node.lifecycle.Standard.create, then
-            the resulting state should either 'creating' (if the task just started) or 'created'
-            (if the task ended).
-
-            If the operation is not a standard tosca lifecycle operation, then we return None"""
-
-        state_type = 'transitional' if is_transitional else 'finished'
-        try:
-            return cls._OP_TO_STATE[op_name][state_type]
-        except KeyError:
-            return None
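
For instance, the _OP_TO_STATE mapping above gives the following results (a self-contained check against the base class in this module, assuming the apache-ariatosca 0.1.1 package is importable):

from aria.modeling.service_instance import NodeBase

assert NodeBase.determine_state('create', is_transitional=True) == NodeBase.CREATING
assert NodeBase.determine_state('stop', is_transitional=False) == NodeBase.CONFIGURED
assert NodeBase.determine_state('scale', is_transitional=False) is None   # not a lifecycle op
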
-
-    def is_available(self):
-        return self.state not in (self.INITIAL, self.DELETED, self.ERROR)
-
-    @property
-    def host_address(self):
-        if self.host and self.host.attributes:
-            attribute = self.host.attributes.get('ip')
-            return attribute.value if attribute else None
-        return None
-
-    def satisfy_requirements(self):
-        node_template = self.node_template
-        satisfied = True
-        for requirement_template in node_template.requirement_templates:
-            # Find target template
-            target_node_template, target_node_capability = \
-                requirement_template.find_target(node_template)
-            if target_node_template is not None:
-                satisfied = self._satisfy_capability(target_node_capability,
-                                                     target_node_template,
-                                                     requirement_template)
-            else:
-                context = ConsumptionContext.get_thread_local()
-                context.validation.report('requirement "{0}" of node "{1}" has no target node '
-                                          'template'.format(requirement_template.name, self.name),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    def _satisfy_capability(self, target_node_capability, target_node_template,
-                            requirement_template):
-        from . import models
-        context = ConsumptionContext.get_thread_local()
-        # Find target nodes
-        target_nodes = target_node_template.nodes
-        if target_nodes:
-            target_node = None
-            target_capability = None
-
-            if target_node_capability is not None:
-                # Relate to the first target node that has capacity
-                for node in target_nodes:
-                    a_target_capability = node.capabilities.get(target_node_capability.name)
-                    if a_target_capability.relate():
-                        target_node = node
-                        target_capability = a_target_capability
-                        break
-            else:
-                # Use first target node
-                target_node = target_nodes[0]
-
-            if target_node is not None:
-                if requirement_template.relationship_template is not None:
-                    relationship_model = \
-                        requirement_template.relationship_template.instantiate(self)
-                else:
-                    relationship_model = models.Relationship()
-                relationship_model.name = requirement_template.name
-                relationship_model.requirement_template = requirement_template
-                relationship_model.target_node = target_node
-                relationship_model.target_capability = target_capability
-                self.outbound_relationships.append(relationship_model)
-                return True
-            else:
-                context.validation.report('requirement "{0}" of node "{1}" targets node '
-                                          'template "{2}" but its instantiated nodes do not '
-                                          'have enough capacity'.format(
-                                              requirement_template.name,
-                                              self.name,
-                                              target_node_template.name),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                return False
-        else:
-            context.validation.report('requirement "{0}" of node "{1}" targets node template '
-                                      '"{2}" but it has no instantiated nodes'.format(
-                                          requirement_template.name,
-                                          self.name,
-                                          target_node_template.name),
-                                      level=validation.Issue.BETWEEN_INSTANCES)
-            return False
-
-    def validate_capabilities(self):
-        context = ConsumptionContext.get_thread_local()
-        satisfied = True
-        for capability in self.capabilities.itervalues():
-            if not capability.has_enough_relationships:
-                context.validation.report('capability "{0}" of node "{1}" requires at least {2:d} '
-                                          'relationships but has {3:d}'.format(
-                                              capability.name,
-                                              self.name,
-                                              capability.min_occurrences,
-                                              capability.occurrences),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    def find_host(self):
-        def _find_host(node):
-            if node.type.role == 'host':
-                return node
-            for the_relationship in node.outbound_relationships:
-                if (the_relationship.target_capability is not None) and \
-                    the_relationship.target_capability.type.role == 'host':
-                    host = _find_host(the_relationship.target_node)
-                    if host is not None:
-                        return host
-            for the_relationship in node.inbound_relationships:
-                if (the_relationship.target_capability is not None) and \
-                    the_relationship.target_capability.type.role == 'feature':
-                    host = _find_host(the_relationship.source_node)
-                    if host is not None:
-                        return host
-            return None
-
-        self.host = _find_host(self)
-
-    def configure_operations(self):
-        for interface in self.interfaces.itervalues():
-            interface.configure_operations()
-        for the_relationship in self.outbound_relationships:
-            the_relationship.configure_operations()
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type.name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('attributes', formatting.as_raw_dict(self.attributes)),
-            ('interfaces', formatting.as_raw_list(self.interfaces)),
-            ('artifacts', formatting.as_raw_list(self.artifacts)),
-            ('capabilities', formatting.as_raw_list(self.capabilities)),
-            ('relationships', formatting.as_raw_list(self.outbound_relationships))))
-
-    def validate(self):
-        context = ConsumptionContext.get_thread_local()
-        if len(self.name) > context.modeling.id_max_length:
-            context.validation.report('"{0}" has an ID longer than the limit of {1:d} characters: '
-                                      '{2:d}'.format(
-                                          self.name,
-                                          context.modeling.id_max_length,
-                                          len(self.name)),
-                                      level=validation.Issue.BETWEEN_INSTANCES)
-
-        # TODO: validate that node template is of type?
-
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.attributes)
-        utils.validate_dict_values(self.interfaces)
-        utils.validate_dict_values(self.artifacts)
-        utils.validate_dict_values(self.capabilities)
-        utils.validate_list_values(self.outbound_relationships)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.attributes, report_issues)
-        utils.coerce_dict_values(self.interfaces, report_issues)
-        utils.coerce_dict_values(self.artifacts, report_issues)
-        utils.coerce_dict_values(self.capabilities, report_issues)
-        utils.coerce_list_values(self.outbound_relationships, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Node: {0}'.format(context.style.node(self.name)))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            console.puts('Template: {0}'.format(context.style.node(self.node_template.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_dict_values(self.attributes, 'Attributes')
-            utils.dump_interfaces(self.interfaces)
-            utils.dump_dict_values(self.artifacts, 'Artifacts')
-            utils.dump_dict_values(self.capabilities, 'Capabilities')
-            utils.dump_list_values(self.outbound_relationships, 'Relationships')
-
-
-class GroupBase(InstanceModelMixin):
-    """
-    Typed logical container for zero or more :class:`Node` instances.
-
-    Usually an instance of a :class:`GroupTemplate`.
-    """
-
-    __tablename__ = 'group'
-
-    __private_fields__ = ('type_fk',
-                          'service_fk',
-                          'group_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    @declared_attr
-    def interfaces(cls):
-        """
-        Associated interfaces.
-
-        :type: {:obj:`basestring`: :class:`Interface`}
-        """
-        return relationship.one_to_many(cls, 'interface', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service.
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    @declared_attr
-    def group_template(cls):
-        """
-        Source group template (can be ``None``).
-
-        :type: :class:`GroupTemplate`
-        """
-        return relationship.many_to_one(cls, 'group_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Group type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def nodes(cls):
-        """
-        Member nodes.
-
-        :type: [:class:`Node`]
-        """
-        return relationship.many_to_many(cls, 'node')
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Group many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Group"""
-        return relationship.foreign_key('service')
-
-    @declared_attr
-    def group_template_fk(cls):
-        """For Group many-to-one to GroupTemplate"""
-        return relationship.foreign_key('group_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    def configure_operations(self):
-        for interface in self.interfaces.itervalues():
-            interface.configure_operations()
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interfaces', formatting.as_raw_list(self.interfaces))))
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.interfaces)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.interfaces, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Group: {0}'.format(context.style.node(self.name)))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_interfaces(self.interfaces)
-            if self.nodes:
-                console.puts('Member nodes:')
-                with context.style.indent:
-                    for node in self.nodes:
-                        console.puts(context.style.node(node.name))
-
-
-class PolicyBase(InstanceModelMixin):
-    """
-    Typed set of orchestration hints applied to zero or more :class:`Node` or :class:`Group`
-    instances.
-
-    Usually an instance of a :class:`PolicyTemplate`.
-    """
-
-    __tablename__ = 'policy'
-
-    __private_fields__ = ('type_fk',
-                          'service_fk',
-                          'policy_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service.
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service')
-
-    @declared_attr
-    def policy_template(cls):
-        """
-        Source policy template (can be ``None``).
-
-        :type: :class:`PolicyTemplate`
-        """
-        return relationship.many_to_one(cls, 'policy_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Policy type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def nodes(cls):
-        """
-        Policy is enacted on these nodes.
-
-        :type: {:obj:`basestring`: :class:`Node`}
-        """
-        return relationship.many_to_many(cls, 'node')
-
-    @declared_attr
-    def groups(cls):
-        """
-        Policy is enacted on nodes in these groups.
-
-        :type: {:obj:`basestring`: :class:`Group`}
-        """
-        return relationship.many_to_many(cls, 'group')
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Policy many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Policy"""
-        return relationship.foreign_key('service')
-
-    @declared_attr
-    def policy_template_fk(cls):
-        """For Policy many-to-one to PolicyTemplate"""
-        return relationship.foreign_key('policy_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type.name),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Policy: {0}'.format(context.style.node(self.name)))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            if self.nodes:
-                console.puts('Target nodes:')
-                with context.style.indent:
-                    for node in self.nodes:
-                        console.puts(context.style.node(node.name))
-            if self.groups:
-                console.puts('Target groups:')
-                with context.style.indent:
-                    for group in self.groups:
-                        console.puts(context.style.node(group.name))
-
-
-class SubstitutionBase(InstanceModelMixin):
-    """
-    Exposes the entire service as a single node.
-
-    Usually an instance of a :class:`SubstitutionTemplate`.
-    """
-
-    __tablename__ = 'substitution'
-
-    __private_fields__ = ('node_type_fk',
-                          'substitution_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def mappings(cls):
-        """
-        Map requirement and capabilities to exposed node.
-
-        :type: {:obj:`basestring`: :class:`SubstitutionMapping`}
-        """
-        return relationship.one_to_many(cls, 'substitution_mapping', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service.
-
-        :type: :class:`Service`
-        """
-        return relationship.one_to_one(cls, 'service', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def substitution_template(cls):
-        """
-        Source substitution template (can be ``None``).
-
-        :type: :class:`SubstitutionTemplate`
-        """
-        return relationship.many_to_one(cls, 'substitution_template')
-
-    @declared_attr
-    def node_type(cls):
-        """
-        Exposed node type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def node_type_fk(cls):
-        """For Substitution many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def substitution_template_fk(cls):
-        """For Substitution many-to-one to SubstitutionTemplate"""
-        return relationship.foreign_key('substitution_template', nullable=True)
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('node_type_name', self.node_type.name),
-            ('mappings', formatting.as_raw_dict(self.mappings))))
-
-    def validate(self):
-        utils.validate_dict_values(self.mappings)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.mappings, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Substitution:')
-        with context.style.indent:
-            console.puts('Node type: {0}'.format(context.style.type(self.node_type.name)))
-            utils.dump_dict_values(self.mappings, 'Mappings')
-
-
-class SubstitutionMappingBase(InstanceModelMixin):
-    """
-    Used by :class:`Substitution` to map a capability or a requirement to the exposed node.
-
-    The :attr:`name` field should match the capability or requirement template name on the exposed
-    node's type.
-
-    Only one of :attr:`capability` and :attr:`requirement_template` can be set. If the latter is
-    set, then :attr:`node` must also be set.
-
-    Usually an instance of a :class:`SubstitutionMappingTemplate`.
-    """
-
-    __tablename__ = 'substitution_mapping'
-
-    __private_fields__ = ('substitution_fk',
-                          'capability_fk',
-                          'requirement_template_fk',
-                          'node_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def capability(cls):
-        """
-        Capability to expose (can be ``None``).
-
-        :type: :class:`Capability`
-        """
-        return relationship.one_to_one(cls, 'capability', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def requirement_template(cls):
-        """
-        Requirement template to expose (can be ``None``).
-
-        :type: :class:`RequirementTemplate`
-        """
-        return relationship.one_to_one(cls, 'requirement_template',
-                                       back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def node(cls):
-        """
-        Node for which to expose :attr:`requirement_template` (can be ``None``).
-
-        :type: :class:`Node`
-        """
-        return relationship.one_to_one(cls, 'node', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def substitution(cls):
-        """
-        Containing substitution.
-
-        :type: :class:`Substitution`
-        """
-        return relationship.many_to_one(cls, 'substitution', back_populates='mappings')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def substitution_fk(cls):
-        """For Substitution one-to-many to SubstitutionMapping"""
-        return relationship.foreign_key('substitution')
-
-    @declared_attr
-    def capability_fk(cls):
-        """For Substitution one-to-one to Capability"""
-        return relationship.foreign_key('capability', nullable=True)
-
-    @declared_attr
-    def node_fk(cls):
-        """For Substitution one-to-one to Node"""
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def requirement_template_fk(cls):
-        """For Substitution one-to-one to RequirementTemplate"""
-        return relationship.foreign_key('requirement_template', nullable=True)
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),))
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def validate(self):
-        context = ConsumptionContext.get_thread_local()
-        if (self.capability is None) and (self.requirement_template is None):
-            context.validation.report('mapping "{0}" refers to neither a capability nor a '
-                                      'requirement in node: {1}'.format(
-                                          self.name,
-                                          formatting.safe_repr(self.node.name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.capability is not None:
-            console.puts('{0} -> {1}.{2}'.format(
-                context.style.node(self.name),
-                context.style.node(self.capability.node.name),
-                context.style.node(self.capability.name)))
-        else:
-            console.puts('{0} -> {1}.{2}'.format(
-                context.style.node(self.name),
-                context.style.node(self.node.name),
-                context.style.node(self.requirement_template.name)))
-
-
-class RelationshipBase(InstanceModelMixin):
-    """
-    Optionally-typed edge in the service topology, connecting a :class:`Node` to a
-    :class:`Capability` of another node.
-
-    Might be an instance of :class:`RelationshipTemplate` and/or :class:`RequirementTemplate`.
-    """
-
-    __tablename__ = 'relationship'
-
-    __private_fields__ = ('type_fk',
-                          'source_node_fk',
-                          'target_node_fk',
-                          'target_capability_fk',
-                          'requirement_template_fk',
-                          'relationship_template_fk',
-                          'target_position',
-                          'source_position')
-
-    # region association proxies
-
-    @declared_attr
-    def source_node_name(cls):
-        return relationship.association_proxy('source_node', 'name')
-
-    @declared_attr
-    def target_node_name(cls):
-        return relationship.association_proxy('target_node', 'name')
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def target_capability(cls):
-        """
-        Target capability.
-
-        :type: :class:`Capability`
-        """
-        return relationship.one_to_one(cls, 'capability', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def tasks(cls):
-        """
-        Associated tasks.
-
-        :type: [:class:`Task`]
-        """
-        return relationship.one_to_many(cls, 'task')
-
-    @declared_attr
-    def interfaces(cls):
-        """
-        Associated interfaces.
-
-        :type: {:obj:`basestring`: :class:`Interface`}
-        """
-        return relationship.one_to_many(cls, 'interface', dict_key='name')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def source_node(cls):
-        """
-        Source node.
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(
-            cls, 'node', fk='source_node_fk', back_populates='outbound_relationships')
-
-    @declared_attr
-    def target_node(cls):
-        """
-        Target node.
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(
-            cls, 'node', fk='target_node_fk', back_populates='inbound_relationships')
-
-    @declared_attr
-    def relationship_template(cls):
-        """
-        Source relationship template (can be ``None``).
-
-        :type: :class:`RelationshipTemplate`
-        """
-        return relationship.many_to_one(cls, 'relationship_template')
-
-    @declared_attr
-    def requirement_template(cls):
-        """
-        Source requirement template (can be ``None``).
-
-        :type: :class:`RequirementTemplate`
-        """
-        return relationship.many_to_one(cls, 'requirement_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Relationship type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Relationship many-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def source_node_fk(cls):
-        """For Node one-to-many to Relationship"""
-        return relationship.foreign_key('node')
-
-    @declared_attr
-    def target_node_fk(cls):
-        """For Node one-to-many to Relationship"""
-        return relationship.foreign_key('node')
-
-    @declared_attr
-    def target_capability_fk(cls):
-        """For Relationship one-to-one to Capability"""
-        return relationship.foreign_key('capability', nullable=True)
-
-    @declared_attr
-    def requirement_template_fk(cls):
-        """For Relationship many-to-one to RequirementTemplate"""
-        return relationship.foreign_key('requirement_template', nullable=True)
-
-    @declared_attr
-    def relationship_template_fk(cls):
-        """For Relationship many-to-one to RelationshipTemplate"""
-        return relationship.foreign_key('relationship_template', nullable=True)
-
-    # endregion
-
-    source_position = Column(Integer, doc="""
-    Position at source.
-
-    :type: :obj:`int`
-    """)
-
-    target_position = Column(Integer, doc="""
-    Position at target.
-
-    :type: :obj:`int`
-    """)
-
-    def configure_operations(self):
-        for interface in self.interfaces.itervalues():
-            interface.configure_operations()
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('target_node_id', self.target_node.name),
-            ('type_name', self.type.name
-             if self.type is not None else None),
-            ('template_name', self.relationship_template.name
-             if self.relationship_template is not None else None),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interfaces', formatting.as_raw_list(self.interfaces))))
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.interfaces)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.interfaces, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.name:
-            console.puts('{0} ->'.format(context.style.node(self.name)))
-        else:
-            console.puts('->')
-        with context.style.indent:
-            console.puts('Node: {0}'.format(context.style.node(self.target_node.name)))
-            if self.target_capability:
-                console.puts('Capability: {0}'.format(context.style.node(
-                    self.target_capability.name)))
-            if self.type is not None:
-                console.puts('Relationship type: {0}'.format(context.style.type(self.type.name)))
-            if (self.relationship_template is not None) and self.relationship_template.name:
-                console.puts('Relationship template: {0}'.format(
-                    context.style.node(self.relationship_template.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_interfaces(self.interfaces, 'Interfaces')
-
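The ``as_raw`` properties defined throughout this module follow one pattern: each model builds an ordered, plain-data view of itself that downstream consumers can dump as YAML or JSON. A minimal standalone sketch of that pattern for a relationship; the names and values are illustrative only and this is not ARIA code::

    import collections

    def relationship_as_raw(name, target_node_name, type_name=None, template_name=None):
        # Mirrors the shape of RelationshipBase.as_raw above: stable key order,
        # and both the type and the source template may be None.
        return collections.OrderedDict((
            ('name', name),
            ('target_node_id', target_node_name),
            ('type_name', type_name),
            ('template_name', template_name)))

    raw = relationship_as_raw('host', 'server_1', type_name='tosca.relationships.HostedOn')
    assert list(raw.keys()) == ['name', 'target_node_id', 'type_name', 'template_name']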
-
-class CapabilityBase(InstanceModelMixin):
-    """
-    Typed attachment serving two purposes: to provide extra properties and attributes to a
-    :class:`Node`, and to expose targets for :class:`Relationship` instances from other nodes.
-
-    Usually an instance of a :class:`CapabilityTemplate`.
-    """
-
-    __tablename__ = 'capability'
-
-    __private_fields__ = ('capability_fk',
-                          'node_fk',
-                          'capability_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node(cls):
-        """
-        Containing node.
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    @declared_attr
-    def capability_template(cls):
-        """
-        Source capability template (can be ``None``).
-
-        :type: :class:`CapabilityTemplate`
-        """
-        return relationship.many_to_one(cls, 'capability_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Capability type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Capability many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_fk(cls):
-        """For Node one-to-many to Capability"""
-        return relationship.foreign_key('node')
-
-    @declared_attr
-    def capability_template_fk(cls):
-        """For Capability many-to-one to CapabilityTemplate"""
-        return relationship.foreign_key('capability_template', nullable=True)
-
-    # endregion
-
-    min_occurrences = Column(Integer, default=None, doc="""
-    Minimum number of requirement matches required.
-
-    :type: :obj:`int`
-    """)
-
-    max_occurrences = Column(Integer, default=None, doc="""
-    Maximum number of requirement matches allowed.
-
-    :type: :obj:`int`
-    """)
-
-    occurrences = Column(Integer, default=0, doc="""
-    Number of requirement matches.
-
-    :type: :obj:`int`
-    """)
-
-    @property
-    def has_enough_relationships(self):
-        if self.min_occurrences is not None:
-            return self.occurrences >= self.min_occurrences
-        return True
-
-    def relate(self):
-        if self.max_occurrences is not None:
-            if self.occurrences == self.max_occurrences:
-                return False
-        self.occurrences += 1
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type.name),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            console.puts('Occurrences: {0:d} ({1:d}{2})'.format(
-                self.occurrences,
-                self.min_occurrences or 0,
-                ' to {0:d}'.format(self.max_occurrences)
-                if self.max_occurrences is not None
-                else ' or more'))
-            utils.dump_dict_values(self.properties, 'Properties')
-
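The occurrence bookkeeping in ``CapabilityBase`` above (``relate`` and ``has_enough_relationships``) drives requirement matching: a capability accepts new relationships until ``max_occurrences`` is reached, and counts as satisfied once ``min_occurrences`` is met (or when no minimum is set). A standalone sketch of the same logic, illustrative only and not ARIA code::

    class OccurrenceCounter(object):
        """Mimics the min/max occurrence bookkeeping of CapabilityBase."""

        def __init__(self, min_occurrences=None, max_occurrences=None):
            self.min_occurrences = min_occurrences
            self.max_occurrences = max_occurrences
            self.occurrences = 0

        def relate(self):
            # Refuse new relationships once the capability is saturated.
            if self.max_occurrences is not None and self.occurrences == self.max_occurrences:
                return False
            self.occurrences += 1
            return True

        @property
        def has_enough_relationships(self):
            # No minimum means the capability is always satisfied.
            return self.min_occurrences is None or self.occurrences >= self.min_occurrences

    counter = OccurrenceCounter(min_occurrences=1, max_occurrences=2)
    assert counter.relate() and counter.relate() and not counter.relate()
    assert counter.has_enough_relationships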
-
-class InterfaceBase(InstanceModelMixin):
-    """
-    Typed bundle of :class:`Operation` instances.
-
-    Can be associated with a :class:`Node`, a :class:`Group`, or a :class:`Relationship`.
-
-    Usually an instance of a :class:`InterfaceTemplate`.
-    """
-
-    __tablename__ = 'interface'
-
-    __private_fields__ = ('type_fk',
-                          'node_fk',
-                          'group_fk',
-                          'relationship_fk',
-                          'interface_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Parameters for all operations of the interface.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def operations(cls):
-        """
-        Associated operations.
-
-        :type: {:obj:`basestring`: :class:`Operation`}
-        """
-        return relationship.one_to_many(cls, 'operation', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node(cls):
-        """
-        Containing node (can be ``None``).
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    @declared_attr
-    def group(cls):
-        """
-        Containing group (can be ``None``).
-
-        :type: :class:`Group`
-        """
-        return relationship.many_to_one(cls, 'group')
-
-    @declared_attr
-    def relationship(cls):
-        """
-        Containing relationship (can be ``None``).
-
-        :type: :class:`Relationship`
-        """
-        return relationship.many_to_one(cls, 'relationship')
-
-    @declared_attr
-    def interface_template(cls):
-        """
-        Source interface template (can be ``None``).
-
-        :type: :class:`InterfaceTemplate`
-        """
-        return relationship.many_to_one(cls, 'interface_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Interface type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Interface many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_fk(cls):
-        """For Node one-to-many to Interface"""
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def group_fk(cls):
-        """For Group one-to-many to Interface"""
-        return relationship.foreign_key('group', nullable=True)
-
-    @declared_attr
-    def relationship_fk(cls):
-        """For Relationship one-to-many to Interface"""
-        return relationship.foreign_key('relationship', nullable=True)
-
-    @declared_attr
-    def interface_template_fk(cls):
-        """For Interface many-to-one to InterfaceTemplate"""
-        return relationship.foreign_key('interface_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    def configure_operations(self):
-        for operation in self.operations.itervalues():
-            operation.configure()
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('inputs', formatting.as_raw_dict(self.inputs)),
-            ('operations', formatting.as_raw_list(self.operations))))
-
-    def validate(self):
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.operations)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.operations, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Interface type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
-            utils.dump_dict_values(self.operations, 'Operations')
-
-
-class OperationBase(InstanceModelMixin):
-    """
-    Entry points to Python functions called as part of a workflow execution.
-
-    The operation signature (its :attr:`name` and its :attr:`inputs`'s names and types) is declared
-    by the type of the :class:`Interface`, however each operation can provide its own
-    :attr:`implementation` as well as additional inputs.
-
-    The Python :attr:`function` is usually provided by an associated :class:`Plugin`. Its purpose is
-    to execute the implementation, providing it with both the operation's and interface's inputs.
-    The :attr:`arguments` of the function should be set according to the specific signature of the
-    function.
-
-    Additionally, :attr:`configuration` parameters can be provided as hints to configure the
-    function's behavior. For example, they can be used to configure remote execution credentials.
-
-    Might be an instance of :class:`OperationTemplate`.
-    """
-
-    __tablename__ = 'operation'
-
-    __private_fields__ = ('service_fk',
-                          'interface_fk',
-                          'plugin_fk',
-                          'operation_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def plugin(cls):
-        """
-        Associated plugin.
-
-        :type: :class:`Plugin`
-        """
-        return relationship.one_to_one(cls, 'plugin', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Parameters provided to the :attr:`implementation`.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def arguments(cls):
-        """
-        Arguments sent to the Python :attr:`function`.
-
-        :type: {:obj:`basestring`: :class:`Argument`}
-        """
-        return relationship.one_to_many(cls, 'argument', dict_key='name')
-
-    @declared_attr
-    def configurations(cls):
-        """
-        Configuration parameters for the Python :attr:`function`.
-
-        :type: {:obj:`basestring`: :class:`Configuration`}
-        """
-        return relationship.one_to_many(cls, 'configuration', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service(cls):
-        """
-        Containing service (can be ``None``). For workflow operations.
-
-        :type: :class:`Service`
-        """
-        return relationship.many_to_one(cls, 'service', back_populates='workflows')
-
-    @declared_attr
-    def interface(cls):
-        """
-        Containing interface (can be ``None``).
-
-        :type: :class:`Interface`
-        """
-        return relationship.many_to_one(cls, 'interface')
-
-    @declared_attr
-    def operation_template(cls):
-        """
-        Source operation template (can be ``None``).
-
-        :type: :class:`OperationTemplate`
-        """
-        return relationship.many_to_one(cls, 'operation_template')
-
-    # endregion
-
-    # region many_to_many relationships
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Operation"""
-        return relationship.foreign_key('service', nullable=True)
-
-    @declared_attr
-    def interface_fk(cls):
-        """For Interface one-to-many to Operation"""
-        return relationship.foreign_key('interface', nullable=True)
-
-    @declared_attr
-    def plugin_fk(cls):
-        """For Operation one-to-one to Plugin"""
-        return relationship.foreign_key('plugin', nullable=True)
-
-    @declared_attr
-    def operation_template_fk(cls):
-        """For Operation many-to-one to OperationTemplate"""
-        return relationship.foreign_key('operation_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    relationship_edge = Column(Boolean, doc="""
-    When ``True``, specifies that the operation is on the relationship's target edge; when
-    ``False``, on the source edge (only used by operations on relationships).
-
-    :type: :obj:`bool`
-    """)
-
-    implementation = Column(Text, doc="""
-    Implementation (usually the name of an artifact).
-
-    :type: :obj:`basestring`
-    """)
-
-    dependencies = Column(modeling_types.StrictList(item_cls=basestring), doc="""
-    Dependencies (usually names of artifacts).
-
-    :type: [:obj:`basestring`]
-    """)
-
-    function = Column(Text, doc="""
-    Full path to Python function.
-
-    :type: :obj:`basestring`
-    """)
-
-    executor = Column(Text, doc="""
-    Name of executor.
-
-    :type: :obj:`basestring`
-    """)
-
-    max_attempts = Column(Integer, doc="""
-    Maximum number of attempts allowed in case of task failure.
-
-    :type: :obj:`int`
-    """)
-
-    retry_interval = Column(Integer, doc="""
-    Interval between task retry attempts (in seconds).
-
-    :type: :obj:`int`
-    """)
-
-    def configure(self):
-        if (self.implementation is None) and (self.function is None):
-            return
-
-        if (self.interface is not None) and (self.plugin is None) and (self.function is None):
-            # ("interface" is None for workflow operations, which do not currently use "plugin")
-            # The default (None) plugin is the execution plugin
-            execution_plugin.instantiation.configure_operation(self)
-        else:
-            # In the future plugins may be able to add their own "configure_operation" hook that
-            # can validate the configuration and otherwise create specially derived arguments. For
-            # now, we just send all configuration parameters as arguments without validation.
-            configurations_as_arguments = {}
-            for configuration in self.configurations.itervalues():
-                configurations_as_arguments[configuration.name] = configuration.as_argument()
-
-            utils.instantiate_dict(self, self.arguments, configurations_as_arguments)
-
-        # Send all inputs as extra arguments
-        # Note that they will override existing arguments of the same names
-        inputs_as_arguments = {}
-        for input in self.inputs.itervalues():
-            inputs_as_arguments[input.name] = input.as_argument()
-
-        utils.instantiate_dict(self, self.arguments, inputs_as_arguments)
-
-        # Check for reserved arguments
-        from ..orchestrator.decorators import OPERATION_DECORATOR_RESERVED_ARGUMENTS
-        used_reserved_names = \
-            OPERATION_DECORATOR_RESERVED_ARGUMENTS.intersection(self.arguments.keys())
-        if used_reserved_names:
-            context = ConsumptionContext.get_thread_local()
-            context.validation.report('using reserved arguments in operation "{0}": {1}'
-                                      .format(
-                                          self.name,
-                                          formatting.string_list_as_string(used_reserved_names)),
-                                      level=validation.Issue.EXTERNAL)
-
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('inputs', formatting.as_raw_dict(self.inputs))))
-
-    def validate(self):
-        # TODO must be associated with either interface or service
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.configurations)
-        utils.validate_dict_values(self.arguments)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.configurations, report_issues)
-        utils.coerce_dict_values(self.arguments, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                console.puts('Implementation: {0}'.format(
-                    context.style.literal(self.implementation)))
-            if self.dependencies:
-                console.puts(
-                    'Dependencies: {0}'.format(
-                        ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
-            utils.dump_dict_values(self.inputs, 'Inputs')
-            if self.executor is not None:
-                console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
-            if self.max_attempts is not None:
-                console.puts('Max attempts: {0}'.format(context.style.literal(self.max_attempts)))
-            if self.retry_interval is not None:
-                console.puts('Retry interval: {0}'.format(
-                    context.style.literal(self.retry_interval)))
-            if self.plugin is not None:
-                console.puts('Plugin: {0}'.format(
-                    context.style.literal(self.plugin.name)))
-            utils.dump_dict_values(self.configurations, 'Configuration')
-            if self.function is not None:
-                console.puts('Function: {0}'.format(context.style.literal(self.function)))
-            utils.dump_dict_values(self.arguments, 'Arguments')
-
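Two details of ``OperationBase.configure`` above are easy to miss: operation inputs are merged into the arguments last, so they override configuration-derived arguments of the same name, and any resulting argument whose name collides with the orchestrator's reserved decorator arguments is reported as a validation issue. A standalone sketch of that merge-and-check step; the reserved names below are assumptions for illustration only (the real set is ``OPERATION_DECORATOR_RESERVED_ARGUMENTS`` from ``aria.orchestrator.decorators``)::

    # Assumed reserved names, for illustration only.
    RESERVED_ARGUMENTS = frozenset(('ctx', 'toolbelt'))

    def build_arguments(configurations, inputs):
        arguments = dict(configurations)
        arguments.update(inputs)  # inputs win over same-named configuration arguments
        reserved = RESERVED_ARGUMENTS.intersection(arguments)
        return arguments, sorted(reserved)

    arguments, reserved = build_arguments({'timeout': 60, 'ctx': 'oops'}, {'timeout': 30})
    assert arguments['timeout'] == 30  # input overrides configuration
    assert reserved == ['ctx']         # would be reported as a validation issue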
-
-class ArtifactBase(InstanceModelMixin):
-    """
-    Typed file, either provided in a CSAR or downloaded from a repository.
-
-    Usually an instance of :class:`ArtifactTemplate`.
-    """
-
-    __tablename__ = 'artifact'
-
-    __private_fields__ = ('type_fk',
-                          'node_fk',
-                          'artifact_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def properties(cls):
-        """
-        Associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node(cls):
-        """
-        Containing node.
-
-        :type: :class:`Node`
-        """
-        return relationship.many_to_one(cls, 'node')
-
-    @declared_attr
-    def artifact_template(cls):
-        """
-        Source artifact template (can be ``None``).
-
-        :type: :class:`ArtifactTemplate`
-        """
-        return relationship.many_to_one(cls, 'artifact_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Artifact type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Artifact many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_fk(cls):
-        """For Node one-to-many to Artifact"""
-        return relationship.foreign_key('node')
-
-    @declared_attr
-    def artifact_template_fk(cls):
-        """For Artifact many-to-one to ArtifactTemplate"""
-        return relationship.foreign_key('artifact_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    source_path = Column(Text, doc="""
-    Source path (in CSAR or repository).
-
-    :type: :obj:`basestring`
-    """)
-
-    target_path = Column(Text, doc="""
-    Path at the destination at which to install the artifact.
-
-    :type: :obj:`basestring`
-    """)
-
-    repository_url = Column(Text, doc="""
-    Repository URL.
-
-    :type: :obj:`basestring`
-    """)
-
-    repository_credential = Column(modeling_types.StrictDict(basestring, basestring), doc="""
-    Credentials for accessing the repository.
-
-    :type: {:obj:`basestring`: :obj:`basestring`}
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Artifact type: {0}'.format(context.style.type(self.type.name)))
-            console.puts('Source path: {0}'.format(context.style.literal(self.source_path)))
-            if self.target_path is not None:
-                console.puts('Target path: {0}'.format(context.style.literal(self.target_path)))
-            if self.repository_url is not None:
-                console.puts('Repository URL: {0}'.format(
-                    context.style.literal(self.repository_url)))
-            if self.repository_credential:
-                console.puts('Repository credential: {0}'.format(
-                    context.style.literal(self.repository_credential)))
-            utils.dump_dict_values(self.properties, 'Properties')
diff --git a/apache-ariatosca-0.1.1/aria/modeling/service_template.py b/apache-ariatosca-0.1.1/aria/modeling/service_template.py
deleted file mode 100644
index 57fd672..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/service_template.py
+++ /dev/null
@@ -1,2411 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA modeling service template module
-"""
-
-# pylint: disable=too-many-lines, no-self-argument, no-member, abstract-method
-
-from __future__ import absolute_import  # so we can import standard 'types'
-
-from datetime import datetime
-
-from sqlalchemy import (
-    Column,
-    Text,
-    Integer,
-    Boolean,
-    DateTime,
-    PickleType
-)
-from sqlalchemy.ext.declarative import declared_attr
-
-from ..parser import validation
-from ..parser.consumption import ConsumptionContext
-from ..parser.reading import deepcopy_with_locators
-from ..utils import (collections, formatting, console)
-from ..utils.versions import VersionString
-from .mixins import TemplateModelMixin
-from . import (
-    relationship,
-    utils,
-    types as modeling_types
-)
-
-
-class ServiceTemplateBase(TemplateModelMixin):
-    """
-    Template for creating :class:`Service` instances.
-
-    Usually created by various DSL parsers, such as ARIA's TOSCA extension. However, it can also be
-    created programmatically.
-    """
-
-    __tablename__ = 'service_template'
-
-    __private_fields__ = ('substitution_template_fk',
-                          'node_type_fk',
-                          'group_type_fk',
-                          'policy_type_fk',
-                          'relationship_type_fk',
-                          'capability_type_fk',
-                          'interface_type_fk',
-                          'artifact_type_fk')
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    main_file_name = Column(Text, doc="""
-    Filename of CSAR or YAML file from which this service template was parsed.
-
-    :type: :obj:`basestring`
-    """)
-
-    created_at = Column(DateTime, nullable=False, index=True, doc="""
-    Creation timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    updated_at = Column(DateTime, doc="""
-    Update timestamp.
-
-    :type: :class:`~datetime.datetime`
-    """)
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def substitution_template(cls):
-        """
-        Exposes an entire service as a single node.
-
-        :type: :class:`SubstitutionTemplate`
-        """
-        return relationship.one_to_one(
-            cls, 'substitution_template', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def node_types(cls):
-        """
-        Base for the node type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='node_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def group_types(cls):
-        """
-        Base for the group type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='group_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def policy_types(cls):
-        """
-        Base for the policy type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='policy_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def relationship_types(cls):
-        """
-        Base for the relationship type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='relationship_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def capability_types(cls):
-        """
-        Base for the capability type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='capability_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def interface_types(cls):
-        """
-        Base for the interface type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='interface_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def artifact_types(cls):
-        """
-        Base for the artifact type hierarchy.
-
-        :type: :class:`Type`
-        """
-        return relationship.one_to_one(
-            cls, 'type', fk='artifact_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def services(cls):
-        """
-        Instantiated services.
-
-        :type: {:obj:`basestring`: :class:`Service`}
-        """
-        return relationship.one_to_many(cls, 'service', dict_key='name')
-
-    @declared_attr
-    def node_templates(cls):
-        """
-        Templates for creating nodes.
-
-        :type: {:obj:`basestring`: :class:`NodeTemplate`}
-        """
-        return relationship.one_to_many(cls, 'node_template', dict_key='name')
-
-    @declared_attr
-    def group_templates(cls):
-        """
-        Templates for creating groups.
-
-        :type: {:obj:`basestring`: :class:`GroupTemplate`}
-        """
-        return relationship.one_to_many(cls, 'group_template', dict_key='name')
-
-    @declared_attr
-    def policy_templates(cls):
-        """
-        Templates for creating policies.
-
-        :type: {:obj:`basestring`: :class:`PolicyTemplate`}
-        """
-        return relationship.one_to_many(cls, 'policy_template', dict_key='name')
-
-    @declared_attr
-    def workflow_templates(cls):
-        """
-        Templates for creating workflows.
-
-        :type: {:obj:`basestring`: :class:`OperationTemplate`}
-        """
-        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
-
-    @declared_attr
-    def outputs(cls):
-        """
-        Declarations for output parameters that are filled in after service installation.
-
-        :type: {:obj:`basestring`: :class:`Output`}
-        """
-        return relationship.one_to_many(cls, 'output', dict_key='name')
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Declarations for externally provided parameters.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def plugin_specifications(cls):
-        """
-        Required plugins for instantiated services.
-
-        :type: {:obj:`basestring`: :class:`PluginSpecification`}
-        """
-        return relationship.one_to_many(cls, 'plugin_specification', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def meta_data(cls):
-        """
-        Associated metadata.
-
-        :type: {:obj:`basestring`: :class:`Metadata`}
-        """
-        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
-        return relationship.many_to_many(cls, 'metadata', dict_key='name')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def substitution_template_fk(cls):
-        """For ServiceTemplate one-to-one to SubstitutionTemplate"""
-        return relationship.foreign_key('substitution_template', nullable=True)
-
-    @declared_attr
-    def node_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def group_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def policy_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def relationship_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def capability_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def interface_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def artifact_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('description', self.description),
-            ('metadata', formatting.as_raw_dict(self.meta_data)),
-            ('node_templates', formatting.as_raw_list(self.node_templates)),
-            ('group_templates', formatting.as_raw_list(self.group_templates)),
-            ('policy_templates', formatting.as_raw_list(self.policy_templates)),
-            ('substitution_template', formatting.as_raw(self.substitution_template)),
-            ('inputs', formatting.as_raw_dict(self.inputs)),
-            ('outputs', formatting.as_raw_dict(self.outputs)),
-            ('workflow_templates', formatting.as_raw_list(self.workflow_templates))))
-
-    @property
-    def types_as_raw(self):
-        return collections.OrderedDict((
-            ('node_types', formatting.as_raw(self.node_types)),
-            ('group_types', formatting.as_raw(self.group_types)),
-            ('policy_types', formatting.as_raw(self.policy_types)),
-            ('relationship_types', formatting.as_raw(self.relationship_types)),
-            ('capability_types', formatting.as_raw(self.capability_types)),
-            ('interface_types', formatting.as_raw(self.interface_types)),
-            ('artifact_types', formatting.as_raw(self.artifact_types))))
-
-    def instantiate(self, container, model_storage, inputs=None):  # pylint: disable=arguments-differ
-        from . import models
-        context = ConsumptionContext.get_thread_local()
-        now = datetime.now()
-        service = models.Service(created_at=now,
-                                 updated_at=now,
-                                 description=deepcopy_with_locators(self.description),
-                                 service_template=self)
-        context.modeling.instance = service
-
-        service.inputs = utils.merge_parameter_values(inputs, self.inputs, model_cls=models.Input)
-        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
-
-        for plugin_specification in self.plugin_specifications.itervalues():
-            if plugin_specification.enabled:
-                if plugin_specification.resolve(model_storage):
-                    plugin = plugin_specification.plugin
-                    service.plugins[plugin.name] = plugin
-                else:
-                    context = ConsumptionContext.get_thread_local()
-                    context.validation.report('specified plugin not found: {0}'.format(
-                        plugin_specification.name), level=validation.Issue.EXTERNAL)
-
-        utils.instantiate_dict(self, service.meta_data, self.meta_data)
-
-        for node_template in self.node_templates.itervalues():
-            for _ in range(node_template.default_instances):
-                node = node_template.instantiate(container)
-                service.nodes[node.name] = node
-
-        utils.instantiate_dict(self, service.groups, self.group_templates)
-        utils.instantiate_dict(self, service.policies, self.policy_templates)
-        utils.instantiate_dict(self, service.workflows, self.workflow_templates)
-
-        if self.substitution_template is not None:
-            service.substitution = self.substitution_template.instantiate(container)
-
-        utils.instantiate_dict(self, service.outputs, self.outputs)
-
-        return service
-
-    def validate(self):
-        utils.validate_dict_values(self.meta_data)
-        utils.validate_dict_values(self.node_templates)
-        utils.validate_dict_values(self.group_templates)
-        utils.validate_dict_values(self.policy_templates)
-        if self.substitution_template is not None:
-            self.substitution_template.validate()
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.outputs)
-        utils.validate_dict_values(self.workflow_templates)
-        if self.node_types is not None:
-            self.node_types.validate()
-        if self.group_types is not None:
-            self.group_types.validate()
-        if self.policy_types is not None:
-            self.policy_types.validate()
-        if self.relationship_types is not None:
-            self.relationship_types.validate()
-        if self.capability_types is not None:
-            self.capability_types.validate()
-        if self.interface_types is not None:
-            self.interface_types.validate()
-        if self.artifact_types is not None:
-            self.artifact_types.validate()
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.meta_data, report_issues)
-        utils.coerce_dict_values(self.node_templates, report_issues)
-        utils.coerce_dict_values(self.group_templates, report_issues)
-        utils.coerce_dict_values(self.policy_templates, report_issues)
-        if self.substitution_template is not None:
-            self.substitution_template.coerce_values(report_issues)
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.outputs, report_issues)
-        utils.coerce_dict_values(self.workflow_templates, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.description is not None:
-            console.puts(context.style.meta(self.description))
-        utils.dump_dict_values(self.meta_data, 'Metadata')
-        for node_template in self.node_templates.itervalues():
-            node_template.dump()
-        for group_template in self.group_templates.itervalues():
-            group_template.dump()
-        for policy_template in self.policy_templates.itervalues():
-            policy_template.dump()
-        if self.substitution_template is not None:
-            self.substitution_template.dump()
-        utils.dump_dict_values(self.inputs, 'Inputs')
-        utils.dump_dict_values(self.outputs, 'Outputs')
-        utils.dump_dict_values(self.workflow_templates, 'Workflow templates')
-
-    def dump_types(self):
-        if self.node_types.children:
-            console.puts('Node types:')
-            self.node_types.dump()
-        if self.group_types.children:
-            console.puts('Group types:')
-            self.group_types.dump()
-        if self.capability_types.children:
-            console.puts('Capability types:')
-            self.capability_types.dump()
-        if self.relationship_types.children:
-            console.puts('Relationship types:')
-            self.relationship_types.dump()
-        if self.policy_types.children:
-            console.puts('Policy types:')
-            self.policy_types.dump()
-        if self.artifact_types.children:
-            console.puts('Artifact types:')
-            self.artifact_types.dump()
-        if self.interface_types.children:
-            console.puts('Interface types:')
-            self.interface_types.dump()
-
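One step of ``ServiceTemplateBase.instantiate`` above worth calling out is plugin resolution: only enabled plugin specifications are resolved against model storage, and a specification that cannot be resolved becomes an external validation issue rather than a hard failure. A standalone sketch of that step, illustrative only, where ``resolve`` and ``report`` stand in for ARIA's storage lookup and validation reporting::

    def resolve_plugins(specifications, resolve, report):
        plugins = {}
        for name, spec in specifications.items():
            if not spec['enabled']:
                continue  # disabled specifications are skipped entirely
            plugin = resolve(name)
            if plugin is not None:
                plugins[name] = plugin
            else:
                report('specified plugin not found: {0}'.format(name))
        return plugins

    issues = []
    found = resolve_plugins(
        {'openstack': {'enabled': True},
         'legacy': {'enabled': False},
         'missing': {'enabled': True}},
        resolve=lambda name: 'plugin-object' if name == 'openstack' else None,
        report=issues.append)
    assert sorted(found) == ['openstack']
    assert issues == ['specified plugin not found: missing']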
-
-class NodeTemplateBase(TemplateModelMixin):
-    """
-    Template for creating zero or more :class:`Node` instances, which are typed vertices in the
-    service topology.
-    """
-
-    __tablename__ = 'node_template'
-
-    __private_fields__ = ('type_fk',
-                          'service_template_fk')
-
-    # region association proxies
-
-    @declared_attr
-    def service_template_name(cls):
-        return relationship.association_proxy('service_template', 'name')
-
-    @declared_attr
-    def type_name(cls):
-        return relationship.association_proxy('type', 'name')
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def nodes(cls):
-        """
-        Instantiated nodes.
-
-        :type: [:class:`Node`]
-        """
-        return relationship.one_to_many(cls, 'node')
-
-    @declared_attr
-    def interface_templates(cls):
-        """
-        Associated interface templates.
-
-        :type: {:obj:`basestring`: :class:`InterfaceTemplate`}
-        """
-        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
-
-    @declared_attr
-    def artifact_templates(cls):
-        """
-        Associated artifacts.
-
-        :type: {:obj:`basestring`: :class:`ArtifactTemplate`}
-        """
-        return relationship.one_to_many(cls, 'artifact_template', dict_key='name')
-
-    @declared_attr
-    def capability_templates(cls):
-        """
-        Associated exposed capability templates.
-
-        :type: {:obj:`basestring`: :class:`CapabilityTemplate`}
-        """
-        return relationship.one_to_many(cls, 'capability_template', dict_key='name')
-
-    @declared_attr
-    def requirement_templates(cls):
-        """
-        Associated potential relationships with other nodes.
-
-        :type: [:class:`RequirementTemplate`]
-        """
-        return relationship.one_to_many(cls, 'requirement_template', other_fk='node_template_fk')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    @declared_attr
-    def attributes(cls):
-        """
-        Declarations for associated mutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Attribute`}
-        """
-        return relationship.one_to_many(cls, 'attribute', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def type(cls):
-        """
-        Node type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template.
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For NodeTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to NodeTemplate"""
-        return relationship.foreign_key('service_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    default_instances = Column(Integer, default=1, doc="""
-    Default number of nodes that will appear in the service.
-
-    :type: :obj:`int`
-    """)
-
-    min_instances = Column(Integer, default=0, doc="""
-    Minimum number of nodes that will appear in the service.
-
-    :type: :obj:`int`
-    """)
-
-    max_instances = Column(Integer, default=None, doc="""
-    Maximum number of nodes that will appear in the service.
-
-    :type: :obj:`int`
-    """)
-
-    target_node_template_constraints = Column(PickleType, doc="""
-    Constraints for filtering relationship targets.
-
-    :type: [:class:`NodeTemplateConstraint`]
-    """)
-
-    def is_target_node_template_valid(self, target_node_template):
-        if self.target_node_template_constraints:
-            for node_template_constraint in self.target_node_template_constraints:
-                if not node_template_constraint.matches(self, target_node_template):
-                    return False
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('default_instances', self.default_instances),
-            ('min_instances', self.min_instances),
-            ('max_instances', self.max_instances),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('attributes', formatting.as_raw_dict(self.attributes)),
-            ('interface_templates', formatting.as_raw_list(self.interface_templates)),
-            ('artifact_templates', formatting.as_raw_list(self.artifact_templates)),
-            ('capability_templates', formatting.as_raw_list(self.capability_templates)),
-            ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
-
-    def instantiate(self, container):
-        from . import models
-        if self.nodes:
-            highest_name_suffix = max(int(n.name.rsplit('_', 1)[-1]) for n in self.nodes)
-            suffix = highest_name_suffix + 1
-        else:
-            suffix = 1
-        name = '{name}_{index}'.format(name=self.name, index=suffix)
-        node = models.Node(name=name,
-                           type=self.type,
-                           description=deepcopy_with_locators(self.description),
-                           state=models.Node.INITIAL,
-                           node_template=self)
-        utils.instantiate_dict(node, node.properties, self.properties)
-        utils.instantiate_dict(node, node.attributes, self.attributes)
-        utils.instantiate_dict(node, node.interfaces, self.interface_templates)
-        utils.instantiate_dict(node, node.artifacts, self.artifact_templates)
-        utils.instantiate_dict(node, node.capabilities, self.capability_templates)
-
-        # Default attributes
-        if 'tosca_name' in node.attributes:
-            node.attributes['tosca_name'].value = self.name
-        if 'tosca_id' in node.attributes:
-            node.attributes['tosca_id'].value = name
-
-        return node
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.attributes)
-        utils.validate_dict_values(self.interface_templates)
-        utils.validate_dict_values(self.artifact_templates)
-        utils.validate_dict_values(self.capability_templates)
-        utils.validate_list_values(self.requirement_templates)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.attributes, report_issues)
-        utils.coerce_dict_values(self.interface_templates, report_issues)
-        utils.coerce_dict_values(self.artifact_templates, report_issues)
-        utils.coerce_dict_values(self.capability_templates, report_issues)
-        utils.coerce_list_values(self.requirement_templates, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Node template: {0}'.format(context.style.node(self.name)))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            console.puts('Instances: {0:d} ({1:d}{2})'.format(
-                self.default_instances,
-                self.min_instances,
-                ' to {0:d}'.format(self.max_instances)
-                if self.max_instances is not None
-                else ' or more'))
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_dict_values(self.attributes, 'Attributes')
-            utils.dump_interfaces(self.interface_templates)
-            utils.dump_dict_values(self.artifact_templates, 'Artifact templates')
-            utils.dump_dict_values(self.capability_templates, 'Capability templates')
-            utils.dump_list_values(self.requirement_templates, 'Requirement templates')
-
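The naming scheme in ``NodeTemplateBase.instantiate`` above is simple but worth making explicit: each new node takes the template's name plus an incrementing numeric suffix, continuing from the highest suffix already in use among the template's nodes. A standalone sketch, illustrative only::

    def next_node_name(template_name, existing_names):
        if existing_names:
            highest = max(int(name.rsplit('_', 1)[-1]) for name in existing_names)
            suffix = highest + 1
        else:
            suffix = 1
        return '{name}_{index}'.format(name=template_name, index=suffix)

    assert next_node_name('web_server', []) == 'web_server_1'
    assert next_node_name('web_server', ['web_server_1', 'web_server_2']) == 'web_server_3'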
-
-class GroupTemplateBase(TemplateModelMixin):
-    """
-    Template for creating a :class:`Group` instance, which is a typed logical container for zero or
-    more :class:`Node` instances.
-    """
-
-    __tablename__ = 'group_template'
-
-    __private_fields__ = ('type_fk',
-                          'service_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def groups(cls):
-        """
-        Instantiated groups.
-
-        :type: [:class:`Group`]
-        """
-        return relationship.one_to_many(cls, 'group')
-
-    @declared_attr
-    def interface_templates(cls):
-        """
-        Associated interface templates.
-
-        :type: {:obj:`basestring`: :class:`InterfaceTemplate`}
-        """
-        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template.
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Group type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def node_templates(cls):
-        """
-        Nodes instantiated by these templates will be members of the group.
-
-        :type: [:class:`NodeTemplate`]
-        """
-        return relationship.many_to_many(cls, 'node_template')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For GroupTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to GroupTemplate"""
-        return relationship.foreign_key('service_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interface_templates', formatting.as_raw_list(self.interface_templates))))
-
-    def instantiate(self, container):
-        from . import models
-        group = models.Group(name=self.name,
-                             type=self.type,
-                             description=deepcopy_with_locators(self.description),
-                             group_template=self)
-        utils.instantiate_dict(self, group.properties, self.properties)
-        utils.instantiate_dict(self, group.interfaces, self.interface_templates)
-        if self.node_templates:
-            for node_template in self.node_templates:
-                group.nodes += node_template.nodes
-        return group
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.interface_templates)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.interface_templates, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Group template: {0}'.format(context.style.node(self.name)))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_interfaces(self.interface_templates)
-            if self.node_templates:
-                console.puts('Member node templates: {0}'.format(', '.join(
-                    (str(context.style.node(v.name)) for v in self.node_templates))))
-
-
-class PolicyTemplateBase(TemplateModelMixin):
-    """
-    Template for creating a :class:`Policy` instance, which is a typed set of orchestration hints
-    applied to zero or more :class:`Node` or :class:`Group` instances.
-    """
-
-    __tablename__ = 'policy_template'
-
-    __private_fields__ = ('type_fk',
-                          'service_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def policies(cls):
-        """
-        Instantiated policies.
-
-        :type: [:class:`Policy`]
-        """
-        return relationship.one_to_many(cls, 'policy')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template.
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Policy type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def node_templates(cls):
-        """
-        Policy will be enacted on all nodes instantiated by these templates.
-
-        :type: {:obj:`basestring`: :class:`NodeTemplate`}
-        """
-        return relationship.many_to_many(cls, 'node_template')
-
-    @declared_attr
-    def group_templates(cls):
-        """
-        Policy will be enacted on all nodes in all groups instantiated by these templates.
-
-        :type: {:obj:`basestring`: :class:`GroupTemplate`}
-        """
-        return relationship.many_to_many(cls, 'group_template')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For PolicyTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to PolicyTemplate"""
-        return relationship.foreign_key('service_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, container):
-        from . import models
-        policy = models.Policy(name=self.name,
-                               type=self.type,
-                               description=deepcopy_with_locators(self.description),
-                               policy_template=self)
-        utils.instantiate_dict(self, policy.properties, self.properties)
-        if self.node_templates:
-            for node_template in self.node_templates:
-                policy.nodes += node_template.nodes
-        if self.group_templates:
-            for group_template in self.group_templates:
-                policy.groups += group_template.groups
-        return policy
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Policy template: {0}'.format(context.style.node(self.name)))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.properties, 'Properties')
-            if self.node_templates:
-                console.puts('Target node templates: {0}'.format(', '.join(
-                    (str(context.style.node(v.name)) for v in self.node_templates))))
-            if self.group_templates:
-                console.puts('Target group templates: {0}'.format(', '.join(
-                    (str(context.style.node(v.name)) for v in self.group_templates))))
-
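# A minimal standalone sketch of how GroupTemplate.instantiate and
# PolicyTemplate.instantiate above collect their members/targets: every node
# already instantiated from the listed node templates, plus (for policies) every
# group instantiated from the listed group templates. Plain dicts stand in for
# the ORM objects; nothing here is part of the ARIA API.
def collect_policy_targets(node_templates, group_templates):
    nodes, groups = [], []
    for node_template in node_templates:
        nodes += node_template['nodes']
    for group_template in group_templates:
        groups += group_template['groups']
    return nodes, groups

print(collect_policy_targets([{'nodes': ['web_1', 'web_2']}],
                             [{'groups': ['scale_group_1']}]))
# (['web_1', 'web_2'], ['scale_group_1'])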
-
-class SubstitutionTemplateBase(TemplateModelMixin):
-    """
-    Template for creating a :class:`Substitution` instance, which exposes an entire instantiated
-    service as a single node.
-    """
-
-    __tablename__ = 'substitution_template'
-
-    __private_fields__ = ('node_type_fk',)
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def substitutions(cls):
-        """
-        Instantiated substitutions.
-
-        :type: [:class:`Substitution`]
-        """
-        return relationship.one_to_many(cls, 'substitution')
-
-    @declared_attr
-    def mappings(cls):
-        """
-        Maps requirements and capabilities to the exposed node.
-
-        :type: {:obj:`basestring`: :class:`SubstitutionTemplateMapping`}
-        """
-        return relationship.one_to_many(cls, 'substitution_template_mapping', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_type(cls):
-        """
-        Exposed node type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def node_type_fk(cls):
-        """For SubstitutionTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('node_type_name', self.node_type.name),
-            ('mappings', formatting.as_raw_dict(self.mappings))))
-
-    def instantiate(self, container):
-        from . import models
-        substitution = models.Substitution(node_type=self.node_type,
-                                           substitution_template=self)
-        utils.instantiate_dict(container, substitution.mappings, self.mappings)
-        return substitution
-
-    def validate(self):
-        utils.validate_dict_values(self.mappings)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.mappings, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts('Substitution template:')
-        with context.style.indent:
-            console.puts('Node type: {0}'.format(context.style.type(self.node_type.name)))
-            utils.dump_dict_values(self.mappings, 'Mappings')
-
-
-class SubstitutionTemplateMappingBase(TemplateModelMixin):
-    """
-    Used by :class:`SubstitutionTemplate` to map a capability template or a requirement template to
-    the exposed node.
-
-    The :attr:`name` field should match the capability or requirement name on the exposed node's
-    type.
-
-    Only one of :attr:`capability_template` and :attr:`requirement_template` can be set.
-    """
-
-    __tablename__ = 'substitution_template_mapping'
-
-    __private_fields__ = ('substitution_template_fk',
-                          'capability_template_fk',
-                          'requirement_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def capability_template(cls):
-        """
-        Capability template to expose (can be ``None``).
-
-        :type: :class:`CapabilityTemplate`
-        """
-        return relationship.one_to_one(
-            cls, 'capability_template', back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def requirement_template(cls):
-        """
-        Requirement template to expose (can be ``None``).
-
-        :type: :class:`RequirementTemplate`
-        """
-        return relationship.one_to_one(
-            cls, 'requirement_template', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def substitution_template(cls):
-        """
-        Containing substitution template.
-
-        :type: :class:`SubstitutionTemplate`
-        """
-        return relationship.many_to_one(cls, 'substitution_template', back_populates='mappings')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def substitution_template_fk(cls):
-        """For SubstitutionTemplate one-to-many to SubstitutionTemplateMapping"""
-        return relationship.foreign_key('substitution_template')
-
-    @declared_attr
-    def capability_template_fk(cls):
-        """For SubstitutionTemplate one-to-one to CapabilityTemplate"""
-        return relationship.foreign_key('capability_template', nullable=True)
-
-    @declared_attr
-    def requirement_template_fk(cls):
-        """For SubstitutionTemplate one-to-one to RequirementTemplate"""
-        return relationship.foreign_key('requirement_template', nullable=True)
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),))
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def instantiate(self, container):
-        from . import models
-        context = ConsumptionContext.get_thread_local()
-        if self.capability_template is not None:
-            node_template = self.capability_template.node_template
-        else:
-            node_template = self.requirement_template.node_template
-        nodes = node_template.nodes
-        if len(nodes) == 0:
-            context.validation.report(
-                'mapping "{0}" refers to node template "{1}" but there are no '
-                'node instances'.format(self.name, node_template.name),
-                level=validation.Issue.BETWEEN_INSTANCES)
-            return None
-        # The TOSCA spec does not provide a way to choose the node,
-        # so we will just pick the first one
-        node = nodes[0]
-        capability = None
-        if self.capability_template:
-            for a_capability in node.capabilities.itervalues():
-                if a_capability.capability_template.name == self.capability_template.name:
-                    capability = a_capability
-        return models.SubstitutionMapping(name=self.name,
-                                          capability=capability,
-                                          requirement_template=self.requirement_template,
-                                          node=node)
-
-    def validate(self):
-        context = ConsumptionContext.get_thread_local()
-        if (self.capability_template is None) and (self.requirement_template is None):
-            context.validation.report(
-                'mapping "{0}" refers to neither a capability template nor a requirement '
-                'template'.format(self.name),
-                level=validation.Issue.BETWEEN_TYPES)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.capability_template is not None:
-            node_template = self.capability_template.node_template
-        else:
-            node_template = self.requirement_template.node_template
-        console.puts('{0} -> {1}.{2}'.format(
-            context.style.node(self.name),
-            context.style.node(node_template.name),
-            context.style.node(self.capability_template.name
-                               if self.capability_template
-                               else self.requirement_template.name)))
-
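# A minimal standalone sketch of the selection done by
# SubstitutionTemplateMapping.instantiate above: take the first node instantiated
# from the mapped node template (TOSCA gives no selection rule), then pick the
# capability whose template name matches. The namedtuples are simplified
# stand-ins for the ARIA models, not the real classes.
from collections import namedtuple

Capability = namedtuple('Capability', ['name', 'capability_template_name'])
Node = namedtuple('Node', ['name', 'capabilities'])

def select_mapped_target(nodes, capability_template_name):
    if not nodes:
        return None, None  # the real code reports a BETWEEN_INSTANCES issue here
    node = nodes[0]
    for capability in node.capabilities.values():
        if capability.capability_template_name == capability_template_name:
            return node, capability
    return node, None

db_node = Node('db_1', {'endpoint': Capability('endpoint', 'database_endpoint')})
print(select_mapped_target([db_node], 'database_endpoint'))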
-
-class RequirementTemplateBase(TemplateModelMixin):
-    """
-    Template for creating :class:`Relationship` instances, which are optionally-typed edges in the
-    service topology, connecting a :class:`Node` to a :class:`Capability` of another node.
-
-    Note that there is no equivalent "Requirement" instance model. Instead, during instantiation a
-    requirement template is matched with a capability and a :class:`Relationship` is instantiated.
-
-    A requirement template *must* target a :class:`CapabilityType` or a capability name. It can
-    optionally target a specific :class:`NodeType` or :class:`NodeTemplate`.
-
-    Requirement templates may optionally contain a :class:`RelationshipTemplate`. If they do not,
-    a :class:`Relationship` will be instantiated with default values.
-    """
-
-    __tablename__ = 'requirement_template'
-
-    __private_fields__ = ('target_capability_type_fk',
-                          'target_node_template_fk',
-                          'target_node_type_fk',
-                          'relationship_template_fk',
-                          'node_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def target_capability_type(cls):
-        """
-        Target capability type.
-
-        :type: :class:`CapabilityType`
-        """
-        return relationship.one_to_one(cls,
-                                       'type',
-                                       fk='target_capability_type_fk',
-                                       back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def target_node_template(cls):
-        """
-        Target node template (can be ``None``).
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.one_to_one(cls,
-                                       'node_template',
-                                       fk='target_node_template_fk',
-                                       back_populates=relationship.NO_BACK_POP)
-
-    @declared_attr
-    def relationship_template(cls):
-        """
-        Associated relationship template (can be ``None``).
-
-        :type: :class:`RelationshipTemplate`
-        """
-        return relationship.one_to_one(cls, 'relationship_template')
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def relationships(cls):
-        """
-        Instantiated relationships.
-
-        :type: [:class:`Relationship`]
-        """
-        return relationship.one_to_many(cls, 'relationship')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template.
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template', fk='node_template_fk')
-
-    @declared_attr
-    def target_node_type(cls):
-        """
-        Target node type (can be ``None``).
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(
-            cls, 'type', fk='target_node_type_fk', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def target_node_type_fk(cls):
-        """For RequirementTemplate many-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def target_node_template_fk(cls):
-        """For RequirementTemplate one-to-one to NodeTemplate"""
-        return relationship.foreign_key('node_template', nullable=True)
-
-    @declared_attr
-    def target_capability_type_fk(cls):
-        """For RequirementTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For NodeTemplate one-to-many to RequirementTemplate"""
-        return relationship.foreign_key('node_template')
-
-    @declared_attr
-    def relationship_template_fk(cls):
-        """For RequirementTemplate one-to-one to RelationshipTemplate"""
-        return relationship.foreign_key('relationship_template', nullable=True)
-
-    # endregion
-
-    target_capability_name = Column(Text, doc="""
-    Target capability name in node template or node type (can be ``None``).
-
-    :type: :obj:`basestring`
-    """)
-
-    target_node_template_constraints = Column(PickleType, doc="""
-    Constraints for filtering relationship targets.
-
-    :type: [:class:`NodeTemplateConstraint`]
-    """)
-
-    def find_target(self, source_node_template):
-        context = ConsumptionContext.get_thread_local()
-
-        # We might already have a specific node template, so we'll just verify it
-        if self.target_node_template is not None:
-            if not source_node_template.is_target_node_template_valid(self.target_node_template):
-                context.validation.report('requirement "{0}" of node template "{1}" is for node '
-                                          'template "{2}" but it does not match constraints'.format(
-                                              self.name,
-                                              source_node_template.name,
-                                              self.target_node_template.name),
-                                          level=validation.Issue.BETWEEN_TYPES)
-            if (self.target_capability_type is not None) \
-                or (self.target_capability_name is not None):
-                target_node_capability = self.find_target_capability(source_node_template,
-                                                                     self.target_node_template)
-                if target_node_capability is None:
-                    return None, None
-            else:
-                target_node_capability = None
-
-            return self.target_node_template, target_node_capability
-
-        # Find first node that matches the type
-        elif self.target_node_type is not None:
-            for target_node_template in \
-                    self.node_template.service_template.node_templates.values():
-                if self.target_node_type.get_descendant(target_node_template.type.name) is None:
-                    continue
-
-                if not source_node_template.is_target_node_template_valid(target_node_template):
-                    continue
-
-                target_node_capability = self.find_target_capability(source_node_template,
-                                                                     target_node_template)
-                if target_node_capability is None:
-                    continue
-
-                return target_node_template, target_node_capability
-
-        return None, None
-
-    def find_target_capability(self, source_node_template, target_node_template):
-        for capability_template in target_node_template.capability_templates.itervalues():
-            if capability_template.satisfies_requirement(source_node_template,
-                                                         self,
-                                                         target_node_template):
-                return capability_template
-        return None
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('target_node_type_name', self.target_node_type.name
-             if self.target_node_type is not None else None),
-            ('target_node_template_name', self.target_node_template.name
-             if self.target_node_template is not None else None),
-            ('target_capability_type_name', self.target_capability_type.name
-             if self.target_capability_type is not None else None),
-            ('target_capability_name', self.target_capability_name),
-            ('relationship_template', formatting.as_raw(self.relationship_template))))
-
-    def validate(self):
-        if self.relationship_template:
-            self.relationship_template.validate()
-
-    def coerce_values(self, report_issues):
-        if self.relationship_template is not None:
-            self.relationship_template.coerce_values(report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.name:
-            console.puts(context.style.node(self.name))
-        else:
-            console.puts('Requirement:')
-        with context.style.indent:
-            if self.target_node_type is not None:
-                console.puts('Target node type: {0}'.format(
-                    context.style.type(self.target_node_type.name)))
-            elif self.target_node_template is not None:
-                console.puts('Target node template: {0}'.format(
-                    context.style.node(self.target_node_template.name)))
-            if self.target_capability_type is not None:
-                console.puts('Target capability type: {0}'.format(
-                    context.style.type(self.target_capability_type.name)))
-            elif self.target_capability_name is not None:
-                console.puts('Target capability name: {0}'.format(
-                    context.style.node(self.target_capability_name)))
-            if self.target_node_template_constraints:
-                console.puts('Target node template constraints:')
-                with context.style.indent:
-                    for constraint in self.target_node_template_constraints:
-                        console.puts(context.style.literal(constraint))
-            if self.relationship_template:
-                console.puts('Relationship:')
-                with context.style.indent:
-                    self.relationship_template.dump()
-
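# A minimal standalone sketch of the search path in RequirementTemplate.find_target
# above: when no explicit target node template is set, the service template's node
# templates are scanned for the first one whose type descends from the required node
# type and that offers a capability of the required capability type. The dicts and
# 'type_ancestry' sets are simplified stand-ins, not the ARIA models.
def find_target(requirement, node_templates):
    for candidate in node_templates:
        # Candidate's type must be (a descendant of) the required node type.
        if requirement['target_node_type'] not in candidate['type_ancestry']:
            continue
        for capability in candidate['capabilities']:
            if requirement['target_capability_type'] in capability['type_ancestry']:
                return candidate, capability
    return None, None

web_server = {'name': 'web_server', 'type_ancestry': ['Root', 'WebServer'],
              'capabilities': [{'name': 'host', 'type_ancestry': ['Root', 'Container']}]}
requirement = {'target_node_type': 'WebServer', 'target_capability_type': 'Container'}
print(find_target(requirement, [web_server]))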
-
-class RelationshipTemplateBase(TemplateModelMixin):
-    """
-    Optional addition to a :class:`RequirementTemplate`.
-
-    Note that a relationship template here is not exactly equivalent to a relationship template
-    entity in TOSCA. For example, a TOSCA requirement specifying a relationship type rather than a
-    relationship template would still be represented here as a relationship template.
-    """
-
-    __tablename__ = 'relationship_template'
-
-    __private_fields__ = ('type_fk',)
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def relationships(cls):
-        """
-        Instantiated relationships.
-
-        :type: [:class:`Relationship`]
-        """
-        return relationship.one_to_many(cls, 'relationship')
-
-    @declared_attr
-    def interface_templates(cls):
-        """
-        Associated interface templates.
-
-        :type: {:obj:`basestring`: :class:`InterfaceTemplate`}
-        """
-        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def type(cls):
-        """
-        Relationship type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For RelationshipTemplate many-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('type_name', self.type.name if self.type is not None else None),
-            ('name', self.name),
-            ('description', self.description),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interface_templates', formatting.as_raw_list(self.interface_templates))))
-
-    def instantiate(self, container):
-        from . import models
-        relationship_model = models.Relationship(name=self.name,
-                                                 type=self.type,
-                                                 relationship_template=self)
-        utils.instantiate_dict(container, relationship_model.properties, self.properties)
-        utils.instantiate_dict(container, relationship_model.interfaces, self.interface_templates)
-        return relationship_model
-
-    def validate(self):
-        # TODO: either type or name must be set
-        utils.validate_dict_values(self.properties)
-        utils.validate_dict_values(self.interface_templates)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-        utils.coerce_dict_values(self.interface_templates, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        if self.type is not None:
-            console.puts('Relationship type: {0}'.format(context.style.type(self.type.name)))
-        else:
-            console.puts('Relationship template: {0}'.format(
-                context.style.node(self.name)))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            utils.dump_dict_values(self.properties, 'Properties')
-            utils.dump_interfaces(self.interface_templates, 'Interface templates')
-
-
-class CapabilityTemplateBase(TemplateModelMixin):
-    """
-    Template for creating :class:`Capability` instances, typed attachments which serve two purposes:
-    to provide extra properties and attributes to :class:`Node` instances, and to expose targets for
-    :class:`Relationship` instances from other nodes.
-    """
-
-    __tablename__ = 'capability_template'
-
-    __private_fields__ = ('type_fk',
-                          'node_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def capabilities(cls):
-        """
-        Instantiated capabilities.
-
-        :type: [:class:`Capability`]
-        """
-        return relationship.one_to_many(cls, 'capability')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template.
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Capability type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region many_to_many relationships
-
-    @declared_attr
-    def valid_source_node_types(cls):
-        """
-        Reject requirements that are not from these node types.
-
-        :type: [:class:`Type`]
-        """
-        return relationship.many_to_many(cls, 'type', prefix='valid_sources')
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For CapabilityTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For NodeTemplate one-to-many to CapabilityTemplate"""
-        return relationship.foreign_key('node_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    min_occurrences = Column(Integer, default=None, doc="""
-    Minimum number of requirement matches required.
-
-    :type: :obj:`int`
-    """)
-
-    max_occurrences = Column(Integer, default=None, doc="""
-    Maximum number of requirement matches allowed.
-
-    :type: :obj:`int`
-    """)
-
-    def satisfies_requirement(self,
-                              source_node_template,
-                              requirement,
-                              target_node_template):
-        # Do we match the required capability type?
-        if requirement.target_capability_type and \
-            requirement.target_capability_type.get_descendant(self.type.name) is None:
-            return False
-
-        # Are we in valid_source_node_types?
-        if self.valid_source_node_types:
-            for valid_source_node_type in self.valid_source_node_types:
-                if valid_source_node_type.get_descendant(source_node_template.type.name) is None:
-                    return False
-
-        # Apply requirement constraints
-        if requirement.target_node_template_constraints:
-            for node_template_constraint in requirement.target_node_template_constraints:
-                if not node_template_constraint.matches(source_node_template, target_node_template):
-                    return False
-
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('min_occurrences', self.min_occurrences),
-            ('max_occurrences', self.max_occurrences),
-            ('valid_source_node_types', [v.name for v in self.valid_source_node_types]),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, container):
-        from . import models
-        capability = models.Capability(name=self.name,
-                                       type=self.type,
-                                       min_occurrences=self.min_occurrences,
-                                       max_occurrences=self.max_occurrences,
-                                       occurrences=0,
-                                       capability_template=self)
-        utils.instantiate_dict(container, capability.properties, self.properties)
-        return capability
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
-            console.puts(
-                'Occurrences: {0:d}{1}'.format(
-                    self.min_occurrences or 0,
-                    ' to {0:d}'.format(self.max_occurrences)
-                    if self.max_occurrences is not None
-                    else ' or more'))
-            if self.valid_source_node_types:
-                console.puts('Valid source node types: {0}'.format(
-                    ', '.join((str(context.style.type(v.name))
-                               for v in self.valid_source_node_types))))
-            utils.dump_dict_values(self.properties, 'Properties')
-
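# A minimal standalone sketch of the three checks in
# CapabilityTemplate.satisfies_requirement above: required capability type,
# valid_source_node_types, and the requirement's target-node-template constraints.
# Type hierarchies are reduced to ancestor-name lists and constraints to plain
# callables; these helpers are illustrative stand-ins, not the ARIA API.
def satisfies_requirement(capability, requirement, source_node_template):
    # 1. The capability's type must descend from the required capability type.
    if requirement['target_capability_type'] not in capability['type_ancestry']:
        return False
    # 2. The source node's type must descend from the listed valid source types.
    valid_sources = capability.get('valid_source_node_types')
    if valid_sources and not all(t in source_node_template['type_ancestry']
                                 for t in valid_sources):
        return False
    # 3. Every constraint attached to the requirement must match.
    return all(constraint(source_node_template)
               for constraint in requirement.get('constraints', ()))

capability = {'type_ancestry': ['Root', 'Container'],
              'valid_source_node_types': ['SoftwareComponent']}
requirement = {'target_capability_type': 'Container', 'constraints': []}
source = {'type_ancestry': ['Root', 'SoftwareComponent', 'WebApplication']}
print(satisfies_requirement(capability, requirement, source))  # True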
-
-class InterfaceTemplateBase(TemplateModelMixin):
-    """
-    Template for creating :class:`Interface` instances, which are typed bundles of
-    :class:`Operation` instances.
-
-    Can be associated with a :class:`NodeTemplate`, a :class:`GroupTemplate`, or a
-    :class:`RelationshipTemplate`.
-    """
-
-    __tablename__ = 'interface_template'
-
-    __private_fields__ = ('type_fk',
-                          'node_template_fk',
-                          'group_template_fk',
-                          'relationship_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Declarations for externally provided parameters that can be used by all operations of the
-        interface.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def interfaces(cls):
-        """
-        Instantiated interfaces.
-
-        :type: [:class:`Interface`]
-        """
-        return relationship.one_to_many(cls, 'interface')
-
-    @declared_attr
-    def operation_templates(cls):
-        """
-        Associated operation templates.
-
-        :type: {:obj:`basestring`: :class:`OperationTemplate`}
-        """
-        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template (can be ``None``).
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def group_template(cls):
-        """
-        Containing group template (can be ``None``).
-
-        :type: :class:`GroupTemplate`
-        """
-        return relationship.many_to_one(cls, 'group_template')
-
-    @declared_attr
-    def relationship_template(cls):
-        """
-        Containing relationship template (can be ``None``).
-
-        :type: :class:`RelationshipTemplate`
-        """
-        return relationship.many_to_one(cls, 'relationship_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Interface type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For InterfaceTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For NodeTemplate one-to-many to InterfaceTemplate"""
-        return relationship.foreign_key('node_template', nullable=True)
-
-    @declared_attr
-    def group_template_fk(cls):
-        """For GroupTemplate one-to-many to InterfaceTemplate"""
-        return relationship.foreign_key('group_template', nullable=True)
-
-    @declared_attr
-    def relationship_template_fk(cls):
-        """For RelationshipTemplate one-to-many to InterfaceTemplate"""
-        return relationship.foreign_key('relationship_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('inputs', formatting.as_raw_dict(self.inputs)),  # pylint: disable=no-member
-            # TODO fix self.properties reference
-            ('operation_templates', formatting.as_raw_list(self.operation_templates))))
-
-    def instantiate(self, container):
-        from . import models
-        interface = models.Interface(name=self.name,
-                                     type=self.type,
-                                     description=deepcopy_with_locators(self.description),
-                                     interface_template=self)
-        utils.instantiate_dict(container, interface.inputs, self.inputs)
-        utils.instantiate_dict(container, interface.operations, self.operation_templates)
-        return interface
-
-    def validate(self):
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.operation_templates)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.operation_templates, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Interface type: {0}'.format(context.style.type(self.type.name)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
-            utils.dump_dict_values(self.operation_templates, 'Operation templates')
-
-
-class OperationTemplateBase(TemplateModelMixin):
-    """
-    Template for creating :class:`Operation` instances, which are entry points to Python functions
-    called as part of a workflow execution.
-    """
-
-    __tablename__ = 'operation_template'
-
-    __private_fields__ = ('service_template_fk',
-                          'interface_template_fk',
-                          'plugin_specification_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    @declared_attr
-    def plugin_specification(cls):
-        """
-        Associated plugin specification.
-
-        :type: :class:`PluginSpecification`
-        """
-        return relationship.one_to_one(
-            cls, 'plugin_specification', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def operations(cls):
-        """
-        Instantiated operations.
-
-        :type: [:class:`Operation`]
-        """
-        return relationship.one_to_many(cls, 'operation')
-
-    @declared_attr
-    def inputs(cls):
-        """
-        Declarations for parameters provided to the :attr:`implementation`.
-
-        :type: {:obj:`basestring`: :class:`Input`}
-        """
-        return relationship.one_to_many(cls, 'input', dict_key='name')
-
-    @declared_attr
-    def configurations(cls):
-        """
-        Configuration parameters for the operation instance's Python :attr:`function`.
-
-        :type: {:obj:`basestring`: :class:`Configuration`}
-        """
-        return relationship.one_to_many(cls, 'configuration', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template (can be ``None``). For workflow operation templates.
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template',
-                                        back_populates='workflow_templates')
-
-    @declared_attr
-    def interface_template(cls):
-        """
-        Containing interface template (can be ``None``).
-
-        :type: :class:`InterfaceTemplate`
-        """
-        return relationship.many_to_one(cls, 'interface_template')
-
-    # endregion
-
-    # region many_to_many relationships
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to OperationTemplate"""
-        return relationship.foreign_key('service_template', nullable=True)
-
-    @declared_attr
-    def interface_template_fk(cls):
-        """For InterfaceTemplate one-to-many to OperationTemplate"""
-        return relationship.foreign_key('interface_template', nullable=True)
-
-    @declared_attr
-    def plugin_specification_fk(cls):
-        """For OperationTemplate one-to-one to PluginSpecification"""
-        return relationship.foreign_key('plugin_specification', nullable=True)
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    relationship_edge = Column(Boolean, doc="""
-    When ``True``, specifies that the operation is on the relationship's target edge; when
-    ``False``, it is on the source edge (only used by operations on relationships).
-
-    :type: :obj:`bool`
-    """)
-
-    implementation = Column(Text, doc="""
-    Implementation (usually the name of an artifact).
-
-    :type: :obj:`basestring`
-    """)
-
-    dependencies = Column(modeling_types.StrictList(item_cls=basestring), doc="""
-    Dependencies (usually names of artifacts).
-
-    :type: [:obj:`basestring`]
-    """)
-
-    function = Column(Text, doc="""
-    Full path to Python function.
-
-    :type: :obj:`basestring`
-    """)
-
-    executor = Column(Text, doc="""
-    Name of executor.
-
-    :type: :obj:`basestring`
-    """)
-
-    max_attempts = Column(Integer, doc="""
-    Maximum number of attempts allowed in case of task failure.
-
-    :type: :obj:`int`
-    """)
-
-    retry_interval = Column(Integer, doc="""
-    Interval between task retry attempts (in seconds).
-
-    :type: :obj:`int`
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('inputs', formatting.as_raw_dict(self.inputs))))
-
-    def instantiate(self, container):
-        from . import models
-
-        plugin = self.plugin_specification.plugin \
-            if (self.plugin_specification is not None) and self.plugin_specification.enabled \
-            else None
-
-        operation = models.Operation(name=self.name,
-                                     description=deepcopy_with_locators(self.description),
-                                     relationship_edge=self.relationship_edge,
-                                     implementation=self.implementation,
-                                     dependencies=self.dependencies,
-                                     executor=self.executor,
-                                     plugin=plugin,
-                                     function=self.function,
-                                     max_attempts=self.max_attempts,
-                                     retry_interval=self.retry_interval,
-                                     operation_template=self)
-
-        utils.instantiate_dict(container, operation.inputs, self.inputs)
-        utils.instantiate_dict(container, operation.configurations, self.configurations)
-
-        return operation
-
-    def validate(self):
-        utils.validate_dict_values(self.inputs)
-        utils.validate_dict_values(self.configurations)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.inputs, report_issues)
-        utils.coerce_dict_values(self.configurations, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                console.puts('Implementation: {0}'.format(
-                    context.style.literal(self.implementation)))
-            if self.dependencies:
-                console.puts('Dependencies: {0}'.format(
-                    ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
-            utils.dump_dict_values(self.inputs, 'Inputs')
-            if self.executor is not None:
-                console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
-            if self.max_attempts is not None:
-                console.puts('Max attempts: {0}'.format(context.style.literal(self.max_attempts)))
-            if self.retry_interval is not None:
-                console.puts('Retry interval: {0}'.format(
-                    context.style.literal(self.retry_interval)))
-            if self.plugin_specification is not None:
-                console.puts('Plugin specification: {0}'.format(
-                    context.style.literal(self.plugin_specification.name)))
-            utils.dump_dict_values(self.configurations, 'Configurations')
-            if self.function is not None:
-                console.puts('Function: {0}'.format(context.style.literal(self.function)))
-
-
-class ArtifactTemplateBase(TemplateModelMixin):
-    """
-    Template for creating an :class:`Artifact` instance, which is a typed file, either provided in a
-    CSAR or downloaded from a repository.
-    """
-
-    __tablename__ = 'artifact_template'
-
-    __private_fields__ = ('type_fk',
-                          'node_template_fk')
-
-    # region association proxies
-
-    # endregion
-
-    # region one_to_one relationships
-
-    # endregion
-
-    # region one_to_many relationships
-
-    @declared_attr
-    def artifacts(cls):
-        """
-        Instantiated artifacts.
-
-        :type: [:class:`Artifact`]
-        """
-        return relationship.one_to_many(cls, 'artifact')
-
-    @declared_attr
-    def properties(cls):
-        """
-        Declarations for associated immutable parameters.
-
-        :type: {:obj:`basestring`: :class:`Property`}
-        """
-        return relationship.one_to_many(cls, 'property', dict_key='name')
-
-    # endregion
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def node_template(cls):
-        """
-        Containing node template.
-
-        :type: :class:`NodeTemplate`
-        """
-        return relationship.many_to_one(cls, 'node_template')
-
-    @declared_attr
-    def type(cls):
-        """
-        Artifact type.
-
-        :type: :class:`Type`
-        """
-        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For ArtifactTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For NodeTemplate one-to-many to ArtifactTemplate"""
-        return relationship.foreign_key('node_template')
-
-    # endregion
-
-    description = Column(Text, doc="""
-    Human-readable description.
-
-    :type: :obj:`basestring`
-    """)
-
-    source_path = Column(Text, doc="""
-    Source path (in CSAR or repository).
-
-    :type: :obj:`basestring`
-    """)
-
-    target_path = Column(Text, doc="""
-    Path at which to install on the destination.
-
-    :type: :obj:`basestring`
-    """)
-
-    repository_url = Column(Text, doc="""
-    Repository URL.
-
-    :type: :obj:`basestring`
-    """)
-
-    repository_credential = Column(modeling_types.StrictDict(basestring, basestring), doc="""
-    Credentials for accessing the repository.
-
-    :type: {:obj:`basestring`: :obj:`basestring`}
-    """)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type.name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, container):
-        from . import models
-        artifact = models.Artifact(name=self.name,
-                                   type=self.type,
-                                   description=deepcopy_with_locators(self.description),
-                                   source_path=self.source_path,
-                                   target_path=self.target_path,
-                                   repository_url=self.repository_url,
-                                   repository_credential=self.repository_credential,
-                                   artifact_template=self)
-        utils.instantiate_dict(container, artifact.properties, self.properties)
-        return artifact
-
-    def validate(self):
-        utils.validate_dict_values(self.properties)
-
-    def coerce_values(self, report_issues):
-        utils.coerce_dict_values(self.properties, report_issues)
-
-    def dump(self):
-        context = ConsumptionContext.get_thread_local()
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Artifact type: {0}'.format(context.style.type(self.type.name)))
-            console.puts('Source path: {0}'.format(context.style.literal(self.source_path)))
-            if self.target_path is not None:
-                console.puts('Target path: {0}'.format(context.style.literal(self.target_path)))
-            if self.repository_url is not None:
-                console.puts('Repository URL: {0}'.format(
-                    context.style.literal(self.repository_url)))
-            if self.repository_credential:
-                console.puts('Repository credential: {0}'.format(
-                    context.style.literal(self.repository_credential)))
-            utils.dump_dict_values(self.properties, 'Properties')
-
-
-class PluginSpecificationBase(TemplateModelMixin):
-    """
-    Requirement for a :class:`Plugin`.
-
-    The actual plugin to be selected depends on those currently installed in ARIA.
-    """
-
-    __tablename__ = 'plugin_specification'
-
-    __private_fields__ = ('service_template_fk',
-                          'plugin_fk')
-
-    version = Column(Text, doc="""
-    Minimum plugin version.
-
-    :type: :obj:`basestring`
-    """)
-
-    enabled = Column(Boolean, nullable=False, default=True, doc="""
-    Whether the plugin is enabled.
-
-    :type: :obj:`bool`
-    """)
-
-    # region many_to_one relationships
-
-    @declared_attr
-    def service_template(cls):
-        """
-        Containing service template.
-
-        :type: :class:`ServiceTemplate`
-        """
-        return relationship.many_to_one(cls, 'service_template')
-
-    @declared_attr
-    def plugin(cls): # pylint: disable=method-hidden
-        """
-        Matched plugin.
-
-        :type: :class:`Plugin`
-        """
-        return relationship.many_to_one(cls, 'plugin', back_populates=relationship.NO_BACK_POP)
-
-    # endregion
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to PluginSpecification"""
-        return relationship.foreign_key('service_template', nullable=True)
-
-    @declared_attr
-    def plugin_fk(cls):
-        """For PluginSpecification many-to-one to Plugin"""
-        return relationship.foreign_key('plugin', nullable=True)
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('version', self.version),
-            ('enabled', self.enabled)))
-
-    def coerce_values(self, report_issues):
-        pass
-
-    def resolve(self, model_storage):
-        # TODO: we are planning a separate "instantiation" module where this will be called or
-        # moved to.
-        plugins = model_storage.plugin.list()
-        matching_plugins = []
-        if plugins:
-            for plugin in plugins:
-                if (plugin.name == self.name) and \
-                    ((self.version is None) or \
-                     (VersionString(plugin.package_version) >= self.version)):
-                    matching_plugins.append(plugin)
-        self.plugin = None
-        if matching_plugins:
-            # Select the highest version of the plugin
-            key = lambda plugin: VersionString(plugin.package_version).key
-            self.plugin = sorted(matching_plugins, key=key)[-1]
-        return self.plugin is not None
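# A minimal standalone sketch of the resolution strategy above: among installed
# plugins with a matching name and a package version at or above the requested
# minimum, pick the highest version. ``Plugin`` and ``version_key`` are simplified
# stand-ins for the ARIA model and ``VersionString``.
from collections import namedtuple

Plugin = namedtuple('Plugin', ['name', 'package_version'])

def version_key(version):
    # Simplified numeric key; VersionString handles richer version formats.
    return tuple(int(part) for part in version.split('.'))

def resolve_plugin(name, min_version, installed):
    matching = [p for p in installed
                if p.name == name and (min_version is None or
                                       version_key(p.package_version) >= version_key(min_version))]
    if not matching:
        return None
    return sorted(matching, key=lambda p: version_key(p.package_version))[-1]

installed = [Plugin('openstack', '1.0.0'), Plugin('openstack', '1.2.0'), Plugin('aws', '2.0.0')]
print(resolve_plugin('openstack', '1.1.0', installed))
# Plugin(name='openstack', package_version='1.2.0')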
diff --git a/apache-ariatosca-0.1.1/aria/modeling/types.py b/apache-ariatosca-0.1.1/aria/modeling/types.py
deleted file mode 100644
index c34326e..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/types.py
+++ /dev/null
@@ -1,318 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Allows JSON-serializable collections to be used as SQLAlchemy column types.
-"""
-
-import json
-from collections import namedtuple
-
-from sqlalchemy import (
-    TypeDecorator,
-    VARCHAR,
-    event
-)
-from sqlalchemy.ext import mutable
-from ruamel import yaml
-
-from . import exceptions
-
-
-class _MutableType(TypeDecorator):
-    """
-    Base class for JSON-serialized column types.
-    """
-    @property
-    def python_type(self):
-        raise NotImplementedError
-
-    def process_literal_param(self, value, dialect):
-        pass
-
-    impl = VARCHAR
-
-    def process_bind_param(self, value, dialect):
-        if value is not None:
-            value = json.dumps(value)
-        return value
-
-    def process_result_value(self, value, dialect):
-        if value is not None:
-            value = json.loads(value)
-        return value
-
-
-class Dict(_MutableType):
-    """
-    JSON-serializable dict type for SQLAlchemy columns.
-    """
-    @property
-    def python_type(self):
-        return dict
-
-
-class List(_MutableType):
-    """
-    JSON-serializable list type for SQLAlchemy columns.
-    """
-    @property
-    def python_type(self):
-        return list
-
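# A minimal runnable sketch of how a TypeDecorator such as ``Dict`` above stores a
# plain dict as JSON text and restores it on load. The ``JSONDict``/``Config`` names
# and the in-memory SQLite setup are invented for the example; only the bind/result
# hooks mirror the code above.
import json
from sqlalchemy import Column, Integer, VARCHAR, TypeDecorator, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

class JSONDict(TypeDecorator):
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value is not None else None

Base = declarative_base()

class Config(Base):
    __tablename__ = 'config'
    id = Column(Integer, primary_key=True)
    data = Column(JSONDict)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Config(data={'retries': 3}))
session.commit()
print(session.query(Config).one().data)  # {'retries': 3}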
-
-class _StrictDictMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain dictionaries to MutableDict.
-        """
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, dict):
-                    for k, v in value.items():
-                        cls._assert_strict_key(k)
-                        cls._assert_strict_value(v)
-                    return cls(value)
-                return mutable.MutableDict.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableDict', cause=e)
-
-    def __setitem__(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).__setitem__(key, value)
-
-    def setdefault(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).setdefault(key, value)
-
-    def update(self, *args, **kwargs):
-        for k, v in kwargs.items():
-            self._assert_strict_key(k)
-            self._assert_strict_value(v)
-        super(_StrictDictMixin, self).update(*args, **kwargs)
-
-    @classmethod
-    def _assert_strict_key(cls, key):
-        if cls._key_cls is not None and not isinstance(key, cls._key_cls):
-            raise exceptions.ValueFormatException('key type was set strictly to {0}, but was {1}'
-                                                  .format(cls._key_cls, type(key)))
-
-    @classmethod
-    def _assert_strict_value(cls, value):
-        if cls._value_cls is not None and not isinstance(value, cls._value_cls):
-            raise exceptions.ValueFormatException('value type was set strictly to {0}, but was {1}'
-                                                  .format(cls._value_cls, type(value)))
-
-
-class _MutableDict(mutable.MutableDict):
-    """
-    Enables tracking for dict values.
-    """
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain dictionaries to MutableDict.
-        """
-        try:
-            return mutable.MutableDict.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce value', cause=e)
-
-
-class _StrictListMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, list):
-                    for item in value:
-                        cls._assert_item(item)
-                    return cls(value)
-                return mutable.MutableList.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableList', cause=e)
-
-    def __setitem__(self, index, value):
-        """
-        Detect list set events and emit change events.
-        """
-        self._assert_item(value)
-        super(_StrictListMixin, self).__setitem__(index, value)
-
-    def append(self, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).append(item)
-
-    def extend(self, items):
-        for item in items:
-            self._assert_item(item)
-        super(_StrictListMixin, self).extend(items)
-
-    def insert(self, index, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).insert(index, item)
-
-    @classmethod
-    def _assert_item(cls, item):
-        if cls._item_cls is not None and not isinstance(item, cls._item_cls):
-            raise exceptions.ValueFormatException('item type was set strictly to {0}, but was {1}'
-                                                  .format(cls._item_cls, type(item)))
-
-
-class _MutableList(mutable.MutableList):
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain lists to MutableList.
-        """
-        try:
-            return mutable.MutableList.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableList', cause=e)
-
-
-_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls')
-_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls')
-
-class _StrictDict(object):
-    """
-    Factory for strict dict types and their listener classes. No type class or listener class is
-    created more than once; if a matching type class already exists, it is returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, key_cls=None, value_cls=None):
-        strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls)
-        if strict_dict_map_key not in self._strict_map:
-            key_cls_name = getattr(key_cls, '__name__', str(key_cls))
-            value_cls_name = getattr(value_cls, '__name__', str(value_cls))
-            # Create the type class itself. This class is what gets returned (and used by the
-            # SQLAlchemy Column).
-            strict_dict_cls = type(
-                'StrictDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (Dict, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableDict class and the _StrictDictMixin,
-            # while setting the necessary _key_cls and _value_cls as class attributes.
-            listener_cls = type(
-                'StrictMutableDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (_StrictDictMixin, _MutableDict),
-                {'_key_cls': key_cls, '_value_cls': value_cls}
-            )
-            yaml.representer.RoundTripRepresenter.add_representer(
-                listener_cls, yaml.representer.RoundTripRepresenter.represent_dict)
-            self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls,
-                                                                 listener_cls=listener_cls)
-
-        return self._strict_map[strict_dict_map_key].type_cls
-
-
-StrictDict = _StrictDict()
-"""
-JSON-serializable strict dict type for SQLAlchemy columns.
-
-:param key_cls: type to enforce for keys, or ``None`` to accept any key type
-:param value_cls: type to enforce for values, or ``None`` to accept any value type
-"""
-
-
-class _StrictList(object):
-    """
-    Factory for strict list types and their listener classes. No type class or listener class is
-    created more than once; if a matching type class already exists, it is returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, item_cls=None):
-
-        if item_cls not in self._strict_map:
-            item_cls_name = getattr(item_cls, '__name__', str(item_cls))
-            # Create the type class itself. This class is what gets returned (and used by the
-            # SQLAlchemy Column).
-            strict_list_cls = type(
-                'StrictList_{0}'.format(item_cls_name),
-                (List, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableList class and the _StrictListMixin,
-            # while setting the necessary _item_cls as class attribute.
-            listener_cls = type(
-                'StrictMutableList_{0}'.format(item_cls_name),
-                (_StrictListMixin, _MutableList),
-                {'_item_cls': item_cls}
-            )
-            yaml.representer.RoundTripRepresenter.add_representer(
-                listener_cls, yaml.representer.RoundTripRepresenter.represent_list)
-            self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls,
-                                                      listener_cls=listener_cls)
-
-        return self._strict_map[item_cls].type_cls
-
-
-StrictList = _StrictList()
-"""
-JSON-serializable strict list type for SQLAlchemy columns.
-
-:param item_cls: type to enforce for items, or ``None`` to accept any item type
-"""
-
-
-def _mutable_association_listener(mapper, cls):
-    strict_dict_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values())
-
-    strict_list_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values())
-
-    for prop in mapper.column_attrs:
-        column_type = prop.columns[0].type
-        # Dict Listeners
-        if type(column_type) in strict_dict_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_dict_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, Dict):
-            _MutableDict.associate_with_attribute(getattr(cls, prop.key))
-
-        # List Listeners
-        if type(column_type) in strict_list_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_list_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, List):
-            _MutableList.associate_with_attribute(getattr(cls, prop.key))
-
-
-_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener)
-
-
-def _register_mutable_association_listener():
-    event.listen(*_LISTENER_ARGS)
-
-_register_mutable_association_listener()
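
The deleted aria/modeling/types.py above combines a JSON-serializing TypeDecorator with SQLAlchemy's mutation-tracking extension so that in-place changes to dict/list columns are persisted. A minimal, self-contained sketch of the same pattern using plain SQLAlchemy (the table, column and class names here are illustrative, not part of ARIA):

import json

from sqlalchemy import Column, Integer, VARCHAR, TypeDecorator, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import sessionmaker


class JSONDict(TypeDecorator):
    """Stores a dict as a JSON string in a VARCHAR column."""
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value is not None else None


Base = declarative_base()


class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    # MutableDict.as_mutable makes in-place changes mark the row as dirty
    attributes = Column(MutableDict.as_mutable(JSONDict))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

item = Item(attributes={'x': 1})
session.add(item)
session.commit()
item.attributes['y'] = 2      # tracked thanks to MutableDict
session.commit()
print(session.query(Item).one().attributes)    # {'x': 1, 'y': 2}

The strict variants in the module add isinstance checks on keys, values and items on top of this, and the mapper listener routes each strict column type to its matching listener class.
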
diff --git a/apache-ariatosca-0.1.1/aria/modeling/utils.py b/apache-ariatosca-0.1.1/aria/modeling/utils.py
deleted file mode 100644
index 5193cd9..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/utils.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Miscellaneous modeling utilities.
-"""
-
-import os
-from json import JSONEncoder
-from StringIO import StringIO
-
-from . import exceptions
-from ..parser.consumption import ConsumptionContext
-from ..utils.console import puts
-from ..utils.type import validate_value_type
-from ..utils.collections import OrderedDict
-from ..utils.formatting import string_list_as_string
-
-
-class ModelJSONEncoder(JSONEncoder):
-    """
-    JSON encoder that automatically unwraps ``value`` attributes.
-    """
-    def __init__(self, *args, **kwargs):
-        # Just here to make sure Sphinx doesn't grab the base constructor's docstring
-        super(ModelJSONEncoder, self).__init__(*args, **kwargs)
-
-    def default(self, o):  # pylint: disable=method-hidden
-        from .mixins import ModelMixin
-        if isinstance(o, ModelMixin):
-            if hasattr(o, 'value'):
-                dict_to_return = o.to_dict(fields=('value',))
-                return dict_to_return['value']
-            else:
-                return o.to_dict()
-        else:
-            return JSONEncoder.default(self, o)
-
-
-class NodeTemplateContainerHolder(object):
-    """
-    Wrapper that allows using a :class:`~aria.modeling.models.NodeTemplate` model directly as the
-    ``container_holder`` input for :func:`~aria.modeling.functions.evaluate`.
-    """
-
-    def __init__(self, node_template):
-        self.container = node_template
-        self.service = None
-
-    @property
-    def service_template(self):
-        return self.container.service_template
-
-
-def merge_parameter_values(parameter_values, declared_parameters, model_cls):
-    """
-    Merges parameter values according to those declared by a type.
-
-    Exceptions will be raised for validation errors.
-
-    :param parameter_values: provided parameter values or None
-    :type parameter_values: {:obj:`basestring`: object}
-    :param declared_parameters: declared parameters
-    :type declared_parameters: {:obj:`basestring`: :class:`~aria.modeling.models.Parameter`}
-    :param model_cls: parameter model class used to wrap the merged values
-    :return: the merged parameters
-    :rtype: {:obj:`basestring`: :class:`~aria.modeling.models.Parameter`}
-    :raises ~aria.modeling.exceptions.UndeclaredParametersException: if a key in
-     ``parameter_values`` does not exist in ``declared_parameters``
-    :raises ~aria.modeling.exceptions.MissingRequiredParametersException: if a key in
-     ``declared_parameters`` does not exist in ``parameter_values`` and also has no default value
-    :raises ~aria.modeling.exceptions.ParametersOfWrongTypeException: if a value in
-     ``parameter_values`` does not match its type in ``declared_parameters``
-    """
-
-    parameter_values = parameter_values or {}
-
-    undeclared_names = list(set(parameter_values.keys()).difference(declared_parameters.keys()))
-    if undeclared_names:
-        raise exceptions.UndeclaredParametersException(
-            'Undeclared parameters have been provided: {0}; Declared: {1}'
-            .format(string_list_as_string(undeclared_names),
-                    string_list_as_string(declared_parameters.keys())))
-
-    parameters = OrderedDict()
-
-    missing_names = []
-    wrong_type_values = OrderedDict()
-    for declared_parameter_name, declared_parameter in declared_parameters.iteritems():
-        if declared_parameter_name in parameter_values:
-            # Value has been provided
-            value = parameter_values[declared_parameter_name]
-
-            # Validate type
-            type_name = declared_parameter.type_name
-            try:
-                validate_value_type(value, type_name)
-            except ValueError:
-                wrong_type_values[declared_parameter_name] = type_name
-            except RuntimeError:
-                # TODO: This error shouldn't be raised (or caught), but right now we lack support
-                # for custom data_types, which will raise this error. Skipping their validation.
-                pass
-
-            # Wrap in Parameter model
-            parameters[declared_parameter_name] = model_cls( # pylint: disable=unexpected-keyword-arg
-                name=declared_parameter_name,
-                type_name=type_name,
-                description=declared_parameter.description,
-                value=value)
-        elif declared_parameter.value is not None:
-            # Copy default value from declaration
-            parameters[declared_parameter_name] = declared_parameter.instantiate(None)
-        else:
-            # Required value has not been provided
-            missing_names.append(declared_parameter_name)
-
-    if missing_names:
-        raise exceptions.MissingRequiredParametersException(
-            'Declared parameters {0} have not been provided values'
-            .format(string_list_as_string(missing_names)))
-
-    if wrong_type_values:
-        error_message = StringIO()
-        for param_name, param_type in wrong_type_values.iteritems():
-            error_message.write('Parameter "{0}" is not of declared type "{1}"{2}'
-                                .format(param_name, param_type, os.linesep))
-        raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
-
-    return parameters
-
-
-def coerce_dict_values(the_dict, report_issues=False):
-    if not the_dict:
-        return
-    coerce_list_values(the_dict.itervalues(), report_issues)
-
-
-def coerce_list_values(the_list, report_issues=False):
-    if not the_list:
-        return
-    for value in the_list:
-        value.coerce_values(report_issues)
-
-
-def validate_dict_values(the_dict):
-    if not the_dict:
-        return
-    validate_list_values(the_dict.itervalues())
-
-
-def validate_list_values(the_list):
-    if not the_list:
-        return
-    for value in the_list:
-        value.validate()
-
-
-def instantiate_dict(container, the_dict, from_dict):
-    if not from_dict:
-        return
-    for name, value in from_dict.iteritems():
-        value = value.instantiate(container)
-        if value is not None:
-            the_dict[name] = value
-
-
-def instantiate_list(container, the_list, from_list):
-    if not from_list:
-        return
-    for value in from_list:
-        value = value.instantiate(container)
-        if value is not None:
-            the_list.append(value)
-
-
-def dump_list_values(the_list, name):
-    if not the_list:
-        return
-    puts('%s:' % name)
-    context = ConsumptionContext.get_thread_local()
-    with context.style.indent:
-        for value in the_list:
-            value.dump()
-
-
-def dump_dict_values(the_dict, name):
-    if not the_dict:
-        return
-    dump_list_values(the_dict.itervalues(), name)
-
-
-def dump_interfaces(interfaces, name='Interfaces'):
-    if not interfaces:
-        return
-    puts('%s:' % name)
-    context = ConsumptionContext.get_thread_local()
-    with context.style.indent:
-        for interface in interfaces.itervalues():
-            interface.dump()
-
-
-class classproperty(object):                                                                        # pylint: disable=invalid-name
-    def __init__(self, f):
-        self._func = f
-        self.__doc__ = f.__doc__
-
-    def __get__(self, instance, owner):
-        return self._func(owner)
-
-
-def fix_doc(cls):
-    """
-    Class decorator to use the last base class's docstring and make sure Sphinx doesn't grab the
-    base constructor's docstring.
-    """
-    original_init = cls.__init__
-    def init(*args, **kwargs):
-        original_init(*args, **kwargs)
-
-    cls.__init__ = init
-    cls.__doc__ = cls.__bases__[-1].__doc__
-
-    return cls
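
merge_parameter_values() above rejects undeclared names, copies in declared defaults, and reports required parameters that were not provided. A rough standalone illustration of those checks, with plain dicts standing in for the Parameter models (the function and field names here are hypothetical):

from collections import OrderedDict


def merge_values(provided, declared):
    """declared maps name -> {'type': ..., 'default': ...}; returns name -> value."""
    provided = provided or {}

    undeclared = set(provided) - set(declared)
    if undeclared:
        raise ValueError('undeclared parameters: {0}'.format(', '.join(sorted(undeclared))))

    merged, missing = OrderedDict(), []
    for name, spec in declared.items():
        if name in provided:
            merged[name] = provided[name]
        elif spec.get('default') is not None:
            merged[name] = spec['default']
        else:
            missing.append(name)

    if missing:
        raise ValueError('missing required parameters: {0}'.format(', '.join(missing)))
    return merged


print(merge_values({'port': 8080}, {'port': {'type': int}, 'host': {'default': 'localhost'}}))
# OrderedDict([('port', 8080), ('host', 'localhost')])
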
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py
deleted file mode 100644
index f400142..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Common code for contexts.
-"""
-
-import logging
-from contextlib import contextmanager
-from functools import partial
-
-import jinja2
-
-from aria import (
-    logger as aria_logger,
-    modeling
-)
-from aria.storage import exceptions
-
-from ...utils.uuid import generate_uuid
-
-
-class BaseContext(object):
-    """
-    Base class for contexts.
-    """
-
-    INSTRUMENTATION_FIELDS = (
-        modeling.models.Node.attributes,
-        modeling.models.Node.properties,
-        modeling.models.NodeTemplate.attributes,
-        modeling.models.NodeTemplate.properties
-    )
-
-    class PrefixedLogger(object):
-        def __init__(self, base_logger, task_id=None):
-            self._logger = base_logger
-            self._task_id = task_id
-
-        def __getattr__(self, attribute):
-            if attribute.upper() in logging._levelNames:
-                return partial(self._logger_with_task_id, _level=attribute)
-            else:
-                return getattr(self._logger, attribute)
-
-        def _logger_with_task_id(self, *args, **kwargs):
-            level = kwargs.pop('_level')
-            kwargs.setdefault('extra', {})['task_id'] = self._task_id
-            return getattr(self._logger, level)(*args, **kwargs)
-
-    def __init__(self,
-                 name,
-                 service_id,
-                 model_storage,
-                 resource_storage,
-                 execution_id,
-                 workdir=None,
-                 **kwargs):
-        super(BaseContext, self).__init__(**kwargs)
-        self._name = name
-        self._id = generate_uuid(variant='uuid')
-        self._model = model_storage
-        self._resource = resource_storage
-        self._service_id = service_id
-        self._workdir = workdir
-        self._execution_id = execution_id
-        self.logger = None
-
-    def _register_logger(self, level=None, task_id=None):
-        self.logger = self.PrefixedLogger(
-            logging.getLogger(aria_logger.TASK_LOGGER_NAME), task_id=task_id)
-        self.logger.setLevel(level or logging.DEBUG)
-        if not self.logger.handlers:
-            self.logger.addHandler(self._get_sqla_handler())
-
-    def _get_sqla_handler(self):
-        return aria_logger.create_sqla_log_handler(model=self._model,
-                                                   log_cls=modeling.models.Log,
-                                                   execution_id=self._execution_id)
-
-    def __repr__(self):
-        return (
-            '{name}(name={self.name}, '
-            'deployment_id={self._service_id})'
-            .format(name=self.__class__.__name__, self=self))
-
-    @contextmanager
-    def logging_handlers(self, handlers=None):
-        handlers = handlers or []
-        try:
-            for handler in handlers:
-                self.logger.addHandler(handler)
-            yield self.logger
-        finally:
-            for handler in handlers:
-                self.logger.removeHandler(handler)
-
-    @property
-    def model(self):
-        """
-        Storage model API ("MAPI").
-        """
-        return self._model
-
-    @property
-    def resource(self):
-        """
-        Storage resource API ("RAPI").
-        """
-        return self._resource
-
-    @property
-    def service_template(self):
-        """
-        Service template model.
-        """
-        return self.service.service_template
-
-    @property
-    def service(self):
-        """
-        Service instance model.
-        """
-        return self.model.service.get(self._service_id)
-
-    @property
-    def name(self):
-        """
-        Operation name.
-        """
-        return self._name
-
-    @property
-    def id(self):
-        """
-        Operation ID.
-        """
-        return self._id
-
-    def download_resource(self, destination, path=None):
-        """
-        Download a service template resource from the storage resource API ("RAPI").
-        """
-        try:
-            self.resource.service.download(entry_id=str(self.service.id),
-                                           destination=destination,
-                                           path=path)
-        except exceptions.StorageError:
-            self.resource.service_template.download(entry_id=str(self.service_template.id),
-                                                    destination=destination,
-                                                    path=path)
-
-    def download_resource_and_render(self, destination, path=None, variables=None):
-        """
-        Downloads a service template resource from the resource storage and renders its content as a
-        Jinja template using the provided variables. ``ctx`` is available to the template without
-        providing it explicitly.
-        """
-        resource_content = self.get_resource(path=path)
-        resource_content = self._render_resource(resource_content=resource_content,
-                                                 variables=variables)
-        with open(destination, 'wb') as f:
-            f.write(resource_content)
-
-    def get_resource(self, path=None):
-        """
-        Reads a service instance resource as string from the resource storage.
-        """
-        try:
-            return self.resource.service.read(entry_id=str(self.service.id), path=path)
-        except exceptions.StorageError:
-            return self.resource.service_template.read(entry_id=str(self.service_template.id),
-                                                       path=path)
-
-    def get_resource_and_render(self, path=None, variables=None):
-        """
-        Reads a service instance resource as string from the resource storage and renders it as a
-        Jinja template using the provided variables. ``ctx`` is available to the template without
-        providing it explicitly.
-        """
-        resource_content = self.get_resource(path=path)
-        return self._render_resource(resource_content=resource_content, variables=variables)
-
-    def _render_resource(self, resource_content, variables):
-        variables = variables or {}
-        variables.setdefault('ctx', self)
-        resource_template = jinja2.Template(resource_content)
-        return resource_template.render(variables)
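
The *_and_render methods above treat the fetched resource as a Jinja2 template and always inject the context object as ``ctx``. A small sketch of just that rendering step, with a made-up stand-in for the context:

import jinja2


class FakeCtx(object):
    """Stand-in for an operation/workflow context (illustrative only)."""
    name = 'install_workflow'


def render_resource(resource_content, variables=None, ctx=None):
    variables = dict(variables or {})
    variables.setdefault('ctx', ctx)          # templates can always reference ctx
    return jinja2.Template(resource_content).render(variables)


template = 'Hello {{ user }}, running {{ ctx.name }}'
print(render_resource(template, {'user': 'aria'}, ctx=FakeCtx()))
# Hello aria, running install_workflow
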
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py
deleted file mode 100644
index e46e2b1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Context exceptions.
-"""
-
-from ..exceptions import OrchestratorError
-
-
-class ContextException(OrchestratorError):
-    """
-    Context-based exception
-    """
-    pass
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py
deleted file mode 100644
index 7d5f40c..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Operation contexts.
-"""
-
-import threading
-from contextlib import contextmanager
-
-import aria
-from aria.utils import file
-from . import common
-
-
-class BaseOperationContext(common.BaseContext):
-    """
-    Base class for contexts used during operation creation and execution.
-    """
-
-    def __init__(self, task_id, actor_id, **kwargs):
-        self._task_id = task_id
-        self._actor_id = actor_id
-        self._thread_local = threading.local()
-        self._destroy_session = kwargs.pop('destroy_session', False)
-        logger_level = kwargs.pop('logger_level', None)
-        super(BaseOperationContext, self).__init__(**kwargs)
-        self._register_logger(task_id=self.task.id, level=logger_level)
-
-    def __repr__(self):
-        details = 'function={task.function}; ' \
-                  'operation_arguments={task.arguments}'\
-            .format(task=self.task)
-        return '{name}({0})'.format(details, name=self.name)
-
-    @property
-    def task(self):
-        """
-        The task in the model storage
-        :return: Task model
-        """
-        # SQLAlchemy prevents from accessing an object which was created on a different thread.
-        # So we retrieve the object from the storage if the current thread isn't the same as the
-        # original thread.
-
-        if not hasattr(self._thread_local, 'task'):
-            self._thread_local.task = self.model.task.get(self._task_id)
-        return self._thread_local.task
-
-    @property
-    def plugin_workdir(self):
-        """
-        A work directory that is unique to the plugin and the deployment id
-        """
-        if self.task.plugin is None:
-            return None
-        plugin_workdir = '{0}/plugins/{1}/{2}'.format(self._workdir,
-                                                      self.service.id,
-                                                      self.task.plugin.name)
-        file.makedirs(plugin_workdir)
-        return plugin_workdir
-
-    @property
-    def serialization_dict(self):
-        context_dict = {
-            'name': self.name,
-            'service_id': self._service_id,
-            'task_id': self._task_id,
-            'actor_id': self._actor_id,
-            'workdir': self._workdir,
-            'model_storage': self.model.serialization_dict if self.model else None,
-            'resource_storage': self.resource.serialization_dict if self.resource else None,
-            'execution_id': self._execution_id,
-            'logger_level': self.logger.level
-        }
-        return {
-            'context_cls': self.__class__,
-            'context': context_dict
-        }
-
-    @classmethod
-    def instantiate_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
-        if model_storage:
-            model_storage = aria.application_model_storage(**model_storage)
-        if resource_storage:
-            resource_storage = aria.application_resource_storage(**resource_storage)
-
-        return cls(model_storage=model_storage,
-                   resource_storage=resource_storage,
-                   destroy_session=True,
-                   **kwargs)
-
-    def close(self):
-        if self._destroy_session:
-            self.model.log._session.remove()
-            self.model.log._engine.dispose()
-
-    @property
-    @contextmanager
-    def persist_changes(self):
-        yield
-        self.model.task.update(self.task)
-
-
-class NodeOperationContext(BaseOperationContext):
-    """
-    Context for node operations.
-    """
-
-    @property
-    def node_template(self):
-        """
-        The node template of the current operation's node
-        :return:
-        """
-        return self.node.node_template
-
-    @property
-    def node(self):
-        """
-        The node instance of the current operation
-        :return:
-        """
-        return self.model.node.get(self._actor_id)
-
-
-class RelationshipOperationContext(BaseOperationContext):
-    """
-    Context for relationship operations.
-    """
-
-    @property
-    def source_node_template(self):
-        """
-        The source node template
-        :return:
-        """
-        return self.source_node.node_template
-
-    @property
-    def source_node(self):
-        """
-        The source node instance
-        :return:
-        """
-        return self.relationship.source_node
-
-    @property
-    def target_node_template(self):
-        """
-        The target node template
-        :return:
-        """
-        return self.target_node.node_template
-
-    @property
-    def target_node(self):
-        """
-        The target node instance
-        :return:
-        """
-        return self.relationship.target_node
-
-    @property
-    def relationship(self):
-        """
-        The relationship instance of the current operation
-        :return:
-        """
-        return self.model.relationship.get(self._actor_id)
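
The ``task`` property above caches the model object in ``threading.local`` storage because SQLAlchemy objects created on one thread cannot safely be used from another. The caching idiom in isolation looks roughly like this (the loader callable stands in for ``self.model.task.get``):

import threading


class TaskHolder(object):
    def __init__(self, task_id, loader):
        self._task_id = task_id
        self._loader = loader                  # e.g. model_storage.task.get
        self._thread_local = threading.local()

    @property
    def task(self):
        # Each thread lazily loads and caches its own copy of the task object
        if not hasattr(self._thread_local, 'task'):
            self._thread_local.task = self._loader(self._task_id)
        return self._thread_local.task


holder = TaskHolder('task-1', loader=lambda task_id: {'id': task_id, 'status': 'pending'})
print(holder.task['status'])   # 'pending', loaded on first access in the current thread
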
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py
deleted file mode 100644
index a2e1122..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Tools for operations.
-"""
-
-from . import operation
-
-
-class NodeToolBelt(object):
-    """
-    Node operation tool belt.
-    """
-    def __init__(self, operation_context):
-        self._op_context = operation_context
-
-    @property
-    def host_ip(self):
-        """
-        The host IP of the current node
-        :return:
-        """
-        assert isinstance(self._op_context, operation.NodeOperationContext)
-        return self._op_context.node.host.attributes.get('ip')
-
-
-class RelationshipToolBelt(object):
-    """
-    Relationship operation tool belt.
-    """
-    def __init__(self, operation_context):
-        self._op_context = operation_context
-
-
-def toolbelt(operation_context):
-    """
-    Get a toolbelt for the given operation context.
-
-    :param operation_context: operation context to provide tools for
-    """
-    if isinstance(operation_context, operation.NodeOperationContext):
-        return NodeToolBelt(operation_context)
-    elif isinstance(operation_context, operation.RelationshipOperationContext):
-        return RelationshipToolBelt(operation_context)
-    else:
-        raise RuntimeError("Operation context not supported")
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py
deleted file mode 100644
index 738d2fd..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow context.
-"""
-
-import threading
-from contextlib import contextmanager
-
-from .exceptions import ContextException
-from .common import BaseContext
-
-
-class WorkflowContext(BaseContext):
-    """
-    Context used during workflow creation and execution.
-    """
-    def __init__(self,
-                 workflow_name,
-                 parameters=None,
-                 task_max_attempts=1,
-                 task_retry_interval=0,
-                 task_ignore_failure=False,
-                 *args, **kwargs):
-        super(WorkflowContext, self).__init__(*args, **kwargs)
-        self._workflow_name = workflow_name
-        self._parameters = parameters or {}
-        self._task_max_attempts = task_max_attempts
-        self._task_retry_interval = task_retry_interval
-        self._task_ignore_failure = task_ignore_failure
-        self._execution_graph = None
-        self._register_logger()
-
-    def __repr__(self):
-        return (
-            '{name}(deployment_id={self._service_id}, '
-            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
-                name=self.__class__.__name__, self=self))
-
-    @property
-    def workflow_name(self):
-        return self._workflow_name
-
-    @property
-    def execution(self):
-        """
-        Execution model.
-        """
-        return self.model.execution.get(self._execution_id)
-
-    @execution.setter
-    def execution(self, value):
-        """
-        Stores the execution in the storage model API ("MAPI").
-        """
-        self.model.execution.put(value)
-
-    @property
-    def node_templates(self):
-        """
-        Iterates over node templates.
-        """
-        key = 'service_{0}'.format(self.model.node_template.model_cls.name_column_name())
-
-        return self.model.node_template.iter(
-            filters={
-                key: getattr(self.service, self.service.name_column_name())
-            }
-        )
-
-    @property
-    def nodes(self):
-        """
-        Iterates over nodes.
-        """
-        key = 'service_{0}'.format(self.model.node.model_cls.name_column_name())
-        return self.model.node.iter(
-            filters={
-                key: getattr(self.service, self.service.name_column_name())
-            }
-        )
-
-    @property
-    @contextmanager
-    def persist_changes(self):
-        yield
-        self._model.execution.update(self.execution)
-
-
-class _CurrentContext(threading.local):
-    """
-    Provides a thread-local store for the current workflow context.
-    """
-
-    def __init__(self):
-        super(_CurrentContext, self).__init__()
-        self._workflow_context = None
-
-    def _set(self, value):
-        self._workflow_context = value
-
-    def get(self):
-        """
-        Retrieves the current workflow context.
-        """
-        if self._workflow_context is not None:
-            return self._workflow_context
-        raise ContextException("No context was set")
-
-    @contextmanager
-    def push(self, workflow_context):
-        """
-        Switches the current context to the provided context.
-        """
-        prev_workflow_context = self._workflow_context
-        self._set(workflow_context)
-        try:
-            yield self
-        finally:
-            self._set(prev_workflow_context)
-
-current = _CurrentContext()
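
``_CurrentContext.push()`` above is effectively a thread-local stack: it swaps the active workflow context in, yields, and always restores the previous one. The same mechanism reduced to its essentials (class and variable names are illustrative):

import threading
from contextlib import contextmanager


class Current(threading.local):
    def __init__(self):
        super(Current, self).__init__()
        self._value = None

    def get(self):
        if self._value is None:
            raise RuntimeError('no context was set')
        return self._value

    @contextmanager
    def push(self, value):
        previous, self._value = self._value, value
        try:
            yield self
        finally:
            self._value = previous        # restored even if the block raises


current = Current()
with current.push('workflow-A'):
    print(current.get())                  # workflow-A
    with current.push('workflow-B'):
        print(current.get())              # workflow-B
    print(current.get())                  # workflow-A again
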
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py b/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py
deleted file mode 100644
index 4b163d6..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow and operation decorators.
-"""
-
-from functools import partial, wraps
-
-from ..utils.validation import validate_function_arguments
-from ..utils.uuid import generate_uuid
-
-from . import context
-from .workflows.api import task_graph
-
-
-WORKFLOW_DECORATOR_RESERVED_ARGUMENTS = set(('ctx', 'graph'))
-OPERATION_DECORATOR_RESERVED_ARGUMENTS = set(('ctx', 'toolbelt'))
-
-
-def workflow(func=None, suffix_template=''):
-    """
-    Workflow decorator.
-    """
-    if func is None:
-        return partial(workflow, suffix_template=suffix_template)
-
-    @wraps(func)
-    def _wrapper(ctx, **workflow_parameters):
-
-        workflow_name = _generate_name(
-            func_name=func.__name__,
-            suffix_template=suffix_template,
-            ctx=ctx,
-            **workflow_parameters)
-
-        workflow_parameters.setdefault('ctx', ctx)
-        workflow_parameters.setdefault('graph', task_graph.TaskGraph(workflow_name))
-        validate_function_arguments(func, workflow_parameters)
-        with ctx.model.instrument(*ctx.INSTRUMENTATION_FIELDS):
-            with context.workflow.current.push(ctx):
-                func(**workflow_parameters)
-        return workflow_parameters['graph']
-    return _wrapper
-
-
-def operation(func=None, toolbelt=False, suffix_template='', logging_handlers=None):
-    """
-    Operation decorator.
-    """
-
-    if func is None:
-        return partial(operation,
-                       suffix_template=suffix_template,
-                       toolbelt=toolbelt,
-                       logging_handlers=logging_handlers)
-
-    @wraps(func)
-    def _wrapper(**func_kwargs):
-        ctx = func_kwargs['ctx']
-        if toolbelt:
-            operation_toolbelt = context.toolbelt(ctx)
-            func_kwargs.setdefault('toolbelt', operation_toolbelt)
-        validate_function_arguments(func, func_kwargs)
-        with ctx.model.instrument(*ctx.INSTRUMENTATION_FIELDS):
-            return func(**func_kwargs)
-    return _wrapper
-
-
-def _generate_name(func_name, ctx, suffix_template, **custom_kwargs):
-    return '{func_name}.{suffix}'.format(
-        func_name=func_name,
-        suffix=suffix_template.format(ctx=ctx, **custom_kwargs) or generate_uuid(variant='uuid'))
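
Both decorators above use the ``func is None`` / ``functools.partial`` idiom so they can be applied either bare (``@operation``) or with keyword arguments (``@operation(toolbelt=True)``). A stripped-down sketch of that mechanism only; the toolbelt value injected here is a placeholder, not ARIA's real toolbelt:

from functools import partial, wraps


def operation(func=None, toolbelt=False):
    # Called as @operation(toolbelt=True): func is None, so return a configured decorator
    if func is None:
        return partial(operation, toolbelt=toolbelt)

    @wraps(func)
    def _wrapper(**kwargs):
        if toolbelt:
            kwargs.setdefault('toolbelt', 'fake-toolbelt')   # stand-in for context.toolbelt(ctx)
        return func(**kwargs)
    return _wrapper


@operation
def start(ctx):
    return 'start({0})'.format(ctx)


@operation(toolbelt=True)
def configure(ctx, toolbelt):
    return 'configure({0}, {1})'.format(ctx, toolbelt)


print(start(ctx='node-1'))                 # start(node-1)
print(configure(ctx='node-1'))             # configure(node-1, fake-toolbelt)
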
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/events.py b/apache-ariatosca-0.1.1/aria/orchestrator/events.py
deleted file mode 100644
index ef84e5d..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/events.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Orchestrator events.
-"""
-
-from blinker import signal
-
-# workflow engine task signals:
-sent_task_signal = signal('sent_task_signal')
-start_task_signal = signal('start_task_signal')
-on_success_task_signal = signal('success_task_signal')
-on_failure_task_signal = signal('failure_task_signal')
-
-# workflow engine workflow signals:
-start_workflow_signal = signal('start_workflow_signal')
-on_cancelling_workflow_signal = signal('on_cancelling_workflow_signal')
-on_cancelled_workflow_signal = signal('on_cancelled_workflow_signal')
-on_success_workflow_signal = signal('on_success_workflow_signal')
-on_failure_workflow_signal = signal('on_failure_workflow_signal')
-on_resume_workflow_signal = signal('on_resume_workflow_signal')
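
These are ordinary ``blinker`` named signals: the workflow engine sends them and any number of receivers can subscribe. A minimal usage sketch (the receiver function and the sender value are made up for illustration):

from blinker import signal

on_success_task_signal = signal('success_task_signal')


def log_success(sender, **kwargs):
    print('task succeeded: {0}'.format(sender))


# Subscribe, then emit the signal the way the workflow engine would
on_success_task_signal.connect(log_success)
on_success_task_signal.send('task-42')      # prints: task succeeded: task-42
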
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py
deleted file mode 100644
index 384458f..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Orchestrator exceptions.
-"""
-
-from aria.exceptions import AriaError
-
-
-class OrchestratorError(AriaError):
-    """
-    Orchestrator-based exception
-    """
-    pass
-
-
-class InvalidPluginError(AriaError):
-    """
-    Raised when plugin validation fails
-    """
-    pass
-
-
-class PluginAlreadyExistsError(AriaError):
-    """
-    Raised when a plugin with the same package name and package version already exists
-    """
-    pass
-
-
-class TaskRetryException(RuntimeError):
-    """
-    Used internally when ctx.task.retry is called
-    """
-    def __init__(self, message, retry_interval=None):
-        super(TaskRetryException, self).__init__(message)
-        self.retry_interval = retry_interval
-
-
-class TaskAbortException(RuntimeError):
-    """
-    Used internally when ctx.task.abort is called
-    """
-    pass
-
-
-class UndeclaredWorkflowError(AriaError):
-    """
-    Raised when attempting to execute an undeclared workflow
-    """
-    pass
-
-
-class ActiveExecutionsError(AriaError):
-    """
-    Raised when attempting to execute a workflow on a service which already has an active execution
-    """
-    pass
-
-
-class WorkflowImplementationNotFoundError(AriaError):
-    """
-    Raised when attempting to import a workflow's code but the implementation is not found
-    """
-    pass
-
-
-class InvalidWorkflowRunnerParams(AriaError):
-    """
-    Raised when an invalid combination of arguments is passed to the workflow runner
-    """
-    pass
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py
deleted file mode 100644
index ce6746c..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin utilities.
-"""
-
-import json
-import os
-import tempfile
-
-import requests
-
-from . import constants
-from . import exceptions
-
-
-def is_windows():
-    return os.name == 'nt'
-
-
-def download_script(ctx, script_path):
-    split = script_path.split('://')
-    schema = split[0]
-    suffix = script_path.split('/')[-1]
-    file_descriptor, dest_script_path = tempfile.mkstemp(suffix='-{0}'.format(suffix))
-    os.close(file_descriptor)
-    try:
-        if schema in ('http', 'https'):
-            response = requests.get(script_path)
-            if response.status_code == 404:
-                ctx.task.abort('Failed to download script: {0} (status code: {1})'
-                               .format(script_path, response.status_code))
-            content = response.text
-            with open(dest_script_path, 'wb') as f:
-                f.write(content)
-        else:
-            ctx.download_resource(destination=dest_script_path, path=script_path)
-    except:
-        os.remove(dest_script_path)
-        raise
-    return dest_script_path
-
-
-def create_process_config(script_path, process, operation_kwargs, quote_json_env_vars=False):
-    """
-    Updates a process with its environment variables, and returns it.
-
-    Gets a dict representing a process and a dict representing the environment variables. Converts
-    each environment variable to a format of::
-
-        <string representing the name of the variable>:
-        <json formatted string representing the value of the variable>.
-
-    Finally, updates the process with the newly formatted environment variables, and returns the
-    process.
-
-    :param script_path: path of the script to execute; used as the base of the generated command
-    :param process: dict representing a process
-    :type process: dict
-    :param operation_kwargs: dict representing environment variables that should exist in the
-     process's running environment.
-    :type operation_kwargs: dict
-    :param quote_json_env_vars: whether to wrap JSON-encoded environment values in single quotes
-    :type quote_json_env_vars: bool
-    :return: process updated with its environment variables
-    :rtype: dict
-    """
-    process = process or {}
-    env_vars = operation_kwargs.copy()
-    if 'ctx' in env_vars:
-        del env_vars['ctx']
-    env_vars.update(process.get('env', {}))
-    for k, v in env_vars.items():
-        if isinstance(v, (dict, list, tuple, bool, int, float)):
-            v = json.dumps(v)
-            if quote_json_env_vars:
-                v = "'{0}'".format(v)
-        if is_windows():
-            # These <k,v> environment variables will subsequently
-            # be used in a subprocess.Popen() call, as the `env` parameter.
-            # In some windows python versions, if an environment variable
-            # name is not of type str (e.g. unicode), the Popen call will
-            # fail.
-            k = str(k)
-            # The windows shell removes all double quotes - escape them
-            # to still be able to pass JSON in env vars to the shell.
-            v = v.replace('"', '\\"')
-        del env_vars[k]
-        env_vars[k] = str(v)
-    process['env'] = env_vars
-    args = process.get('args')
-    command = script_path
-    command_prefix = process.get('command_prefix')
-    if command_prefix:
-        command = '{0} {1}'.format(command_prefix, command)
-    if args:
-        command = ' '.join([command] + [str(a) for a in args])
-    process['command'] = command
-    return process
-
-
-def patch_ctx(ctx):
-    ctx._error = None
-    task = ctx.task
-
-    def _validate_legal_action():
-        if ctx._error is not None:
-            ctx._error = RuntimeError(constants.ILLEGAL_CTX_OPERATION_MESSAGE)
-            raise ctx._error
-
-    def abort_operation(message=None):
-        _validate_legal_action()
-        ctx._error = exceptions.ScriptException(message=message, retry=False)
-        return ctx._error
-    task.abort = abort_operation
-
-    def retry_operation(message=None, retry_interval=None):
-        _validate_legal_action()
-        ctx._error = exceptions.ScriptException(message=message,
-                                                retry=True,
-                                                retry_interval=retry_interval)
-        return ctx._error
-    task.retry = retry_operation
-
-
-def check_error(ctx, error_check_func=None, reraise=False):
-    _error = ctx._error
-    # this happens when a script calls task.abort/task.retry more than once
-    if isinstance(_error, RuntimeError):
-        ctx.task.abort(str(_error))
-    # ScriptException is populated by the ctx proxy server when task.abort or task.retry
-    # are called
-    elif isinstance(_error, exceptions.ScriptException):
-        if _error.retry:
-            ctx.task.retry(_error.message, _error.retry_interval)
-        else:
-            ctx.task.abort(_error.message)
-    # local and ssh operations may pass an additional logic check for errors here
-    if error_check_func:
-        error_check_func()
-    # if this function is called from within an ``except`` clause, a re-raise may be required
-    if reraise:
-        raise  # pylint: disable=misplaced-bare-raise
-    return _error
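
create_process_config() above JSON-encodes non-string values so they survive the trip through process environment variables, which can only hold strings. A compact round-trip illustration of that idea (the variable names are arbitrary):

import json
import os
import subprocess
import sys

env = dict(os.environ)
env['PORTS'] = json.dumps([8080, 8081])          # lists/dicts must become strings in the environment
env['CONFIG'] = json.dumps({'retries': 3})

script = "import json, os; print(json.loads(os.environ['PORTS'])[1] + json.loads(os.environ['CONFIG'])['retries'])"
print(subprocess.check_output([sys.executable, '-c', script], env=env).strip())   # b'8084'
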
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py
deleted file mode 100644
index 1953912..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin constants.
-"""
-import os
-import tempfile
-
-from . import exceptions
-
-# related to local
-PYTHON_SCRIPT_FILE_EXTENSION = '.py'
-POWERSHELL_SCRIPT_FILE_EXTENSION = '.ps1'
-DEFAULT_POWERSHELL_EXECUTABLE = 'powershell'
-
-# related to both local and ssh
-ILLEGAL_CTX_OPERATION_MESSAGE = 'ctx may only abort or retry once'
-
-# related to ssh
-DEFAULT_BASE_DIR = os.path.join(tempfile.gettempdir(), 'aria-ctx')
-FABRIC_ENV_DEFAULTS = {
-    'connection_attempts': 5,
-    'timeout': 10,
-    'forward_agent': False,
-    'abort_on_prompts': True,
-    'keepalive': 0,
-    'linewise': False,
-    'pool_size': 0,
-    'skip_bad_hosts': False,
-    'status': False,
-    'disable_known_hosts': True,
-    'combine_stderr': True,
-    'abort_exception': exceptions.TaskException,
-}
-VALID_FABRIC_GROUPS = set([
-    'status',
-    'aborts',
-    'warnings',
-    'running',
-    'stdout',
-    'stderr',
-    'user',
-    'everything'
-])
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py
deleted file mode 100644
index 1310c21..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-``ctx`` proxy client implementation.
-"""
-
-import argparse
-import json
-import os
-import sys
-import urllib2
-
-
-# Environment variable for the socket url (used by clients to locate the socket)
-CTX_SOCKET_URL = 'CTX_SOCKET_URL'
-
-
-class _RequestError(RuntimeError):
-
-    def __init__(self, ex_message, ex_type, ex_traceback):
-        super(_RequestError, self).__init__('{0}: {1}'.format(ex_type, ex_message))
-        self.ex_type = ex_type
-        self.ex_message = ex_message
-        self.ex_traceback = ex_traceback
-
-
-def _http_request(socket_url, request, method, timeout):
-    opener = urllib2.build_opener(urllib2.HTTPHandler)
-    request = urllib2.Request(socket_url, data=json.dumps(request))
-    request.get_method = lambda: method
-    response = opener.open(request, timeout=timeout)
-
-    if response.code != 200:
-        raise RuntimeError('Request failed: {0}'.format(response))
-    return json.loads(response.read())
-
-
-def _client_request(socket_url, args, timeout, method='POST'):
-    response = _http_request(
-        socket_url=socket_url,
-        request={'args': args},
-        method=method,
-        timeout=timeout
-    )
-    payload = response.get('payload')
-    response_type = response.get('type')
-    if response_type == 'error':
-        ex_type = payload['type']
-        ex_message = payload['message']
-        ex_traceback = payload['traceback']
-        raise _RequestError(ex_message, ex_type, ex_traceback)
-    elif response_type == 'stop_operation':
-        raise SystemExit(payload['message'])
-    else:
-        return payload
-
-
-def _parse_args(args):
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-t', '--timeout', type=int, default=30)
-    parser.add_argument('--socket-url', default=os.environ.get(CTX_SOCKET_URL))
-    parser.add_argument('--json-arg-prefix', default='@')
-    parser.add_argument('-j', '--json-output', action='store_true')
-    parser.add_argument('args', nargs='*')
-    args = parser.parse_args(args=args)
-    if not args.socket_url:
-        raise RuntimeError('Missing CTX_SOCKET_URL environment variable '
-                           'or socket_url command line argument. (ctx is supposed to be executed '
-                           'within an operation context)')
-    return args
-
-
-def _process_args(json_prefix, args):
-    processed_args = []
-    for arg in args:
-        if arg.startswith(json_prefix):
-            arg = json.loads(arg[1:])
-        processed_args.append(arg)
-    return processed_args
-
-
-def main(args=None):
-    args = _parse_args(args)
-    response = _client_request(
-        args.socket_url,
-        args=_process_args(args.json_arg_prefix, args.args),
-        timeout=args.timeout)
-    if args.json_output:
-        response = json.dumps(response)
-    else:
-        if not response:
-            response = ''
-        response = str(response)
-    sys.stdout.write(response)
-
-if __name__ == '__main__':
-    main()
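
To make the argument conventions above concrete, here is a small, self-contained sketch (not part of the codebase) of the '@' JSON-prefix handling that _process_args() implements: any positional argument starting with the prefix is decoded as JSON before being forwarded to the proxy server. On the wire, the processed arguments become the ``args`` list in the JSON body POSTed to the URL taken from CTX_SOCKET_URL.

import json

def process_args(json_prefix, args):
    # Mirrors _process_args() above: prefixed arguments are decoded as JSON.
    processed = []
    for arg in args:
        if arg.startswith(json_prefix):
            arg = json.loads(arg[len(json_prefix):])
        processed.append(arg)
    return processed

print(process_args('@', ['node', 'attributes', 'port', '@8080']))
# -> ['node', 'attributes', 'port', 8080]
print(process_args('@', ['node', 'properties', 'ports', '@[8080, 8443]']))
# -> ['node', 'properties', 'ports', [8080, 8443]]
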
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py
deleted file mode 100644
index ca910e0..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-``ctx`` proxy server implementation.
-"""
-
-import collections
-import json
-import re
-import socket
-import threading
-import traceback
-import Queue
-import StringIO
-import wsgiref.simple_server
-
-import bottle
-from aria import modeling
-
-from .. import exceptions
-
-
-class CtxProxy(object):
-
-    def __init__(self, ctx, ctx_patcher=(lambda *args, **kwargs: None)):
-        self.ctx = ctx
-        self._ctx_patcher = ctx_patcher
-        self.port = _get_unused_port()
-        self.socket_url = 'http://localhost:{0}'.format(self.port)
-        self.server = None
-        self._started = Queue.Queue(1)
-        self.thread = self._start_server()
-        self._started.get(timeout=5)
-
-    def _start_server(self):
-
-        class BottleServerAdapter(bottle.ServerAdapter):
-            proxy = self
-
-            def close_session(self):
-                self.proxy.ctx.model.log._session.remove()
-
-            def run(self, app):
-
-                class Server(wsgiref.simple_server.WSGIServer):
-                    allow_reuse_address = True
-                    bottle_server = self
-
-                    def handle_error(self, request, client_address):
-                        pass
-
-                    def serve_forever(self, poll_interval=0.5):
-                        try:
-                            wsgiref.simple_server.WSGIServer.serve_forever(self, poll_interval)
-                        finally:
-                            # Once shutdown is called, we need to close the session.
-                            # If the session is not closed properly, it might raise warnings,
-                            # or even lock the database.
-                            self.bottle_server.close_session()
-
-                class Handler(wsgiref.simple_server.WSGIRequestHandler):
-                    def address_string(self):
-                        return self.client_address[0]
-
-                    def log_request(*args, **kwargs):  # pylint: disable=no-method-argument
-                        if not self.quiet:
-                            return wsgiref.simple_server.WSGIRequestHandler.log_request(*args,
-                                                                                        **kwargs)
-                server = wsgiref.simple_server.make_server(
-                    host=self.host,
-                    port=self.port,
-                    app=app,
-                    server_class=Server,
-                    handler_class=Handler)
-                self.proxy.server = server
-                self.proxy._started.put(True)
-                server.serve_forever(poll_interval=0.1)
-
-        def serve():
-            # Since task is a thread_local object, we need to patch it inside the server thread.
-            self._ctx_patcher(self.ctx)
-
-            bottle_app = bottle.Bottle()
-            bottle_app.post('/', callback=self._request_handler)
-            bottle.run(
-                app=bottle_app,
-                host='localhost',
-                port=self.port,
-                quiet=True,
-                server=BottleServerAdapter)
-        thread = threading.Thread(target=serve)
-        thread.daemon = True
-        thread.start()
-        return thread
-
-    def close(self):
-        if self.server:
-            self.server.shutdown()
-            self.server.server_close()
-
-    def _request_handler(self):
-        request = bottle.request.body.read()  # pylint: disable=no-member
-        response = self._process(request)
-        return bottle.LocalResponse(
-            body=json.dumps(response, cls=modeling.utils.ModelJSONEncoder),
-            status=200,
-            headers={'content-type': 'application/json'}
-        )
-
-    def _process(self, request):
-        try:
-            with self.ctx.model.instrument(*self.ctx.INSTRUMENTATION_FIELDS):
-                typed_request = json.loads(request)
-                args = typed_request['args']
-                payload = _process_ctx_request(self.ctx, args)
-                result_type = 'result'
-                if isinstance(payload, exceptions.ScriptException):
-                    payload = dict(message=str(payload))
-                    result_type = 'stop_operation'
-                result = {'type': result_type, 'payload': payload}
-        except Exception as e:
-            traceback_out = StringIO.StringIO()
-            traceback.print_exc(file=traceback_out)
-            payload = {
-                'type': type(e).__name__,
-                'message': str(e),
-                'traceback': traceback_out.getvalue()
-            }
-            result = {'type': 'error', 'payload': payload}
-
-        return result
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args, **kwargs):
-        self.close()
-
-
-def _process_ctx_request(ctx, args):
-    current = ctx
-    num_args = len(args)
-    index = 0
-    while index < num_args:
-        arg = args[index]
-        attr = _desugar_attr(current, arg)
-        if attr:
-            current = getattr(current, attr)
-        elif isinstance(current, collections.MutableMapping):
-            key = arg
-            path_dict = _PathDictAccess(current)
-            if index + 1 == num_args:
-                # read dict prop by path
-                value = path_dict.get(key)
-                current = value
-            elif index + 2 == num_args:
-                # set dict prop by path
-                value = args[index + 1]
-                path_dict.set(key, value)
-                current = None
-            else:
-                raise RuntimeError('Illegal argument while accessing dict')
-            break
-        elif callable(current):
-            kwargs = {}
-            remaining_args = args[index:]
-            if isinstance(remaining_args[-1], collections.MutableMapping):
-                kwargs = remaining_args[-1]
-                remaining_args = remaining_args[:-1]
-            current = current(*remaining_args, **kwargs)
-            break
-        else:
-            raise RuntimeError('{0} cannot be processed in {1}'.format(arg, args))
-        index += 1
-    if callable(current):
-        current = current()
-    return current
-
-
-def _desugar_attr(obj, attr):
-    if not isinstance(attr, basestring):
-        return None
-    if hasattr(obj, attr):
-        return attr
-    attr = attr.replace('-', '_')
-    if hasattr(obj, attr):
-        return attr
-    return None
-
-
-class _PathDictAccess(object):
-    pattern = re.compile(r"(.+)\[(\d+)\]")
-
-    def __init__(self, obj):
-        self.obj = obj
-
-    def set(self, prop_path, value):
-        obj, prop_name = self._get_parent_obj_prop_name_by_path(prop_path)
-        obj[prop_name] = value
-
-    def get(self, prop_path):
-        value = self._get_object_by_path(prop_path)
-        return value
-
-    def _get_object_by_path(self, prop_path, fail_on_missing=True):
-        # when setting a nested object, make sure to also set all the
-        # intermediate path objects
-        current = self.obj
-        for prop_segment in prop_path.split('.'):
-            match = self.pattern.match(prop_segment)
-            if match:
-                index = int(match.group(2))
-                property_name = match.group(1)
-                if property_name not in current:
-                    self._raise_illegal(prop_path)
-                if not isinstance(current[property_name], list):
-                    self._raise_illegal(prop_path)
-                current = current[property_name][index]
-            else:
-                if prop_segment not in current:
-                    if fail_on_missing:
-                        self._raise_illegal(prop_path)
-                    else:
-                        current[prop_segment] = {}
-                current = current[prop_segment]
-        return current
-
-    def _get_parent_obj_prop_name_by_path(self, prop_path):
-        split = prop_path.split('.')
-        if len(split) == 1:
-            return self.obj, prop_path
-        parent_path = '.'.join(split[:-1])
-        parent_obj = self._get_object_by_path(parent_path, fail_on_missing=False)
-        prop_name = split[-1]
-        return parent_obj, prop_name
-
-    @staticmethod
-    def _raise_illegal(prop_path):
-        raise RuntimeError('illegal path: {0}'.format(prop_path))
-
-
-def _get_unused_port():
-    sock = socket.socket()
-    sock.bind(('127.0.0.1', 0))
-    _, port = sock.getsockname()
-    sock.close()
-    return port
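
The least obvious part of the server is how an argument list is resolved against the operation context: attributes are looked up (with ``-`` desugared to ``_``), mappings are accessed via dotted paths with optional list indices, and callables are invoked with any remaining arguments (a trailing mapping is treated as kwargs). As a rough, self-contained illustration of the dotted-path syntax handled by _PathDictAccess above (sample data is hypothetical):

import re

_INDEX = re.compile(r"(.+)\[(\d+)\]")

def get_by_path(obj, path):
    # Read-only sketch of _PathDictAccess._get_object_by_path() above.
    current = obj
    for segment in path.split('.'):
        match = _INDEX.match(segment)
        if match:
            current = current[match.group(1)][int(match.group(2))]
        else:
            current = current[segment]
    return current

doc = {'webserver': {'port': 8080}, 'servers': [{'ip': '10.0.0.1'}]}
print(get_by_path(doc, 'webserver.port'))   # 8080
print(get_by_path(doc, 'servers[0].ip'))    # 10.0.0.1
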
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py
deleted file mode 100644
index 6dec293..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for managing globals for the environment.
-"""
-
-def create_initial_globals(path):
-    """
-    Emulates a ``globals()`` call in a freshly loaded module.
-
-    The implementation of this function is likely to raise a couple of questions. If you read the
-    implementation and nothing bothered you, feel free to skip the rest of this docstring.
-
-    First, why is this function in its own module and not, say, in the same module of the other
-    environment-related functions? Second, why is it implemented in such a way that copies the
-    globals, then deletes the item that represents this function, and then changes some other
-    entries?
-
-    Well, these two questions can be answered with one (elaborate) explanation. If this function
-    were in the same module as the other environment-related functions, then we would have had to
-    delete more items from globals than just ``create_initial_globals``. That is because all of the
-    other function names would also be in globals, and since there is no built-in mechanism that
-    returns the names of user-defined objects, that approach would be overkill.
-
-    *But why do we rely on the copy-existing-globals-and-delete-entries method, when it seems to
-    force us to put ``create_initial_globals`` in its own file?*
-
-    Well, because there is no easier method of creating globals of a newly loaded module.
-
-    *How about hard coding a ``globals`` dict? It seems that there are very few entries:
-    ``__doc__``, ``__file__``, ``__name__``, ``__package__`` (but don't forget ``__builtins__``).*
-
-    That would be coupling our implementation to a specific ``globals`` implementation. What if
-    ``globals`` were to change?
-    """
-    copied_globals = globals().copy()
-    copied_globals.update({
-        '__doc__': 'Dynamically executed script',
-        '__file__': path,
-        '__name__': '__main__',
-        '__package__': None
-    })
-    del copied_globals[create_initial_globals.__name__]
-    return copied_globals
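
A toy illustration (not part of the codebase) of what these fresh globals are for: executing a script with ``__name__`` set to ``'__main__'`` makes its usual entry-point guard fire, as if the script had been run directly. The real implementation deliberately copies this module's globals() rather than hard-coding such a dict, for the reasons the docstring gives; the path below is hypothetical.

script_source = "if __name__ == '__main__':\n    print('running as a script')"
fresh_globals = {
    '__doc__': 'Dynamically executed script',
    '__file__': '/tmp/example_script.py',   # hypothetical path
    '__name__': '__main__',
    '__package__': None,
}
exec(script_source, fresh_globals)   # prints: running as a script
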
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py
deleted file mode 100644
index f201fae..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin exceptions.
-"""
-
-class ProcessException(Exception):
-    """
-    Raised when local scripts and remote SSH commands fail.
-    """
-
-    def __init__(self, stderr=None, stdout=None, command=None, exit_code=None):
-        super(ProcessException, self).__init__(stderr)
-        self.command = command
-        self.exit_code = exit_code
-        self.stdout = stdout
-        self.stderr = stderr
-
-
-class TaskException(Exception):
-    """
-    Raised when remote ssh scripts fail.
-    """
-
-
-class ScriptException(Exception):
-    """
-    Used by the ``ctx`` proxy server when task.retry or task.abort are called by scripts.
-    """
-
-    def __init__(self, message=None, retry=None, retry_interval=None):
-        super(ScriptException, self).__init__(message)
-        self.retry = retry
-        self.retry_interval = retry_interval
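
A brief sketch of how these exceptions are consumed by the runners further down in this diff: a non-zero exit code is wrapped in a ProcessException that carries the command, exit code and captured output for later reporting. The command and output below are hypothetical, and the class is re-declared so the sketch is self-contained.

class ProcessException(Exception):
    # Same shape as the class above.
    def __init__(self, stderr=None, stdout=None, command=None, exit_code=None):
        super(ProcessException, self).__init__(stderr)
        self.command = command
        self.exit_code = exit_code
        self.stdout = stdout
        self.stderr = stderr

try:
    raise ProcessException(command='./install.sh', exit_code=1, stderr='disk full')
except ProcessException as e:
    print('{0} failed with exit code {1}: {2}'.format(e.command, e.exit_code, e.stderr))
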
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py
deleted file mode 100644
index f55aa50..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Instantiation of :class:`~aria.modeling.models.Operation` models.
-"""
-
-# TODO: this module will eventually be moved to a new "aria.instantiation" package
-
-from ...utils.type import full_type_name
-from ...utils.formatting import safe_repr
-from ...utils.collections import OrderedDict
-from ...parser import validation
-from ...parser.consumption import ConsumptionContext
-from ...modeling.functions import Function
-
-
-def configure_operation(operation):
-    host = None
-    interface = operation.interface
-    if interface.node is not None:
-        host = interface.node.host
-    elif interface.relationship is not None:
-        if operation.relationship_edge is True:
-            host = interface.relationship.target_node.host
-        else: # either False or None (None meaning that edge was not specified)
-            host = interface.relationship.source_node.host
-
-    _configure_common(operation)
-    if host is None:
-        _configure_local(operation)
-    else:
-        _configure_remote(operation)
-
-    # Any remaining un-handled configuration parameters will become extra arguments, available as
-    # kwargs in either "run_script_locally" or "run_script_with_ssh"
-    for key, value in operation.configurations.iteritems():
-        if key not in ('process', 'ssh'):
-            operation.arguments[key] = value.instantiate(None)
-
-
-def _configure_common(operation):
-    """
-    Local and remote operations.
-    """
-
-    from ...modeling.models import Argument
-    operation.arguments['script_path'] = Argument.wrap('script_path', operation.implementation,
-                                                       'Relative path to the executable file.')
-    operation.arguments['process'] = Argument.wrap('process', _get_process(operation),
-                                                   'Sub-process configuration.')
-
-
-def _configure_local(operation):
-    """
-    Local operation.
-    """
-
-    from . import operations
-    operation.function = '{0}.{1}'.format(operations.__name__,
-                                          operations.run_script_locally.__name__)
-
-
-def _configure_remote(operation):
-    """
-    Remote SSH operation via Fabric.
-    """
-
-    from ...modeling.models import Argument
-    from . import operations
-
-    ssh = _get_ssh(operation)
-
-    # Defaults
-    # TODO: find a way to configure these generally in the service template
-    default_user = ''
-    default_password = ''
-    if 'user' not in ssh:
-        ssh['user'] = default_user
-    if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
-        ssh['password'] = default_password
-
-    operation.arguments['use_sudo'] = Argument.wrap('use_sudo', ssh.get('use_sudo', False),
-                                                    'Whether to execute with sudo.')
-
-    operation.arguments['hide_output'] = Argument.wrap('hide_output', ssh.get('hide_output', []),
-                                                       'Hide output of these Fabric groups.')
-
-    fabric_env = {}
-    if 'warn_only' in ssh:
-        fabric_env['warn_only'] = ssh['warn_only']
-    fabric_env['user'] = ssh.get('user')
-    fabric_env['password'] = ssh.get('password')
-    fabric_env['key'] = ssh.get('key')
-    fabric_env['key_filename'] = ssh.get('key_filename')
-    if 'address' in ssh:
-        fabric_env['host_string'] = ssh['address']
-
-    # Make sure we have a user
-    if fabric_env.get('user') is None:
-        context = ConsumptionContext.get_thread_local()
-        context.validation.report('must configure "ssh.user" for "{0}"'
-                                  .format(operation.implementation),
-                                  level=validation.Issue.BETWEEN_TYPES)
-
-    # Make sure we have an authentication value
-    if (fabric_env.get('password') is None) and \
-        (fabric_env.get('key') is None) and \
-        (fabric_env.get('key_filename') is None):
-        context = ConsumptionContext.get_thread_local()
-        context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
-                                  'for "{0}"'
-                                  .format(operation.implementation),
-                                  level=validation.Issue.BETWEEN_TYPES)
-
-    operation.arguments['fabric_env'] = Argument.wrap('fabric_env', fabric_env,
-                                                      'Fabric configuration.')
-
-    operation.function = '{0}.{1}'.format(operations.__name__,
-                                          operations.run_script_with_ssh.__name__)
-
-
-def _get_process(operation):
-    value = (operation.configurations.get('process')._value
-             if 'process' in operation.configurations
-             else None)
-    if value is None:
-        return {}
-    _validate_type(value, dict, 'process')
-    value = OrderedDict(value)
-    for k, v in value.iteritems():
-        if k == 'eval_python':
-            value[k] = _coerce_bool(v, 'process.eval_python')
-        elif k == 'cwd':
-            _validate_type(v, basestring, 'process.cwd')
-        elif k == 'command_prefix':
-            _validate_type(v, basestring, 'process.command_prefix')
-        elif k == 'args':
-            value[k] = _dict_to_list_of_strings(v, 'process.args')
-        elif k == 'env':
-            _validate_type(v, dict, 'process.env')
-        else:
-            context = ConsumptionContext.get_thread_local()
-            context.validation.report('unsupported configuration parameter: "process.{0}"'
-                                      .format(k),
-                                      level=validation.Issue.BETWEEN_TYPES)
-    return value
-
-
-def _get_ssh(operation):
-    value = (operation.configurations.get('ssh')._value
-             if 'ssh' in operation.configurations
-             else None)
-    if value is None:
-        return {}
-    _validate_type(value, dict, 'ssh')
-    value = OrderedDict(value)
-    for k, v in value.iteritems():
-        if k == 'use_sudo':
-            value[k] = _coerce_bool(v, 'ssh.use_sudo')
-        elif k == 'hide_output':
-            value[k] = _dict_to_list_of_strings(v, 'ssh.hide_output')
-        elif k == 'warn_only':
-            value[k] = _coerce_bool(v, 'ssh.warn_only')
-        elif k == 'user':
-            _validate_type(v, basestring, 'ssh.user')
-        elif k == 'password':
-            _validate_type(v, basestring, 'ssh.password')
-        elif k == 'key':
-            _validate_type(v, basestring, 'ssh.key')
-        elif k == 'key_filename':
-            _validate_type(v, basestring, 'ssh.key_filename')
-        elif k == 'address':
-            _validate_type(v, basestring, 'ssh.address')
-        else:
-            context = ConsumptionContext.get_thread_local()
-            context.validation.report('unsupported configuration parameter: "ssh.{0}"'.format(k),
-                                      level=validation.Issue.BETWEEN_TYPES)
-    return value
-
-
-def _validate_type(value, the_type, name):
-    if isinstance(value, Function):
-        return
-    if not isinstance(value, the_type):
-        context = ConsumptionContext.get_thread_local()
-        context.validation.report('"{0}" configuration is not a {1}: {2}'
-                                  .format(name, full_type_name(the_type), safe_repr(value)),
-                                  level=validation.Issue.BETWEEN_TYPES)
-
-
-def _coerce_bool(value, name):
-    if value is None:
-        return None
-    if isinstance(value, bool):
-        return value
-    _validate_type(value, basestring, name)
-    if value == 'true':
-        return True
-    elif value == 'false':
-        return False
-    else:
-        context = ConsumptionContext.get_thread_local()
-        context.validation.report('"{0}" configuration is not "true" or "false": {1}'
-                                  .format(name, safe_repr(value)),
-                                  level=validation.Issue.BETWEEN_TYPES)
-
-
-def _dict_to_list_of_strings(the_dict, name):
-    _validate_type(the_dict, dict, name)
-    value = []
-    for k in sorted(the_dict):
-        v = the_dict[k]
-        _validate_type(v, basestring, '{0}.{1}'.format(name, k))
-        value.append(v)
-    return value
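
To make the validation above concrete, here is a sketch (all values are hypothetical) of ``process`` and ``ssh`` configuration dicts that would pass _get_process() and _get_ssh(): booleans may be given as the strings 'true'/'false', and ``args``/``hide_output`` are given as dicts whose values are flattened into a list of strings sorted by key.

def coerce_bool(value):
    # Rough stand-in for _coerce_bool() above, for plain string inputs.
    if isinstance(value, bool) or value is None:
        return value
    return {'true': True, 'false': False}.get(value, value)

process = {
    'eval_python': 'false',                            # coerced to False
    'cwd': '/opt/app',                                 # hypothetical working directory
    'command_prefix': 'bash -e',
    'args': {'1': '--verbose', '2': '--retries=3'},    # flattened to a sorted list of strings
    'env': {'APP_ENV': 'production'},
}
ssh = {
    'user': 'ubuntu',
    'key_filename': '/home/ubuntu/.ssh/id_rsa',
    'address': '10.0.0.5',
    'use_sudo': 'true',                                # coerced to True
    'hide_output': {'1': 'running', '2': 'stdout'},
}
print('{0} {1}'.format(coerce_bool(process['eval_python']), coerce_bool(ssh['use_sudo'])))
# -> False True
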
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/local.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/local.py
deleted file mode 100644
index 04b9ecd..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/local.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Local execution of operations.
-"""
-
-import os
-import subprocess
-import threading
-import StringIO
-
-from . import ctx_proxy
-from . import exceptions
-from . import common
-from . import constants
-from . import environment_globals
-from . import python_script_scope
-
-
-def run_script(ctx, script_path, process, **kwargs):
-    if not script_path:
-        ctx.task.abort('Missing script_path')
-    process = process or {}
-    script_path = common.download_script(ctx, script_path)
-    script_func = _get_run_script_func(script_path, process)
-    return script_func(
-        ctx=ctx,
-        script_path=script_path,
-        process=process,
-        operation_kwargs=kwargs)
-
-
-def _get_run_script_func(script_path, process):
-    if _treat_script_as_python_script(script_path, process):
-        return _eval_script_func
-    else:
-        if _treat_script_as_powershell_script(script_path):
-            process.setdefault('command_prefix', constants.DEFAULT_POWERSHELL_EXECUTABLE)
-        return _execute_func
-
-
-def _treat_script_as_python_script(script_path, process):
-    eval_python = process.get('eval_python')
-    script_extension = os.path.splitext(script_path)[1].lower()
-    return (eval_python is True or (script_extension == constants.PYTHON_SCRIPT_FILE_EXTENSION and
-                                    eval_python is not False))
-
-
-def _treat_script_as_powershell_script(script_path):
-    script_extension = os.path.splitext(script_path)[1].lower()
-    return script_extension == constants.POWERSHELL_SCRIPT_FILE_EXTENSION
-
-
-def _eval_script_func(script_path, ctx, operation_kwargs, **_):
-    with python_script_scope(operation_ctx=ctx, operation_inputs=operation_kwargs):
-        execfile(script_path, environment_globals.create_initial_globals(script_path))
-
-
-def _execute_func(script_path, ctx, process, operation_kwargs):
-    os.chmod(script_path, 0755)
-    process = common.create_process_config(
-        script_path=script_path,
-        process=process,
-        operation_kwargs=operation_kwargs)
-    command = process['command']
-    env = os.environ.copy()
-    env.update(process['env'])
-    ctx.logger.info('Executing: {0}'.format(command))
-    with ctx_proxy.server.CtxProxy(ctx, common.patch_ctx) as proxy:
-        env[ctx_proxy.client.CTX_SOCKET_URL] = proxy.socket_url
-        running_process = subprocess.Popen(
-            command,
-            shell=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            env=env,
-            cwd=process.get('cwd'),
-            bufsize=1,
-            close_fds=not common.is_windows())
-        stdout_consumer = _OutputConsumer(running_process.stdout)
-        stderr_consumer = _OutputConsumer(running_process.stderr)
-        exit_code = running_process.wait()
-    stdout_consumer.join()
-    stderr_consumer.join()
-    ctx.logger.info('Execution done (exit_code={0}): {1}'.format(exit_code, command))
-
-    def error_check_func():
-        if exit_code:
-            raise exceptions.ProcessException(
-                command=command,
-                exit_code=exit_code,
-                stdout=stdout_consumer.read_output(),
-                stderr=stderr_consumer.read_output())
-    return common.check_error(ctx, error_check_func=error_check_func)
-
-
-class _OutputConsumer(object):
-
-    def __init__(self, out):
-        self._out = out
-        self._buffer = StringIO.StringIO()
-        self._consumer = threading.Thread(target=self._consume_output)
-        self._consumer.daemon = True
-        self._consumer.start()
-
-    def _consume_output(self):
-        for line in iter(self._out.readline, b''):
-            self._buffer.write(line)
-        self._out.close()
-
-    def read_output(self):
-        return self._buffer.getvalue()
-
-    def join(self):
-        self._consumer.join()
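
A small, self-contained sketch (not part of the codebase) of the dispatch rule implemented by _get_run_script_func() above: '.py' scripts are evaluated in-process unless ``eval_python`` is explicitly false, '.ps1' scripts get the 'powershell' command prefix, and everything else is run as a plain subprocess.

import os

def choose_runner(script_path, process):
    extension = os.path.splitext(script_path)[1].lower()
    eval_python = process.get('eval_python')
    if eval_python is True or (extension == '.py' and eval_python is not False):
        return 'eval in-process'
    if extension == '.ps1':
        process.setdefault('command_prefix', 'powershell')
    return 'run as subprocess'

print(choose_runner('install.py', {}))                       # eval in-process
print(choose_runner('install.py', {'eval_python': False}))   # run as subprocess
print(choose_runner('configure.ps1', {}))                    # run as subprocess (via powershell)
print(choose_runner('start.sh', {}))                         # run as subprocess
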
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/operations.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/operations.py
deleted file mode 100644
index e8de545..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/operations.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Entry point functions.
-"""
-
-from aria.orchestrator import operation
-from . import local as local_operations
-
-
-@operation
-def run_script_locally(ctx,
-                       script_path,
-                       process=None,
-                       **kwargs):
-    return local_operations.run_script(
-        ctx=ctx,
-        script_path=script_path,
-        process=process,
-        **kwargs)
-
-
-@operation
-def run_script_with_ssh(ctx,
-                        script_path,
-                        fabric_env=None,
-                        process=None,
-                        use_sudo=False,
-                        hide_output=None,
-                        **kwargs):
-    return _try_import_ssh().run_script(
-        ctx=ctx,
-        script_path=script_path,
-        fabric_env=fabric_env,
-        process=process,
-        use_sudo=use_sudo,
-        hide_output=hide_output,
-        **kwargs)
-
-
-@operation
-def run_commands_with_ssh(ctx,
-                          commands,
-                          fabric_env=None,
-                          use_sudo=False,
-                          hide_output=None,
-                          **_):
-    return _try_import_ssh().run_commands(
-        ctx=ctx,
-        commands=commands,
-        fabric_env=fabric_env,
-        use_sudo=use_sudo,
-        hide_output=hide_output)
-
-
-def _try_import_ssh():
-    try:
-        from .ssh import operations as ssh_operations
-        return ssh_operations
-    except Exception:
-        raise RuntimeError('Failed to import SSH modules; Have you installed the ARIA SSH extra?')
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/operations.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/operations.py
deleted file mode 100644
index c40e783..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/operations.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for running commands remotely over SSH.
-"""
-
-import os
-import random
-import string
-import tempfile
-import StringIO
-
-import fabric.api
-import fabric.context_managers
-import fabric.contrib.files
-
-from .. import constants
-from .. import exceptions
-from .. import common
-from .. import ctx_proxy
-from . import tunnel
-
-
-_PROXY_CLIENT_PATH = ctx_proxy.client.__file__
-if _PROXY_CLIENT_PATH.endswith('.pyc'):
-    _PROXY_CLIENT_PATH = _PROXY_CLIENT_PATH[:-1]
-
-
-def run_commands(ctx, commands, fabric_env, use_sudo, hide_output, **_):
-    """Runs the provider 'commands' in sequence
-
-    :param commands: a list of commands to run
-    :param fabric_env: fabric configuration
-    """
-    with fabric.api.settings(_hide_output(ctx, groups=hide_output),
-                             **_fabric_env(ctx, fabric_env, warn_only=True)):
-        for command in commands:
-            ctx.logger.info('Running command: {0}'.format(command))
-            run = fabric.api.sudo if use_sudo else fabric.api.run
-            result = run(command)
-            if result.failed:
-                raise exceptions.ProcessException(
-                    command=result.command,
-                    exit_code=result.return_code,
-                    stdout=result.stdout,
-                    stderr=result.stderr)
-
-
-def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **kwargs):
-    process = process or {}
-    paths = _Paths(base_dir=process.get('base_dir', constants.DEFAULT_BASE_DIR),
-                   local_script_path=common.download_script(ctx, script_path))
-    with fabric.api.settings(_hide_output(ctx, groups=hide_output),
-                             **_fabric_env(ctx, fabric_env, warn_only=False)):
-        # the remote host must have the ctx before running any fabric scripts
-        if not fabric.contrib.files.exists(paths.remote_ctx_path):
-            # there may be race conditions with other operations that
-            # may be running in parallel, so we pass -p to make sure
-            # we get 0 exit code if the directory already exists
-            fabric.api.run('mkdir -p {0} && mkdir -p {1}'.format(paths.remote_scripts_dir,
-                                                                 paths.remote_work_dir))
-            # this file has to be present before using ctx
-            fabric.api.put(_PROXY_CLIENT_PATH, paths.remote_ctx_path)
-        process = common.create_process_config(
-            script_path=paths.remote_script_path,
-            process=process,
-            operation_kwargs=kwargs,
-            quote_json_env_vars=True)
-        fabric.api.put(paths.local_script_path, paths.remote_script_path)
-        with ctx_proxy.server.CtxProxy(ctx, _patch_ctx) as proxy:
-            local_port = proxy.port
-            with fabric.context_managers.cd(process.get('cwd', paths.remote_work_dir)):  # pylint: disable=not-context-manager
-                with tunnel.remote(ctx, local_port=local_port) as remote_port:
-                    local_socket_url = proxy.socket_url
-                    remote_socket_url = local_socket_url.replace(str(local_port), str(remote_port))
-                    env_script = _write_environment_script_file(
-                        process=process,
-                        paths=paths,
-                        local_socket_url=local_socket_url,
-                        remote_socket_url=remote_socket_url)
-                    fabric.api.put(env_script, paths.remote_env_script_path)
-                    try:
-                        command = 'source {0} && {1}'.format(paths.remote_env_script_path,
-                                                             process['command'])
-                        run = fabric.api.sudo if use_sudo else fabric.api.run
-                        run(command)
-                    except exceptions.TaskException:
-                        return common.check_error(ctx, reraise=True)
-            return common.check_error(ctx)
-
-
-def _patch_ctx(ctx):
-    common.patch_ctx(ctx)
-    original_download_resource = ctx.download_resource
-    original_download_resource_and_render = ctx.download_resource_and_render
-
-    def _download_resource(func, destination, **kwargs):
-        handle, temp_local_path = tempfile.mkstemp()
-        os.close(handle)
-        try:
-            func(destination=temp_local_path, **kwargs)
-            return fabric.api.put(temp_local_path, destination)
-        finally:
-            os.remove(temp_local_path)
-
-    def download_resource(destination, path=None):
-        _download_resource(
-            func=original_download_resource,
-            destination=destination,
-            path=path)
-    ctx.download_resource = download_resource
-
-    def download_resource_and_render(destination, path=None, variables=None):
-        _download_resource(
-            func=original_download_resource_and_render,
-            destination=destination,
-            path=path,
-            variables=variables)
-    ctx.download_resource_and_render = download_resource_and_render
-
-
-def _hide_output(ctx, groups):
-    """ Hides Fabric's output for every 'entity' in `groups` """
-    groups = set(groups or [])
-    if not groups.issubset(constants.VALID_FABRIC_GROUPS):
-        ctx.task.abort('`hide_output` must be a subset of {0} (Provided: {1})'
-                       .format(', '.join(constants.VALID_FABRIC_GROUPS), ', '.join(groups)))
-    return fabric.api.hide(*groups)
-
-
-def _fabric_env(ctx, fabric_env, warn_only):
-    """Prepares fabric environment variables configuration"""
-    ctx.logger.debug('Preparing fabric environment...')
-    env = constants.FABRIC_ENV_DEFAULTS.copy()
-    env.update(fabric_env or {})
-    env.setdefault('warn_only', warn_only)
-    # validations
-    if (not env.get('host_string')) and (ctx.task) and (ctx.task.actor) and (ctx.task.actor.host):
-        env['host_string'] = ctx.task.actor.host.host_address
-    if not env.get('host_string'):
-        ctx.task.abort('`host_string` not supplied and ip cannot be deduced automatically')
-    if not (env.get('password') or env.get('key_filename') or env.get('key')):
-        ctx.task.abort(
-            'Access credentials not supplied '
-            '(you must supply at least one of `key_filename`, `key` or `password`)')
-    if not env.get('user'):
-        ctx.task.abort('`user` not supplied')
-    ctx.logger.debug('Environment prepared successfully')
-    return env
-
-
-def _write_environment_script_file(process, paths, local_socket_url, remote_socket_url):
-    env_script = StringIO.StringIO()
-    env = process['env']
-    env['PATH'] = '{0}:$PATH'.format(paths.remote_ctx_dir)
-    env['PYTHONPATH'] = '{0}:$PYTHONPATH'.format(paths.remote_ctx_dir)
-    env_script.write('chmod +x {0}\n'.format(paths.remote_script_path))
-    env_script.write('chmod +x {0}\n'.format(paths.remote_ctx_path))
-    env.update({
-        ctx_proxy.client.CTX_SOCKET_URL: remote_socket_url,
-        'LOCAL_{0}'.format(ctx_proxy.client.CTX_SOCKET_URL): local_socket_url
-    })
-    for key, value in env.iteritems():
-        env_script.write('export {0}={1}\n'.format(key, value))
-    return env_script
-
-
-class _Paths(object):
-
-    def __init__(self, base_dir, local_script_path):
-        self.local_script_path = local_script_path
-        self.remote_ctx_dir = base_dir
-        self.base_script_path = os.path.basename(self.local_script_path)
-        self.remote_ctx_path = '{0}/ctx'.format(self.remote_ctx_dir)
-        self.remote_scripts_dir = '{0}/scripts'.format(self.remote_ctx_dir)
-        self.remote_work_dir = '{0}/work'.format(self.remote_ctx_dir)
-        random_suffix = ''.join(random.choice(string.ascii_lowercase + string.digits)
-                                for _ in range(8))
-        remote_path_suffix = '{0}-{1}'.format(self.base_script_path, random_suffix)
-        self.remote_env_script_path = '{0}/env-{1}'.format(self.remote_scripts_dir,
-                                                           remote_path_suffix)
-        self.remote_script_path = '{0}/{1}'.format(self.remote_scripts_dir, remote_path_suffix)
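
For illustration, this is roughly what the environment script produced by _write_environment_script_file() looks like once rendered; it is uploaded next to the user's script and ``source``d before the script runs, so the remote ctx client can find the tunnelled proxy socket. All paths, ports and the random script suffix below are hypothetical.

base_dir = '/tmp/aria-ctx'                               # DEFAULT_BASE_DIR on the remote host
remote_script = base_dir + '/scripts/install.sh-a1b2c3d4'
env = [
    ('PATH', base_dir + ':$PATH'),
    ('PYTHONPATH', base_dir + ':$PYTHONPATH'),
    ('CTX_SOCKET_URL', 'http://localhost:53172'),        # remote end of the SSH tunnel
    ('LOCAL_CTX_SOCKET_URL', 'http://localhost:41523'),  # proxy port on the orchestrator side
]
lines = ['chmod +x {0}'.format(remote_script), 'chmod +x {0}/ctx'.format(base_dir)]
lines.extend('export {0}={1}'.format(key, value) for key, value in env)
print('\n'.join(lines))
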
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/tunnel.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/tunnel.py
deleted file mode 100644
index e76d525..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ssh/tunnel.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# This implementation was copied from the Fabric project directly:
-# https://github.com/fabric/fabric/blob/master/fabric/context_managers.py#L486
-# The purpose was to remove the rtunnel creation printouts here:
-# https://github.com/fabric/fabric/blob/master/fabric/context_managers.py#L547
-
-
-import contextlib
-import select
-import socket
-
-import fabric.api
-import fabric.state
-import fabric.thread_handling
-
-
-@contextlib.contextmanager
-def remote(ctx, local_port, remote_port=0, local_host='localhost', remote_bind_address='127.0.0.1'):
-    """Create a tunnel forwarding a locally-visible port to the remote target."""
-    sockets = []
-    channels = []
-    thread_handlers = []
-
-    def accept(channel, *args, **kwargs):
-        # This seemingly innocent statement appears to do nothing, but the
-        # truth is far from it!
-        # Calling fileno() on a paramiko channel for the first time creates
-        # the required plumbing to make the channel valid for select.
-        # While this would generally happen implicitly inside the _forwarder
-        # function when select is called, by then it may already be too late
-        # and may cause the select loop to hang.
-        # Specifically, when new data arrives on the channel, a flag is set
-        # on an "event" object, which is what makes the select call work.
-        # The problem is that this only happens if the event object is not
-        # None, and it becomes non-None only after channel.fileno() has been
-        # called for the first time. If we wait until _forwarder calls select
-        # for the first time, that may be after initial data has already
-        # reached the channel. Calling fileno() explicitly here, in the
-        # paramiko transport's main event loop, guarantees this will not happen.
-        channel.fileno()
-
-        channels.append(channel)
-        sock = socket.socket()
-        sockets.append(sock)
-
-        try:
-            sock.connect((local_host, local_port))
-        except Exception as e:
-            try:
-                channel.close()
-            except Exception as ex2:
-                close_error = ' (While trying to close channel: {0})'.format(ex2)
-            else:
-                close_error = ''
-            ctx.task.abort('[{0}] rtunnel: cannot connect to {1}:{2} ({3}){4}'
-                           .format(fabric.api.env.host_string, local_host, local_port, e,
-                                   close_error))
-
-        thread_handler = fabric.thread_handling.ThreadHandler('fwd', _forwarder, channel, sock)
-        thread_handlers.append(thread_handler)
-
-    transport = fabric.state.connections[fabric.api.env.host_string].get_transport()
-    remote_port = transport.request_port_forward(
-        remote_bind_address, remote_port, handler=accept)
-
-    try:
-        yield remote_port
-    finally:
-        for sock, chan, thread_handler in zip(sockets, channels, thread_handlers):
-            sock.close()
-            chan.close()
-            thread_handler.thread.join()
-            thread_handler.raise_if_needed()
-        transport.cancel_port_forward(remote_bind_address, remote_port)
-
-
-def _forwarder(chan, sock):
-    # Bidirectionally forward data between a socket and a Paramiko channel.
-    while True:
-        read = select.select([sock, chan], [], [])[0]
-        if sock in read:
-            data = sock.recv(1024)
-            if len(data) == 0:
-                break
-            chan.send(data)
-        if chan in read:
-            data = chan.recv(1024)
-            if len(data) == 0:
-                break
-            sock.send(data)
-    chan.close()
-    sock.close()
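
A toy, self-contained illustration of the select-based forwarding loop in _forwarder() above, using a local socket pair in place of the Paramiko channel (Unix only, since it relies on socket.socketpair()):

import select
import socket

left, right = socket.socketpair()    # stand-ins for the local socket and the channel
left.send(b'hello through the tunnel')
readable = select.select([right], [], [], 1)[0]
if right in readable:
    print(right.recv(1024))
left.close()
right.close()
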
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/plugin.py b/apache-ariatosca-0.1.1/aria/orchestrator/plugin.py
deleted file mode 100644
index 756a28e..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/plugin.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Plugin management.
-"""
-
-import os
-import tempfile
-import subprocess
-import sys
-import zipfile
-from datetime import datetime
-
-import wagon
-
-from . import exceptions
-from ..utils import process as process_utils
-
-_IS_WIN = os.name == 'nt'
-
-
-class PluginManager(object):
-
-    def __init__(self, model, plugins_dir):
-        """
-        :param plugins_dir: root directory in which to install plugins
-        """
-        self._model = model
-        self._plugins_dir = plugins_dir
-
-    def install(self, source):
-        """
-        Install a wagon plugin.
-        """
-        metadata = wagon.show(source)
-        cls = self._model.plugin.model_cls
-
-        os_props = metadata['build_server_os_properties']
-
-        plugin = cls(
-            name=metadata['package_name'],
-            archive_name=metadata['archive_name'],
-            supported_platform=metadata['supported_platform'],
-            supported_py_versions=metadata['supported_python_versions'],
-            distribution=os_props.get('distribution'),
-            distribution_release=os_props['distribution_version'],
-            distribution_version=os_props['distribution_release'],
-            package_name=metadata['package_name'],
-            package_version=metadata['package_version'],
-            package_source=metadata['package_source'],
-            wheels=metadata['wheels'],
-            uploaded_at=datetime.now()
-        )
-        if len(self._model.plugin.list(filters={'package_name': plugin.package_name,
-                                                'package_version': plugin.package_version})):
-            raise exceptions.PluginAlreadyExistsError(
-                'Plugin {0}, version {1} already exists'.format(plugin.package_name,
-                                                                plugin.package_version))
-        self._install_wagon(source=source, prefix=self.get_plugin_dir(plugin))
-        self._model.plugin.put(plugin)
-        return plugin
-
-    def load_plugin(self, plugin, env=None):
-        """
-        Load the plugin into an environment.
-
-        Loading the plugin means the plugin's code and binaries paths will be appended to the
-        environment's ``PATH`` and ``PYTHONPATH``, thereby allowing usage of the plugin.
-
-        :param plugin: plugin to load
-        :param env: environment to load the plugin into; If ``None``, :obj:`os.environ` will be
-         used
-        """
-        env = env or os.environ
-        plugin_dir = self.get_plugin_dir(plugin)
-
-        # Update PATH environment variable to include plugin's bin dir
-        bin_dir = 'Scripts' if _IS_WIN else 'bin'
-        process_utils.append_to_path(os.path.join(plugin_dir, bin_dir), env=env)
-
-        # Update PYTHONPATH environment variable to include plugin's site-packages
-        # directories
-        if _IS_WIN:
-            pythonpath_dirs = [os.path.join(plugin_dir, 'Lib', 'site-packages')]
-        else:
-            # In some linux environments, there will be both a lib and a lib64 directory
-            # with the latter, containing compiled packages.
-            pythonpath_dirs = [os.path.join(
-                plugin_dir, 'lib{0}'.format(b),
-                'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1]),
-                'site-packages') for b in ('', '64')]
-
-        process_utils.append_to_pythonpath(*pythonpath_dirs, env=env)
-
-    def get_plugin_dir(self, plugin):
-        return os.path.join(
-            self._plugins_dir,
-            '{0}-{1}'.format(plugin.package_name, plugin.package_version))
-
-    @staticmethod
-    def validate_plugin(source):
-        """
-        Validate a plugin archive.
-
-        A valid plugin is a `wagon <http://github.com/cloudify-cosmo/wagon>`__ in the zip format
-        (suffix may also be ``.wgn``).
-        """
-        if not zipfile.is_zipfile(source):
-            raise exceptions.InvalidPluginError(
-                'Archive {0} is of an unsupported type. Only '
-                'zip/wgn is allowed'.format(source))
-        with zipfile.ZipFile(source, 'r') as zip_file:
-            infos = zip_file.infolist()
-            try:
-                package_name = infos[0].filename[:infos[0].filename.index('/')]
-                package_json_path = "{0}/{1}".format(package_name, 'package.json')
-                zip_file.getinfo(package_json_path)
-            except (KeyError, ValueError, IndexError):
-                raise exceptions.InvalidPluginError(
-                    'Failed to validate plugin {0} '
-                    '(package.json was not found in archive)'.format(source))
-
-    def _install_wagon(self, source, prefix):
-        pip_freeze_output = self._pip_freeze()
-        file_descriptor, constraint_path = tempfile.mkstemp(prefix='constraint-', suffix='.txt')
-        os.close(file_descriptor)
-        try:
-            with open(constraint_path, 'wb') as constraint:
-                constraint.write(pip_freeze_output)
-            # Install the provided wagon.
-            # * The --prefix install_arg will cause the plugin to be installed under
-            #   plugins_dir/{package_name}-{package_version}, so different plugins don't step on
-            #   each other and don't interfere with the current virtualenv
-            # * The --constraint flag points to a file containing the output of ``pip freeze``.
-            #   It is required to handle cases where plugins depend on some Python package with
-            #   a different version than the one installed in the current virtualenv. Without this
-            #   flag, the existing package will be **removed** from the parent virtualenv and the
-            #   new package will be installed under prefix. With the flag, the existing version will
-            #   remain, and the version requested by the plugin will be ignored.
-            wagon.install(
-                source=source,
-                install_args='--prefix="{prefix}" --constraint="{constraint}"'.format(
-                    prefix=prefix,
-                    constraint=constraint.name),
-                venv=os.environ.get('VIRTUAL_ENV'))
-        finally:
-            os.remove(constraint_path)
-
-    @staticmethod
-    def _pip_freeze():
-        """Run pip freeze in current environment and return the output"""
-        bin_dir = 'Scripts' if os.name == 'nt' else 'bin'
-        pip_path = os.path.join(sys.prefix, bin_dir,
-                                'pip{0}'.format('.exe' if os.name == 'nt' else ''))
-        pip_freeze = subprocess.Popen([pip_path, 'freeze'], stdout=subprocess.PIPE)
-        pip_freeze_output, _ = pip_freeze.communicate()
-        assert not pip_freeze.poll()
-        return pip_freeze_output
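
To clarify what load_plugin() adds to the environment, here is a sketch of the paths it computes for a plugin prefix (the prefix itself is hypothetical): the plugin's bin/Scripts directory goes onto PATH, and its lib/lib64 site-packages directories go onto PYTHONPATH (on Windows a single Lib/site-packages directory is used instead, as the code above shows).

import os
import sys

plugin_dir = '/var/aria/plugins/my-plugin-1.0.0'    # hypothetical plugin prefix
bin_dir = 'Scripts' if os.name == 'nt' else 'bin'
print(os.path.join(plugin_dir, bin_dir))            # appended to PATH

python_dir = 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
for lib in ('lib', 'lib64'):
    # Both are appended to PYTHONPATH; on some Linux distributions compiled
    # packages land under lib64.
    print(os.path.join(plugin_dir, lib, python_dir, 'site-packages'))
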
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflow_runner.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflow_runner.py
deleted file mode 100644
index df1725f..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflow_runner.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Running workflows.
-"""
-
-import os
-import sys
-from datetime import datetime
-
-from . import exceptions
-from .context.workflow import WorkflowContext
-from .workflows import builtin
-from .workflows.core import engine, graph_compiler
-from .workflows.executor.process import ProcessExecutor
-from ..modeling import models
-from ..modeling import utils as modeling_utils
-from ..utils.imports import import_fullname
-
-
-DEFAULT_TASK_MAX_ATTEMPTS = 30
-DEFAULT_TASK_RETRY_INTERVAL = 30
-
-
-class WorkflowRunner(object):
-
-    def __init__(self, model_storage, resource_storage, plugin_manager,
-                 execution_id=None, service_id=None, workflow_name=None, inputs=None, executor=None,
-                 task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
-                 task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
-        """
-        Manages a single workflow execution on a given service.
-
-        :param workflow_name: workflow name
-        :param service_id: service ID
-        :param inputs: key-value dict of inputs for the execution
-        :param model_storage: model storage API ("MAPI")
-        :param resource_storage: resource storage API ("RAPI")
-        :param plugin_manager: plugin manager
-        :param executor: executor for tasks; defaults to a
-         :class:`~aria.orchestrator.workflows.executor.process.ProcessExecutor` instance
-        :param task_max_attempts: maximum number of attempts for each failing task
-        :param task_retry_interval: interval (in seconds) between retry attempts of a failing task
-        """
-
-        if not (execution_id or (workflow_name and service_id)):
-            raise exceptions.InvalidWorkflowRunnerParams(
-                "Either provide an execution id in order to resume a workflow, or a workflow "
-                "name and service id with inputs")
-
-        self._is_resume = execution_id is not None
-
-        self._model_storage = model_storage
-        self._resource_storage = resource_storage
-
-        # the IDs are stored rather than the models themselves, so this module could be used
-        # by several threads without raising errors on model objects shared between threads
-
-        if self._is_resume:
-            self._execution_id = execution_id
-            self._service_id = self.execution.service.id
-            self._workflow_name = model_storage.execution.get(self._execution_id).workflow_name
-        else:
-            self._service_id = service_id
-            self._workflow_name = workflow_name
-            self._validate_workflow_exists_for_service()
-            self._execution_id = self._create_execution_model(inputs).id
-
-        self._workflow_context = WorkflowContext(
-            name=self.__class__.__name__,
-            model_storage=self._model_storage,
-            resource_storage=resource_storage,
-            service_id=self._service_id,
-            execution_id=self._execution_id,
-            workflow_name=self._workflow_name,
-            task_max_attempts=task_max_attempts,
-            task_retry_interval=task_retry_interval)
-
-        # Set default executor and kwargs
-        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
-
-        # transforming the execution inputs to dict, to pass them to the workflow function
-        execution_inputs_dict = dict(inp.unwrapped for inp in self.execution.inputs.values())
-
-        if not self._is_resume:
-            workflow_fn = self._get_workflow_fn()
-            self._tasks_graph = workflow_fn(ctx=self._workflow_context, **execution_inputs_dict)
-            compiler = graph_compiler.GraphCompiler(self._workflow_context, executor.__class__)
-            compiler.compile(self._tasks_graph)
-
-        self._engine = engine.Engine(executors={executor.__class__: executor})
-
-    @property
-    def execution_id(self):
-        return self._execution_id
-
-    @property
-    def execution(self):
-        return self._model_storage.execution.get(self.execution_id)
-
-    @property
-    def service(self):
-        return self._model_storage.service.get(self._service_id)
-
-    def execute(self):
-        self._engine.execute(ctx=self._workflow_context, resuming=self._is_resume)
-
-    def cancel(self):
-        self._engine.cancel_execution(ctx=self._workflow_context)
-
-    def _create_execution_model(self, inputs):
-        execution = models.Execution(
-            created_at=datetime.utcnow(),
-            service=self.service,
-            workflow_name=self._workflow_name,
-            inputs={})
-
-        if self._workflow_name in builtin.BUILTIN_WORKFLOWS:
-            workflow_inputs = dict()  # built-in workflows don't have any inputs
-        else:
-            workflow_inputs = self.service.workflows[self._workflow_name].inputs
-
-        execution.inputs = modeling_utils.merge_parameter_values(inputs,
-                                                                 workflow_inputs,
-                                                                 model_cls=models.Input)
-        # TODO: these two following calls should execute atomically
-        self._validate_no_active_executions(execution)
-        self._model_storage.execution.put(execution)
-        return execution
-
-    def _validate_workflow_exists_for_service(self):
-        if self._workflow_name not in self.service.workflows and \
-                        self._workflow_name not in builtin.BUILTIN_WORKFLOWS:
-            raise exceptions.UndeclaredWorkflowError(
-                'No workflow policy {0} declared in service {1}'
-                .format(self._workflow_name, self.service.name))
-
-    def _validate_no_active_executions(self, execution):
-        active_executions = [e for e in self.service.executions if e.is_active()]
-        if active_executions:
-            raise exceptions.ActiveExecutionsError(
-                "Can't start execution; Service {0} has an active execution with ID {1}"
-                .format(self.service.name, active_executions[0].id))
-
-    def _get_workflow_fn(self):
-        if self._workflow_name in builtin.BUILTIN_WORKFLOWS:
-            return import_fullname('{0}.{1}'.format(builtin.BUILTIN_WORKFLOWS_PATH_PREFIX,
-                                                    self._workflow_name))
-
-        workflow = self.service.workflows[self._workflow_name]
-
-        # TODO: Custom workflow support needs improvement, currently this code uses internal
-        # knowledge of the resource storage; Instead, workflows should probably be loaded
-        # in a similar manner to operation plugins. Also consider passing to import_fullname
-        # as paths instead of appending to sys path.
-        service_template_resources_path = os.path.join(
-            self._resource_storage.service_template.base_path,
-            str(self.service.service_template.id))
-        sys.path.append(service_template_resources_path)
-
-        try:
-            workflow_fn = import_fullname(workflow.function)
-        except ImportError:
-            raise exceptions.WorkflowImplementationNotFoundError(
-                'Could not find workflow {0} function at {1}'.format(
-                    self._workflow_name, workflow.function))
-
-        return workflow_fn
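
As the constructor above indicates, a runner is created either with a service ID and workflow name
(to start a new execution) or with an execution ID (to resume one). A hypothetical usage sketch,
assuming ``model_storage``, ``resource_storage``, ``plugin_manager`` and a ``service`` model have
already been set up:

    # Start a new execution of the built-in 'install' workflow.
    runner = WorkflowRunner(model_storage, resource_storage, plugin_manager,
                            service_id=service.id,
                            workflow_name='install',
                            inputs={})
    runner.execute()

    # Later, resume that execution (e.g. after it was cancelled) by its ID.
    resumed = WorkflowRunner(model_storage, resource_storage, plugin_manager,
                             execution_id=runner.execution_id)
    resumed.execute()
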
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task.py
deleted file mode 100644
index 4c518fc..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Provides the tasks to be entered into the task graph
-"""
-
-from ... import context
-from ....modeling import models
-from ....modeling import utils as modeling_utils
-from ....utils.uuid import generate_uuid
-from .. import exceptions
-
-
-class BaseTask(object):
-    """
-    Base class for tasks.
-    """
-
-    def __init__(self, ctx=None, **kwargs):
-        if ctx is not None:
-            self._workflow_context = ctx
-        else:
-            self._workflow_context = context.workflow.current.get()
-        self._id = generate_uuid(variant='uuid')
-
-    @property
-    def id(self):
-        """
-        UUID4 ID.
-        """
-        return self._id
-
-    @property
-    def workflow_context(self):
-        """
-        Context of the current workflow.
-        """
-        return self._workflow_context
-
-
-class OperationTask(BaseTask):
-    """
-    Executes an operation.
-
-    :ivar name: formatted name (includes actor type, actor name, and interface/operation names)
-    :vartype name: basestring
-    :ivar actor: node or relationship
-    :vartype actor: :class:`~aria.modeling.models.Node` or
-     :class:`~aria.modeling.models.Relationship`
-    :ivar interface_name: interface name on actor
-    :vartype interface_name: basestring
-    :ivar operation_name: operation name on interface
-    :vartype operation_name: basestring
-    :ivar plugin: plugin (or None for default plugin)
-    :vartype plugin: :class:`~aria.modeling.models.Plugin`
-    :ivar function: path to Python function
-    :vartype function: basestring
-    :ivar arguments: arguments to send to Python function
-    :vartype arguments: {:obj:`basestring`: :class:`~aria.modeling.models.Argument`}
-    :ivar ignore_failure: whether to ignore failures
-    :vartype ignore_failure: bool
-    :ivar max_attempts: maximum number of attempts allowed in case of failure
-    :vartype max_attempts: int
-    :ivar retry_interval: interval between retries (in seconds)
-    :vartype retry_interval: float
-    """
-
-    NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
-
-    def __init__(self,
-                 actor,
-                 interface_name,
-                 operation_name,
-                 arguments=None,
-                 ignore_failure=None,
-                 max_attempts=None,
-                 retry_interval=None):
-        """
-        :param actor: node or relationship
-        :type actor: :class:`~aria.modeling.models.Node` or
-         :class:`~aria.modeling.models.Relationship`
-        :param interface_name: interface name on actor
-        :type interface_name: basestring
-        :param operation_name: operation name on interface
-        :type operation_name: basestring
-        :param arguments: override argument values
-        :type arguments: {:obj:`basestring`: object}
-        :param ignore_failure: override whether to ignore failures
-        :type ignore_failure: bool
-        :param max_attempts: override maximum number of attempts allowed in case of failure
-        :type max_attempts: int
-        :param retry_interval: override interval between retries (in seconds)
-        :type retry_interval: float
-        :raises ~aria.orchestrator.workflows.exceptions.OperationNotFoundException: if
-         ``interface_name`` and ``operation_name`` do not refer to an operation on the actor
-        """
-
-        # Creating OperationTask directly should raise an error when there is no
-        # interface/operation.
-        if not has_operation(actor, interface_name, operation_name):
-            raise exceptions.OperationNotFoundException(
-                'Could not find operation "{operation_name}" on interface '
-                '"{interface_name}" for {actor_type} "{actor.name}"'.format(
-                    operation_name=operation_name,
-                    interface_name=interface_name,
-                    actor_type=type(actor).__name__.lower(),
-                    actor=actor)
-            )
-
-        super(OperationTask, self).__init__()
-
-        self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
-                                                     name=actor.name,
-                                                     interface=interface_name,
-                                                     operation=operation_name)
-        self.actor = actor
-        self.interface_name = interface_name
-        self.operation_name = operation_name
-        self.ignore_failure = \
-            self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
-        self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
-        self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
-
-        operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
-        self.plugin = operation.plugin
-        self.function = operation.function
-        self.arguments = modeling_utils.merge_parameter_values(arguments,
-                                                               operation.arguments,
-                                                               model_cls=models.Argument)
-        if getattr(self.actor, 'outbound_relationships', None) is not None:
-            self._context_cls = context.operation.NodeOperationContext
-        elif getattr(self.actor, 'source_node', None) is not None:
-            self._context_cls = context.operation.RelationshipOperationContext
-        else:
-            raise exceptions.TaskCreationException('Could not locate valid context for '
-                                                   '{actor.__class__}'.format(actor=self.actor))
-
-    def __repr__(self):
-        return self.name
-
-
-class StubTask(BaseTask):
-    """
-    Enables creating empty tasks.
-    """
-
-
-class WorkflowTask(BaseTask):
-    """
-    Executes a complete workflow.
-    """
-
-    def __init__(self, workflow_func, **kwargs):
-        """
-        :param workflow_func: function to run
-        :param kwargs: kwargs that would be passed to the workflow_func
-        """
-        super(WorkflowTask, self).__init__(**kwargs)
-        kwargs['ctx'] = self.workflow_context
-        self._graph = workflow_func(**kwargs)
-
-    @property
-    def graph(self):
-        """
-        Graph constructed by the sub workflow.
-        """
-        return self._graph
-
-    def __getattr__(self, item):
-        try:
-            return getattr(self._graph, item)
-        except AttributeError:
-            return super(WorkflowTask, self).__getattribute__(item)
-
-
-def create_task(actor, interface_name, operation_name, **kwargs):
-    """
-    Helper function that enables safe creation of :class:`OperationTask`. If the supplied interface
-    or operation do not exist, ``None`` is returned.
-
-    :param actor: actor for this task
-    :param interface_name: name of the interface
-    :param operation_name: name of the operation
-    :param kwargs: any additional kwargs to be passed to the OperationTask
-    :return: OperationTask or None (if the interface/operation does not exist)
-    """
-    try:
-        return OperationTask(
-            actor,
-            interface_name=interface_name,
-            operation_name=operation_name,
-            **kwargs
-        )
-    except exceptions.OperationNotFoundException:
-        return None
-
-
-def create_relationships_tasks(
-        node, interface_name, source_operation_name=None, target_operation_name=None, **kwargs):
-    """
-    Creates a relationship task (source and target) for all of a node relationships.
-
-    :param basestring source_operation_name: relationship operation name
-    :param basestring interface_name: name of the interface
-    :param source_operation_name:
-    :param target_operation_name:
-    :param node: source node
-    """
-    sub_tasks = []
-    for relationship in node.outbound_relationships:
-        relationship_operations = create_relationship_tasks(
-            relationship,
-            interface_name,
-            source_operation_name=source_operation_name,
-            target_operation_name=target_operation_name,
-            **kwargs)
-        sub_tasks.append(relationship_operations)
-    return sub_tasks
-
-
-def create_relationship_tasks(relationship, interface_name, source_operation_name=None,
-                              target_operation_name=None, **kwargs):
-    """
-    Creates a relationship task (source and target).
-
-    :param relationship: relationship instance itself
-    :param source_operation_name:
-    :param target_operation_name:
-    """
-    operations = []
-    if source_operation_name:
-        operations.append(
-            create_task(
-                relationship,
-                interface_name=interface_name,
-                operation_name=source_operation_name,
-                **kwargs
-            )
-        )
-    if target_operation_name:
-        operations.append(
-            create_task(
-                relationship,
-                interface_name=interface_name,
-                operation_name=target_operation_name,
-                **kwargs
-            )
-        )
-
-    return [o for o in operations if o]
-
-
-def has_operation(actor, interface_name, operation_name):
-    interface = actor.interfaces.get(interface_name, None)
-    return interface and interface.operations.get(operation_name, False)
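
The helpers above are what custom workflows typically use to build graphs safely: ``create_task``
returns ``None`` when the interface or operation is not declared on the actor, so missing
operations are simply skipped. A minimal sketch of a custom workflow using them (the ``restart``
name is illustrative; 'Standard' is the normative lifecycle interface):

    from aria import workflow
    from aria.orchestrator.workflows.api import task

    @workflow
    def restart(ctx, graph, **kwargs):
        for node in ctx.nodes:
            stop = task.create_task(node, 'Standard', 'stop')    # None if not declared
            start = task.create_task(node, 'Standard', 'start')  # None if not declared
            # sequence() filters out empty (None) tasks, so this is safe either way
            graph.sequence(stop, start)
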
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task_graph.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task_graph.py
deleted file mode 100644
index 900a0d1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/api/task_graph.py
+++ /dev/null
@@ -1,295 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Task graph.
-"""
-
-from collections import Iterable
-
-from networkx import DiGraph, topological_sort
-
-from ....utils.uuid import generate_uuid
-from . import task as api_task
-
-
-class TaskNotInGraphError(Exception):
-    """
-    An error representing a scenario where a given task is not in the graph as expected.
-    """
-    pass
-
-
-def _filter_out_empty_tasks(func=None):
-    if func is None:
-        return lambda f: _filter_out_empty_tasks(func=f)
-
-    def _wrapper(task, *tasks, **kwargs):
-        return func(*(t for t in (task,) + tuple(tasks) if t), **kwargs)
-    return _wrapper
-
-
-class TaskGraph(object):
-    """
-    Task graph builder.
-    """
-
-    def __init__(self, name):
-        self.name = name
-        self._id = generate_uuid(variant='uuid')
-        self._graph = DiGraph()
-
-    def __repr__(self):
-        return '{name}(id={self._id}, name={self.name}, graph={self._graph!r})'.format(
-            name=self.__class__.__name__, self=self)
-
-    @property
-    def id(self):
-        """
-        ID of the graph
-        """
-        return self._id
-
-    # graph traversal methods
-
-    @property
-    def tasks(self):
-        """
-        Iterator over tasks in the graph.
-        """
-        for _, data in self._graph.nodes_iter(data=True):
-            yield data['task']
-
-    def topological_order(self, reverse=False):
-        """
-        Topological sort of the graph.
-
-        :param reverse: whether to reverse the sort
-        :return: list which represents the topological sort
-        """
-        for task_id in topological_sort(self._graph, reverse=reverse):
-            yield self.get_task(task_id)
-
-    def get_dependencies(self, dependent_task):
-        """
-        Iterates over the task's dependencies.
-
-        :param dependent_task: task whose dependencies are requested
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if
-         ``dependent_task`` is not in the graph
-        """
-        if not self.has_tasks(dependent_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependent_task.id))
-        for _, dependency_id in self._graph.out_edges_iter(dependent_task.id):
-            yield self.get_task(dependency_id)
-
-    def get_dependents(self, dependency_task):
-        """
-        Iterates over the task's dependents.
-
-        :param dependency_task: task whose dependents are requested
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if
-         ``dependency_task`` is not in the graph
-        """
-        if not self.has_tasks(dependency_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependency_task.id))
-        for dependent_id, _ in self._graph.in_edges_iter(dependency_task.id):
-            yield self.get_task(dependent_id)
-
-    # task methods
-
-    def get_task(self, task_id):
-        """
-        Get a task instance that's been inserted to the graph by the task's ID.
-
-        :param basestring task_id: task ID
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if no task found in
-         the graph with the given ID
-        """
-        if not self._graph.has_node(task_id):
-            raise TaskNotInGraphError('Task id: {0}'.format(task_id))
-        data = self._graph.node[task_id]
-        return data['task']
-
-    @_filter_out_empty_tasks
-    def add_tasks(self, *tasks):
-        """
-        Adds a task to the graph.
-
-        :param task: task
-        :return: list of added tasks
-        :rtype: list
-        """
-        assert all([isinstance(task, (api_task.BaseTask, Iterable)) for task in tasks])
-        return_tasks = []
-
-        for task in tasks:
-            if isinstance(task, Iterable):
-                return_tasks += self.add_tasks(*task)
-            elif not self.has_tasks(task):
-                self._graph.add_node(task.id, task=task)
-                return_tasks.append(task)
-
-        return return_tasks
-
-    @_filter_out_empty_tasks
-    def remove_tasks(self, *tasks):
-        """
-        Removes the provided task from the graph.
-
-        :param task: task
-        :return: list of removed tasks
-        :rtype: list
-        """
-        return_tasks = []
-
-        for task in tasks:
-            if isinstance(task, Iterable):
-                return_tasks += self.remove_tasks(*task)
-            elif self.has_tasks(task):
-                self._graph.remove_node(task.id)
-                return_tasks.append(task)
-
-        return return_tasks
-
-    @_filter_out_empty_tasks
-    def has_tasks(self, *tasks):
-        """
-        Checks whether a task is in the graph.
-
-        :param task: task
-        :return: ``True`` if all tasks are in the graph, otherwise ``False``
-        :rtype: list
-        """
-        assert all(isinstance(t, (api_task.BaseTask, Iterable)) for t in tasks)
-        return_value = True
-
-        for task in tasks:
-            if isinstance(task, Iterable):
-                return_value &= self.has_tasks(*task)
-            else:
-                return_value &= self._graph.has_node(task.id)
-
-        return return_value
-
-    def add_dependency(self, dependent, dependency):
-        """
-        Adds a dependency for one item (task, sequence or parallel) on another.
-
-        The dependent will only be executed after the dependency terminates. If either item is a
-        sequence or a parallel, multiple dependencies may be added.
-
-        :param dependent: dependent (task, sequence or parallel)
-        :param dependency: dependency (task, sequence or parallel)
-        :return: ``True`` if the dependency between the two hadn't already existed, otherwise
-         ``False``
-        :rtype: bool
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if either the
-         dependent or dependency are tasks which are not in the graph
-        """
-        if not (self.has_tasks(dependent) and self.has_tasks(dependency)):
-            raise TaskNotInGraphError()
-
-        if self.has_dependency(dependent, dependency):
-            return
-
-        if isinstance(dependent, Iterable):
-            for dependent_task in dependent:
-                self.add_dependency(dependent_task, dependency)
-        else:
-            if isinstance(dependency, Iterable):
-                for dependency_task in dependency:
-                    self.add_dependency(dependent, dependency_task)
-            else:
-                self._graph.add_edge(dependent.id, dependency.id)
-
-    def has_dependency(self, dependent, dependency):
-        """
-        Checks whether one item (task, sequence or parallel) depends on another.
-
-        Note that if either item is a sequence or a parallel, and only some of the dependencies
-        exist in the graph, this method will return ``False``.
-
-        :param dependent: dependent (task, sequence or parallel)
-        :param dependency: dependency (task, sequence or parallel)
-        :return: ``True`` if the dependency between the two exists, otherwise ``False``
-        :rtype: bool
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if either the
-         dependent or dependency are tasks which are not in the graph
-        """
-        if not (dependent and dependency):
-            return False
-        elif not (self.has_tasks(dependent) and self.has_tasks(dependency)):
-            raise TaskNotInGraphError()
-
-        return_value = True
-
-        if isinstance(dependent, Iterable):
-            for dependent_task in dependent:
-                return_value &= self.has_dependency(dependent_task, dependency)
-        else:
-            if isinstance(dependency, Iterable):
-                for dependency_task in dependency:
-                    return_value &= self.has_dependency(dependent, dependency_task)
-            else:
-                return_value &= self._graph.has_edge(dependent.id, dependency.id)
-
-        return return_value
-
-    def remove_dependency(self, dependent, dependency):
-        """
-        Removes a dependency for one item (task, sequence or parallel) on another.
-
-        Note that if either item is a sequence or a parallel, and only some of the dependencies
-        exist in the graph, this method will not remove any of them and will return ``False``.
-
-        :param dependent: dependent (task, sequence or parallel)
-        :param dependency: dependency (task, sequence or parallel)
-        :return: ``False`` if the dependency between the two hadn't existed, otherwise ``True``
-        :rtype: bool
-        :raises ~aria.orchestrator.workflows.api.task_graph.TaskNotInGraphError: if either the
-         dependent or dependency are tasks which are not in the graph
-        """
-        if not (self.has_tasks(dependent) and self.has_tasks(dependency)):
-            raise TaskNotInGraphError()
-
-        if not self.has_dependency(dependent, dependency):
-            return
-
-        if isinstance(dependent, Iterable):
-            for dependent_task in dependent:
-                self.remove_dependency(dependent_task, dependency)
-        elif isinstance(dependency, Iterable):
-            for dependency_task in dependency:
-                self.remove_dependency(dependent, dependency_task)
-        else:
-            self._graph.remove_edge(dependent.id, dependency.id)
-
-    @_filter_out_empty_tasks
-    def sequence(self, *tasks):
-        """
-        Creates and inserts a sequence into the graph: each task i depends on task i-1.
-
-        :param tasks: iterable of dependencies
-        :return: provided tasks
-        """
-        if tasks:
-            self.add_tasks(*tasks)
-
-            for i in xrange(1, len(tasks)):
-                self.add_dependency(tasks[i], tasks[i-1])
-
-        return tasks
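
To illustrate the graph API above, here is a sketch of a custom workflow that wires stub tasks
together (inside a workflow the current context is available, so tasks can be created directly):

    from aria import workflow
    from aria.orchestrator.workflows.api.task import StubTask

    @workflow
    def example(ctx, graph, **kwargs):
        prepare, deploy, verify = StubTask(), StubTask(), StubTask()

        graph.add_tasks(prepare, deploy, verify)
        graph.add_dependency(deploy, prepare)   # deploy runs only after prepare
        graph.sequence(deploy, verify)          # verify runs only after deploy

        assert graph.has_dependency(deploy, prepare)
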
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/execute_operation.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/execute_operation.py
deleted file mode 100644
index 949f864..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/execute_operation.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Built-in operation execution Workflow.
-"""
-
-from ... import workflow
-from ..api import task
-
-
-@workflow
-def execute_operation(
-        ctx,
-        graph,
-        interface_name,
-        operation_name,
-        operation_kwargs,
-        run_by_dependency_order,
-        type_names,
-        node_template_ids,
-        node_ids,
-        **kwargs):
-    """
-    Built-in operation execution Workflow.
-
-    :param workflow_context: workflow context
-    :param graph: graph which will describe the workflow
-    :param operation: operation name to execute
-    :param operation_kwargs:
-    :param run_by_dependency_order:
-    :param type_names:
-    :param node_template_ids:
-    :param node_ids:
-    :param kwargs:
-    :return:
-    """
-    subgraphs = {}
-    # filtering node instances
-    filtered_nodes = list(_filter_nodes(
-        context=ctx,
-        node_template_ids=node_template_ids,
-        node_ids=node_ids,
-        type_names=type_names))
-
-    if run_by_dependency_order:
-        filtered_node_ids = set(node_instance.id for node_instance in filtered_nodes)
-        for node in ctx.nodes:
-            if node.id not in filtered_node_ids:
-                subgraphs[node.id] = ctx.task_graph(
-                    name='execute_operation_stub_{0}'.format(node.id))
-
-    # registering actual tasks (tracked per node so dependencies can be wired below)
-    for node in filtered_nodes:
-        operation_task = task.OperationTask(
-            node,
-            interface_name=interface_name,
-            operation_name=operation_name,
-            arguments=operation_kwargs
-        )
-        if run_by_dependency_order:
-            subgraphs[node.id] = operation_task
-        graph.add_tasks(operation_task)
-
-    for _, node_sub_workflow in subgraphs.items():
-        graph.add_tasks(node_sub_workflow)
-
-    # adding task dependencies if required
-    if run_by_dependency_order:
-        for node in ctx.nodes:
-            for relationship in node.outbound_relationships:
-                graph.add_dependency(subgraphs[node.id], subgraphs[relationship.target_node.id])
-
-
-def _filter_nodes(context, node_template_ids=(), node_ids=(), type_names=()):
-    def _is_node_template_by_id(node_template_id):
-        return not node_template_ids or node_template_id in node_template_ids
-
-    def _is_node_by_id(node_id):
-        return not node_ids or node_id in node_ids
-
-    def _is_node_by_type(node_type):
-        return not type_names or node_type.name in type_names
-
-    for node in context.nodes:
-        if all((_is_node_template_by_id(node.node_template.id),
-                _is_node_by_id(node.id),
-                _is_node_by_type(node.node_template.type))):
-            yield node
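
Each filter above is permissive when its corresponding argument is empty, so a node is yielded
only if it passes all three checks. A standalone sketch of the same filtering idea, using plain
tuples instead of ARIA node models (names and types are made up):

    from collections import namedtuple

    Node = namedtuple('Node', 'id template_id type_name')

    def filter_nodes(nodes, node_template_ids=(), node_ids=(), type_names=()):
        for node in nodes:
            if all((not node_template_ids or node.template_id in node_template_ids,
                    not node_ids or node.id in node_ids,
                    not type_names or node.type_name in type_names)):
                yield node

    nodes = [Node('n1', 't1', 'WebServer'), Node('n2', 't2', 'Database')]
    print([n.id for n in filter_nodes(nodes, type_names=('Database',))])  # ['n2']
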
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/heal.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/heal.py
deleted file mode 100644
index 07e27b1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/heal.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: skip-file
-
-"""
-Built-in heal workflow.
-"""
-
-from aria import workflow
-
-from .workflows import (install_node, uninstall_node)
-from ..api import task
-
-
-@workflow
-def heal(ctx, graph, node_id):
-    """
-    Built-in heal workflow..
-
-    :param ctx: workflow context
-    :param graph: graph which will describe the workflow.
-    :param node_id: ID of the node to heal
-    :return:
-    """
-    failing_node = ctx.model.node.get(node_id)
-    host_node = ctx.model.node.get(failing_node.host.id)
-    failed_node_subgraph = _get_contained_subgraph(ctx, host_node)
-    failed_node_ids = list(n.id for n in failed_node_subgraph)
-
-    targeted_nodes = [node for node in ctx.nodes
-                      if node.id not in failed_node_ids]
-
-    uninstall_subgraph = task.WorkflowTask(
-        heal_uninstall,
-        failing_nodes=failed_node_subgraph,
-        targeted_nodes=targeted_nodes
-    )
-
-    install_subgraph = task.WorkflowTask(
-        heal_install,
-        failing_nodes=failed_node_subgraph,
-        targeted_nodes=targeted_nodes)
-
-    graph.sequence(uninstall_subgraph, install_subgraph)
-
-
-@workflow(suffix_template='{failing_nodes}')
-def heal_uninstall(ctx, graph, failing_nodes, targeted_nodes):
-    """
-    Uninstall phase of the heal mechanism.
-
-    :param ctx: workflow context
-    :param graph: task graph to edit
-    :param failing_nodes: failing nodes to heal
-    :param targeted_nodes: nodes that are targets of relationships from the failing nodes
-    """
-    node_sub_workflows = {}
-
-    # Create a stub task for each unaffected node
-    for node in targeted_nodes:
-        node_stub = task.StubTask()
-        node_sub_workflows[node.id] = node_stub
-        graph.add_tasks(node_stub)
-
-    # create an uninstall sub workflow for every failing node
-    for node in failing_nodes:
-        node_sub_workflow = task.WorkflowTask(uninstall_node, node=node)
-        node_sub_workflows[node.id] = node_sub_workflow
-        graph.add_tasks(node_sub_workflow)
-
-    # create dependencies between the node sub workflows
-    for node in failing_nodes:
-        node_sub_workflow = node_sub_workflows[node.id]
-        for relationship in reversed(node.outbound_relationships):
-            graph.add_dependency(
-                node_sub_workflows[relationship.target_node.id],
-                node_sub_workflow)
-
-    # Add unlink operations for intact nodes that depend on one of the failing nodes
-    for node in targeted_nodes:
-        node_sub_workflow = node_sub_workflows[node.id]
-
-        for relationship in reversed(node.outbound_relationships):
-
-            target_node = \
-                ctx.model.node.get(relationship.target_node.id)
-            target_node_subgraph = node_sub_workflows[target_node.id]
-            graph.add_dependency(target_node_subgraph, node_sub_workflow)
-
-            if target_node in failing_nodes:
-                dependency = task.create_relationship_tasks(
-                    relationship=relationship,
-                    operation_name='aria.interfaces.relationship_lifecycle.unlink')
-                graph.add_tasks(*dependency)
-                graph.add_dependency(node_sub_workflow, dependency)
-
-
-@workflow(suffix_template='{failing_nodes}')
-def heal_install(ctx, graph, failing_nodes, targeted_nodes):
-    """
-    Install phase of the heal mechanism.
-
-    :param ctx: workflow context
-    :param graph: task graph to edit.
-    :param failing_nodes: failing nodes to heal
-    :param targeted_nodes: nodes that are targets of relationships from the failing nodes
-    """
-    node_sub_workflows = {}
-
-    # Create a stub task for each unaffected node
-    for node in targeted_nodes:
-        node_stub = task.StubTask()
-        node_sub_workflows[node.id] = node_stub
-        graph.add_tasks(node_stub)
-
-    # create an install sub workflow for every failing node
-    for node in failing_nodes:
-        node_sub_workflow = task.WorkflowTask(install_node, node=node)
-        node_sub_workflows[node.id] = node_sub_workflow
-        graph.add_tasks(node_sub_workflow)
-
-    # create dependencies between the node sub workflows
-    for node in failing_nodes:
-        node_sub_workflow = node_sub_workflows[node.id]
-        if node.outbound_relationships:
-            dependencies = \
-                [node_sub_workflows[relationship.target_node.id]
-                 for relationship in node.outbound_relationships]
-            graph.add_dependency(node_sub_workflow, dependencies)
-
-    # Add establish operations for intact nodes that depend on one of the failing nodes
-    for node in targeted_nodes:
-        node_sub_workflow = node_sub_workflows[node.id]
-
-        for relationship in node.outbound_relationships:
-            target_node = ctx.model.node.get(
-                relationship.target_node.id)
-            target_node_subworkflow = node_sub_workflows[target_node.id]
-            graph.add_dependency(node_sub_workflow, target_node_subworkflow)
-
-            if target_node in failing_nodes:
-                dependent = task.create_relationship_tasks(
-                    relationship=relationship,
-                    operation_name='aria.interfaces.relationship_lifecycle.establish')
-                graph.add_tasks(*dependent)
-                graph.add_dependency(dependent, node_sub_workflow)
-
-
-def _get_contained_subgraph(context, host_node):
-    contained_instances = [node
-                           for node in context.nodes
-                           if node.host_fk == host_node.id and
-                           node.host_fk != node.id]
-    result = [host_node]
-
-    if not contained_instances:
-        return result
-
-    result.extend(contained_instances)
-    for node in contained_instances:
-        result.extend(_get_contained_subgraph(context, node))
-
-    return set(result)
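
The contained-subgraph helper above walks the containment hierarchy: starting from the host node
it collects every node hosted on it, directly or transitively. A standalone illustration of the
same idea with plain objects (``host_fk`` points at the containing node's ID; the nesting shown
here is hypothetical):

    from collections import namedtuple

    Node = namedtuple('Node', 'id host_fk')

    def contained_subgraph(nodes, host):
        contained = [n for n in nodes if n.host_fk == host.id and n.id != host.id]
        result = set([host])
        for node in contained:
            result |= contained_subgraph(nodes, node)
        return result

    vm = Node('vm', 'vm')     # a host is its own host
    web = Node('web', 'vm')   # hosted on the VM
    app = Node('app', 'web')  # hosted on the web server
    print(sorted(n.id for n in contained_subgraph([vm, web, app], vm)))  # ['app', 'vm', 'web']
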
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/install.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/install.py
deleted file mode 100644
index 1e7c531..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/install.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Built-in install workflow.
-"""
-
-from ... import workflow
-from ..api import task as api_task
-from . import workflows
-
-
-@workflow
-def install(ctx, graph):
-    """
-    Built-in install workflow.
-    """
-    tasks_and_nodes = []
-    for node in ctx.nodes:
-        tasks_and_nodes.append((api_task.WorkflowTask(workflows.install_node, node=node), node))
-    graph.add_tasks([task for task, _ in tasks_and_nodes])
-    workflows.create_node_task_dependencies(graph, tasks_and_nodes)
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/start.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/start.py
deleted file mode 100644
index c02a26d..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/start.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Built-in start workflow.
-"""
-
-from .workflows import start_node
-from ... import workflow
-from ..api import task as api_task
-
-
-@workflow
-def start(ctx, graph):
-    """
-    Built-in start workflow.
-    """
-    for node in ctx.model.node.iter():
-        graph.add_tasks(api_task.WorkflowTask(start_node, node=node))
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/stop.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/stop.py
deleted file mode 100644
index 6f9930b..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/stop.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Built-in stop workflow.
-"""
-
-from .workflows import stop_node
-from ..api import task as api_task
-from ... import workflow
-
-
-@workflow
-def stop(ctx, graph):
-    """
-    Built-in stop workflow.
-    """
-    for node in ctx.model.node.iter():
-        graph.add_tasks(api_task.WorkflowTask(stop_node, node=node))
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/uninstall.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/uninstall.py
deleted file mode 100644
index 7925f4b..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/uninstall.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Built-in uninstall workflow.
-"""
-
-from ... import workflow
-from ..api import task as api_task
-from . import workflows
-
-
-@workflow
-def uninstall(ctx, graph):
-    """
-    Built-in uninstall workflow.
-    """
-    tasks_and_nodes = []
-    for node in ctx.nodes:
-        tasks_and_nodes.append((api_task.WorkflowTask(workflows.uninstall_node, node=node), node))
-    graph.add_tasks([task for task, _ in tasks_and_nodes])
-    workflows.create_node_task_dependencies(graph, tasks_and_nodes, reverse=True)
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/workflows.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/workflows.py
deleted file mode 100644
index b286e98..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/builtin/workflows.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-TOSCA normative lifecycle workflows.
-"""
-
-from ... import workflow
-from ..api import task
-
-
-NORMATIVE_STANDARD_INTERFACE = 'Standard' # 'tosca.interfaces.node.lifecycle.Standard'
-NORMATIVE_CONFIGURE_INTERFACE = 'Configure' # 'tosca.interfaces.relationship.Configure'
-
-NORMATIVE_CREATE = 'create'
-NORMATIVE_CONFIGURE = 'configure'
-NORMATIVE_START = 'start'
-NORMATIVE_STOP = 'stop'
-NORMATIVE_DELETE = 'delete'
-
-NORMATIVE_PRE_CONFIGURE_SOURCE = 'pre_configure_source'
-NORMATIVE_PRE_CONFIGURE_TARGET = 'pre_configure_target'
-NORMATIVE_POST_CONFIGURE_SOURCE = 'post_configure_source'
-NORMATIVE_POST_CONFIGURE_TARGET = 'post_configure_target'
-
-NORMATIVE_ADD_SOURCE = 'add_source'
-NORMATIVE_ADD_TARGET = 'add_target'
-NORMATIVE_REMOVE_TARGET = 'remove_target'
-NORMATIVE_REMOVE_SOURCE = 'remove_source'
-NORMATIVE_TARGET_CHANGED = 'target_changed'
-
-
-__all__ = (
-    'NORMATIVE_STANDARD_INTERFACE',
-    'NORMATIVE_CONFIGURE_INTERFACE',
-    'NORMATIVE_CREATE',
-    'NORMATIVE_START',
-    'NORMATIVE_STOP',
-    'NORMATIVE_DELETE',
-    'NORMATIVE_CONFIGURE',
-    'NORMATIVE_PRE_CONFIGURE_SOURCE',
-    'NORMATIVE_PRE_CONFIGURE_TARGET',
-    'NORMATIVE_POST_CONFIGURE_SOURCE',
-    'NORMATIVE_POST_CONFIGURE_TARGET',
-    'NORMATIVE_ADD_SOURCE',
-    'NORMATIVE_ADD_TARGET',
-    'NORMATIVE_REMOVE_SOURCE',
-    'NORMATIVE_REMOVE_TARGET',
-    'NORMATIVE_TARGET_CHANGED',
-    'install_node',
-    'uninstall_node',
-    'start_node',
-    'stop_node',
-)
-
-
-@workflow(suffix_template='{node.name}')
-def install_node(graph, node, **kwargs):
-    # Create
-    sequence = [task.create_task(node, NORMATIVE_STANDARD_INTERFACE, NORMATIVE_CREATE)]
-
-    # Configure
-    sequence += task.create_relationships_tasks(node,
-                                                NORMATIVE_CONFIGURE_INTERFACE,
-                                                NORMATIVE_PRE_CONFIGURE_SOURCE,
-                                                NORMATIVE_PRE_CONFIGURE_TARGET)
-    sequence.append(task.create_task(node, NORMATIVE_STANDARD_INTERFACE, NORMATIVE_CONFIGURE))
-    sequence += task.create_relationships_tasks(node,
-                                                NORMATIVE_CONFIGURE_INTERFACE,
-                                                NORMATIVE_POST_CONFIGURE_SOURCE,
-                                                NORMATIVE_POST_CONFIGURE_TARGET)
-    # Start
-    sequence += _create_start_tasks(node)
-
-    graph.sequence(*sequence)
-
-
-@workflow(suffix_template='{node.name}')
-def uninstall_node(graph, node, **kwargs):
-    # Stop
-    sequence = _create_stop_tasks(node)
-
-    # Delete
-    sequence.append(task.create_task(node, NORMATIVE_STANDARD_INTERFACE, NORMATIVE_DELETE))
-
-    graph.sequence(*sequence)
-
-
-@workflow(suffix_template='{node.name}')
-def start_node(graph, node, **kwargs):
-    graph.sequence(*_create_start_tasks(node))
-
-
-@workflow(suffix_template='{node.name}')
-def stop_node(graph, node, **kwargs):
-    graph.sequence(*_create_stop_tasks(node))
-
-
-def _create_start_tasks(node):
-    sequence = [task.create_task(node, NORMATIVE_STANDARD_INTERFACE, NORMATIVE_START)]
-    sequence += task.create_relationships_tasks(node,
-                                                NORMATIVE_CONFIGURE_INTERFACE,
-                                                NORMATIVE_ADD_SOURCE, NORMATIVE_ADD_TARGET)
-    return sequence
-
-
-def _create_stop_tasks(node):
-    sequence = [task.create_task(node, NORMATIVE_STANDARD_INTERFACE, NORMATIVE_STOP)]
-    sequence += task.create_relationships_tasks(node,
-                                                NORMATIVE_CONFIGURE_INTERFACE,
-                                                NORMATIVE_REMOVE_SOURCE, NORMATIVE_REMOVE_TARGET)
-    return sequence
-
-
-def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
-    """
-    Creates dependencies between tasks if there is a relationship (outbound) between their nodes.
-    """
-
-    def get_task(node_name):
-        for api_task, task_node in tasks_and_nodes:
-            if task_node.name == node_name:
-                return api_task
-        return None
-
-    for api_task, node in tasks_and_nodes:
-        dependencies = []
-        for relationship in node.outbound_relationships:
-            dependency = get_task(relationship.target_node.name)
-            if dependency:
-                dependencies.append(dependency)
-        if dependencies:
-            if reverse:
-                for dependency in dependencies:
-                    graph.add_dependency(dependency, api_task)
-            else:
-                graph.add_dependency(api_task, dependencies)
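
Putting the pieces together, ``install_node`` produces a per-node sequence in the normative
order: create, pre-configure (source then target, per outbound relationship), configure,
post-configure (source then target), start, and finally add-source/add-target. Spelled out as
plain data for a node with one outbound relationship:

    node_install_order = [
        ('Standard', 'create'),
        ('Configure', 'pre_configure_source'),
        ('Configure', 'pre_configure_target'),
        ('Standard', 'configure'),
        ('Configure', 'post_configure_source'),
        ('Configure', 'post_configure_target'),
        ('Standard', 'start'),
        ('Configure', 'add_source'),
        ('Configure', 'add_target'),
    ]
    for interface, operation in node_install_order:
        print('{0}:{1}'.format(interface, operation))
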
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/engine.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/engine.py
deleted file mode 100644
index d9c77e9..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/engine.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow execution.
-"""
-
-import time
-from datetime import datetime
-
-from aria import logger
-from aria.modeling import models
-from aria.orchestrator import events
-from aria.orchestrator.context import operation
-
-from .. import exceptions
-from ..executor.base import StubTaskExecutor
-# Import required so all signals are registered
-from . import events_handler  # pylint: disable=unused-import
-
-
-class Engine(logger.LoggerMixin):
-    """
-    Executes workflows.
-    """
-
-    def __init__(self, executors, **kwargs):
-        super(Engine, self).__init__(**kwargs)
-        self._executors = executors.copy()
-        self._executors.setdefault(StubTaskExecutor, StubTaskExecutor())
-
-    def execute(self, ctx, resuming=False):
-        """
-        Executes the workflow.
-        """
-        if resuming:
-            events.on_resume_workflow_signal.send(ctx)
-
-        tasks_tracker = _TasksTracker(ctx)
-        try:
-            events.start_workflow_signal.send(ctx)
-            while True:
-                cancel = self._is_cancel(ctx)
-                if cancel:
-                    break
-                for task in tasks_tracker.ended_tasks:
-                    self._handle_ended_tasks(task)
-                    tasks_tracker.finished(task)
-                for task in tasks_tracker.executable_tasks:
-                    tasks_tracker.executing(task)
-                    self._handle_executable_task(ctx, task)
-                if tasks_tracker.all_tasks_consumed:
-                    break
-                else:
-                    time.sleep(0.1)
-            if cancel:
-                self._terminate_tasks(tasks_tracker.executing_tasks)
-                events.on_cancelled_workflow_signal.send(ctx)
-            else:
-                events.on_success_workflow_signal.send(ctx)
-        except BaseException as e:
-            # Cleanup any remaining tasks
-            self._terminate_tasks(tasks_tracker.executing_tasks)
-            events.on_failure_workflow_signal.send(ctx, exception=e)
-            raise
-
-    def _terminate_tasks(self, tasks):
-        for task in tasks:
-            try:
-                self._executors[task._executor].terminate(task.id)
-            except BaseException:
-                pass
-
-    @staticmethod
-    def cancel_execution(ctx):
-        """
-        Sends a cancel request to the engine. If the execution has already started, its status
-        is set to ``cancelling``; if it is still pending, it is set to ``cancelled`` directly.
-        """
-        events.on_cancelling_workflow_signal.send(ctx)
-
-    @staticmethod
-    def _is_cancel(ctx):
-        execution = ctx.model.execution.refresh(ctx.execution)
-        return execution.status in (models.Execution.CANCELLING, models.Execution.CANCELLED)
-
-    def _handle_executable_task(self, ctx, task):
-        task_executor = self._executors[task._executor]
-
-        # For stub tasks use the default operation context; otherwise use the task's context class
-        context_cls = operation.BaseOperationContext if task._stub_type else task._context_cls
-        op_ctx = context_cls(
-            model_storage=ctx.model,
-            resource_storage=ctx.resource,
-            workdir=ctx._workdir,
-            task_id=task.id,
-            actor_id=task.actor.id if task.actor else None,
-            service_id=task.execution.service.id,
-            execution_id=task.execution.id,
-            name=task.name
-        )
-
-        if not task._stub_type:
-            events.sent_task_signal.send(op_ctx)
-        task_executor.execute(op_ctx)
-
-    @staticmethod
-    def _handle_ended_tasks(task):
-        if task.status == models.Task.FAILED and not task.ignore_failure:
-            raise exceptions.ExecutorException('Workflow failed')
-
-
-class _TasksTracker(object):
-    def __init__(self, ctx):
-        self._ctx = ctx
-        self._tasks = ctx.execution.tasks
-        self._executed_tasks = [task for task in self._tasks if task.has_ended()]
-        self._executable_tasks = list(set(self._tasks) - set(self._executed_tasks))
-        self._executing_tasks = []
-
-    @property
-    def all_tasks_consumed(self):
-        return len(self._executed_tasks) == len(self._tasks) and len(self._executing_tasks) == 0
-
-    def executing(self, task):
-        # A task entering execution may be a retrying task that is already in the executing list
-        if task not in self._executing_tasks:
-            self._executable_tasks.remove(task)
-            self._executing_tasks.append(task)
-
-    def finished(self, task):
-        self._executing_tasks.remove(task)
-        self._executed_tasks.append(task)
-
-    @property
-    def ended_tasks(self):
-        for task in self.executing_tasks:
-            if task.has_ended():
-                yield task
-
-    @property
-    def executable_tasks(self):
-        now = datetime.utcnow()
-        # We need both lists, since retrying tasks are still in the executing tasks list.
-        for task in self._update_tasks(self._executing_tasks + self._executable_tasks):
-            if all([task.is_waiting(),
-                    task.due_at <= now,
-                    all(dependency in self._executed_tasks for dependency in task.dependencies)
-                   ]):
-                yield task
-
-    @property
-    def executing_tasks(self):
-        for task in self._update_tasks(self._executing_tasks):
-            yield task
-
-    @property
-    def executed_tasks(self):
-        for task in self._update_tasks(self._executed_tasks):
-            yield task
-
-    @property
-    def tasks(self):
-        for task in self._update_tasks(self._tasks):
-            yield task
-
-    def _update_tasks(self, tasks):
-        for task in tasks:
-            yield self._ctx.model.task.refresh(task)
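For orientation, here is a minimal, hypothetical sketch of how this engine is typically wired together with the graph compiler and an executor (the ``workflow_ctx`` and ``tasks_graph`` arguments are assumed to be supplied by the caller; this is illustrative and not part of the deleted module):

    from aria.orchestrator.workflows.core import engine, graph_compiler
    from aria.orchestrator.workflows.executor import process

    def run_workflow(workflow_ctx, tasks_graph):
        # Compile the API-level task graph into stored Task models, then execute them
        executor = process.ProcessExecutor()
        try:
            graph_compiler.GraphCompiler(workflow_ctx, executor.__class__).compile(tasks_graph)
            eng = engine.Engine(executors={executor.__class__: executor})
            eng.execute(workflow_ctx)
        finally:
            executor.close()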
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/events_handler.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/events_handler.py
deleted file mode 100644
index 37801de..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/events_handler.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow event handling.
-"""
-
-from datetime import (
-    datetime,
-    timedelta,
-)
-
-from ... import events
-from ... import exceptions
-
-
-@events.sent_task_signal.connect
-def _task_sent(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.status = ctx.task.SENT
-
-
-@events.start_task_signal.connect
-def _task_started(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.started_at = datetime.utcnow()
-        ctx.task.status = ctx.task.STARTED
-        _update_node_state_if_necessary(ctx, is_transitional=True)
-
-
-@events.on_failure_task_signal.connect
-def _task_failed(ctx, exception, *args, **kwargs):
-    with ctx.persist_changes:
-        should_retry = all([
-            not isinstance(exception, exceptions.TaskAbortException),
-            ctx.task.attempts_count < ctx.task.max_attempts or
-            ctx.task.max_attempts == ctx.task.INFINITE_RETRIES,
-            # ignore_failure check here means the task will not be retried and it will be marked
-            # as failed. The engine will also look at ignore_failure so it won't fail the
-            # workflow.
-            not ctx.task.ignore_failure
-        ])
-        if should_retry:
-            retry_interval = None
-            if isinstance(exception, exceptions.TaskRetryException):
-                retry_interval = exception.retry_interval
-            if retry_interval is None:
-                retry_interval = ctx.task.retry_interval
-            ctx.task.status = ctx.task.RETRYING
-            ctx.task.attempts_count += 1
-            ctx.task.due_at = datetime.utcnow() + timedelta(seconds=retry_interval)
-        else:
-            ctx.task.ended_at = datetime.utcnow()
-            ctx.task.status = ctx.task.FAILED
-
-
-@events.on_success_task_signal.connect
-def _task_succeeded(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.ended_at = datetime.utcnow()
-        ctx.task.status = ctx.task.SUCCESS
-
-        _update_node_state_if_necessary(ctx)
-
-
-@events.start_workflow_signal.connect
-def _workflow_started(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        # the execution may already be in the process of cancelling
-        if execution.status in (execution.CANCELLING, execution.CANCELLED):
-            return
-        execution.status = execution.STARTED
-        execution.started_at = datetime.utcnow()
-
-
-@events.on_failure_workflow_signal.connect
-def _workflow_failed(workflow_context, exception, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        execution.error = str(exception)
-        execution.status = execution.FAILED
-        execution.ended_at = datetime.utcnow()
-
-
-@events.on_success_workflow_signal.connect
-def _workflow_succeeded(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        execution.status = execution.SUCCEEDED
-        execution.ended_at = datetime.utcnow()
-
-
-@events.on_cancelled_workflow_signal.connect
-def _workflow_cancelled(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        # _workflow_cancelling function may have called this function already
-        if execution.status == execution.CANCELLED:
-            return
-        # the execution may have already been finished
-        elif execution.status in (execution.SUCCEEDED, execution.FAILED):
-            _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
-        else:
-            execution.status = execution.CANCELLED
-            execution.ended_at = datetime.utcnow()
-
-
-@events.on_resume_workflow_signal.connect
-def _workflow_resume(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        execution.status = execution.PENDING
-        # Any non ended task would be put back to pending state
-        for task in execution.tasks:
-            if not task.has_ended():
-                task.status = task.PENDING
-
-
-@events.on_cancelling_workflow_signal.connect
-def _workflow_cancelling(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        if execution.status == execution.PENDING:
-            return _workflow_cancelled(workflow_context=workflow_context)
-        # the execution may have already been finished
-        elif execution.status in (execution.SUCCEEDED, execution.FAILED):
-            _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
-        else:
-            execution.status = execution.CANCELLING
-
-
-def _update_node_state_if_necessary(ctx, is_transitional=False):
-    # TODO: this is not the right way to check! the interface name is arbitrary
-    # and also will *never* be the type name
-    node = ctx.task.node if ctx.task is not None else None
-    if (node is not None) and \
-        (ctx.task.interface_name in ('Standard', 'tosca.interfaces.node.lifecycle.Standard')):
-        state = node.determine_state(op_name=ctx.task.operation_name,
-                                     is_transitional=is_transitional)
-        if state:
-            node.state = state
-            ctx.model.node.update(node)
-
-
-def _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, status):
-    workflow_context.logger.info(
-        "'{workflow_name}' workflow execution {status} before the cancel request "
-        "was fully processed".format(workflow_name=workflow_context.workflow_name, status=status))
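Because these handlers are plain signal subscribers, additional listeners can hook the same events from user code. A hypothetical example (the warning text is an assumption, not existing behaviour):

    from aria.orchestrator import events

    @events.on_failure_task_signal.connect
    def _warn_on_task_failure(ctx, exception, *args, **kwargs):
        # ``ctx`` is the operation context, as in the handlers above
        ctx.logger.warning('task {0} failed: {1}'.format(ctx.task.name, exception))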
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/graph_compiler.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/graph_compiler.py
deleted file mode 100644
index 81543d5..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/core/graph_compiler.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ....modeling import models
-from .. import executor, api
-
-
-class GraphCompiler(object):
-    def __init__(self, ctx, default_executor):
-        self._ctx = ctx
-        self._default_executor = default_executor
-        self._stub_executor = executor.base.StubTaskExecutor
-        self._model_to_api_id = {}
-
-    def compile(self,
-                task_graph,
-                start_stub_type=models.Task.START_WORKFLOW,
-                end_stub_type=models.Task.END_WORKFLOW,
-                depends_on=()):
-        """
-        Translates the user task graph into the execution graph.
-
-        :param task_graph: the user's task graph
-        :param start_stub_type: internal use
-        :param end_stub_type: internal use
-        :param depends_on: internal use
-        """
-        depends_on = list(depends_on)
-
-        # Insert start marker
-        start_task = self._create_stub_task(
-            start_stub_type, depends_on, self._start_graph_suffix(task_graph.id), task_graph.name,
-        )
-
-        for task in task_graph.topological_order(reverse=True):
-            dependencies = \
-                (self._get_tasks_from_dependencies(task_graph.get_dependencies(task))
-                 or [start_task])
-
-            if isinstance(task, api.task.OperationTask):
-                self._create_operation_task(task, dependencies)
-
-            elif isinstance(task, api.task.WorkflowTask):
-                # Build the graph recursively while adding start and end markers
-                self.compile(
-                    task, models.Task.START_SUBWROFKLOW, models.Task.END_SUBWORKFLOW, dependencies
-                )
-            elif isinstance(task, api.task.StubTask):
-                self._create_stub_task(models.Task.STUB, dependencies, task.id)
-            else:
-                raise RuntimeError('Undefined state')
-
-        # Insert end marker
-        self._create_stub_task(
-            end_stub_type,
-            self._get_non_dependent_tasks(self._ctx.execution) or [start_task],
-            self._end_graph_suffix(task_graph.id),
-            task_graph.name
-        )
-
-    def _create_stub_task(self, stub_type, dependencies, api_id, name=None):
-        model_task = models.Task(
-            name=name,
-            dependencies=dependencies,
-            execution=self._ctx.execution,
-            _executor=self._stub_executor,
-            _stub_type=stub_type)
-        self._ctx.model.task.put(model_task)
-        self._model_to_api_id[model_task.id] = api_id
-        return model_task
-
-    def _create_operation_task(self, api_task, dependencies):
-        model_task = models.Task.from_api_task(
-            api_task, self._default_executor, dependencies=dependencies)
-        self._ctx.model.task.put(model_task)
-        self._model_to_api_id[model_task.id] = api_task.id
-        return model_task
-
-    @staticmethod
-    def _start_graph_suffix(api_id):
-        return '{0}-Start'.format(api_id)
-
-    @staticmethod
-    def _end_graph_suffix(api_id):
-        return '{0}-End'.format(api_id)
-
-    @staticmethod
-    def _get_non_dependent_tasks(execution):
-        tasks_with_dependencies = set()
-        for task in execution.tasks:
-            tasks_with_dependencies.update(task.dependencies)
-        return list(set(execution.tasks) - set(tasks_with_dependencies))
-
-    def _get_tasks_from_dependencies(self, dependencies):
-        """
-        Returns task list from dependencies.
-        """
-        tasks = []
-        for dependency in dependencies:
-            if isinstance(dependency, (api.task.StubTask, api.task.OperationTask)):
-                dependency_name = dependency.id
-            else:
-                dependency_name = self._end_graph_suffix(dependency.id)
-            tasks.extend(task for task in self._ctx.execution.tasks
-                         if self._model_to_api_id.get(task.id, None) == dependency_name)
-        return tasks
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/events_logging.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/events_logging.py
deleted file mode 100644
index 9eee1e1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/events_logging.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""
-Workflow event logging.
-"""
-
-from .. import events
-from ... import modeling
-
-
-def _get_task_name(task):
-    if isinstance(task.actor, modeling.model_bases.service_instance.RelationshipBase):
-        return '{source_node.name}->{target_node.name}'.format(
-            source_node=task.actor.source_node, target_node=task.actor.target_node)
-    else:
-        return task.actor.name
-
-
-@events.start_task_signal.connect
-def _start_task_handler(ctx, **kwargs):
-    # If the task has no function this is an empty task.
-    if ctx.task.function:
-        suffix = 'started...'
-        logger = ctx.logger.info
-    else:
-        suffix = 'has no implementation'
-        logger = ctx.logger.debug
-
-    logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
-        name=_get_task_name(ctx.task), task=ctx.task, suffix=suffix))
-
-
-@events.on_success_task_signal.connect
-def _success_task_handler(ctx, **kwargs):
-    if not ctx.task.function:
-        return
-    ctx.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
-                    .format(name=_get_task_name(ctx.task), task=ctx.task))
-
-
-@events.on_failure_task_signal.connect
-def _failure_operation_handler(ctx, traceback, **kwargs):
-    ctx.logger.error(
-        '{name} {task.interface_name}.{task.operation_name} failed'
-        .format(name=_get_task_name(ctx.task), task=ctx.task), extra=dict(traceback=traceback)
-    )
-
-
-@events.start_workflow_signal.connect
-def _start_workflow_handler(context, **kwargs):
-    context.logger.info("Starting '{ctx.workflow_name}' workflow execution".format(ctx=context))
-
-
-@events.on_failure_workflow_signal.connect
-def _failure_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution failed".format(ctx=context))
-
-
-@events.on_success_workflow_signal.connect
-def _success_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution succeeded".format(ctx=context))
-
-
-@events.on_cancelled_workflow_signal.connect
-def _cancel_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution canceled".format(ctx=context))
-
-
-@events.on_cancelling_workflow_signal.connect
-def _cancelling_workflow_handler(context, **kwargs):
-    context.logger.info("Cancelling '{ctx.workflow_name}' workflow execution".format(ctx=context))
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/exceptions.py
deleted file mode 100644
index 2a1d6b1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/exceptions.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow exceptions.
-"""
-
-import os
-
-from .. import exceptions
-
-
-class ExecutorException(exceptions.AriaError):
-    """
-    General executor exception.
-    """
-    pass
-
-
-class ProcessException(ExecutorException):
-    """
-    Raised when subprocess execution fails.
-    """
-
-    def __init__(self, command, stderr=None, stdout=None, return_code=None):
-        """
-        Describes a failed child process invocation.
-
-        :param list command: child process command
-        :param str stderr: child process stderr
-        :param str stdout: child process stdout
-        :param int return_code: child process exit code
-        """
-        super(ProcessException, self).__init__("child process failed")
-        self.command = command
-        self.stderr = stderr
-        self.stdout = stdout
-        self.return_code = return_code
-
-    @property
-    def explanation(self):
-        """
-        Describes the error in detail
-        """
-        return (
-            'Command "{error.command}" executed with an error.{0}'
-            'code: {error.return_code}{0}'
-            'error: {error.stderr}{0}'
-            'output: {error.stdout}'.format(os.linesep, error=self))
-
-
-class AriaEngineError(exceptions.AriaError):
-    """
-    Raised by the workflow engine.
-    """
-
-
-class TaskException(exceptions.AriaError):
-    """
-    Raised by the task.
-    """
-
-
-class TaskCreationException(TaskException):
-    """
-    Could not create the task.
-    """
-
-
-class OperationNotFoundException(TaskCreationException):
-    """
-    Could not find an operation on the node or relationship.
-    """
-
-
-class PluginNotFoundException(TaskCreationException):
-    """
-    Could not find a plugin matching the plugin specification.
-    """
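``ProcessException.explanation`` is intended for user-facing error reporting. A hypothetical illustration (the failing function is a stand-in, not real project code):

    from aria.orchestrator.workflows import exceptions

    def _run_failing_command():
        # Stand-in for code that surfaces a child-process failure
        raise exceptions.ProcessException(command=['false'], stderr='boom', return_code=1)

    try:
        _run_failing_command()
    except exceptions.ProcessException as e:
        print(e.explanation)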
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/base.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/base.py
deleted file mode 100644
index e7d03ea..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/base.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Base class for task executors.
-"""
-
-from aria import logger
-from aria.orchestrator import events
-
-
-class BaseExecutor(logger.LoggerMixin):
-    """
-    Base class for task executors.
-    """
-    def _execute(self, ctx):
-        raise NotImplementedError
-
-    def execute(self, ctx):
-        """
-        Executes a task.
-
-        :param ctx: context of the task to execute
-        """
-        if ctx.task.function:
-            self._execute(ctx)
-        else:
-            # In this case the task is missing a function. This task still gets to an
-            # executor, but since there is nothing to run, we by default simply skip the
-            # execution itself.
-            self._task_started(ctx)
-            self._task_succeeded(ctx)
-
-    def close(self):
-        """
-        Closes the executor.
-        """
-        pass
-
-    def terminate(self, task_id):
-        """
-        Terminates the executing task.
-
-        :param task_id: ID of the task to terminate
-        """
-        pass
-
-    @staticmethod
-    def _task_started(ctx):
-        events.start_task_signal.send(ctx)
-
-    @staticmethod
-    def _task_failed(ctx, exception, traceback=None):
-        events.on_failure_task_signal.send(ctx, exception=exception, traceback=traceback)
-
-    @staticmethod
-    def _task_succeeded(ctx):
-        events.on_success_task_signal.send(ctx)
-
-
-class StubTaskExecutor(BaseExecutor):                                                               # pylint: disable=abstract-method
-    def execute(self, ctx, *args, **kwargs):
-        with ctx.persist_changes:
-            ctx.task.status = ctx.task.SUCCESS
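A concrete executor only needs to implement ``_execute`` and report status through the inherited helpers. A minimal hypothetical subclass that runs the task function inline (mirroring what the thread executor further below does):

    from aria.utils import imports
    from aria.orchestrator.workflows.executor import base

    class InlineExecutor(base.BaseExecutor):
        """Runs each task function synchronously in the calling thread (sketch only)."""

        def _execute(self, ctx):
            self._task_started(ctx)
            try:
                task_func = imports.load_attribute(ctx.task.function)
                arguments = dict(arg.unwrapped for arg in ctx.task.arguments.values())
                task_func(ctx=ctx, **arguments)
                self._task_succeeded(ctx)
            except Exception as e:
                self._task_failed(ctx, exception=e)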
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/celery.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/celery.py
deleted file mode 100644
index 0716e5b..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/celery.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Celery task executor.
-"""
-
-import threading
-import Queue
-
-from aria.orchestrator.workflows.executor import BaseExecutor
-
-
-class CeleryExecutor(BaseExecutor):
-    """
-    Celery task executor.
-    """
-
-    def __init__(self, app, *args, **kwargs):
-        super(CeleryExecutor, self).__init__(*args, **kwargs)
-        self._app = app
-        self._started_signaled = False
-        self._started_queue = Queue.Queue(maxsize=1)
-        self._tasks = {}
-        self._results = {}
-        self._receiver = None
-        self._stopped = False
-        self._receiver_thread = threading.Thread(target=self._events_receiver)
-        self._receiver_thread.daemon = True
-        self._receiver_thread.start()
-        self._started_queue.get(timeout=30)
-
-    def _execute(self, ctx):
-        self._tasks[ctx.id] = ctx
-        arguments = dict(arg.unwrapped for arg in ctx.arguments.values())
-        arguments['ctx'] = ctx.context
-        self._results[ctx.id] = self._app.send_task(
-            ctx.operation_mapping,
-            kwargs=arguments,
-            task_id=ctx.id,
-            queue=self._get_queue(ctx))
-
-    def close(self):
-        self._stopped = True
-        if self._receiver:
-            self._receiver.should_stop = True
-        self._receiver_thread.join()
-
-    @staticmethod
-    def _get_queue(task):
-        return None if task else None  # TODO
-
-    def _events_receiver(self):
-        with self._app.connection() as connection:
-            self._receiver = self._app.events.Receiver(connection, handlers={
-                'task-started': self._celery_task_started,
-                'task-succeeded': self._celery_task_succeeded,
-                'task-failed': self._celery_task_failed,
-            })
-            for _ in self._receiver.itercapture(limit=None, timeout=None, wakeup=True):
-                if not self._started_signaled:
-                    self._started_queue.put(True)
-                    self._started_signaled = True
-                if self._stopped:
-                    return
-
-    def _celery_task_started(self, event):
-        self._task_started(self._tasks[event['uuid']])
-
-    def _celery_task_succeeded(self, event):
-        task, _ = self._remove_task(event['uuid'])
-        self._task_succeeded(task)
-
-    def _celery_task_failed(self, event):
-        task, async_result = self._remove_task(event['uuid'])
-        try:
-            exception = async_result.result
-        except BaseException as e:
-            exception = RuntimeError(
-                'Could not de-serialize exception of task {0} --> {1}: {2}'
-                .format(task.name, type(e).__name__, str(e)))
-        self._task_failed(task, exception=exception)
-
-    def _remove_task(self, task_id):
-        return self._tasks.pop(task_id), self._results.pop(task_id)
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/dry.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/dry.py
deleted file mode 100644
index 9314e5d..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/dry.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Dry task executor.
-"""
-
-from datetime import datetime
-
-from . import base
-
-
-class DryExecutor(base.BaseExecutor):                                                                    # pylint: disable=abstract-method
-    """
-    Dry task executor: prints task information without causing any side effects.
-    """
-    def execute(self, ctx):
-        with ctx.persist_changes:
-            # updating the task manually instead of calling self._task_started(task),
-            # to avoid any side effects raising that event might cause
-            ctx.task.started_at = datetime.utcnow()
-            ctx.task.status = ctx.task.STARTED
-
-            dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
-            logger = ctx.logger.info if ctx.task.function else ctx.logger.debug
-
-            if hasattr(ctx.task.actor, 'source_node'):
-                name = '{source_node.name}->{target_node.name}'.format(
-                    source_node=ctx.task.actor.source_node, target_node=ctx.task.actor.target_node)
-            else:
-                name = ctx.task.actor.name
-
-            if ctx.task.function:
-                logger(dry_msg.format(name=name, task=ctx.task, suffix='started...'))
-                logger(dry_msg.format(name=name, task=ctx.task, suffix='successful'))
-            else:
-                logger(dry_msg.format(name=name, task=ctx.task, suffix='has no implementation'))
-
-            # updating the task manually instead of calling self._task_succeeded(task),
-            # to avoid any side effects raising that event might cause
-            ctx.task.ended_at = datetime.utcnow()
-            ctx.task.status = ctx.task.SUCCESS
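Swapping this executor in yields a log-only dry run. A hypothetical wiring (``workflow_ctx`` and ``tasks_graph`` are assumed to be supplied by the caller):

    from aria.orchestrator.workflows.core import engine, graph_compiler
    from aria.orchestrator.workflows.executor import dry

    def dry_run(workflow_ctx, tasks_graph):
        # Compiling with DryExecutor as the default executor class routes every
        # operation task to the instance below, so only log lines are produced
        graph_compiler.GraphCompiler(workflow_ctx, dry.DryExecutor).compile(tasks_graph)
        eng = engine.Engine(executors={dry.DryExecutor: dry.DryExecutor()})
        eng.execute(workflow_ctx)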
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/process.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/process.py
deleted file mode 100644
index 81da26f..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/process.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Sub-process task executor.
-"""
-
-# pylint: disable=wrong-import-position
-
-import os
-import sys
-
-# As part of the process executor implementation, subprocesses are started with this module as
-# their entry point. We thus remove this module's directory from the Python path if it happens
-# to be there.
-
-from collections import namedtuple
-
-script_dir = os.path.dirname(__file__)
-if script_dir in sys.path:
-    sys.path.remove(script_dir)
-
-import contextlib
-import io
-import threading
-import socket
-import struct
-import subprocess
-import tempfile
-import Queue
-import pickle
-
-import psutil
-import jsonpickle
-
-import aria
-from aria.orchestrator.workflows.executor import base
-from aria.extension import process_executor
-from aria.utils import (
-    imports,
-    exceptions,
-    process as process_utils
-)
-
-
-_INT_FMT = 'I'
-_INT_SIZE = struct.calcsize(_INT_FMT)
-UPDATE_TRACKED_CHANGES_FAILED_STR = \
-    'Some changes failed writing to storage. For more info refer to the log.'
-
-
-_Task = namedtuple('_Task', 'proc, ctx')
-
-
-class ProcessExecutor(base.BaseExecutor):
-    """
-    Sub-process task executor.
-    """
-
-    def __init__(self, plugin_manager=None, python_path=None, *args, **kwargs):
-        super(ProcessExecutor, self).__init__(*args, **kwargs)
-        self._plugin_manager = plugin_manager
-
-        # Optional list of additional directories that should be added to
-        # subprocesses python path
-        self._python_path = python_path or []
-
-        # Flag that denotes whether this executor has been stopped
-        self._stopped = False
-
-        # Contains reference to all currently running tasks
-        self._tasks = {}
-
-        self._request_handlers = {
-            'started': self._handle_task_started_request,
-            'succeeded': self._handle_task_succeeded_request,
-            'failed': self._handle_task_failed_request,
-        }
-
-        # Server socket used to accept task status messages from subprocesses
-        self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        self._server_socket.bind(('localhost', 0))
-        self._server_socket.listen(10)
-        self._server_port = self._server_socket.getsockname()[1]
-
-        # Used to send a "closed" message to the listener when this executor is closed
-        self._messenger = _Messenger(task_id=None, port=self._server_port)
-
-        # Queue object used by the listener thread to notify this constructor that it has
-        # started (see the last line of this __init__ method)
-        self._listener_started = Queue.Queue()
-
-        # Listener thread to handle subprocesses task status messages
-        self._listener_thread = threading.Thread(target=self._listener)
-        self._listener_thread.daemon = True
-        self._listener_thread.start()
-
-        # Wait for listener thread to actually start before returning
-        self._listener_started.get(timeout=60)
-
-    def close(self):
-        if self._stopped:
-            return
-        self._stopped = True
-        # Listener thread may be blocked on "accept" call. This will wake it up with an explicit
-        # "closed" message
-        self._messenger.closed()
-        self._server_socket.close()
-        self._listener_thread.join(timeout=60)
-
-        # we use set(self._tasks) since tasks may change in the process of closing
-        for task_id in set(self._tasks):
-            self.terminate(task_id)
-
-    def terminate(self, task_id):
-        task = self._remove_task(task_id)
-        # The process might have managed to finish, thus it would not be in the tasks list
-        if task:
-            try:
-                parent_process = psutil.Process(task.proc.pid)
-                for child_process in reversed(parent_process.children(recursive=True)):
-                    try:
-                        child_process.kill()
-                    except BaseException:
-                        pass
-                parent_process.kill()
-            except BaseException:
-                pass
-
-    def _execute(self, ctx):
-        self._check_closed()
-
-        # Temporary file used to pass arguments to the started subprocess
-        file_descriptor, arguments_json_path = tempfile.mkstemp(prefix='executor-', suffix='.json')
-        os.close(file_descriptor)
-        with open(arguments_json_path, 'wb') as f:
-            f.write(pickle.dumps(self._create_arguments_dict(ctx)))
-
-        env = self._construct_subprocess_env(task=ctx.task)
-        # Asynchronously start the operation in a subprocess
-        proc = subprocess.Popen(
-            [
-                sys.executable,
-                os.path.expanduser(os.path.expandvars(__file__)),
-                os.path.expanduser(os.path.expandvars(arguments_json_path))
-            ],
-            env=env)
-
-        self._tasks[ctx.task.id] = _Task(ctx=ctx, proc=proc)
-
-    def _remove_task(self, task_id):
-        return self._tasks.pop(task_id, None)
-
-    def _check_closed(self):
-        if self._stopped:
-            raise RuntimeError('Executor closed')
-
-    def _create_arguments_dict(self, ctx):
-        return {
-            'task_id': ctx.task.id,
-            'function': ctx.task.function,
-            'operation_arguments': dict(arg.unwrapped for arg in ctx.task.arguments.values()),
-            'port': self._server_port,
-            'context': ctx.serialization_dict,
-        }
-
-    def _construct_subprocess_env(self, task):
-        env = os.environ.copy()
-
-        if task.plugin_fk and self._plugin_manager:
-            # If this is a plugin operation,
-            # load the plugin on the subprocess env we're constructing
-            self._plugin_manager.load_plugin(task.plugin, env=env)
-
-        # Add user supplied directories to injected PYTHONPATH
-        if self._python_path:
-            process_utils.append_to_pythonpath(*self._python_path, env=env)
-
-        return env
-
-    def _listener(self):
-        # Notify __init__ method this thread has actually started
-        self._listener_started.put(True)
-        while not self._stopped:
-            try:
-                with self._accept_request() as (request, response):
-                    request_type = request['type']
-                    if request_type == 'closed':
-                        break
-                    request_handler = self._request_handlers.get(request_type)
-                    if not request_handler:
-                        raise RuntimeError('Invalid request type: {0}'.format(request_type))
-                    task_id = request['task_id']
-                    request_handler(task_id=task_id, request=request, response=response)
-            except BaseException as e:
-                self.logger.debug('Error in process executor listener: {0}'.format(e))
-
-    @contextlib.contextmanager
-    def _accept_request(self):
-        with contextlib.closing(self._server_socket.accept()[0]) as connection:
-            message = _recv_message(connection)
-            response = {}
-            try:
-                yield message, response
-            except BaseException as e:
-                response['exception'] = exceptions.wrap_if_needed(e)
-                raise
-            finally:
-                _send_message(connection, response)
-
-    def _handle_task_started_request(self, task_id, **kwargs):
-        self._task_started(self._tasks[task_id].ctx)
-
-    def _handle_task_succeeded_request(self, task_id, **kwargs):
-        task = self._remove_task(task_id)
-        if task:
-            self._task_succeeded(task.ctx)
-
-    def _handle_task_failed_request(self, task_id, request, **kwargs):
-        task = self._remove_task(task_id)
-        if task:
-            self._task_failed(
-                task.ctx, exception=request['exception'], traceback=request['traceback'])
-
-
-def _send_message(connection, message):
-
-    # Packing the length of the entire msg using struct.pack.
-    # This enables later reading of the content.
-    def _pack(data):
-        return struct.pack(_INT_FMT, len(data))
-
-    data = jsonpickle.dumps(message)
-    msg_metadata = _pack(data)
-    connection.send(msg_metadata)
-    connection.sendall(data)
-
-
-def _recv_message(connection):
-    # Retrieving the length of the msg to come.
-    def _unpack(conn):
-        return struct.unpack(_INT_FMT, _recv_bytes(conn, _INT_SIZE))[0]
-
-    msg_metadata_len = _unpack(connection)
-    msg = _recv_bytes(connection, msg_metadata_len)
-    return jsonpickle.loads(msg)
-
-
-def _recv_bytes(connection, count):
-    result = io.BytesIO()
-    while True:
-        if not count:
-            return result.getvalue()
-        read = connection.recv(count)
-        if not read:
-            return result.getvalue()
-        result.write(read)
-        count -= len(read)
-
-
-class _Messenger(object):
-
-    def __init__(self, task_id, port):
-        self.task_id = task_id
-        self.port = port
-
-    def started(self):
-        """Task started message"""
-        self._send_message(type='started')
-
-    def succeeded(self):
-        """Task succeeded message"""
-        self._send_message(type='succeeded')
-
-    def failed(self, exception):
-        """Task failed message"""
-        self._send_message(type='failed', exception=exception)
-
-    def closed(self):
-        """Executor closed message"""
-        self._send_message(type='closed')
-
-    def _send_message(self, type, exception=None):
-        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        sock.connect(('localhost', self.port))
-        try:
-            _send_message(sock, {
-                'type': type,
-                'task_id': self.task_id,
-                'exception': exceptions.wrap_if_needed(exception),
-                'traceback': exceptions.get_exception_as_string(*sys.exc_info()),
-            })
-            response = _recv_message(sock)
-            response_exception = response.get('exception')
-            if response_exception:
-                raise response_exception
-        finally:
-            sock.close()
-
-
-def _main():
-    arguments_json_path = sys.argv[1]
-    with open(arguments_json_path) as f:
-        arguments = pickle.loads(f.read())
-
-    # arguments_json_path is a temporary file created by the parent process,
-    # so we remove it here.
-    os.remove(arguments_json_path)
-
-    task_id = arguments['task_id']
-    port = arguments['port']
-    messenger = _Messenger(task_id=task_id, port=port)
-
-    function = arguments['function']
-    operation_arguments = arguments['operation_arguments']
-    context_dict = arguments['context']
-
-    try:
-        ctx = context_dict['context_cls'].instantiate_from_dict(**context_dict['context'])
-    except BaseException as e:
-        messenger.failed(e)
-        return
-
-    try:
-        messenger.started()
-        task_func = imports.load_attribute(function)
-        aria.install_aria_extensions()
-        for decorate in process_executor.decorate():
-            task_func = decorate(task_func)
-        task_func(ctx=ctx, **operation_arguments)
-        ctx.close()
-        messenger.succeeded()
-    except BaseException as e:
-        ctx.close()
-        messenger.failed(e)
-
-if __name__ == '__main__':
-    _main()
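The parent process and its task subprocesses exchange messages over a small length-prefixed protocol (``_send_message``/``_recv_message`` above). A hypothetical round-trip of just the framing, without sockets, under the same Python 2 string semantics as the rest of this module:

    import io
    import struct

    import jsonpickle

    _INT_FMT = 'I'
    _INT_SIZE = struct.calcsize(_INT_FMT)

    def _frame(message):
        # 4-byte unsigned length header followed by the jsonpickle payload
        data = jsonpickle.dumps(message)
        return struct.pack(_INT_FMT, len(data)) + data

    def _unframe(buf):
        length = struct.unpack(_INT_FMT, buf.read(_INT_SIZE))[0]
        return jsonpickle.loads(buf.read(length))

    buf = io.BytesIO(_frame({'type': 'started', 'task_id': 42}))
    assert _unframe(buf) == {'type': 'started', 'task_id': 42}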
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/thread.py b/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/thread.py
deleted file mode 100644
index 6cef2c0..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/workflows/executor/thread.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Thread task executor.
-"""
-
-import Queue
-import threading
-
-import sys
-
-from aria.utils import imports, exceptions
-
-from .base import BaseExecutor
-
-
-class ThreadExecutor(BaseExecutor):
-    """
-    Thread task executor.
-
-    It's easier writing tests using this executor rather than the full-blown sub-process executor.
-
-    Note: This executor is incapable of running plugin operations.
-    """
-
-    def __init__(self, pool_size=1, close_timeout=5, *args, **kwargs):
-        super(ThreadExecutor, self).__init__(*args, **kwargs)
-        self._stopped = False
-        self._close_timeout = close_timeout
-        self._queue = Queue.Queue()
-        self._pool = []
-        for i in range(pool_size):
-            name = 'ThreadExecutor-{index}'.format(index=i+1)
-            thread = threading.Thread(target=self._processor, name=name)
-            thread.daemon = True
-            thread.start()
-            self._pool.append(thread)
-
-    def _execute(self, ctx):
-        self._queue.put(ctx)
-
-    def close(self):
-        self._stopped = True
-        for thread in self._pool:
-            if self._close_timeout is None:
-                thread.join()
-            else:
-                thread.join(self._close_timeout)
-
-    def _processor(self):
-        while not self._stopped:
-            try:
-                ctx = self._queue.get(timeout=1)
-                self._task_started(ctx)
-                try:
-                    task_func = imports.load_attribute(ctx.task.function)
-                    arguments = dict(arg.unwrapped for arg in ctx.task.arguments.values())
-                    task_func(ctx=ctx, **arguments)
-                    self._task_succeeded(ctx)
-                except BaseException as e:
-                    self._task_failed(ctx,
-                                      exception=e,
-                                      traceback=exceptions.get_exception_as_string(*sys.exc_info()))
-            # The pool threads are daemons; swallow queue timeouts and keep polling until stopped
-            except BaseException:
-                pass
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/consumer.py b/apache-ariatosca-0.1.1/aria/parser/consumption/consumer.py
deleted file mode 100644
index 4f4c614..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/consumer.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...exceptions import AriaException
-from ...utils.exceptions import print_exception
-from ..validation import Issue
-
-
-class Consumer(object):
-    """
-    Base class for ARIA consumers.
-
-    Consumers provide useful functionality by consuming presentations.
-    """
-
-    def __init__(self, context):
-        self.context = context
-
-    def consume(self):
-        pass
-
-    def dump(self):
-        pass
-
-    def _handle_exception(self, e):
-        if hasattr(e, 'issue') and isinstance(e.issue, Issue):
-            self.context.validation.report(issue=e.issue)
-        else:
-            self.context.validation.report(exception=e)
-        if not isinstance(e, AriaException):
-            print_exception(e)
-
-
-class ConsumerChain(Consumer):
-    """
-    ARIA consumer chain.
-
-    Calls consumers in order, handling exceptions by reporting them to the validation context,
-    and stops the chain as soon as there are validation issues.
-    """
-
-    def __init__(self, context, consumer_classes=None, handle_exceptions=True):
-        super(ConsumerChain, self).__init__(context)
-        self.handle_exceptions = handle_exceptions
-        self.consumers = []
-        if consumer_classes:
-            for consumer_class in consumer_classes:
-                self.append(consumer_class)
-
-    def append(self, *consumer_classes):
-        for consumer_class in consumer_classes:
-            self.consumers.append(consumer_class(self.context))
-
-    def consume(self):
-        for consumer in self.consumers:
-            try:
-                consumer.consume()
-            except BaseException as e:
-                if self.handle_exceptions:
-                    handle_exception(consumer, e)
-                else:
-                    raise e
-            if self.context.validation.has_issues:
-                break
-
-
-def handle_exception(consumer, e):
-    if isinstance(e, AriaException) and e.issue:
-        consumer.context.validation.report(issue=e.issue)
-    else:
-        consumer.context.validation.report(exception=e)
-    if not isinstance(e, AriaException):
-        print_exception(e)
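New parser phases follow the same pattern: subclass ``Consumer`` and add it to a chain. A hypothetical example (the ``node_templates`` attribute on the derived template is an assumption for illustration):

    from aria.parser.consumption.consumer import Consumer, ConsumerChain

    class CountNodeTemplates(Consumer):
        """Writes the number of node templates in the derived service template (sketch)."""

        def consume(self):
            template = self.context.modeling.template
            if template is None:
                self.context.validation.report('CountNodeTemplates consumer: missing template')
                return
            self.context.write('{0} node templates\n'.format(len(template.node_templates)))

    # Typically appended to an existing chain, for example:
    #     chain = ConsumerChain(context, (CountNodeTemplates,))
    #     chain.consume()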
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/context.py b/apache-ariatosca-0.1.1/aria/parser/consumption/context.py
deleted file mode 100644
index 6fa61f4..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/context.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import threading
-
-from ..validation import ValidationContext
-from ..loading import LoadingContext
-from ..reading import ReadingContext
-from ..presentation import PresentationContext
-from ..modeling import ModelingContext
-from .style import Style
-
-
-_thread_locals = threading.local()
-
-
-class ConsumptionContext(object):
-    """
-    Consumption context.
-
-    :ivar args: runtime arguments (usually provided on the command line)
-    :ivar out: message output stream (defaults to stdout)
-    :ivar style: message output style
-    :vartype style: Style
-    :ivar validation: validation context
-    :vartype validation: :class:`ValidationContext`
-    :ivar loading: loading context
-    :vartype loading: :class:`LoadingContext`
-    :ivar reading: reading context
-    :vartype reading: :class:`ReadingContext`
-    :ivar presentation: presentation context
-    :vartype presentation: :class:`PresentationContext`
-    :ivar modeling: modeling context
-    :vartype modeling: :class:`ModelingContext`
-    """
-
-    @staticmethod
-    def get_thread_local():
-        """
-        Gets the context attached to the current thread if there is one.
-        """
-
-        return getattr(_thread_locals, 'aria_consumption_context', None)
-
-    def __init__(self, set_thread_local=True):
-        self.args = []
-        self.out = sys.stdout
-        self.style = Style()
-        self.validation = ValidationContext()
-        self.loading = LoadingContext()
-        self.reading = ReadingContext()
-        self.presentation = PresentationContext()
-        self.modeling = ModelingContext()
-
-        if set_thread_local:
-            self.set_thread_local()
-
-    def set_thread_local(self):
-        """
-        Attaches this context to the current thread.
-        """
-
-        _thread_locals.aria_consumption_context = self
-
-    def write(self, string):
-        """
-        Writes to our ``out``, making sure to encode UTF-8 if required.
-        """
-
-        try:
-            self.out.write(string)
-        except UnicodeEncodeError:
-            self.out.write(string.encode('utf8'))
-
-    def has_arg_switch(self, name):
-        name = '--%s' % name
-        return name in self.args
-
-    def get_arg_value(self, name, default=None):
-        name = '--%s=' % name
-        for arg in self.args:
-            if arg.startswith(name):
-                return arg[len(name):]
-        return default
-
-    def get_arg_value_int(self, name, default=None):
-        value = self.get_arg_value(name)
-        if value is not None:
-            try:
-                return int(value)
-            except (TypeError, ValueError):
-                pass
-        return default
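The ``--name`` / ``--name=value`` helpers above are intentionally simple. A hypothetical illustration of how they interpret ``args`` (assuming the context can be constructed standalone):

    from aria.parser.consumption.context import ConsumptionContext

    context = ConsumptionContext(set_thread_local=False)
    context.args = ['--json', '--indent=4']

    assert context.has_arg_switch('json')
    assert not context.has_arg_switch('yaml')
    assert context.get_arg_value('indent') == '4'
    assert context.get_arg_value_int('indent', 2) == 4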
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/exceptions.py b/apache-ariatosca-0.1.1/aria/parser/consumption/exceptions.py
deleted file mode 100644
index 78509cb..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/exceptions.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...exceptions import AriaException
-
-
-class ConsumerException(AriaException):
-    """
-    ARIA consumer exception.
-    """
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/inputs.py b/apache-ariatosca-0.1.1/aria/parser/consumption/inputs.py
deleted file mode 100644
index fe7e192..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/inputs.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.formatting import safe_repr
-from ..loading import UriLocation, LiteralLocation
-from ..reading import JsonReader
-from .consumer import Consumer
-
-
-class Inputs(Consumer):
-    """
-    Fills in the inputs if provided as arguments.
-    """
-
-    def consume(self):
-        inputs = self.context.get_arg_value('inputs')
-        if inputs is None:
-            return
-
-        if inputs.endswith('.json') or inputs.endswith('.yaml'):
-            location = UriLocation(inputs)
-        else:
-            location = LiteralLocation(inputs)
-
-        loader = self.context.loading.loader_source.get_loader(self.context.loading, location, None)
-
-        if isinstance(location, LiteralLocation):
-            reader = JsonReader(self.context.reading, location, loader)
-        else:
-            reader = self.context.reading.reader_source.get_reader(self.context.reading,
-                                                                   location, loader)
-
-        inputs = reader.read()
-
-        if not isinstance(inputs, dict):
-            self.context.validation.report(
-                'Inputs consumer: inputs are not a dict: %s' % safe_repr(inputs))
-            return
-
-        for name, value in inputs.iteritems():
-            self.context.modeling.set_input(name, value)
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/modeling.py b/apache-ariatosca-0.1.1/aria/parser/consumption/modeling.py
deleted file mode 100644
index 44027b9..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/modeling.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.formatting import json_dumps, yaml_dumps
-from .consumer import Consumer, ConsumerChain
-
-
-class DeriveServiceTemplate(Consumer):
-    """
-    Derives the service template from the presenter.
-    """
-
-    def consume(self):
-        if self.context.presentation.presenter is None:
-            self.context.validation.report('DeriveServiceTemplate consumer: missing presenter')
-            return
-
-        if not hasattr(self.context.presentation.presenter, '_get_model'):
-            self.context.validation.report('DeriveServiceTemplate consumer: presenter does not'
-                                           ' support "_get_model"')
-            return
-
-        self.context.modeling.template = \
-            self.context.presentation.presenter._get_model(self.context)
-
-
-class CoerceServiceTemplateValues(Consumer):
-    """
-    Coerces values in the service template.
-    """
-
-    def consume(self):
-        self.context.modeling.template.coerce_values(True)
-
-
-class ValidateServiceTemplate(Consumer):
-    """
-    Validates the service template.
-    """
-
-    def consume(self):
-        self.context.modeling.template.validate()
-
-
-class ServiceTemplate(ConsumerChain):
-    """
-    Generates the service template from the presenter.
-    """
-
-    def __init__(self, context):
-        super(ServiceTemplate, self).__init__(context, (DeriveServiceTemplate,
-                                                        CoerceServiceTemplateValues,
-                                                        ValidateServiceTemplate))
-
-    def dump(self):
-        if self.context.has_arg_switch('yaml'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.template_as_raw
-            self.context.write(yaml_dumps(raw, indent=indent))
-        elif self.context.has_arg_switch('json'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.template_as_raw
-            self.context.write(json_dumps(raw, indent=indent))
-        else:
-            self.context.modeling.template.dump()
-
-
-class Types(Consumer):
-    """
-    Used only to dump the types.
-    """
-
-    def dump(self):
-        if self.context.has_arg_switch('yaml'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.types_as_raw
-            self.context.write(yaml_dumps(raw, indent=indent))
-        elif self.context.has_arg_switch('json'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.types_as_raw
-            self.context.write(json_dumps(raw, indent=indent))
-        else:
-            self.context.modeling.template.dump_types()
-
-
-class InstantiateServiceInstance(Consumer):
-    """
-    Instantiates the service template into a service instance.
-    """
-
-    def consume(self):
-        if self.context.modeling.template is None:
-            self.context.validation.report('InstantiateServiceInstance consumer: missing service '
-                                           'template')
-            return
-
-        self.context.modeling.template.instantiate(None, None,
-                                                   inputs=dict(self.context.modeling.inputs))
-
-
-class CoerceServiceInstanceValues(Consumer):
-    """
-    Coerces values in the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.coerce_values(True)
-
-
-class ValidateServiceInstance(Consumer):
-    """
-    Validates the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.validate()
-
-
-class SatisfyRequirements(Consumer):
-    """
-    Satisfies node requirements in the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.satisfy_requirements()
-
-
-class ValidateCapabilities(Consumer):
-    """
-    Validates capabilities in the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.validate_capabilities()
-
-
-class FindHosts(Consumer):
-    """
-    Finds hosts for all nodes in the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.find_hosts()
-
-
-class ConfigureOperations(Consumer):
-    """
-    Configures all operations in the service instance.
-    """
-
-    def consume(self):
-        self.context.modeling.instance.configure_operations()
-
-
-class ServiceInstance(ConsumerChain):
-    """
-    Generates the service instance by instantiating the service template.
-    """
-
-    def __init__(self, context):
-        super(ServiceInstance, self).__init__(context, (InstantiateServiceInstance,
-                                                        CoerceServiceInstanceValues,
-                                                        ValidateServiceInstance,
-                                                        CoerceServiceInstanceValues,
-                                                        SatisfyRequirements,
-                                                        CoerceServiceInstanceValues,
-                                                        ValidateCapabilities,
-                                                        FindHosts,
-                                                        ConfigureOperations,
-                                                        CoerceServiceInstanceValues))
-
-    def dump(self):
-        if self.context.has_arg_switch('graph'):
-            self.context.modeling.instance.dump_graph()
-        elif self.context.has_arg_switch('yaml'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.instance_as_raw
-            self.context.write(yaml_dumps(raw, indent=indent))
-        elif self.context.has_arg_switch('json'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.instance_as_raw
-            self.context.write(json_dumps(raw, indent=indent))
-        else:
-            self.context.modeling.instance.dump()
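A rough sketch of how these two chains are meant to be driven, assuming ``context`` is a fully prepared consumption context (defined elsewhere in the package) and that ``ConsumerChain`` exposes the same ``consume()`` entry point as a single ``Consumer``:

from aria.parser.consumption.modeling import ServiceTemplate, ServiceInstance

def parse_and_instantiate(context):
    # template-level chain: derive -> coerce values -> validate
    ServiceTemplate(context).consume()
    # instance-level chain: instantiate, satisfy requirements, find hosts,
    # configure operations, re-coercing values between phases
    ServiceInstance(context).consume()
    return context.modeling.instance
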
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/presentation.py b/apache-ariatosca-0.1.1/aria/parser/consumption/presentation.py
deleted file mode 100644
index 542b3f0..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/presentation.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...utils.threading import FixedThreadPoolExecutor
-from ...utils.formatting import json_dumps, yaml_dumps
-from ..loading import UriLocation
-from ..reading import AlreadyReadException
-from ..presentation import PresenterNotFoundError
-from .consumer import Consumer
-
-
-class Read(Consumer):
-    """
-    Reads the presentation, handling imports recursively.
-
-    It works by consuming a data source via appropriate :class:`~aria.parser.loading.Loader`,
-    :class:`~aria.parser.reading.Reader`, and :class:`~aria.parser.presentation.Presenter`
-    instances.
-
-    It supports agnostic raw data composition for presenters that have
-    ``_get_import_locations`` and ``_merge_import``.
-
-    To improve performance, loaders are called asynchronously on separate threads.
-
-    Note that parsing may internally trigger more than one loading/reading/presentation
-    cycle, for example if the agnostic raw data has dependencies that must also be parsed.
-    """
-
-    def consume(self):
-        if self.context.presentation.location is None:
-            self.context.validation.report('Presentation consumer: missing location')
-            return
-
-        presenter = None
-        imported_presentations = None
-
-        executor = FixedThreadPoolExecutor(size=self.context.presentation.threads,
-                                           timeout=self.context.presentation.timeout)
-        executor.print_exceptions = self.context.presentation.print_exceptions
-        try:
-            presenter = self._present(self.context.presentation.location, None, None, executor)
-            executor.drain()
-
-            # Handle exceptions
-            for e in executor.exceptions:
-                self._handle_exception(e)
-
-            imported_presentations = executor.returns
-        finally:
-            executor.close()
-
-        # Merge imports
-        if (imported_presentations is not None) and hasattr(presenter, '_merge_import'):
-            for imported_presentation in imported_presentations:
-                okay = True
-                if hasattr(presenter, '_validate_import'):
-                    okay = presenter._validate_import(self.context, imported_presentation)
-                if okay:
-                    presenter._merge_import(imported_presentation)
-
-        self.context.presentation.presenter = presenter
-
-    def dump(self):
-        if self.context.has_arg_switch('yaml'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.presentation.presenter._raw
-            self.context.write(yaml_dumps(raw, indent=indent))
-        elif self.context.has_arg_switch('json'):
-            indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.presentation.presenter._raw
-            self.context.write(json_dumps(raw, indent=indent))
-        else:
-            self.context.presentation.presenter._dump(self.context)
-
-    def _handle_exception(self, e):
-        if isinstance(e, AlreadyReadException):
-            return
-        super(Read, self)._handle_exception(e)
-
-    def _present(self, location, origin_location, presenter_class, executor):
-        # Link the context to this thread
-        self.context.set_thread_local()
-
-        raw = self._read(location, origin_location)
-
-        if self.context.presentation.presenter_class is not None:
-            # The presenter class we specified in the context overrides everything
-            presenter_class = self.context.presentation.presenter_class
-        else:
-            try:
-                presenter_class = self.context.presentation.presenter_source.get_presenter(raw)
-            except PresenterNotFoundError:
-                if presenter_class is None:
-                    raise
-            # We'll use the presenter class we were given (from the presenter that imported us)
-            if presenter_class is None:
-                raise PresenterNotFoundError('presenter not found')
-
-        presentation = presenter_class(raw=raw)
-
-        if presentation is not None and hasattr(presentation, '_link_locators'):
-            presentation._link_locators()
-
-        # Submit imports to executor
-        if hasattr(presentation, '_get_import_locations'):
-            import_locations = presentation._get_import_locations(self.context)
-            if import_locations:
-                for import_location in import_locations:
-                    # The imports inherit the parent presenter class and use the current location as
-                    # their origin location
-                    import_location = UriLocation(import_location)
-                    executor.submit(self._present, import_location, location, presenter_class,
-                                    executor)
-
-        return presentation
-
-    def _read(self, location, origin_location):
-        if self.context.reading.reader is not None:
-            return self.context.reading.reader.read()
-        loader = self.context.loading.loader_source.get_loader(self.context.loading, location,
-                                                               origin_location)
-        reader = self.context.reading.reader_source.get_reader(self.context.reading, location,
-                                                               loader)
-        return reader.read()
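A hedged sketch of the typical entry point: point the presentation context at a service template URI and let ``Read`` resolve the imports. Again, ``context`` is assumed to be a prepared consumption context.

from aria.parser.loading import UriLocation
from aria.parser.consumption.presentation import Read

def read_service_template(context, uri):
    # Read.consume() requires presentation.location to be set beforehand
    context.presentation.location = UriLocation(uri)
    Read(context).consume()
    return context.presentation.presenter
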
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/style.py b/apache-ariatosca-0.1.1/aria/parser/consumption/style.py
deleted file mode 100644
index 72892b9..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/style.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.console import Colored, indent
-from ...utils.formatting import safe_repr
-
-
-class Style(object):
-    def __init__(self, indentation=2):
-        self.indentation = indentation
-
-    @property
-    def indent(self):
-        return indent(self.indentation)
-
-    @staticmethod
-    def section(value):
-        return Colored.cyan(value, bold=True)
-
-    @staticmethod
-    def type(value):
-        return Colored.blue(value, bold=True)
-
-    @staticmethod
-    def node(value):
-        return Colored.red(value, bold=True)
-
-    @staticmethod
-    def property(value):
-        return Colored.magenta(value, bold=True)
-
-    @staticmethod
-    def literal(value):
-        return Colored.magenta(safe_repr(value))
-
-    @staticmethod
-    def meta(value):
-        return Colored.green(value)
diff --git a/apache-ariatosca-0.1.1/aria/parser/consumption/validation.py b/apache-ariatosca-0.1.1/aria/parser/consumption/validation.py
deleted file mode 100644
index a7bc3b8..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/consumption/validation.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from .consumer import Consumer
-
-
-class Validate(Consumer):
-    """
-    Validates the presentation.
-    """
-
-    def consume(self):
-        if self.context.presentation.presenter is None:
-            self.context.validation.report('Validation consumer: missing presenter')
-            return
-
-        self.context.presentation.presenter._validate(self.context)
diff --git a/apache-ariatosca-0.1.1/aria/parser/exceptions.py b/apache-ariatosca-0.1.1/aria/parser/exceptions.py
deleted file mode 100644
index a1f7012..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/exceptions.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Parser exceptions.
-"""
-
-from ..exceptions import AriaException
-from .validation import Issue
-
-
-class InvalidValueError(AriaException):
-    """
-    ARIA error: value is invalid.
-    """
-
-    def __init__(self, message, cause=None, cause_tb=None, location=None, line=None, column=None,
-                 locator=None, snippet=None, level=Issue.FIELD):
-        super(InvalidValueError, self).__init__(message, cause, cause_tb)
-        self.issue = Issue(message, location=location, line=line, column=column, locator=locator,
-                           snippet=snippet, level=level, exception=cause)
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/context.py b/apache-ariatosca-0.1.1/aria/parser/loading/context.py
deleted file mode 100644
index 59727c9..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/context.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...utils.collections import StrictList
-from .source import DefaultLoaderSource
-
-
-class LoadingContext(object):
-    """
-    Loading context.
-
-    :ivar loader_source: for finding loader instances
-    :vartype loader_source: ~aria.parser.loading.LoaderSource
-    :ivar prefixes: additional prefixes for :class:`UriTextLoader`
-    :vartype prefixes: [:obj:`basestring`]
-    """
-
-    def __init__(self):
-        self.loader_source = DefaultLoaderSource()
-        self.prefixes = StrictList(value_class=basestring)
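A small illustration of configuring the context; ``prefixes`` is a ``StrictList`` and accepts only strings:

from aria.parser.loading.context import LoadingContext

context = LoadingContext()
# extra places for UriTextLoader to search when a relative URI is not found as-is
context.prefixes.append('/etc/aria/service-templates')
context.prefixes.append('http://example.org/service-templates/')
# context.prefixes.append(42)   # would be rejected: prefixes only accepts strings
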
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/exceptions.py b/apache-ariatosca-0.1.1/aria/parser/loading/exceptions.py
deleted file mode 100644
index 6e8267a..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/exceptions.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...exceptions import AriaException
-
-
-class LoaderException(AriaException):
-    """
-    ARIA loader exception.
-    """
-
-
-class LoaderNotFoundError(LoaderException):
-    """
-    ARIA loader error: loader not found for source.
-    """
-
-
-class DocumentNotFoundException(LoaderException):
-    """
-    ARIA loader exception: document not found.
-    """
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/file.py b/apache-ariatosca-0.1.1/aria/parser/loading/file.py
deleted file mode 100644
index a02bd69..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/file.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import codecs
-
-from .loader import Loader
-from .exceptions import LoaderException, DocumentNotFoundException
-
-
-class FileTextLoader(Loader):
-    """
-    ARIA file text loader.
-
-    Extracts a text document from a file. The default encoding is UTF-8, but any other supported
-    encoding can be specified instead.
-    """
-
-    def __init__(self, context, path, encoding='utf-8'):
-        self.context = context
-        self.path = path
-        self.encoding = encoding
-        self._file = None
-
-    def open(self):
-        try:
-            self._file = codecs.open(self.path, mode='r', encoding=self.encoding, buffering=1)
-        except IOError as e:
-            if e.errno == 2:
-                raise DocumentNotFoundException('file not found: "%s"' % self.path, cause=e)
-            else:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
-        except Exception as e:
-            raise LoaderException('file error: "%s"' % self.path, cause=e)
-
-    def close(self):
-        if self._file is not None:
-            try:
-                self._file.close()
-            except IOError as e:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
-            except Exception as e:
-                raise LoaderException('file error: "%s"' % self.path, cause=e)
-
-    def load(self):
-        if self._file is not None:
-            try:
-                return self._file.read()
-            except IOError as e:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
-            except Exception as e:
-                raise LoaderException('file error: "%s"' % self.path, cause=e)
-        return None
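A minimal usage sketch; in the code above the ``context`` argument is only stored, never dereferenced, so ``None`` is enough for a standalone demonstration:

from aria.parser.loading.file import FileTextLoader
from aria.parser.loading.exceptions import DocumentNotFoundException

loader = FileTextLoader(None, 'service-template.yaml', encoding='utf-8')
try:
    loader.open()              # raises DocumentNotFoundException if the file is missing
    text = loader.load()       # the whole document as a single decoded string
finally:
    loader.close()
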
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/literal.py b/apache-ariatosca-0.1.1/aria/parser/loading/literal.py
deleted file mode 100644
index 7865008..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/literal.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from .loader import Loader
-
-
-class LiteralLoader(Loader):
-    """
-    ARIA literal loader.
-
-    See :class:`~aria.parser.loading.LiteralLocation`.
-    """
-
-    def __init__(self, location):
-        self.location = location
-
-    def load(self):
-        return self.location.content
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/loader.py b/apache-ariatosca-0.1.1/aria/parser/loading/loader.py
deleted file mode 100644
index e1abfbf..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/loader.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Loader(object):
-    """
-    Base class for ARIA loaders.
-
-    Loaders extract a document by consuming a document source.
-
-    Though the extracted document is often textual (a string or string-like
-    data), loaders may provide any format.
-    """
-
-    def open(self):
-        pass
-
-    def close(self):
-        pass
-
-    def load(self):
-        raise NotImplementedError
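The contract is deliberately small: ``open`` and ``close`` are optional hooks, only ``load`` must be implemented. A toy, hypothetical subclass:

import os

from aria.parser.loading import Loader

class EnvironmentLoader(Loader):
    """Hypothetical loader that extracts a document from an environment variable."""

    def __init__(self, variable):
        self.variable = variable

    def load(self):
        # open() and close() are inherited no-ops; only load() is required
        return os.environ.get(self.variable)

document = EnvironmentLoader('MY_TEMPLATE').load()
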
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/location.py b/apache-ariatosca-0.1.1/aria/parser/loading/location.py
deleted file mode 100644
index 902e856..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/location.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import os
-
-from ...utils.uris import as_file
-
-
-class Location(object):
-    """
-    Base class for ARIA locations.
-
-    Locations are used by :class:`~aria.parser.loading.LoaderSource` to delegate to
-    an appropriate :class:`~aria.parser.loading.Loader`.
-    """
-
-    def is_equivalent(self, location):
-        raise NotImplementedError
-
-    @property
-    def prefix(self):
-        return None
-
-
-class UriLocation(Location):
-    """
-    A URI location can be absolute or relative, and may or may not include a scheme.
-
-    If no scheme is included, it should be treated as a filesystem path.
-
-    See :class:`~aria.parser.loading.UriTextLoader`.
-    """
-
-    def __init__(self, uri):
-        self.uri = uri
-
-    def is_equivalent(self, location):
-        return isinstance(location, UriLocation) and (location.uri == self.uri)
-
-    @property
-    def prefix(self):
-        prefix = os.path.dirname(self.uri)
-        if prefix and (as_file(prefix) is None):
-            # Yes, it's weird, but dirname handles URIs,
-            # too: http://stackoverflow.com/a/35616478/849021
-            # We just need to massage it with a trailing slash
-            prefix += '/'
-        return prefix
-
-    def __str__(self):
-        return self.uri
-
-
-class LiteralLocation(Location):
-    """
-    A location that embeds content.
-
-    See :class:`~aria.parser.loading.LiteralLoader`.
-    """
-
-    def __init__(self, content, name='literal'):
-        self.content = content
-        self.name = name
-
-    def is_equivalent(self, location):
-        return isinstance(location, LiteralLocation) and (location.content == self.content)
-
-    def __str__(self):
-        return '<%s>' % self.name
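A quick illustration of the prefix behaviour described above; the expected values in the comments assume that ``as_file`` recognizes scheme-less paths as files and returns ``None`` for ``http`` URIs:

from aria.parser.loading import UriLocation, LiteralLocation

remote = UriLocation('http://example.org/templates/web-app.yaml')
local = UriLocation('templates/web-app.yaml')
print(remote.prefix)    # 'http://example.org/templates/' -- trailing slash added for URIs
print(local.prefix)     # 'templates' -- plain dirname for filesystem paths

print(LiteralLocation('node_templates: {}'))    # <literal>
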
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/request.py b/apache-ariatosca-0.1.1/aria/parser/loading/request.py
deleted file mode 100644
index a809347..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/request.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import tempfile
-
-from requests import Session
-from requests.exceptions import (ConnectionError, InvalidSchema)
-from cachecontrol import CacheControl
-from cachecontrol.caches import FileCache
-
-from .exceptions import LoaderException, DocumentNotFoundException
-from .loader import Loader
-
-SESSION = None
-SESSION_CACHE_PATH = os.path.join(tempfile.gettempdir(), 'aria_requests')
-
-
-class RequestLoader(Loader):
-    """
-    Base class for ARIA request-based loaders.
-
-    Extracts a document from a URI by performing a request.
-
-    Note that the "file:" scheme is not supported: :class:`FileTextLoader` should
-    be used instead.
-    """
-
-    def __init__(self, context, uri, headers=None):
-        if headers is None:
-            headers = {}
-        self.context = context
-        self.uri = uri
-        self.headers = headers
-        self._response = None
-
-    def load(self):
-        pass
-
-    def open(self):
-        global SESSION
-        if SESSION is None:
-            SESSION = CacheControl(Session(), cache=FileCache(SESSION_CACHE_PATH))
-
-        try:
-            self._response = SESSION.get(self.uri, headers=self.headers)
-        except InvalidSchema as e:
-            raise DocumentNotFoundException('document not found: "%s"' % self.uri, cause=e)
-        except ConnectionError as e:
-            raise LoaderException('request connection error: "%s"' % self.uri, cause=e)
-        except Exception as e:
-            raise LoaderException('request error: "%s"' % self.uri, cause=e)
-
-        status = self._response.status_code
-        if status == 404:
-            self._response = None
-            raise DocumentNotFoundException('document not found: "%s"' % self.uri)
-        elif status != 200:
-            self._response = None
-            raise LoaderException('request error %d: "%s"' % (status, self.uri))
-
-
-class RequestTextLoader(RequestLoader):
-    """
-    ARIA request-based text loader.
-    """
-
-    def load(self):
-        if self._response is not None:
-            try:
-                if self._response.encoding is None:
-                    self._response.encoding = 'utf8'
-                return self._response.text
-            except Exception as e:
-                raise LoaderException('request error: "%s"' % self.uri, cause=e)
-        return None
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/source.py b/apache-ariatosca-0.1.1/aria/parser/loading/source.py
deleted file mode 100644
index bcd6dd1..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/source.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .location import LiteralLocation, UriLocation
-from .literal import LiteralLoader
-from .uri import UriTextLoader
-
-
-class LoaderSource(object):
-    """
-    Base class for ARIA loader sources.
-
-    Loader sources provide appropriate :class:`Loader` instances for locations.
-    """
-
-    def get_loader(self, context, location, origin_location):
-        raise NotImplementedError
-
-
-class DefaultLoaderSource(LoaderSource):
-    """
-    The default ARIA loader source will generate a :class:`UriTextLoader` for
-    :class:`UriLocation` and a :class:`LiteralLoader` for a :class:`LiteralLocation`.
-    """
-
-    def get_loader(self, context, location, origin_location):
-        if isinstance(location, UriLocation):
-            return UriTextLoader(context, location, origin_location)
-        elif isinstance(location, LiteralLocation):
-            return LiteralLoader(location)
-
-        return super(DefaultLoaderSource, self).get_loader(context, location, origin_location)
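Dispatch is purely by location type; for a ``LiteralLocation`` no context is needed at all, which makes for a compact demonstration:

from aria.parser.loading.source import DefaultLoaderSource
from aria.parser.loading import LiteralLocation

source = DefaultLoaderSource()
loader = source.get_loader(None, LiteralLocation('{"hello": "world"}'), None)
print(loader.load())    # prints {"hello": "world"} -- LiteralLoader returns the embedded content
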
diff --git a/apache-ariatosca-0.1.1/aria/parser/loading/uri.py b/apache-ariatosca-0.1.1/aria/parser/loading/uri.py
deleted file mode 100644
index a5a18e6..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/loading/uri.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-from urlparse import urljoin
-
-from ...extension import parser
-from ...utils.collections import StrictList
-from ...utils.uris import as_file
-from .loader import Loader
-from .file import FileTextLoader
-from .request import RequestTextLoader
-from .exceptions import DocumentNotFoundException
-
-
-class UriTextLoader(Loader):
-    """
-    Base class for ARIA URI loaders.
-
-    See :class:`~aria.parser.loading.UriLocation`.
-
-    Supports a list of search prefixes that are tried in order if the URI cannot be found
-    as given. The prefixes are assembled in the following order:
-
-    * If ``origin_location`` is provided, its prefix comes first.
-    * Then the prefixes in the :class:`LoadingContext` will be added.
-    * Finally, the parser can supply a ``uri_loader_prefix`` function with extra prefixes.
-    """
-
-    def __init__(self, context, location, origin_location=None):
-        self.context = context
-        self.location = location
-        self._prefixes = StrictList(value_class=basestring)
-        self._loader = None
-
-        def add_prefix(prefix):
-            if prefix and (prefix not in self._prefixes):
-                self._prefixes.append(prefix)
-
-        def add_prefixes(prefixes):
-            for prefix in prefixes:
-                add_prefix(prefix)
-
-        if origin_location is not None:
-            add_prefix(origin_location.prefix)
-
-        add_prefixes(context.prefixes)
-        add_prefixes(parser.uri_loader_prefix())
-
-    def open(self):
-        try:
-            self._open(self.location.uri)
-            return
-        except DocumentNotFoundException:
-            # Try prefixes in order
-            for prefix in self._prefixes:
-                prefix_as_file = as_file(prefix)
-                if prefix_as_file is not None:
-                    uri = os.path.join(prefix_as_file, self.location.uri)
-                else:
-                    uri = urljoin(prefix, self.location.uri)
-                try:
-                    self._open(uri)
-                    return
-                except DocumentNotFoundException:
-                    pass
-        raise DocumentNotFoundException('document not found at URI: "%s"' % self.location)
-
-    def close(self):
-        if self._loader is not None:
-            self._loader.close()
-
-    def load(self):
-        return self._loader.load() if self._loader is not None else None
-
-    def _open(self, uri):
-        the_file = as_file(uri)
-        if the_file is not None:
-            uri = the_file
-            loader = FileTextLoader(self.context, uri)
-        else:
-            loader = RequestTextLoader(self.context, uri)
-        loader.open() # might raise an exception
-        self._loader = loader
-        self.location.uri = uri
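Putting the pieces together, a hedged sketch of a load with fallbacks, where ``loading_context`` is assumed to be a configured ``LoadingContext``:

from aria.parser.loading import UriLocation, UriTextLoader

def load_with_fallbacks(loading_context, uri, origin=None):
    loader = UriTextLoader(loading_context, UriLocation(uri), origin_location=origin)
    loader.open()              # tries the URI, then origin.prefix, then loading_context.prefixes
    try:
        return loader.load()
    finally:
        loader.close()
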
diff --git a/apache-ariatosca-0.1.1/aria/parser/modeling/context.py b/apache-ariatosca-0.1.1/aria/parser/modeling/context.py
deleted file mode 100644
index 3d75617..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/modeling/context.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from ...utils.collections import StrictDict, prune
-from ...utils.uuid import generate_uuid
-
-
-class IdType(object):
-    LOCAL_SERIAL = 0
-    """
-    Locally unique serial ID: a running integer.
-    """
-
-    LOCAL_RANDOM = 1
-    """
-    Locally unique ID: 6 random safe characters.
-    """
-
-    UNIVERSAL_RANDOM = 2
-    """
-    Universally unique ID (UUID): 22 random safe characters.
-    """
-
-
-class ModelingContext(object):
-    """
-    Modeling context.
-
-    :ivar template: generated service template
-    :vartype template: aria.modeling.models.ServiceTemplate
-    :ivar instance: generated service instance
-    :vartype instance: aria.modeling.models.Service
-    :ivar node_id_format: format for node instance IDs
-    :vartype node_id_format: basestring
-    :ivar id_type: type of IDs to use for instances (one of the ``IdType`` constants)
-    :vartype id_type: int
-    :ivar id_max_length: maximum allowed instance ID length
-    :vartype id_max_length: int
-    :ivar inputs: input values
-    :vartype inputs: {:obj:`basestring`, object}
-    """
-
-    def __init__(self):
-        self.template = None
-        self.instance = None
-        self.node_id_format = '{template}_{id}'
-        #self.id_type = IdType.LOCAL_SERIAL
-        #self.id_type = IdType.LOCAL_RANDOM
-        self.id_type = IdType.UNIVERSAL_RANDOM
-        self.id_max_length = 63 # See: http://www.faqs.org/rfcs/rfc1035.html
-        self.inputs = StrictDict(key_class=basestring)
-
-        self._serial_id_counter = itertools.count(1)
-        self._locally_unique_ids = set()
-
-    def store(self, model_storage):
-        if self.template is not None:
-            model_storage.service_template.put(self.template)
-        if self.instance is not None:
-            model_storage.service.put(self.instance)
-
-    def generate_id(self):
-        if self.id_type == IdType.LOCAL_SERIAL:
-            return self._serial_id_counter.next()
-
-        elif self.id_type == IdType.LOCAL_RANDOM:
-            the_id = generate_uuid(6)
-            while the_id in self._locally_unique_ids:
-                the_id = generate_uuid(6)
-            self._locally_unique_ids.add(the_id)
-            return the_id
-
-        return generate_uuid()
-
-    def set_input(self, name, value):
-        self.inputs[name] = value
-        # TODO: coerce to validate type
-
-    @property
-    def template_as_raw(self):
-        raw = self.template.as_raw
-        prune(raw)
-        return raw
-
-    @property
-    def instance_as_raw(self):
-        raw = self.instance.as_raw
-        prune(raw)
-        return raw
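A small, self-contained illustration of ID generation and input handling (Python 2, like the rest of the code base):

from aria.parser.modeling.context import ModelingContext, IdType

context = ModelingContext()
context.id_type = IdType.LOCAL_SERIAL     # running integers instead of the default UUIDs
print(context.generate_id())              # 1
print(context.generate_id())              # 2

context.set_input('port', 8080)
print(dict(context.inputs))               # {'port': 8080}
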
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/context.py b/apache-ariatosca-0.1.1/aria/parser/presentation/context.py
deleted file mode 100644
index 44a6f82..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/context.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from .source import DefaultPresenterSource
-
-
-class PresentationContext(object):
-    """
-    Presentation context.
-
-    :ivar presenter: the generated presenter instance
-    :vartype presenter: ~aria.parser.presentation.Presenter
-    :ivar location: the location from which the presenter will be generated
-    :vartype location: ~aria.parser.loading.Location
-    :ivar presenter_source: for finding presenter classes
-    :vartype presenter_source: ~aria.parser.presentation.PresenterSource
-    :ivar presenter_class: overrides ``presenter_source`` with a specific class
-    :vartype presenter_class: type
-    :ivar import_profile: whether to import the profile by default (defaults to ``True``)
-    :vartype import_profile: bool
-    :ivar threads: number of threads to use when reading data
-    :vartype threads: int
-    :ivar timeout: timeout in seconds for loading data
-    :vartype timeout: float
-    :ivar print_exceptions: whether to print exceptions while reading data
-    :vartype print_exceptions: bool
-    """
-
-    def __init__(self):
-        self.presenter = None
-        self.location = None
-        self.presenter_source = DefaultPresenterSource()
-        self.presenter_class = None  # overrides
-        self.import_profile = True
-        self.threads = 8  # reasonable default for networking multithreading
-        self.timeout = 10  # in seconds
-        self.print_exceptions = False
-
-    def get(self, *names):
-        """
-        Gets attributes recursively from the presenter.
-        """
-
-        return self.presenter._get(*names) if self.presenter is not None else None
-
-    def get_from_dict(self, *names):
-        """
-        Gets attributes recursively from the presenter, except for the last name which is used
-        to get a value from the last dict.
-        """
-
-        return self.presenter._get_from_dict(*names) if self.presenter is not None else None
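For illustration only (the field names below are hypothetical TOSCA-style examples); both getters simply return ``None`` until the ``Read`` consumer has populated ``presenter``:

from aria.parser.presentation.context import PresentationContext

presentation = PresentationContext()
presentation.threads = 4        # fewer concurrent import loaders
presentation.timeout = 30.0     # allow slower networks

# after Read has run, these walk the presenter tree:
version = presentation.get('service_template', 'tosca_definitions_version')
node_type = presentation.get_from_dict('service_template', 'node_types', 'WebServer')
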
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/exceptions.py b/apache-ariatosca-0.1.1/aria/parser/presentation/exceptions.py
deleted file mode 100644
index cd7eb07..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/exceptions.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...exceptions import AriaException
-
-
-class PresenterException(AriaException):
-    """
-    ARIA presenter exception.
-    """
-
-
-class PresenterNotFoundError(PresenterException):
-    """
-    ARIA presenter error: presenter not found for raw.
-    """
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/field_validators.py b/apache-ariatosca-0.1.1/aria/parser/presentation/field_validators.py
deleted file mode 100644
index aa04913..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/field_validators.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ..validation import Issue
-from .utils import (parse_types_dict_names, report_issue_for_unknown_type,
-                    report_issue_for_parent_is_self, report_issue_for_unknown_parent_type,
-                    report_issue_for_circular_type_hierarchy)
-
-
-def type_validator(type_name, *types_dict_names):
-    """
-    Makes sure that the field refers to an existing type defined in the root presenter.
-
-    The arguments from the second onwards are used to locate a nested field under
-    ``service_template`` under the root presenter. The first of these can optionally be a function,
-    in which case it will be called to convert type names. This can be used to support shorthand
-    type names, aliases, etc.
-
-    Can be used with the :func:`field_validator` decorator.
-    """
-
-    types_dict_names, convert = parse_types_dict_names(types_dict_names)
-
-    def validator_fn(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        # Make sure type exists
-        value = getattr(presentation, field.name)
-        if value is not None:
-            types_dict = context.presentation.get('service_template', *types_dict_names) or {}
-
-            if convert:
-                value = convert(context, value, types_dict)
-
-            if value not in types_dict:
-                report_issue_for_unknown_type(context, presentation, type_name, field.name)
-
-    return validator_fn
-
-
-def list_type_validator(type_name, *types_dict_names):
-    """
-    Makes sure that the field's elements refer to existing types defined in the root presenter.
-
-    Assumes that the field is a list.
-
-    The arguments from the second onwards are used to locate a nested field under
-    ``service_template`` under the root presenter. The first of these can optionally be a function,
-    in which case it will be called to convert type names. This can be used to support shorthand
-    type names, aliases, etc.
-
-    Can be used with the :func:`field_validator` decorator.
-    """
-
-    types_dict_names, convert = parse_types_dict_names(types_dict_names)
-
-    def validator_fn(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        # Make sure types exist
-        values = getattr(presentation, field.name)
-        if values is not None:
-            types_dict = context.presentation.get('service_template', *types_dict_names) or {}
-
-            for value in values:
-                if convert:
-                    value = convert(context, value, types_dict)
-
-                if value not in types_dict:
-                    report_issue_for_unknown_type(context, presentation, type_name, field.name)
-
-    return validator_fn
-
-
-def list_length_validator(length):
-    """
-    Makes sure the field has exactly a specific number of elements.
-
-    Assumes that the field is a list.
-
-    Can be used with the :func:`field_validator` decorator.
-    """
-
-    def validator_fn(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        # Make sure list has exactly the length
-        values = getattr(presentation, field.name)
-        if isinstance(values, list):
-            if len(values) != length:
-                context.validation.report('field "%s" does not have exactly %d elements in "%s"'
-                                          % (field.name, length, presentation._fullname),
-                                          locator=presentation._get_child_locator(field.name),
-                                          level=Issue.FIELD)
-
-    return validator_fn
-
-
-def derived_from_validator(*types_dict_names):
-    """
-    Makes sure that the field refers to a valid parent type defined in the root presenter.
-
-    Checks that we do not derive from ourselves and that we do not cause a circular hierarchy.
-
-    The arguments are used to locate a nested field under ``service_template`` under the root
-    presenter. The first of these can optionally be a function, in which case it will be called to
-    convert type names. This can be used to support shorthand type names, aliases, etc.
-
-    Can be used with the :func:`field_validator` decorator.
-    """
-
-    types_dict_names, convert = parse_types_dict_names(types_dict_names)
-
-    def validator_fn(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        value = getattr(presentation, field.name)
-        if value is not None:
-            types_dict = context.presentation.get('service_template', *types_dict_names) or {}
-
-            if convert:
-                value = convert(context, value, types_dict)
-
-            # Make sure not derived from self
-            if value == presentation._name:
-                report_issue_for_parent_is_self(context, presentation, field.name)
-            # Make sure derived from type exists
-            elif value not in types_dict:
-                report_issue_for_unknown_parent_type(context, presentation, field.name)
-            else:
-                # Make sure derivation hierarchy is not circular
-                hierarchy = [presentation._name]
-                presentation_tmp = presentation
-                while presentation_tmp.derived_from is not None:
-                    derived_from = presentation_tmp.derived_from
-                    if convert:
-                        derived_from = convert(context, derived_from, types_dict)
-
-                    if derived_from == presentation_tmp._name:
-                        # This should cause a validation issue at that type
-                        break
-                    elif derived_from not in types_dict:
-                        # This should cause a validation issue at that type
-                        break
-                    presentation_tmp = types_dict[derived_from]
-                    if presentation_tmp._name in hierarchy:
-                        report_issue_for_circular_type_hierarchy(context, presentation, field.name)
-                        break
-                    hierarchy.append(presentation_tmp._name)
-
-    return validator_fn
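The factories above return plain functions with the signature ``(field, presentation, context)``; they are normally attached to a field via the ``field_validator`` decorator mentioned in the docstrings (not shown here) rather than called by hand. A hedged sketch of constructing them:

from aria.parser.presentation.field_validators import (type_validator,
                                                       list_length_validator)

# looks up the value under service_template.node_types in the root presenter
validate_node_type = type_validator('node type', 'node_types')

# insists on exactly two elements, e.g. for a [lower, upper] range
exactly_two = list_length_validator(2)
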
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/fields.py b/apache-ariatosca-0.1.1/aria/parser/presentation/fields.py
deleted file mode 100644
index 959bad1..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/fields.py
+++ /dev/null
@@ -1,756 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import threading
-from functools import wraps
-from types import MethodType
-
-from ...exceptions import AriaException
-from ...utils.collections import FrozenDict, FrozenList, deepcopy_with_locators, merge, OrderedDict
-from ...utils.caching import cachedmethod
-from ...utils.console import puts
-from ...utils.formatting import as_raw, safe_repr
-from ...utils.type import full_type_name
-from ...utils.exceptions import print_exception
-from ..exceptions import InvalidValueError
-
-from .null import NULL
-from .utils import validate_primitive
-
-#
-# Class decorators
-#
-
-# pylint: disable=unused-argument
-
-def has_fields(cls):
-    """
-    Class decorator for validated field support.
-
-    1. Adds a ``FIELDS`` class property that is a dict of all the fields. Will inherit and merge
-       ``FIELDS`` properties from base classes if they have them.
-
-    2. Generates automatic ``@property`` implementations for the fields with the help of a set of
-       special function decorators.
-
-    The class also works with the Python dict protocol, so that fields can be accessed via dict
-    semantics. The functionality is identical to that of using attribute access.
-
-    The class will also gain two utility methods, ``_iter_field_names`` and ``_iter_fields``.
-    """
-
-    # Make sure we have FIELDS
-    if 'FIELDS' not in cls.__dict__:
-        setattr(cls, 'FIELDS', OrderedDict())
-
-    # Inherit FIELDS from base classes
-    for base in cls.__bases__:
-        if hasattr(base, 'FIELDS'):
-            cls.FIELDS.update(base.FIELDS)
-
-    # We could do this:
-    #
-    #  for name, field in cls.__dict__.iteritems():
-    #
-    # But dir() is better because it has a deterministic order (alphabetical)
-
-    for name in dir(cls):
-        field = getattr(cls, name)
-
-        if isinstance(field, Field):
-            # Accumulate
-            cls.FIELDS[name] = field
-
-            field.name = name
-            field.container_cls = cls
-
-            # This function is here just to create an enclosed scope for "field"
-            def closure(field):
-
-                # By convention, we have the getter wrap the original function.
-                # (It is, for example, where the Python help() function will look for
-                # docstrings when encountering a property.)
-                @cachedmethod
-                @wraps(field.func)
-                def getter(self):
-                    return field.get(self, None)
-
-                def setter(self, value):
-                    field.set(self, None, value)
-
-                # Convert to Python property
-                return property(fget=getter, fset=setter)
-
-            setattr(cls, name, closure(field))
-
-    # Bind methods
-    setattr(cls, '_iter_field_names', MethodType(has_fields_iter_field_names, None, cls))
-    setattr(cls, '_iter_fields', MethodType(has_fields_iter_fields, None, cls))
-
-    # Behave like a dict
-    setattr(cls, '__len__', MethodType(has_fields_len, None, cls))
-    setattr(cls, '__getitem__', MethodType(has_fields_getitem, None, cls))
-    setattr(cls, '__setitem__', MethodType(has_fields_setitem, None, cls))
-    setattr(cls, '__delitem__', MethodType(has_fields_delitem, None, cls))
-    setattr(cls, '__iter__', MethodType(has_fields_iter, None, cls))
-    setattr(cls, '__contains__', MethodType(has_fields_contains, None, cls))
-
-    return cls
-
-
-def short_form_field(name):
-    """
-    Class decorator for specifying the short form field.
-
-    The class must be decorated with :func:`has_fields`.
-    """
-
-    def decorator(cls):
-        if hasattr(cls, name) and hasattr(cls, 'FIELDS') and (name in cls.FIELDS):
-            setattr(cls, 'SHORT_FORM_FIELD', name)
-            return cls
-        else:
-            raise AttributeError('@short_form_field must be used with '
-                                 'a Field name in @has_fields class')
-    return decorator
-
-
-def allow_unknown_fields(cls):
-    """
-    Class decorator specifying that the class allows unknown fields.
-
-    The class must be decorated with :func:`has_fields`.
-    """
-
-    if hasattr(cls, 'FIELDS'):
-        setattr(cls, 'ALLOW_UNKNOWN_FIELDS', True)
-        return cls
-    else:
-        raise AttributeError('@allow_unknown_fields must be used with a @has_fields class')
-
-#
-# Method decorators
-#
-
-
-def primitive_field(cls=None, default=None, allowed=None, required=False):
-    """
-    Method decorator for primitive fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='primitive', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def primitive_list_field(cls=None, default=None, allowed=None, required=False):
-    """
-    Method decorator for list of primitive fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='primitive_list', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def primitive_dict_field(cls=None, default=None, allowed=None, required=False):
-    """
-    Method decorator for dict of primitive fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-    def decorator(func):
-        return Field(field_variant='primitive_dict', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def primitive_dict_unknown_fields(cls=None, default=None, allowed=None, required=False):
-    """
-    Method decorator for dict of primitive fields, for all the fields that are
-    not already decorated.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='primitive_dict_unknown_fields', func=func, cls=cls,
-                     default=default, allowed=allowed, required=required)
-    return decorator
-
-
-def object_field(cls, default=None, allowed=None, required=False):
-    """
-    Method decorator for object fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-    def decorator(func):
-        return Field(field_variant='object', func=func, cls=cls, default=default, allowed=allowed,
-                     required=required)
-    return decorator
-
-
-def object_list_field(cls, default=None, allowed=None, required=False):
-    """
-    Method decorator for list of object fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='object_list', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def object_dict_field(cls, default=None, allowed=None, required=False):
-    """
-    Method decorator for dict of object fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='object_dict', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def object_sequenced_list_field(cls, default=None, allowed=None, required=False):
-    """
-    Method decorator for sequenced list of object fields.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-
-    def decorator(func):
-        return Field(field_variant='sequenced_object_list', func=func, cls=cls, default=default,
-                     allowed=allowed, required=required)
-    return decorator
-
-
-def object_dict_unknown_fields(cls, default=None, allowed=None, required=False):
-    """
-    Method decorator for dict of object fields, for all the fields that are not already decorated.
-
-    The function must be a method in a class decorated with :func:`has_fields`.
-    """
-    def decorator(func):
-        return Field(field_variant='object_dict_unknown_fields', func=func, cls=cls,
-                     default=default, allowed=allowed, required=required)
-    return decorator
-
-
-def field_getter(getter_func):
-    """
-    Method decorator for overriding the getter function of a field.
-
-    The signature of the getter function must be: ``f(field, presentation, context)``.
-    The default getter can be accessed as ``field.default_get(presentation, context)``.
-
-    The function must already be decorated with a field decorator.
-    """
-
-    def decorator(field):
-        if isinstance(field, Field):
-            field.get = MethodType(getter_func, field, Field)
-            return field
-        else:
-            raise AttributeError('@field_getter must be used with a Field')
-    return decorator
-
-
-def field_setter(setter_func):
-    """
-    Method decorator for overriding the setter function of a field.
-
-    The signature of the setter function must be: ``f(field, presentation, context, value)``.
-    The default setter can be accessed as ``field.default_set(presentation, context, value)``.
-
-    The function must already be decorated with a field decorator.
-    """
-
-    def decorator(field):
-        if isinstance(field, Field):
-            field.set = MethodType(setter_func, field, Field)
-            return field
-        else:
-            raise AttributeError('@field_setter must be used with a Field')
-    return decorator
-
-
-def field_validator(validator_fn):
-    """
-    Method decorator for overriding the validator function of a field.
-
-    The signature of the validator function must be: ``f(field, presentation, context)``.
-    The default validator can be accessed as ``field.default_validate(presentation, context)``.
-
-    The function must already be decorated with a field decorator.
-    """
-
-    def decorator(field):
-        if isinstance(field, Field):
-            field.validate = MethodType(validator_fn, field, Field)
-            return field
-        else:
-            raise AttributeError('@field_validator must be used with a Field')
-    return decorator
-
-#
-# Utils
-#
-
-
-def has_fields_iter_field_names(self):
-    for name in self.__class__.FIELDS:
-        yield name
-
-
-def has_fields_iter_fields(self):
-    return self.FIELDS.iteritems()
-
-
-def has_fields_len(self):
-    return len(self.__class__.FIELDS)
-
-
-def has_fields_getitem(self, key):
-    if not isinstance(key, basestring):
-        raise TypeError('key must be a string')
-    if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
-    return getattr(self, key)
-
-
-def has_fields_setitem(self, key, value):
-    if not isinstance(key, basestring):
-        raise TypeError('key must be a string')
-    if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
-    return setattr(self, key, value)
-
-
-def has_fields_delitem(self, key):
-    if not isinstance(key, basestring):
-        raise TypeError('key must be a string')
-    if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
-    return setattr(self, key, None)
-
-
-def has_fields_iter(self):
-    return self.__class__.FIELDS.iterkeys()
-
-
-def has_fields_contains(self, key):
-    if not isinstance(key, basestring):
-        raise TypeError('key must be a string')
-    return key in self.__class__.FIELDS
-
-
-class Field(object):
-    """
-    Field handler used by ``@has_fields`` decorator.
-    """
-
-    def __init__(self, field_variant, func, cls=None, default=None, allowed=None, required=False):
-        if cls == str:
-            # Use "unicode" instead of "str"
-            cls = unicode
-
-        self.container_cls = None
-        self.name = None
-        self.field_variant = field_variant
-        self.func = func
-        self.cls = cls
-        self.default = default
-        self.allowed = allowed
-        self.required = required
-
-    @property
-    def full_name(self):
-        return 'field "%s" in "%s"' % (self.name, full_type_name(self.container_cls))
-
-    @property
-    def full_cls_name(self):
-        name = full_type_name(self.cls)
-        if name == 'unicode':
-            # For simplicity, display "unicode" as "str"
-            name = 'str'
-        return name
-
-    def get(self, presentation, context):
-        return self.default_get(presentation, context)
-
-    def set(self, presentation, context, value):
-        return self.default_set(presentation, context, value)
-
-    def validate(self, presentation, context):
-        self.default_validate(presentation, context)
-
-    def get_locator(self, raw):
-        if hasattr(raw, '_locator'):
-            locator = raw._locator
-            if locator is not None:
-                return locator.get_child(self.name)
-        return None
-
-    def dump(self, presentation, context):
-        value = getattr(presentation, self.name)
-        if value is None:
-            return
-
-        dumper = getattr(self, '_dump_%s' % self.field_variant)
-        dumper(context, value)
-
-    def default_get(self, presentation, context):
-        # Handle raw
-
-        default_raw = (presentation._get_default_raw()
-                       if hasattr(presentation, '_get_default_raw')
-                       else None)
-
-        if default_raw is None:
-            raw = presentation._raw
-        else:
-            # Handle default raw value
-            raw = deepcopy_with_locators(default_raw)
-            merge(raw, presentation._raw)
-
-        # Handle unknown fields
-
-        if self.field_variant == 'primitive_dict_unknown_fields':
-            return self._get_primitive_dict_unknown_fields(presentation, raw, context)
-        elif self.field_variant == 'object_dict_unknown_fields':
-            return self._get_object_dict_unknown_fields(presentation, raw, context)
-
-        is_short_form_field = (self.container_cls.SHORT_FORM_FIELD == self.name
-                               if hasattr(self.container_cls, 'SHORT_FORM_FIELD')
-                               else False)
-        is_dict = isinstance(raw, dict)
-
-        # Find value
-
-        value = self._find_value(is_short_form_field, is_dict, raw)
-
-        # Handle required
-
-        if value is None:
-            if self.required:
-                raise InvalidValueError('required %s does not have a value' % self.full_name,
-                                        locator=self.get_locator(raw))
-            else:
-                return None
-
-        # Handle allowed values
-
-        if self.allowed is not None:
-            if value not in self.allowed:
-                raise InvalidValueError('%s is not %s'
-                                        % (self.full_name, ' or '.join([safe_repr(v)
-                                                                        for v in self.allowed])),
-                                        locator=self.get_locator(raw))
-
-        # Handle get according to variant
-
-        getter = getattr(self, '_get_%s' % self.field_variant, None)
-
-        if getter is None:
-            locator = self.get_locator(raw)
-            location = (' @%s' % locator) if locator is not None else ''
-            raise AttributeError('%s has unsupported field variant: "%s"%s'
-                                 % (self.full_name, self.field_variant, location))
-
-        return getter(presentation, raw, value, context)
-
-    def _find_value(self, is_short_form_field, is_dict, raw):
-        value = None
-        if is_short_form_field and not is_dict:
-            # Handle short form
-            value = raw
-        elif is_dict:
-            if self.name in raw:
-                value = raw[self.name]
-                if value is None:
-                    # An explicit null
-                    value = NULL
-            else:
-                value = self.default
-        return value
-
-    def default_set(self, presentation, context, value):
-        raw = presentation._raw
-        old = self.get(presentation, context)
-        raw[self.name] = value
-        try:
-            self.validate(presentation, context)
-        except Exception as e:
-            raw[self.name] = old
-            raise e
-        return old
-
-    def default_validate(self, presentation, context):
-        value = None
-
-        try:
-            value = self.get(presentation, context)
-        except AriaException as e:
-            if e.issue:
-                context.validation.report(issue=e.issue)
-        except Exception as e:
-            context.validation.report(exception=e)
-            print_exception(e)
-
-        self.validate_value(value, context)
-
-    def validate_value(self, value, context):
-        if isinstance(value, list):
-            if self.field_variant == 'object_list':
-                for element in value:
-                    if hasattr(element, '_validate'):
-                        element._validate(context)
-            elif self.field_variant == 'sequenced_object_list':
-                for _, element in value:
-                    if hasattr(element, '_validate'):
-                        element._validate(context)
-        elif isinstance(value, dict):
-            if self.field_variant in ('object_dict', 'object_dict_unknown_fields'):
-                for inner_value in value.itervalues():
-                    if hasattr(inner_value, '_validate'):
-                        inner_value._validate(context)
-
-        if hasattr(value, '_validate'):
-            value._validate(context)
-
-    @staticmethod
-    def _get_context():
-        thread_locals = threading.local()
-        return getattr(thread_locals, 'aria_consumption_context', None)
-
-    def _coerce_primitive(self, value, context):
-        if context is None:
-            context = Field._get_context()
-        allow_primitive_coercion = (context.validation.allow_primitive_coersion
-                                    if context is not None
-                                    else True)
-        return validate_primitive(value, self.cls, allow_primitive_coercion)
-
-    # primitive
-
-    def _get_primitive(self, presentation, raw, value, context):
-        if (self.cls is not None and not isinstance(value, self.cls)
-                and value is not None and value is not NULL):
-            try:
-                return self._coerce_primitive(value, context)
-            except ValueError as e:
-                raise InvalidValueError('%s is not a valid "%s": %s' %
-                                        (self.full_name, self.full_cls_name, safe_repr(value)),
-                                        locator=self.get_locator(raw), cause=e)
-        return value
-
-    def _dump_primitive(self, context, value):
-        if hasattr(value, 'as_raw'):
-            value = as_raw(value)
-        puts('%s: %s' % (self.name, context.style.literal(value)))
-
-    # primitive list
-
-    def _get_primitive_list(self, presentation, raw, value, context):
-        if not isinstance(value, list):
-            raise InvalidValueError('%s is not a list: %s' % (self.full_name, safe_repr(value)),
-                                    locator=self.get_locator(raw))
-        primitive_list = value
-        if self.cls is not None:
-            if context is None:
-                context = Field._get_context()
-            primitive_list = []
-            for i, _ in enumerate(value):
-                primitive = value[i]
-                try:
-                    primitive = self._coerce_primitive(primitive, context)
-                except ValueError as e:
-                    raise InvalidValueError('%s is not a list of "%s": element %d is %s'
-                                            % (self.full_name,
-                                               self.full_cls_name,
-                                               i,
-                                               safe_repr(primitive)),
-                                            locator=self.get_locator(raw), cause=e)
-                if primitive in primitive_list:
-                    raise InvalidValueError('%s has a duplicate "%s": %s'
-                                            % (self.full_name,
-                                               self.full_cls_name,
-                                               safe_repr(primitive)),
-                                            locator=self.get_locator(raw))
-                primitive_list.append(primitive)
-        return FrozenList(primitive_list)
-
-    def _dump_primitive_list(self, context, value):
-        puts('%s:' % self.name)
-        with context.style.indent:
-            for primitive in value:
-                if hasattr(primitive, 'as_raw'):
-                    primitive = as_raw(primitive)
-                puts(context.style.literal(primitive))
-
-    # primitive dict
-
-    def _get_primitive_dict(self, presentation, raw, value, context):
-        if not isinstance(value, dict):
-            raise InvalidValueError('%s is not a dict: %s' % (self.full_name, safe_repr(value)),
-                                    locator=self.get_locator(raw))
-        primitive_dict = value
-        if self.cls is not None:
-            if context is None:
-                context = Field._get_context()
-            primitive_dict = OrderedDict()
-            for k, v in value.iteritems():
-                try:
-                    primitive_dict[k] = self._coerce_primitive(v, context)
-                except ValueError as e:
-                    raise InvalidValueError('%s is not a dict of "%s" values: entry "%s" is %s'
-                                            % (self.full_name, self.full_cls_name, k, safe_repr(v)),
-                                            locator=self.get_locator(raw),
-                                            cause=e)
-        return FrozenDict(primitive_dict)
-
-    def _dump_primitive_dict(self, context, value):
-        puts('%s:' % self.name)
-        with context.style.indent:
-            for v in value.itervalues():
-                if hasattr(v, 'as_raw'):
-                    v = as_raw(v)
-                puts(context.style.literal(v))
-
-    # object
-
-    def _get_object(self, presentation, raw, value, context):
-        try:
-            return self.cls(name=self.name, raw=value, container=presentation)
-        except TypeError as e:
-            raise InvalidValueError('%s cannot be initialized to an instance of "%s": %s'
-                                    % (self.full_name, self.full_cls_name, safe_repr(value)),
-                                    cause=e,
-                                    locator=self.get_locator(raw))
-
-    def _dump_object(self, context, value):
-        puts('%s:' % self.name)
-        with context.style.indent:
-            if hasattr(value, '_dump'):
-                value._dump(context)
-
-    # object list
-
-    def _get_object_list(self, presentation, raw, value, context):
-        if not isinstance(value, list):
-            raise InvalidValueError('%s is not a list: %s'
-                                    % (self.full_name, safe_repr(value)),
-                                    locator=self.get_locator(raw))
-        return FrozenList((self.cls(name=self.name, raw=v, container=presentation) for v in value))
-
-    def _dump_object_list(self, context, value):
-        puts('%s:' % self.name)
-        with context.style.indent:
-            for v in value:
-                if hasattr(v, '_dump'):
-                    v._dump(context)
-
-    # object dict
-
-    def _get_object_dict(self, presentation, raw, value, context):
-        if not isinstance(value, dict):
-            raise InvalidValueError('%s is not a dict: %s' % (self.full_name, safe_repr(value)),
-                                    locator=self.get_locator(raw))
-        return FrozenDict(((k, self.cls(name=k, raw=v, container=presentation))
-                           for k, v in value.iteritems()))
-
-    def _dump_object_dict(self, context, value):
-        puts('%s:' % self.name)
-        with context.style.indent:
-            for v in value.itervalues():
-                if hasattr(v, '_dump'):
-                    v._dump(context)
-
-    # sequenced object list
-
-    def _get_sequenced_object_list(self, presentation, raw, value, context):
-        if not isinstance(value, list):
-            raise InvalidValueError('%s is not a sequenced list (a list of dicts, '
-                                    'each with exactly one key): %s'
-                                    % (self.full_name, safe_repr(value)),
-                                    locator=self.get_locator(raw))
-        sequence = []
-        for v in value:
-            if not isinstance(v, dict):
-                raise InvalidValueError('%s list elements are not all dicts with '
-                                        'exactly one key: %s' % (self.full_name, safe_repr(value)),
-                                        locator=self.get_locator(raw))
-            if len(v) != 1:
-                raise InvalidValueError('%s list elements do not all have exactly one key: %s'
-                                        % (self.full_name, safe_repr(value)),
-                                        locator=self.get_locator(raw))
-            key, element_value = v.items()[0]
-            sequence.append((key, self.cls(name=key, raw=element_value, container=presentation)))
-        return FrozenList(sequence)
-
-    def _dump_sequenced_object_list(self, context, value):
-        puts('%s:' % self.name)
-        for _, v in value:
-            if hasattr(v, '_dump'):
-                v._dump(context)
-
-    # primitive dict for unknown fields
-
-    def _get_primitive_dict_unknown_fields(self, presentation, raw, context):
-        if isinstance(raw, dict):
-            primitive_dict = raw
-            if self.cls is not None:
-                if context is None:
-                    context = Field._get_context()
-                primitive_dict = OrderedDict()
-                for k, v in raw.iteritems():
-                    if k not in presentation.FIELDS:
-                        try:
-                            primitive_dict[k] = self._coerce_primitive(v, context)
-                        except ValueError as e:
-                            raise InvalidValueError('%s is not a dict of "%s" values:'
-                                                    ' entry "%s" is %s'
-                                                    % (self.full_name, self.full_cls_name,
-                                                       k, safe_repr(v)),
-                                                    locator=self.get_locator(raw),
-                                                    cause=e)
-            return FrozenDict(primitive_dict)
-        return None
-
-    def _dump_primitive_dict_unknown_fields(self, context, value):
-        self._dump_primitive_dict(context, value)
-
-    # object dict for unknown fields
-
-    def _get_object_dict_unknown_fields(self, presentation, raw, context):
-        if isinstance(raw, dict):
-            return FrozenDict(((k, self.cls(name=k, raw=v, container=presentation))
-                               for k, v in raw.iteritems() if k not in presentation.FIELDS))
-        return None
-
-    def _dump_object_dict_unknown_fields(self, context, value):
-        self._dump_object_dict(context, value)
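
The field machinery above is declarative: a presentation class is decorated with @has_fields, each field is a method wrapped by one of the field decorators, and the method body is reduced to a docstring. A minimal sketch of the pattern, assuming has_fields, short_form_field, primitive_field and Presentation are exported by the aria.parser.presentation package (the PropertyDefinition class and its fields are hypothetical, not ARIA code):

    from aria.parser.presentation import (
        Presentation, has_fields, short_form_field, primitive_field)

    @short_form_field('type')  # applied after @has_fields, since it requires FIELDS
    @has_fields
    class PropertyDefinition(Presentation):

        @primitive_field(str, required=True)
        def type(self):
            """The name of the property's data type."""

        @primitive_field(str)
        def description(self):
            """An optional human-readable description."""

    # Fields are then readable both as attributes and through the dict protocol:
    prop = PropertyDefinition(name='port', raw={'type': 'integer'})
    assert prop.type == 'integer'
    assert prop['type'] == 'integer'
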
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/null.py b/apache-ariatosca-0.1.1/aria/parser/presentation/null.py
deleted file mode 100644
index 287d2ba..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/null.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.collections import deepcopy_with_locators
-
-
-class Null(object):
-    """
-    Represents an explicit null value provided by the user, which is different from
-    not supplying a value at all.
-
-    It is a singleton.
-    """
-
-    @property
-    def as_raw(self):
-        return None
-
-NULL = Null()
-
-
-def none_to_null(value):
-    """
-    Convert ``None`` to ``NULL``, recursively.
-    """
-
-    if value is None:
-        return NULL
-    if isinstance(value, list):
-        value = deepcopy_with_locators(value)
-        for i, _ in enumerate(value):
-            value[i] = none_to_null(value[i])
-    elif isinstance(value, dict):
-        value = deepcopy_with_locators(value)
-        for k, v in value.iteritems():
-            value[k] = none_to_null(v)
-    return value
-
-
-def null_to_none(value):
-    """
-    Convert ``NULL`` to ``None``, recursively.
-    """
-
-    if value is NULL:
-        return None
-    if isinstance(value, list):
-        value = deepcopy_with_locators(value)
-        for i, _ in enumerate(value):
-            value[i] = null_to_none(value[i])
-    elif isinstance(value, dict):
-        value = deepcopy_with_locators(value)
-        for k, v in value.iteritems():
-            value[k] = null_to_none(v)
-    return value
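
Because NULL is a singleton, identity checks and the two conversion helpers compose directly; a short illustration of the contract above:

    from aria.parser.presentation.null import NULL, none_to_null, null_to_none

    assert NULL.as_raw is None          # NULL serializes back to a plain None
    assert none_to_null(None) is NULL   # an explicit null becomes the singleton
    assert null_to_none(NULL) is None   # and converts back symmetrically
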
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/presentation.py b/apache-ariatosca-0.1.1/aria/parser/presentation/presentation.py
deleted file mode 100644
index 7292562..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/presentation.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.caching import HasCachedMethods
-from ...utils.collections import deepcopy_with_locators
-from ...utils.formatting import safe_repr
-from ...utils.type import full_type_name
-from ...utils.console import puts
-from ..validation import Issue
-from .null import none_to_null
-from .utils import (get_locator, validate_no_short_form, validate_no_unknown_fields,
-                    validate_known_fields, validate_primitive)
-
-
-class Value(object):
-    """
-    Encapsulates a typed value assignment.
-    """
-
-    def __init__(self, type_name, value, description):
-        self.type = deepcopy_with_locators(type_name)
-        self.value = deepcopy_with_locators(value)
-        self.description = deepcopy_with_locators(description)
-
-
-class PresentationBase(HasCachedMethods):
-    """
-    Base class for ARIA presentation classes.
-    """
-
-    def __init__(self, name=None, raw=None, container=None):
-        self._name = name
-        self._raw = raw
-        self._container = container
-        super(PresentationBase, self).__init__()
-
-    @property
-    def as_raw(self):
-        return self._raw
-
-    def _validate(self, context):
-        """
-        Validates the presentation while reporting errors in the validation context but *not*
-        raising exceptions.
-
-        The base class does nothing, but subclasses may override this for specialized validation.
-        """
-
-    @property
-    def _fullname(self):
-        """
-        Always returns a usable full name for the presentation: its own name if it has one,
-        otherwise its container's full name, finally defaulting to the class name.
-        """
-
-        if self._name is not None:
-            return self._name
-        elif self._container is not None:
-            return self._container._fullname
-        return full_type_name(self)
-
-    @property
-    def _locator(self):
-        """
-        Attempts to return the most relevant locator: our own if we have one, otherwise that of
-        our container.
-
-        :rtype: :class:`aria.parser.reading.Locator`
-        """
-
-        return get_locator(self._raw, self._container)
-
-    def _get(self, *names):
-        """
-        Gets attributes recursively.
-        """
-
-        obj = self
-        if (obj is not None) and names:
-            for name in names:
-                obj = getattr(obj, name, None)
-                if obj is None:
-                    break
-        return obj
-
-    def _get_from_dict(self, *names):
-        """
-        Gets attributes recursively, except for the last name which is used to get a value from the
-        last dict.
-        """
-
-        if names:
-            obj = self._get(*names[:-1])
-            if isinstance(obj, dict):
-                return obj.get(names[-1])  # pylint: disable=no-member
-        return None
-
-    def _get_child_locator(self, *names):
-        """
-        Attempts to return the locator of one of our children. Will default to our locator if not
-        found.
-
-        :rtype: :class:`aria.parser.reading.Locator`
-        """
-
-        if hasattr(self._raw, '_locator'):
-            locator = self._raw._locator
-            if locator is not None:
-                return locator.get_child(*names)
-        return self._locator
-
-    def _dump(self, context):
-        """
-        Emits a colorized representation.
-
-        The base class will emit a sensible default representation of the fields (by calling
-        ``_dump_content``), but subclasses may override this for specialized dumping.
-        """
-
-        if self._name:
-            puts(context.style.node(self._name))
-            with context.style.indent:
-                self._dump_content(context)
-        else:
-            self._dump_content(context)
-
-    def _dump_content(self, context, field_names=None):
-        """
-        Emits a colorized representation of the contents.
-
-        The base class will call ``_dump_field`` on all the fields, but subclasses may override
-        this for specialized dumping.
-        """
-
-        if field_names:
-            for field_name in field_names:
-                self._dump_field(context, field_name)
-        elif hasattr(self, '_iter_field_names'):
-            for field_name in self._iter_field_names():  # pylint: disable=no-member
-                self._dump_field(context, field_name)
-        else:
-            puts(context.style.literal(self._raw))
-
-    def _dump_field(self, context, field_name):
-        """
-        Emits a colorized representation of the field.
-
-        According to the field type, this may trigger nested recursion. The nested types will
-        delegate to their ``_dump`` methods.
-        """
-
-        field = self.FIELDS[field_name]  # pylint: disable=no-member
-        field.dump(self, context)
-
-    def _clone(self, container=None):
-        """
-        Creates a clone of this presentation, optionally allowing for a new container.
-        """
-
-        raw = deepcopy_with_locators(self._raw)
-        if container is None:
-            container = self._container
-        return self.__class__(name=self._name, raw=raw, container=container)
-
-
-class Presentation(PresentationBase):
-    """
-    Base class for ARIA presentations. A presentation is a Pythonic wrapper around agnostic raw
-    data, adding the ability to read and modify the data with proper validation.
-
-    ARIA presentation classes will often be decorated with :func:`has_fields`, as that mechanism
-    automates a lot of field-specific validation. However, that is not a requirement.
-
-    Make sure that your utility property and method names begin with a ``_``, because those names
-    without a ``_`` prefix are normally reserved for fields.
-    """
-
-    def _validate(self, context):
-        validate_no_short_form(context, self)
-        validate_no_unknown_fields(context, self)
-        validate_known_fields(context, self)
-
-
-class AsIsPresentation(PresentationBase):
-    """
-    Base class for trivial ARIA presentations that provide the raw value as is.
-    """
-
-    def __init__(self, name=None, raw=None, container=None, cls=None):
-        super(AsIsPresentation, self).__init__(name, raw, container)
-        self.cls = cls
-
-    @property
-    def value(self):
-        return none_to_null(self._raw)
-
-    @value.setter
-    def value(self, value):
-        self._raw = value
-
-    @property
-    def _full_cls_name(self):
-        name = full_type_name(self.cls) if self.cls is not None else None
-        if name == 'unicode':
-            # For simplicity, display "unicode" as "str"
-            name = 'str'
-        return name
-
-    def _validate(self, context):
-        try:
-            validate_primitive(self._raw, self.cls, context.validation.allow_primitive_coersion)
-        except ValueError as e:
-            context.validation.report('"%s" is not a valid "%s": %s'
-                                      % (self._fullname, self._full_cls_name, safe_repr(self._raw)),
-                                      locator=self._locator,
-                                      level=Issue.FIELD,
-                                      exception=e)
-
-    def _dump(self, context):
-        if hasattr(self._raw, '_dump'):
-            self._raw._dump(context)
-        else:
-            super(AsIsPresentation, self)._dump(context)
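
For values that need no field machinery, AsIsPresentation simply exposes the raw value as is; a small example (the 'timeout' name and int type are arbitrary), assuming AsIsPresentation is exported by aria.parser.presentation:

    from aria.parser.presentation import AsIsPresentation

    p = AsIsPresentation(name='timeout', raw=30, cls=int)
    assert p.value == 30     # reads the raw value (a raw None would surface as NULL)
    p.value = 60             # the setter replaces the raw value directly
    assert p.as_raw == 60
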
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/presenter.py b/apache-ariatosca-0.1.1/aria/parser/presentation/presenter.py
deleted file mode 100644
index 9fd296f..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/presenter.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.collections import merge
-from ...utils.formatting import safe_repr
-from ..validation import Issue
-from .presentation import Presentation
-
-
-class Presenter(Presentation):
-    """
-    Base class for ARIA presenters.
-
-    Presenters provide a robust API over agnostic raw data.
-    """
-
-    DSL_VERSIONS = None
-    ALLOWED_IMPORTED_DSL_VERSIONS = None
-
-    @classmethod
-    def can_present(cls, raw):
-        dsl = raw.get('tosca_definitions_version')
-        assert cls.DSL_VERSIONS
-        return dsl in cls.DSL_VERSIONS
-
-    def _validate_import(self, context, presentation):
-        tosca_definitions_version = presentation.service_template.tosca_definitions_version
-        assert self.ALLOWED_IMPORTED_DSL_VERSIONS
-        if tosca_definitions_version is not None \
-                and tosca_definitions_version not in self.__class__.ALLOWED_IMPORTED_DSL_VERSIONS:
-            context.validation.report(
-                'import "tosca_definitions_version" is not one of %s: %s'
-                % (' or '.join([safe_repr(v)
-                                for v in self.__class__.ALLOWED_IMPORTED_DSL_VERSIONS]),
-                   presentation.service_template.tosca_definitions_version),
-                locator=presentation._get_child_locator('inputs'),
-                level=Issue.BETWEEN_TYPES)
-            return False
-        return True
-
-    def _merge_import(self, presentation):
-        merge(self._raw, presentation._raw)
-        if hasattr(self._raw, '_locator') and hasattr(presentation._raw, '_locator'):
-            self._raw._locator.merge(presentation._raw._locator)
-
-    def _link_locators(self):
-        if hasattr(self._raw, '_locator'):
-            locator = self._raw._locator
-            delattr(self._raw, '_locator')
-            locator.link(self._raw)
-
-    @staticmethod
-    def _get_import_locations(context):
-        raise NotImplementedError
-
-    @staticmethod
-    def _get_deployment_template(context):
-        raise NotImplementedError
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/source.py b/apache-ariatosca-0.1.1/aria/parser/presentation/source.py
deleted file mode 100644
index 4bfb8e1..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/source.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from ...extension import parser
-
-from .exceptions import PresenterNotFoundError
-
-
-class PresenterSource(object):
-    """
-    Base class for ARIA presenter sources.
-
-    Presenter sources provide appropriate :class:`Presenter` classes for agnostic raw data.
-    """
-
-    def get_presenter(self, raw):  # pylint: disable=unused-argument,no-self-use
-        raise PresenterNotFoundError('presenter not found')
-
-
-class DefaultPresenterSource(PresenterSource):
-    """
-    The default ARIA presenter source.
-    """
-
-    def __init__(self, classes=None):
-        if classes is None:
-            classes = parser.presenter_class()
-        self.classes = classes
-
-    def get_presenter(self, raw):
-        for cls in self.classes:
-            if cls.can_present(raw):
-                return cls
-
-        if 'tosca_definitions_version' in raw:
-            if raw['tosca_definitions_version'] is None:
-                raise PresenterNotFoundError("'tosca_definitions_version' is not specified")
-            if not isinstance(raw['tosca_definitions_version'], basestring):
-                raise PresenterNotFoundError("'tosca_definitions_version' is not a string")
-            if not raw['tosca_definitions_version']:
-                raise PresenterNotFoundError("'tosca_definitions_version' is not specified")
-        return super(DefaultPresenterSource, self).get_presenter(raw)
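
A presenter source is consulted with already-read raw data; a sketch of the lookup, assuming DefaultPresenterSource and PresenterNotFoundError are exported by aria.parser.presentation and using an example DSL version string:

    from aria.parser.presentation import DefaultPresenterSource, PresenterNotFoundError

    source = DefaultPresenterSource()
    raw = {'tosca_definitions_version': 'tosca_simple_yaml_1_0'}
    try:
        presenter_cls = source.get_presenter(raw)
    except PresenterNotFoundError:
        presenter_cls = None  # no registered presenter class can_present() this raw data
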
diff --git a/apache-ariatosca-0.1.1/aria/parser/presentation/utils.py b/apache-ariatosca-0.1.1/aria/parser/presentation/utils.py
deleted file mode 100644
index f0fd390..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/presentation/utils.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from types import FunctionType
-
-from ...utils.formatting import safe_repr
-from ...utils.type import full_type_name
-from ..validation import Issue
-from .null import NULL
-
-
-def get_locator(*values):
-    """
-    Gets the first available locator.
-
-    :rtype: :class:`aria.parser.reading.Locator`
-    """
-
-    for v in values:
-        if hasattr(v, '_locator'):
-            locator = v._locator
-            if locator is not None:
-                return locator
-    return None
-
-
-def parse_types_dict_names(types_dict_names):
-    """
-    If the first element in the array is a function, extracts it out.
-    """
-
-    convert = None
-    if isinstance(types_dict_names[0], FunctionType):
-        convert = types_dict_names[0]
-        types_dict_names = types_dict_names[1:]
-    return types_dict_names, convert
-
-
-def validate_primitive(value, cls, coerce=False):
-    """
-    Checks if the value is of the primitive type, optionally attempting to coerce it
-    if it is not.
-
-    :raises ValueError: if not a primitive type or if coercion failed.
-    """
-
-    if (cls is not None) and (value is not None) and (value is not NULL):
-        if (cls is unicode) or (cls is str): # These two types are interchangeable
-            valid = isinstance(value, basestring)
-        elif cls is int:
-            # In Python, a bool is an int
-            valid = isinstance(value, int) and not isinstance(value, bool)
-        else:
-            valid = isinstance(value, cls)
-        if not valid:
-            if coerce:
-                value = cls(value)
-            else:
-                raise ValueError('not a "%s": %s' % (full_type_name(cls), safe_repr(value)))
-    return value
-
-
-def validate_no_short_form(context, presentation):
-    """
-    Makes sure that short form definitions are used only where they are allowed.
-    """
-
-    if not hasattr(presentation, 'SHORT_FORM_FIELD') and not isinstance(presentation._raw, dict):
-        context.validation.report('short form not allowed for field "%s"' % presentation._fullname,
-                                  locator=presentation._locator,
-                                  level=Issue.BETWEEN_FIELDS)
-
-
-def validate_no_unknown_fields(context, presentation):
-    """
-    Makes sure that unknown fields are used only where they are allowed.
-    """
-
-    if not getattr(presentation, 'ALLOW_UNKNOWN_FIELDS', False) \
-            and not context.validation.allow_unknown_fields \
-            and isinstance(presentation._raw, dict) \
-            and hasattr(presentation, 'FIELDS'):
-        for k in presentation._raw:
-            if k not in presentation.FIELDS:
-                context.validation.report('field "%s" is not supported in "%s"'
-                                          % (k, presentation._fullname),
-                                          locator=presentation._get_child_locator(k),
-                                          level=Issue.BETWEEN_FIELDS)
-
-
-def validate_known_fields(context, presentation):
-    """
-    Validates all known fields.
-    """
-
-    if hasattr(presentation, '_iter_fields'):
-        for _, field in presentation._iter_fields():
-            field.validate(presentation, context)
-
-
-def get_parent_presentation(context, presentation, *types_dict_names):
-    """
-    Returns the parent presentation according to the ``derived_from`` field, or ``None`` if invalid.
-
-    Checks that we do not derive from ourselves and that we do not cause a circular hierarchy.
-
-    The arguments from the third onwards are used to locate a nested field under
-    ``service_template`` under the root presenter. The first of these can optionally be a function,
-    in which case it will be called to convert type names. This can be used to support shorthand
-    type names, aliases, etc.
-    """
-
-    type_name = presentation.derived_from
-
-    if type_name is None:
-        return None
-
-    types_dict_names, convert = parse_types_dict_names(types_dict_names)
-    types_dict = context.presentation.get('service_template', *types_dict_names) or {}
-
-    if convert:
-        type_name = convert(context, type_name, types_dict)
-
-    # Make sure not derived from self
-    if type_name == presentation._name:
-        return None
-    # Make sure derived from type exists
-    elif type_name not in types_dict:
-        return None
-    else:
-        # Make sure derivation hierarchy is not circular
-        hierarchy = [presentation._name]
-        presentation_copy = presentation
-        while presentation_copy.derived_from is not None:
-            derived_from = presentation_copy.derived_from
-            if convert:
-                derived_from = convert(context, derived_from, types_dict)
-
-            if derived_from == presentation_copy._name or derived_from not in types_dict:
-                return None
-            presentation_copy = types_dict[derived_from]
-            if presentation_copy._name in hierarchy:
-                return None
-            hierarchy.append(presentation_copy._name)
-
-    return types_dict[type_name]
-
-
-def report_issue_for_unknown_type(context, presentation, type_name, field_name, value=None):
-    if value is None:
-        value = getattr(presentation, field_name)
-    context.validation.report('"%s" refers to an unknown %s in "%s": %s'
-                              % (field_name, type_name, presentation._fullname, safe_repr(value)),
-                              locator=presentation._get_child_locator(field_name),
-                              level=Issue.BETWEEN_TYPES)
-
-
-def report_issue_for_parent_is_self(context, presentation, field_name):
-    context.validation.report('parent type of "%s" is self' % presentation._fullname,
-                              locator=presentation._get_child_locator(field_name),
-                              level=Issue.BETWEEN_TYPES)
-
-
-def report_issue_for_unknown_parent_type(context, presentation, field_name):
-    context.validation.report('unknown parent type "%s" in "%s"'
-                              % (getattr(presentation, field_name), presentation._fullname),
-                              locator=presentation._get_child_locator(field_name),
-                              level=Issue.BETWEEN_TYPES)
-
-
-def report_issue_for_circular_type_hierarchy(context, presentation, field_name):
-    context.validation.report('"%s" of "%s" creates a circular type hierarchy'
-                              % (getattr(presentation, field_name), presentation._fullname),
-                              locator=presentation._get_child_locator(field_name),
-                              level=Issue.BETWEEN_TYPES)
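
validate_primitive either accepts, coerces, or rejects a value; a brief illustration of that contract with arbitrary values:

    from aria.parser.presentation.utils import validate_primitive

    assert validate_primitive(8080, int) == 8080                 # already the right type
    assert validate_primitive('8080', int, coerce=True) == 8080  # coerced via int()
    try:
        validate_primitive('8080', int)                          # coercion not allowed
    except ValueError:
        pass                                                     # rejected with ValueError
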
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/context.py b/apache-ariatosca-0.1.1/aria/parser/reading/context.py
deleted file mode 100644
index 233e407..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/context.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.threading import LockedList
-from .source import DefaultReaderSource
-
-
-class ReadingContext(object):
-    """
-    Reading context.
-
-    :ivar reader_source: for finding reader instances
-    :vartype reader_source: ReaderSource
-    :ivar reader: overrides ``reader_source`` with a specific class
-    :vartype reader: type
-    """
-
-    def __init__(self):
-        self.reader_source = DefaultReaderSource()
-        self.reader = None
-
-        self._locations = LockedList()  # for keeping track of locations already read
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/exceptions.py b/apache-ariatosca-0.1.1/aria/parser/reading/exceptions.py
deleted file mode 100644
index 3699729..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/exceptions.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...exceptions import AriaException
-from ..validation import Issue
-
-
-class ReaderException(AriaException):
-    """
-    ARIA reader exception.
-    """
-
-
-class ReaderNotFoundError(ReaderException):
-    """
-    ARIA reader error: reader not found for source.
-    """
-
-
-class ReaderSyntaxError(ReaderException):
-    """
-    ARIA read format error.
-    """
-
-    def __init__(self, message, cause=None, cause_tb=None, location=None, line=None,
-                 column=None, locator=None, snippet=None, level=Issue.SYNTAX):
-        super(ReaderSyntaxError, self).__init__(message, cause, cause_tb)
-        self.issue = Issue(message, location=location, line=line, column=column,
-                           locator=locator, snippet=snippet, level=level)
-
-
-class AlreadyReadException(ReaderException):
-    """
-    ARIA reader exception: already read.
-    """
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/jinja.py b/apache-ariatosca-0.1.1/aria/parser/reading/jinja.py
deleted file mode 100644
index 687317a..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/jinja.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from jinja2 import Template
-
-from ... import __version__ as version
-from ..loading import LiteralLocation, LiteralLoader
-from .reader import Reader
-from .exceptions import ReaderSyntaxError
-
-
-# TODO: we could put a lot of other useful stuff here.
-CONTEXT = {
-    'ARIA_VERSION': version,
-    'ENV': os.environ}
-
-
-class JinjaReader(Reader):
-    """
-    ARIA Jinja reader.
-
-    Forwards the rendered result to a new reader in the reader source.
-    """
-
-    def read(self):
-        data = self.load()
-        try:
-            data = str(data)
-            template = Template(data)
-            literal = template.render(CONTEXT)
-            # TODO: might be useful to write the literal result to a file for debugging
-            location = self.location
-            if isinstance(location, basestring) and location.endswith('.jinja'):
-                # Use reader based on the location with the ".jinja" suffix stripped off
-                location = location[:-6]
-                next_reader = self.context.reading.reader_source.get_reader(
-                    self.context, LiteralLocation(literal, name=location), LiteralLoader(literal))
-            else:
-                # Use reader for literal loader
-                next_reader = self.context.reading.reader_source.get_reader(
-                    self.context, LiteralLocation(literal), LiteralLoader(literal))
-            return next_reader.read()
-        except Exception as e:
-            raise ReaderSyntaxError('Jinja: %s' % e, cause=e)
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/json.py b/apache-ariatosca-0.1.1/aria/parser/reading/json.py
deleted file mode 100644
index d144f80..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/json.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'json'
-
-import json
-
-from ...utils.collections import OrderedDict
-from .reader import Reader
-from .exceptions import ReaderSyntaxError
-
-
-class JsonReader(Reader):
-    """
-    ARIA JSON reader.
-    """
-
-    def read(self):
-        data = self.load()
-        try:
-            data = unicode(data)
-            return json.loads(data, object_pairs_hook=OrderedDict)
-        except Exception as e:
-            raise ReaderSyntaxError('JSON: %s' % e, cause=e)
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/locator.py b/apache-ariatosca-0.1.1/aria/parser/reading/locator.py
deleted file mode 100644
index 965164d..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/locator.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from copy import deepcopy
-
-
-from ...utils.console import puts, Colored, indent
-
-
-# We are inheriting the primitive types in order to add the ability to set
-# an attribute (_locator) on them.
-
-class LocatableString(unicode):
-    pass
-
-
-class LocatableInt(int):
-    pass
-
-
-class LocatableFloat(float):
-    pass
-
-
-def wrap(value):
-    if isinstance(value, basestring):
-        return True, LocatableString(value)
-    elif isinstance(value, int) and \
-            not isinstance(value, bool):  # Note: bool counts as int in Python!
-        return True, LocatableInt(value)
-    elif isinstance(value, float):
-        return True, LocatableFloat(value)
-    return False, value
-
-
-class Locator(object):
-    """
-    Stores location information (line and column numbers) for agnostic raw data.
-    """
-    def __init__(self, location, line, column, children=None):
-        self.location = location
-        self.line = line
-        self.column = column
-        self.children = children
-
-    def get_child(self, *names):
-        if (not names) or (not isinstance(self.children, dict)):
-            return self
-        name = names[0]
-        if name not in self.children:
-            return self
-        child = self.children[name]
-        return child.get_child(*names[1:])
-
-    def link(self, raw, path=None):
-        if hasattr(raw, '_locator'):
-            # This can happen when we use anchors
-            return
-
-        try:
-            setattr(raw, '_locator', self)
-        except AttributeError:
-            return
-
-        if isinstance(raw, list):
-            for i, raw_element in enumerate(raw):
-                wrapped, raw_element = wrap(raw_element)
-                if wrapped:
-                    raw[i] = raw_element
-                child_path = '%s.%d' % (path, i) if path else str(i)
-                try:
-                    self.children[i].link(raw_element, child_path)
-                except (KeyError, IndexError):
-                    raise ValueError('location map does not match agnostic raw data: %s' %
-                                     child_path)
-        elif isinstance(raw, dict):
-            for k, raw_element in raw.iteritems():
-                wrapped, raw_element = wrap(raw_element)
-                if wrapped:
-                    raw[k] = raw_element
-                child_path = '%s.%s' % (path, k) if path else k
-                try:
-                    self.children[k].link(raw_element, child_path)
-                except KeyError:
-                    raise ValueError('location map does not match agnostic raw data: %s' %
-                                     child_path)
-
-    def merge(self, locator):
-        if isinstance(self.children, dict) and isinstance(locator.children, dict):
-            for k, loc in locator.children.iteritems():
-                if k in self.children:
-                    self.children[k].merge(loc)
-                else:
-                    self.children[k] = loc
-
-    def dump(self, key=None):
-        if key:
-            puts('%s "%s":%d:%d' %
-                 (Colored.red(key), Colored.blue(self.location), self.line, self.column))
-        else:
-            puts('"%s":%d:%d' % (Colored.blue(self.location), self.line, self.column))
-        if isinstance(self.children, list):
-            with indent(2):
-                for loc in self.children:
-                    loc.dump()
-        elif isinstance(self.children, dict):
-            with indent(2):
-                for k, loc in self.children.iteritems():
-                    loc.dump(k)
-
-    def __str__(self):
-        # Should be in same format as Issue.locator_as_str
-        return '"%s":%d:%d' % (self.location, self.line, self.column)
-
-
-def deepcopy_with_locators(value):
-    """
-    Like :func:`deepcopy`, but also copies over locators.
-    """
-
-    res = deepcopy(value)
-    copy_locators(res, value)
-    return res
-
-
-def copy_locators(target, source):
-    """
-    Copies over ``_locator`` for all elements, recursively.
-
-    Assumes that target and source have exactly the same list/dict structure.
-    """
-
-    locator = getattr(source, '_locator', None)
-    if locator is not None:
-        try:
-            setattr(target, '_locator', locator)
-        except AttributeError:
-            pass
-
-    if isinstance(target, list) and isinstance(source, list):
-        for i, _ in enumerate(target):
-            copy_locators(target[i], source[i])
-    elif isinstance(target, dict) and isinstance(source, dict):
-        for k, v in target.items():
-            copy_locators(v, source[k])
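
An illustrative sketch of the locator structure above; the file name and positions are made up, and the import assumes the apache-ariatosca-0.1.1 source tree is on the path.

from aria.parser.reading.locator import Locator

root = Locator('service.yaml', 1, 1, children={
    'node_templates': Locator('service.yaml', 5, 3)})
# Locators render in the same '"location":line:column' format used by Issue.locator_as_str.
print(root.get_child('node_templates'))   # -> "service.yaml":5:3
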
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/raw.py b/apache-ariatosca-0.1.1/aria/parser/reading/raw.py
deleted file mode 100644
index ed980ac..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/raw.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .reader import Reader
-
-
-class RawReader(Reader):
-    """
-    ARIA raw reader.
-
-    Expects to receive agnostic raw data from the loader, and so does nothing to it.
-    """
-
-    def read(self):
-        return self.load()
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/reader.py b/apache-ariatosca-0.1.1/aria/parser/reading/reader.py
deleted file mode 100644
index 1a29f11..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/reader.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.openclose import OpenClose
-from .exceptions import ReaderException, AlreadyReadException
-
-
-class Reader(object):
-    """
-    Base class for ARIA readers.
-
-    Readers provide agnostic raw data by consuming :class:`aria.parser.loading.Loader` instances.
-    """
-
-    def __init__(self, context, location, loader):
-        self.context = context
-        self.location = location
-        self.loader = loader
-
-    def load(self):
-        with OpenClose(self.loader) as loader:
-            if self.context is not None:
-                with self.context._locations:
-                    for location in self.context._locations:
-                        if location.is_equivalent(loader.location):
-                            raise AlreadyReadException('already read: %s' % loader.location)
-                    self.context._locations.append(loader.location)
-
-            data = loader.load()
-            if data is None:
-                raise ReaderException('loader did not provide data: %s' % loader)
-            return data
-
-    def read(self):
-        raise NotImplementedError
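
A toy subclass sketch showing the contract above: ``load()`` pulls raw data from the loader (with duplicate-location protection), and ``read()`` is what subclasses implement. The class name is hypothetical.

from aria.parser.reading.reader import Reader

class UpperCaseReader(Reader):
    """Toy reader that upper-cases whatever text the loader provides."""

    def read(self):
        return str(self.load()).upper()
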
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/source.py b/apache-ariatosca-0.1.1/aria/parser/reading/source.py
deleted file mode 100644
index 6fff2f6..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/source.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ..loading import LiteralLocation, UriLocation
-from .yaml import YamlReader
-from .json import JsonReader
-from .jinja import JinjaReader
-from .exceptions import ReaderNotFoundError
-
-
-EXTENSIONS = {
-    '.yaml': YamlReader,
-    '.json': JsonReader,
-    '.jinja': JinjaReader}
-
-
-class ReaderSource(object):
-    """
-    Base class for ARIA reader sources.
-
-    Reader sources provide appropriate :class:`Reader` instances for locations.
-    """
-
-    @staticmethod
-    def get_reader(context, location, loader):  # pylint: disable=unused-argument
-        raise ReaderNotFoundError('location: %s' % location)
-
-
-class DefaultReaderSource(ReaderSource):
-    """
-    The default ARIA reader source will generate a :class:`YamlReader` for
-    locations that end in ".yaml", a :class:`JsonReader` for locations that
-    end in ".json",  and a :class:`JinjaReader` for locations that end in
-    ".jinja".
-    """
-
-    def __init__(self, literal_reader_class=YamlReader):
-        super(DefaultReaderSource, self).__init__()
-        self.literal_reader_class = literal_reader_class
-
-    def get_reader(self, context, location, loader):
-        if isinstance(location, LiteralLocation):
-            return self.literal_reader_class(context, location, loader)
-
-        elif isinstance(location, UriLocation):
-            for extension, reader_class in EXTENSIONS.iteritems():
-                if location.uri.endswith(extension):
-                    return reader_class(context, location, loader)
-
-        return super(DefaultReaderSource, self).get_reader(context, location, loader)
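
One possible way to extend the extension table above, assuming the 0.1.1 module layout; the ".jsn" mapping is hypothetical.

from aria.parser.reading import source
from aria.parser.reading.json import JsonReader

# Dispatch ".jsn" URIs to the JSON reader as well; DefaultReaderSource consults
# this table for every UriLocation it is asked to resolve.
source.EXTENSIONS['.jsn'] = JsonReader
reader_source = source.DefaultReaderSource()
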
diff --git a/apache-ariatosca-0.1.1/aria/parser/reading/yaml.py b/apache-ariatosca-0.1.1/aria/parser/reading/yaml.py
deleted file mode 100644
index f5eac43..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/reading/yaml.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ruamel import yaml # @UnresolvedImport
-
-from ...utils.collections import OrderedDict
-from .reader import Reader
-from .locator import Locator
-from .exceptions import ReaderSyntaxError
-from .locator import LocatableString, LocatableInt, LocatableFloat
-
-# Add our types to ruamel.yaml
-yaml.representer.RoundTripRepresenter.add_representer(
-    LocatableString, yaml.representer.RoundTripRepresenter.represent_unicode)
-yaml.representer.RoundTripRepresenter.add_representer(
-    LocatableInt, yaml.representer.RoundTripRepresenter.represent_int)
-yaml.representer.RoundTripRepresenter.add_representer(
-    LocatableFloat, yaml.representer.RoundTripRepresenter.represent_float)
-
-MERGE_TAG = u'tag:yaml.org,2002:merge'
-MAP_TAG = u'tag:yaml.org,2002:map'
-
-
-class YamlLocator(Locator):
-    """
-    Map for agnostic raw data read from YAML.
-    """
-
-    def add_children(self, node):
-        if isinstance(node, yaml.SequenceNode):
-            self.children = []
-            for child_node in node.value:
-                self.add_child(child_node)
-        elif isinstance(node, yaml.MappingNode):
-            self.children = {}
-            for k, child_node in node.value:
-                self.add_child(child_node, k)
-
-    def add_child(self, node, key=None):
-        locator = YamlLocator(self.location, node.start_mark.line + 1, node.start_mark.column + 1)
-        if key is not None:
-            # Dict
-            if key.tag == MERGE_TAG:
-                for merge_key, merge_node in node.value:
-                    self.add_child(merge_node, merge_key)
-            else:
-                self.children[key.value] = locator
-        else:
-            # List
-            self.children.append(locator)
-        locator.add_children(node)
-
-
-def construct_yaml_map(self, node):
-    data = OrderedDict()
-    yield data
-    value = self.construct_mapping(node)
-    data.update(value)
-
-
-yaml.constructor.SafeConstructor.add_constructor(MAP_TAG, construct_yaml_map)
-
-
-class YamlReader(Reader):
-    """
-    ARIA YAML reader.
-    """
-
-    def read(self):
-        data = self.load()
-        try:
-            data = unicode(data)
-            # see issue here:
-            # https://bitbucket.org/ruamel/yaml/issues/61/roundtriploader-causes-exceptions-with
-            #yaml_loader = yaml.RoundTripLoader(data)
-            yaml_loader = yaml.SafeLoader(data)
-            try:
-                node = yaml_loader.get_single_node()
-                locator = YamlLocator(self.loader.location, 0, 0)
-                if node is not None:
-                    locator.add_children(node)
-                    raw = yaml_loader.construct_document(node)
-                else:
-                    raw = OrderedDict()
-                #locator.dump()
-                setattr(raw, '_locator', locator)
-                return raw
-            finally:
-                yaml_loader.dispose()
-        except yaml.parser.MarkedYAMLError as e:
-            context = e.context or 'while parsing'
-            problem = e.problem
-            line = e.problem_mark.line
-            column = e.problem_mark.column
-            snippet = e.problem_mark.get_snippet()
-            raise ReaderSyntaxError('YAML %s: %s %s' %
-                                    (e.__class__.__name__, problem, context),
-                                    location=self.loader.location,
-                                    line=line,
-                                    column=column,
-                                    snippet=snippet,
-                                    cause=e)
-        except Exception as e:
-            raise ReaderSyntaxError('YAML: %s' % e, cause=e)
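
The loader calls above, in isolation, assuming the ruamel.yaml release contemporary with ARIA 0.1.1; this is how the reader obtains the node marks that feed YamlLocator. The document text is made up.

from ruamel import yaml

loader = yaml.SafeLoader(u'a: 1\nb:\n  c: 2\n')
try:
    node = loader.get_single_node()
    # start_mark carries the 0-based line/column that YamlLocator shifts to 1-based
    print(node.start_mark.line, node.start_mark.column)
    data = loader.construct_document(node)
finally:
    loader.dispose()
print(data)
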
diff --git a/apache-ariatosca-0.1.1/aria/parser/specification.py b/apache-ariatosca-0.1.1/aria/parser/specification.py
deleted file mode 100644
index 4f452b8..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/specification.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for cross-referencing code with specification documents.
-"""
-
-import re
-
-from ..extension import parser
-from ..utils.collections import OrderedDict
-from ..utils.specification import (DSL_SPECIFICATIONS, implements_specification) # pylint: disable=unused-import
-
-
-def iter_specifications():
-    """
-    Iterates all specification assignments in the codebase.
-    """
-    def iter_sections(spec, sections):
-        for k in sorted(sections.keys(), key=_section_key):
-            details = OrderedDict()
-            details['code'] = sections[k]['code']
-            yield k, _fix_details(sections[k], spec)
-
-    for spec, sections in DSL_SPECIFICATIONS.iteritems():
-        yield spec, iter_sections(spec, sections)
-
-
-def _section_key(value):
-    try:
-        parts = value.split('-', 1)
-        first = tuple(int(v) for v in parts[0].split('.'))
-        second = parts[1] if len(parts) > 1 else None
-        return (first, second)
-    except ValueError:
-        return value
-
-
-def _fix_details(details, spec):
-    code = details.get('code')
-    doc = details.get('doc')
-    url = parser.specification_url().get(spec)
-
-    if (url is not None) and (doc is not None):
-        # Look for a URL in ReST docstring that begins with our url
-        pattern = r'<?('
-        for char in url:
-            pattern += r'\s*'
-            pattern += re.escape(char)
-        pattern += r'[^>]+)>'
-        match = re.search(pattern, doc)
-        if match:
-            url = re.sub(r'\s+', '', match.group(1))
-
-    return OrderedDict((
-        ('code', code),
-        ('url', url)))
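
A usage sketch of ``iter_specifications`` above; it assumes the ARIA parser extensions that populate ``DSL_SPECIFICATIONS`` (e.g. the TOSCA profile modules) have already been imported and registered.

from aria.parser.specification import iter_specifications

for spec, sections in iter_specifications():
    print(spec)
    for section, details in sections:
        print('  {0}: {1}'.format(section, details['url']))
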
diff --git a/apache-ariatosca-0.1.1/aria/parser/validation/context.py b/apache-ariatosca-0.1.1/aria/parser/validation/context.py
deleted file mode 100644
index ef641bd..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/validation/context.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .issue import Issue
-from ...utils.threading import LockedList
-from ...utils.collections import FrozenList
-from ...utils.exceptions import print_exception
-from ...utils.console import puts, Colored, indent
-from ...utils.formatting import as_raw
-
-
-class ValidationContext(object):
-    """
-    Validation context.
-
-    :ivar allow_unknown_fields: when ``False`` (the default) will report an issue if an unknown
-     field is used
-    :vartype allow_unknown_fields: bool
-    :ivar allow_primitive_coersion: when ``False`` (the default) will not attempt to coerce
-     primitive field types
-    :vartype allow_primitive_coersion: bool
-    :ivar max_level: maximum validation level to report (default is all)
-    :vartype max_level: int
-    """
-
-    def __init__(self):
-        self.allow_unknown_fields = False
-        self.allow_primitive_coersion = False
-        self.max_level = Issue.ALL
-
-        self._issues = LockedList()
-
-    def report(self, message=None, exception=None, location=None, line=None,
-               column=None, locator=None, snippet=None, level=Issue.PLATFORM, issue=None):
-        if issue is None:
-            issue = Issue(message, exception, location, line, column, locator, snippet, level)
-
-        # Avoid duplicate issues
-        with self._issues:
-            for i in self._issues:
-                if str(i) == str(issue):
-                    return
-
-            self._issues.append(issue)
-
-    @property
-    def has_issues(self):
-        return len(self._issues) > 0
-
-    @property
-    def issues(self):
-        issues = [i for i in self._issues if i.level <= self.max_level]
-        issues.sort(key=lambda i: (i.level, i.location, i.line, i.column, i.message))
-        return FrozenList(issues)
-
-    @property
-    def issues_as_raw(self):
-        return [as_raw(i) for i in self.issues]
-
-    def dump_issues(self):
-        issues = self.issues
-        if issues:
-            puts(Colored.blue('Validation issues:', bold=True))
-            with indent(2):
-                for issue in issues:
-                    puts(Colored.blue(issue.heading_as_str))
-                    details = issue.details_as_str
-                    if details:
-                        with indent(3):
-                            puts(details)
-                    if issue.exception is not None:
-                        with indent(3):
-                            print_exception(issue.exception)
-            return True
-        return False
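
A short sketch of reporting and dumping issues with the context above; the message and location are made up.

from aria.parser.validation.context import ValidationContext
from aria.parser.validation.issue import Issue

context = ValidationContext()
context.report(message='unknown field "foo"', location='service.yaml',
               line=12, column=3, level=Issue.FIELD)
if context.has_issues:
    context.dump_issues()
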
diff --git a/apache-ariatosca-0.1.1/aria/parser/validation/issue.py b/apache-ariatosca-0.1.1/aria/parser/validation/issue.py
deleted file mode 100644
index db8065d..0000000
--- a/apache-ariatosca-0.1.1/aria/parser/validation/issue.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'collections'
-
-from ...utils.collections import OrderedDict
-from ...utils.type import full_type_name
-
-
-class Issue(object):
-    PLATFORM = 0
-    """
-    Platform error (e.g. I/O, hardware, a bug in ARIA)
-    """
-
-    SYNTAX = 1
-    """
-    Syntax and format (e.g. YAML, XML, JSON)
-    """
-
-    FIELD = 2
-    """
-    Single field
-    """
-
-    BETWEEN_FIELDS = 3
-    """
-    Relationships between fields within the type (internal grammar)
-    """
-
-    BETWEEN_TYPES = 4
-    """
-    Relationships between types (e.g. inheritance, external grammar)
-    """
-
-    BETWEEN_INSTANCES = 5
-    """
-    Topology (e.g. static requirements and capabilities)
-    """
-
-    EXTERNAL = 6
-    """
-    External (e.g. live requirements and capabilities)
-    """
-
-    ALL = 100
-
-    def __init__(self, message=None, exception=None, location=None, line=None,
-                 column=None, locator=None, snippet=None, level=0):
-        if message is not None:
-            self.message = str(message)
-        elif exception is not None:
-            self.message = str(exception)
-        else:
-            self.message = 'unknown issue'
-
-        self.exception = exception
-
-        if locator is not None:
-            self.location = locator.location
-            self.line = locator.line
-            self.column = locator.column
-        else:
-            self.location = location
-            self.line = line
-            self.column = column
-
-        self.snippet = snippet
-        self.level = level
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('level', self.level),
-            ('message', self.message),
-            ('location', self.location),
-            ('line', self.line),
-            ('column', self.column),
-            ('snippet', self.snippet),
-            ('exception', full_type_name(self.exception) if self.exception else None)))
-
-    @property
-    def locator_as_str(self):
-        if self.location is not None:
-            if self.line is not None:
-                if self.column is not None:
-                    return '"%s":%d:%d' % (self.location, self.line, self.column)
-                else:
-                    return '"%s":%d' % (self.location, self.line)
-            else:
-                return '"%s"' % self.location
-        else:
-            return None
-
-    @property
-    def heading_as_str(self):
-        return '%d: %s' % (self.level, self.message)
-
-    @property
-    def details_as_str(self):
-        details_str = ''
-        locator = self.locator_as_str
-        if locator is not None:
-            details_str += '@%s' % locator
-        if self.snippet is not None:
-            details_str += '\n%s' % self.snippet
-        return details_str
-
-    def __str__(self):
-        heading_str = self.heading_as_str
-        details = self.details_as_str
-        if details:
-            heading_str += ', ' + details
-        return heading_str
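
How an issue renders, given the properties above; the values are made up.

from aria.parser.validation.issue import Issue

issue = Issue(message='bad value', location='service.yaml', line=3, column=7,
              level=Issue.FIELD)
print(str(issue))   # -> 2: bad value, @"service.yaml":3:7
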
diff --git a/apache-ariatosca-0.1.1/aria/storage/api.py b/apache-ariatosca-0.1.1/aria/storage/api.py
deleted file mode 100644
index a337743..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/api.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Storage APIs.
-"""
-
-import threading
-
-
-class StorageAPI(object):
-    """
-    Base class for storage APIs.
-    """
-    def create(self, **kwargs):
-        """
-        Create a storage API.
-        :param kwargs:
-        :return:
-        """
-        raise NotImplementedError('Subclass must implement abstract create method')
-
-
-class ModelAPI(StorageAPI):
-    """
-    Base class for model APIs ("MAPI").
-    """
-    def __init__(self, model_cls, name=None, **kwargs):
-        """
-        :param model_cls: class representing the model
-        :param name: name of the model
-        """
-        super(ModelAPI, self).__init__(**kwargs)
-        self._model_cls = model_cls
-        self._name = name or model_cls.__modelname__
-        self._thread_local = threading.local()
-        self._thread_local._instrumentation = []
-
-    @property
-    def _instrumentation(self):
-        if not hasattr(self._thread_local, '_instrumentation'):
-            self._thread_local._instrumentation = []
-        return self._thread_local._instrumentation
-
-
-    @property
-    def name(self):
-        """
-        Name of the class.
-
-        :type: :obj:`basestring`
-        """
-        return self._name
-
-    @property
-    def model_cls(self):
-        """
-        Class representing the model
-
-        :type: :obj:`Type`
-        """
-        return self._model_cls
-
-    def get(self, entry_id, filters=None, **kwargs):
-        """
-        Gets a model from storage.
-
-        :param entry_id:
-        """
-        raise NotImplementedError('Subclass must implement abstract get method')
-
-    def put(self, entry, **kwargs):
-        """
-        Puts a model in storage.
-
-        :param entry:
-        """
-        raise NotImplementedError('Subclass must implement abstract put method')
-
-    def delete(self, entry_id, **kwargs):
-        """
-        Deletes a model from storage.
-
-        :param entry_id:
-        """
-        raise NotImplementedError('Subclass must implement abstract delete method')
-
-    def __iter__(self):
-        return self.iter()
-
-    def iter(self, **kwargs):
-        """
-        Iterate over all models in storage.
-        """
-        raise NotImplementedError('Subclass must implement abstract iter method')
-
-    def update(self, entry, **kwargs):
-        """
-        Update a model in storage.
-
-        :param entry:
-        :param kwargs:
-        """
-        raise NotImplementedError('Subclass must implement abstract update method')
-
-
-class ResourceAPI(StorageAPI):
-    """
-    Base class for resource APIs ("RAPI").
-    """
-    def __init__(self, name, **kwargs):
-        """
-        :param name: resource type
-        """
-        super(ResourceAPI, self).__init__(**kwargs)
-        self._name = name
-
-    @property
-    def name(self):
-        """
-        Name of resource.
-
-        :type: :obj:`basestring`
-        """
-        return self._name
-
-    def read(self, entry_id, path, **kwargs):
-        """
-        Get a bytestream for a resource from storage.
-
-        :param entry_id:
-        :param path:
-        """
-        raise NotImplementedError('Subclass must implement abstract read method')
-
-    def delete(self, entry_id, path, **kwargs):
-        """
-        Delete a resource from storage.
-
-        :param entry_id:
-        :param path:
-        """
-        raise NotImplementedError('Subclass must implement abstract delete method')
-
-    def download(self, entry_id, destination, path=None, **kwargs):
-        """
-        Download a resource from storage.
-
-        :param entry_id:
-        :param destination:
-        :param path:
-        """
-        raise NotImplementedError('Subclass must implement abstract download method')
-
-    def upload(self, entry_id, source, path=None, **kwargs):
-        """
-        Upload a resource to storage.
-
-        :param entry_id:
-        :param source:
-        :param path:
-        """
-        raise NotImplementedError('Subclass must implement abstract upload method')
-
-
-def generate_lower_name(model_cls):
-    """
-    Retrieves the lowercase storage name for a model class: ``__mapiname__`` if set, otherwise
-    ``__tablename__`` (e.g. ``SomeClass`` -> ``some_class``).
-
-    :param model_cls: class to evaluate
-    :return: lowercase name
-    :rtype: basestring
-    """
-    return getattr(model_cls, '__mapiname__', model_cls.__tablename__)
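
A toy in-memory MAPI sketch against the abstract API above, to show which methods a concrete implementation fills in; the ``entry.id`` attribute is an assumption, and real deployments use the SQLAlchemy MAPI.

from aria.storage import api

class InMemoryModelAPI(api.ModelAPI):
    """Toy MAPI that keeps entries in a dict keyed by a (hypothetical) ``id`` attribute."""

    def create(self, **kwargs):
        self._store = {}

    def get(self, entry_id, filters=None, **kwargs):
        return self._store[entry_id]

    def put(self, entry, **kwargs):
        self._store[entry.id] = entry
        return entry

    def delete(self, entry_id, **kwargs):
        return self._store.pop(entry_id)

    def iter(self, **kwargs):
        return iter(self._store.values())
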
diff --git a/apache-ariatosca-0.1.1/aria/storage/collection_instrumentation.py b/apache-ariatosca-0.1.1/aria/storage/collection_instrumentation.py
deleted file mode 100644
index c90cb18..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/collection_instrumentation.py
+++ /dev/null
@@ -1,314 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for instrumenting collections of models in storage.
-"""
-
-from . import exceptions
-
-
-class _InstrumentedCollection(object):
-
-    def __init__(self,
-                 mapi,
-                 parent,
-                 field_name,
-                 field_cls,
-                 seq=None,
-                 is_top_level=True,
-                 **kwargs):
-        self._mapi = mapi
-        self._parent = parent
-        self._field_name = field_name
-        self._is_top_level = is_top_level
-        self._field_cls = field_cls
-        self._load(seq, **kwargs)
-
-    @property
-    def _raw(self):
-        raise NotImplementedError
-
-    def _load(self, seq, **kwargs):
-        """
-        Instantiates the object from existing seq.
-
-        :param seq: the original sequence to load from
-        """
-        raise NotImplementedError
-
-    def _set(self, key, value):
-        """
-        Sets the changes for the current object (not in the database).
-
-        :param key:
-        :param value:
-        """
-        raise NotImplementedError
-
-    def _del(self, collection, key):
-        raise NotImplementedError
-
-    def _instrument(self, key, value):
-        """
-        Instruments any collection to track changes (and ease of access).
-
-        :param key:
-        :param value:
-        """
-        if isinstance(value, _InstrumentedCollection):
-            return value
-        elif isinstance(value, dict):
-            instrumentation_cls = _InstrumentedDict
-        elif isinstance(value, list):
-            instrumentation_cls = _InstrumentedList
-        else:
-            return value
-
-        return instrumentation_cls(self._mapi, self, key, self._field_cls, value, False)
-
-    def _raw_value(self, value):
-        """
-        Gets the raw value.
-
-        :param value:
-        """
-        if isinstance(value, self._field_cls):
-            return value.value
-        return value
-
-    def _encapsulate_value(self, key, value):
-        """
-        Creates a new item class if needed.
-
-        :param key:
-        :param value:
-        """
-        if isinstance(value, self._field_cls):
-            return value
-        # If it is not wrapped
-        return self._field_cls.wrap(key, value)
-
-    def __setitem__(self, key, value):
-        """
-        Updates the values in both the local and the database locations.
-
-        :param key:
-        :param value:
-        """
-        self._set(key, value)
-        if self._is_top_level:
-            # We are at the top level
-            field = getattr(self._parent, self._field_name)
-            self._set_field(
-                field, key, value if key in field else self._encapsulate_value(key, value))
-            self._mapi.update(self._parent)
-        else:
-            # We are not at the top level
-            self._set_field(self._parent, self._field_name, self)
-
-    def _set_field(self, collection, key, value):
-        """
-        Enables updating the current change in the ancestors.
-
-        :param collection: collection to change
-        :param key: key for the specific field
-        :param value: new value
-        """
-        if isinstance(value, _InstrumentedCollection):
-            value = value._raw
-        if key in collection and isinstance(collection[key], self._field_cls):
-            if isinstance(collection[key], _InstrumentedCollection):
-                self._del(collection, key)
-            collection[key].value = value
-        else:
-            collection[key] = value
-        return collection[key]
-
-    def __deepcopy__(self, *args, **kwargs):
-        return self._raw
-
-
-class _InstrumentedDict(_InstrumentedCollection, dict):
-
-    def _load(self, dict_=None, **kwargs):
-        dict.__init__(
-            self,
-            tuple((key, self._raw_value(value)) for key, value in (dict_ or {}).items()),
-            **kwargs)
-
-    def update(self, dict_=None, **kwargs):
-        dict_ = dict_ or {}
-        for key, value in dict_.items():
-            self[key] = value
-        for key, value in kwargs.items():
-            self[key] = value
-
-    def __getitem__(self, key):
-        return self._instrument(key, dict.__getitem__(self, key))
-
-    def _set(self, key, value):
-        dict.__setitem__(self, key, self._raw_value(value))
-
-    @property
-    def _raw(self):
-        return dict(self)
-
-    def _del(self, collection, key):
-        del collection[key]
-
-
-class _InstrumentedList(_InstrumentedCollection, list):
-
-    def _load(self, list_=None, **kwargs):
-        list.__init__(self, list(item for item in list_ or []))
-
-    def append(self, value):
-        self.insert(len(self), value)
-
-    def insert(self, index, value):
-        list.insert(self, index, self._raw_value(value))
-        if self._is_top_level:
-            field = getattr(self._parent, self._field_name)
-            field.insert(index, self._encapsulate_value(index, value))
-        else:
-            self._parent[self._field_name] = self
-
-    def __getitem__(self, key):
-        return self._instrument(key, list.__getitem__(self, key))
-
-    def _set(self, key, value):
-        list.__setitem__(self, key, value)
-
-    def _del(self, collection, key):
-        del collection[key]
-
-    @property
-    def _raw(self):
-        return list(self)
-
-
-class _WrappedBase(object):
-
-    def __init__(self, wrapped, instrumentation):
-        self._wrapped = wrapped
-        self._instrumentation = instrumentation
-
-
-class _InstrumentedModel(_WrappedBase):
-
-    def __init__(self, mapi, *args, **kwargs):
-        """
-        Wraps the original model with collection instrumentation.
-
-        :param wrapped: model to be instrumented
-        :param mapi: MAPI for the wrapped model
-        """
-        super(_InstrumentedModel, self).__init__(*args, **kwargs)
-        self._mapi = mapi
-        self._apply_instrumentation()
-
-    def __getattr__(self, item):
-        return_value = getattr(self._wrapped, item)
-        if isinstance(return_value, self._wrapped.__class__):
-            return _create_instrumented_model(return_value, self._mapi, self._instrumentation)
-        if isinstance(return_value, (list, dict)):
-            return _create_wrapped_model(return_value, self._mapi, self._instrumentation)
-        return return_value
-
-    def _apply_instrumentation(self):
-        for field in self._instrumentation:
-            field_name = field.key
-            field_cls = field.mapper.class_
-            field = getattr(self._wrapped, field_name)
-
-            # Preserve the original value. e.g. original attributes would be located under
-            # _attributes
-            setattr(self, '_{0}'.format(field_name), field)
-
-            # set instrumented value
-            if isinstance(field, dict):
-                instrumentation_cls = _InstrumentedDict
-            elif isinstance(field, list):
-                instrumentation_cls = _InstrumentedList
-            else:
-                # TODO: raise proper error
-                raise exceptions.StorageError(
-                    "ARIA supports instrumentation for dict and list. Field {field} of the "
-                    "class {model} is of {type} type.".format(
-                        field=field,
-                        model=self._wrapped,
-                        type=type(field)))
-
-            instrumented_class = instrumentation_cls(seq=field,
-                                                     parent=self._wrapped,
-                                                     mapi=self._mapi,
-                                                     field_name=field_name,
-                                                     field_cls=field_cls)
-            setattr(self, field_name, instrumented_class)
-
-
-class _WrappedModel(_WrappedBase):
-
-    def __init__(self, instrumentation_kwargs, *args, **kwargs):
-        """
-        :param instrumented_cls: class to be instrumented
-        :param instrumentation_cls: instrumentation cls
-        :param wrapped: currently wrapped instance
-        :param kwargs: passed to the instrumented class
-        """
-        super(_WrappedModel, self).__init__(*args, **kwargs)
-        self._kwargs = instrumentation_kwargs
-
-    def _wrap(self, value):
-        if value.__class__ in (class_.class_ for class_ in self._instrumentation):
-            return _create_instrumented_model(
-                value, instrumentation=self._instrumentation, **self._kwargs)
-        elif hasattr(value, 'metadata') or isinstance(value, (dict, list)):
-            # Basically checks that the value is indeed an sqlmodel (it should have metadata)
-            return _create_wrapped_model(
-                value, instrumentation=self._instrumentation, **self._kwargs)
-        return value
-
-    def __getattr__(self, item):
-        if hasattr(self, '_wrapped'):
-            return self._wrap(getattr(self._wrapped, item))
-        else:
-            return super(_WrappedModel, self).__getattribute__(item)
-
-    def __getitem__(self, item):
-        return self._wrap(self._wrapped[item])
-
-
-def _create_instrumented_model(original_model, mapi, instrumentation):
-    return type('Instrumented{0}'.format(original_model.__class__.__name__),
-                (_InstrumentedModel,),
-                {})(wrapped=original_model, instrumentation=instrumentation, mapi=mapi)
-
-
-def _create_wrapped_model(original_model, mapi, instrumentation):
-    return type('Wrapped{0}'.format(original_model.__class__.__name__),
-                (_WrappedModel, ),
-                {})(wrapped=original_model,
-                    instrumentation=instrumentation,
-                    instrumentation_kwargs=dict(mapi=mapi))
-
-
-def instrument(instrumentation, original_model, mapi):
-    for instrumented_field in instrumentation:
-        if isinstance(original_model, instrumented_field.class_):
-            return _create_instrumented_model(original_model, mapi, instrumentation)
-
-    return _create_wrapped_model(original_model, mapi, instrumentation)
diff --git a/apache-ariatosca-0.1.1/aria/storage/core.py b/apache-ariatosca-0.1.1/aria/storage/core.py
deleted file mode 100644
index 74b1147..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/core.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Storage API management.
-"""
-
-import copy
-from contextlib import contextmanager
-
-from aria.logger import LoggerMixin
-from . import sql_mapi
-
-__all__ = (
-    'Storage',
-    'ModelStorage',
-    'ResourceStorage'
-)
-
-
-class Storage(LoggerMixin):
-    """
-    Base class for storage managers.
-    """
-    def __init__(self,
-                 api_cls,
-                 api_kwargs=None,
-                 items=(),
-                 initiator=None,
-                 initiator_kwargs=None,
-                 **kwargs):
-        """
-        :param api_cls: API class for each entry
-        :param api_kwargs:
-        :param items: items to register
-        :param initiator: function that initializes the storage before first use; it should return
-         a dict, which is passed to the API in addition to ``api_kwargs``; this enables the
-         creation of non-serializable objects
-        :param initiator_kwargs:
-        :param kwargs:
-        """
-        super(Storage, self).__init__(**kwargs)
-        self.api = api_cls
-        self.registered = {}
-        self._initiator = initiator
-        self._initiator_kwargs = initiator_kwargs or {}
-        self._api_kwargs = api_kwargs or {}
-        self._additional_api_kwargs = {}
-        if self._initiator:
-            self._additional_api_kwargs = self._initiator(**self._initiator_kwargs)
-        for item in items:
-            self.register(item)
-        self.logger.debug('{name} object is ready: {0!r}'.format(
-            self, name=self.__class__.__name__))
-
-    @property
-    def _all_api_kwargs(self):
-        kwargs = self._api_kwargs.copy()
-        kwargs.update(self._additional_api_kwargs)
-        return kwargs
-
-    def __repr__(self):
-        return '{name}(api={self.api})'.format(name=self.__class__.__name__, self=self)
-
-    def __getattr__(self, item):
-        try:
-            return self.registered[item]
-        except KeyError:
-            return super(Storage, self).__getattribute__(item)
-
-    @property
-    def serialization_dict(self):
-        return {
-            'api': self.api,
-            'api_kwargs': self._api_kwargs,
-            'initiator': self._initiator,
-            'initiator_kwargs': self._initiator_kwargs
-        }
-
-    def register(self, entry):
-        """
-        Register an API.
-
-        :param entry:
-        """
-        raise NotImplementedError('Subclass must implement abstract register method')
-
-
-class ResourceStorage(Storage):
-    """
-    Manages storage resource APIs ("RAPIs").
-    """
-    def register(self, name):
-        """
-        Register a storage resource API ("RAPI").
-
-        :param name: name
-        """
-        self.registered[name] = self.api(name=name, **self._all_api_kwargs)
-        self.registered[name].create()
-        self.logger.debug('setup {name} in storage {self!r}'.format(name=name, self=self))
-
-
-class ModelStorage(Storage):
-    """
-    Manages storage model APIs ("MAPIs").
-    """
-    def __init__(self, *args, **kwargs):
-        if kwargs.get('initiator', None) is None:
-            kwargs['initiator'] = sql_mapi.init_storage
-        super(ModelStorage, self).__init__(*args, **kwargs)
-
-    def register(self, model_cls):
-        """
-        Register a storage model API ("MAPI").
-
-        :param model_cls: model API to register
-        """
-        model_name = model_cls.__modelname__
-        if model_name in self.registered:
-            self.logger.debug('{name} already in storage {self!r}'.format(name=model_name,
-                                                                          self=self))
-            return
-        self.registered[model_name] = self.api(name=model_name,
-                                               model_cls=model_cls,
-                                               **self._all_api_kwargs)
-        self.registered[model_name].create()
-        self.logger.debug('setup {name} in storage {self!r}'.format(name=model_name, self=self))
-
-    def drop(self):
-        """
-        Drop all the tables.
-        """
-        for mapi in self.registered.values():
-            mapi.drop()
-
-    @contextmanager
-    def instrument(self, *instrumentation):
-        original_instrumentation = {}
-
-        try:
-            for mapi in self.registered.values():
-                original_instrumentation[mapi] = copy.copy(mapi._instrumentation)
-                mapi._instrumentation.extend(instrumentation)
-            yield self
-        finally:
-            for mapi in self.registered.values():
-                mapi._instrumentation[:] = original_instrumentation[mapi]
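
A sketch of wiring a resource storage manager as defined above, using the file system RAPI that appears further down in this diff; the directory and item name are made up, and the import assumes the package re-exports ``ResourceStorage`` per ``__all__``.

from aria.storage import ResourceStorage
from aria.storage.filesystem_rapi import FileSystemResourceAPI

storage = ResourceStorage(api_cls=FileSystemResourceAPI,
                          api_kwargs=dict(directory='/tmp/aria-example-storage'),
                          items=('service_template',))
# Registered RAPIs become attributes on the manager.
print(storage.service_template)   # -> FileSystemResourceAPI(directory=/tmp/aria-example-storage)
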
diff --git a/apache-ariatosca-0.1.1/aria/storage/exceptions.py b/apache-ariatosca-0.1.1/aria/storage/exceptions.py
deleted file mode 100644
index c538876..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/exceptions.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Storage exceptions.
-"""
-
-from .. import exceptions
-
-
-class StorageError(exceptions.AriaError):
-    """
-    General storage exception
-    """
-    pass
-
-
-class NotFoundError(StorageError):
-    pass
diff --git a/apache-ariatosca-0.1.1/aria/storage/filesystem_rapi.py b/apache-ariatosca-0.1.1/aria/storage/filesystem_rapi.py
deleted file mode 100644
index b425fa2..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/filesystem_rapi.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-File system implementation of the storage resource API ("RAPI").
-"""
-
-import os
-import shutil
-from multiprocessing import RLock
-from contextlib import contextmanager
-from functools import partial
-from distutils import dir_util                                # https://github.com/PyCQA/pylint/issues/73; pylint: disable=no-name-in-module
-
-from aria.storage import (
-    api,
-    exceptions
-)
-
-
-class FileSystemResourceAPI(api.ResourceAPI):
-    """
-    File system implementation of the storage resource API ("RAPI").
-    """
-
-    def __init__(self, directory, **kwargs):
-        """
-        :param directory: root dir for storage
-        """
-        super(FileSystemResourceAPI, self).__init__(**kwargs)
-        self.directory = directory
-        self.base_path = os.path.join(self.directory, self.name)
-        self._join_path = partial(os.path.join, self.base_path)
-        self._lock = RLock()
-
-    @contextmanager
-    def connect(self):
-        """
-        Establishes a connection and destroys it after use.
-        """
-        try:
-            self._establish_connection()
-            yield self
-        except BaseException as e:
-            raise exceptions.StorageError(str(e))
-        finally:
-            self._destroy_connection()
-
-    def _establish_connection(self):
-        """
-        Establishes a connection. Used in the ``connect`` context manager.
-        """
-        self._lock.acquire()
-
-    def _destroy_connection(self):
-        """
-        Destroys a connection. Used in the ``connect`` context manager.
-        """
-        self._lock.release()
-
-    def __repr__(self):
-        return '{cls.__name__}(directory={self.directory})'.format(
-            cls=self.__class__, self=self)
-
-    def create(self, **kwargs):
-        """
-        Creates the directory for this resource API. Tries to create the root storage directory
-        as well.
-        """
-        try:
-            os.makedirs(self.directory)
-        except (OSError, IOError):
-            pass
-        try:
-            os.makedirs(self.base_path)
-        except (OSError, IOError):
-            pass
-
-    def read(self, entry_id, path, **_):
-        """
-        Retrieves the contents of a file.
-
-        :param entry_id: entry ID
-        :param path: path to resource
-        :return: contents of the file
-        :rtype: bytes
-        """
-        resource_relative_path = os.path.join(self.name, entry_id, path or '')
-        resource = os.path.join(self.directory, resource_relative_path)
-        if not os.path.exists(resource):
-            raise exceptions.StorageError("Resource {0} does not exist".
-                                          format(resource_relative_path))
-        if not os.path.isfile(resource):
-            resources = os.listdir(resource)
-            if len(resources) != 1:
-                raise exceptions.StorageError(
-                    'Failed to read {0}; Reading a directory is '
-                    'only allowed when it contains a single resource'.format(resource))
-            resource = os.path.join(resource, resources[0])
-        with open(resource, 'rb') as resource_file:
-            return resource_file.read()
-
-    def download(self, entry_id, destination, path=None, **_):
-        """
-        Downloads a file or directory.
-
-        :param entry_id: entry ID
-        :param destination: download destination
-        :param path: path to download relative to the root of the entry (otherwise all)
-        """
-        resource_relative_path = os.path.join(self.name, entry_id, path or '')
-        resource = os.path.join(self.directory, resource_relative_path)
-        if not os.path.exists(resource):
-            raise exceptions.StorageError("Resource {0} does not exist".
-                                          format(resource_relative_path))
-        if os.path.isfile(resource):
-            shutil.copy2(resource, destination)
-        else:
-            dir_util.copy_tree(resource, destination)  # pylint: disable=no-member
-
-    def upload(self, entry_id, source, path=None, **_):
-        """
-        Uploads a file or directory.
-
-        :param entry_id: entry ID
-        :param source: source of the files to upload
-        :param path: the destination of the file/s relative to the entry root dir.
-        """
-        resource_directory = os.path.join(self.directory, self.name, entry_id)
-        if not os.path.exists(resource_directory):
-            os.makedirs(resource_directory)
-        destination = os.path.join(resource_directory, path or '')
-        if os.path.isfile(source):
-            shutil.copy2(source, destination)
-        else:
-            dir_util.copy_tree(source, destination)                                       # pylint: disable=no-member
-
-    def delete(self, entry_id, path=None, **_):
-        """
-        Deletes a file or directory.
-
-        :param entry_id: entry ID
-        :param path: path to delete relative to the root of the entry (otherwise all)
-        """
-        destination = os.path.join(self.directory, self.name, entry_id, path or '')
-        if os.path.exists(destination):
-            if os.path.isfile(destination):
-                os.remove(destination)
-            else:
-                shutil.rmtree(destination)
-            return True
-        return False
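
Direct usage of the RAPI above, end to end, under a temporary directory; the entry ID, file name, and template content are made up.

import tempfile
from aria.storage.filesystem_rapi import FileSystemResourceAPI

root = tempfile.mkdtemp()
rapi = FileSystemResourceAPI(name='service_template', directory=root)
rapi.create()

# Upload a single file under entry "1", then read it back as bytes.
src = tempfile.NamedTemporaryFile(suffix='.yaml', delete=False)
src.write(b'tosca_definitions_version: tosca_simple_yaml_1_0\n')
src.close()
rapi.upload(entry_id='1', source=src.name, path='template.yaml')
print(rapi.read(entry_id='1', path='template.yaml'))
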
diff --git a/apache-ariatosca-0.1.1/aria/storage/sql_mapi.py b/apache-ariatosca-0.1.1/aria/storage/sql_mapi.py
deleted file mode 100644
index 975ada7..0000000
--- a/apache-ariatosca-0.1.1/aria/storage/sql_mapi.py
+++ /dev/null
@@ -1,439 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-SQLAlchemy implementation of the storage model API ("MAPI").
-"""
-
-import os
-import platform
-
-from sqlalchemy import (
-    create_engine,
-    orm,
-)
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm.exc import StaleDataError
-
-from aria.utils.collections import OrderedDict
-from . import (
-    api,
-    exceptions,
-    collection_instrumentation
-)
-
-_predicates = {'ge': '__ge__',
-               'gt': '__gt__',
-               'lt': '__lt__',
-               'le': '__le__',
-               'eq': '__eq__',
-               'ne': '__ne__'}
-
-
-class SQLAlchemyModelAPI(api.ModelAPI):
-    """
-    SQLAlchemy implementation of the storage model API ("MAPI").
-    """
-
-    def __init__(self,
-                 engine,
-                 session,
-                 **kwargs):
-        super(SQLAlchemyModelAPI, self).__init__(**kwargs)
-        self._engine = engine
-        self._session = session
-
-    def get(self, entry_id, include=None, **kwargs):
-        """
-        Returns a single result based on the model class and element ID
-        """
-        query = self._get_query(include, {'id': entry_id})
-        result = query.first()
-
-        if not result:
-            raise exceptions.NotFoundError(
-                'Requested `{0}` with ID `{1}` was not found'
-                .format(self.model_cls.__name__, entry_id)
-            )
-        return self._instrument(result)
-
-    def get_by_name(self, entry_name, include=None, **kwargs):
-        assert hasattr(self.model_cls, 'name')
-        result = self.list(include=include, filters={'name': entry_name})
-        if not result:
-            raise exceptions.NotFoundError(
-                'Requested {0} with name `{1}` was not found'
-                .format(self.model_cls.__name__, entry_name)
-            )
-        elif len(result) > 1:
-            raise exceptions.StorageError(
-                'Requested {0} with name `{1}` returned more than 1 value'
-                .format(self.model_cls.__name__, entry_name)
-            )
-        else:
-            return result[0]
-
-    def list(self,
-             include=None,
-             filters=None,
-             pagination=None,
-             sort=None,
-             **kwargs):
-        query = self._get_query(include, filters, sort)
-
-        results, total, size, offset = self._paginate(query, pagination)
-
-        return ListResult(
-            dict(total=total, size=size, offset=offset),
-            [self._instrument(result) for result in results]
-        )
-
-    def iter(self,
-             include=None,
-             filters=None,
-             sort=None,
-             **kwargs):
-        """
-        Yields ``model_class`` results, one at a time (possibly none).
-        """
-        for result in self._get_query(include, filters, sort):
-            yield self._instrument(result)
-
-    def put(self, entry, **kwargs):
-        """
-        Creates a ``model_class`` instance from a serializable ``model`` object.
-
-        :param entry: dict with relevant kwargs, or an instance of a class that has a ``to_dict``
-         method, and whose attributes match the columns of ``model_class`` (might also be just an
-         instance of ``model_class``)
-        :return: an instance of ``model_class``
-        """
-        self._session.add(entry)
-        self._safe_commit()
-        return entry
-
-    def delete(self, entry, **kwargs):
-        """
-        Deletes a single result based on the model class and element ID.
-        """
-        self._load_relationships(entry)
-        self._session.delete(entry)
-        self._safe_commit()
-        return entry
-
-    def update(self, entry, **kwargs):
-        """
-        Adds ``entry`` to the database session, and attempts to commit.
-
-        :return: updated instance
-        """
-        return self.put(entry)
-
-    def refresh(self, entry):
-        """
-        Reloads the instance with fresh information from the database.
-
-        :param entry: instance to be re-loaded from the database
-        :return: refreshed instance
-        """
-        self._session.refresh(entry)
-        self._load_relationships(entry)
-        return entry
-
-    def _destroy_connection(self):
-        pass
-
-    def _establish_connection(self):
-        pass
-
-    def create(self, checkfirst=True, create_all=True, **kwargs):
-        self.model_cls.__table__.create(self._engine, checkfirst=checkfirst)
-
-        if create_all:
-            # Also create any models that are defined dynamically (e.g. many-to-many helper
-            # tables created at runtime).
-            self.model_cls.metadata.create_all(bind=self._engine, checkfirst=checkfirst)
-
-    def drop(self):
-        """
-        Drops the table.
-        """
-        self.model_cls.__table__.drop(self._engine)
-
-    def _safe_commit(self):
-        """
-        Tries to commit changes in the session. SQLAlchemy errors are caught, the session is
-        rolled back, and a ``StorageError`` is raised instead.
-        """
-        try:
-            self._session.commit()
-        except StaleDataError as e:
-            self._session.rollback()
-            raise exceptions.StorageError('Version conflict: {0}'.format(str(e)))
-        except (SQLAlchemyError, ValueError) as e:
-            self._session.rollback()
-            raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-
-    def _get_base_query(self, include, joins):
-        """
-        Create the initial query from the model class and included columns.
-
-        :param include: (possibly empty) list of columns to include in the query
-        :return: SQLAlchemy AppenderQuery object
-        """
-        # If only some columns are included, query through the session object
-        if include:
-            # Make sure that attributes come before association proxies
-            include.sort(key=lambda x: x.is_clause_element)
-            query = self._session.query(*include)
-        else:
-            # If all columns should be returned, query directly from the model
-            query = self._session.query(self.model_cls)
-
-        query = query.join(*joins)
-        return query
-
-    @staticmethod
-    def _get_joins(model_class, columns):
-        """
-        Gets a list of all the tables on which we need to join.
-
-        :param columns: set of all attributes involved in the query
-        """
-
-        # Using an OrderedDict instead of a set because order is important
-        joins = OrderedDict()
-        for column_name in columns:
-            column = getattr(model_class, column_name)
-            while not column.is_attribute:
-                join_attr = column.local_attr
-                # This is a hack, to deal with the fact that SQLA doesn't
-                # fully support doing something like: `if join_attr in joins`,
-                # because some SQLA elements have their own comparators
-                join_attr_name = str(join_attr)
-                if join_attr_name not in joins:
-                    joins[join_attr_name] = join_attr
-                column = column.remote_attr
-
-        return joins.values()
-
-    @staticmethod
-    def _sort_query(query, sort=None):
-        """
-        Adds sorting clauses to the query.
-
-        :param query: base SQL query
-        :param sort: optional dictionary where keys are column names to sort by, and values are
-         the order (asc/desc)
-        :return: SQLAlchemy AppenderQuery object
-        """
-        if sort:
-            for column, order in sort.items():
-                if order == 'desc':
-                    column = column.desc()
-                query = query.order_by(column)
-        return query
-
-    def _filter_query(self, query, filters):
-        """
-        Adds filter clauses to the query.
-
-        :param query: base SQL query
-        :param filters: optional dictionary where keys are column names to filter by, and values
-         are values applicable for those columns (or lists of such values)
-        :return: SQLAlchemy AppenderQuery object
-        """
-        return self._add_value_filter(query, filters)
-
-    @staticmethod
-    def _add_value_filter(query, filters):
-        for column, value in filters.items():
-            if isinstance(value, dict):
-                for predicate, operand in value.items():
-                    query = query.filter(getattr(column, predicate)(operand))
-            elif isinstance(value, (list, tuple)):
-                query = query.filter(column.in_(value))
-            else:
-                query = query.filter(column == value)
-
-        return query
-
-    def _get_query(self,
-                   include=None,
-                   filters=None,
-                   sort=None):
-        """
-        Gets a SQL query object based on the params passed.
-
-        :param include: optional list of columns to include in the query
-        :param filters: optional dictionary where keys are column names to filter by, and values
-         are values applicable for those columns (or lists of such values)
-        :param sort: optional dictionary where keys are column names to sort by, and values are the
-         order (asc/desc)
-        :return: sorted and filtered query with only the relevant columns
-        """
-        include, filters, sort, joins = self._get_joins_and_converted_columns(
-            include, filters, sort
-        )
-        filters = self._convert_operands(filters)
-
-        query = self._get_base_query(include, joins)
-        query = self._filter_query(query, filters)
-        query = self._sort_query(query, sort)
-        return query
-
-    @staticmethod
-    def _convert_operands(filters):
-        for column, conditions in filters.items():
-            if isinstance(conditions, dict):
-                for predicate, operand in conditions.items():
-                    if predicate not in _predicates:
-                        raise exceptions.StorageError(
-                            "{0} is not a valid predicate for filtering. Valid predicates are {1}"
-                            .format(predicate, ', '.join(_predicates.keys())))
-                    del filters[column][predicate]
-                    filters[column][_predicates[predicate]] = operand
-
-
-
-    def _get_joins_and_converted_columns(self,
-                                         include,
-                                         filters,
-                                         sort):
-        """
-        Gets a list of tables on which we need to join and the converted ``include``, ``filters``
-        and ```sort`` arguments (converted to actual SQLAlchemy column/label objects instead of
-        column names).
-        """
-        include = include or []
-        filters = filters or dict()
-        sort = sort or OrderedDict()
-
-        all_columns = set(include) | set(filters.keys()) | set(sort.keys())
-        joins = self._get_joins(self.model_cls, all_columns)
-
-        include, filters, sort = self._get_columns_from_field_names(
-            include, filters, sort
-        )
-        return include, filters, sort, joins
-
-    def _get_columns_from_field_names(self,
-                                      include,
-                                      filters,
-                                      sort):
-        """
-        Goes over the optional parameters (include, filters, sort) and replaces column names with
-        actual SQLAlchemy column objects.
-        """
-        include = [self._get_column(c) for c in include]
-        filters = dict((self._get_column(c), filters[c]) for c in filters)
-        sort = OrderedDict((self._get_column(c), sort[c]) for c in sort)
-
-        return include, filters, sort
-
-    def _get_column(self, column_name):
-        """
-        Returns the column on which an action (filtering, sorting, etc.) would need to be performed.
-        Can be either an attribute of the class, or an association proxy linked to a relationship
-        in the class.
-        """
-        column = getattr(self.model_cls, column_name)
-        if column.is_attribute:
-            return column
-        else:
-            # We need to get to the underlying attribute, so we move on to the
-            # next remote_attr until we reach one
-            while not column.remote_attr.is_attribute:
-                column = column.remote_attr
-            # Put a label on the remote attribute with the name of the column
-            return column.remote_attr.label(column_name)
-
-    @staticmethod
-    def _paginate(query, pagination):
-        """
-        Paginates the query by size and offset.
-
-        :param query: current SQLAlchemy query object
-        :param pagination: optional dict with size and offset keys
-        :return: tuple with four elements:
-         * results: ``size`` items starting from ``offset``
-         * the total count of items
-         * ``size`` [default: 0]
-         * ``offset`` [default: 0]
-        """
-        if pagination:
-            size = pagination.get('size', 0)
-            offset = pagination.get('offset', 0)
-            total = query.order_by(None).count()  # Fastest way to count
-            results = query.limit(size).offset(offset).all()
-            return results, total, size, offset
-        else:
-            results = query.all()
-            return results, len(results), 0, 0
-
-    @staticmethod
-    def _load_relationships(instance):
-        """
-        Helper method used to overcome a problem where the relationships that rely on joins aren't
-        being loaded automatically.
-        """
-        for rel in instance.__mapper__.relationships:
-            getattr(instance, rel.key)
-
-    def _instrument(self, model):
-        if self._instrumentation:
-            return collection_instrumentation.instrument(self._instrumentation, model, self)
-        else:
-            return model
-
-
-def init_storage(base_dir, filename='db.sqlite'):
-    """
-    Built-in ModelStorage initiator.
-
-    Creates a SQLAlchemy engine and a session to be passed to the MAPI.
-
-    ``initiator_kwargs`` must be passed to the ModelStorage and must hold the ``base_dir`` for the
-    location of the database file, as well as an optional filename. This creates an SQLite
-    database.
-
-    :param base_dir: directory of the database
-    :param filename: database file name
-    :return: dict with ``engine`` and ``session`` keys to be passed to the MAPI
-    """
-    uri = 'sqlite:///{platform_char}{path}'.format(
-        # Handles the Windows behavior where there is no root, only drives,
-        # so the path behaves as a relative path.
-        platform_char='' if 'Windows' in platform.system() else '/',
-
-        path=os.path.join(base_dir, filename))
-
-    engine = create_engine(uri, connect_args=dict(timeout=15))
-
-    session_factory = orm.sessionmaker(bind=engine)
-    session = orm.scoped_session(session_factory=session_factory)
-
-    return dict(engine=engine, session=session)
-
-
-class ListResult(list):
-    """
-    Contains results about the requested items.
-    """
-    def __init__(self, metadata, *args, **kwargs):
-        super(ListResult, self).__init__(*args, **kwargs)
-        self.metadata = metadata
-        self.items = self
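
To illustrate how the pieces above fit together: ``init_storage`` builds the SQLite engine/session pair that ``SQLAlchemyModelAPI`` expects, and ``list``/``iter`` take plain dicts for filters, sorting and pagination. A hedged sketch (the ``nodes`` MAPI instance and the field names are illustrative assumptions, not defined in this file):

    import tempfile

    from aria.storage.sql_mapi import init_storage

    # Returns {'engine': <Engine>, 'session': <scoped_session>} for the MAPI.
    initiator_kwargs = init_storage(base_dir=tempfile.mkdtemp())

    # Given a SQLAlchemyModelAPI instance bound to some model class, e.g. `nodes`,
    # query arguments are shaped like this (field names are illustrative):
    #   nodes.list(filters={'state': ['started', 'stopped']},     # IN-style filter
    #              sort={'name': 'asc'},
    #              pagination={'size': 10, 'offset': 0})
    #   nodes.list(filters={'version': {'gt': 1}})                # predicate filter
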
diff --git a/apache-ariatosca-0.1.1/aria/utils/archive.py b/apache-ariatosca-0.1.1/aria/utils/archive.py
deleted file mode 100644
index 29efcb1..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/archive.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Archive utilities.
-"""
-
-import os
-import tarfile
-import zipfile
-import tempfile
-from contextlib import closing
-
-
-def is_archive(source):
-    return tarfile.is_tarfile(source) or zipfile.is_zipfile(source)
-
-
-def extract_archive(source):
-    if tarfile.is_tarfile(source):
-        return untar(source)
-    elif zipfile.is_zipfile(source):
-        return unzip(source)
-    raise ValueError(
-        'Unsupported archive type provided or archive is not valid: {0}.'.format(source))
-
-
-def tar(source, destination):
-    with closing(tarfile.open(destination, 'w:gz')) as tar_archive:
-        tar_archive.add(source, arcname=os.path.basename(source))
-
-
-def untar(archive, destination=None):
-    if not destination:
-        destination = tempfile.mkdtemp()
-    with closing(tarfile.open(name=archive)) as tar_archive:
-        tar_archive.extractall(path=destination, members=tar_archive.getmembers())
-    return destination
-
-
-def zip(source, destination):
-    with closing(zipfile.ZipFile(destination, 'w')) as zip_file:
-        for root, _, files in os.walk(source):
-            for filename in files:
-                file_path = os.path.join(root, filename)
-                source_dir = os.path.dirname(source)
-                zip_file.write(
-                    file_path, os.path.relpath(file_path, source_dir))
-    return destination
-
-
-def unzip(archive, destination=None):
-    if not destination:
-        destination = tempfile.mkdtemp()
-    with closing(zipfile.ZipFile(archive, 'r')) as zip_file:
-        zip_file.extractall(destination)
-    return destination
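
A short usage sketch for the archive helpers above, assuming the module is importable as ``aria.utils.archive`` (the temporary paths are illustrative):

    import os
    import tempfile

    from aria.utils.archive import tar, extract_archive, is_archive

    source_dir = tempfile.mkdtemp()
    with open(os.path.join(source_dir, 'hello.txt'), 'w') as f:
        f.write('hello')

    archive_path = os.path.join(tempfile.mkdtemp(), 'source.tar.gz')
    tar(source_dir, archive_path)              # pack the directory into a .tar.gz

    assert is_archive(archive_path)
    extracted = extract_archive(archive_path)  # extracts into a new temp directory
    print(os.listdir(extracted))
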
diff --git a/apache-ariatosca-0.1.1/aria/utils/argparse.py b/apache-ariatosca-0.1.1/aria/utils/argparse.py
deleted file mode 100644
index a05a841..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/argparse.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Enhancements to Python's ``argparse`` module.
-"""
-
-from __future__ import absolute_import  # so we can import standard 'argparse'
-
-from argparse import ArgumentParser as BaseArgumentParser
-
-
-class ArgumentParser(BaseArgumentParser):
-    """
-    Enhanced argument parser.
-
-    Applied patch to fix `this issue <https://bugs.python.org/issue22433>`__.
-    """
-
-    def add_flag_argument(self, name, help_true=None, help_false=None, default=False):
-        """
-        Adds a flag argument as two arguments: ``--my-flag`` and ``--no-my-flag``.
-        """
-
-        dest = name.replace('-', '_')
-
-        if default:
-            if help_true is not None:
-                help_true += ' (default)'
-            else:
-                help_true = '(default)'
-        else:
-            if help_false is not None:
-                help_false += ' (default)'
-            else:
-                help_false = '(default)'
-
-        group = self.add_mutually_exclusive_group()
-        group.add_argument('--%s' % name, action='store_true', help=help_true)
-        group.add_argument('--no-%s' % name, dest=dest, action='store_false', help=help_false)
-
-        self.set_defaults(**{dest: default})
-
-    def _parse_optional(self, arg_string):
-
-        if self._is_positional(arg_string):
-            return None
-
-        # if the option string is present in the parser, return the action
-        if arg_string in self._option_string_actions:
-            action = self._option_string_actions[arg_string]
-            return action, arg_string, None
-
-        # if the option string before the "=" is present, return the action
-        if '=' in arg_string:
-            option_string, explicit_arg = arg_string.split('=', 1)
-            if option_string in self._option_string_actions:
-                action = self._option_string_actions[option_string]
-                return action, option_string, explicit_arg
-
-        # search through all possible prefixes of the option string
-        # and all actions in the parser for possible interpretations
-        option_tuples = self._get_option_tuples(arg_string)
-
-        # if multiple actions match, the option string was ambiguous
-        if len(option_tuples) > 1:
-            options = ', '.join(
-                [option_string for action, option_string, explicit_arg in option_tuples])
-            tup = arg_string, options
-            self.error('ambiguous option: %s could match %s' % tup)
-
-        # if exactly one action matched, this segmentation is good,
-        # so return the parsed action
-        elif len(option_tuples) == 1:
-            option_tuple, = option_tuples  # unpack the single matching tuple
-            return option_tuple
-
-        # if it was not found as an option, but it looks like a negative
-        # number, it was meant to be positional
-        # unless there are negative-number-like options
-        if self._negative_number_matcher.match(arg_string):
-            if not self._has_negative_number_optionals:
-                return None
-
-        # it was meant to be an optional but there is no such option
-        # in this parser (though it might be a valid option in a subparser)
-        return None, arg_string, None
-
-    def _is_positional(self, arg_string):
-        # if it's an empty string, it was meant to be a positional
-        if not arg_string:
-            return True
-
-        # if it doesn't start with a prefix, it was meant to be positional
-        if not arg_string[0] in self.prefix_chars:
-            return True
-
-        # if it's just a single character, it was meant to be positional
-        if len(arg_string) == 1:
-            return True
-
-        # if it contains a space, it was meant to be a positional
-        if ' ' in arg_string and arg_string[0] not in self.prefix_chars:
-            return True
-
-        return False
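
A brief sketch of ``add_flag_argument`` above: it registers both ``--my-flag`` and ``--no-my-flag`` and stores the result under a single destination (the ``verbose`` flag name below is an illustrative assumption):

    from aria.utils.argparse import ArgumentParser

    parser = ArgumentParser(prog='example')
    parser.add_flag_argument('verbose',
                             help_true='enable verbose output',
                             help_false='disable verbose output',
                             default=False)

    print(parser.parse_args([]).verbose)                # False (the default)
    print(parser.parse_args(['--verbose']).verbose)     # True
    print(parser.parse_args(['--no-verbose']).verbose)  # False
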
diff --git a/apache-ariatosca-0.1.1/aria/utils/caching.py b/apache-ariatosca-0.1.1/aria/utils/caching.py
deleted file mode 100644
index 5f8cd88..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/caching.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Caching utilities.
-"""
-
-from __future__ import absolute_import  # so we can import standard 'collections' and 'threading'
-
-from threading import Lock
-from functools import partial
-
-from .collections import OrderedDict
-
-
-class cachedmethod(object):  # pylint: disable=invalid-name
-    """
-    Decorator for caching method return values.
-
-    The implementation is thread-safe.
-
-    Supports ``cache_info`` to be compatible with Python 3's ``functools.lru_cache``. Note that the
-    statistics are combined for all instances of the class.
-
-    The cache is not used when the method is called unbound, which lets callers bypass the cache.
-
-    Adapted from `this solution
-    <http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/>`__.
-    """
-
-    ENABLED = True
-
-    def __init__(self, func):
-        self.__doc__ = func.__doc__
-        self.func = func
-        self.hits = 0
-        self.misses = 0
-        self.lock = Lock()
-
-    def cache_info(self):
-        with self.lock:
-            return (self.hits, self.misses, None, self.misses)
-
-    def reset_cache_info(self):
-        with self.lock:
-            self.hits = 0
-            self.misses = 0
-
-    def __get__(self, instance, owner):
-        if instance is None:
-            # Don't use cache if not bound to an object
-            # Note: This is also a way for callers to override the cache
-            return self.func
-        return partial(self, instance)
-
-    def __call__(self, *args, **kwargs):
-        if not self.ENABLED:
-            return self.func(*args, **kwargs)
-
-        instance = args[0]
-        if not hasattr(instance, '_method_cache'):
-            instance._method_cache = {}
-        method_cache = instance._method_cache
-
-        key = (self.func, args[1:], frozenset(kwargs.items()))
-
-        try:
-            with self.lock:
-                return_value = method_cache[key]
-                self.hits += 1
-        except KeyError:
-            return_value = self.func(*args, **kwargs)
-            with self.lock:
-                method_cache[key] = return_value
-                self.misses += 1
-            # Another thread may override our cache entry here, so we need to read
-            # it again to make sure all threads use the same return value
-            return_value = method_cache.get(key, return_value)
-
-        return return_value
-
-
-class HasCachedMethods(object):
-    """
-    Provides convenience methods for working with :class:`cachedmethod`.
-    """
-
-    def __init__(self, method_cache=None):
-        self._method_cache = method_cache or {}
-
-    @property
-    def _method_cache_info(self):
-        """
-        The cache infos of all cached methods.
-
-        :rtype: dict of str, 4-tuple
-        """
-
-        cached_info = OrderedDict()
-        for k, v in self.__class__.__dict__.iteritems():
-            if isinstance(v, property):
-                # The property getter might be cached
-                v = v.fget
-            if hasattr(v, 'cache_info'):
-                cached_info[k] = v.cache_info()
-        return cached_info
-
-    def _reset_method_cache(self):
-        """
-        Resets the caches of all cached methods.
-        """
-
-        if hasattr(self, '_method_cache'):
-            self._method_cache = {}
-
-        # Note: Another thread may already be storing entries in the cache here.
-        # But it's not a big deal! It only means that our cache_info isn't
-        # guaranteed to be accurate.
-
-        for entry in self.__class__.__dict__.itervalues():
-            if isinstance(entry, property):
-                # The property getter might be cached
-                entry = entry.fget
-            if hasattr(entry, 'reset_cache_info'):
-                entry.reset_cache_info()
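
A minimal usage sketch for ``cachedmethod`` above (the ``Circle`` class is an illustrative assumption): the second call with the same arguments is served from the per-instance cache.

    from aria.utils.caching import cachedmethod, HasCachedMethods

    class Circle(HasCachedMethods):
        def __init__(self, radius):
            super(Circle, self).__init__()
            self.radius = radius

        @cachedmethod
        def area(self):
            print('computing...')
            return 3.14159 * self.radius ** 2

    c = Circle(2.0)
    c.area()                                     # miss: prints 'computing...'
    c.area()                                     # hit: served from the cache
    print(Circle.__dict__['area'].cache_info())  # lru_cache-style (hits, misses, ...) tuple
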
diff --git a/apache-ariatosca-0.1.1/aria/utils/collections.py b/apache-ariatosca-0.1.1/aria/utils/collections.py
deleted file mode 100644
index ccc37a1..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/collections.py
+++ /dev/null
@@ -1,303 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Additional collection classes and collection utilities.
-"""
-
-from __future__ import absolute_import  # so we can import standard 'collections'
-
-from copy import deepcopy
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict
-
-
-def cls_name(cls):
-    module = str(cls.__module__)
-    name = str(cls.__name__)
-    return name if module == '__builtin__' else '%s.%s' % (module, name)
-
-
-class FrozenList(list):
-    """
-    An immutable list.
-
-    After initialization it will raise :class:`~exceptions.TypeError` exceptions if modification is
-    attempted.
-
-    Note that objects stored in the list may not be immutable.
-    """
-    def __init__(self, *args, **kwargs):
-        self.locked = False
-        super(FrozenList, self).__init__(*args, **kwargs)
-        self.locked = True
-
-    def __setitem__(self, index, value):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).__setitem__(index, value)
-
-    def __delitem__(self, index):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).__delitem__(index)
-
-    def __iadd__(self, values):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).__iadd__(values)
-
-    def __deepcopy__(self, memo):
-        res = [deepcopy(v, memo) for v in self]
-        return FrozenList(res)
-
-    def append(self, value):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).append(value)
-
-    def extend(self, values):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).extend(values)
-
-    def insert(self, index, value):
-        if self.locked:
-            raise TypeError('frozen list')
-        return super(FrozenList, self).insert(index, value)
-
-EMPTY_READ_ONLY_LIST = FrozenList()
-
-
-class FrozenDict(OrderedDict):
-    """
-    An immutable ordered dict.
-
-    After initialization it will raise :class:`~exceptions.TypeError` exceptions if modification is
-    attempted.
-
-    Note that objects stored in the dict may not be immutable.
-    """
-
-    def __init__(self, *args, **kwargs):
-        self.locked = False
-        super(FrozenDict, self).__init__(*args, **kwargs)
-        self.locked = True
-
-    def __setitem__(self, key, value, **_):
-        if self.locked:
-            raise TypeError('frozen dict')
-        return super(FrozenDict, self).__setitem__(key, value)
-
-    def __delitem__(self, key, **_):
-        if self.locked:
-            raise TypeError('frozen dict')
-        return super(FrozenDict, self).__delitem__(key)
-
-    def __deepcopy__(self, memo):
-        res = [(deepcopy(k, memo), deepcopy(v, memo)) for k, v in self.iteritems()]
-        return FrozenDict(res)
-
-EMPTY_READ_ONLY_DICT = FrozenDict()
-
-
-class StrictList(list):
-    """
-    A list that raises :class:`~exceptions.TypeError` exceptions when objects of the wrong type are
-    inserted.
-    """
-
-    def __init__(self,
-                 items=None,
-                 value_class=None,
-                 wrapper_function=None,
-                 unwrapper_function=None):
-        super(StrictList, self).__init__()
-        if isinstance(items, StrictList):
-            self.value_class = items.value_class
-            self.wrapper_function = items.wrapper_function
-            self.unwrapper_function = items.unwrapper_function
-        self.value_class = value_class
-        self.wrapper_function = wrapper_function
-        self.unwrapper_function = unwrapper_function
-        if items:
-            for item in items:
-                self.append(item)
-
-    def _wrap(self, value):
-        if (self.value_class is not None) and (not isinstance(value, self.value_class)):
-            raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value)))
-        if self.wrapper_function is not None:
-            value = self.wrapper_function(value)
-        return value
-
-    def _unwrap(self, value):
-        if self.unwrapper_function is not None:
-            value = self.unwrapper_function(value)
-        return value
-
-    def __getitem__(self, index):
-        value = super(StrictList, self).__getitem__(index)
-        value = self._unwrap(value)
-        return value
-
-    def __setitem__(self, index, value):
-        value = self._wrap(value)
-        return super(StrictList, self).__setitem__(index, value)
-
-    def __iadd__(self, values):
-        values = [self._wrap(v) for v in values]
-        return super(StrictList, self).__iadd__(values)
-
-    def append(self, value):
-        value = self._wrap(value)
-        return super(StrictList, self).append(value)
-
-    def extend(self, values):
-        values = [self._wrap(v) for v in values]
-        return super(StrictList, self).extend(values)
-
-    def insert(self, index, value):
-        value = self._wrap(value)
-        return super(StrictList, self).insert(index, value)
-
-
-class StrictDict(OrderedDict):
-    """
-    An ordered dict that raises :class:`~exceptions.TypeError` exceptions when keys or values of the
-    wrong type are used.
-    """
-
-    def __init__(self,
-                 items=None,
-                 key_class=None,
-                 value_class=None,
-                 wrapper_function=None,
-                 unwrapper_function=None):
-        super(StrictDict, self).__init__()
-        if isinstance(items, StrictDict):
-            self.key_class = items.key_class
-            self.value_class = items.value_class
-            self.wrapper_function = items.wrapper_function
-            self.unwrapper_function = items.unwrapper_function
-        self.key_class = key_class
-        self.value_class = value_class
-        self.wrapper_function = wrapper_function
-        self.unwrapper_function = unwrapper_function
-        if items:
-            for k, v in items:
-                self[k] = v
-
-    def __getitem__(self, key):
-        if (self.key_class is not None) and (not isinstance(key, self.key_class)):
-            raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key)))
-        value = super(StrictDict, self).__getitem__(key)
-        if self.unwrapper_function is not None:
-            value = self.unwrapper_function(value)
-        return value
-
-    def __setitem__(self, key, value, **_):
-        if (self.key_class is not None) and (not isinstance(key, self.key_class)):
-            raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key)))
-        if (self.value_class is not None) and (not isinstance(value, self.value_class)):
-            raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value)))
-        if self.wrapper_function is not None:
-            value = self.wrapper_function(value)
-        return super(StrictDict, self).__setitem__(key, value)
-
-
-def merge(dict_a, dict_b, path=None, strict=False):
-    """
-    Merges dicts, recursively.
-    """
-
-    # TODO: a.add_yaml_merge(b), see https://bitbucket.org/ruamel/yaml/src/
-    # TODO: 86622a1408e0f171a12e140d53c4ffac4b6caaa3/comments.py?fileviewer=file-view-default
-
-    path = path or []
-    for key, value_b in dict_b.iteritems():
-        if key in dict_a:
-            value_a = dict_a[key]
-            if isinstance(value_a, dict) and isinstance(value_b, dict):
-                merge(value_a, value_b, path + [str(key)], strict)
-            elif value_a != value_b:
-                if strict:
-                    raise ValueError('dict merge conflict at %s' % '.'.join(path + [str(key)]))
-                else:
-                    dict_a[key] = value_b
-        else:
-            dict_a[key] = value_b
-    return dict_a
-
-
-def is_removable(_container, _key, v):
-    return (v is None) or ((isinstance(v, dict) or isinstance(v, list)) and (len(v) == 0))
-
-
-def prune(value, is_removable_function=is_removable):
-    """
-    Deletes ``None`` and empty lists and dicts, recursively.
-    """
-
-    if isinstance(value, list):
-        for i, v in enumerate(value):
-            if is_removable_function(value, i, v):
-                del value[i]
-            else:
-                prune(v, is_removable_function)
-    elif isinstance(value, dict):
-        for k, v in value.items():
-            if is_removable_function(value, k, v):
-                del value[k]
-            else:
-                prune(v, is_removable_function)
-
-    return value
-
-
-# TODO: Move following two methods to some place parser specific
-
-def deepcopy_with_locators(value):
-    """
-    Like :func:`~copy.deepcopy`, but also copies over locators.
-    """
-
-    res = deepcopy(value)
-    copy_locators(res, value)
-    return res
-
-
-def copy_locators(target, source):
-    """
-    Copies over ``_locator`` for all elements, recursively.
-
-    Assumes that target and source have exactly the same list/dict structure.
-    """
-
-    locator = getattr(source, '_locator', None)
-    if locator is not None:
-        try:
-            setattr(target, '_locator', locator)
-        except AttributeError:
-            pass
-
-    if isinstance(target, list) and isinstance(source, list):
-        for i, _ in enumerate(target):
-            copy_locators(target[i], source[i])
-    elif isinstance(target, dict) and isinstance(source, dict):
-        for k, v in target.iteritems():
-            copy_locators(v, source[k])
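
A short sketch of the collection helpers above (Python 2, like the module itself): ``FrozenList`` rejects mutation after construction, ``StrictDict`` enforces key/value types, and ``merge``/``prune`` work recursively on plain dicts.

    from aria.utils.collections import FrozenList, StrictDict, merge, prune

    frozen = FrozenList([1, 2, 3])
    try:
        frozen.append(4)
    except TypeError:
        print('frozen list rejects mutation')

    strict = StrictDict(key_class=str, value_class=int)
    strict['answer'] = 42                             # accepted
    try:
        strict['answer'] = 'nope'                     # wrong value type
    except TypeError:
        print('strict dict rejects a str value')

    print(merge({'a': {'x': 1}}, {'a': {'y': 2}}))    # {'a': {'x': 1, 'y': 2}}
    print(prune({'a': None, 'b': [], 'c': 1}))        # {'c': 1}
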
diff --git a/apache-ariatosca-0.1.1/aria/utils/console.py b/apache-ariatosca-0.1.1/aria/utils/console.py
deleted file mode 100644
index 642cbb1..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/console.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Abstraction API above terminal color libraries.
-"""
-
-from clint.textui.core import STDOUT
-from clint.textui import puts as _puts
-from clint.textui.colored import ColoredString as _ColoredString
-from clint.textui import indent  # pylint: disable=unused-import
-
-from .formatting import safe_str
-
-
-class ColoredString(_ColoredString):
-    def __init__(self, color, str_, always_color=False, bold=False):
-        super(ColoredString, self).__init__(color, safe_str(str_), always_color, bold)
-
-
-def puts(string='', newline=True, stream=STDOUT):
-    _puts(safe_str(string), newline, stream)
-
-
-class Colored(object):
-    @staticmethod
-    def black(string, always=False, bold=False):
-        return ColoredString('BLACK', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def red(string, always=False, bold=False):
-        return ColoredString('RED', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def green(string, always=False, bold=False):
-        return ColoredString('GREEN', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def yellow(string, always=False, bold=False):
-        return ColoredString('YELLOW', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def blue(string, always=False, bold=False):
-        return ColoredString('BLUE', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def magenta(string, always=False, bold=False):
-        return ColoredString('MAGENTA', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def cyan(string, always=False, bold=False):
-        return ColoredString('CYAN', string, always_color=always, bold=bold)
-
-    @staticmethod
-    def white(string, always=False, bold=False):
-        return ColoredString('WHITE', string, always_color=always, bold=bold)
diff --git a/apache-ariatosca-0.1.1/aria/utils/exceptions.py b/apache-ariatosca-0.1.1/aria/utils/exceptions.py
deleted file mode 100644
index 5bb0e6d..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/exceptions.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for extracting and formatting Python exceptions.
-"""
-
-import sys
-import linecache
-import StringIO
-import traceback as tb
-
-import jsonpickle
-
-from .console import (puts, indent, Colored)
-
-
-ENTRY_FORMAT = 'File "{filename}", line {lineno}, in {name}'
-
-
-def print_exception(e, full=True, cause=False, traceback=None):
-    """
-    Prints the exception with nice colors and such.
-    """
-    def format_heading(e):
-        return '{0}{1}: {2}'.format(
-            Colored.red('Caused by ') if cause else '',
-            Colored.red(e.__class__.__name__, bold=True),
-            Colored.red(e))
-
-    puts(format_heading(e))
-    if full:
-        if cause:
-            if traceback:
-                print_traceback(traceback, True)
-        else:
-            print_traceback()
-    if hasattr(e, 'cause') and e.cause:
-        traceback = e.cause_traceback if hasattr(e, 'cause_traceback') else None
-        print_exception(e.cause, full=full, cause=True, traceback=traceback)
-
-
-def print_traceback(traceback=None, print_last_stack=False):
-    """
-    Prints the traceback with nice colors and such.
-    """
-
-    if traceback is None:
-        _, _, traceback = sys.exc_info()
-    while traceback is not None:
-        frame = traceback.tb_frame
-        code = frame.f_code
-        filename = code.co_filename
-        lineno = traceback.tb_lineno
-        name = code.co_name
-        with indent(2):
-            puts(ENTRY_FORMAT.format(filename=Colored.blue(filename),
-                                     lineno=Colored.cyan(lineno),
-                                     name=Colored.cyan(name)))
-            linecache.checkcache(filename)
-            line = linecache.getline(filename, lineno, frame.f_globals)
-            if line:
-                with indent(2):
-                    puts(line.strip())
-        traceback = traceback.tb_next
-        if print_last_stack and (traceback is None):
-            # Print stack of *last* traceback
-            _print_stack(frame)
-
-
-def _print_stack(frame):
-    entries = tb.extract_stack(frame)
-    if not entries:
-        return
-    puts(Colored.red('Call stack:'))
-    with indent(2):
-        for filename, lineno, name, line in entries:
-            puts(ENTRY_FORMAT.format(filename=Colored.blue(filename),
-                                     lineno=Colored.cyan(lineno),
-                                     name=Colored.cyan(name)))
-            with indent(2):
-                puts(line)
-
-
-def get_exception_as_string(exc_type, exc_val, traceback):
-    s_traceback = StringIO.StringIO()
-    tb.print_exception(
-        etype=exc_type,
-        value=exc_val,
-        tb=traceback,
-        file=s_traceback)
-    return s_traceback.getvalue()
-
-
-class _WrappedException(Exception):
-
-    def __init__(self, exception_type, exception_str):
-        super(_WrappedException, self).__init__(exception_type, exception_str)
-        self.exception_type = exception_type
-        self.exception_str = exception_str
-
-
-def wrap_if_needed(exception):
-    try:
-        jsonpickle.loads(jsonpickle.dumps(exception))
-        return exception
-    except BaseException:
-        return _WrappedException(type(exception).__name__, str(exception))
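
A hedged usage sketch of ``print_exception`` above: when the exception carries a ``cause`` attribute (as ARIA's exceptions may), the cause is printed as well, each with its own colored heading.

    from aria.utils.exceptions import print_exception

    try:
        try:
            1 / 0
        except ZeroDivisionError as inner:
            error = RuntimeError('task failed')
            error.cause = inner     # picked up via the hasattr(e, 'cause') check above
            raise error
    except RuntimeError as e:
        print_exception(e)          # colored heading plus the current traceback
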
diff --git a/apache-ariatosca-0.1.1/aria/utils/file.py b/apache-ariatosca-0.1.1/aria/utils/file.py
deleted file mode 100644
index 75f2859..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/file.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-File utilities.
-"""
-
-import errno
-import os
-import shutil
-
-
-def makedirs(path):
-    """
-    Enhancement of :func:`os.makedirs` that doesn't fail if the directory already exists.
-    """
-    if os.path.isdir(path):
-        return
-    try:
-        os.makedirs(path)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-
-def remove_if_exists(path):
-    try:
-        if os.path.isfile(path):
-            os.remove(path)
-        if os.path.isdir(path):
-            shutil.rmtree(path)
-
-    except OSError as e:
-        if e.errno != errno.ENOENT:  # errno.ENOENT = no such file or directory
-            raise  # re-raise exception if a different error occurred
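
Both helpers above are idempotent, so repeated calls are safe, as this small sketch shows (the temporary path is illustrative):

    import os
    import tempfile

    from aria.utils.file import makedirs, remove_if_exists

    path = os.path.join(tempfile.mkdtemp(), 'a', 'b', 'c')
    makedirs(path)            # creates the whole tree
    makedirs(path)            # no-op: the directory already exists
    remove_if_exists(path)    # removes the directory tree
    remove_if_exists(path)    # no-op: already gone
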
diff --git a/apache-ariatosca-0.1.1/aria/utils/formatting.py b/apache-ariatosca-0.1.1/aria/utils/formatting.py
deleted file mode 100644
index fa34b7d..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/formatting.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-String formatting and string-based format utilities.
-"""
-
-import json
-from types import MethodType
-
-from ruamel import yaml  # @UnresolvedImport
-
-from .collections import FrozenList, FrozenDict, StrictList, StrictDict, OrderedDict
-
-
-PLURALIZE_EXCEPTIONS = {}
-
-
-# Add our types to ruamel.yaml (for round trips)
-yaml.representer.RoundTripRepresenter.add_representer(
-    FrozenList, yaml.representer.RoundTripRepresenter.represent_list)
-yaml.representer.RoundTripRepresenter.add_representer(
-    FrozenDict, yaml.representer.RoundTripRepresenter.represent_dict)
-yaml.representer.RoundTripRepresenter.add_representer(
-    StrictList, yaml.representer.RoundTripRepresenter.represent_list)
-yaml.representer.RoundTripRepresenter.add_representer(
-    StrictDict, yaml.representer.RoundTripRepresenter.represent_dict)
-
-# Without this, ruamel.yaml will output "!!omap" types, which is
-# technically correct but unnecessarily verbose for our uses
-yaml.representer.RoundTripRepresenter.add_representer(
-    OrderedDict, yaml.representer.RoundTripRepresenter.represent_dict)
-
-
-class JsonAsRawEncoder(json.JSONEncoder):
-    """
-    A :class:`JSONEncoder` that will use the ``as_raw`` property of objects if available.
-    """
-    def raw_encoder_default(self, obj):
-        try:
-            return iter(obj)
-        except TypeError:
-            if hasattr(obj, 'as_raw'):
-                return as_raw(obj)
-            return str(obj)
-        return super(JsonAsRawEncoder, self).default(obj)
-
-    def __init__(self, *args, **kwargs):
-        kwargs['default'] = self.raw_encoder_default
-        super(JsonAsRawEncoder, self).__init__(*args, **kwargs)
-
-
-class YamlAsRawDumper(yaml.dumper.RoundTripDumper):  # pylint: disable=too-many-ancestors
-    """
-    A :class:`RoundTripDumper` that will use the ``as_raw`` property of objects if available.
-    """
-
-    def represent_data(self, data):
-        if hasattr(data, 'as_raw'):
-            data = as_raw(data)
-        return super(YamlAsRawDumper, self).represent_data(data)
-
-
-def decode_list(data):
-    decoded_list = []
-    for item in data:
-        if isinstance(item, unicode):
-            item = item.encode('utf-8')
-        elif isinstance(item, list):
-            item = decode_list(item)
-        elif isinstance(item, dict):
-            item = decode_dict(item)
-        decoded_list.append(item)
-    return decoded_list
-
-
-def decode_dict(data):
-    decoded_dict = {}
-    for key, value in data.iteritems():
-        if isinstance(key, unicode):
-            key = key.encode('utf-8')
-        if isinstance(value, unicode):
-            value = value.encode('utf-8')
-        elif isinstance(value, list):
-            value = decode_list(value)
-        elif isinstance(value, dict):
-            value = decode_dict(value)
-        decoded_dict[key] = value
-    return decoded_dict
-
-
-def safe_str(value):
-    """
-    Like :class:`str` coercion, but makes sure that Unicode strings are properly encoded, and will
-    never return ``None``.
-    """
-
-    try:
-        return str(value)
-    except UnicodeEncodeError:
-        return unicode(value).encode('utf8')
-
-
-def safe_repr(value):
-    """
-    Like :func:`repr`, but calls :func:`as_raw` and :func:`as_agnostic` first.
-    """
-
-    return repr(as_agnostic(as_raw(value)))
-
-
-def string_list_as_string(strings):
-    """
-    Nice representation of a list of strings.
-    """
-
-    if not strings:
-        return 'none'
-    return ', '.join('"{0}"'.format(safe_str(v)) for v in strings)
-
-
-def pluralize(noun):
-    plural = PLURALIZE_EXCEPTIONS.get(noun)
-    if plural is not None:
-        return plural
-    elif noun.endswith('s'):
-        return '{0}es'.format(noun)
-    elif noun.endswith('y'):
-        return '{0}ies'.format(noun[:-1])
-    else:
-        return '{0}s'.format(noun)
-
-
-def as_raw(value):
-    """
-    Converts values using their ``as_raw`` property, if it exists, recursively.
-    """
-
-    if hasattr(value, 'as_raw'):
-        value = value.as_raw
-        if isinstance(value, MethodType):
-            # Old-style Python classes don't support properties
-            value = value()
-    elif isinstance(value, list):
-        value = list(value)
-        for i, v in enumerate(value):
-            value[i] = as_raw(v)
-    elif isinstance(value, dict):
-        value = dict(value)
-        for k, v in value.iteritems():
-            value[k] = as_raw(v)
-    return value
-
-
-def as_raw_list(value):
-    """
-    Assuming value is a list, converts its values using :func:`as_raw`.
-    """
-
-    if value is None:
-        return []
-    if isinstance(value, dict):
-        value = value.itervalues()
-    return [as_raw(v) for v in value]
-
-
-def as_raw_dict(value):
-    """
-    Assuming value is a dict, converts its values using :func:`as_raw`. The keys are left as is.
-    """
-
-    if value is None:
-        return OrderedDict()
-    return OrderedDict((
-        (k, as_raw(v)) for k, v in value.iteritems()))
-
-
-def as_agnostic(value):
-    """
-    Converts subclasses of list and dict to standard lists and dicts, and Unicode strings to
-    non-Unicode if possible, recursively.
-
-    Useful for creating human-readable output of structures.
-    """
-
-    if isinstance(value, unicode):
-        try:
-            value = str(value)
-        except UnicodeEncodeError:
-            pass
-    elif isinstance(value, list):
-        value = list(value)
-    elif isinstance(value, dict):
-        value = dict(value)
-
-    if isinstance(value, list):
-        for i, _ in enumerate(value):
-            value[i] = as_agnostic(value[i])
-    elif isinstance(value, dict):
-        for k, v in value.iteritems():
-            value[k] = as_agnostic(v)
-
-    return value
-
-
-def json_dumps(value, indent=2):
-    """
-    JSON dumps that supports Unicode and the ``as_raw`` property of objects if available.
-    """
-
-    return json.dumps(value, indent=indent, ensure_ascii=False, cls=JsonAsRawEncoder)
-
-
-def yaml_dumps(value, indent=2):
-    """
-    YAML dumps that supports Unicode and the ``as_raw`` property of objects if available.
-    """
-
-    return yaml.dump(value, indent=indent, allow_unicode=True, Dumper=YamlAsRawDumper)
-
-
-def yaml_loads(value):
-    return yaml.load(value, Loader=yaml.SafeLoader)
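
A brief sketch of the dump helpers above: objects exposing an ``as_raw`` property are serialized through it, and ``pluralize`` applies the simple rules shown (the ``Node`` class is an illustrative assumption):

    from aria.utils.formatting import json_dumps, yaml_dumps, pluralize

    class Node(object):
        def __init__(self, name):
            self.name = name

        @property
        def as_raw(self):
            return {'name': self.name}

    print(json_dumps({'node': Node('web_server')}))   # serialized via as_raw
    print(yaml_dumps({'node': Node('web_server')}))
    print(pluralize('policy'))                        # 'policies'
    print(pluralize('class'))                         # 'classes'
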
diff --git a/apache-ariatosca-0.1.1/aria/utils/http.py b/apache-ariatosca-0.1.1/aria/utils/http.py
deleted file mode 100644
index c8357e9..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/http.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-HTTP utilities.
-"""
-
-import os
-import tempfile
-
-import requests
-
-
-def download_file(url, destination=None, logger=None, progress_handler=None):
-    """
-    Download file.
-
-    :param url: URL from which to download
-    :type url: basestring
-    :param destination: path where the file should be saved or ``None`` to auto-generate
-    :type destination: basestring
-    :returns: path where the file was saved
-    :rtype: basestring
-    :raises exceptions.IOError:
-    :raises requests.exceptions.RequestException:
-    """
-    chunk_size = 1024
-
-    if not destination:
-        file_descriptor, destination = tempfile.mkstemp()
-        os.close(file_descriptor)
-    if logger:
-        logger.info('Downloading {0} to {1}...'.format(url, destination))
-
-    response = requests.get(url, stream=True)
-    final_url = response.url
-    if final_url != url and logger:
-        logger.debug('Redirected to {0}'.format(final_url))
-
-    read_bytes = 0
-    total_size = int(response.headers['Content-Length']) \
-        if 'Content-Length' in response.headers else None
-    try:
-        with open(destination, 'wb') as destination_file:
-            for chunk in response.iter_content(chunk_size):
-                destination_file.write(chunk)
-                if total_size and progress_handler:
-                    # Only showing progress bar if we have the total content length
-                    read_bytes += chunk_size
-                    progress_handler(read_bytes, total_size)
-    finally:
-        response.close()
-
-    return destination
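A hedged sketch of calling ``download_file`` with a progress callback; the URL, logger name, and ``report_progress`` helper below are illustrative only:

    import logging

    from aria.utils.http import download_file

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('downloader')    # any standard logger works here

    def report_progress(read_bytes, total_size):
        # Called per chunk, and only when the server sends a Content-Length header.
        print '{0}/{1} bytes'.format(min(read_bytes, total_size), total_size)

    path = download_file('http://www.apache.org/licenses/LICENSE-2.0.txt',
                         logger=logger,
                         progress_handler=report_progress)
    print 'saved to', path    # a temporary file, since no destination was given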
diff --git a/apache-ariatosca-0.1.1/aria/utils/imports.py b/apache-ariatosca-0.1.1/aria/utils/imports.py
deleted file mode 100644
index 14ad09e..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/imports.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for dynamically loading Python code.
-"""
-
-import pkgutil
-import importlib
-
-
-def import_fullname(name, paths=None):
-    """
-    Imports a variable or class based on a full name, optionally searching for it in the paths.
-    """
-    paths = paths or []
-    if name is None:
-        return None
-
-    def do_import(name):
-        if name and ('.' in name):
-            module_name, name = name.rsplit('.', 1)
-            return getattr(__import__(module_name, fromlist=[name], level=0), name)
-        else:
-            raise ImportError('import not found: %s' % name)
-
-    try:
-        return do_import(name)
-    except ImportError:
-        for path in paths:
-            try:
-                return do_import('%s.%s' % (path, name))
-            except Exception as e:
-                raise ImportError('cannot import %s, because %s' % (name, e))
-
-    raise ImportError('import not found: %s' % name)
-
-
-def import_modules(name):
-    """
-    Imports a module and all its sub-modules, recursively. Relies on modules defining a ``MODULES``
-    attribute listing their sub-module names.
-    """
-
-    module = __import__(name, fromlist=['MODULES'], level=0)
-    if hasattr(module, 'MODULES'):
-        for module_ in module.MODULES:
-            import_modules('%s.%s' % (name, module_))
-
-
-# TODO merge with import_fullname
-def load_attribute(attribute_path):
-    """
-    Dynamically load an attribute based on the path to it. E.g.
-    ``some_package.some_module.some_attribute``, will load ``some_attribute`` from the
-    ``some_package.some_module`` module.
-    """
-    module_name, attribute_name = attribute_path.rsplit('.', 1)
-    try:
-        module = importlib.import_module(module_name)
-        return getattr(module, attribute_name)
-    except ImportError:
-        # TODO: handle
-        raise
-    except AttributeError:
-        # TODO: handle
-        raise
-
-
-def iter_modules():
-    # Apparently pkgutil had some issues in Python 2.6: accessing root-level directories
-    # failed, which made the entire import process fail. Since we only need the
-    # aria_extension-related loading, in the meantime we try to import only those
-    # (and assume they are not located at the root level).
-    # [In Python 2.7 it does actually ignore any OSError.]
-    yielded = {}
-    for importer in pkgutil.iter_importers():
-        try:
-            for module_name, ispkg in pkgutil.iter_importer_modules(importer):
-                if module_name not in yielded:
-                    yielded[module_name] = True
-                    yield importer, module_name, ispkg
-        except OSError:
-            pass
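A short sketch of the two loaders; the dotted names below are standard-library examples rather than ARIA extension names:

    from aria.utils.imports import import_fullname, load_attribute

    # Import by full dotted name, falling back to a search through the given paths.
    OrderedDict = import_fullname('OrderedDict', paths=['collections'])

    # load_attribute() is the importlib-based equivalent (see the TODO about merging them).
    join = load_attribute('os.path.join')
    print join('/tmp', 'example')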
diff --git a/apache-ariatosca-0.1.1/aria/utils/openclose.py b/apache-ariatosca-0.1.1/aria/utils/openclose.py
deleted file mode 100644
index 722885c..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/openclose.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for working with open/close patterns.
-"""
-
-class OpenClose(object):
-    """
-    Wraps an object that has ``open()`` and ``close()`` methods to support the ``with`` keyword.
-    """
-
-    def __init__(self, wrapped):
-        self.wrapped = wrapped
-
-    def __enter__(self):
-        if hasattr(self.wrapped, 'open'):
-            self.wrapped.open()
-        return self.wrapped
-
-    def __exit__(self, the_type, value, traceback):
-        if hasattr(self.wrapped, 'close'):
-            self.wrapped.close()
-        return False
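A minimal sketch of the wrapper in use; the ``Channel`` class is hypothetical:

    from aria.utils.openclose import OpenClose

    class Channel(object):
        """Hypothetical resource exposing open() and close()."""

        def open(self):
            print 'opened'

        def close(self):
            print 'closed'

        def send(self, message):
            print 'sent:', message

    # OpenClose calls open() on entry and close() on exit, yielding the wrapped object.
    with OpenClose(Channel()) as channel:
        channel.send('hello')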
diff --git a/apache-ariatosca-0.1.1/aria/utils/plugin.py b/apache-ariatosca-0.1.1/aria/utils/plugin.py
deleted file mode 100644
index 4fb6a8e..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/plugin.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Plugin utilities.
-"""
-
-import wagon
-
-
-def create(source, destination_dir):
-    return wagon.create(source=source, archive_destination_dir=destination_dir)
diff --git a/apache-ariatosca-0.1.1/aria/utils/process.py b/apache-ariatosca-0.1.1/aria/utils/process.py
deleted file mode 100644
index ec4a72d..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/process.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Process utilities.
-"""
-
-import os
-
-
-def append_to_path(*args, **kwargs):
-    """
-    Appends one or more paths to the system path of an environment.
-    The environment will be that of the current process unless another is passed using the
-    'env' keyword argument.
-    :param args: paths to append
-    :param kwargs: 'env' may be used to pass a custom environment to use
-    """
-    _append_to_path('PATH', *args, **kwargs)
-
-
-def append_to_pythonpath(*args, **kwargs):
-    """
-    Appends one or more paths to the python path of an environment.
-    The environment will be that of the current process unless another is passed using the
-    'env' keyword argument.
-    :param args: paths to append
-    :param kwargs: 'env' may be used to pass a custom environment to use
-    """
-    _append_to_path('PYTHONPATH', *args, **kwargs)
-
-
-def _append_to_path(path, *args, **kwargs):
-    env = kwargs.get('env') or os.environ
-    env[path] = '{0}{1}{2}'.format(
-        os.pathsep.join(args),
-        os.pathsep,
-        env.get(path, '')
-    )
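A sketch of the intended call pattern; the directory names are placeholders. Note that, despite the function names, the given paths end up in front of the existing value:

    import os

    from aria.utils.process import append_to_path, append_to_pythonpath

    env = dict(os.environ)    # work on a copy instead of the current process environment
    append_to_path('/opt/tools/bin', env=env)
    append_to_pythonpath('/opt/tools/lib/python', env=env)

    print env['PATH'].split(os.pathsep)[0]    # '/opt/tools/bin'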
diff --git a/apache-ariatosca-0.1.1/aria/utils/specification.py b/apache-ariatosca-0.1.1/aria/utils/specification.py
deleted file mode 100644
index 8c51134..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/specification.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for cross-referencing code with specification documents.
-"""
-
-from .collections import OrderedDict
-
-
-DSL_SPECIFICATIONS = {}
-
-
-def implements_specification(section, spec):
-    """
-    Decorator for specification implementations.
-
-    Used for documentation and standards compliance.
-    """
-
-    from .type import full_type_name
-
-    def decorator(obj):
-        specification = DSL_SPECIFICATIONS.get(spec)
-
-        if specification is None:
-            specification = {}
-            DSL_SPECIFICATIONS[spec] = specification
-
-        if section in specification:
-            raise Exception('you cannot specify the same @implements_specification twice, consider'
-                            ' adding \'-1\', \'-2\', etc.: {0}, {1}'.format(spec, section))
-
-        specification[section] = OrderedDict((
-            ('code', full_type_name(obj)),
-            ('doc', obj.__doc__)))
-
-        try:
-            setattr(obj, '_dsl_specifications', {section: section, spec: spec})
-        except BaseException:
-            pass
-
-        return obj
-
-    return decorator
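A hedged sketch of applying the decorator; the section number and class below are invented purely for illustration:

    from aria.utils.specification import DSL_SPECIFICATIONS, implements_specification

    @implements_specification('9.9.9', 'tosca-simple-1.0')    # invented section number
    class ExampleType(object):
        """Illustrative type used only for this sketch."""

    # The registry maps spec name -> section -> code/doc, for documentation generation.
    print DSL_SPECIFICATIONS['tosca-simple-1.0']['9.9.9']['code']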
diff --git a/apache-ariatosca-0.1.1/aria/utils/threading.py b/apache-ariatosca-0.1.1/aria/utils/threading.py
deleted file mode 100644
index f5ca302..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/threading.py
+++ /dev/null
@@ -1,286 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Threading utilities.
-"""
-
-from __future__ import absolute_import  # so we can import standard 'threading'
-
-import sys
-import itertools
-import multiprocessing
-from threading import (Thread, Lock)
-from Queue import (Queue, Full, Empty)
-
-from .exceptions import print_exception
-
-class ExecutorException(Exception):
-    pass
-
-
-class DaemonThread(Thread):
-    def __init__(self, *args, **kwargs):
-        super(DaemonThread, self).__init__(*args, **kwargs)
-        self.daemon = True
-
-    def run(self):
-        """
-        We're overriding ``Thread.run`` in order to avoid annoying (but harmless) error messages
-        during shutdown. The problem is that CPython nullifies the global state _before_ shutting
-        down daemon threads, so that exceptions might happen, and then ``Thread.__bootstrap_inner``
-        prints them out.
-
-        Our solution is to swallow these exceptions here.
-
-        The side effect is that uncaught exceptions in our own thread code will _not_ be printed out
-        as usual, so it's our responsibility to catch them in our code.
-        """
-
-        try:
-            super(DaemonThread, self).run()
-        except SystemExit as e:
-            # This exception should be bubbled up
-            raise e
-        except BaseException:
-            # Exceptions might occur in daemon threads during interpreter shutdown
-            pass
-
-
-# https://gist.github.com/tliron/81dd915166b0bfc64be08b4f8e22c835
-class FixedThreadPoolExecutor(object):
-    """
-    Executes tasks in a fixed thread pool.
-
-    Makes sure to gather all returned results and thrown exceptions in one place, in order of task
-    submission.
-
-    Example::
-
-        def sum(arg1, arg2):
-            return arg1 + arg2
-
-        executor = FixedThreadPoolExecutor(10)
-        try:
-            for value in range(100):
-                executor.submit(sum, value, value)
-            executor.drain()
-        except:
-            executor.close()
-        executor.raise_first()
-        print executor.returns
-
-    You can also use it with the Python ``with`` keyword, in which case you don't need to call
-    ``close`` explicitly::
-
-        with FixedThreadPoolExecutor(10) as executor:
-            for value in range(100):
-                executor.submit(sum, value, value)
-            executor.drain()
-            executor.raise_first()
-            print executor.returns
-    """
-
-    _CYANIDE = object()  # Special task marker used to kill worker threads.
-
-    def __init__(self,
-                 size=None,
-                 timeout=None,
-                 print_exceptions=False):
-        """
-        :param size: number of threads in the pool; if ``None`` will use an optimal number for the
-         platform
-        :param timeout: timeout in seconds for all blocking operations (``None`` means no timeout)
-        :param print_exceptions: set to ``True`` in order to print exceptions from tasks
-        """
-        if not size:
-            try:
-                size = multiprocessing.cpu_count() * 2 + 1
-            except NotImplementedError:
-                size = 3
-
-        self.size = size
-        self.timeout = timeout
-        self.print_exceptions = print_exceptions
-
-        self._tasks = Queue()
-        self._returns = {}
-        self._exceptions = {}
-        self._id_creator = itertools.count()
-        self._lock = Lock() # for console output
-
-        self._workers = []
-        for index in range(size):
-            worker = DaemonThread(
-                name='%s%d' % (self.__class__.__name__, index),
-                target=self._thread_worker)
-            worker.start()
-            self._workers.append(worker)
-
-    def submit(self, func, *args, **kwargs):
-        """
-        Submit a task for execution.
-
-        The task will be called ASAP on the next available worker thread in the pool.
-
-        :raises ExecutorException: if cannot be submitted
-        """
-
-        try:
-            self._tasks.put((self._id_creator.next(), func, args, kwargs), timeout=self.timeout)
-        except Full:
-            raise ExecutorException('cannot submit task: queue is full')
-
-    def close(self):
-        """
-        Blocks until all current tasks finish execution and all worker threads are dead.
-
-        You cannot submit tasks anymore after calling this.
-
-        This is called automatically upon exit if you are using the ``with`` keyword.
-        """
-
-        self.drain()
-        while self.is_alive:
-            try:
-                self._tasks.put(self._CYANIDE, timeout=self.timeout)
-            except Full:
-                raise ExecutorException('cannot close executor: a thread seems to be hanging')
-        self._workers = None
-
-    def drain(self):
-        """
-        Blocks until all current tasks finish execution, but leaves the worker threads alive.
-        """
-
-        self._tasks.join()  # oddly, the API does not support a timeout parameter
-
-    @property
-    def is_alive(self):
-        """
-        True if any of the worker threads are alive.
-        """
-
-        for worker in self._workers:
-            if worker.is_alive():
-                return True
-        return False
-
-    @property
-    def returns(self):
-        """
-        The returned values from all tasks, in order of submission.
-        """
-
-        return [self._returns[k] for k in sorted(self._returns)]
-
-    @property
-    def exceptions(self):
-        """
-        The raised exceptions from all tasks, in order of submission.
-        """
-
-        return [self._exceptions[k] for k in sorted(self._exceptions)]
-
-    def raise_first(self):
-        """
-        If exceptions were thrown by any task, then the first one will be raised.
-
-        This is rather arbitrary: proper handling would involve iterating all the exceptions.
-        However, if you want to use the "raise" mechanism, you are limited to raising only one of
-        them.
-        """
-
-        exceptions = self.exceptions
-        if exceptions:
-            raise exceptions[0]
-
-    def _thread_worker(self):
-        while True:
-            if not self._execute_next_task():
-                break
-
-    def _execute_next_task(self):
-        try:
-            task = self._tasks.get(timeout=self.timeout)
-        except Empty:
-            # Happens if timeout is reached
-            return True
-        if task == self._CYANIDE:
-            # Time to die :(
-            return False
-        self._execute_task(*task)
-        return True
-
-    def _execute_task(self, task_id, func, args, kwargs):
-        try:
-            result = func(*args, **kwargs)
-            self._returns[task_id] = result
-        except Exception as e:
-            self._exceptions[task_id] = e
-            if self.print_exceptions:
-                with self._lock:
-                    print_exception(e)
-        self._tasks.task_done()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, the_type, value, traceback):
-        self.close()
-        return False
-
-
-class LockedList(list):
-    """
-    A list that supports the ``with`` keyword with a built-in lock.
-
-    Though Python lists are thread-safe in that they will not raise exceptions during concurrent
-    access, they do not guarantee atomicity. This class will let you gain atomicity when needed.
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(LockedList, self).__init__(*args, **kwargs)
-        self.lock = Lock()
-
-    def __enter__(self):
-        return self.lock.__enter__()
-
-    def __exit__(self, the_type, value, traceback):
-        return self.lock.__exit__(the_type, value, traceback)
-
-
-class ExceptionThread(Thread):
-    """
-    A thread from which top level exceptions can be retrieved or re-raised.
-    """
-    def __init__(self, *args, **kwargs):
-        Thread.__init__(self, *args, **kwargs)
-        self.exception = None
-        self.daemon = True
-
-    def run(self):
-        try:
-            super(ExceptionThread, self).run()
-        except BaseException:
-            self.exception = sys.exc_info()
-
-    def is_error(self):
-        return self.exception is not None
-
-    def raise_error_if_exists(self):
-        if self.is_error():
-            type_, value, trace = self.exception
-            raise type_, value, trace
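``FixedThreadPoolExecutor`` documents its own usage above; for completeness, a small sketch of ``ExceptionThread``, which stores an uncaught exception so the parent thread can re-raise it (the ``risky`` function is invented):

    from aria.utils.threading import ExceptionThread

    def risky():
        raise RuntimeError('boom')

    thread = ExceptionThread(target=risky)
    thread.start()
    thread.join()

    if thread.is_error():
        thread.raise_error_if_exists()    # re-raises the RuntimeError with its original traceback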
diff --git a/apache-ariatosca-0.1.1/aria/utils/type.py b/apache-ariatosca-0.1.1/aria/utils/type.py
deleted file mode 100644
index fe88a62..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/type.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Type utilities.
-"""
-
-import datetime
-
-from .specification import implements_specification
-
-
-BASE_TYPES_TO_CANONICAL_NAMES = {
-    # TOSCA aliases:
-    None.__class__: 'null',
-    basestring: 'string',
-    int: 'integer',
-    float: 'float',
-    bool: 'boolean',
-    list: 'list',
-    tuple: 'list',
-    dict: 'map',
-    datetime.datetime: 'timestamp'
-}
-
-NAMES_TO_CANONICAL_TYPES = {
-    # Python:
-    'none': None.__class__,
-    'basestring': unicode,
-    'str': unicode,
-    'unicode': unicode,
-    'int': int,
-    'float': float, # also a TOSCA alias
-    'bool': bool,
-    'list': list, # also a TOSCA alias
-    'tuple': list,
-    'dict': dict,
-    'datetime': datetime.datetime,
-
-    # YAML 1.2:
-    'tag:yaml.org,2002:null': None.__class__,
-    'tag:yaml.org,2002:str': unicode,
-    'tag:yaml.org,2002:integer': int,
-    'tag:yaml.org,2002:float': float,
-    'tag:yaml.org,2002:bool': bool,
-
-    # TOSCA aliases:
-    'null': None.__class__,
-    'string': unicode,
-    'integer': int,
-    'boolean': bool,
-
-    # TOSCA custom types:
-    'map': dict,
-    'timestamp': datetime.datetime
-}
-
-
-def full_type_name(value):
-    """
-    The full class name of a type or instance.
-    """
-
-    if not isinstance(value, type):
-        value = value.__class__
-    module = str(value.__module__)
-    name = str(value.__name__)
-    return name if module == '__builtin__' else '{0}.{1}'.format(module, name)
-
-
-@implements_specification('3.2.1-1', 'tosca-simple-1.0')
-def canonical_type_name(value):
-    """
-    Returns the canonical TOSCA type name of a primitive value, or ``None`` if unknown.
-
-    For a list of TOSCA type names, see the `TOSCA Simple Profile v1.0
-    cos01 specification <http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01
-    /TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc373867862>`__
-    """
-
-    for the_type, name in BASE_TYPES_TO_CANONICAL_NAMES.iteritems():
-        if isinstance(value, the_type):
-            return name
-    return None
-
-
-@implements_specification('3.2.1-2', 'tosca-simple-1.0')
-def canonical_type(type_name):
-    """
-    Return the canonical type for any Python, YAML, or TOSCA type name or alias, or ``None`` if
-    unsupported.
-
-    :param type_name: Type name (case insensitive)
-    """
-
-    return NAMES_TO_CANONICAL_TYPES.get(type_name.lower())
-
-
-def validate_value_type(value, type_name):
-    """
-    Validate that a value is of a specific type. Supports Python, YAML, and TOSCA type names and
-    aliases.
-
-    :param type_name: type name (case insensitive)
-    :raises ~exceptions.ValueError: on type mismatch
-    """
-
-    the_type = canonical_type(type_name)
-    if the_type is None:
-        raise RuntimeError('Unsupported type name: {0}'.format(type_name))
-
-    # The following Python types do not inherit from the canonical type, but are considered valid
-    if (the_type is unicode) and isinstance(value, str):
-        return
-    if (the_type is list) and isinstance(value, tuple):
-        return
-
-    if not isinstance(value, the_type):
-        raise ValueError('Value {0} is not of type {1}'.format(value, type_name))
-
-
-def convert_value_to_type(str_value, python_type_name):
-    """
-    Converts a value to a specific Python primitive type.
-
-    :param python_type_name: Python primitive type name (case insensitive)
-    :raises ~exceptions.ValueError: for unsupported types or conversion failure
-    """
-
-    python_type_name = python_type_name.lower()
-    try:
-        if python_type_name in ('str', 'unicode'):
-            return str_value.decode('utf-8')
-        elif python_type_name == 'int':
-            return int(str_value)
-        elif python_type_name == 'bool':
-            return bool(str_value)
-        elif python_type_name == 'float':
-            return float(str_value)
-        else:
-            raise ValueError('Unsupported Python type name: {0}'.format(python_type_name))
-    except ValueError:
-        raise ValueError('Failed to convert {0} to {1}'.format(str_value, python_type_name))
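A short sketch of the type helpers in action:

    from aria.utils.type import (canonical_type, canonical_type_name,
                                 convert_value_to_type, validate_value_type)

    print canonical_type_name(9090)        # 'integer'
    print canonical_type('timestamp')      # <type 'datetime.datetime'>

    validate_value_type('9090', 'string')  # passes: plain str is accepted for 'string'
    validate_value_type((1, 2), 'list')    # passes: tuples are accepted for 'list'

    print convert_value_to_type('9090', 'int')    # 9090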
diff --git a/apache-ariatosca-0.1.1/aria/utils/uris.py b/apache-ariatosca-0.1.1/aria/utils/uris.py
deleted file mode 100644
index 49881f2..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/uris.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-URI utilities.
-"""
-
-import os
-import urlparse
-
-
-_IS_WINDOWS = (os.name == 'nt')
-
-
-def as_file(uri):
-    """
-    If the URI is a file (either the ``file`` scheme or no scheme), then returns the normalized
-    path. Otherwise, returns ``None``.
-    """
-
-    if _IS_WINDOWS:
-        # We need this extra check on Windows before urlparse because paths might have a drive
-        # prefix, e.g. "C:", which urlparse below would consider a scheme
-        path = uri.replace('/', '\\')
-        if os.path.exists(path):
-            return os.path.normpath(path)
-
-    url = urlparse.urlparse(uri)
-    scheme = url.scheme
-    if (not scheme) or (scheme == 'file'):
-        path = url.path
-        if _IS_WINDOWS:
-            path = path.replace('/', '\\')
-        return os.path.normpath(path)
-
-    return None
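A sketch of ``as_file`` behavior (POSIX-style paths shown; on Windows the slashes are normalized to backslashes):

    from aria.utils.uris import as_file

    print as_file('file:///etc/hosts')            # '/etc/hosts'
    print as_file('/etc/hosts')                   # '/etc/hosts' (no scheme means a plain path)
    print as_file('http://example.org/a.yaml')    # None, since this is not a local file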
diff --git a/apache-ariatosca-0.1.1/aria/utils/uuid.py b/apache-ariatosca-0.1.1/aria/utils/uuid.py
deleted file mode 100644
index d6c9ced..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/uuid.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-UUID generation utilities.
-"""
-
-from __future__ import absolute_import  # so we can import standard 'uuid'
-
-from random import randrange
-from uuid import uuid4
-
-from shortuuid import ShortUUID
-
-
-# Alphanumeric without visually ambiguous characters; default length is 22
-UUID_BASE57 = ShortUUID()
-
-# Lower-case alphanumeric; default length is 25
-UUID_LOWERCASE_ALPHANUMERIC = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789')
-
-
-def generate_uuid(length=None, variant='base57'):
-    """
-    A random string with varying degrees of guarantee of universal uniqueness.
-
-    :param variant:
-     * ``base57`` (the default) uses a mix of upper and lowercase alphanumerics ensuring no visually
-       ambiguous characters; default length 22
-     * ``alphanumeric`` uses lowercase alphanumeric; default length 25
-     * ``uuid`` uses lowercase hexadecimal in the classic UUID format, including dashes; length is
-       always 36
-     * ``hex`` uses lowercase hexadecimal characters but has no guarantee of uniqueness; default
-       length of 5
-    """
-
-    if variant == 'base57':
-        the_id = UUID_BASE57.uuid()
-        if length is not None:
-            the_id = the_id[:length]
-
-    elif variant == 'alphanumeric':
-        the_id = UUID_LOWERCASE_ALPHANUMERIC.uuid()
-        if length is not None:
-            the_id = the_id[:length]
-
-    elif variant == 'uuid':
-        the_id = str(uuid4())
-
-    elif variant == 'hex':
-        length = length or 5
-        # See: http://stackoverflow.com/a/2782859
-        the_id = ('%0' + str(length) + 'x') % randrange(16 ** length)
-
-    else:
-        raise ValueError('unsupported UUID variant: {0}'.format(variant))
-
-    return the_id
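A quick sketch of the variants (the printed values naturally differ on every run):

    from aria.utils.uuid import generate_uuid

    print generate_uuid()                           # 22-character base57 string
    print generate_uuid(variant='alphanumeric')     # 25-character lowercase alphanumeric string
    print generate_uuid(variant='uuid')             # classic dashed UUID, always 36 characters
    print generate_uuid(length=8, variant='hex')    # 8 lowercase hexadecimal characters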
diff --git a/apache-ariatosca-0.1.1/aria/utils/validation.py b/apache-ariatosca-0.1.1/aria/utils/validation.py
deleted file mode 100644
index 06989a7..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/validation.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Validation utilities.
-"""
-
-from .formatting import string_list_as_string
-
-
-class ValidatorMixin(object):
-    """
-    A mix-in that should be added to classes that require validating user input.
-    """
-
-    _ARGUMENT_TYPE_MESSAGE = '{name} argument must be {type} based, got {arg!r}'
-    _ARGUMENT_CHOICE_MESSAGE = '{name} argument must be in {choices}, got {arg!r}'
-
-    @classmethod
-    def validate_in_choice(cls, name, argument, choices):
-        """
-        Validate ``argument`` is in ``choices``
-        """
-        if argument not in choices:
-            raise TypeError(cls._ARGUMENT_CHOICE_MESSAGE.format(
-                name=name, choices=choices, arg=argument))
-
-    @classmethod
-    def validate_type(cls, argument_name, argument, expected_type):
-        """
-        Validate ``argument`` is a subclass of ``expected_type``
-        """
-        if not issubclass(argument, expected_type):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type=expected_type, arg=argument))
-
-    @classmethod
-    def validate_instance(cls, argument_name, argument, expected_type):
-        """
-        Validate ``argument`` is an instance of ``expected_type``
-        """
-        if not isinstance(argument, expected_type):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type=expected_type, arg=argument))
-
-    @classmethod
-    def validate_callable(cls, argument_name, argument):
-        """
-        Validate ``argument`` is callable
-        """
-        if not callable(argument):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type='callable', arg=argument))
-
-
-def validate_function_arguments(func, func_kwargs):
-    """
-    Validates all required arguments are supplied to ``func`` and that no additional arguments are
-    supplied.
-    """
-
-    _kwargs_flags = 8
-
-    has_kwargs = func.func_code.co_flags & _kwargs_flags != 0
-    args_count = func.func_code.co_argcount
-
-    # All args except those that have default values
-    args = func.func_code.co_varnames[:args_count]
-    non_default_args = args[:len(args) - len(func.func_defaults)] if func.func_defaults else args
-
-    # Check whether any arg without a default value is missing from func_kwargs
-    for arg in non_default_args:
-        if arg not in func_kwargs:
-            raise ValueError(
-                'The argument "{arg}" is not provided and does not have a default value for '
-                'function "{func.__name__}"'.format(arg=arg, func=func))
-
-    # check if there are any extra kwargs
-    extra_kwargs = [arg for arg in func_kwargs.keys() if arg not in args]
-
-    # assert that the function has kwargs
-    if extra_kwargs and not has_kwargs:
-        raise ValueError("The following extra kwargs were supplied: {extra_kwargs}".format(
-            extra_kwargs=string_list_as_string(extra_kwargs)
-        ))
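A hedged sketch of ``validate_function_arguments``; the ``deploy`` function is invented for illustration:

    from aria.utils.validation import validate_function_arguments

    def deploy(service, nodes=1):
        return service, nodes

    # 'nodes' has a default value, so supplying only 'service' is valid.
    validate_function_arguments(deploy, {'service': 'web'})

    # 'service' has no default value and is missing, so this raises ValueError.
    try:
        validate_function_arguments(deploy, {'nodes': 3})
    except ValueError as e:
        print e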
diff --git a/apache-ariatosca-0.1.1/aria/utils/versions.py b/apache-ariatosca-0.1.1/aria/utils/versions.py
deleted file mode 100644
index 521004c..0000000
--- a/apache-ariatosca-0.1.1/aria/utils/versions.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Version string utilities.
-"""
-
-import re
-
-
-_INF = float('inf')
-
-_NULL = (), _INF
-
-_DIGITS_RE = re.compile(r'^\d+$')
-
-_PREFIXES = {
-    'dev':   0.0001,
-    'alpha': 0.001,
-    'beta':  0.01,
-    'rc':    0.1
-}
-
-
-class VersionString(unicode):
-    """
-    Version string that can be compared, sorted, made unique in a set, and used as a unique dict
-    key.
-
-    The primary part of the string is one or more dot-separated natural numbers. Trailing zeroes
-    are treated as redundant, e.g. "1.0.0" == "1.0" == "1".
-
-    An optional qualifier can be added after a "-". The qualifier can be a natural number or a
-    specially treated prefixed natural number, e.g. "1.1-beta1" > "1.1-alpha2". The case of the
-    prefix is ignored.
-
-    Numeric qualifiers will always be greater than prefixed integer qualifiers, e.g. "1.1-1" >
-    "1.1-beta1".
-
-    Versions without a qualifier will always be greater than their equivalents with a qualifier,
-    e.g. "1.1" > "1.1-1".
-
-    Any value that does not conform to this format will be treated as a zero version, which would
-    be lesser than any non-zero version.
-
-    For efficient list sorts use the ``key`` property, e.g.::
-
-        sorted(versions, key=lambda x: x.key)
-    """
-
-    NULL = None # initialized below
-
-    def __init__(self, value=None):
-        if value is not None:
-            super(VersionString, self).__init__(value)
-        self.key = parse_version_string(self)
-
-    def __eq__(self, version):
-        if not isinstance(version, VersionString):
-            version = VersionString(version)
-        return self.key == version.key
-
-    def __lt__(self, version):
-        if not isinstance(version, VersionString):
-            version = VersionString(version)
-        return self.key < version.key
-
-    def __hash__(self):
-        return self.key.__hash__()
-
-
-def parse_version_string(version): # pylint: disable=too-many-branches
-    """
-    Parses a version string.
-
-    :param version: version string
-    :returns: primary tuple and qualifier float
-    :rtype: ((:obj:`int`), :obj:`float`)
-    """
-
-    if version is None:
-        return _NULL
-    version = unicode(version)
-
-    # Split to primary and qualifier on '-'
-    split = version.split('-', 1)
-    if len(split) == 2:
-        primary, qualifier = split
-    else:
-        primary = split[0]
-        qualifier = None
-
-    # Parse primary
-    split = primary.split('.')
-    primary = []
-    for element in split:
-        if _DIGITS_RE.match(element) is None:
-            # Invalid version string
-            return _NULL
-        try:
-            element = int(element)
-        except ValueError:
-            # Invalid version string
-            return _NULL
-        primary.append(element)
-
-    # Remove redundant zeros
-    for element in reversed(primary):
-        if element == 0:
-            primary.pop()
-        else:
-            break
-    primary = tuple(primary)
-
-    # Parse qualifier
-    if qualifier is not None:
-        if _DIGITS_RE.match(qualifier) is not None:
-            # Integer qualifier
-            try:
-                qualifier = float(int(qualifier))
-            except ValueError:
-                # Invalid version string
-                return _NULL
-        else:
-            # Prefixed integer qualifier
-            value = None
-            qualifier = qualifier.lower()
-            for prefix, factor in _PREFIXES.iteritems():
-                if qualifier.startswith(prefix):
-                    value = qualifier[len(prefix):]
-                    if _DIGITS_RE.match(value) is None:
-                        # Invalid version string
-                        return _NULL
-                    try:
-                        value = float(int(value)) * factor
-                    except ValueError:
-                        # Invalid version string
-                        return _NULL
-                    break
-            if value is None:
-                # Invalid version string
-                return _NULL
-            qualifier = value
-    else:
-        # Version strings with no qualifiers are higher
-        qualifier = _INF
-
-    return primary, qualifier
-
-
-VersionString.NULL = VersionString()
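A short sketch of the comparison semantics described in the class docstring:

    from aria.utils.versions import VersionString

    print VersionString('1.0.0') == VersionString('1')           # True: trailing zeroes are redundant
    print VersionString('1.1-beta1') < VersionString('1.1-1')    # True: numeric qualifiers win
    print VersionString('1.1-1') < VersionString('1.1')          # True: no qualifier ranks highest

    versions = [VersionString(v) for v in ('1.1', '1.1-rc1', '1.0')]
    print sorted(versions, key=lambda v: v.key)                  # [u'1.0', u'1.1-rc1', u'1.1']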
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/helloworld.yaml b/apache-ariatosca-0.1.1/examples/hello-world/helloworld.yaml
deleted file mode 100644
index d3369b7..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/helloworld.yaml
+++ /dev/null
@@ -1,38 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-node_types:
-
-  WebServer:
-    derived_from: tosca.nodes.Root
-    capabilities:
-      host:
-        type: tosca.capabilities.Container
-
-  WebApp:
-    derived_from: tosca.nodes.WebApplication
-    properties:
-      port:
-        type: integer
-
-topology_template:
-
-  node_templates:
-    web_server:
-      type: WebServer
-
-    web_app:
-      type: WebApp
-      properties:
-        port: 9090
-      requirements:
-        - host: web_server
-      interfaces:
-        Standard:
-          configure: scripts/configure.sh
-          start: scripts/start.sh
-          stop: scripts/stop.sh
-
-  outputs:
-    port:
-      type: integer
-      value: { get_property: [ web_app, port ] }
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/images/aria-logo.png b/apache-ariatosca-0.1.1/examples/hello-world/images/aria-logo.png
deleted file mode 100644
index 3505844..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/images/aria-logo.png
+++ /dev/null
Binary files differ
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/index.html b/apache-ariatosca-0.1.1/examples/hello-world/index.html
deleted file mode 100644
index 8d21c3a..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/index.html
+++ /dev/null
@@ -1,14 +0,0 @@
-<html>
-    <head>
-        <title>ARIA Hello World</title>
-    </head>
-<body>
-    <h1>Hello, World!</h1>
-    <p>
-        blueprint_id = {{ ctx.service_template.name }}<br/>
-        deployment_id = {{ ctx.service.name }}<br/>
-        node_id = {{ ctx.node.name }}
-    </p>
-    <img src="aria-logo.png">
-</body>
-</html>
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/scripts/configure.sh b/apache-ariatosca-0.1.1/examples/hello-world/scripts/configure.sh
deleted file mode 100755
index 9ac26d5..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/scripts/configure.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-TEMP_DIR="/tmp"
-PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
-if [ -d ${PYTHON_FILE_SERVER_ROOT} ]; then
-	echo "Removing file server root folder ${PYTHON_FILE_SERVER_ROOT}"
-	rm -rf ${PYTHON_FILE_SERVER_ROOT}
-fi
-ctx logger info "Creating HTTP server root directory at ${PYTHON_FILE_SERVER_ROOT}"
-
-mkdir -p ${PYTHON_FILE_SERVER_ROOT}
-
-cd ${PYTHON_FILE_SERVER_ROOT}
-
-index_path="index.html"
-image_path="images/aria-logo.png"
-
-ctx logger info "Downloading blueprint resources..."
-ctx download-resource-and-render ${PYTHON_FILE_SERVER_ROOT}/index.html ${index_path}
-ctx download-resource ${PYTHON_FILE_SERVER_ROOT}/aria-logo.png ${image_path}
-
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/scripts/start.sh b/apache-ariatosca-0.1.1/examples/hello-world/scripts/start.sh
deleted file mode 100755
index 010af2c..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/scripts/start.sh
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-TEMP_DIR="/tmp"
-PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
-PID_FILE="server.pid"
-
-ctx logger info "Starting HTTP server from ${PYTHON_FILE_SERVER_ROOT}"
-
-port=$(ctx node properties port)
-
-cd ${PYTHON_FILE_SERVER_ROOT}
-ctx logger info "Starting SimpleHTTPServer"
-nohup python -m SimpleHTTPServer ${port} > /dev/null 2>&1 &
-echo $! > ${PID_FILE}
-
-ctx logger info "Waiting for server to launch on port ${port}"
-url="http://localhost:${port}"
-
-server_is_up() {
-	if which wget >/dev/null; then
-		if wget $url >/dev/null; then
-			return 0
-		fi
-	elif which curl >/dev/null; then
-		if curl $url >/dev/null; then
-			return 0
-		fi
-	else
-		ctx logger error "Neither wget nor curl was found in path"
-		exit 1
-	fi
-	return 1
-}
-
-STARTED=false
-for i in $(seq 1 15)
-do
-	if server_is_up; then
-		ctx logger info "Server is up."
-		STARTED=true
-    	break
-	else
-		ctx logger info "Server not up. waiting 1 second."
-		sleep 1
-	fi
-done
-if [ ${STARTED} = false ]; then
-	ctx logger error "Failed starting web server in 15 seconds."
-	exit 1
-fi
diff --git a/apache-ariatosca-0.1.1/examples/hello-world/scripts/stop.sh b/apache-ariatosca-0.1.1/examples/hello-world/scripts/stop.sh
deleted file mode 100755
index e393dbf..0000000
--- a/apache-ariatosca-0.1.1/examples/hello-world/scripts/stop.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-TEMP_DIR="/tmp"
-PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
-PID_FILE="server.pid"
-
-PID=`cat ${PYTHON_FILE_SERVER_ROOT}/${PID_FILE}`
-
-ctx logger info "Shutting down file server. pid = ${PID}"
-kill -9 ${PID} || exit $?
-
-ctx logger info "Deleting file server root directory (${PYTHON_FILE_SERVER_ROOT})"
-rm -rf ${PYTHON_FILE_SERVER_ROOT}
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/block-storage-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/block-storage-1.yaml
deleted file mode 100644
index b912fb2..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/block-storage-1.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with server and attached block storage using the normative AttachesTo
-  Relationship Type.
-
-metadata:
-  template_name: block-storage-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      description: Size of the storage to be created.
-      default: 1 GB
-    storage_snapshot_id:
-      type: string
-      description: >-
-        Optional identifier for an existing snapshot to use when creating storage.    
-    storage_location:
-      type: string
-      description: Block storage mount point (filesystem path).
-
-  node_templates:
-
-    my_server:
-      type: Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 1 GB
-        os:
-          properties:
-            architecture: x86_64
-            type: linux 
-            distribution: fedora 
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            relationship:
-              type: AttachesTo
-              properties:
-                location: { get_input: storage_location }
-
-    my_storage:
-      type: BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-  outputs:
-    private_ip:
-      description: The private IP address of the newly created compute instance.
-      value: { get_attribute: [ my_server, private_address ] }
-    volume_id:
-      description: The volume id of the block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
deleted file mode 100644
index d0b0854..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-storage_snapshot_id: "snapshot-id"
-storage_location: /mnt
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/block-storage-2.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/block-storage-2.yaml
deleted file mode 100644
index ac475cf..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/block-storage-2.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with server and attached block storage using a custom AttachesTo Relationship
-  Type.
-
-metadata:
-  template_name: block-storage-2
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-relationship_types:
-
-  MyCustomAttachesTo:
-     derived_from: AttachesTo
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      description: Size of the storage to be created.
-      default: 1 GB
-    storage_snapshot_id:
-      type: string
-      description: >-
-        Optional identifier for an existing snapshot to use when creating storage.    
-    storage_location:
-      type: string
-      description: Block storage mount point (filesystem path).
-
-  node_templates:
-
-    my_server:
-      type: Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4 GB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux 
-            distribution: Fedora 
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            # Declare custom AttachesTo type using the 'relationship' keyword
-            relationship:
-              type: MyCustomAttachesTo
-              properties:
-                location: { get_input: storage_location }
-
-    my_storage:
-      type: BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-  outputs:
-    private_ip:
-      description: The private IP address of the newly created compute instance.
-      value: { get_attribute: [ my_server, private_address ] }
-
-    volume_id:
-      description: The volume id of the block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
deleted file mode 100644
index d0b0854..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-storage_snapshot_id: "snapshot-id"
-storage_location: /mnt
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/block-storage-3.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/block-storage-3.yaml
deleted file mode 100644
index c3f183e..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/block-storage-3.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with server and attached block storage using a named Relationship Template
-  for the storage attachment.
-
-metadata:
-  template_name: block-storage-3
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      description: Size of the storage to be created.
-      default: 1 GB
-    storage_location:
-      type: string
-      description: Block storage mount point (filesystem path).
-
-  node_templates:
-
-    my_server:
-      type: Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4 GB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux 
-            distribution: Fedora 
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            # Declare template to use with 'relationship' keyword
-            relationship: storage_attachment
-
-    my_storage:
-      type: BlockStorage
-      properties:
-        size: { get_input: storage_size }
-
-  relationship_templates:
-
-    storage_attachment:
-      type: AttachesTo
-      properties:
-        location: { get_input: storage_location }
-
-  outputs:
-    private_ip:
-      description: The private IP address of the newly created compute instance.
-      value: { get_attribute: [ my_server, private_address ] }
-    volume_id:
-      description: The volume id of the block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
deleted file mode 100644
index daca041..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-storage_location: /mnt
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/block-storage-4.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/block-storage-4.yaml
deleted file mode 100644
index e2bdb9f..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/block-storage-4.yaml
+++ /dev/null
@@ -1,96 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with a Single Block Storage node shared by 2-Tier Application with custom
-  AttachesTo Type and implied relationships.
-
-metadata:
-  template_name: block-storage-4
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-relationship_types:
-
-  MyAttachesTo:
-    derived_from: tosca.relationships.AttachesTo
-    properties:
-      location:
-        type: string
-        default: /default_location
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      default: 1 GB
-      description: Size of the storage to be created.
-    storage_snapshot_id:
-      type: string
-      description: >-
-        Optional identifier for an existing snapshot to use when creating storage.    
-
-  node_templates:
-
-    my_web_app_tier_1:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            relationship: MyAttachesTo
-
-    my_web_app_tier_2:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            relationship:
-              type: MyAttachesTo
-              properties:
-                location: /some_other_data_location
-
-    my_storage:
-      type: tosca.nodes.BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-  outputs:
-    private_ip_1:
-      description: The private IP address of the application's first tier.
-      value: { get_attribute: [ my_web_app_tier_1, private_address ] }
-    private_ip_2:
-      description: The private IP address of the application's second tier.
-      value: { get_attribute: [ my_web_app_tier_2, private_address ] }
-    volume_id:
-      description: The volume id of the block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
deleted file mode 100644
index 18e457d..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-storage_snapshot_id: "snapshot-id"
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/block-storage-5.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/block-storage-5.yaml
deleted file mode 100644
index a0c2229..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/block-storage-5.yaml
+++ /dev/null
@@ -1,109 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with a single Block Storage node shared by a 2-Tier Application with custom
-  AttachesTo Type and explicit Relationship Templates.
-
-metadata:
-  template_name: block-storage-5
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-relationship_types:
-
-  MyAttachesTo:
-    derived_from: tosca.relationships.AttachesTo
-    properties:
-      location:
-        type: string
-        default: /default_location
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      default: 1 GB
-      description: Size of the storage to be created.
-    storage_snapshot_id:
-      type: string
-      description: >-
-        Optional identifier for an existing snapshot to use when creating storage.
-    storage_location:
-      type: string
-      description: >-
-        Block storage mount point (filesystem path).
-
-  node_templates:
-
-    my_web_app_tier_1:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            relationship: storage_attachesto_1
-
-    my_web_app_tier_2:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-        - local_storage:
-            node: my_storage
-            relationship: storage_attachesto_2
-
-    my_storage:
-      type: tosca.nodes.BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-  relationship_templates:
-
-    storage_attachesto_1:
-      type: MyAttachesTo
-      properties:
-        location: /my_data_location
-
-    storage_attachesto_2:
-      type: MyAttachesTo
-      properties:
-        location: /some_other_data_location
-
-  outputs:
-    private_ip_1:
-      description: The private IP address of the application's first tier.
-      value: { get_attribute: [ my_web_app_tier_1, private_address ] }
-    private_ip_2:
-      description: The private IP address of the application's second tier.
-      value: { get_attribute: [ my_web_app_tier_2, private_address ] }
-    volume_id:
-      description: The volume id of the block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
deleted file mode 100644
index d0b0854..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-storage_snapshot_id: "snapshot-id"
-storage_location: /mnt
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/block-storage-6.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/block-storage-6.yaml
deleted file mode 100644
index 534884a..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/block-storage-6.yaml
+++ /dev/null
@@ -1,102 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with 2 servers each with different attached block storage.
-
-metadata:
-  template_name: block-storage-6
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    storage_size:
-      type: scalar-unit.size
-      default: 1 GB
-      description: Size of the storage to be created.
-    storage_snapshot_id:
-      type: string
-      description: >-
-        Optional identifier for an existing snapshot to use when creating storage.
-    storage_location:
-      type: string
-      description: >-
-        Block storage mount point (filesystem path).
-
-  node_templates:
-
-    my_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-         - local_storage:
-             node: my_storage
-             relationship:
-               type: AttachesTo
-               properties:
-                 location: { get_input: storage_location }
-
-    my_storage:
-      type: tosca.nodes.BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-    my_server2:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: Fedora
-            version: 18.0
-      requirements:
-         - local_storage:
-             node: my_storage2
-             relationship:
-               type: AttachesTo
-               properties:
-                 location: { get_input: storage_location }
-
-    my_storage2:
-      type: tosca.nodes.BlockStorage
-      properties:
-        size: { get_input: storage_size }
-        snapshot_id: { get_input: storage_snapshot_id }
-
-  outputs:
-    server_ip_1:
-      description: The private IP address of the application's first server.
-      value: { get_attribute: [ my_server, private_address ] }
-    server_ip_2:
-      description: The private IP address of the application's second server.
-      value: { get_attribute: [ my_server2, private_address ] }
-    volume_id_1:
-      description: The volume id of the first block storage instance.
-      value: { get_property: [ my_storage, volume_id ] } # ARIA NOTE: wrong in spec
-    volume_id_2:
-      description: The volume id of the second block storage instance.
-      value: { get_property: [ my_storage2, volume_id ] } # ARIA NOTE: wrong in spec
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
deleted file mode 100644
index d0b0854..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-storage_snapshot_id: "snapshot-id"
-storage_location: /mnt
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/compute-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/compute-1.yaml
deleted file mode 100644
index 254d2b6..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/compute-1.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile that just defines a single compute instance and selects a (guest) host
-  Operating System from the Compute node's properties. Note that this example does not include
-  default values for input properties.
-
-metadata:
-  template_name: compute-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-
-  node_templates:
-
-    my_server:
-      type: Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 1 GB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: ubuntu
-            version: 12.04
-
-  outputs:
-    private_ip:
-      description: The private IP address of the deployed server instance.
-      value: { get_attribute: [ my_server, private_address ] }
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
deleted file mode 100644
index c1ee88a..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/container-1/container-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/container-1/container-1.yaml
deleted file mode 100644
index f6f69fc..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/container-1/container-1.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with WordPress, a web server and MySQL on the same server.
-
-metadata:
-  template_name: container-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-imports:
-  - ../non-normative-types.yaml
-
-# Repositories to retrieve code artifacts from
-
-repositories:
-
-  docker_hub: https://registry.hub.docker.com/
-
-topology_template:
-
-  inputs:
-    wp_host_port:
-      type: integer
-      description: The host port that maps to port 80 of the WordPress container.
-    db_root_pwd:
-      type: string
-      description: Root password for MySQL.
-
-  node_templates:
-
-    # The MYSQL container based on official MySQL image in Docker hub
-
-    mysql_container:
-      type: tosca.nodes.Container.Application.Docker
-      # ARIA NOTE: moved to a requirement in the node type
-      #capabilities:
-      #  # This is a capability that would mimic the Docker --link feature
-      #  database_link: tosca.capabilities.Docker.Link
-      artifacts:
-        my_image:
-          file: mysql
-          type: tosca.artifacts.Deployment.Image.Container.Docker
-          repository: docker_hub
-      interfaces:
-        Standard:
-          create:
-            implementation: my_image
-            inputs:
-              db_root_password: { get_input: db_root_pwd }
-
-    # The WordPress container based on official WordPress image in Docker hub
-
-    wordpress_container:
-      type: tosca.nodes.Container.Application.Docker
-      requirements:
-        - database_link: mysql_container
-      artifacts:
-        my_image:
-          file: wordpress
-          type: tosca.artifacts.Deployment.Image.Container.Docker
-          repository: docker_hub
-      interfaces:
-        Standard:
-          create:
-            implementation: my_image
-            inputs:
-              host_port: { get_input: wp_host_port }
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/collectd.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/collectd.yaml
deleted file mode 100644
index 6d28899..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/collectd.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.SoftwareComponent.Collectd:
-    derived_from: tosca.nodes.SoftwareComponent
-    requirements:
-      - collectd_endpoint:
-          capability: tosca.capabilities.Endpoint
-          relationship: tosca.relationships.ConnectsTo
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
deleted file mode 100644
index 72b210a..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.SoftwareComponent.Elasticsearch:
-    derived_from: tosca.nodes.SoftwareComponent
-    capabilities:
-      app: tosca.capabilities.Endpoint
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
deleted file mode 100644
index 4ee8700..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.SoftwareComponent.Kibana:
-    derived_from: tosca.nodes.SoftwareComponent
-    requirements:
-      - search_endpoint:
-          capability: tosca.capabilities.Endpoint
-          relationship: tosca.relationships.ConnectsTo
-    capabilities:
-      app: tosca.capabilities.Endpoint
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
deleted file mode 100644
index ea74c7e..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.SoftwareComponent.Logstash:
-    derived_from: tosca.nodes.SoftwareComponent
-    requirements:
-      - search_endpoint:
-          capability: tosca.capabilities.Endpoint
-          relationship: tosca.relationships.ConnectsTo
-    capabilities:
-      app: tosca.capabilities.Endpoint
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/rsyslog.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/rsyslog.yaml
deleted file mode 100644
index 3bd7c2b..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/rsyslog.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.SoftwareComponent.Rsyslog:
-    derived_from: tosca.nodes.SoftwareComponent
-    requirements:
-      - rsyslog_endpoint:
-          capability: tosca.capabilities.Endpoint
-          relationship: tosca.relationships.ConnectsTo
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
deleted file mode 100644
index 5302bbf..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-my_cpus: 8
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/multi-tier-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/multi-tier-1.yaml
deleted file mode 100644
index 50401ec..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/multi-tier-1/multi-tier-1.yaml
+++ /dev/null
@@ -1,237 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  This TOSCA simple profile deploys nodejs, mongodb, elasticsearch, logstash and kibana, each on a
-  separate server, with monitoring enabled for the nodejs server where a sample nodejs application
-  is running. Rsyslog and collectd are installed on the nodejs server.
-
-metadata:
-  template_name: multi-tier-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-imports:
-  - ../webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml # ARIA NOTE: moved
-  - custom_types/elasticsearch.yaml
-  - custom_types/logstash.yaml
-  - custom_types/kibana.yaml
-  - custom_types/collectd.yaml
-  - custom_types/rsyslog.yaml
-
-dsl_definitions:
-
-  host_capabilities: &host_capabilities
-    # container properties (flavor)
-    disk_size: 10 GB
-    num_cpus: { get_input: my_cpus }
-    mem_size: 4096 MB
-  os_capabilities: &os_capabilities
-    architecture: x86_64
-    type: Linux
-    distribution: Ubuntu
-    version: 14.04
-
-topology_template:
-
-  inputs:
-    my_cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-    github_url:
-       type: string
-       description: The URL to download nodejs.
-       default: https://github.com/sample.git
-
-  node_templates:
-
-    paypal_pizzastore:
-      type: tosca.nodes.WebApplication.PayPalPizzaStore
-      properties:
-        github_url: { get_input: github_url }
-      requirements:
-        - host: nodejs
-        - database_connection: mongo_db
-      interfaces:
-        Standard:
-           configure:
-             implementation: scripts/nodejs/configure.sh
-             inputs:
-               github_url: { get_property: [ SELF, github_url ] }
-               mongodb_ip: { get_attribute: [ mongo_server, private_address ] }
-           start: scripts/nodejs/start.sh
-
-    nodejs:
-      type: tosca.nodes.WebServer.Nodejs
-      requirements:
-        - host: app_server
-      interfaces:
-        Standard:
-          create: scripts/nodejs/create.sh
-
-    mongo_db:
-      type: tosca.nodes.Database
-      properties:
-        name: 'pizzastore' # ARIA NOTE: missing in spec
-      requirements:
-        - host: mongo_dbms
-      interfaces:
-        Standard:
-         create: create_database.sh
-
-    mongo_dbms:
-      type: tosca.nodes.DBMS
-      requirements:
-        - host: mongo_server
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/mongodb/create.sh
-          configure:
-            implementation: scripts/mongodb/config.sh
-            inputs:
-              mongodb_ip: { get_attribute: [ mongo_server, private_address ] } # ARIA NOTE: wrong in spec
-          start: scripts/mongodb/start.sh
-
-    elasticsearch:
-      type: tosca.nodes.SoftwareComponent.Elasticsearch
-      requirements:
-        - host: elasticsearch_server
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/elasticsearch/create.sh
-          start: scripts/elasticsearch/start.sh
-
-    logstash:
-      type: tosca.nodes.SoftwareComponent.Logstash
-      requirements:
-        - host: logstash_server
-        # ARIA NOTE: mangled in the spec
-        - search_endpoint:
-            node: elasticsearch
-            relationship:
-              interfaces:
-                Configure:
-                  pre_configure_source:
-                    implementation: python/logstash/configure_elasticsearch.py
-                    inputs:
-                      elasticsearch_ip: { get_attribute: [ elasticsearch_server, private_address ] } # ARIA NOTE: wrong in spec
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/logstash/create.sh
-          configure: scripts/logstash/config.sh
-          start: scripts/logstash/start.sh
-
-    kibana:
-      type: tosca.nodes.SoftwareComponent.Kibana
-      requirements:
-        - host: kibana_server
-        - search_endpoint: elasticsearch
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/kibana/create.sh
-          configure:
-            implementation: scripts/kibana/config.sh
-            inputs:
-              elasticsearch_ip: { get_attribute: [ elasticsearch_server, private_address ] } # ARIA NOTE: wrong in spec
-              kibana_ip: { get_attribute: [ kibana_server, private_address ] } # ARIA NOTE: wrong in spec
-          start: scripts/kibana/start.sh
-
-    app_collectd:
-      type: tosca.nodes.SoftwareComponent.Collectd
-      requirements:
-        - host: app_server
-        # ARIA NOTE: mangled in the spec
-        - collectd_endpoint:
-            node: logstash
-            relationship:
-              interfaces:
-                Configure:
-                  pre_configure_target:
-                    implementation: python/logstash/configure_collectd.py
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/collectd/create.sh
-          configure:
-            implementation: python/collectd/config.py
-            inputs:
-              logstash_ip: { get_attribute: [ logstash_server, private_address ] } # ARIA NOTE: wrong in spec
-          start: scripts/collectd/start.sh
-
-    app_rsyslog:
-      type: tosca.nodes.SoftwareComponent.Rsyslog
-      requirements:
-        - host: app_server
-        # ARIA NOTE: mangled in the spec
-        - rsyslog_endpoint:
-            node: logstash
-            relationship:
-              interfaces:
-                Configure:
-                  pre_configure_target:
-                    implementation: python/logstash/configure_rsyslog.py
-      interfaces:
-        Standard: # ARIA NOTE: wrong in spec
-          create: scripts/rsyslog/create.sh
-          configure:
-            implementation: scripts/rsyslog/config.sh
-            inputs:
-              logstash_ip: { get_attribute: [ logstash_server, private_address ] } # ARIA NOTE: wrong in spec
-          start: scripts/rsyslog/start.sh
-
-    app_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties: *host_capabilities
-        os:
-          properties: *os_capabilities
-
-    mongo_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties: *host_capabilities
-        os:
-          properties: *os_capabilities
-
-    elasticsearch_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties: *host_capabilities
-        os:
-          properties: *os_capabilities
-
-    logstash_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties: *host_capabilities
-        os:
-          properties: *os_capabilities
-
-    kibana_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties: *host_capabilities
-        os:
-          properties: *os_capabilities
-
-  outputs:
-    nodejs_url:
-      description: URL for the nodejs server.
-      value: { get_attribute: [ app_server, private_address ] }
-    mongodb_url:
-      description: URL for the mongodb server.
-      value: { get_attribute: [ mongo_server, private_address ] }
-    elasticsearch_url:
-      description: URL for the elasticsearch server.
-      value: { get_attribute: [ elasticsearch_server, private_address ] }
-    logstash_url:
-      description: URL for the logstash server.
-      value: { get_attribute: [ logstash_server, private_address ] }
-    kibana_url:
-      description: URL for the kibana server.
-      value: { get_attribute: [ kibana_server, private_address ] }
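Several requirements above (the spots the ARIA NOTEs flag as mangled in the spec) attach a Configure interface directly to the relationship, so that scripts run when the source or target side of the connection is wired up. A minimal sketch of that pattern in isolation, with an assumed script path for illustration:

    requirements:
      - search_endpoint:
          node: elasticsearch
          relationship:
            interfaces:
              Configure:
                pre_configure_source:
                  implementation: scripts/wire_search_endpoint.sh   # assumed path, for illustration
                  inputs:
                    elasticsearch_ip: { get_attribute: [ elasticsearch_server, private_address ] }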
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
deleted file mode 100644
index 9687bb0..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-network_name: "network"
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/network-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/network-1.yaml
deleted file mode 100644
index c66964f..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-1/network-1.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with 1 server bound to a new network.
-
-metadata:
-  template_name: network-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    network_name:
-      type: string
-      description: Network name
-
-  node_templates:
-
-    my_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: 1
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: CirrOS
-            version: 0.3.2
-
-    my_network:
-      type: tosca.nodes.network.Network
-      properties:
-        network_name: { get_input: network_name }
-        ip_version: 4
-        cidr: '192.168.0.0/24'
-        start_ip: '192.168.0.50'
-        end_ip: '192.168.0.200'
-        gateway_ip: '192.168.0.1'
-
-    my_port:
-      type: tosca.nodes.network.Port
-      requirements:
-        - binding: my_server
-        - link: my_network
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
deleted file mode 100644
index 9687bb0..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-network_name: "network"
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/network-2.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/network-2.yaml
deleted file mode 100644
index 017950d..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-2/network-2.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with 1 server bound to an existing network.
-
-metadata:
-  template_name: network-2
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    network_name:
-      type: string
-      description: Network name
-
-  node_templates:
-
-    my_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: 1
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: CirrOS
-            version: 0.3.2
-
-    my_network:
-      type: tosca.nodes.network.Network
-      properties:
-        network_name: { get_input: network_name }
-
-    my_port:
-      type: tosca.nodes.network.Port
-      requirements:
-        - binding:
-            node: my_server
-        - link:
-            node: my_network
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
deleted file mode 100644
index 9687bb0..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-network_name: "network"
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/network-3.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/network-3.yaml
deleted file mode 100644
index 5fa40b7..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-3/network-3.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with 2 servers bound to the 1 network.
-
-metadata:
-  template_name: network-3
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    network_name:
-      type: string
-      description: Network name
-    network_cidr:
-      type: string
-      default: 10.0.0.0/24
-      description: CIDR for the network
-    network_start_ip:
-      type: string
-      default: 10.0.0.100
-      description: Start IP for the allocation pool
-    network_end_ip:
-      type: string
-      default: 10.0.0.150
-      description: End IP for the allocation pool
-
-  node_templates:
-
-    my_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: 1
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: CirrOS
-            version: 0.3.2
-
-    my_server2:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: 1
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: CirrOS
-            version: 0.3.2
-
-    my_network:
-      type: tosca.nodes.network.Network
-      properties:
-        ip_version: 4
-        cidr: { get_input: network_cidr }
-        network_name: { get_input: network_name }
-        start_ip: { get_input: network_start_ip }
-        end_ip: { get_input: network_end_ip }
-
-    my_port:
-      type: tosca.nodes.network.Port
-      requirements:
-        - binding: my_server
-        - link: my_network
-
-    my_port2:
-      type: tosca.nodes.network.Port
-      requirements:
-        - binding: my_server2
-        - link: my_network
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-4/network-4.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-4/network-4.yaml
deleted file mode 100644
index 5b51117..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/network-4/network-4.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with 1 server bound to 3 networks.
-
-metadata:
-  template_name: network-4
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  node_templates:
-
-    my_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: 1
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: Linux
-            distribution: CirrOS
-            version: 0.3.2
-
-    my_network1:
-      type: tosca.nodes.network.Network
-      properties:
-        cidr: '192.168.1.0/24'
-        network_name: net1
-
-    my_network2:
-      type: tosca.nodes.network.Network
-      properties:
-        cidr: '192.168.2.0/24'
-        network_name: net2
-
-    my_network3:
-      type: tosca.nodes.network.Network
-      properties:
-        cidr: '192.168.3.0/24'
-        network_name: net3
-
-    my_port1:
-      type: tosca.nodes.network.Port
-      properties:
-        order: 0
-      requirements:
-        - binding: my_server
-        - link: my_network1
-
-    my_port2:
-      type: tosca.nodes.network.Port
-      properties:
-        order: 1
-      requirements:
-        - binding: my_server
-        - link: my_network2
-
-    my_port3:
-      type: tosca.nodes.network.Port
-      properties:
-        order: 2
-      requirements:
-        - binding: my_server
-        - link: my_network3
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/non-normative-types.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/non-normative-types.yaml
deleted file mode 100644
index 24f22a3..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/non-normative-types.yaml
+++ /dev/null
@@ -1,177 +0,0 @@
-
-artifact_types:
-
-  tosca.artifacts.Deployment.Image.Container.Docker:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.1.1
-    derived_from: tosca.artifacts.Deployment.Image
-    description: Docker Container Image
-
-  tosca.artifacts.Deployment.Image.VM.ISO:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.1.2
-    derived_from: tosca.artifacts.Deployment.Image.VM
-    description: Virtual Machine (VM) image in ISO disk format
-    mime_type: application/octet-stream
-    file_ext: [ iso ]
-
-  tosca.artifacts.Deployment.Image.VM.QCOW2:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.1.3
-    derived_from: tosca.artifacts.Deployment.Image.VM
-    description: Virtual Machine (VM) image in QCOW v2 standard disk format
-    mime_type: application/octet-stream
-    file_ext: [ qcow2 ]
-
-capability_types:
-
-  tosca.capabilities.Container.Docker:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.2.1
-    derived_from: tosca.capabilities.Container
-    description: The type indicates capabilities of a Docker runtime environment (client).
-    properties:
-      version:
-        description: >-
-          The Docker version capability (i.e., the versions supported by the capability).
-        type: list
-        required: false
-        entry_schema: version
-      publish_all:
-        description: >-
-          Indicates that all ports (ranges) listed in the dockerfile using the EXPOSE keyword should
-          be published.
-        type: boolean
-        default: false
-        required: false
-      publish_ports:
-        description: >-
-          List of port mappings from source (Docker container) to target (host) ports to publish.
-        type: list
-        entry_schema: PortSpec
-        required: false
-      expose_ports:
-        description: >-
-          List of port mappings from source (Docker container) to expose to other Docker containers
-          (not accessible outside host).
-        type: list
-        entry_schema: PortSpec
-        required: false
-      volumes:
-        description: >-
-          The dockerfile VOLUME command which is used to enable access from the Docker container to
-          a directory on the host machine.
-        type: list
-        entry_schema: string
-        required: false
-      # ARIA NOTE: these are missing in the spec
-      host_id:
-        description: >-
-          The optional identifier of an existing host resource that should be used to run this
-          container on.
-        type: string
-        required: false
-      volume_id:
-        description: >-
-          The optional identifier of an existing storage volume (resource) that should be used to
-          create the container's mount point(s) on.
-        type: string
-        required: false
-
-  # ARIA NOTE: missing in spec
-  tosca.capabilities.Docker.Link:
-    derived_from: tosca.capabilities.Root
-    description: This is a capability that would mimic the Docker --link feature
-
-node_types:
-
-  tosca.nodes.Database.MySQL:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.1
-    derived_from: tosca.nodes.Database
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container # ARIA NOTE: missing in spec
-          node: tosca.nodes.DBMS.MySQL
-
-  tosca.nodes.DBMS.MySQL:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.2
-    derived_from: tosca.nodes.DBMS
-    properties:
-      port:
-        description: reflect the default MySQL server port
-        type: integer # ARIA NOTE: missing in spec
-        default: 3306
-      root_password:
-        # MySQL requires a root_password for configuration
-        # Override parent DBMS definition to make this property required
-        type: string # ARIA NOTE: missing in spec
-        required: true
-    capabilities:
-      # Further constrain the 'host' capability to only allow MySQL databases
-      host:
-        type: tosca.capabilities.Container # ARIA NOTE: missing in spec
-        valid_source_types: [ tosca.nodes.Database.MySQL ] 
-
-  tosca.nodes.WebServer.Apache:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.3
-    derived_from: tosca.nodes.WebServer
-
-  tosca.nodes.WebApplication.WordPress:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.4
-    derived_from: tosca.nodes.WebApplication
-    properties:
-      admin_user:
-        type: string
-        required: false # ARIA NOTE: missing in spec
-      admin_password:
-        type: string
-        required: false # ARIA NOTE: missing in spec
-      db_host:
-        type: string
-        required: false # ARIA NOTE: missing in spec
-    requirements:
-      - database_endpoint:
-          capability: tosca.capabilities.Endpoint.Database  
-          node: tosca.nodes.Database
-          relationship: tosca.relationships.ConnectsTo
-
-  tosca.nodes.WebServer.Nodejs:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.5
-    derived_from: tosca.nodes.WebServer
-    properties:
-      # Property to supply the desired implementation in the Github repository
-      github_url:
-        required: false
-        type: string
-        description: Location of the application on GitHub.
-        default: https://github.com/mmm/testnode.git
-    interfaces:
-      Standard:
-        inputs:
-          github_url:
-            type: string
-
-  tosca.nodes.Container.Application.Docker:
-    _extensions:
-      specification: tosca-simple-1.0
-      specification_section: 8.3.6
-    derived_from: tosca.nodes.Container.Application
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container.Docker
-      - database_link: # ARIA NOTE: missing in spec
-          capability: tosca.capabilities.Docker.Link
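Of the types above, tosca.nodes.WebApplication.WordPress appears later in this section only through its inherited context_root property. A small sketch of a node template that also assigns the admin_user, admin_password and db_host properties defined here (the input name and the db_server/wordpress_db templates are assumptions for illustration only):

    topology_template:
      inputs:
        wp_admin_password:
          type: string
      node_templates:
        wordpress:
          type: tosca.nodes.WebApplication.WordPress
          properties:
            admin_user: wp_admin                                         # illustrative value
            admin_password: { get_input: wp_admin_password }
            db_host: { get_attribute: [ db_server, private_address ] }   # assumed Compute template
          requirements:
            - database_endpoint: wordpress_db                            # assumed Database template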
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
deleted file mode 100644
index 57f99a3..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-objectstore_name: "objectstore"
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/object-storage-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/object-storage-1.yaml
deleted file mode 100644
index c55a4db..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/object-storage-1/object-storage-1.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-    TOSCA template for creating an object storage service.
-
-metadata:
-  template_name: object-storage-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-topology_template:
-
-  inputs:
-    objectstore_name:
-      type: string
-
-  node_templates:
-
-    obj_store_server:
-      type: tosca.nodes.ObjectStorage
-      properties:
-        name: { get_input: objectstore_name }
-        size: 4096 MB
-        maxsize: 20 GB
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
deleted file mode 100644
index c1ee88a..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
+++ /dev/null
@@ -1 +0,0 @@
-cpus: 4
\ No newline at end of file
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/software-component-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/software-component-1.yaml
deleted file mode 100644
index fc1cfd7..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/software-component-1/software-component-1.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA Simple Profile with a SoftwareComponent node with a declared Virtual machine (VM) deployment
-  artifact that automatically deploys to its host Compute node.
-
-metadata:
-  template_name: software-component-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-imports:
-  - ../non-normative-types.yaml
-
-topology_template:
-
-  # ARIA NOTE: missing in spec
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-
-  node_templates:
-
-    my_virtual_machine:
-      type: SoftwareComponent
-      artifacts:
-        my_vm_image:
-          file: images/fedora-18-x86_64.qcow2
-          type: tosca.artifacts.Deployment.Image.VM.QCOW2
-      requirements:
-        - host: my_server
-      # Automatically deploy the VM image referenced on the create operation
-      interfaces:
-        Standard:
-          create: my_vm_image
-
-    # Compute instance with no Operating System guest host
-    my_server:
-      type: Compute
-      capabilities:
-        # Note: no guest OperatingSystem requirements as these are in the image.
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4 GB
-
-  outputs:
-    private_ip:
-      description: The private IP address of the deployed server instance.
-      value: { get_attribute: [ my_server, private_address ] }
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-1/webserver-dbms-1.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-1/webserver-dbms-1.yaml
deleted file mode 100644
index faf109d..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-1/webserver-dbms-1.yaml
+++ /dev/null
@@ -1,122 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with WordPress, a web server, and a MySQL DBMS hosting the application's
-  database content on the same server. Does not have input defaults or constraints.
-
-metadata:
-  template_name: webserver-dbms-1
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-imports:
-  - ../non-normative-types.yaml
-
-topology_template:
-
-  inputs:
-    cpus:
-      type: integer
-      description: Number of CPUs for the server.
-    db_name:
-      type: string
-      description: The name of the database.
-    db_user:
-      type: string
-      description: The username of the DB user.
-    db_pwd:
-      type: string
-      description: The WordPress database admin account password.
-    db_root_pwd:
-      type: string
-      description: Root password for MySQL.
-    db_port:
-      type: PortDef
-      description: Port for the MySQL database
-    # ARIA NOTE: missing in spec
-    context_root:
-      type: string
-      description: Context root for WordPress.
-
-  node_templates:
-
-    wordpress:
-      type: tosca.nodes.WebApplication.WordPress
-      properties:
-        context_root: { get_input: context_root }
-      requirements:
-        - host: webserver
-        - database_endpoint: mysql_database
-      interfaces:
-        Standard:
-          create: wordpress_install.sh
-          configure:
-            implementation: wordpress_configure.sh           
-            inputs:
-              wp_db_name: { get_property: [ mysql_database, name ] }
-              wp_db_user: { get_property: [ mysql_database, user ] }
-              wp_db_password: { get_property: [ mysql_database, password ] }  
-              # In my own template, find requirement/capability, find port property
-              wp_db_port: { get_property: [ SELF, database_endpoint, port ] }
-
-    mysql_database:
-      type: Database
-      properties:
-        name: { get_input: db_name }
-        user: { get_input: db_user }
-        password: { get_input: db_pwd }
-        port: { get_input: db_port }
-      capabilities:
-        database_endpoint:
-          properties:
-            port: { get_input: db_port }
-      requirements:
-        - host: mysql_dbms
-      interfaces:
-        Standard:
-          configure: mysql_database_configure.sh
-
-    mysql_dbms:
-      type: DBMS
-      properties:
-        root_password: { get_input: db_root_pwd }
-        port: { get_input: db_port }
-      requirements:
-        - host: server
-      interfaces:
-        Standard:
-          # ARIA NOTE: not declared in spec              
-          #inputs:
-          #  db_root_password: { get_property: [ mysql_dbms, root_password ] }
-          create: mysql_dbms_install.sh
-          start: mysql_dbms_start.sh
-          configure: mysql_dbms_configure.sh
-
-    webserver:
-      type: WebServer
-      requirements:
-        - host: server
-      interfaces:
-        Standard:
-          create: webserver_install.sh
-          start: webserver_start.sh
-
-    server:
-      type: Compute
-      capabilities:
-        host:
-          properties:
-            disk_size: 10 GB
-            num_cpus: { get_input: cpus }
-            mem_size: 4096 MB
-        os:
-          properties:
-            architecture: x86_64
-            type: linux 
-            distribution: fedora 
-            version: 17.0
-
-  outputs:
-    website_url:
-      description: URL for Wordpress wiki.
-      value: { get_attribute: [ server, public_address ] }
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
deleted file mode 100644
index 02bb399..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-# ARIA NOTE: missing in spec
-
-node_types:
-
-  tosca.nodes.WebApplication.PayPalPizzaStore:
-    derived_from: tosca.nodes.WebApplication
-    properties:
-      github_url:
-        type: string
-    requirements:
-      - database_connection:
-          capability: tosca.capabilities.Node
-
-  tosca.nodes.WebServer.Nodejs:
-    derived_from: tosca.nodes.WebServer
diff --git a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml b/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
deleted file mode 100644
index 91f0b35..0000000
--- a/apache-ariatosca-0.1.1/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
+++ /dev/null
@@ -1,115 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-description: >-
-  TOSCA simple profile with a nodejs web server hosting a PayPal sample application which connects
-  to a mongodb database.
-
-metadata:
-  template_name: webserver-dbms-2
-  template_author: TOSCA Simple Profile in YAML
-  template_version: '1.0'
-
-imports:
-  - custom_types/paypalpizzastore_nodejs_app.yaml
-
-dsl_definitions:
-
-  ubuntu_node: &ubuntu_node
-    disk_size: 10 GB
-    num_cpus: { get_input: my_cpus }
-    mem_size: 4096 MB
-  os_capabilities: &os_capabilities
-    architecture: x86_64
-    type: Linux
-    distribution: Ubuntu
-    version: 14.04
-
-topology_template:
-
-  inputs:
-    my_cpus:
-      type: integer
-      description: Number of CPUs for the server.
-      constraints:
-        - valid_values: [ 1, 2, 4, 8 ]
-      default: 1
-    github_url:
-      type: string
-      description: The URL to download nodejs.
-      default:  https://github.com/sample.git
-
-  node_templates:
-
-    paypal_pizzastore:
-      type: tosca.nodes.WebApplication.PayPalPizzaStore
-      properties:
-          github_url: { get_input: github_url }
-      requirements:
-        - host: nodejs
-        - database_connection: mongo_db
-      interfaces:
-        Standard:
-           configure:
-             implementation: scripts/nodejs/configure.sh
-             inputs:
-               github_url: { get_property: [ SELF, github_url ] }
-               mongodb_ip: { get_attribute: [ mongo_server, private_address ] }
-           start: scripts/nodejs/start.sh
-
-    nodejs:
-      type: tosca.nodes.WebServer.Nodejs
-      requirements:
-        - host: app_server
-      interfaces:
-        Standard:
-          create: scripts/nodejs/create.sh
-
-    mongo_db:
-      type: tosca.nodes.Database
-      properties:
-        name: 'pizzastore' # ARIA NOTE: missing in spec
-      requirements:
-        - host: mongo_dbms
-      interfaces:
-        Standard:
-         create: create_database.sh
-
-    mongo_dbms:
-      type: tosca.nodes.DBMS
-      requirements:
-        - host: mongo_server
-      properties:
-        port: 27017
-      interfaces:
-        Standard: # ARIA NOTE: mistaken in spec
-          create: mongodb/create.sh
-          configure:
-            implementation: mongodb/config.sh
-            inputs:
-              mongodb_ip: { get_attribute: [ mongo_server, private_address ] }
-          start: mongodb/start.sh
-
-    mongo_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        os:
-          properties: *os_capabilities
-        host:
-          properties: *ubuntu_node
-
-    app_server:
-      type: tosca.nodes.Compute
-      capabilities:
-        os:
-          properties: *os_capabilities
-        host:
-          properties: *ubuntu_node
-
-  outputs:
-
-    nodejs_url:
-      description: URL for the nodejs server, http://<IP>:3000
-      value: { get_attribute: [ app_server, private_address ] }
-    mongodb_url:
-      description: URL for the mongodb server.
-      value: { get_attribute: [ mongo_server, private_address ] }
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
deleted file mode 100644
index c1dc11d..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-policy_types:
-
-  aria.Plugin:
-    _extensions:
-      shorthand_name: Plugin
-      type_qualified_name: aria:Plugin
-      role: plugin
-    description: >-
-      Policy used to specify plugins used by services. For an operation to be able to use a plugin
-      it must have a matching policy. The name of the policy must be the name of the plugin. The
-      optional properties can be used to further specify plugin selection by the orchestrator.
-    derived_from: tosca.policies.Root
-    properties:
-      version:
-        description: >-
-          Minimum plugin version.
-        type: version
-        required: false
-      enabled:
-        description: >-
-          If set to false, the plugin will be ignored, and all operations and workflows depending
-          on it will also be disabled.
-        type: boolean
-        default: true
-
-  aria.Workflow:
-    _extensions:
-      shorthand_name: Workflow
-      type_qualified_name: aria:Workflow
-      role: workflow
-    description: >-
-      Policy used to specify custom workflows. A workflow is usually a workload of interconnected
-      calls to operations on nodes and relationships in the service topology. The name of the policy
-      is used as the name of the workflow. Note that it can be the same name as one of the normative
-      lifecycle workflows ("install", "uninstall", etc.), in which case it would be considered an
-      override of the default behavior. If the workflow requires parameters then this base type
-      should be inherited and extended with additional properties.
-    derived_from: tosca.policies.Root
-    properties:
-      implementation:
-        description: >-
-          The interpretation of the implementation string depends on the orchestrator. In ARIA it is
-          the full path to a Python @workflow function that generates a task graph based on the
-          service topology.
-        type: string
-        required: true
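Both policy types above are consumed by name: a plugin policy must be named after the plugin, and a workflow policy's name becomes the workflow's name. A minimal sketch of how a service template would declare them under topology_template (the plugin name and the Python module path are hypothetical):

    topology_template:
      policies:
        - my_plugin:                 # must match the plugin's name
            type: aria.Plugin
            properties:
              version: '1.0'         # minimum plugin version
        - deploy:                    # becomes the name of the custom workflow
            type: aria.Workflow
            properties:
              implementation: my_package.my_workflows.deploy

As the aria.Workflow description notes, giving such a policy a normative lifecycle name like install would override the default behavior of that workflow.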
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/artifacts.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/artifacts.yaml
deleted file mode 100644
index cfb0df5..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/artifacts.yaml
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-artifact_types:
-
-  tosca.artifacts.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.3.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ARTIFACTS_ROOT'
-    description: >-
-      This is the default (root) TOSCA Artifact Type definition that all other TOSCA base Artifact Types derive from.
-  
-  tosca.artifacts.File:
-    _extensions:
-      shorthand_name: File
-      type_qualified_name: tosca:File
-      specification: tosca-simple-1.0
-      specification_section: 5.3.2
-    description: >-
-      This artifact type is used when an artifact definition needs to have its associated file simply treated as a file and no special handling/handlers are invoked (i.e., it is not treated as either an implementation or deployment artifact type).
-    derived_from: tosca.artifacts.Root
-  
-  #
-  # Deployments
-  #
-  
-  tosca.artifacts.Deployment:
-    _extensions:
-      shorthand_name: Deployment # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Deployment
-      specification: tosca-simple-1.0
-      specification_section: 5.3.3.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ARTIFACTS_DEPLOYMENT'
-    description: >-
-      This artifact type represents the parent type for all deployment artifacts in TOSCA. This class of artifacts typically
-      represents a binary packaging of an application or service that is used to install/create or deploy it as part of a node's
-      lifecycle.
-    derived_from: tosca.artifacts.Root
-    
-  tosca.artifacts.Deployment.Image:
-    _extensions:
-      shorthand_name: Deployment.Image
-      type_qualified_name: tosca:Deployment.Image
-      specification: tosca-simple-1.0
-      specification_section: 5.3.3.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ARTIFACTS_DEPLOYMENT_IMAGE'
-    description: >-
-      This artifact type represents a parent type for any "image" which is an opaque packaging of a TOSCA Node's deployment
-      (whether real or virtual) whose contents are typically already installed and pre-configured (i.e., "stateful") and prepared
-      to be run on a known target container.
-    derived_from: tosca.artifacts.Deployment
-  
-  tosca.artifacts.Deployment.Image.VM:
-    _extensions:
-      shorthand_name: Deployment.VM # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Deployment.VM
-      specification: tosca-simple-1.0
-      specification_section: 5.3.3.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ARTIFACTS_DEPLOY_IMAGE_VM'
-    description: >-
-      This artifact represents the parent type for all Virtual Machine (VM) image and container formatted deployment artifacts.
-      These images contain a stateful capture of a machine (e.g., server) including operating system and installed software along
-      with any configurations and can be run on another machine using a hypervisor which virtualizes typical server (i.e.,
-      hardware) resources.
-    derived_from: tosca.artifacts.Deployment
-  
-  #
-  # Implementations
-  #
-  
-  tosca.artifacts.Implementation:
-    _extensions:
-      shorthand_name: Implementation # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Implementation
-      specification: tosca-simple-1.0
-      specification_section: 5.3.4.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ARTIFACTS_IMPLEMENTATION'
-    description: >-
-      This artifact type represents the parent type for all implementation artifacts in TOSCA. These artifacts are used to
-      implement operations of TOSCA interfaces either directly (e.g., scripts) or indirectly (e.g., config. files).
-    derived_from: tosca.artifacts.Root
-  
-  tosca.artifacts.Implementation.Bash:
-    _extensions:
-      shorthand_name: Implementation.Bash # ARIA NOTE: mistake in spec? shouldn't we have "Implementation." as prefix?
-      type_qualified_name: tosca:Implementation.Bash
-      specification: tosca-simple-1.0
-      specification_section: 5.3.4.3
-    description: >-
-      This artifact type represents a Bash script type that contains Bash commands that can be executed on the Unix Bash shell.
-    derived_from: tosca.artifacts.Implementation
-    mime_type: application/x-sh
-    file_ext: [ sh ]
-  
-  tosca.artifacts.Implementation.Python:
-    _extensions:
-      shorthand_name: Implementation.Python # ARIA NOTE: mistake in spec? shouldn't we have "Implementation." as prefix?
-      type_qualified_name: tosca:Implementation.Python
-      specification: tosca-simple-1.0
-      specification_section: 5.3.4.4
-    description: >-
-      This artifact type represents a Python file that contains Python language constructs that can be executed within a Python
-      interpreter.
-    derived_from: tosca.artifacts.Implementation
-    mime_type: application/x-python
-    file_ext: [ py ]
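(Illustrative sketch, not part of the deleted file; the node, artifact and script names are hypothetical.) A node template declares an artifact of one of these types and can reference it from an operation:

    node_templates:
      web_app:
        type: tosca.nodes.WebApplication
        artifacts:
          install_script:
            type: tosca.artifacts.Implementation.Bash
            file: scripts/install.sh
        interfaces:
          Standard:
            create: install_script    # operation implemented by the Bash artifact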
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
deleted file mode 100644
index 30abe10..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
+++ /dev/null
@@ -1,321 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-capability_types:
-
-  tosca.capabilities.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.4.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ROOT'
-    description: >-
-      This is the default (root) TOSCA Capability Type definition that all other TOSCA Capability Types derive from.
-
-  tosca.capabilities.Node:
-    _extensions:
-      shorthand_name: Node
-      type_qualified_name: tosca:Node
-      specification: tosca-simple-1.0
-      specification_section: 5.4.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_NODE'
-      role: feature
-    description: >-
-      The Node capability indicates the base capabilities of a TOSCA Node Type.
-    derived_from: tosca.capabilities.Root
-
-  tosca.capabilities.Container:
-    _extensions:
-      shorthand_name: Container
-      type_qualified_name: tosca:Container
-      specification: tosca-simple-1.0
-      specification_section: 5.4.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_CONTAINER'
-      role: host
-    description: >-
-      The Container capability, when included on a Node Type or Template definition, indicates that the node can act as a container
-      for (or a host for) one or more other declared Node Types.
-    derived_from: tosca.capabilities.Root
-    properties:
-      num_cpus:
-        description: >-    
-          Number of (actual or virtual) CPUs associated with the Compute node.
-        type: integer
-        constraints:
-          - greater_or_equal: 1
-        required: false
-      cpu_frequency:
-        description: >-
-          Specifies the operating frequency of the CPU's core. This property expresses the expected frequency of one (1) CPU as
-          provided by the property "num_cpus".
-        type: scalar-unit.frequency
-        constraints:
-          - greater_or_equal: 0.1 GHz
-        required: false
-      disk_size:
-        description: >-
-          Size of the local disk available to applications running on the Compute node (default unit is MB).
-        type: scalar-unit.size
-        constraints:
-          - greater_or_equal: 0 MB
-        required: false
-      mem_size:
-        description: >-
-          Size of memory available to applications running on the Compute node (default unit is MB).
-        type: scalar-unit.size
-        constraints:
-          - greater_or_equal: 0 MB
-        required: false
-
-  tosca.capabilities.Attachment:
-    _extensions:
-      shorthand_name: Attachment
-      type_qualified_name: tosca:Attachment
-      specification: tosca-simple-1.0
-      specification_section: 5.4.8
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ATTACHMENT'
-    description: >-
-      This is the default TOSCA type that should be used or extended to define an attachment capability of a (logical)
-      infrastructure device node (e.g., BlockStorage node).
-    derived_from: tosca.capabilities.Root
-
-  tosca.capabilities.OperatingSystem:
-    _extensions:
-      shorthand_name: OperatingSystem
-      type_qualified_name: tosca:OperatingSystem
-      specification: tosca-simple-1.0
-      specification_section: 5.4.9
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_OPSYS'
-    description: >-
-      This is the default TOSCA type that should be used to express an Operating System capability for a node.
-    derived_from: tosca.capabilities.Root
-    properties:
-      architecture:
-        description: >-
-          The Operating System (OS) architecture. Examples of valid values include: x86_32, x86_64, etc.
-        type: string
-        required: false
-      type:
-        description: >-
-          The Operating System (OS) type. Examples of valid values include: linux, aix, mac, windows, etc.
-        type: string
-        required: false
-      distribution:
-        description: >-
-          The Operating System (OS) distribution. Examples of valid values for a "type" of "Linux" would include: debian, fedora,
-          rhel and ubuntu.
-        type: string
-        required: false
-      version:
-        description: >-
-          The Operating System version.
-        type: version
-        required: false
-
-  tosca.capabilities.Scalable:
-    _extensions:
-      shorthand_name: Scalable
-      type_qualified_name: tosca:Scalable
-      specification: tosca-simple-1.0
-      specification_section: 5.4.10
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_SCALABLE'
-    description: >-
-      This is the default TOSCA type that should be used to express a scalability capability for a node.
-    derived_from: tosca.capabilities.Root
-    properties:
-      min_instances:
-        description: >-
-          This property is used to indicate the minimum number of instances that should be created for the associated TOSCA Node
-          Template by a TOSCA orchestrator.
-        type: integer
-        default: 1
-      max_instances:
-        description: >-
-          This property is used to indicate the maximum number of instances that should be created for the associated TOSCA Node
-          Template by a TOSCA orchestrator.
-        type: integer
-        default: 1
-      default_instances:
-        description: >-
-          An optional property that indicates the requested default number of instances that should be the starting number of
-          instances a TOSCA orchestrator should attempt to allocate. Note: The value for this property MUST be in the range between
-          the values set for "min_instances" and "max_instances" properties.
-        type: integer
-        required: false
-
-  #
-  # Endpoints
-  #
-
-  tosca.capabilities.Endpoint:
-    _extensions:
-      shorthand_name: Endpoint
-      type_qualified_name: tosca:Endpoint
-      specification: tosca-simple-1.0
-      specification_section: 5.4.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ENDPOINT'
-    description: >-
-      This is the default TOSCA type that should be used or extended to define a network endpoint capability. This includes the information to express a basic endpoint with a single port or a complex endpoint with multiple ports. By default the Endpoint is assumed to represent an address on a private network unless otherwise specified.
-    derived_from: tosca.capabilities.Root
-    properties:
-      protocol:
-        description: >-
-          The name of the protocol (i.e., the protocol prefix) that the endpoint accepts (any OSI Layer 4-7 protocols). Examples:
-          http, https, ftp, tcp, udp, etc.
-        type: string
-        default: tcp
-        required: true
-      port:
-        description: >-
-          The optional port of the endpoint.
-        type: tosca.datatypes.network.PortDef
-        required: false
-      secure:
-        description: >-
-          Requests for the endpoint to be secure and use credentials supplied on the ConnectsTo relationship.
-        type: boolean
-        default: false
-        required: false
-      url_path:
-        description: >-
-          The optional URL path of the endpoint's address if applicable for the protocol.
-        type: string
-        required: false
-      port_name:
-        description: >-
-          The optional name (or ID) of the network port this endpoint should be bound to.
-        type: string
-        required: false
-      network_name:
-        description: >-
-          The optional name (or ID) of the network this endpoint should be bound to. network_name: PRIVATE | PUBLIC |
-          <network_name> | <network_id>.
-        type: string
-        default: PRIVATE
-        required: false
-      initiator:
-        description: >-
-          The optional indicator of the direction of the connection.
-        type: string
-        constraints:
-          - valid_values: [ source, target, peer ]
-        default: source
-        required: false
-      ports:
-        description: >-
-          The optional map of ports the Endpoint supports (if more than one).
-        type: map
-        entry_schema:
-          type: tosca.datatypes.network.PortSpec
-        constraints:
-          - min_length: 1
-        required: false
-    attributes:
-      ip_address:
-        description: >-
-          Note: This is the IP address as propagated up by the associated node's host (Compute) container.
-        type: string
-
-  tosca.capabilities.Endpoint.Public:
-    _extensions:
-      shorthand_name: Endpoint.Public
-      type_qualified_name: tosca:Endpoint.Public
-      specification: tosca-simple-1.0
-      specification_section: 5.4.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ENDPOINT_PUBLIC'
-    description: >-
-      This capability represents a public endpoint which is accessible to the general internet (and its public IP address ranges).
-  
-      This public endpoint capability also can be used to create a floating (IP) address that the underlying network assigns from a
-      pool allocated from the application's underlying public network. This floating address is managed by the underlying network
-      such that it can be routed to an application's private address and remains reliable to internet clients.
-    derived_from: tosca.capabilities.Endpoint
-    properties:
-      network_name:
-        type: string
-        constraints:
-          - equal: PUBLIC
-        default: PUBLIC
-      floating:
-        description: >-
-          Indicates that the public address should be allocated from a pool of floating IPs that are associated with the network.
-        type: boolean
-        default: false
-        status: experimental
-      dns_name:
-        description: >-
-          The optional name to register with DNS.
-        type: string
-        required: false
-        status: experimental
-
-  tosca.capabilities.Endpoint.Admin:
-    _extensions:
-      shorthand_name: Endpoint.Admin
-      type_qualified_name: tosca:Endpoint.Admin
-      specification: tosca-simple-1.0
-      specification_section: 5.4.6
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ENDPOINT_ADMIN'
-    description: >-
-      This is the default TOSCA type that should be used or extended to define a specialized administrator endpoint capability.
-    derived_from: tosca.capabilities.Endpoint
-    properties:
-      secure:
-        description: >-
-          Requests for the endpoint to be secure and use credentials supplied on the ConnectsTo relationship.
-        type: boolean
-        constraints:
-          - equal: true
-        default: true
-
-  tosca.capabilities.Endpoint.Database:
-    _extensions:
-      shorthand_name: Endpoint.Database
-      type_qualified_name: tosca:Endpoint.Database
-      specification: tosca-simple-1.0
-      specification_section: 5.4.7
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_ENDPOINT_DATABASE'
-    description: >-
-      This is the default TOSCA type that should be used or extended to define a specialized database endpoint capability.
-    derived_from: tosca.capabilities.Endpoint
-  
-  #
-  # Network
-  #
-
-  tosca.capabilities.network.Bindable:
-    _extensions:
-      shorthand_name: Bindable # ARIA NOTE: mistake in spec? has "network." as a prefix 
-      type_qualified_name: tosca:Bindable
-      specification: tosca-simple-1.0
-      specification_section: 5.4.11
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_NETWORK_BINDABLE'
-    description: >-
-      A node type that includes the Bindable capability indicates that it can be bound to a logical network association via a
-      network port.
-    derived_from: tosca.capabilities.Node
-
-  tosca.capabilities.network.Linkable:
-    _extensions:
-      shorthand_name: Linkable
-      type_qualified_name: tosca:Linkable
-      specification: tosca-simple-1.0
-      specification_section: 7.5.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_NETWORK_LINKABLE'
-    description: >-
-      A node type that includes the Linkable capability indicates that it can be pointed to by the tosca.relationships.network.LinksTo
-      relationship type.
-    derived_from: tosca.capabilities.Node
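(Illustrative sketch, not part of the deleted file; the values are hypothetical.) Capability properties such as those of Container and OperatingSystem are typically assigned on a Compute node template:

    node_templates:
      server:
        type: tosca.nodes.Compute
        capabilities:
          host:
            properties:
              num_cpus: 2
              mem_size: 4 GB
              disk_size: 10 GB
          os:
            properties:
              type: linux
              distribution: ubuntu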
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/data.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/data.yaml
deleted file mode 100644
index 771a969..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/data.yaml
+++ /dev/null
@@ -1,268 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-data_types:
-
-  #
-  # Primitive
-  #
-  
-  timestamp:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_timestamp
-
-  version:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_version
-      type_qualified_name: tosca:version
-      specification: tosca-simple-1.0
-      specification_section: 3.2.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_VERSION'
-
-  range:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_range
-      type_qualified_name: tosca:range
-      specification: tosca-simple-1.0
-      specification_section: 3.2.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_RANGE'
-
-  #
-  # With entry schema
-  #
-
-  list:
-    _extensions:
-      use_entry_schema: true
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_list
-      type_qualified_name: tosca:list
-      specification: tosca-simple-1.0
-      specification_section: 3.2.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_LIST'
-
-  map:
-    _extensions:
-      use_entry_schema: true
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_map_value
-      type_qualified_name: tosca:map
-      specification: tosca-simple-1.0
-      specification_section: 3.2.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_MAP'
-  
-  #
-  # Scalar
-  #
-
-  scalar-unit.size:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_scalar_unit_size
-      type_qualified_name: tosca:scalar-unit.size
-      specification: tosca-simple-1.0
-      specification_section: 3.2.6.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_SCALAR_UNIT_SIZE'
-
-  scalar-unit.time:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_scalar_unit_time
-      type_qualified_name: tosca:scalar-unit.time
-      specification: tosca-simple-1.0
-      specification_section: 3.2.6.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_SCALAR_UNIT_TIME'
-
-  scalar-unit.frequency:
-    _extensions:
-      coerce_value: aria_extension_tosca.simple_v1_0.data_types.coerce_scalar_unit_frequency
-      type_qualified_name: tosca:scalar-unit.frequency
-      specification: tosca-simple-1.0
-      specification_section: 3.2.6.6
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_SCALAR_UNIT_FREQUENCY'
-
-  #
-  # Complex
-  #
-
-  tosca.datatypes.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.2.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_ROOT'
-    description: >-
-      This is the default (root) TOSCA Root Type definition that all complex TOSCA Data Types derive from.
-
-  tosca.datatypes.Credential:
-    _extensions:
-      shorthand_name: Credential
-      type_qualified_name: tosca:Credential
-      specification: tosca-simple-1.0
-      specification_section: 5.2.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_CREDENTIAL'
-    description: >-
-      The Credential type is a complex TOSCA data Type used when describing authorization credentials used to access network
-      accessible resources.
-    derived_from: tosca.datatypes.Root
-    properties:
-      protocol:
-        description: >-
-          The optional protocol name.
-        type: string
-        required: false
-      token_type:
-        description: >-
-          The required token type.
-        type: string
-        default: password
-      token:
-        description: >-
-          The required token used as a credential for authorization or access to a networked resource.
-        type: string
-        required: false
-      keys:
-        description: >-
-          The optional list of protocol-specific keys or assertions.
-        type: map
-        entry_schema:
-          type: string
-        required: false
-      user:
-        description: >-
-          The optional user (name or ID) used for non-token based credentials.
-        type: string
-        required: false
-  
-  tosca.datatypes.network.NetworkInfo:
-    _extensions:
-      shorthand_name: NetworkInfo
-      type_qualified_name: tosca:NetworkInfo
-      specification: tosca-simple-1.0
-      specification_section: 5.2.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_NETWORKINFO'
-    description: >-
-      The Network type is a complex TOSCA data type used to describe logical network information.
-    derived_from: tosca.datatypes.Root
-    properties:
-      network_name:
-        description: >-
-          The name of the logical network, e.g., "public", "private", "admin", etc.
-        type: string
-        required: false
-      network_id:
-        description: >-
-          The unique ID for the network generated by the network provider.
-        type: string
-        required: false
-      addresses:
-        description: >-
-          The list of IP addresses assigned from the underlying network.
-        type: list
-        entry_schema:
-          type: string
-        required: false
-  
-  tosca.datatypes.network.PortInfo:
-    _extensions:
-      shorthand_name: PortInfo
-      type_qualified_name: tosca:PortInfo
-      specification: tosca-simple-1.0
-      specification_section: 5.2.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_PORTINFO'
-    description: >-
-      The PortInfo type is a complex TOSCA data type used to describe network port information.
-    derived_from: tosca.datatypes.Root
-    properties:
-      port_name:
-        description: >-
-          The logical network port name.
-        type: string
-        required: false
-      port_id:
-        description: >-
-          The unique ID for the network port generated by the network provider.
-        type: string
-        required: false
-      network_id:
-        description: >-
-          The unique ID for the network.
-        type: string
-        required: false
-      mac_address:
-        description: >-
-          The unique media access control address (MAC address) assigned to the port.
-        type: string
-        required: false
-      addresses:
-        description: >-
-          The list of IP address(es) assigned to the port.
-        type: list
-        entry_schema:
-          type: string
-        required: false
-  
-  tosca.datatypes.network.PortDef:
-    _extensions:
-      shorthand_name: PortDef
-      type_qualified_name: tosca:PortDef
-      specification: tosca-simple-1.0
-      specification_section: 5.2.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_PORTDEF'
-    description: >-
-      The PortDef type is a TOSCA data Type used to define a network port.
-    derived_from: integer # ARIA NOTE: we allow deriving from primitives
-    constraints:
-      - in_range: [ 1, 65535 ]
-
-  tosca.datatypes.network.PortSpec:
-    _extensions:
-      shorthand_name: PortSpec
-      type_qualified_name: tosca:PortSpec
-      specification: tosca-simple-1.0
-      specification_section: 5.2.6
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#TYPE_TOSCA_DATA_PORTSPEC'
-    description: >-
-      The PortSpec type is a complex TOSCA data Type used when describing port specifications for a network connection.
-    derived_from: tosca.datatypes.Root
-    properties:
-      protocol:
-        description: >-
-          The required protocol used on the port.
-        type: string
-        constraints:
-          - valid_values: [ udp, tcp, igmp ]
-        default: tcp
-      source:
-        description: >-
-          The optional source port.
-        type: tosca.datatypes.network.PortDef
-        required: false
-      source_range:
-        description: >-
-          The optional range for source port.
-        type: range
-        constraints:
-          - in_range: [ 1, 65535 ]
-        required: false
-      target:
-        description: >-
-          The optional target port.
-        type: tosca.datatypes.network.PortDef
-        required: false
-      target_range:
-        description: >-
-          The optional range for target port.
-        type: range
-        constraints:
-          - in_range: [ 1, 65535 ]
-        required: false
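(Illustrative sketch, not part of the deleted file; the credential values are hypothetical.) A complex data type such as tosca.datatypes.Credential is assigned as a nested map on a property declared with that type, e.g. the admin_credential property of tosca.nodes.SoftwareComponent:

    node_templates:
      my_component:
        type: tosca.nodes.SoftwareComponent
        properties:
          admin_credential:
            user: admin
            token: changeme        # hypothetical; a real template would take this from an input
            token_type: password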
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml
deleted file mode 100644
index 66cc25f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-group_types:
-
-  tosca.groups.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.9.1
-    description: >-
-      This is the default (root) TOSCA Group Type definition that all other TOSCA base Group Types derive from.
-    interfaces:
-      Standard:
-        type: tosca.interfaces.node.lifecycle.Standard
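(Illustrative sketch, not part of the deleted file; member names are hypothetical.) A group definition in a topology_template references node templates as members and inherits the Standard lifecycle interface declared above:

    topology_template:
      groups:
        web_tier:
          type: tosca.groups.Root
          members: [ server_1, server_2 ]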
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
deleted file mode 100644
index 473bd98..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
+++ /dev/null
@@ -1,107 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-interface_types:
-
-  tosca.interfaces.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.7.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Ref384391055'
-    description: >-
-      This is the default (root) TOSCA Interface Type definition that all other TOSCA Interface Types derive from.
-  
-  tosca.interfaces.node.lifecycle.Standard:
-    _extensions:
-      shorthand_name: Standard
-      type_qualified_name: tosca:Standard
-      specification: tosca-simple-1.0
-      specification_section: 5.7.4
-    description: >-
-      This lifecycle interface defines the essential, normative operations that TOSCA nodes may support.
-    derived_from: tosca.interfaces.Root
-    create:
-      description: >-
-        Standard lifecycle create operation.
-    configure:
-      description: >-
-        Standard lifecycle configure operation.
-    start:
-      description: >-
-        Standard lifecycle start operation.
-    stop:
-      description: >-
-        Standard lifecycle stop operation.
-    delete:
-      description: >-
-        Standard lifecycle delete operation.
-
-  tosca.interfaces.relationship.Configure:
-    _extensions:
-      shorthand_name: Configure
-      type_qualified_name: tosca:Configure
-      specification: tosca-simple-1.0
-      specification_section: 5.7.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_ITFC_RELATIONSHIP_CONFIGURE'
-    description: >-
-      The lifecycle interfaces define the essential, normative operations that each TOSCA Relationship Type may support.
-    derived_from: tosca.interfaces.Root
-    pre_configure_source:
-      description: >-
-        Operation to pre-configure the source endpoint.
-      _extensions:
-        relationship_edge: source
-    pre_configure_target:
-      description: >-
-        Operation to pre-configure the target endpoint.
-      _extensions:
-        relationship_edge: target
-    post_configure_source:
-      description: >-
-        Operation to post-configure the source endpoint.
-      _extensions:
-        relationship_edge: source
-    post_configure_target:
-      description: >-
-        Operation to post-configure the target endpoint.
-      _extensions:
-        relationship_edge: target
-    add_target:
-      description: >-
-        Operation to notify the source node of a target node being added via a relationship.
-      _extensions:
-        relationship_edge: source
-    add_source:
-      description: >-
-        Operation to notify the target node of a source node which is now available via a relationship.
-      _extensions:
-        relationship_edge: target
-    target_changed:
-      description: >-
-        Operation to notify the source node that some property or attribute of the target changed.
-      _extensions:
-        relationship_edge: source
-    remove_target:
-      description: >-
-        Operation to remove a target node.
-      _extensions:
-        relationship_edge: source
-    remove_source:
-      description: >-
-        Operation to remove the source node.
-      _extensions:
-        relationship_edge: target
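(Illustrative sketch, not part of the deleted file; script paths and input values are hypothetical.) Node templates assign implementations to the Standard lifecycle operations in either short or extended form:

    node_templates:
      my_component:
        type: tosca.nodes.SoftwareComponent
        interfaces:
          Standard:
            create: scripts/create.sh        # short form: just the implementation
            configure:                       # extended form with inputs
              implementation: scripts/configure.sh
              inputs:
                port: 8080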
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
deleted file mode 100644
index 1d2fe90..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
+++ /dev/null
@@ -1,525 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-node_types:
-
-  tosca.nodes.Root:
-    _extensions:
-      shorthand_name: Root
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.8.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_ROOT'
-    description: >-
-      The TOSCA Root Node Type is the default type that all other TOSCA base Node Types derive from. This allows for all TOSCA nodes to have a consistent set of features for modeling and management (e.g., consistent definitions for requirements, capabilities and lifecycle interfaces).
-    attributes:
-      tosca_id:
-        description: >-
-          A unique identifier of the realized instance of a Node Template that derives from any TOSCA normative type.
-        type: string
-      tosca_name:
-        description: >-
-          This attribute reflects the name of the Node Template as defined in the TOSCA service template. This name is not unique
-          to the realized instance model of corresponding deployed application as each template in the model can result in one or
-          more instances (e.g., scaled) when orchestrated to a provider environment.
-        type: string
-      state:
-        description: >-
-          The state of the node instance.
-        type: string
-        default: initial
-    interfaces:
-      Standard:
-        type: tosca.interfaces.node.lifecycle.Standard
-    capabilities:
-      feature:
-        type: tosca.capabilities.Node  
-    requirements:
-      - dependency:
-          capability: tosca.capabilities.Node
-          node: tosca.nodes.Root
-          relationship: tosca.relationships.DependsOn
-          occurrences: [ 0, UNBOUNDED ]
-  
-  tosca.nodes.Compute:
-    _extensions:
-      shorthand_name: Compute
-      type_qualified_name: tosca:Compute
-      specification: tosca-simple-1.0
-      specification_section: 5.8.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_COMPUTE'
-      role: host
-    description: >-
-      The TOSCA Compute node represents one or more real or virtual processors of software applications or services along with
-      other essential local resources. Collectively, the resources the compute node represents can logically be viewed as a (real
-      or virtual) "server".
-    derived_from: tosca.nodes.Root
-    attributes:
-      private_address:
-        description: >-
-          The primary private IP address assigned by the cloud provider that applications may use to access the Compute node.
-        type: string
-      public_address:
-        description: >-
-          The primary public IP address assigned by the cloud provider that applications may use to access the Compute node.
-        type: string
-      networks:
-        description: >-
-          The list of logical networks assigned to the compute host instance and information about them.
-        type: map
-        entry_schema:
-          type: tosca.datatypes.network.NetworkInfo
-      ports:
-        description: >-
-          The list of logical ports assigned to the compute host instance and information about them.
-        type: map
-        entry_schema:
-          type: tosca.datatypes.network.PortInfo
-    capabilities:
-      host:
-         type: tosca.capabilities.Container
-         valid_source_types: [ tosca.nodes.SoftwareComponent ]
-      binding:
-         type: tosca.capabilities.network.Bindable
-      os:
-         type: tosca.capabilities.OperatingSystem
-      scalable:
-         type: tosca.capabilities.Scalable
-    requirements:
-      - local_storage:
-          capability: tosca.capabilities.Attachment
-          node: tosca.nodes.BlockStorage
-          relationship: tosca.relationships.AttachesTo
-          occurrences: [ 0, UNBOUNDED ]
-
-  tosca.nodes.LoadBalancer:
-    _extensions:
-      shorthand_name: LoadBalancer
-      type_qualified_name: tosca:LoadBalancer
-      specification: tosca-simple-1.0
-      specification_section: 5.8.12
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc379548332'
-    description: >-
-      The TOSCA Load Balancer node represents a logical function that can be used in conjunction with a Floating Address to distribute an
-      application's traffic (load) across a number of instances of the application (e.g., for a clustered or scaled application).
-    derived_from: tosca.nodes.Root
-    properties:
-      algorithm:
-        description: >-
-          No description in spec.
-        type: string
-        required: false
-        status: experimental
-    capabilities:
-      client:
-        description: >-
-          The Floating (IP) address that clients on the public network can connect to.
-        type: tosca.capabilities.Endpoint.Public
-        occurrences: [ 0, UNBOUNDED ] # ARIA NOTE: it seems unnecessary to specify this, as it is the implied default
-    requirements:
-      - application:
-          capability: tosca.capabilities.Endpoint
-          relationship: tosca.relationships.RoutesTo
-          occurrences: [ 0, UNBOUNDED ]
-  
-  #
-  # Software
-  #
-  
-  tosca.nodes.SoftwareComponent:
-    _extensions:
-      shorthand_name: SoftwareComponent
-      type_qualified_name: tosca:SoftwareComponent
-      specification: tosca-simple-1.0
-      specification_section: 5.8.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_SOFTWARE_COMPONENT'
-    description: >-
-      The TOSCA SoftwareComponent node represents a generic software component that can be managed and run by a TOSCA Compute Node
-      Type.
-    derived_from: tosca.nodes.Root
-    properties:
-      component_version:
-        description: >-
-          The optional software component's version.
-        type: version
-        required: false
-      admin_credential:
-        description: >-
-          The optional credential that can be used to authenticate to the software component.
-        type: tosca.datatypes.Credential
-        required: false
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container
-          node: tosca.nodes.Compute
-          relationship: tosca.relationships.HostedOn
-
-  tosca.nodes.WebServer:
-    _extensions:
-      shorthand_name: WebServer
-      type_qualified_name: tosca:WebServer
-      specification: tosca-simple-1.0
-      specification_section: 5.8.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_WEBSERVER'
-    description: >-
-      This TOSCA WebServer Node Type represents an abstract software component or service that is capable of hosting and providing
-      management operations for one or more WebApplication nodes.
-    derived_from: tosca.nodes.SoftwareComponent
-    capabilities:
-      data_endpoint:
-        type: tosca.capabilities.Endpoint
-      admin_endpoint:
-        type: tosca.capabilities.Endpoint.Admin
-      host:
-        type: tosca.capabilities.Container
-        valid_source_types: [ tosca.nodes.WebApplication ]
-
-  tosca.nodes.WebApplication:
-    _extensions:
-      shorthand_name: WebApplication
-      type_qualified_name: tosca:WebApplication
-      specification: tosca-simple-1.0
-      specification_section: 5.8.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_WEBAPPLICATION'
-    description: >-
-      The TOSCA WebApplication node represents a software application that can be managed and run by a TOSCA WebServer node.
-      Specific types of web applications such as Java, etc. could be derived from this type.
-    derived_from: tosca.nodes.SoftwareComponent # ARIA NOTE: the spec says tosca.nodes.Root
-    properties:
-      context_root:
-        description: >-
-          The web application's context root which designates the application's URL path within the web server it is hosted on.
-        type: string
-        required: false
-    capabilities:
-      app_endpoint:
-        type: tosca.capabilities.Endpoint
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container
-          node: tosca.nodes.WebServer
-          relationship: tosca.relationships.HostedOn
-  
-  tosca.nodes.DBMS:
-    _extensions:
-      shorthand_name: DBMS # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:DBMS
-      specification: tosca-simple-1.0
-      specification_section: 5.8.6
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_DBMS'
-    description: >-
-      The TOSCA DBMS node represents a typical relational, SQL Database Management System software component or service.
-    derived_from: tosca.nodes.SoftwareComponent
-    properties:
-      root_password:
-        description: >-
-          The optional root password for the DBMS server.
-        type: string
-        required: false
-      port:
-        description: >-
-          The DBMS server's port.
-        type: integer
-        required: false
-    capabilities:
-      host:
-        type: tosca.capabilities.Container
-        valid_source_types: [ tosca.nodes.Database ]
-
-  tosca.nodes.Database:
-    _extensions:
-      shorthand_name: Database
-      type_qualified_name: tosca:Database
-      specification: tosca-simple-1.0
-      specification_section: 5.8.7
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_DATABASE'
-    description: >-
-      The TOSCA Database node represents a logical database that can be managed and hosted by a TOSCA DBMS node.
-    derived_from: tosca.nodes.Root # ARIA NOTE: it's *not* a SoftwareComponent
-    properties:
-      name:
-        description: >-
-          The logical database Name.
-        type: string
-      port:
-        description: >-
-          The port the database service will use to listen for incoming data and requests.
-        type: integer
-        required: false
-      user:
-        description: >-
-          The special user account used for database administration.
-        type: string
-        required: false
-      password:
-        description: >-
-          The password associated with the user account provided in the 'user' property.
-        type: string
-        required: false
-    capabilities:
-      database_endpoint:
-        type: tosca.capabilities.Endpoint.Database
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container
-          node: tosca.nodes.DBMS
-          relationship: tosca.relationships.HostedOn
-  
-  #
-  # Container
-  #
-
-  tosca.nodes.Container.Runtime:
-    _extensions:
-      shorthand_name: Container.Runtime
-      type_qualified_name: tosca:Container.Runtime
-      specification: tosca-simple-1.0
-      specification_section: 5.8.10
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_CONTAINER_RUNTIME'
-    description: >-
-      The TOSCA Container Runtime node represents operating system-level virtualization technology used to run multiple application
-      services on a single Compute host.
-    derived_from: tosca.nodes.SoftwareComponent
-    capabilities:
-      host:
-        type: tosca.capabilities.Container
-      scalable:
-        type: tosca.capabilities.Scalable
-
-  tosca.nodes.Container.Application:
-    _extensions:
-      shorthand_name: Container.Application
-      type_qualified_name: tosca:Container.Application
-      specification: tosca-simple-1.0
-      specification_section: 5.8.11
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_CONTAINER_APP'
-    description: >-
-      The TOSCA Container Application node represents an application that requires Container-level virtualization technology.
-    derived_from: tosca.nodes.Root
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container
-          # ARIA NOTE: seems a mistake in the spec
-          #node: tosca.nodes.Container
-          relationship: tosca.relationships.HostedOn
-
-  #
-  # Storage
-  #
-
-  tosca.nodes.ObjectStorage:
-    _extensions:
-      shorthand_name: ObjectStorage
-      type_qualified_name: tosca:ObjectStorage
-      specification: tosca-simple-1.0
-      specification_section: 5.8.8
-    description: >-
-      The TOSCA ObjectStorage node represents storage that provides the ability to store data as objects (or BLOBs of data) without
-      consideration for the underlying filesystem or devices.
-    derived_from: tosca.nodes.Root
-    properties:
-      name:
-        description: >-
-          The logical name of the object store (or container).
-        type: string
-      size:
-        description: >-
-          The requested initial storage size (default unit is in Gigabytes).
-        type: scalar-unit.size
-        constraints:
-          - greater_or_equal: 0 GB
-        required: false
-      maxsize:
-        description: >-
-          The requested maximum storage size (default unit is in Gigabytes).
-        type: scalar-unit.size
-        constraints:
-          - greater_or_equal: 0 GB
-        required: false
-    capabilities:
-      storage_endpoint:
-        type: tosca.capabilities.Endpoint
-  
-  tosca.nodes.BlockStorage:
-    _extensions:
-      shorthand_name: BlockStorage
-      type_qualified_name: tosca:BlockStorage
-      specification: tosca-simple-1.0
-      specification_section: 5.8.9
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_BLOCK_STORAGE'
-    description: >-
-    derived_from: tosca.nodes.Root
-    properties:
-      size:
-        description: >-
-          The requested storage size (default unit is MB).
-        type: scalar-unit.size
-        constraints:
-          - greater_or_equal: 1 MB
-      volume_id:
-        description: >-
-          ID of an existing volume (that is in the accessible scope of the requesting application).
-        type: string
-        required: false
-      snapshot_id:
-        description: >-
-          Some identifier that represents an existing snapshot that should be used when creating the block storage (volume).
-        type: string
-        required: false
-    capabilities:
-      attachment:
-        type: tosca.capabilities.Attachment
-
-  #
-  # Network
-  #
-
-  tosca.nodes.network.Network:
-    _extensions:
-      shorthand_name: Network
-      type_qualified_name: tosca:Network
-      specification: tosca-simple-1.0
-      specification_section: 7.5.1
-    description: >-
-      The TOSCA Network node represents a simple, logical network service.
-    derived_from: tosca.nodes.Root
-    properties:
-      ip_version:
-        description: >-
-          The IP version of the requested network.
-        type: integer
-        constraints:
-          - valid_values: [ 4, 6 ]
-        default: 4
-        required: false
-      cidr:
-        description: >-
-          The cidr block of the requested network.
-        type: string
-        required: false
-      start_ip:
-        description: >-
-          The IP address to be used as the 1st one in a pool of addresses derived from the cidr block full IP range.
-        type: string
-        required: false
-      end_ip:
-        description: >-
-          The IP address to be used as the last one in a pool of addresses derived from the cidr block full IP range.
-        type: string
-        required: false
-      gateway_ip:
-        description: >-
-          The gateway IP address.
-        type: string
-        required: false
-      network_name:
-        description: >-
-          An identifier that represents an existing Network instance in the underlying cloud infrastructure, or the name to be
-          used for the newly created network.
-        type: string
-        required: false
-      network_id:
-        description: >-
-          An Identifier that represents an existing Network instance in the underlying cloud infrastructure. This property is
-          mutually exclusive with all other properties except network_name.
-        type: string
-        required: false
-      segmentation_id:
-        description: >-
-          A segmentation identifier in the underlying cloud infrastructure (e.g., VLAN id, GRE tunnel id). If the segmentation_id
-          is specified, the network_type or physical_network properties should be provided as well.
-        type: string
-        required: false
-      network_type:
-        description: >-
-          Optionally, specifies the nature of the physical network in the underlying cloud infrastructure. Examples are flat, vlan,
-          gre or vxlan. For flat and vlan types, physical_network should be provided too.
-        type: string
-        required: false
-      physical_network:
-        description: >-
-          Optionally, identifies the physical network on top of which the network is implemented, e.g. physnet1. This property is
-          required if network_type is flat or vlan.
-        type: string
-        required: false
-      dhcp_enabled:
-        description: >-
-          Indicates to the TOSCA container whether to create the virtual network instance with or without a DHCP service.
-        type: boolean
-        default: true
-        required: false
-    capabilities:
-      link:
-        type: tosca.capabilities.network.Linkable
-  
-  tosca.nodes.network.Port:
-    _extensions:
-      shorthand_name: Port
-      type_qualified_name: tosca:Port
-      specification: tosca-simple-1.0
-      specification_section: 7.5.2
-    description: >-
-      The TOSCA Port node represents a logical entity that associates between Compute and Network normative types.
-      
-      The Port node type effectively represents a single virtual NIC on the Compute node instance.
-    derived_from: tosca.nodes.Root
-    properties:
-      ip_address:
-        description: >-
-          Allows the user to set a fixed IP address. Note that this address is a request to the provider, which will attempt to
-          fulfill it but may not be able to, depending on the network the port is associated with.
-        type: string
-        required: false
-      order:
-        description: >-
-          The order of the NIC on the compute instance (e.g. eth2). Note: when binding more than one port to a single compute (aka
-          multi vNICs) and ordering is desired, it is *mandatory* that all ports be set with an order value. The order values must
-          form an arithmetic progression of non-negative integers starting at 0 (e.g. 0, 1, 2, ..., n).
-        type: integer
-        constraints:
-          - greater_or_equal: 0
-        default: 0
-        required: false
-      is_default:
-        description: >-
-          Set is_default=true to apply a default gateway route on the running compute instance to the associated network gateway.
-          Only one port associated with a single compute node can be set as default=true.
-        type: boolean
-        default: false
-        required: false
-      ip_range_start:
-        description: >-
-          Defines the starting IP of a range to be allocated for the compute instances associated with this Port. If this property
-          is not set, the IP allocation is done from the entire CIDR block of the network.
-        type: string
-        required: false
-      ip_range_end:
-        description: >-
-          Defines the ending IP of a range to be allocated for the compute instances associated with this Port. If this property
-          is not set, the IP allocation is done from the entire CIDR block of the network.
-        type: string
-        required: false
-    attributes:
-      ip_address:
-        description: >-
-          The IP address that would be assigned to the associated compute instance.
-        type: string
-    requirements:
-      - link:
-          capability: tosca.capabilities.network.Linkable
-          relationship: tosca.relationships.network.LinksTo
-      - binding:
-          capability: tosca.capabilities.network.Bindable
-          relationship: tosca.relationships.network.BindsTo
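For illustration, a minimal topology_template sketch of how these network types are typically wired together; the node template names and property values below are assumptions, not part of the profile:

    topology_template:
      node_templates:
        my_network:
          type: tosca.nodes.network.Network
          properties:
            ip_version: 4
            cidr: 192.168.0.0/24
        my_server:
          type: tosca.nodes.Compute
        my_port:
          # a single virtual NIC: linked to my_network, bound to my_server
          type: tosca.nodes.network.Port
          properties:
            order: 0
          requirements:
            - link: my_network
            - binding: my_server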
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/policies.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/policies.yaml
deleted file mode 100644
index c65e38b..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/policies.yaml
+++ /dev/null
@@ -1,71 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-policy_types:
-
-  tosca.policies.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.10.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_POLICIES_ROOT'
-    description: >-
-      This is the default (root) TOSCA Policy Type definition that all other TOSCA base Policy Types derive from.
-  
-  tosca.policies.Placement:
-    _extensions:
-      shorthand_name: Placement # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Placement
-      specification: tosca-simple-1.0
-      specification_section: 5.10.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_POLICIES_PLACEMENT'
-    description: >-
-      This is the default (root) TOSCA Policy Type definition that is used to govern placement of TOSCA nodes or groups of nodes.
-    derived_from: tosca.policies.Root
-  
-  tosca.policies.Scaling:
-    _extensions:
-      shorthand_name: Scaling # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Scaling
-      specification: tosca-simple-1.0
-      specification_section: 5.10.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_POLICIES_SCALING'
-    description: >-
-      This is the default (root) TOSCA Policy Type definition that is used to govern scaling of TOSCA nodes or groups of nodes.
-    derived_from: tosca.policies.Root
-  
-  tosca.policies.Update:
-    _extensions:
-      shorthand_name: Update # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Update
-      specification: tosca-simple-1.0
-      specification_section: 5.10.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_POLICIES_UPDATE'
-    description: >-
-      This is the default (root) TOSCA Policy Type definition that is used to govern update of TOSCA nodes or groups of nodes.
-    derived_from: tosca.policies.Root
-  
-  tosca.policies.Performance:
-    _extensions:
-      shorthand_name: Performance # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Performance
-      specification: tosca-simple-1.0
-      specification_section: 5.10.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_POLICIES_PERFORMANCE'
-    description: >-
-      This is the default (root) TOSCA Policy Type definition that is used to declare performance requirements for TOSCA nodes or
-      groups of nodes.
-    derived_from: tosca.policies.Root
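As a usage sketch (the policy and node template names are assumptions), these policy types are normally attached to node templates or groups through the policies section of a topology_template:

    topology_template:
      node_templates:
        web_server:
          type: tosca.nodes.Compute
      policies:
        - scale_web:
            type: tosca.policies.Scaling
            targets: [ web_server ]   # node templates (or groups) governed by the policy
        - place_web:
            type: tosca.policies.Placement
            targets: [ web_server ]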
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/relationships.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/relationships.yaml
deleted file mode 100644
index b9d3176..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/relationships.yaml
+++ /dev/null
@@ -1,158 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-relationship_types:
-
-  tosca.relationships.Root:
-    _extensions:
-      shorthand_name: Root # ARIA NOTE: omitted in the spec
-      type_qualified_name: tosca:Root
-      specification: tosca-simple-1.0
-      specification_section: 5.6.1
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_ROOT'
-    description: >-
-      This is the default (root) TOSCA Relationship Type definition that all other TOSCA Relationship Types derive from.
-    attributes:
-      tosca_id:
-        description: >-
-          A unique identifier of the realized instance of a Relationship Template that derives from any TOSCA normative type.
-        type: string
-      tosca_name:
-        description: >-
-          This attribute reflects the name of the Relationship Template as defined in the TOSCA service template. This name is not
-          unique to the realized instance model of corresponding deployed application as each template in the model can result in
-          one or more instances (e.g., scaled) when orchestrated to a provider environment.
-        type: string
-      state:
-        description: >-
-          The state of the relationship instance.
-        type: string
-        default: initial
-    interfaces:
-      Configure:
-        type: tosca.interfaces.relationship.Configure
-  
-  tosca.relationships.DependsOn:
-    _extensions:
-      shorthand_name: DependsOn
-      type_qualified_name: tosca:DependsOn
-      specification: tosca-simple-1.0
-      specification_section: 5.6.2
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_DEPENDSON'
-    description: >-
-      This type represents a general dependency relationship between two nodes.
-    derived_from: tosca.relationships.Root
-    valid_target_types: [ tosca.capabilities.Node ]
-  
-  tosca.relationships.HostedOn:
-    _extensions:
-      shorthand_name: HostedOn
-      type_qualified_name: tosca:HostedOn
-      specification: tosca-simple-1.0
-      specification_section: 5.6.3
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_HOSTEDON'
-    description: >-
-      This type represents a hosting relationship between two nodes.
-    derived_from: tosca.relationships.Root
-    valid_target_types: [ tosca.capabilities.Container ]
-  
-  tosca.relationships.ConnectsTo:
-    _extensions:
-      shorthand_name: ConnectsTo
-      type_qualified_name: tosca:ConnectsTo
-      specification: tosca-simple-1.0
-      specification_section: 5.6.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_CONNECTSTO'
-    description: >-
-      This type represents a network connection relationship between two nodes.
-    derived_from: tosca.relationships.Root
-    valid_target_types: [ tosca.capabilities.Endpoint ]
-    properties:
-      credential:
-        type: tosca.datatypes.Credential
-        required: false
-  
-  tosca.relationships.AttachesTo:
-    _extensions:
-      shorthand_name: AttachesTo
-      type_qualified_name: tosca:AttachesTo
-      specification: tosca-simple-1.0
-      specification_section: 5.6.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_ATTACHTO'
-    description: >-
-      This type represents an attachment relationship between two nodes. For example, an AttachesTo relationship type would be used
-      for attaching a storage node to a Compute node.
-    derived_from: tosca.relationships.Root
-    valid_target_types: [ tosca.capabilities.Attachment ]
-    properties:
-      location:
-        description: >-
-          The relative location (e.g., path on the file system) which provides the root location to address an attached node,
-          e.g., a mount point / path such as '/usr/data'. Note: the value must be provided by the user and cannot be "root".
-        type: string
-        constraints:
-          - min_length: 1
-      device:
-        description: >-
-          The logical device name for the attached device (which is represented by the target node in the model), e.g.,
-          '/dev/hda1'.
-        type: string
-        required: false
-    attributes:
-      device:
-        description: >-
-          The logical name of the device as exposed to the instance.
-          Note: A runtime property that gets set when the model gets instantiated by the orchestrator.
-        type: string
-  
-  tosca.relationships.RoutesTo:
-    _extensions:
-      shorthand_name: RoutesTo
-      type_qualified_name: tosca:RoutesTo
-      specification: tosca-simple-1.0
-      specification_section: 5.6.6
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc397688815'
-    description: >-
-      This type represents an intentional network routing between two Endpoints in different networks.
-    derived_from: tosca.relationships.ConnectsTo
-    valid_target_types: [ tosca.capabilities.Endpoint ]
-  
-  #
-  # Network
-  #
-  
-  tosca.relationships.network.LinksTo:
-    _extensions:
-      shorthand_name: LinksTo
-      type_qualified_name: tosca:LinksTo
-      specification: tosca-simple-1.0
-      specification_section: 7.5.4
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_NETWORK_LINKSTO'
-    description: >-
-      This relationship type represents an association relationship between Port and Network node types.
-    derived_from: tosca.relationships.DependsOn
-    valid_target_types: [ tosca.capabilities.network.Linkable ]
-  
-  tosca.relationships.network.BindsTo:
-    _extensions:
-      shorthand_name: BindsTo # ARIA NOTE: the spec says "network.BindsTo" which seems wrong
-      type_qualified_name: tosca:BindsTo
-      specification: tosca-simple-1.0
-      specification_section: 7.5.5
-      specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_RELATIONSHIPS_NETWORK_BINDTO'
-    description: >-
-      This type represents a network association relationship between Port and Compute node types.
-    derived_from: tosca.relationships.DependsOn
-    valid_target_types: [ tosca.capabilities.network.Bindable ]
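For example, the AttachesTo type above is commonly used, together with its location property, when attaching block storage to a compute node. A hedged sketch (the template names and the '/data' mount point are assumptions):

    topology_template:
      node_templates:
        my_server:
          type: tosca.nodes.Compute
          requirements:
            - local_storage:
                node: my_volume
                relationship:
                  type: tosca.relationships.AttachesTo
                  properties:
                    location: /data   # root location under which the attached volume is addressed
        my_volume:
          type: tosca.nodes.BlockStorage
          properties:
            size: 10 GB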
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/tosca-simple-1.0.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/tosca-simple-1.0.yaml
deleted file mode 100644
index f8cc520..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/tosca-simple-1.0.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-imports:
-  - artifacts.yaml
-  - capabilities.yaml
-  - data.yaml
-  - groups.yaml
-  - interfaces.yaml
-  - nodes.yaml
-  - policies.yaml
-  - relationships.yaml
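The file above only aggregates the individual profile files; a service template does not import it explicitly. A sketch, assuming ARIA's default behavior of importing the profile implicitly based on the declared version:

    tosca_definitions_version: tosca_simple_yaml_1_0

    topology_template:
      node_templates:
        my_server:
          type: tosca.nodes.Compute   # resolvable because nodes.yaml is part of the aggregated profile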
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/artifacts.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/artifacts.yaml
deleted file mode 100644
index 2427d9f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/artifacts.yaml
+++ /dev/null
@@ -1,84 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-artifact_types:
-
-  tosca.artifacts.nfv.SwImage:
-    _extensions:
-      shorthand_name: SwImage
-      type_qualified_name: tosca:SwImage
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.4.1
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896067'
-    derived_from: tosca.artifacts.Deployment.Image
-    properties:
-      name:
-        description: >-
-          Name of this software image.
-        type: string
-        required: true
-      version:
-        description: >-
-          Version of this software image.
-        type: string
-        required: true
-      checksum:
-        description: >-
-          Checksum of the software image file.
-        type: string
-      container_format:
-        description: >-
-          The container format describes the container file format in which the software image is
-          provided.
-        type: string
-        required: true
-      disk_format:
-        description: >-
-          The disk format of a software image is the format of the underlying disk image.
-        type: string
-        required: true
-      min_disk:
-        description: >-
-          The minimal disk size requirement for this software image.
-        type: scalar-unit.size
-        required: true
-      min_ram:
-        description: >-
-          The minimal RAM size requirement for this software image.
-        type: scalar-unit.size
-        required: false
-      size: # ARIA NOTE: section [5.4.1.1 Properties] calls this field 'Size'
-        description: >-
-          The size of this software image.
-        type: scalar-unit.size
-        required: true
-      sw_image:
-        description: >-
-          A reference to the actual software image within the VNF Package, or a URL.
-        type: string
-        required: true
-      operating_system:
-        description: >-
-          Identifies the operating system used in the software image.
-        type: string
-        required: false
-      supported_virtualization_environment:
-        description: >-
-          Identifies the virtualization environments (e.g. hypervisor) compatible with this software
-          image.
-        type: list
-        entry_schema:
-          type: string
-        required: false
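A loose sketch of declaring a SwImage artifact on a VDU node template; the property assignment on the artifact, the file path and all values are assumptions, and required VDU.Compute properties are omitted for brevity:

    node_templates:
      my_vdu:
        type: tosca.nodes.nfv.VDU.Compute
        artifacts:
          sw_image:
            type: tosca.artifacts.nfv.SwImage
            file: images/my_vnfc.qcow2   # assumed location inside the CSAR/VNF package
            properties:
              name: my_vnfc_image
              version: '1.0'
              checksum: 9af30fce37a4c5c831e095745744d6d2   # assumed value
              container_format: bare
              disk_format: qcow2
              min_disk: 1 GB
              size: 512 MB
              sw_image: images/my_vnfc.qcow2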
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/capabilities.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/capabilities.yaml
deleted file mode 100644
index 7b6363f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/capabilities.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-capability_types:
-
-  tosca.capabilities.nfv.VirtualBindable:
-    _extensions:
-      shorthand_name: VirtualBindable
-      type_qualified_name: tosca:VirtualBindable
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.5.1
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896069'
-    description: >-
-      A node type that includes the VirtualBindable capability indicates that it can be targeted by a
-      tosca.relationships.nfv.VirtualBindsTo relationship type.
-    derived_from: tosca.capabilities.Node
-
-  tosca.capabilities.nfv.Metric:
-    _extensions:
-      shorthand_name: Metric
-      type_qualified_name: tosca:Metric
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.5.2
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896070'
-    description: >-
-      A node type that includes the Metric capability indicates that it can be monitored using an nfv.relationships.Monitor
-      relationship type.
-    derived_from: tosca.capabilities.Endpoint
-
-  tosca.capabilities.nfv.VirtualCompute:
-    _extensions:
-      shorthand_name: VirtualCompute
-      type_qualified_name: tosca:VirtualCompute
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.5.3
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896071'
-    derived_from: tosca.capabilities.Root
-    properties:
-      requested_additional_capabilities:
-        # ARIA NOTE: in section [5.5.3.1 Properties] the name of this property is
-        # "request_additional_capabilities", and its type is not a map, but
-        # tosca.datatypes.nfv.RequestedAdditionalCapability
-        description: >-
-          Describes additional capability for a particular VDU.
-        type: map
-        entry_schema:
-           type: tosca.datatypes.nfv.RequestedAdditionalCapability
-        required: false
-      virtual_memory:
-        description: >-
-          Describes virtual memory of the virtualized compute.
-        type: tosca.datatypes.nfv.VirtualMemory
-        required: true
-      virtual_cpu:
-        description: >-
-          Describes virtual CPU(s) of the virtualized compute.
-        type: tosca.datatypes.nfv.VirtualCpu
-        required: true
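A sketch of assigning the VirtualCompute capability properties on a VDU node template; the values are assumptions, and other required VDU properties are omitted for brevity:

    node_templates:
      my_vdu:
        type: tosca.nodes.nfv.VDU.Compute
        capabilities:
          virtual_compute:
            properties:
              virtual_memory:
                virtual_mem_size: 4 GB
              virtual_cpu:
                cpu_architecture: x86
                num_virtual_cpu: 2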
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/data.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/data.yaml
deleted file mode 100644
index 889dcf7..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/data.yaml
+++ /dev/null
@@ -1,318 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-data_types:
-
-  tosca.datatypes.nfv.L2AddressData:
-    # TBD
-    _extensions:
-      shorthand_name: L2AddressData
-      type_qualified_name: tosca:L2AddressData
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.1
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896055'
-
-  tosca.datatypes.nfv.L3AddressData:
-    _extensions:
-      shorthand_name: L3AddressData
-      type_qualified_name: tosca:L3AddressData
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.2
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896056'
-    description: >-
-      The L3AddressData type is a complex TOSCA data type used to describe the L3AddressData information
-      element as defined in [ETSI GS NFV-IFA 011]; it provides the information on the IP addresses
-      to be assigned to the connection point instantiated from the parent Connection Point
-      Descriptor.
-    derived_from: tosca.datatypes.Root
-    properties:
-      ip_address_assignment:
-        description: >-
-          Specifies whether the address assignment is the responsibility of the management and
-          orchestration function. If set to True, it is the responsibility of the management and
-          orchestration function.
-        type: boolean
-        required: true
-      floating_ip_activated:
-        description: Specifies whether the floating IP scheme is activated on the Connection Point.
-        type: boolean
-        required: true
-      ip_address_type:
-        description: >-
-          Define address type. The address type should be aligned with the address type supported by
-          the layer_protocol properties of the parent VnfExtCpd.
-        type: string
-        required: false
-        constraints:
-          - valid_values: [ ipv4, ipv6 ]
-      number_of_ip_address:
-        description: >-
-          Minimum number of IP addresses to be assigned.
-        type: integer
-        required: false
-
-  tosca.datatypes.nfv.AddressData:
-    _extensions:
-      shorthand_name: AddressData
-      type_qualified_name: tosca:AddressData
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.3
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896057'
-    description: >-
-      The AddressData type is a complex TOSCA data type used to describe the AddressData information
-      element as defined in [ETSI GS NFV-IFA 011]; it provides information on the addresses to be
-      assigned to the connection point(s) instantiated from a Connection Point Descriptor.
-    derived_from: tosca.datatypes.Root
-    properties:
-      address_type:
-        description: >-
-          Describes the type of the address to be assigned to the connection point instantiated from
-          the parent Connection Point Descriptor. The content type shall be aligned with the address
-          type supported by the layerProtocol property of the parent Connection Point Descriptor.
-        type: string
-        required: true
-        constraints:
-          - valid_values: [ mac_address, ip_address ]
-      l2_address_data:
-        # Shall be present when the addressType is mac_address.
-        description: >-
-          Provides the information on the MAC addresses to be assigned to the connection point(s)
-          instantiated from the parent Connection Point Descriptor.
-        type: tosca.datatypes.nfv.L2AddressData # Empty in "GS NFV IFA011 V0.7.3"
-        required: false
-      l3_address_data:
-        # Shall be present when the addressType is ip_address.
-        description: >-
-          Provides the information on the IP addresses to be assigned to the connection point
-          instantiated from the parent Connection Point Descriptor.
-        type: tosca.datatypes.nfv.L3AddressData
-        required: false
-
-  tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements:
-    _extensions:
-      shorthand_name: VirtualNetworkInterfaceRequirements
-      type_qualified_name: tosca:VirtualNetworkInterfaceRequirements
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.4
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896058'
-    description: >-
-      The VirtualNetworkInterfaceRequirements type is a complex TOSCA data type used to describe the
-      VirtualNetworkInterfaceRequirements information element as defined in [ETSI GS NFV-IFA 011];
-      it provides the information needed to specify requirements on a virtual network interface realising the
-      CPs instantiated from this CPD.
-    derived_from: tosca.datatypes.Root
-    properties:
-      name:
-        description: >-
-          Provides a human readable name for the requirement.
-        type: string
-        required: false
-      description:
-        description: >-
-          Provides a human readable description for the requirement.
-        type: string
-        required: false
-      support_mandatory:
-        description: >-
-          Indicates whether fulfilling the constraint is mandatory (TRUE) for successful operation
-          or desirable (FALSE).
-        type: boolean
-        required: false
-      requirement:
-        description: >-
-          Specifies a requirement such as the support of SR-IOV, a particular data plane
-          acceleration library, an API to be exposed by a NIC, etc.
-        type: string # ARIA NOTE: the spec says "not specified", but TOSCA requires a type
-        required: true
-
-  tosca.datatypes.nfv.ConnectivityType:
-    _extensions:
-      shorthand_name: ConnectivityType
-      type_qualified_name: tosca:ConnectivityType
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.5
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896059'
-    description: >-
-      The TOSCA ConnectivityType type is a complex TOSCA data type used to describe ConnectivityType
-      information element as defined in [ETSI GS NFV-IFA 011].
-    derived_from: tosca.datatypes.Root
-    properties:
-      layer_protocol:
-        description: >-
-          Identifies the protocol this VL gives access to (ethernet, mpls, odu2, ipv4, ipv6,
-          pseudo_wire).
-        type: string
-        required: true
-        constraints:
-          - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo_wire ]
-      flow_pattern:
-        description: >-
-          Identifies the flow pattern of the connectivity (Line, Tree, Mesh).
-        type: string
-        required: false
-
-  tosca.datatypes.nfv.RequestedAdditionalCapability:
-    _extensions:
-      shorthand_name: RequestedAdditionalCapability
-      type_qualified_name: tosca:RequestedAdditionalCapability
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.6
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896060'
-    description: >-
-      RequestedAdditionalCapability describes an additional capability for a particular VDU.
-    derived_from: tosca.datatypes.Root
-    properties:
-      request_additional_capability_name:
-        description: >-
-          Identifies a requested additional capability for the VDU.
-        type: string
-        required: true
-      support_mandatory:
-        description: >-
-          Indicates whether the requested additional capability is mandatory for successful
-          operation.
-        type: string
-        required: true
-      min_requested_additional_capability_version:
-        description: >-
-          Identifies the minimum version of the requested additional capability.
-        type: string
-        required: false
-      preferred_requested_additional_capability_version:
-        description: >-
-          Identifies the preferred version of the requested additional capability.
-        type: string
-        required: false
-      target_performance_parameters:
-        description: >-
-          Identifies specific attributes, dependent on the requested additional capability type.
-        type: map
-        entry_schema:
-          type: string
-        required: true
-
-  tosca.datatypes.nfv.VirtualMemory:
-    _extensions:
-      shorthand_name: VirtualMemory
-      type_qualified_name: tosca:VirtualMemory
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.7
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896061'
-    description: >-
-      VirtualMemory describes virtual memory for a particular VDU.
-    derived_from: tosca.datatypes.Root
-    properties:
-      virtual_mem_size:
-        description: Amount of virtual memory.
-        type: scalar-unit.size
-        required: true
-      virtual_mem_oversubscription_policy:
-        description: >-
-          The memory core oversubscription policy in terms of virtual memory to physical memory on
-          the platform. The cardinality can be 0 during the allocation request, if no particular
-          value is requested.
-        type: string
-        required: false
-      numa_enabled:
-        description: >-
-          It specifies the memory allocation to be cognisant of the relevant process/core
-          allocation. The cardinality can be 0 during the allocation request, if no particular value
-          is requested.
-        type: boolean
-        required: false
-
-  tosca.datatypes.nfv.VirtualCpu:
-    _extensions:
-      shorthand_name: VirtualCpu
-      type_qualified_name: tosca:VirtualCpu
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.8
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896062'
-    description: >-
-      VirtualCpu describes the virtual CPU(s) for a particular VDU.
-    derived_from: tosca.datatypes.Root
-    properties:
-      cpu_architecture:
-        description: >-
-          CPU architecture type. Examples are x86, ARM.
-        type: string
-        required: false
-      num_virtual_cpu:
-        description: >-
-          Number of virtual CPUs.
-        type: integer
-        required: true
-      virtual_cpu_clock:
-        description: >-
-          Minimum virtual CPU clock rate.
-        type: scalar-unit.frequency
-        required: false
-      virtual_cpu_oversubscription_policy:
-        description: >-
-          CPU core oversubscription policy.
-        type: string
-        required: false
-      virtual_cpu_pinning:
-        description: >-
-          The virtual CPU pinning configuration for the virtualized compute resource.
-        type: tosca.datatypes.nfv.VirtualCpuPinning
-        required: false
-
-  tosca.datatypes.nfv.VirtualCpuPinning:
-    _extensions:
-      shorthand_name: VirtualCpuPinning
-      type_qualified_name: tosca:VirtualCpuPinning
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.9
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896064'
-    description: >-
-      VirtualCpuPinning describes CPU pinning configuration for a particular CPU.
-    derived_from: tosca.datatypes.Root
-    properties:
-      cpu_pinning_policy:
-        description: >-
-          Indicates the policy for CPU pinning.
-        type: string
-        constraints:
-          - valid_values: [ static, dynamic ]
-        required: false
-      cpu_pinning_map:
-        description: >-
-          If cpuPinningPolicy is defined as "static", the cpuPinningMap provides the map of pinning
-          virtual CPU cores to physical CPU cores/threads.
-        type: map
-        entry_schema:
-          type: string
-        required: false
-
-  tosca.datatypes.nfv.VnfcConfigurableProperties:
-    _extensions:
-      shorthand_name: VnfcconfigurableProperties
-      type_qualified_name: tosca:VnfcconfigurableProperties
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.3.10
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896065'
-    # ARIA NOTE: description is mangled in spec
-    description: >-
-      VnfcConfigurableProperties describes additional configurable properties of a VNFC.
-    derived_from: tosca.datatypes.Root
-    properties:
-      additional_vnfc_configurable_properties:
-        description: >-
-          Describes additional configuration for VNFC.
-        type: map
-        entry_schema:
-          type: string
-        required: false
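To illustrate how these data types nest (all values are assumptions), an address_data entry on a connection point descriptor might be assigned like this:

    address_data:
      - address_type: ip_address
        l3_address_data:
          ip_address_assignment: true
          floating_ip_activated: false
          ip_address_type: ipv4
          number_of_ip_address: 1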
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/nodes.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/nodes.yaml
deleted file mode 100644
index 73f0ecd..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/nodes.yaml
+++ /dev/null
@@ -1,260 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-node_types:
-
-  tosca.nodes.nfv.VDU.Compute:
-    _extensions:
-      shorthand_name: VDU.Compute
-      type_qualified_name: tosca:VDU.Compute
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.9.2
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896079'
-    description: >-
-      The TOSCA nfv.VDU.Compute node type represents the virtual compute part of a VDU entity, which
-      mainly describes the deployment and operational behavior of a VNF component (VNFC), as
-      defined by [ETSI NFV IFA011].
-    derived_from: tosca.nodes.Compute
-    properties:
-      name:
-        description: >-
-          Human readable name of the VDU.
-        type: string
-        required: true
-      description:
-        description: >-
-          Human readable description of the VDU.
-        type: string
-        required: true
-      boot_order:
-        description: >-
-          The key indicates the boot index (lowest index defines highest boot priority).
-          The Value references a descriptor from which a valid boot device is created e.g.
-          VirtualStorageDescriptor from which a VirtualStorage instance is created. If no boot order
-          is defined the default boot order defined in the VIM or NFVI shall be used.
-        type: list # ARIA NOTE: an explicit index (boot index) is unnecessary, contrary to IFA011
-        entry_schema:
-          type: string
-        required: false
-      nfvi_constraints:
-        description: >-
-          Describes constraints on the NFVI for the VNFC instance(s) created from this VDU.
-          For example, aspects of a secure hosting environment for the VNFC instance that involve
-          additional entities or processes. More software images can be attached to the
-          virtualization container using virtual_storage.
-        type: list
-        entry_schema:
-          type: string
-        required: false
-      configurable_properties:
-        description: >-
-          Describes the configurable properties of all VNFC instances based on this VDU.
-        type: map
-        entry_schema:
-          type: tosca.datatypes.nfv.VnfcConfigurableProperties
-        required: true
-    attributes:
-      # ARIA NOTE: The attributes are only described in section [5.9.2.5 Definition], but are not
-      # mentioned in section [5.9.2.2 Attributes]. Additionally, it does not seem to make sense to
-      # deprecate inherited attributes, as it breaks the inheritance contract.
-      private_address:
-        type: string
-        status: deprecated
-      public_address:
-        type: string
-        status: deprecated
-      networks:
-        type: map
-        entry_schema:
-          type: tosca.datatypes.network.NetworkInfo
-        status: deprecated
-      ports:
-        type: map
-        entry_schema:
-          type: tosca.datatypes.network.PortInfo
-        status: deprecated
-    capabilities:
-      virtual_compute:
-        description: >-
-          Describes virtual compute resources capabilities.
-        type: tosca.capabilities.nfv.VirtualCompute
-      virtual_binding:
-        description: >-
-          Defines ability of VirtualBindable.
-        type: tosca.capabilities.nfv.VirtualBindable
-      monitoring_parameter:
-        # ARIA NOTE: commented out in 5.9.2.5
-        description: >-
-          Monitoring parameter, which can be tracked for a VNFC based on this VDU. Examples include:
-          memory-consumption, CPU-utilisation, bandwidth-consumption, VNFC downtime, etc.        
-        type: tosca.capabilities.nfv.Metric
-    #requirements:
-      # ARIA NOTE: virtual_storage is TBD
-      
-      # ARIA NOTE: csd04 attempts to deprecate the inherited local_storage requirement, but this
-      # is not possible in TOSCA
-    artifacts:
-      sw_image:
-        description: >-
-          Describes the software image which is directly loaded on the virtualization container
-          realizing this virtual storage.
-        file: '' # ARIA NOTE: missing value even though it is required in TOSCA
-        type: tosca.artifacts.nfv.SwImage
-
-  tosca.nodes.nfv.VDU.VirtualStorage:
-    _extensions:
-      shorthand_name: VirtualStorage # ARIA NOTE: seems wrong in spec
-      type_qualified_name: tosca:VirtualStorage # ARIA NOTE: seems wrong in spec
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.9.3
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896080'
-    description: >-
-      The NFV VirtualStorage node type represents a virtual storage entity, which describes the
-      deployment and operational behavior of virtual storage resources, as defined by
-      [ETSI NFV IFA011].
-    derived_from: tosca.nodes.Root
-    properties:
-      type_of_storage:
-        description: >-
-          Type of virtualized storage resource.
-        type: string
-        required: true
-      size_of_storage:
-        description: >-
-          Size of virtualized storage resource (in GB).
-        type: scalar-unit.size
-        required: true
-      rdma_enabled:
-        description: >-
-          Indicates whether the storage supports RDMA.
-        type: boolean
-        required: false
-    artifacts:
-      sw_image:
-        description: >-
-          Describes the software image which is directly loaded on the virtualization container
-          realizing this virtual storage.
-        file: '' # ARIA NOTE: missing in spec
-        type: tosca.artifacts.nfv.SwImage
-
-  tosca.nodes.nfv.Cpd:
-    _extensions:
-      shorthand_name: Cpd
-      type_qualified_name: tosca:Cpd
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.9.4
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896081'
-    description: >-
-      The TOSCA nfv.Cpd node represents network connectivity to a compute resource or a VL as defined
-      by [ETSI GS NFV-IFA 011]. This is an abstract type used as parent for the various Cpd types.
-    derived_from: tosca.nodes.Root
-    properties:
-      layer_protocol:
-        description: >-
-          Identifies which protocol the connection point uses for connectivity purposes.
-        type: string
-        constraints:
-          - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo_wire ]
-        required: false
-      role: # Name in ETSI NFV IFA011 v0.7.3 cpRole
-        description: >-
-          Identifies the role of the port in the context of the traffic flow patterns in the VNF or
-          parent NS. For example a VNF with a tree flow pattern within the VNF will have legal
-          cpRoles of ROOT and LEAF.
-        type: string
-        constraints:
-          - valid_values: [ root, leaf ]
-        required: false
-      description:
-        description: >-
-          Provides human-readable information on the purpose of the connection point
-          (e.g. connection point for control plane traffic).
-        type: string
-        required: false
-      address_data:
-        description: >-
-          Provides information on the addresses to be assigned to the connection point(s) instantiated
-          from this Connection Point Descriptor.
-        type: list
-        entry_schema:
-          type: tosca.datatypes.nfv.AddressData
-        required: false
-
-  tosca.nodes.nfv.VduCpd:
-    _extensions:
-       shorthand_name: VduCpd
-       type_qualified_name: tosca:VduCpd
-       specification: tosca-simple-nfv-1.0
-       specification_section: 5.9.5
-       specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896082'
-    description: >-
-      The TOSCA nfv.VduCpd node type represents a type of TOSCA Cpd node and describes network
-      connectivity between a VNFC instance (based on this VDU) and an internal VL as defined by
-      [ETSI GS NFV-IFA 011].
-    derived_from: tosca.nodes.nfv.Cpd
-    properties:
-      bitrate_requirement:
-        description: >-
-          Bitrate requirement on this connection point.
-        type: integer
-        required: false
-      virtual_network_interface_requirements:
-        description: >-
-          Specifies requirements on a virtual network interface realising the CPs instantiated from
-          this CPD.
-        type: list
-        entry_schema:
-          type: VirtualNetworkInterfaceRequirements
-        required: false
-    requirements:
-     # ARIA NOTE: seems to be a leftover from csd03
-     # - virtual_link:
-     #     description: Describes the requirements for linking to virtual link
-     #     capability: tosca.capabilities.nfv.VirtualLinkable
-     #     relationship: tosca.relationships.nfv.VirtualLinksTo
-     #     node: tosca.nodes.nfv.VnfVirtualLinkDesc
-      - virtual_binding:
-          capability: tosca.capabilities.nfv.VirtualBindable
-          relationship: tosca.relationships.nfv.VirtualBindsTo
-          node: tosca.nodes.nfv.VDU.Compute # ARIA NOTE: seems wrong in spec
-
-  tosca.nodes.nfv.VnfVirtualLinkDesc:
-    _extensions:
-       shorthand_name: VnfVirtualLinkDesc
-       type_qualified_name: tosca:VnfVirtualLinkDesc
-       specification: tosca-simple-nfv-1.0
-       specification_section: 5.9.6
-       specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896083'
-    description: >-
-      The TOSCA nfv.VnfVirtualLinkDesc node type represents a logical internal virtual link as
-      defined by [ETSI GS NFV-IFA 011].
-    derived_from: tosca.nodes.Root
-    properties:
-      connectivity_type:
-        description: >-
-          Specifies the protocol exposed by the VL and the flow pattern supported by the VL.
-        type: tosca.datatypes.nfv.ConnectivityType
-        required: true
-      description:
-        description: >-
-          Provides human-readable information on the purpose of the VL (e.g. control plane traffic).
-        type: string
-        required: false
-      test_access:
-        description: >-
-          Test access facilities available on the VL (e.g. none, passive, monitoring, or active
-          (intrusive) loopbacks at endpoints).
-        type: string
-        required: false
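Putting the NFV node types together, a hedged sketch of a minimal topology; the names and values are assumptions, and since linking a Cpd to a VnfVirtualLinkDesc is still TBD in this profile, only the virtual_binding requirement is shown:

    topology_template:
      node_templates:
        my_vdu:
          type: tosca.nodes.nfv.VDU.Compute
          properties:
            name: my_vdu
            description: example VDU
            configurable_properties: {}
          capabilities:
            virtual_compute:
              properties:
                virtual_memory:
                  virtual_mem_size: 2 GB
                virtual_cpu:
                  num_virtual_cpu: 1
        internal_vl:
          type: tosca.nodes.nfv.VnfVirtualLinkDesc
          properties:
            connectivity_type:
              layer_protocol: ipv4
        my_cp:
          type: tosca.nodes.nfv.VduCpd
          properties:
            layer_protocol: ipv4
          requirements:
            - virtual_binding: my_vdu   # VirtualBindsTo relationship to the VDU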
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/relationships.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/relationships.yaml
deleted file mode 100644
index 4cf99a2..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/relationships.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-relationship_types:
-
-  tosca.relationships.nfv.VirtualBindsTo:
-    _extensions:
-      shorthand_name: VirtualBindsTo
-      type_qualified_name: tosca:VirtualBindsTo
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.7.1
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896074'
-    description: >-
-      This relationship type represents an association relationship between VDU and CP node types.
-    derived_from: tosca.relationships.DependsOn
-    valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
-
-  # ARIA NOTE: csd04 lacks the definition of tosca.relationships.nfv.Monitor (the derived_from and
-  # valid_target_types), so we are using the definition in csd03 section 8.4.2.
-  tosca.relationships.nfv.Monitor:
-    _extensions:
-      shorthand_name: Monitor
-      type_qualified_name: tosca:Monitor
-      specification: tosca-simple-nfv-1.0
-      specification_section: 5.7.2
-      specification_url: 'http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html#_Toc482896075'
-    description: >-
-      This relationship type represents an association relationship to the Metric capability of VDU
-      node types.
-    derived_from: tosca.relationships.ConnectsTo
-    valid_target_types: [ tosca.capabilities.nfv.Metric ]
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/tosca-simple-nfv-1.0.yaml b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/tosca-simple-nfv-1.0.yaml
deleted file mode 100644
index 764c739..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/profiles/tosca-simple-nfv-1.0/tosca-simple-nfv-1.0.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-imports:
-  - artifacts.yaml
-  - capabilities.yaml
-  - data.yaml
-  - nodes.yaml
-  - relationships.yaml
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_nfv_v1_0/presenter.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_nfv_v1_0/presenter.py
deleted file mode 100644
index 64178aa..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_nfv_v1_0/presenter.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenList
-from aria.utils.caching import cachedmethod
-
-from ..simple_v1_0 import ToscaSimplePresenter1_0
-
-
-class ToscaSimpleNfvPresenter1_0(ToscaSimplePresenter1_0): # pylint: disable=invalid-name,abstract-method
-    """
-    ARIA presenter for the `TOSCA Simple Profile for NFV v1.0 csd04 <http://docs.oasis-open.org
-    /tosca/tosca-nfv/v1.0/csd04/tosca-nfv-v1.0-csd04.html>`__.
-
-    Supported ``tosca_definitions_version`` values:
-
-    * ``tosca_simple_profile_for_nfv_1_0``
-    """
-
-    DSL_VERSIONS = ('tosca_simple_profile_for_nfv_1_0',)
-    ALLOWED_IMPORTED_DSL_VERSIONS = ('tosca_simple_yaml_1_0', 'tosca_simple_profile_for_nfv_1_0')
-    SIMPLE_PROFILE_FOR_NFV_LOCATION = 'tosca-simple-nfv-1.0/tosca-simple-nfv-1.0.yaml'
-
-    # Presenter
-
-    @cachedmethod
-    def _get_import_locations(self, context):
-        import_locations = super(ToscaSimpleNfvPresenter1_0, self)._get_import_locations(context)
-        if context.presentation.import_profile:
-            return FrozenList([self.SIMPLE_PROFILE_FOR_NFV_LOCATION] + import_locations)
-        return import_locations
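In practice, a service template opts into this presenter (and, assuming the default import_profile behavior, the implicit import of the NFV profile shown earlier) purely through its tosca_definitions_version. A hedged sketch:

    tosca_definitions_version: tosca_simple_profile_for_nfv_1_0   # handled by ToscaSimpleNfvPresenter1_0

    topology_template:
      node_templates:
        my_vdu:
          type: tosca.nodes.nfv.VDU.Compute   # resolvable via the implicitly imported NFV profile
          properties:
            name: my_vdu
            description: example VDU
            configurable_properties: {}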
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/assignments.py
deleted file mode 100644
index 0590527..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/assignments.py
+++ /dev/null
@@ -1,440 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenDict
-from aria.utils.caching import cachedmethod
-from aria.parser import implements_specification
-from aria.parser.presentation import (AsIsPresentation, has_fields, allow_unknown_fields,
-                                      short_form_field, primitive_field, object_field,
-                                      object_dict_field, object_dict_unknown_fields,
-                                      field_validator, type_validator)
-
-from .filters import NodeFilter
-from .misc import Description, OperationImplementation
-from .modeling.parameters import get_assigned_and_defined_parameter_values
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_validators import (node_template_or_type_validator,
-                                            relationship_template_or_type_validator,
-                                            capability_definition_or_type_validator,
-                                            node_filter_validator)
-from .presentation.types import (convert_shorthand_to_full_type_name,
-                                 get_type_by_full_or_shorthand_name)
-
-@implements_specification('3.5.9', 'tosca-simple-1.0')
-class PropertyAssignment(AsIsPresentation):
-    """
-    This section defines the grammar for assigning values to named properties within TOSCA Node and
-    Relationship templates that are defined in their corresponding named types.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_PROPERTY_VALUE_ASSIGNMENT>`__
-    """
-
-@short_form_field('implementation')
-@has_fields
-@implements_specification('3.5.13-2', 'tosca-simple-1.0')
-class OperationAssignment(ExtensiblePresentation):
-    """
-    An operation definition defines a named function or procedure that can be bound to an
-    implementation artifact (e.g., a script).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_OPERATION_DEF>`__
-    """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description string for the associated named operation.
-
-        :type: :class:`Description`
-        """
-
-    @object_field(OperationImplementation)
-    def implementation(self):
-        """
-        The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR
-        file).
-
-        :type: :class:`OperationImplementation`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def inputs(self):
-        """
-        The optional list of input property assignments (i.e., parameter assignments) for operation
-        definitions that are within TOSCA Node or Relationship Template definitions. This includes
-        when operation definitions are included as part of a Requirement assignment in a Node
-        Template.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @cachedmethod
-    def _get_extensions(self, context):
-        def update_inherited_extensions(extensions, interface_type):
-            parent = interface_type._get_parent(context)
-            if parent is not None:
-                update_inherited_extensions(extensions, parent)
-            operation_definition = interface_type.operations.get(self._name)
-            if operation_definition is not None:
-                if operation_definition._extensions:
-                    extensions.update(operation_definition._extensions)
-
-        extensions = {}
-        update_inherited_extensions(extensions, self._container._get_type(context))
-        if self._container._extensions:
-            extensions.update(self._container._extensions)
-        if self._extensions:
-            extensions.update(self._extensions)
-        return extensions
-
-@allow_unknown_fields
-@has_fields
-@implements_specification('3.5.14-2', 'tosca-simple-1.0')
-class InterfaceAssignment(ExtensiblePresentation):
-    """
-    An interface definition defines a named interface that can be associated with a Node or
-    Relationship Type.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_INTERFACE_DEF>`__
-    """
-
-    @object_dict_field(PropertyAssignment)
-    def inputs(self):
-        """
-        The optional list of input property assignments (i.e., parameter assignments) for interface
-        definitions that are within TOSCA Node or Relationship Template definitions. This includes
-        when interface definitions are referenced as part of a Requirement assignment in a Node
-        Template.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @object_dict_unknown_fields(OperationAssignment)
-    def operations(self):
-        """
-        :type: {:obj:`basestring`: :class:`OperationAssignment`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        the_type = self._container._get_type(context)
-
-        if isinstance(the_type, tuple):
-            # In RelationshipAssignment
-            the_type = the_type[0] # This could be a RelationshipTemplate
-
-        interface_definitions = the_type._get_interfaces(context) \
-            if the_type is not None else None
-        interface_definition = interface_definitions.get(self._name) \
-            if interface_definitions is not None else None
-        return interface_definition._get_type(context) \
-            if interface_definition is not None else None
-
-    def _validate(self, context):
-        super(InterfaceAssignment, self)._validate(context)
-        if self.operations:
-            for operation in self.operations.itervalues(): # pylint: disable=no-member
-                operation._validate(context)
-
-@short_form_field('type')
-@has_fields
-class RelationshipAssignment(ExtensiblePresentation):
-    """
-    Relationship assignment.
-    """
-
-    @field_validator(relationship_template_or_type_validator)
-    @primitive_field(str)
-    def type(self):
-        """
-        The optional reserved keyname used to provide the name of the Relationship Type for the
-        requirement assignment's relationship keyname.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        ARIA NOTE: This field is not mentioned in the spec, but is implied.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @object_dict_field(InterfaceAssignment)
-    def interfaces(self):
-        """
-        The optional reserved keyname used to reference declared (named) interface definitions of
-        the corresponding Relationship Type in order to provide Property assignments for these
-        interfaces or operations of these interfaces.
-
-        :type: {:obj:`basestring`: :class:`InterfaceAssignment`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        type_name = self.type
-        if type_name is not None:
-            the_type = context.presentation.get_from_dict('service_template', 'topology_template',
-                                                          'relationship_templates', type_name)
-            if the_type is not None:
-                return the_type, 'relationship_template'
-            the_type = get_type_by_full_or_shorthand_name(context, type_name, 'relationship_types')
-            if the_type is not None:
-                return the_type, 'relationship_type'
-        return None, None
-
-@short_form_field('node')
-@has_fields
-@implements_specification('3.7.2', 'tosca-simple-1.0')
-class RequirementAssignment(ExtensiblePresentation):
-    """
-    A Requirement assignment allows template authors to provide either concrete names of TOSCA
-    templates or provide abstract selection criteria for providers to use to find matching TOSCA
-    templates that are used to fulfill a named requirement's declared TOSCA Node Type.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_REQUIREMENT_ASSIGNMENT>`__
-    """
-
-    # The example in 3.7.2.2.2 shows unknown fields in addition to these, but is this a mistake?
-
-    @field_validator(capability_definition_or_type_validator)
-    @primitive_field(str)
-    def capability(self):
-        """
-        The optional reserved keyname used to provide the name of either a:
-
-        * Capability definition within a target node template that can fulfill the requirement.
-        * Capability Type that the provider will use to select a type-compatible target node
-          template to fulfill the requirement at runtime.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_validator(node_template_or_type_validator)
-    @primitive_field(str)
-    def node(self):
-        """
-        The optional reserved keyname used to identify the target node of a relationship.
-        Specifically, it is used to provide either a:
-
-        * Node Template name that can fulfill the target node requirement.
-        * Node Type name that the provider will use to select a type-compatible node template to
-          fulfill the requirement at runtime.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(RelationshipAssignment)
-    def relationship(self):
-        """
-        The optional reserved keyname used to provide the name of either a:
-
-        * Relationship Template to use to relate the source node to the (capability in the) target
-          node when fulfilling the requirement.
-        * Relationship Type that the provider will use to select a type-compatible relationship
-          template to relate the source node to the target node at runtime.
-
-        :type: :class:`RelationshipAssignment`
-        """
-
-    @field_validator(node_filter_validator)
-    @object_field(NodeFilter)
-    def node_filter(self):
-        """
-        The optional filter definition that TOSCA orchestrators or providers would use to select a
-        type-compatible target node that can fulfill the associated abstract requirement at runtime.
-
-        :type: :class:`NodeFilter`
-        """
-
-    @cachedmethod
-    def _get_node(self, context):
-        node = self.node
-
-        if node is not None:
-            node_template = context.presentation.get_from_dict('service_template',
-                                                               'topology_template',
-                                                               'node_templates', node)
-            if node_template is not None:
-                return node_template, 'node_template'
-            node_type = get_type_by_full_or_shorthand_name(context, node, 'node_types')
-            if node_type is not None:
-                return node_type, 'node_type'
-
-        return None, None
-
-    @cachedmethod
-    def _get_capability(self, context):
-        capability = self.capability
-
-        if capability is not None:
-            node, node_variant = self._get_node(context)
-            if node_variant == 'node_template':
-                capabilities = node._get_capabilities(context)
-                if capability in capabilities:
-                    return capabilities[capability], 'capability_assignment'
-            capability_type = get_type_by_full_or_shorthand_name(context, capability,
-                                                                 'capability_types')
-            if capability_type is not None:
-                return capability_type, 'capability_type'
-
-        return None, None
-
-@implements_specification('3.5.11', 'tosca-simple-1.0')
-class AttributeAssignment(AsIsPresentation):
-    """
-    This section defines the grammar for assigning values to named attributes within TOSCA Node and
-    Relationship templates which are defined in their corresponding named types.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_ATTRIBUTE_VALUE_ASSIGNMENT>`__
-    """
-
-@has_fields
-@implements_specification('3.7.1', 'tosca-simple-1.0')
-class CapabilityAssignment(ExtensiblePresentation):
-    """
-    A capability assignment allows node template authors to assign values to properties and
-    attributes for a named capability definition that is part of a Node Template's type definition.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_CAPABILITY_ASSIGNMENT>`__
-    """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        An optional list of property definitions for the Capability definition.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @object_dict_field(AttributeAssignment)
-    def attributes(self):
-        """
-        An optional list of attribute definitions for the Capability definition.
-
-        :type: {:obj:`basestring`: :class:`AttributeAssignment`}
-        """
-
-    @cachedmethod
-    def _get_definition(self, context):
-        node_type = self._container._get_type(context)
-        capability_definitions = node_type._get_capabilities(context) \
-            if node_type is not None else None
-        return capability_definitions.get(self._name) \
-            if capability_definitions is not None else None
-
-    @cachedmethod
-    def _get_type(self, context):
-        capability_definition = self._get_definition(context)
-        return capability_definition._get_type(context) \
-            if capability_definition is not None else None
-
-@has_fields
-@implements_specification('3.5.6', 'tosca-simple-1.0')
-class ArtifactAssignment(ExtensiblePresentation):
-    """
-    An artifact definition defines a named, typed file that can be associated with a Node Type or
-    Node Template and used by the orchestration engine to facilitate deployment and implementation
-    of interface operations.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_ARTIFACT_DEF>`__
-    """
-
-    @field_validator(type_validator('artifact type', convert_shorthand_to_full_type_name,
-                                    'artifact_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required artifact type for the artifact definition.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str, required=True)
-    def file(self):
-        """
-        The required URI string (relative or absolute) which can be used to locate the artifact's
-        file.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_validator(type_validator('repository', 'repositories'))
-    @primitive_field(str)
-    def repository(self):
-        """
-        The optional name of the repository definition which contains the location of the external
-        repository that contains the artifact. The artifact is expected to be referenceable by its
-        file URI within the repository.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the artifact definition.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_field(str)
-    def deploy_path(self):
-        """
-        The file path the associated file would be deployed into within the target node's container.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        ARIA NOTE: This field is not mentioned in the spec, but is implied.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'artifact_types')
-
-    @cachedmethod
-    def _get_repository(self, context):
-        return context.presentation.get_from_dict('service_template', 'repositories',
-                                                  self.repository)
-
-    @cachedmethod
-    def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
-
-    @cachedmethod
-    def _validate(self, context):
-        super(ArtifactAssignment, self)._validate(context)
-        self._get_property_values(context)
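A point worth noting in the file removed above is the resolution order in RequirementAssignment._get_node: a concrete node template is tried first, and only then a (possibly shorthand) node type. A small illustrative sketch of that order, using hypothetical dictionaries rather than ARIA's presentation context:

    node_templates = {'db_host': {'type': 'tosca.nodes.Compute'}}
    node_types = {'tosca.nodes.Compute': {}}

    def resolve_node(name):
        # Templates take precedence over types of the same name
        if name in node_templates:
            return node_templates[name], 'node_template'
        if name in node_types:
            return node_types[name], 'node_type'
        return None, None

    print(resolve_node('db_host')[1])              # node_template
    print(resolve_node('tosca.nodes.Compute')[1])  # node_type
    print(resolve_node('unknown'))                 # (None, None)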
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/data_types.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/data_types.py
deleted file mode 100644
index c385f78..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/data_types.py
+++ /dev/null
@@ -1,540 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-from datetime import datetime, tzinfo, timedelta
-try:
-    from functools import total_ordering
-except ImportError:
-    from total_ordering import total_ordering
-
-from aria.parser import implements_specification
-from aria.utils.collections import StrictDict, OrderedDict
-from aria.utils.formatting import safe_repr
-
-from .modeling.data_types import (coerce_to_data_type_class, report_issue_for_bad_format,
-                                  coerce_value)
-
-
-class Timezone(tzinfo):
-    """
-    Timezone as fixed offset in hours and minutes east of UTC.
-    """
-
-    def __init__(self, hours=0, minutes=0):
-        super(Timezone, self).__init__()
-        self._offset = timedelta(hours=hours, minutes=minutes)
-
-    def utcoffset(self, dt): # pylint: disable=unused-argument
-        return self._offset
-
-    def tzname(self, dt): # pylint: disable=unused-argument
-        return str(self._offset)
-
-    def dst(self, dt): # pylint: disable=unused-argument
-        return Timezone._ZERO
-
-    _ZERO = timedelta(0)
-
-UTC = Timezone()
-
-@total_ordering
-@implements_specification('timestamp', 'yaml-1.1')
-class Timestamp(object):
-    '''
-    TOSCA timestamps follow the YAML specification, which in turn is a variant of ISO8601.
-
-    Long forms and short forms (without time of day and assuming UTC timezone) are supported for
-    parsing. The canonical form (for rendering) matches the long form at the UTC timezone.
-
-    See the `Timestamp Language-Independent Type for YAML Version 1.1 (Working Draft 2005-01-18)
-    <http://yaml.org/type/timestamp.html>`__
-    '''
-
-    REGULAR_SHORT = r'^(?P<year>[0-9][0-9][0-9][0-9])-(?P<month>[0-9][0-9])-(?P<day>[0-9][0-9])$'
-    REGULAR_LONG = \
-        r'^(?P<year>[0-9][0-9][0-9][0-9])-(?P<month>[0-9][0-9]?)-(?P<day>[0-9][0-9]?)' + \
-        r'([Tt]|[ \t]+)' + \
-        r'(?P<hour>[0-9][0-9]?):(?P<minute>[0-9][0-9]):(?P<second>[0-9][0-9])' + \
-        r'(?P<fraction>\.[0-9]*)?' + \
-        r'(([ \t]*)Z|(?P<tzhour>[-+][0-9][0-9])?(:(?P<tzminute>[0-9][0-9])?)?)?$'
-    CANONICAL = '%Y-%m-%dT%H:%M:%S'
-
-    def __init__(self, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        value = str(value)
-        match = re.match(Timestamp.REGULAR_SHORT, value)
-        if match is not None:
-            # Parse short form
-            year = int(match.group('year'))
-            month = int(match.group('month'))
-            day = int(match.group('day'))
-            self.value = datetime(year, month, day, tzinfo=UTC)
-        else:
-            match = re.match(Timestamp.REGULAR_LONG, value)
-            if match is not None:
-                # Parse long form
-                year = int(match.group('year'))
-                month = int(match.group('month'))
-                day = int(match.group('day'))
-                hour = match.group('hour')
-                if hour is not None:
-                    hour = int(hour)
-                minute = match.group('minute')
-                if minute is not None:
-                    minute = int(minute)
-                second = match.group('second')
-                if second is not None:
-                    second = int(second)
-                fraction = match.group('fraction')
-                if fraction is not None:
-                    fraction = int(float(fraction) * 1000000.0) # convert to microseconds
-                tzhour = match.group('tzhour')
-                if tzhour is not None:
-                    tzhour = int(tzhour)
-                else:
-                    tzhour = 0
-                tzminute = match.group('tzminute')
-                if tzminute is not None:
-                    tzminute = int(tzminute)
-                else:
-                    tzminute = 0
-                self.value = datetime(year, month, day, hour, minute, second, fraction,
-                                      Timezone(tzhour, tzminute))
-            else:
-                raise ValueError(
-                    'timestamp must be formatted as YAML ISO8601 variant or "YYYY-MM-DD": %s'
-                    % safe_repr(value))
-
-    @property
-    def as_datetime_utc(self):
-        return self.value.astimezone(UTC)
-
-    @property
-    def as_raw(self):
-        return self.__str__()
-
-    def __str__(self):
-        the_datetime = self.as_datetime_utc
-        return '%s%sZ' \
-            % (the_datetime.strftime(Timestamp.CANONICAL), Timestamp._fraction_as_str(the_datetime))
-
-    def __repr__(self):
-        return repr(self.__str__())
-
-    def __eq__(self, timestamp):
-        if not isinstance(timestamp, Timestamp):
-            return False
-        return self.value == timestamp.value
-
-    def __lt__(self, timestamp):
-        return self.value < timestamp.value
-
-    @staticmethod
-    def _fraction_as_str(the_datetime):
-        return '{0:g}'.format(the_datetime.microsecond / 1000000.0).lstrip('0')
-
-@total_ordering
-@implements_specification('3.2.2', 'tosca-simple-1.0')
-class Version(object):
-    """
-    TOSCA supports the concept of "reuse" of type definitions, as well as template definitions which
-    could be versioned and change over time. It is important to provide a reliable, normative means to
-    represent a version string which enables the comparison and management of types and templates
-    over time. Therefore, the TOSCA TC intends to provide a normative version type (string) for this
-    purpose in future Working Drafts of this specification.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_VERSION>`__
-    """
-
-    REGULAR = \
-        r'^(?P<major>\d+)\.(?P<minor>\d+)(\.(?P<fix>\d+)' + \
-        r'((\.(?P<qualifier>\d+))(\-(?P<build>\d+))?)?)?$'
-
-    @staticmethod
-    def key(version):
-        """
-        Key method for fast sorting.
-        """
-        return (version.major, version.minor, version.fix, version.qualifier, version.build)
-
-    def __init__(self, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        str_value = str(value)
-        match = re.match(Version.REGULAR, str_value)
-        if match is None:
-            raise ValueError(
-                'version must be formatted as <major_version>.<minor_version>'
-                '[.<fix_version>[.<qualifier>[-<build_version]]]: %s'
-                % safe_repr(value))
-
-        self.value = str_value
-
-        self.major = match.group('major')
-        self.major = int(self.major)
-        self.minor = match.group('minor')
-        self.minor = int(self.minor)
-        self.fix = match.group('fix')
-        if self.fix is not None:
-            self.fix = int(self.fix)
-        self.qualifier = match.group('qualifier')
-        if self.qualifier is not None:
-            self.qualifier = int(self.qualifier)
-        self.build = match.group('build')
-        if self.build is not None:
-            self.build = int(self.build)
-
-    @property
-    def as_raw(self):
-        return self.value
-
-    def __str__(self):
-        return self.value
-
-    def __repr__(self):
-        return repr(self.__str__())
-
-    def __eq__(self, version):
-        if not isinstance(version, Version):
-            return False
-        return (self.major, self.minor, self.fix, self.qualifier, self.build) == \
-            (version.major, version.minor, version.fix, version.qualifier, version.build)
-
-    def __lt__(self, version):
-        if self.major < version.major:
-            return True
-        elif self.major == version.major:
-            if self.minor < version.minor:
-                return True
-            elif self.minor == version.minor:
-                if self.fix < version.fix:
-                    return True
-                elif self.fix == version.fix:
-                    if self.qualifier < version.qualifier:
-                        return True
-                    elif self.qualifier == version.qualifier:
-                        if self.build < version.build:
-                            return True
-        return False
-
-@implements_specification('3.2.3', 'tosca-simple-1.0')
-class Range(object):
-    """
-    The range type can be used to define numeric ranges with a lower and upper boundary. For
-    example, this allows for specifying a range of ports to be opened in a firewall.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_RANGE>`__
-    """
-
-    def __init__(self, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        if not isinstance(value, list):
-            raise ValueError('range value is not a list: %s' % safe_repr(value))
-        if len(value) != 2:
-            raise ValueError('range value does not have exactly 2 elements: %s' % safe_repr(value))
-
-        def is_int(v):
-            return isinstance(v, int) and (not isinstance(v, bool)) # In Python bool is an int
-
-        if not is_int(value[0]):
-            raise ValueError('lower bound of range is not a valid integer: %s'
-                             % safe_repr(value[0]))
-
-        if value[1] != 'UNBOUNDED':
-            if not is_int(value[1]):
-                raise ValueError('upper bound of range is not a valid integer or "UNBOUNDED": %s'
-                                 % safe_repr(value[1]))
-
-            if value[0] >= value[1]:
-                raise ValueError(
-                    'upper bound of range is not greater than the lower bound: %s >= %s'
-                    % (safe_repr(value[0]), safe_repr(value[1])))
-
-        self.value = value
-
-    def is_in(self, value):
-        if value < self.value[0]:
-            return False
-        if (self.value[1] != 'UNBOUNDED') and (value > self.value[1]):
-            return False
-        return True
-
-    @property
-    def as_raw(self):
-        return list(self.value)
-
-@implements_specification('3.2.4', 'tosca-simple-1.0')
-class List(list):
-    """
-    The list type allows for specifying multiple values for a parameter or property. For example, if
-    an application allows for being configured to listen on multiple ports, a list of ports could be
-    configured using the list data type.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_LIST>`__
-    """
-
-    @staticmethod
-    def _create(context, presentation, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        if not isinstance(value, list):
-            raise ValueError('"list" data type value is not a list: %s' % safe_repr(value))
-
-        entry_schema_type = entry_schema._get_type(context)
-        entry_schema_constraints = entry_schema.constraints
-
-        the_list = List()
-        for v in value:
-            v = coerce_value(context, presentation, entry_schema_type, None,
-                             entry_schema_constraints, v, aspect)
-            if v is not None:
-                the_list.append(v)
-
-        return the_list
-
-    # Can't define as property because it's an old-style Python class
-    def as_raw(self):
-        return list(self)
-
-@implements_specification('3.2.5', 'tosca-simple-1.0')
-class Map(StrictDict):
-    """
-    The map type allows for specifying multiple values for a parameter or property as a map. In
-    contrast to the list type, where each entry can only be addressed by its index in the list,
-    entries in a map are named elements that can be addressed by their keys.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_MAP>`__
-    """
-
-    @staticmethod
-    def _create(context, presentation, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        if not isinstance(value, dict):
-            raise ValueError('"map" data type value is not a dict: %s' % safe_repr(value))
-
-        if entry_schema is None:
-            raise ValueError('"map" data type does not define "entry_schema"')
-
-        entry_schema_type = entry_schema._get_type(context)
-        entry_schema_constraints = entry_schema.constraints
-
-        the_map = Map()
-        for k, v in value.iteritems():
-            v = coerce_value(context, presentation, entry_schema_type, None,
-                             entry_schema_constraints, v, aspect)
-            if v is not None:
-                the_map[k] = v
-
-        return the_map
-
-    def __init__(self, items=None):
-        super(Map, self).__init__(items, key_class=str)
-
-    # Can't define as property because it's an old-style Python class
-    def as_raw(self):
-        return OrderedDict(self)
-
-@total_ordering
-@implements_specification('3.2.6', 'tosca-simple-1.0')
-class Scalar(object):
-    """
-    The scalar-unit type can be used to define scalar values along with a unit from the list of
-    recognized units.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_SCALAR_UNIT>`__
-    """
-
-    @staticmethod
-    def key(scalar):
-        """
-        Key method for fast sorting.
-        """
-        return scalar.value
-
-    def __init__(self, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-        str_value = str(value)
-        match = re.match(self.REGULAR, str_value) # pylint: disable=no-member
-        if match is None:
-            raise ValueError('scalar must be formatted as <scalar> <unit>: %s' % safe_repr(value))
-
-        self.factor = float(match.group('scalar'))
-        self.unit = match.group('unit')
-
-        unit_lower = self.unit.lower()
-        unit_size = None
-        for k, v in self.UNITS.iteritems(): # pylint: disable=no-member
-            if k.lower() == unit_lower:
-                self.unit = k
-                unit_size = v
-                break
-        if unit_size is None:
-            raise ValueError('scalar specified with unsupported unit: %s' % safe_repr(self.unit))
-
-        self.value = self.TYPE(self.factor * unit_size) # pylint: disable=no-member
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('value', self.value),
-            ('factor', self.factor),
-            ('unit', self.unit),
-            ('unit_size', self.UNITS[self.unit]))) # pylint: disable=no-member
-
-    def __str__(self):
-        return '%s %s' % (self.value, self.UNIT) # pylint: disable=no-member
-
-    def __repr__(self):
-        return repr(self.__str__())
-
-    def __eq__(self, scalar):
-        if isinstance(scalar, Scalar):
-            value = scalar.value
-        else:
-            value = self.TYPE(scalar) # pylint: disable=no-member
-        return self.value == value
-
-    def __lt__(self, scalar):
-        if isinstance(scalar, Scalar):
-            value = scalar.value
-        else:
-            value = self.TYPE(scalar) # pylint: disable=no-member
-        return self.value < value
-
-@implements_specification('3.2.6.4', 'tosca-simple-1.0')
-class ScalarSize(Scalar):
-    """
-    Integer scalar for counting bytes.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_SCALAR_UNIT_SIZE>`__
-    """
-
-    # See: http://www.regular-expressions.info/floatingpoint.html
-    REGULAR = \
-        r'^(?P<scalar>[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*(?P<unit>B|kB|KiB|MB|MiB|GB|GiB|TB|TiB)$'
-
-    UNITS = {
-        'B':               1,
-        'kB':           1000,
-        'KiB':          1024,
-        'MB':        1000000,
-        'MiB':       1048576,
-        'GB':     1000000000,
-        'GiB':    1073741824,
-        'TB':  1000000000000,
-        'TiB': 1099511627776}
-
-    TYPE = int
-    UNIT = 'bytes'
-
-@implements_specification('3.2.6.5', 'tosca-simple-1.0')
-class ScalarTime(Scalar):
-    """
-    Floating point scalar for counting seconds.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_SCALAR_UNIT_TIME>`__
-    """
-
-    # See: http://www.regular-expressions.info/floatingpoint.html
-    REGULAR = r'^(?P<scalar>[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*(?P<unit>ns|us|ms|s|m|h|d)$'
-
-    UNITS = {
-        'ns':     0.000000001,
-        'us':     0.000001,
-        'ms':     0.001,
-        's':      1.0,
-        'm':     60.0,
-        'h':   3600.0,
-        'd':  86400.0}
-
-    TYPE = float
-    UNIT = 'seconds'
-
-@implements_specification('3.2.6.6', 'tosca-simple-1.0')
-class ScalarFrequency(Scalar):
-    """
-    Floating point scalar for counting cycles per second (Hz).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #TYPE_TOSCA_SCALAR_UNIT_FREQUENCY>`__
-    """
-
-    # See: http://www.regular-expressions.info/floatingpoint.html
-    REGULAR = r'^(?P<scalar>[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*(?P<unit>Hz|kHz|MHz|GHz)$'
-
-    UNITS = {
-        'Hz':           1.0,
-        'kHz':       1000.0,
-        'MHz':    1000000.0,
-        'GHz': 1000000000.0}
-
-    TYPE = float
-    UNIT = 'Hz'
-
-#
-# The following are hooked in the YAML as 'coerce_value' extensions
-#
-
-def coerce_timestamp(context, presentation, the_type, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-    return coerce_to_data_type_class(context, presentation, Timestamp, entry_schema, constraints,
-                                     value, aspect)
-
-def coerce_version(context, presentation, the_type, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-    return coerce_to_data_type_class(context, presentation, Version, entry_schema, constraints,
-                                     value, aspect)
-
-def coerce_range(context, presentation, the_type, entry_schema, constraints, value, aspect):
-    if aspect == 'in_range':
-        # When we're in a "in_range" constraint, the values are *not* themselves ranges, but numbers
-        try:
-            return float(value)
-        except ValueError as e:
-            report_issue_for_bad_format(context, presentation, the_type, value, aspect, e)
-        except TypeError as e:
-            report_issue_for_bad_format(context, presentation, the_type, value, aspect, e)
-    else:
-        return coerce_to_data_type_class(context, presentation, Range, entry_schema, constraints,
-                                         value, aspect)
-
-def coerce_list(context, presentation, the_type, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-    return coerce_to_data_type_class(context, presentation, List, entry_schema, constraints,
-                                     value, aspect)
-
-def coerce_map_value(context, presentation, the_type, entry_schema, constraints, value, aspect): # pylint: disable=unused-argument
-    return coerce_to_data_type_class(context, presentation, Map, entry_schema, constraints, value,
-                                     aspect)
-
-def coerce_scalar_unit_size(context, presentation, the_type, entry_schema, constraints, value, # pylint: disable=unused-argument
-                            aspect):
-    return coerce_to_data_type_class(context, presentation, ScalarSize, entry_schema, constraints,
-                                     value, aspect)
-
-def coerce_scalar_unit_time(context, presentation, the_type, entry_schema, constraints, value, # pylint: disable=unused-argument
-                            aspect):
-    return coerce_to_data_type_class(context, presentation, ScalarTime, entry_schema, constraints,
-                                     value, aspect)
-
-def coerce_scalar_unit_frequency(context, presentation, the_type, entry_schema, constraints, value, # pylint: disable=unused-argument
-                                 aspect):
-    return coerce_to_data_type_class(context, presentation, ScalarFrequency, entry_schema,
-                                     constraints, value, aspect)
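The scalar-unit classes removed above reduce a string such as "2 GiB" to a plain number via a regular expression and a unit table. A standalone sketch of the scalar-unit.size case, reusing the same pattern and unit sizes (the function name is illustrative; this is not the ARIA ScalarSize class):

    import re

    REGULAR = (r'^(?P<scalar>[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*'
               r'(?P<unit>B|kB|KiB|MB|MiB|GB|GiB|TB|TiB)$')
    UNITS = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 10**6, 'MiB': 2**20,
             'GB': 10**9, 'GiB': 2**30, 'TB': 10**12, 'TiB': 2**40}

    def parse_scalar_size(value):
        # Split "<scalar> <unit>" and convert to a byte count
        match = re.match(REGULAR, value)
        if match is None:
            raise ValueError('not a scalar-unit.size: %r' % value)
        return int(float(match.group('scalar')) * UNITS[match.group('unit')])

    print(parse_scalar_size('2 GiB'))   # 2147483648
    print(parse_scalar_size('1.5 kB'))  # 1500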
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/definitions.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/definitions.py
deleted file mode 100644
index 1bd0366..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/definitions.py
+++ /dev/null
@@ -1,509 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenDict
-from aria.utils.caching import cachedmethod
-from aria.parser import implements_specification
-from aria.parser.presentation import (has_fields, short_form_field, allow_unknown_fields,
-                                      primitive_field, primitive_list_field, object_field,
-                                      object_list_field, object_dict_field,
-                                      object_dict_unknown_fields, field_validator,
-                                      field_getter, type_validator, list_type_validator)
-
-from .data_types import Range
-from .misc import (Description, ConstraintClause, OperationImplementation, EntrySchema)
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_getters import data_type_class_getter
-from .presentation.field_validators import (data_type_validator, data_value_validator,
-                                            entry_schema_validator)
-from .presentation.types import (convert_shorthand_to_full_type_name,
-                                 get_type_by_full_or_shorthand_name)
-from .modeling.data_types import get_data_type, get_property_constraints
-from .modeling.interfaces import (get_and_override_input_definitions_from_type,
-                                  get_and_override_operation_definitions_from_type)
-
-@has_fields
-@implements_specification('3.5.8', 'tosca-simple-1.0')
-class PropertyDefinition(ExtensiblePresentation):
-    """
-    A property definition defines a named, typed value and related data that can be associated with
-    an entity defined in this specification (e.g., Node Types, Relationship Types, Capability Types,
-    etc.). Properties are used by template authors to provide input values to TOSCA entities which
-    indicate their "desired state" when they are instantiated. The value of a property can be
-    retrieved using the ``get_property`` function within TOSCA Service Templates.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_PROPERTY_DEFN>`__
-    """
-
-    @field_validator(data_type_validator())
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required data type for the property.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the property.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_field(bool, default=True)
-    def required(self):
-        """
-        An optional key that declares a property as required (true) or not (false).
-
-        :type: bool
-        """
-
-    @field_validator(data_value_validator)
-    @primitive_field()
-    def default(self):
-        """
-        An optional key that may provide a value to be used as a default if not provided by another
-        means.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str, default='supported', allowed=('supported', 'unsupported', 'experimental',
-                                                        'deprecated'))
-    @implements_specification(section='3.5.8.3', spec='tosca-simple-1.0')
-    def status(self):
-        """
-        The optional status of the property relative to the specification or implementation.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_list_field(ConstraintClause)
-    def constraints(self):
-        """
-        The optional list of sequenced constraint clauses for the property.
-
-        :type: list of (str, :class:`ConstraintClause`)
-        """
-
-    @field_validator(entry_schema_validator)
-    @object_field(EntrySchema)
-    def entry_schema(self):
-        """
-        The optional key that is used to declare the name of the Datatype definition for entries of
-        set types such as the TOSCA list or map.
-
-        :type: :obj:`basestring`
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_data_type(context, self, 'type')
-
-    @cachedmethod
-    def _get_constraints(self, context):
-        return get_property_constraints(context, self)
-
-@has_fields
-@implements_specification('3.5.10', 'tosca-simple-1.0')
-class AttributeDefinition(ExtensiblePresentation):
-    """
-    An attribute definition defines a named, typed value that can be associated with an entity
-    defined in this specification (e.g., a Node, Relationship or Capability Type). Specifically, it
-    is used to expose the "actual state" of some property of a TOSCA entity after it has been
-    deployed and instantiated (as set by the TOSCA orchestrator). Attribute values can be retrieved
-    via the ``get_attribute`` function from the instance model and used as values to other
-    entities within TOSCA Service Templates.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_ATTRIBUTE_DEFN>`__
-    """
-
-    @field_validator(data_type_validator())
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required data type for the attribute.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the attribute.
-
-        :type: :class:`Description`
-        """
-
-    @field_validator(data_value_validator)
-    @primitive_field()
-    def default(self):
-        """
-        An optional key that may provide a value to be used as a default if not provided by another
-        means.
-
-        This value SHALL be type compatible with the type declared by the property definition's type
-        keyname.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str, default='supported', allowed=('supported', 'unsupported', 'experimental',
-                                                        'deprecated'))
-    def status(self):
-        """
-        The optional status of the attribute relative to the specification or implementation.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_validator(entry_schema_validator)
-    @object_field(EntrySchema)
-    def entry_schema(self):
-        """
-        The optional key that is used to declare the name of the Datatype definition for entries of
-        set types such as the TOSCA list or map.
-
-        :type: :obj:`basestring`
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_data_type(context, self, 'type')
-
-@has_fields
-@implements_specification('3.5.12', 'tosca-simple-1.0')
-class ParameterDefinition(PropertyDefinition):
-    """
-    A parameter definition is essentially a TOSCA property definition; however, it also allows a
-    value to be assigned to it (as for a TOSCA property assignment). In addition, in the case of
-    output parameters, it can optionally inherit the data type of the value assigned to it rather
-    than have an explicit data type defined for it.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_PARAMETER_DEF>`__
-    """
-
-    @field_validator(data_type_validator())
-    @primitive_field(str)
-    def type(self):
-        """
-        The required data type for the parameter.
-
-        Note: This keyname is required for a TOSCA Property definition, but is not for a TOSCA
-        Parameter definition.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_validator(data_value_validator)
-    @primitive_field()
-    def value(self):
-        """
-        The type-compatible value to assign to the named parameter. Parameter values may be provided
-        as the result from the evaluation of an expression or a function.
-        """
-
-@short_form_field('implementation')
-@has_fields
-@implements_specification('3.5.13-1', 'tosca-simple-1.0')
-class OperationDefinition(ExtensiblePresentation):
-    """
-    An operation definition defines a named function or procedure that can be bound to an
-    implementation artifact (e.g., a script).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_OPERATION_DEF>`__
-    """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description string for the associated named operation.
-
-        :type: :class:`Description`
-        """
-
-    @object_field(OperationImplementation)
-    def implementation(self):
-        """
-        The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR
-        file).
-
-        :type: :class:`OperationImplementation`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def inputs(self):
-        """
-        The optional list of input property definitions available to all defined operations for
-        interface definitions that are within TOSCA Node or Relationship Type definitions. This
-        includes when interface definitions are included as part of a Requirement definition in a
-        Node Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-@allow_unknown_fields
-@has_fields
-@implements_specification('3.5.14-1', 'tosca-simple-1.0')
-class InterfaceDefinition(ExtensiblePresentation):
-    """
-    An interface definition defines a named interface that can be associated with a Node or
-    Relationship Type.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_INTERFACE_DEF>`__
-    """
-
-    @field_validator(type_validator('interface type', convert_shorthand_to_full_type_name,
-                                    'interface_types'))
-    @primitive_field(str)
-    def type(self):
-        """
-        ARIA NOTE: This field is not mentioned in the spec, but is implied.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def inputs(self):
-        """
-        The optional list of input property definitions available to all defined operations for
-        interface definitions that are within TOSCA Node or Relationship Type definitions. This
-        includes when interface definitions are included as part of a Requirement definition in a
-        Node Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_unknown_fields(OperationDefinition)
-    def operations(self):
-        """
-        :type: {:obj:`basestring`: :class:`OperationDefinition`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'interface_types')
-
-    @cachedmethod
-    def _get_inputs(self, context):
-        return FrozenDict(get_and_override_input_definitions_from_type(context, self))
-
-    @cachedmethod
-    def _get_operations(self, context):
-        return FrozenDict(get_and_override_operation_definitions_from_type(context, self))
-
-    def _validate(self, context):
-        super(InterfaceDefinition, self)._validate(context)
-        if self.operations:
-            for operation in self.operations.itervalues(): # pylint: disable=no-member
-                operation._validate(context)
-
-@short_form_field('type')
-@has_fields
-class RelationshipDefinition(ExtensiblePresentation):
-    """
-    Relationship definition.
-    """
-
-    @field_validator(type_validator('relationship type', convert_shorthand_to_full_type_name,
-                                    'relationship_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The optional reserved keyname used to provide the name of the Relationship Type for the
-        requirement definition's relationship keyname.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_dict_field(InterfaceDefinition)
-    def interfaces(self):
-        """
-        The optional reserved keyname used to reference declared (named) interface definitions of
-        the corresponding Relationship Type in order to declare additional Property definitions for
-        these interfaces or operations of these interfaces.
-
-        :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'relationship_types')
-
-@short_form_field('capability')
-@has_fields
-@implements_specification('3.6.2', 'tosca-simple-1.0')
-class RequirementDefinition(ExtensiblePresentation):
-    """
-    The Requirement definition describes a named requirement (dependencies) of a TOSCA Node Type or
-    Node template which needs to be fulfilled by a matching Capability definition declared by
-    another TOSCA modelable entity. The requirement definition may itself include the specific name
-    of the fulfilling entity (explicitly) or provide an abstract type, along with additional
-    filtering characteristics, that a TOSCA orchestrator can use to fulfill the capability at
-    runtime (implicitly).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_REQUIREMENT_DEF>`__
-    """
-
-    @field_validator(type_validator('capability type', convert_shorthand_to_full_type_name,
-                                    'capability_types'))
-    @primitive_field(str, required=True)
-    def capability(self):
-        """
-        The required reserved keyname used to provide the name of a valid Capability Type that can
-        fulfill the requirement.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_validator(type_validator('node type', convert_shorthand_to_full_type_name, 'node_types'))
-    @primitive_field(str)
-    def node(self):
-        """
-        The optional reserved keyname used to provide the name of a valid Node Type that contains
-        the capability definition that can be used to fulfill the requirement.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(RelationshipDefinition)
-    def relationship(self):
-        """
-        The optional reserved keyname used to provide the name of a valid Relationship Type to
-        construct when fulfilling the requirement.
-
-        :type: :class:`RelationshipDefinition`
-        """
-
-    @field_getter(data_type_class_getter(Range))
-    @primitive_field()
-    def occurrences(self):
-        """
-        The optional minimum and maximum occurrences for the requirement.
-
-        Note: the keyword ``UNBOUNDED`` is also supported to represent any positive integer.
-
-        :type: :class:`Range`
-        """
-
-    @cachedmethod
-    def _get_capability_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.capability, 'capability_types')
-
-    @cachedmethod
-    def _get_node_type(self, context):
-        return context.presentation.get_from_dict('service_template', 'node_types', self.node)
-
-@short_form_field('type')
-@has_fields
-@implements_specification('3.6.1', 'tosca-simple-1.0')
-class CapabilityDefinition(ExtensiblePresentation):
-    """
-    A capability definition defines a named, typed set of data that can be associated with a Node
-    Type or Node Template to describe a transparent capability or feature of the software component
-    the node describes.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_CAPABILITY_DEFN>`__
-    """
-
-    @field_validator(type_validator('capability type', convert_shorthand_to_full_type_name,
-                                    'capability_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required name of the Capability Type the capability definition is based upon.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description of the Capability definition.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Capability definition.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_field(AttributeDefinition)
-    def attributes(self):
-        """
-        An optional list of attribute definitions for the Capability definition.
-
-        :type: {:obj:`basestring`: :class:`AttributeDefinition`}
-        """
-
-    @field_validator(list_type_validator('node type', convert_shorthand_to_full_type_name,
-                                         'node_types'))
-    @primitive_list_field(str)
-    def valid_source_types(self):
-        """
-        An optional list of one or more valid names of Node Types that are supported as valid
-        sources of any relationship established to the declared Capability Type.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @field_getter(data_type_class_getter(Range))
-    @primitive_field()
-    def occurrences(self):
-        """
-        The optional minimum and maximum occurrences for the capability. By default, an exported
-        Capability should allow at least one relationship to be formed with it, with a maximum of
-        ``UNBOUNDED`` relationships.
-
-        Note: the keyword ``UNBOUNDED`` is also supported to represent any positive integer.
-
-        ARIA NOTE: The spec seems wrong here: the implied default should be ``[0,UNBOUNDED]``, not
-        ``[1,UNBOUNDED]``, otherwise it would imply that at least one relationship *must* be
-        formed.
-
-        :type: :class:`Range`
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'capability_types')
-
-    @cachedmethod
-    def _get_parent(self, context):
-        container_parent = self._container._get_parent(context)
-        container_parent_capabilities = container_parent._get_capabilities(context) \
-            if container_parent is not None else None
-        return container_parent_capabilities.get(self._name) \
-            if container_parent_capabilities is not None else None
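
The ``occurrences`` keynames above are parsed into a ``Range`` whose upper bound may be the keyword
``UNBOUNDED``. A minimal standalone sketch of those range semantics (the helper name is made up for
illustration; this is not ARIA's ``Range`` class):

    def occurrences_allow(occurrences, count):
        # occurrences is a [lower, upper] pair, e.g. [1, 'UNBOUNDED'];
        # 'UNBOUNDED' stands in for any positive integer.
        lower, upper = occurrences
        if count < lower:
            return False
        if upper != 'UNBOUNDED' and count > upper:
            return False
        return True

    assert occurrences_allow([0, 'UNBOUNDED'], 0)   # no relationship required
    assert occurrences_allow([1, 'UNBOUNDED'], 3)
    assert not occurrences_allow([1, 2], 3)          # exceeds the upper bound
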
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/filters.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/filters.py
deleted file mode 100644
index 6db140d..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/filters.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.caching import cachedmethod
-from aria.parser import implements_specification
-from aria.parser.presentation import (has_fields, object_sequenced_list_field, field_validator)
-
-from .misc import ConstraintClause
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_validators import (node_filter_properties_validator,
-                                            node_filter_capabilities_validator)
-
-@has_fields
-class CapabilityFilter(ExtensiblePresentation):
-    """
-    Capability filter.
-    """
-
-    @object_sequenced_list_field(ConstraintClause)
-    def properties(self):
-        pass
-
-    @cachedmethod
-    def _get_node_type(self, context):
-        return self._container._get_node_type(context)
-
-    @cachedmethod
-    def _get_type_for_name(self, context, name):
-        node_type = self._get_node_type(context)
-        if node_type is not None:
-            capabilities = node_type._get_capabilities(context)
-            capability = capabilities.get(self._name)
-            properties = capability.properties if capability is not None else None
-            prop = properties.get(name) if properties is not None else None
-            return prop._get_type(context) if prop is not None else None
-
-        return None
-
-@has_fields
-@implements_specification('3.5.4', 'tosca-simple-1.0')
-class NodeFilter(ExtensiblePresentation):
-    """
-    A node filter definition defines criteria for selection of a TOSCA Node Template based upon the
-    template's property values, capabilities and capability properties.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_NODE_FILTER_DEFN>`__
-    """
-
-    @field_validator(node_filter_properties_validator)
-    @object_sequenced_list_field(ConstraintClause)
-    @implements_specification('3.5.3', 'tosca-simple-1.0')
-    def properties(self):
-        """
-        An optional sequenced list of property filters that would be used to select (filter)
-        matching TOSCA entities (e.g., Node Template, Node Type, Capability Types, etc.) based upon
-        their property definitions' values.
-
-        See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-        /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-        #DEFN_ELEMENT_PROPERTY_FILTER_DEFN>`__
-
-        :type: list of (str, :class:`ConstraintClause`)
-        """
-
-    @field_validator(node_filter_capabilities_validator)
-    @object_sequenced_list_field(CapabilityFilter)
-    def capabilities(self):
-        """
-        An optional sequenced list of capability filters that would be used to select (filter)
-        matching TOSCA entities (e.g., Node Template, Node Type, Capability Types, etc.) based upon
-        their capabilities' property definitions' values.
-
-        :type: list of (str, :class:`CapabilityFilter`)
-        """
-
-    @cachedmethod
-    def _get_node_type(self, context):
-        if hasattr(self._container, '_get_node'):
-            node_type, node_type_variant = self._container._get_node(context)
-            return node_type if node_type_variant == 'node_type' else None
-        return None
-
-    @cachedmethod
-    def _get_type_for_name(self, context, name):
-        node_type = self._get_node_type(context)
-        if node_type is not None:
-            properties = node_type._get_properties(context)
-            prop = properties.get(name)
-            return prop._get_type(context) if prop is not None else None
-
-        return None
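
A ``NodeFilter`` narrows down candidate node templates by testing their property values (and their
capabilities' property values) against constraint clauses. A minimal standalone sketch of that
selection step over plain dicts, with illustrative names and predicates in place of ARIA's
presentation classes:

    def select_templates(node_templates, property_filters):
        # node_templates: {name: {'properties': {...}}}
        # property_filters: list of (property_name, predicate) pairs
        selected = {}
        for name, template in node_templates.items():
            properties = template.get('properties', {})
            if all(predicate(properties.get(prop))
                   for prop, predicate in property_filters):
                selected[name] = template
        return selected

    templates = {
        'small_host': {'properties': {'mem_size': 2}},
        'big_host': {'properties': {'mem_size': 16}},
    }
    # roughly the effect of a greater_or_equal: 4 constraint on mem_size
    matches = select_templates(templates, [('mem_size', lambda v: v is not None and v >= 4)])
    assert list(matches) == ['big_host']
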
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/misc.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/misc.py
deleted file mode 100644
index f4d43ac..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/misc.py
+++ /dev/null
@@ -1,434 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.caching import cachedmethod
-from aria.utils.console import puts
-from aria.utils.formatting import as_raw
-from aria.parser import implements_specification
-from aria.parser.presentation import (AsIsPresentation, has_fields, allow_unknown_fields,
-                                      short_form_field, primitive_field, primitive_list_field,
-                                      primitive_dict_unknown_fields, object_field,
-                                      object_list_field, object_dict_field, field_validator,
-                                      type_validator)
-
-from .modeling.data_types import (get_data_type, get_data_type_value, get_property_constraints,
-                                  apply_constraint_to_value)
-from .modeling.substitution_mappings import (validate_subtitution_mappings_requirement,
-                                             validate_subtitution_mappings_capability)
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_validators import (constraint_clause_field_validator,
-                                            constraint_clause_in_range_validator,
-                                            constraint_clause_valid_values_validator,
-                                            constraint_clause_pattern_validator,
-                                            data_type_validator)
-from .presentation.types import (convert_shorthand_to_full_type_name,
-                                 get_type_by_full_or_shorthand_name)
-
-@implements_specification('3.5.1', 'tosca-simple-1.0')
-class Description(AsIsPresentation):
-    """
-    Human-readable description.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_DESCRIPTION>`__
-    """
-
-    def __init__(self, name=None, raw=None, container=None, cls=None): # pylint: disable=unused-argument
-        super(Description, self).__init__(name, raw, container, cls=unicode)
-
-    def _dump(self, context):
-        value = as_raw(self.value)
-        puts(context.style.meta(value))
-
-@allow_unknown_fields
-@has_fields
-@implements_specification('3.9.3.2', 'tosca-simple-1.0')
-class MetaData(ExtensiblePresentation):
-    """
-    Meta data.
-    """
-
-    @primitive_field(str)
-    @implements_specification('3.9.3.3', 'tosca-simple-1.0')
-    def template_name(self):
-        """
-        This optional metadata keyname can be used to declare the name of the service template as
-        a single-line string value.
-        """
-
-    @primitive_field(str)
-    @implements_specification('3.9.3.4', 'tosca-simple-1.0')
-    def template_author(self):
-        """
-        This optional metadata keyname can be used to declare the author(s) of the service template
-        as a single-line string value.
-        """
-
-    @primitive_field(str)
-    @implements_specification('3.9.3.5', 'tosca-simple-1.0')
-    def template_version(self):
-        """
-        This optional metadata keyname can be used to declare a domain specific version of the
-        service template as a single-line string value.
-        """
-
-    @primitive_dict_unknown_fields()
-    def custom(self):
-        """
-        :type: dict
-        """
-
-@short_form_field('url')
-@has_fields
-@implements_specification('3.5.5', 'tosca-simple-1.0')
-class Repository(ExtensiblePresentation):
-    """
-    A repository definition defines a named external repository which contains deployment and
-    implementation artifacts that are referenced within the TOSCA Service Template.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_REPOSITORY_DEF>`__
-    """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the repository.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_field(str, required=True)
-    def url(self):
-        """
-        The required URL or network address used to access the repository.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field()
-    def credential(self):
-        """
-        The optional Credential used to authorize access to the repository.
-
-        :type: tosca.datatypes.Credential
-        """
-
-    @cachedmethod
-    def _get_credential(self, context):
-        return get_data_type_value(context, self, 'credential', 'tosca.datatypes.Credential')
-
-@short_form_field('file')
-@has_fields
-@implements_specification('3.5.7', 'tosca-simple-1.0')
-class Import(ExtensiblePresentation):
-    """
-    An import definition is used within a TOSCA Service Template to locate and uniquely name another
-    TOSCA Service Template file which has type and template definitions to be imported (included)
-    and referenced within another Service Template.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_IMPORT_DEF>`__
-    """
-
-    @primitive_field(str, required=True)
-    def file(self):
-        """
-        The required symbolic name for the imported file.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str)
-    def repository(self):
-        """
-        The optional symbolic name of the repository definition where the imported file can be found
-        as a string.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str)
-    def namespace_uri(self):
-        """
-        The optional namespace URI that will be applied to type definitions found within the
-        imported file as a string.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_field(str)
-    def namespace_prefix(self):
-        """
-        The optional namespace prefix (alias) that will be used to indicate the namespace_uri when
-        forming a qualified name (i.e., qname) while referencing type definitions from the imported
-        file.
-
-        :type: :obj:`basestring`
-        """
-
-@has_fields
-@implements_specification('3.5.2-1', 'tosca-simple-1.0')
-class ConstraintClause(ExtensiblePresentation):
-    """
-    A constraint clause defines an operation along with one or more compatible values that can be
-    used to define a constraint on a property or parameter's allowed values when it is defined in a
-    TOSCA Service Template or one of its entities.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_CONSTRAINTS_CLAUSE>`__
-    """
-
-    @field_validator(constraint_clause_field_validator)
-    @primitive_field()
-    def equal(self):
-        """
-        Constrains a property or parameter to a value equal to ('=') the value declared.
-        """
-
-    @field_validator(constraint_clause_field_validator)
-    @primitive_field()
-    def greater_than(self):
-        """
-        Constrains a property or parameter to a value greater than ('>') the value declared.
-        """
-
-    @field_validator(constraint_clause_field_validator)
-    @primitive_field()
-    def greater_or_equal(self):
-        """
-        Constrains a property or parameter to a value greater than or equal to ('>=') the value
-        declared.
-        """
-
-    @field_validator(constraint_clause_field_validator)
-    @primitive_field()
-    def less_than(self):
-        """
-        Constrains a property or parameter to a value less than ('<') the value declared.
-        """
-
-    @field_validator(constraint_clause_field_validator)
-    @primitive_field()
-    def less_or_equal(self):
-        """
-        Constrains a property or parameter to a value less than or equal to ('<=') the value
-        declared.
-        """
-
-    @field_validator(constraint_clause_in_range_validator)
-    @primitive_list_field()
-    def in_range(self):
-        """
-        Constrains a property or parameter to a value in range of (inclusive) the two values
-        declared.
-
-        Note: subclasses or templates of types that declare a property with the ``in_range``
-        constraint MAY only further restrict the range specified by the parent type.
-        """
-
-    @field_validator(constraint_clause_valid_values_validator)
-    @primitive_list_field()
-    def valid_values(self):
-        """
-        Constrains a property or parameter to a value that is in the list of declared values.
-        """
-
-    @primitive_field(int)
-    def length(self):
-        """
-        Constrains the property or parameter to a value of a given length.
-        """
-
-    @primitive_field(int)
-    def min_length(self):
-        """
-        Constrains the property or parameter to a value of at least a given minimum length.
-        """
-
-    @primitive_field(int)
-    def max_length(self):
-        """
-        Constrains the property or parameter to a value of at most a given maximum length.
-        """
-
-    @field_validator(constraint_clause_pattern_validator)
-    @primitive_field(str)
-    def pattern(self):
-        """
-        Constrains the property or parameter to a value that is allowed by the provided regular
-        expression.
-
-        Note: Future drafts of this specification will detail the use of regular expressions and
-        reference an appropriate standardized grammar.
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        if hasattr(self._container, '_get_type_for_name'):
-            # NodeFilter or CapabilityFilter
-            return self._container._get_type_for_name(context, self._name)
-        elif hasattr(self._container, '_get_type'):
-            # Properties
-            return self._container._get_type(context)
-        else:
-            # DataType (the DataType itself is our type)
-            return self._container
-
-    def _apply_to_value(self, context, presentation, value):
-        return apply_constraint_to_value(context, presentation, self, value)
-
-@short_form_field('type')
-@has_fields
-class EntrySchema(ExtensiblePresentation):
-    """
-    ARIA NOTE: The specification does not properly explain this type; however, it is implied by
-    examples.
-    """
-
-    @field_validator(data_type_validator('entry schema data type'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        :type: :class:`Description`
-        """
-
-    @object_list_field(ConstraintClause)
-    def constraints(self):
-        """
-        :type: list of (str, :class:`ConstraintClause`)
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_data_type(context, self, 'type')
-
-    @cachedmethod
-    def _get_constraints(self, context):
-        return get_property_constraints(context, self)
-
-@short_form_field('primary')
-@has_fields
-class OperationImplementation(ExtensiblePresentation):
-    """
-    Operation implementation.
-    """
-
-    @primitive_field(str)
-    def primary(self):
-        """
-        The optional implementation artifact name (i.e., the primary script file name within a
-        TOSCA CSAR file).
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_list_field(str)
-    def dependencies(self):
-        """
-        The optional ordered list of one or more dependent or secondary implementation artifact
-        names which are referenced by the primary implementation artifact (e.g., a library the
-        script installs or a secondary script).
-
-        :type: [:obj:`basestring`]
-        """
-
-class SubstitutionMappingsRequirement(AsIsPresentation):
-    """
-    Substitution mapping for requirement.
-    """
-
-    @property
-    @cachedmethod
-    def node_template(self):
-        return str(self._raw[0])
-
-    @property
-    @cachedmethod
-    def requirement(self):
-        return str(self._raw[1])
-
-    def _validate(self, context):
-        super(SubstitutionMappingsRequirement, self)._validate(context)
-        validate_subtitution_mappings_requirement(context, self)
-
-class SubstitutionMappingsCapability(AsIsPresentation):
-    """
-    Substitution mapping for capability.
-    """
-
-    @property
-    @cachedmethod
-    def node_template(self):
-        return str(self._raw[0])
-
-    @property
-    @cachedmethod
-    def capability(self):
-        return str(self._raw[1])
-
-    def _validate(self, context):
-        super(SubstitutionMappingsCapability, self)._validate(context)
-        validate_subtitution_mappings_capability(context, self)
-
-@has_fields
-@implements_specification('2.10', 'tosca-simple-1.0')
-class SubstitutionMappings(ExtensiblePresentation):
-    """
-    Substitution mappings.
-    """
-
-    @field_validator(type_validator('node type', convert_shorthand_to_full_type_name, 'node_types'))
-    @primitive_field(str, required=True)
-    def node_type(self):
-        """
-        :type: :obj:`basestring`
-        """
-
-    @object_dict_field(SubstitutionMappingsRequirement)
-    def requirements(self):
-        """
-        :type: {:obj:`basestring`: :class:`SubstitutionMappingsRequirement`}
-        """
-
-    @object_dict_field(SubstitutionMappingsCapability)
-    def capabilities(self):
-        """
-        :type: {:obj:`basestring`: :class:`SubstitutionMappingsCapability`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.node_type, 'node_types')
-
-    def _validate(self, context):
-        super(SubstitutionMappings, self)._validate(context)
-        self._get_type(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'node_type',
-            'requirements',
-            'capabilities'))
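
Several of the presentations above (``Repository``, ``Import``, ``EntrySchema``,
``OperationImplementation``) are declared with ``@short_form_field``, so a bare scalar in the YAML
stands for a dict with that single keyname. A standalone sketch of the normalization idea (the
function name is invented for illustration; this is not ARIA's decorator machinery):

    def normalize_short_form(raw, short_form_key):
        # A bare string such as 'string' becomes {'type': 'string'};
        # dict input is passed through unchanged.
        if isinstance(raw, str):
            return {short_form_key: raw}
        return raw

    full = {'type': 'string', 'description': 'x'}
    assert normalize_short_form('string', 'type') == {'type': 'string'}
    assert normalize_short_form(full, 'type') is full
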
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
deleted file mode 100644
index dd9eeb4..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import OrderedDict
-
-
-#
-# NodeType, NodeTemplate
-#
-
-def get_inherited_artifact_definitions(context, presentation, for_presentation=None):
-
-    if hasattr(presentation, '_get_type'):
-        # In NodeTemplate
-        parent = presentation._get_type(context)
-    else:
-        # In NodeType
-        parent = presentation._get_parent(context)
-
-    # Get artifact definitions from parent
-    artifacts = get_inherited_artifact_definitions(context, parent, for_presentation=presentation) \
-        if parent is not None else OrderedDict()
-
-    # Add/override our artifact definitions
-    our_artifacts = presentation.artifacts
-    if our_artifacts:
-        for artifact_name, artifact in our_artifacts.iteritems():
-            artifacts[artifact_name] = artifact._clone(for_presentation)
-
-    return artifacts
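
``get_inherited_artifact_definitions`` walks up the type hierarchy and lets each level add to or
override what it inherited. The same merge pattern in a dependency-free sketch, using plain dicts
for the per-type definitions (names are illustrative):

    def inherited_definitions(type_hierarchy):
        # type_hierarchy is ordered root-first, e.g. [root_type, parent, child];
        # later (more derived) entries override earlier ones.
        merged = {}
        for definitions in type_hierarchy:
            merged.update(definitions or {})
        return merged

    root = {'installer': 'scripts/install.sh'}
    child = {'installer': 'scripts/install_v2.sh', 'config': 'scripts/configure.sh'}
    assert inherited_definitions([root, child]) == {
        'installer': 'scripts/install_v2.sh',
        'config': 'scripts/configure.sh',
    }
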
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
deleted file mode 100644
index d75e723..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import deepcopy_with_locators, OrderedDict
-from aria.parser.validation import Issue
-
-from .parameters import (convert_parameter_definitions_to_values, merge_raw_parameter_definitions,
-                         get_assigned_and_defined_parameter_values)
-
-
-#
-# CapabilityType
-#
-
-def get_inherited_valid_source_types(context, presentation):
-    """
-    If we haven't set the ``valid_source_types`` field, returns the value from our parent, if we
-    have one (recursively).
-    """
-
-    valid_source_types = presentation.valid_source_types
-
-    if valid_source_types is None:
-        parent = presentation._get_parent(context)
-        valid_source_types = get_inherited_valid_source_types(context, parent) \
-            if parent is not None else None
-
-    return valid_source_types
-
-
-#
-# NodeType
-#
-
-def get_inherited_capability_definitions(context, presentation, for_presentation=None):
-    """
-    Returns our capability definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding all aspects of parent capability properties except data type.
-    """
-
-    # Get capability definitions from parent
-    parent = presentation._get_parent(context)
-    capability_definitions = get_inherited_capability_definitions(context, parent,
-                                                                  for_presentation=presentation) \
-                                                                  if parent is not None \
-                                                                  else OrderedDict()
-
-    # Add/merge our capability definitions
-    our_capability_definitions = presentation.capabilities
-    if our_capability_definitions:
-        for capability_name, our_capability_definition in our_capability_definitions.iteritems():
-            if capability_name in capability_definitions:
-                capability_definition = capability_definitions[capability_name]
-
-                # Check if we changed the type
-                type1 = capability_definition.type
-                type2 = our_capability_definition.type
-                if type1 != type2:
-                    context.validation.report(
-                        'capability definition changes type from "%s" to "%s" in "%s"'
-                        % (type1, type2, presentation._fullname),
-                        locator=our_capability_definition._locator, level=Issue.BETWEEN_TYPES)
-
-                # Already cloned?
-                #capability_definition = capability_definition._clone(for_presentation)
-                #capability_definitions[capability_name] = capability_definition
-            else:
-                capability_definition = our_capability_definition._clone(for_presentation)
-                if isinstance(capability_definition._raw, basestring):
-                    # Make sure we have a dict
-                    the_type = capability_definition._raw
-                    capability_definition._raw = OrderedDict()
-                    capability_definition._raw['type'] = the_type
-                capability_definitions[capability_name] = capability_definition
-
-            merge_capability_definition_from_type(context, presentation, capability_definition)
-
-    for capability_definition in capability_definitions.itervalues():
-        capability_definition._reset_method_cache()
-
-    return capability_definitions
-
-
-#
-# NodeTemplate
-#
-
-def get_template_capabilities(context, presentation):
-    """
-    Returns the node type's capabilities with our assignments to properties and attributes merged
-    in.
-
-    Capability properties' default values, if available, will be used if we did not assign them.
-
-    Makes sure that required properties indeed end up with a value.
-    """
-
-    capability_assignments = OrderedDict()
-
-    the_type = presentation._get_type(context) # NodeType
-    capability_definitions = the_type._get_capabilities(context) if the_type is not None else None
-
-    # Copy over capability definitions from the type (will initialize properties with default
-    # values)
-    if capability_definitions:
-        for capability_name, capability_definition in capability_definitions.iteritems():
-            capability_assignments[capability_name] = \
-                convert_capability_from_definition_to_assignment(context, capability_definition,
-                                                                 presentation)
-
-    # Fill in our capability assignments
-    our_capability_assignments = presentation.capabilities
-    if our_capability_assignments:
-        for capability_name, our_capability_assignment in our_capability_assignments.iteritems():
-            if capability_name in capability_assignments:
-                capability_assignment = capability_assignments[capability_name]
-
-                # Assign properties
-                values = get_assigned_and_defined_parameter_values(context,
-                                                                   our_capability_assignment,
-                                                                   'property')
-                if values:
-                    capability_assignment._raw['properties'] = values
-            else:
-                context.validation.report(
-                    'capability "%s" not declared at node type "%s" in "%s"'
-                    % (capability_name, presentation.type, presentation._fullname),
-                    locator=our_capability_assignment._locator, level=Issue.BETWEEN_TYPES)
-
-    return capability_assignments
-
-
-#
-# Utils
-#
-
-def convert_capability_from_definition_to_assignment(context, presentation, container):
-    from ..assignments import CapabilityAssignment
-
-    raw = OrderedDict()
-
-    properties = presentation.properties
-    if properties is not None:
-        raw['properties'] = convert_parameter_definitions_to_values(context, properties)
-
-    # TODO attributes
-
-    return CapabilityAssignment(name=presentation._name, raw=raw, container=container)
-
-
-def merge_capability_definition_from_type(context, presentation, capability_definition):
-    raw_properties = OrderedDict()
-
-    # Merge properties from type
-    the_type = capability_definition._get_type(context)
-    type_property_defintions = the_type._get_properties(context)
-    merge_raw_parameter_definitions(context, presentation, raw_properties, type_property_defintions,
-                                    'properties')
-
-    # Merge our properties
-    merge_raw_parameter_definitions(context, presentation, raw_properties,
-                                    capability_definition.properties, 'properties')
-
-    if raw_properties:
-        capability_definition._raw['properties'] = raw_properties
-
-    # Override valid_source_types
-    if capability_definition._raw.get('valid_source_types') is None:
-        valid_source_types = the_type._get_valid_source_types(context)
-        if valid_source_types is not None:
-            capability_definition._raw['valid_source_types'] = \
-                deepcopy_with_locators(valid_source_types)
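
``get_template_capabilities`` starts from the node type's capability definitions (with property
defaults filled in) and overlays the template's own assignments, reporting capabilities the type
never declared. A minimal standalone sketch of that overlay step over plain dicts (hypothetical
names, not the ARIA API):

    def merge_capability_assignments(type_capabilities, template_capabilities):
        # type_capabilities: {capability_name: {property_name: default_value}}
        # template_capabilities: {capability_name: {property_name: assigned_value}}
        merged = {name: dict(props) for name, props in type_capabilities.items()}
        undeclared = []
        for name, assignments in template_capabilities.items():
            if name in merged:
                merged[name].update(assignments)
            else:
                undeclared.append(name)   # ARIA reports these as validation issues
        return merged, undeclared

    defaults = {'host': {'num_cpus': 1, 'mem_size': 2}}
    assigned = {'host': {'mem_size': 8}, 'bogus': {}}
    merged, undeclared = merge_capability_assignments(defaults, assigned)
    assert merged == {'host': {'num_cpus': 1, 'mem_size': 8}}
    assert undeclared == ['bogus']
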
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
deleted file mode 100644
index 9a30cc1..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from aria.modeling.constraints import NodeTemplateConstraint
-from aria.modeling.utils import NodeTemplateContainerHolder
-from aria.modeling.functions import evaluate
-from aria.parser import implements_specification
-
-
-@implements_specification('3.5.2-2', 'tosca-simple-1.0')
-class EvaluatingNodeTemplateConstraint(NodeTemplateConstraint):
-    """
-    A version of :class:`NodeTemplateConstraint` with boilerplate initialization for TOSCA
-    constraints.
-    """
-
-    def __init__(self, property_name, capability_name, constraint, as_list=False):
-        self.property_name = property_name
-        self.capability_name = capability_name
-        self.constraint = constraint
-        self.as_list = as_list
-
-    def matches(self, source_node_template, target_node_template):
-        # TOSCA node template constraints can refer to either capability properties or node
-        # template properties
-        if self.capability_name is not None:
-            # Capability property
-            capability = target_node_template.capability_templates.get(self.capability_name)
-            value = capability.properties.get(self.property_name) \
-                if capability is not None else None # Parameter
-        else:
-            # Node template property
-            value = target_node_template.properties.get(self.property_name) # Parameter
-
-        value = value.value if value is not None else None
-
-        container_holder = NodeTemplateContainerHolder(source_node_template)
-
-        if self.as_list:
-            constraints = []
-            for constraint in self.constraint:
-                evaluation = evaluate(constraint, container_holder)
-                if evaluation is not None:
-                    constraints.append(evaluation.value)
-                else:
-                    constraints.append(constraint)
-            constraint = constraints
-        else:
-            evaluation = evaluate(self.constraint, container_holder)
-            if evaluation is not None:
-                constraint = evaluation.value
-            else:
-                constraint = self.constraint
-
-        return self.matches_evaluated(value, constraint)
-
-    def matches_evaluated(self, value, constraint):
-        raise NotImplementedError
-
-
-class Equal(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return value == constraint
-
-
-class GreaterThan(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return value > constraint
-
-
-class GreaterOrEqual(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return value >= constraint
-
-
-class LessThan(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return value < constraint
-
-
-class LessOrEqual(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return value <= constraint
-
-
-class InRange(EvaluatingNodeTemplateConstraint):
-    def __init__(self, property_name, capability_name, constraint):
-        super(InRange, self).__init__(property_name, capability_name, constraint, as_list=True)
-
-    def matches_evaluated(self, value, constraints):
-        lower, upper = constraints
-        if value < lower:
-            return False
-        if (upper != 'UNBOUNDED') and (value > upper):
-            return False
-        return True
-
-
-class ValidValues(EvaluatingNodeTemplateConstraint):
-    def __init__(self, property_name, capability_name, constraint):
-        super(ValidValues, self).__init__(property_name, capability_name, constraint, as_list=True)
-
-    def matches_evaluated(self, value, constraints):
-        return value in constraints
-
-
-class Length(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return len(value) == constraint
-
-
-class MinLength(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return len(value) >= constraint
-
-
-class MaxLength(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        return len(value) <= constraint
-
-
-class Pattern(EvaluatingNodeTemplateConstraint):
-    def matches_evaluated(self, value, constraint):
-        # From TOSCA 1.0 3.5.2.1:
-        #
-        # "Note: Future drafts of this specification will detail the use of regular expressions and
-        # reference an appropriate standardized grammar."
-        #
-        # So we will just use Python's.
-        return re.match(constraint, unicode(value)) is not None
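
Each constraint class above reduces a TOSCA operator to a small ``matches_evaluated`` comparison.
The same checks in a dependency-free form (a standalone illustration mirroring ``InRange`` and
``Pattern``, not the ARIA classes themselves):

    import re

    def in_range(value, bounds):
        lower, upper = bounds
        return value >= lower and (upper == 'UNBOUNDED' or value <= upper)

    def matches_pattern(value, pattern):
        # Like Pattern.matches_evaluated: the spec leaves the regex grammar open,
        # so Python's own regular expressions are used.
        return re.match(pattern, str(value)) is not None

    assert in_range(4, [2, 'UNBOUNDED'])
    assert not in_range(1, [2, 8])
    assert matches_pattern('host-01', r'host-\d+')
    assert not matches_pattern('db-01', r'host-\d+')
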
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/copy.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/copy.py
deleted file mode 100644
index bd9037f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/copy.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# NodeTemplate, RelationshipTemplate
-#
-
-def get_default_raw_from_copy(presentation, field_name):
-    """
-    Used for the ``_get_default_raw`` field hook.
-    """
-
-    copy = presentation._raw.get('copy')
-    if copy is not None:
-        templates = getattr(presentation._container, field_name)
-        if templates is not None:
-            template = templates.get(copy)
-            if template is not None:
-                return template._raw
-    return None
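
``get_default_raw_from_copy`` resolves a template's ``copy`` keyname by looking up the named
sibling template and using its raw content as the default. A standalone sketch over plain dicts
(the names are illustrative):

    def default_raw_from_copy(raw, templates):
        # raw: the template's own raw dict; templates: sibling templates by name
        copy_name = raw.get('copy')
        if copy_name is not None:
            return templates.get(copy_name)
        return None

    siblings = {'base_host': {'type': 'tosca.nodes.Compute', 'properties': {'mem_size': 4}}}
    assert default_raw_from_copy({'copy': 'base_host'}, siblings) == siblings['base_host']
    assert default_raw_from_copy({}, siblings) is None
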
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
deleted file mode 100644
index ba94c70..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
+++ /dev/null
@@ -1,521 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from aria.utils.collections import OrderedDict
-from aria.utils.formatting import safe_repr
-from aria.utils.type import full_type_name
-from aria.utils.imports import import_fullname
-from aria.parser import implements_specification
-from aria.parser.presentation import (get_locator, validate_primitive)
-from aria.parser.validation import Issue
-
-from .functions import get_function
-from ..presentation.types import get_type_by_full_or_shorthand_name
-
-
-#
-# DataType
-#
-
-def get_inherited_constraints(context, presentation):
-    """
-    If we don't have constraints, returns our parent's constraints (if we have one),
-    recursively.
-
-    Implication: if we define even one constraint, the parent's constraints will not be inherited.
-    """
-
-    constraints = presentation.constraints
-
-    if constraints is None:
-        # If we don't have any, use our parent's
-        parent = presentation._get_parent(context)
-        parent_constraints = get_inherited_constraints(context, parent) \
-            if parent is not None else None
-        if parent_constraints is not None:
-            constraints = parent_constraints
-
-    return constraints
-
-
-def coerce_data_type_value(context, presentation, data_type, entry_schema, constraints, value, # pylint: disable=unused-argument
-                           aspect):
-    """
-    Handles the ``_coerce_data()`` hook for complex data types.
-
-    There are two kinds of handling:
-
-    1. If we have a primitive type as our great ancestor, then we do primitive type coercion, and
-       just check for constraints.
-
-    2. Otherwise, for normal complex data types we return the assigned property values while making
-       sure they are defined in our type. The property definition's default value, if available,
-       will be used if we did not assign it. We also make sure that required definitions indeed end
-       up with a value.
-    """
-
-    primitive_type = data_type._get_primitive_ancestor(context)
-    if primitive_type is not None:
-        # Must be coercible to primitive ancestor
-        value = coerce_to_primitive(context, presentation, primitive_type, constraints, value,
-                                    aspect)
-    else:
-        definitions = data_type._get_properties(context)
-        if isinstance(value, dict):
-            temp = OrderedDict()
-
-            # Fill in our values, but make sure they are defined
-            for name, v in value.iteritems():
-                if name in definitions:
-                    definition = definitions[name]
-                    definition_type = definition._get_type(context)
-                    definition_entry_schema = definition.entry_schema
-                    definition_constraints = definition._get_constraints(context)
-                    temp[name] = coerce_value(context, presentation, definition_type,
-                                              definition_entry_schema, definition_constraints, v,
-                                              aspect)
-                else:
-                    context.validation.report(
-                        'assignment to undefined property "%s" in type "%s" in "%s"'
-                        % (name, data_type._fullname, presentation._fullname),
-                        locator=get_locator(v, value, presentation), level=Issue.BETWEEN_TYPES)
-
-            # Fill in defaults from the definitions, and check if required definitions have not been
-            # assigned
-            for name, definition in definitions.iteritems():
-                if (temp.get(name) is None) and hasattr(definition, 'default') \
-                    and (definition.default is not None):
-                    definition_type = definition._get_type(context)
-                    definition_entry_schema = definition.entry_schema
-                    definition_constraints = definition._get_constraints(context)
-                    temp[name] = coerce_value(context, presentation, definition_type,
-                                              definition_entry_schema, definition_constraints,
-                                              definition.default, 'default')
-
-                if getattr(definition, 'required', False) and (temp.get(name) is None):
-                    context.validation.report(
-                        'required property "%s" in type "%s" is not assigned a value in "%s"'
-                        % (name, data_type._fullname, presentation._fullname),
-                        locator=presentation._get_child_locator('definitions'),
-                        level=Issue.BETWEEN_TYPES)
-
-            value = temp
-        elif value is not None:
-            context.validation.report('value of type "%s" is not a dict in "%s"'
-                                      % (data_type._fullname, presentation._fullname),
-                                      locator=get_locator(value, presentation),
-                                      level=Issue.BETWEEN_TYPES)
-            value = None
-
-    return value
-
-
-def validate_data_type_name(context, presentation):
-    """
-    Makes sure the complex data type's name is not that of a built-in type.
-    """
-
-    name = presentation._name
-    if get_primitive_data_type(name) is not None:
-        context.validation.report('data type name is that of a built-in type: %s'
-                                  % safe_repr(name),
-                                  locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-
-#
-# PropertyDefinition, AttributeDefinition, EntrySchema, DataType
-#
-
-def get_data_type(context, presentation, field_name, allow_none=False):
-    """
-    Returns the type, whether it's a complex data type (a DataType instance) or a primitive (a
-    Python primitive type class).
-
-    If the type is not specified, defaults to :class:`str`, per note in section 3.2.1.1 of the
-    `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #_Toc379455072>`__
-    """
-
-    type_name = getattr(presentation, field_name)
-
-    if type_name is None:
-        if allow_none:
-            return None
-        else:
-            return str
-
-    # Make sure not derived from self
-    if type_name == presentation._name:
-        return None
-
-    # Avoid circular definitions
-    container_data_type = get_container_data_type(presentation)
-    if (container_data_type is not None) and (container_data_type._name == type_name):
-        return None
-
-    # Try complex data type
-    data_type = get_type_by_full_or_shorthand_name(context, type_name, 'data_types')
-    if data_type is not None:
-        return data_type
-
-    # Try primitive data type
-    return get_primitive_data_type(type_name)
-
-
-#
-# PropertyDefinition, EntrySchema
-#
-
-def get_property_constraints(context, presentation):
-    """
-    If we don't have constraints, returns our type's constraints (if we have one), recursively.
-
-    Implication: if we define even one constraint, the type's constraints will not be inherited.
-    """
-
-    constraints = presentation.constraints
-
-    if constraints is None:
-        # If we don't have any, use our type's
-        the_type = presentation._get_type(context)
-        type_constraints = the_type._get_constraints(context) \
-            if hasattr(the_type, '_get_constraints') else None
-        if type_constraints is not None:
-            constraints = type_constraints
-
-    return constraints
-
-
-#
-# ConstraintClause
-#
-
-def apply_constraint_to_value(context, presentation, constraint_clause, value): # pylint: disable=too-many-statements,too-many-return-statements,too-many-branches
-    """
-    Returns false if the value does not conform to the constraint.
-    """
-
-    constraint_key = constraint_clause._raw.keys()[0]
-    the_type = constraint_clause._get_type(context)
-    # PropertyAssignment does not have this:
-    entry_schema = getattr(presentation, 'entry_schema', None)
-
-    def coerce_constraint(constraint):
-        return coerce_value(context, presentation, the_type, entry_schema, None, constraint,
-                            constraint_key)
-
-    def report(message, constraint):
-        context.validation.report('value %s %s per constraint in "%s": %s'
-                                  % (message, safe_repr(constraint),
-                                     presentation._name or presentation._container._name,
-                                     safe_repr(value)),
-                                  locator=presentation._locator, level=Issue.BETWEEN_FIELDS)
-
-    if constraint_key == 'equal':
-        constraint = coerce_constraint(constraint_clause.equal)
-        if value != constraint:
-            report('is not equal to', constraint)
-            return False
-
-    elif constraint_key == 'greater_than':
-        constraint = coerce_constraint(constraint_clause.greater_than)
-        if value <= constraint:
-            report('is not greater than', constraint)
-            return False
-
-    elif constraint_key == 'greater_or_equal':
-        constraint = coerce_constraint(constraint_clause.greater_or_equal)
-        if value < constraint:
-            report('is not greater than or equal to', constraint)
-            return False
-
-    elif constraint_key == 'less_than':
-        constraint = coerce_constraint(constraint_clause.less_than)
-        if value >= constraint:
-            report('is not less than', constraint)
-            return False
-
-    elif constraint_key == 'less_or_equal':
-        constraint = coerce_constraint(constraint_clause.less_or_equal)
-        if value > constraint:
-            report('is not less than or equal to', constraint)
-            return False
-
-    elif constraint_key == 'in_range':
-        lower, upper = constraint_clause.in_range
-        lower, upper = coerce_constraint(lower), coerce_constraint(upper)
-        if value < lower:
-            report('is not greater than or equal to lower bound', lower)
-            return False
-        if (upper != 'UNBOUNDED') and (value > upper):
-            report('is not lesser than or equal to upper bound', upper)
-            return False
-
-    elif constraint_key == 'valid_values':
-        constraint = tuple(coerce_constraint(v) for v in constraint_clause.valid_values)
-        if value not in constraint:
-            report('is not one of', constraint)
-            return False
-
-    elif constraint_key == 'length':
-        constraint = constraint_clause.length
-        try:
-            if len(value) != constraint:
-                report('is not of length', constraint)
-                return False
-        except TypeError:
-            pass # should be validated elsewhere
-
-    elif constraint_key == 'min_length':
-        constraint = constraint_clause.min_length
-        try:
-            if len(value) < constraint:
-                report('has a length lesser than', constraint)
-                return False
-        except TypeError:
-            pass # should be validated elsewhere
-
-    elif constraint_key == 'max_length':
-        constraint = constraint_clause.max_length
-        try:
-            if len(value) > constraint:
-                report('has a length greater than', constraint)
-                return False
-        except TypeError:
-            pass # should be validated elsewhere
-
-    elif constraint_key == 'pattern':
-        constraint = constraint_clause.pattern
-        try:
-            # From TOSCA 1.0 3.5.2.1:
-            #
-            # "Note: Future drafts of this specification will detail the use of regular expressions
-            # and reference an appropriate standardized grammar."
-            #
-            # So we will just use Python's.
-            if re.match(constraint, str(value)) is None:
-                report('does not match regular expression', constraint)
-                return False
-        except re.error:
-            pass # should be validated elsewhere
-
-    return True
-
-
-#
-# Repository
-#
-
-def get_data_type_value(context, presentation, field_name, type_name):
-    the_type = get_type_by_full_or_shorthand_name(context, type_name, 'data_types')
-    if the_type is not None:
-        value = getattr(presentation, field_name)
-        if value is not None:
-            return coerce_data_type_value(context, presentation, the_type, None, None, value, None)
-    else:
-        context.validation.report('field "%s" in "%s" refers to unknown data type "%s"'
-                                  % (field_name, presentation._fullname, type_name),
-                                  locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-    return None
-
-
-#
-# Utils
-#
-
-PRIMITIVE_DATA_TYPES = {
-    # YAML 1.2:
-    'tag:yaml.org,2002:str': unicode,
-    'tag:yaml.org,2002:integer': int,
-    'tag:yaml.org,2002:float': float,
-    'tag:yaml.org,2002:bool': bool,
-    'tag:yaml.org,2002:null': None.__class__,
-
-    # TOSCA aliases:
-    'string': unicode,
-    'integer': int,
-    'float': float,
-    'boolean': bool,
-    'null': None.__class__}
-
-
-@implements_specification('3.2.1-3', 'tosca-simple-1.0')
-def get_primitive_data_type(type_name):
-    """
-    Many of the types we use in this profile are built-in types from the YAML 1.2 specification
-    (i.e., those identified by the "tag:yaml.org,2002" version tag) [YAML-1.2].
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #_Toc373867862>`__
-    """
-
-    return PRIMITIVE_DATA_TYPES.get(type_name)
-
-
-def get_data_type_name(the_type):
-    """
-    Returns the name of the type, whether it's a DataType, a primitive type, or another class.
-    """
-
-    return the_type._name if hasattr(the_type, '_name') else full_type_name(the_type)
-
-
-def coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect=None): # pylint: disable=too-many-return-statements
-    """
-    Returns the value after it's coerced to its type, reporting validation errors if it cannot be
-    coerced.
-
-    Supports both complex data types and primitives.
-
-    Data types can use the ``coerce_value`` extension to hook their own specialized function.
-    If the extension is present, we will delegate to that hook.
-    """
-
-    # TODO: should support models as well as presentations
-
-    is_function, func = get_function(context, presentation, value)
-    if is_function:
-        return func
-
-    if the_type is None:
-        return value
-
-    if the_type == None.__class__:
-        if value is not None:
-            context.validation.report('field "%s" is of type "null" but has a non-null value: %s'
-                                      % (presentation._name, safe_repr(value)),
-                                      locator=presentation._locator, level=Issue.BETWEEN_FIELDS)
-            return None
-
-    # Delegate to 'coerce_value' extension
-    if hasattr(the_type, '_get_extension'):
-        coerce_value_fn_name = the_type._get_extension('coerce_value')
-        if coerce_value_fn_name is not None:
-            if value is None:
-                return None
-            coerce_value_fn = import_fullname(coerce_value_fn_name)
-            return coerce_value_fn(context, presentation, the_type, entry_schema, constraints,
-                                   value, aspect)
-
-    if hasattr(the_type, '_coerce_value'):
-        # Delegate to '_coerce_value' (likely a DataType instance)
-        return the_type._coerce_value(context, presentation, entry_schema, constraints, value,
-                                      aspect)
-
-    # Coerce to primitive type
-    return coerce_to_primitive(context, presentation, the_type, constraints, value, aspect)
-
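# Illustrative sketch (hypothetical stand-ins, not ARIA API): the dispatch order used by
# coerce_value above -- intrinsic function first, then a type-level coercion hook, then plain
# primitive coercion. FancyType and coerce are simplified substitutes for the real machinery.
class FancyType(object):
    @staticmethod
    def _coerce_value(value):
        return {'wrapped': value}

def coerce(the_type, value):
    if callable(value):                      # stands in for the get_function(...) check
        return value
    if the_type is None:
        return value
    if hasattr(the_type, '_coerce_value'):   # stands in for the extension/_coerce_value hooks
        return the_type._coerce_value(value)
    return the_type(value)                   # stands in for coerce_to_primitive(...)

assert coerce(int, '42') == 42
assert coerce(FancyType, 'x') == {'wrapped': 'x'}
assert coerce(None, 'anything') == 'anything'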
-
-def coerce_to_primitive(context, presentation, primitive_type, constraints, value, aspect=None):
-    """
-    Returns the value after it's coerced to a primitive type, translating exceptions to validation
-    errors if it cannot be coerced.
-    """
-
-    if value is None:
-        return None
-
-    try:
-        # Coerce
-        value = validate_primitive(value, primitive_type,
-                                   context.validation.allow_primitive_coersion)
-
-        # Check constraints
-        apply_constraints_to_value(context, presentation, constraints, value)
-    except ValueError as e:
-        report_issue_for_bad_format(context, presentation, primitive_type, value, aspect, e)
-        value = None
-    except TypeError as e:
-        report_issue_for_bad_format(context, presentation, primitive_type, value, aspect, e)
-        value = None
-
-    return value
-
-
-def coerce_to_data_type_class(context, presentation, cls, entry_schema, constraints, value,
-                              aspect=None):
-    """
-    Returns the value after it's coerced to a data type class, reporting validation errors if it
-    cannot be coerced. Constraints will be applied after coercion.
-
-    Will either call a ``_create`` static function in the class, or instantiate it using a
-    constructor if ``_create`` is not available.
-
-    This will usually be called by a ``coerce_value`` extension hook in a :class:`DataType`.
-    """
-
-    try:
-        if hasattr(cls, '_create'):
-            # Instantiate using creator function
-            value = cls._create(context, presentation, entry_schema, constraints, value, aspect)
-        else:
-            # Normal instantiation
-            value = cls(entry_schema, constraints, value, aspect)
-    except ValueError as e:
-        report_issue_for_bad_format(context, presentation, cls, value, aspect, e)
-        value = None
-
-    # Check constraints
-    value = apply_constraints_to_value(context, presentation, constraints, value)
-
-    return value
-
-
-def apply_constraints_to_value(context, presentation, constraints, value):
-    """
-    Applies all constraints to the value. If the value conforms, returns the value. If it does not
-    conform, returns None.
-    """
-
-    if (value is not None) and (constraints is not None):
-        valid = True
-        for constraint in constraints:
-            if not constraint._apply_to_value(context, presentation, value):
-                valid = False
-        if not valid:
-            value = None
-    return value
-
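# Illustrative sketch (hypothetical stand-ins, not ARIA API): apply_constraints_to_value above
# runs every constraint (so every violation can be reported) and only then discards the value.
# GreaterThan is an invented constraint with the same _apply_to_value idea.
class GreaterThan(object):
    def __init__(self, bound):
        self.bound = bound
    def _apply_to_value(self, value):
        return value > self.bound

def apply_constraints(constraints, value):
    if value is None or not constraints:
        return value
    valid = all([c._apply_to_value(value) for c in constraints])  # list comp: no short-circuit
    return value if valid else None

assert apply_constraints([GreaterThan(0), GreaterThan(5)], 10) == 10
assert apply_constraints([GreaterThan(0), GreaterThan(5)], 3) is None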
-
-def get_container_data_type(presentation):
-    if presentation is None:
-        return None
-    if type(presentation).__name__ == 'DataType':
-        return presentation
-    return get_container_data_type(presentation._container)
-
-
-def report_issue_for_bad_format(context, presentation, the_type, value, aspect, e):
-    if aspect == 'default':
-        aspect = '"default" value'
-    elif aspect is not None:
-        aspect = '"%s" aspect' % aspect
-
-    if aspect is not None:
-        context.validation.report('%s for field "%s" is not a valid "%s": %s'
-                                  % (aspect, presentation._name or presentation._container._name,
-                                     get_data_type_name(the_type), safe_repr(value)),
-                                  locator=presentation._locator, level=Issue.BETWEEN_FIELDS,
-                                  exception=e)
-    else:
-        context.validation.report('field "%s" is not a valid "%s": %s'
-                                  % (presentation._name or presentation._container._name,
-                                     get_data_type_name(the_type), safe_repr(value)),
-                                  locator=presentation._locator, level=Issue.BETWEEN_FIELDS,
-                                  exception=e)
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
deleted file mode 100644
index 590c6a0..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
+++ /dev/null
@@ -1,677 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from StringIO import StringIO # Note: cStringIO does not support Unicode
-import re
-
-from aria.utils.collections import FrozenList
-from aria.utils.formatting import (as_raw, safe_repr)
-from aria.utils.type import full_type_name
-from aria.parser import implements_specification
-from aria.parser.exceptions import InvalidValueError
-from aria.parser.validation import Issue
-from aria.modeling.exceptions import CannotEvaluateFunctionException
-from aria.modeling.models import (Node, NodeTemplate, Relationship, RelationshipTemplate)
-from aria.modeling.functions import (Function, Evaluation)
-
-
-#
-# Intrinsic
-#
-
-@implements_specification('4.3.1', 'tosca-simple-1.0')
-class Concat(Function):
-    """
-    The ``concat`` function is used to concatenate two or more string values within a TOSCA
-    service template.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if not isinstance(argument, list):
-            raise InvalidValueError(
-                'function "concat" argument must be a list of string expressions: {0}'
-                .format(safe_repr(argument)),
-                locator=self.locator)
-
-        string_expressions = []
-        for index, an_argument in enumerate(argument):
-            string_expressions.append(parse_string_expression(context, presentation, 'concat',
-                                                              index, None, an_argument))
-        self.string_expressions = FrozenList(string_expressions)
-
-    @property
-    def as_raw(self):
-        string_expressions = []
-        for string_expression in self.string_expressions:
-            if hasattr(string_expression, 'as_raw'):
-                string_expression = as_raw(string_expression)
-            string_expressions.append(string_expression)
-        return {'concat': string_expressions}
-
-    def __evaluate__(self, container_holder):
-        final = True
-        value = StringIO()
-        for e in self.string_expressions:
-            e, final = evaluate(e, final, container_holder)
-            if e is not None:
-                value.write(unicode(e))
-        value = value.getvalue() or u''
-        return Evaluation(value, final)
-
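# Illustrative sketch (hypothetical stand-ins, not ARIA API): Concat.__evaluate__ above joins the
# evaluated pieces, and the result is "final" only if every piece was final. Piece is an invented
# minimal value holder.
class Piece(object):
    def __init__(self, value, final):
        self.value, self.final = value, final

def concat(pieces):
    final = True
    out = []
    for piece in pieces:
        final = final and piece.final
        if piece.value is not None:
            out.append(str(piece.value))
    return ''.join(out), final

assert concat([Piece('a', True), Piece('b', True)]) == ('ab', True)
assert concat([Piece('a', True), Piece(1, False)]) == ('a1', False)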
-
-@implements_specification('4.3.2', 'tosca-simple-1.0')
-class Token(Function):
-    """
-    The ``token`` function is used within a TOSCA service template on a string to parse out
-    (tokenize) substrings separated by one or more token characters within a larger string.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if (not isinstance(argument, list)) or (len(argument) != 3):
-            raise InvalidValueError('function "token" argument must be a list of 3 parameters: {0}'
-                                    .format(safe_repr(argument)),
-                                    locator=self.locator)
-
-        self.string_with_tokens = parse_string_expression(context, presentation, 'token', 0,
-                                                          'the string to tokenize', argument[0])
-        self.string_of_token_chars = parse_string_expression(context, presentation, 'token', 1,
-                                                             'the token separator characters',
-                                                             argument[1])
-        self.substring_index = parse_int(context, presentation, 'token', 2,
-                                         'the 0-based index of the token to return', argument[2])
-
-    @property
-    def as_raw(self):
-        string_with_tokens = self.string_with_tokens
-        if hasattr(string_with_tokens, 'as_raw'):
-            string_with_tokens = as_raw(string_with_tokens)
-        string_of_token_chars = self.string_of_token_chars
-        if hasattr(string_of_token_chars, 'as_raw'):
-            string_of_token_chars = as_raw(string_of_token_chars)
-        return {'token': [string_with_tokens, string_of_token_chars, self.substring_index]}
-
-    def __evaluate__(self, container_holder):
-        final = True
-        string_with_tokens, final = evaluate(self.string_with_tokens, final, container_holder)
-        string_of_token_chars, final = evaluate(self.string_of_token_chars, final, container_holder)
-
-        if string_of_token_chars:
-            regex = '[' + ''.join(re.escape(c) for c in string_of_token_chars) + ']'
-            split = re.split(regex, string_with_tokens)
-            if self.substring_index < len(split):
-                return Evaluation(split[self.substring_index], final)
-
-        raise CannotEvaluateFunctionException()
-
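# Illustrative sketch (standalone, not ARIA API): the tokenization used by Token.__evaluate__
# above -- every separator character is escaped and placed in a single regex character class,
# then the requested 0-based token is returned.
import re

def token(string_with_tokens, token_chars, index):
    regex = '[' + ''.join(re.escape(c) for c in token_chars) + ']'
    split = re.split(regex, string_with_tokens)
    return split[index] if index < len(split) else None

assert token('one,two;three', ',;', 1) == 'two'
assert token('a.b', '.', 0) == 'a'      # '.' is escaped, so it is not a wildcard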
-
-#
-# Property
-#
-
-@implements_specification('4.4.1', 'tosca-simple-1.0')
-class GetInput(Function):
-    """
-    The ``get_input`` function is used to retrieve the values of properties declared within the
-    inputs section of a TOSCA Service Template.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        self.input_property_name = parse_string_expression(context, presentation, 'get_input',
-                                                           None, 'the input property name',
-                                                           argument)
-
-        if isinstance(self.input_property_name, basestring):
-            the_input = context.presentation.get_from_dict('service_template', 'topology_template',
-                                                           'inputs', self.input_property_name)
-            if the_input is None:
-                raise InvalidValueError(
-                    'function "get_input" argument is not a valid input name: {0}'
-                    .format(safe_repr(argument)),
-                    locator=self.locator)
-
-    @property
-    def as_raw(self):
-        return {'get_input': as_raw(self.input_property_name)}
-
-    def __evaluate__(self, container_holder):
-        service = container_holder.service
-        if service is None:
-            raise CannotEvaluateFunctionException()
-
-        value = service.inputs.get(self.input_property_name)
-        if value is not None:
-            value = value.value
-            return Evaluation(value, False) # We never return final evaluations!
-
-        raise InvalidValueError(
-            'function "get_input" argument is not a valid input name: {0}'
-            .format(safe_repr(self.input_property_name)),
-            locator=self.locator)
-
-
-@implements_specification('4.4.2', 'tosca-simple-1.0')
-class GetProperty(Function):
-    """
-    The ``get_property`` function is used to retrieve property values between modelable entities
-    defined in the same service template.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if (not isinstance(argument, list)) or (len(argument) < 2):
-            raise InvalidValueError(
-                'function "get_property" argument must be a list of at least 2 string expressions: '
-                '{0}'.format(safe_repr(argument)),
-                locator=self.locator)
-
-        self.modelable_entity_name = parse_modelable_entity_name(context, presentation,
-                                                                 'get_property', 0, argument[0])
-        # The first of these will be tried as a req-or-cap name:
-        self.nested_property_name_or_index = argument[1:]
-
-    @property
-    def as_raw(self):
-        return {'get_property': [self.modelable_entity_name] + self.nested_property_name_or_index}
-
-    def __evaluate__(self, container_holder):
-        modelable_entities = get_modelable_entities(container_holder, 'get_property', self.locator,
-                                                    self.modelable_entity_name)
-        req_or_cap_name = self.nested_property_name_or_index[0]
-
-        for modelable_entity in modelable_entities:
-            properties = None
-
-            if hasattr(modelable_entity, 'requirement_templates') \
-                and modelable_entity.requirement_templates \
-                and (req_or_cap_name in [v.name for v in modelable_entity.requirement_templates]):
-                for requirement_template in modelable_entity.requirement_templates:
-                    if requirement_template.name == req_or_cap_name:
-                        # First argument refers to a requirement
-                        # TODO: should follow to matched capability in other node...
-                        raise CannotEvaluateFunctionException()
-                        # break
-                nested_property_name_or_index = self.nested_property_name_or_index[1:]
-            elif hasattr(modelable_entity, 'capability_templates') \
-                and modelable_entity.capability_templates \
-                and (req_or_cap_name in modelable_entity.capability_templates):
-                # First argument refers to a capability
-                properties = modelable_entity.capability_templates[req_or_cap_name].properties
-                nested_property_name_or_index = self.nested_property_name_or_index[1:]
-            else:
-                properties = modelable_entity.properties
-                nested_property_name_or_index = self.nested_property_name_or_index
-
-            evaluation = get_modelable_entity_parameter(modelable_entity, properties,
-                                                        nested_property_name_or_index)
-            if evaluation is not None:
-                return evaluation
-
-        raise InvalidValueError(
-            'function "get_property" could not find "{0}" in modelable entity "{1}"'
-            .format('.'.join(self.nested_property_name_or_index), self.modelable_entity_name),
-            locator=self.locator)
-
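# Illustrative sketch (hypothetical data, not ARIA API): how get_property's trailing arguments act
# as a nested path into a properties dict, as in the __evaluate__ method above.
def resolve(properties, path):
    value = properties
    for name_or_index in path:
        if isinstance(value, dict) and name_or_index in value:
            value = value[name_or_index]
        elif isinstance(value, list) and isinstance(name_or_index, int) \
                and name_or_index < len(value):
            value = value[name_or_index]
        else:
            return None                 # not found; the real code reports a validation issue
    return value

props = {'endpoint': {'ports': [8080, 8443]}}
assert resolve(props, ['endpoint', 'ports', 1]) == 8443
assert resolve(props, ['endpoint', 'missing']) is None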
-
-#
-# Attribute
-#
-
-@implements_specification('4.5.1', 'tosca-simple-1.0')
-class GetAttribute(Function):
-    """
-    The ``get_attribute`` function is used to retrieve the values of named attributes declared
-    by the referenced node or relationship template name.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if (not isinstance(argument, list)) or (len(argument) < 2):
-            raise InvalidValueError(
-                'function "get_attribute" argument must be a list of at least 2 string expressions:'
-                ' {0}'.format(safe_repr(argument)),
-                locator=self.locator)
-
-        self.modelable_entity_name = parse_modelable_entity_name(context, presentation,
-                                                                 'get_attribute', 0, argument[0])
-        # The first of these will be tried as a req-or-cap name:
-        self.nested_attribute_name_or_index = argument[1:]
-
-    @property
-    def as_raw(self):
-        return {'get_attribute': [self.modelable_entity_name] + self.nested_attribute_name_or_index}
-
-    def __evaluate__(self, container_holder):
-        modelable_entities = get_modelable_entities(container_holder, 'get_attribute', self.locator,
-                                                    self.modelable_entity_name)
-        for modelable_entity in modelable_entities:
-            attributes = modelable_entity.attributes
-            nested_attribute_name_or_index = self.nested_attribute_name_or_index
-            evaluation = get_modelable_entity_parameter(modelable_entity, attributes,
-                                                        nested_attribute_name_or_index)
-            if evaluation is not None:
-                evaluation.final = False # We never return final evaluations!
-                return evaluation
-
-        raise InvalidValueError(
-            'function "get_attribute" could not find "{0}" in modelable entity "{1}"'
-            .format('.'.join(self.nested_attribute_name_or_index), self.modelable_entity_name),
-            locator=self.locator)
-
-
-#
-# Operation
-#
-
-@implements_specification('4.6.1', 'tosca-simple-1.0') # pylint: disable=abstract-method
-class GetOperationOutput(Function):
-    """
-    The ``get_operation_output`` function is used to retrieve the values of variables exposed /
-    exported from an interface operation.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if (not isinstance(argument, list)) or (len(argument) != 4):
-            raise InvalidValueError(
-                'function "get_operation_output" argument must be a list of 4 parameters: {0}'
-                .format(safe_repr(argument)),
-                locator=self.locator)
-
-        self.modelable_entity_name = parse_string_expression(context, presentation,
-                                                             'get_operation_output', 0,
-                                                             'modelable entity name', argument[0])
-        self.interface_name = parse_string_expression(context, presentation, 'get_operation_output',
-                                                      1, 'the interface name', argument[1])
-        self.operation_name = parse_string_expression(context, presentation, 'get_operation_output',
-                                                      2, 'the operation name', argument[2])
-        self.output_variable_name = parse_string_expression(context, presentation,
-                                                            'get_operation_output', 3,
-                                                            'the output name', argument[3])
-
-    @property
-    def as_raw(self):
-        interface_name = self.interface_name
-        if hasattr(interface_name, 'as_raw'):
-            interface_name = as_raw(interface_name)
-        operation_name = self.operation_name
-        if hasattr(operation_name, 'as_raw'):
-            operation_name = as_raw(operation_name)
-        output_variable_name = self.output_variable_name
-        if hasattr(output_variable_name, 'as_raw'):
-            output_variable_name = as_raw(output_variable_name)
-        return {'get_operation_output': [self.modelable_entity_name, interface_name, operation_name,
-                                         output_variable_name]}
-
-
-#
-# Navigation
-#
-
-@implements_specification('4.7.1', 'tosca-simple-1.0')
-class GetNodesOfType(Function):
-    """
-    The ``get_nodes_of_type`` function can be used to retrieve a list of all known instances of
-    nodes of the declared Node Type.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        self.node_type_name = parse_string_expression(context, presentation, 'get_nodes_of_type',
-                                                      None, 'the node type name', argument)
-
-        if isinstance(self.node_type_name, basestring):
-            node_types = context.presentation.get('service_template', 'node_types')
-            if (node_types is None) or (self.node_type_name not in node_types):
-                raise InvalidValueError(
-                    'function "get_nodes_of_type" argument is not a valid node type name: {0}'
-                    .format(safe_repr(argument)),
-                    locator=self.locator)
-
-    @property
-    def as_raw(self):
-        node_type_name = self.node_type_name
-        if hasattr(node_type_name, 'as_raw'):
-            node_type_name = as_raw(node_type_name)
-        return {'get_nodes_of_type': node_type_name}
-
-    def __evaluate__(self, container):
-        pass
-
-
-#
-# Artifact
-#
-
-@implements_specification('4.8.1', 'tosca-simple-1.0') # pylint: disable=abstract-method
-class GetArtifact(Function):
-    """
-    The ``get_artifact`` function is used to retrieve artifact location between modelable
-    entities defined in the same service template.
-    """
-
-    def __init__(self, context, presentation, argument):
-        self.locator = presentation._locator
-
-        if (not isinstance(argument, list)) or (len(argument) < 2) or (len(argument) > 4):
-            raise InvalidValueError(
-                'function "get_artifact" argument must be a list of 2 to 4 parameters: {0}'
-                .format(safe_repr(argument)),
-                locator=self.locator)
-
-        self.modelable_entity_name = parse_string_expression(context, presentation, 'get_artifact',
-                                                             0, 'modelable entity name',
-                                                             argument[0])
-        self.artifact_name = parse_string_expression(context, presentation, 'get_artifact', 1,
-                                                     'the artifact name', argument[1])
-        self.location = parse_string_expression(context, presentation, 'get_artifact', 2,
-                                                'the location or "LOCAL_FILE"',
-                                                argument[2]) if len(argument) > 2 else None
-        self.remove = parse_bool(context, presentation, 'get_artifact', 3, 'the removal flag',
-                                 argument[3]) if len(argument) > 3 else None
-
-    @property
-    def as_raw(self):
-        artifact_name = self.artifact_name
-        if hasattr(artifact_name, 'as_raw'):
-            artifact_name = as_raw(artifact_name)
-        location = self.location
-        if hasattr(location, 'as_raw'):
-            location = as_raw(location)
-        return {'get_artifact': [self.modelable_entity_name, artifact_name, location, self.remove]}
-
-
-#
-# Utils
-#
-
-def get_function(context, presentation, value):
-    functions = context.presentation.presenter.functions
-    if isinstance(value, dict) and (len(value) == 1):
-        key = value.keys()[0]
-        if key in functions:
-            try:
-                return True, functions[key](context, presentation, value[key])
-            except InvalidValueError as e:
-                context.validation.report(issue=e.issue)
-                return True, None
-    return False, None
-
-
-def parse_string_expression(context, presentation, name, index, explanation, value): # pylint: disable=unused-argument
-    is_function, func = get_function(context, presentation, value)
-    if is_function:
-        return func
-    else:
-        value = str(value)
-    return value
-
-
-def parse_int(context, presentation, name, index, explanation, value): # pylint: disable=unused-argument
-    if not isinstance(value, int):
-        try:
-            value = int(value)
-        except (ValueError, TypeError):
-            raise invalid_value(name, index, 'an integer', explanation, value,
-                                presentation._locator)
-    return value
-
-
-def parse_bool(context, presentation, name, index, explanation, value): # pylint: disable=unused-argument
-    if not isinstance(value, bool):
-        raise invalid_value(name, index, 'a boolean', explanation, value, presentation._locator)
-    return value
-
-
-def parse_modelable_entity_name(context, presentation, name, index, value):
-    value = parse_string_expression(context, presentation, name, index, 'the modelable entity name',
-                                    value)
-    if value == 'SELF':
-        the_self, _ = parse_self(presentation)
-        if the_self is None:
-            raise invalid_modelable_entity_name(name, index, value, presentation._locator,
-                                                'a node template or a relationship template')
-    elif value == 'HOST':
-        _, self_variant = parse_self(presentation)
-        if self_variant != 'node_template':
-            raise invalid_modelable_entity_name(name, index, value, presentation._locator,
-                                                'a node template')
-    elif (value == 'SOURCE') or (value == 'TARGET'):
-        _, self_variant = parse_self(presentation)
-        if self_variant != 'relationship_template':
-            raise invalid_modelable_entity_name(name, index, value, presentation._locator,
-                                                'a relationship template')
-    elif isinstance(value, basestring):
-        node_templates = \
-            context.presentation.get('service_template', 'topology_template', 'node_templates') \
-            or {}
-        relationship_templates = \
-            context.presentation.get('service_template', 'topology_template',
-                                     'relationship_templates') \
-            or {}
-        if (value not in node_templates) and (value not in relationship_templates):
-            raise InvalidValueError(
-                'function "{0}" parameter {1:d} is not a valid modelable entity name: {2}'
-                .format(name, index + 1, safe_repr(value)),
-                locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-    return value
-
-
-def parse_self(presentation):
-    from ..types import (NodeType, RelationshipType)
-    from ..templates import (
-        NodeTemplate as NodeTemplatePresentation,
-        RelationshipTemplate as RelationshipTemplatePresentation
-    )
-
-    if presentation is None:
-        return None, None
-    elif isinstance(presentation, NodeTemplatePresentation) or isinstance(presentation, NodeType):
-        return presentation, 'node_template'
-    elif isinstance(presentation, RelationshipTemplatePresentation) \
-        or isinstance(presentation, RelationshipType):
-        return presentation, 'relationship_template'
-    else:
-        return parse_self(presentation._container)
-
-
-def evaluate(value, final, container_holder):
-    """
-    Calls ``__evaluate__`` and passes on ``final`` state.
-    """
-
-    if hasattr(value, '__evaluate__'):
-        value = value.__evaluate__(container_holder)
-        if not value.final:
-            final = False
-        return value.value, final
-    else:
-        return value, final
-
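# Illustrative sketch (hypothetical stand-ins, not ARIA API): the evaluate() helper above threads
# the "final" flag through nested evaluations -- one non-final result makes the whole result
# non-final. FakeEvaluation/FakeFunction are invented minimal substitutes.
class FakeEvaluation(object):
    def __init__(self, value, final):
        self.value, self.final = value, final

class FakeFunction(object):
    def __init__(self, value, final):
        self._result = FakeEvaluation(value, final)
    def __evaluate__(self, container_holder):
        return self._result

def evaluate_with_final(value, final, container_holder=None):
    if hasattr(value, '__evaluate__'):
        evaluation = value.__evaluate__(container_holder)
        return evaluation.value, final and evaluation.final
    return value, final

assert evaluate_with_final('plain', True) == ('plain', True)
assert evaluate_with_final(FakeFunction('x', False), True) == ('x', False)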
-
-@implements_specification('4.1', 'tosca-simple-1.0')
-def get_modelable_entities(container_holder, name, locator, modelable_entity_name):
-    """
-    The following keywords MAY be used in some TOSCA function in place of a TOSCA Node or
-    Relationship Template name.
-    """
-
-    if modelable_entity_name == 'SELF':
-        return get_self(container_holder, name, locator)
-    elif modelable_entity_name == 'HOST':
-        return get_hosts(container_holder, name, locator)
-    elif modelable_entity_name == 'SOURCE':
-        return get_source(container_holder, name, locator)
-    elif modelable_entity_name == 'TARGET':
-        return get_target(container_holder, name, locator)
-    elif isinstance(modelable_entity_name, basestring):
-        modelable_entities = []
-
-        service = container_holder.service
-        if service is not None:
-            for node in service.nodes.itervalues():
-                if node.node_template.name == modelable_entity_name:
-                    modelable_entities.append(node)
-        else:
-            service_template = container_holder.service_template
-            if service_template is not None:
-                for node_template in service_template.node_templates.itervalues():
-                    if node_template.name == modelable_entity_name:
-                        modelable_entities.append(node_template)
-
-        if not modelable_entities:
-            raise CannotEvaluateFunctionException()
-
-        return modelable_entities
-
-    raise InvalidValueError('function "{0}" could not find modelable entity "{1}"'
-                            .format(name, modelable_entity_name),
-                            locator=locator)
-
-
-def get_self(container_holder, name, locator):
-    """
-    A TOSCA orchestrator will interpret this keyword as the Node or Relationship Template instance
-    that contains the function at the time the function is evaluated.
-    """
-
-    container = container_holder.container
-    if (not isinstance(container, Node)) and \
-        (not isinstance(container, NodeTemplate)) and \
-        (not isinstance(container, Relationship)) and \
-        (not isinstance(container, RelationshipTemplate)):
-        raise InvalidValueError('function "{0}" refers to "SELF" but it is not contained in '
-                                'a node or a relationship: {1}'.format(name,
-                                                                       full_type_name(container)),
-                                locator=locator)
-
-    return [container]
-
-
-def get_hosts(container_holder, name, locator):
-    """
-    A TOSCA orchestrator will interpret this keyword to refer to all of the nodes that "host" the
-    node using this reference (i.e., as identified by its HostedOn relationship).
-
-    Specifically, TOSCA orchestrators that encounter this keyword when evaluating the get_attribute
-    or ``get_property`` functions SHALL search each node along the "HostedOn" relationship chain
-    starting at the immediate node that hosts the node where the function was evaluated (and then
-    that node's host node, and so forth) until a match is found or the "HostedOn" relationship chain
-    ends.
-    """
-
-    container = container_holder.container
-    if (not isinstance(container, Node)) and (not isinstance(container, NodeTemplate)):
-        raise InvalidValueError('function "{0}" refers to "HOST" but it is not contained in '
-                                'a node: {1}'.format(name, full_type_name(container)),
-                                locator=locator)
-
-    if not isinstance(container, Node):
-        # NodeTemplate does not have "host"; we'll wait until instantiation
-        raise CannotEvaluateFunctionException()
-
-    host = container.host
-    if host is None:
-        # We might have a host later
-        raise CannotEvaluateFunctionException()
-
-    return [host]
-
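# Illustrative sketch (hypothetical node objects, not ARIA API): get_hosts above returns only the
# immediate host; the chain search described in the docstring amounts to repeating that single hop
# up the HostedOn chain until an attribute is found or the chain ends.
class FakeNode(object):
    def __init__(self, name, attributes=None, host=None):
        self.name, self.attributes, self.host = name, attributes or {}, host

def find_up_host_chain(node, attribute_name):
    current = node.host
    while current is not None:
        if attribute_name in current.attributes:
            return current.attributes[attribute_name]
        current = current.host
    return None

vm = FakeNode('vm', {'ip_address': '10.0.0.5'})
app = FakeNode('app', host=vm)
assert find_up_host_chain(app, 'ip_address') == '10.0.0.5'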
-
-def get_source(container_holder, name, locator):
-    """
-    A TOSCA orchestrator will interpret this keyword as the Node Template instance that is at the
-    source end of the relationship that contains the referencing function.
-    """
-
-    container = container_holder.container
-    if (not isinstance(container, Relationship)) and \
-        (not isinstance(container, RelationshipTemplate)):
-        raise InvalidValueError('function "{0}" refers to "SOURCE" but it is not contained in '
-                                'a relationship: {1}'.format(name, full_type_name(container)),
-                                locator=locator)
-
-    if not isinstance(container, Relationship):
-        # RelationshipTemplate does not have "source_node"; we'll wait until instantiation
-        raise CannotEvaluateFunctionException()
-
-    return [container.source_node]
-
-
-def get_target(container_holder, name, locator):
-    """
-    A TOSCA orchestrator will interpret this keyword as the Node Template instance that is at the
-    target end of the relationship that contains the referencing function.
-    """
-
-    container = container_holder.container
-    if (not isinstance(container, Relationship)) and \
-        (not isinstance(container, RelationshipTemplate)):
-        raise InvalidValueError('function "{0}" refers to "TARGET" but it is not contained in '
-                                'a relationship: {1}'.format(name, full_type_name(container)),
-                                locator=locator)
-
-    if not isinstance(container, Relationship):
-        # RelationshipTemplate does not have "target_node"; we'll wait until instantiation
-        raise CannotEvaluateFunctionException()
-
-    return [container.target_node]
-
-
-def get_modelable_entity_parameter(modelable_entity, parameters, nested_parameter_name_or_index):
-    if not parameters:
-        return None
-
-    found = True
-    final = True
-    value = parameters
-
-    for name_or_index in nested_parameter_name_or_index:
-        if (isinstance(value, dict) and (name_or_index in value)) \
-            or ((isinstance(value, list) and (name_or_index < len(value)))):
-            value = value[name_or_index] # Parameter
-            # We are not using Parameter.value, but rather Parameter._value, because we want to make
-            # sure to get "final" (it is swallowed by Parameter.value)
-            value, final = evaluate(value._value, final, value)
-        else:
-            found = False
-            break
-
-    return Evaluation(value, final) if found else None
-
-
-def invalid_modelable_entity_name(name, index, value, locator, contexts):
-    return InvalidValueError('function "{0}" parameter {1:d} can be "{2}" only in {3}'
-                             .format(name, index + 1, value, contexts),
-                             locator=locator, level=Issue.FIELD)
-
-
-def invalid_value(name, index, the_type, explanation, value, locator):
-    return InvalidValueError(
-        'function "{0}" {1} is not {2}{3}: {4}'
-        .format(name,
-                'parameter {0:d}'.format(index + 1) if index is not None else 'argument',
-                the_type,
-                ', {0}'.format(explanation) if explanation is not None else '',
-                safe_repr(value)),
-        locator=locator, level=Issue.FIELD)
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
deleted file mode 100644
index e04ac4a..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
+++ /dev/null
@@ -1,522 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
-from aria.parser.presentation import get_locator
-from aria.parser.validation import Issue
-
-from .parameters import (coerce_parameter_value, convert_parameter_definitions_to_values)
-
-
-#
-# InterfaceType
-#
-
-def get_inherited_operations(context, presentation):
-    """
-    Returns our operation definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding all aspects of parent operations except input data types.
-    """
-
-    # Get operations from parent
-    parent = presentation._get_parent(context)
-    operations = get_inherited_operations(context, parent) if parent is not None else OrderedDict()
-
-    # Add/merge our operations
-    our_operations = presentation.operations # OperationDefinition
-    merge_operation_definitions(context, operations, our_operations, presentation._name,
-                                presentation, 'type')
-
-    for operation in operations.itervalues():
-        operation._reset_method_cache()
-
-    return operations
-
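# Illustrative sketch (hypothetical type objects, not ARIA API): get_inherited_operations above
# walks to the root of the type hierarchy and then lets each level add to or override its parent's
# operations. The real helper deep-merges operation definitions; a plain dict update is used here
# for brevity.
from collections import OrderedDict

class FakeType(object):
    def __init__(self, operations, parent=None):
        self.operations, self.parent = operations, parent

def inherited_operations(the_type):
    operations = inherited_operations(the_type.parent) if the_type.parent else OrderedDict()
    operations.update(the_type.operations)   # child entries win over parent entries
    return operations

base = FakeType({'create': 'base.sh', 'delete': 'base.sh'})
child = FakeType({'create': 'child.sh'}, parent=base)
assert inherited_operations(child) == {'create': 'child.sh', 'delete': 'base.sh'}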
-
-#
-# InterfaceDefinition
-#
-
-def get_and_override_input_definitions_from_type(context, presentation):
-    """
-    Returns our input definitions added on top of those of the interface type, if specified.
-
-    Allows overriding all aspects of parent interface type inputs except data types.
-    """
-
-    inputs = OrderedDict()
-
-    # Get inputs from type
-    the_type = presentation._get_type(context) # InterfaceType
-    type_inputs = the_type._get_inputs(context) if the_type is not None else None
-    if type_inputs:
-        for input_name, type_input in type_inputs.iteritems():
-            inputs[input_name] = type_input._clone(presentation)
-
-    # Add/merge our inputs
-    our_inputs = presentation.inputs # PropertyDefinition
-    if our_inputs:
-        merge_input_definitions(context, inputs, our_inputs, presentation._name, None, presentation,
-                                'definition')
-
-    return inputs
-
-
-def get_and_override_operation_definitions_from_type(context, presentation):
-    """
-    Returns our operation definitions added on top of those of the interface type, if specified.
-
-    Allows overriding all aspects of parent interface type operations except input data types.
-    """
-
-    operations = OrderedDict()
-
-    # Get operations from type
-    the_type = presentation._get_type(context) # InterfaceType
-    type_operations = the_type._get_operations(context) if the_type is not None else None
-    if type_operations:
-        for operations_name, type_operation in type_operations.iteritems():
-            operations[operations_name] = type_operation._clone(presentation)
-
-    # Add/merge our operations
-    our_operations = presentation.operations # OperationDefinition
-    merge_operation_definitions(context, operations, our_operations, presentation._name,
-                                presentation, 'definition')
-
-    return operations
-
-
-#
-# NodeType, RelationshipType, GroupType
-#
-
-def get_inherited_interface_definitions(context, presentation, type_name, for_presentation=None):
-    """
-    Returns our interface definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding all aspects of parent interfaces except interface and operation input data
-    types.
-    """
-
-    # Get interfaces from parent
-    parent = presentation._get_parent(context)
-    interfaces = get_inherited_interface_definitions(context, parent, type_name, presentation) \
-        if parent is not None else OrderedDict()
-
-    # Add/merge interfaces from their types
-    merge_interface_definitions_from_their_types(context, interfaces, presentation)
-
-    # Add/merge our interfaces
-    our_interfaces = presentation.interfaces
-    merge_interface_definitions(context, interfaces, our_interfaces, presentation,
-                                for_presentation=for_presentation)
-
-    return interfaces
-
-
-#
-# NodeTemplate, RelationshipTemplate, GroupTemplate
-#
-
-def get_template_interfaces(context, presentation, type_name):
-    """
-    Returns the assigned interface_template values while making sure they are defined in the type.
-    This includes the interfaces themselves, their operations, and inputs for interfaces and
-    operations.
-
-    Interface and operation inputs' default values, if available, will be used if we did not assign
-    them.
-
-    Makes sure that required inputs indeed end up with a value.
-
-    This code is especially complex due to the many levels of nesting involved.
-    """
-
-    template_interfaces = OrderedDict()
-
-    the_type = presentation._get_type(context) # NodeType, RelationshipType, GroupType
-    # InterfaceDefinition (or InterfaceAssignment in the case of RelationshipTemplate):
-    interface_definitions = the_type._get_interfaces(context) if the_type is not None else None
-
-    # Copy over interfaces from the type (will initialize inputs with default values)
-    if interface_definitions is not None:
-        for interface_name, interface_definition in interface_definitions.iteritems():
-            # Note that in the case of a RelationshipTemplate, we will already have the values as
-            # InterfaceAssignment. It will not be converted, just cloned.
-            template_interfaces[interface_name] = \
-                convert_interface_definition_from_type_to_template(context, interface_definition,
-                                                                   presentation)
-
-    # Fill in our interfaces
-    our_interface_assignments = presentation.interfaces
-    if our_interface_assignments:
-        # InterfaceAssignment:
-        for interface_name, our_interface_assignment in our_interface_assignments.iteritems():
-            if interface_name in template_interfaces:
-                interface_assignment = template_interfaces[interface_name] # InterfaceAssignment
-                # InterfaceDefinition (or InterfaceAssignment in the case of RelationshipTemplate):
-                interface_definition = interface_definitions[interface_name]
-                merge_interface(context, presentation, interface_assignment,
-                                our_interface_assignment, interface_definition, interface_name)
-            else:
-                context.validation.report(
-                    'interface definition "%s" not declared at %s "%s" in "%s"'
-                    % (interface_name, type_name, presentation.type, presentation._fullname),
-                    locator=our_interface_assignment._locator, level=Issue.BETWEEN_TYPES)
-
-    # Check that there are no required inputs that we haven't assigned
-    for interface_name, interface_template in template_interfaces.iteritems():
-        if interface_name in interface_definitions:
-            # InterfaceDefinition (or InterfaceAssignment in the case of RelationshipTemplate):
-            interface_definition = interface_definitions[interface_name]
-            our_interface_assignment = our_interface_assignments.get(interface_name) \
-                if our_interface_assignments is not None else None
-            validate_required_inputs(context, presentation, interface_template,
-                                     interface_definition, our_interface_assignment, interface_name)
-
-    return template_interfaces
-
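# Illustrative sketch (plain dicts, not ARIA API): the overall shape of get_template_interfaces
# above -- start from the interface/operation definitions on the type (which carry input defaults)
# and let the template's own assignments override them. The real merging is considerably more
# involved and also validates required inputs.
def resolve_inputs(definition_defaults, assigned):
    resolved = dict(definition_defaults)   # defaults copied from the type's definitions
    resolved.update(assigned or {})        # template assignments win
    return resolved

defaults = {'timeout': 60, 'retries': 3}
assert resolve_inputs(defaults, {'timeout': 10}) == {'timeout': 10, 'retries': 3}
assert resolve_inputs(defaults, None) == defaults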
-
-#
-# Utils
-#
-
-def convert_interface_definition_from_type_to_template(context, presentation, container):
-    from ..assignments import InterfaceAssignment
-
-    if isinstance(presentation, InterfaceAssignment):
-        # Nothing to convert, so just clone
-        return presentation._clone(container)
-
-    raw = convert_interface_definition_from_type_to_raw_template(context, presentation)
-    return InterfaceAssignment(name=presentation._name, raw=raw, container=container)
-
-
-def convert_interface_definition_from_type_to_raw_template(context, presentation): # pylint: disable=invalid-name
-    raw = OrderedDict()
-
-    # Copy default values for inputs
-    inputs = presentation._get_inputs(context)
-    if inputs is not None:
-        raw['inputs'] = convert_parameter_definitions_to_values(context, inputs)
-
-    # Copy operations
-    operations = presentation._get_operations(context)
-    if operations:
-        for operation_name, operation in operations.iteritems():
-            raw[operation_name] = OrderedDict()
-            description = operation.description
-            if description is not None:
-                raw[operation_name]['description'] = deepcopy_with_locators(description._raw)
-            implementation = operation.implementation
-            if implementation is not None:
-                raw[operation_name]['implementation'] = deepcopy_with_locators(implementation._raw)
-            inputs = operation.inputs
-            if inputs is not None:
-                raw[operation_name]['inputs'] = convert_parameter_definitions_to_values(context,
-                                                                                        inputs)
-
-    return raw
-
-
-def convert_requirement_interface_definitions_from_type_to_raw_template(context, raw_requirement, # pylint: disable=invalid-name
-                                                                        interface_definitions):
-    if not interface_definitions:
-        return
-    if 'interfaces' not in raw_requirement:
-        raw_requirement['interfaces'] = OrderedDict()
-    for interface_name, interface_definition in interface_definitions.iteritems():
-        raw_interface = convert_interface_definition_from_type_to_raw_template(context,
-                                                                               interface_definition)
-        if interface_name in raw_requirement['interfaces']:
-            merge(raw_requirement['interfaces'][interface_name], raw_interface)
-        else:
-            raw_requirement['interfaces'][interface_name] = raw_interface
-
-
-def merge_interface(context, presentation, interface_assignment, our_interface_assignment,
-                    interface_definition, interface_name):
-    # Assign/merge interface inputs
-    assign_raw_inputs(context, interface_assignment._raw, our_interface_assignment.inputs,
-                      interface_definition._get_inputs(context), interface_name, None, presentation)
-
-    # Assign operation implementations and inputs
-    our_operation_templates = our_interface_assignment.operations # OperationAssignment
-    # OperationDefinition or OperationAssignment:
-    operation_definitions = interface_definition._get_operations(context) \
-        if hasattr(interface_definition, '_get_operations') else interface_definition.operations
-    if our_operation_templates:
-        # OperationAssignment:
-        for operation_name, our_operation_template in our_operation_templates.iteritems():
-            operation_definition = operation_definitions.get(operation_name) # OperationDefinition
-
-            our_input_assignments = our_operation_template.inputs
-            our_implementation = our_operation_template.implementation
-
-            if operation_definition is None:
-                context.validation.report(
-                    'interface definition "%s" refers to an unknown operation "%s" in "%s"'
-                    % (interface_name, operation_name, presentation._fullname),
-                    locator=our_operation_template._locator, level=Issue.BETWEEN_TYPES)
-
-            if (our_input_assignments is not None) or (our_implementation is not None):
-                # Make sure we have the dict
-                if (operation_name not in interface_assignment._raw) \
-                    or (interface_assignment._raw[operation_name] is None):
-                    interface_assignment._raw[operation_name] = OrderedDict()
-
-            if our_implementation is not None:
-                interface_assignment._raw[operation_name]['implementation'] = \
-                    deepcopy_with_locators(our_implementation._raw)
-
-            # Assign/merge operation inputs
-            input_definitions = operation_definition.inputs \
-                if operation_definition is not None else None
-            assign_raw_inputs(context, interface_assignment._raw[operation_name],
-                              our_input_assignments, input_definitions, interface_name,
-                              operation_name, presentation)
-
-
-def merge_raw_input_definition(context, the_raw_input, our_input, interface_name, operation_name,
-                               presentation, type_name):
-    # Check if we changed the type
-    # TODO: allow a sub-type?
-    input_type1 = the_raw_input.get('type')
-    input_type2 = our_input.type
-    if input_type1 != input_type2:
-        if operation_name is not None:
-            context.validation.report(
-                'interface %s "%s" changes operation input "%s.%s" type from "%s" to "%s" in "%s"'
-                % (type_name, interface_name, operation_name, our_input._name, input_type1,
-                   input_type2, presentation._fullname),
-                locator=our_input._locator, level=Issue.BETWEEN_TYPES)
-        else:
-            context.validation.report(
-                'interface %s "%s" changes input "%s" type from "%s" to "%s" in "%s"'
-                % (type_name, interface_name, our_input._name, input_type1, input_type2,
-                   presentation._fullname),
-                locator=our_input._locator, level=Issue.BETWEEN_TYPES)
-
-    # Merge
-    merge(the_raw_input, our_input._raw)
-
-
-def merge_input_definitions(context, inputs, our_inputs, interface_name, operation_name,
-                            presentation, type_name):
-    for input_name, our_input in our_inputs.iteritems():
-        if input_name in inputs:
-            merge_raw_input_definition(context, inputs[input_name]._raw, our_input, interface_name,
-                                       operation_name, presentation, type_name)
-        else:
-            inputs[input_name] = our_input._clone(presentation)
-
-
-def merge_raw_input_definitions(context, raw_inputs, our_inputs, interface_name, operation_name,
-                                presentation, type_name):
-    for input_name, our_input in our_inputs.iteritems():
-        if input_name in raw_inputs:
-            merge_raw_input_definition(context, raw_inputs[input_name], our_input, interface_name,
-                                       operation_name, presentation, type_name)
-        else:
-            raw_inputs[input_name] = deepcopy_with_locators(our_input._raw)
-
-
-def merge_raw_operation_definition(context, raw_operation, our_operation, interface_name,
-                                   presentation, type_name):
-    if not isinstance(our_operation._raw, dict):
-        # Convert short form to long form
-        raw_operation['implementation'] = deepcopy_with_locators(our_operation._raw)
-        return
-
-    # Add/merge inputs
-    our_operation_inputs = our_operation.inputs
-    if our_operation_inputs:
-        # Make sure we have the dict
-        if ('inputs' not in raw_operation) or (raw_operation.get('inputs') is None):
-            raw_operation['inputs'] = OrderedDict()
-
-        merge_raw_input_definitions(context, raw_operation['inputs'], our_operation_inputs,
-                                    interface_name, our_operation._name, presentation, type_name)
-
-    # Override the description
-    if our_operation._raw.get('description') is not None:
-        raw_operation['description'] = deepcopy_with_locators(our_operation._raw['description'])
-
-    # Add/merge implementation
-    if our_operation._raw.get('implementation') is not None:
-        if raw_operation.get('implementation') is not None:
-            merge(raw_operation['implementation'],
-                  deepcopy_with_locators(our_operation._raw['implementation']))
-        else:
-            raw_operation['implementation'] = \
-                deepcopy_with_locators(our_operation._raw['implementation'])
-
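# Illustrative sketch (standalone, not ARIA API): the short-form handling in
# merge_raw_operation_definition(s) above -- a bare string is treated as the operation's
# implementation and expanded into the long (dict) form before merging.
from collections import OrderedDict

def expand_operation(raw_operation):
    if isinstance(raw_operation, str):
        return OrderedDict((('implementation', raw_operation),))
    return raw_operation

assert expand_operation('scripts/create.sh') == {'implementation': 'scripts/create.sh'}
assert expand_operation({'implementation': 'x.sh', 'inputs': {}}) == \
    {'implementation': 'x.sh', 'inputs': {}}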
-
-def merge_operation_definitions(context, operations, our_operations, interface_name, presentation,
-                                type_name):
-    if not our_operations:
-        return
-    for operation_name, our_operation in our_operations.iteritems():
-        if operation_name in operations:
-            merge_raw_operation_definition(context, operations[operation_name]._raw, our_operation,
-                                           interface_name, presentation, type_name)
-        else:
-            operations[operation_name] = our_operation._clone(presentation)
-
-
-def merge_raw_operation_definitions(context, raw_operations, our_operations, interface_name,
-                                    presentation, type_name):
-    for operation_name, our_operation in our_operations.iteritems():
-        if operation_name in raw_operations:
-            raw_operation = raw_operations[operation_name]
-            if isinstance(raw_operation, basestring):
-                # Convert short form to long form
-                raw_operations[operation_name] = OrderedDict((('implementation', raw_operation),))
-                raw_operation = raw_operations[operation_name]
-            merge_raw_operation_definition(context, raw_operation, our_operation, interface_name,
-                                           presentation, type_name)
-        else:
-            raw_operations[operation_name] = deepcopy_with_locators(our_operation._raw)
-
-
-# From either an InterfaceType or an InterfaceDefinition:
-def merge_interface_definition(context, interface, our_source, presentation, type_name):
-    if hasattr(our_source, 'type'):
-        # Check if we changed the interface type
-        input_type1 = interface.type
-        input_type2 = our_source.type
-        if (input_type1 is not None) and (input_type2 is not None) and (input_type1 != input_type2):
-            context.validation.report(
-                'interface definition "%s" changes type from "%s" to "%s" in "%s"'
-                % (interface._name, input_type1, input_type2, presentation._fullname),
-                locator=our_source._locator, level=Issue.BETWEEN_TYPES)
-
-    # Add/merge inputs
-    our_interface_inputs = our_source._get_inputs(context) \
-        if hasattr(our_source, '_get_inputs') else our_source.inputs
-    if our_interface_inputs:
-        # Make sure we have the dict
-        if ('inputs' not in interface._raw) or (interface._raw.get('inputs') is None):
-            interface._raw['inputs'] = OrderedDict()
-
-        merge_raw_input_definitions(context, interface._raw['inputs'], our_interface_inputs,
-                                    our_source._name, None, presentation, type_name)
-
-    # Add/merge operations
-    our_operations = our_source._get_operations(context) \
-        if hasattr(our_source, '_get_operations') else our_source.operations
-    if our_operations is not None:
-        merge_raw_operation_definitions(context, interface._raw, our_operations, our_source._name,
-                                        presentation, type_name)
-
-
-def merge_interface_definitions(context, interfaces, our_interfaces, presentation,
-                                for_presentation=None):
-    if not our_interfaces:
-        return
-    for name, our_interface in our_interfaces.iteritems():
-        if name in interfaces:
-            merge_interface_definition(context, interfaces[name], our_interface, presentation,
-                                       'definition')
-        else:
-            interfaces[name] = our_interface._clone(for_presentation)
-
-
-def merge_interface_definitions_from_their_types(context, interfaces, presentation):
-    for interface in interfaces.itervalues():
-        the_type = interface._get_type(context) # InterfaceType
-        if the_type is not None:
-            merge_interface_definition(context, interface, the_type, presentation, 'type')
-
-
-def assign_raw_inputs(context, values, assignments, definitions, interface_name, operation_name,
-                      presentation):
-    if not assignments:
-        return
-
-    # Make sure we have the dict
-    if ('inputs' not in values) or (values['inputs'] is None):
-        values['inputs'] = OrderedDict()
-
-    # Assign inputs
-    for input_name, assignment in assignments.iteritems():
-        if (definitions is not None) and (input_name not in definitions):
-            if operation_name is not None:
-                context.validation.report(
-                    'interface definition "%s" assigns a value to an unknown operation input'
-                    ' "%s.%s" in "%s"'
-                    % (interface_name, operation_name, input_name, presentation._fullname),
-                    locator=assignment._locator, level=Issue.BETWEEN_TYPES)
-            else:
-                context.validation.report(
-                    'interface definition "%s" assigns a value to an unknown input "%s" in "%s"'
-                    % (interface_name, input_name, presentation._fullname),
-                    locator=assignment._locator, level=Issue.BETWEEN_TYPES)
-
-        definition = definitions.get(input_name) if definitions is not None else None
-
-        # Note: default value has already been assigned
-
-        # Coerce value
-        values['inputs'][input_name] = coerce_parameter_value(context, assignment, definition,
-                                                              assignment.value)
-
-
-def validate_required_inputs(context, presentation, assignment, definition, original_assignment,
-                             interface_name, operation_name=None):
-    input_definitions = definition.inputs
-    if input_definitions:
-        for input_name, input_definition in input_definitions.iteritems():
-            if input_definition.required:
-                prop = assignment.inputs.get(input_name) \
-                    if ((assignment is not None) and (assignment.inputs is not None)) else None
-                value = prop.value if prop is not None else None
-                value = value.value if value is not None else None
-                if value is None:
-                    if operation_name is not None:
-                        context.validation.report(
-                            'interface definition "%s" does not assign a value to a required'
-                            ' operation input "%s.%s" in "%s"'
-                            % (interface_name, operation_name, input_name, presentation._fullname),
-                            locator=get_locator(original_assignment, presentation._locator),
-                            level=Issue.BETWEEN_TYPES)
-                    else:
-                        context.validation.report(
-                            'interface definition "%s" does not assign a value to a required input'
-                            ' "%s" in "%s"'
-                            % (interface_name, input_name, presentation._fullname),
-                            locator=get_locator(original_assignment, presentation._locator),
-                            level=Issue.BETWEEN_TYPES)
-
-    if operation_name is not None:
-        return
-
-    assignment_operations = assignment.operations
-    operation_definitions = definition._get_operations(context)
-    if operation_definitions:
-        for operation_name, operation_definition in operation_definitions.iteritems():
-            assignment_operation = assignment_operations.get(operation_name) \
-                if assignment_operations is not None else None
-            original_operation = \
-                original_assignment.operations.get(operation_name, original_assignment) \
-                if (original_assignment is not None) \
-                and (original_assignment.operations is not None) \
-                else original_assignment
-            validate_required_inputs(context, presentation, assignment_operation,
-                                     operation_definition, original_operation, interface_name,
-                                     operation_name)
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
deleted file mode 100644
index c910956..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
+++ /dev/null
@@ -1,211 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
-from aria.utils.formatting import pluralize
-from aria.parser.presentation import Value
-from aria.parser.validation import Issue
-
-from .data_types import coerce_value
-
-
-#
-# ArtifactType, DataType, CapabilityType, RelationshipType, NodeType, GroupType, PolicyType
-#
-
-def get_inherited_parameter_definitions(context, presentation, field_name, for_presentation=None):
-    """
-    Returns our parameter definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding all aspects of parent properties except data type.
-    """
-
-    # Get definitions from parent
-    # If we inherit from a primitive, it does not have a parent:
-    parent = presentation._get_parent(context) if hasattr(presentation, '_get_parent') else None
-    definitions = get_inherited_parameter_definitions(context, parent, field_name,
-                                                      for_presentation=presentation) \
-                                                      if parent is not None else OrderedDict()
-
-    # Add/merge our definitions
-    # If we inherit from a primitive, it does not have our field
-    our_definitions = getattr(presentation, field_name, None)
-    if our_definitions:
-        our_definitions_clone = OrderedDict()
-        for name, our_definition in our_definitions.iteritems():
-            our_definitions_clone[name] = our_definition._clone(for_presentation)
-        our_definitions = our_definitions_clone
-        merge_parameter_definitions(context, presentation, definitions, our_definitions, field_name)
-
-    for definition in definitions.itervalues():
-        definition._reset_method_cache()
-
-    return definitions
-
-
-#
-# NodeTemplate, RelationshipTemplate, GroupTemplate, PolicyTemplate
-#
-
-def get_assigned_and_defined_parameter_values(context, presentation, field_name):
-    """
-    Returns the assigned property values while making sure they are defined in our type.
-
-    The property definition's default value, if available, will be used if we did not assign it.
-
-    Makes sure that required properties indeed end up with a value.
-    """
-
-    values = OrderedDict()
-
-    the_type = presentation._get_type(context)
-    field_name_plural = pluralize(field_name)
-    assignments = getattr(presentation, field_name_plural)
-    get_fn_name = '_get_{0}'.format(field_name_plural)
-    definitions = getattr(the_type, get_fn_name)(context) if the_type is not None else None
-
-    # Fill in our assignments, but make sure they are defined
-    if assignments:
-        for name, value in assignments.iteritems():
-            if (definitions is not None) and (name in definitions):
-                definition = definitions[name]
-                values[name] = coerce_parameter_value(context, value, definition, value.value)
-            else:
-                context.validation.report('assignment to undefined {0} "{1}" in "{2}"'
-                                          .format(field_name, name, presentation._fullname),
-                                          locator=value._locator, level=Issue.BETWEEN_TYPES)
-
-    # Fill in defaults from the definitions
-    if definitions:
-        for name, definition in definitions.iteritems():
-            if values.get(name) is None:
-                values[name] = coerce_parameter_value(context, presentation, definition,
-                                                      definition.default)
-
-    validate_required_values(context, presentation, values, definitions)
-
-    return values
-
-
-#
-# TopologyTemplate
-#
-
-def get_parameter_values(context, presentation, field_name):
-    values = OrderedDict()
-
-    parameters = getattr(presentation, field_name)
-
-    # Fill in defaults and values
-    if parameters:
-        for name, parameter in parameters.iteritems():
-            if values.get(name) is None:
-                if hasattr(parameter, 'value') and (parameter.value is not None):
-                    # For parameters only:
-                    values[name] = coerce_parameter_value(context, presentation, parameter,
-                                                          parameter.value)
-                else:
-                    default = parameter.default if hasattr(parameter, 'default') else None
-                    values[name] = coerce_parameter_value(context, presentation, parameter, default)
-
-    return values
-
-
-#
-# Utils
-#
-
-def validate_required_values(context, presentation, values, definitions):
-    """
-    Makes sure that required properties have been assigned a value.
-    """
-
-    if not definitions:
-        return
-    for name, definition in definitions.iteritems():
-        if getattr(definition, 'required', False) \
-            and ((values is None) or (values.get(name) is None)):
-            context.validation.report('required property "%s" is not assigned a value in "%s"'
-                                      % (name, presentation._fullname),
-                                      locator=presentation._get_child_locator('properties'),
-                                      level=Issue.BETWEEN_TYPES)
-
-
-def merge_raw_parameter_definition(context, presentation, raw_property_definition,
-                                   our_property_definition, field_name, property_name):
-    # Check if we changed the type
-    # TODO: allow a sub-type?
-    type1 = raw_property_definition.get('type')
-    type2 = our_property_definition.type
-    if type1 != type2:
-        context.validation.report(
-            'override changes type from "%s" to "%s" for property "%s" in "%s"'
-            % (type1, type2, property_name, presentation._fullname),
-            locator=presentation._get_child_locator(field_name, property_name),
-            level=Issue.BETWEEN_TYPES)
-
-    merge(raw_property_definition, our_property_definition._raw)
-
-
-def merge_raw_parameter_definitions(context, presentation, raw_property_definitions,
-                                    our_property_definitions, field_name):
-    if not our_property_definitions:
-        return
-    for property_name, our_property_definition in our_property_definitions.iteritems():
-        if property_name in raw_property_definitions:
-            raw_property_definition = raw_property_definitions[property_name]
-            merge_raw_parameter_definition(context, presentation, raw_property_definition,
-                                           our_property_definition, field_name, property_name)
-        else:
-            raw_property_definitions[property_name] = \
-                deepcopy_with_locators(our_property_definition._raw)
-
-
-def merge_parameter_definitions(context, presentation, property_definitions,
-                                our_property_definitions, field_name):
-    if not our_property_definitions:
-        return
-    for property_name, our_property_definition in our_property_definitions.iteritems():
-        if property_name in property_definitions:
-            property_definition = property_definitions[property_name]
-            merge_raw_parameter_definition(context, presentation, property_definition._raw,
-                                           our_property_definition, field_name, property_name)
-        else:
-            property_definitions[property_name] = our_property_definition
-
-
-# Works on properties, inputs, and parameters
-def coerce_parameter_value(context, presentation, definition, value, aspect=None):
-    the_type = definition._get_type(context) if definition is not None else None
-    entry_schema = definition.entry_schema if definition is not None else None
-    constraints = definition._get_constraints(context) \
-        if ((definition is not None) and hasattr(definition, '_get_constraints')) else None
-    value = coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect)
-    if (the_type is not None) and hasattr(the_type, '_name'):
-        type_name = the_type._name
-    else:
-        type_name = getattr(definition, 'type', None)
-    description = getattr(definition, 'description', None)
-    description = description.value if description is not None else None
-    return Value(type_name, value, description)
-
-
-def convert_parameter_definitions_to_values(context, definitions):
-    values = OrderedDict()
-    for name, definition in definitions.iteritems():
-        default = definition.default
-        values[name] = coerce_parameter_value(context, definition, definition, default)
-    return values
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
deleted file mode 100644
index 7dd803b..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ..presentation.types import convert_shorthand_to_full_type_name
-
-
-#
-# PolicyType
-#
-
-def get_inherited_targets(context, presentation):
-    """
-    Returns our target node types and group types if we have them, or those of our parent if we
-    have one (recursively).
-    """
-
-    parent = presentation._get_parent(context)
-
-    node_types, group_types = get_inherited_targets(context, parent) \
-        if parent is not None else ([], [])
-
-    our_targets = presentation.targets
-    if our_targets:
-        all_node_types = context.presentation.get('service_template', 'node_types') or {}
-        all_group_types = context.presentation.get('service_template', 'group_types') or {}
-        node_types = []
-        group_types = []
-
-        for our_target in our_targets:
-            if our_target in all_node_types:
-                our_target = convert_shorthand_to_full_type_name(context, our_target,
-                                                                 all_node_types)
-                node_types.append(all_node_types[our_target])
-            elif our_target in all_group_types:
-                our_target = convert_shorthand_to_full_type_name(context, our_target,
-                                                                 all_group_types)
-                group_types.append(all_group_types[our_target])
-
-    return node_types, group_types
-
-
-#
-# PolicyTemplate
-#
-
-def get_policy_targets(context, presentation):
-    """
-    Returns our target node templates and groups if we have them.
-    """
-
-    node_templates = []
-    groups = []
-
-    our_targets = presentation.targets
-    if our_targets:
-        all_node_templates = \
-            context.presentation.get('service_template', 'topology_template', 'node_templates') \
-            or {}
-        all_groups = \
-            context.presentation.get('service_template', 'topology_template', 'groups') \
-            or {}
-
-        for our_target in our_targets:
-            if our_target in all_node_templates:
-                node_templates.append(all_node_templates[our_target])
-            elif our_target in all_groups:
-                groups.append(all_groups[our_target])
-
-    return node_templates, groups
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
deleted file mode 100644
index 6bdb5b1..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.parser.validation import Issue
-from aria.utils.collections import (deepcopy_with_locators, OrderedDict)
-
-from .parameters import (convert_parameter_definitions_to_values, validate_required_values,
-                         coerce_parameter_value)
-from .interfaces import (convert_requirement_interface_definitions_from_type_to_raw_template,
-                         merge_interface_definitions, merge_interface, validate_required_inputs)
-
-
-#
-# NodeType
-#
-
-def get_inherited_requirement_definitions(context, presentation):
-    """
-    Returns our requirement definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding requirement definitions if they have the same name.
-    """
-
-    parent = presentation._get_parent(context)
-    requirement_definitions = get_inherited_requirement_definitions(context, parent) \
-        if parent is not None else []
-
-    our_requirement_definitions = presentation.requirements
-    if our_requirement_definitions:
-        for requirement_name, our_requirement_definition in our_requirement_definitions:
-            # Remove existing requirement definitions of this name if they exist
-            for name, requirement_definition in requirement_definitions:
-                if name == requirement_name:
-                    requirement_definitions.remove((name, requirement_definition))
-
-            requirement_definitions.append((requirement_name, our_requirement_definition))
-
-    return requirement_definitions
-
-
-#
-# NodeTemplate
-#
-
-def get_template_requirements(context, presentation):
-    """
-    Returns our requirements added on top of those of the node type if they exist there.
-
-    If the requirement has a relationship, the relationship properties and interfaces are assigned.
-
-    Returns the assigned property, interface input, and interface operation input values while
-    making sure they are defined in our type. Default values, if available, will be used if we did
-    not assign them. Also makes sure that required properties and inputs indeed end up with a value.
-    """
-
-    requirement_assignments = []
-
-    the_type = presentation._get_type(context) # NodeType
-    requirement_definitions = the_type._get_requirements(context) if the_type is not None else None
-
-    # Add our requirement assignments
-    our_requirement_assignments = presentation.requirements
-    if our_requirement_assignments:
-        add_requirement_assignments(context, presentation, requirement_assignments,
-                                    requirement_definitions, our_requirement_assignments)
-
-    # Validate occurrences
-    if requirement_definitions:
-        for requirement_name, requirement_definition in requirement_definitions:
-            # Allowed occurrences
-            allowed_occurrences = requirement_definition.occurrences
-
-            # Count actual occurrences
-            actual_occurrences = 0
-            for name, _ in requirement_assignments:
-                if name == requirement_name:
-                    actual_occurrences += 1
-
-            if allowed_occurrences is None:
-                # If not specified, we interpret this to mean that exactly 1 occurrence is required
-                if actual_occurrences == 0:
-                    # If it's not there, we will automatically add it (this behavior is not in the
-                    # TOSCA spec, but seems implied)
-                    requirement_assignment, \
-                    relationship_property_definitions, \
-                    relationship_interface_definitions = \
-                        convert_requirement_from_definition_to_assignment(context,
-                                                                          requirement_definition,
-                                                                          None, presentation)
-                    validate_requirement_assignment(context, presentation, requirement_assignment,
-                                                    relationship_property_definitions,
-                                                    relationship_interface_definitions)
-                    requirement_assignments.append((requirement_name, requirement_assignment))
-                elif actual_occurrences > 1:
-                    context.validation.report(
-                        'requirement "%s" is allowed only one occurrence in "%s": %d'
-                        % (requirement_name, presentation._fullname, actual_occurrences),
-                        locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-            else:
-                if not allowed_occurrences.is_in(actual_occurrences):
-                    if allowed_occurrences.value[1] == 'UNBOUNDED':
-                        context.validation.report(
-                            'requirement "%s" does not have at least %d occurrences in "%s": has %d'
-                            % (requirement_name, allowed_occurrences.value[0],
-                               presentation._fullname, actual_occurrences),
-                            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-                    else:
-                        context.validation.report(
-                            'requirement "%s" is allowed between %d and %d occurrences in "%s":'
-                            ' has %d'
-                            % (requirement_name, allowed_occurrences.value[0],
-                               allowed_occurrences.value[1], presentation._fullname,
-                               actual_occurrences),
-                            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-    return requirement_assignments
-
-
-#
-# Utils
-#
-
-def convert_requirement_from_definition_to_assignment(context, requirement_definition, # pylint: disable=too-many-branches
-                                                      our_requirement_assignment, container):
-    from ..assignments import RequirementAssignment
-
-    raw = OrderedDict()
-
-    # Capability type name:
-    raw['capability'] = deepcopy_with_locators(requirement_definition.capability)
-
-    node_type = requirement_definition._get_node_type(context)
-    if node_type is not None:
-        raw['node'] = deepcopy_with_locators(node_type._name)
-
-    relationship_type = None
-    relationship_template = None
-    relationship_property_definitions = None
-    relationship_interface_definitions = None
-
-    # First try to find the relationship if we declared it
-    # RelationshipAssignment:
-    our_relationship = our_requirement_assignment.relationship \
-        if our_requirement_assignment is not None else None
-    if our_relationship is not None:
-        relationship_type, relationship_type_variant = our_relationship._get_type(context)
-        if relationship_type_variant == 'relationship_template':
-            relationship_template = relationship_type
-            relationship_type = relationship_template._get_type(context)
-
-    definition_relationship_type = None
-    relationship_definition = requirement_definition.relationship # RelationshipDefinition
-    if relationship_definition is not None:
-        definition_relationship_type = relationship_definition._get_type(context)
-
-    # If it does not exist, try the one declared at the node type
-    if relationship_type is None:
-        relationship_type = definition_relationship_type
-    else:
-        # Make sure the type is derived
-        if not definition_relationship_type._is_descendant(context, relationship_type):
-            context.validation.report(
-                'assigned relationship type "%s" is not a descendant of declared relationship type'
-                ' "%s"' \
-                % (relationship_type._name, definition_relationship_type._name),
-                locator=container._locator, level=Issue.BETWEEN_TYPES)
-
-    if relationship_type is not None:
-        raw['relationship'] = OrderedDict()
-
-        type_name = our_relationship.type if our_relationship is not None else None
-        if type_name is None:
-            type_name = relationship_type._name
-
-        raw['relationship']['type'] = deepcopy_with_locators(type_name)
-
-        # These are our property definitions
-        relationship_property_definitions = relationship_type._get_properties(context)
-
-        if relationship_template is not None:
-            # Property values from template
-            raw['relationship']['properties'] = relationship_template._get_property_values(context)
-        else:
-            if relationship_property_definitions:
-                # Convert property definitions to values
-                raw['relationship']['properties'] = \
-                    convert_parameter_definitions_to_values(context,
-                                                            relationship_property_definitions)
-
-        # These are our interface definitions
-        # InterfaceDefinition:
-        relationship_interface_definitions = OrderedDict(relationship_type._get_interfaces(context))
-
-        # Convert interface definitions to templates
-        convert_requirement_interface_definitions_from_type_to_raw_template(
-            context,
-            raw['relationship'],
-            relationship_interface_definitions)
-
-        if relationship_definition:
-            # Merge extra interface definitions
-            # InterfaceDefinition:
-            definition_interface_definitions = relationship_definition.interfaces
-            merge_interface_definitions(context, relationship_interface_definitions,
-                                        definition_interface_definitions, requirement_definition,
-                                        container)
-
-        if relationship_template is not None:
-            # Interfaces from template
-            interfaces = relationship_template._get_interfaces(context)
-            if interfaces:
-                raw['relationship']['interfaces'] = OrderedDict()
-                for interface_name, interface in interfaces.iteritems():
-                    raw['relationship']['interfaces'][interface_name] = interface._raw
-
-    return \
-        RequirementAssignment(name=requirement_definition._name, raw=raw, container=container), \
-        relationship_property_definitions, \
-        relationship_interface_definitions
-
-
-def add_requirement_assignments(context, presentation, requirement_assignments,
-                                requirement_definitions, our_requirement_assignments):
-    for requirement_name, our_requirement_assignment in our_requirement_assignments:
-        requirement_definition = get_first_requirement(requirement_definitions, requirement_name)
-        if requirement_definition is not None:
-            requirement_assignment, \
-            relationship_property_definitions, \
-            relationship_interface_definitions = \
-                convert_requirement_from_definition_to_assignment(context, requirement_definition,
-                                                                  our_requirement_assignment,
-                                                                  presentation)
-            merge_requirement_assignment(context,
-                                         relationship_property_definitions,
-                                         relationship_interface_definitions,
-                                         requirement_assignment, our_requirement_assignment)
-            validate_requirement_assignment(context,
-                                            our_requirement_assignment.relationship \
-                                            or our_requirement_assignment,
-                                            requirement_assignment,
-                                            relationship_property_definitions,
-                                            relationship_interface_definitions)
-            requirement_assignments.append((requirement_name, requirement_assignment))
-        else:
-            context.validation.report('requirement "%s" not declared at node type "%s" in "%s"'
-                                      % (requirement_name, presentation.type,
-                                         presentation._fullname),
-                                      locator=our_requirement_assignment._locator,
-                                      level=Issue.BETWEEN_TYPES)
-
-
-def merge_requirement_assignment(context, relationship_property_definitions,
-                                 relationship_interface_definitions, requirement, our_requirement):
-    our_capability = our_requirement.capability
-    if our_capability is not None:
-        requirement._raw['capability'] = deepcopy_with_locators(our_capability)
-
-    our_node = our_requirement.node
-    if our_node is not None:
-        requirement._raw['node'] = deepcopy_with_locators(our_node)
-
-    our_node_filter = our_requirement.node_filter
-    if our_node_filter is not None:
-        requirement._raw['node_filter'] = deepcopy_with_locators(our_node_filter._raw)
-
-    our_relationship = our_requirement.relationship # RelationshipAssignment
-    if (our_relationship is not None) and (our_relationship.type is None):
-        # Make sure we have a dict
-        if 'relationship' not in requirement._raw:
-            requirement._raw['relationship'] = OrderedDict()
-
-        merge_requirement_assignment_relationship(context, our_relationship,
-                                                  relationship_property_definitions,
-                                                  relationship_interface_definitions,
-                                                  requirement, our_relationship)
-
-
-def merge_requirement_assignment_relationship(context, presentation, property_definitions,
-                                              interface_definitions, requirement, our_relationship):
-    our_relationship_properties = our_relationship._raw.get('properties')
-    if our_relationship_properties:
-        # Make sure we have a dict
-        if 'properties' not in requirement._raw['relationship']:
-            requirement._raw['relationship']['properties'] = OrderedDict()
-
-        # Merge our properties
-        for property_name, prop in our_relationship_properties.iteritems():
-            if property_name in property_definitions:
-                definition = property_definitions[property_name]
-                requirement._raw['relationship']['properties'][property_name] = \
-                    coerce_parameter_value(context, presentation, definition, prop)
-            else:
-                context.validation.report(
-                    'relationship property "%s" not declared at definition of requirement "%s"'
-                    ' in "%s"'
-                    % (property_name, requirement._fullname,
-                       presentation._container._container._fullname),
-                    locator=our_relationship._get_child_locator('properties', property_name),
-                    level=Issue.BETWEEN_TYPES)
-
-    our_interfaces = our_relationship.interfaces
-    if our_interfaces:
-        # Make sure we have a dict
-        if 'interfaces' not in requirement._raw['relationship']:
-            requirement._raw['relationship']['interfaces'] = OrderedDict()
-
-        # Merge interfaces
-        for interface_name, our_interface in our_interfaces.iteritems():
-            if interface_name not in requirement._raw['relationship']['interfaces']:
-                requirement._raw['relationship']['interfaces'][interface_name] = OrderedDict()
-
-            if (interface_definitions is not None) and (interface_name in interface_definitions):
-                interface_definition = interface_definitions[interface_name]
-                interface_assignment = requirement.relationship.interfaces[interface_name]
-                merge_interface(context, presentation, interface_assignment, our_interface,
-                                interface_definition, interface_name)
-            else:
-                context.validation.report(
-                    'relationship interface "%s" not declared at definition of requirement "%s"'
-                    ' in "%s"'
-                    % (interface_name, requirement._fullname,
-                       presentation._container._container._fullname),
-                    locator=our_relationship._locator, level=Issue.BETWEEN_TYPES)
-
-
-def validate_requirement_assignment(context, presentation, requirement_assignment,
-                                    relationship_property_definitions,
-                                    relationship_interface_definitions):
-    relationship = requirement_assignment.relationship
-    if relationship is None:
-        return
-
-    validate_required_values(context, presentation, relationship.properties,
-                             relationship_property_definitions)
-
-    if relationship_interface_definitions:
-        for interface_name, relationship_interface_definition \
-            in relationship_interface_definitions.iteritems():
-            interface_assignment = relationship.interfaces.get(interface_name) \
-                if relationship.interfaces is not None else None
-            validate_required_inputs(context, presentation, interface_assignment,
-                                     relationship_interface_definition, None, interface_name)
-
-
-def get_first_requirement(requirement_definitions, name):
-    if requirement_definitions is not None:
-        for requirement_name, requirement_definition in requirement_definitions:
-            if requirement_name == name:
-                return requirement_definition
-    return None
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
deleted file mode 100644
index 8f7ec4c..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.formatting import safe_repr
-from aria.parser.validation import Issue
-
-
-def validate_subtitution_mappings_requirement(context, presentation):
-    if not validate_format(context, presentation, 'requirement'):
-        return
-
-    node_template = get_node_template(context, presentation, 'requirement')
-    if node_template is None:
-        return
-
-    node_type = presentation._container._get_type(context)
-    if node_type is None:
-        return
-
-    requirements = node_type._get_requirements(context)
-    type_requirement = None
-    for name, the_requirement in requirements:
-        if name == presentation._name:
-            type_requirement = the_requirement
-            break
-    if type_requirement is None:
-        context.validation.report(
-            'substitution mappings requirement "%s" is not declared in node type "%s"'
-            % (presentation._name, node_type._name),
-            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-        return
-
-    requirement_name = presentation._raw[1]
-    requirements = node_template._get_requirements(context)
-    requirement = None
-    for name, the_requirement in requirements:
-        if name == requirement_name:
-            requirement = the_requirement
-            break
-
-    if requirement is None:
-        context.validation.report(
-            'substitution mappings requirement "%s" refers to an unknown requirement of node '
-            'template "%s": %s'
-            % (presentation._name, node_template._name, safe_repr(requirement_name)),
-            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-        return
-
-
-def validate_subtitution_mappings_capability(context, presentation):
-    if not validate_format(context, presentation, 'capability'):
-        return
-
-    node_template = get_node_template(context, presentation, 'capability')
-    if node_template is None:
-        return
-
-    node_type = presentation._container._get_type(context)
-    if node_type is None:
-        return
-
-    capabilities = node_type._get_capabilities(context)
-    type_capability = capabilities.get(presentation._name)
-    if type_capability is None:
-        context.validation.report(
-            'substitution mappings capability "%s" is not declared in node type "%s"'
-            % (presentation._name, node_type._name), locator=presentation._locator,
-            level=Issue.BETWEEN_TYPES)
-        return
-
-    capability_name = presentation._raw[1]
-    capabilities = node_template._get_capabilities(context)
-    capability = capabilities.get(capability_name)
-
-    if capability is None:
-        context.validation.report(
-            'substitution mappings capability "%s" refers to an unknown capability of node template'
-            ' "%s": %s'
-            % (presentation._name, node_template._name, safe_repr(capability_name)),
-            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-        return
-
-    type_capability_type = type_capability._get_type(context)
-    capability_type = capability._get_type(context)
-
-    if not type_capability_type._is_descendant(context, capability_type):
-        context.validation.report(
-            'type "%s" of substitution mappings capability "%s" is not a descendant of "%s"'
-            % (capability_type._name, presentation._name, type_capability_type._name),
-            locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-
-#
-# Utils
-#
-
-def validate_format(context, presentation, name):
-    if (not isinstance(presentation._raw, list)) or (len(presentation._raw) != 2) \
-        or (not isinstance(presentation._raw[0], basestring)) \
-        or (not isinstance(presentation._raw[1], basestring)):
-        context.validation.report(
-            'substitution mappings %s "%s" is not a list of 2 strings: %s'
-            % (name, presentation._name, safe_repr(presentation._raw)),
-            locator=presentation._locator, level=Issue.FIELD)
-        return False
-    return True
-
-
-def get_node_template(context, presentation, name):
-    node_template_name = presentation._raw[0]
-    node_template = context.presentation.get_from_dict('service_template', 'topology_template',
-                                                       'node_templates', node_template_name)
-    if node_template is None:
-        context.validation.report(
-            'substitution mappings %s "%s" refers to an unknown node template: %s'
-            % (name, presentation._name, safe_repr(node_template_name)),
-            locator=presentation._locator, level=Issue.FIELD)
-    return node_template
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/extensible.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/extensible.py
deleted file mode 100644
index 63bc02f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/extensible.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.caching import cachedmethod
-from aria.parser.presentation import (Presentation, has_fields, primitive_dict_field)
-
-@has_fields
-class ExtensiblePresentation(Presentation):
-    """
-    A presentation that supports an optional ``_extensions`` dict field.
-    """
-
-    @primitive_dict_field()
-    def _extensions(self):
-        pass
-
-    @cachedmethod
-    def _get_extension(self, name, default=None):
-        extensions = self._extensions
-        return extensions.get(name, default) if extensions is not None else None # pylint: disable=no-member
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_getters.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_getters.py
deleted file mode 100644
index f14164a..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_getters.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.formatting import safe_repr
-from aria.parser.exceptions import InvalidValueError
-
-
-def data_type_class_getter(cls):
-    """
-    Wraps the field value in a specialized data type class.
-
-    Can be used with the :func:`field_getter` decorator.
-    """
-
-    def getter(field, presentation, context=None):
-        raw = field.default_get(presentation, context)
-        if raw is not None:
-            try:
-                return cls(None, None, raw, None)
-            except ValueError as e:
-                raise InvalidValueError(
-                    '%s is not a valid "%s" in "%s": %s'
-                    % (field.full_name, field.full_cls_name, presentation._name, safe_repr(raw)),
-                    cause=e, locator=field.get_locator(raw))
-    return getter
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_validators.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_validators.py
deleted file mode 100644
index be80702..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/field_validators.py
+++ /dev/null
@@ -1,568 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from aria.utils.formatting import safe_repr
-from aria.parser import implements_specification
-from aria.parser.presentation import (report_issue_for_unknown_type, derived_from_validator)
-from aria.parser.validation import Issue
-
-from ..modeling.data_types import (get_primitive_data_type, get_data_type_name, coerce_value,
-                                   get_container_data_type)
-from .types import get_type_by_full_or_shorthand_name, convert_shorthand_to_full_type_name
-
-#
-# NodeTemplate, RelationshipTemplate
-#
-
-@implements_specification('3.7.3.3', 'tosca-simple-1.0')
-def copy_validator(template_type_name, templates_dict_name):
-    """
-    Makes sure that the field refers to an existing template defined in the root presenter.
-
-    Use with the :func:`field_validator` decorator for the ``copy`` field in
-    :class:`NodeTemplate` and :class:`RelationshipTemplate`.
-    """
-
-    def validator_fn(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        # Make sure type exists
-        value = getattr(presentation, field.name)
-        if value is not None:
-            copy = context.presentation.get_from_dict('service_template', 'topology_template',
-                                                      templates_dict_name, value)
-            if copy is None:
-                report_issue_for_unknown_type(context, presentation, template_type_name, field.name)
-            else:
-                if copy.copy is not None:
-                    context.validation.report(
-                        '"copy" field refers to a %s that itself is a copy in "%s": %s'
-                        % (template_type_name, presentation._fullname, safe_repr(value)),
-                        locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-    return validator_fn
-
-#
-# PropertyDefinition, AttributeDefinition, ParameterDefinition, EntrySchema
-#
-
-def data_type_validator(type_name='data type'):
-    """
-    Makes sure that the field refers to a valid data type, whether complex or primitive.
-
-    Used with the :func:`field_validator` decorator for the ``type`` fields in
-    :class:`PropertyDefinition`, :class:`AttributeDefinition`, :class:`ParameterDefinition`,
-    and :class:`EntrySchema`.
-
-    Extra behavior beyond validation: the generated function returns ``True`` if the field is a
-    complex data type.
-    """
-
-    def validator(field, presentation, context):
-        field.default_validate(presentation, context)
-
-        value = getattr(presentation, field.name)
-        if value is not None:
-            # Test for circular definitions
-            container_data_type = get_container_data_type(presentation)
-            if (container_data_type is not None) and (container_data_type._name == value):
-                context.validation.report(
-                    'type of property "%s" creates a circular value hierarchy: %s'
-                    % (presentation._fullname, safe_repr(value)),
-                    locator=presentation._get_child_locator('type'), level=Issue.BETWEEN_TYPES)
-
-            # Can be a complex data type
-            if get_type_by_full_or_shorthand_name(context, value, 'data_types') is not None:
-                return True
-
-            # Can be a primitive data type
-            if get_primitive_data_type(value) is None:
-                report_issue_for_unknown_type(context, presentation, type_name, field.name)
-
-        return False
-
-    return validator
-
-#
-# PropertyDefinition, AttributeDefinition
-#
-
-def entry_schema_validator(field, presentation, context):
-    """
-    According to whether the data type supports ``entry_schema`` (e.g., it is or inherits from
-    list or map), make sure that we either have or don't have a valid data type value.
-
-    Used with the :func:`field_validator` decorator for the ``entry_schema`` field in
-    :class:`PropertyDefinition` and :class:`AttributeDefinition`.
-    """
-
-    field.default_validate(presentation, context)
-
-    def type_uses_entry_schema(the_type):
-        use_entry_schema = the_type._get_extension('use_entry_schema', False) \
-            if hasattr(the_type, '_get_extension') else False
-        if use_entry_schema:
-            return True
-        parent = the_type._get_parent(context) if hasattr(the_type, '_get_parent') else None
-        if parent is None:
-            return False
-        return type_uses_entry_schema(parent)
-
-    value = getattr(presentation, field.name)
-    the_type = presentation._get_type(context)
-    if the_type is None:
-        return
-    use_entry_schema = type_uses_entry_schema(the_type)
-
-    if use_entry_schema:
-        if value is None:
-            context.validation.report(
-                '"entry_schema" does not have a value as required by data type "%s" in "%s"'
-                % (get_data_type_name(the_type), presentation._container._fullname),
-                locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-    else:
-        if value is not None:
-            context.validation.report(
-                '"entry_schema" has a value but it is not used by data type "%s" in "%s"'
-                % (get_data_type_name(the_type), presentation._container._fullname),
-                locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-def data_value_validator(field, presentation, context):
-    """
-    Makes sure that the field contains a valid value according to data type and constraints.
-
-    Used with the :func:`field_validator` decorator for the ``default`` field in
-    :class:`PropertyDefinition` and :class:`AttributeDefinition`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        the_type = presentation._get_type(context)
-        entry_schema = presentation.entry_schema
-        # AttributeDefinition does not have this:
-        constraints = presentation._get_constraints(context) \
-            if hasattr(presentation, '_get_constraints') else None
-        coerce_value(context, presentation, the_type, entry_schema, constraints, value, field.name)
-
-#
-# DataType
-#
-
-_data_type_validator = data_type_validator()
-_data_type_derived_from_validator = derived_from_validator(convert_shorthand_to_full_type_name,
-                                                           'data_types')
-
-def data_type_derived_from_validator(field, presentation, context):
-    """
-    Makes sure that the field refers to a valid parent data type (complex or primitive).
-
-    Used with the :func:`field_validator` decorator for the ``derived_from`` field in
-    :class:`DataType`.
-    """
-
-    if _data_type_validator(field, presentation, context):
-        # Validate derivation only if a complex data type (primitive types have no derivation
-        # hierarchy)
-        _data_type_derived_from_validator(field, presentation, context)
-
-def data_type_constraints_validator(field, presentation, context):
-    """
-    Makes sure that we do not have constraints if we are a complex type (with no primitive
-    ancestor).
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        if presentation._get_primitive_ancestor(context) is None:
-            context.validation.report(
-                'data type "%s" defines constraints but does not have a primitive ancestor'
-                % presentation._fullname,
-                locator=presentation._get_child_locator(field.name), level=Issue.BETWEEN_TYPES)
-
-def data_type_properties_validator(field, presentation, context):
-    """
-    Makes sure that we do not have properties if we have a primitive ancestor.
-
-    Used with the :func:`field_validator` decorator for the ``properties`` field in
-    :class:`DataType`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if values is not None:
-        if presentation._get_primitive_ancestor(context) is not None:
-            context.validation.report(
-                'data type "%s" defines properties even though it has a primitive ancestor'
-                % presentation._fullname,
-                locator=presentation._get_child_locator(field.name), level=Issue.BETWEEN_TYPES)
-
-#
-# ConstraintClause
-#
-
-def constraint_clause_field_validator(field, presentation, context):
-    """
-    Makes sure that the field contains a valid value for the container type.
-
-    Used with the :func:`field_validator` decorator for various fields in :class:`ConstraintClause`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        the_type = presentation._get_type(context)
-        constraints = the_type._get_constraints(context) \
-            if hasattr(the_type, '_get_constraints') else None
-        coerce_value(context, presentation, the_type, None, constraints, value, field.name)
-
-def constraint_clause_in_range_validator(field, presentation, context):
-    """
-    Makes sure that the value is a list with exactly two elements, that the lower bound contains a
-    valid value for the container type, and that the upper bound is either "UNBOUNDED" or a valid
-    value for the container type.
-
-    Used with the :func:`field_validator` decorator for the ``in_range`` field in
-    :class:`ConstraintClause`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if isinstance(values, list):
-        # Make sure list has exactly two elements
-        if len(values) == 2:
-            lower, upper = values
-            the_type = presentation._get_type(context)
-
-            # Lower bound must be coercible
-            lower = coerce_value(context, presentation, the_type, None, None, lower, field.name)
-
-            if upper != 'UNBOUNDED':
-                # Upper bound must also be coercible
-                upper = coerce_value(context, presentation, the_type, None, None, upper, field.name)
-
-                # Second "in_range" value must be greater than first
-                if (lower is not None) and (upper is not None) and (lower >= upper):
-                    context.validation.report(
-                        'upper bound of "in_range" constraint is not greater than the lower bound'
-                        ' in "%s": %s <= %s'
-                        % (presentation._container._fullname, safe_repr(lower), safe_repr(upper)),
-                        locator=presentation._locator, level=Issue.FIELD)
-        else:
-            context.validation.report(
-                'constraint "%s" is not a list of exactly 2 elements in "%s"'
-                % (field.name, presentation._fullname),
-                locator=presentation._get_child_locator(field.name), level=Issue.FIELD)
-
-def constraint_clause_valid_values_validator(field, presentation, context):
-    """
-    Makes sure that the value is a list of valid values for the container type.
-
-    Used with the :func:`field_validator` decorator for the ``valid_values`` field in
-    :class:`ConstraintClause`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if isinstance(values, list):
-        the_type = presentation._get_type(context)
-        for value in values:
-            coerce_value(context, presentation, the_type, None, None, value, field.name)
-
-def constraint_clause_pattern_validator(field, presentation, context):
-    """
-    Makes sure that the value is a valid regular expression.
-
-    Used with the :func:`field_validator` decorator for the ``pattern`` field in
-    :class:`ConstraintClause`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        try:
-            # From TOSCA 1.0 3.5.2.1:
-            #
-            # "Note: Future drafts of this specification will detail the use of regular expressions
-            # and reference an appropriate standardized grammar."
-            #
-            # So we will just use Python's.
-            re.compile(value)
-        except re.error as e:
-            context.validation.report(
-                'constraint "%s" is not a valid regular expression in "%s"'
-                % (field.name, presentation._fullname),
-                locator=presentation._get_child_locator(field.name), level=Issue.FIELD, exception=e)
-
-#
-# RequirementAssignment
-#
-
-def node_template_or_type_validator(field, presentation, context):
-    """
-    Makes sure that the field refers to either a node template or a node type.
-
-    Used with the :func:`field_validator` decorator for the ``node`` field in
-    :class:`RequirementAssignment`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        node_templates = \
-            context.presentation.get('service_template', 'topology_template', 'node_templates') \
-            or {}
-        if (value not in node_templates) and \
-            (get_type_by_full_or_shorthand_name(context, value, 'node_types') is None):
-            report_issue_for_unknown_type(context, presentation, 'node template or node type',
-                                          field.name)
-
-def capability_definition_or_type_validator(field, presentation, context):
-    """
-    Makes sure that the field refers to either a capability assignment name in the node template
-    referred to by the ``node`` field or a general capability type.
-
-    If the value refers to a capability type, make sure the ``node`` field was not assigned.
-
-    Used with the :func:`field_validator` decorator for the ``capability`` field in
-    :class:`RequirementAssignment`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        node, node_variant = presentation._get_node(context)
-        if node_variant == 'node_template':
-            capabilities = node._get_capabilities(context)
-            if value in capabilities:
-                return
-
-        if get_type_by_full_or_shorthand_name(context, value, 'capability_types') is not None:
-            if node is not None:
-                context.validation.report(
-                    '"%s" refers to a capability type even though "node" has a value in "%s"'
-                    % (presentation._name, presentation._container._fullname),
-                    locator=presentation._get_child_locator(field.name), level=Issue.BETWEEN_FIELDS)
-            return
-
-        if node_variant == 'node_template':
-            context.validation.report(
-                'requirement "%s" refers to an unknown capability definition name or capability'
-                ' type in "%s": %s'
-                % (presentation._name, presentation._container._fullname, safe_repr(value)),
-                locator=presentation._get_child_locator(field.name), level=Issue.BETWEEN_TYPES)
-        else:
-            context.validation.report(
-                'requirement "%s" refers to an unknown capability type in "%s": %s'
-                % (presentation._name, presentation._container._fullname, safe_repr(value)),
-                locator=presentation._get_child_locator(field.name), level=Issue.BETWEEN_TYPES)
-
-def node_filter_validator(field, presentation, context):
-    """
-    Makes sure that the field has a value only if "node" refers to a node type.
-
-    Used with the :func:`field_validator` decorator for the ``node_filter`` field in
-    :class:`RequirementAssignment`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        _, node_type_variant = presentation._get_node(context)
-        if node_type_variant != 'node_type':
-            context.validation.report(
-                'requirement "%s" has a node filter even though "node" does not refer to a node'
-                ' type in "%s"'
-                % (presentation._fullname, presentation._container._fullname),
-                locator=presentation._locator, level=Issue.BETWEEN_FIELDS)
-
-#
-# RelationshipAssignment
-#
-
-def relationship_template_or_type_validator(field, presentation, context):
-    """
-    Makes sure that the field refers to either a relationship template or a relationship type.
-
-    Used with the :func:`field_validator` decorator for the ``type`` field in
-    :class:`RelationshipAssignment`.
-    """
-
-    field.default_validate(presentation, context)
-
-    value = getattr(presentation, field.name)
-    if value is not None:
-        relationship_templates = \
-            context.presentation.get('service_template', 'topology_template',
-                                     'relationship_templates') \
-            or {}
-        if (value not in relationship_templates) and \
-            (get_type_by_full_or_shorthand_name(context, value, 'relationship_types') is None):
-            report_issue_for_unknown_type(context, presentation,
-                                          'relationship template or relationship type', field.name)
-
-#
-# PolicyType
-#
-
-def list_node_type_or_group_type_validator(field, presentation, context):
-    """
-    Makes sure that the field's elements refer to either node types or group types.
-
-    Used with the :func:`field_validator` decorator for the ``targets`` field in
-    :class:`PolicyType`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if values is not None:
-        for value in values:
-            if \
-                (get_type_by_full_or_shorthand_name(context, value, 'node_types') is None) and \
-                (get_type_by_full_or_shorthand_name(context, value, 'group_types') is None):
-                report_issue_for_unknown_type(context, presentation, 'node type or group type',
-                                              field.name, value)
-
-#
-# PolicyTemplate
-#
-
-def policy_targets_validator(field, presentation, context):
-    """
-    Makes sure that the field's elements refer to either node templates or groups, and that
-    they match the node types and group types declared in the policy type.
-
-    Used with the :func:`field_validator` decorator for the ``targets`` field in
-    :class:`PolicyTemplate`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if values is not None:
-        for value in values:
-            node_templates = \
-                context.presentation.get('service_template', 'topology_template',
-                                         'node_templates') \
-                or {}
-            groups = context.presentation.get('service_template', 'topology_template', 'groups') \
-                or {}
-            if (value not in node_templates) and (value not in groups):
-                report_issue_for_unknown_type(context, presentation, 'node template or group',
-                                              field.name, value)
-
-            policy_type = presentation._get_type(context)
-            if policy_type is None:
-                break
-
-            node_types, group_types = policy_type._get_targets(context)
-
-            is_valid = False
-
-            if value in node_templates:
-                our_node_type = node_templates[value]._get_type(context)
-                for node_type in node_types:
-                    if node_type._is_descendant(context, our_node_type):
-                        is_valid = True
-                        break
-
-            elif value in groups:
-                our_group_type = groups[value]._get_type(context)
-                for group_type in group_types:
-                    if group_type._is_descendant(context, our_group_type):
-                        is_valid = True
-                        break
-
-            if not is_valid:
-                context.validation.report(
-                    'policy definition target does not match either a node type or a group type'
-                    ' declared in the policy type in "%s": %s'
-                    % (presentation._name, safe_repr(value)),
-                    locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-#
-# NodeFilter
-#
-
-def node_filter_properties_validator(field, presentation, context):
-    """
-    Makes sure that the field's elements refer to defined properties in the target node type.
-
-    Used with the :func:`field_validator` decorator for the ``properties`` field in
-    :class:`NodeFilter`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if values is not None:
-        node_type = presentation._get_node_type(context)
-        if node_type is not None:
-            properties = node_type._get_properties(context)
-            for name, _ in values:
-                if name not in properties:
-                    context.validation.report(
-                        'node filter refers to an unknown property definition in "%s": %s'
-                        % (node_type._name, name),
-                        locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-
-def node_filter_capabilities_validator(field, presentation, context):
-    """
-    Makes sure that the field's elements refer to defined capabilities and properties in the target
-    node type.
-
-    Used with the :func:`field_validator` decorator for the ``capabilities`` field in
-    :class:`NodeFilter`.
-    """
-
-    field.default_validate(presentation, context)
-
-    values = getattr(presentation, field.name)
-    if values is not None: # pylint: disable=too-many-nested-blocks
-        node_type = presentation._get_node_type(context)
-        if node_type is not None:
-            capabilities = node_type._get_capabilities(context)
-            for name, value in values:
-                capability = capabilities.get(name)
-                if capability is not None:
-                    properties = value.properties
-                    capability_properties = capability.properties
-                    if (properties is not None) and (capability_properties is not None):
-                        for property_name, _ in properties:
-                            if property_name not in capability_properties:
-                                context.validation.report(
-                                    'node filter refers to an unknown capability definition'
-                                    ' property in "%s": %s'
-                                    % (node_type._name, property_name),
-                                    locator=presentation._locator, level=Issue.BETWEEN_TYPES)
-                else:
-                    context.validation.report(
-                        'node filter refers to an unknown capability definition in "%s": %s'
-                        % (node_type._name, name),
-                        locator=presentation._locator, level=Issue.BETWEEN_TYPES)
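
All of the validators above share the (field, presentation, context) signature, delegate to
field.default_validate first, and report problems through context.validation.report. As a minimal
sketch of that pattern (the validator name, the lowercase rule, and the Issue import path are
assumptions for illustration, not part of this codebase), a custom validator wired in through the
field_validator decorator could look like this:

# A minimal sketch of a custom field validator following the pattern above.
# The validator and its lowercase rule are hypothetical; the Issue import
# path is assumed rather than taken from this diff.
from aria.parser.validation import Issue  # assumed import location

def lowercase_validator(field, presentation, context):
    # Run the default validation first, as the validators above do.
    field.default_validate(presentation, context)

    value = getattr(presentation, field.name)
    if (value is not None) and (value != value.lower()):
        context.validation.report(
            'field "%s" is not lowercase in "%s"'
            % (field.name, presentation._fullname),
            locator=presentation._get_child_locator(field.name), level=Issue.FIELD)

# Wiring (hypothetical): place @field_validator(lowercase_validator) above a
# field definition in a presentation class, as the template classes below do.
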
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
deleted file mode 100644
index 610e4a0..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def convert_shorthand_to_full_type_name(context, name, types_dict): # pylint: disable=unused-argument
-    """
-    Converts a shorthand type name to its full type name, or else returns it unchanged.
-
-    Works by checking for ``shorthand_name`` in the types' ``_extensions`` field. See also
-    :class:`aria_extension_tosca.v1_0.presentation.extensible.ExtensiblePresentation`.
-
-    Can be used as the conversion function argument in ``type_validator`` and
-    ``derived_from_validator``.
-    """
-
-    if (name is not None) and types_dict and (name not in types_dict):
-        for full_name, the_type in types_dict.iteritems():
-            if hasattr(the_type, '_extensions') and the_type._extensions \
-                and (the_type._extensions.get('shorthand_name') == name):
-                return full_name
-    return name
-
-def get_type_by_full_or_shorthand_name(context, name, *types_dict_names):
-    """
-    Gets a type either by its full name or its shorthand name.
-
-    Works by checking for ``shorthand_name`` in the types' ``_extensions`` field. See also
-    :class:`~aria_extension_tosca.v1_0.presentation.extensible.ExtensiblePresentation`.
-
-    The arguments from the third onwards are used to locate a nested field under
-    ``service_template`` under the root presenter.
-    """
-
-    if name is not None:
-        types_dict = context.presentation.get('service_template', *types_dict_names)
-        if types_dict:
-            the_type = types_dict.get(name)
-            if the_type is not None:
-                # Full name
-                return the_type
-            for the_type in types_dict.itervalues():
-                if hasattr(the_type, '_extensions') and the_type._extensions \
-                    and (the_type._extensions.get('shorthand_name') == name):
-                    # Shorthand name
-                    return the_type
-    return None
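
Both helpers above try the full type name first and then fall back to matching each type's
``shorthand_name`` extension. A usage sketch, assuming a parsed context from an ARIA run and a
profile that declares ``Compute`` as a shorthand for ``tosca.nodes.Compute``:

# Illustrative only: 'context' is assumed to come from an ARIA parsing run and
# 'Compute' is assumed to be declared as a shorthand_name in the loaded profile.
node_type = get_type_by_full_or_shorthand_name(context, 'Compute', 'node_types')
if node_type is None:
    pass  # neither the full name nor any shorthand matched

node_types = context.presentation.get('service_template', 'node_types') or {}
full_name = convert_shorthand_to_full_type_name(context, 'Compute', node_types)
# full_name would be 'tosca.nodes.Compute' if the shorthand is declared;
# otherwise the name is returned unchanged.
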
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presenter.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presenter.py
deleted file mode 100644
index 394e303..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/presenter.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenList, EMPTY_READ_ONLY_LIST
-from aria.utils.caching import cachedmethod
-from aria.parser.presentation import Presenter
-
-from .modeling import create_service_template_model
-from .modeling.functions import (Concat, Token, GetInput, GetProperty, GetAttribute,
-                                 GetOperationOutput, GetNodesOfType, GetArtifact)
-from .templates import ServiceTemplate
-
-class ToscaSimplePresenter1_0(Presenter): # pylint: disable=invalid-name,abstract-method
-    """
-    ARIA presenter for the `TOSCA Simple Profile v1.0 cos01 <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html>`__.
-
-    Supported ``tosca_definitions_version`` values:
-
-    * ``tosca_simple_yaml_1_0``
-    """
-
-    DSL_VERSIONS = ('tosca_simple_yaml_1_0',)
-    ALLOWED_IMPORTED_DSL_VERSIONS = ('tosca_simple_yaml_1_0',)
-    SIMPLE_PROFILE_LOCATION = 'tosca-simple-1.0/tosca-simple-1.0.yaml'
-    SPECIAL_IMPORTS = {
-        'aria-1.0': 'aria-1.0/aria-1.0.yaml'}
-
-    @property
-    @cachedmethod
-    def service_template(self):
-        return ServiceTemplate(raw=self._raw)
-
-    @property
-    @cachedmethod
-    def functions(self):
-        return {
-            'concat': Concat,
-            'token': Token,
-            'get_input': GetInput,
-            'get_property': GetProperty,
-            'get_attribute': GetAttribute,
-            'get_operation_output': GetOperationOutput,
-            'get_nodes_of_type': GetNodesOfType,
-            'get_artifact': GetArtifact}
-
-    # Presentation
-
-    def _dump(self, context):
-        self.service_template._dump(context)
-
-    def _validate(self, context):
-        self.service_template._validate(context)
-
-    # Presenter
-
-    @cachedmethod
-    def _get_import_locations(self, context):
-        import_locations = []
-        if context.presentation.import_profile:
-            import_locations.append(self.SIMPLE_PROFILE_LOCATION)
-        imports = self._get('service_template', 'imports')
-        if imports:
-            import_locations += [self.SPECIAL_IMPORTS.get(i.file, i.file) for i in imports]
-        return FrozenList(import_locations) if import_locations else EMPTY_READ_ONLY_LIST
-
-    @cachedmethod
-    def _get_model(self, context): # pylint: disable=no-self-use
-        return create_service_template_model(context)
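
Note how _get_import_locations above passes each import file name through SPECIAL_IMPORTS before
returning it, so the ``aria-1.0`` alias resolves to a bundled profile path while ordinary file
names pass through unchanged. A small sketch of that lookup (the import_file variable is
illustrative):

# Mirrors the SPECIAL_IMPORTS lookup in _get_import_locations above.
import_file = 'aria-1.0'
location = ToscaSimplePresenter1_0.SPECIAL_IMPORTS.get(import_file, import_file)
assert location == 'aria-1.0/aria-1.0.yaml'

# Any other import name is returned as-is:
assert ToscaSimplePresenter1_0.SPECIAL_IMPORTS.get('my.yaml', 'my.yaml') == 'my.yaml'
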
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/templates.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/templates.py
deleted file mode 100644
index 285e496..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/templates.py
+++ /dev/null
@@ -1,732 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenDict, FrozenList
-from aria.utils.caching import cachedmethod
-from aria.parser import implements_specification
-from aria.parser.presentation import (has_fields, primitive_field, primitive_list_field,
-                                      object_field, object_list_field, object_dict_field,
-                                      object_sequenced_list_field, field_validator,
-                                      type_validator, list_type_validator)
-
-from .assignments import (PropertyAssignment, AttributeAssignment, RequirementAssignment,
-                          CapabilityAssignment, InterfaceAssignment, ArtifactAssignment)
-from .definitions import ParameterDefinition
-from .filters import NodeFilter
-from .misc import (Description, MetaData, Repository, Import, SubstitutionMappings)
-from .modeling.parameters import (get_assigned_and_defined_parameter_values, get_parameter_values)
-from .modeling.interfaces import get_template_interfaces
-from .modeling.requirements import get_template_requirements
-from .modeling.capabilities import get_template_capabilities
-from .modeling.artifacts import get_inherited_artifact_definitions
-from .modeling.policies import get_policy_targets
-from .modeling.copy import get_default_raw_from_copy
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_validators import copy_validator, policy_targets_validator
-from .presentation.types import (convert_shorthand_to_full_type_name,
-                                 get_type_by_full_or_shorthand_name)
-from .types import (ArtifactType, DataType, CapabilityType, InterfaceType, RelationshipType,
-                    NodeType, GroupType, PolicyType)
-
-@has_fields
-@implements_specification('3.7.3', 'tosca-simple-1.0')
-class NodeTemplate(ExtensiblePresentation):
-    """
-    A Node Template specifies the occurrence of a manageable software component as part of an
-    application's topology model which is defined in a TOSCA Service Template. A Node template is an
-    instance of a specified Node Type and can provide customized properties, constraints or
-    operations which override the defaults provided by its Node Type and its implementations.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_NODE_TEMPLATE>`__
-    """
-
-    @field_validator(type_validator('node type', convert_shorthand_to_full_type_name, 'node_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required name of the Node Type the Node Template is based upon.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Node Template.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_list_field(str)
-    def directives(self):
-        """
-        An optional list of directive values to provide processing instructions to orchestrators and
-        tooling.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        An optional list of property value assignments for the Node Template.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @object_dict_field(AttributeAssignment)
-    def attributes(self):
-        """
-        An optional list of attribute value assignments for the Node Template.
-
-        :type: {:obj:`basestring`: :class:`AttributeAssignment`}
-        """
-
-    @object_sequenced_list_field(RequirementAssignment)
-    def requirements(self):
-        """
-        An optional sequenced list of requirement assignments for the Node Template.
-
-        :type: list of (str, :class:`RequirementAssignment`)
-        """
-
-    @object_dict_field(CapabilityAssignment)
-    def capabilities(self):
-        """
-        An optional list of capability assignments for the Node Template.
-
-        :type: {:obj:`basestring`: :class:`CapabilityAssignment`}
-        """
-
-    @object_dict_field(InterfaceAssignment)
-    def interfaces(self):
-        """
-        An optional list of named interface definitions for the Node Template.
-
-        :type: {:obj:`basestring`: :class:`InterfaceAssignment`}
-        """
-
-    @object_dict_field(ArtifactAssignment)
-    def artifacts(self):
-        """
-        An optional list of named artifact definitions for the Node Template.
-
-        :type: {:obj:`basestring`: :class:`ArtifactAssignment`}
-        """
-
-    @object_field(NodeFilter)
-    def node_filter(self):
-        """
-        The optional filter definition that TOSCA orchestrators would use to select the correct
-        target node. This keyname is only valid if the directive has the value of "selectable" set.
-
-        :type: :class:`NodeFilter`
-        """
-
-    @field_validator(copy_validator('node template', 'node_templates'))
-    @primitive_field(str)
-    def copy(self):
-        """
-        The optional (symbolic) name of another node template to copy into (all keynames and values)
-        and use as a basis for this node template.
-
-        :type: :obj:`basestring`
-        """
-
-    @cachedmethod
-    def _get_default_raw(self):
-        return get_default_raw_from_copy(self, 'node_templates')
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'node_types')
-
-    @cachedmethod
-    def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
-
-    @cachedmethod
-    def _get_attribute_default_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'attribute'))
-
-    @cachedmethod
-    def _get_requirements(self, context):
-        return FrozenList(get_template_requirements(context, self))
-
-    @cachedmethod
-    def _get_capabilities(self, context):
-        return FrozenDict(get_template_capabilities(context, self))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_template_interfaces(context, self, 'node template'))
-
-    @cachedmethod
-    def _get_artifacts(self, context):
-        return FrozenDict(get_inherited_artifact_definitions(context, self))
-
-    def _validate(self, context):
-        super(NodeTemplate, self)._validate(context)
-        self._get_property_values(context)
-        self._get_requirements(context)
-        self._get_capabilities(context)
-        self._get_interfaces(context)
-        self._get_artifacts(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'type',
-            'directives',
-            'properties',
-            'attributes',
-            'requirements',
-            'capabilities',
-            'interfaces',
-            'artifacts',
-            'node_filter',
-            'copy'))
-
-@has_fields
-@implements_specification('3.7.4', 'tosca-simple-1.0')
-class RelationshipTemplate(ExtensiblePresentation):
-    """
-    A Relationship Template specifies the occurrence of a manageable relationship between node
-    templates as part of an application's topology model that is defined in a TOSCA Service
-    Template. A Relationship template is an instance of a specified Relationship Type and can
-    provide customized properties, constraints or operations which override the defaults provided by
-    its Relationship Type and its implementations.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_RELATIONSHIP_TEMPLATE>`__
-    """
-
-    @field_validator(type_validator('relationship type', convert_shorthand_to_full_type_name,
-                                    'relationship_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required name of the Relationship Type the Relationship Template is based upon.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Relationship Template.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        An optional list of property assignments for the Relationship Template.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @object_dict_field(AttributeAssignment)
-    def attributes(self):
-        """
-        An optional list of attribute assignments for the Relationship Template.
-
-        :type: {:obj:`basestring`: :class:`AttributeAssignment`}
-        """
-
-    @object_dict_field(InterfaceAssignment)
-    def interfaces(self):
-        """
-        An optional list of named interface definitions for the Node Template.
-
-        ARIA NOTE: Spec is wrong here, should be Relationship Template.
-
-        :type: {:obj:`basestring`: :class:`InterfaceAssignment`}
-        """
-
-    @field_validator(copy_validator('relationship template', 'relationship_templates'))
-    @primitive_field(str)
-    def copy(self):
-        """
-        The optional (symbolic) name of another relationship template to copy into (all keynames and
-        values) and use as a basis for this relationship template.
-
-        :type: :obj:`basestring`
-        """
-
-    @cachedmethod
-    def _get_default_raw(self):
-        return get_default_raw_from_copy(self, 'relationship_templates')
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'relationship_types')
-
-    @cachedmethod
-    def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_template_interfaces(context, self, 'relationship template'))
-
-    def _validate(self, context):
-        super(RelationshipTemplate, self)._validate(context)
-        self._get_property_values(context)
-        self._get_interfaces(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'type',
-            'properties',
-            'attributes',
-            'interfaces',
-            'copy'))
-
-@has_fields
-@implements_specification('3.7.5', 'tosca-simple-1.0')
-class GroupTemplate(ExtensiblePresentation):
-    """
-    A group definition defines a logical grouping of node templates, typically for management
-    purposes, but is separate from the application's topology template.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_GROUP_DEF>`__
-    """
-
-    @field_validator(type_validator('group type', convert_shorthand_to_full_type_name,
-                                    'group_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required name of the group type the group definition is based upon.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the group definition.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        An optional list of property value assignments for the group definition.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @field_validator(list_type_validator('node template', 'topology_template', 'node_templates'))
-    @primitive_list_field(str)
-    def members(self):
-        """
-        The optional list of one or more node template names that are members of this group
-        definition.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @object_dict_field(InterfaceAssignment)
-    def interfaces(self):
-        """
-        An optional list of named interface definitions for the group definition.
-
-        :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'group_types')
-
-    @cachedmethod
-    def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_template_interfaces(context, self, 'group definition'))
-
-    def _validate(self, context):
-        super(GroupTemplate, self)._validate(context)
-        self._get_property_values(context)
-        self._get_interfaces(context)
-
-@has_fields
-@implements_specification('3.7.6', 'tosca-simple-1.0')
-class PolicyTemplate(ExtensiblePresentation):
-    """
-    A policy definition defines a policy that can be associated with a TOSCA topology or top-level
-    entity definition (e.g., group definition, node template, etc.).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_POLICY_DEF>`__
-    """
-
-    @field_validator(type_validator('policy type', convert_shorthand_to_full_type_name,
-                                    'policy_types'))
-    @primitive_field(str, required=True)
-    def type(self):
-        """
-        The required name of the policy type the policy definition is based upon.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the policy definition.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyAssignment)
-    def properties(self):
-        """
-        An optional list of property value assignments for the policy definition.
-
-        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
-        """
-
-    @field_validator(policy_targets_validator)
-    @primitive_list_field(str)
-    def targets(self):
-        """
-        An optional list of valid Node Templates or Groups the Policy can be applied to.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @cachedmethod
-    def _get_type(self, context):
-        return get_type_by_full_or_shorthand_name(context, self.type, 'policy_types')
-
-    @cachedmethod
-    def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
-
-    @cachedmethod
-    def _get_targets(self, context):
-        node_templates, groups = get_policy_targets(context, self)
-        return FrozenList(node_templates), FrozenList(groups)
-
-    def _validate(self, context):
-        super(PolicyTemplate, self)._validate(context)
-        self._get_property_values(context)
-
-@has_fields
-@implements_specification('3.8', 'tosca-simple-1.0')
-class TopologyTemplate(ExtensiblePresentation):
-    """
-    This section defines the topology template of a cloud application. The main ingredients of the
-    topology template are node templates representing components of the application and relationship
-    templates representing links between the components. These elements are defined in the nested
-    ``node_templates`` section and the nested ``relationship_templates`` section, respectively.
-    Furthermore, a topology template allows for defining input parameters, output parameters as well
-    as grouping of node templates.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_TOPOLOGY_TEMPLATE>`__
-    """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the Topology Template.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(ParameterDefinition)
-    def inputs(self):
-        """
-        An optional list of input parameters (i.e., as parameter definitions) for the Topology
-        Template.
-
-        :type: {:obj:`basestring`: :class:`ParameterDefinition`}
-        """
-
-    @object_dict_field(NodeTemplate)
-    def node_templates(self):
-        """
-        An optional list of node template definitions for the Topology Template.
-
-        :type: {:obj:`basestring`: :class:`NodeTemplate`}
-        """
-
-    @object_dict_field(RelationshipTemplate)
-    def relationship_templates(self):
-        """
-        An optional list of relationship templates for the Topology Template.
-
-        :type: {:obj:`basestring`: :class:`RelationshipTemplate`}
-        """
-
-    @object_dict_field(GroupTemplate)
-    def groups(self):
-        """
-        An optional list of Group definitions whose members are node templates defined within this
-        same Topology Template.
-
-        :type: {:obj:`basestring`: :class:`GroupTemplate`}
-        """
-
-    @object_dict_field(PolicyTemplate)
-    def policies(self):
-        """
-        An optional list of Policy definitions for the Topology Template.
-
-        :type: {:obj:`basestring`: :class:`PolicyTemplate`}
-        """
-
-    @object_dict_field(ParameterDefinition)
-    def outputs(self):
-        """
-        An optional list of output parameters (i.e., as parameter definitions) for the Topology
-        Template.
-
-        :type: {:obj:`basestring`: :class:`ParameterDefinition`}
-        """
-
-    @object_field(SubstitutionMappings)
-    def substitution_mappings(self):
-        """
-        An optional declaration that exports the topology template as an implementation of a Node
-        type.
-
-        This also includes the mappings between the external Node Type's named capabilities and
-        requirements and existing implementations of those capabilities and requirements on Node
-        templates declared within the topology template.
-
-        :type: :class:`SubstitutionMappings`
-        """
-
-    @cachedmethod
-    def _get_input_values(self, context):
-        return FrozenDict(get_parameter_values(context, self, 'inputs'))
-
-    @cachedmethod
-    def _get_output_values(self, context):
-        return FrozenDict(get_parameter_values(context, self, 'outputs'))
-
-    def _validate(self, context):
-        super(TopologyTemplate, self)._validate(context)
-        self._get_input_values(context)
-        self._get_output_values(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'inputs',
-            'node_templates',
-            'relationship_templates',
-            'groups',
-            'policies',
-            'outputs',
-            'substitution_mappings'))
-
-@has_fields
-@implements_specification('3.9', 'tosca-simple-1.0')
-class ServiceTemplate(ExtensiblePresentation):
-    """
-    Service template.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ELEMENT_SERVICE_TEMPLATE>`__.
-    """
-
-    @primitive_field(str)
-    @implements_specification('3.9.3.1', 'tosca-simple-1.0')
-    def tosca_definitions_version(self):
-        """
-        Defines the version of the TOSCA Simple Profile specification the template (grammar)
-        complies with.
-
-        See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-        /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-        #_Toc379455047>`__
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(MetaData)
-    def metadata(self):
-        """
-        Defines a section used to declare additional metadata information. Domain-specific TOSCA
-        profile specifications may define keynames that are required for their implementations.
-
-        See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-        /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-        #_Toc379455048>`__
-
-        :type: :class:`MetaData`
-        """
-
-    @object_field(Description)
-    @implements_specification('3.9.3.6', 'tosca-simple-1.0')
-    def description(self):
-        """
-        Declares a description for this Service Template and its contents.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_field()
-    @implements_specification('3.9.3.7', 'tosca-simple-1.0')
-    def dsl_definitions(self):
-        """
-        Declares optional DSL-specific definitions and conventions. For example, in YAML, this
-        allows defining reusable YAML macros (i.e., YAML alias anchors) for use throughout the TOSCA
-        Service Template.
-
-        See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-        /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-        #_Toc397688790>`__
-        """
-
-    @object_dict_field(Repository)
-    @implements_specification('3.9.3.8', 'tosca-simple-1.0')
-    def repositories(self):
-        """
-        Declares the list of external repositories which contain artifacts that are referenced in
-        the service template along with their addresses and necessary credential information used to
-        connect to them in order to retrieve the artifacts.
-
-        :type: {:obj:`basestring`: :class:`Repository`}
-        """
-
-    @object_list_field(Import)
-    @implements_specification('3.9.3.9', 'tosca-simple-1.0')
-    def imports(self):
-        """
-        Declares import statements for external TOSCA Definitions documents. For example, these may
-        be file locations or URIs relative to the service template file within the same TOSCA CSAR
-        file.
-
-        :type: list of :class:`Import`
-        """
-
-    @object_dict_field(ArtifactType)
-    @implements_specification('3.9.3.10', 'tosca-simple-1.0')
-    def artifact_types(self):
-        """
-        This section contains an optional list of artifact type definitions for use in the service
-        template.
-
-        :type: {:obj:`basestring`: :class:`ArtifactType`}
-        """
-
-    @object_dict_field(DataType)
-    @implements_specification('3.9.3.11', 'tosca-simple-1.0')
-    def data_types(self):
-        """
-        Declares a list of optional TOSCA Data Type definitions.
-
-        :type: {:obj:`basestring`: :class:`DataType`}
-        """
-
-    @object_dict_field(CapabilityType)
-    @implements_specification('3.9.3.12', 'tosca-simple-1.0')
-    def capability_types(self):
-        """
-        This section contains an optional list of capability type definitions for use in the service
-        template.
-
-        :type: {:obj:`basestring`: :class:`CapabilityType`}
-        """
-
-    @object_dict_field(InterfaceType)
-    @implements_specification('3.9.3.13', 'tosca-simple-1.0')
-    def interface_types(self):
-        """
-        This section contains an optional list of interface type definitions for use in the service
-        template.
-
-        :type: {:obj:`basestring`: :class:`InterfaceType`}
-        """
-
-    @object_dict_field(RelationshipType)
-    @implements_specification('3.9.3.14', 'tosca-simple-1.0')
-    def relationship_types(self):
-        """
-        This section contains a set of relationship type definitions for use in the service
-        template.
-
-        :type: {:obj:`basestring`: :class:`RelationshipType`}
-        """
-
-    @object_dict_field(NodeType)
-    @implements_specification('3.9.3.15', 'tosca-simple-1.0')
-    def node_types(self):
-        """
-        This section contains a set of node type definitions for use in the service template.
-
-        :type: {:obj:`basestring`: :class:`NodeType`}
-        """
-
-    @object_dict_field(GroupType)
-    @implements_specification('3.9.3.16', 'tosca-simple-1.0')
-    def group_types(self):
-        """
-        This section contains a list of group type definitions for use in the service template.
-
-        :type: {:obj:`basestring`: :class:`GroupType`}
-        """
-
-    @object_dict_field(PolicyType)
-    @implements_specification('3.9.3.17', 'tosca-simple-1.0')
-    def policy_types(self):
-        """
-        This section contains a list of policy type definitions for use in the service template.
-
-        :type: {:obj:`basestring`: :class:`PolicyType`}
-        """
-
-    @object_field(TopologyTemplate)
-    def topology_template(self):
-        """
-        Defines the topology template of an application or service, consisting of node templates
-        that represent the application's or service's components, as well as relationship templates
-        representing relations between the components.
-
-        :type: :class:`TopologyTemplate`
-        """
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'tosca_definitions_version',
-            'metadata',
-            'repositories',
-            'imports',
-            'artifact_types',
-            'data_types',
-            'capability_types',
-            'interface_types',
-            'relationship_types',
-            'node_types',
-            'group_types',
-            'policy_types',
-            'topology_template'))
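
The template classes above rely on ARIA's declarative presentation API: @has_fields turns each
decorated method into a typed accessor over the raw YAML, while decorators such as
@object_dict_field and @field_validator attach parsing and validation behaviour. A toy sketch of
the pattern (the Widget class is hypothetical and not part of the TOSCA extension;
ExtensiblePresentation and PropertyAssignment are assumed importable as in the module above):

# Hypothetical illustration of the declarative field pattern used above.
from aria.parser.presentation import has_fields, primitive_field, object_dict_field
# ExtensiblePresentation and PropertyAssignment are assumed to be imported as in
# templates.py above (from the extension's presentation and assignments modules).

@has_fields
class Widget(ExtensiblePresentation):

    @primitive_field(str, required=True)
    def type(self):
        """
        The required type name of this (hypothetical) widget.

        :type: :obj:`basestring`
        """

    @object_dict_field(PropertyAssignment)
    def properties(self):
        """
        Optional property assignments for the widget, keyed by name.

        :type: {:obj:`basestring`: :class:`PropertyAssignment`}
        """
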
diff --git a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/types.py b/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/types.py
deleted file mode 100644
index 570b89f..0000000
--- a/apache-ariatosca-0.1.1/extensions/aria_extension_tosca/simple_v1_0/types.py
+++ /dev/null
@@ -1,868 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import FrozenDict, FrozenList
-from aria.utils.caching import cachedmethod
-from aria.parser import implements_specification
-from aria.parser.presentation import (has_fields, allow_unknown_fields, primitive_field,
-                                      primitive_list_field, object_field, object_dict_field,
-                                      object_list_field, object_sequenced_list_field,
-                                      object_dict_unknown_fields, field_getter, field_validator,
-                                      list_type_validator, derived_from_validator,
-                                      get_parent_presentation)
-
-from .assignments import ArtifactAssignment
-from .data_types import Version
-from .definitions import (PropertyDefinition, AttributeDefinition, InterfaceDefinition,
-                          RequirementDefinition, CapabilityDefinition, OperationDefinition)
-from .misc import (Description, ConstraintClause)
-from .modeling.artifacts import get_inherited_artifact_definitions
-from .modeling.capabilities import (get_inherited_valid_source_types,
-                                    get_inherited_capability_definitions)
-from .modeling.data_types import (get_data_type, get_inherited_constraints, coerce_data_type_value,
-                                  validate_data_type_name)
-from .modeling.interfaces import (get_inherited_interface_definitions, get_inherited_operations)
-from .modeling.policies import get_inherited_targets
-from .modeling.parameters import get_inherited_parameter_definitions
-from .modeling.requirements import get_inherited_requirement_definitions
-from .presentation.extensible import ExtensiblePresentation
-from .presentation.field_getters import data_type_class_getter
-from .presentation.field_validators import (data_type_derived_from_validator,
-                                            data_type_constraints_validator,
-                                            data_type_properties_validator,
-                                            list_node_type_or_group_type_validator)
-from .presentation.types import convert_shorthand_to_full_type_name
-
-@has_fields
-@implements_specification('3.6.3', 'tosca-simple-1.0')
-class ArtifactType(ExtensiblePresentation):
-    """
-    An Artifact Type is a reusable entity that defines the type of one or more files that are used
-    to define implementation or deployment artifacts that are referenced by nodes or relationships
-    on their operations.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_ARTIFACT_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name, 'artifact_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Artifact Type name the Artifact Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @field_getter(data_type_class_getter(Version))
-    @primitive_field()
-    def version(self):
-        """
-        An optional version for the Artifact Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Artifact Type.
-
-        :type: :class:`Description`
-        """
-
-    @primitive_field(str)
-    def mime_type(self):
-        """
-        The required mime type property for the Artifact Type.
-
-        :type: :obj:`basestring`
-        """
-
-    @primitive_list_field(str)
-    def file_ext(self):
-        """
-        The required file extension property for the Artifact Type.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Artifact Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'artifact_types')
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    def _validate(self, context):
-        super(ArtifactType, self)._validate(context)
-        self._get_properties(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'mime_type',
-            'file_ext',
-            'properties'))
-
-@has_fields
-@implements_specification('3.6.5', 'tosca-simple-1.0')
-class DataType(ExtensiblePresentation):
-    """
-    A Data Type definition defines the schema for new named datatypes in TOSCA.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_DATA_TYPE>`__
-    """
-
-    @field_validator(data_type_derived_from_validator)
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        The optional key used when a datatype is derived from an existing TOSCA Data Type.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Data Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the Data Type.
-
-        :type: :class:`Description`
-        """
-
-    @field_validator(data_type_constraints_validator)
-    @object_list_field(ConstraintClause)
-    def constraints(self):
-        """
-        The optional list of sequenced constraint clauses for the Data Type.
-
-        :type: list of (str, :class:`ConstraintClause`)
-        """
-
-    @field_validator(data_type_properties_validator)
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        The optional list of property definitions that comprise the schema for a complex Data Type
-        in TOSCA.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_data_type(context, self, 'derived_from', allow_none=True)
-
-    @cachedmethod
-    def _get_primitive_ancestor(self, context):
-        parent = self._get_parent(context)
-        if parent is not None:
-            if not isinstance(parent, DataType):
-                return parent
-            else:
-                return parent._get_primitive_ancestor(context) # pylint: disable=no-member
-        return None
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_constraints(self, context):
-        return get_inherited_constraints(context, self)
-
-    def _validate(self, context):
-        super(DataType, self)._validate(context)
-        validate_data_type_name(context, self)
-        self._get_properties(context)
-
-    def _coerce_value(self, context, presentation, entry_schema, constraints, value, aspect):
-        return coerce_data_type_value(context, presentation, self, entry_schema, constraints, value,
-                                      aspect)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'constraints',
-            'properties'))
-
-@has_fields
-@implements_specification('3.6.6', 'tosca-simple-1.0')
-class CapabilityType(ExtensiblePresentation):
-    """
-    A Capability Type is a reusable entity that describes a kind of capability that a Node Type can
-    declare to expose. Requirements (implicit or explicit) that are declared as part of one node can
-    be matched to (i.e., fulfilled by) the Capabilities declared by another node.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_CAPABILITY_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name,
-                                            'capability_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent capability type name this new Capability Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Capability Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Capability Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Capability Type.
-
-        ARIA NOTE: The spec says 'list', but the examples are all of dicts.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_field(AttributeDefinition)
-    def attributes(self):
-        """
-        An optional list of attribute definitions for the Capability Type.
-
-        :type: {:obj:`basestring`: :class:`AttributeDefinition`}
-        """
-
-    @field_validator(list_type_validator('node type', convert_shorthand_to_full_type_name,
-                                         'node_types'))
-    @primitive_list_field(str)
-    def valid_source_types(self):
-        """
-        An optional list of one or more valid names of Node Types that are supported as valid
-        sources of any relationship established to the declared Capability Type.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'capability_types')
-
-    @cachedmethod
-    def _is_descendant(self, context, the_type):
-        if the_type is None:
-            return False
-        elif the_type._name == self._name:
-            return True
-        return self._is_descendant(context, the_type._get_parent(context))
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_valid_source_types(self, context):
-        return get_inherited_valid_source_types(context, self)
-
-    def _validate(self, context):
-        super(CapabilityType, self)._validate(context)
-        self._get_properties(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'valid_source_types',
-            'properties',
-            'attributes'))
-
-@allow_unknown_fields
-@has_fields
-@implements_specification('3.6.4', 'tosca-simple-1.0')
-class InterfaceType(ExtensiblePresentation):
-    """
-    An Interface Type is a reusable entity that describes a set of operations that can be used to
-    interact with or manage a node or relationship in a TOSCA topology.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_INTERFACE_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name, 'interface_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Interface Type name this new Interface Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Interface Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Interface Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def inputs(self):
-        """
-        The optional list of input parameter definitions.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_unknown_fields(OperationDefinition)
-    def operations(self):
-        """
-        :type: {:obj:`basestring`: :class:`OperationDefinition`}
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'interface_types')
-
-    @cachedmethod
-    def _get_inputs(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'inputs'))
-
-    @cachedmethod
-    def _get_operations(self, context):
-        return FrozenDict(get_inherited_operations(context, self))
-
-    def _validate(self, context):
-        super(InterfaceType, self)._validate(context)
-        self._get_inputs(context)
-        for operation in self.operations.itervalues(): # pylint: disable=no-member
-            operation._validate(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'inputs',
-            'operations'))
-
-@has_fields
-@implements_specification('3.6.9', 'tosca-simple-1.0')
-class RelationshipType(ExtensiblePresentation):
-    """
-    A Relationship Type is a reusable entity that defines the type of one or more relationships
-    between Node Types or Node Templates.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_RELATIONSHIP_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name,
-                                            'relationship_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Relationship Type name the Relationship Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Relationship Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Relationship Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Relationship Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_field(AttributeDefinition)
-    def attributes(self):
-        """
-        An optional list of attribute definitions for the Relationship Type.
-
-        :type: {:obj:`basestring`: :class:`AttributeDefinition`}
-        """
-
-    @object_dict_field(InterfaceDefinition)
-    def interfaces(self):
-        """
-        An optional list of interface definitions supported by the Relationship Type.
-
-        :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
-        """
-
-    @field_validator(list_type_validator('capability type', convert_shorthand_to_full_type_name,
-                                         'capability_types'))
-    @primitive_list_field(str)
-    def valid_target_types(self):
-        """
-        An optional list of one or more names of Capability Types that are valid targets for this
-        relationship.
-
-        :type: [:obj:`basestring`]
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'relationship_types')
-
-    @cachedmethod
-    def _is_descendant(self, context, the_type):
-        if the_type is None:
-            return False
-        elif the_type._name == self._name:
-            return True
-        return self._is_descendant(context, the_type._get_parent(context))
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_attributes(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_inherited_interface_definitions(context, self, 'relationship type'))
-
-    def _validate(self, context):
-        super(RelationshipType, self)._validate(context)
-        self._get_properties(context)
-        self._get_attributes(context)
-        self._get_interfaces(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'valid_target_types',
-            'properties',
-            'attributes',
-            'interfaces'))
-
-@has_fields
-@implements_specification('3.6.8', 'tosca-simple-1.0')
-class NodeType(ExtensiblePresentation):
-    """
-    A Node Type is a reusable entity that defines the type of one or more Node Templates. As such, a
-    Node Type defines the structure of observable properties via a Properties Definition, the
-    Requirements and Capabilities of the node as well as its supported interfaces.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_NODE_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name, 'node_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Node Type name this new Node Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Node Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        An optional description for the Node Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Node Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @object_dict_field(AttributeDefinition)
-    def attributes(self):
-        """
-        An optional list of attribute definitions for the Node Type.
-
-        :type: {:obj:`basestring`: :class:`AttributeDefinition`}
-        """
-
-    @object_sequenced_list_field(RequirementDefinition)
-    def requirements(self):
-        """
-        An optional sequenced list of requirement definitions for the Node Type.
-
-        ARIA NOTE: The spec seems wrong to make this a sequenced list. It seems that when you have
-        more than one requirement of the same name, behavior is undefined. The idea is to use the
-        "occurrences" field if you need to limit the number of requirement assignments.
-
-        :type: list of (str, :class:`RequirementDefinition`)
-        """
-
-    @object_dict_field(CapabilityDefinition)
-    def capabilities(self):
-        """
-        An optional list of capability definitions for the Node Type.
-
-        :type: {:obj:`basestring`: :class:`CapabilityDefinition`}
-        """
-
-    @object_dict_field(InterfaceDefinition)
-    def interfaces(self):
-        """
-        An optional list of interface definitions supported by the Node Type.
-
-        :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
-        """
-
-    @object_dict_field(ArtifactAssignment)
-    def artifacts(self):
-        """
-        An optional list of named artifact definitions for the Node Type.
-
-        :type: {:obj:`basestring`: :class:`ArtifactAssignment`}
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'node_types')
-
-    @cachedmethod
-    def _is_descendant(self, context, the_type):
-        if the_type is None:
-            return False
-        elif the_type._name == self._name:
-            return True
-        return self._is_descendant(context, the_type._get_parent(context))
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_attributes(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
-
-    @cachedmethod
-    def _get_requirements(self, context):
-        return FrozenList(get_inherited_requirement_definitions(context, self))
-
-    @cachedmethod
-    def _get_capabilities(self, context):
-        return FrozenDict(get_inherited_capability_definitions(context, self))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_inherited_interface_definitions(context, self, 'node type'))
-
-    @cachedmethod
-    def _get_artifacts(self, context):
-        return FrozenDict(get_inherited_artifact_definitions(context, self))
-
-    def _validate(self, context):
-        super(NodeType, self)._validate(context)
-        self._get_properties(context)
-        self._get_attributes(context)
-        self._get_requirements(context)
-        self._get_capabilities(context)
-        self._get_interfaces(context)
-        self._get_artifacts(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'properties',
-            'attributes',
-            'interfaces',
-            'artifacts',
-            'requirements',
-            'capabilities'))
-
-@has_fields
-@implements_specification('3.6.10', 'tosca-simple-1.0')
-class GroupType(ExtensiblePresentation):
-    """
-    A Group Type defines logical grouping types for nodes, typically for different management
-    purposes. Groups can effectively be viewed as logical nodes that are not part of the physical
-    deployment topology of an application, yet can have capabilities and the ability to attach
-    policies and interfaces that can be applied (depending on the group type) to its member nodes.
-
-    Conceptually, group definitions allow the creation of logical "membership" relationships to
-    nodes in a service template that are not a part of the application's explicit requirement
-    dependencies in the topology template (i.e. those required to actually get the application
-    deployed and running). Instead, such logical membership allows for the introduction of things
-    such as group management and uniform application of policies (i.e., requirements that are also
-    not bound to the application itself) to the group's members.
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_GROUP_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name, 'group_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Group Type name the Group Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Group Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the Group Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Group Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @field_validator(list_type_validator('node type', convert_shorthand_to_full_type_name,
-                                         'node_types'))
-    @primitive_list_field(str)
-    def members(self):
-        """
-        An optional list of one or more names of Node Types that are valid (allowed) as members of
-        the Group Type.
-
-        Note: This can be viewed by TOSCA Orchestrators as an implied relationship from the listed
-        member nodes to the group, but one that does not have operational lifecycle considerations.
-        For example, if we were to name this as an explicit Relationship Type we might call this
-        "MemberOf" (group).
-
-        :type: [:obj:`basestring`]
-        """
-
-    @object_dict_field(InterfaceDefinition)
-    def interfaces(self):
-        """
-        An optional list of interface definitions supported by the Group Type.
-
-        :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'group_types')
-
-    @cachedmethod
-    def _is_descendant(self, context, the_type):
-        if the_type is None:
-            return False
-        elif the_type._name == self._name:
-            return True
-        return self._is_descendant(context, the_type._get_parent(context))
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_interfaces(self, context):
-        return FrozenDict(get_inherited_interface_definitions(context, self, 'group type'))
-
-    def _validate(self, context):
-        super(GroupType, self)._validate(context)
-        self._get_properties(context)
-        self._get_interfaces(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'members',
-            'properties',
-            'interfaces'))
-
-@has_fields
-@implements_specification('3.6.11', 'tosca-simple-1.0')
-class PolicyType(ExtensiblePresentation):
-    """
-    A Policy Type defines a type of requirement that affects or governs an application or service's
-    topology at some stage of its lifecycle, but is not explicitly part of the topology itself
-    (i.e., it does not prevent the application or service from being deployed or run if it did not
-    exist).
-
-    See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
-    /TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
-    #DEFN_ENTITY_POLICY_TYPE>`__
-    """
-
-    @field_validator(derived_from_validator(convert_shorthand_to_full_type_name, 'policy_types'))
-    @primitive_field(str)
-    def derived_from(self):
-        """
-        An optional parent Policy Type name the Policy Type derives from.
-
-        :type: :obj:`basestring`
-        """
-
-    @object_field(Version)
-    def version(self):
-        """
-        An optional version for the Policy Type definition.
-
-        :type: :class:`Version`
-        """
-
-    @object_field(Description)
-    def description(self):
-        """
-        The optional description for the Policy Type.
-
-        :type: :class:`Description`
-        """
-
-    @object_dict_field(PropertyDefinition)
-    def properties(self):
-        """
-        An optional list of property definitions for the Policy Type.
-
-        :type: {:obj:`basestring`: :class:`PropertyDefinition`}
-        """
-
-    @field_validator(list_node_type_or_group_type_validator)
-    @primitive_list_field(str)
-    def targets(self):
-        """
-        An optional list of valid Node Types or Group Types the Policy Type can be applied to.
-
-        Note: This can be viewed by TOSCA Orchestrators as an implied relationship to the target
-        nodes, but one that does not have operational lifecycle considerations. For example, if we
-        were to name this as an explicit Relationship Type we might call this "AppliesTo" (node or
-        group).
-
-        :type: [:obj:`basestring`]
-        """
-
-    @cachedmethod
-    def _get_parent(self, context):
-        return get_parent_presentation(context, self, convert_shorthand_to_full_type_name,
-                                       'policy_types')
-
-    @cachedmethod
-    def _get_properties(self, context):
-        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
-
-    @cachedmethod
-    def _get_targets(self, context):
-        node_types, group_types = get_inherited_targets(context, self)
-        return FrozenList(node_types), FrozenList(group_types)
-
-    def _validate(self, context):
-        super(PolicyType, self)._validate(context)
-        self._get_properties(context)
-
-    def _dump(self, context):
-        self._dump_content(context, (
-            'description',
-            'version',
-            'derived_from',
-            'targets',
-            'properties'))
diff --git a/apache-ariatosca-0.1.1/requirements.in b/apache-ariatosca-0.1.1/requirements.in
deleted file mode 100644
index 723ed51..0000000
--- a/apache-ariatosca-0.1.1/requirements.in
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# In order to create the requirements.txt file, execute
-# pip-compile --output-file requirements.txt requirements.in (pip-tools package is needed).
-
-requests>=2.3.0, <2.14.0
-networkx>=1.9, <1.10 # version 1.10 dropped support of python 2.6
-retrying>=1.3.0, <1.4.0
-blinker>1.3, <1.5
-jsonpickle>0.9.0, <=0.9.4
-ruamel.yaml>=0.11.12, <0.12.0  # version 0.12.0 dropped support of python 2.6
-Jinja2>=2.8, <2.9
-shortuuid>=0.5, <0.6
-CacheControl[filecache]>=0.11.0, <0.13
-clint>=0.5.0, <0.6
-SQLAlchemy>=1.1.0, <1.2  # version 1.2 dropped support of python 2.6
-wagon==0.6.0
-bottle>=0.12.0, <0.13
-setuptools>=35.0.0, <36.0.0
-click>=6.0, < 7.0
-colorama>=0.3.7, <=0.3.9
-PrettyTable>=0.7,<0.8
-click_didyoumean==0.0.3
-backports.shutil_get_terminal_size==1.0.0
-logutils==0.3.4.1
-psutil>=5.2.2, < 6.0.0
-importlib ; python_version < '2.7'
-ordereddict ; python_version < '2.7'
-total-ordering ; python_version < '2.7'  # only one version on pypi
diff --git a/apache-ariatosca-0.1.1/requirements.txt b/apache-ariatosca-0.1.1/requirements.txt
deleted file mode 100644
index 7ee1008..0000000
--- a/apache-ariatosca-0.1.1/requirements.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-#    pip-compile --output-file requirements.txt requirements.in
-#
-appdirs==1.4.3            # via setuptools
-args==0.1.0               # via clint
-backports.shutil_get_terminal_size==1.0.0
-blinker==1.4
-bottle==0.12.13
-cachecontrol[filecache]==0.12.1
-click==6.7
-click_didyoumean==0.0.3
-clint==0.5.1
-colorama==0.3.9
-decorator==4.0.11         # via networkx
-importlib==1.0.4 ; python_version < "2.7"
-jinja2==2.8.1
-jsonpickle==0.9.4
-lockfile==0.12.2          # via cachecontrol
-logutils==0.3.4.1
-markupsafe==1.0           # via jinja2
-msgpack-python==0.4.8     # via cachecontrol
-networkx==1.9.1
-ordereddict==1.1 ; python_version < "2.7"
-packaging==16.8           # via setuptools
-prettytable==0.7.2
-psutil==5.2.2
-pyparsing==2.2.0          # via packaging
-requests==2.13.0
-retrying==1.3.3
-ruamel.ordereddict==0.4.9  # via ruamel.yaml
-ruamel.yaml==0.11.15
-shortuuid==0.5.0
-six==1.10.0               # via packaging, retrying, setuptools
-sqlalchemy==1.1.6
-total-ordering==0.1.0 ; python_version < "2.7"
-wagon==0.6.0
-wheel==0.29.0             # via wagon
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==35.0.2
diff --git a/apache-ariatosca-0.1.1/setup.cfg b/apache-ariatosca-0.1.1/setup.cfg
deleted file mode 100644
index 8bfd5a1..0000000
--- a/apache-ariatosca-0.1.1/setup.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-[egg_info]
-tag_build = 
-tag_date = 0
-
diff --git a/apache-ariatosca-0.1.1/setup.py b/apache-ariatosca-0.1.1/setup.py
deleted file mode 100644
index 8e95c19..0000000
--- a/apache-ariatosca-0.1.1/setup.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-from setuptools import setup, find_packages
-from setuptools.command.install import install
-from setuptools.command.develop import develop
-
-
-_PACKAGE_NAME = 'apache-ariatosca'
-_PYTHON_SUPPORTED_VERSIONS = [(2, 6), (2, 7)]
-_EXTENSION_DIR = 'extensions'
-_EXTENSION_NAMES = [
-    'aria_extension_tosca'
-]
-
-if (sys.version_info[0], sys.version_info[1]) not in _PYTHON_SUPPORTED_VERSIONS:
-    raise NotImplementedError(
-        '{0} package supports Python versions 2.6 and 2.7 only'.format(
-            _PACKAGE_NAME))
-
-root_dir = os.path.dirname(__file__)
-
-with open(os.path.join(root_dir, 'VERSION')) as version_file:
-    __version__ = version_file.read().strip()
-    incubating_version = '{0}-incubating'.format(__version__)
-
-with open(os.path.join(root_dir, 'README.rst')) as readme:
-    long_description = readme.read()
-
-install_requires = []
-
-ssh_requires = [
-    'Fabric>=1.13.0, <1.14',
-]
-win_ssh_requires = [
-    # Fabric depends on pypiwin32 on Windows, but doesn't install it
-    'pypiwin32==219'
-]
-
-extras_require = {
-    'ssh': ssh_requires,
-    'ssh:sys_platform=="win32"': win_ssh_requires
-}
-
-with open(os.path.join(root_dir, 'requirements.in')) as requirements:
-    for requirement in requirements.readlines():
-        requirement = requirement.split('#')[0].strip()  # strip full-line or trailing comments
-        if not requirement:
-            continue  # skip empty and comment lines
-
-        # Dependencies which use environment markers have to go in as conditional dependencies
-        # under "extras_require" rather than "install_requires", otherwise the environment
-        # markers get ignored when installing from wheel. See more here:
-        # https://wheel.readthedocs.io/en/latest/index.html#defining-conditional-dependencies
-        # https://hynek.me/articles/conditional-python-dependencies/
-        if ';' in requirement:
-            package, condition = requirement.split(';')
-            cond_name = ':{0}'.format(condition.strip())
-            extras_require.setdefault(cond_name, [])
-            extras_require[cond_name].append(package.strip())
-        else:
-            install_requires.append(requirement)
-
-
-console_scripts = ['aria = aria.cli.main:main']
-
-
-def _generate_user_options(command):
-    return command.user_options + [
-        ('skip-ctx', None, 'Skip installation of the ctx entry point (defaults to False)')
-    ]
-
-
-def _generate_boolean_options(command):
-    return command.boolean_options + ['skip-ctx']
-
-
-def _initialize_options(custom_cmd):
-    custom_cmd.command.initialize_options(custom_cmd)
-    custom_cmd.skip_ctx = False
-
-
-def _run(custom_cmd):
-    if custom_cmd.skip_ctx is False:
-        console_scripts.append('ctx = aria.orchestrator.execution_plugin.ctx_proxy.client:main')
-    custom_cmd.command.run(custom_cmd)
-
-
-class InstallCommand(install):
-    command = install
-
-    user_options = _generate_user_options(install)
-    boolean_options = _generate_boolean_options(install)
-    initialize_options = _initialize_options
-    run = _run
-
-
-class DevelopCommand(develop):
-    command = develop
-
-    user_options = _generate_user_options(develop)
-    boolean_options = _generate_boolean_options(develop)
-    initialize_options = _initialize_options
-    run = _run
-
-setup(
-    name=_PACKAGE_NAME,
-    version=__version__,
-    description='ARIA',
-    long_description=long_description,
-    license='Apache License 2.0',
-    author='ARIA',
-    author_email='dev@ariatosca.incubator.apache.org',
-    url='http://ariatosca.incubator.apache.org/',
-    download_url=(
-        'https://dist.apache.org/repos/dist/release/incubator/ariatosca/' + incubating_version),
-    classifiers=[
-        'Development Status :: 4 - Beta',
-        'Environment :: Console',
-        'Environment :: Web Environment',
-        'Intended Audience :: Developers',
-        'Intended Audience :: System Administrators',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: OS Independent',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Topic :: Software Development :: Libraries :: Python Modules',
-        'Topic :: System :: Networking',
-        'Topic :: System :: Systems Administration'],
-    packages=find_packages(include=['aria*']) +
-             find_packages(where=_EXTENSION_DIR,
-                           include=['{0}*'.format(name) for name in _EXTENSION_NAMES]),
-    package_dir=dict((name, '{0}/{1}'.format(_EXTENSION_DIR, name)) for name in _EXTENSION_NAMES),
-    package_data={
-        'aria': [
-            'cli/config/config_template.yaml'
-        ],
-        'aria_extension_tosca': [
-            'profiles/tosca-simple-1.0/**',
-            'profiles/tosca-simple-nfv-1.0/**',
-            'profiles/aria-1.0/**'
-        ]
-    },
-    platforms=['any'],
-    zip_safe=False,
-    install_requires=install_requires,
-    extras_require=extras_require,
-    entry_points={
-        'console_scripts': console_scripts
-    },
-    cmdclass={
-        'install': InstallCommand,      # used in pip install ...
-        'develop': DevelopCommand       # used in pip install -e ...
-    }
-)
diff --git a/build_site.sh b/build_site.sh
index 68ae104..b6835c3 100755
--- a/build_site.sh
+++ b/build_site.sh
@@ -17,12 +17,13 @@
 DISTPATH=https://dist.apache.org/repos/dist/dev/incubator/ariatosca/$ARIAVERSION-incubating/sdist/apache-ariatosca-$ARIAVERSION.tar.gz
 DISTFILE=apache-ariatosca-$ARIAVERSION.tar.gz
 
-# Copy docs
+# Copy docs into _site
 wget -P $WORKDIR $DISTPATH
 tar -C $WORKDIR -xzf $WORKDIR/$DISTFILE apache-ariatosca-$ARIAVERSION/docs
 mkdir -p $WORKDIR/_site/docs
-mv $WORKDIR/apache-ariatosca-$ARIAVERSION/docs $WORKDIR/_site/docs
+mv $WORKDIR/apache-ariatosca-$ARIAVERSION/docs $WORKDIR/_site/
 rm -f $WORKDIR/$DISTFILE
+rm -Rf $WORKDIR/apache-ariatosca-$ARIAVERSION
 
 # Push
 git checkout asf-site