PHOENIX-5394 Fix the build so it actually runs ITs (integration tests) and restore phoenixdb

Signed-off-by: Karan Mehta <karanmehta93@gmail.com>
diff --git a/assembly/cluster.xml b/assembly/cluster.xml
index b98f017..ad08c25 100644
--- a/assembly/cluster.xml
+++ b/assembly/cluster.xml
@@ -1,3 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
 <assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
           xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
     <id>cluster</id>
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 097f1cd..4562607 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -1,4 +1,22 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
 <project xmlns="http://maven.apache.org/POM/4.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -52,4 +70,4 @@
         </plugins>
     </build>
 
-</project>
\ No newline at end of file
+</project>
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
deleted file mode 100644
index a421ff3..0000000
--- a/phoenix-client/pom.xml
+++ /dev/null
@@ -1,366 +0,0 @@
-<?xml version='1.0'?>
-<!--
-
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
-
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.phoenix</groupId>
-    <artifactId>phoenix-queryserver</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-  </parent>
-  <artifactId>phoenix-client</artifactId>
-  <name>Phoenix Client</name>
-  <description>Phoenix Client</description>
-  <packaging>jar</packaging>
-  <properties>
-    <!-- Don't make a test-jar -->
-    <maven.test.skip>true</maven.test.skip>
-    <!-- Don't make a source-jar -->
-    <source.skip>true</source.skip>
-    <license.bundles.dependencies>true</license.bundles.dependencies>
-    <top.dir>${project.basedir}/..</top.dir>
-    <shaded.package>org.apache.phoenix.shaded</shaded.package>
-
-  </properties>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <finalName>phoenix-${project.version}-client</finalName>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-install-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>install-file</goal>
-            </goals>
-            <id>default-install</id>
-            <configuration>
-              <skip>true</skip>
-            </configuration>
-            <phase>install</phase>
-          </execution>
-        </executions>
-        <configuration>
-          <file>${basedir}/target/phoenix-${project.version}-client.jar</file>
-	  <pomFile>${basedir}/pom.xml</pomFile>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <finalName>phoenix-${project.version}-client</finalName>
-              <shadedArtifactAttached>false</shadedArtifactAttached>
-              <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
-              <shadeTestJar>false</shadeTestJar>
-              <transformers>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>csv-bulk-load-config.properties</resource>
-                  <file>
-                    ${project.basedir}/../config/csv-bulk-load-config.properties
-                  </file>
-                </transformer>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>README.md</resource>
-                  <file>${project.basedir}/../README.md</file>
-                </transformer>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>LICENSE.txt</resource>
-                  <file>${project.basedir}/../LICENSE</file>
-                </transformer>
-                <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>NOTICE</resource>
-                  <file>${project.basedir}/../NOTICE</file>
-                </transformer>
-              </transformers>
-              <artifactSet>
-                <includes>
-                  <include>*:*</include>
-                </includes>
-                <excludes>
-                  <exclude>org.apache.phoenix:phoenix-client</exclude>
-                  <exclude>xom:xom</exclude>
-                </excludes>
-              </artifactSet>
-              <filters>
-                <filter>
-                  <artifact>*:*</artifact>
-                  <excludes>
-                    <exclude>META-INF/*.SF</exclude>
-                    <exclude>META-INF/*.DSA</exclude>
-                    <exclude>META-INF/*.RSA</exclude>
-                    <exclude>META-INF/license/*</exclude>
-                    <exclude>LICENSE.*</exclude>
-                    <exclude>NOTICE.*</exclude>
-                  </excludes>
-                </filter>
-              </filters>
-
-              <relocations>
-
-                <!-- COM relocation -->
-                <relocation>
-                  <pattern>com.codahale</pattern>
-                  <shadedPattern>${shaded.package}.com.codahale</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.fasterxml</pattern>
-                  <shadedPattern>${shaded.package}.com.fasterxml</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.jamesmurty</pattern>
-                  <shadedPattern>${shaded.package}.com.jamesmurty</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.jcraft</pattern>
-                  <shadedPattern>${shaded.package}.com.jcraft</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.lmax</pattern>
-                  <shadedPattern>${shaded.package}.com.lmax</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.thoughtworks</pattern>
-                  <shadedPattern>${shaded.package}.com.thoughtworks</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.yammer</pattern>
-                  <shadedPattern>${shaded.package}.com.yammer</shadedPattern>
-                </relocation>
-
-                <!-- IO relocations -->
-                <relocation>
-                  <pattern>io.netty</pattern>
-                  <shadedPattern>${shaded.package}.io.netty</shadedPattern>
-                </relocation>
-
-                <!-- ORG relocations -->
-                <relocation>
-                  <pattern>org.antlr</pattern>
-                  <shadedPattern>${shaded.package}.org.antlr</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.aopalliance</pattern>
-                  <shadedPattern>${shaded.package}.org.aopalliance</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.codehaus</pattern>
-                  <shadedPattern>${shaded.package}.org.codehaus</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.fusesource</pattern>
-                  <shadedPattern>${shaded.package}.org.fusesource</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.hamcrest</pattern>
-                  <shadedPattern>${shaded.package}.org.hamcrest</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.hsqldb</pattern>
-                  <shadedPattern>${shaded.package}.org.hsqldb</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.iq80</pattern>
-                  <shadedPattern>${shaded.package}.org.iq80</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jamon</pattern>
-                  <shadedPattern>${shaded.package}.org.jamon</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jboss</pattern>
-                  <shadedPattern>${shaded.package}.org.jboss</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jcodings</pattern>
-                  <shadedPattern>${shaded.package}.org.jcodings</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jets3t</pattern>
-                  <shadedPattern>${shaded.package}.org.jets3t</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.joda</pattern>
-                  <shadedPattern>${shaded.package}.org.joda</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.joni</pattern>
-                  <shadedPattern>${shaded.package}.org.joni</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.junit</pattern>
-                  <shadedPattern>${shaded.package}.org.junit</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.kosmix</pattern>
-                  <shadedPattern>${shaded.package}.org.kosmix</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.mortbay</pattern>
-                  <shadedPattern>${shaded.package}.org.mortbay</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.objectweb</pattern>
-                  <shadedPattern>${shaded.package}.org.objectweb</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.stringtemplate</pattern>
-                  <shadedPattern>${shaded.package}.org.stringtemplate</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.tukaani</pattern>
-                  <shadedPattern>${shaded.package}.org.tukaani</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.znerd</pattern>
-                  <shadedPattern>${shaded.package}.org.znerd</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.avro</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.avro</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.commons</shadedPattern>
-                  <excludes>
-                    <exclude>org.apache.commons.csv.**</exclude>
-                    <exclude>org.apache.commons.logging.**</exclude>
-                    <exclude>org.apache.commons.configuration.**</exclude>
-                  </excludes>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.directory</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.directory</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.http</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.http</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.jasper</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.jasper</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.jute</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.jute</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.mina</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.mina</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.oro</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.oro</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.taglibs</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.taglibs</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.thrift</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.thrift</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.tools</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.tools</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.twill</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.twill</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.velocity</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.velocity</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.zookeeper</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.zookeeper</shadedPattern>
-                </relocation>
-
-                <!-- NET relocations -->
-                <relocation>
-                  <pattern>net</pattern>
-                  <shadedPattern>${shaded.package}.net</shadedPattern>
-                </relocation>
-
-                <!-- Misc relocations -->
-                <relocation>
-                  <pattern>antlr</pattern>
-                  <shadedPattern>${shaded.package}.antlr</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>it.unimi</pattern>
-                  <shadedPattern>${shaded.package}.it.unimi</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>jline</pattern>
-                  <shadedPattern>${shaded.package}.jline</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>junit</pattern>
-                  <shadedPattern>${shaded.package}.junit</shadedPattern>
-                </relocation>
-              </relocations>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-  <dependencies>
-    <!-- Depend on all other internal projects -->
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-      <version>${phoenix.version}</version>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/pom.xml b/pom.xml
index caff061..4123556 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,6 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
-
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
  distributed with this work for additional information
@@ -17,12 +16,16 @@
  KIND, either express or implied.  See the License for the
  specific language governing permissions and limitations
  under the License.
-
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
+    <parent>
+      <groupId>org.apache</groupId>
+      <artifactId>apache</artifactId>
+      <version>21</version>
+    </parent>
 
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix-queryserver</artifactId>
@@ -49,17 +52,9 @@
         <module>queryserver-client</module>
         <module>load-balancer</module>
         <module>assembly</module>
-        <module>phoenix-client</module>
         <module>queryserver-orchestrator</module>
     </modules>
 
-    <repositories>
-        <repository>
-            <id>apache release</id>
-            <url>https://repository.apache.org/content/repositories/releases/</url>
-        </repository>
-    </repositories>
-
     <scm>
         <connection>scm:git:https://gitbox.apache.org/repos/asf/phoenix-queryserver.git</connection>
         <url>https://gitbox.apache.org/repos/asf/phoenix-queryserver.git</url>
@@ -71,15 +66,13 @@
         <top.dir>${project.basedir}</top.dir>
 
         <!-- Hadoop Versions -->
-        <hbase.version>1.4.0</hbase.version>
+        <hbase.version>1.4.10</hbase.version>
         <hadoop-two.version>2.7.5</hadoop-two.version>
         <curator.version>2.12.0</curator.version>
-        <phoenix.version>4.15.0-HBase-1.3-SNAPSHOT</phoenix.version>
+        <phoenix.version>4.14.2-HBase-1.4</phoenix.version>
 
         <!-- Dependency versions -->
-        <protobuf-java.version>2.5.0</protobuf-java.version>
         <sqlline.version>1.2.0</sqlline.version>
-        <guava.version>13.0.1</guava.version>
         <jline.version>2.11</jline.version>
         <commons-logging.version>1.2</commons-logging.version>
         <!-- Do not change jodatime.version until HBASE-15199 is fixed -->
@@ -99,15 +92,12 @@
         <maven.assembly.version>2.5.2</maven.assembly.version>
 
         <!-- Plugin options -->
-        <numForkedUT>8</numForkedUT>
-        <numForkedIT>7</numForkedIT>
         <it.failIfNoSpecifiedTests>false</it.failIfNoSpecifiedTests>
         <surefire.failIfNoSpecifiedTests>false</surefire.failIfNoSpecifiedTests>
 
         <!-- Set default encoding so multi-byte tests work correctly on the Mac -->
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-
     </properties>
 
     <build>
@@ -168,11 +158,6 @@
                     <!-- Avoid defining exclusions in pluginManagement as they are global.
                          We already inherit some from the ASF parent pom. -->
                 </plugin>
-                <!-- We put slow-running tests into src/it and run them during the
-                    integration-test phase using the failsafe plugin. This way
-                    developers can run unit tests conveniently from the IDE or via
-                    "mvn package" from the command line without triggering time
-                    consuming integration tests. -->
                 <plugin>
                     <groupId>org.codehaus.mojo</groupId>
                     <artifactId>build-helper-maven-plugin</artifactId>
@@ -212,20 +197,16 @@
                     <version>${maven-failsafe-plugin.version}</version>
                     <executions>
                         <execution>
-                            <id>ParallelStatsEnabledTest</id>
+                            <id>IntegrationTests</id>
                             <configuration>
                                 <encoding>UTF-8</encoding>
-                                <forkCount>${numForkedIT}</forkCount>
                                 <runOrder>alphabetical</runOrder>
-                                <reuseForks>true</reuseForks>
+                                <reuseForks>false</reuseForks>
                                 <runOrder>alphabetical</runOrder>
-                                <!--parallel>methods</parallel>
-                                <threadCount>20</threadCount-->
                                 <argLine>-Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
-                                <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
+                                <redirectTestOutputToFile>true</redirectTestOutputToFile>
                                 <shutdown>kill</shutdown>
                                 <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
-                                <groups>org.apache.phoenix.end2end.ParallelStatsEnabledTest</groups>
                             </configuration>
                             <goals>
                                 <goal>integration-test</goal>
@@ -335,7 +316,7 @@
                     <reuseForks>true</reuseForks>
                     <argLine>-enableassertions -Xmx2250m -XX:MaxPermSize=128m
                         -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
-                    <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
+                    <redirectTestOutputToFile>true</redirectTestOutputToFile>
                     <shutdown>kill</shutdown>
                 </configuration>
             </plugin>
@@ -367,20 +348,6 @@
                 <artifactId>apache-rat-plugin</artifactId>
                 <configuration>
                     <excludes>
-                        <!-- Header on changelog isn't normal -->
-                        <exclude>CHANGES</exclude>
-                        <!-- IDE configuration -->
-                        <exclude>dev/phoenix.importorder</exclude>
-                        <!-- Release L&N -->
-                        <exclude>dev/release_files/LICENSE</exclude>
-                        <exclude>dev/release_files/NOTICE</exclude>
-                        <!-- Exclude data files for examples -->
-                        <exclude>docs/*.csv</exclude>
-                        <exclude>examples/*.csv</exclude>
-                        <!-- Exclude SQL files from rat. Sqlline 1.1.9 doesn't work with
-                             comments on the first line of a file. -->
-                        <exclude>examples/*.sql</exclude>
-                        <exclude>examples/pig/testdata</exclude>
                         <!-- precommit? -->
                         <exclude>**/patchprocess/**</exclude>
                         <!-- Argparse is bundled to work around system Python version
@@ -574,7 +541,7 @@
             <dependency>
                 <groupId>jline</groupId>
                 <artifactId>jline</artifactId>
-                <version>2.11</version>
+                <version>${jline.version}</version>
             </dependency>
             <dependency>
                 <groupId>sqlline</groupId>
@@ -605,7 +572,7 @@
             <dependency>
                 <groupId>org.slf4j</groupId>
                 <artifactId>slf4j-api</artifactId>
-                <version>1.8.0-alpha2</version>
+                <version>1.7.26</version>
             </dependency>
             <dependency>
                 <groupId>net.sourceforge.argparse4j</groupId>
@@ -615,4 +582,4 @@
         </dependencies>
     </dependencyManagement>
 
-</project>
\ No newline at end of file
+</project>
diff --git a/python/.gitignore b/python/.gitignore
new file mode 100644
index 0000000..57335c6
--- /dev/null
+++ b/python/.gitignore
@@ -0,0 +1,8 @@
+/dist/
+/build/
+/doc/_build/
+/doc/build/
+*.pyc
+*.egg-info/
+.vagrant/
+.tox
diff --git a/python/.gitlab-ci.yml b/python/.gitlab-ci.yml
new file mode 100644
index 0000000..6e58a23
--- /dev/null
+++ b/python/.gitlab-ci.yml
@@ -0,0 +1,149 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+stages:
+  - prepare
+  - test
+
+build build-env image:
+  stage: prepare
+  script:
+    - cd ci/build-env
+    - docker build -t ${CI_REGISTRY_IMAGE}/build-env .
+    - docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
+    - docker push $CI_REGISTRY_IMAGE/build-env
+  tags:
+    - docker-host
+  only:
+    - master@lukas/python-phoenixdb
+
+.build-phoenix-image: &build_phoenix_image
+  stage: prepare
+  script:
+    - JOB_NAME=($CI_JOB_NAME)
+    - cd ci/phoenix
+    - docker build -t ${CI_REGISTRY_IMAGE}/phoenix:${JOB_NAME[2]}
+        --build-arg PHOENIX_VERSION=$PHOENIX_VERSION
+        --build-arg HBASE_VERSION=$HBASE_VERSION
+        --build-arg HBASE_DIR=$HBASE_DIR
+        .
+    - docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
+    - docker push $CI_REGISTRY_IMAGE/phoenix:${JOB_NAME[2]}
+  tags:
+    - docker-host
+
+build phoenix 5.0.0-alpha-HBase-2.0 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 5.0.0-alpha-HBase-2.0
+    HBASE_VERSION: 2.0.0-beta-1
+    HBASE_DIR: hbase-2.0.0-beta-1
+
+build phoenix 4.13 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.13.1-HBase-1.3
+    HBASE_VERSION: 1.3.1
+    HBASE_DIR: 1.3.1
+
+build phoenix 4.12 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.12.0-HBase-1.3
+    HBASE_VERSION: 1.3.1
+    HBASE_DIR: 1.3.1
+
+build phoenix 4.11 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.11.0-HBase-1.3
+    HBASE_VERSION: 1.3.1
+    HBASE_DIR: 1.3.1
+
+build phoenix 4.10 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.10.0-HBase-1.2
+    HBASE_VERSION: 1.2.6
+    HBASE_DIR: 1.2.6
+
+build phoenix 4.9 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.9.0-HBase-1.2
+    HBASE_VERSION: 1.2.6
+    HBASE_DIR: 1.2.6
+
+build phoenix 4.8 image:
+  <<: *build_phoenix_image
+  variables:
+    PHOENIX_VERSION: 4.8.2-HBase-1.2
+    HBASE_VERSION: 1.2.6
+    HBASE_DIR: 1.2.6
+
+.test: &test
+  image: $CI_REGISTRY_IMAGE/build-env
+  variables:
+    PHOENIXDB_TEST_DB_URL: http://phoenix:8765/
+    PIP_CACHE_DIR: $CI_PROJECT_DIR/cache/
+  script:
+    - tox -e py27,py35
+  cache:
+    paths:
+      - cache/
+  tags:
+    - docker
+
+test phoenix 5.0.0-alpha-HBase-2.0:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:5.0.0-alpha-HBase-2.0
+      alias: phoenix
+
+test phoenix 4.13:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.13
+      alias: phoenix
+
+test phoenix 4.12:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.12
+      alias: phoenix
+
+test phoenix 4.11:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.11
+      alias: phoenix
+
+test phoenix 4.10:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.10
+      alias: phoenix
+
+test phoenix 4.9:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.9
+      alias: phoenix
+
+test phoenix 4.8:
+  <<: *test
+  services:
+    - name: $CI_REGISTRY_IMAGE/phoenix:4.8
+      alias: phoenix
diff --git a/python/README.md b/python/README.md
new file mode 100644
index 0000000..6ea5fc9
--- /dev/null
+++ b/python/README.md
@@ -0,0 +1,93 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+# Apache Phoenix Python Driver "PhoenixDB"
+
+This directory contains the Python driver for Apache Phoenix called "Python PhoenixDB" or just "PhoenixDB".
+
+This driver implements the Python DB 2.0 API for database drivers as described by [PEP-249](https://www.python.org/dev/peps/pep-0249/).
+This driver is implemented using the Phoenix Query Server (PQS) and the [Apache Calcite
+Avatica](https://calcite.apache.org/avatica) project.
+
+This driver should be compatible with Python 2.7 and Python 3.3+ and support both unauthenticated access and
+authenticated access via SPNEGO to PQS.
+
+## Usage
+
+The use of a virtual Python environment is strongly recommended, e.g. [virtualenv](https://virtualenv.pypa.io/en/stable/) or [conda](https://conda.io/docs/). You can install one of these using the Python package manager [pip](https://pypi.org/project/pip/). For developers who need to support multiple versions of Python, Python version managers, such as [pyenv](https://github.com/pyenv/pyenv), can drastically improve your quality of life.
+
+When connecting to an unsecured PQS instance, install the phoenixdb module into your local environment and write your
+application.
+
+```bash
+$ virtualenv e
+$ source e/bin/activate
+$ pip install file:///path/to/phoenix-x.y.z/phoenix/python/phoenixdb
+$ cat <<EOF > test-client.py
+import phoenixdb
+import phoenixdb.cursor
+
+if __name__ == '__main__':
+  database_url = 'http://localhost:8765/'
+  conn = phoenixdb.connect(database_url, autocommit=True)
+  cursor = conn.cursor()
+  cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username VARCHAR)")
+  cursor.execute("UPSERT INTO users VALUES (?, ?)", (1, 'admin'))
+  cursor.execute("SELECT * FROM users")
+  print(cursor.fetchall())
+EOF
+$ python test-client.py
+```
+
+When using a PQS instance secured via SPNEGO with Kerberos-based authentication, you must also install the custom
+release of requests-kerberos provided with PhoenixDB.
+
+
+```bash
+$ virtualenv e
+$ source e/bin/activate
+$ pip install file:///path/to/phoenix-x.y.z/phoenix/python/phoenixdb
+$ pip install file:///path/to/phoenix-x.y.z/phoenix/python/requests-kerberos
+$ cat <<EOF > test-client.py
+import phoenixdb
+import phoenixdb.cursor
+
+if __name__ == '__main__':
+  database_url = 'http://localhost:8765/'
+  conn = phoenixdb.connect(database_url, autocommit=True, auth="SPNEGO")
+  cursor = conn.cursor()
+  cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username VARCHAR)")
+  cursor.execute("UPSERT INTO users VALUES (?, ?)", (1, 'admin'))
+  cursor.execute("SELECT * FROM users")
+  print(cursor.fetchall())
+EOF
+$ python test-client.py
+```
+
+Please see the README included with PhoenixDB for more information on using the Python driver.
+
+## Kerberos support in testing
+
+An integration test, `SecureQueryServerPhoenixDBIT`, is included with Phoenix that sets up a secured Phoenix installation with PQS, then
+uses the driver to interact with that installation. We have observed that, with a correct krb5.conf for
+the Kerberos installation (MIT or Heimdal), this driver and the patched requests-kerberos library can
+communicate with the secured PQS instance.
+
+This test will guess at the flavor of Kerberos that you have installed on your local system. There is an option
+exposed which will force a specific flavor to be assumed: `PHOENIXDB_KDC_IMPL`. Valid options are `MIT` and `HEIMDAL`.
+You specify this as a Java system property from Maven, e.g. `mvn verify -Dtest=foo -Dit.test=SecureQueryServerPhoenixDBIT -DPHOENIXDB_KDC_IMPL=MIT`
+forces an MIT Kerberos style krb5.conf to be used for the test.
diff --git a/python/phoenixdb/NEWS.rst b/python/phoenixdb/NEWS.rst
new file mode 100644
index 0000000..21e2277
--- /dev/null
+++ b/python/phoenixdb/NEWS.rst
@@ -0,0 +1,44 @@
+Changelog
+=========
+
+Version 0.7
+-----------
+
+- Added DictCursor for easier access to columns by their names.
+- Support for Phoenix versions from 4.8 to 4.11.
+
+Version 0.6
+-----------
+
+- Fixed result fetching when using a query with parameters.
+- Support for Phoenix 4.9.
+
+Version 0.5
+-----------
+
+- Added support for Python 3.
+- Switched from the JSON serialization to Protocol Buffers, improved compatibility with Phoenix 4.8.
+- Phoenix 4.6 and older are no longer supported.
+
+Version 0.4
+-----------
+
+- Fixes for the final version of Phoenix 4.7.
+
+Version 0.3
+-----------
+
+- Compatible with Phoenix 4.7.
+
+Version 0.2
+-----------
+
+- Added (configurable) retry on connection errors.
+- Added Vagrantfile for easier testing.
+- Compatible with Phoenix 4.6.
+
+Version 0.1
+-----------
+
+- Initial release.
+- Compatible with Phoenix 4.4.
diff --git a/python/phoenixdb/README.rst b/python/phoenixdb/README.rst
new file mode 100644
index 0000000..c74104c
--- /dev/null
+++ b/python/phoenixdb/README.rst
@@ -0,0 +1,136 @@
+Phoenix database adapter for Python
+===================================
+
+.. image:: https://code.oxygene.sk/lukas/python-phoenixdb/badges/master/pipeline.svg
+    :target: https://code.oxygene.sk/lukas/python-phoenixdb/commits/master
+    :alt: Build Status
+
+.. image:: https://readthedocs.org/projects/python-phoenixdb/badge/?version=latest
+    :target: http://python-phoenixdb.readthedocs.io/en/latest/?badge=latest
+    :alt: Documentation Status
+
+``phoenixdb`` is a Python library for accessing the
+`Phoenix SQL database <http://phoenix.apache.org/>`_
+using the
+`remote query server <http://phoenix.apache.org/server.html>`_.
+The library implements the
+standard `DB API 2.0 <https://www.python.org/dev/peps/pep-0249/>`_ interface,
+which should be familiar to most Python programmers.
+
+Installation
+------------
+
+The easiest way to install the library is using `pip <https://pip.pypa.io/en/stable/>`_::
+
+    pip install phoenixdb
+
+You can also download the source code from `here <https://phoenix.apache.org/download.html>`_,
+extract the archive and then install it manually::
+
+    cd /path/to/apache-phoenix-x.y.z/phoenix
+    python setup.py install
+
+Usage
+-----
+
+The library implements the standard DB API 2.0 interface, so it can be
+used the same way you would use any other SQL database from Python, for example::
+
+    import phoenixdb
+    import phoenixdb.cursor
+
+    database_url = 'http://localhost:8765/'
+    conn = phoenixdb.connect(database_url, autocommit=True)
+
+    cursor = conn.cursor()
+    cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username VARCHAR)")
+    cursor.execute("UPSERT INTO users VALUES (?, ?)", (1, 'admin'))
+    cursor.execute("SELECT * FROM users")
+    print(cursor.fetchall())
+
+    cursor = conn.cursor(cursor_factory=phoenixdb.cursor.DictCursor)
+    cursor.execute("SELECT * FROM users WHERE id=1")
+    print(cursor.fetchone()['USERNAME'])
+
+
+Setting up a development environment
+------------------------------------
+
+If you want to quickly try out the included examples, you can set up a
+local `virtualenv <https://virtualenv.pypa.io/en/latest/>`_ with all the
+necessary requirements::
+
+    virtualenv e
+    source e/bin/activate
+    pip install -r requirements.txt
+    python setup.py develop
+
+To create or update the Avatica protobuf classes, change the tag in ``gen-protobuf.sh``
+and run the script.
+
+If you need a Phoenix query server for experimenting, you can get one running
+quickly using `Docker <https://www.docker.com/>`_::
+
+    docker-compose up
+
+Or if you need an older version of Phoenix::
+
+    PHOENIX_VERSION=4.9 docker-compose up
+
+If you want to use the library without installing the phoenixdb library, you can use
+the ``PYTHONPATH`` environment variable to point to the library directly::
+
+    cd $PHOENIX_HOME/python
+    python setup.py build
+    cd ~/my_project
+    PYTHONPATH=$PHOENIX_HOME/build/lib python my_app.py
+
+Interactive SQL shell
+---------------------
+
+There is a Python-based interactive shell included in the examples folder, which can be
+used to connect to Phoenix and execute queries::
+
+    ./examples/shell.py http://localhost:8765/
+    db=> CREATE TABLE test (id INTEGER PRIMARY KEY, name VARCHAR);
+    no rows affected (1.363 seconds)
+    db=> UPSERT INTO test (id, name) VALUES (1, 'Lukas');
+    1 row affected (0.004 seconds)
+    db=> SELECT * FROM test;
+    +------+-------+
+    |   ID | NAME  |
+    +======+=======+
+    |    1 | Lukas |
+    +------+-------+
+    1 row selected (0.019 seconds)
+
+Running the test suite
+----------------------
+
+The library comes with a test suite for testing Python DB API 2.0 compliance and
+various Phoenix-specific features. In order to run the test suite, you need a
+working Phoenix database and set the ``PHOENIXDB_TEST_DB_URL`` environment variable::
+
+    export PHOENIXDB_TEST_DB_URL='http://localhost:8765/'
+    nosetests
+
+Similarly, tox can be used to run the test suite against multiple Python versions::
+
+    pyenv install 3.5.5
+    pyenv install 3.6.4
+    pyenv install 2.7.14
+    pyenv global 2.7.14 3.5.5 3.6.4
+    PHOENIXDB_TEST_DB_URL='http://localhost:8765' tox
+
+Known issues
+------------
+
+- You can only use the library in autocommit mode. The native Java Phoenix library also implements batched upserts, which can be committed at once, but this is not exposed over the remote server. This was previously unimplemented due to CALCITE-767, however this functionality exists in the server, but is lacking in the driver.
+  (`CALCITE-767 <https://issues.apache.org/jira/browse/CALCITE-767>`_)
+- TIME and DATE columns in Phoenix are stored as full timestamps with a millisecond accuracy,
+  but the remote protocol only exposes the time (hour/minute/second) or date (year/month/day)
+  parts of the columns. (`CALCITE-797 <https://issues.apache.org/jira/browse/CALCITE-797>`_, `CALCITE-798 <https://issues.apache.org/jira/browse/CALCITE-798>`_)
+- TIMESTAMP columns in Phoenix are stored with a nanosecond accuracy, but the remote protocol truncates them to milliseconds. (`CALCITE-796 <https://issues.apache.org/jira/browse/CALCITE-796>`_)
+- ARRAY columns are not supported. Again, this previously lacked server-side support which has since been built. The
+  driver needs to be updated to support this functionality.
+  (`CALCITE-1050 <https://issues.apache.org/jira/browse/CALCITE-1050>`_, `PHOENIX-2585 <https://issues.apache.org/jira/browse/PHOENIX-2585>`_)
diff --git a/python/phoenixdb/RELEASING.rst b/python/phoenixdb/RELEASING.rst
new file mode 100644
index 0000000..d996cfe
--- /dev/null
+++ b/python/phoenixdb/RELEASING.rst
@@ -0,0 +1,12 @@
+Releasing a new version
+=======================
+
+Change the version number in ``setup.py`` and ``NEWS.rst``.
+
+Commit the changes and tag the repository::
+
+    git tag -s vX.Y
+
+Upload the package to PyPI::
+
+    python setup.py clean sdist upload
diff --git a/python/phoenixdb/ci/build-env/Dockerfile b/python/phoenixdb/ci/build-env/Dockerfile
new file mode 100644
index 0000000..08ce45c
--- /dev/null
+++ b/python/phoenixdb/ci/build-env/Dockerfile
@@ -0,0 +1,7 @@
+FROM ubuntu:xenial
+
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install -y python-dev python3-dev tox
+
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install -y git
diff --git a/python/phoenixdb/ci/phoenix/Dockerfile b/python/phoenixdb/ci/phoenix/Dockerfile
new file mode 100644
index 0000000..fc6fadc
--- /dev/null
+++ b/python/phoenixdb/ci/phoenix/Dockerfile
@@ -0,0 +1,33 @@
+FROM openjdk:8
+
+ARG HBASE_VERSION
+ARG HBASE_DIR
+ARG PHOENIX_VERSION
+ARG PHOENIX_NAME=apache-phoenix
+
+ENV HBASE_URL https://archive.apache.org/dist/hbase/$HBASE_DIR/hbase-$HBASE_VERSION-bin.tar.gz
+
+RUN wget --no-verbose -O hbase.tar.gz "$HBASE_URL" && \
+    mkdir /opt/hbase && \
+    tar xf hbase.tar.gz --strip-components=1 -C /opt/hbase && \
+    rm hbase.tar.gz
+
+ENV PHOENIX_URL https://archive.apache.org/dist/phoenix/apache-phoenix-$PHOENIX_VERSION/bin/apache-phoenix-$PHOENIX_VERSION-bin.tar.gz
+
+RUN wget --no-verbose -O phoenix.tar.gz "$PHOENIX_URL" && \
+    mkdir /opt/phoenix && \
+    tar xf phoenix.tar.gz --strip-components=1 -C /opt/phoenix && \
+    rm phoenix.tar.gz
+
+RUN ln -sv /opt/phoenix/phoenix-*-server.jar /opt/hbase/lib/
+
+ADD hbase-site.xml /opt/hbase/conf/hbase-site.xml
+
+ENV HBASE_CONF_DIR /opt/hbase/conf
+ENV HBASE_CP /opt/hbase/lib
+ENV HBASE_HOME /opt/hbase
+
+EXPOSE 8765
+
+COPY docker-entrypoint.sh /usr/local/bin/
+ENTRYPOINT ["docker-entrypoint.sh"]
diff --git a/python/phoenixdb/ci/phoenix/docker-entrypoint.sh b/python/phoenixdb/ci/phoenix/docker-entrypoint.sh
new file mode 100755
index 0000000..0bcc6da
--- /dev/null
+++ b/python/phoenixdb/ci/phoenix/docker-entrypoint.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+pids=()
+
+/opt/hbase/bin/hbase-daemon.sh foreground_start master &
+pids+=($!)
+
+/opt/phoenix/bin/queryserver.py &
+pids+=($!)
+
+cleanup() {
+    if [ ${#pids[@]} -ne 0 ]
+    then
+        pids=($(ps -o pid= -p "${pids[@]}"))
+        if [ ${#pids[@]} -ne 0 ]
+        then
+            kill "${pids[@]}"
+        fi
+    fi
+}
+
+trap cleanup SIGCHLD SIGINT SIGTERM
+
+wait
diff --git a/python/phoenixdb/ci/phoenix/hbase-site.xml b/python/phoenixdb/ci/phoenix/hbase-site.xml
new file mode 100644
index 0000000..0e9a1b1
--- /dev/null
+++ b/python/phoenixdb/ci/phoenix/hbase-site.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+    <property>
+        <name>hbase.regionserver.wal.codec</name>
+        <value>org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec</value>
+    </property>
+	<property>
+		<name>phoenix.schema.isNamespaceMappingEnabled</name>
+		<value>true</value>
+	</property>
+</configuration>
diff --git a/python/phoenixdb/doc/Makefile b/python/phoenixdb/doc/Makefile
new file mode 100644
index 0000000..31eb086
--- /dev/null
+++ b/python/phoenixdb/doc/Makefile
@@ -0,0 +1,192 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  applehelp  to make an Apple Help Book"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+	@echo "  coverage   to run coverage check of the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/phoenixdb.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/phoenixdb.qhc"
+
+applehelp:
+	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+	@echo
+	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+	@echo "N.B. You won't be able to view it unless you put it in" \
+	      "~/Library/Documentation/Help or install it in your application" \
+	      "bundle."
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/phoenixdb"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/phoenixdb"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+coverage:
+	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+	@echo "Testing of coverage in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/coverage/python.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/python/phoenixdb/doc/api.rst b/python/phoenixdb/doc/api.rst
new file mode 100644
index 0000000..cac317c
--- /dev/null
+++ b/python/phoenixdb/doc/api.rst
@@ -0,0 +1,30 @@
+API Reference
+=============
+
+phoenixdb module
+----------------
+
+.. automodule:: phoenixdb
+    :members:
+    :undoc-members:
+
+phoenixdb.connection module
+---------------------------
+
+.. automodule:: phoenixdb.connection
+    :members:
+    :undoc-members:
+
+phoenixdb.cursor module
+-----------------------
+
+.. automodule:: phoenixdb.cursor
+    :members:
+    :undoc-members:
+
+phoenixdb.avatica module
+------------------------
+
+.. automodule:: phoenixdb.avatica
+    :members:
+    :undoc-members:
diff --git a/python/phoenixdb/doc/conf.py b/python/phoenixdb/doc/conf.py
new file mode 100644
index 0000000..21898d7
--- /dev/null
+++ b/python/phoenixdb/doc/conf.py
@@ -0,0 +1,287 @@
+# -*- coding: utf-8 -*-
+#
+# phoenixdb documentation build configuration file, created by
+# sphinx-quickstart on Sun Jun 28 18:07:35 2015.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('../phoenixdb'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.doctest',
+    'sphinx.ext.intersphinx',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'phoenixdb'
+copyright = u'2015, Lukas Lalinsky'
+author = u'Lukas Lalinsky'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'classic'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+#html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+#html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'phoenixdbdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+#latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+
+# Latex figure (float) alignment
+#'figure_align': 'htbp',
+#}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+#latex_documents = [
+#  (master_doc, 'phoenixdb.tex', u'phoenixdb Documentation',
+#   u'Lukas Lalinsky', 'manual'),
+#]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, 'phoenixdb', u'phoenixdb Documentation',
+     [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  (master_doc, 'phoenixdb', u'phoenixdb Documentation',
+   author, 'phoenixdb', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'https://docs.python.org/': None}
diff --git a/python/phoenixdb/doc/index.rst b/python/phoenixdb/doc/index.rst
new file mode 100644
index 0000000..ada7fb8
--- /dev/null
+++ b/python/phoenixdb/doc/index.rst
@@ -0,0 +1,27 @@
+.. include:: ../README.rst
+
+API Reference
+-------------
+
+.. toctree::
+   :maxdepth: 2
+
+   api
+
+Changelog
+-------------
+
+.. toctree::
+   :maxdepth: 2
+
+   versions
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+
+.. _
diff --git a/python/phoenixdb/doc/versions.rst b/python/phoenixdb/doc/versions.rst
new file mode 100644
index 0000000..f3830fd
--- /dev/null
+++ b/python/phoenixdb/doc/versions.rst
@@ -0,0 +1,3 @@
+.. include:: ../NEWS.rst
+
+.. _
diff --git a/python/phoenixdb/docker-compose.yml b/python/phoenixdb/docker-compose.yml
new file mode 100644
index 0000000..bf398ec
--- /dev/null
+++ b/python/phoenixdb/docker-compose.yml
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: "3"
+services:
+  phoenix:
+    image: docker.oxygene.sk/lukas/python-phoenixdb/phoenix:${PHOENIX_VERSION:-4.11}
+    ports:
+      - "127.0.0.1:8765:8765"
diff --git a/python/phoenixdb/examples/basic.py b/python/phoenixdb/examples/basic.py
new file mode 100755
index 0000000..4894d21
--- /dev/null
+++ b/python/phoenixdb/examples/basic.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import phoenixdb
+
+with phoenixdb.connect('http://localhost:8765/', autocommit=True) as connection:
+    with connection.cursor() as cursor:
+        cursor.execute("DROP TABLE IF EXISTS test")
+        cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, text VARCHAR)")
+        cursor.executemany("UPSERT INTO test VALUES (?, ?)", [[1, 'hello'], [2, 'world']])
+        cursor.execute("SELECT * FROM test ORDER BY id")
+        for row in cursor:
+            print(row)
diff --git a/python/phoenixdb/examples/shell.py b/python/phoenixdb/examples/shell.py
new file mode 100755
index 0000000..820435e
--- /dev/null
+++ b/python/phoenixdb/examples/shell.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import argparse
+import sqlline
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--debug', '-d', action='store_true')
+parser.add_argument('url')
+args = parser.parse_args()
+
+if args.debug:
+    logging.basicConfig(level=logging.DEBUG)
+
+with sqlline.SqlLine() as sqlline:
+    sqlline.connect('phoenixdb', args.url)
+    sqlline.connection.autocommit = True
+    sqlline.run()
diff --git a/python/phoenixdb/gen-protobuf.sh b/python/phoenixdb/gen-protobuf.sh
new file mode 100755
index 0000000..ee094ce
--- /dev/null
+++ b/python/phoenixdb/gen-protobuf.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+AVATICA_VER=rel/avatica-1.10.0
+
+set -e
+
+rm -rf avatica-tmp
+
+mkdir avatica-tmp
+cd avatica-tmp
+wget -O avatica.tar.gz https://github.com/apache/calcite-avatica/archive/$AVATICA_VER.tar.gz
+tar -x --strip-components=1 -f avatica.tar.gz
+
+cd ..
+rm -f phoenixdb/avatica/proto/*_pb2.py
+protoc --proto_path=avatica-tmp/core/src/main/protobuf/ --python_out=phoenixdb/avatica/proto avatica-tmp/core/src/main/protobuf/*.proto
+if [[ "$(uname)" == "Darwin" ]]; then
+  sed -i '' 's/import common_pb2/from . import common_pb2/' phoenixdb/avatica/proto/*_pb2.py
+else
+  sed -i 's/import common_pb2/from . import common_pb2/' phoenixdb/avatica/proto/*_pb2.py
+fi
+
+rm -rf avatica-tmp
diff --git a/python/phoenixdb/phoenixdb/__init__.py b/python/phoenixdb/phoenixdb/__init__.py
new file mode 100644
index 0000000..24cb370
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/__init__.py
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from phoenixdb import errors, types
+from phoenixdb.avatica import AvaticaClient
+from phoenixdb.connection import Connection
+from phoenixdb.errors import *  # noqa: F401,F403
+from phoenixdb.types import *  # noqa: F401,F403
+
+__all__ = ['connect', 'apilevel', 'threadsafety', 'paramstyle'] + types.__all__ + errors.__all__
+
+
+apilevel = "2.0"
+"""
+This module supports the `DB API 2.0 interface <https://www.python.org/dev/peps/pep-0249/>`_.
+"""
+
+threadsafety = 1
+"""
+Multiple threads can share the module, but neither connections nor cursors.
+"""
+
+paramstyle = 'qmark'
+"""
+Parameterized queries should use the question mark as a parameter placeholder.
+
+For example::
+
+ cursor.execute("SELECT * FROM table WHERE id = ?", [my_id])
+"""
+
+
+def connect(url, max_retries=None, auth=None, **kwargs):
+    """Connects to a Phoenix query server.
+
+    :param url:
+        URL to the Phoenix query server, e.g. ``http://localhost:8765/``
+
+    :param autocommit:
+        Switch the connection to autocommit mode.
+
+    :param readonly:
+        Switch the connection to readonly mode.
+
+    :param max_retries:
+        The maximum number of retries in case there is a connection error.
+
+    :param cursor_factory:
+        If specified, the connection's :attr:`~phoenixdb.connection.Connection.cursor_factory` is set to it.
+
+    :param auth:
+        If specified a specific auth type will be used, otherwise connection will be unauthenticated
+        Currently only SPNEGO is supported
+
+    :returns:
+        :class:`~phoenixdb.connection.Connection` object.
+    """
+    client = AvaticaClient(url, max_retries=max_retries, auth=auth)
+    client.connect()
+    return Connection(client, **kwargs)
diff --git a/python/phoenixdb/phoenixdb/avatica/__init__.py b/python/phoenixdb/phoenixdb/avatica/__init__.py
new file mode 100644
index 0000000..53776d7
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .client import AvaticaClient  # noqa: F401
diff --git a/python/phoenixdb/phoenixdb/avatica/client.py b/python/phoenixdb/phoenixdb/avatica/client.py
new file mode 100644
index 0000000..46f349f
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/client.py
@@ -0,0 +1,502 @@
+# Copyright 2015 Lukas Lalinsky
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Implementation of the protobuf-over-HTTP RPC protocol used by Avatica."""
+
+import re
+import socket
+import pprint
+import math
+import logging
+import time
+from phoenixdb import errors
+from phoenixdb.avatica.proto import requests_pb2, common_pb2, responses_pb2
+
+import requests
+#from requests_gssapi import HTTPSPNEGOAuth, OPTIONAL
+from requests_kerberos import HTTPKerberosAuth, OPTIONAL
+import kerberos
+
+try:
+    import urlparse
+except ImportError:
+    import urllib.parse as urlparse
+
+try:
+    from HTMLParser import HTMLParser
+except ImportError:
+    from html.parser import HTMLParser
+
+__all__ = ['AvaticaClient']
+
+logger = logging.getLogger(__name__)
+
+
+class JettyErrorPageParser(HTMLParser):
+
+    def __init__(self):
+        HTMLParser.__init__(self)
+        self.path = []
+        self.title = []
+        self.message = []
+
+    def handle_starttag(self, tag, attrs):
+        self.path.append(tag)
+
+    def handle_endtag(self, tag):
+        self.path.pop()
+
+    def handle_data(self, data):
+        if len(self.path) > 2 and self.path[0] == 'html' and self.path[1] == 'body':
+            if len(self.path) == 3 and self.path[2] == 'h2':
+                self.title.append(data.strip())
+            elif len(self.path) == 4 and self.path[2] == 'p' and self.path[3] == 'pre':
+                self.message.append(data.strip())
+
+
+def parse_url(url):
+    url = urlparse.urlparse(url)
+    if not url.scheme and not url.netloc and url.path:
+        netloc = url.path
+        if ':' not in netloc:
+            netloc = '{}:8765'.format(netloc)
+        return urlparse.ParseResult('http', netloc, '/', '', '', '')
+    return url
+
+
+# Defined in phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+SQLSTATE_ERROR_CLASSES = [
+    ('08', errors.OperationalError),  # Connection Exception
+    ('22018', errors.IntegrityError),  # Constraint violation.
+    ('22', errors.DataError),  # Data Exception
+    ('23', errors.IntegrityError),  # Constraint Violation
+    ('24', errors.InternalError),  # Invalid Cursor State
+    ('25', errors.InternalError),  # Invalid Transaction State
+    ('42', errors.ProgrammingError),  # Syntax Error or Access Rule Violation
+    ('XLC', errors.OperationalError),  # Execution exceptions
+    ('INT', errors.InternalError),  # Phoenix internal error
+]
+
+# Relevant properties as defined by https://calcite.apache.org/avatica/docs/client_reference.html
+OPEN_CONNECTION_PROPERTIES = (
+    'user',  # User for the database connection
+    'password',  # Password for the user
+)
+
+
+def raise_sql_error(code, sqlstate, message):
+    for prefix, error_class in SQLSTATE_ERROR_CLASSES:
+        if sqlstate.startswith(prefix):
+            raise error_class(message, code, sqlstate)
+
+
+def parse_and_raise_sql_error(message):
+    match = re.findall(r'(?:([^ ]+): )?ERROR (\d+) \(([0-9A-Z]{5})\): (.*?) ->', message)
+    if match is not None and len(match):
+        exception, code, sqlstate, message = match[0]
+        raise_sql_error(int(code), sqlstate, message)
+
+
+def parse_error_page(html):
+    parser = JettyErrorPageParser()
+    parser.feed(html)
+    if parser.title == ['HTTP ERROR: 500']:
+        message = ' '.join(parser.message).strip()
+        parse_and_raise_sql_error(message)
+        raise errors.InternalError(message)
+
+
+def parse_error_protobuf(text):
+    message = common_pb2.WireMessage()
+    message.ParseFromString(text)
+
+    err = responses_pb2.ErrorResponse()
+    err.ParseFromString(message.wrapped_message)
+
+    parse_and_raise_sql_error(err.error_message)
+    raise_sql_error(err.error_code, err.sql_state, err.error_message)
+    raise errors.InternalError(err.error_message)
+
+
+class AvaticaClient(object):
+    """Client for Avatica's RPC server.
+
+    This exposes all low-level functionality that the Avatica
+    server provides, using the native terminology. You most likely
+    do not want to use this class directly, but rather connect
+    to a server using :func:`phoenixdb.connect`.
+    """
+
+    def __init__(self, url, max_retries=None, auth=None):
+        """Constructs a new client object.
+
+        :param url:
+            URL of an Avatica RPC server.
+        """
+        self.url = parse_url(url)
+        self.max_retries = max_retries if max_retries is not None else 3
+        self.auth = auth
+        self.connection = None
+
+    def connect(self):
+        """This method used to open a persistent TCP connection;
+        the requests library does not require this."""
+        pass
+
+    def close(self):
+        """Also does nothing per requests"""
+        pass
+
+    def _post_request(self, body, headers):
+        retry_count = self.max_retries
+        while True:
+            logger.debug("POST %s %r %r", self.url.geturl(), body, headers)
+            try:
+                if self.auth == "SPNEGO":
+                    #response = requests.request('post', self.url.geturl(), data=body, stream=True, headers=headers, auth=HTTPSPNEGOAuth(mutual_authentication=OPTIONAL))
+                    response = requests.request('post', self.url.geturl(), data=body, stream=True, headers=headers, auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL, mech_oid=kerberos.GSS_MECH_OID_SPNEGO))
+                else:
+                    response = requests.request('post', self.url.geturl(), data=body, stream=True, headers=headers)
+
+            except requests.HTTPError as e:
+                if retry_count > 0:
+                    delay = math.exp(-retry_count)
+                    logger.debug("HTTP protocol error, will retry in %s seconds...", delay, exc_info=True)
+                    time.sleep(delay)
+                    retry_count -= 1
+                    continue
+                raise errors.InterfaceError('RPC request failed', cause=e)
+            else:
+                if response.status_code == requests.codes.service_unavailable:
+                    if retry_count > 0:
+                        delay = math.exp(-retry_count)
+                        logger.debug("Service unavailable, will retry in %s seconds...", delay, exc_info=True)
+                        time.sleep(delay)
+                        retry_count -= 1
+                        continue
+                return response
+
+    def _apply(self, request_data, expected_response_type=None):
+        logger.debug("Sending request\n%s", pprint.pformat(request_data))
+
+        request_name = request_data.__class__.__name__
+        message = common_pb2.WireMessage()
+        message.name = 'org.apache.calcite.avatica.proto.Requests${}'.format(request_name)
+        message.wrapped_message = request_data.SerializeToString()
+        body = message.SerializeToString()
+        headers = {'content-type': 'application/x-google-protobuf'}
+
+        response = self._post_request(body, headers)
+        response_body = response.raw.read()
+
+        if response.status_code != requests.codes.ok:
+            logger.debug("Received response\n%s", response_body)
+            if b'<html>' in response_body:
+                parse_error_page(response_body)
+            else:
+                # assume the response is in protobuf format
+                parse_error_protobuf(response_body)
+            raise errors.InterfaceError('RPC request returned invalid status code', response.status_code)
+
+        message = common_pb2.WireMessage()
+        message.ParseFromString(response_body)
+
+        logger.debug("Received response\n%s", message)
+
+        if expected_response_type is None:
+            expected_response_type = request_name.replace('Request', 'Response')
+
+        expected_response_type = 'org.apache.calcite.avatica.proto.Responses$' + expected_response_type
+        if message.name != expected_response_type:
+            raise errors.InterfaceError('unexpected response type "{}" expected "{}"'.format(message.name, expected_response_type))
+
+        return message.wrapped_message
+
+    def get_catalogs(self, connection_id):
+        request = requests_pb2.CatalogsRequest()
+        request.connection_id = connection_id
+        return self._apply(request)
+
+    def get_schemas(self, connection_id, catalog=None, schemaPattern=None):
+        request = requests_pb2.SchemasRequest()
+        request.connection_id = connection_id
+        if catalog is not None:
+            request.catalog = catalog
+        if schemaPattern is not None:
+            request.schema_pattern = schemaPattern
+        return self._apply(request)
+
+    def get_tables(self, connection_id, catalog=None, schemaPattern=None, tableNamePattern=None, typeList=None):
+        request = requests_pb2.TablesRequest()
+        request.connection_id = connection_id
+        if catalog is not None:
+            request.catalog = catalog
+        if schemaPattern is not None:
+            request.schema_pattern = schemaPattern
+        if tableNamePattern is not None:
+            request.table_name_pattern = tableNamePattern
+        if typeList is not None:
+            request.type_list = typeList
+        if typeList is not None:
+            request.type_list.extend(typeList)
+        request.has_type_list = typeList is not None
+        return self._apply(request)
+
+    def get_columns(self, connection_id, catalog=None, schemaPattern=None, tableNamePattern=None, columnNamePattern=None):
+        request = requests_pb2.ColumnsRequest()
+        request.connection_id = connection_id
+        if catalog is not None:
+            request.catalog = catalog
+        if schemaPattern is not None:
+            request.schema_pattern = schemaPattern
+        if tableNamePattern is not None:
+            request.table_name_pattern = tableNamePattern
+        if columnNamePattern is not None:
+            request.column_name_pattern = columnNamePattern
+        return self._apply(request)
+
+    def get_table_types(self, connection_id):
+        request = requests_pb2.TableTypesRequest()
+        request.connection_id = connection_id
+        return self._apply(request)
+
+    def get_type_info(self, connection_id):
+        request = requests_pb2.TypeInfoRequest()
+        request.connection_id = connection_id
+        return self._apply(request)
+
+    def connection_sync(self, connection_id, connProps=None):
+        """Synchronizes connection properties with the server.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param connProps:
+            Dictionary with the properties that should be changed.
+
+        :returns:
+            A ``common_pb2.ConnectionProperties`` object.
+        """
+        if connProps is None:
+            connProps = {}
+
+        request = requests_pb2.ConnectionSyncRequest()
+        request.connection_id = connection_id
+        request.conn_props.auto_commit = connProps.get('autoCommit', False)
+        request.conn_props.has_auto_commit = True
+        request.conn_props.read_only = connProps.get('readOnly', False)
+        request.conn_props.has_read_only = True
+        request.conn_props.transaction_isolation = connProps.get('transactionIsolation', 0)
+        request.conn_props.catalog = connProps.get('catalog', '')
+        request.conn_props.schema = connProps.get('schema', '')
+
+        response_data = self._apply(request)
+        response = responses_pb2.ConnectionSyncResponse()
+        response.ParseFromString(response_data)
+        return response.conn_props
+
+    def open_connection(self, connection_id, info=None):
+        """Opens a new connection.
+
+        :param connection_id:
+            ID of the connection to open.
+        """
+        request = requests_pb2.OpenConnectionRequest()
+        request.connection_id = connection_id
+        if info is not None:
+            # Info is a list of repeated pairs, setting a dict directly fails
+            for k, v in info.items():
+                request.info[k] = v
+
+        response_data = self._apply(request)
+        response = responses_pb2.OpenConnectionResponse()
+        response.ParseFromString(response_data)
+
+    def close_connection(self, connection_id):
+        """Closes a connection.
+
+        :param connection_id:
+            ID of the connection to close.
+        """
+        request = requests_pb2.CloseConnectionRequest()
+        request.connection_id = connection_id
+        self._apply(request)
+
+    def create_statement(self, connection_id):
+        """Creates a new statement.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :returns:
+            New statement ID.
+        """
+        request = requests_pb2.CreateStatementRequest()
+        request.connection_id = connection_id
+
+        response_data = self._apply(request)
+        response = responses_pb2.CreateStatementResponse()
+        response.ParseFromString(response_data)
+        return response.statement_id
+
+    def close_statement(self, connection_id, statement_id):
+        """Closes a statement.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param statement_id:
+            ID of the statement to close.
+        """
+        request = requests_pb2.CloseStatementRequest()
+        request.connection_id = connection_id
+        request.statement_id = statement_id
+
+        self._apply(request)
+
+    def prepare_and_execute(self, connection_id, statement_id, sql, max_rows_total=None, first_frame_max_size=None):
+        """Prepares and immediately executes a statement.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param statement_id:
+            ID of the statement to prepare.
+
+        :param sql:
+            SQL query.
+
+        :param max_rows_total:
+            The maximum number of rows that will be allowed for this query.
+
+        :param first_frame_max_size:
+            The maximum number of rows that will be returned in the first Frame returned for this query.
+
+        :returns:
+            Result set with the signature of the prepared statement and the first frame data.
+        """
+        request = requests_pb2.PrepareAndExecuteRequest()
+        request.connection_id = connection_id
+        request.statement_id = statement_id
+        request.sql = sql
+        if max_rows_total is not None:
+            request.max_rows_total = max_rows_total
+        if first_frame_max_size is not None:
+            request.first_frame_max_size = first_frame_max_size
+
+        response_data = self._apply(request, 'ExecuteResponse')
+        response = responses_pb2.ExecuteResponse()
+        response.ParseFromString(response_data)
+        return response.results
+
+    def prepare(self, connection_id, sql, max_rows_total=None):
+        """Prepares a statement.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param sql:
+            SQL query.
+
+        :param max_rows_total:
+            The maximum number of rows that will be allowed for this query.
+
+        :returns:
+            Signature of the prepared statement.
+        """
+        request = requests_pb2.PrepareRequest()
+        request.connection_id = connection_id
+        request.sql = sql
+        if max_rows_total is not None:
+            request.max_rows_total = max_rows_total
+
+        response_data = self._apply(request)
+        response = responses_pb2.PrepareResponse()
+        response.ParseFromString(response_data)
+        return response.statement
+
+    def execute(self, connection_id, statement_id, signature, parameter_values=None, first_frame_max_size=None):
+        """Returns a frame of rows.
+
+        The frame describes whether there may be another frame. If there is not
+        another frame, the current iteration is done when we have finished the
+        rows in this frame.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param statement_id:
+            ID of the statement to fetch rows from.
+
+        :param signature:
+            common_pb2.Signature object
+
+        :param parameter_values:
+            A list of parameter values, if statement is to be executed; otherwise ``None``.
+
+        :param first_frame_max_size:
+            The maximum number of rows that will be returned in the first Frame returned for this query.
+
+        :returns:
+            Frame data, or ``None`` if there are no more.
+        """
+        request = requests_pb2.ExecuteRequest()
+        request.statementHandle.id = statement_id
+        request.statementHandle.connection_id = connection_id
+        request.statementHandle.signature.CopyFrom(signature)
+        if parameter_values is not None:
+            request.parameter_values.extend(parameter_values)
+            request.has_parameter_values = True
+        if first_frame_max_size is not None:
+            request.deprecated_first_frame_max_size = first_frame_max_size
+            request.first_frame_max_size = first_frame_max_size
+
+        response_data = self._apply(request)
+        response = responses_pb2.ExecuteResponse()
+        response.ParseFromString(response_data)
+        return response.results
+
+    def fetch(self, connection_id, statement_id, offset=0, frame_max_size=None):
+        """Returns a frame of rows.
+
+        The frame describes whether there may be another frame. If there is not
+        another frame, the current iteration is done when we have finished the
+        rows in this frame.
+
+        :param connection_id:
+            ID of the current connection.
+
+        :param statement_id:
+            ID of the statement to fetch rows from.
+
+        :param offset:
+            Zero-based offset of first row in the requested frame.
+
+        :param frame_max_size:
+            Maximum number of rows to return; negative means no limit.
+
+        :returns:
+            Frame data, or ``None`` if there are no more.
+        """
+        request = requests_pb2.FetchRequest()
+        request.connection_id = connection_id
+        request.statement_id = statement_id
+        request.offset = offset
+        if frame_max_size is not None:
+            request.frame_max_size = frame_max_size
+
+        response_data = self._apply(request)
+        response = responses_pb2.FetchResponse()
+        response.ParseFromString(response_data)
+        return response.frame
diff --git a/python/phoenixdb/phoenixdb/avatica/proto/__init__.py b/python/phoenixdb/phoenixdb/avatica/proto/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/proto/__init__.py
diff --git a/python/phoenixdb/phoenixdb/avatica/proto/common_pb2.py b/python/phoenixdb/phoenixdb/avatica/proto/common_pb2.py
new file mode 100644
index 0000000..3c99502
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/proto/common_pb2.py
@@ -0,0 +1,1667 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: common.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='common.proto',
+  package='',
+  syntax='proto3',
+  serialized_pb=_b('\n\x0c\x63ommon.proto\"\xc0\x01\n\x14\x43onnectionProperties\x12\x10\n\x08is_dirty\x18\x01 \x01(\x08\x12\x13\n\x0b\x61uto_commit\x18\x02 \x01(\x08\x12\x17\n\x0fhas_auto_commit\x18\x07 \x01(\x08\x12\x11\n\tread_only\x18\x03 \x01(\x08\x12\x15\n\rhas_read_only\x18\x08 \x01(\x08\x12\x1d\n\x15transaction_isolation\x18\x04 \x01(\r\x12\x0f\n\x07\x63\x61talog\x18\x05 \x01(\t\x12\x0e\n\x06schema\x18\x06 \x01(\t\"S\n\x0fStatementHandle\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\r\x12\x1d\n\tsignature\x18\x03 \x01(\x0b\x32\n.Signature\"\xb0\x01\n\tSignature\x12 \n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x0f.ColumnMetaData\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12%\n\nparameters\x18\x03 \x03(\x0b\x32\x11.AvaticaParameter\x12&\n\x0e\x63ursor_factory\x18\x04 \x01(\x0b\x32\x0e.CursorFactory\x12%\n\rstatementType\x18\x05 \x01(\x0e\x32\x0e.StatementType\"\xad\x03\n\x0e\x43olumnMetaData\x12\x0f\n\x07ordinal\x18\x01 \x01(\r\x12\x16\n\x0e\x61uto_increment\x18\x02 \x01(\x08\x12\x16\n\x0e\x63\x61se_sensitive\x18\x03 \x01(\x08\x12\x12\n\nsearchable\x18\x04 \x01(\x08\x12\x10\n\x08\x63urrency\x18\x05 \x01(\x08\x12\x10\n\x08nullable\x18\x06 \x01(\r\x12\x0e\n\x06signed\x18\x07 \x01(\x08\x12\x14\n\x0c\x64isplay_size\x18\x08 \x01(\r\x12\r\n\x05label\x18\t \x01(\t\x12\x13\n\x0b\x63olumn_name\x18\n \x01(\t\x12\x13\n\x0bschema_name\x18\x0b \x01(\t\x12\x11\n\tprecision\x18\x0c \x01(\r\x12\r\n\x05scale\x18\r \x01(\r\x12\x12\n\ntable_name\x18\x0e \x01(\t\x12\x14\n\x0c\x63\x61talog_name\x18\x0f \x01(\t\x12\x11\n\tread_only\x18\x10 \x01(\x08\x12\x10\n\x08writable\x18\x11 \x01(\x08\x12\x1b\n\x13\x64\x65\x66initely_writable\x18\x12 \x01(\x08\x12\x19\n\x11\x63olumn_class_name\x18\x13 \x01(\t\x12\x1a\n\x04type\x18\x14 \x01(\x0b\x32\x0c.AvaticaType\"}\n\x0b\x41vaticaType\x12\n\n\x02id\x18\x01 \x01(\r\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x11\n\x03rep\x18\x03 \x01(\x0e\x32\x04.Rep\x12 \n\x07\x63olumns\x18\x04 \x03(\x0b\x32\x0f.ColumnMetaData\x12\x1f\n\tcomponent\x18\x05 
\x01(\x0b\x32\x0c.AvaticaType\"\x91\x01\n\x10\x41vaticaParameter\x12\x0e\n\x06signed\x18\x01 \x01(\x08\x12\x11\n\tprecision\x18\x02 \x01(\r\x12\r\n\x05scale\x18\x03 \x01(\r\x12\x16\n\x0eparameter_type\x18\x04 \x01(\r\x12\x11\n\ttype_name\x18\x05 \x01(\t\x12\x12\n\nclass_name\x18\x06 \x01(\t\x12\x0c\n\x04name\x18\x07 \x01(\t\"\xb3\x01\n\rCursorFactory\x12#\n\x05style\x18\x01 \x01(\x0e\x32\x14.CursorFactory.Style\x12\x12\n\nclass_name\x18\x02 \x01(\t\x12\x13\n\x0b\x66ield_names\x18\x03 \x03(\t\"T\n\x05Style\x12\n\n\x06OBJECT\x10\x00\x12\n\n\x06RECORD\x10\x01\x12\x15\n\x11RECORD_PROJECTION\x10\x02\x12\t\n\x05\x41RRAY\x10\x03\x12\x08\n\x04LIST\x10\x04\x12\x07\n\x03MAP\x10\x05\"9\n\x05\x46rame\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\x12\x12\n\x04rows\x18\x03 \x03(\x0b\x32\x04.Row\"\"\n\x03Row\x12\x1b\n\x05value\x18\x01 \x03(\x0b\x32\x0c.ColumnValue\"3\n\x10\x44\x61tabaseProperty\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tfunctions\x18\x02 \x03(\t\"4\n\x0bWireMessage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0fwrapped_message\x18\x02 \x01(\x0c\"\x87\x01\n\x0b\x43olumnValue\x12\x1a\n\x05value\x18\x01 \x03(\x0b\x32\x0b.TypedValue\x12 \n\x0b\x61rray_value\x18\x02 \x03(\x0b\x32\x0b.TypedValue\x12\x17\n\x0fhas_array_value\x18\x03 \x01(\x08\x12!\n\x0cscalar_value\x18\x04 \x01(\x0b\x32\x0b.TypedValue\"\xf2\x01\n\nTypedValue\x12\x12\n\x04type\x18\x01 \x01(\x0e\x32\x04.Rep\x12\x12\n\nbool_value\x18\x02 \x01(\x08\x12\x14\n\x0cstring_value\x18\x03 \x01(\t\x12\x14\n\x0cnumber_value\x18\x04 \x01(\x12\x12\x13\n\x0b\x62ytes_value\x18\x05 \x01(\x0c\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x0c\n\x04null\x18\x07 \x01(\x08\x12 \n\x0b\x61rray_value\x18\x08 \x03(\x0b\x32\x0b.TypedValue\x12\x1c\n\x0e\x63omponent_type\x18\t \x01(\x0e\x32\x04.Rep\x12\x17\n\x0fimplicitly_null\x18\n \x01(\x08\"\xa6\x02\n\x19MetaDataOperationArgument\x12\x14\n\x0cstring_value\x18\x01 \x01(\t\x12\x12\n\nbool_value\x18\x02 \x01(\x08\x12\x11\n\tint_value\x18\x03 
\x01(\x11\x12\x1b\n\x13string_array_values\x18\x04 \x03(\t\x12\x18\n\x10int_array_values\x18\x05 \x03(\x11\x12\x35\n\x04type\x18\x06 \x01(\x0e\x32\'.MetaDataOperationArgument.ArgumentType\"^\n\x0c\x41rgumentType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\x07\n\x03INT\x10\x02\x12\x13\n\x0fREPEATED_STRING\x10\x03\x12\x10\n\x0cREPEATED_INT\x10\x04\x12\x08\n\x04NULL\x10\x05\"\xb0\x01\n\nQueryState\x12\x18\n\x04type\x18\x01 \x01(\x0e\x32\n.StateType\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x1e\n\x02op\x18\x03 \x01(\x0e\x32\x12.MetaDataOperation\x12(\n\x04\x61rgs\x18\x04 \x03(\x0b\x32\x1a.MetaDataOperationArgument\x12\x10\n\x08has_args\x18\x05 \x01(\x08\x12\x0f\n\x07has_sql\x18\x06 \x01(\x08\x12\x0e\n\x06has_op\x18\x07 \x01(\x08*\x9f\x01\n\rStatementType\x12\n\n\x06SELECT\x10\x00\x12\n\n\x06INSERT\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\n\n\x06UPSERT\x10\x04\x12\t\n\x05MERGE\x10\x05\x12\r\n\tOTHER_DML\x10\x06\x12\n\n\x06\x43REATE\x10\x07\x12\x08\n\x04\x44ROP\x10\x08\x12\t\n\x05\x41LTER\x10\t\x12\r\n\tOTHER_DDL\x10\n\x12\x08\n\x04\x43\x41LL\x10\x0b*\xe2\x03\n\x03Rep\x12\x15\n\x11PRIMITIVE_BOOLEAN\x10\x00\x12\x12\n\x0ePRIMITIVE_BYTE\x10\x01\x12\x12\n\x0ePRIMITIVE_CHAR\x10\x02\x12\x13\n\x0fPRIMITIVE_SHORT\x10\x03\x12\x11\n\rPRIMITIVE_INT\x10\x04\x12\x12\n\x0ePRIMITIVE_LONG\x10\x05\x12\x13\n\x0fPRIMITIVE_FLOAT\x10\x06\x12\x14\n\x10PRIMITIVE_DOUBLE\x10\x07\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\x08\n\x04\x42YTE\x10\t\x12\r\n\tCHARACTER\x10\n\x12\t\n\x05SHORT\x10\x0b\x12\x0b\n\x07INTEGER\x10\x0c\x12\x08\n\x04LONG\x10\r\x12\t\n\x05\x46LOAT\x10\x0e\x12\n\n\x06\x44OUBLE\x10\x0f\x12\x0f\n\x0b\x42IG_INTEGER\x10\x19\x12\x0f\n\x0b\x42IG_DECIMAL\x10\x1a\x12\x11\n\rJAVA_SQL_TIME\x10\x10\x12\x16\n\x12JAVA_SQL_TIMESTAMP\x10\x11\x12\x11\n\rJAVA_SQL_DATE\x10\x12\x12\x12\n\x0eJAVA_UTIL_DATE\x10\x13\x12\x0f\n\x0b\x42YTE_STRING\x10\x14\x12\n\n\x06STRING\x10\x15\x12\n\n\x06NUMBER\x10\x16\x12\n\n\x06OBJECT\x10\x17\x12\x08\n\x04NULL\x10\x18\x12\t\n\x05\x41RR
AY\x10\x1b\x12\n\n\x06STRUCT\x10\x1c\x12\x0c\n\x08MULTISET\x10\x1d*^\n\x08Severity\x12\x14\n\x10UNKNOWN_SEVERITY\x10\x00\x12\x12\n\x0e\x46\x41TAL_SEVERITY\x10\x01\x12\x12\n\x0e\x45RROR_SEVERITY\x10\x02\x12\x14\n\x10WARNING_SEVERITY\x10\x03*\xd7\x04\n\x11MetaDataOperation\x12\x12\n\x0eGET_ATTRIBUTES\x10\x00\x12\x1b\n\x17GET_BEST_ROW_IDENTIFIER\x10\x01\x12\x10\n\x0cGET_CATALOGS\x10\x02\x12\x1e\n\x1aGET_CLIENT_INFO_PROPERTIES\x10\x03\x12\x19\n\x15GET_COLUMN_PRIVILEGES\x10\x04\x12\x0f\n\x0bGET_COLUMNS\x10\x05\x12\x17\n\x13GET_CROSS_REFERENCE\x10\x06\x12\x15\n\x11GET_EXPORTED_KEYS\x10\x07\x12\x18\n\x14GET_FUNCTION_COLUMNS\x10\x08\x12\x11\n\rGET_FUNCTIONS\x10\t\x12\x15\n\x11GET_IMPORTED_KEYS\x10\n\x12\x12\n\x0eGET_INDEX_INFO\x10\x0b\x12\x14\n\x10GET_PRIMARY_KEYS\x10\x0c\x12\x19\n\x15GET_PROCEDURE_COLUMNS\x10\r\x12\x12\n\x0eGET_PROCEDURES\x10\x0e\x12\x16\n\x12GET_PSEUDO_COLUMNS\x10\x0f\x12\x0f\n\x0bGET_SCHEMAS\x10\x10\x12\x19\n\x15GET_SCHEMAS_WITH_ARGS\x10\x11\x12\x14\n\x10GET_SUPER_TABLES\x10\x12\x12\x13\n\x0fGET_SUPER_TYPES\x10\x13\x12\x18\n\x14GET_TABLE_PRIVILEGES\x10\x14\x12\x0e\n\nGET_TABLES\x10\x15\x12\x13\n\x0fGET_TABLE_TYPES\x10\x16\x12\x11\n\rGET_TYPE_INFO\x10\x17\x12\x0c\n\x08GET_UDTS\x10\x18\x12\x17\n\x13GET_VERSION_COLUMNS\x10\x19*\"\n\tStateType\x12\x07\n\x03SQL\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x42\"\n org.apache.calcite.avatica.protob\x06proto3')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_STATEMENTTYPE = _descriptor.EnumDescriptor(
+  name='StatementType',
+  full_name='StatementType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SELECT', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INSERT', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UPDATE', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DELETE', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UPSERT', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='MERGE', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTHER_DML', index=6, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CREATE', index=7, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DROP', index=8, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ALTER', index=9, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTHER_DDL', index=10, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CALL', index=11, number=11,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2426,
+  serialized_end=2585,
+)
+_sym_db.RegisterEnumDescriptor(_STATEMENTTYPE)
+
+StatementType = enum_type_wrapper.EnumTypeWrapper(_STATEMENTTYPE)
+_REP = _descriptor.EnumDescriptor(
+  name='Rep',
+  full_name='Rep',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_BOOLEAN', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_BYTE', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_CHAR', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_SHORT', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_INT', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_LONG', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_FLOAT', index=6, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIMITIVE_DOUBLE', index=7, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BOOLEAN', index=8, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BYTE', index=9, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CHARACTER', index=10, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SHORT', index=11, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INTEGER', index=12, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LONG', index=13, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FLOAT', index=14, number=14,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DOUBLE', index=15, number=15,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BIG_INTEGER', index=16, number=25,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BIG_DECIMAL', index=17, number=26,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='JAVA_SQL_TIME', index=18, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='JAVA_SQL_TIMESTAMP', index=19, number=17,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='JAVA_SQL_DATE', index=20, number=18,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='JAVA_UTIL_DATE', index=21, number=19,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BYTE_STRING', index=22, number=20,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=23, number=21,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NUMBER', index=24, number=22,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OBJECT', index=25, number=23,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NULL', index=26, number=24,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ARRAY', index=27, number=27,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRUCT', index=28, number=28,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='MULTISET', index=29, number=29,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2588,
+  serialized_end=3070,
+)
+_sym_db.RegisterEnumDescriptor(_REP)
+
+Rep = enum_type_wrapper.EnumTypeWrapper(_REP)
+_SEVERITY = _descriptor.EnumDescriptor(
+  name='Severity',
+  full_name='Severity',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN_SEVERITY', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FATAL_SEVERITY', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ERROR_SEVERITY', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WARNING_SEVERITY', index=3, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3072,
+  serialized_end=3166,
+)
+_sym_db.RegisterEnumDescriptor(_SEVERITY)
+
+Severity = enum_type_wrapper.EnumTypeWrapper(_SEVERITY)
+_METADATAOPERATION = _descriptor.EnumDescriptor(
+  name='MetaDataOperation',
+  full_name='MetaDataOperation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='GET_ATTRIBUTES', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_BEST_ROW_IDENTIFIER', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_CATALOGS', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_CLIENT_INFO_PROPERTIES', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_COLUMN_PRIVILEGES', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_COLUMNS', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_CROSS_REFERENCE', index=6, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_EXPORTED_KEYS', index=7, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_FUNCTION_COLUMNS', index=8, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_FUNCTIONS', index=9, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_IMPORTED_KEYS', index=10, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_INDEX_INFO', index=11, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_PRIMARY_KEYS', index=12, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_PROCEDURE_COLUMNS', index=13, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_PROCEDURES', index=14, number=14,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_PSEUDO_COLUMNS', index=15, number=15,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_SCHEMAS', index=16, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_SCHEMAS_WITH_ARGS', index=17, number=17,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_SUPER_TABLES', index=18, number=18,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_SUPER_TYPES', index=19, number=19,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_TABLE_PRIVILEGES', index=20, number=20,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_TABLES', index=21, number=21,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_TABLE_TYPES', index=22, number=22,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_TYPE_INFO', index=23, number=23,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_UDTS', index=24, number=24,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_VERSION_COLUMNS', index=25, number=25,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3169,
+  serialized_end=3768,
+)
+_sym_db.RegisterEnumDescriptor(_METADATAOPERATION)
+
+MetaDataOperation = enum_type_wrapper.EnumTypeWrapper(_METADATAOPERATION)
+_STATETYPE = _descriptor.EnumDescriptor(
+  name='StateType',
+  full_name='StateType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SQL', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='METADATA', index=1, number=1,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3770,
+  serialized_end=3804,
+)
+_sym_db.RegisterEnumDescriptor(_STATETYPE)
+
+StateType = enum_type_wrapper.EnumTypeWrapper(_STATETYPE)
+SELECT = 0
+INSERT = 1
+UPDATE = 2
+DELETE = 3
+UPSERT = 4
+MERGE = 5
+OTHER_DML = 6
+CREATE = 7
+DROP = 8
+ALTER = 9
+OTHER_DDL = 10
+CALL = 11
+PRIMITIVE_BOOLEAN = 0
+PRIMITIVE_BYTE = 1
+PRIMITIVE_CHAR = 2
+PRIMITIVE_SHORT = 3
+PRIMITIVE_INT = 4
+PRIMITIVE_LONG = 5
+PRIMITIVE_FLOAT = 6
+PRIMITIVE_DOUBLE = 7
+BOOLEAN = 8
+BYTE = 9
+CHARACTER = 10
+SHORT = 11
+INTEGER = 12
+LONG = 13
+FLOAT = 14
+DOUBLE = 15
+BIG_INTEGER = 25
+BIG_DECIMAL = 26
+JAVA_SQL_TIME = 16
+JAVA_SQL_TIMESTAMP = 17
+JAVA_SQL_DATE = 18
+JAVA_UTIL_DATE = 19
+BYTE_STRING = 20
+STRING = 21
+NUMBER = 22
+OBJECT = 23
+NULL = 24
+ARRAY = 27
+STRUCT = 28
+MULTISET = 29
+UNKNOWN_SEVERITY = 0
+FATAL_SEVERITY = 1
+ERROR_SEVERITY = 2
+WARNING_SEVERITY = 3
+GET_ATTRIBUTES = 0
+GET_BEST_ROW_IDENTIFIER = 1
+GET_CATALOGS = 2
+GET_CLIENT_INFO_PROPERTIES = 3
+GET_COLUMN_PRIVILEGES = 4
+GET_COLUMNS = 5
+GET_CROSS_REFERENCE = 6
+GET_EXPORTED_KEYS = 7
+GET_FUNCTION_COLUMNS = 8
+GET_FUNCTIONS = 9
+GET_IMPORTED_KEYS = 10
+GET_INDEX_INFO = 11
+GET_PRIMARY_KEYS = 12
+GET_PROCEDURE_COLUMNS = 13
+GET_PROCEDURES = 14
+GET_PSEUDO_COLUMNS = 15
+GET_SCHEMAS = 16
+GET_SCHEMAS_WITH_ARGS = 17
+GET_SUPER_TABLES = 18
+GET_SUPER_TYPES = 19
+GET_TABLE_PRIVILEGES = 20
+GET_TABLES = 21
+GET_TABLE_TYPES = 22
+GET_TYPE_INFO = 23
+GET_UDTS = 24
+GET_VERSION_COLUMNS = 25
+SQL = 0
+METADATA = 1
+
+
+_CURSORFACTORY_STYLE = _descriptor.EnumDescriptor(
+  name='Style',
+  full_name='CursorFactory.Style',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='OBJECT', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='RECORD', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='RECORD_PROJECTION', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ARRAY', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LIST', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='MAP', index=5, number=5,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1278,
+  serialized_end=1362,
+)
+_sym_db.RegisterEnumDescriptor(_CURSORFACTORY_STYLE)
+
+_METADATAOPERATIONARGUMENT_ARGUMENTTYPE = _descriptor.EnumDescriptor(
+  name='ArgumentType',
+  full_name='MetaDataOperationArgument.ArgumentType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BOOL', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INT', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REPEATED_STRING', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REPEATED_INT', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NULL', index=5, number=5,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2150,
+  serialized_end=2244,
+)
+_sym_db.RegisterEnumDescriptor(_METADATAOPERATIONARGUMENT_ARGUMENTTYPE)
+
+
+_CONNECTIONPROPERTIES = _descriptor.Descriptor(
+  name='ConnectionProperties',
+  full_name='ConnectionProperties',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='is_dirty', full_name='ConnectionProperties.is_dirty', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='auto_commit', full_name='ConnectionProperties.auto_commit', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_auto_commit', full_name='ConnectionProperties.has_auto_commit', index=2,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='read_only', full_name='ConnectionProperties.read_only', index=3,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_read_only', full_name='ConnectionProperties.has_read_only', index=4,
+      number=8, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='transaction_isolation', full_name='ConnectionProperties.transaction_isolation', index=5,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='catalog', full_name='ConnectionProperties.catalog', index=6,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='schema', full_name='ConnectionProperties.schema', index=7,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=17,
+  serialized_end=209,
+)
+
+
+_STATEMENTHANDLE = _descriptor.Descriptor(
+  name='StatementHandle',
+  full_name='StatementHandle',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='StatementHandle.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='StatementHandle.id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='signature', full_name='StatementHandle.signature', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=211,
+  serialized_end=294,
+)
+
+
+_SIGNATURE = _descriptor.Descriptor(
+  name='Signature',
+  full_name='Signature',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='columns', full_name='Signature.columns', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sql', full_name='Signature.sql', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='parameters', full_name='Signature.parameters', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cursor_factory', full_name='Signature.cursor_factory', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statementType', full_name='Signature.statementType', index=4,
+      number=5, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=297,
+  serialized_end=473,
+)
+
+
+_COLUMNMETADATA = _descriptor.Descriptor(
+  name='ColumnMetaData',
+  full_name='ColumnMetaData',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ordinal', full_name='ColumnMetaData.ordinal', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='auto_increment', full_name='ColumnMetaData.auto_increment', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='case_sensitive', full_name='ColumnMetaData.case_sensitive', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='searchable', full_name='ColumnMetaData.searchable', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='currency', full_name='ColumnMetaData.currency', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='nullable', full_name='ColumnMetaData.nullable', index=5,
+      number=6, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='signed', full_name='ColumnMetaData.signed', index=6,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='display_size', full_name='ColumnMetaData.display_size', index=7,
+      number=8, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='label', full_name='ColumnMetaData.label', index=8,
+      number=9, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='column_name', full_name='ColumnMetaData.column_name', index=9,
+      number=10, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='schema_name', full_name='ColumnMetaData.schema_name', index=10,
+      number=11, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='precision', full_name='ColumnMetaData.precision', index=11,
+      number=12, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='scale', full_name='ColumnMetaData.scale', index=12,
+      number=13, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='table_name', full_name='ColumnMetaData.table_name', index=13,
+      number=14, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='catalog_name', full_name='ColumnMetaData.catalog_name', index=14,
+      number=15, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='read_only', full_name='ColumnMetaData.read_only', index=15,
+      number=16, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='writable', full_name='ColumnMetaData.writable', index=16,
+      number=17, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='definitely_writable', full_name='ColumnMetaData.definitely_writable', index=17,
+      number=18, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='column_class_name', full_name='ColumnMetaData.column_class_name', index=18,
+      number=19, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='ColumnMetaData.type', index=19,
+      number=20, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=476,
+  serialized_end=905,
+)
+
+
+_AVATICATYPE = _descriptor.Descriptor(
+  name='AvaticaType',
+  full_name='AvaticaType',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='AvaticaType.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='AvaticaType.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='rep', full_name='AvaticaType.rep', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='columns', full_name='AvaticaType.columns', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='component', full_name='AvaticaType.component', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=907,
+  serialized_end=1032,
+)
+
+
+_AVATICAPARAMETER = _descriptor.Descriptor(
+  name='AvaticaParameter',
+  full_name='AvaticaParameter',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='signed', full_name='AvaticaParameter.signed', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='precision', full_name='AvaticaParameter.precision', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='scale', full_name='AvaticaParameter.scale', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='parameter_type', full_name='AvaticaParameter.parameter_type', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type_name', full_name='AvaticaParameter.type_name', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='class_name', full_name='AvaticaParameter.class_name', index=5,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='AvaticaParameter.name', index=6,
+      number=7, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1035,
+  serialized_end=1180,
+)
+
+
+_CURSORFACTORY = _descriptor.Descriptor(
+  name='CursorFactory',
+  full_name='CursorFactory',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='style', full_name='CursorFactory.style', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='class_name', full_name='CursorFactory.class_name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field_names', full_name='CursorFactory.field_names', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _CURSORFACTORY_STYLE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1183,
+  serialized_end=1362,
+)
+
+
+_FRAME = _descriptor.Descriptor(
+  name='Frame',
+  full_name='Frame',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='offset', full_name='Frame.offset', index=0,
+      number=1, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='done', full_name='Frame.done', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='rows', full_name='Frame.rows', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1364,
+  serialized_end=1421,
+)
+
+
+_ROW = _descriptor.Descriptor(
+  name='Row',
+  full_name='Row',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='Row.value', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1423,
+  serialized_end=1457,
+)
+
+
+_DATABASEPROPERTY = _descriptor.Descriptor(
+  name='DatabaseProperty',
+  full_name='DatabaseProperty',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='DatabaseProperty.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='functions', full_name='DatabaseProperty.functions', index=1,
+      number=2, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1459,
+  serialized_end=1510,
+)
+
+
+_WIREMESSAGE = _descriptor.Descriptor(
+  name='WireMessage',
+  full_name='WireMessage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='WireMessage.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='wrapped_message', full_name='WireMessage.wrapped_message', index=1,
+      number=2, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1512,
+  serialized_end=1564,
+)
+
+
+_COLUMNVALUE = _descriptor.Descriptor(
+  name='ColumnValue',
+  full_name='ColumnValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='ColumnValue.value', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='array_value', full_name='ColumnValue.array_value', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_array_value', full_name='ColumnValue.has_array_value', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='scalar_value', full_name='ColumnValue.scalar_value', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1567,
+  serialized_end=1702,
+)
+
+
+_TYPEDVALUE = _descriptor.Descriptor(
+  name='TypedValue',
+  full_name='TypedValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='TypedValue.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='bool_value', full_name='TypedValue.bool_value', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='TypedValue.string_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number_value', full_name='TypedValue.number_value', index=3,
+      number=4, type=18, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='bytes_value', full_name='TypedValue.bytes_value', index=4,
+      number=5, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='double_value', full_name='TypedValue.double_value', index=5,
+      number=6, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='null', full_name='TypedValue.null', index=6,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='array_value', full_name='TypedValue.array_value', index=7,
+      number=8, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='component_type', full_name='TypedValue.component_type', index=8,
+      number=9, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='implicitly_null', full_name='TypedValue.implicitly_null', index=9,
+      number=10, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1705,
+  serialized_end=1947,
+)
+
+
+_METADATAOPERATIONARGUMENT = _descriptor.Descriptor(
+  name='MetaDataOperationArgument',
+  full_name='MetaDataOperationArgument',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='MetaDataOperationArgument.string_value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='bool_value', full_name='MetaDataOperationArgument.bool_value', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='int_value', full_name='MetaDataOperationArgument.int_value', index=2,
+      number=3, type=17, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_array_values', full_name='MetaDataOperationArgument.string_array_values', index=3,
+      number=4, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='int_array_values', full_name='MetaDataOperationArgument.int_array_values', index=4,
+      number=5, type=17, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='MetaDataOperationArgument.type', index=5,
+      number=6, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _METADATAOPERATIONARGUMENT_ARGUMENTTYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1950,
+  serialized_end=2244,
+)
+
+
+_QUERYSTATE = _descriptor.Descriptor(
+  name='QueryState',
+  full_name='QueryState',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='QueryState.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sql', full_name='QueryState.sql', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='op', full_name='QueryState.op', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='args', full_name='QueryState.args', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_args', full_name='QueryState.has_args', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_sql', full_name='QueryState.has_sql', index=5,
+      number=6, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_op', full_name='QueryState.has_op', index=6,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2247,
+  serialized_end=2423,
+)
+
+_STATEMENTHANDLE.fields_by_name['signature'].message_type = _SIGNATURE
+_SIGNATURE.fields_by_name['columns'].message_type = _COLUMNMETADATA
+_SIGNATURE.fields_by_name['parameters'].message_type = _AVATICAPARAMETER
+_SIGNATURE.fields_by_name['cursor_factory'].message_type = _CURSORFACTORY
+_SIGNATURE.fields_by_name['statementType'].enum_type = _STATEMENTTYPE
+_COLUMNMETADATA.fields_by_name['type'].message_type = _AVATICATYPE
+_AVATICATYPE.fields_by_name['rep'].enum_type = _REP
+_AVATICATYPE.fields_by_name['columns'].message_type = _COLUMNMETADATA
+_AVATICATYPE.fields_by_name['component'].message_type = _AVATICATYPE
+_CURSORFACTORY.fields_by_name['style'].enum_type = _CURSORFACTORY_STYLE
+_CURSORFACTORY_STYLE.containing_type = _CURSORFACTORY
+_FRAME.fields_by_name['rows'].message_type = _ROW
+_ROW.fields_by_name['value'].message_type = _COLUMNVALUE
+_COLUMNVALUE.fields_by_name['value'].message_type = _TYPEDVALUE
+_COLUMNVALUE.fields_by_name['array_value'].message_type = _TYPEDVALUE
+_COLUMNVALUE.fields_by_name['scalar_value'].message_type = _TYPEDVALUE
+_TYPEDVALUE.fields_by_name['type'].enum_type = _REP
+_TYPEDVALUE.fields_by_name['array_value'].message_type = _TYPEDVALUE
+_TYPEDVALUE.fields_by_name['component_type'].enum_type = _REP
+_METADATAOPERATIONARGUMENT.fields_by_name['type'].enum_type = _METADATAOPERATIONARGUMENT_ARGUMENTTYPE
+_METADATAOPERATIONARGUMENT_ARGUMENTTYPE.containing_type = _METADATAOPERATIONARGUMENT
+_QUERYSTATE.fields_by_name['type'].enum_type = _STATETYPE
+_QUERYSTATE.fields_by_name['op'].enum_type = _METADATAOPERATION
+_QUERYSTATE.fields_by_name['args'].message_type = _METADATAOPERATIONARGUMENT
+DESCRIPTOR.message_types_by_name['ConnectionProperties'] = _CONNECTIONPROPERTIES
+DESCRIPTOR.message_types_by_name['StatementHandle'] = _STATEMENTHANDLE
+DESCRIPTOR.message_types_by_name['Signature'] = _SIGNATURE
+DESCRIPTOR.message_types_by_name['ColumnMetaData'] = _COLUMNMETADATA
+DESCRIPTOR.message_types_by_name['AvaticaType'] = _AVATICATYPE
+DESCRIPTOR.message_types_by_name['AvaticaParameter'] = _AVATICAPARAMETER
+DESCRIPTOR.message_types_by_name['CursorFactory'] = _CURSORFACTORY
+DESCRIPTOR.message_types_by_name['Frame'] = _FRAME
+DESCRIPTOR.message_types_by_name['Row'] = _ROW
+DESCRIPTOR.message_types_by_name['DatabaseProperty'] = _DATABASEPROPERTY
+DESCRIPTOR.message_types_by_name['WireMessage'] = _WIREMESSAGE
+DESCRIPTOR.message_types_by_name['ColumnValue'] = _COLUMNVALUE
+DESCRIPTOR.message_types_by_name['TypedValue'] = _TYPEDVALUE
+DESCRIPTOR.message_types_by_name['MetaDataOperationArgument'] = _METADATAOPERATIONARGUMENT
+DESCRIPTOR.message_types_by_name['QueryState'] = _QUERYSTATE
+DESCRIPTOR.enum_types_by_name['StatementType'] = _STATEMENTTYPE
+DESCRIPTOR.enum_types_by_name['Rep'] = _REP
+DESCRIPTOR.enum_types_by_name['Severity'] = _SEVERITY
+DESCRIPTOR.enum_types_by_name['MetaDataOperation'] = _METADATAOPERATION
+DESCRIPTOR.enum_types_by_name['StateType'] = _STATETYPE
+
+ConnectionProperties = _reflection.GeneratedProtocolMessageType('ConnectionProperties', (_message.Message,), dict(
+  DESCRIPTOR = _CONNECTIONPROPERTIES,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:ConnectionProperties)
+  ))
+_sym_db.RegisterMessage(ConnectionProperties)
+
+StatementHandle = _reflection.GeneratedProtocolMessageType('StatementHandle', (_message.Message,), dict(
+  DESCRIPTOR = _STATEMENTHANDLE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:StatementHandle)
+  ))
+_sym_db.RegisterMessage(StatementHandle)
+
+Signature = _reflection.GeneratedProtocolMessageType('Signature', (_message.Message,), dict(
+  DESCRIPTOR = _SIGNATURE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:Signature)
+  ))
+_sym_db.RegisterMessage(Signature)
+
+ColumnMetaData = _reflection.GeneratedProtocolMessageType('ColumnMetaData', (_message.Message,), dict(
+  DESCRIPTOR = _COLUMNMETADATA,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:ColumnMetaData)
+  ))
+_sym_db.RegisterMessage(ColumnMetaData)
+
+AvaticaType = _reflection.GeneratedProtocolMessageType('AvaticaType', (_message.Message,), dict(
+  DESCRIPTOR = _AVATICATYPE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:AvaticaType)
+  ))
+_sym_db.RegisterMessage(AvaticaType)
+
+AvaticaParameter = _reflection.GeneratedProtocolMessageType('AvaticaParameter', (_message.Message,), dict(
+  DESCRIPTOR = _AVATICAPARAMETER,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:AvaticaParameter)
+  ))
+_sym_db.RegisterMessage(AvaticaParameter)
+
+CursorFactory = _reflection.GeneratedProtocolMessageType('CursorFactory', (_message.Message,), dict(
+  DESCRIPTOR = _CURSORFACTORY,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:CursorFactory)
+  ))
+_sym_db.RegisterMessage(CursorFactory)
+
+Frame = _reflection.GeneratedProtocolMessageType('Frame', (_message.Message,), dict(
+  DESCRIPTOR = _FRAME,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:Frame)
+  ))
+_sym_db.RegisterMessage(Frame)
+
+Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict(
+  DESCRIPTOR = _ROW,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:Row)
+  ))
+_sym_db.RegisterMessage(Row)
+
+DatabaseProperty = _reflection.GeneratedProtocolMessageType('DatabaseProperty', (_message.Message,), dict(
+  DESCRIPTOR = _DATABASEPROPERTY,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:DatabaseProperty)
+  ))
+_sym_db.RegisterMessage(DatabaseProperty)
+
+WireMessage = _reflection.GeneratedProtocolMessageType('WireMessage', (_message.Message,), dict(
+  DESCRIPTOR = _WIREMESSAGE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:WireMessage)
+  ))
+_sym_db.RegisterMessage(WireMessage)
+
+ColumnValue = _reflection.GeneratedProtocolMessageType('ColumnValue', (_message.Message,), dict(
+  DESCRIPTOR = _COLUMNVALUE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:ColumnValue)
+  ))
+_sym_db.RegisterMessage(ColumnValue)
+
+TypedValue = _reflection.GeneratedProtocolMessageType('TypedValue', (_message.Message,), dict(
+  DESCRIPTOR = _TYPEDVALUE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:TypedValue)
+  ))
+_sym_db.RegisterMessage(TypedValue)
+
+MetaDataOperationArgument = _reflection.GeneratedProtocolMessageType('MetaDataOperationArgument', (_message.Message,), dict(
+  DESCRIPTOR = _METADATAOPERATIONARGUMENT,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:MetaDataOperationArgument)
+  ))
+_sym_db.RegisterMessage(MetaDataOperationArgument)
+
+QueryState = _reflection.GeneratedProtocolMessageType('QueryState', (_message.Message,), dict(
+  DESCRIPTOR = _QUERYSTATE,
+  __module__ = 'common_pb2'
+  # @@protoc_insertion_point(class_scope:QueryState)
+  ))
+_sym_db.RegisterMessage(QueryState)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n org.apache.calcite.avatica.proto'))
+# @@protoc_insertion_point(module_scope)
diff --git a/python/phoenixdb/phoenixdb/avatica/proto/requests_pb2.py b/python/phoenixdb/phoenixdb/avatica/proto/requests_pb2.py
new file mode 100644
index 0000000..203f945
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/proto/requests_pb2.py
@@ -0,0 +1,1206 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: requests.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import common_pb2 as common__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='requests.proto',
+  package='',
+  syntax='proto3',
+  serialized_pb=_b('\n\x0erequests.proto\x1a\x0c\x63ommon.proto\"(\n\x0f\x43\x61talogsRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"0\n\x17\x44\x61tabasePropertyRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"P\n\x0eSchemasRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x15\n\rconnection_id\x18\x03 \x01(\t\"\x95\x01\n\rTablesRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x1a\n\x12table_name_pattern\x18\x03 \x01(\t\x12\x11\n\ttype_list\x18\x04 \x03(\t\x12\x15\n\rhas_type_list\x18\x06 \x01(\x08\x12\x15\n\rconnection_id\x18\x07 \x01(\t\"*\n\x11TableTypesRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"\x89\x01\n\x0e\x43olumnsRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x1a\n\x12table_name_pattern\x18\x03 \x01(\t\x12\x1b\n\x13\x63olumn_name_pattern\x18\x04 \x01(\t\x12\x15\n\rconnection_id\x18\x05 \x01(\t\"(\n\x0fTypeInfoRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"\xa1\x01\n\x18PrepareAndExecuteRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x15\n\rmax_row_count\x18\x03 \x01(\x04\x12\x14\n\x0cstatement_id\x18\x04 \x01(\r\x12\x16\n\x0emax_rows_total\x18\x05 \x01(\x03\x12\x1c\n\x14\x66irst_frame_max_size\x18\x06 \x01(\x05\"c\n\x0ePrepareRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x15\n\rmax_row_count\x18\x03 \x01(\x04\x12\x16\n\x0emax_rows_total\x18\x04 \x01(\x03\"\x80\x01\n\x0c\x46\x65tchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x0e\n\x06offset\x18\x03 \x01(\x04\x12\x1b\n\x13\x66\x65tch_max_row_count\x18\x04 \x01(\r\x12\x16\n\x0e\x66rame_max_size\x18\x05 \x01(\x05\"/\n\x16\x43reateStatementRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"D\n\x15\x43loseStatementRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 
\x01(\r\"\x8b\x01\n\x15OpenConnectionRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12.\n\x04info\x18\x02 \x03(\x0b\x32 .OpenConnectionRequest.InfoEntry\x1a+\n\tInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"/\n\x16\x43loseConnectionRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"Y\n\x15\x43onnectionSyncRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12)\n\nconn_props\x18\x02 \x01(\x0b\x32\x15.ConnectionProperties\"\xc7\x01\n\x0e\x45xecuteRequest\x12)\n\x0fstatementHandle\x18\x01 \x01(\x0b\x32\x10.StatementHandle\x12%\n\x10parameter_values\x18\x02 \x03(\x0b\x32\x0b.TypedValue\x12\'\n\x1f\x64\x65precated_first_frame_max_size\x18\x03 \x01(\x04\x12\x1c\n\x14has_parameter_values\x18\x04 \x01(\x08\x12\x1c\n\x14\x66irst_frame_max_size\x18\x05 \x01(\x05\"m\n\x12SyncResultsRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1a\n\x05state\x18\x03 \x01(\x0b\x32\x0b.QueryState\x12\x0e\n\x06offset\x18\x04 \x01(\x04\"&\n\rCommitRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"(\n\x0fRollbackRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"b\n\x1dPrepareAndExecuteBatchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x14\n\x0csql_commands\x18\x03 \x03(\t\"4\n\x0bUpdateBatch\x12%\n\x10parameter_values\x18\x01 \x03(\x0b\x32\x0b.TypedValue\"a\n\x13\x45xecuteBatchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1d\n\x07updates\x18\x03 \x03(\x0b\x32\x0c.UpdateBatchB\"\n org.apache.calcite.avatica.protob\x06proto3')
+  ,
+  dependencies=[common__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_CATALOGSREQUEST = _descriptor.Descriptor(
+  name='CatalogsRequest',
+  full_name='CatalogsRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CatalogsRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=32,
+  serialized_end=72,
+)
+
+
+_DATABASEPROPERTYREQUEST = _descriptor.Descriptor(
+  name='DatabasePropertyRequest',
+  full_name='DatabasePropertyRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='DatabasePropertyRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=74,
+  serialized_end=122,
+)
+
+
+_SCHEMASREQUEST = _descriptor.Descriptor(
+  name='SchemasRequest',
+  full_name='SchemasRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='catalog', full_name='SchemasRequest.catalog', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='schema_pattern', full_name='SchemasRequest.schema_pattern', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='SchemasRequest.connection_id', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=124,
+  serialized_end=204,
+)
+
+
+_TABLESREQUEST = _descriptor.Descriptor(
+  name='TablesRequest',
+  full_name='TablesRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='catalog', full_name='TablesRequest.catalog', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='schema_pattern', full_name='TablesRequest.schema_pattern', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='table_name_pattern', full_name='TablesRequest.table_name_pattern', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type_list', full_name='TablesRequest.type_list', index=3,
+      number=4, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_type_list', full_name='TablesRequest.has_type_list', index=4,
+      number=6, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='TablesRequest.connection_id', index=5,
+      number=7, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=207,
+  serialized_end=356,
+)
+
+
+_TABLETYPESREQUEST = _descriptor.Descriptor(
+  name='TableTypesRequest',
+  full_name='TableTypesRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='TableTypesRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=358,
+  serialized_end=400,
+)
+
+
+_COLUMNSREQUEST = _descriptor.Descriptor(
+  name='ColumnsRequest',
+  full_name='ColumnsRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='catalog', full_name='ColumnsRequest.catalog', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='schema_pattern', full_name='ColumnsRequest.schema_pattern', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='table_name_pattern', full_name='ColumnsRequest.table_name_pattern', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='column_name_pattern', full_name='ColumnsRequest.column_name_pattern', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='ColumnsRequest.connection_id', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=403,
+  serialized_end=540,
+)
+
+
+_TYPEINFOREQUEST = _descriptor.Descriptor(
+  name='TypeInfoRequest',
+  full_name='TypeInfoRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='TypeInfoRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=542,
+  serialized_end=582,
+)
+
+
+_PREPAREANDEXECUTEREQUEST = _descriptor.Descriptor(
+  name='PrepareAndExecuteRequest',
+  full_name='PrepareAndExecuteRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='PrepareAndExecuteRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sql', full_name='PrepareAndExecuteRequest.sql', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_row_count', full_name='PrepareAndExecuteRequest.max_row_count', index=2,
+      number=3, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='PrepareAndExecuteRequest.statement_id', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_rows_total', full_name='PrepareAndExecuteRequest.max_rows_total', index=4,
+      number=5, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='first_frame_max_size', full_name='PrepareAndExecuteRequest.first_frame_max_size', index=5,
+      number=6, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=585,
+  serialized_end=746,
+)
+
+
+_PREPAREREQUEST = _descriptor.Descriptor(
+  name='PrepareRequest',
+  full_name='PrepareRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='PrepareRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sql', full_name='PrepareRequest.sql', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_row_count', full_name='PrepareRequest.max_row_count', index=2,
+      number=3, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_rows_total', full_name='PrepareRequest.max_rows_total', index=3,
+      number=4, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=748,
+  serialized_end=847,
+)
+
+
+_FETCHREQUEST = _descriptor.Descriptor(
+  name='FetchRequest',
+  full_name='FetchRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='FetchRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='FetchRequest.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='offset', full_name='FetchRequest.offset', index=2,
+      number=3, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='fetch_max_row_count', full_name='FetchRequest.fetch_max_row_count', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='frame_max_size', full_name='FetchRequest.frame_max_size', index=4,
+      number=5, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=850,
+  serialized_end=978,
+)
+
+
+_CREATESTATEMENTREQUEST = _descriptor.Descriptor(
+  name='CreateStatementRequest',
+  full_name='CreateStatementRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CreateStatementRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=980,
+  serialized_end=1027,
+)
+
+
+_CLOSESTATEMENTREQUEST = _descriptor.Descriptor(
+  name='CloseStatementRequest',
+  full_name='CloseStatementRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CloseStatementRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='CloseStatementRequest.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1029,
+  serialized_end=1097,
+)
+
+
+_OPENCONNECTIONREQUEST_INFOENTRY = _descriptor.Descriptor(
+  name='InfoEntry',
+  full_name='OpenConnectionRequest.InfoEntry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='OpenConnectionRequest.InfoEntry.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='OpenConnectionRequest.InfoEntry.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1196,
+  serialized_end=1239,
+)
+
+_OPENCONNECTIONREQUEST = _descriptor.Descriptor(
+  name='OpenConnectionRequest',
+  full_name='OpenConnectionRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='OpenConnectionRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='info', full_name='OpenConnectionRequest.info', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_OPENCONNECTIONREQUEST_INFOENTRY, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1100,
+  serialized_end=1239,
+)
+
+
+_CLOSECONNECTIONREQUEST = _descriptor.Descriptor(
+  name='CloseConnectionRequest',
+  full_name='CloseConnectionRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CloseConnectionRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1241,
+  serialized_end=1288,
+)
+
+
+_CONNECTIONSYNCREQUEST = _descriptor.Descriptor(
+  name='ConnectionSyncRequest',
+  full_name='ConnectionSyncRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='ConnectionSyncRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='conn_props', full_name='ConnectionSyncRequest.conn_props', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1290,
+  serialized_end=1379,
+)
+
+
+_EXECUTEREQUEST = _descriptor.Descriptor(
+  name='ExecuteRequest',
+  full_name='ExecuteRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='statementHandle', full_name='ExecuteRequest.statementHandle', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='parameter_values', full_name='ExecuteRequest.parameter_values', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated_first_frame_max_size', full_name='ExecuteRequest.deprecated_first_frame_max_size', index=2,
+      number=3, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='has_parameter_values', full_name='ExecuteRequest.has_parameter_values', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='first_frame_max_size', full_name='ExecuteRequest.first_frame_max_size', index=4,
+      number=5, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1382,
+  serialized_end=1581,
+)
+
+
+_SYNCRESULTSREQUEST = _descriptor.Descriptor(
+  name='SyncResultsRequest',
+  full_name='SyncResultsRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='SyncResultsRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='SyncResultsRequest.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='state', full_name='SyncResultsRequest.state', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='offset', full_name='SyncResultsRequest.offset', index=3,
+      number=4, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1583,
+  serialized_end=1692,
+)
+
+
+_COMMITREQUEST = _descriptor.Descriptor(
+  name='CommitRequest',
+  full_name='CommitRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CommitRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1694,
+  serialized_end=1732,
+)
+
+
+_ROLLBACKREQUEST = _descriptor.Descriptor(
+  name='RollbackRequest',
+  full_name='RollbackRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='RollbackRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1734,
+  serialized_end=1774,
+)
+
+
+_PREPAREANDEXECUTEBATCHREQUEST = _descriptor.Descriptor(
+  name='PrepareAndExecuteBatchRequest',
+  full_name='PrepareAndExecuteBatchRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='PrepareAndExecuteBatchRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='PrepareAndExecuteBatchRequest.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sql_commands', full_name='PrepareAndExecuteBatchRequest.sql_commands', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1776,
+  serialized_end=1874,
+)
+
+
+_UPDATEBATCH = _descriptor.Descriptor(
+  name='UpdateBatch',
+  full_name='UpdateBatch',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='parameter_values', full_name='UpdateBatch.parameter_values', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1876,
+  serialized_end=1928,
+)
+
+
+_EXECUTEBATCHREQUEST = _descriptor.Descriptor(
+  name='ExecuteBatchRequest',
+  full_name='ExecuteBatchRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='ExecuteBatchRequest.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='ExecuteBatchRequest.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='updates', full_name='ExecuteBatchRequest.updates', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1930,
+  serialized_end=2027,
+)
+
+_OPENCONNECTIONREQUEST_INFOENTRY.containing_type = _OPENCONNECTIONREQUEST
+_OPENCONNECTIONREQUEST.fields_by_name['info'].message_type = _OPENCONNECTIONREQUEST_INFOENTRY
+_CONNECTIONSYNCREQUEST.fields_by_name['conn_props'].message_type = common__pb2._CONNECTIONPROPERTIES
+_EXECUTEREQUEST.fields_by_name['statementHandle'].message_type = common__pb2._STATEMENTHANDLE
+_EXECUTEREQUEST.fields_by_name['parameter_values'].message_type = common__pb2._TYPEDVALUE
+_SYNCRESULTSREQUEST.fields_by_name['state'].message_type = common__pb2._QUERYSTATE
+_UPDATEBATCH.fields_by_name['parameter_values'].message_type = common__pb2._TYPEDVALUE
+_EXECUTEBATCHREQUEST.fields_by_name['updates'].message_type = _UPDATEBATCH
+DESCRIPTOR.message_types_by_name['CatalogsRequest'] = _CATALOGSREQUEST
+DESCRIPTOR.message_types_by_name['DatabasePropertyRequest'] = _DATABASEPROPERTYREQUEST
+DESCRIPTOR.message_types_by_name['SchemasRequest'] = _SCHEMASREQUEST
+DESCRIPTOR.message_types_by_name['TablesRequest'] = _TABLESREQUEST
+DESCRIPTOR.message_types_by_name['TableTypesRequest'] = _TABLETYPESREQUEST
+DESCRIPTOR.message_types_by_name['ColumnsRequest'] = _COLUMNSREQUEST
+DESCRIPTOR.message_types_by_name['TypeInfoRequest'] = _TYPEINFOREQUEST
+DESCRIPTOR.message_types_by_name['PrepareAndExecuteRequest'] = _PREPAREANDEXECUTEREQUEST
+DESCRIPTOR.message_types_by_name['PrepareRequest'] = _PREPAREREQUEST
+DESCRIPTOR.message_types_by_name['FetchRequest'] = _FETCHREQUEST
+DESCRIPTOR.message_types_by_name['CreateStatementRequest'] = _CREATESTATEMENTREQUEST
+DESCRIPTOR.message_types_by_name['CloseStatementRequest'] = _CLOSESTATEMENTREQUEST
+DESCRIPTOR.message_types_by_name['OpenConnectionRequest'] = _OPENCONNECTIONREQUEST
+DESCRIPTOR.message_types_by_name['CloseConnectionRequest'] = _CLOSECONNECTIONREQUEST
+DESCRIPTOR.message_types_by_name['ConnectionSyncRequest'] = _CONNECTIONSYNCREQUEST
+DESCRIPTOR.message_types_by_name['ExecuteRequest'] = _EXECUTEREQUEST
+DESCRIPTOR.message_types_by_name['SyncResultsRequest'] = _SYNCRESULTSREQUEST
+DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST
+DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST
+DESCRIPTOR.message_types_by_name['PrepareAndExecuteBatchRequest'] = _PREPAREANDEXECUTEBATCHREQUEST
+DESCRIPTOR.message_types_by_name['UpdateBatch'] = _UPDATEBATCH
+DESCRIPTOR.message_types_by_name['ExecuteBatchRequest'] = _EXECUTEBATCHREQUEST
+
+CatalogsRequest = _reflection.GeneratedProtocolMessageType('CatalogsRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CATALOGSREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:CatalogsRequest)
+  ))
+_sym_db.RegisterMessage(CatalogsRequest)
+
+DatabasePropertyRequest = _reflection.GeneratedProtocolMessageType('DatabasePropertyRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DATABASEPROPERTYREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:DatabasePropertyRequest)
+  ))
+_sym_db.RegisterMessage(DatabasePropertyRequest)
+
+SchemasRequest = _reflection.GeneratedProtocolMessageType('SchemasRequest', (_message.Message,), dict(
+  DESCRIPTOR = _SCHEMASREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:SchemasRequest)
+  ))
+_sym_db.RegisterMessage(SchemasRequest)
+
+TablesRequest = _reflection.GeneratedProtocolMessageType('TablesRequest', (_message.Message,), dict(
+  DESCRIPTOR = _TABLESREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:TablesRequest)
+  ))
+_sym_db.RegisterMessage(TablesRequest)
+
+TableTypesRequest = _reflection.GeneratedProtocolMessageType('TableTypesRequest', (_message.Message,), dict(
+  DESCRIPTOR = _TABLETYPESREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:TableTypesRequest)
+  ))
+_sym_db.RegisterMessage(TableTypesRequest)
+
+ColumnsRequest = _reflection.GeneratedProtocolMessageType('ColumnsRequest', (_message.Message,), dict(
+  DESCRIPTOR = _COLUMNSREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:ColumnsRequest)
+  ))
+_sym_db.RegisterMessage(ColumnsRequest)
+
+TypeInfoRequest = _reflection.GeneratedProtocolMessageType('TypeInfoRequest', (_message.Message,), dict(
+  DESCRIPTOR = _TYPEINFOREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:TypeInfoRequest)
+  ))
+_sym_db.RegisterMessage(TypeInfoRequest)
+
+PrepareAndExecuteRequest = _reflection.GeneratedProtocolMessageType('PrepareAndExecuteRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PREPAREANDEXECUTEREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:PrepareAndExecuteRequest)
+  ))
+_sym_db.RegisterMessage(PrepareAndExecuteRequest)
+
+PrepareRequest = _reflection.GeneratedProtocolMessageType('PrepareRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PREPAREREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:PrepareRequest)
+  ))
+_sym_db.RegisterMessage(PrepareRequest)
+
+FetchRequest = _reflection.GeneratedProtocolMessageType('FetchRequest', (_message.Message,), dict(
+  DESCRIPTOR = _FETCHREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:FetchRequest)
+  ))
+_sym_db.RegisterMessage(FetchRequest)
+
+CreateStatementRequest = _reflection.GeneratedProtocolMessageType('CreateStatementRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CREATESTATEMENTREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:CreateStatementRequest)
+  ))
+_sym_db.RegisterMessage(CreateStatementRequest)
+
+CloseStatementRequest = _reflection.GeneratedProtocolMessageType('CloseStatementRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CLOSESTATEMENTREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:CloseStatementRequest)
+  ))
+_sym_db.RegisterMessage(CloseStatementRequest)
+
+OpenConnectionRequest = _reflection.GeneratedProtocolMessageType('OpenConnectionRequest', (_message.Message,), dict(
+
+  InfoEntry = _reflection.GeneratedProtocolMessageType('InfoEntry', (_message.Message,), dict(
+    DESCRIPTOR = _OPENCONNECTIONREQUEST_INFOENTRY,
+    __module__ = 'requests_pb2'
+    # @@protoc_insertion_point(class_scope:OpenConnectionRequest.InfoEntry)
+    ))
+  ,
+  DESCRIPTOR = _OPENCONNECTIONREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:OpenConnectionRequest)
+  ))
+_sym_db.RegisterMessage(OpenConnectionRequest)
+_sym_db.RegisterMessage(OpenConnectionRequest.InfoEntry)
+
+CloseConnectionRequest = _reflection.GeneratedProtocolMessageType('CloseConnectionRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CLOSECONNECTIONREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:CloseConnectionRequest)
+  ))
+_sym_db.RegisterMessage(CloseConnectionRequest)
+
+ConnectionSyncRequest = _reflection.GeneratedProtocolMessageType('ConnectionSyncRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CONNECTIONSYNCREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:ConnectionSyncRequest)
+  ))
+_sym_db.RegisterMessage(ConnectionSyncRequest)
+
+ExecuteRequest = _reflection.GeneratedProtocolMessageType('ExecuteRequest', (_message.Message,), dict(
+  DESCRIPTOR = _EXECUTEREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:ExecuteRequest)
+  ))
+_sym_db.RegisterMessage(ExecuteRequest)
+
+SyncResultsRequest = _reflection.GeneratedProtocolMessageType('SyncResultsRequest', (_message.Message,), dict(
+  DESCRIPTOR = _SYNCRESULTSREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:SyncResultsRequest)
+  ))
+_sym_db.RegisterMessage(SyncResultsRequest)
+
+CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict(
+  DESCRIPTOR = _COMMITREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:CommitRequest)
+  ))
+_sym_db.RegisterMessage(CommitRequest)
+
+RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ROLLBACKREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:RollbackRequest)
+  ))
+_sym_db.RegisterMessage(RollbackRequest)
+
+PrepareAndExecuteBatchRequest = _reflection.GeneratedProtocolMessageType('PrepareAndExecuteBatchRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PREPAREANDEXECUTEBATCHREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:PrepareAndExecuteBatchRequest)
+  ))
+_sym_db.RegisterMessage(PrepareAndExecuteBatchRequest)
+
+UpdateBatch = _reflection.GeneratedProtocolMessageType('UpdateBatch', (_message.Message,), dict(
+  DESCRIPTOR = _UPDATEBATCH,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:UpdateBatch)
+  ))
+_sym_db.RegisterMessage(UpdateBatch)
+
+ExecuteBatchRequest = _reflection.GeneratedProtocolMessageType('ExecuteBatchRequest', (_message.Message,), dict(
+  DESCRIPTOR = _EXECUTEBATCHREQUEST,
+  __module__ = 'requests_pb2'
+  # @@protoc_insertion_point(class_scope:ExecuteBatchRequest)
+  ))
+_sym_db.RegisterMessage(ExecuteBatchRequest)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n org.apache.calcite.avatica.proto'))
+_OPENCONNECTIONREQUEST_INFOENTRY.has_options = True
+_OPENCONNECTIONREQUEST_INFOENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+# @@protoc_insertion_point(module_scope)
diff --git a/python/phoenixdb/phoenixdb/avatica/proto/responses_pb2.py b/python/phoenixdb/phoenixdb/avatica/proto/responses_pb2.py
new file mode 100644
index 0000000..e0259b9
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/avatica/proto/responses_pb2.py
@@ -0,0 +1,917 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: responses.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import common_pb2 as common__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='responses.proto',
+  package='',
+  syntax='proto3',
+  serialized_pb=_b('\n\x0fresponses.proto\x1a\x0c\x63ommon.proto\"\xc9\x01\n\x11ResultSetResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x15\n\rown_statement\x18\x03 \x01(\x08\x12\x1d\n\tsignature\x18\x04 \x01(\x0b\x32\n.Signature\x12\x1b\n\x0b\x66irst_frame\x18\x05 \x01(\x0b\x32\x06.Frame\x12\x14\n\x0cupdate_count\x18\x06 \x01(\x04\x12\x1e\n\x08metadata\x18\x07 \x01(\x0b\x32\x0c.RpcMetadata\"q\n\x0f\x45xecuteResponse\x12#\n\x07results\x18\x01 \x03(\x0b\x32\x12.ResultSetResponse\x12\x19\n\x11missing_statement\x18\x02 \x01(\x08\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"V\n\x0fPrepareResponse\x12#\n\tstatement\x18\x01 \x01(\x0b\x32\x10.StatementHandle\x12\x1e\n\x08metadata\x18\x02 \x01(\x0b\x32\x0c.RpcMetadata\"z\n\rFetchResponse\x12\x15\n\x05\x66rame\x18\x01 \x01(\x0b\x32\x06.Frame\x12\x19\n\x11missing_statement\x18\x02 \x01(\x08\x12\x17\n\x0fmissing_results\x18\x03 \x01(\x08\x12\x1e\n\x08metadata\x18\x04 \x01(\x0b\x32\x0c.RpcMetadata\"f\n\x17\x43reateStatementResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"8\n\x16\x43loseStatementResponse\x12\x1e\n\x08metadata\x18\x01 \x01(\x0b\x32\x0c.RpcMetadata\"8\n\x16OpenConnectionResponse\x12\x1e\n\x08metadata\x18\x01 \x01(\x0b\x32\x0c.RpcMetadata\"9\n\x17\x43loseConnectionResponse\x12\x1e\n\x08metadata\x18\x01 \x01(\x0b\x32\x0c.RpcMetadata\"c\n\x16\x43onnectionSyncResponse\x12)\n\nconn_props\x18\x01 \x01(\x0b\x32\x15.ConnectionProperties\x12\x1e\n\x08metadata\x18\x02 \x01(\x0b\x32\x0c.RpcMetadata\"u\n\x17\x44\x61tabasePropertyElement\x12\x1e\n\x03key\x18\x01 \x01(\x0b\x32\x11.DatabaseProperty\x12\x1a\n\x05value\x18\x02 \x01(\x0b\x32\x0b.TypedValue\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"c\n\x18\x44\x61tabasePropertyResponse\x12\'\n\x05props\x18\x01 \x03(\x0b\x32\x18.DatabasePropertyElement\x12\x1e\n\x08metadata\x18\x02 
\x01(\x0b\x32\x0c.RpcMetadata\"\xb6\x01\n\rErrorResponse\x12\x12\n\nexceptions\x18\x01 \x03(\t\x12\x16\n\x0ehas_exceptions\x18\x07 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x1b\n\x08severity\x18\x03 \x01(\x0e\x32\t.Severity\x12\x12\n\nerror_code\x18\x04 \x01(\r\x12\x11\n\tsql_state\x18\x05 \x01(\t\x12\x1e\n\x08metadata\x18\x06 \x01(\x0b\x32\x0c.RpcMetadata\"f\n\x13SyncResultsResponse\x12\x19\n\x11missing_statement\x18\x01 \x01(\x08\x12\x14\n\x0cmore_results\x18\x02 \x01(\x08\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"%\n\x0bRpcMetadata\x12\x16\n\x0eserver_address\x18\x01 \x01(\t\"\x10\n\x0e\x43ommitResponse\"\x12\n\x10RollbackResponse\"\x95\x01\n\x14\x45xecuteBatchResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x15\n\rupdate_counts\x18\x03 \x03(\x04\x12\x19\n\x11missing_statement\x18\x04 \x01(\x08\x12\x1e\n\x08metadata\x18\x05 \x01(\x0b\x32\x0c.RpcMetadataB\"\n org.apache.calcite.avatica.protob\x06proto3')
+  ,
+  dependencies=[common__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_RESULTSETRESPONSE = _descriptor.Descriptor(
+  name='ResultSetResponse',
+  full_name='ResultSetResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='ResultSetResponse.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='ResultSetResponse.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='own_statement', full_name='ResultSetResponse.own_statement', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='signature', full_name='ResultSetResponse.signature', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='first_frame', full_name='ResultSetResponse.first_frame', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='update_count', full_name='ResultSetResponse.update_count', index=5,
+      number=6, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='ResultSetResponse.metadata', index=6,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=34,
+  serialized_end=235,
+)
+
+
+_EXECUTERESPONSE = _descriptor.Descriptor(
+  name='ExecuteResponse',
+  full_name='ExecuteResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='results', full_name='ExecuteResponse.results', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='missing_statement', full_name='ExecuteResponse.missing_statement', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='ExecuteResponse.metadata', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=237,
+  serialized_end=350,
+)
+
+
+_PREPARERESPONSE = _descriptor.Descriptor(
+  name='PrepareResponse',
+  full_name='PrepareResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='statement', full_name='PrepareResponse.statement', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='PrepareResponse.metadata', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=352,
+  serialized_end=438,
+)
+
+
+_FETCHRESPONSE = _descriptor.Descriptor(
+  name='FetchResponse',
+  full_name='FetchResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='frame', full_name='FetchResponse.frame', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='missing_statement', full_name='FetchResponse.missing_statement', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='missing_results', full_name='FetchResponse.missing_results', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='FetchResponse.metadata', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=440,
+  serialized_end=562,
+)
+
+
+_CREATESTATEMENTRESPONSE = _descriptor.Descriptor(
+  name='CreateStatementResponse',
+  full_name='CreateStatementResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='CreateStatementResponse.connection_id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='statement_id', full_name='CreateStatementResponse.statement_id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='CreateStatementResponse.metadata', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=564,
+  serialized_end=666,
+)
+
+
+_CLOSESTATEMENTRESPONSE = _descriptor.Descriptor(
+  name='CloseStatementResponse',
+  full_name='CloseStatementResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='CloseStatementResponse.metadata', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=668,
+  serialized_end=724,
+)
+
+
+_OPENCONNECTIONRESPONSE = _descriptor.Descriptor(
+  name='OpenConnectionResponse',
+  full_name='OpenConnectionResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='OpenConnectionResponse.metadata', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=726,
+  serialized_end=782,
+)
+
+
+_CLOSECONNECTIONRESPONSE = _descriptor.Descriptor(
+  name='CloseConnectionResponse',
+  full_name='CloseConnectionResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='CloseConnectionResponse.metadata', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=784,
+  serialized_end=841,
+)
+
+
+_CONNECTIONSYNCRESPONSE = _descriptor.Descriptor(
+  name='ConnectionSyncResponse',
+  full_name='ConnectionSyncResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='conn_props', full_name='ConnectionSyncResponse.conn_props', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='ConnectionSyncResponse.metadata', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=843,
+  serialized_end=942,
+)
+
+
+_DATABASEPROPERTYELEMENT = _descriptor.Descriptor(
+  name='DatabasePropertyElement',
+  full_name='DatabasePropertyElement',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='DatabasePropertyElement.key', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='DatabasePropertyElement.value', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='DatabasePropertyElement.metadata', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=944,
+  serialized_end=1061,
+)
+
+
# NOTE(review): protoc-generated message descriptors for the Avatica
# "responses" protocol. Do not hand-edit; regenerate from the .proto sources.
_DATABASEPROPERTYRESPONSE = _descriptor.Descriptor(
  name='DatabasePropertyResponse',
  full_name='DatabasePropertyResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='props', full_name='DatabasePropertyResponse.props', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='metadata', full_name='DatabasePropertyResponse.metadata', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1063,
  serialized_end=1162,
)


_ERRORRESPONSE = _descriptor.Descriptor(
  name='ErrorResponse',
  full_name='ErrorResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='exceptions', full_name='ErrorResponse.exceptions', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='has_exceptions', full_name='ErrorResponse.has_exceptions', index=1,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='error_message', full_name='ErrorResponse.error_message', index=2,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='severity', full_name='ErrorResponse.severity', index=3,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='error_code', full_name='ErrorResponse.error_code', index=4,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sql_state', full_name='ErrorResponse.sql_state', index=5,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='metadata', full_name='ErrorResponse.metadata', index=6,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1165,
  serialized_end=1347,
)


_SYNCRESULTSRESPONSE = _descriptor.Descriptor(
  name='SyncResultsResponse',
  full_name='SyncResultsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='missing_statement', full_name='SyncResultsResponse.missing_statement', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='more_results', full_name='SyncResultsResponse.more_results', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='metadata', full_name='SyncResultsResponse.metadata', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1349,
  serialized_end=1451,
)


_RPCMETADATA = _descriptor.Descriptor(
  name='RpcMetadata',
  full_name='RpcMetadata',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='server_address', full_name='RpcMetadata.server_address', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1453,
  serialized_end=1490,
)


_COMMITRESPONSE = _descriptor.Descriptor(
  name='CommitResponse',
  full_name='CommitResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1492,
  serialized_end=1508,
)


_ROLLBACKRESPONSE = _descriptor.Descriptor(
  name='RollbackResponse',
  full_name='RollbackResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1510,
  serialized_end=1528,
)


_EXECUTEBATCHRESPONSE = _descriptor.Descriptor(
  name='ExecuteBatchResponse',
  full_name='ExecuteBatchResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='connection_id', full_name='ExecuteBatchResponse.connection_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='statement_id', full_name='ExecuteBatchResponse.statement_id', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_counts', full_name='ExecuteBatchResponse.update_counts', index=2,
      number=3, type=4, cpp_type=4, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='missing_statement', full_name='ExecuteBatchResponse.missing_statement', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='metadata', full_name='ExecuteBatchResponse.metadata', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1531,
  serialized_end=1680,
)
+
# Second protoc pass: resolve cross-message field references (message and
# enum types) that could not be filled in while the descriptors above were
# being constructed, then register every message type on the file DESCRIPTOR.
_RESULTSETRESPONSE.fields_by_name['signature'].message_type = common__pb2._SIGNATURE
_RESULTSETRESPONSE.fields_by_name['first_frame'].message_type = common__pb2._FRAME
_RESULTSETRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_EXECUTERESPONSE.fields_by_name['results'].message_type = _RESULTSETRESPONSE
_EXECUTERESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_PREPARERESPONSE.fields_by_name['statement'].message_type = common__pb2._STATEMENTHANDLE
_PREPARERESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_FETCHRESPONSE.fields_by_name['frame'].message_type = common__pb2._FRAME
_FETCHRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_CREATESTATEMENTRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_CLOSESTATEMENTRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_OPENCONNECTIONRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_CLOSECONNECTIONRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_CONNECTIONSYNCRESPONSE.fields_by_name['conn_props'].message_type = common__pb2._CONNECTIONPROPERTIES
_CONNECTIONSYNCRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_DATABASEPROPERTYELEMENT.fields_by_name['key'].message_type = common__pb2._DATABASEPROPERTY
_DATABASEPROPERTYELEMENT.fields_by_name['value'].message_type = common__pb2._TYPEDVALUE
_DATABASEPROPERTYELEMENT.fields_by_name['metadata'].message_type = _RPCMETADATA
_DATABASEPROPERTYRESPONSE.fields_by_name['props'].message_type = _DATABASEPROPERTYELEMENT
_DATABASEPROPERTYRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_ERRORRESPONSE.fields_by_name['severity'].enum_type = common__pb2._SEVERITY
_ERRORRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_SYNCRESULTSRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
_EXECUTEBATCHRESPONSE.fields_by_name['metadata'].message_type = _RPCMETADATA
DESCRIPTOR.message_types_by_name['ResultSetResponse'] = _RESULTSETRESPONSE
DESCRIPTOR.message_types_by_name['ExecuteResponse'] = _EXECUTERESPONSE
DESCRIPTOR.message_types_by_name['PrepareResponse'] = _PREPARERESPONSE
DESCRIPTOR.message_types_by_name['FetchResponse'] = _FETCHRESPONSE
DESCRIPTOR.message_types_by_name['CreateStatementResponse'] = _CREATESTATEMENTRESPONSE
DESCRIPTOR.message_types_by_name['CloseStatementResponse'] = _CLOSESTATEMENTRESPONSE
DESCRIPTOR.message_types_by_name['OpenConnectionResponse'] = _OPENCONNECTIONRESPONSE
DESCRIPTOR.message_types_by_name['CloseConnectionResponse'] = _CLOSECONNECTIONRESPONSE
DESCRIPTOR.message_types_by_name['ConnectionSyncResponse'] = _CONNECTIONSYNCRESPONSE
DESCRIPTOR.message_types_by_name['DatabasePropertyElement'] = _DATABASEPROPERTYELEMENT
DESCRIPTOR.message_types_by_name['DatabasePropertyResponse'] = _DATABASEPROPERTYRESPONSE
DESCRIPTOR.message_types_by_name['ErrorResponse'] = _ERRORRESPONSE
DESCRIPTOR.message_types_by_name['SyncResultsResponse'] = _SYNCRESULTSRESPONSE
DESCRIPTOR.message_types_by_name['RpcMetadata'] = _RPCMETADATA
DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE
DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE
DESCRIPTOR.message_types_by_name['ExecuteBatchResponse'] = _EXECUTEBATCHRESPONSE
+
# Build the concrete Python message classes from the descriptors above via
# the protobuf reflection metaclass, and register each with the default
# symbol database so it can be resolved by name at runtime.
ResultSetResponse = _reflection.GeneratedProtocolMessageType('ResultSetResponse', (_message.Message,), dict(
  DESCRIPTOR = _RESULTSETRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:ResultSetResponse)
  ))
_sym_db.RegisterMessage(ResultSetResponse)

ExecuteResponse = _reflection.GeneratedProtocolMessageType('ExecuteResponse', (_message.Message,), dict(
  DESCRIPTOR = _EXECUTERESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:ExecuteResponse)
  ))
_sym_db.RegisterMessage(ExecuteResponse)

PrepareResponse = _reflection.GeneratedProtocolMessageType('PrepareResponse', (_message.Message,), dict(
  DESCRIPTOR = _PREPARERESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:PrepareResponse)
  ))
_sym_db.RegisterMessage(PrepareResponse)

FetchResponse = _reflection.GeneratedProtocolMessageType('FetchResponse', (_message.Message,), dict(
  DESCRIPTOR = _FETCHRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:FetchResponse)
  ))
_sym_db.RegisterMessage(FetchResponse)

CreateStatementResponse = _reflection.GeneratedProtocolMessageType('CreateStatementResponse', (_message.Message,), dict(
  DESCRIPTOR = _CREATESTATEMENTRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:CreateStatementResponse)
  ))
_sym_db.RegisterMessage(CreateStatementResponse)

CloseStatementResponse = _reflection.GeneratedProtocolMessageType('CloseStatementResponse', (_message.Message,), dict(
  DESCRIPTOR = _CLOSESTATEMENTRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:CloseStatementResponse)
  ))
_sym_db.RegisterMessage(CloseStatementResponse)

OpenConnectionResponse = _reflection.GeneratedProtocolMessageType('OpenConnectionResponse', (_message.Message,), dict(
  DESCRIPTOR = _OPENCONNECTIONRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:OpenConnectionResponse)
  ))
_sym_db.RegisterMessage(OpenConnectionResponse)

CloseConnectionResponse = _reflection.GeneratedProtocolMessageType('CloseConnectionResponse', (_message.Message,), dict(
  DESCRIPTOR = _CLOSECONNECTIONRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:CloseConnectionResponse)
  ))
_sym_db.RegisterMessage(CloseConnectionResponse)

ConnectionSyncResponse = _reflection.GeneratedProtocolMessageType('ConnectionSyncResponse', (_message.Message,), dict(
  DESCRIPTOR = _CONNECTIONSYNCRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:ConnectionSyncResponse)
  ))
_sym_db.RegisterMessage(ConnectionSyncResponse)

DatabasePropertyElement = _reflection.GeneratedProtocolMessageType('DatabasePropertyElement', (_message.Message,), dict(
  DESCRIPTOR = _DATABASEPROPERTYELEMENT,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:DatabasePropertyElement)
  ))
_sym_db.RegisterMessage(DatabasePropertyElement)

DatabasePropertyResponse = _reflection.GeneratedProtocolMessageType('DatabasePropertyResponse', (_message.Message,), dict(
  DESCRIPTOR = _DATABASEPROPERTYRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:DatabasePropertyResponse)
  ))
_sym_db.RegisterMessage(DatabasePropertyResponse)

ErrorResponse = _reflection.GeneratedProtocolMessageType('ErrorResponse', (_message.Message,), dict(
  DESCRIPTOR = _ERRORRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:ErrorResponse)
  ))
_sym_db.RegisterMessage(ErrorResponse)

SyncResultsResponse = _reflection.GeneratedProtocolMessageType('SyncResultsResponse', (_message.Message,), dict(
  DESCRIPTOR = _SYNCRESULTSRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:SyncResultsResponse)
  ))
_sym_db.RegisterMessage(SyncResultsResponse)

RpcMetadata = _reflection.GeneratedProtocolMessageType('RpcMetadata', (_message.Message,), dict(
  DESCRIPTOR = _RPCMETADATA,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:RpcMetadata)
  ))
_sym_db.RegisterMessage(RpcMetadata)

CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict(
  DESCRIPTOR = _COMMITRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:CommitResponse)
  ))
_sym_db.RegisterMessage(CommitResponse)

RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict(
  DESCRIPTOR = _ROLLBACKRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:RollbackResponse)
  ))
_sym_db.RegisterMessage(RollbackResponse)

ExecuteBatchResponse = _reflection.GeneratedProtocolMessageType('ExecuteBatchResponse', (_message.Message,), dict(
  DESCRIPTOR = _EXECUTEBATCHRESPONSE,
  __module__ = 'responses_pb2'
  # @@protoc_insertion_point(class_scope:ExecuteBatchResponse)
  ))
_sym_db.RegisterMessage(ExecuteBatchResponse)
+
+
# File-level options from the .proto: java_package for the generated classes.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n org.apache.calcite.avatica.proto'))
# @@protoc_insertion_point(module_scope)
diff --git a/python/phoenixdb/phoenixdb/connection.py b/python/phoenixdb/phoenixdb/connection.py
new file mode 100644
index 0000000..593a242
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/connection.py
@@ -0,0 +1,187 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import uuid
+import weakref
+from phoenixdb import errors
+from phoenixdb.avatica.client import OPEN_CONNECTION_PROPERTIES
+from phoenixdb.cursor import Cursor
+from phoenixdb.errors import ProgrammingError
+
+__all__ = ['Connection']
+
+logger = logging.getLogger(__name__)
+
+
class Connection(object):
    """Database connection.

    You should not construct this object manually, use :func:`~phoenixdb.connect` instead.
    """

    cursor_factory = None
    """
    The default cursor factory used by :meth:`cursor` if the parameter is not specified.
    """

    def __init__(self, client, cursor_factory=None, **kwargs):
        self._client = client
        self._closed = False
        self.cursor_factory = cursor_factory if cursor_factory is not None else Cursor
        self._cursors = []
        # Split the keyword arguments: keys Avatica understands travel with
        # the OpenConnectionRequest, everything else goes to set_session().
        self._connection_args = {k: v for k, v in kwargs.items() if k in OPEN_CONNECTION_PROPERTIES}
        self._filtered_args = {k: v for k, v in kwargs.items() if k not in OPEN_CONNECTION_PROPERTIES}
        self.open()
        self.set_session(**self._filtered_args)

    def __del__(self):
        # Best-effort cleanup if the user forgot to close the connection.
        if not self._closed:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if not self._closed:
            self.close()

    def open(self):
        """Opens the connection."""
        # A fresh random id identifies this connection on the server.
        self._id = str(uuid.uuid4())
        self._client.open_connection(self._id, info=self._connection_args)

    def close(self):
        """Closes the connection.
        No further operations are allowed, either on the connection or any
        of its cursors, once the connection is closed.

        If the connection is used in a ``with`` statement, this method will
        be automatically called at the end of the ``with`` block.
        """
        if self._closed:
            raise ProgrammingError('the connection is already closed')
        # Close every cursor that is still alive before tearing down the
        # server-side connection and the transport itself.
        for ref in self._cursors:
            cursor = ref()
            if cursor is not None and not cursor._closed:
                cursor.close()
        self._client.close_connection(self._id)
        self._client.close()
        self._closed = True

    @property
    def closed(self):
        """Read-only attribute specifying if the connection is closed or not."""
        return self._closed

    def commit(self):
        """Commits pending database changes.

        Currently, this does nothing, because the RPC does not support
        transactions. Only defined for DB API 2.0 compatibility.
        You need to use :attr:`autocommit` mode.
        """
        # TODO can support be added for this?
        if self._closed:
            raise ProgrammingError('the connection is already closed')

    def cursor(self, cursor_factory=None):
        """Creates a new cursor.

        :param cursor_factory:
            This argument can be used to create non-standard cursors.
            The class returned must be a subclass of
            :class:`~phoenixdb.cursor.Cursor` (for example :class:`~phoenixdb.cursor.DictCursor`).
            A default factory for the connection can also be specified using the
            :attr:`cursor_factory` attribute.

        :returns:
            A :class:`~phoenixdb.cursor.Cursor` object.
        """
        if self._closed:
            raise ProgrammingError('the connection is already closed')
        factory = cursor_factory or self.cursor_factory
        cursor = factory(self)
        # Track the cursor weakly so close() can reach it without keeping
        # it alive; the callback drops dead references automatically.
        self._cursors.append(weakref.ref(cursor, self._cursors.remove))
        return cursor

    def set_session(self, autocommit=None, readonly=None):
        """Sets one or more parameters in the current connection.

        :param autocommit:
            Switch the connection to autocommit mode. With the current
            version, you need to always enable this, because
            :meth:`commit` is not implemented.

        :param readonly:
            Switch the connection to read-only mode.
        """
        requested = {}
        if autocommit is not None:
            requested['autoCommit'] = bool(autocommit)
        if readonly is not None:
            requested['readOnly'] = bool(readonly)
        # The server echoes back the effective connection properties.
        props = self._client.connection_sync(self._id, requested)
        self._autocommit = props.auto_commit
        self._readonly = props.read_only
        self._transactionisolation = props.transaction_isolation

    @property
    def autocommit(self):
        """Read/write attribute for switching the connection's autocommit mode."""
        return self._autocommit

    @autocommit.setter
    def autocommit(self, value):
        if self._closed:
            raise ProgrammingError('the connection is already closed')
        props = self._client.connection_sync(self._id, {'autoCommit': bool(value)})
        self._autocommit = props.auto_commit

    @property
    def readonly(self):
        """Read/write attribute for switching the connection's readonly mode."""
        return self._readonly

    @readonly.setter
    def readonly(self, value):
        if self._closed:
            raise ProgrammingError('the connection is already closed')
        props = self._client.connection_sync(self._id, {'readOnly': bool(value)})
        self._readonly = props.read_only

    @property
    def transactionisolation(self):
        # Read/write attribute mirroring the server-side transaction
        # isolation property.
        return self._transactionisolation

    @transactionisolation.setter
    def transactionisolation(self, value):
        if self._closed:
            raise ProgrammingError('the connection is already closed')
        props = self._client.connection_sync(self._id, {'transactionIsolation': bool(value)})
        self._transactionisolation = props.transaction_isolation
+
+
# Attach the DB API 2.0 exception classes as attributes of Connection, per
# the optional PEP 249 extension that lets callers catch conn.Error etc.
for name in errors.__all__:
    setattr(Connection, name, getattr(errors, name))
diff --git a/python/phoenixdb/phoenixdb/cursor.py b/python/phoenixdb/phoenixdb/cursor.py
new file mode 100644
index 0000000..8be7bed
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/cursor.py
@@ -0,0 +1,347 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import collections
+from phoenixdb.types import TypeHelper
+from phoenixdb.errors import ProgrammingError, InternalError
+from phoenixdb.avatica.proto import common_pb2
+
+__all__ = ['Cursor', 'ColumnDescription', 'DictCursor']
+
+logger = logging.getLogger(__name__)
+
# TODO see note in Cursor.rowcount()
MAX_INT = 2 ** 64 - 1

# The seven fields mirror the PEP 249 cursor.description 7-tuple.
ColumnDescription = collections.namedtuple('ColumnDescription', 'name type_code display_size internal_size precision scale null_ok')
"""Named tuple for representing results from :attr:`Cursor.description`."""
+
+
+class Cursor(object):
+    """Database cursor for executing queries and iterating over results.
+
+    You should not construct this object manually, use :meth:`Connection.cursor() <phoenixdb.connection.Connection.cursor>` instead.
+    """
+
+    arraysize = 1
+    """
+    Read/write attribute specifying the number of rows to fetch
+    at a time with :meth:`fetchmany`. It defaults to 1 meaning to
+    fetch a single row at a time.
+    """
+
+    itersize = 2000
+    """
+    Read/write attribute specifying the number of rows to fetch
+    from the backend at each network roundtrip during iteration
+    on the cursor. The default is 2000.
+    """
+
+    def __init__(self, connection, id=None):
+        self._connection = connection
+        self._id = id
+        self._signature = None
+        self._column_data_types = []
+        self._frame = None
+        self._pos = None
+        self._closed = False
+        self.arraysize = self.__class__.arraysize
+        self.itersize = self.__class__.itersize
+        self._updatecount = -1
+
    def __del__(self):
        # Best-effort cleanup: close only if neither the cursor nor its
        # connection has already been closed explicitly.
        if not self._connection._closed and not self._closed:
            self.close()

    def __enter__(self):
        # Support `with connection.cursor() as cursor:` blocks.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if not self._closed:
            self.close()

    def __iter__(self):
        # The cursor is its own iterator over the result rows.
        return self

    def __next__(self):
        row = self.fetchone()
        if row is None:
            raise StopIteration
        return row

    next = __next__  # Python 2 iteration protocol alias
+
+    def close(self):
+        """Closes the cursor.
+        No further operations are allowed once the cursor is closed.
+
+        If the cursor is used in a ``with`` statement, this method will
+        be automatically called at the end of the ``with`` block.
+        """
+        if self._closed:
+            raise ProgrammingError('the cursor is already closed')
+        if self._id is not None:
+            self._connection._client.close_statement(self._connection._id, self._id)
+            self._id = None
+        self._signature = None
+        self._column_data_types = []
+        self._frame = None
+        self._pos = None
+        self._closed = True
+
+    @property
+    def closed(self):
+        """Read-only attribute specifying if the cursor is closed or not."""
+        return self._closed
+
+    @property
+    def description(self):
+        if self._signature is None:
+            return None
+        description = []
+        for column in self._signature.columns:
+            description.append(ColumnDescription(
+                column.column_name,
+                column.type.name,
+                column.display_size,
+                None,
+                column.precision,
+                column.scale,
+                None if column.nullable == 2 else bool(column.nullable),
+            ))
+        return description
+
+    def _set_id(self, id):
+        if self._id is not None and self._id != id:
+            self._connection._client.close_statement(self._connection._id, self._id)
+        self._id = id
+
+    def _set_signature(self, signature):
+        self._signature = signature
+        self._column_data_types = []
+        self._parameter_data_types = []
+        if signature is None:
+            return
+
+        for column in signature.columns:
+            dtype = TypeHelper.from_class(column.column_class_name)
+            self._column_data_types.append(dtype)
+
+        for parameter in signature.parameters:
+            dtype = TypeHelper.from_class(parameter.class_name)
+            self._parameter_data_types.append(dtype)
+
+    def _set_frame(self, frame):
+        self._frame = frame
+        self._pos = None
+
+        if frame is not None:
+            if frame.rows:
+                self._pos = 0
+            elif not frame.done:
+                raise InternalError('got an empty frame, but the statement is not done yet')
+
    def _fetch_next_frame(self):
        # Ask the server for the window starting right after the rows we
        # already hold; frame_max_size caps the rows per round trip.
        offset = self._frame.offset + len(self._frame.rows)
        frame = self._connection._client.fetch(
            self._connection._id, self._id,
            offset=offset, frame_max_size=self.itersize)
        self._set_frame(frame)
+
+    def _process_results(self, results):
+        if results:
+            result = results[0]
+            if result.own_statement:
+                self._set_id(result.statement_id)
+            self._set_signature(result.signature if result.HasField('signature') else None)
+            self._set_frame(result.first_frame if result.HasField('first_frame') else None)
+            self._updatecount = result.update_count
+
+    def _transform_parameters(self, parameters):
+        typed_parameters = []
+        for value, data_type in zip(parameters, self._parameter_data_types):
+            field_name, rep, mutate_to, cast_from = data_type
+            typed_value = common_pb2.TypedValue()
+
+            if value is None:
+                typed_value.null = True
+                typed_value.type = common_pb2.NULL
+            else:
+                typed_value.null = False
+
+                # use the mutator function
+                if mutate_to is not None:
+                    value = mutate_to(value)
+
+                typed_value.type = rep
+                setattr(typed_value, field_name, value)
+
+            typed_parameters.append(typed_value)
+        return typed_parameters
+
+    def execute(self, operation, parameters=None):
+        if self._closed:
+            raise ProgrammingError('the cursor is already closed')
+        self._updatecount = -1
+        self._set_frame(None)
+        if parameters is None:
+            if self._id is None:
+                self._set_id(self._connection._client.create_statement(self._connection._id))
+            results = self._connection._client.prepare_and_execute(
+                self._connection._id, self._id,
+                operation, first_frame_max_size=self.itersize)
+            self._process_results(results)
+        else:
+            statement = self._connection._client.prepare(
+                self._connection._id, operation)
+            self._set_id(statement.id)
+            self._set_signature(statement.signature)
+
+            results = self._connection._client.execute(
+                self._connection._id, self._id,
+                statement.signature, self._transform_parameters(parameters),
+                first_frame_max_size=self.itersize)
+            self._process_results(results)
+
+    def executemany(self, operation, seq_of_parameters):
+        if self._closed:
+            raise ProgrammingError('the cursor is already closed')
+        self._updatecount = -1
+        self._set_frame(None)
+        statement = self._connection._client.prepare(
+            self._connection._id, operation, max_rows_total=0)
+        self._set_id(statement.id)
+        self._set_signature(statement.signature)
+        for parameters in seq_of_parameters:
+            self._connection._client.execute(
+                self._connection._id, self._id,
+                statement.signature, self._transform_parameters(parameters),
+                first_frame_max_size=0)
+
+    def _transform_row(self, row):
+        """Transforms a Row into Python values.
+
+        :param row:
+            A ``common_pb2.Row`` object.
+
+        :returns:
+            A list of values casted into the correct Python types.
+
+        :raises:
+            NotImplementedError
+        """
+        tmp_row = []
+
+        for i, column in enumerate(row.value):
+            if column.has_array_value:
+                raise NotImplementedError('array types are not supported')
+            elif column.scalar_value.null:
+                tmp_row.append(None)
+            else:
+                field_name, rep, mutate_to, cast_from = self._column_data_types[i]
+
+                # get the value from the field_name
+                value = getattr(column.scalar_value, field_name)
+
+                # cast the value
+                if cast_from is not None:
+                    value = cast_from(value)
+
+                tmp_row.append(value)
+        return tmp_row
+
+    def fetchone(self):
+        if self._frame is None:
+            raise ProgrammingError('no select statement was executed')
+        if self._pos is None:
+            return None
+        rows = self._frame.rows
+        row = self._transform_row(rows[self._pos])
+        self._pos += 1
+        if self._pos >= len(rows):
+            self._pos = None
+            if not self._frame.done:
+                self._fetch_next_frame()
+        return row
+
+    def fetchmany(self, size=None):
+        if size is None:
+            size = self.arraysize
+        rows = []
+        while size > 0:
+            row = self.fetchone()
+            if row is None:
+                break
+            rows.append(row)
+            size -= 1
+        return rows
+
+    def fetchall(self):
+        rows = []
+        while True:
+            row = self.fetchone()
+            if row is None:
+                break
+            rows.append(row)
+        return rows
+
+    def setinputsizes(self, sizes):
+        pass
+
+    def setoutputsize(self, size, column=None):
+        pass
+
+    @property
+    def connection(self):
+        """Read-only attribute providing access to the :class:`Connection <phoenixdb.connection.Connection>`
+        object this cursor was created from."""
+        return self._connection
+
+    @property
+    def rowcount(self):
+        """Read-only attribute specifying the number of rows affected by
+        the last executed DML statement or -1 if the number cannot be
+        determined. Note that this will always be set to -1 for select
+        queries."""
+        # TODO instead of -1, this ends up being set to Integer.MAX_VALUE
+        if self._updatecount == MAX_INT:
+            return -1
+        return self._updatecount
+
+    @property
+    def rownumber(self):
+        """Read-only attribute providing the current 0-based index of the
+        cursor in the result set or ``None`` if the index cannot be
+        determined.
+
+        The index can be seen as index of the cursor in a sequence
+        (the result set). The next fetch operation will fetch the
+        row indexed by :attr:`rownumber` in that sequence.
+        """
+        if self._frame is not None and self._pos is not None:
+            return self._frame.offset + self._pos
+        return self._pos
+
+
class DictCursor(Cursor):
    """A cursor that returns each row as a dict keyed by column name."""

    def _transform_row(self, row):
        values = super(DictCursor, self)._transform_row(row)
        columns = self._signature.columns
        return {columns[i].column_name: v for i, v in enumerate(values)}
diff --git a/python/phoenixdb/phoenixdb/errors.py b/python/phoenixdb/phoenixdb/errors.py
new file mode 100644
index 0000000..a046c0d
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/errors.py
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Public DB API 2.0 exception hierarchy exported by this module.
__all__ = [
    'Warning', 'Error', 'InterfaceError', 'DatabaseError', 'DataError',
    'OperationalError', 'IntegrityError', 'InternalError',
    'ProgrammingError', 'NotSupportedError',
]
+
try:
    # Python 2: DB API 2.0 suggests rooting the hierarchy at StandardError.
    _StandardError = StandardError
except NameError:
    # Python 3 removed StandardError; fall back to Exception.
    _StandardError = Exception


class Warning(_StandardError):
    """Not used by this package, only defined for compatibility
    with DB API 2.0."""


class Error(_StandardError):
    """Exception that is the base class of all other error exceptions.
    You can use this to catch all errors with one single except statement."""

    def __init__(self, message, code=None, sqlstate=None, cause=None):
        # BUGFIX: was super(_StandardError, self), which starts the MRO
        # search *past* _StandardError and dispatches to BaseException.
        # Name the defining class so the direct superclass runs instead.
        super(Error, self).__init__(message, code, sqlstate, cause)

    @property
    def message(self):
        return self.args[0]

    @property
    def code(self):
        return self.args[1]

    @property
    def sqlstate(self):
        return self.args[2]

    @property
    def cause(self):
        return self.args[3]
+
+
class InterfaceError(Error):
    """Exception raised for errors that are related to the database
    interface rather than the database itself."""


class DatabaseError(Error):
    """Exception raised for errors that are related to the database."""


class DataError(DatabaseError):
    """Exception raised for errors that are due to problems with the
    processed data like division by zero, numeric value out of range,
    etc."""


class OperationalError(DatabaseError):
    """Raised for errors that are related to the database's operation and not
    necessarily under the control of the programmer, e.g. an unexpected
    disconnect occurs, the data source name is not found, a transaction could
    not be processed, a memory allocation error occurred during
    processing, etc."""


class IntegrityError(DatabaseError):
    """Raised when the relational integrity of the database is affected, e.g. a foreign key check fails."""


class InternalError(DatabaseError):
    """Raised when the database encounters an internal problem."""


class ProgrammingError(DatabaseError):
    """Raised for programming errors, e.g. table not found, syntax error, etc."""


class NotSupportedError(DatabaseError):
    """Raised when using an API that is not supported by the database."""
diff --git a/python/phoenixdb/phoenixdb/tests/__init__.py b/python/phoenixdb/phoenixdb/tests/__init__.py
new file mode 100644
index 0000000..ec9a79b
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/__init__.py
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import unittest
+import phoenixdb
+
# Connection URL of a clean, disposable Phoenix test database; the
# integration tests are skipped when this environment variable is unset.
TEST_DB_URL = os.environ.get('PHOENIXDB_TEST_DB_URL')
+
+
@unittest.skipIf(TEST_DB_URL is None, "these tests require the PHOENIXDB_TEST_DB_URL environment variable set to a clean database")
class DatabaseTestCase(unittest.TestCase):
    """Base class for tests that need a live Phoenix connection.

    Opens one autocommit connection per test and drops any tables that
    were registered for cleanup before closing it again.
    """

    def setUp(self):
        self.conn = phoenixdb.connect(TEST_DB_URL, autocommit=True)
        self.cleanup_tables = []

    def tearDown(self):
        self.doCleanups()
        self.conn.close()

    def addTableCleanup(self, name):
        """Register table *name* to be dropped during test cleanup."""
        def drop_table():
            with self.conn.cursor() as cursor:
                cursor.execute("DROP TABLE IF EXISTS {}".format(name))
        self.addCleanup(drop_table)

    def createTable(self, name, columns):
        """(Re-)create table *name* with the given column definition string."""
        with self.conn.cursor() as cursor:
            cursor.execute("DROP TABLE IF EXISTS {}".format(name))
            cursor.execute("CREATE TABLE {} ({})".format(name, columns))
            self.addTableCleanup(name)
diff --git a/python/phoenixdb/phoenixdb/tests/dbapi20.py b/python/phoenixdb/phoenixdb/tests/dbapi20.py
new file mode 100644
index 0000000..f176400
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/dbapi20.py
@@ -0,0 +1,857 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+''' Python DB API 2.0 driver compliance unit test suite. 
+    
+    This software is Public Domain and may be used without restrictions.
+
+ "Now we have booze and barflies entering the discussion, plus rumours of
+  DBAs on drugs... and I won't tell you what flashes through my mind each
+  time I read the subject line with 'Anal Compliance' in it.  All around
+  this is turning out to be a thoroughly unwholesome unit test."
+
+    -- Ian Bicking
+'''
+
+__version__ = '1.14.3'
+
+import unittest
+import time
+import sys
+
# Version-compat shims: pick the common exception base class and a
# failUnless/assertTrue wrapper that works on both Python 2 and 3.
if sys.version[0] >= '3': #python 3.x
    _BaseException = Exception
    def _failUnless(self, expr, msg=None):
        # assertTrue replaced failUnless in the unittest API.
        self.assertTrue(expr, msg)
else:                   #python 2.x
    from exceptions import StandardError as _BaseException
    def _failUnless(self, expr, msg=None):
        self.failUnless(expr, msg)  ## deprecated since Python 2.6
+
def str2bytes(sval):
    """Return *sval* encoded as latin-1 bytes.

    On Python 2 a byte ``str`` is first decoded so that the subsequent
    encode works uniformly; on Python 3 this simply turns the unicode
    string into bytes.
    """
    needs_decode = sys.version_info < (3, 0) and isinstance(sval, str)
    text = sval.decode("latin1") if needs_decode else sval
    return text.encode("latin1")
+
+class DatabaseAPI20Test(unittest.TestCase):
+    ''' Test a database self.driver for DB API 2.0 compatibility.
+        This implementation tests Gadfly, but the TestCase
+        is structured so that other self.drivers can subclass this 
+        test case to ensure compliance with the DB-API. It is 
+        expected that this TestCase may be expanded in the future
+        if ambiguities or edge conditions are discovered.
+
+        The 'Optional Extensions' are not yet being tested.
+
+        self.drivers should subclass this test, overriding setUp, tearDown,
+        self.driver, connect_args and connect_kw_args. Class specification
+        should be as follows:
+
+        import dbapi20 
+        class mytest(dbapi20.DatabaseAPI20Test):
+           [...] 
+
+        Don't 'import DatabaseAPI20Test from dbapi20', or you will
+        confuse the unit tester - just 'import dbapi20'.
+    '''
+
+    # The self.driver module. This should be the module where the 'connect'
+    # method is to be found
+    driver = None
+    connect_args = () # List of arguments to pass to connect
+    connect_kw_args = {} # Keyword arguments for connect
+    table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
+
+    ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
+    ddl2 = 'create table %sbarflys (name varchar(20), drink varchar(30))' % table_prefix
+    xddl1 = 'drop table %sbooze' % table_prefix
+    xddl2 = 'drop table %sbarflys' % table_prefix
+    insert = 'insert'
+
+    lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
+        
+    # Some drivers may need to override these helpers, for example adding
+    # a 'commit' after the execute.
+    def executeDDL1(self,cursor):
+        cursor.execute(self.ddl1)
+
+    def executeDDL2(self,cursor):
+        cursor.execute(self.ddl2)
+
+    def setUp(self):
+        ''' self.drivers should override this method to perform required setup
+            if any is necessary, such as creating the database.
+        '''
+        pass
+
+    def tearDown(self):
+        ''' self.drivers should override this method to perform required cleanup
+            if any is necessary, such as deleting the test database.
+            The default drops the tables that may be created.
+        '''
+        try:
+            con = self._connect()
+            try:
+                cur = con.cursor()
+                for ddl in (self.xddl1,self.xddl2):
+                    try:
+                        cur.execute(ddl)
+                        con.commit()
+                    except self.driver.Error:
+                        # Assume table didn't exist. Other tests will check if
+                        # execute is busted.
+                        pass
+            finally:
+                con.close()
+        except _BaseException:
+            pass
+
+    def _connect(self):
+        try:
+             r = self.driver.connect(
+                *self.connect_args,**self.connect_kw_args
+                )
+        except AttributeError:
+            self.fail("No connect method found in self.driver module")
+        return r
+
+    def test_connect(self):
+        con = self._connect()
+        con.close()
+
+    def test_apilevel(self):
+        try:
+            # Must exist
+            apilevel = self.driver.apilevel
+            # Must equal 2.0
+            self.assertEqual(apilevel,'2.0')
+        except AttributeError:
+            self.fail("Driver doesn't define apilevel")
+
+    def test_threadsafety(self):
+        try:
+            # Must exist
+            threadsafety = self.driver.threadsafety
+            # Must be a valid value
+            _failUnless(self, threadsafety in (0,1,2,3))
+        except AttributeError:
+            self.fail("Driver doesn't define threadsafety")
+
+    def test_paramstyle(self):
+        try:
+            # Must exist
+            paramstyle = self.driver.paramstyle
+            # Must be a valid value
+            _failUnless(self, paramstyle in (
+                'qmark','numeric','named','format','pyformat'
+                ))
+        except AttributeError:
+            self.fail("Driver doesn't define paramstyle")
+
+    def test_Exceptions(self):
+        # Make sure required exceptions exist, and are in the
+        # defined hierarchy.
+        if sys.version[0] == '3': #under Python 3 StandardError no longer exists
+            self.assertTrue(issubclass(self.driver.Warning,Exception))
+            self.assertTrue(issubclass(self.driver.Error,Exception))
+        else:
+            self.failUnless(issubclass(self.driver.Warning,StandardError))
+            self.failUnless(issubclass(self.driver.Error,StandardError))
+
+        _failUnless(self,
+            issubclass(self.driver.InterfaceError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.DatabaseError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.OperationalError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.IntegrityError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.InternalError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.ProgrammingError,self.driver.Error)
+            )
+        _failUnless(self,
+            issubclass(self.driver.NotSupportedError,self.driver.Error)
+            )
+
+    def test_ExceptionsAsConnectionAttributes(self):
+        # OPTIONAL EXTENSION
+        # Test for the optional DB API 2.0 extension, where the exceptions
+        # are exposed as attributes on the Connection object
+        # I figure this optional extension will be implemented by any
+        # driver author who is using this test suite, so it is enabled
+        # by default.
+        con = self._connect()
+        drv = self.driver
+        _failUnless(self,con.Warning is drv.Warning)
+        _failUnless(self,con.Error is drv.Error)
+        _failUnless(self,con.InterfaceError is drv.InterfaceError)
+        _failUnless(self,con.DatabaseError is drv.DatabaseError)
+        _failUnless(self,con.OperationalError is drv.OperationalError)
+        _failUnless(self,con.IntegrityError is drv.IntegrityError)
+        _failUnless(self,con.InternalError is drv.InternalError)
+        _failUnless(self,con.ProgrammingError is drv.ProgrammingError)
+        _failUnless(self,con.NotSupportedError is drv.NotSupportedError)
+
+
+    def test_commit(self):
+        con = self._connect()
+        try:
+            # Commit must work, even if it doesn't do anything
+            con.commit()
+        finally:
+            con.close()
+
+    def test_rollback(self):
+        con = self._connect()
+        # If rollback is defined, it should either work or throw
+        # the documented exception
+        if hasattr(con,'rollback'):
+            try:
+                con.rollback()
+            except self.driver.NotSupportedError:
+                pass
+    
+    def test_cursor(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+        finally:
+            con.close()
+
+    def test_cursor_isolation(self):
+        con = self._connect()
+        try:
+            # Make sure cursors created from the same connection have
+            # the documented transaction isolation level
+            cur1 = con.cursor()
+            cur2 = con.cursor()
+            self.executeDDL1(cur1)
+            cur1.execute("%s into %sbooze values ('Victoria Bitter')" % (
+                self.insert, self.table_prefix
+                ))
+            cur2.execute("select name from %sbooze" % self.table_prefix)
+            booze = cur2.fetchall()
+            self.assertEqual(len(booze),1)
+            self.assertEqual(len(booze[0]),1)
+            self.assertEqual(booze[0][0],'Victoria Bitter')
+        finally:
+            con.close()
+
+    def test_description(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL1(cur)
+            self.assertEqual(cur.description,None,
+                'cursor.description should be none after executing a '
+                'statement that can return no rows (such as DDL)'
+                )
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            self.assertEqual(len(cur.description),1,
+                'cursor.description describes too many columns'
+                )
+            self.assertEqual(len(cur.description[0]),7,
+                'cursor.description[x] tuples must have 7 elements'
+                )
+            self.assertEqual(cur.description[0][0].lower(),'name',
+                'cursor.description[x][0] must return column name'
+                )
+            self.assertEqual(cur.description[0][1],self.driver.STRING,
+                'cursor.description[x][1] must return column type. Got %r'
+                    % cur.description[0][1]
+                )
+
+            # Make sure self.description gets reset
+            self.executeDDL2(cur)
+            self.assertEqual(cur.description,None,
+                'cursor.description not being set to None when executing '
+                'no-result statements (eg. DDL)'
+                )
+        finally:
+            con.close()
+
+    def test_rowcount(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL1(cur)
+            _failUnless(self,cur.rowcount in (-1,0),   # Bug #543885
+                'cursor.rowcount should be -1 or 0 after executing no-result '
+                'statements'
+                )
+            cur.execute("%s into %sbooze values ('Victoria Bitter')" % (
+                self.insert, self.table_prefix
+                ))
+            _failUnless(self,cur.rowcount in (-1,1),
+                'cursor.rowcount should == number or rows inserted, or '
+                'set to -1 after executing an insert statement'
+                )
+            cur.execute("select name from %sbooze" % self.table_prefix)
+            _failUnless(self,cur.rowcount in (-1,1),
+                'cursor.rowcount should == number of rows returned, or '
+                'set to -1 after executing a select statement'
+                )
+            self.executeDDL2(cur)
+            _failUnless(self,cur.rowcount in (-1,0),   # Bug #543885
+                'cursor.rowcount should be -1 or 0 after executing no-result '
+                'statements'
+                )
+        finally:
+            con.close()
+
+    lower_func = 'lower'
+    def test_callproc(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            if self.lower_func and hasattr(cur,'callproc'):
+                r = cur.callproc(self.lower_func,('FOO',))
+                self.assertEqual(len(r),1)
+                self.assertEqual(r[0],'FOO')
+                r = cur.fetchall()
+                self.assertEqual(len(r),1,'callproc produced no result set')
+                self.assertEqual(len(r[0]),1,
+                    'callproc produced invalid result set'
+                    )
+                self.assertEqual(r[0][0],'foo',
+                    'callproc produced invalid results'
+                    )
+        finally:
+            con.close()
+
+    def test_close(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+        finally:
+            con.close()
+
+        # cursor.execute should raise an Error if called after connection
+        # closed
+        self.assertRaises(self.driver.Error,self.executeDDL1,cur)
+
+        # connection.commit should raise an Error if called after connection'
+        # closed.'
+        self.assertRaises(self.driver.Error,con.commit)
+
+    def test_non_idempotent_close(self):
+        con = self._connect()
+        con.close()
+        # connection.close should raise an Error if called more than once
+        #!!! reasonable persons differ about the usefulness of this test and this feature !!!
+        self.assertRaises(self.driver.Error,con.close)
+
+    def test_execute(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self._paraminsert(cur)
+        finally:
+            con.close()
+
+    def _paraminsert(self,cur):
+        self.executeDDL2(cur)
+        cur.execute("%s into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" % (
+            self.insert, self.table_prefix
+            ))
+        _failUnless(self,cur.rowcount in (-1,1))
+
+        if self.driver.paramstyle == 'qmark':
+            cur.execute(
+                "%s into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
+                ("Cooper's",)
+                )
+        elif self.driver.paramstyle == 'numeric':
+            cur.execute(
+                "%s into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
+                ("Cooper's",)
+                )
+        elif self.driver.paramstyle == 'named':
+            cur.execute(
+                "%s into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
+                {'beer':"Cooper's"}
+                )
+        elif self.driver.paramstyle == 'format':
+            cur.execute(
+                "%s into %sbarflys values (%%s, 'thi%%%%s :may ca%%%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
+                ("Cooper's",)
+                )
+        elif self.driver.paramstyle == 'pyformat':
+            cur.execute(
+                "%s into %sbarflys values (%%(beer)s, 'thi%%%%s :may ca%%%%(u)se? troub:1e')" % (self.insert, self.table_prefix),
+                {'beer':"Cooper's"}
+                )
+        else:
+            self.fail('Invalid paramstyle')
+        _failUnless(self,cur.rowcount in (-1,1))
+
+        cur.execute('select name, drink from %sbarflys' % self.table_prefix)
+        res = cur.fetchall()
+        self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
+        beers = [res[0][0],res[1][0]]
+        beers.sort()
+        self.assertEqual(beers[0],"Cooper's",
+            'cursor.fetchall retrieved incorrect data, or data inserted '
+            'incorrectly'
+            )
+        self.assertEqual(beers[1],"Victoria Bitter",
+            'cursor.fetchall retrieved incorrect data, or data inserted '
+            'incorrectly'
+            )
+        trouble = "thi%s :may ca%(u)se? troub:1e"
+        self.assertEqual(res[0][1], trouble,
+            'cursor.fetchall retrieved incorrect data, or data inserted '
+            'incorrectly. Got=%s, Expected=%s' % (repr(res[0][1]), repr(trouble)))      
+        self.assertEqual(res[1][1], trouble,
+            'cursor.fetchall retrieved incorrect data, or data inserted '
+            'incorrectly. Got=%s, Expected=%s' % (repr(res[1][1]), repr(trouble)
+            ))
+        
+    def test_executemany(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL1(cur)
+            largs = [ ("Cooper's",) , ("Boag's",) ]
+            margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
+            if self.driver.paramstyle == 'qmark':
+                cur.executemany(
+                    '%s into %sbooze values (?)' % (self.insert, self.table_prefix),
+                    largs
+                    )
+            elif self.driver.paramstyle == 'numeric':
+                cur.executemany(
+                    '%s into %sbooze values (:1)' % (self.insert, self.table_prefix),
+                    largs
+                    )
+            elif self.driver.paramstyle == 'named':
+                cur.executemany(
+                    '%s into %sbooze values (:beer)' % (self.insert, self.table_prefix),
+                    margs
+                    )
+            elif self.driver.paramstyle == 'format':
+                cur.executemany(
+                    '%s into %sbooze values (%%s)' % (self.insert, self.table_prefix),
+                    largs
+                    )
+            elif self.driver.paramstyle == 'pyformat':
+                cur.executemany(
+                    '%s into %sbooze values (%%(beer)s)' % (
+                        self.insert, self.table_prefix
+                        ),
+                    margs
+                    )
+            else:
+                self.fail('Unknown paramstyle')
+            _failUnless(self,cur.rowcount in (-1,2),
+                'insert using cursor.executemany set cursor.rowcount to '
+                'incorrect value %r' % cur.rowcount
+                )
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            res = cur.fetchall()
+            self.assertEqual(len(res),2,
+                'cursor.fetchall retrieved incorrect number of rows'
+                )
+            beers = [res[0][0],res[1][0]]
+            beers.sort()
+            self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
+            self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
+        finally:
+            con.close()
+
+    def test_fetchone(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+
+            # cursor.fetchone should raise an Error if called before
+            # executing a select-type query
+            self.assertRaises(self.driver.Error,cur.fetchone)
+
+            # cursor.fetchone should raise an Error if called after
+            # executing a query that cannot return rows
+            self.executeDDL1(cur)
+            self.assertRaises(self.driver.Error,cur.fetchone)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            self.assertEqual(cur.fetchone(),None,
+                'cursor.fetchone should return None if a query retrieves '
+                'no rows'
+                )
+            _failUnless(self,cur.rowcount in (-1,0))
+
+            # cursor.fetchone should raise an Error if called after
+            # executing a query that cannot return rows
+            cur.execute("%s into %sbooze values ('Victoria Bitter')" % (
+                self.insert, self.table_prefix
+                ))
+            self.assertRaises(self.driver.Error,cur.fetchone)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            r = cur.fetchone()
+            self.assertEqual(len(r),1,
+                'cursor.fetchone should have retrieved a single row'
+                )
+            self.assertEqual(r[0],'Victoria Bitter',
+                'cursor.fetchone retrieved incorrect data'
+                )
+            self.assertEqual(cur.fetchone(),None,
+                'cursor.fetchone should return None if no more rows available'
+                )
+            _failUnless(self,cur.rowcount in (-1,1))
+        finally:
+            con.close()
+
+    samples = [
+        'Carlton Cold',
+        'Carlton Draft',
+        'Mountain Goat',
+        'Redback',
+        'Victoria Bitter',
+        'XXXX'
+        ]
+
+    def _populate(self):
+        ''' Return a list of sql commands to setup the DB for the fetch
+            tests.
+        '''
+        populate = [
+            "%s into %sbooze values ('%s')" % (self.insert, self.table_prefix, s)
+                for s in self.samples
+            ]
+        return populate
+
+    def test_fetchmany(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+
+            # cursor.fetchmany should raise an Error if called without
+            #issuing a query
+            self.assertRaises(self.driver.Error,cur.fetchmany,4)
+
+            self.executeDDL1(cur)
+            for sql in self._populate():
+                cur.execute(sql)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            r = cur.fetchmany()
+            self.assertEqual(len(r),1,
+                'cursor.fetchmany retrieved incorrect number of rows, '
+                'default of arraysize is one.'
+                )
+            cur.arraysize=10
+            r = cur.fetchmany(3) # Should get 3 rows
+            self.assertEqual(len(r),3,
+                'cursor.fetchmany retrieved incorrect number of rows'
+                )
+            r = cur.fetchmany(4) # Should get 2 more
+            self.assertEqual(len(r),2,
+                'cursor.fetchmany retrieved incorrect number of rows'
+                )
+            r = cur.fetchmany(4) # Should be an empty sequence
+            self.assertEqual(len(r),0,
+                'cursor.fetchmany should return an empty sequence after '
+                'results are exhausted'
+            )
+            _failUnless(self,cur.rowcount in (-1,6))
+
+            # Same as above, using cursor.arraysize
+            cur.arraysize=4
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            r = cur.fetchmany() # Should get 4 rows
+            self.assertEqual(len(r),4,
+                'cursor.arraysize not being honoured by fetchmany'
+                )
+            r = cur.fetchmany() # Should get 2 more
+            self.assertEqual(len(r),2)
+            r = cur.fetchmany() # Should be an empty sequence
+            self.assertEqual(len(r),0)
+            _failUnless(self,cur.rowcount in (-1,6))
+
+            cur.arraysize=6
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            rows = cur.fetchmany() # Should get all rows
+            _failUnless(self,cur.rowcount in (-1,6))
+            self.assertEqual(len(rows),6)
+            self.assertEqual(len(rows),6)
+            rows = [r[0] for r in rows]
+            rows.sort()
+          
+            # Make sure we get the right data back out
+            for i in range(0,6):
+                self.assertEqual(rows[i],self.samples[i],
+                    'incorrect data retrieved by cursor.fetchmany'
+                    )
+
+            rows = cur.fetchmany() # Should return an empty list
+            self.assertEqual(len(rows),0,
+                'cursor.fetchmany should return an empty sequence if '
+                'called after the whole result set has been fetched'
+                )
+            _failUnless(self,cur.rowcount in (-1,6))
+
+            self.executeDDL2(cur)
+            cur.execute('select name from %sbarflys' % self.table_prefix)
+            r = cur.fetchmany() # Should get empty sequence
+            self.assertEqual(len(r),0,
+                'cursor.fetchmany should return an empty sequence if '
+                'query retrieved no rows'
+                )
+            _failUnless(self,cur.rowcount in (-1,0))
+
+        finally:
+            con.close()
+
+    def test_fetchall(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            # cursor.fetchall should raise an Error if called
+            # without executing a query that may return rows (such
+            # as a select)
+            self.assertRaises(self.driver.Error, cur.fetchall)
+
+            self.executeDDL1(cur)
+            for sql in self._populate():
+                cur.execute(sql)
+
+            # cursor.fetchall should raise an Error if called
+            # after executing a statement that cannot return rows
+            self.assertRaises(self.driver.Error,cur.fetchall)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            rows = cur.fetchall()
+            _failUnless(self,cur.rowcount in (-1,len(self.samples)))
+            self.assertEqual(len(rows),len(self.samples),
+                'cursor.fetchall did not retrieve all rows'
+                )
+            rows = [r[0] for r in rows]
+            rows.sort()
+            for i in range(0,len(self.samples)):
+                self.assertEqual(rows[i],self.samples[i],
+                'cursor.fetchall retrieved incorrect rows'
+                )
+            rows = cur.fetchall()
+            self.assertEqual(
+                len(rows),0,
+                'cursor.fetchall should return an empty list if called '
+                'after the whole result set has been fetched'
+                )
+            _failUnless(self,cur.rowcount in (-1,len(self.samples)))
+
+            self.executeDDL2(cur)
+            cur.execute('select name from %sbarflys' % self.table_prefix)
+            rows = cur.fetchall()
+            _failUnless(self,cur.rowcount in (-1,0))
+            self.assertEqual(len(rows),0,
+                'cursor.fetchall should return an empty list if '
+                'a select query returns no rows'
+                )
+            
+        finally:
+            con.close()
+    
+    def test_mixedfetch(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL1(cur)
+            for sql in self._populate():
+                cur.execute(sql)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            rows1  = cur.fetchone()
+            rows23 = cur.fetchmany(2)
+            rows4  = cur.fetchone()
+            rows56 = cur.fetchall()
+            _failUnless(self,cur.rowcount in (-1,6))
+            self.assertEqual(len(rows23),2,
+                'fetchmany returned incorrect number of rows'
+                )
+            self.assertEqual(len(rows56),2,
+                'fetchall returned incorrect number of rows'
+                )
+
+            rows = [rows1[0]]
+            rows.extend([rows23[0][0],rows23[1][0]])
+            rows.append(rows4[0])
+            rows.extend([rows56[0][0],rows56[1][0]])
+            rows.sort()
+            for i in range(0,len(self.samples)):
+                self.assertEqual(rows[i],self.samples[i],
+                    'incorrect data retrieved or inserted'
+                    )
+        finally:
+            con.close()
+
+    def help_nextset_setUp(self,cur):
+        ''' Should create a procedure called deleteme
+            that returns two result sets, first the 
+	    number of rows in booze then "name from booze"
+        '''
+        raise NotImplementedError('Helper not implemented')
+        #sql="""
+        #    create procedure deleteme as
+        #    begin
+        #        select count(*) from booze
+        #        select name from booze
+        #    end
+        #"""
+        #cur.execute(sql)
+
+    def help_nextset_tearDown(self,cur):
+        'If cleaning up is needed after nextSetTest'
+        raise NotImplementedError('Helper not implemented')
+        #cur.execute("drop procedure deleteme")
+
+    def test_nextset(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            if not hasattr(cur,'nextset'):
+                return
+
+            try:
+                self.executeDDL1(cur)
+                sql=self._populate()
+                for sql in self._populate():
+                    cur.execute(sql)
+
+                self.help_nextset_setUp(cur)
+
+                cur.callproc('deleteme')
+                numberofrows=cur.fetchone()
+                assert numberofrows[0]== len(self.samples)
+                assert cur.nextset()
+                names=cur.fetchall()
+                assert len(names) == len(self.samples)
+                s=cur.nextset()
+                assert s == None,'No more return sets, should return None'
+            finally:
+                self.help_nextset_tearDown(cur)
+
+        finally:
+            con.close()
+
+    def test_nextset(self):
+        raise NotImplementedError('Drivers need to override this test')
+
+    def test_arraysize(self):
+        # Not much here - rest of the tests for this are in test_fetchmany
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            _failUnless(self,hasattr(cur,'arraysize'),
+                'cursor.arraysize must be defined'
+                )
+        finally:
+            con.close()
+
+    def test_setinputsizes(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            cur.setinputsizes( (25,) )
+            self._paraminsert(cur) # Make sure cursor still works
+        finally:
+            con.close()
+
+    def test_setoutputsize_basic(self):
+        # Basic test is to make sure setoutputsize doesn't blow up
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            cur.setoutputsize(1000)
+            cur.setoutputsize(2000,0)
+            self._paraminsert(cur) # Make sure the cursor still works
+        finally:
+            con.close()
+
+    def test_setoutputsize(self):
+        # Real test for setoutputsize is driver dependent
+        raise NotImplementedError('Driver needed to override this test')
+
+    def test_None(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL1(cur)
+            cur.execute("%s into %sbarflys values ('a', NULL)" % (self.insert, self.table_prefix))
+            cur.execute('select drink from %sbarflys' % self.table_prefix)
+            r = cur.fetchall()
+            self.assertEqual(len(r),1)
+            self.assertEqual(len(r[0]),1)
+            self.assertEqual(r[0][0],None,'NULL value not returned as None')
+        finally:
+            con.close()
+
+    def test_Date(self):
+        d1 = self.driver.Date(2002,12,25)
+        d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
+        # Can we assume this? API doesn't specify, but it seems implied
+        # self.assertEqual(str(d1),str(d2))
+
+    def test_Time(self):
+        t1 = self.driver.Time(13,45,30)
+        t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
+        # Can we assume this? API doesn't specify, but it seems implied
+        # self.assertEqual(str(t1),str(t2))
+
+    def test_Timestamp(self):
+        t1 = self.driver.Timestamp(2002,12,25,13,45,30)
+        t2 = self.driver.TimestampFromTicks(
+            time.mktime((2002,12,25,13,45,30,0,0,0))
+            )
+        # Can we assume this? API doesn't specify, but it seems implied
+        # self.assertEqual(str(t1),str(t2))
+
+    def test_Binary(self):
+        b = self.driver.Binary(str2bytes('Something'))
+        b = self.driver.Binary(str2bytes(''))
+
+    def test_STRING(self):
+        _failUnless(self, hasattr(self.driver,'STRING'),
+            'module.STRING must be defined'
+            )
+
+    def test_BINARY(self):
+        _failUnless(self, hasattr(self.driver,'BINARY'),
+            'module.BINARY must be defined.'
+            )
+
+    def test_NUMBER(self):
+        _failUnless(self, hasattr(self.driver,'NUMBER'),
+            'module.NUMBER must be defined.'
+            )
+
+    def test_DATETIME(self):
+        _failUnless(self, hasattr(self.driver,'DATETIME'),
+            'module.DATETIME must be defined.'
+            )
+
+    def test_ROWID(self):
+        _failUnless(self, hasattr(self.driver,'ROWID'),
+            'module.ROWID must be defined.'
+            )
diff --git a/python/phoenixdb/phoenixdb/tests/test_avatica.py b/python/phoenixdb/phoenixdb/tests/test_avatica.py
new file mode 100644
index 0000000..6152814
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_avatica.py
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from phoenixdb.avatica.client import parse_url, urlparse
+
+
+class ParseUrlTest(unittest.TestCase):
+
+    def test_parse_url(self):
+        self.assertEqual(urlparse.urlparse('http://localhost:8765/'), parse_url('localhost'))
+        self.assertEqual(urlparse.urlparse('http://localhost:2222/'), parse_url('localhost:2222'))
+        self.assertEqual(urlparse.urlparse('http://localhost:2222/'), parse_url('http://localhost:2222/'))
diff --git a/python/phoenixdb/phoenixdb/tests/test_connection.py b/python/phoenixdb/phoenixdb/tests/test_connection.py
new file mode 100644
index 0000000..2deacf5
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_connection.py
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import phoenixdb
+from phoenixdb.tests import TEST_DB_URL
+
+
+@unittest.skipIf(TEST_DB_URL is None, "these tests require the PHOENIXDB_TEST_DB_URL environment variable set to a clean database")
+class PhoenixConnectionTest(unittest.TestCase):
+
+    def _connect(self, connect_kw_args):
+        try:
+            r = phoenixdb.connect(TEST_DB_URL, **connect_kw_args)
+        except AttributeError:
+            self.fail("Failed to connect")
+        return r
+
+    def test_connection_credentials(self):
+        connect_kw_args = {'user': 'SCOTT', 'password': 'TIGER', 'readonly': 'True'}
+        con = self._connect(connect_kw_args)
+        try:
+            self.assertEqual(
+                con._connection_args, {'user': 'SCOTT', 'password': 'TIGER'},
+                'Should have extract user and password')
+            self.assertEqual(
+                con._filtered_args, {'readonly': 'True'},
+                'Should have not extracted foo')
+        finally:
+            con.close()
diff --git a/python/phoenixdb/phoenixdb/tests/test_db.py b/python/phoenixdb/phoenixdb/tests/test_db.py
new file mode 100644
index 0000000..2fb1a2a
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_db.py
@@ -0,0 +1,99 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import phoenixdb
+import phoenixdb.cursor
+from phoenixdb.errors import InternalError
+from phoenixdb.tests import TEST_DB_URL
+
+
+@unittest.skipIf(TEST_DB_URL is None, "these tests require the PHOENIXDB_TEST_DB_URL environment variable set to a clean database")
+class PhoenixDatabaseTest(unittest.TestCase):
+
+    def test_select_literal(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True)
+        self.addCleanup(db.close)
+
+        with db.cursor() as cursor:
+            cursor.execute("DROP TABLE IF EXISTS test")
+            cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, text VARCHAR)")
+            cursor.executemany("UPSERT INTO test VALUES (?, ?)", [[i, 'text {}'.format(i)] for i in range(10)])
+
+        with db.cursor() as cursor:
+            cursor.itersize = 4
+            cursor.execute("SELECT * FROM test WHERE id>1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[i, 'text {}'.format(i)] for i in range(2, 10)])
+
+    def test_select_parameter(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True)
+        self.addCleanup(db.close)
+
+        with db.cursor() as cursor:
+            cursor.execute("DROP TABLE IF EXISTS test")
+            cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, text VARCHAR)")
+            cursor.executemany("UPSERT INTO test VALUES (?, ?)", [[i, 'text {}'.format(i)] for i in range(10)])
+
+        with db.cursor() as cursor:
+            cursor.itersize = 4
+            cursor.execute("SELECT * FROM test WHERE id>? ORDER BY id", [1])
+            self.assertEqual(cursor.fetchall(), [[i, 'text {}'.format(i)] for i in range(2, 10)])
+
+    def _check_dict_cursor(self, cursor):
+        cursor.execute("DROP TABLE IF EXISTS test")
+        cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, text VARCHAR)")
+        cursor.execute("UPSERT INTO test VALUES (?, ?)", [1, 'text 1'])
+        cursor.execute("SELECT * FROM test ORDER BY id")
+        self.assertEqual(cursor.fetchall(), [{'ID': 1, 'TEXT': 'text 1'}])
+
+    def test_dict_cursor_default_parameter(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True, cursor_factory=phoenixdb.cursor.DictCursor)
+        self.addCleanup(db.close)
+
+        with db.cursor() as cursor:
+            self._check_dict_cursor(cursor)
+
+    def test_dict_cursor_default_attribute(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True)
+        db.cursor_factory = phoenixdb.cursor.DictCursor
+        self.addCleanup(db.close)
+
+        with db.cursor() as cursor:
+            self._check_dict_cursor(cursor)
+
+    def test_dict_cursor(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True)
+        self.addCleanup(db.close)
+
+        with db.cursor(cursor_factory=phoenixdb.cursor.DictCursor) as cursor:
+            self._check_dict_cursor(cursor)
+
+    def test_schema(self):
+        db = phoenixdb.connect(TEST_DB_URL, autocommit=True)
+        self.addCleanup(db.close)
+
+        with db.cursor() as cursor:
+            try:
+                cursor.execute("CREATE SCHEMA IF NOT EXISTS test_schema")
+            except InternalError as e:
+                if "phoenix.schema.isNamespaceMappingEnabled" in e.message:
+                    self.skipTest(e.message)
+                raise
+
+            cursor.execute("DROP TABLE IF EXISTS test_schema.test")
+            cursor.execute("CREATE TABLE test_schema.test (id INTEGER PRIMARY KEY, text VARCHAR)")
+            cursor.execute("UPSERT INTO test_schema.test VALUES (?, ?)", [1, 'text 1'])
+            cursor.execute("SELECT * FROM test_schema.test ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, 'text 1']])
diff --git a/python/phoenixdb/phoenixdb/tests/test_dbapi20.py b/python/phoenixdb/phoenixdb/tests/test_dbapi20.py
new file mode 100644
index 0000000..0e5c2e4
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_dbapi20.py
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import phoenixdb
+from . import dbapi20
+from phoenixdb.tests import TEST_DB_URL
+
+
+@unittest.skipIf(TEST_DB_URL is None, "these tests require the PHOENIXDB_TEST_DB_URL environment variable set to a clean database")
+class PhoenixDatabaseAPI20Test(dbapi20.DatabaseAPI20Test):
+    driver = phoenixdb
+    connect_args = (TEST_DB_URL, )
+
+    ddl1 = 'create table %sbooze (name varchar(20) primary key)' % dbapi20.DatabaseAPI20Test.table_prefix
+    ddl2 = 'create table %sbarflys (name varchar(20) primary key, drink varchar(30))' % dbapi20.DatabaseAPI20Test.table_prefix
+    insert = 'upsert'
+
+    def test_nextset(self):
+        pass
+
+    def test_setoutputsize(self):
+        pass
+
+    def _connect(self):
+        con = dbapi20.DatabaseAPI20Test._connect(self)
+        con.autocommit = True
+        return con
+
+    def test_None(self):
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            self.executeDDL2(cur)
+            cur.execute("%s into %sbarflys values ('a', NULL)" % (self.insert, self.table_prefix))
+            cur.execute('select drink from %sbarflys' % self.table_prefix)
+            r = cur.fetchall()
+            self.assertEqual(len(r), 1)
+            self.assertEqual(len(r[0]), 1)
+            self.assertEqual(r[0][0], None, 'NULL value not returned as None')
+        finally:
+            con.close()
+
+    def test_autocommit(self):
+        con = dbapi20.DatabaseAPI20Test._connect(self)
+        self.assertFalse(con.autocommit)
+        con.autocommit = True
+        self.assertTrue(con.autocommit)
+        con.autocommit = False
+        self.assertFalse(con.autocommit)
+        con.close()
+
+    def test_readonly(self):
+        con = dbapi20.DatabaseAPI20Test._connect(self)
+        self.assertFalse(con.readonly)
+        con.readonly = True
+        self.assertTrue(con.readonly)
+        con.readonly = False
+        self.assertFalse(con.readonly)
+        con.close()
+
+    def test_iter(self):
+        # https://www.python.org/dev/peps/pep-0249/#iter
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            if hasattr(cur, '__iter__'):
+                self.assertIs(cur, iter(cur))
+        finally:
+            con.close()
+
+    def test_next(self):
+        # https://www.python.org/dev/peps/pep-0249/#next
+        con = self._connect()
+        try:
+            cur = con.cursor()
+            if not hasattr(cur, 'next'):
+                return
+
+            # cursor.next should raise an Error if called before
+            # executing a select-type query
+            self.assertRaises(self.driver.Error, cur.next)
+
+            # cursor.next should raise an Error if called after
+            # executing a query that cannot return rows
+            self.executeDDL1(cur)
+            self.assertRaises(self.driver.Error, cur.next)
+
+            # cursor.next should return None if a query retrieves
+            # no rows
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            self.assertRaises(StopIteration, cur.next)
+            self.failUnless(cur.rowcount in (-1, 0))
+
+            # cursor.next should raise an Error if called after
+            # executing a query that cannot return rows
+            cur.execute("%s into %sbooze values ('Victoria Bitter')" % (
+                self.insert, self.table_prefix
+                ))
+            self.assertRaises(self.driver.Error, cur.next)
+
+            cur.execute('select name from %sbooze' % self.table_prefix)
+            r = cur.next()
+            self.assertEqual(len(r), 1, 'cursor.next should have retrieved a row with one column')
+            self.assertEqual(r[0], 'Victoria Bitter', 'cursor.next retrieved incorrect data')
+            # cursor.next should raise StopIteration if no more rows available
+            self.assertRaises(StopIteration, cur.next)
+            self.failUnless(cur.rowcount in (-1, 1))
+        finally:
+            con.close()
diff --git a/python/phoenixdb/phoenixdb/tests/test_errors.py b/python/phoenixdb/phoenixdb/tests/test_errors.py
new file mode 100644
index 0000000..191ccb1
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_errors.py
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from phoenixdb.tests import DatabaseTestCase
+
+
+class ProgrammingErrorTest(DatabaseTestCase):
+
+    def test_invalid_sql(self):
+        with self.conn.cursor() as cursor:
+            with self.assertRaises(self.conn.ProgrammingError) as cm:
+                cursor.execute("UPS")
+            self.assertEqual("Syntax error. Encountered \"UPS\" at line 1, column 1.", cm.exception.message)
+            self.assertEqual(601, cm.exception.code)
+            self.assertEqual("42P00", cm.exception.sqlstate)
+
+
+class IntegrityErrorTest(DatabaseTestCase):
+
+    def test_null_in_pk(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key")
+        with self.conn.cursor() as cursor:
+            with self.assertRaises(self.conn.IntegrityError) as cm:
+                cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (NULL)")
+            self.assertEqual("Constraint violation. PHOENIXDB_TEST_TBL1.ID may not be null", cm.exception.message)
+            self.assertEqual(218, cm.exception.code)
+            self.assertIn(cm.exception.sqlstate, ("22018", "23018"))
+
+
+class DataErrorTest(DatabaseTestCase):
+
+    def test_number_outside_of_range(self):
+        self.createTable("phoenixdb_test_tbl1", "id tinyint primary key")
+        with self.conn.cursor() as cursor:
+            with self.assertRaises(self.conn.DataError) as cm:
+                cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (10000)")
+            self.assertEqual("Type mismatch. TINYINT and INTEGER for 10000", cm.exception.message)
+            self.assertEqual(203, cm.exception.code)
+            self.assertEqual("22005", cm.exception.sqlstate)
+
+    def test_division_by_zero(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key")
+        with self.conn.cursor() as cursor:
+            with self.assertRaises(self.conn.DataError) as cm:
+                cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2/0)")
+            self.assertEqual("Divide by zero.", cm.exception.message)
+            self.assertEqual(202, cm.exception.code)
+            self.assertEqual("22012", cm.exception.sqlstate)
diff --git a/python/phoenixdb/phoenixdb/tests/test_types.py b/python/phoenixdb/phoenixdb/tests/test_types.py
new file mode 100644
index 0000000..2cef0f2
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/tests/test_types.py
@@ -0,0 +1,327 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+import datetime
+import phoenixdb
+from decimal import Decimal
+from phoenixdb.tests import DatabaseTestCase
+
+
+class TypesTest(DatabaseTestCase):
+
+    def checkIntType(self, type_name, min_value, max_value):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val {}".format(type_name))
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 1)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [1])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [None])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [min_value])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [max_value])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.description[1].type_code, phoenixdb.NUMBER)
+            self.assertEqual(cursor.fetchall(), [[1, 1], [2, None], [3, 1], [4, None], [5, min_value], [6, max_value]])
+
+            self.assertRaises(
+                self.conn.DatabaseError, cursor.execute,
+                "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, {})".format(min_value - 1))
+
+            self.assertRaises(
+                self.conn.DatabaseError, cursor.execute,
+                "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, {})".format(max_value + 1))
+
+            # XXX The server silently truncates the values
+#            self.assertRaises(self.conn.DatabaseError, cursor.execute, "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, ?)", [min_value - 1])
+#            self.assertRaises(self.conn.DatabaseError, cursor.execute, "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, ?)", [max_value + 1])
+
+    def test_integer(self):
+        self.checkIntType("integer", -2147483648, 2147483647)
+
+    def test_unsigned_int(self):
+        self.checkIntType("unsigned_int", 0, 2147483647)
+
+    def test_bigint(self):
+        self.checkIntType("bigint", -9223372036854775808, 9223372036854775807)
+
+    def test_unsigned_long(self):
+        self.checkIntType("unsigned_long", 0, 9223372036854775807)
+
+    def test_tinyint(self):
+        self.checkIntType("tinyint", -128, 127)
+
+    def test_unsigned_tinyint(self):
+        self.checkIntType("unsigned_tinyint", 0, 127)
+
+    def test_smallint(self):
+        self.checkIntType("smallint", -32768, 32767)
+
+    def test_unsigned_smallint(self):
+        self.checkIntType("unsigned_smallint", 0, 32767)
+
+    def checkFloatType(self, type_name, min_value, max_value):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val {}".format(type_name))
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 1)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [1])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [None])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [min_value])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [max_value])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.description[1].type_code, phoenixdb.NUMBER)
+            rows = cursor.fetchall()
+            self.assertEqual([r[0] for r in rows], [1, 2, 3, 4, 5, 6])
+            self.assertEqual(rows[0][1], 1.0)
+            self.assertEqual(rows[1][1], None)
+            self.assertEqual(rows[2][1], 1.0)
+            self.assertEqual(rows[3][1], None)
+            self.assertAlmostEqual(rows[4][1], min_value)
+            self.assertAlmostEqual(rows[5][1], max_value)
+
+    def test_float(self):
+        self.checkFloatType("float", -3.4028234663852886e+38, 3.4028234663852886e+38)
+
+    def test_unsigned_float(self):
+        self.checkFloatType("unsigned_float", 0, 3.4028234663852886e+38)
+
+    def test_double(self):
+        self.checkFloatType("double", -1.7976931348623158E+308, 1.7976931348623158E+308)
+
+    def test_unsigned_double(self):
+        self.checkFloatType("unsigned_double", 0, 1.7976931348623158E+308)
+
+    def test_decimal(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val decimal(8,3)")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 33333.333)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [33333.333])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [Decimal('33333.333')])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [None])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.description[1].type_code, phoenixdb.NUMBER)
+            rows = cursor.fetchall()
+            self.assertEqual([r[0] for r in rows], [1, 2, 3, 4, 5])
+            self.assertEqual(rows[0][1], Decimal('33333.333'))
+            self.assertEqual(rows[1][1], None)
+            self.assertEqual(rows[2][1], Decimal('33333.333'))
+            self.assertEqual(rows[3][1], Decimal('33333.333'))
+            self.assertEqual(rows[4][1], None)
+            self.assertRaises(
+                self.conn.DatabaseError, cursor.execute,
+                "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, ?)", [Decimal('1234567890')])
+            self.assertRaises(
+                self.conn.DatabaseError, cursor.execute,
+                "UPSERT INTO phoenixdb_test_tbl1 VALUES (101, ?)", [Decimal('123456.789')])
+
+    def test_boolean(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val boolean")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, TRUE)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, FALSE)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [True])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [False])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [None])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.description[1].type_code, phoenixdb.BOOLEAN)
+            self.assertEqual(cursor.fetchall(), [[1, True], [2, False], [3, None], [4, True], [5, False], [6, None]])
+
+    def test_time(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val time")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '1970-01-01 12:01:02')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [phoenixdb.Time(12, 1, 2)])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [datetime.time(12, 1, 2)])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [None])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.time(12, 1, 2)],
+                [2, None],
+                [3, datetime.time(12, 1, 2)],
+                [4, datetime.time(12, 1, 2)],
+                [5, None],
+            ])
+
+    @unittest.skip("https://issues.apache.org/jira/browse/CALCITE-797")
+    def test_time_full(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val time")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '2015-07-12 13:01:02.123')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", [datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+                [2, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+            ])
+
+    def test_date(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val date")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '2015-07-12 00:00:00')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [phoenixdb.Date(2015, 7, 12)])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [datetime.date(2015, 7, 12)])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.date(2015, 7, 12)],
+                [3, datetime.date(2015, 7, 12)],
+                [4, datetime.date(2015, 7, 12)],
+            ])
+
+    @unittest.skip("https://issues.apache.org/jira/browse/CALCITE-798")
+    def test_date_full(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val date")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '2015-07-12 13:01:02.123')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", [datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+                [2, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+            ])
+
+    def test_date_null(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val date")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", [None])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")  # raises NullPointerException on the server
+            self.assertEqual(cursor.fetchall(), [
+                [1, None],
+                [2, None],
+            ])
+
+    def test_timestamp(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val timestamp")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '2015-07-12 13:01:02.123')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", [phoenixdb.Timestamp(2015, 7, 12, 13, 1, 2)])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, ?)", [None])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+                [2, None],
+                [3, datetime.datetime(2015, 7, 12, 13, 1, 2)],
+                [4, datetime.datetime(2015, 7, 12, 13, 1, 2, 123000)],
+                [5, None],
+            ])
+
+    @unittest.skip("https://issues.apache.org/jira/browse/CALCITE-796")
+    def test_timestamp_full(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val timestamp")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, '2015-07-12 13:01:02.123456789')")
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, datetime.datetime(2015, 7, 12, 13, 1, 2, 123456789)],
+            ])
+
+    def test_varchar(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val varchar")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 'abc')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", ['abc'])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [None])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, '')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [''])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, 'abc'], [2, None], [3, 'abc'], [4, None], [5, None], [6, None]])
+
+    def test_varchar_very_long(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val varchar")
+        with self.conn.cursor() as cursor:
+            value = '1234567890' * 1000
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, ?)", [value])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, value]])
+
+    def test_varchar_limited(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val varchar(2)")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 'ab')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, ?)", ['ab'])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [None])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, '')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [''])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, 'ab'], [2, None], [3, 'ab'], [4, None], [5, None], [6, None]])
+            self.assertRaises(self.conn.DataError, cursor.execute, "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, 'abc')")
+
+    def test_char_null(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val char(2)")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, NULL)")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [None])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (5, '')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (6, ?)", [''])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[2, None], [4, None], [5, None], [6, None]])
+            self.assertRaises(self.conn.DataError, cursor.execute, "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, 'abc')")
+
+    def test_char(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val char(2)")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 'ab')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", ['ab'])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, 'a')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", ['b'])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, 'ab'], [2, 'ab'], [3, 'a'], [4, 'b']])
+            self.assertRaises(self.conn.DataError, cursor.execute, "UPSERT INTO phoenixdb_test_tbl1 VALUES (100, 'abc')")
+
+    def test_binary(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val binary(2)")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, 'ab')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", [phoenixdb.Binary(b'ab')])
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (3, '\x01\x00')")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (4, ?)", [phoenixdb.Binary(b'\x01\x00')])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, b'ab'],
+                [2, b'ab'],
+                [3, b'\x01\x00'],
+                [4, b'\x01\x00'],
+            ])
+
+    def test_binary_all_bytes(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val binary(256)")
+        with self.conn.cursor() as cursor:
+            if sys.version_info[0] < 3:
+                value = ''.join(map(chr, range(256)))
+            else:
+                value = bytes(range(256))
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, ?)", [phoenixdb.Binary(value)])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [[1, value]])
+
+    @unittest.skip("https://issues.apache.org/jira/browse/CALCITE-1050 https://issues.apache.org/jira/browse/PHOENIX-2585")
+    def test_array(self):
+        self.createTable("phoenixdb_test_tbl1", "id integer primary key, val integer[]")
+        with self.conn.cursor() as cursor:
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (1, ARRAY[1, 2])")
+            cursor.execute("UPSERT INTO phoenixdb_test_tbl1 VALUES (2, ?)", [[2, 3]])
+            cursor.execute("SELECT id, val FROM phoenixdb_test_tbl1 ORDER BY id")
+            self.assertEqual(cursor.fetchall(), [
+                [1, [1, 2]],
+                [2, [2, 3]],
+            ])
diff --git a/python/phoenixdb/phoenixdb/types.py b/python/phoenixdb/phoenixdb/types.py
new file mode 100644
index 0000000..f41355a
--- /dev/null
+++ b/python/phoenixdb/phoenixdb/types.py
@@ -0,0 +1,202 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import time
+import datetime
+from decimal import Decimal
+from phoenixdb.avatica.proto import common_pb2
+
+__all__ = [
+    'Date', 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks',
+    'Binary', 'STRING', 'BINARY', 'NUMBER', 'DATETIME', 'ROWID', 'BOOLEAN',
+    'JAVA_CLASSES', 'JAVA_CLASSES_MAP', 'TypeHelper',
+]
+
+
+def Date(year, month, day):
+    """Constructs an object holding a date value."""
+    return datetime.date(year, month, day)
+
+
+def Time(hour, minute, second):
+    """Constructs an object holding a time value."""
+    return datetime.time(hour, minute, second)
+
+
+def Timestamp(year, month, day, hour, minute, second):
+    """Constructs an object holding a datetime/timestamp value."""
+    return datetime.datetime(year, month, day, hour, minute, second)
+
+
+def DateFromTicks(ticks):
+    """Constructs an object holding a date value from the given UNIX timestamp."""
+    return Date(*time.localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+    """Constructs an object holding a time value from the given UNIX timestamp."""
+    return Time(*time.localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+    """Constructs an object holding a datetime/timestamp value from the given UNIX timestamp."""
+    return Timestamp(*time.localtime(ticks)[:6])
+
+
+def Binary(value):
+    """Constructs an object capable of holding a binary (long) string value."""
+    return bytes(value)
+
+
+def time_from_java_sql_time(n):
+    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=n)
+    return dt.time()
+
+
+def time_to_java_sql_time(t):
+    return ((t.hour * 60 + t.minute) * 60 + t.second) * 1000 + t.microsecond // 1000
+
+
+def date_from_java_sql_date(n):
+    return datetime.date(1970, 1, 1) + datetime.timedelta(days=n)
+
+
+def date_to_java_sql_date(d):
+    if isinstance(d, datetime.datetime):
+        d = d.date()
+    td = d - datetime.date(1970, 1, 1)
+    return td.days
+
+
+def datetime_from_java_sql_timestamp(n):
+    return datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=n)
+
+
+def datetime_to_java_sql_timestamp(d):
+    td = d - datetime.datetime(1970, 1, 1)
+    return td.microseconds // 1000 + (td.seconds + td.days * 24 * 3600) * 1000
+
+
+class ColumnType(object):
+
+    def __init__(self, eq_types):
+        self.eq_types = tuple(eq_types)
+        self.eq_types_set = set(eq_types)
+
+    def __eq__(self, other):
+        return other in self.eq_types_set
+
+    def __cmp__(self, other):
+        if other in self.eq_types_set:
+            return 0
+        if other < self.eq_types:
+            return 1
+        else:
+            return -1
+
+
+STRING = ColumnType(['VARCHAR', 'CHAR'])
+"""Type object that can be used to describe string-based columns."""
+
+BINARY = ColumnType(['BINARY', 'VARBINARY'])
+"""Type object that can be used to describe (long) binary columns."""
+
+NUMBER = ColumnType([
+    'INTEGER', 'UNSIGNED_INT', 'BIGINT', 'UNSIGNED_LONG', 'TINYINT', 'UNSIGNED_TINYINT',
+    'SMALLINT', 'UNSIGNED_SMALLINT', 'FLOAT', 'UNSIGNED_FLOAT', 'DOUBLE', 'UNSIGNED_DOUBLE', 'DECIMAL'
+])
+"""Type object that can be used to describe numeric columns."""
+
+DATETIME = ColumnType(['TIME', 'DATE', 'TIMESTAMP', 'UNSIGNED_TIME', 'UNSIGNED_DATE', 'UNSIGNED_TIMESTAMP'])
+"""Type object that can be used to describe date/time columns."""
+
+ROWID = ColumnType([])
+"""Only implemented for DB API 2.0 compatibility, not used."""
+
+BOOLEAN = ColumnType(['BOOLEAN'])
+"""Type object that can be used to describe boolean columns. This is a phoenixdb-specific extension."""
+
+
+# XXX ARRAY
+
+if sys.version_info[0] < 3:
+    _long = long  # noqa: F821
+else:
+    _long = int
+
+JAVA_CLASSES = {
+    'bool_value': [
+        ('java.lang.Boolean', common_pb2.BOOLEAN, None, None),
+    ],
+    'string_value': [
+        ('java.lang.Character', common_pb2.CHARACTER, None, None),
+        ('java.lang.String', common_pb2.STRING, None, None),
+        ('java.math.BigDecimal', common_pb2.BIG_DECIMAL, str, Decimal),
+    ],
+    'number_value': [
+        ('java.lang.Integer', common_pb2.INTEGER, None, int),
+        ('java.lang.Short', common_pb2.SHORT, None, int),
+        ('java.lang.Long', common_pb2.LONG, None, _long),
+        ('java.lang.Byte', common_pb2.BYTE, None, int),
+        ('java.sql.Time', common_pb2.JAVA_SQL_TIME, time_to_java_sql_time, time_from_java_sql_time),
+        ('java.sql.Date', common_pb2.JAVA_SQL_DATE, date_to_java_sql_date, date_from_java_sql_date),
+        ('java.sql.Timestamp', common_pb2.JAVA_SQL_TIMESTAMP, datetime_to_java_sql_timestamp, datetime_from_java_sql_timestamp),
+    ],
+    'bytes_value': [
+        ('[B', common_pb2.BYTE_STRING, Binary, None),
+    ],
+    'double_value': [
+        # if common_pb2.FLOAT is used, incorrect values are sent
+        ('java.lang.Float', common_pb2.DOUBLE, float, float),
+        ('java.lang.Double', common_pb2.DOUBLE, float, float),
+    ]
+}
+"""Groups of Java classes."""
+
+JAVA_CLASSES_MAP = dict((v[0], (k, v[1], v[2], v[3])) for k in JAVA_CLASSES for v in JAVA_CLASSES[k])
+"""Flips the available types to allow for faster lookup by Java class.
+
+This mapping should be structured as:
+    {
+        'java.math.BigDecimal': ('string_value', common_pb2.BIG_DECIMAL, str, Decimal),
+        ...
+        '<java class>': (<field_name>, <Rep enum>, <mutate_to function>, <cast_from function>),
+    }
+"""
+
+
+class TypeHelper(object):
+    @staticmethod
+    def from_class(klass):
+        """Retrieves a Rep and functions to cast to/from based on the Java class.
+
+        :param klass:
+            The string of the Java class for the column or parameter.
+
+        :returns: tuple ``(field_name, rep, mutate_to, cast_from)``
+            WHERE
+            ``field_name`` is the attribute in ``common_pb2.TypedValue``
+            ``rep`` is the common_pb2.Rep enum
+            ``mutate_to`` is the function to cast values into Phoenix values, if any
+            ``cast_from`` is the function to cast from the Phoenix value to the Python value, if any
+
+        :raises:
+            NotImplementedError
+        """
+        if klass not in JAVA_CLASSES_MAP:
+            raise NotImplementedError('type {} is not supported'.format(klass))
+
+        return JAVA_CLASSES_MAP[klass]
diff --git a/python/phoenixdb/requirements.txt b/python/phoenixdb/requirements.txt
new file mode 100644
index 0000000..e6be902
--- /dev/null
+++ b/python/phoenixdb/requirements.txt
@@ -0,0 +1,25 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+-e git+https://bitbucket.org/lalinsky/python-sqlline.git#egg=sqlline
+nose
+protobuf>=3.0.0
+sphinx
+flake8
+requests
+../requests-kerberos
diff --git a/python/phoenixdb/setup.cfg b/python/phoenixdb/setup.cfg
new file mode 100644
index 0000000..ebc28c2
--- /dev/null
+++ b/python/phoenixdb/setup.cfg
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[nosetests]
+verbosity=2
+testmatch=^test_.+
+where=phoenixdb/tests
+
+[build_sphinx]
+source-dir = doc
+build-dir = doc/build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = doc/build/html
+
+[flake8]
+max-line-length = 140
+exclude =
+  e,e3,env,venv,doc,build,dist,.tox,.idea,
+  ./phoenixdb/tests/dbapi20.py,
+  ./phoenixdb/avatica/proto/*_pb2.py
diff --git a/python/phoenixdb/setup.py b/python/phoenixdb/setup.py
new file mode 100644
index 0000000..994286a
--- /dev/null
+++ b/python/phoenixdb/setup.py
@@ -0,0 +1,69 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+from setuptools import setup, find_packages
+
+cmdclass = {}
+
+try:
+    from sphinx.setup_command import BuildDoc
+    cmdclass['build_sphinx'] = BuildDoc
+except ImportError:
+    pass
+
+
+def readme():
+    with open('README.rst') as f:
+        return f.read()
+
+
+version = "0.7"
+
+setup(
+    name="phoenixdb",
+    version=version,
+    description="Phoenix database adapter for Python",
+    long_description=readme(),
+    author="Lukas Lalinsky",
+    author_email="lukas@oxygene.sk",
+    url="https://bitbucket.org/lalinsky/python-phoenixdb",
+    license="Apache 2",
+    packages=find_packages(),
+    include_package_data=True,
+    cmdclass=cmdclass,
+    command_options={
+        'build_sphinx': {
+            'version': ('setup.py', version),
+            'release': ('setup.py', version),
+        },
+    },
+    classifiers=[
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+    ],
+    install_requires=[
+        'protobuf>=3.0.0',
+        'requests',
+        'requests-kerberos'
+    ]
+)
diff --git a/python/phoenixdb/tox.ini b/python/phoenixdb/tox.ini
new file mode 100644
index 0000000..908696a
--- /dev/null
+++ b/python/phoenixdb/tox.ini
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[tox]
+envlist = py27,py35,py36
+
+[testenv]
+passenv = PHOENIXDB_TEST_DB_URL
+commands =
+  flake8
+  nosetests -v
+deps = -rrequirements.txt
diff --git a/python/requests-kerberos/.travis.sh b/python/requests-kerberos/.travis.sh
new file mode 100755
index 0000000..a1861d4
--- /dev/null
+++ b/python/requests-kerberos/.travis.sh
@@ -0,0 +1,202 @@
+#!/bin/bash
+
+set -e
+
+IP_ADDRESS=$(hostname -I)
+HOSTNAME=$(cat /etc/hostname)
+PY_MAJOR=${PYENV:0:1}
+
+export KERBEROS_HOSTNAME=$HOSTNAME.$KERBEROS_REALM
+export DEBIAN_FRONTEND=noninteractive
+
+echo "Configure the hosts file for Kerberos to work in a container"
+cp /etc/hosts ~/hosts.new
+sed -i "/.*$HOSTNAME/c\\$IP_ADDRESS\t$KERBEROS_HOSTNAME" ~/hosts.new
+cp -f ~/hosts.new /etc/hosts
+
+echo "Setting up Kerberos config file at /etc/krb5.conf"
+cat > /etc/krb5.conf << EOL
+[libdefaults]
+    default_realm = ${KERBEROS_REALM^^}
+    dns_lookup_realm = false
+    dns_lookup_kdc = false
+
+[realms]
+    ${KERBEROS_REALM^^} = {
+        kdc = $KERBEROS_HOSTNAME
+        admin_server = $KERBEROS_HOSTNAME
+    }
+
+[domain_realm]
+    .$KERBEROS_REALM = ${KERBEROS_REALM^^}
+
+[logging]
+    kdc = FILE:/var/log/krb5kdc.log
+    admin_server = FILE:/var/log/kadmin.log
+    default = FILE:/var/log/krb5lib.log
+EOL
+
+echo "Setting up kerberos ACL configuration at /etc/krb5kdc/kadm5.acl"
+mkdir /etc/krb5kdc
+echo -e "*/*@${KERBEROS_REALM^^}\t*" > /etc/krb5kdc/kadm5.acl
+
+echo "Installing all the packages required in this test"
+apt-get update
+apt-get \
+    -y \
+    -qq \
+    install \
+    krb5-{user,kdc,admin-server,multidev} \
+    libkrb5-dev \
+    wget \
+    curl \
+    apache2 \
+    libapache2-mod-auth-gssapi \
+    python-dev \
+    libffi-dev \
+    build-essential \
+    libssl-dev \
+    zlib1g-dev \
+    libbz2-dev
+
+echo "Creating KDC database"
+# krb5_newrealm returns non-0 return code as it is running in a container, ignore it for this command only
+set +e
+printf "$KERBEROS_PASSWORD\n$KERBEROS_PASSWORD" | krb5_newrealm
+set -e
+
+echo "Creating principals for tests"
+kadmin.local -q "addprinc -pw $KERBEROS_PASSWORD $KERBEROS_USERNAME"
+
+echo "Adding HTTP principal for Kerberos and create keytab"
+kadmin.local -q "addprinc -randkey HTTP/$KERBEROS_HOSTNAME"
+kadmin.local -q "ktadd -k /etc/krb5.keytab HTTP/$KERBEROS_HOSTNAME"
+chmod 777 /etc/krb5.keytab
+
+echo "Restarting Kerberos KDS service"
+service krb5-kdc restart
+
+echo "Add ServerName to Apache config"
+grep -q -F "ServerName $KERBEROS_HOSTNAME" /etc/apache2/apache2.conf || echo "ServerName $KERBEROS_HOSTNAME" >> /etc/apache2/apache2.conf
+
+echo "Deleting default virtual host file"
+rm /etc/apache2/sites-enabled/000-default.conf
+rm /etc/apache2/sites-available/000-default.conf
+rm /etc/apache2/sites-available/default-ssl.conf
+
+echo "Create website directory structure and pages"
+mkdir -p /var/www/example.com/public_html
+chmod -R 755 /var/www
+echo "<html><head><title>Title</title></head><body>body mesage</body></html>" > /var/www/example.com/public_html/index.html
+
+echo "Create self signed certificate for HTTPS endpoint"
+mkdir /etc/apache2/ssl
+openssl req \
+    -x509 \
+    -nodes \
+    -days 365 \
+    -newkey rsa:2048 \
+    -keyout /etc/apache2/ssl/https.key \
+    -out /etc/apache2/ssl/https.crt \
+    -subj "/CN=$KERBEROS_HOSTNAME/o=Testing LTS./C=US"
+
+echo "Create virtual host files"
+cat > /etc/apache2/sites-available/example.com.conf << EOL
+<VirtualHost *:80>
+    ServerName $KERBEROS_HOSTNAME
+    ServerAlias $KERBEROS_HOSTNAME
+    DocumentRoot /var/www/example.com/public_html
+    ErrorLog ${APACHE_LOG_DIR}/error.log
+    CustomLog ${APACHE_LOG_DIR}/access.log combined
+    <Directory "/var/www/example.com/public_html">
+        AuthType GSSAPI
+        AuthName "GSSAPI Single Sign On Login"
+        Require user $KERBEROS_USERNAME@${KERBEROS_REALM^^}
+        GssapiCredStore keytab:/etc/krb5.keytab
+    </Directory>
+</VirtualHost>
+<VirtualHost *:443>
+    ServerName $KERBEROS_HOSTNAME
+    ServerAlias $KERBEROS_HOSTNAME
+    DocumentRoot /var/www/example.com/public_html
+    ErrorLog ${APACHE_LOG_DIR}/error.log
+    CustomLog ${APACHE_LOG_DIR}/access.log combined
+    SSLEngine on
+    SSLCertificateFile /etc/apache2/ssl/https.crt
+    SSLCertificateKeyFile /etc/apache2/ssl/https.key
+    <Directory "/var/www/example.com/public_html">
+        AuthType GSSAPI
+        AuthName "GSSAPI Single Sign On Login"
+        Require user $KERBEROS_USERNAME@${KERBEROS_REALM^^}
+        GssapiCredStore keytab:/etc/krb5.keytab
+    </Directory>
+</VirtualHost>
+EOL
+
+echo "Enabling virtual host site"
+a2enmod ssl
+a2ensite example.com.conf
+service apache2 restart
+
+echo "Getting ticket for Kerberos user"
+echo -n "$KERBEROS_PASSWORD" | kinit "$KERBEROS_USERNAME@${KERBEROS_REALM^^}"
+
+echo "Try out the HTTP connection with curl"
+CURL_OUTPUT=$(curl --negotiate -u : "http://$KERBEROS_HOSTNAME")
+
+if [ "$CURL_OUTPUT" != "<html><head><title>Title</title></head><body>body mesage</body></html>" ]; then
+    echo -e "ERROR: Did not get success message, cannot continue with actual tests:\nActual Output:\n$CURL_OUTPUT"
+    exit 1
+else
+    echo -e "SUCCESS: Apache site built and set for Kerberos auth\nActual Output:\n$CURL_OUTPUT"
+fi
+
+echo "Try out the HTTPS connection with curl"
+CURL_OUTPUT=$(curl --negotiate -u : "https://$KERBEROS_HOSTNAME" --insecure)
+
+if [ "$CURL_OUTPUT" != "<html><head><title>Title</title></head><body>body mesage</body></html>" ]; then
+    echo -e "ERROR: Did not get success message, cannot continue with actual tests:\nActual Output:\n$CURL_OUTPUT"
+    exit 1
+else
+    echo -e "SUCCESS: Apache site built and set for Kerberos auth\nActual Output:\n$CURL_OUTPUT"
+fi
+
+if [ "$IMAGE" == "ubuntu:16.04" ]; then
+    echo "Downloading Python $PYENV"
+    wget -q "https://www.python.org/ftp/python/$PYENV/Python-$PYENV.tgz"
+    tar xzf "Python-$PYENV.tgz"
+    cd "Python-$PYENV"
+
+    echo "Configuring Python install"
+    ./configure &> /dev/null
+
+    echo "Running make install on Python"
+    make install &> /dev/null
+    cd ..
+    rm -rf "Python-$PYENV"
+    rm "Python-$PYENV.tgz"
+fi
+
+echo "Installing Pip"
+wget -q https://bootstrap.pypa.io/get-pip.py
+python$PY_MAJOR get-pip.py
+rm get-pip.py
+
+echo "Updating pip and installing library"
+pip$PY_MAJOR install -U pip setuptools
+pip$PY_MAJOR install .
+pip$PY_MAJOR install -r requirements-test.txt
+
+echo "Outputting build info before tests"
+echo "Python Version: $(python$PY_MAJOR --version 2>&1)"
+echo "Pip Version: $(pip$PY_MAJOR --version)"
+echo "Pip packages: $(pip$PY_MAJOR list)"
+
+echo "Running Python tests"
+export KERBEROS_PRINCIPAL="$KERBEROS_USERNAME@${KERBEROS_REALM^^}"
+export KERBEROS_URL="http://$KERBEROS_HOSTNAME"
+python$PY_MAJOR -m pytest -v --cov=requests_kerberos
+
+echo "Running Python test over HTTPS for basic CBT test"
+export KERBEROS_URL="https://$KERBEROS_HOSTNAME"
+python$PY_MAJOR -m pytest -v --cov=requests_kerberos
diff --git a/python/requests-kerberos/.travis.yml b/python/requests-kerberos/.travis.yml
new file mode 100644
index 0000000..8253cb4
--- /dev/null
+++ b/python/requests-kerberos/.travis.yml
@@ -0,0 +1,36 @@
+sudo: required
+
+language: python
+
+services:
+- docker
+
+os: linux
+dist: trusty
+
+matrix:
+  include:
+  - env: PYENV=2.7.14 IMAGE=python:2.7.14-slim-stretch
+  - env: PYENV=3.3.7 IMAGE=ubuntu:16.04
+  - env: PYENV=3.4.7 IMAGE=ubuntu:16.04
+  - env: PYENV=3.5.4 IMAGE=ubuntu:16.04
+  - env: PYENV=3.6.3 IMAGE=python:3.6.3-slim-stretch
+
+install:
+- pip install coveralls # Need to have coveralls installed locally for after_success to run
+
+script:
+- >
+  docker run
+  -v $(pwd):$(pwd)
+  -w $(pwd)
+  -e PYENV=$PYENV
+  -e IMAGE=$IMAGE
+  -e KERBEROS_USERNAME=administrator
+  -e KERBEROS_PASSWORD=Password01
+  -e KERBEROS_REALM=example.com
+  $IMAGE
+  /bin/bash .travis.sh
+
+after_success:
+- coveralls
diff --git a/python/requests-kerberos/AUTHORS b/python/requests-kerberos/AUTHORS
new file mode 100644
index 0000000..eae61ac
--- /dev/null
+++ b/python/requests-kerberos/AUTHORS
@@ -0,0 +1,3 @@
+Michael Komitee
+Jose Castro Leon
+David Pursehouse
diff --git a/python/requests-kerberos/HISTORY.rst b/python/requests-kerberos/HISTORY.rst
new file mode 100644
index 0000000..44bfc4b
--- /dev/null
+++ b/python/requests-kerberos/HISTORY.rst
@@ -0,0 +1,102 @@
+History
+=======
+
+0.12.0: 2017-12-20
+------------------------
+
+- Add support for channel binding tokens (assumes pykerberos support >= 1.2.1)
+- Add support for kerberos message encryption (assumes pykerberos support >= 1.2.1)
+- Misc CI/test fixes
+
+0.11.0: 2016-11-02
+------------------
+
+- Switch dependency on Windows from kerberos-sspi/pywin32 to WinKerberos.
+  This brings Custom Principal support to Windows users.
+
+0.10.0: 2016-05-18
+------------------
+
+- Make it possible to receive errors without having their contents and headers
+  stripped.
+- Resolve a bug caused by passing the ``principal`` keyword argument to
+  kerberos-sspi on Windows.
+
+0.9.0: 2016-05-06
+-----------------
+
+- Support for principal, hostname, and realm override.
+
+- Added support for mutual auth.
+
+0.8.0: 2016-01-07
+-----------------
+
+- Support for Kerberos delegation.
+
+- Fixed problems declaring kerberos-sspi on Windows installs.
+
+0.7.0: 2015-05-04
+-----------------
+
+- Added Windows native authentication support by adding kerberos-sspi as an
+  alternative backend.
+
+- Prevent infinite recursion when a server returns 401 to an authorization
+  attempt.
+
+- Reduce the logging during successful responses.
+
+0.6.1: 2014-11-14
+-----------------
+
+- Fix HTTPKerberosAuth not to treat non-file as a file
+
+- Prevent infinite recursion when GSSErrors occurs
+
+0.6: 2014-11-04
+---------------
+
+- Handle mutual authentication (see pull request 36_)
+
+  All users should upgrade immediately. This has been reported to
+  oss-security_ and we are awaiting a proper CVE identifier.
+
+  **Update**: We were issued CVE-2014-8650
+
+- Distribute as a wheel.
+
+.. _36: https://github.com/requests/requests-kerberos/pull/36
+.. _oss-security: http://www.openwall.com/lists/oss-security/
+
+0.5: 2014-05-14
+---------------
+
+- Allow non-HTTP service principals with HTTPKerberosAuth using a new optional
+  argument ``service``.
+
+- Fix bug in ``setup.py`` on distributions where the ``compiler`` module is
+  not available.
+
+- Add test dependencies to ``setup.py`` so ``python setup.py test`` will work.
+
+0.4: 2013-10-26
+---------------
+
+- Minor updates in the README
+- Change requirements to depend on requests above 1.1.0
+
+0.3: 2013-06-02
+---------------
+
+- Work with servers operating on non-standard ports
+
+0.2: 2013-03-26
+---------------
+
+- Not documented
+
+0.1: Never released
+-------------------
+
+- Initial Release
diff --git a/python/requests-kerberos/LICENSE b/python/requests-kerberos/LICENSE
new file mode 100644
index 0000000..581f115
--- /dev/null
+++ b/python/requests-kerberos/LICENSE
@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2012 Kenneth Reitz
+
+Permission to use, copy, modify and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS-IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/python/requests-kerberos/MANIFEST.in b/python/requests-kerberos/MANIFEST.in
new file mode 100644
index 0000000..c5c480f
--- /dev/null
+++ b/python/requests-kerberos/MANIFEST.in
@@ -0,0 +1,5 @@
+include requirements.txt
+include README.rst
+include LICENSE
+include HISTORY.rst
+include AUTHORS
diff --git a/python/requests-kerberos/README.rst b/python/requests-kerberos/README.rst
new file mode 100644
index 0000000..c9bac7c
--- /dev/null
+++ b/python/requests-kerberos/README.rst
@@ -0,0 +1,173 @@
+requests Kerberos/GSSAPI authentication library
+===============================================
+
+.. image:: https://travis-ci.org/requests/requests-kerberos.svg?branch=master
+    :target: https://travis-ci.org/requests/requests-kerberos
+
+.. image:: https://coveralls.io/repos/github/requests/requests-kerberos/badge.svg?branch=master
+    :target: https://coveralls.io/github/requests/requests-kerberos?branch=master
+
+Requests is an HTTP library, written in Python, for human beings. This library
+adds optional Kerberos/GSSAPI authentication support and supports mutual
+authentication. Basic GET usage:
+
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth
+    >>> r = requests.get("http://example.org", auth=HTTPKerberosAuth())
+    ...
+
+The entire ``requests.api`` should be supported.
+
+Authentication Failures
+-----------------------
+
+Client authentication failures will be communicated to the caller by returning
+the 401 response.
+
+Mutual Authentication
+---------------------
+
+REQUIRED
+^^^^^^^^
+
+By default, ``HTTPKerberosAuth`` will require mutual authentication from the
+server, and if a server emits a non-error response which cannot be
+authenticated, a ``requests_kerberos.errors.MutualAuthenticationError`` will
+be raised. If a server emits an error which cannot be authenticated, it will
+be returned to the user but with its contents and headers stripped. If the
+response content is more important than the need for mutual auth on errors,
+(eg, for certain WinRM calls) the stripping behavior can be suppressed by
+setting ``sanitize_mutual_error_response=False``:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, REQUIRED
+    >>> kerberos_auth = HTTPKerberosAuth(mutual_authentication=REQUIRED, sanitize_mutual_error_response=False)
+    >>> r = requests.get("https://windows.example.org/wsman", auth=kerberos_auth)
+    ...
+
+
+OPTIONAL
+^^^^^^^^
+
+If you'd prefer to not require mutual authentication, you can set your
+preference when constructing your ``HTTPKerberosAuth`` object:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, OPTIONAL
+    >>> kerberos_auth = HTTPKerberosAuth(mutual_authentication=OPTIONAL)
+    >>> r = requests.get("http://example.org", auth=kerberos_auth)
+    ...
+
+This will cause ``requests_kerberos`` to attempt mutual authentication if the
+server advertises that it supports it, and cause a failure if authentication
+fails, but not if the server does not support it at all.
+
+DISABLED
+^^^^^^^^
+
+While we don't recommend it, if you'd prefer to never attempt mutual
+authentication, you can do that as well:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, DISABLED
+    >>> kerberos_auth = HTTPKerberosAuth(mutual_authentication=DISABLED)
+    >>> r = requests.get("http://example.org", auth=kerberos_auth)
+    ...
+
+Preemptive Authentication
+-------------------------
+
+``HTTPKerberosAuth`` can be forced to preemptively initiate the Kerberos
+GSS exchange and present a Kerberos ticket on the initial request (and all
+subsequent). By default, authentication only occurs after a
+``401 Unauthorized`` response containing a Kerberos or Negotiate challenge
+is received from the origin server. This can cause mutual authentication
+failures for hosts that use a persistent connection (eg, Windows/WinRM), as
+no Kerberos challenges are sent after the initial auth handshake. This
+behavior can be altered by setting  ``force_preemptive=True``:
+
+.. code-block:: python
+    
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, REQUIRED
+    >>> kerberos_auth = HTTPKerberosAuth(mutual_authentication=REQUIRED, force_preemptive=True)
+    >>> r = requests.get("https://windows.example.org/wsman", auth=kerberos_auth)
+    ...
+
+Hostname Override
+-----------------
+
+If communicating with a host whose DNS name doesn't match its
+kerberos hostname (eg, behind a content switch or load balancer),
+the hostname used for the Kerberos GSS exchange can be overridden by
+setting the ``hostname_override`` arg:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, REQUIRED
+    >>> kerberos_auth = HTTPKerberosAuth(hostname_override="internalhost.local")
+    >>> r = requests.get("https://externalhost.example.org/", auth=kerberos_auth)
+    ...
+
+Explicit Principal
+------------------
+
+``HTTPKerberosAuth`` normally uses the default principal (ie, the user for
+whom you last ran ``kinit`` or ``kswitch``, or an SSO credential if
+applicable). However, an explicit principal can be specified, which will
+cause Kerberos to look for a matching credential cache for the named user.
+This feature depends on OS support for collection-type credential caches,
+as well as working principal support in PyKerberos (it is broken in many
+builds). An explicit principal can be specified with the ``principal`` arg:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth, REQUIRED
+    >>> kerberos_auth = HTTPKerberosAuth(principal="user@REALM")
+    >>> r = requests.get("http://example.org", auth=kerberos_auth)
+    ...
+
+On Windows, WinKerberos is used instead of PyKerberos. WinKerberos allows the
+use of arbitrary principals instead of a credential cache. Passwords can be
+specified by following the form ``user@realm:password`` for ``principal``.
+
+Delegation
+----------
+
+``requests_kerberos`` supports credential delegation (``GSS_C_DELEG_FLAG``).
+To enable delegation of credentials to a server that requests delegation, pass
+``delegate=True`` to ``HTTPKerberosAuth``:
+
+.. code-block:: python
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth
+    >>> r = requests.get("http://example.org", auth=HTTPKerberosAuth(delegate=True))
+    ...
+
+Be careful to only allow delegation to servers you trust as they will be able
+to impersonate you using the delegated credentials.
+
+Logging
+-------
+
+This library makes extensive use of Python's logging facilities.
+
+Log messages are logged to the ``requests_kerberos`` and
+``requests_kerberos.kerberos_`` named loggers.
+
+If you are having difficulty we suggest you configure logging. Issues with the
+underlying kerberos libraries will be made apparent. Additionally, copious debug
+information is made available which may assist in troubleshooting if you
+increase your log level all the way up to debug.
diff --git a/python/requests-kerberos/requests_kerberos/__init__.py b/python/requests-kerberos/requests_kerberos/__init__.py
new file mode 100644
index 0000000..63c7db5
--- /dev/null
+++ b/python/requests-kerberos/requests_kerberos/__init__.py
@@ -0,0 +1,25 @@
+"""
+requests Kerberos/GSSAPI authentication library
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings. This library
+adds optional Kerberos/GSSAPI authentication support and supports mutual
+authentication. Basic GET usage:
+
+    >>> import requests
+    >>> from requests_kerberos import HTTPKerberosAuth
+    >>> r = requests.get("http://example.org", auth=HTTPKerberosAuth())
+
+The entire `requests.api` should be supported.
+"""
+import logging
+
+from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
+from .exceptions import MutualAuthenticationError
+from .compat import NullHandler
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+__all__ = ('HTTPKerberosAuth', 'MutualAuthenticationError', 'REQUIRED',
+           'OPTIONAL', 'DISABLED')
+__version__ = '0.13.0.dev0'
diff --git a/python/requests-kerberos/requests_kerberos/compat.py b/python/requests-kerberos/requests_kerberos/compat.py
new file mode 100644
index 0000000..01b7500
--- /dev/null
+++ b/python/requests-kerberos/requests_kerberos/compat.py
@@ -0,0 +1,14 @@
+"""
+Compatibility library for older versions of python
+"""
+import sys
+
+# python 2.7 introduced a NullHandler which we want to use, but to support
+# older versions, we implement our own if needed.
+if sys.version_info[:2] > (2, 6):
+    from logging import NullHandler
+else:
+    from logging import Handler
+    class NullHandler(Handler):
+        def emit(self, record):
+            pass
diff --git a/python/requests-kerberos/requests_kerberos/exceptions.py b/python/requests-kerberos/requests_kerberos/exceptions.py
new file mode 100644
index 0000000..51e11ec
--- /dev/null
+++ b/python/requests-kerberos/requests_kerberos/exceptions.py
@@ -0,0 +1,15 @@
+"""
+requests_kerberos.exceptions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of exceptions.
+
+"""
+from requests.exceptions import RequestException
+
+
+class MutualAuthenticationError(RequestException):
+    """Mutual Authentication Error"""
+
+class KerberosExchangeError(RequestException):
+    """Kerberos Exchange Failed Error"""
diff --git a/python/requests-kerberos/requests_kerberos/kerberos_.py b/python/requests-kerberos/requests_kerberos/kerberos_.py
new file mode 100644
index 0000000..38f9f76
--- /dev/null
+++ b/python/requests-kerberos/requests_kerberos/kerberos_.py
@@ -0,0 +1,457 @@
+try:
+    import kerberos
+except ImportError:
+    import winkerberos as kerberos
+import logging
+import re
+import sys
+import warnings
+
+from cryptography import x509
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.exceptions import UnsupportedAlgorithm
+
+from requests.auth import AuthBase
+from requests.models import Response
+from requests.compat import urlparse, StringIO
+from requests.structures import CaseInsensitiveDict
+from requests.cookies import cookiejar_from_dict
+from requests.packages.urllib3 import HTTPResponse
+
+from .exceptions import MutualAuthenticationError, KerberosExchangeError
+
+log = logging.getLogger(__name__)
+
+# Different types of mutual authentication:
+#  with mutual_authentication set to REQUIRED, all responses will be
+#   authenticated with the exception of errors. Errors will have their contents
+#   and headers stripped. If a non-error response cannot be authenticated, a
+#   MutualAuthenticationError exception will be raised.
+# with mutual_authentication set to OPTIONAL, mutual authentication will be
+#   attempted if supported, and if supported and failed, a
+#   MutualAuthenticationError exception will be raised. Responses which do not
+#   support mutual authentication will be returned directly to the user.
+# with mutual_authentication set to DISABLED, mutual authentication will not be
+#   attempted, even if supported.
+REQUIRED = 1
+OPTIONAL = 2
+DISABLED = 3
+
+
+class NoCertificateRetrievedWarning(Warning):
+    pass
+
+class UnknownSignatureAlgorithmOID(Warning):
+    pass
+
+
+class SanitizedResponse(Response):
+    """The :class:`Response <Response>` object, which contains a server's
+    response to an HTTP request.
+
+    This differs from `requests.models.Response` in that it's headers and
+    content have been sanitized. This is only used for HTTP Error messages
+    which do not support mutual authentication when mutual authentication is
+    required."""
+
+    def __init__(self, response):
+        super(SanitizedResponse, self).__init__()
+        self.status_code = response.status_code
+        self.encoding = response.encoding
+        self.raw = response.raw
+        self.reason = response.reason
+        self.url = response.url
+        self.request = response.request
+        self.connection = response.connection
+        self._content_consumed = True
+
+        self._content = ""
+        self.cookies = cookiejar_from_dict({})
+        self.headers = CaseInsensitiveDict()
+        self.headers['content-length'] = '0'
+        for header in ('date', 'server'):
+            if header in response.headers:
+                self.headers[header] = response.headers[header]
+
+
+def _negotiate_value(response):
+    """Extracts the gssapi authentication token from the appropriate header"""
+    if hasattr(_negotiate_value, 'regex'):
+        regex = _negotiate_value.regex
+    else:
+        # There's no need to re-compile this EVERY time it is called. Compile
+        # it once and you won't have the performance hit of the compilation.
+        regex = re.compile(r'(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I)
+        _negotiate_value.regex = regex
+
+    authreq = response.headers.get('www-authenticate', None)
+
+    if authreq:
+        match_obj = regex.search(authreq)
+        if match_obj:
+            return match_obj.group(1)
+
+    return None
+
+
+def _get_certificate_hash(certificate_der):
+    # https://tools.ietf.org/html/rfc5929#section-4.1
+    cert = x509.load_der_x509_certificate(certificate_der, default_backend())
+
+    try:
+        hash_algorithm = cert.signature_hash_algorithm
+    except UnsupportedAlgorithm as ex:
+        warnings.warn("Failed to get signature algorithm from certificate, "
+                      "unable to pass channel bindings: %s" % str(ex), UnknownSignatureAlgorithmOID)
+        return None
+
+    # if the cert signature algorithm is either md5 or sha1 then use sha256
+    # otherwise use the signature algorithm
+    if hash_algorithm.name in ['md5', 'sha1']:
+        digest = hashes.Hash(hashes.SHA256(), default_backend())
+    else:
+        digest = hashes.Hash(hash_algorithm, default_backend())
+
+    digest.update(certificate_der)
+    certificate_hash = digest.finalize()
+
+    return certificate_hash
+
+
+def _get_channel_bindings_application_data(response):
+    """
+    https://tools.ietf.org/html/rfc5929 4. The 'tls-server-end-point' Channel Binding Type
+
+    Gets the application_data value for the 'tls-server-end-point' CBT Type.
+    This is ultimately the SHA256 hash of the certificate of the HTTPS endpoint
+    appended onto tls-server-end-point. This value is then passed along to the
+    kerberos library to bind to the auth response. If the socket is not an SSL
+    socket or the raw HTTP object is not a urllib3 HTTPResponse then None will
+    be returned and the Kerberos auth will use GSS_C_NO_CHANNEL_BINDINGS
+
+    :param response: The original 401 response from the server
+    :return: byte string used on the application_data.value field on the CBT struct
+    """
+
+    application_data = None
+    raw_response = response.raw
+
+    if isinstance(raw_response, HTTPResponse):
+        try:
+            if sys.version_info > (3, 0):
+                socket = raw_response._fp.fp.raw._sock
+            else:
+                socket = raw_response._fp.fp._sock
+        except AttributeError:
+            warnings.warn("Failed to get raw socket for CBT; has urllib3 impl changed",
+                          NoCertificateRetrievedWarning)
+        else:
+            try:
+                server_certificate = socket.getpeercert(True)
+            except AttributeError:
+                pass
+            else:
+                certificate_hash = _get_certificate_hash(server_certificate)
+                application_data = b'tls-server-end-point:' + certificate_hash
+    else:
+        warnings.warn(
+            "Requests is running with a non urllib3 backend, cannot retrieve server certificate for CBT",
+            NoCertificateRetrievedWarning)
+
+    return application_data
+
+class HTTPKerberosAuth(AuthBase):
+    """Attaches HTTP GSSAPI/Kerberos Authentication to the given Request
+    object."""
+    def __init__(
+            self, mutual_authentication=REQUIRED,
+            service="HTTP", delegate=False, force_preemptive=False,
+            principal=None, hostname_override=None, mech_oid=None,
+            sanitize_mutual_error_response=True, send_cbt=True):
+        self.context = {}
+        self.mutual_authentication = mutual_authentication
+        self.delegate = delegate
+        self.pos = None
+        self.service = service
+        self.force_preemptive = force_preemptive
+        self.principal = principal
+        self.hostname_override = hostname_override
+        self.mech_oid = mech_oid
+        self.sanitize_mutual_error_response = sanitize_mutual_error_response
+        self.auth_done = False
+        self.winrm_encryption_available = hasattr(kerberos, 'authGSSWinRMEncryptMessage')
+
+        # Set the CBT values populated after the first response
+        self.send_cbt = send_cbt
+        self.cbt_binding_tried = False
+        self.cbt_struct = None
+
+    def generate_request_header(self, response, host, is_preemptive=False):
+        """
+        Generates the GSSAPI authentication token with kerberos.
+
+        If any GSSAPI step fails, raise KerberosExchangeError
+        with failure detail.
+
+        """
+
+        # Flags used by kerberos module.
+        gssflags = kerberos.GSS_C_MUTUAL_FLAG | kerberos.GSS_C_SEQUENCE_FLAG
+        if self.delegate:
+            gssflags |= kerberos.GSS_C_DELEG_FLAG
+
+        try:
+            kerb_stage = "authGSSClientInit()"
+            # contexts still need to be stored by host, but hostname_override
+            # allows use of an arbitrary hostname for the kerberos exchange
+            # (eg, in cases of aliased hosts, internal vs external, CNAMEs
+            # w/ name-based HTTP hosting)
+            kerb_host = self.hostname_override if self.hostname_override is not None else host
+            kerb_spn = "{0}@{1}".format(self.service, kerb_host)
+
+            if self.mech_oid is not None: 
+                result, self.context[host] = kerberos.authGSSClientInit(kerb_spn,
+                    gssflags=gssflags, principal=self.principal, mech_oid=self.mech_oid)
+            else:
+                result, self.context[host] = kerberos.authGSSClientInit(kerb_spn,
+                    gssflags=gssflags, principal=self.principal)
+
+            if result < 1:
+                raise EnvironmentError(result, kerb_stage)
+
+            # if we have a previous response from the server, use it to continue
+            # the auth process, otherwise use an empty value
+            negotiate_resp_value = '' if is_preemptive else _negotiate_value(response)
+
+            kerb_stage = "authGSSClientStep()"
+            # If this is set pass along the struct to Kerberos
+            if self.cbt_struct:
+                result = kerberos.authGSSClientStep(self.context[host],
+                                                    negotiate_resp_value,
+                                                    channel_bindings=self.cbt_struct)
+            else:
+                result = kerberos.authGSSClientStep(self.context[host],
+                                                    negotiate_resp_value)
+
+            if result < 0:
+                raise EnvironmentError(result, kerb_stage)
+
+            kerb_stage = "authGSSClientResponse()"
+            gss_response = kerberos.authGSSClientResponse(self.context[host])
+
+            return "Negotiate {0}".format(gss_response)
+
+        except kerberos.GSSError as error:
+            log.exception(
+                "generate_request_header(): {0} failed:".format(kerb_stage))
+            log.exception(error)
+            raise KerberosExchangeError("%s failed: %s" % (kerb_stage, str(error.args)))
+
+        except EnvironmentError as error:
+            # ensure we raised this for translation to KerberosExchangeError
+            # by comparing errno to result, re-raise if not
+            if error.errno != result:
+                raise
+            message = "{0} failed, result: {1}".format(kerb_stage, result)
+            log.error("generate_request_header(): {0}".format(message))
+            raise KerberosExchangeError(message)
+
+    def authenticate_user(self, response, **kwargs):
+        """Handles user authentication with gssapi/kerberos"""
+
+        host = urlparse(response.url).hostname
+
+        try:
+            auth_header = self.generate_request_header(response, host)
+        except KerberosExchangeError:
+            # GSS Failure, return existing response
+            return response
+
+        log.debug("authenticate_user(): Authorization header: {0}".format(
+            auth_header))
+        response.request.headers['Authorization'] = auth_header
+
+        # Consume the content so we can reuse the connection for the next
+        # request.
+        response.content
+        response.raw.release_conn()
+
+        _r = response.connection.send(response.request, **kwargs)
+        _r.history.append(response)
+
+        log.debug("authenticate_user(): returning {0}".format(_r))
+        return _r
+
+    def handle_401(self, response, **kwargs):
+        """Handles 401's, attempts to use gssapi/kerberos authentication"""
+
+        log.debug("handle_401(): Handling: 401")
+        if _negotiate_value(response) is not None:
+            _r = self.authenticate_user(response, **kwargs)
+            log.debug("handle_401(): returning {0}".format(_r))
+            return _r
+        else:
+            log.debug("handle_401(): Kerberos is not supported")
+            log.debug("handle_401(): returning {0}".format(response))
+            return response
+
+    def handle_other(self, response):
+        """Handles all responses with the exception of 401s.
+
+        This is necessary so that we can authenticate responses if requested"""
+
+        log.debug("handle_other(): Handling: %d" % response.status_code)
+
+        if self.mutual_authentication in (REQUIRED, OPTIONAL) and not self.auth_done:
+
+            is_http_error = response.status_code >= 400
+
+            if _negotiate_value(response) is not None:
+                log.debug("handle_other(): Authenticating the server")
+                if not self.authenticate_server(response):
+                    # Mutual authentication failure when mutual auth is wanted,
+                    # raise an exception so the user doesn't use an untrusted
+                    # response.
+                    log.error("handle_other(): Mutual authentication failed")
+                    raise MutualAuthenticationError("Unable to authenticate "
+                                                    "{0}".format(response))
+
+                # Authentication successful
+                log.debug("handle_other(): returning {0}".format(response))
+                self.auth_done = True
+                return response
+
+            elif is_http_error or self.mutual_authentication == OPTIONAL:
+                if not response.ok:
+                    log.error("handle_other(): Mutual authentication unavailable "
+                              "on {0} response".format(response.status_code))
+
+                if (self.mutual_authentication == REQUIRED and
+                       self.sanitize_mutual_error_response):
+                    return SanitizedResponse(response)
+                else:
+                    return response
+            else:
+                # Unable to attempt mutual authentication when mutual auth is
+                # required, raise an exception so the user doesn't use an
+                # untrusted response.
+                log.error("handle_other(): Mutual authentication failed")
+                raise MutualAuthenticationError("Unable to authenticate "
+                                                "{0}".format(response))
+        else:
+            log.debug("handle_other(): returning {0}".format(response))
+            return response
+
+    def authenticate_server(self, response):
+        """
+        Uses GSSAPI to authenticate the server.
+
+        Returns True on success, False on failure.
+        """
+
+        log.debug("authenticate_server(): Authenticate header: {0}".format(
+            _negotiate_value(response)))
+
+        host = urlparse(response.url).hostname
+
+        try:
+            # If this is set pass along the struct to Kerberos
+            if self.cbt_struct:
+                result = kerberos.authGSSClientStep(self.context[host],
+                                                    _negotiate_value(response),
+                                                    channel_bindings=self.cbt_struct)
+            else:
+                result = kerberos.authGSSClientStep(self.context[host],
+                                                    _negotiate_value(response))
+        except kerberos.GSSError:
+            log.exception("authenticate_server(): authGSSClientStep() failed:")
+            return False
+
+        if result < 1:
+            log.error("authenticate_server(): authGSSClientStep() failed: "
+                      "{0}".format(result))
+            return False
+
+        log.debug("authenticate_server(): returning {0}".format(response))
+        return True
+
+    def handle_response(self, response, **kwargs):
+        """Takes the given response and tries kerberos-auth, as needed."""
+        num_401s = kwargs.pop('num_401s', 0)
+
+        # Check if we have already tried to get the CBT data value
+        if not self.cbt_binding_tried and self.send_cbt:
+            # If we haven't tried, try getting it now
+            cbt_application_data = _get_channel_bindings_application_data(response)
+            if cbt_application_data:
+                # Only the latest version of pykerberos has this method available
+                try:
+                    self.cbt_struct = kerberos.channelBindings(application_data=cbt_application_data)
+                except AttributeError:
+                    # Using older version set to None
+                    self.cbt_struct = None
+            # Regardless of the result, set tried to True so we don't waste time next time
+            self.cbt_binding_tried = True
+
+        if self.pos is not None:
+            # Rewind the file position indicator of the body to where
+            # it was to resend the request.
+            response.request.body.seek(self.pos)
+
+        if response.status_code == 401 and num_401s < 2:
+            # 401 Unauthorized. Handle it, and if it still comes back as 401,
+            # that means authentication failed.
+            _r = self.handle_401(response, **kwargs)
+            log.debug("handle_response(): returning %s", _r)
+            log.debug("handle_response() has seen %d 401 responses", num_401s)
+            num_401s += 1
+            return self.handle_response(_r, num_401s=num_401s, **kwargs)
+        elif response.status_code == 401 and num_401s >= 2:
+            # Still receiving 401 responses after attempting to handle them.
+            # Authentication has failed. Return the 401 response.
+            log.debug("handle_response(): returning 401 %s", response)
+            return response
+        else:
+            _r = self.handle_other(response)
+            log.debug("handle_response(): returning %s", _r)
+            return _r
+
+    def deregister(self, response):
+        """Deregisters the response handler"""
+        response.request.deregister_hook('response', self.handle_response)
+
+    def wrap_winrm(self, host, message):
+        if not self.winrm_encryption_available:
+            raise NotImplementedError("WinRM encryption is not available on the installed version of pykerberos")
+
+        return kerberos.authGSSWinRMEncryptMessage(self.context[host], message)
+
+    def unwrap_winrm(self, host, message, header):
+        if not self.winrm_encryption_available:
+            raise NotImplementedError("WinRM encryption is not available on the installed version of pykerberos")
+
+        return kerberos.authGSSWinRMDecryptMessage(self.context[host], message, header)
+
+    def __call__(self, request):
+        if self.force_preemptive and not self.auth_done:
+            # add Authorization header before we receive a 401
+            # by the 401 handler
+            host = urlparse(request.url).hostname
+
+            auth_header = self.generate_request_header(None, host, is_preemptive=True)
+
+            log.debug("HTTPKerberosAuth: Preemptive Authorization header: {0}".format(auth_header))
+
+            request.headers['Authorization'] = auth_header
+
+        request.register_hook('response', self.handle_response)
+        try:
+            self.pos = request.body.tell()
+        except AttributeError:
+            # In the case of HTTPKerberosAuth being reused and the body
+            # of the previous request was a file-like object, pos has
+            # the file position of the previous body. Ensure it's set to
+            # None.
+            self.pos = None
+        return request
diff --git a/python/requests-kerberos/requirements-test.txt b/python/requests-kerberos/requirements-test.txt
new file mode 100644
index 0000000..88c224f
--- /dev/null
+++ b/python/requests-kerberos/requirements-test.txt
@@ -0,0 +1,4 @@
+mock
+pytest<=3.2.5
+pytest-cov
+coveralls
diff --git a/python/requests-kerberos/requirements.txt b/python/requests-kerberos/requirements.txt
new file mode 100644
index 0000000..ebb2686
--- /dev/null
+++ b/python/requests-kerberos/requirements.txt
@@ -0,0 +1,6 @@
+requests>=1.1.0
+winkerberos >= 0.5.0; sys.platform == 'win32'
+pykerberos >= 1.1.8, < 1.2.0; sys.platform != 'win32'
+cryptography>=1.3
+cryptography>=1.3; python_version!="3.3"
+cryptography>=1.3, <2; python_version=="3.3"
diff --git a/python/requests-kerberos/setup.cfg b/python/requests-kerberos/setup.cfg
new file mode 100644
index 0000000..5e40900
--- /dev/null
+++ b/python/requests-kerberos/setup.cfg
@@ -0,0 +1,2 @@
+[wheel]
+universal = 1
diff --git a/python/requests-kerberos/setup.py b/python/requests-kerberos/setup.py
new file mode 100755
index 0000000..ee89ced
--- /dev/null
+++ b/python/requests-kerberos/setup.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# coding: utf-8
+import os
+import re
+from setuptools import setup
+
+path = os.path.dirname(__file__)
+desc_fd = os.path.join(path, 'README.rst')
+hist_fd = os.path.join(path, 'HISTORY.rst')
+
+long_desc = ''
+short_desc = 'A Kerberos authentication handler for python-requests'
+
+if os.path.isfile(desc_fd):
+    with open(desc_fd) as fd:
+        long_desc = fd.read()
+
+if os.path.isfile(hist_fd):
+    with open(hist_fd) as fd:
+        long_desc = '\n\n'.join([long_desc, fd.read()])
+
+
+def get_version():
+    """
+    Simple function to extract the current version using regular expressions.
+    """
+    reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
+    with open('requests_kerberos/__init__.py') as fd:
+        matches = list(filter(lambda x: x, map(reg.match, fd)))
+
+    if not matches:
+        raise RuntimeError(
+            'Could not find the version information for requests_kerberos'
+            )
+
+    return matches[0].group(1)
+
+
+setup(
+    name='requests-kerberos',
+    description=short_desc,
+    long_description=long_desc,
+    author='Ian Cordasco, Cory Benfield, Michael Komitee',
+    author_email='graffatcolmingov@gmail.com',
+    url='https://github.com/requests/requests-kerberos',
+    packages=['requests_kerberos'],
+    package_data={'': ['LICENSE', 'AUTHORS']},
+    include_package_data=True,
+    version=get_version(),
+    install_requires=[
+        'requests>=1.1.0',
+        'cryptography>=1.3;python_version!="3.3"',
+        'cryptography>=1.3,<2;python_version=="3.3"'
+    ],
+    extras_require={
+        ':sys_platform=="win32"': ['winkerberos>=0.5.0'],
+        ':sys_platform!="win32"': ['pykerberos>=1.1.8,<1.2.0'],
+    },
+    test_suite='test_requests_kerberos',
+    tests_require=['mock'],
+    classifiers=[
+        "License :: OSI Approved :: ISC License (ISCL)"
+    ],
+)
diff --git a/python/requests-kerberos/tests/__init__.py b/python/requests-kerberos/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/python/requests-kerberos/tests/__init__.py
diff --git a/python/requests-kerberos/tests/test_functional_kerberos.py b/python/requests-kerberos/tests/test_functional_kerberos.py
new file mode 100644
index 0000000..a3efef4
--- /dev/null
+++ b/python/requests-kerberos/tests/test_functional_kerberos.py
@@ -0,0 +1,47 @@
+import requests
+import os
+import unittest
+
+from requests_kerberos import HTTPKerberosAuth, REQUIRED
+
+
+class KerberosFunctionalTestCase(unittest.TestCase):
+    """
+    This test is designed to run functional tests against a live website
+    secured with Kerberos authentication. See .travis.sh for the script that
+    is used to setup a Kerberos realm and Apache site.
+
+    For this test to run the 2 environment variables need to be set
+        KERBEROS_PRINCIPAL: The principal to authenticate with (user@REALM.COM)
+            Before running this test you need to ensure you have gotten a valid
+            ticket for the user in that realm using kinit.
+        KERBEROS_URL: The URL (http://host.realm.com) to authenticate with
+            This needs to be set up beforehand
+    """
+
+    def setUp(self):
+        """Setup."""
+        self.principal = os.environ.get('KERBEROS_PRINCIPAL', None)
+        self.url = os.environ.get('KERBEROS_URL', None)
+
+        # Skip the test if not set
+        if self.principal is None:
+            raise unittest.SkipTest("KERBEROS_PRINCIPAL is not set, skipping functional tests")
+        if self.url is None:
+            raise unittest.SkipTest("KERBEROS_URL is not set, skipping functional tests")
+
+    def test_successful_http_call(self):
+        session = requests.Session()
+        if self.url.startswith("https://"):
+            session.verify = False
+
+        session.auth = HTTPKerberosAuth(mutual_authentication=REQUIRED, principal=self.principal)
+        request = requests.Request('GET', self.url)
+        prepared_request = session.prepare_request(request)
+
+        response = session.send(prepared_request)
+
+        assert response.status_code == 200, "HTTP response with kerberos auth did not return a 200 status code"
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/python/requests-kerberos/tests/test_requests_kerberos.py b/python/requests-kerberos/tests/test_requests_kerberos.py
new file mode 100644
index 0000000..ebaca37
--- /dev/null
+++ b/python/requests-kerberos/tests/test_requests_kerberos.py
@@ -0,0 +1,904 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Tests for requests_kerberos."""
+
+import base64
+from mock import Mock, patch
+from requests.compat import urlparse
+import requests
+import warnings
+
+
+try:
+    import kerberos
+    kerberos_module_name = 'kerberos'
+except ImportError:
+    import winkerberos as kerberos  # On Windows
+    kerberos_module_name = 'winkerberos'
+
+import requests_kerberos
+import unittest
+from requests_kerberos.kerberos_ import _get_certificate_hash
+
+# kerberos.authClientInit() is called with the service name (HTTP@FQDN) and
+# returns 1 and a kerberos context object on success. Returns -1 on failure.
+clientInit_complete = Mock(return_value=(1, "CTX"))
+clientInit_error = Mock(return_value=(-1, "CTX"))
+
+# kerberos.authGSSClientStep() is called with the kerberos context object
+# returned by authGSSClientInit and the negotiate auth token provided in the
+# http response's www-authenticate header. It returns 0 or 1 on success. 0
+# Indicates that authentication is progressing but not complete.
+clientStep_complete = Mock(return_value=1)
+clientStep_continue = Mock(return_value=0)
+clientStep_error = Mock(return_value=-1)
+clientStep_exception = Mock(side_effect=kerberos.GSSError)
+
+# kerberos.authGSSCLientResponse() is called with the kerberos context which
+# was initially returned by authGSSClientInit and had been mutated by a call by
+# authGSSClientStep. It returns a string.
+clientResponse = Mock(return_value="GSSRESPONSE")
+
+# Note: we're not using the @mock.patch decorator:
+# > My only word of warning is that in the past, the patch decorator hides
+# > tests when using the standard unittest library.
+# > -- sigmavirus24 in https://github.com/requests/requests-kerberos/issues/1
+
+
+class KerberosTestCase(unittest.TestCase):
+
+    def setUp(self):
+        """Setup."""
+        clientInit_complete.reset_mock()
+        clientInit_error.reset_mock()
+        clientStep_complete.reset_mock()
+        clientStep_continue.reset_mock()
+        clientStep_error.reset_mock()
+        clientStep_exception.reset_mock()
+        clientResponse.reset_mock()
+
+    def tearDown(self):
+        """Teardown."""
+        pass
+
+    def test_negotiate_value_extraction(self):
+        response = requests.Response()
+        response.headers = {'www-authenticate': 'negotiate token'}
+        self.assertEqual(
+            requests_kerberos.kerberos_._negotiate_value(response),
+            'token'
+        )
+
+    def test_negotiate_value_extraction_none(self):
+        response = requests.Response()
+        response.headers = {}
+        self.assertTrue(
+            requests_kerberos.kerberos_._negotiate_value(response) is None
+        )
+
+    def test_force_preemptive(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            auth = requests_kerberos.HTTPKerberosAuth(force_preemptive=True)
+
+            request = requests.Request(url="http://www.example.org")
+
+            auth.__call__(request)
+
+            self.assertTrue('Authorization' in request.headers)
+            self.assertEqual(request.headers.get('Authorization'), 'Negotiate GSSRESPONSE')
+
+    def test_no_force_preemptive(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            auth = requests_kerberos.HTTPKerberosAuth()
+
+            request = requests.Request(url="http://www.example.org")
+
+            auth.__call__(request)
+
+            self.assertTrue('Authorization' not in request.headers)
+
+    def test_generate_request_header(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth()
+            self.assertEqual(
+                auth.generate_request_header(response, host),
+                "Negotiate GSSRESPONSE"
+            )
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_generate_request_header_init_error(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_error,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth()
+            self.assertRaises(requests_kerberos.exceptions.KerberosExchangeError,
+                auth.generate_request_header, response, host
+            )
+            clientInit_error.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            self.assertFalse(clientStep_continue.called)
+            self.assertFalse(clientResponse.called)
+
+    def test_generate_request_header_step_error(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_error):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth()
+            self.assertRaises(requests_kerberos.exceptions.KerberosExchangeError,
+                auth.generate_request_header, response, host
+            )
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_error.assert_called_with("CTX", "token")
+            self.assertFalse(clientResponse.called)
+
+    def test_authenticate_user(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {'www-authenticate': 'negotiate servertoken'}
+
+            connection = Mock()
+            connection.send = Mock(return_value=response_ok)
+
+            raw = Mock()
+            raw.release_conn = Mock(return_value=None)
+
+            request = requests.Request()
+            response = requests.Response()
+            response.request = request
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            response.status_code = 401
+            response.connection = connection
+            response._content = ""
+            response.raw = raw
+            auth = requests_kerberos.HTTPKerberosAuth()
+            r = auth.authenticate_user(response)
+
+            self.assertTrue(response in r.history)
+            self.assertEqual(r, response_ok)
+            self.assertEqual(
+                request.headers['Authorization'],
+                'Negotiate GSSRESPONSE')
+            connection.send.assert_called_with(request)
+            raw.release_conn.assert_called_with()
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_handle_401(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {'www-authenticate': 'negotiate servertoken'}
+
+            connection = Mock()
+            connection.send = Mock(return_value=response_ok)
+
+            raw = Mock()
+            raw.release_conn = Mock(return_value=None)
+
+            request = requests.Request()
+            response = requests.Response()
+            response.request = request
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            response.status_code = 401
+            response.connection = connection
+            response._content = ""
+            response.raw = raw
+            auth = requests_kerberos.HTTPKerberosAuth()
+            r = auth.handle_401(response)
+
+            self.assertTrue(response in r.history)
+            self.assertEqual(r, response_ok)
+            self.assertEqual(
+                request.headers['Authorization'],
+                'Negotiate GSSRESPONSE')
+            connection.send.assert_called_with(request)
+            raw.release_conn.assert_called_with()
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_authenticate_server(self):
+        with patch.multiple(kerberos_module_name, authGSSClientStep=clientStep_complete):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {
+                'www-authenticate': 'negotiate servertoken',
+                'authorization': 'Negotiate GSSRESPONSE'}
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+            result = auth.authenticate_server(response_ok)
+
+            self.assertTrue(result)
+            clientStep_complete.assert_called_with("CTX", "servertoken")
+
+    def test_handle_other(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_complete):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {
+                'www-authenticate': 'negotiate servertoken',
+                'authorization': 'Negotiate GSSRESPONSE'}
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_other(response_ok)
+
+            self.assertEqual(r, response_ok)
+            clientStep_complete.assert_called_with("CTX", "servertoken")
+
+    def test_handle_response_200(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_complete):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {
+                'www-authenticate': 'negotiate servertoken',
+                'authorization': 'Negotiate GSSRESPONSE'}
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_response(response_ok)
+
+            self.assertEqual(r, response_ok)
+            clientStep_complete.assert_called_with("CTX", "servertoken")
+
+    def test_handle_response_200_mutual_auth_required_failure(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_error):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {}
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+
+            self.assertRaises(requests_kerberos.MutualAuthenticationError,
+                              auth.handle_response,
+                              response_ok)
+
+            self.assertFalse(clientStep_error.called)
+
+    def test_handle_response_200_mutual_auth_required_failure_2(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_exception):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {
+                'www-authenticate': 'negotiate servertoken',
+                'authorization': 'Negotiate GSSRESPONSE'}
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+
+            self.assertRaises(requests_kerberos.MutualAuthenticationError,
+                              auth.handle_response,
+                              response_ok)
+
+            clientStep_exception.assert_called_with("CTX", "servertoken")
+
+    def test_handle_response_200_mutual_auth_optional_hard_failure(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_error):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {
+                'www-authenticate': 'negotiate servertoken',
+                'authorization': 'Negotiate GSSRESPONSE'}
+
+            auth = requests_kerberos.HTTPKerberosAuth(
+                requests_kerberos.OPTIONAL)
+            auth.context = {"www.example.org": "CTX"}
+
+            self.assertRaises(requests_kerberos.MutualAuthenticationError,
+                              auth.handle_response,
+                              response_ok)
+
+            clientStep_error.assert_called_with("CTX", "servertoken")
+
+    def test_handle_response_200_mutual_auth_optional_soft_failure(self):
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_error):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+
+            auth = requests_kerberos.HTTPKerberosAuth(
+                requests_kerberos.OPTIONAL)
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_response(response_ok)
+
+            self.assertEqual(r, response_ok)
+
+            self.assertFalse(clientStep_error.called)
+
+    def test_handle_response_500_mutual_auth_required_failure(self):
+        # REQUIRED mutual auth (the default): an unauthenticated error
+        # response is replaced by a SanitizedResponse that strips headers,
+        # cookies and body, while keeping status/encoding/raw/url intact.
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_error):
+
+            response_500 = requests.Response()
+            response_500.url = "http://www.example.org/"
+            response_500.status_code = 500
+            response_500.headers = {}
+            response_500.request = "REQUEST"
+            response_500.connection = "CONNECTION"
+            response_500._content = "CONTENT"
+            response_500.encoding = "ENCODING"
+            response_500.raw = "RAW"
+            response_500.cookies = "COOKIES"
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_response(response_500)
+
+            self.assertTrue(isinstance(r, requests_kerberos.kerberos_.SanitizedResponse))
+            self.assertNotEqual(r, response_500)
+            self.assertNotEqual(r.headers, response_500.headers)
+            self.assertEqual(r.status_code, response_500.status_code)
+            self.assertEqual(r.encoding, response_500.encoding)
+            self.assertEqual(r.raw, response_500.raw)
+            self.assertEqual(r.url, response_500.url)
+            self.assertEqual(r.reason, response_500.reason)
+            self.assertEqual(r.connection, response_500.connection)
+            self.assertEqual(r.content, '')
+            self.assertNotEqual(r.cookies, response_500.cookies)
+
+            # No challenge header on the 500, so no GSS step should run.
+            self.assertFalse(clientStep_error.called)
+
+            # re-test with error response sanitizing disabled
+            auth = requests_kerberos.HTTPKerberosAuth(sanitize_mutual_error_response=False)
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_response(response_500)
+
+            self.assertFalse(isinstance(r, requests_kerberos.kerberos_.SanitizedResponse))
+
+    def test_handle_response_500_mutual_auth_optional_failure(self):
+        # OPTIONAL mutual auth: an unauthenticated 500 is returned as-is
+        # (no sanitizing, no GSS step) — only REQUIRED mode sanitizes.
+        with patch(kerberos_module_name+'.authGSSClientStep', clientStep_error):
+
+            response_500 = requests.Response()
+            response_500.url = "http://www.example.org/"
+            response_500.status_code = 500
+            response_500.headers = {}
+            response_500.request = "REQUEST"
+            response_500.connection = "CONNECTION"
+            response_500._content = "CONTENT"
+            response_500.encoding = "ENCODING"
+            response_500.raw = "RAW"
+            response_500.cookies = "COOKIES"
+
+            auth = requests_kerberos.HTTPKerberosAuth(
+                requests_kerberos.OPTIONAL)
+            auth.context = {"www.example.org": "CTX"}
+
+            r = auth.handle_response(response_500)
+
+            self.assertEqual(r, response_500)
+
+            self.assertFalse(clientStep_error.called)
+
+    def test_handle_response_401(self):
+        # Get a 401 from server, authenticate, and get a 200 back.
+        # Verifies the full retry cycle: the original request is re-sent
+        # with an Authorization header built from the GSS exchange, the
+        # connection is reused, and the 401 ends up in the history.
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {'www-authenticate': 'negotiate servertoken'}
+
+            connection = Mock()
+            connection.send = Mock(return_value=response_ok)
+
+            raw = Mock()
+            raw.release_conn = Mock(return_value=None)
+
+            request = requests.Request()
+            response = requests.Response()
+            response.request = request
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            response.status_code = 401
+            response.connection = connection
+            response._content = ""
+            response.raw = raw
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+            auth.handle_other = Mock(return_value=response_ok)
+
+            r = auth.handle_response(response)
+
+            self.assertTrue(response in r.history)
+            auth.handle_other.assert_called_once_with(response_ok)
+            self.assertEqual(r, response_ok)
+            self.assertEqual(
+                request.headers['Authorization'],
+                'Negotiate GSSRESPONSE')
+            connection.send.assert_called_with(request)
+            raw.release_conn.assert_called_with()
+            # Default flags: mutual + sequence, no delegation, no principal.
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_handle_response_401_rejected(self):
+        # Get a 401 from server, authenticate, and get another 401 back.
+        # Ensure there is no infinite recursion.
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+
+            connection = Mock()
+
+            # Every (re)send yields a fresh 401 so the handler must give
+            # up after one retry instead of recursing forever.
+            def connection_send(self, *args, **kwargs):
+                reject = requests.Response()
+                reject.url = "http://www.example.org/"
+                reject.status_code = 401
+                reject.connection = connection
+                return reject
+
+            connection.send.side_effect = connection_send
+
+            raw = Mock()
+            raw.release_conn.return_value = None
+
+            request = requests.Request()
+            response = requests.Response()
+            response.request = request
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            response.status_code = 401
+            response.connection = connection
+            response._content = ""
+            response.raw = raw
+
+            auth = requests_kerberos.HTTPKerberosAuth()
+
+            r = auth.handle_response(response)
+
+            # The final result is still a 401; auth was attempted once.
+            self.assertEqual(r.status_code, 401)
+            self.assertEqual(request.headers['Authorization'],
+                             'Negotiate GSSRESPONSE')
+            connection.send.assert_called_with(request)
+            raw.release_conn.assert_called_with()
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_generate_request_header_custom_service(self):
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth(service="barfoo")
+            auth.generate_request_header(response, host),
+            clientInit_complete.assert_called_with(
+                "barfoo@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+
+    def test_delegation(self):
+        # With delegation enabled (third ctor arg True), the GSS context
+        # must be initialized with GSS_C_DELEG_FLAG in addition to the
+        # default mutual + sequence flags.
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+
+            response_ok = requests.Response()
+            response_ok.url = "http://www.example.org/"
+            response_ok.status_code = 200
+            response_ok.headers = {'www-authenticate': 'negotiate servertoken'}
+
+            connection = Mock()
+            connection.send = Mock(return_value=response_ok)
+
+            raw = Mock()
+            raw.release_conn = Mock(return_value=None)
+
+            request = requests.Request()
+            response = requests.Response()
+            response.request = request
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            response.status_code = 401
+            response.connection = connection
+            response._content = ""
+            response.raw = raw
+            # Positional args: mutual_authentication=1, service="HTTP",
+            # delegate=True — presumably matching the ctor order; confirm
+            # against HTTPKerberosAuth.__init__ if it changes.
+            auth = requests_kerberos.HTTPKerberosAuth(1, "HTTP", True)
+            r = auth.authenticate_user(response)
+
+            self.assertTrue(response in r.history)
+            self.assertEqual(r, response_ok)
+            self.assertEqual(
+                request.headers['Authorization'],
+                'Negotiate GSSRESPONSE')
+            connection.send.assert_called_with(request)
+            raw.release_conn.assert_called_with()
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG |
+                    kerberos.GSS_C_DELEG_FLAG),
+                principal=None
+                )
+            clientStep_continue.assert_called_with("CTX", "token")
+            clientResponse.assert_called_with("CTX")
+
+    def test_principal_override(self):
+        # An explicit client principal must be forwarded verbatim to
+        # authGSSClientInit via the principal= keyword.
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth(principal="user@REALM")
+            auth.generate_request_header(response, host)
+            clientInit_complete.assert_called_with(
+                "HTTP@www.example.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal="user@REALM")
+
+    def test_realm_override(self):
+        # NOTE(review): despite the name, this exercises hostname_override —
+        # the service principal must use the overridden host, not the URL's.
+        # (Renaming the method would change the discovered test ID.)
+        with patch.multiple(kerberos_module_name,
+                            authGSSClientInit=clientInit_complete,
+                            authGSSClientResponse=clientResponse,
+                            authGSSClientStep=clientStep_continue):
+            response = requests.Response()
+            response.url = "http://www.example.org/"
+            response.headers = {'www-authenticate': 'negotiate token'}
+            host = urlparse(response.url).hostname
+            auth = requests_kerberos.HTTPKerberosAuth(hostname_override="otherhost.otherdomain.org")
+            auth.generate_request_header(response, host)
+            clientInit_complete.assert_called_with(
+                "HTTP@otherhost.otherdomain.org",
+                gssflags=(
+                    kerberos.GSS_C_MUTUAL_FLAG |
+                    kerberos.GSS_C_SEQUENCE_FLAG),
+                principal=None)
+
+
+class TestCertificateHash(unittest.TestCase):
+    """Known-answer tests for _get_certificate_hash.
+
+    Each case decodes a base64 DER certificate signed with a specific
+    algorithm and checks the digest used for TLS channel bindings.
+    The expected-hash lengths suggest the RFC 5929 tls-server-end-point
+    mapping (MD5/SHA-1 -> SHA-256; SHA-384/SHA-512 keep their size) —
+    TODO confirm against the implementation.
+    """
+
+    def test_rsa_md5(self):
+        # RSA certificate signed with md5WithRSAEncryption.
+        cert_der = b'MIIDGzCCAgOgAwIBAgIQJzshhViMG5hLHIJHxa+TcTANBgkqhkiG9w0' \
+                   b'BAQQFADAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MD' \
+                   b'MxNloXDTE4MDUzMDA4MjMxNlowFTETMBEGA1UEAwwKU0VSVkVSMjAxN' \
+                   b'jCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN9N5GAzI7uq' \
+                   b'AVlI6vUqhY5+EZWCWWGRwR3FT2DEXE5++AiJxXO0i0ZfAkLu7UggtBe' \
+                   b'QwVNkaPD27EYzVUhy1iDo37BrFcLNpfjsjj8wVjaSmQmqvLvrvEh/BT' \
+                   b'C5SBgDrk2+hiMh9PrpJoB3QAMDinz5aW0rEXMKitPBBiADrczyYrliF' \
+                   b'AlEU6pTlKEKDUAeP7dKOBlDbCYvBxKnR3ddVH74I5T2SmNBq5gzkbKP' \
+                   b'nlCXdHLZSh74USu93rKDZQF8YzdTO5dcBreJDJsntyj1o49w9WCt6M7' \
+                   b'+pg6vKvE+tRbpCm7kXq5B9PDi42Nb6//MzNaMYf9V7v5MHapvVSv3+y' \
+                   b'sCAwEAAaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGA' \
+                   b'QUFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0G' \
+                   b'A1UdDgQWBBTh4L2Clr9ber6yfY3JFS3wiECL4DANBgkqhkiG9w0BAQQ' \
+                   b'FAAOCAQEA0JK/SL7SP9/nvqWp52vnsxVefTFehThle5DLzagmms/9gu' \
+                   b'oSE2I9XkQIttFMprPosaIZWt7WP42uGcZmoZOzU8kFFYJMfg9Ovyca+' \
+                   b'gnG28jDUMF1E74KrC7uynJiQJ4vPy8ne7F3XJ592LsNJmK577l42gAW' \
+                   b'u08p3TvEJFNHy2dBk/IwZp0HIPr9+JcPf7v0uL6lK930xHJHP56XLzN' \
+                   b'YG8vCMpJFR7wVZp3rXkJQUy3GxyHPJPjS8S43I9j+PoyioWIMEotq2+' \
+                   b'q0IpXU/KeNFkdGV6VPCmzhykijExOMwO6doUzIUM8orv9jYLHXYC+i6' \
+                   b'IFKSb6runxF1MAik+GCSA=='
+
+        # 32 bytes — consistent with a SHA-256 digest.
+        expected_hash = b'\x23\x34\xB8\x47\x6C\xBF\x4E\x6D\xFC\x76\x6A\x5D' \
+                        b'\x5A\x30\xD6\x64\x9C\x01\xBA\xE1\x66\x2A\x5C\x3A' \
+                        b'\x13\x02\xA9\x68\xD7\xC6\xB0\xF6'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_rsa_sha1(self):
+        # RSA certificate signed with sha1WithRSAEncryption.
+        cert_der = b'MIIDGzCCAgOgAwIBAgIQJg/Mf5sR55xApJRK+kabbTANBgkqhkiG9w0' \
+                   b'BAQUFADAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MD' \
+                   b'MxNloXDTE4MDUzMDA4MjMxNlowFTETMBEGA1UEAwwKU0VSVkVSMjAxN' \
+                   b'jCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALPKwYikjbzL' \
+                   b'Lo6JtS6cyytdMMjSrggDoTnRUKauC5/izoYJd+2YVR5YqnluBJZpoFp' \
+                   b'hkCgFFohUOU7qUsI1SkuGnjI8RmWTrrDsSy62BrfX+AXkoPlXo6IpHz' \
+                   b'HaEPxjHJdUACpn8QVWTPmdAhwTwQkeUutrm3EOVnKPX4bafNYeAyj7/' \
+                   b'AGEplgibuXT4/ehbzGKOkRN3ds/pZuf0xc4Q2+gtXn20tQIUt7t6iwh' \
+                   b'nEWjIgopFL/hX/r5q5MpF6stc1XgIwJjEzqMp76w/HUQVqaYneU4qSG' \
+                   b'f90ANK/TQ3aDbUNtMC/ULtIfHqHIW4POuBYXaWBsqalJL2VL3YYkKTU' \
+                   b'sCAwEAAaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGA' \
+                   b'QUFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0G' \
+                   b'A1UdDgQWBBS1jgojcjPu9vqeP1uSKuiIonGwAjANBgkqhkiG9w0BAQU' \
+                   b'FAAOCAQEAKjHL6k5Dv/Zb7dvbYEZyx0wVhjHkCTpT3xstI3+TjfAFsu' \
+                   b'3zMmyFqFqzmr4pWZ/rHc3ObD4pEa24kP9hfB8nmr8oHMLebGmvkzh5h' \
+                   b'0GYc4dIH7Ky1yfQN51hi7/X5iN7jnnBoCJTTlgeBVYDOEBXhfXi3cLT' \
+                   b'u3d7nz2heyNq07gFP8iN7MfqdPZndVDYY82imLgsgar9w5d+fvnYM+k' \
+                   b'XWItNNCUH18M26Obp4Es/Qogo/E70uqkMHost2D+tww/7woXi36X3w/' \
+                   b'D2yBDyrJMJKZLmDgfpNIeCimncTOzi2IhzqJiOY/4XPsVN/Xqv0/dzG' \
+                   b'TDdI11kPLq4EiwxvPanCg=='
+
+        # 32 bytes — consistent with a SHA-256 digest.
+        expected_hash = b'\x14\xCF\xE8\xE4\xB3\x32\xB2\x0A\x34\x3F\xC8\x40' \
+                        b'\xB1\x8F\x9F\x6F\x78\x92\x6A\xFE\x7E\xC3\xE7\xB8' \
+                        b'\xE2\x89\x69\x61\x9B\x1E\x8F\x3E'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_rsa_sha256(self):
+        # RSA certificate signed with sha256WithRSAEncryption.
+        cert_der = b'MIIDGzCCAgOgAwIBAgIQWkeAtqoFg6pNWF7xC4YXhTANBgkqhkiG9w0' \
+                   b'BAQsFADAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUyNzA5MD' \
+                   b'I0NFoXDTE4MDUyNzA5MjI0NFowFTETMBEGA1UEAwwKU0VSVkVSMjAxN' \
+                   b'jCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALIPKM5uykFy' \
+                   b'NmVoLyvPSXGk15ZDqjYi3AbUxVFwCkVImqhefLATit3PkTUYFtAT+TC' \
+                   b'AwK2E4lOu1XHM+Tmp2KIOnq2oUR8qMEvfxYThEf1MHxkctFljFssZ9N' \
+                   b'vASDD4lzw8r0Bhl+E5PhR22Eu1Wago5bvIldojkwG+WBxPQv3ZR546L' \
+                   b'MUZNaBXC0RhuGj5w83lbVz75qM98wvv1ekfZYAP7lrVyHxqCTPDomEU' \
+                   b'I45tQQZHCZl5nRx1fPCyyYfcfqvFlLWD4Q3PZAbnw6mi0MiWJbGYKME' \
+                   b'1XGicjqyn/zM9XKA1t/JzChS2bxf6rsyA9I7ibdRHUxsm1JgKry2jfW' \
+                   b'0CAwEAAaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGA' \
+                   b'QUFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0G' \
+                   b'A1UdDgQWBBQabLGWg1sn7AXPwYPyfE0ER921ZDANBgkqhkiG9w0BAQs' \
+                   b'FAAOCAQEAnRohyl6ZmOsTWCtxOJx5A8yr//NweXKwWWmFQXRmCb4bMC' \
+                   b'xhD4zqLDf5P6RotGV0I/SHvqz+pAtJuwmr+iyAF6WTzo3164LCfnQEu' \
+                   b'psfrrfMkf3txgDwQkA0oPAw3HEwOnR+tzprw3Yg9x6UoZEhi4XqP9AX' \
+                   b'R49jU92KrNXJcPlz5MbkzNo5t9nr2f8q39b5HBjaiBJxzdM1hxqsbfD' \
+                   b'KirTYbkUgPlVOo/NDmopPPb8IX8ubj/XETZG2jixD0zahgcZ1vdr/iZ' \
+                   b'+50WSXKN2TAKBO2fwoK+2/zIWrGRxJTARfQdF+fGKuj+AERIFNh88HW' \
+                   b'xSDYjHQAaFMcfdUpa9GGQ=='
+
+        expected_hash = b'\x99\x6F\x3E\xEA\x81\x2C\x18\x70\xE3\x05\x49\xFF' \
+                        b'\x9B\x86\xCD\x87\xA8\x90\xB6\xD8\xDF\xDF\x4A\x81' \
+                        b'\xBE\xF9\x67\x59\x70\xDA\xDB\x26'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_rsa_sha384(self):
+        # RSA certificate signed with sha384WithRSAEncryption;
+        # expected hash is 48 bytes (SHA-384 size).
+        cert_der = b'MIIDGzCCAgOgAwIBAgIQEmj1prSSQYRL2zYBEjsm5jANBgkqhkiG9w0' \
+                   b'BAQwFADAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MD' \
+                   b'MxN1oXDTE4MDUzMDA4MjMxN1owFTETMBEGA1UEAwwKU0VSVkVSMjAxN' \
+                   b'jCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKsK5NvHi4xO' \
+                   b'081fRLMmPqKsKaHvXgPRykLA0SmKxpGJHfTAZzxojHVeVwOm87IvQj2' \
+                   b'JUh/yrRwSi5Oqrvqx29l2IC/qQt2xkAQsO51/EWkMQ5OSJsl1MN3NXW' \
+                   b'eRTKVoUuJzBs8XLmeraxQcBPyyLhq+WpMl/Q4ZDn1FrUEZfxV0POXgU' \
+                   b'dI3ApuQNRtJOb6iteBIoQyMlnof0RswBUnkiWCA/+/nzR0j33j47IfL' \
+                   b'nkmU4RtqkBlO13f6+e1GZ4lEcQVI2yZq4Zgu5VVGAFU2lQZ3aEVMTu9' \
+                   b'8HEqD6heyNp2on5G/K/DCrGWYCBiASjnX3wiSz0BYv8f3HhCgIyVKhJ' \
+                   b'8CAwEAAaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGA' \
+                   b'QUFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0G' \
+                   b'A1UdDgQWBBQS/SI61S2UE8xwSgHxbkCTpZXo4TANBgkqhkiG9w0BAQw' \
+                   b'FAAOCAQEAMVV/WMXd9w4jtDfSrIsKaWKGtHtiMPpAJibXmSakBRwLOn' \
+                   b'5ZGXL2bWI/Ac2J2Y7bSzs1im2ifwmEqwzzqnpVKShIkZmtij0LS0SEr' \
+                   b'6Fw5IrK8tD6SH+lMMXUTvp4/lLQlgRCwOWxry/YhQSnuprx8IfSPvil' \
+                   b'kwZ0Ysim4Aa+X5ojlhHpWB53edX+lFrmR1YWValBnQ5DvnDyFyLR6II' \
+                   b'Ialp4vmkzI9e3/eOgSArksizAhpXpC9dxQBiHXdhredN0X+1BVzbgzV' \
+                   b'hQBEwgnAIPa+B68oDILaV0V8hvxrP6jFM4IrKoGS1cq0B+Ns0zkG7ZA' \
+                   b'2Q0W+3nVwSxIr6bd6hw7g=='
+
+        expected_hash = b'\x34\xF3\x03\xC9\x95\x28\x6F\x4B\x21\x4A\x9B\xA6' \
+                        b'\x43\x5B\x69\xB5\x1E\xCF\x37\x58\xEA\xBC\x2A\x14' \
+                        b'\xD7\xA4\x3F\xD2\x37\xDC\x2B\x1A\x1A\xD9\x11\x1C' \
+                        b'\x5C\x96\x5E\x10\x75\x07\xCB\x41\x98\xC0\x9F\xEC'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_rsa_sha512(self):
+        # RSA certificate signed with sha512WithRSAEncryption;
+        # expected hash is 64 bytes (SHA-512 size).
+        cert_der = b'MIIDGzCCAgOgAwIBAgIQUDHcKGevZohJV+TkIIYC1DANBgkqhkiG9w0' \
+                   b'BAQ0FADAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MD' \
+                   b'MxN1oXDTE4MDUzMDA4MjMxN1owFTETMBEGA1UEAwwKU0VSVkVSMjAxN' \
+                   b'jCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKr9bo/XXvHt' \
+                   b'D6Qnhb1wyLg9lDQxxe/enH49LQihtVTZMwGf2010h81QrRUe/bkHTvw' \
+                   b'K22s2lqj3fUpGxtEbYFWLAHxv6IFnIKd+Zi1zaCPGfas9ekqCSj3vZQ' \
+                   b'j7lCJVGUGuuqnSDvsed6g2Pz/g6mJUa+TzjxN+8wU5oj5YVUK+aing1' \
+                   b'zPSA2MDCfx3+YzjxVwNoGixOz6Yx9ijT4pUsAYQAf1o9R+6W1/IpGgu' \
+                   b'oax714QILT9heqIowwlHzlUZc1UAYs0/JA4CbDZaw9hlJyzMqe/aE46' \
+                   b'efqPDOpO3vCpOSRcSyzh02WijPvEEaPejQRWg8RX93othZ615MT7dqp' \
+                   b'ECAwEAAaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGA' \
+                   b'QUFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0G' \
+                   b'A1UdDgQWBBTgod3R6vejt6kOASAApA19xIG6kTANBgkqhkiG9w0BAQ0' \
+                   b'FAAOCAQEAVfz0okK2bh3OQE8cWNbJ5PjJRSAJEqVUvYaTlS0Nqkyuaj' \
+                   b'gicP3hb/pF8FvaVaB6r7LqgBxyW5NNL1xwdNLt60M2zaULL6Fhm1vzM' \
+                   b'sSMc2ynkyN4++ODwii674YcQAnkUh+ZGIx+CTdZBWJfVM9dZb7QjgBT' \
+                   b'nVukeFwN2EOOBSpiQSBpcoeJEEAq9csDVRhEfcB8Wtz7TTItgOVsilY' \
+                   b'dQY56ON5XszjCki6UA3GwdQbBEHjWF2WERqXWrojrSSNOYDvxM5mrEx' \
+                   b'sG1npzUTsaIr9w8ty1beh/2aToCMREvpiPFOXnVV/ovHMU1lFQTNeQ0' \
+                   b'OI7elR0nJ0peai30eMpQQ=='
+
+        expected_hash = b'\x55\x6E\x1C\x17\x84\xE3\xB9\x57\x37\x0B\x7F\x54' \
+                        b'\x4F\x62\xC5\x33\xCB\x2C\xA5\xC1\xDA\xE0\x70\x6F' \
+                        b'\xAE\xF0\x05\x44\xE1\xAD\x2B\x76\xFF\x25\xCF\xBE' \
+                        b'\x69\xB1\xC4\xE6\x30\xC3\xBB\x02\x07\xDF\x11\x31' \
+                        b'\x4C\x67\x38\xBC\xAE\xD7\xE0\x71\xD7\xBF\xBF\x2C' \
+                        b'\x9D\xFA\xB8\x5D'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_ecdsa_sha1(self):
+        # ECDSA certificate signed with ecdsa-with-SHA1.
+        cert_der = b'MIIBjjCCATSgAwIBAgIQRCJw7nbtvJ5F8wikRmwgizAJBgcqhkjOPQQ' \
+                   b'BMBUxEzARBgNVBAMMClNFUlZFUjIwMTYwHhcNMTcwNTMwMDgwMzE3Wh' \
+                   b'cNMTgwNTMwMDgyMzE3WjAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MFkwE' \
+                   b'wYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEk3fOh178kRglmnPKe9K/mbgi' \
+                   b'gf8YgNq62rF2EpfzpyQY0eGw4xnmKDG73aZ+ATSlV2IybxiUVsKyMUn' \
+                   b'LhPfvmaNnMGUwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQ' \
+                   b'UFBwMCBggrBgEFBQcDATAVBgNVHREEDjAMggpTRVJWRVIyMDE2MB0GA' \
+                   b'1UdDgQWBBQSK8qwmiQmyAWWya3FxQDj9wqQAzAJBgcqhkjOPQQBA0kA' \
+                   b'MEYCIQCiOsP56Iqo+cHRvCp2toj65Mgxo/PQY1tn+S3WH4RJFQIhAJe' \
+                   b'gGQuaPWg6aCWV+2+6pNCNMdg/Nix+mMOJ88qCBNHi'
+
+        expected_hash = b'\x1E\xC9\xAD\x46\xDE\xE9\x34\x0E\x45\x03\xCF\xFD' \
+                        b'\xB5\xCD\x81\x0C\xB2\x6B\x77\x8F\x46\xBE\x95\xD5' \
+                        b'\xEA\xF9\x99\xDC\xB1\xC4\x5E\xDA'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_ecdsa_sha256(self):
+        # ECDSA certificate signed with ecdsa-with-SHA256.
+        cert_der = b'MIIBjzCCATWgAwIBAgIQeNQTxkMgq4BF9tKogIGXUTAKBggqhkjOPQQ' \
+                   b'DAjAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MDMxN1' \
+                   b'oXDTE4MDUzMDA4MjMxN1owFTETMBEGA1UEAwwKU0VSVkVSMjAxNjBZM' \
+                   b'BMGByqGSM49AgEGCCqGSM49AwEHA0IABDAfXTLOaC3ElgErlgk2tBlM' \
+                   b'wf9XmGlGBw4vBtMJap1hAqbsdxFm6rhK3QU8PFFpv8Z/AtRG7ba3UwQ' \
+                   b'prkssClejZzBlMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBg' \
+                   b'EFBQcDAgYIKwYBBQUHAwEwFQYDVR0RBA4wDIIKU0VSVkVSMjAxNjAdB' \
+                   b'gNVHQ4EFgQUnFDE8824TYAiBeX4fghEEg33UgYwCgYIKoZIzj0EAwID' \
+                   b'SAAwRQIhAK3rXA4/0i6nm/U7bi6y618Ci2Is8++M3tYIXnEsA7zSAiA' \
+                   b'w2s6bJoI+D7Xaey0Hp0gkks9z55y976keIEI+n3qkzw=='
+
+        expected_hash = b'\xFE\xCF\x1B\x25\x85\x44\x99\x90\xD9\xE3\xB2\xC9' \
+                        b'\x2D\x3F\x59\x7E\xC8\x35\x4E\x12\x4E\xDA\x75\x1D' \
+                        b'\x94\x83\x7C\x2C\x89\xA2\xC1\x55'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_ecdsa_sha384(self):
+        # ECDSA certificate signed with ecdsa-with-SHA384.
+        cert_der = b'MIIBjzCCATWgAwIBAgIQcO3/jALdQ6BOAoaoseLSCjAKBggqhkjOPQQ' \
+                   b'DAzAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA4MDMxOF' \
+                   b'oXDTE4MDUzMDA4MjMxOFowFTETMBEGA1UEAwwKU0VSVkVSMjAxNjBZM' \
+                   b'BMGByqGSM49AgEGCCqGSM49AwEHA0IABJLjZH274heB/8PhmhWWCIVQ' \
+                   b'Wle1hBZEN3Tk2yWSKaz9pz1bjwb9t79lVpQE9tvGL0zP9AqJYHcVOO9' \
+                   b'YG9trqfejZzBlMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBg' \
+                   b'EFBQcDAgYIKwYBBQUHAwEwFQYDVR0RBA4wDIIKU0VSVkVSMjAxNjAdB' \
+                   b'gNVHQ4EFgQUkRajoFr8qZ/8L8rKB3zGiGolDygwCgYIKoZIzj0EAwMD' \
+                   b'SAAwRQIgfi8dAxXljCMSvngtDtagGCTGBs7Xxh8Z3WX6ZwJZsHYCIQC' \
+                   b'D4iNReh1afXKYC0ipjXWAIkiihnEEycCIQMbkMNst7A=='
+
+        expected_hash = b'\xD2\x98\x7A\xD8\xF2\x0E\x83\x16\xA8\x31\x26\x1B' \
+                        b'\x74\xEF\x7B\x3E\x55\x15\x5D\x09\x22\xE0\x7F\xFE' \
+                        b'\x54\x62\x08\x06\x98\x2B\x68\xA7\x3A\x5E\x3C\x47' \
+                        b'\x8B\xAA\x5E\x77\x14\x13\x5C\xB2\x6D\x98\x07\x49'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_ecdsa_sha512(self):
+        # ECDSA certificate signed with ecdsa-with-SHA512.
+        cert_der = b'MIIBjjCCATWgAwIBAgIQHVj2AGEwd6pOOSbcf0skQDAKBggqhkjOPQQ' \
+                   b'DBDAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA3NTUzOV' \
+                   b'oXDTE4MDUzMDA4MTUzOVowFTETMBEGA1UEAwwKU0VSVkVSMjAxNjBZM' \
+                   b'BMGByqGSM49AgEGCCqGSM49AwEHA0IABL8d9S++MFpfzeH8B3vG/PjA' \
+                   b'AWg8tGJVgsMw9nR+OfC9ltbTUwhB+yPk3JPcfW/bqsyeUgq4//LhaSp' \
+                   b'lOWFNaNqjZzBlMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBg' \
+                   b'EFBQcDAgYIKwYBBQUHAwEwFQYDVR0RBA4wDIIKU0VSVkVSMjAxNjAdB' \
+                   b'gNVHQ4EFgQUKUkCgLlxoeai0EtQrZth1/BSc5kwCgYIKoZIzj0EAwQD' \
+                   b'RwAwRAIgRrV7CLpDG7KueyFA3ZDced9dPOcv2Eydx/hgrfxYEcYCIBQ' \
+                   b'D35JvzmqU05kSFV5eTvkhkaDObd7V55vokhm31+Li'
+
+        expected_hash = b'\xE5\xCB\x68\xB2\xF8\x43\xD6\x3B\xF4\x0B\xCB\x20' \
+                        b'\x07\x60\x8F\x81\x97\x61\x83\x92\x78\x3F\x23\x30' \
+                        b'\xE5\xEF\x19\xA5\xBD\x8F\x0B\x2F\xAA\xC8\x61\x85' \
+                        b'\x5F\xBB\x63\xA2\x21\xCC\x46\xFC\x1E\x22\x6A\x07' \
+                        b'\x24\x11\xAF\x17\x5D\xDE\x47\x92\x81\xE0\x06\x87' \
+                        b'\x8B\x34\x80\x59'
+        actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+        assert actual_hash == expected_hash
+
+    def test_invalid_signature_algorithm(self):
+        # Manually edited from test_ecdsa_sha512 to change the OID to '1.2.840.10045.4.3.5'
+        # An unknown signature OID must yield None and emit a warning
+        # rather than raising.
+        cert_der = b'MIIBjjCCATWgAwIBAgIQHVj2AGEwd6pOOSbcf0skQDAKBggqhkjOPQQ' \
+                   b'DBTAVMRMwEQYDVQQDDApTRVJWRVIyMDE2MB4XDTE3MDUzMDA3NTUzOV' \
+                   b'oXDTE4MDUzMDA4MTUzOVowFTETMBEGA1UEAwwKU0VSVkVSMjAxNjBZM' \
+                   b'BMGByqGSM49AgEGCCqGSM49AwEHA0IABL8d9S++MFpfzeH8B3vG/PjA' \
+                   b'AWg8tGJVgsMw9nR+OfC9ltbTUwhB+yPk3JPcfW/bqsyeUgq4//LhaSp' \
+                   b'lOWFNaNqjZzBlMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBg' \
+                   b'EFBQcDAgYIKwYBBQUHAwEwFQYDVR0RBA4wDIIKU0VSVkVSMjAxNjAdB' \
+                   b'gNVHQ4EFgQUKUkCgLlxoeai0EtQrZth1/BSc5kwCgYIKoZIzj0EAwUD' \
+                   b'RwAwRAIgRrV7CLpDG7KueyFA3ZDced9dPOcv2Eydx/hgrfxYEcYCIBQ' \
+                   b'D35JvzmqU05kSFV5eTvkhkaDObd7V55vokhm31+Li'
+
+        expected_hash = None
+        expected_warning = "Failed to get signature algorithm from " \
+                           "certificate, unable to pass channel bindings:"
+
+        with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
+            actual_hash = _get_certificate_hash(base64.b64decode(cert_der))
+            assert actual_hash == expected_hash
+            assert expected_warning in str(w[-1].message)
+
+
+# Allow running this test module directly (python test_file.py).
+if __name__ == '__main__':
+    unittest.main()