Add integration tests for S3 storage driver (#1580)
* Enable tests to work with existing containers
* Add integration tests for S3
* Pass secrets to integration tests
* Fix error due to missing addClassCleanup
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 9918715..e27609f 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -43,7 +43,7 @@
strategy:
matrix:
- python_version: [3.7]
+ python_version: [3.8]
steps:
- uses: actions/checkout@master
@@ -75,3 +75,7 @@
- name: Run tox target
run: |
script -e -c "tox -e integration-storage"
+ env:
+ AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_ACCESS_KEY_SECRET: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
diff --git a/integration/storage/base.py b/integration/storage/base.py
index 56786cd..dda7d06 100644
--- a/integration/storage/base.py
+++ b/integration/storage/base.py
@@ -45,6 +45,9 @@
account = None
secret = None
+ container_name_prefix = 'lcsit'
+ container_name_max_length = 63
+
def setUp(self):
for required in 'provider', 'account', 'secret':
value = getattr(self, required, None)
@@ -63,6 +66,9 @@
def tearDown(self):
for container in self.driver.list_containers():
+ if not container.name.startswith(self.container_name_prefix):
+ continue
+
for obj in container.list_objects():
try:
obj.delete()
@@ -86,19 +92,17 @@
def test_containers(self):
# make a new container
- container_name = random_container_name()
+ container_name = self._random_container_name()
container = self.driver.create_container(container_name)
self.assertEqual(container.name, container_name)
container = self.driver.get_container(container_name)
self.assertEqual(container.name, container_name)
- # check that an existing container can't be re-created
- with self.assertRaises(types.ContainerAlreadyExistsError):
- self.driver.create_container(container_name)
+ self.assert_existing_container_cannot_be_recreated(container)
# check that the new container can be listed
containers = self.driver.list_containers()
- self.assertEqual([c.name for c in containers], [container_name])
+ self.assertIn(container_name, [c.name for c in containers])
# delete the container
self.driver.delete_container(container)
@@ -109,12 +113,12 @@
# check that the container is deleted
containers = self.driver.list_containers()
- self.assertEqual([c.name for c in containers], [])
+ self.assertNotIn(container_name, [c.name for c in containers])
def _test_objects(self, do_upload, do_download, size=1 * MB):
content = os.urandom(size)
blob_name = 'testblob'
- container = self.driver.create_container(random_container_name())
+ container = self.driver.create_container(self._random_container_name())
# upload a file
obj = do_upload(container, blob_name, content)
@@ -145,6 +149,10 @@
blobs = self.driver.list_container_objects(container)
self.assertEqual([blob.name for blob in blobs], [blob_name[::-1]])
+ def assert_existing_container_cannot_be_recreated(self, container):
+ with self.assertRaises(types.ContainerAlreadyExistsError):
+ self.driver.create_container(container.name)
+
def assert_file_is_missing(self, container, obj):
with self.assertRaises(types.ObjectDoesNotExistError):
self.driver.delete_object(obj)
@@ -167,7 +175,7 @@
def test_objects_range_downloads(self):
blob_name = 'testblob-range'
content = b'0123456789'
- container = self.driver.create_container(random_container_name())
+ container = self.driver.create_container(self._random_container_name())
obj = self.driver.upload_object(
self._create_tempfile(content=content),
@@ -255,7 +263,7 @@
def test_upload_via_stream_with_content_encoding(self):
object_name = 'content_encoding.gz'
content = gzip.compress(os.urandom(MB // 100))
- container = self.driver.create_container(random_container_name())
+ container = self.driver.create_container(self._random_container_name())
self.driver.upload_object_via_stream(
iter(content),
container,
@@ -269,7 +277,7 @@
def test_cdn_url(self):
content = os.urandom(MB // 100)
- container = self.driver.create_container(random_container_name())
+ container = self.driver.create_container(self._random_container_name())
obj = self.driver.upload_object_via_stream(iter(content), container, 'cdn')
response = requests.get(self.driver.get_object_cdn_url(obj))
@@ -284,6 +292,16 @@
self.addCleanup(os.remove, path)
return path
+ @classmethod
+ def _random_container_name(cls):
+ suffix = random_string(cls.container_name_max_length)
+ name = cls.container_name_prefix + suffix
+ name = re.sub('[^a-z0-9-]', '-', name)
+ name = re.sub('-+', '-', name)
+ name = name[:cls.container_name_max_length]
+ name = name.lower()
+ return name
+
class ContainerTestBase(TestBase):
image = None
version = 'latest'
@@ -373,17 +391,6 @@
return ''.join(random.choice(alphabet) for _ in range(length))
-def random_container_name(prefix='test'):
- max_length = 63
- suffix = random_string(max_length)
- name = prefix + suffix
- name = re.sub('[^a-z0-9-]', '-', name)
- name = re.sub('-+', '-', name)
- name = name[:max_length]
- name = name.lower()
- return name
-
-
def read_stream(stream):
buffer = io.BytesIO()
buffer.writelines(stream)
diff --git a/integration/storage/requirements.txt b/integration/storage/requirements.txt
index cbdd599..d54d412 100644
--- a/integration/storage/requirements.txt
+++ b/integration/storage/requirements.txt
@@ -1,5 +1,6 @@
azure-identity
azure-mgmt-resource
azure-mgmt-storage
+boto3
docker
requests
diff --git a/integration/storage/test_s3.py b/integration/storage/test_s3.py
new file mode 100644
index 0000000..8276b80
--- /dev/null
+++ b/integration/storage/test_s3.py
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the 'License'); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+import unittest
+
+try:
+ import boto3
+except ImportError:
+ boto3 = None
+
+from integration.storage.base import Integration
+from libcloud.storage import types
+
+
+class S3Test(Integration.TestBase):
+ provider = 's3'
+
+ @classmethod
+ def setUpClass(cls):
+ if boto3 is None:
+ raise unittest.SkipTest('missing boto3 library')
+
+ config = {
+ key: os.getenv(key)
+ for key in (
+ 'AWS_ACCESS_KEY_ID',
+ 'AWS_ACCESS_KEY_SECRET',
+ )
+ }
+
+ for key, value in config.items():
+ if not value:
+ raise unittest.SkipTest('missing environment variable %s' % key)
+
+ cls.account = config['AWS_ACCESS_KEY_ID']
+ cls.secret = config['AWS_ACCESS_KEY_SECRET']
+
+ @classmethod
+ def tearDownClass(cls):
+ client = boto3.Session(
+ aws_access_key_id=cls.account,
+ aws_secret_access_key=cls.secret,
+ ).client('s3')
+
+ buckets = (
+ item['Name']
+ for item in client.list_buckets()['Buckets']
+ if item['Name'].startswith(cls.container_name_prefix)
+ )
+
+ for name in buckets:
+        bucket = boto3.resource('s3', aws_access_key_id=cls.account, aws_secret_access_key=cls.secret).Bucket(name)
+ bucket.objects.delete()
+        client.delete_bucket(Bucket=name)
+
+ def assert_existing_container_cannot_be_recreated(self, container):
+ pass
+
+ def assert_file_is_missing(self, container, obj):
+ with self.assertRaises(types.ObjectDoesNotExistError):
+ self.driver.get_object(container.name, obj.name)
+
+
+if __name__ == '__main__':
+ sys.exit(unittest.main())
diff --git a/tox.ini b/tox.ini
index 7fff156..646428e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -14,8 +14,8 @@
pypypy3: pypy3
py3.5: python3.5
py3.6: python3.6
- {py3.7,docs,checks,lint,pylint,mypy,coverage,docs,integration-storage,py3.7-dist,py3.7-dist-wheel}: python3.7
- {py3.8,py3.8-windows}: python3.8
+ {py3.7,docs,checks,lint,pylint,mypy,coverage,docs,py3.7-dist,py3.7-dist-wheel}: python3.7
+ {py3.8,py3.8-windows,integration-storage}: python3.8
{py3.9}: python3.9
setenv =
CRYPTOGRAPHY_ALLOW_OPENSSL_102=1
@@ -218,7 +218,7 @@
commands = python -m integration.compute
[testenv:integration-storage]
-passenv = AZURE_CLIENT_SECRET
+passenv = AZURE_CLIENT_SECRET AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_SECRET
setenv =
AZURE_CLIENT_ID=16cd65a3-dfa2-4272-bcdb-842cbbedb1b7