Merge pull request #1494 from apache/tristan/refresh-deps

Update requirements and fix resulting problems
diff --git a/.github/compose/ci.docker-compose.yml b/.github/compose/ci.docker-compose.yml
index ee8d8c8..64247c9 100644
--- a/.github/compose/ci.docker-compose.yml
+++ b/.github/compose/ci.docker-compose.yml
@@ -2,7 +2,7 @@
 
 x-tests-template: &tests-template
     image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:32-${CI_IMAGE_VERSION:-latest}
-    command: tox -vvvvv -- --color=yes --integration -n 4
+    command: tox -vvvvv -- --color=yes --integration
     environment:
       TOXENV: ${CI_TOXENV_ALL}
 
@@ -46,12 +46,6 @@
     <<: *tests-template
     image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:minimal-${CI_IMAGE_VERSION:-latest}
 
-  # Ensure that tests also pass without `--develop` flag
-  no-usedevelop:
-    <<: *tests-template
-    environment:
-      TOXENV: py36-nocover,py37-nocover,py38-nocover
-
   # Test the master version of external plugins
   plugins-master:
     <<: *tests-template
@@ -105,7 +99,7 @@
 
   lint:
     <<: *tests-template
-    command: tox -e lint
+    command: tox -e lint,format-check
 
   mypy:
     <<: *tests-template
diff --git a/.github/run-ci.sh b/.github/run-ci.sh
new file mode 100755
index 0000000..0828be5
--- /dev/null
+++ b/.github/run-ci.sh
@@ -0,0 +1,124 @@
+#!/bin/bash
+
+topdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+function usage () {
+    echo "Usage: "
+    echo "  run-ci.sh [OPTIONS] [TEST NAME [TEST NAME...]]"
+    echo
+    echo "Runs the CI tests locally using docker"
+    echo
+    echo "The test names are based on the names of tests in the CI yaml files"
+    echo
+    echo "If no test names are specified, all tests will be run"
+    echo
+    echo "Options:"
+    echo
+    echo "  -h --help      Display this help message and exit"
+    echo "  -s --service   Run service tests instead of regular tests"
+    echo "  "
+    exit 1;
+}
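+
+# Example invocations (illustrative only; valid test names are those
+# defined in the CI yaml files, e.g. the ones run by default below):
+#
+#   ./run-ci.sh lint mypy              # run only the lint and mypy tests
+#   ./run-ci.sh --service buildgrid    # run only the buildgrid service test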
+
+arg_service=false
+
+while : ; do
+    case "$1" in
+        -h|--help)
+            usage
+            ;;
+        -s|--service)
+            arg_service=true
+            shift ;;
+        *)
+            break ;;
+    esac
+done
+
+test_names="${@}"
+
+
+# We need to give the docker image user `testuser` (UID/GID 1000)
+# ownership of the source directory; chances are high that this is
+# already the UID of the primary user on this host.
+#
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+if [ "${user_uid}" -ne "1000" ] || [ "${user_gid}" -ne "1000" ]; then
+    sudo chown -R 1000:1000 "${topdir}/.."
+fi
+
+
+# runTest()
+#
+#  $1 = test name
+#
+function runTest() {
+    local test_name=$1
+
+    # Run docker-compose from its directory, because it will use
+    # relative paths
+    cd "${topdir}/compose"
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.docker-compose.yml" \
+        run "${test_name}"
+}
+
+
+# runServiceTest()
+#
+#  $1 = test name
+#
+function runServiceTest() {
+    local test_name=$1
+
+    # Run docker-compose from its directory, because it will use
+    # relative paths
+    cd "${topdir}/compose"
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.${test_name}.yml" \
+        up --detach --renew-anon-volumes --remove-orphans
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.docker-compose.yml" run ${test_name}
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.${test_name}.yml" stop
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.${test_name}.yml" logs
+    docker-compose \
+        --env-file "${topdir}/common.env" \
+        --file "${topdir}/compose/ci.${test_name}.yml" down
+}
+
+
+# Lazily ensure that the script exits when a command fails
+#
+set -e
+
+if [ -z "${test_names}" ]; then
+    runTest "lint"
+    runTest "mypy"
+    runTest "debian-10"
+    runTest "fedora-32"
+    runTest "fedora-33"
+    runTest "ubuntu-18.04"
+    runTest "centos-7.7.1908"
+    runTest "fedora-missing-deps"
+    runServiceTest "bst-artifact-server"
+    runServiceTest "buildbarn"
+    runServiceTest "buildgrid"
+else
+    if $arg_service; then
+        for test_name in ${test_names}; do
+            runServiceTest "${test_name}"
+        done
+    else
+        for test_name in ${test_names}; do
+            runTest "${test_name}"
+        done
+    fi
+fi
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 894caaa..63ec71f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,7 +50,6 @@
           - ubuntu-18.04
           - centos-7.7.1908
           - fedora-missing-deps
-          - no-usedevelop
           - lint
           - mypy
 
@@ -65,16 +64,9 @@
         with:
           fetch-depth: 0
 
-      - name: Give `testuser` ownership of the source directory
-        run: sudo chown -R 1000:1000 ${GITHUB_WORKSPACE}
-
       - name: Run tests with Docker Compose
         run: |
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.docker-compose.yml \
-            run \
-            ${{ matrix.test-name }}
+          ${GITHUB_WORKSPACE}/.github/run-ci.sh ${{ matrix.test-name }}
 
   # Matrix of tests which run against remote services which we bring up adjacently
   service-tests:
@@ -98,33 +90,9 @@
         with:
           fetch-depth: 0
 
-      - name: Give `testuser` ownership of the source directory
-        run: sudo chown -R 1000:1000 ${GITHUB_WORKSPACE}
-
       - name: Bring up the RE cluster
         run: |
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.${{ matrix.test-name }}.yml \
-            up --detach --renew-anon-volumes --remove-orphans
-
-      - name: Run the remote execution tests
-        run: |
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.docker-compose.yml run ${{ matrix.test-name }}
-
-      - name: Bring down the RE cluster
-        run: |
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.${{ matrix.test-name }}.yml stop
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.${{ matrix.test-name }}.yml logs
-          docker-compose \
-            --env-file ${GITHUB_WORKSPACE}/.github/common.env \
-            --file ${GITHUB_WORKSPACE}/.github/compose/ci.${{ matrix.test-name }}.yml down
+          ${GITHUB_WORKSPACE}/.github/run-ci.sh --service ${{ matrix.test-name }}
 
   docs:
     runs-on: ubuntu-20.04
diff --git a/.pylintrc b/.pylintrc
index 382c81b..fb2dc26 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -120,6 +120,11 @@
         # at some point
         raise-missing-from,
 
+        # We can probably enable this soon; it is still a bit experimental
+        # and current releases of pylint (August 2021) raise a lot of
+        # false positives.
+        unused-private-member,
+
         ##################################################
         # Formatting-related messages, enforced by Black #
         ##################################################
diff --git a/requirements/cov-requirements.txt b/requirements/cov-requirements.txt
index 24a5cb9..4bb1048 100644
--- a/requirements/cov-requirements.txt
+++ b/requirements/cov-requirements.txt
@@ -1,12 +1,12 @@
 coverage==4.4
 pytest-cov==2.10.1
-pytest==6.1.2
-Cython==0.29.21
+pytest==6.2.4
+Cython==0.29.24
 ## The following requirements were added by pip freeze:
-attrs==20.3.0
+attrs==21.2.0
 iniconfig==1.1.1
-packaging==20.7
+packaging==21.0
 pluggy==0.13.1
-py==1.9.0
+py==1.10.0
 pyparsing==2.4.7
 toml==0.10.2
diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt
index 5ad8be9..cef33fd 100644
--- a/requirements/dev-requirements.txt
+++ b/requirements/dev-requirements.txt
@@ -1,27 +1,26 @@
 pexpect==4.8.0
-pylint==2.6.0
+pylint==2.10.2
 # Pytest 6.0.0 doesn't play well with pylint
-pytest==6.1.2
+pytest==6.2.4
 pytest-datafiles==2.0
 pytest-env==0.6.2
-pytest-xdist==2.1.0
+pytest-xdist==2.3.0
 pytest-timeout==1.4.2
 pyftpdlib==1.5.6
 ## The following requirements were added by pip freeze:
-apipkg==1.5
-astroid==2.4.2
-attrs==20.3.0
-execnet==1.7.1
+astroid==2.7.2
+attrs==21.2.0
+execnet==1.9.0
 iniconfig==1.1.1
-isort==5.6.4
-lazy-object-proxy==1.4.3
+isort==5.9.3
+lazy-object-proxy==1.6.0
 mccabe==0.6.1
-packaging==20.7
+packaging==21.0
+platformdirs==2.2.0
 pluggy==0.13.1
-ptyprocess==0.6.0
-py==1.9.0
+ptyprocess==0.7.0
+py==1.10.0
 pyparsing==2.4.7
 pytest-forked==1.3.0
-six==1.15.0
 toml==0.10.2
 wrapt==1.12.1
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 0d4f7d7..77eebc2 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -1,15 +1,15 @@
-click==7.1.2
-grpcio==1.34.0
-Jinja2==2.11.2
-pluginbase==1.0.0
-protobuf==3.14.0
-psutil==5.7.3
-ruamel.yaml==0.16.12
-ruamel.yaml.clib==0.2.2
-setuptools==49.1.3
-pyroaring==0.2.9
-ujson==4.0.1
-python-dateutil==2.8.1
+click==8.0.1
+grpcio==1.39.0
+Jinja2==3.0.1
+pluginbase==1.0.1
+protobuf==3.17.3
+psutil==5.8.0
+ruamel.yaml==0.17.13
+ruamel.yaml.clib==0.2.6
+setuptools==44.1.1
+pyroaring==0.3.3
+ujson==4.1.0
+python-dateutil==2.8.2
 ## The following requirements were added by pip freeze:
-MarkupSafe==1.1.1
-six==1.15.0
+MarkupSafe==2.0.1
+six==1.16.0
diff --git a/setup.py b/setup.py
index 022ecfb..4e29d76 100755
--- a/setup.py
+++ b/setup.py
@@ -191,7 +191,7 @@
             for filename in files:
                 if filename.endswith(".py"):
                     path = os.path.join(root, filename)
-                    with open(path, "r") as f:
+                    with open(path, "r", encoding="utf-8") as f:
                         code = f.read()
 
                     # All protos are in buildstream._protos
@@ -201,7 +201,7 @@
                         r"^from buildstream._protos.google.protobuf", r"from google.protobuf", code, flags=re.MULTILINE
                     )
 
-                    with open(path, "w") as f:
+                    with open(path, "w", encoding="utf-8") as f:
                         f.write(code)
 
 
@@ -216,13 +216,13 @@
 #####################################################
 #               Gather requirements                 #
 #####################################################
-with open("requirements/requirements.in") as install_reqs:
+with open("requirements/requirements.in", encoding="utf-8") as install_reqs:
     install_requires = install_reqs.read().splitlines()
 
 #####################################################
 #     Prepare package description from README       #
 #####################################################
-with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), "README.rst")) as readme:
+with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), "README.rst"), encoding="utf-8") as readme:
     long_description = readme.read()
 
 
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 609feb5..a992199 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -320,7 +320,7 @@
             batch.add(digest)
             batch.send()
 
-        return open(objpath, mode=mode)
+        return open(objpath, mode=mode)  # pylint: disable=consider-using-with,unspecified-encoding
 
     # add_object():
     #
@@ -475,7 +475,7 @@
         else:
             instance_name = ""
 
-        missing_blobs = dict()
+        missing_blobs = {}
         # Limit size of FindMissingBlobs request
         for required_blobs_group in _grouper(iter(blobs), 512):
             request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=instance_name)
diff --git a/src/buildstream/_cas/casdprocessmanager.py b/src/buildstream/_cas/casdprocessmanager.py
index 0a7d768..bb1fb4a 100644
--- a/src/buildstream/_cas/casdprocessmanager.py
+++ b/src/buildstream/_cas/casdprocessmanager.py
@@ -87,11 +87,13 @@
         self._start_time = time.time()
         self._logfile = self._rotate_and_get_next_logfile()
 
-        with open(self._logfile, "w") as logfile_fp:
+        with open(self._logfile, "w", encoding="utf-8") as logfile_fp:
             # Block SIGINT on buildbox-casd, we don't need to stop it
             # The frontend will take care of it if needed
             with _signals.blocked([signal.SIGINT], ignore=False):
-                self.process = subprocess.Popen(casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT)
+                self.process = subprocess.Popen(  # pylint: disable=consider-using-with
+                    casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT
+                )
 
     # _make_socket_path()
     #
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index 3a89736..1ea52fe 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -227,18 +227,20 @@
     def Read(self, request, context):
         self.logger.debug("Reading %s", request.resource_name)
         try:
-            return self.bytestream.Read(request)
+            ret = self.bytestream.Read(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
     def Write(self, request_iterator, context):
         # Note that we can't easily give more information because the
         # data is stuck in an iterator that will be consumed if read.
         self.logger.debug("Writing data")
         try:
-            return self.bytestream.Write(request_iterator)
+            ret = self.bytestream.Write(request_iterator)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
 
 class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
@@ -251,23 +253,26 @@
     def FindMissingBlobs(self, request, context):
         self.logger.info("Finding '%s'", request.blob_digests)
         try:
-            return self.cas.FindMissingBlobs(request)
+            ret = self.cas.FindMissingBlobs(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
     def BatchReadBlobs(self, request, context):
         self.logger.info("Reading '%s'", request.digests)
         try:
-            return self.cas.BatchReadBlobs(request)
+            ret = self.cas.BatchReadBlobs(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
     def BatchUpdateBlobs(self, request, context):
         self.logger.info("Updating: '%s'", [request.digest for request in request.requests])
         try:
-            return self.cas.BatchUpdateBlobs(request)
+            ret = self.cas.BatchUpdateBlobs(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
 
 class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
@@ -300,16 +305,18 @@
     def FetchBlob(self, request, context):
         self.logger.debug("FetchBlob '%s'", request.uris)
         try:
-            return self.fetch.FetchBlob(request)
+            ret = self.fetch.FetchBlob(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
     def FetchDirectory(self, request, context):
         self.logger.debug("FetchDirectory '%s'", request.uris)
         try:
-            return self.fetch.FetchDirectory(request)
+            ret = self.fetch.FetchDirectory(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
 
 class _PushServicer(remote_asset_pb2_grpc.PushServicer):
@@ -321,16 +328,18 @@
     def PushBlob(self, request, context):
         self.logger.debug("PushBlob '%s'", request.uris)
         try:
-            return self.push.PushBlob(request)
+            ret = self.push.PushBlob(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
     def PushDirectory(self, request, context):
         self.logger.debug("PushDirectory '%s'", request.uris)
         try:
-            return self.push.PushDirectory(request)
+            ret = self.push.PushDirectory(request)
         except grpc.RpcError as err:
             context.abort(err.code(), err.details())
+        return ret
 
 
 class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index c2afed9..852284c 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -422,7 +422,7 @@
             # us programmatically insert comments or whitespace at
             # the toplevel.
             try:
-                with open(project_path, "w") as f:
+                with open(project_path, "w", encoding="utf-8") as f:
                     f.write(
                         "# Unique project name\n"
                         + "name: {}\n\n".format(project_name)
@@ -717,7 +717,7 @@
                     except BstError as e:
                         click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
                 elif choice == "log":
-                    with open(failure.logfile, "r") as logfile:
+                    with open(failure.logfile, "r", encoding="utf-8") as logfile:
                         content = logfile.read()
                         click.echo_via_pager(content)
 
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index f20d98f..0753215 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -1542,7 +1542,7 @@
         if not out:
             try:
                 for log in list(artifact_logs.values()):
-                    with open(log[0], "r") as f:
+                    with open(log[0], "r", encoding="utf-8") as f:
                         data = f.read()
                     click.echo_via_pager(data)
             except (OSError, FileNotFoundError):
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 45e857e..d17bb73 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -150,7 +150,19 @@
     elif isinstance(param_type, click.File):
         return complete_path("File", incomplete)
     elif isinstance(param_type, click.Path):
-        return complete_path(param_type.path_type, incomplete)
+
+        # Workaround click 8.x API break:
+        #
+        #    https://github.com/pallets/click/issues/2037
+        #
+        if param_type.file_okay and not param_type.dir_okay:
+            path_type = "File"
+        elif param_type.dir_okay and not param_type.file_okay:
+            path_type = "Directory"
+        else:
+            path_type = "Path"
+
+        return complete_path(path_type, incomplete)
 
     return []
 
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 4bdf7c9..3bd0611 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -791,7 +791,7 @@
         with ExitStack() as stack:
             # mmap handles low-level memory details, allowing for
             # faster searches
-            f = stack.enter_context(open(logfile, "r+"))
+            f = stack.enter_context(open(logfile, "r+", encoding="utf-8"))
             log = stack.enter_context(mmap(f.fileno(), os.path.getsize(f.name)))
 
             count = 0
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index e3b2466..67ca142 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -461,7 +461,7 @@
                         cwd=fullpath,
                     )
 
-            with open(os.path.join(fullpath, ".git", "shallow"), "w") as shallow_file:
+            with open(os.path.join(fullpath, ".git", "shallow"), "w", encoding="utf-8") as shallow_file:
                 for rev in shallow:
                     shallow_file.write("{}\n".format(rev))
 
@@ -490,7 +490,7 @@
                         cwd=fullpath,
                     )
 
-            with open(os.path.join(fullpath, ".git", "HEAD"), "w") as head:
+            with open(os.path.join(fullpath, ".git", "HEAD"), "w", encoding="utf-8") as head:
                 self.source.call(
                     [self.source.host_git, "rev-parse", self.ref],
                     stdout=head,
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index edb79ec..c69bc21 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -400,7 +400,7 @@
         directory = os.path.dirname(self._locals.log_filename)
         os.makedirs(directory, exist_ok=True)
 
-        with open(self._locals.log_filename, "a") as logfile:
+        with open(self._locals.log_filename, "a", encoding="utf-8") as logfile:
 
             # Write one last line to the log and flush it to disk
             def flush_log():
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index 0219e83..a254b49 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -98,7 +98,7 @@
             ]
         )
 
-        with open(self.log_filename, "a") as fp:
+        with open(self.log_filename, "a", encoding="utf-8") as fp:
             stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
 
             # Create the log file
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 12c6638..fac843e 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -1829,7 +1829,7 @@
         # Stage all our sources in a temporary directory. This
         # directory can be used either to construct a tarball or be
         # moved to the final desired location.
-        temp_source_dir = tempfile.TemporaryDirectory(dir=self._context.tmpdir)
+        temp_source_dir = tempfile.TemporaryDirectory(dir=self._context.tmpdir)  # pylint: disable=consider-using-with
         try:
             self._write_element_sources(temp_source_dir.name, elements)
             if include_build_scripts:
@@ -1884,12 +1884,10 @@
             compression = ""
         mode = _handle_compression(compression)
         try:
-            with utils.save_file_atomic(tar_name, mode="wb") as f:
-                tarball = tarfile.open(fileobj=f, mode=mode)
+            with utils.save_file_atomic(tar_name, mode="wb") as f, tarfile.open(fileobj=f, mode=mode) as tarball:
                 for item in os.listdir(str(directory)):
                     file_to_add = os.path.join(directory, item)
                     tarball.add(file_to_add, arcname=item)
-                tarball.close()
         except OSError as e:
             raise StreamError("Failed to create tar archive: {}".format(e)) from e
 
@@ -1908,7 +1906,7 @@
 
         script_path = os.path.join(directory, "build.sh")
 
-        with open(_site.build_all_template, "r") as f:
+        with open(_site.build_all_template, "r", encoding="utf-8") as f:
             script_template = f.read()
 
         with utils.save_file_atomic(script_path, "w") as script:
@@ -2037,8 +2035,8 @@
                     globs[glob] = globs[glob] + 1
 
         # Issue warnings and errors
-        unmatched = [glob for glob in globs if globs[glob] == 0]
-        doubly_matched = [glob for glob in globs if globs[glob] > 1]
+        unmatched = [glob for glob, glob_count in globs.items() if glob_count == 0]
+        doubly_matched = [glob for glob, glob_count in globs.items() if glob_count > 1]
 
         # Warn the user if any of the provided globs did not match anything
         if unmatched:
diff --git a/src/buildstream/downloadablefilesource.py b/src/buildstream/downloadablefilesource.py
index b299b7f..208f53b 100644
--- a/src/buildstream/downloadablefilesource.py
+++ b/src/buildstream/downloadablefilesource.py
@@ -196,7 +196,7 @@
     def _get_etag(self, ref):
         etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
         if os.path.exists(etagfilename):
-            with open(etagfilename, "r") as etagfile:
+            with open(etagfilename, "r", encoding="utf-8") as etagfile:
                 return etagfile.read()
 
         return None
@@ -265,7 +265,7 @@
     @classmethod
     def _reset_url_opener(cls):
         # Needed for tests, in order to cleanup the `netrc` configuration.
-        cls.__urlopener = None
+        cls.__urlopener = None  # pylint: disable=unused-private-member
 
     def __get_urlopener(self):
         if not DownloadableFileSource.__urlopener:
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 38240bb..c67f886 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -1669,7 +1669,7 @@
         if self._cached_failure() and not self.__assemble_done:
             with self._output_file() as output_file:
                 for log_path in self.__artifact.get_logs():
-                    with open(log_path) as log_file:
+                    with open(log_path, encoding="utf-8") as log_file:
                         output_file.write(log_file.read())
 
             _, description, detail = self._get_build_result()
@@ -2089,7 +2089,7 @@
     #
     # Writes a script to the given directory.
     def _write_script(self, directory):
-        with open(_site.build_module_template, "r") as f:
+        with open(_site.build_module_template, "r", encoding="utf-8") as f:
             script_template = f.read()
 
         variable_string = ""
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index edeec65..4008734 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -176,7 +176,7 @@
             # Here we send the result again, just in case it was a PickleError
             # in which case the same exception would be thrown down
             result_queue.put((exc, result))
-        except pickle.PickleError as exc:
+        except pickle.PickleError:
             result_queue.put((traceback.format_exc(), None))
 
 
@@ -331,7 +331,7 @@
 
         # Infer the kind identifier
         modulename = type(self).__module__
-        self.__kind = modulename.split(".")[-1]
+        self.__kind = modulename.rsplit(".", maxsplit=1)[-1]
         self.debug("Created: {}".format(self))
 
     def __del__(self):
@@ -800,7 +800,7 @@
     def _output_file(self):
         log = self.__context.messenger.get_log_handle()
         if log is None:
-            with open(os.devnull, "w") as output:
+            with open(os.devnull, "w", encoding="utf-8") as output:
                 yield output
         else:
             yield log
diff --git a/src/buildstream/sandbox/_sandboxbuildboxrun.py b/src/buildstream/sandbox/_sandboxbuildboxrun.py
index 9880875..e3f8d7a 100644
--- a/src/buildstream/sandbox/_sandboxbuildboxrun.py
+++ b/src/buildstream/sandbox/_sandboxbuildboxrun.py
@@ -173,7 +173,7 @@
                 stack.enter_context(_signals.suspendable(suspend_proc, resume_proc))
                 stack.enter_context(_signals.terminator(kill_proc))
 
-            process = subprocess.Popen(
+            process = subprocess.Popen(  # pylint: disable=consider-using-with
                 argv, close_fds=True, stdin=stdin, stdout=stdout, stderr=stderr, start_new_session=new_session,
             )
 
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 3f1fc59..74ce1dc 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -287,7 +287,7 @@
             encoding = "utf-8"
 
         if "r" in mode:
-            return open(newpath, mode=mode, encoding=encoding)
+            return open(newpath, mode=mode, encoding=encoding)  # pylint: disable=consider-using-with
         else:
             if "x" in mode:
                 # This check is not atomic, however, we're operating with a
diff --git a/src/buildstream/testing/_cachekeys.py b/src/buildstream/testing/_cachekeys.py
index b4340ad..34be170 100644
--- a/src/buildstream/testing/_cachekeys.py
+++ b/src/buildstream/testing/_cachekeys.py
@@ -102,7 +102,7 @@
     for element_name in actual_keys:
         expected = _element_filename(project_dir, element_name, "expected")
         try:
-            with open(expected, "r") as f:
+            with open(expected, "r", encoding="utf-8") as f:
                 expected_key = f.read()
                 expected_key = expected_key.strip()
         except FileNotFoundError:
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index b834f32..92265dd 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -36,7 +36,7 @@
 def create_test_file(*path, mode=0o644, content="content\n"):
     path = os.path.join(*path)
     os.makedirs(os.path.dirname(path), exist_ok=True)
-    with open(path, "w") as f:
+    with open(path, "w", encoding="utf-8") as f:
         f.write(content)
         os.fchmod(f.fileno(), mode)
 
@@ -98,7 +98,7 @@
             result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir])
             result.assert_success()
 
-            with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
+            with open(os.path.join(checkoutdir, "ls-l"), "r", encoding="utf-8") as f:
                 for line in f.readlines():
                     test_values.append(line.split()[0] + " " + line.split()[-1])
                 return test_values
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index 2c41419..8be5db4 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -49,7 +49,7 @@
     files = str(tmpdir.join("imported_files_{}".format(name)))
     os.makedirs(files)
 
-    with open(os.path.join(files, "{}.txt".format(name)), "w") as f:
+    with open(os.path.join(files, "{}.txt".format(name)), "w", encoding="utf-8") as f:
         f.write(name)
 
     repo = create_repo(kind, str(tmpdir.join("element_{}_repo".format(name))))
diff --git a/src/buildstream/testing/_update_cachekeys.py b/src/buildstream/testing/_update_cachekeys.py
index 219e17f..555b86d 100755
--- a/src/buildstream/testing/_update_cachekeys.py
+++ b/src/buildstream/testing/_update_cachekeys.py
@@ -38,7 +38,7 @@
 
 def write_expected_key(project_dir, element_name, actual_key):
     expected_file = _element_filename(project_dir, element_name, "expected")
-    with open(expected_file, "w") as f:
+    with open(expected_file, "w", encoding="utf-8") as f:
         f.write(actual_key)
 
 
diff --git a/src/buildstream/testing/_utils/site.py b/src/buildstream/testing/_utils/site.py
index 727fe01..3ce9223 100644
--- a/src/buildstream/testing/_utils/site.py
+++ b/src/buildstream/testing/_utils/site.py
@@ -34,7 +34,7 @@
     GIT = None
     HAVE_GIT = False
     HAVE_OLD_GIT = False
-    GIT_ENV = dict()
+    GIT_ENV = {}
 
 try:
     BZR = utils.get_host_tool("bzr")  # type: Optional[str]
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 31b74c6..7c29174 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -352,7 +352,7 @@
             try:
                 sys.__stdout__.fileno()
             except ValueError:
-                sys.__stdout__ = open("/dev/stdout", "w")
+                sys.__stdout__ = open("/dev/stdout", "w", encoding="utf-8")  # pylint: disable=consider-using-with
 
             result = self._invoke(bst_cli, bst_args, binary_capture=binary_capture)
 
@@ -378,7 +378,7 @@
         # Temporarily redirect sys.stdin to /dev/null to ensure that
         # Popen doesn't attempt to read pytest's dummy stdin.
         old_stdin = sys.stdin
-        with open(os.devnull) as devnull:
+        with open(os.devnull, "rb") as devnull:
             sys.stdin = devnull
             capture_kind = FDCaptureBinary if binary_capture else FDCapture
             capture = MultiCapture(out=capture_kind(1), err=capture_kind(2), in_=None)
@@ -527,7 +527,7 @@
         else:
             project_load_filename = project_backup
 
-        with open(project_load_filename) as f:
+        with open(project_load_filename, encoding="utf-8") as f:
             config = f.read()
         config = config.format(project_dir=project_directory)
 
@@ -545,7 +545,7 @@
             with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
 
                 temp_project = os.path.join(scratchdir, "project.conf")
-                with open(temp_project, "w") as f:
+                with open(temp_project, "w", encoding="utf-8") as f:
                     yaml.safe_dump(project_config, f)
 
                 project_config = _yaml.load(temp_project, shortname="project.conf")
@@ -557,7 +557,7 @@
         else:
 
             # Otherwise, just dump it as is
-            with open(project_filename, "w") as f:
+            with open(project_filename, "w", encoding="utf-8") as f:
                 f.write(config)
 
         return super().run(**kwargs)
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 34914df..b01ecab 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -48,7 +48,7 @@
     """
 
     # A dict of all values mapping to the entries in the enum
-    _value_to_entry = dict()  # type: Dict[str, Any]
+    _value_to_entry = {}  # type: Dict[str, Any]
 
     @classmethod
     def values(cls):
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 04bbf26..c837aad 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -16,6 +16,12 @@
 #
 #  Authors:
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+
+# Disable this for the whole file, because pylint does not pick it up
+# when it is specified on the specific line.
+#
+# pylint: disable=subprocess-popen-preexec-fn
+#
 """
 Utilities
 =========
@@ -1378,10 +1384,9 @@
             group_id = os.getpgid(process.pid)
             os.killpg(group_id, signal.SIGCONT)
 
-    with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
-        process = subprocess.Popen(  # pylint: disable=subprocess-popen-preexec-fn
-            *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
-        )
+    with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc), subprocess.Popen(
+        *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
+    ) as process:
         # Here, we don't use `process.communicate()` directly without a timeout
         # This is because, if we were to do that, and the process would never
         # output anything, the control would never be given back to the python
diff --git a/tests/examples/developing.py b/tests/examples/developing.py
index 90d33bf..b936721 100644
--- a/tests/examples/developing.py
+++ b/tests/examples/developing.py
@@ -7,7 +7,7 @@
 from buildstream.testing import cli_integration as cli  # pylint: disable=unused-import
 from buildstream.testing.integration import assert_contains
 from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
-import tests.testutils.patch as patch
+from tests.testutils import patch
 
 pytestmark = pytest.mark.integration
 
diff --git a/tests/format/project.py b/tests/format/project.py
index 6e06176..b5b75d0 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -223,7 +223,7 @@
     linked_project = os.path.join(str(tmpdir), "linked")
     os.symlink(real_project, linked_project)
     os.makedirs(os.path.join(real_project, "elements"), exist_ok=True)
-    with open(os.path.join(real_project, "elements", "element.bst"), "w") as f:
+    with open(os.path.join(real_project, "elements", "element.bst"), "w", encoding="utf-8") as f:
         f.write("kind: manual\n")
     result = cli.run(project=linked_project, args=["show", "element.bst"])
     result.assert_success()
diff --git a/tests/frontend/artifact_log.py b/tests/frontend/artifact_log.py
index 07efa56..6f50b1b 100644
--- a/tests/frontend/artifact_log.py
+++ b/tests/frontend/artifact_log.py
@@ -94,11 +94,11 @@
 
     # Ensure the file contains the logs by checking for the LOG line
     pattern = r"\[..:..:..\] LOG     \[.*\] target.bst"
-    with open(target, "r") as f:
+    with open(target, "r", encoding="utf-8") as f:
         data = f.read()
         assert len(re.findall(pattern, data, re.MULTILINE)) > 0
 
     pattern = r"\[..:..:..\] LOG     \[.*\] import-bin.bst"
-    with open(import_bin, "r") as f:
+    with open(import_bin, "r", encoding="utf-8") as f:
         data = f.read()
         assert len(re.findall(pattern, data, re.MULTILINE)) > 0
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 72587b3..6021ff0 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -642,7 +642,7 @@
     with tarfile.open(name=checkout, mode="r:") as tar:
         tar.extractall(extract)
 
-    with open(os.path.join(extract, "basicfolder", "basicsymlink")) as fp:
+    with open(os.path.join(extract, "basicfolder", "basicsymlink"), encoding="utf-8") as fp:
         data = fp.read()
     assert data == "file contents\n"
 
@@ -672,7 +672,7 @@
     result = cli.run(project=project, args=checkout_args)
     result.assert_success()
 
-    with open(os.path.join(checkout, "basicfolder", "basicsymlink")) as fp:
+    with open(os.path.join(checkout, "basicfolder", "basicsymlink"), encoding="utf-8") as fp:
         data = fp.read()
     assert data == "file contents\n"
 
@@ -695,7 +695,7 @@
 
     # Create the checkout dir and add a file to it, should cause checkout to fail
     os.makedirs(checkout, exist_ok=True)
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("Hello")
 
     # Prepare checkout args
@@ -730,7 +730,7 @@
 
     # Create the checkout dir and add a file to it, should cause checkout to fail
     os.makedirs(checkout, exist_ok=True)
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("Hello")
 
     # Prepare checkout args
@@ -768,7 +768,7 @@
     assert os.path.isdir(builddir)
     assert not os.listdir(builddir)
 
-    with open(tarball, "w") as f:
+    with open(tarball, "w", encoding="utf-8") as f:
         f.write("Hello")
 
     checkout_args = ["artifact", "checkout", "--force", "--tar", tarball, "target.bst"]
@@ -889,7 +889,7 @@
     # Assert the content of /etc/animal.conf
     filename = os.path.join(checkout, "etc", "animal.conf")
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         contents = f.read()
     assert contents == "animal=Pony\n"
 
@@ -919,12 +919,12 @@
 
     # Assert the content of /etc/animal.conf in the workspace
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         contents = f.read()
     assert contents == "animal=Pony\n"
 
     # Modify the content of the animal.conf in the workspace
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("animal=Horsy\n")
 
     # Now try to build it, this should automatically result in fetching
@@ -942,7 +942,7 @@
     # Assert the workspace modified content of /etc/animal.conf
     filename = os.path.join(checkout, "etc", "animal.conf")
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         contents = f.read()
     assert contents == "animal=Horsy\n"
 
@@ -999,7 +999,7 @@
     # Assert the content of /etc/animal.conf
     filename = os.path.join(checkout, "etc", "animal.conf")
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         contents = f.read()
     assert contents == "animal=Pony\n"
 
@@ -1035,7 +1035,7 @@
     # Assert the content of /etc/animal.conf
     filename = os.path.join(checkout, "etc", "animal.conf")
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         contents = f.read()
     assert contents == "animal=Pony\n"
 
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index c3b80ae..0972f94 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -18,7 +18,7 @@
 
     import_dir = tmpdir.join("import")
     os.makedirs(str(import_dir))
-    with open(str(import_dir.join("hello.txt")), "w") as f:
+    with open(str(import_dir.join("hello.txt")), "w", encoding="utf-8") as f:
         f.write("hello!")
 
     import_repo_dir = tmpdir.join("import_repo")
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index 6c8a4b7..b8f8d54 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -46,7 +46,7 @@
 
     # Assert that none of the sources are cached
     states = cli.get_element_states(project, [target, build_dep, runtime_dep])
-    assert all([state == "fetch needed" for state in states.values()])
+    assert all(state == "fetch needed" for state in states.values())
 
     # Now fetch the specified sources
     result = cli.run(project=project, args=["source", "fetch", "--deps", deps, target])
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index c3af27b..c206fb0 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -65,7 +65,7 @@
 def test_project_exists(cli, tmpdir):
     project = str(tmpdir)
     project_path = os.path.join(project, "project.conf")
-    with open(project_path, "w") as f:
+    with open(project_path, "w", encoding="utf-8") as f:
         f.write("name: pony\n")
 
     result = cli.run(args=["init", "--project-name", "foo", project])
@@ -75,7 +75,7 @@
 def test_force_overwrite_project(cli, tmpdir):
     project = str(tmpdir)
     project_path = os.path.join(project, "project.conf")
-    with open(project_path, "w") as f:
+    with open(project_path, "w", encoding="utf-8") as f:
         f.write("name: pony\n")
 
     result = cli.run(args=["init", "--project-name", "foo", "--force", project])
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index f8ac932..315268c 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -58,7 +58,7 @@
     large_directory_dir = os.path.join(project, "files", "large-directory")
     os.mkdir(large_directory_dir)
     for i in range(NUM_FILES):
-        with open(os.path.join(large_directory_dir, str(i)), "w") as f:
+        with open(os.path.join(large_directory_dir, str(i)), "w", encoding="utf-8") as f:
             # The files need to have different content as we want different digests.
             f.write(str(i))
 
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index f2be66e..6cbbee7 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -161,7 +161,7 @@
     result = cli.run(project=project, args=["artifact", "log", "--out", logfiles, "logtest.bst"])
     result.assert_success()
 
-    with open(logfile, "r") as f:
+    with open(logfile, "r", encoding="utf-8") as f:
         task_log = f.read()
 
     #########################################################
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index bffc754..b6fcf8f 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -132,7 +132,7 @@
 
     result = cli.run(project=project_dir, args=["source", "fetch", element_name])
     result.assert_success()
-    with open(output_file) as f:
+    with open(output_file, encoding="utf-8") as f:
         contents = f.read()
         assert "Fetch foo:repo1 succeeded from FOO/repo1" in contents
         assert "Fetch bar:repo2 succeeded from RAB/repo2" in contents
@@ -156,7 +156,7 @@
 
     result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name])
     result.assert_success()
-    with open(output_file) as f:
+    with open(output_file, encoding="utf-8") as f:
         contents = f.read()
         print(contents)
         # Success if fetching from arrakis' mirror happened before middle-earth's
@@ -190,7 +190,7 @@
 
     result = cli.run(project=project_dir, args=["source", "fetch", element_name])
     result.assert_success()
-    with open(output_file) as f:
+    with open(output_file, encoding="utf-8") as f:
         contents = f.read()
         print(contents)
         # Success if fetching from Oz' mirror happened before middle-earth's
@@ -224,7 +224,7 @@
 
     result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name])
     result.assert_success()
-    with open(output_file) as f:
+    with open(output_file, encoding="utf-8") as f:
         contents = f.read()
         print(contents)
         # Success if fetching from arrakis' mirror happened before middle-earth's
@@ -330,7 +330,7 @@
 
     main_files = os.path.join(str(tmpdir), "main-files")
     os.makedirs(main_files)
-    with open(os.path.join(main_files, "README"), "w") as f:
+    with open(os.path.join(main_files, "README"), "w", encoding="utf-8") as f:
         f.write("TEST\n")
     main_repodir = os.path.join(str(tmpdir), "main-upstream")
     main_repo = create_repo("git", main_repodir)
@@ -408,7 +408,7 @@
 
     main_files = os.path.join(str(tmpdir), "main-files")
     os.makedirs(main_files)
-    with open(os.path.join(main_files, "README"), "w") as f:
+    with open(os.path.join(main_files, "README"), "w", encoding="utf-8") as f:
         f.write("TEST\n")
     upstream_main_repodir = os.path.join(str(tmpdir), "main-upstream")
     upstream_main_repo = create_repo("git", upstream_main_repodir)
diff --git a/tests/frontend/progress.py b/tests/frontend/progress.py
index 5d446bb..bb1302e 100644
--- a/tests/frontend/progress.py
+++ b/tests/frontend/progress.py
@@ -92,7 +92,7 @@
 
     # Add dependencies to the junction (not allowed, but let's do it
     # anyway)
-    with open(junction_path, "a") as f:
+    with open(junction_path, "a", encoding="utf-8") as f:
         deps = {"depends": ["manual.bst"]}
         _yaml.roundtrip_dump(deps, f)
 
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 70a7009..93168cb 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -142,7 +142,9 @@
         cli.configure({"artifacts": {"servers": [{"url": bad_share.repo, "push": True},]}})
 
         # Now try `bst artifact push` to the good_share.
-        result = cli.run(project=project, args=["artifact", "push", "target.bst", "--artifact-remote", good_share.repo])
+        result = cli.run(
+            project=project, args=["artifact", "push", "target.bst", "--artifact-remote", good_share.repo]
+        )
         result.assert_success()
 
         # Assert that all the artifacts are in the share we pushed
@@ -158,7 +160,9 @@
         artifactdir = os.path.join(cli.directory, "artifacts")
         shutil.rmtree(artifactdir)
 
-        result = cli.run(project=project, args=["artifact", "pull", "target.bst", "--artifact-remote", good_share.repo])
+        result = cli.run(
+            project=project, args=["artifact", "pull", "target.bst", "--artifact-remote", good_share.repo]
+        )
         result.assert_success()
 
         # And assert that it's again in the local cache, without having built
@@ -200,7 +204,7 @@
             assert cli.get_element_state(project, element_name) != "cached"
 
         # Add a file to force change in strict cache key of import-bin.bst
-        with open(os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w") as f:
+        with open(os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w", encoding="utf-8") as f:
             f.write("world")
 
         # Assert that the workspaced element requires a rebuild
@@ -448,7 +452,7 @@
     # We need a big file that does not go into a batch to test a different
     # code path
     os.makedirs(os.path.join(project, "files/dev-files/usr/share"), exist_ok=True)
-    with open(os.path.join(project, "files/dev-files/usr/share/big-file"), "w") as f:
+    with open(os.path.join(project, "files/dev-files/usr/share/big-file"), "w", encoding="utf-8") as f:
         buf = " " * 4096
         for _ in range(1024):
             f.write(buf)
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 8919330..d32abc3 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -605,7 +605,7 @@
     ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
 
         # Add shareproject repo url to project.conf
-        with open(os.path.join(project, "project.conf"), "a") as projconf:
+        with open(os.path.join(project, "project.conf"), "a", encoding="utf-8") as projconf:
             projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
@@ -671,7 +671,7 @@
         # import-bin.bst element to change due to the local files it
         # imports changing.
         path = os.path.join(project, "files", "bin-files", "newfile")
-        with open(path, "w") as f:
+        with open(path, "w", encoding="utf-8") as f:
             f.write("PONY !")
 
         # Now build again after having changed the dependencies
diff --git a/tests/frontend/rebuild.py b/tests/frontend/rebuild.py
index d54eedf..a69c0e5 100644
--- a/tests/frontend/rebuild.py
+++ b/tests/frontend/rebuild.py
@@ -25,7 +25,7 @@
     result.assert_success()
 
     # Modify base import
-    with open(os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w") as f:
+    with open(os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w", encoding="utf-8") as f:
         f.write("#define NEW")
 
     # Rebuild base import and build top-level rebuild-target.bst
@@ -59,7 +59,7 @@
 
     # Modify dependency
     new_header_path = os.path.join(project, "files", "dev-files", "usr", "include", "new.h")
-    with open(new_header_path, "w") as f:
+    with open(new_header_path, "w", encoding="utf-8") as f:
         f.write("#define NEW")
 
     # Trigger rebuild. This will also rebuild the unmodified target as this
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index 7f4d9ff..814e889 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -422,7 +422,7 @@
             _yaml.roundtrip_dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
 
             source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
-            open(source, "x").close()
+            open(source, "x", encoding="utf-8").close()  # pylint: disable=consider-using-with
             assert os.path.exists(source)
 
     setup_test()
@@ -544,7 +544,7 @@
     # Now modify the file, effectively causing the common base.bst
     # dependency to change its cache key
     hello_path = os.path.join(project, "files", "hello.txt")
-    with open(hello_path, "w") as f:
+    with open(hello_path, "w", encoding="utf-8") as f:
         f.write("Goodbye")
 
     # Now assert that we have the states we expect as a result
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index 58c59ec..263f54c 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -104,8 +104,8 @@
         args=["source", "checkout", "--tar", tar, "--compression", compression, "--deps", "none", target],
     )
     result.assert_success()
-    tar = tarfile.open(name=tar, mode="r:" + compression)
-    assert os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
+    with tarfile.open(name=tar, mode="r:" + compression) as tar:
+        assert os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
 
 
 @pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 950cd83..3ed9f50 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -88,10 +88,10 @@
     repo.add_commit()
 
     # Substitute the {repo} for the git repo we created
-    with open(element_path) as f:
+    with open(element_path, encoding="utf-8") as f:
         target_bst = f.read()
     target_bst = target_bst.format(repo=repo.repo)
-    with open(element_path, "w") as f:
+    with open(element_path, "w", encoding="utf-8") as f:
         f.write(target_bst)
 
     # First track for both options
@@ -151,7 +151,7 @@
 
     # Assert that none of the sources have a reference
     states = cli.get_element_states(project, [target, build_dep, runtime_dep])
-    assert all([state == "no reference" for state in states.values()])
+    assert all(state == "no reference" for state in states.values())
 
     # Now track the specified sources
     result = cli.run(project=project, args=["source", "track", "--deps", deps, target])
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 17ff8b2..16b1c81 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -281,7 +281,7 @@
     ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(["git"], ["git"])
     os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
 
-    with open(workspace_dir, "w") as fl:
+    with open(workspace_dir, "w", encoding="utf-8") as fl:
         fl.write("foo")
 
     # Now open the workspace, this should have the effect of automatically
@@ -354,7 +354,7 @@
     assert os.path.exists(workspace)
 
     # Create a new file in the workspace
-    with open(hello_path, "w") as f:
+    with open(hello_path, "w", encoding="utf-8") as f:
         f.write("hello")
 
     # Now open the workspace again with --force and --no-checkout
@@ -365,7 +365,7 @@
 
     # Ensure that our files were not overwritten
     assert os.path.exists(hello_path)
-    with open(hello_path) as f:
+    with open(hello_path, encoding="utf-8") as f:
         assert f.read() == "hello"
 
 
@@ -528,7 +528,7 @@
     # Modify workspace
     shutil.rmtree(os.path.join(workspace, "usr", "bin"))
     os.makedirs(os.path.join(workspace, "etc"))
-    with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     # Now reset the open workspace, this should have the
@@ -569,7 +569,7 @@
     # Modify workspace
     shutil.rmtree(os.path.join(workspace, "usr", "bin"))
     os.makedirs(os.path.join(workspace, "etc"))
-    with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     assert not os.path.exists(os.path.join(workspace, "usr", "bin"))
@@ -600,7 +600,7 @@
     # Modify workspaces
     shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
     os.makedirs(os.path.join(workspace_beta, "etc"))
-    with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     # Now reset the open workspaces, this should have the
@@ -622,7 +622,7 @@
     # Modify workspaces
     shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
     os.makedirs(os.path.join(workspace_beta, "etc"))
-    with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     # Now reset the open workspace, this should have the
@@ -668,7 +668,7 @@
     # Modify workspace
     shutil.rmtree(os.path.join(workspace, "usr", "bin"))
     os.makedirs(os.path.join(workspace, "etc"))
-    with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     # Configure strict mode
@@ -760,12 +760,12 @@
     #
     if modification == "addfile":
         os.makedirs(os.path.join(workspace, "etc"))
-        with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
+        with open(os.path.join(workspace, "etc", "pony.conf"), "w", encoding="utf-8") as f:
             f.write("PONY='pink'")
     elif modification == "removefile":
         os.remove(os.path.join(workspace, "usr", "bin", "hello"))
     elif modification == "modifyfile":
-        with open(os.path.join(workspace, "usr", "bin", "hello"), "w") as f:
+        with open(os.path.join(workspace, "usr", "bin", "hello"), "w", encoding="utf-8") as f:
             f.write("cookie")
     else:
         # This cannot be reached
@@ -802,7 +802,7 @@
     elif modification == "removefile":
         assert not os.path.exists(os.path.join(checkout, "usr", "bin", "hello"))
     elif modification == "modifyfile":
-        with open(os.path.join(workspace, "usr", "bin", "hello"), "r") as f:
+        with open(os.path.join(workspace, "usr", "bin", "hello"), "r", encoding="utf-8") as f:
             data = f.read()
             assert data == "cookie"
     else:
@@ -930,7 +930,7 @@
     # Modify workspace
     shutil.rmtree(os.path.join(workspace, "usr", "bin"))
     os.makedirs(os.path.join(workspace, "etc"))
-    with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
+    with open(os.path.join(workspace, "etc", "pony.conf"), "w", encoding="utf-8") as f:
         f.write("PONY='pink'")
 
     # Configure strict mode
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index f385510..2003328 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -209,7 +209,7 @@
     #
     filename = os.path.join(checkout, "etc", "test.conf")
     assert os.path.exists(filename)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf-8") as f:
         data = f.read()
         data = data.strip()
         assert data == "pony"
diff --git a/tests/integration/manual.py b/tests/integration/manual.py
index 22b87fb..c538be9 100644
--- a/tests/integration/manual.py
+++ b/tests/integration/manual.py
@@ -58,7 +58,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert (
@@ -89,7 +89,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "2\n"
@@ -117,7 +117,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert (
@@ -179,7 +179,7 @@
     result.assert_success()
     result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     result.assert_success()
-    with open(os.path.join(checkout, "hello")) as f:
+    with open(os.path.join(checkout, "hello"), encoding="utf-8") as f:
         assert f.read() == "hello from root\n"
 
     # Now, change element configuration to have a different command-subdir.
@@ -202,7 +202,7 @@
     shutil.rmtree(checkout)
     result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     result.assert_success()
-    with open(os.path.join(checkout, "hello")) as f:
+    with open(os.path.join(checkout, "hello"), encoding="utf-8") as f:
         assert f.read() == "hello from subdir\n"
 
 
@@ -221,5 +221,5 @@
     )
     result.assert_success()
 
-    with open(os.path.join(checkout, "test.txt")) as f:
+    with open(os.path.join(checkout, "test.txt"), encoding="utf-8") as f:
         assert f.read() == "This is another test\n"
diff --git a/tests/integration/script.py b/tests/integration/script.py
index 35a3fdd..eb161c4 100644
--- a/tests/integration/script.py
+++ b/tests/integration/script.py
@@ -52,7 +52,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "Hi\n"
@@ -87,7 +87,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "I can write to root\n"
@@ -140,7 +140,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "test\n"
@@ -159,7 +159,7 @@
     cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "test")) as f:
+    with open(os.path.join(checkout, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "Hi\n"
@@ -186,7 +186,7 @@
     )
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout_original, "canary")) as f:
+    with open(os.path.join(checkout_original, "canary"), encoding="utf-8") as f:
         assert f.read() == "alive\n"
 
     res = cli.run(project=project, args=["build", element_name])
@@ -195,7 +195,7 @@
     res = cli.run(project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_after])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout_after, "canary")) as f:
+    with open(os.path.join(checkout_after, "canary"), encoding="utf-8") as f:
         assert f.read() == "alive\n"
 
 
@@ -230,7 +230,7 @@
     )
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout_original, "canary")) as f:
+    with open(os.path.join(checkout_original, "canary"), encoding="utf-8") as f:
         assert f.read() == "alive\n"
 
     res = cli.run(project=project, args=["build", element_name])
@@ -239,5 +239,5 @@
     res = cli.run(project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_after])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout_after, "canary")) as f:
+    with open(os.path.join(checkout_after, "canary"), encoding="utf-8") as f:
         assert f.read() == "alive\n"
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index a030592..ed7ed65 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -295,7 +295,7 @@
     #
     workspace_hello_path = os.path.join(cli.directory, "workspace", "hello.c")
     assert os.path.exists(workspace_hello_path)
-    with open(workspace_hello_path, "r") as f:
+    with open(workspace_hello_path, "r", encoding="utf-8") as f:
         workspace_hello = f.read()
 
     # Cat the hello.c file from a bst shell command, and assert
@@ -332,7 +332,7 @@
         # Mutate the project.conf to use a default shell command
         project_file = os.path.join(project, "project.conf")
         config_text = "shell:\n  command: ['true']\n"
-        with open(project_file, "a") as f:
+        with open(project_file, "a", encoding="utf-8") as f:
             f.write(config_text)
 
     result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, element_name])
@@ -411,7 +411,7 @@
     # Create a file with unique contents such that it cannot be in the cache already
     test_filepath = os.path.join(project, "files", "hello.txt")
     test_message = "Hello World! {}".format(uuid.uuid4())
-    with open(test_filepath, "w") as f:
+    with open(test_filepath, "w", encoding="utf-8") as f:
         f.write(test_message)
     checksum = utils.sha256sum(test_filepath)
 
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index a69e55a..dcb7798 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -15,7 +15,7 @@
 def create_test_file(*path, mode=0o644, content="content\n"):
     path = os.path.join(*path)
     os.makedirs(os.path.dirname(path), exist_ok=True)
-    with open(path, "w") as f:
+    with open(path, "w", encoding="utf-8") as f:
         f.write(content)
         os.fchmod(f.fileno(), mode)
 
@@ -65,7 +65,7 @@
             result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir])
             result.assert_success()
 
-            with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
+            with open(os.path.join(checkoutdir, "ls-l"), "r", encoding="utf-8") as f:
                 for line in f.readlines():
                     test_values.append(line.split()[0] + " " + line.split()[-1])
                 return test_values
diff --git a/tests/integration/stack.py b/tests/integration/stack.py
index bad807f..c44d1e0 100644
--- a/tests/integration/stack.py
+++ b/tests/integration/stack.py
@@ -27,10 +27,10 @@
     cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
     assert res.exit_code == 0
 
-    with open(os.path.join(checkout, "hi")) as f:
+    with open(os.path.join(checkout, "hi"), encoding="utf-8") as f:
         hi = f.read()
 
-    with open(os.path.join(checkout, "another-hi")) as f:
+    with open(os.path.join(checkout, "another-hi"), encoding="utf-8") as f:
         another_hi = f.read()
 
     assert hi == "Hi\n"
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index fab45b1..754a492 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -180,7 +180,7 @@
     _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
 
     # And our build fails!
-    with open(os.path.join(workspace, "Makefile"), "a") as f:
+    with open(os.path.join(workspace, "Makefile"), "a", encoding="utf-8") as f:
         f.write("\texit 1")
 
     res = cli.run(project=project, args=["build", element_name])
@@ -195,9 +195,9 @@
     _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
 
     # And fix the source
-    with open(os.path.join(workspace, "Makefile"), "r") as f:
+    with open(os.path.join(workspace, "Makefile"), "r", encoding="utf-8") as f:
         makefile = f.readlines()
-    with open(os.path.join(workspace, "Makefile"), "w") as f:
+    with open(os.path.join(workspace, "Makefile"), "w", encoding="utf-8") as f:
         f.write("\n".join(makefile[:-1]))
 
     # Since buildstream thinks hello.txt did not change, we could end
@@ -299,7 +299,7 @@
     assert not "./prepared-again" in files
 
     # Add file to workspace to trigger an (incremental) build
-    with open(os.path.join(workspace, "newfile"), "w"):
+    with open(os.path.join(workspace, "newfile"), "w", encoding="utf-8"):
         pass
 
     # When we build again, the configure commands should not be
@@ -420,7 +420,7 @@
     wait_for_cache_granularity()
 
     # Replace source file contents with '2'
-    with open(os.path.join(workspace, "source"), "w") as f:
+    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
         f.write("2")
 
     # Perform incremental build of the workspace
@@ -437,7 +437,7 @@
 
     # Replace source file contents with '3', however, set an old mtime such
     # that `make` will not pick up the change
-    with open(os.path.join(workspace, "source"), "w") as f:
+    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
         f.write("3")
     os.utime(os.path.join(workspace, "source"), (BST_ARBITRARY_TIMESTAMP, BST_ARBITRARY_TIMESTAMP))
 
@@ -488,7 +488,7 @@
 
     # Delete source1 and replace source2 file contents with '2'
     os.unlink(os.path.join(workspace, "source1"))
-    with open(os.path.join(workspace, "source2"), "w") as f:
+    with open(os.path.join(workspace, "source2"), "w", encoding="utf-8") as f:
         f.write("2")
 
     # Perform incremental build of the workspace
@@ -499,7 +499,7 @@
     wait_for_cache_granularity()
 
     # Recreate source1 file
-    with open(os.path.join(workspace, "source1"), "w") as f:
+    with open(os.path.join(workspace, "source1"), "w", encoding="utf-8") as f:
         f.write("2")
 
     # Perform incremental build of the workspace
diff --git a/tests/internals/storage_vdir_import.py b/tests/internals/storage_vdir_import.py
index ff2b140..dba8883 100644
--- a/tests/internals/storage_vdir_import.py
+++ b/tests/internals/storage_vdir_import.py
@@ -68,7 +68,7 @@
             (dirnames, filename) = os.path.split(path)
             os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
             fullpath = os.path.join(rootdir, dirnames, filename)
-            with open(fullpath, "wt") as f:
+            with open(fullpath, "wt", encoding="utf-8") as f:
                 f.write(content)
             # set file mtime to arbitrary
             _set_file_mtime(fullpath, _parse_timestamp(TIMESTAMP))
@@ -103,7 +103,7 @@
             os.makedirs(target)
             locations.append(os.path.join(location, thingname))
         elif thing == "file":
-            with open(target, "wt") as f:
+            with open(target, "wt", encoding="utf-8") as f:
                 f.write("This is node {}\n".format(i))
             _set_file_mtime(target, _parse_timestamp(TIMESTAMP))
         elif thing == "link":
@@ -121,7 +121,7 @@
 
 
 def file_contents(path):
-    with open(path, "r") as f:
+    with open(path, "r", encoding="utf-8") as f:
         result = f.read()
     return result
 
@@ -306,7 +306,7 @@
         d.import_files(test_dir)
         digest = d.descend("a", "l").index["g"].get_digest()
 
-        with open(cas_cache.objpath(digest)) as fp:
+        with open(cas_cache.objpath(digest), encoding="utf-8") as fp:
             content = fp.read()
         assert Content_to_check == content
     finally:
@@ -363,7 +363,7 @@
         d.import_files(test_dir)
 
         digest = d.descend("a", "l", follow_symlinks=True).index["file"].get_digest()
-        with open(cas_cache.objpath(digest)) as fp:
+        with open(cas_cache.objpath(digest), encoding="utf-8") as fp:
             content = fp.read()
         assert Content_to_check == content
     finally:
@@ -391,7 +391,7 @@
 
         digest = d.descend("a", "l", follow_symlinks=True).index["file"].get_digest()
 
-        with open(cas_cache.objpath(digest)) as fp:
+        with open(cas_cache.objpath(digest), encoding="utf-8") as fp:
             content = fp.read()
         assert Content_to_check == content
     finally:
diff --git a/tests/internals/utils_save_atomic.py b/tests/internals/utils_save_atomic.py
index 8982860..197a90c 100644
--- a/tests/internals/utils_save_atomic.py
+++ b/tests/internals/utils_save_atomic.py
@@ -10,21 +10,21 @@
         f.write("foo\n")
 
     assert os.listdir(str(tmpdir)) == ["savefile-success.test"]
-    with open(filename) as f:
+    with open(filename, encoding="utf-8") as f:
         assert f.read() == "foo\n"
 
 
 def test_save_over_existing_file(tmpdir):
     filename = os.path.join(str(tmpdir), "savefile-overwrite.test")
 
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("existing contents\n")
 
     with save_file_atomic(filename, "w") as f:
         f.write("overwritten contents\n")
 
     assert os.listdir(str(tmpdir)) == ["savefile-overwrite.test"]
-    with open(filename) as f:
+    with open(filename, encoding="utf-8") as f:
         assert f.read() == "overwritten contents\n"
 
 
@@ -42,7 +42,7 @@
 def test_exception_existing_file(tmpdir):
     filename = os.path.join(str(tmpdir), "savefile-existing.test")
 
-    with open(filename, "w") as f:
+    with open(filename, "w", encoding="utf-8") as f:
         f.write("existing contents\n")
 
     with pytest.raises(RuntimeError):
@@ -51,7 +51,7 @@
             raise RuntimeError("Something goes wrong")
 
     assert os.listdir(str(tmpdir)) == ["savefile-existing.test"]
-    with open(filename) as f:
+    with open(filename, encoding="utf-8") as f:
         assert f.read() == "existing contents\n"
 
 
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 4585ef7..452906f 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -400,11 +400,16 @@
     assert exc.value.reason == LoadErrorReason.INVALID_DATA
 
 
+# This test has been broken by upstream ruamel.yaml; an issue has been filed here:
+#
+#    https://sourceforge.net/p/ruamel-yaml/tickets/390/
+#
+@pytest.mark.xfail(reason="recent versions of ruamel.yaml have broken roundtripping perfection")
 @pytest.mark.datafiles(os.path.join(DATA_DIR))
 @pytest.mark.parametrize("fromdisk", [(True), (False)])
 def test_roundtrip_dump(datafiles, fromdisk):
     filename = os.path.join(datafiles.dirname, datafiles.basename, "roundtrip-test.yaml")
-    with open(filename, "r") as fh:
+    with open(filename, "r", encoding="utf-8") as fh:
         rt_raw = fh.read()
     if fromdisk:
         rt_loaded = _yaml.roundtrip_load(filename)
diff --git a/tests/plugins/loading.py b/tests/plugins/loading.py
index 7aeb242..2c2058c 100644
--- a/tests/plugins/loading.py
+++ b/tests/plugins/loading.py
@@ -679,29 +679,12 @@
 @pytest.mark.datafiles(DATA_DIR)
 @pytest.mark.parametrize(
     "plugin_type,provenance",
-    [("elements", "project.conf [line 10 column 2]"), ("sources", "project.conf [line 10 column 2]")],
+    [("elements", "project.conf [line 12 column 2]"), ("sources", "project.conf [line 12 column 2]")],
 )
 def test_junction_invalid_full_path(cli, datafiles, plugin_type, provenance):
     project = str(datafiles)
-    subproject = os.path.join(project, "subproject")
-    subsubproject = os.path.join(subproject, "subsubproject")
 
-    shutil.copytree(os.path.join(project, "plugins"), os.path.join(subsubproject, "plugins"))
-
-    # The toplevel says to search for the "notfound" plugin in the subproject
-    #
-    update_project(
-        project,
-        {
-            "plugins": [
-                {
-                    "origin": "junction",
-                    "junction": "subproject-junction.bst:pony-junction.bst",
-                    plugin_type: ["notfound"],
-                }
-            ]
-        },
-    )
+    shutil.copy(os.path.join(project, "not-found-{}.conf".format(plugin_type)), os.path.join(project, "project.conf"))
     setup_element(project, plugin_type, "notfound")
 
     result = cli.run(project=project, args=["show", "element.bst"])
diff --git a/tests/plugins/loading/not-found-elements.conf b/tests/plugins/loading/not-found-elements.conf
new file mode 100644
index 0000000..71e7ad9
--- /dev/null
+++ b/tests/plugins/loading/not-found-elements.conf
@@ -0,0 +1,15 @@
+# This project.conf gets rewritten for each plugin loading test
+name: test
+
+# Required BuildStream version
+min-version: 2.0
+
+# Subdirectory where elements are stored
+element-path: elements
+
+# Load non-existent element across junction boundaries
+plugins:
+- origin: junction
+  junction: subproject-junction.bst:pony-junction.bst
+  elements:
+  - notfound
diff --git a/tests/plugins/loading/not-found-sources.conf b/tests/plugins/loading/not-found-sources.conf
new file mode 100644
index 0000000..404a20a
--- /dev/null
+++ b/tests/plugins/loading/not-found-sources.conf
@@ -0,0 +1,15 @@
+# This project.conf gets rewritten for each plugin loading test
+name: test
+
+# Required BuildStream version
+min-version: 2.0
+
+# Subdirectory where elements are stored
+element-path: elements
+
+# Load non-existent source across junction boundaries
+plugins:
+- origin: junction
+  junction: subproject-junction.bst:pony-junction.bst
+  sources:
+  - notfound
diff --git a/tests/remoteexecution/junction.py b/tests/remoteexecution/junction.py
index 2b02616..a0a1293 100644
--- a/tests/remoteexecution/junction.py
+++ b/tests/remoteexecution/junction.py
@@ -57,10 +57,10 @@
 
     # ensure that the correct project directory is also listed in the junction
     subproject_conf = os.path.join(subproject_path, "project.conf")
-    with open(subproject_conf) as f:
+    with open(subproject_conf, encoding="utf-8") as f:
         config = f.read()
     config = config.format(project_dir=subproject_path)
-    with open(subproject_conf, "w") as f:
+    with open(subproject_conf, "w", encoding="utf-8") as f:
         f.write(config)
 
     # Create a trackable element to depend on the cross junction element,
diff --git a/tests/remoteexecution/workspace.py b/tests/remoteexecution/workspace.py
index cf85872..93397cd 100644
--- a/tests/remoteexecution/workspace.py
+++ b/tests/remoteexecution/workspace.py
@@ -210,7 +210,7 @@
     # add a file (asserting later that this is in the buildtree)
     newfile = "newfile.cfg"
     newfile_path = os.path.join(workspace, newfile)
-    with open(newfile_path, "w") as fdata:
+    with open(newfile_path, "w", encoding="utf-8") as fdata:
         fdata.write("somestring")
     input_files.append(os.sep + newfile)
 
@@ -252,7 +252,7 @@
 
     # buildmark time should be the same
     assert build_timemark == rebuild_timemark
-    assert all([rebuild_times[fname] == build_times[fname] for fname in rebuild_times]), "{}\n{}".format(
+    assert all(rebuild_time == build_times[fname] for fname, rebuild_time in rebuild_times.items()), "{}\n{}".format(
         rebuild_times, build_times
     )
 
@@ -267,9 +267,9 @@
 
     elif modification == "content":
         # change a source file (there's a race here but it's not serious)
-        with open(main_path, "r") as fdata:
+        with open(main_path, "r", encoding="utf-8") as fdata:
             data = fdata.readlines()
-        with open(main_path, "w") as fdata:
+        with open(main_path, "w", encoding="utf-8") as fdata:
             for line in data:
                 fdata.write(re.sub(r"Hello", "Goodbye", line))
         touched_time = int(os.stat(main_path).st_mtime)
@@ -294,7 +294,7 @@
     del rebuild_times[os.sep + BLDMARK]
 
     # check the times of the unmodified files
-    assert all([rebuild_times[fname] == build_times[fname] for fname in rebuild_times]), "{}\n{}".format(
+    assert all(rebuild_time == build_times[fname] for fname, rebuild_time in rebuild_times.items()), "{}\n{}".format(
         rebuild_times, build_times
     )
 
diff --git a/tests/sourcecache/cache.py b/tests/sourcecache/cache.py
index 34009fc..7b992e8 100644
--- a/tests/sourcecache/cache.py
+++ b/tests/sourcecache/cache.py
@@ -127,7 +127,7 @@
     assert len(os.listdir(elementsources_protos)) == 1
 
     # modify hello-patch file and check tracking updates refs
-    with open(os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a") as f:
+    with open(os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a", encoding="utf-8") as f:
         f.write("\nappending nonsense")
 
     res = cli.run(project=project_dir, args=["source", "track", element_name])
diff --git a/tests/sources/bzr.py b/tests/sources/bzr.py
index 2dcacfe..f0dad8f 100644
--- a/tests/sources/bzr.py
+++ b/tests/sources/bzr.py
@@ -34,7 +34,7 @@
     assert result.exit_code == 0
 
     # Assert we checked out the file as it was commited
-    with open(os.path.join(checkoutdir, "test")) as f:
+    with open(os.path.join(checkoutdir, "test"), encoding="utf-8") as f:
         text = f.read()
 
     assert text == "test\n"
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 861e70c..af28f92 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -744,7 +744,7 @@
     repofiles = os.path.join(str(tmpdir), "repofiles")
     os.makedirs(repofiles, exist_ok=True)
     file0 = os.path.join(repofiles, "file0")
-    with open(file0, "w") as f:
+    with open(file0, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     repo = create_repo("git", str(tmpdir))
@@ -759,13 +759,13 @@
     tag("uselesstag")
 
     file1 = os.path.join(str(tmpdir), "file1")
-    with open(file1, "w") as f:
+    with open(file1, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file1)
     tag("tag1")
 
     file2 = os.path.join(str(tmpdir), "file2")
-    with open(file2, "w") as f:
+    with open(file2, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.branch("branch2")
     repo.add_file(file2)
@@ -773,7 +773,7 @@
 
     repo.checkout("master")
     file3 = os.path.join(str(tmpdir), "file3")
-    with open(file3, "w") as f:
+    with open(file3, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file3)
 
@@ -854,7 +854,7 @@
     repofiles = os.path.join(str(tmpdir), "repofiles")
     os.makedirs(repofiles, exist_ok=True)
     file0 = os.path.join(repofiles, "file0")
-    with open(file0, "w") as f:
+    with open(file0, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     repo = create_repo("git", str(tmpdir))
@@ -869,19 +869,19 @@
     tag("uselesstag")
 
     file1 = os.path.join(str(tmpdir), "file1")
-    with open(file1, "w") as f:
+    with open(file1, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file1)
 
     file2 = os.path.join(str(tmpdir), "file2")
-    with open(file2, "w") as f:
+    with open(file2, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.branch("branch2")
     repo.add_file(file2)
 
     repo.checkout("master")
     file3 = os.path.join(str(tmpdir), "file3")
-    with open(file3, "w") as f:
+    with open(file3, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file3)
 
@@ -961,32 +961,32 @@
     repofiles = os.path.join(str(tmpdir), "repofiles")
     os.makedirs(repofiles, exist_ok=True)
     file0 = os.path.join(repofiles, "file0")
-    with open(file0, "w") as f:
+    with open(file0, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     repo = create_repo("git", str(tmpdir))
     repo.create(repofiles)
 
     file1 = os.path.join(str(tmpdir), "file1")
-    with open(file1, "w") as f:
+    with open(file1, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file1)
     repo.branch("branch")
     repo.checkout("master")
 
     file2 = os.path.join(str(tmpdir), "file2")
-    with open(file2, "w") as f:
+    with open(file2, "w", encoding="utf-8") as f:
         f.write("test\n")
     repo.add_file(file2)
 
     file3 = os.path.join(str(tmpdir), "file3")
-    with open(file3, "w") as f:
+    with open(file3, "w", encoding="utf-8") as f:
         f.write("test\n")
     branch_boundary = repo.add_file(file3)
 
     repo.checkout("branch")
     file4 = os.path.join(str(tmpdir), "file4")
-    with open(file4, "w") as f:
+    with open(file4, "w", encoding="utf-8") as f:
         f.write("test\n")
     tagged_ref = repo.add_file(file4)
     repo.add_annotated_tag("tag1", "tag1")
@@ -1034,7 +1034,7 @@
     repofiles = os.path.join(str(tmpdir), "repofiles")
     os.makedirs(repofiles, exist_ok=True)
     file0 = os.path.join(repofiles, "file0")
-    with open(file0, "w") as f:
+    with open(file0, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     repo = create_repo("git", str(tmpdir))
@@ -1074,7 +1074,7 @@
     repofiles = os.path.join(str(tmpdir), "repofiles")
     os.makedirs(repofiles, exist_ok=True)
     file0 = os.path.join(repofiles, "file0")
-    with open(file0, "w") as f:
+    with open(file0, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     repo = create_repo("git", str(tmpdir))
@@ -1090,7 +1090,7 @@
     repo.add_annotated_tag("tag", "tag")
 
     file1 = os.path.join(repofiles, "file1")
-    with open(file1, "w") as f:
+    with open(file1, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     ref = repo.add_file(file1)
@@ -1112,7 +1112,7 @@
     repo.checkout(top_commit)
 
     file2 = os.path.join(repofiles, "file2")
-    with open(file2, "w") as f:
+    with open(file2, "w", encoding="utf-8") as f:
         f.write("test\n")
 
     new_ref = repo.add_file(file2)
diff --git a/tests/sources/local.py b/tests/sources/local.py
index f68a5b3..bddc022 100644
--- a/tests/sources/local.py
+++ b/tests/sources/local.py
@@ -44,13 +44,13 @@
 def test_invalid_absolute_path(cli, datafiles):
     project = str(datafiles)
 
-    with open(os.path.join(project, "target.bst"), "r") as f:
+    with open(os.path.join(project, "target.bst"), "r", encoding="utf-8") as f:
         old_yaml = f.read()
 
     new_yaml = old_yaml.replace("file.txt", os.path.join(project, "file.txt"))
     assert old_yaml != new_yaml
 
-    with open(os.path.join(project, "target.bst"), "w") as f:
+    with open(os.path.join(project, "target.bst"), "w", encoding="utf-8") as f:
         f.write(new_yaml)
 
     result = cli.run(project=project, args=["show", "target.bst"])
@@ -159,7 +159,7 @@
     def create_test_file(*path, mode=0o644, content="content\n"):
         path = os.path.join(*path)
         os.makedirs(os.path.dirname(path), exist_ok=True)
-        with open(path, "w") as f:
+        with open(path, "w", encoding="utf-8") as f:
             f.write(content)
             os.fchmod(f.fileno(), mode)
 
diff --git a/tests/sources/patch.py b/tests/sources/patch.py
index 5392a64..e6af8df 100644
--- a/tests/sources/patch.py
+++ b/tests/sources/patch.py
@@ -40,12 +40,12 @@
 def test_invalid_absolute_path(cli, datafiles):
     project = str(datafiles)
 
-    with open(os.path.join(project, "target.bst"), "r") as f:
+    with open(os.path.join(project, "target.bst"), "r", encoding="utf-8") as f:
         old_yaml = f.read()
     new_yaml = old_yaml.replace("file_1.patch", os.path.join(project, "file_1.patch"))
     assert old_yaml != new_yaml
 
-    with open(os.path.join(project, "target.bst"), "w") as f:
+    with open(os.path.join(project, "target.bst"), "w", encoding="utf-8") as f:
         f.write(new_yaml)
 
     result = cli.run(project=project, args=["show", "target.bst"])
@@ -72,7 +72,7 @@
     result.assert_success()
 
     # Test the file.txt was patched and changed
-    with open(os.path.join(checkoutdir, "file.txt")) as f:
+    with open(os.path.join(checkoutdir, "file.txt"), encoding="utf-8") as f:
         assert f.read() == "This is text file with superpowers\n"
 
 
@@ -108,7 +108,7 @@
     result.assert_success()
 
     # Test the file.txt was patched and changed
-    with open(os.path.join(checkoutdir, "test-dir", "file.txt")) as f:
+    with open(os.path.join(checkoutdir, "test-dir", "file.txt"), encoding="utf-8") as f:
         assert f.read() == "This is text file in a directory with superpowers\n"
 
 
@@ -124,7 +124,7 @@
     result.assert_success()
 
     # Test the file.txt was patched and changed
-    with open(os.path.join(checkoutdir, "file.txt")) as f:
+    with open(os.path.join(checkoutdir, "file.txt"), encoding="utf-8") as f:
         assert f.read() == "This is text file with more superpowers\n"
 
 
@@ -140,5 +140,5 @@
     result.assert_success()
 
     # Test the file.txt was patched and changed
-    with open(os.path.join(checkoutdir, "file.txt")) as f:
+    with open(os.path.join(checkoutdir, "file.txt"), encoding="utf-8") as f:
         assert f.read() == "This is text file with superpowers\n"
diff --git a/tests/sources/previous_source_access.py b/tests/sources/previous_source_access.py
index 3c95d77..f242ac1 100644
--- a/tests/sources/previous_source_access.py
+++ b/tests/sources/previous_source_access.py
@@ -42,6 +42,6 @@
     # the same content
     assert os.path.exists(os.path.join(destpath, "file"))
     assert os.path.exists(os.path.join(destpath, "filetransform"))
-    with open(os.path.join(destpath, "file")) as file1:
-        with open(os.path.join(destpath, "filetransform")) as file2:
+    with open(os.path.join(destpath, "file"), encoding="utf-8") as file1:
+        with open(os.path.join(destpath, "filetransform"), encoding="utf-8") as file2:
             assert file1.read() == file2.read()
diff --git a/tests/testutils/filetypegenerator.py b/tests/testutils/filetypegenerator.py
index 732608c..bd14b87 100644
--- a/tests/testutils/filetypegenerator.py
+++ b/tests/testutils/filetypegenerator.py
@@ -39,7 +39,7 @@
 
     clean()
 
-    with open(path, "w"):
+    with open(path, "w", encoding="utf-8"):
         pass
     yield
     clean()
diff --git a/tests/testutils/python_repo.py b/tests/testutils/python_repo.py
index 07efa37..f0c7959 100644
--- a/tests/testutils/python_repo.py
+++ b/tests/testutils/python_repo.py
@@ -75,7 +75,7 @@
     #
     setup_file = os.path.join(tmpdir, "setup.py")
     pkgdirname = re.sub("[^0-9a-zA-Z]+", "", name)
-    with open(setup_file, "w") as f:
+    with open(setup_file, "w", encoding="utf-8") as f:
         f.write(SETUP_TEMPLATE.format(name=name, version=version, pkgdirname=pkgdirname, pkgdeps=dependencies))
     os.chmod(setup_file, 0o755)
 
@@ -83,7 +83,7 @@
     os.makedirs(package)
 
     main_file = os.path.join(package, "__init__.py")
-    with open(main_file, "w") as f:
+    with open(main_file, "w", encoding="utf-8") as f:
         f.write(INIT_TEMPLATE.format(name=name))
     os.chmod(main_file, 0o644)
 
@@ -102,7 +102,7 @@
 
     # add an index html page
     index_html = os.path.join(pypi_package, "index.html")
-    with open(index_html, "w") as f:
+    with open(index_html, "w", encoding="utf-8") as f:
         f.write(HTML_TEMPLATE.format(name=name, version=version))
 
     # copy generated tarfile to pypi package