Merge pull request #1576 from abderrahim/arch-test

_sandboxbwrap.py: try to support architectures that don't match host
diff --git a/.github/common.env b/.github/common.env
index 196752a..32d7fa7 100644
--- a/.github/common.env
+++ b/.github/common.env
@@ -1,5 +1,5 @@
 # Shared common variables
 
-CI_IMAGE_VERSION=master-241289109
-CI_TOXENV_MAIN=py36-nocover,py37-nocover,py38-nocover,py39-nocover
+CI_IMAGE_VERSION=master-443158932
+CI_TOXENV_MAIN=py36-nocover,py37-nocover,py38-nocover,py39-nocover,py310-nocover
 CI_TOXENV_ALL="${CI_TOXENV_MAIN}"
diff --git a/.github/compose/ci.docker-compose.yml b/.github/compose/ci.docker-compose.yml
index a3bc1f3..914459e 100644
--- a/.github/compose/ci.docker-compose.yml
+++ b/.github/compose/ci.docker-compose.yml
@@ -1,8 +1,8 @@
 version: '3.4'
 
 x-tests-template: &tests-template
-    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:32-${CI_IMAGE_VERSION:-latest}
-    command: tox -vvvvv -- --color=yes --integration -n 4
+    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:34-${CI_IMAGE_VERSION:-latest}
+    command: tox -vvvvv -- --color=yes --integration
     environment:
       TOXENV: ${CI_TOXENV_ALL}
 
@@ -22,26 +22,18 @@
 
 services:
 
-  fedora-32:
+  fedora-34:
     <<: *tests-template
-    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:32-${CI_IMAGE_VERSION:-latest}
+    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:34-${CI_IMAGE_VERSION:-latest}
 
-  fedora-33:
+  fedora-35:
     <<: *tests-template
-    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:33-${CI_IMAGE_VERSION:-latest}
+    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:35-${CI_IMAGE_VERSION:-latest}
 
   debian-10:
     <<: *tests-template
     image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-debian:10-${CI_IMAGE_VERSION:-latest}
 
-  ubuntu-18.04:
-    <<: *tests-template
-    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-ubuntu:18.04-${CI_IMAGE_VERSION:-latest}
-
-  centos-7.7.1908:
-    <<: *tests-template
-    image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-centos:7.7.1908-${CI_IMAGE_VERSION:-latest}
-
   docs:
     <<: *tests-template
     command: tox -e docs
diff --git a/.github/run-ci.sh b/.github/run-ci.sh
index 80b4a91..9a26ef9 100755
--- a/.github/run-ci.sh
+++ b/.github/run-ci.sh
@@ -67,10 +67,8 @@
 if [ -z "${test_names}" ]; then
     runTest "lint"
     runTest "debian-10"
-    runTest "fedora-32"
-    runTest "fedora-33"
-    runTest "ubuntu-18.04"
-    runTest "centos-7.7.1908"
+    runTest "fedora-34"
+    runTest "fedora-35"
 else
     for test_name in "${test_names}"; do
 	runTest "${test_name}"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 061904e..aa43865 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,10 +26,8 @@
         # "../compose/ci.docker-compose.yml"
         test-name:
           - debian-10
-          - fedora-32
-          - fedora-33
-          - ubuntu-18.04
-          - centos-7.7.1908
+          - fedora-34
+          - fedora-35
           - lint
 
     steps:
diff --git a/.pylintrc b/.pylintrc
index a273725..594187f 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -72,6 +72,7 @@
         # Messages that are of no use to us #
         #####################################
         ,
+        consider-using-f-string,
         fixme,
         missing-docstring,
         no-self-use,
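The pylint pin in dev-requirements.txt moves from 2.10.2 to 2.12.2 further down, which brings in the newer consider-using-f-string check; since the codebase still uses str.format() throughout (as the hunks below show), the message is silenced here rather than rewriting every call. For illustration only, pylint would flag the first form below and suggest the second; both are equivalent:

    # Illustrative only: the style pylint's consider-using-f-string flags,
    # and the f-string it would suggest instead.
    ref, url = "abc123", "https://example.com/repo"
    msg = "Failed to fetch ref '{}' from origin: {}".format(ref, url)   # flagged
    msg = f"Failed to fetch ref '{ref}' from origin: {url}"             # suggested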
diff --git a/NEWS b/NEWS
index 04e4f27..c2078ec 100644
--- a/NEWS
+++ b/NEWS
@@ -6,6 +6,9 @@
 
   o script element plugin now supports `create-dev-shm`
 
+  o Python 3.6 is no longer tested in CI, but support is maintained on a
+    best-effort basis.
+
 =================
 buildstream 1.6.3
 =================
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index fc873e4..289ce9e 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -448,6 +448,10 @@
                 self._cache_size = stored_size
             else:
                 self.compute_cache_size()
+                # Computing the cache size doesn't actually write the value
+                # to disk. Write the cache size explicitly here, since
+                # otherwise in some cases it is never stored on disk.
+                self.set_cache_size(self._cache_size)
 
         return self._cache_size
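For context, the hunk above only shows the tail of get_cache_size(); a hedged reconstruction of the intended flow (helper names mirror those visible in the patch, the surrounding structure is an assumption):

    # Sketch only: read the stored size if present, otherwise compute it
    # and persist it immediately so the next run finds it on disk.
    def get_cache_size(self):
        if self._cache_size is None:
            stored_size = self._read_cache_size()        # None if unreadable
            if stored_size:
                self._cache_size = stored_size
            else:
                self.compute_cache_size()                # only updates the in-memory value
                self.set_cache_size(self._cache_size)    # also writes the size file
        return self._cache_size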
 
@@ -865,19 +869,21 @@
     def _read_cache_size(self):
         size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
 
-        if not os.path.exists(size_file_path):
+        try:
+            with open(size_file_path, "r", encoding="utf-8") as f:
+                size = f.read()
+        except FileNotFoundError:
             return None
 
-        with open(size_file_path, "r", encoding="utf-8") as f:
-            size = f.read()
-
         try:
             num_size = int(size)
-        except ValueError as e:
-            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
-                size, size_file_path)) from e
-
-        return num_size
+        except ValueError:
+            self._message(MessageType.WARN, "Failure resolving cache size",
+                          detail="Size '{}' parsed from '{}' was not an integer"
+                          .format(size, size_file_path))
+            return None
+        else:
+            return num_size
 
     # _calculate_cache_quota()
     #
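_read_cache_size() now opens the file and handles FileNotFoundError instead of racing a separate exists() check, and a malformed size degrades to a warning plus None rather than an ArtifactError. A standalone sketch of the same EAFP shape, with a plain callable standing in for the plugin's messaging API (self._message(MessageType.WARN, ...) in the patch):

    import os

    def read_cache_size(size_file_path, warn):
        # EAFP: a missing file and unparsable content both yield None.
        try:
            with open(size_file_path, "r", encoding="utf-8") as f:
                size = f.read()
        except FileNotFoundError:
            return None
        try:
            return int(size)
        except ValueError:
            warn("Size '{}' parsed from '{}' was not an integer".format(size, size_file_path))
            return None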
diff --git a/buildstream/_fuse/hardlinks.py b/buildstream/_fuse/hardlinks.py
index b921a00..d4a719e 100644
--- a/buildstream/_fuse/hardlinks.py
+++ b/buildstream/_fuse/hardlinks.py
@@ -66,26 +66,30 @@
         path = os.path.join(self.root, partial)
         return path
 
-    def _ensure_copy(self, full_path):
+    def _ensure_copy(self, full_path, follow_symlinks=True):
         try:
-            # Follow symbolic links manually here
-            real_path = os.path.realpath(full_path)
-            file_stat = os.stat(real_path)
+            if follow_symlinks:
+                # Follow symbolic links manually here
+                real_path = os.path.realpath(full_path)
+            else:
+                real_path = full_path
 
-            # Dont bother with files that cannot be hardlinked, oddly it
-            # directories actually usually have st_nlink > 1 so just avoid
-            # that.
+            file_stat = os.stat(real_path, follow_symlinks=False)
+
+            # Skip the file if it's not a hardlink
+            if file_stat.st_nlink <= 1:
+                return
+
+            # Directories routinely have st_nlink > 1 even though they cannot
+            # be hardlinked ('.' and '..' entries count as links), so skip those.
             #
-            # We already wont get symlinks here, and stat will throw
-            # the FileNotFoundError below if a followed symlink did not exist.
-            #
-            if not stat.S_ISDIR(file_stat.st_mode) and file_stat.st_nlink > 1:
+            if not stat.S_ISDIR(file_stat.st_mode):
                 with tempfile.TemporaryDirectory(dir=self.tmp) as tempdir:
                     basename = os.path.basename(real_path)
                     temp_path = os.path.join(tempdir, basename)
 
                     # First copy, then unlink origin and rename
-                    shutil.copy2(real_path, temp_path)
+                    shutil.copy2(real_path, temp_path, follow_symlinks=False)
                     os.unlink(real_path)
                     os.rename(temp_path, real_path)
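_ensure_copy() breaks a hardlink by copying the file aside and renaming the copy back over the original, so later modifications through the FUSE mount no longer leak into the other links. A self-contained sketch of that copy/unlink/rename step, outside the FUSE class:

    import os
    import shutil
    import stat
    import tempfile

    def break_hardlink(path, tmp):
        # Sketch only: turn a multiply-linked regular file into a private copy.
        st = os.stat(path, follow_symlinks=False)
        if st.st_nlink <= 1 or stat.S_ISDIR(st.st_mode):
            return                                                # nothing to do
        with tempfile.TemporaryDirectory(dir=tmp) as tempdir:
            temp_path = os.path.join(tempdir, os.path.basename(path))
            shutil.copy2(path, temp_path, follow_symlinks=False)  # copy first
            os.unlink(path)                                       # drop the old link
            os.rename(temp_path, path)                            # move the copy into place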
 
@@ -113,24 +117,31 @@
         full_path = self._full_path(path)
 
         # Ensure copies on chown
-        self._ensure_copy(full_path)
-        return os.chown(full_path, uid, gid)
+        self._ensure_copy(full_path, follow_symlinks=False)
+        return os.chown(full_path, uid, gid, follow_symlinks=False)
 
     def getattr(self, path, fh=None):
         full_path = self._full_path(path)
         st = os.lstat(full_path)
         return dict((key, getattr(st, key)) for key in (
             'st_atime', 'st_ctime', 'st_gid', 'st_mode',
-            'st_mtime', 'st_nlink', 'st_size', 'st_uid'))
+            'st_mtime', 'st_nlink', 'st_size', 'st_uid',
+            'st_ino'))
 
     def readdir(self, path, fh):
         full_path = self._full_path(path)
 
-        dirents = ['.', '..']
+        dir_entries = ['.', '..']
         if os.path.isdir(full_path):
-            dirents.extend(os.listdir(full_path))
-        for r in dirents:
-            yield r
+            dir_entries.extend(os.listdir(full_path))
+        for entry in dir_entries:
+            entry_full_path = os.path.join(full_path, entry)
+            st = os.stat(entry_full_path, follow_symlinks=False)
+
+            attrs = dict((key, getattr(st, key)) for key in (
+                'st_ino', 'st_mode'))
+
+            yield entry, attrs, 0
 
     def readlink(self, path):
         pathname = os.readlink(self._full_path(path))
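readdir() now yields (name, attrs, offset) triples instead of bare names so that real inode numbers reach the kernel; the companion mount.py change below passes use_ino=True so libfuse actually uses them instead of synthesizing its own. In fusepy, readdir entries may be either plain names or such triples. A minimal standalone sketch of the new shape:

    import os

    def readdir_with_inodes(full_path):
        # Sketch only: yield fusepy-style (name, attrs, offset) entries.
        entries = ['.', '..']
        if os.path.isdir(full_path):
            entries.extend(os.listdir(full_path))
        for entry in entries:
            st = os.stat(os.path.join(full_path, entry), follow_symlinks=False)
            attrs = {'st_ino': st.st_ino, 'st_mode': st.st_mode}
            yield entry, attrs, 0        # offset 0: no directory paging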
diff --git a/buildstream/_fuse/mount.py b/buildstream/_fuse/mount.py
index 83b3ba9..ca3b944 100644
--- a/buildstream/_fuse/mount.py
+++ b/buildstream/_fuse/mount.py
@@ -184,7 +184,7 @@
         # Run fuse in foreground in this child process, internally libfuse
         # will handle SIGTERM and gracefully exit it's own little main loop.
         #
-        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True)
+        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True, use_ino=True)
 
         # Explicit 0 exit code, if the operations crashed for some reason, the exit
         # code will not be 0, and we want to know about it.
diff --git a/buildstream/_ostree.py b/buildstream/_ostree.py
deleted file mode 100644
index 40fe2c2..0000000
--- a/buildstream/_ostree.py
+++ /dev/null
@@ -1,289 +0,0 @@
-#
-#  Copyright (C) 2017 Codethink Limited
-#
-#  This program is free software; you can redistribute it and/or
-#  modify it under the terms of the GNU Lesser General Public
-#  License as published by the Free Software Foundation; either
-#  version 2 of the License, or (at your option) any later version.
-#
-#  This library is distributed in the hope that it will be useful,
-#  but WITHOUT ANY WARRANTY; without even the implied warranty of
-#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
-#  Lesser General Public License for more details.
-#
-#  You should have received a copy of the GNU Lesser General Public
-#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
-#
-#  Authors:
-#        Jürg Billeter <juerg.billeter@codethink.co.uk>
-#        Andrew Leeming <andrew.leeming@codethink.co.uk>
-#        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-#
-# Code based on Jürg's artifact cache and Andrew's ostree plugin
-#
-
-# Disable pylint warnings that are not appicable to this module
-# pylint: disable=bad-exception-context,catching-non-exception
-
-import os
-
-import gi  # pylint: disable=import-error
-from gi.repository.GLib import Variant, VariantDict
-
-from ._exceptions import BstError, ErrorDomain
-
-# pylint: disable=wrong-import-position,wrong-import-order
-gi.require_version('OSTree', '1.0')
-from gi.repository import GLib, Gio, OSTree  # nopep8
-
-
-# For users of this file, they must expect (except) it.
-class OSTreeError(BstError):
-    def __init__(self, message, reason=None):
-        super().__init__(message, domain=ErrorDomain.UTIL, reason=reason)
-
-
-# ensure()
-#
-# Args:
-#    path (str): The file path to where the desired repo should be
-#    compress (bool): use compression or not when creating
-#
-# Returns: an OSTree.Repo
-def ensure(path, compress):
-
-    # create also succeeds on existing repository
-    repo = OSTree.Repo.new(Gio.File.new_for_path(path))
-    mode = OSTree.RepoMode.ARCHIVE_Z2 if compress \
-        else OSTree.RepoMode.BARE_USER
-
-    repo.create(mode)
-
-    # Disble OSTree's built in minimum-disk-space check.
-    config = repo.copy_config()
-    config.set_string('core', 'min-free-space-percent', '0')
-    repo.write_config(config)
-    repo.reload_config()
-
-    return repo
-
-
-# checkout()
-#
-# Checkout the content at 'commit' from 'repo' in
-# the specified 'path'
-#
-# Args:
-#    repo (OSTree.Repo): The repo
-#    path (str): The checkout path
-#    commit_ (str): The commit checksum to checkout
-#    user (boot): Whether to checkout in user mode
-#
-def checkout(repo, path, commit_, user=False):
-
-    # Check out a full copy of an OSTree at a given ref to some directory.
-    #
-    # Note: OSTree does not like updating directories inline/sync, therefore
-    # make sure you checkout to a clean directory or add additional code to support
-    # union mode or (if it exists) file replacement/update.
-    #
-    # Returns True on success
-    #
-    # cli exmaple:
-    #   ostree --repo=repo checkout --user-mode runtime/org.freedesktop.Sdk/x86_64/1.4 foo
-    os.makedirs(os.path.dirname(path), exist_ok=True)
-
-    options = OSTree.RepoCheckoutAtOptions()
-
-    # For repos which contain root owned files, we need
-    # to checkout with OSTree.RepoCheckoutMode.USER
-    #
-    # This will reassign uid/gid and also munge the
-    # permission bits a bit.
-    if user:
-        options.mode = OSTree.RepoCheckoutMode.USER
-
-    # Using AT_FDCWD value from fcntl.h
-    #
-    # This will be ignored if the passed path is an absolute path,
-    # if path is a relative path then it will be appended to the
-    # current working directory.
-    AT_FDCWD = -100
-    try:
-        repo.checkout_at(options, AT_FDCWD, path, commit_)
-    except GLib.GError as e:
-        raise OSTreeError("Failed to checkout commit '{}': {}".format(commit_, e.message)) from e
-
-
-# exists():
-#
-# Checks wether a given commit or symbolic ref exists and
-# is locally cached in the specified repo.
-#
-# Args:
-#    repo (OSTree.Repo): The repo
-#    ref (str): A commit checksum or symbolic ref
-#
-# Returns:
-#    (bool): Whether 'ref' is valid in 'repo'
-#
-def exists(repo, ref):
-
-    # Get the commit checksum, this will:
-    #
-    #  o Return a commit checksum if ref is a symbolic branch
-    #  o Return the same commit checksum if ref is a valid commit checksum
-    #  o Return None if the ostree repo doesnt know this ref.
-    #
-    ref = checksum(repo, ref)
-    if ref is None:
-        return False
-
-    # If we do have a ref which the ostree knows about, this does
-    # not mean we necessarily have the object locally (we may just
-    # have some metadata about it, this can happen).
-    #
-    # Use has_object() only with a resolved valid commit checksum
-    # to check if we actually have the object locally.
-    _, has_object = repo.has_object(OSTree.ObjectType.COMMIT, ref, None)
-    return has_object
-
-
-# checksum():
-#
-# Returns the commit checksum for a given symbolic ref,
-# which might be a branch or tag. If it is a branch,
-# the latest commit checksum for the given branch is returned.
-#
-# Args:
-#    repo (OSTree.Repo): The repo
-#    ref (str): The symbolic ref
-#
-# Returns:
-#    (str): The commit checksum, or None if ref does not exist.
-#
-def checksum(repo, ref):
-
-    _, checksum_ = repo.resolve_rev(ref, True)
-    return checksum_
-
-
-# fetch()
-#
-# Fetch new objects from a remote, if configured
-#
-# Args:
-#    repo (OSTree.Repo): The repo
-#    remote (str): An optional remote name, defaults to 'origin'
-#    ref (str): An optional ref to fetch, will reduce the amount of objects fetched
-#    progress (callable): An optional progress callback
-#
-# Note that a commit checksum or a branch reference are both
-# valid options for the 'ref' parameter. Using the ref parameter
-# can save a lot of bandwidth but mirroring the full repo is
-# still possible.
-#
-def fetch(repo, remote="origin", ref=None, progress=None):
-    # Fetch metadata of the repo from a remote
-    #
-    # cli example:
-    #  ostree --repo=repo pull --mirror freedesktop:runtime/org.freedesktop.Sdk/x86_64/1.4
-    def progress_callback(info):
-        status = async_progress.get_status()
-        outstanding_fetches = async_progress.get_uint('outstanding-fetches')
-        bytes_transferred = async_progress.get_uint64('bytes-transferred')
-        fetched = async_progress.get_uint('fetched')
-        requested = async_progress.get_uint('requested')
-
-        if status:
-            progress(0.0, status)
-        elif outstanding_fetches > 0:
-            formatted_bytes = GLib.format_size_full(bytes_transferred, 0)
-            if requested == 0:
-                percent = 0.0
-            else:
-                percent = (fetched * 1.0 / requested) * 100
-
-            progress(percent,
-                     "Receiving objects: {:d}% ({:d}/{:d}) {}".format(int(percent), fetched,
-                                                                      requested, formatted_bytes))
-        else:
-            progress(100.0, "Writing Objects")
-
-    async_progress = None
-    if progress is not None:
-        async_progress = OSTree.AsyncProgress.new()
-        async_progress.connect('changed', progress_callback)
-
-    # FIXME: This hangs the process and ignores keyboard interrupt,
-    #        fix this using the Gio.Cancellable
-    refs = None
-    if ref is not None:
-        refs = [ref]
-
-    try:
-        repo.pull(remote,
-                  refs,
-                  OSTree.RepoPullFlags.MIRROR,
-                  async_progress,
-                  None)  # Gio.Cancellable
-    except GLib.GError as e:
-        if ref is not None:
-            raise OSTreeError("Failed to fetch ref '{}' from '{}': {}".format(ref, remote, e.message)) from e
-        raise OSTreeError("Failed to fetch from '{}': {}".format(remote, e.message)) from e
-
-
-# configure_remote():
-#
-# Ensures a remote is setup to a given url.
-#
-# Args:
-#    repo (OSTree.Repo): The repo
-#    remote (str): The name of the remote
-#    url (str): The url of the remote ostree repo
-#    key_url (str): The optional url of a GPG key (should be a local file)
-#
-def configure_remote(repo, remote, url, key_url=None):
-    # Add a remote OSTree repo. If no key is given, we disable gpg checking.
-    #
-    # cli exmaple:
-    #   wget https://sdk.gnome.org/keys/gnome-sdk.gpg
-    #   ostree --repo=repo --gpg-import=gnome-sdk.gpg remote add freedesktop https://sdk.gnome.org/repo
-    options = None  # or GLib.Variant of type a{sv}
-    if key_url is None:
-        vd = VariantDict.new()
-        vd.insert_value('gpg-verify', Variant.new_boolean(False))
-        options = vd.end()
-
-    try:
-        repo.remote_change(None,      # Optional OSTree.Sysroot
-                           OSTree.RepoRemoteChange.ADD_IF_NOT_EXISTS,
-                           remote,    # Remote name
-                           url,       # Remote url
-                           options,   # Remote options
-                           None)      # Optional Gio.Cancellable
-    except GLib.GError as e:
-        raise OSTreeError("Failed to configure remote '{}': {}".format(remote, e.message)) from e
-
-    # Remote needs to exist before adding key
-    if key_url is not None:
-        try:
-            gfile = Gio.File.new_for_uri(key_url)
-            stream = gfile.read()
-
-            # In ostree commit `v2019.2-10-gaa5df899`, the python
-            # facing API was changed by way of modifying the
-            # instrospection annotations.
-            #
-            # This means we need to call this API in two different
-            # ways depending on which ostree version is installed.
-            #
-            try:
-                # New API
-                repo.remote_gpg_import(remote, stream, None, None)
-            except TypeError:
-                # Old API
-                repo.remote_gpg_import(remote, stream, None, 0, None)
-
-        except GLib.GError as e:
-            raise OSTreeError("Failed to add gpg key from url '{}': {}".format(key_url, e.message)) from e
diff --git a/buildstream/_signals.py b/buildstream/_signals.py
index 293e3b5..5e0d568 100644
--- a/buildstream/_signals.py
+++ b/buildstream/_signals.py
@@ -70,8 +70,6 @@
 #
 @contextmanager
 def terminator(terminate_func):
-    global terminator_stack                   # pylint: disable=global-statement
-
     # Signal handling only works in the main thread
     if threading.current_thread() != threading.main_thread():
         yield
@@ -135,8 +133,6 @@
 #
 @contextmanager
 def suspendable(suspend_callback, resume_callback):
-    global suspendable_stack                  # pylint: disable=global-statement
-
     outermost = not suspendable_stack
     suspender = Suspender(suspend_callback, resume_callback)
     suspendable_stack.append(suspender)
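The removed global statements were never needed: global is only required when a function rebinds the module-level name, not when it mutates the object the name already refers to (here, appending to the existing list). pylint's global-variable-not-assigned check also warns about this pattern. A tiny illustration of the distinction, independent of the BuildStream code:

    stack = []

    def push(item):
        stack.append(item)     # mutates the existing list; no 'global' needed

    def reset():
        global stack           # rebinding the name is what requires 'global'
        stack = []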
diff --git a/buildstream/plugins/sources/ostree.py b/buildstream/plugins/sources/ostree.py
index 526a91a..abcb16a 100644
--- a/buildstream/plugins/sources/ostree.py
+++ b/buildstream/plugins/sources/ostree.py
@@ -54,9 +54,7 @@
 import shutil
 
 from buildstream import Source, SourceError, Consistency
-from buildstream import _ostree
 from buildstream import utils
-from buildstream._ostree import OSTreeError
 
 
 class OSTreeSource(Source):
@@ -66,6 +64,8 @@
 
         self.node_validate(node, ['url', 'ref', 'track', 'gpg-key'] + Source.COMMON_CONFIG_KEYS)
 
+        self.ostree = None
+
         self.original_url = self.node_get_member(node, str, 'url')
         self.url = self.translate_url(self.original_url)
         self.ref = self.node_get_member(node, str, 'ref', None)
@@ -90,7 +90,8 @@
         self.repo = None
 
     def preflight(self):
-        pass
+        # Check if ostree is installed, get the binary at the same time
+        self.ostree = utils.get_host_tool("ostree")
 
     def get_unique_key(self):
         return [self.original_url, self.ref]
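With the PyGObject bindings gone, the plugin locates the ostree binary once in preflight() and then drives it as a subprocess through the Source plugin's call()/check_output() helpers, as the hunks below show. A hedged sketch of that preflight-then-call pattern (mirror_ref() is a made-up name used only for illustration; error details are elided):

    def preflight(self):
        # get_host_tool() returns the absolute path and errors out if the
        # tool is missing, so problems surface before any fetching starts.
        self.ostree = utils.get_host_tool("ostree")

    def mirror_ref(self, remote_name, ref):
        # call() runs the command and turns a non-zero exit status into an
        # error carrying the given 'fail' message.
        self.call(
            [self.ostree, "pull", "--repo", self.mirror, "--mirror", remote_name, ref],
            fail="Failed to fetch ref '{}' from origin {}".format(ref, remote_name),
        )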
@@ -105,53 +106,95 @@
         node['ref'] = self.ref = ref
 
     def track(self):
-        # If self.tracking is not specified its' not an error, just silently return
+        # If self.tracking is not specified it's not an error, just silently return
         if not self.tracking:
             return None
 
         self.ensure()
         remote_name = self.ensure_remote(self.url)
-        with self.timed_activity("Fetching tracking ref '{}' from origin: {}"
-                                 .format(self.tracking, self.url)):
-            try:
-                _ostree.fetch(self.repo, remote=remote_name, ref=self.tracking, progress=self.progress)
-            except OSTreeError as e:
-                raise SourceError("{}: Failed to fetch tracking ref '{}' from origin {}\n\n{}"
-                                  .format(self, self.tracking, self.url, e)) from e
+        with self.timed_activity(
+            "Fetching tracking ref '{}' from origin: {}".format(
+                self.tracking, self.url
+            )
+        ):
+            self.call(
+                [
+                    self.ostree,
+                    "pull",
+                    "--repo",
+                    self.mirror,
+                    "--mirror",
+                    remote_name,
+                    self.tracking,
+                ],
+                fail="Failed to fetch tracking ref '{}' from origin {}".format(
+                    self.tracking, self.url
+                ),
+            )
 
-        return _ostree.checksum(self.repo, self.tracking)
+        return self.check_output(
+            [self.ostree, "rev-parse", "--repo", self.mirror, self.tracking],
+            fail="Failed to compute checksum of '{}' on '{}'".format(
+                self.tracking, self.mirror
+            ),
+        )[1]
+
 
     def fetch(self):
         self.ensure()
+
         remote_name = self.ensure_remote(self.url)
-        if not _ostree.exists(self.repo, self.ref):
-            with self.timed_activity("Fetching remote ref: {} from origin: {}"
-                                     .format(self.ref, self.url)):
-                try:
-                    _ostree.fetch(self.repo, remote=remote_name, ref=self.ref, progress=self.progress)
-                except OSTreeError as e:
-                    raise SourceError("{}: Failed to fetch ref '{}' from origin: {}\n\n{}"
-                                      .format(self, self.ref, self.url, e)) from e
+        with self.timed_activity(
+            "Fetching remote ref: {} from origin: {}".format(
+                self.ref, self.url
+            )
+        ):
+            self.call(
+                [
+                    self.ostree,
+                    "pull",
+                    "--repo",
+                    self.mirror,
+                    "--mirror",
+                    remote_name,
+                    self.ref,
+                ],
+                fail="Failed to fetch ref '{}' from origin: {}".format(
+                    self.ref, remote_name
+                ),
+            )
+
 
     def stage(self, directory):
+
         self.ensure()
 
         # Checkout self.ref into the specified directory
         with self.tempdir() as tmpdir:
-            checkoutdir = os.path.join(tmpdir, 'checkout')
+            checkoutdir = os.path.join(tmpdir, "checkout")
 
-            with self.timed_activity("Staging ref: {} from origin: {}"
-                                     .format(self.ref, self.url)):
-                try:
-                    _ostree.checkout(self.repo, checkoutdir, self.ref, user=True)
-                except OSTreeError as e:
-                    raise SourceError("{}: Failed to checkout ref '{}' from origin: {}\n\n{}"
-                                      .format(self, self.ref, self.url, e)) from e
+            with self.timed_activity(
+                "Staging ref: {} from origin: {}".format(self.ref, self.url)
+            ):
+                self.call(
+                    [
+                        self.ostree,
+                        "checkout",
+                        "--repo",
+                        self.mirror,
+                        "--user-mode",
+                        self.ref,
+                        checkoutdir,
+                    ],
+                    fail="Failed to checkout ref '{}' from origin: {}".format(
+                        self.ref, self.url
+                    ),
+                )
 
             # The target directory is guaranteed to exist, here we must move the
             # content of out checkout into the existing target directory.
             #
-            # We may not be able to create the target directory as it's parent
+            # We may not be able to create the target directory as its parent
             # may be readonly, and the directory itself is often a mount point.
             #
             try:
@@ -159,45 +202,79 @@
                     source_path = os.path.join(checkoutdir, entry)
                     shutil.move(source_path, directory)
             except (shutil.Error, OSError) as e:
-                raise SourceError("{}: Failed to move ostree checkout {} from '{}' to '{}'\n\n{}"
-                                  .format(self, self.url, tmpdir, directory, e)) from e
+                raise SourceError(
+                    "{}: Failed to move ostree checkout {} from '{}' to '{}'\n\n{}".format(
+                        self, self.url, tmpdir, directory, e
+                    )
+                ) from e
+
 
     def get_consistency(self):
         if self.ref is None:
             return Consistency.INCONSISTENT
+        elif os.path.exists(self.mirror):
+            if self.call([self.ostree, "show", "--repo", self.mirror, self.ref]) == 0:
+                return Consistency.CACHED
 
-        self.ensure()
-        if _ostree.exists(self.repo, self.ref):
-            return Consistency.CACHED
         return Consistency.RESOLVED
 
     #
     # Local helpers
     #
     def ensure(self):
-        if not self.repo:
+        if not os.path.exists(self.mirror):
             self.status("Creating local mirror for {}".format(self.url))
+            self.call(
+                [
+                    self.ostree,
+                    "init",
+                    "--repo",
+                    self.mirror,
+                    "--mode",
+                    "archive-z2",
+                ],
+                fail="Unable to create local mirror for repository",
+            )
+            self.call(
+                [
+                    self.ostree,
+                    "config",
+                    "--repo",
+                    self.mirror,
+                    "set",
+                    "core.min-free-space-percent",
+                    "0",
+                ],
+                fail="Unable to disable minimum disk space checks",
+            )
 
-            self.repo = _ostree.ensure(self.mirror, True)
 
     def ensure_remote(self, url):
         if self.original_url == self.url:
-            remote_name = 'origin'
+            remote_name = "origin"
         else:
             remote_name = utils.url_directory_name(url)
 
-        gpg_key = None
-        if self.gpg_key_path:
-            gpg_key = 'file://' + self.gpg_key_path
+        command = [
+            self.ostree,
+            "remote",
+            "add",
+            "--if-not-exists",
+            "--repo",
+            self.mirror,
+            remote_name,
+            url,
+        ]
 
-        try:
-            _ostree.configure_remote(self.repo, remote_name, url, key_url=gpg_key)
-        except OSTreeError as e:
-            raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e
+        if self.gpg_key_path:
+            command.extend(["--gpg-import", self.gpg_key_path])
+        else:
+            command.extend(["--no-gpg-verify"])
+
+        self.call(command, fail="Failed to configure origin {}".format(url))
+
         return remote_name
 
-    def progress(self, percent, message):
-        self.status(message)
 
 
 # Plugin entry point
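Taken together, the reworked plugin reproduces the old gi-based behaviour with plain ostree invocations: initialise the mirror repo, configure a remote, pull the ref into the mirror, and check it out in user mode. A standalone sketch of that workflow using subprocess (names and error handling are simplified; the real plugin goes through self.call() with a fail= message as above):

    import os
    import subprocess

    def mirror_and_checkout(mirror, url, ref, checkoutdir, gpg_key=None):
        # Sketch of the CLI sequence driven by the plugin; assumes 'ostree'
        # is on PATH and that 'origin' is an acceptable remote name.
        if not os.path.exists(mirror):
            subprocess.run(["ostree", "init", "--repo", mirror, "--mode", "archive-z2"],
                           check=True)
            subprocess.run(["ostree", "config", "--repo", mirror, "set",
                            "core.min-free-space-percent", "0"], check=True)

        remote = ["ostree", "remote", "add", "--if-not-exists", "--repo", mirror, "origin", url]
        remote += ["--gpg-import", gpg_key] if gpg_key else ["--no-gpg-verify"]
        subprocess.run(remote, check=True)

        subprocess.run(["ostree", "pull", "--repo", mirror, "--mirror", "origin", ref],
                       check=True)
        subprocess.run(["ostree", "checkout", "--repo", mirror, "--user-mode", ref, checkoutdir],
                       check=True)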
diff --git a/buildstream/plugins/sources/pip.py b/buildstream/plugins/sources/pip.py
index 6824bd3..6ea8476 100644
--- a/buildstream/plugins/sources/pip.py
+++ b/buildstream/plugins/sources/pip.py
@@ -92,6 +92,7 @@
     'python3.7',
     'python3.8',
     'python3.9',
+    'python3.10',
 ]
 
 # List of allowed extensions taken from
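This hunk only extends the candidate interpreter list; the lookup logic is not part of the patch. For context, a hedged sketch of how such a list is typically probed on the host (shutil.which stands in for whatever helper the plugin actually uses, and the list is shortened):

    import shutil

    _HOST_PYTHONS = ['python3.7', 'python3.8', 'python3.9', 'python3.10']

    def find_host_python():
        # Return the first interpreter from the candidate list found on PATH.
        for name in _HOST_PYTHONS:
            path = shutil.which(name)
            if path is not None:
                return path
        return None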
diff --git a/buildstream/sandbox/_sandboxbwrap.py b/buildstream/sandbox/_sandboxbwrap.py
index df33442..2bcb80b 100644
--- a/buildstream/sandbox/_sandboxbwrap.py
+++ b/buildstream/sandbox/_sandboxbwrap.py
@@ -221,7 +221,7 @@
         # there just in case so that we can safely cleanup the debris.
         #
         existing_basedirs = {
-            directory: os.path.exists(os.path.join(root_directory, directory))
+            directory: os.path.lexists(os.path.join(root_directory, directory))
             for directory in ['dev/shm', 'tmp', 'dev', 'proc']
         }
 
diff --git a/doc/source/install_linux_distro.rst b/doc/source/install_linux_distro.rst
index b30ab5a..f27aab5 100644
--- a/doc/source/install_linux_distro.rst
+++ b/doc/source/install_linux_distro.rst
@@ -44,7 +44,6 @@
 * libostree >= v2017.8 with introspection data
 * bubblewrap >= 0.1.2
 * fuse2
-* PyGObject introspection bindings
 * psutil python library (so you don't have to install GCC and python-devel to build it yourself)
 
 BuildStream also depends on the host tools for the :mod:`Source <buildstream.source>` plugins.
diff --git a/requirements/cov-requirements.txt b/requirements/cov-requirements.txt
index 42fa92f..e52fbdf 100644
--- a/requirements/cov-requirements.txt
+++ b/requirements/cov-requirements.txt
@@ -1,11 +1,11 @@
 coverage==4.5.4
 pytest-cov==2.10.1
 ## The following requirements were added by pip freeze:
-attrs==21.2.0
+attrs==21.4.0
 iniconfig==1.1.1
-packaging==21.0
-pluggy==0.13.1
-py==1.10.0
-pyparsing==2.4.7
-pytest==6.2.4
-toml==0.10.2
+packaging==21.3
+pluggy==1.0.0
+py==1.11.0
+pyparsing==3.0.7
+pytest==7.0.1
+tomli==2.0.1
diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt
index 8ab8873..109a5d2 100644
--- a/requirements/dev-requirements.txt
+++ b/requirements/dev-requirements.txt
@@ -1,24 +1,26 @@
 pep8==1.7.1
-pylint==2.10.2
-pytest==6.2.4
+pylint==2.12.2
+pytest==7.0.1
 pytest-datafiles==2.0
 pytest-env==0.6.2
-pytest-xdist==2.3.0
-pytest-timeout==1.4.2
+pytest-xdist==2.5.0
+pytest-timeout==2.1.0
 pyftpdlib==1.5.6
 ## The following requirements were added by pip freeze:
-astroid==2.7.2
-attrs==21.2.0
+astroid==2.9.3
+attrs==21.4.0
 execnet==1.9.0
 iniconfig==1.1.1
-isort==5.9.3
-lazy-object-proxy==1.6.0
+isort==5.10.1
+lazy-object-proxy==1.7.1
 mccabe==0.6.1
-packaging==21.0
-platformdirs==2.2.0
-pluggy==0.13.1
-py==1.10.0
-pyparsing==2.4.7
-pytest-forked==1.3.0
+packaging==21.3
+platformdirs==2.5.0
+pluggy==1.0.0
+py==1.11.0
+pyparsing==3.0.7
+pytest-forked==1.4.0
 toml==0.10.2
-wrapt==1.12.1
+tomli==2.0.1
+typing_extensions==4.1.1
+wrapt==1.13.3
diff --git a/requirements/plugin-requirements.in b/requirements/plugin-requirements.in
index e352728..a30105a 100644
--- a/requirements/plugin-requirements.in
+++ b/requirements/plugin-requirements.in
@@ -1,2 +1 @@
 arpy
-PyGObject
diff --git a/requirements/plugin-requirements.txt b/requirements/plugin-requirements.txt
index 43d0963..0ab7e2b 100644
--- a/requirements/plugin-requirements.txt
+++ b/requirements/plugin-requirements.txt
@@ -1,4 +1,2 @@
 arpy==2.2.0
-PyGObject==3.40.1
 ## The following requirements were added by pip freeze:
-pycairo==1.20.1
diff --git a/requirements/requirements.in b/requirements/requirements.in
index 14569a3..17a1cf5 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -4,6 +4,6 @@
 pluginbase
 protobuf >= 3.6
 psutil
-ruamel.yaml >= 0.16
+ruamel.yaml < 0.17
 setuptools
 ujson
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index b5de6e9..030c57b 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -1,12 +1,12 @@
-click==8.0.1
-grpcio==1.39.0
-Jinja2==3.0.1
+click==8.0.3
+grpcio==1.43.0
+Jinja2==3.0.3
 pluginbase==1.0.1
-protobuf==3.17.3
-psutil==5.8.0
-ruamel.yaml==0.17.13
-setuptools==44.1.1
-ujson==4.1.0
+protobuf==3.19.4
+psutil==5.9.0
+ruamel.yaml==0.16.13
+setuptools==59.6.0
+ujson==5.1.0
 ## The following requirements were added by pip freeze:
 MarkupSafe==2.0.1
 ruamel.yaml.clib==0.2.6
diff --git a/tox.ini b/tox.ini
index a342353..5351aa5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,7 @@
 # Tox global configuration
 #
 [tox]
-envlist = py36-nocover,py37-nocover,py38-nocover,py39-nocover
+envlist = py36-nocover,py37-nocover,py38-nocover,py39-nocover,py310-nocover
 skip_missing_interpreters = true
 
 #
@@ -13,16 +13,16 @@
 [testenv]
 commands =
     # Running with coverage reporting enabled
-    py{36,37,38,39}-!nocover: pytest --basetemp {envtmpdir} --cov=buildstream --cov-config .coveragerc {posargs}
-    py{36,37,38,39}-!nocover: mkdir -p .coverage-reports
-    py{36,37,38,39}-!nocover: mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
+    py{36,37,38,39,310}-!nocover: pytest --basetemp {envtmpdir} --cov=buildstream --cov-config .coveragerc {posargs}
+    py{36,37,38,39,310}-!nocover: mkdir -p .coverage-reports
+    py{36,37,38,39,310}-!nocover: mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
 
     # Running with coverage reporting disabled
-    py{36,37,38,39}-nocover: pytest --basetemp {envtmpdir} {posargs}
+    py{36,37,38,39,310}-nocover: pytest --basetemp {envtmpdir} {posargs}
 deps =
-    py{36,37,38,39}: -rrequirements/requirements.txt
-    py{36,37,38,39}: -rrequirements/dev-requirements.txt
-    py{36,37,38,39}: -rrequirements/plugin-requirements.txt
+    py{36,37,38,39,310}: -rrequirements/requirements.txt
+    py{36,37,38,39,310}: -rrequirements/dev-requirements.txt
+    py{36,37,38,39,310}: -rrequirements/plugin-requirements.txt
 
     # Only require coverage and pytest-cov when using it
     !nocover: -rrequirements/cov-requirements.txt
@@ -35,9 +35,9 @@
 # These keys are not inherited by any other sections
 #
 setenv =
-    py{36,37,38,39}: COVERAGE_FILE = {envtmpdir}/.coverage
+    py{36,37,38,39,310}: COVERAGE_FILE = {envtmpdir}/.coverage
 whitelist_externals =
-    py{36,37,38,39}:
+    py{36,37,38,39,310}:
         mv
         mkdir