Merge pull request #1707 from apache/tristan/full-build-tree

Cache full build tree for debugging failed builds
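
This allows a failed build to be debugged by shelling into the exact environment the build ran in, i.e. by running bst shell --build <element> --use-buildtree -- <command>, as exercised by the new tests in tests/integration/shellbuildtrees.py below. No new CLI options are introduced; the existing --use-buildtree shell option now stages the cached build root (the full sandbox root) rather than only the build tree.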
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 019ca1f..2ff345e 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -52,7 +52,7 @@
 #
 class Artifact:
 
-    version = 1
+    version = 2
 
     def __init__(self, element, context, *, strong_key=None, strict_key=None, weak_key=None):
         self._element = element
@@ -127,6 +127,18 @@
         files_digest = self._get_field_digest("files")
         return CasBasedDirectory(self._cas, digest=files_digest)
 
+    # get_buildroot():
+    #
+    # Get a virtual directory for the artifact buildroot content
+    #
+    # Returns:
+    #    (Directory): The virtual directory object
+    #
+    def get_buildroot(self):
+        buildroot_digest = self._get_field_digest("buildroot")
+
+        return CasBasedDirectory(self._cas, digest=buildroot_digest)
+
     # get_buildtree():
     #
     # Get a virtual directory for the artifact buildtree content
@@ -182,6 +194,7 @@
     # Create the artifact and commit to cache
     #
     # Args:
+    #    buildrootvdir (Directory): The root directory of the build sandbox
     #    sandbox_build_dir (Directory): Virtual Directory object for the sandbox build-root
     #    collectvdir (Directory): Virtual Directory object from within the sandbox for collection
     #    sourcesvdir (Directory): Virtual Directory object for the staged sources
@@ -191,12 +204,10 @@
     #    environment (dict): dict of the element's environment variables
     #    sandboxconfig (SandboxConfig): The element's SandboxConfig
     #
-    # Returns:
-    #    (int): The size of the newly cached artifact
-    #
     def cache(
         self,
         *,
+        buildrootvdir,
         sandbox_build_dir,
         collectvdir,
         sourcesvdir,
@@ -284,7 +295,6 @@
             assert len(files_to_capture) == len(digests)
             for entry, digest in zip(files_to_capture, digests):
                 entry[1].CopyFrom(digest)
-                size += digest.size_bytes
 
         # store build dependencies
         for e in element._dependencies(_Scope.BUILD):
@@ -299,12 +309,16 @@
             buildtreevdir = CasBasedDirectory(cas_cache=self._cas)
             buildtreevdir._import_files_internal(sandbox_build_dir, properties=properties, collect_result=False)
             artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
-            size += buildtreevdir._get_size()
 
         # Store sources
         if sourcesvdir is not None:
             artifact.sources.CopyFrom(sourcesvdir._get_digest())
-            size += sourcesvdir._get_size()
+
+        # Store build root
+        if buildrootvdir is not None:
+            rootvdir = CasBasedDirectory(cas_cache=self._cas)
+            rootvdir._import_files_internal(buildrootvdir, properties=properties, collect_result=False)
+            artifact.buildroot.CopyFrom(rootvdir._get_digest())
 
         os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True)
         keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
@@ -313,7 +327,37 @@
             with utils.save_file_atomic(path, mode="wb") as f:
                 f.write(artifact.SerializeToString())
 
-        return size
+    # cached_buildroot()
+    #
+    # Check if artifact is cached with expected buildroot. A
+    # buildroot will not be present if the rest of the partial artifact
+    # is not cached.
+    #
+    # Returns:
+    #     (bool): True if artifact cached with buildroot, False if
+    #             missing expected buildroot. Note this only confirms
+    #             if a buildroot is present, not its contents.
+    #
+    def cached_buildroot(self):
+
+        buildroot_digest = self._get_field_digest("buildroot")
+        if buildroot_digest:
+            return self._cas.contains_directory(buildroot_digest, with_files=True)
+        else:
+            return False
+
+    # buildroot_exists()
+    #
+    # Check if artifact was created with a buildroot. This does not check
+    # whether the buildroot is present in the local cache.
+    #
+    # Returns:
+    #     (bool): True if artifact was created with buildroot
+    #
+    def buildroot_exists(self):
+
+        artifact = self._get_proto()
+        return bool(str(artifact.buildroot))
 
     # cached_buildtree()
     #
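
Note on the two new queries added above: buildroot_exists() only inspects the artifact proto, whereas cached_buildroot() additionally verifies that the referenced directory and its file blobs are present in the local CAS. A minimal sketch of how the two checks compose (the helper name is hypothetical and not part of this patch):

    # Hypothetical helper, not part of this patch, illustrating the
    # semantics of the two new Artifact methods.
    def can_stage_buildroot(artifact):
        if not artifact.buildroot_exists():
            # The artifact was created without caching a build root at all.
            return False
        # A build root digest is recorded; confirm the directory contents
        # are actually available in the local CAS.
        return artifact.cached_buildroot()
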
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index ba0261f..9c4b003 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -395,6 +395,12 @@
                 except FileNotFoundError:
                     pass
 
+            if str(artifact_proto.buildroot):
+                try:
+                    self.cas._send_directory(remote, artifact_proto.buildroot)
+                except FileNotFoundError:
+                    pass
+
             digests = [artifact_digest, artifact_proto.low_diversity_meta, artifact_proto.high_diversity_meta]
 
             if str(artifact_proto.public_data):
@@ -451,6 +457,8 @@
             referenced_directories.append(artifact_proto.buildtree)
         if artifact_proto.sources:
             referenced_directories.append(artifact_proto.sources)
+        if artifact_proto.buildroot:
+            referenced_directories.append(artifact_proto.buildroot)
 
         referenced_blobs = [artifact_proto.low_diversity_meta, artifact_proto.high_diversity_meta] + [
             log_file.digest for log_file in artifact_proto.logs
@@ -504,8 +512,11 @@
             if str(artifact.files):
                 self.cas._fetch_directory(remote, artifact.files)
 
-            if pull_buildtrees and str(artifact.buildtree):
-                self.cas._fetch_directory(remote, artifact.buildtree)
+            if pull_buildtrees:
+                if str(artifact.buildtree):
+                    self.cas._fetch_directory(remote, artifact.buildtree)
+                if str(artifact.buildroot):
+                    self.cas._fetch_directory(remote, artifact.buildroot)
 
             digests = [artifact.low_diversity_meta, artifact.high_diversity_meta]
             if str(artifact.public_data):
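
The build root is transferred the same way as the build tree: on push it is uploaded when the artifact proto carries a buildroot digest (and silently skipped if the directory is missing locally), and on pull it is only downloaded when pull_buildtrees is enabled.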
diff --git a/src/buildstream/_protos/buildstream/v2/artifact.proto b/src/buildstream/_protos/buildstream/v2/artifact.proto
index 2f489f2..08a456a 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact.proto
+++ b/src/buildstream/_protos/buildstream/v2/artifact.proto
@@ -83,4 +83,7 @@
 
   // Strict key is a later addition to the core metadata
   string strict_key = 16;
+
+  // Digest of the root directory of the build sandbox
+  build.bazel.remote.execution.v2.Digest buildroot = 17;  // optional
 }
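
Like the other optional Digest fields in this message, presence of the new field number 17 is detected by checking whether the submessage is empty; str(digest) yields an empty string for an unset Digest, which is the idiom used throughout this patch. A rough sketch of reading the field from an artifact proto on disk (artifact_path is a placeholder, not something defined by this patch):

    from buildstream._protos.buildstream.v2 import artifact_pb2

    artifact_proto = artifact_pb2.Artifact()
    with open(artifact_path, "rb") as f:  # artifact_path is assumed
        artifact_proto.ParseFromString(f.read())

    if str(artifact_proto.buildroot):
        # The buildroot digest is set; the directory can be fetched from CAS.
        buildroot_digest = artifact_proto.buildroot
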
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
index b0264f2..f4883a1 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
@@ -16,7 +16,7 @@
 from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\xcd\x06\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x38\n\x07sources\x18\r \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x43\n\x12low_diversity_meta\x18\x0e \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\x13high_diversity_meta\x18\x0f \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstrict_key\x18\x10 \x01(\t\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digestb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\x89\x07\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x38\n\x07sources\x18\r \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x43\n\x12low_diversity_meta\x18\x0e \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\x13high_diversity_meta\x18\x0f \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstrict_key\x18\x10 \x01(\t\x12:\n\tbuildroot\x18\x11 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digestb\x06proto3')
 
 
 
@@ -50,9 +50,9 @@
 
   DESCRIPTOR._options = None
   _ARTIFACT._serialized_start=136
-  _ARTIFACT._serialized_end=981
-  _ARTIFACT_DEPENDENCY._serialized_start=800
-  _ARTIFACT_DEPENDENCY._serialized_end=899
-  _ARTIFACT_LOGFILE._serialized_start=901
-  _ARTIFACT_LOGFILE._serialized_end=981
+  _ARTIFACT._serialized_end=1041
+  _ARTIFACT_DEPENDENCY._serialized_start=860
+  _ARTIFACT_DEPENDENCY._serialized_end=959
+  _ARTIFACT_LOGFILE._serialized_start=961
+  _ARTIFACT_LOGFILE._serialized_end=1041
 # @@protoc_insertion_point(module_scope)
diff --git a/src/buildstream/_scheduler/queues/buildqueue.py b/src/buildstream/_scheduler/queues/buildqueue.py
index 5a6ca75..6a05594 100644
--- a/src/buildstream/_scheduler/queues/buildqueue.py
+++ b/src/buildstream/_scheduler/queues/buildqueue.py
@@ -54,4 +54,4 @@
 
     @staticmethod
     def _assemble_element(element):
-        return element._assemble()
+        element._assemble()
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 3ee2468..1ae6049 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -278,7 +278,12 @@
         if unique_id and target is None:
             element = Plugin._lookup(unique_id)
         else:
-            selection = _PipelineSelection.BUILD if scope == _Scope.BUILD else _PipelineSelection.RUN
+            if usebuildtree:
+                selection = _PipelineSelection.NONE
+            elif scope == _Scope.BUILD:
+                selection = _PipelineSelection.BUILD
+            else:
+                selection = _PipelineSelection.RUN
 
             elements = self.load_selection(
                 (target,),
@@ -308,21 +313,23 @@
             self.query_cache(pull_elements)
             self._pull_missing_artifacts(pull_elements)
 
-        missing_deps = [dep for dep in _pipeline.dependencies([element], scope) if not dep._cached()]
-        if missing_deps:
-            raise StreamError(
-                "Elements need to be built or downloaded before staging a shell environment",
-                detail="\n".join(list(map(lambda x: x._get_full_name(), missing_deps))),
-                reason="shell-missing-deps",
-            )
+        # We don't need dependency artifacts to shell into a cached build tree
+        if not usebuildtree:
+            missing_deps = [dep for dep in _pipeline.dependencies([element], scope) if not dep._cached()]
+            if missing_deps:
+                raise StreamError(
+                    "Elements need to be built or downloaded before staging a shell environment",
+                    detail="\n".join(list(map(lambda x: x._get_full_name(), missing_deps))),
+                    reason="shell-missing-deps",
+                )
 
         # Check if we require a pull queue attempt, with given artifact state and context
         if usebuildtree:
-            if not element._cached_buildtree():
+            if not element._cached_buildroot():
                 if not element._cached():
                     message = "Artifact not cached locally or in available remotes"
                     reason = "missing-buildtree-artifact-not-cached"
-                elif element._buildtree_exists():
+                elif element._buildroot_exists():
                     message = "Buildtree is not cached locally or in available remotes"
                     reason = "missing-buildtree-artifact-buildtree-not-cached"
                 else:
@@ -1961,31 +1968,6 @@
 
         return os.path.join(directory, *reversed(parts))
 
-    # _buildtree_pull_required()
-    #
-    # Check if current task, given config, requires element buildtree artifact
-    #
-    # Args:
-    #    elements (list): elements to check if buildtrees are required
-    #
-    # Returns:
-    #    (list): elements requiring buildtrees
-    #
-    def _buildtree_pull_required(self, elements):
-        required_list = []
-
-        # If context is set to not pull buildtrees, or no fetch remotes, return empty list
-        if not self._context.pull_buildtrees or not self._artifacts.has_fetch_remotes():
-            return required_list
-
-        for element in elements:
-            # Check if element is partially cached without its buildtree, as the element
-            # artifact may not be cached at all
-            if element._cached() and not element._cached_buildtree() and element._buildtree_exists():
-                required_list.append(element)
-
-        return required_list
-
     # _expand_and_classify_targets()
     #
     # Takes the user provided targets, expand any glob patterns, and
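
Because dependency artifacts are no longer required when shelling into a cached build tree, the "no-pull" case in tests/integration/shellbuildtrees.py further below now reports missing-buildtree-artifact-not-cached rather than shell-missing-deps.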
diff --git a/src/buildstream/_versions.py b/src/buildstream/_versions.py
index 733df13..e96516a 100644
--- a/src/buildstream/_versions.py
+++ b/src/buildstream/_versions.py
@@ -23,4 +23,4 @@
 # or if buildstream was changed in a way which can cause
 # the same cache key to produce something that is no longer
 # the same.
-BST_CORE_ARTIFACT_VERSION = 10
+BST_CORE_ARTIFACT_VERSION = 11
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 8f47a5b..a9c4e42 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -1411,13 +1411,17 @@
 
         # bst shell and bst artifact checkout require a local sandbox.
         with self.__sandbox(None, config=self.__sandbox_config, allow_remote=False) as sandbox:
-            sandbox._usebuildtree = usebuildtree
 
             # Configure always comes first, and we need it.
             self.__configure_sandbox(sandbox)
 
-            # Stage what we need
-            if shell and scope == _Scope.BUILD:
+            if usebuildtree:
+                # Use the cached buildroot directly
+                buildrootvdir = self.__artifact.get_buildroot()
+                sandbox_vroot = sandbox.get_virtual_directory()
+                sandbox_vroot._import_files_internal(buildrootvdir, collect_result=False)
+            elif shell and scope == _Scope.BUILD:
+                # Stage what we need
                 self.__stage(sandbox)
             else:
                 # Stage deps in the sandbox root
@@ -1446,7 +1450,7 @@
         # Stage all sources that need to be copied
         sandbox_vroot = sandbox.get_virtual_directory()
         host_vdirectory = sandbox_vroot.open_directory(directory.lstrip(os.sep), create=True)
-        self._stage_sources_at(host_vdirectory, usebuildtree=sandbox._usebuildtree)
+        self._stage_sources_at(host_vdirectory)
 
     # _stage_sources_at():
     #
@@ -1454,9 +1458,8 @@
     #
     # Args:
     #     vdirectory (Union[str, Directory]): A virtual directory object or local path to stage sources to.
-    #     usebuildtree (bool): use a the elements build tree as its source.
     #
-    def _stage_sources_at(self, vdirectory, usebuildtree=False):
+    def _stage_sources_at(self, vdirectory):
 
         # It's advantageous to have this temporary directory on
         # the same file system as the rest of our cache.
@@ -1467,26 +1470,18 @@
             if vdirectory:
                 raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
 
-            # Check if we have a cached buildtree to use
-            if usebuildtree:
-                import_dir = self.__artifact.get_buildtree()
-                if not import_dir:
-                    detail = "Element type either does not expect a buildtree or it was explictily cached without one."
-                    self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
+            # stage sources from source cache
+            staged_sources = self.__sources.get_files()
 
-            # No cached buildtree, stage source from source cache
+            # incremental builds should merge the source into the last artifact before staging
+            last_build_artifact = self.__get_last_build_artifact()
+            if last_build_artifact:
+                self.info("Incremental build")
+                last_sources = last_build_artifact.get_sources()
+                import_dir = last_build_artifact.get_buildtree()
+                import_dir._apply_changes(last_sources, staged_sources)
             else:
-                staged_sources = self.__sources.get_files()
-
-                # incremental builds should merge the source into the last artifact before staging
-                last_build_artifact = self.__get_last_build_artifact()
-                if last_build_artifact:
-                    self.info("Incremental build")
-                    last_sources = last_build_artifact.get_sources()
-                    import_dir = last_build_artifact.get_buildtree()
-                    import_dir._apply_changes(last_sources, staged_sources)
-                else:
-                    import_dir = staged_sources
+                import_dir = staged_sources
 
             # Set update_mtime to ensure deterministic mtime of sources at build time
             vdirectory._import_files_internal(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP, collect_result=False)
@@ -1639,9 +1634,6 @@
     #   - Call the public abstract methods for the build phase
     #   - Cache the resulting artifact
     #
-    # Returns:
-    #    (int): The size of the newly cached artifact
-    #
     def _assemble(self):
 
         # Only do this the first time around (i.e. __assemble_done is False)
@@ -1708,7 +1700,7 @@
 
                     raise
                 else:
-                    return self._cache_artifact(sandbox, collect)
+                    self._cache_artifact(sandbox, collect)
 
     def _cache_artifact(self, sandbox, collect):
 
@@ -1719,6 +1711,7 @@
         collectvdir = None
         sandbox_build_dir = None
         sourcesvdir = None
+        buildrootvdir = None
 
         cache_buildtrees = context.cache_buildtrees
         build_success = buildresult[0]
@@ -1741,6 +1734,7 @@
                 # if the directory could not be found.
                 pass
 
+            buildrootvdir = sandbox_vroot
             sourcesvdir = self.__sources.get_files()
 
         if collect is not None:
@@ -1755,7 +1749,8 @@
         assert self.__artifact._cache_key is not None
 
         with self.timed_activity("Caching artifact"):
-            artifact_size = self.__artifact.cache(
+            self.__artifact.cache(
+                buildrootvdir=buildrootvdir,
                 sandbox_build_dir=sandbox_build_dir,
                 collectvdir=collectvdir,
                 sourcesvdir=sourcesvdir,
@@ -1772,8 +1767,6 @@
                 "unable to collect artifact contents".format(collect)
             )
 
-        return artifact_size
-
     # _fetch_done()
     #
     # Indicates that fetching the sources for this element has been done.
@@ -1947,6 +1940,8 @@
                 return True
             if not self._cached_buildtree() and self._buildtree_exists():
                 return True
+            if not self._cached_buildroot() and self._buildroot_exists():
+                return True
 
         return False
 
@@ -1967,6 +1962,9 @@
         if not self._cached_buildtree() and self._buildtree_exists():
             raise ElementError("Push failed: buildtree of {} is not cached".format(self.name))
 
+        if not self._cached_buildroot() and self._buildroot_exists():
+            raise ElementError("Push failed: buildroot of {} is not cached".format(self.name))
+
         if self.__get_tainted():
             self.warn("Not pushing tainted artifact.")
             return False
@@ -2153,6 +2151,39 @@
 
         return self.__artifact.buildtree_exists()
 
+    # _cached_buildroot()
+    #
+    # Check if element artifact contains expected buildroot. An
+    # element's buildroot artifact will not be present if the rest
+    # of the partial artifact is not cached.
+    #
+    # Returns:
+    #     (bool): True if artifact cached with buildroot, False if
+    #             element not cached or missing expected buildroot.
+    #             Note this only confirms if a buildroot is present,
+    #             not its contents.
+    #
+    def _cached_buildroot(self):
+        if not self._cached():
+            return False
+
+        return self.__artifact.cached_buildroot()
+
+    # _buildroot_exists()
+    #
+    # Check if artifact was created with a buildroot. This does not check
+    # whether the buildroot is present in the local cache.
+    #
+    # Returns:
+    #     (bool): True if artifact was created with buildroot, False if
+    #             element not cached or not created with a buildroot.
+    #
+    def _buildroot_exists(self):
+        if not self._cached():
+            return False
+
+        return self.__artifact.buildroot_exists()
+
     # _cached_logs()
     #
     # Check if the artifact is cached with log files.
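
Note that the cached build root is the entire sandbox root (buildrootvdir = sandbox_vroot in _cache_artifact() above), not just the build directory, which is why shelling into it needs no re-staging of dependencies or sources. A condensed sketch of the new --use-buildtree staging path, mirroring the shell-staging hunk near the top of this file's diff (self is an Element, sandbox an open local sandbox):

    # Condensed from the staging logic added above; not additional code.
    buildrootvdir = self.__artifact.get_buildroot()
    sandbox_vroot = sandbox.get_virtual_directory()
    sandbox_vroot._import_files_internal(buildrootvdir, collect_result=False)
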
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index e30414b..47d841f 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -95,7 +95,6 @@
         self.__stderr = kwargs["stderr"]
 
         self._vdir = None  # type: Optional[Directory]
-        self._usebuildtree = False
 
         # Pending command batch
         self.__batch = None
diff --git a/tests/cachekey/project/elements/build1.expected b/tests/cachekey/project/elements/build1.expected
index 98311d4..a1d899f 100644
--- a/tests/cachekey/project/elements/build1.expected
+++ b/tests/cachekey/project/elements/build1.expected
@@ -1 +1 @@
-7f00e0e1d2f7f154895c0392816750984ddcbcde290a87df475a3ff1ef151308
\ No newline at end of file
+95cc8ee040690b8abbacb201c8f0d9eeab7d1376db473d62799b5b5c36c71b47
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/build2.expected b/tests/cachekey/project/elements/build2.expected
index f07a044..224e0bf 100644
--- a/tests/cachekey/project/elements/build2.expected
+++ b/tests/cachekey/project/elements/build2.expected
@@ -1 +1 @@
-11578c1823d8fad0143bb1afd195a6dbc5b36a11834ab04d1babe7d5750b5341
\ No newline at end of file
+dccab3eb3e5a9b5ef3a29f409cb0235ee4c393edfda1407114aa7784c35ed588
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/build3.expected b/tests/cachekey/project/elements/build3.expected
index 2edc024..95222af 100644
--- a/tests/cachekey/project/elements/build3.expected
+++ b/tests/cachekey/project/elements/build3.expected
@@ -1 +1 @@
-bf6067aaba914c449080a8e477d8b206dd3a756085874be4ec0d902850d3f0a1
\ No newline at end of file
+492953d9bdef66486c5111caf48f015690848e46d7f8de0f71bde386b68d4e5e
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose1.expected b/tests/cachekey/project/elements/compose1.expected
index 8f769e5..845cb64 100644
--- a/tests/cachekey/project/elements/compose1.expected
+++ b/tests/cachekey/project/elements/compose1.expected
@@ -1 +1 @@
-79741eae7ac50457a67f06d2913f549080739dada5033b3015fb68b494536c5a
\ No newline at end of file
+7cca3e9df846040a0e1bd034d61e667ff5552f594377152cdb87ca7eb1ecf624
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose2.expected b/tests/cachekey/project/elements/compose2.expected
index 1d1db39..1761103 100644
--- a/tests/cachekey/project/elements/compose2.expected
+++ b/tests/cachekey/project/elements/compose2.expected
@@ -1 +1 @@
-329f4ff2c3855f541ff2f85d0ba5ad4a87c73610914f84483c8462b095a8007e
\ No newline at end of file
+76d17f849d551f448fca92c23fb3e58d00f8b338507684d3e19ff2b7eed973ae
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose3.expected b/tests/cachekey/project/elements/compose3.expected
index d56d567..d7bb403 100644
--- a/tests/cachekey/project/elements/compose3.expected
+++ b/tests/cachekey/project/elements/compose3.expected
@@ -1 +1 @@
-db8429d5f2a87335cfb3ca005b13a12267b3ba4ef369c72ee2d6dd237c4cf12a
\ No newline at end of file
+b69f69de6220a40105231f085db41b3ec6369a01cb5bb1087e3a92adbe883079
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose4.expected b/tests/cachekey/project/elements/compose4.expected
index f7c85c1..34a64cc 100644
--- a/tests/cachekey/project/elements/compose4.expected
+++ b/tests/cachekey/project/elements/compose4.expected
@@ -1 +1 @@
-fff3c6f147a40ba9b47a4f6b39fc307934db891659d3d06258ab4b2458a5650f
\ No newline at end of file
+738683bb7a8f9b879ba2ac447b94d7a05de6e8714e4d730117ebef85526c63de
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose5.expected b/tests/cachekey/project/elements/compose5.expected
index 57ddda2..d1d8996 100644
--- a/tests/cachekey/project/elements/compose5.expected
+++ b/tests/cachekey/project/elements/compose5.expected
@@ -1 +1 @@
-7040e4b5a4b82f2c4c511e62a125239dbb07ac171673ba167ab9e39d4e491763
\ No newline at end of file
+3627e625bea80e5c66b20b3301138eacf942475336b53da180ed8036aee7cb46
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/import1.expected b/tests/cachekey/project/elements/import1.expected
index a3133cc..16d3fdf 100644
--- a/tests/cachekey/project/elements/import1.expected
+++ b/tests/cachekey/project/elements/import1.expected
@@ -1 +1 @@
-3b62fa381e9183353cac12f8e6ced23ea42931ed4f89ffe24625b6ec9f236ab5
\ No newline at end of file
+abcca93a8c5d9f483e91e2abe97477ecc161d6cb9a2c4616852a6db05e8d3295
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/import2.expected b/tests/cachekey/project/elements/import2.expected
index 4d32ff0..bd955b3 100644
--- a/tests/cachekey/project/elements/import2.expected
+++ b/tests/cachekey/project/elements/import2.expected
@@ -1 +1 @@
-abeff9a432127674cff36acb36f45cd2665eebf75b589693db6676885e37c59c
\ No newline at end of file
+857ecf27a54c23b557e4ac4fe35f8e651a41de159a507393538de11b28aeb4bc
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/import3.expected b/tests/cachekey/project/elements/import3.expected
index 0fc1c1f..7124fdb 100644
--- a/tests/cachekey/project/elements/import3.expected
+++ b/tests/cachekey/project/elements/import3.expected
@@ -1 +1 @@
-f2c5ae1c73a0d8abfbffb36b09bd74929e9b680bdb548f5faa1213a4c0423fe8
\ No newline at end of file
+65d86ffc69220b5f9de7670794bdc2bb7f7979596b029030c319f82c6be22008
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/script1.expected b/tests/cachekey/project/elements/script1.expected
index 2eeb3e0..b5a65b4 100644
--- a/tests/cachekey/project/elements/script1.expected
+++ b/tests/cachekey/project/elements/script1.expected
@@ -1 +1 @@
-eb81c0dfd2432b1ebf0bb052d6fa3a529438614c00210cfdc276bbcc2f3045d5
\ No newline at end of file
+9fe8b7edde3d883884a93f23673fc5e8271f1607ed8ba6ef0491057f2ec94cf4
\ No newline at end of file
diff --git a/tests/cachekey/project/elements/variables1.expected b/tests/cachekey/project/elements/variables1.expected
index 823d4d8..50cca2e 100644
--- a/tests/cachekey/project/elements/variables1.expected
+++ b/tests/cachekey/project/elements/variables1.expected
@@ -1 +1 @@
-1905948eb76ebc8ff077a8cd1171e5763beca75eabc64981941e297c31f586f6
\ No newline at end of file
+015ec2ae48b66178540db8983a93f536b831b53ff9c4f6fc70c0517b11031170
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/local1.expected b/tests/cachekey/project/sources/local1.expected
index 44f8f99..9ce54de 100644
--- a/tests/cachekey/project/sources/local1.expected
+++ b/tests/cachekey/project/sources/local1.expected
@@ -1 +1 @@
-8f60074539dab1c95cca763c690b2afb8d4edc07810765d6439a9024330d9178
\ No newline at end of file
+526e79c6485baaaa037078e9ae916b0a27ea29571bc47733eff4ab344a98a994
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/local2.expected b/tests/cachekey/project/sources/local2.expected
index 14051f1..0280257 100644
--- a/tests/cachekey/project/sources/local2.expected
+++ b/tests/cachekey/project/sources/local2.expected
@@ -1 +1 @@
-0158be74cc02831a535ebd72326a663c36e79764a99e19462d617f2f538934a4
\ No newline at end of file
+e7647c5f651303969e6f554601293af511d3ec8ce1e845918555de3159353f6c
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/remote1.expected b/tests/cachekey/project/sources/remote1.expected
index 0b9f807..38e03a5 100644
--- a/tests/cachekey/project/sources/remote1.expected
+++ b/tests/cachekey/project/sources/remote1.expected
@@ -1 +1 @@
-296efa9843a3ec0fd016c5e3a14ea04d7f712988af01cc85909f625f76903fcb
\ No newline at end of file
+d114cb0014edaa5ef0faf8bded09317cf66992aaad83a64464ba01ad71a49ec6
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/remote2.expected b/tests/cachekey/project/sources/remote2.expected
index 6388e4b..c3bdacf 100644
--- a/tests/cachekey/project/sources/remote2.expected
+++ b/tests/cachekey/project/sources/remote2.expected
@@ -1 +1 @@
-d3ad2e9bca5dcc8bb915938f0fcfd118ec1a38e4644a416898bc4b40ddfc7997
\ No newline at end of file
+17f4888bf8f25f99fb27ee8ce4e1608cf159ed59c71f55f3c60db40dfcb1d4a9
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/tar1.expected b/tests/cachekey/project/sources/tar1.expected
index 7be4f28..858d74f 100644
--- a/tests/cachekey/project/sources/tar1.expected
+++ b/tests/cachekey/project/sources/tar1.expected
@@ -1 +1 @@
-e4f2bd7e23727048719100b217afe9dd9e84873d9a0a863945a17bc0334057aa
\ No newline at end of file
+75120aeebb659b69a3b688976a05058ed73fa90ff3ff2b81b619aac4f2757469
\ No newline at end of file
diff --git a/tests/cachekey/project/sources/tar2.expected b/tests/cachekey/project/sources/tar2.expected
index 603b07b..3b12f8d 100644
--- a/tests/cachekey/project/sources/tar2.expected
+++ b/tests/cachekey/project/sources/tar2.expected
@@ -1 +1 @@
-29214222df2cb44b807621dbcef679d420a9ac70be06c62977effd09729b19ac
\ No newline at end of file
+a9fa1deeb8ae46ccc9e35db4e04f4b724b2c2391cf663415bbc384ce3445948a
\ No newline at end of file
diff --git a/tests/cachekey/project/target.expected b/tests/cachekey/project/target.expected
index 524b42a..56301bf 100644
--- a/tests/cachekey/project/target.expected
+++ b/tests/cachekey/project/target.expected
@@ -1 +1 @@
-94d8bb8fa9545152d59e4b9e141feb2c6190576457350cc6335186b024e99967
\ No newline at end of file
+34a0ed236c80ba9069519d6f0cb280747b3c949bf6e877645d35b616eb7729b5
\ No newline at end of file
diff --git a/tests/frontend/completions.py b/tests/frontend/completions.py
index ae289b5..f734112 100644
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -340,8 +340,8 @@
 
     # Use hard coded artifact names, cache keys should be stable now
     artifacts = [
-        "test/import-bin/cb0c8c2e1881b09338aa3f533d224f83f06bdf263523d04ee197232c74f09357",
-        "test/import-bin/edcfeda7d52c6bb77e632e31bd8ba40122125b2f50553b57c34947aa5fa709df",
+        "test/import-bin/0b769809a4e12dd5060df8e8bb1cd960f6c93d1ed2a6a34350eac9272ea71e81",
+        "test/import-bin/67844557f63f985ef38ba68d68e3dfdeda02f4b40a90f53707f0fc643245ccb8",
     ]
 
     # Test autocompletion of the artifact
diff --git a/tests/integration/project/elements/build-shell/compose-dep-fail.bst b/tests/integration/project/elements/build-shell/compose-dep-fail.bst
new file mode 100644
index 0000000..2ce06f3
--- /dev/null
+++ b/tests/integration/project/elements/build-shell/compose-dep-fail.bst
@@ -0,0 +1,13 @@
+kind: manual
+
+depends:
+- base.bst
+
+config:
+  build-commands:
+  - "echo 'Goodbye world'"
+
+public:
+  bst:
+    integration-commands:
+    - exit 1
diff --git a/tests/integration/project/elements/build-shell/compose-dep-success.bst b/tests/integration/project/elements/build-shell/compose-dep-success.bst
new file mode 100644
index 0000000..b2813eb
--- /dev/null
+++ b/tests/integration/project/elements/build-shell/compose-dep-success.bst
@@ -0,0 +1,13 @@
+kind: manual
+
+depends:
+- base.bst
+
+config:
+  build-commands:
+  - "echo 'Hello world'"
+
+public:
+  bst:
+    integration-commands:
+    - echo "Hi" > /integration-success
diff --git a/tests/integration/project/elements/build-shell/compose-fail.bst b/tests/integration/project/elements/build-shell/compose-fail.bst
new file mode 100644
index 0000000..06c676f
--- /dev/null
+++ b/tests/integration/project/elements/build-shell/compose-fail.bst
@@ -0,0 +1,4 @@
+kind: compose
+
+build-depends:
+- build-shell/compose-dep-fail.bst
diff --git a/tests/integration/project/elements/build-shell/compose-success.bst b/tests/integration/project/elements/build-shell/compose-success.bst
new file mode 100644
index 0000000..29ff5de
--- /dev/null
+++ b/tests/integration/project/elements/build-shell/compose-success.bst
@@ -0,0 +1,4 @@
+kind: compose
+
+build-depends:
+- build-shell/compose-dep-success.bst
diff --git a/tests/integration/project/elements/build-shell/script.bst b/tests/integration/project/elements/build-shell/script.bst
new file mode 100644
index 0000000..d370a02
--- /dev/null
+++ b/tests/integration/project/elements/build-shell/script.bst
@@ -0,0 +1,8 @@
+kind: script
+
+build-depends:
+- base.bst
+
+config:
+  commands:
+  - "echo 'Hi' > /test"
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index 4ae20d5..b27a25a 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -228,15 +228,26 @@
     "pull_deps,pull_buildtree,expect_error",
     [
         # Don't pull at all
-        (None, False, "shell-missing-deps"),
+        (None, False, "missing-buildtree-artifact-not-cached"),
         # Pull only dependencies
         ("build", False, "missing-buildtree-artifact-not-cached"),
         # Pull all elements including the shell element, but without the buildtree
         ("all", False, "missing-buildtree-artifact-buildtree-not-cached"),
         # Pull all elements including the shell element, and pull buildtrees
         ("all", True, None),
+        # Pull only the artifact, but without the buildtree
+        ("none", False, "missing-buildtree-artifact-buildtree-not-cached"),
+        # Pull only the artifact with its buildtree
+        ("none", True, None),
     ],
-    ids=["no-pull", "pull-only-deps", "pull-without-buildtree", "pull-with-buildtree"],
+    ids=[
+        "no-pull",
+        "pull-only-deps",
+        "pull-without-buildtree",
+        "pull-with-buildtree",
+        "pull-target-without-buildtree",
+        "pull-target-with-buildtree",
+    ],
 )
 def test_shell_use_cached_buildtree(share_with_buildtrees, datafiles, cli, pull_deps, pull_buildtree, expect_error):
     project = str(datafiles)
@@ -336,3 +347,67 @@
 
     # Sorry, a buildtree was never cached for this element
     result.assert_main_error(ErrorDomain.APP, "missing-buildtree-artifact-created-without-buildtree")
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+def test_shell_script_element(datafiles, cli_integration):
+    project = str(datafiles)
+    element_name = "build-shell/script.bst"
+
+    result = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
+    result.assert_success()
+
+    # Run the shell and use the cached buildtree on this script element
+    result = cli_integration.run(
+        project=project, args=["shell", "--build", element_name, "--use-buildtree", "--", "cat", "/test"]
+    )
+
+    result.assert_success()
+    assert "Hi" in result.output
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+@pytest.mark.parametrize(
+    "element_name,expect_success",
+    [
+        # Build shell into a compose element which succeeded
+        ("build-shell/compose-success.bst", True),
+        # Build shell into a compose element with failed integration commands
+        ("build-shell/compose-fail.bst", False),
+    ],
+    ids=["integration-success", "integration-fail"],
+)
+def test_shell_compose_element(datafiles, cli_integration, element_name, expect_success):
+    project = str(datafiles)
+
+    # Build the element so it's in the local cache, ensuring buildtrees are cached at build time
+    result = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
+    if expect_success:
+        result.assert_success()
+    else:
+        result.assert_main_error(ErrorDomain.STREAM, None)
+
+    # Ensure that the shell works regardless of success expectations
+    #
+    result = cli_integration.run(
+        project=project, args=["shell", "--build", element_name, "--use-buildtree", "--", "echo", "Hi"]
+    )
+    result.assert_success()
+    assert "Hi" in result.output
+
+    # Check the file created with integration commands
+    #
+    result = cli_integration.run(
+        project=project,
+        args=["shell", "--build", element_name, "--use-buildtree", "--", "cat", "/integration-success"],
+    )
+    if expect_success:
+        result.assert_success()
+        assert "Hi" in result.output
+    else:
+        # Here the exit code is determined by `cat`, and will be non-zero.
+        #
+        # We cannot use result.assert_main_error() because that explicitly expects -1
+        assert result.exit_code != 0
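
The new shell tests can typically be run directly with pytest against tests/integration/shellbuildtrees.py (the exact invocation depends on the project's test tooling); note that they are skipped unless a functioning sandbox is available, per the HAVE_SANDBOX guard above.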