Merge pull request #1697 from apache/juerg/artifact-cache-opt

_artifact.py: cache(): Optimize capture of metadata files
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 37cf793..ea57b74 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -234,7 +234,7 @@
         # Store files
         if collectvdir is not None:
             filesvdir = CasBasedDirectory(cas_cache=self._cas)
-            filesvdir._import_files_internal(collectvdir, properties=properties)
+            filesvdir._import_files_internal(collectvdir, properties=properties, collect_result=False)
             artifact.files.CopyFrom(filesvdir._get_digest())
             size += filesvdir._get_size()
 
@@ -294,7 +294,7 @@
         # Store build tree
         if sandbox_build_dir is not None:
             buildtreevdir = CasBasedDirectory(cas_cache=self._cas)
-            buildtreevdir._import_files_internal(sandbox_build_dir, properties=properties)
+            buildtreevdir._import_files_internal(sandbox_build_dir, properties=properties, collect_result=False)
             artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
             size += buildtreevdir._get_size()
 
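
The FileListResult bookkeeping that _import_files_internal() performs is pure
overhead here: cache() never reads the result, and for large artifacts the
per-file list building costs time and memory. A minimal sketch of the contrast
(directory objects and arguments assumed to be in scope as in the hunks above):

    result = vdir._import_files_internal(src, properties=properties)  # builds a FileListResult
    vdir._import_files_internal(src, properties=properties, collect_result=False)  # returns None
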
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index d6a5b2a..42419d0 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -449,7 +449,9 @@
     # A contextmanager to stage a CAS directory tree in the local filesystem.
     #
     # This makes the specified directory tree temporarily available for local
-    # filesystem access. This may use FUSE or hardlinking.
+    # filesystem access. This may use FUSE, hardlinks, reflinks or file copies,
+    # depending on the system. The implementation makes sure that BuildStream
+    # and subprocesses cannot corrupt the cache by modifying staged files.
     #
     # Args:
     #     directory_digest (Digest): The digest of a directory
@@ -464,6 +466,9 @@
         request = local_cas_pb2.StageTreeRequest()
         request.root_digest.CopyFrom(directory_digest)
 
+        # Specify the credentials used to access the staged tree
+        request.access_credentials.uid = os.geteuid()
+
         done_event = threading.Event()
 
         def request_iterator():
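
A minimal sketch of building the stage request with the new field, assuming
`directory_digest` holds the digest of a directory as in the context above:

    import os

    from buildstream._protos.build.buildgrid import local_cas_pb2

    request = local_cas_pb2.StageTreeRequest()
    request.root_digest.CopyFrom(directory_digest)
    # Tell the server which UID will access the staged tree, so it can pick
    # a staging method (FUSE, hardlinks, reflinks or file copies) that grants
    # access without risking corruption of the local cache.
    request.access_credentials.uid = os.geteuid()
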
diff --git a/src/buildstream/_elementsources.py b/src/buildstream/_elementsources.py
index b13525e..c4e3be4 100644
--- a/src/buildstream/_elementsources.py
+++ b/src/buildstream/_elementsources.py
@@ -482,10 +482,10 @@
 
                         # Capture modified tree
                         vsubdir._clear()
-                        vsubdir.import_files(tmpdir)
+                        vsubdir.import_files(tmpdir, collect_result=False)
             else:
                 source_dir = self._sourcecache.export(source)
-                vsubdir.import_files(source_dir)
+                vsubdir.import_files(source_dir, collect_result=False)
 
         return vdir
 
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 8a0c3e0..0f60422 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -192,6 +192,14 @@
         junction_path = name.split(":")
         loader = self
 
+        #
+        # In this case we are attempting to load a subproject element via the
+        # command line instead of referencing the subproject through a project
+        # element or otherwise.
+        #
+        if provenance_node is None and load_subprojects:
+            self.project.ensure_fully_loaded()
+
         circular_provenance_node = self._loader_search_provenances.get(name, None)
         if circular_provenance_node and load_subprojects:
 
@@ -206,13 +214,13 @@
                 detail=detail,
             )
 
-        if load_subprojects:
+        if load_subprojects and provenance_node:
             self._loader_search_provenances[name] = provenance_node
 
         for junction_name in junction_path:
             loader = loader._get_loader(junction_name, provenance_node, load_subprojects=load_subprojects)
 
-        if load_subprojects:
+        if load_subprojects and provenance_node:
             del self._loader_search_provenances[name]
 
         return loader
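
A sketch of the two call paths this guard distinguishes, using the junction
path syntax from the tests below (names as in the hunks above):

    # Referenced from a project element: provenance is available.
    loader.get_loader("subproject.bst:target.bst", provenance_node)

    # Requested on the command line: no provenance, so the project must be
    # fully loaded before the junction path can be walked.
    loader.get_loader("subproject.bst:target.bst", None)
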
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index db79136..922787d 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -173,8 +173,13 @@
     #
     # Args:
     #    node (node): A YAML Loaded dictionary
+    #    restricted (List[str]): A list of restricted keys
     #
-    def process_node(self, node):
+    # If a restricted key is found in a conditional block such that it
+    # would be composited onto the root of "node", an error will be
+    # raised.
+    #
+    def process_node(self, node, *, restricted=None):
 
         # A conditional will result in composition, which can
         # in turn add new conditionals to the root.
@@ -182,7 +187,7 @@
         # Keep processing conditionals on the root node until
         # all directly nested conditionals are resolved.
         #
-        while self._process_one_node(node):
+        while self._process_one_node(node, restricted=restricted):
             pass
 
         # Now recurse into nested dictionaries and lists
@@ -254,7 +259,7 @@
     #
     # Return true if a conditional was processed.
     #
-    def _process_one_node(self, node):
+    def _process_one_node(self, node, *, restricted=None):
         conditions = node.get_sequence("(?)", default=None)
         assertion = node.get_str("(!)", default=None)
 
@@ -292,6 +297,15 @@
                         LoadErrorReason.ILLEGAL_COMPOSITE,
                     )
 
+                # Check for restricted keys, which are not allowed to be set conditionally
+                for key in restricted or []:
+                    if key in value:
+                        provenance = value.get_node(key).get_provenance()
+                        raise LoadError(
+                            "{}: The '{}' key cannot be specified conditionally.".format(provenance, key),
+                            LoadErrorReason.ILLEGAL_COMPOSITE,
+                        )
+
                 # Apply the yaml fragment if its condition evaluates to true
                 if apply_fragment:
                     value._composite(node)
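
The effect of the new `restricted` argument, sketched with a hypothetical
configuration fragment (the real call site is in the _project.py hunk below):

    # Hypothetical project.conf fragment that would now be rejected:
    #
    #   (?):
    #   - arch == "x86_64":
    #       name: renamed-project
    #
    # Compositing the matching condition would place "name" at the root of
    # the node, so process_node() raises LoadError(ILLEGAL_COMPOSITE):
    options.process_node(config, restricted=["name", "min-version"])
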
diff --git a/src/buildstream/_pluginfactory/pluginoriginjunction.py b/src/buildstream/_pluginfactory/pluginoriginjunction.py
index 724bd1f..cf0205c 100644
--- a/src/buildstream/_pluginfactory/pluginoriginjunction.py
+++ b/src/buildstream/_pluginfactory/pluginoriginjunction.py
@@ -36,7 +36,6 @@
         #
         loader = self.project.loader.get_loader(self._junction, self.provenance_node)
         project = loader.project
-        project.ensure_fully_loaded()
 
         # Now get the appropriate PluginFactory object
         #
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 62956ee..57b801a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -76,7 +76,6 @@
 #    parent_loader: The parent loader
 #    provenance_node: The YAML provenance causing this project to be loaded
 #    search_for_project: Whether to search for a project directory, e.g. from workspace metadata or parent directories
-#    load_project: Whether to attempt to load a project.conf
 #
 class Project:
     def __init__(
@@ -90,7 +89,6 @@
         parent_loader: Optional[Loader] = None,
         provenance_node: Optional[ProvenanceInformation] = None,
         search_for_project: bool = True,
-        load_project: bool = True,
     ):
         #
         # Public members
@@ -171,7 +169,7 @@
 
         self._context.add_project(self)
 
-        if self.directory and load_project:
+        if self.directory:
             with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, "-")):
                 self._load(parent_loader=parent_loader, provenance_node=provenance_node)
         else:
@@ -646,6 +644,7 @@
                 "source-caches",
                 "junctions",
                 "(@)",
+                "(?)",
             ]
         )
 
@@ -945,7 +944,24 @@
         # Now resolve any conditionals in the remaining configuration;
         # any conditionals specified for project option declarations,
         # or conditionally specifying the project name, will be ignored.
-        output.options.process_node(config)
+        #
+        # Specify any options that would be ignored in the restrict list
+        # so as to raise an appropriate error.
+        #
+        output.options.process_node(
+            config,
+            restricted=[
+                "min-version",
+                "name",
+                "element-path",
+                "junctions",
+                "defaults",
+                "fatal-warnings",
+                "ref-storage",
+                "options",
+                "plugins",
+            ],
+        )
 
         # Element and Source type configurations will be composited later onto
         # element/source types, so we delete it from here and run our final
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas.proto b/src/buildstream/_protos/build/buildgrid/local_cas.proto
index 378033e..b0e1e4b 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas.proto
+++ b/src/buildstream/_protos/build/buildgrid/local_cas.proto
@@ -100,6 +100,12 @@
   // If a CAS remote is configured, the blobs are uploaded.
   // The `bypass_local_cache` parameter is a hint to indicate whether the blobs
   // shall be uploaded without first storing them in the local cache.
+  //
+  // The `move_files` parameter is a hint to indicate that files could be
+  // moved into the storage. This can make capturing more efficient by
+  // avoiding copies when it is known that files will not be needed after they
+  // are imported. If a server chooses not to move them, the source files will
+  // still exist after this request.
   rpc CaptureTree(CaptureTreeRequest) returns (CaptureTreeResponse) {}
 
   // Capture files from the local filesystem.
@@ -109,6 +115,12 @@
   // If a CAS remote is configured, the blobs are uploaded.
   // The `bypass_local_cache` parameter is a hint to indicate whether the blobs
   // shall be uploaded without first storing them in the local cache.
+  //
+  // The `move_files` parameter is a hint to indicate that the files could be
+  // moved into the storage. This can make capturing more efficient by
+  // avoiding copies when it is known that files will not be needed after they
+  // are imported. If a server chooses not to move them, the source files will
+  // still exist after this request.
   rpc CaptureFiles(CaptureFilesRequest) returns (CaptureFilesResponse) {}
 
   // Configure remote CAS endpoint.
@@ -126,6 +138,13 @@
   // specified endpoints in further requests.
   rpc GetInstanceNameForRemotes(GetInstanceNameForRemotesRequest) returns (GetInstanceNameForRemotesResponse) {}
 
+  // Configure sandboxed clients.
+  //
+  // This returns a string that can be used as instance_name to access
+  // this service from clients running in the specified filesystem/mount
+  // namespace or chroot environment.
+  rpc GetInstanceNameForNamespace(GetInstanceNameForNamespaceRequest) returns (GetInstanceNameForNamespaceResponse) {}
+
   // Query total space used by the local cache.
   rpc GetLocalDiskUsage(GetLocalDiskUsageRequest) returns (GetLocalDiskUsageResponse) {}
 }
@@ -264,6 +283,15 @@
   // this staged tree will in that case be limited to the lifetime of the
   // parent.
   string path = 3;
+
+  message Credentials {
+    int64 uid = 1;
+  }
+
+  // The UNIX credentials of the processes that will access the staged tree.
+  // This will be used to ensure that the files have the right permissions
+  // for access without risking corruption of files in the local cache.
+  Credentials access_credentials = 4;
 }
 
 // A response message for
@@ -283,6 +311,11 @@
   // omitted.
   string instance_name = 1;
 
+  // The optional root path to restrict capture to a subtree.
+  // If specified, `path` will be resolved inside this root.
+  // No files outside the root will be captured.
+  string root = 6;
+
   // The path(s) in the local filesystem to capture.
   repeated string path = 2;
 
@@ -292,6 +325,11 @@
 
   // The properties of path(s) in the local filesystem to capture.
   repeated string node_properties = 4;
+
+  // Hints whether files can be moved into the storage.
+  // If enabled, they MUST NOT be modified after issuing this request in order
+  // to guarantee consistency.
+  bool move_files = 5;
 }
 
 // A response message for
@@ -325,6 +363,11 @@
   // omitted.
   string instance_name = 1;
 
+  // The optional root path to restrict capture to a subtree.
+  // If specified, `path` will be resolved inside this root.
+  // No files outside the root will be captured.
+  string root = 6;
+
   // The path(s) in the local filesystem to capture.
   repeated string path = 2;
 
@@ -334,6 +377,11 @@
 
   // The properties of path(s) in the local filesystem to capture.
   repeated string node_properties = 4;
+
+  // Hints whether the files can be moved into the storage.
+  // If enabled, they MUST NOT be modified after issuing this request in order
+  // to guarantee consistency.
+  bool move_files = 5;
 }
 
 // A response message for
@@ -425,8 +473,16 @@
 // A request message for
 // [LocalContentAddressableStorage.GetInstanceNameForRemotes][build.buildgrid.v2.LocalContentAddressableStorage.GetInstanceNameForRemotes].
 message GetInstanceNameForRemotesRequest {
+  // The instance of the execution system to operate against. A server may
+  // support multiple instances of the execution system (with their own workers,
+  // storage, caches, etc.). The server MAY require use of this field to select
+  // between them in an implementation-defined fashion, otherwise it can be
+  // omitted.
+  string instance_name = 3;
+
   Remote content_addressable_storage = 1;
   Remote remote_asset = 2;
+  Remote action_cache = 4;
 }
 
 // A response message for
@@ -435,6 +491,28 @@
   string instance_name = 1;
 }
 
+
+// A request message for
+// [LocalContentAddressableStorage.GetInstanceNameForNamespace][build.buildgrid.v2.LocalContentAddressableStorage.GetInstanceNameForNamespace].
+message GetInstanceNameForNamespaceRequest {
+  // The instance of the execution system to operate against. A server may
+  // support multiple instances of the execution system (with their own workers,
+  // storage, caches, etc.). The server MAY require use of this field to select
+  // between them in an implementation-defined fashion, otherwise it can be
+  // omitted.
+  string instance_name = 1;
+
+  // The root path of the mount namespace to restrict capture and staging.
+  // All paths in requests to the new instance will be resolved inside this root.
+  string root = 2;
+}
+
+// A response message for
+// [LocalContentAddressableStorage.GetInstanceNameForNamespace][build.buildgrid.v2.LocalContentAddressableStorage.GetInstanceNameForNamespace].
+message GetInstanceNameForNamespaceResponse {
+  string instance_name = 1;
+}
+
 // A request message for
 // [LocalContentAddressableStorage.GetLocalDiskUsage][build.buildgrid.v2.LocalContentAddressableStorage.GetLocalDiskUsage].
 message GetLocalDiskUsageRequest {
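
A minimal sketch of a capture request using the new fields, assuming
`instance_name` was obtained from one of the GetInstanceNameFor* calls:

    from buildstream._protos.build.buildgrid import local_cas_pb2

    request = local_cas_pb2.CaptureFilesRequest()
    request.instance_name = instance_name
    request.root = "/sandbox-root"          # optional: restrict capture to this root
    request.path.append("output/data.txt")  # resolved inside root when root is set
    request.node_properties.append("mtime")
    # Hint that the source files may be moved into the storage; the caller
    # must not modify them after issuing the request.
    request.move_files = True
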
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py b/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
index f446a0c..1bd65d5 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
+++ b/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
@@ -16,7 +16,7 @@
 from buildstream._protos.google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x62uild/buildgrid/local_cas.proto\x12\x0f\x62uild.buildgrid\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x17google/rpc/status.proto\"p\n\x18\x46\x65tchMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xcc\x01\n\x19\x46\x65tchMissingBlobsResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.build.buildgrid.FetchMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"q\n\x19UploadMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xce\x01\n\x1aUploadMissingBlobsResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.build.buildgrid.UploadMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\x81\x01\n\x10\x46\x65tchTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x18\n\x10\x66\x65tch_file_blobs\x18\x03 \x01(\x08\"\x13\n\x11\x46\x65tchTreeResponse\"h\n\x11UploadTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x14\n\x12UploadTreeResponse\"u\n\x10StageTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04path\x18\x03 \x01(\t\"!\n\x11StageTreeResponse\x12\x0c\n\x04path\x18\x01 \x01(\t\"n\n\x12\x43\x61ptureTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\"\xd3\x01\n\x13\x43\x61ptureTreeResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.build.buildgrid.CaptureTreeResponse.Response\x1az\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"o\n\x13\x43\x61ptureFilesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\"\xb8\x02\n\x14\x43\x61ptureFilesResponse\x12\x41\n\tresponses\x18\x01 \x03(\x0b\x32..build.buildgrid.CaptureFilesResponse.Response\x1a\xdc\x01\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12H\n\x0fnode_properties\x18\x06 \x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x05\x10\x06\"\x83\x01\n\x1fGetInstanceNameForRemoteRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 \x01(\t\x12\x13\n\x0bserver_cert\x18\x03 \x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"9\n GetInstanceNameForRemoteResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"j\n\x06Remote\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 \x01(\t\x12\x13\n\x0bserver_cert\x18\x03 
\x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"\x8f\x01\n GetInstanceNameForRemotesRequest\x12<\n\x1b\x63ontent_addressable_storage\x18\x01 \x01(\x0b\x32\x17.build.buildgrid.Remote\x12-\n\x0cremote_asset\x18\x02 \x01(\x0b\x32\x17.build.buildgrid.Remote\":\n!GetInstanceNameForRemotesResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\x1a\n\x18GetLocalDiskUsageRequest\"D\n\x19GetLocalDiskUsageResponse\x12\x12\n\nsize_bytes\x18\x01 \x01(\x03\x12\x13\n\x0bquota_bytes\x18\x02 \x01(\x03\x32\xbc\x08\n\x1eLocalContentAddressableStorage\x12l\n\x11\x46\x65tchMissingBlobs\x12).build.buildgrid.FetchMissingBlobsRequest\x1a*.build.buildgrid.FetchMissingBlobsResponse\"\x00\x12o\n\x12UploadMissingBlobs\x12*.build.buildgrid.UploadMissingBlobsRequest\x1a+.build.buildgrid.UploadMissingBlobsResponse\"\x00\x12T\n\tFetchTree\x12!.build.buildgrid.FetchTreeRequest\x1a\".build.buildgrid.FetchTreeResponse\"\x00\x12W\n\nUploadTree\x12\".build.buildgrid.UploadTreeRequest\x1a#.build.buildgrid.UploadTreeResponse\"\x00\x12X\n\tStageTree\x12!.build.buildgrid.StageTreeRequest\x1a\".build.buildgrid.StageTreeResponse\"\x00(\x01\x30\x01\x12Z\n\x0b\x43\x61ptureTree\x12#.build.buildgrid.CaptureTreeRequest\x1a$.build.buildgrid.CaptureTreeResponse\"\x00\x12]\n\x0c\x43\x61ptureFiles\x12$.build.buildgrid.CaptureFilesRequest\x1a%.build.buildgrid.CaptureFilesResponse\"\x00\x12\x81\x01\n\x18GetInstanceNameForRemote\x12\x30.build.buildgrid.GetInstanceNameForRemoteRequest\x1a\x31.build.buildgrid.GetInstanceNameForRemoteResponse\"\x00\x12\x84\x01\n\x19GetInstanceNameForRemotes\x12\x31.build.buildgrid.GetInstanceNameForRemotesRequest\x1a\x32.build.buildgrid.GetInstanceNameForRemotesResponse\"\x00\x12l\n\x11GetLocalDiskUsage\x12).build.buildgrid.GetLocalDiskUsageRequest\x1a*.build.buildgrid.GetLocalDiskUsageResponse\"\x00\x62\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x62uild/buildgrid/local_cas.proto\x12\x0f\x62uild.buildgrid\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x17google/rpc/status.proto\"p\n\x18\x46\x65tchMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xcc\x01\n\x19\x46\x65tchMissingBlobsResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.build.buildgrid.FetchMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"q\n\x19UploadMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xce\x01\n\x1aUploadMissingBlobsResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.build.buildgrid.UploadMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\x81\x01\n\x10\x46\x65tchTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x18\n\x10\x66\x65tch_file_blobs\x18\x03 \x01(\x08\"\x13\n\x11\x46\x65tchTreeResponse\"h\n\x11UploadTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x14\n\x12UploadTreeResponse\"\xdc\x01\n\x10StageTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04path\x18\x03 \x01(\t\x12I\n\x12\x61\x63\x63\x65ss_credentials\x18\x04 \x01(\x0b\x32-.build.buildgrid.StageTreeRequest.Credentials\x1a\x1a\n\x0b\x43redentials\x12\x0b\n\x03uid\x18\x01 \x01(\x03\"!\n\x11StageTreeResponse\x12\x0c\n\x04path\x18\x01 \x01(\t\"\x90\x01\n\x12\x43\x61ptureTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x06 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\x12\x12\n\nmove_files\x18\x05 \x01(\x08\"\xd3\x01\n\x13\x43\x61ptureTreeResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.build.buildgrid.CaptureTreeResponse.Response\x1az\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\x91\x01\n\x13\x43\x61ptureFilesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x06 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\x12\x12\n\nmove_files\x18\x05 \x01(\x08\"\xb8\x02\n\x14\x43\x61ptureFilesResponse\x12\x41\n\tresponses\x18\x01 \x03(\x0b\x32..build.buildgrid.CaptureFilesResponse.Response\x1a\xdc\x01\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12H\n\x0fnode_properties\x18\x06 \x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x05\x10\x06\"\x83\x01\n\x1fGetInstanceNameForRemoteRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 
\x01(\t\x12\x13\n\x0bserver_cert\x18\x03 \x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"9\n GetInstanceNameForRemoteResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"j\n\x06Remote\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 \x01(\t\x12\x13\n\x0bserver_cert\x18\x03 \x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"\xd5\x01\n GetInstanceNameForRemotesRequest\x12\x15\n\rinstance_name\x18\x03 \x01(\t\x12<\n\x1b\x63ontent_addressable_storage\x18\x01 \x01(\x0b\x32\x17.build.buildgrid.Remote\x12-\n\x0cremote_asset\x18\x02 \x01(\x0b\x32\x17.build.buildgrid.Remote\x12-\n\x0c\x61\x63tion_cache\x18\x04 \x01(\x0b\x32\x17.build.buildgrid.Remote\":\n!GetInstanceNameForRemotesResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"I\n\"GetInstanceNameForNamespaceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\t\"<\n#GetInstanceNameForNamespaceResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\x1a\n\x18GetLocalDiskUsageRequest\"D\n\x19GetLocalDiskUsageResponse\x12\x12\n\nsize_bytes\x18\x01 \x01(\x03\x12\x13\n\x0bquota_bytes\x18\x02 \x01(\x03\x32\xc9\t\n\x1eLocalContentAddressableStorage\x12l\n\x11\x46\x65tchMissingBlobs\x12).build.buildgrid.FetchMissingBlobsRequest\x1a*.build.buildgrid.FetchMissingBlobsResponse\"\x00\x12o\n\x12UploadMissingBlobs\x12*.build.buildgrid.UploadMissingBlobsRequest\x1a+.build.buildgrid.UploadMissingBlobsResponse\"\x00\x12T\n\tFetchTree\x12!.build.buildgrid.FetchTreeRequest\x1a\".build.buildgrid.FetchTreeResponse\"\x00\x12W\n\nUploadTree\x12\".build.buildgrid.UploadTreeRequest\x1a#.build.buildgrid.UploadTreeResponse\"\x00\x12X\n\tStageTree\x12!.build.buildgrid.StageTreeRequest\x1a\".build.buildgrid.StageTreeResponse\"\x00(\x01\x30\x01\x12Z\n\x0b\x43\x61ptureTree\x12#.build.buildgrid.CaptureTreeRequest\x1a$.build.buildgrid.CaptureTreeResponse\"\x00\x12]\n\x0c\x43\x61ptureFiles\x12$.build.buildgrid.CaptureFilesRequest\x1a%.build.buildgrid.CaptureFilesResponse\"\x00\x12\x81\x01\n\x18GetInstanceNameForRemote\x12\x30.build.buildgrid.GetInstanceNameForRemoteRequest\x1a\x31.build.buildgrid.GetInstanceNameForRemoteResponse\"\x00\x12\x84\x01\n\x19GetInstanceNameForRemotes\x12\x31.build.buildgrid.GetInstanceNameForRemotesRequest\x1a\x32.build.buildgrid.GetInstanceNameForRemotesResponse\"\x00\x12\x8a\x01\n\x1bGetInstanceNameForNamespace\x12\x33.build.buildgrid.GetInstanceNameForNamespaceRequest\x1a\x34.build.buildgrid.GetInstanceNameForNamespaceResponse\"\x00\x12l\n\x11GetLocalDiskUsage\x12).build.buildgrid.GetLocalDiskUsageRequest\x1a*.build.buildgrid.GetLocalDiskUsageResponse\"\x00\x62\x06proto3')
 
 
 
@@ -31,6 +31,7 @@
 _UPLOADTREEREQUEST = DESCRIPTOR.message_types_by_name['UploadTreeRequest']
 _UPLOADTREERESPONSE = DESCRIPTOR.message_types_by_name['UploadTreeResponse']
 _STAGETREEREQUEST = DESCRIPTOR.message_types_by_name['StageTreeRequest']
+_STAGETREEREQUEST_CREDENTIALS = _STAGETREEREQUEST.nested_types_by_name['Credentials']
 _STAGETREERESPONSE = DESCRIPTOR.message_types_by_name['StageTreeResponse']
 _CAPTURETREEREQUEST = DESCRIPTOR.message_types_by_name['CaptureTreeRequest']
 _CAPTURETREERESPONSE = DESCRIPTOR.message_types_by_name['CaptureTreeResponse']
@@ -43,6 +44,8 @@
 _REMOTE = DESCRIPTOR.message_types_by_name['Remote']
 _GETINSTANCENAMEFORREMOTESREQUEST = DESCRIPTOR.message_types_by_name['GetInstanceNameForRemotesRequest']
 _GETINSTANCENAMEFORREMOTESRESPONSE = DESCRIPTOR.message_types_by_name['GetInstanceNameForRemotesResponse']
+_GETINSTANCENAMEFORNAMESPACEREQUEST = DESCRIPTOR.message_types_by_name['GetInstanceNameForNamespaceRequest']
+_GETINSTANCENAMEFORNAMESPACERESPONSE = DESCRIPTOR.message_types_by_name['GetInstanceNameForNamespaceResponse']
 _GETLOCALDISKUSAGEREQUEST = DESCRIPTOR.message_types_by_name['GetLocalDiskUsageRequest']
 _GETLOCALDISKUSAGERESPONSE = DESCRIPTOR.message_types_by_name['GetLocalDiskUsageResponse']
 FetchMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('FetchMissingBlobsRequest', (_message.Message,), {
@@ -118,11 +121,19 @@
 _sym_db.RegisterMessage(UploadTreeResponse)
 
 StageTreeRequest = _reflection.GeneratedProtocolMessageType('StageTreeRequest', (_message.Message,), {
+
+  'Credentials' : _reflection.GeneratedProtocolMessageType('Credentials', (_message.Message,), {
+    'DESCRIPTOR' : _STAGETREEREQUEST_CREDENTIALS,
+    '__module__' : 'build.buildgrid.local_cas_pb2'
+    # @@protoc_insertion_point(class_scope:build.buildgrid.StageTreeRequest.Credentials)
+    })
+  ,
   'DESCRIPTOR' : _STAGETREEREQUEST,
   '__module__' : 'build.buildgrid.local_cas_pb2'
   # @@protoc_insertion_point(class_scope:build.buildgrid.StageTreeRequest)
   })
 _sym_db.RegisterMessage(StageTreeRequest)
+_sym_db.RegisterMessage(StageTreeRequest.Credentials)
 
 StageTreeResponse = _reflection.GeneratedProtocolMessageType('StageTreeResponse', (_message.Message,), {
   'DESCRIPTOR' : _STAGETREERESPONSE,
@@ -210,6 +221,20 @@
   })
 _sym_db.RegisterMessage(GetInstanceNameForRemotesResponse)
 
+GetInstanceNameForNamespaceRequest = _reflection.GeneratedProtocolMessageType('GetInstanceNameForNamespaceRequest', (_message.Message,), {
+  'DESCRIPTOR' : _GETINSTANCENAMEFORNAMESPACEREQUEST,
+  '__module__' : 'build.buildgrid.local_cas_pb2'
+  # @@protoc_insertion_point(class_scope:build.buildgrid.GetInstanceNameForNamespaceRequest)
+  })
+_sym_db.RegisterMessage(GetInstanceNameForNamespaceRequest)
+
+GetInstanceNameForNamespaceResponse = _reflection.GeneratedProtocolMessageType('GetInstanceNameForNamespaceResponse', (_message.Message,), {
+  'DESCRIPTOR' : _GETINSTANCENAMEFORNAMESPACERESPONSE,
+  '__module__' : 'build.buildgrid.local_cas_pb2'
+  # @@protoc_insertion_point(class_scope:build.buildgrid.GetInstanceNameForNamespaceResponse)
+  })
+_sym_db.RegisterMessage(GetInstanceNameForNamespaceResponse)
+
 GetLocalDiskUsageRequest = _reflection.GeneratedProtocolMessageType('GetLocalDiskUsageRequest', (_message.Message,), {
   'DESCRIPTOR' : _GETLOCALDISKUSAGEREQUEST,
   '__module__' : 'build.buildgrid.local_cas_pb2'
@@ -248,36 +273,42 @@
   _UPLOADTREEREQUEST._serialized_end=1035
   _UPLOADTREERESPONSE._serialized_start=1037
   _UPLOADTREERESPONSE._serialized_end=1057
-  _STAGETREEREQUEST._serialized_start=1059
-  _STAGETREEREQUEST._serialized_end=1176
-  _STAGETREERESPONSE._serialized_start=1178
-  _STAGETREERESPONSE._serialized_end=1211
-  _CAPTURETREEREQUEST._serialized_start=1213
-  _CAPTURETREEREQUEST._serialized_end=1323
-  _CAPTURETREERESPONSE._serialized_start=1326
-  _CAPTURETREERESPONSE._serialized_end=1537
-  _CAPTURETREERESPONSE_RESPONSE._serialized_start=1415
-  _CAPTURETREERESPONSE_RESPONSE._serialized_end=1537
-  _CAPTUREFILESREQUEST._serialized_start=1539
-  _CAPTUREFILESREQUEST._serialized_end=1650
-  _CAPTUREFILESRESPONSE._serialized_start=1653
-  _CAPTUREFILESRESPONSE._serialized_end=1965
-  _CAPTUREFILESRESPONSE_RESPONSE._serialized_start=1745
-  _CAPTUREFILESRESPONSE_RESPONSE._serialized_end=1965
-  _GETINSTANCENAMEFORREMOTEREQUEST._serialized_start=1968
-  _GETINSTANCENAMEFORREMOTEREQUEST._serialized_end=2099
-  _GETINSTANCENAMEFORREMOTERESPONSE._serialized_start=2101
-  _GETINSTANCENAMEFORREMOTERESPONSE._serialized_end=2158
-  _REMOTE._serialized_start=2160
-  _REMOTE._serialized_end=2266
-  _GETINSTANCENAMEFORREMOTESREQUEST._serialized_start=2269
-  _GETINSTANCENAMEFORREMOTESREQUEST._serialized_end=2412
-  _GETINSTANCENAMEFORREMOTESRESPONSE._serialized_start=2414
-  _GETINSTANCENAMEFORREMOTESRESPONSE._serialized_end=2472
-  _GETLOCALDISKUSAGEREQUEST._serialized_start=2474
-  _GETLOCALDISKUSAGEREQUEST._serialized_end=2500
-  _GETLOCALDISKUSAGERESPONSE._serialized_start=2502
-  _GETLOCALDISKUSAGERESPONSE._serialized_end=2570
-  _LOCALCONTENTADDRESSABLESTORAGE._serialized_start=2573
-  _LOCALCONTENTADDRESSABLESTORAGE._serialized_end=3657
+  _STAGETREEREQUEST._serialized_start=1060
+  _STAGETREEREQUEST._serialized_end=1280
+  _STAGETREEREQUEST_CREDENTIALS._serialized_start=1254
+  _STAGETREEREQUEST_CREDENTIALS._serialized_end=1280
+  _STAGETREERESPONSE._serialized_start=1282
+  _STAGETREERESPONSE._serialized_end=1315
+  _CAPTURETREEREQUEST._serialized_start=1318
+  _CAPTURETREEREQUEST._serialized_end=1462
+  _CAPTURETREERESPONSE._serialized_start=1465
+  _CAPTURETREERESPONSE._serialized_end=1676
+  _CAPTURETREERESPONSE_RESPONSE._serialized_start=1554
+  _CAPTURETREERESPONSE_RESPONSE._serialized_end=1676
+  _CAPTUREFILESREQUEST._serialized_start=1679
+  _CAPTUREFILESREQUEST._serialized_end=1824
+  _CAPTUREFILESRESPONSE._serialized_start=1827
+  _CAPTUREFILESRESPONSE._serialized_end=2139
+  _CAPTUREFILESRESPONSE_RESPONSE._serialized_start=1919
+  _CAPTUREFILESRESPONSE_RESPONSE._serialized_end=2139
+  _GETINSTANCENAMEFORREMOTEREQUEST._serialized_start=2142
+  _GETINSTANCENAMEFORREMOTEREQUEST._serialized_end=2273
+  _GETINSTANCENAMEFORREMOTERESPONSE._serialized_start=2275
+  _GETINSTANCENAMEFORREMOTERESPONSE._serialized_end=2332
+  _REMOTE._serialized_start=2334
+  _REMOTE._serialized_end=2440
+  _GETINSTANCENAMEFORREMOTESREQUEST._serialized_start=2443
+  _GETINSTANCENAMEFORREMOTESREQUEST._serialized_end=2656
+  _GETINSTANCENAMEFORREMOTESRESPONSE._serialized_start=2658
+  _GETINSTANCENAMEFORREMOTESRESPONSE._serialized_end=2716
+  _GETINSTANCENAMEFORNAMESPACEREQUEST._serialized_start=2718
+  _GETINSTANCENAMEFORNAMESPACEREQUEST._serialized_end=2791
+  _GETINSTANCENAMEFORNAMESPACERESPONSE._serialized_start=2793
+  _GETINSTANCENAMEFORNAMESPACERESPONSE._serialized_end=2853
+  _GETLOCALDISKUSAGEREQUEST._serialized_start=2855
+  _GETLOCALDISKUSAGEREQUEST._serialized_end=2881
+  _GETLOCALDISKUSAGERESPONSE._serialized_start=2883
+  _GETLOCALDISKUSAGERESPONSE._serialized_end=2951
+  _LOCALCONTENTADDRESSABLESTORAGE._serialized_start=2954
+  _LOCALCONTENTADDRESSABLESTORAGE._serialized_end=4179
 # @@protoc_insertion_point(module_scope)
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
index 2c39fc9..0117d3a 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
+++ b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
@@ -59,6 +59,11 @@
                 request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemotesRequest.SerializeToString,
                 response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemotesResponse.FromString,
                 )
+        self.GetInstanceNameForNamespace = channel.unary_unary(
+                '/build.buildgrid.LocalContentAddressableStorage/GetInstanceNameForNamespace',
+                request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceRequest.SerializeToString,
+                response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceResponse.FromString,
+                )
         self.GetLocalDiskUsage = channel.unary_unary(
                 '/build.buildgrid.LocalContentAddressableStorage/GetLocalDiskUsage',
                 request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.SerializeToString,
@@ -170,6 +175,12 @@
         If a CAS remote is configured, the blobs are uploaded.
         The `bypass_local_cache` parameter is a hint to indicate whether the blobs
         shall be uploaded without first storing them in the local cache.
+
+        The `move_files` parameter is a hint to indicate that files could be
+        moved into the storage. This can make capturing more efficient by
+        avoiding copies when it is known that files will not be needed after they
+        are imported. If a server chooses not to move them, the source files will
+        still exist after this request.
         """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
@@ -183,6 +194,12 @@
         If a CAS remote is configured, the blobs are uploaded.
         The `bypass_local_cache` parameter is a hint to indicate whether the blobs
         shall be uploaded without first storing them in the local cache.
+
+        The `move_files` parameter is a hint to indicate that the files could be
+        moved into the storage. This can make capturing more efficient by
+        avoiding copies when it is known that files will not be needed after they
+        are imported. If a server chooses not to move them, the source files will
+        still exist after this request.
         """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
@@ -211,6 +228,17 @@
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
+    def GetInstanceNameForNamespace(self, request, context):
+        """Configure sandboxed clients.
+
+        This returns a string that can be used as instance_name to access
+        this service from clients running in the specified filesystem/mount
+        namespace or chroot environment.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
     def GetLocalDiskUsage(self, request, context):
         """Query total space used by the local cache.
         """
@@ -266,6 +294,11 @@
                     request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemotesRequest.FromString,
                     response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemotesResponse.SerializeToString,
             ),
+            'GetInstanceNameForNamespace': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetInstanceNameForNamespace,
+                    request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceRequest.FromString,
+                    response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceResponse.SerializeToString,
+            ),
             'GetLocalDiskUsage': grpc.unary_unary_rpc_method_handler(
                     servicer.GetLocalDiskUsage,
                     request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.FromString,
@@ -435,6 +468,23 @@
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
+    def GetInstanceNameForNamespace(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/GetInstanceNameForNamespace',
+            build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceRequest.SerializeToString,
+            build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForNamespaceResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
     def GetLocalDiskUsage(request,
             target,
             options=(),
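
A sketch of configuring a sandboxed client, assuming `channel` is an open
gRPC channel to the local CAS server:

    from buildstream._protos.build.buildgrid import local_cas_pb2, local_cas_pb2_grpc

    stub = local_cas_pb2_grpc.LocalContentAddressableStorageStub(channel)
    request = local_cas_pb2.GetInstanceNameForNamespaceRequest()
    request.root = "/path/to/sandbox-root"
    response = stub.GetInstanceNameForNamespace(request)
    # All paths in requests made with this instance name are resolved
    # inside the sandbox root.
    sandbox_instance_name = response.instance_name
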
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index be52090..24776ff 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -76,7 +76,7 @@
         if not source.BST_STAGE_VIRTUAL_DIRECTORY:
             with utils._tempdir(dir=self.context.tmpdir, prefix="staging-temp") as tmpdir:
                 source._stage(tmpdir)
-                vdir.import_files(tmpdir)
+                vdir.import_files(tmpdir, collect_result=False)
         else:
             source._stage(vdir)
 
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 1d6ea80..2323b76 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -978,6 +978,7 @@
         split_filter = self.__split_filter_func(include, exclude, orphans)
 
         result = vstagedir._import_files_internal(files_vdir, filter_callback=split_filter)
+        assert result is not None
 
         owner._overlap_collector.collect_stage_result(self, result)
 
@@ -1488,7 +1489,7 @@
                     import_dir = staged_sources
 
             # Set update_mtime to ensure deterministic mtime of sources at build time
-            vdirectory._import_files_internal(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP)
+            vdirectory._import_files_internal(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP, collect_result=False)
 
         # Ensure deterministic owners of sources at build time
         vdirectory._set_deterministic_user()
@@ -1878,9 +1879,9 @@
         # Attempt to pull artifact with the weak cache key
         pulled = pull and artifact.pull(pull_buildtrees=pull_buildtrees)
 
-        # When building in non-strict mode, we ignore a failed artifact unless it has the
-        # expected strong key, this ensures that failed builds will be retried whenever
-        # dependencies have changed.
+        # Automatically retry failed builds in non-strict mode, because
+        # dependencies may have changed since the last build, which might
+        # allow this failed build to succeed.
         #
         # When not building (e.g. `bst show`, `bst artifact push` etc), we do not drop
         # the failed artifact, the retry only occurs at build time.
@@ -1888,12 +1889,19 @@
         if context.build and artifact.cached():
             success, _, _ = artifact.load_build_result()
             if not success:
-
-                # Calculate what the cache key would be for this artifact, if we were going to build it
-                cache_key = self.__calculate_strong_cache_key()
-                assert cache_key is not None
-
-                if artifact.strong_key != cache_key:
+                #
+                # If we could resolve the strong cache key for this element at this time,
+                # we could compare the artifact key against the resolved strong key.
+                #
+                # If we could assert that artifact state is never consulted in advance
+                # of resolving the strong key, then we could discard the loaded artifact
+                # at that time instead.
+                #
+                # Since neither of these are true, we settle for always retrying a failed
+                # build in non-strict mode unless the failed artifact's strong key is
+                # equal to the resolved strict key.
+                #
+                if artifact.strong_key != self.__strict_cache_key:
                     artifact = Artifact(
                         self,
                         context,
@@ -3155,19 +3163,6 @@
 
         self.__build_result = self.__artifact.load_build_result()
 
-    # __calculate_strong_cache_key():
-    #
-    # Convenience function for calculating the strong cache key
-    #
-    # This will return the strong cache key if all of the dependencies have cache
-    # keys available, otherwise it will return None.
-    #
-    def __calculate_strong_cache_key(self):
-        assert self.__weak_cache_key is not None
-
-        dependencies = [[e.project_name, e.name, e._get_cache_key()] for e in self._dependencies(_Scope.BUILD)]
-        return self._calculate_cache_key(dependencies, self.__weak_cache_key)
-
     # __update_cache_keys()
     #
     # Updates weak and strict cache keys
@@ -3276,7 +3271,10 @@
                 self.__cache_key = strong_key
             elif self.__assemble_scheduled or self.__assemble_done:
                 # Artifact will or has been built, not downloaded
-                self.__cache_key = self.__calculate_strong_cache_key()
+                assert self.__weak_cache_key is not None
+
+                dependencies = [[e.project_name, e.name, e._get_cache_key()] for e in self._dependencies(_Scope.BUILD)]
+                self.__cache_key = self._calculate_cache_key(dependencies, self.__weak_cache_key)
 
             if self.__cache_key is None:
                 # Strong cache key could not be calculated yet
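
Condensed, the retry decision after this change reads as follows (attribute
names as in the hunks above; construction of the replacement artifact elided):

    if context.build and artifact.cached():
        success, _, _ = artifact.load_build_result()
        if not success and artifact.strong_key != self.__strict_cache_key:
            # Discard the failed artifact so the build is retried;
            # dependencies may have changed since it failed.
            artifact = Artifact(self, context, ...)
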
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 176b28d..90446c8 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -170,7 +170,7 @@
 
         with self.timed_activity("Creating composition", detail=detail, silent_nested=True):
             self.info("Composing {} files".format(len(manifest)))
-            installdir.import_files(vbasedir, filter_callback=import_filter)
+            installdir.import_files(vbasedir, filter_callback=import_filter, collect_result=False)
 
         # And we're done
         return os.path.join(os.sep, "buildstream", "install")
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index ba6e443..8a16a9f 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -83,7 +83,7 @@
             raise ElementError("{}: No files were found inside directory '{}'".format(self, self.source))
 
         # Move it over
-        outputdir.import_files(inputdir)
+        outputdir.import_files(inputdir, collect_result=False)
 
         # And we're done
         return "/output"
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index 54e7679..0928ef1 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -100,7 +100,7 @@
         assert isinstance(directory, Directory)
         assert self.__digest is not None
         with self._cache_directory(digest=self.__digest) as cached_directory:
-            directory.import_files(cached_directory)
+            directory.import_files(cached_directory, collect_result=False)
 
     def init_workspace_directory(self, directory):
         #
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 9127b7c..0f174f2 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -108,7 +108,7 @@
         assert isinstance(directory, Directory)
         assert self.__digest is not None
         with self._cache_directory(digest=self.__digest) as cached_directory:
-            directory._import_files_internal(cached_directory)
+            directory._import_files_internal(cached_directory, collect_result=False)
 
     # As a core element, we speed up some scenarios when this is used for
     # a junction, by providing the local path to this content directly.
@@ -124,6 +124,7 @@
         assert isinstance(directory, Directory)
         with self.timed_activity("Staging local files"):
             result = directory._import_files_internal(self.path, properties=["mtime"])
+            assert result is not None
 
             if result.overwritten or result.ignored:
                 raise SourceError(
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 97ce72c..f1dd935 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -359,9 +359,10 @@
         *,
         filter_callback: Optional[Callable[[str], bool]] = None,
         update_mtime: Optional[float] = None,
-        properties: Optional[List[str]] = None
-    ) -> FileListResult:
-        result = FileListResult()
+        properties: Optional[List[str]] = None,
+        collect_result: bool = True
+    ) -> Optional[FileListResult]:
+        result = FileListResult() if collect_result else None
 
         # See if we can get a source directory to copy from
         source_directory: Optional[str] = None
@@ -722,7 +723,7 @@
     # fileListResult.overwritten and fileListResult.ignored are updated depending
     # on the result.
     #
-    def __check_replacement(self, name: str, relative_pathname: str, fileListResult: FileListResult) -> bool:
+    def __check_replacement(self, name: str, relative_pathname: str, fileListResult: Optional[FileListResult]) -> bool:
         existing_entry = self.__index.get(name)
         if existing_entry is None:
             return True
@@ -732,15 +733,18 @@
             subdir = existing_entry.get_directory(self)
             if not subdir:
                 self.remove(name)
-                fileListResult.overwritten.append(relative_pathname)
+                if fileListResult is not None:
+                    fileListResult.overwritten.append(relative_pathname)
                 return True
             else:
                 # We can't overwrite a non-empty directory, so we just ignore it.
-                fileListResult.ignored.append(relative_pathname)
+                if fileListResult is not None:
+                    fileListResult.ignored.append(relative_pathname)
                 return False
         else:
             self.remove(name)
-            fileListResult.overwritten.append(relative_pathname)
+            if fileListResult is not None:
+                fileListResult.overwritten.append(relative_pathname)
             return True
 
     # __partial_import_cas_into_cas()
@@ -754,7 +758,7 @@
         *,
         path_prefix: str = "",
         origin: "CasBasedDirectory" = None,
-        result: FileListResult
+        result: Optional[FileListResult]
     ) -> None:
         if origin is None:
             origin = self
@@ -790,7 +794,8 @@
                     else:
                         subdir = dest_entry.get_directory(self)
 
-                    subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
+                    if result is not None:
+                        subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
                 else:
                     src_subdir = source_directory.open_directory(name)
                     if src_subdir == origin:
@@ -823,7 +828,8 @@
                     else:
                         assert entry.type == FileType.SYMLINK
                         self.__add_new_link_direct(name=name, target=entry.target)
-                    result.files_written.append(relative_pathname)
+                    if result is not None:
+                        result.files_written.append(relative_pathname)
 
     # __list_prefixed_relative_paths()
     #
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 408256a..cf234ea 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -237,7 +237,8 @@
         *,
         filter_callback: Optional[Callable[[str], bool]] = None,
         update_mtime: Optional[float] = None,
-        properties: Optional[List[str]] = None
+        properties: Optional[List[str]] = None,
+        collect_result: bool = True
     ) -> FileListResult:
 
         # See if we can get a source directory to copy from
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index ab55bbe..021af41 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -171,7 +171,8 @@
         external_pathspec: Union["Directory", str],
         *,
         filter_callback: Optional[Callable[[str], bool]] = None,
-    ) -> FileListResult:
+        collect_result: bool = True
+    ) -> Optional[FileListResult]:
         """Imports some or all files from external_path into this directory.
 
         Args:
@@ -181,9 +182,11 @@
                             relative path as argument for every file in the source directory.
                             The file is imported only if the callable returns True.
                             If no filter callback is specified, all files will be imported.
+           collect_result: Whether to collect data for the :class:`.FileListResult`, defaults to True.
 
         Returns:
-           A :class:`.FileListResult` report of files imported and overwritten.
+           A :class:`.FileListResult` report of files imported and overwritten,
+           or `None` if `collect_result` is False.
 
         Raises:
            DirectoryError: if any system error occurs.
@@ -191,6 +194,7 @@
         return self._import_files_internal(
             external_pathspec,
             filter_callback=filter_callback,
+            collect_result=collect_result,
         )
 
     def import_single_file(self, external_pathspec: str) -> FileListResult:
@@ -390,9 +394,11 @@
     #                    If no filter callback is specified, all files will be imported.
     #   update_mtime: Update the access and modification time of each file copied to the time specified in seconds.
     #   properties: Optional list of strings representing file properties to capture when importing.
+    #   collect_result: Whether to collect data for the :class:`.FileListResult`, defaults to True.
     #
     # Returns:
-    #    A :class:`.FileListResult` report of files imported and overwritten.
+    #    A :class:`.FileListResult` report of files imported and overwritten,
+    #    or `None` if `collect_result` is False.
     #
     # Raises:
     #    DirectoryError: if any system error occurs.
@@ -404,12 +410,14 @@
         filter_callback: Optional[Callable[[str], bool]] = None,
         update_mtime: Optional[float] = None,
         properties: Optional[List[str]] = None,
-    ) -> FileListResult:
+        collect_result: bool = True
+    ) -> Optional[FileListResult]:
         return self._import_files(
             external_pathspec,
             filter_callback=filter_callback,
             update_mtime=update_mtime,
             properties=properties,
+            collect_result=collect_result,
         )
 
     # _import_files()
@@ -425,9 +433,11 @@
     #                    If no filter callback is specified, all files will be imported.
     #   update_mtime: Update the access and modification time of each file copied to the time specified in seconds.
     #   properties: Optional list of strings representing file properties to capture when importing.
+    #   collect_result: Whether to collect data for the :class:`.FileListResult`, defaults to True.
     #
     # Returns:
-    #    A :class:`.FileListResult` report of files imported and overwritten.
+    #    A :class:`.FileListResult` report of files imported and overwritten,
+    #    or `None` if `collect_result` is False.
     #
     # Raises:
     #    DirectoryError: if any system error occurs.
@@ -439,7 +449,8 @@
         filter_callback: Optional[Callable[[str], bool]] = None,
         update_mtime: Optional[float] = None,
         properties: Optional[List[str]] = None,
-    ) -> FileListResult:
+        collect_result: bool = True
+    ) -> Optional[FileListResult]:
         raise NotImplementedError()
 
     # _export_files()
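
Usage of the public API after this change, a minimal sketch with two
directory objects assumed:

    result = destdir.import_files(srcdir)
    report_lists = (result.files_written, result.overwritten, result.ignored)

    # When the report is not needed, skip the bookkeeping entirely:
    assert destdir.import_files(srcdir, collect_result=False) is None
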
diff --git a/tests/format/junctions.py b/tests/format/junctions.py
index c961539..6f0a5d1 100644
--- a/tests/format/junctions.py
+++ b/tests/format/junctions.py
@@ -880,3 +880,43 @@
     result.assert_success()
     result_vars = _yaml.load_data(result.output)
     assert result_vars.get_str("resolved") == expected_result
+
+
+# This test verifies that project option conditional statements made
+# in an include file are resolved in the context of the project where
+# the include file originates.
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize(
+    "target",
+    ["target.bst", "subproject.bst:target.bst"],
+    ids=["toplevel-target", "subproject-target"],
+)
+@pytest.mark.parametrize(
+    "animal,expected_result",
+    [
+        ("pony", "target pony"),
+        ("horsy", "target horsy"),
+    ],
+    ids=["branch1", "branch2"],
+)
+def test_include_vars_cross_junction_element(cli, datafiles, target, animal, expected_result):
+    project = os.path.join(str(datafiles), "include-complex")
+    result = cli.run(
+        project=project,
+        silent=True,
+        args=[
+            "--option",
+            "animal",
+            animal,
+            "show",
+            "--deps",
+            "none",
+            "--format",
+            "%{vars}",
+            target,
+        ],
+    )
+    result.assert_success()
+    result_vars = _yaml.load_data(result.output)
+    assert result_vars.get_str("target_animal_variable") == expected_result
diff --git a/tests/format/junctions/include-complex/project.conf b/tests/format/junctions/include-complex/project.conf
new file mode 100644
index 0000000..eb7f3c7
--- /dev/null
+++ b/tests/format/junctions/include-complex/project.conf
@@ -0,0 +1,15 @@
+name: test
+min-version: 2.0
+
+(@):
+- subproject.bst:include.yml
+
+options:
+  animal:
+    type: enum
+    description: the animal
+    values:
+    - pony
+    - horsy
+    default: pony
+    variable: animal
diff --git a/tests/format/junctions/include-complex/subproject.bst b/tests/format/junctions/include-complex/subproject.bst
new file mode 100644
index 0000000..daaea30
--- /dev/null
+++ b/tests/format/junctions/include-complex/subproject.bst
@@ -0,0 +1,8 @@
+kind: junction
+sources:
+- kind: local
+  path: subproject
+
+config:
+  options:
+    target_animal: '%{animal}'
diff --git a/tests/format/junctions/include-complex/subproject/include.yml b/tests/format/junctions/include-complex/subproject/include.yml
new file mode 100644
index 0000000..e47d22f
--- /dev/null
+++ b/tests/format/junctions/include-complex/subproject/include.yml
@@ -0,0 +1,8 @@
+
+variables:
+  target_animal_variable: "no target animal"
+  (?):
+  - target_animal == "pony":
+      target_animal_variable: "target pony"
+  - target_animal == "horsy":
+      target_animal_variable: "target horsy"
diff --git a/tests/format/junctions/include-complex/subproject/project.conf b/tests/format/junctions/include-complex/subproject/project.conf
new file mode 100644
index 0000000..ad8a3b3
--- /dev/null
+++ b/tests/format/junctions/include-complex/subproject/project.conf
@@ -0,0 +1,11 @@
+name: subtest
+min-version: 2.0
+
+options:
+  target_animal:
+    type: enum
+    description: the target animal
+    values:
+    - pony
+    - horsy
+    default: pony
diff --git a/tests/format/junctions/include-complex/subproject/target.bst b/tests/format/junctions/include-complex/subproject/target.bst
new file mode 100644
index 0000000..9349a18
--- /dev/null
+++ b/tests/format/junctions/include-complex/subproject/target.bst
@@ -0,0 +1,4 @@
+kind: stack
+
+(@):
+- include.yml
diff --git a/tests/format/junctions/include-complex/target.bst b/tests/format/junctions/include-complex/target.bst
new file mode 100644
index 0000000..7cb4b6c
--- /dev/null
+++ b/tests/format/junctions/include-complex/target.bst
@@ -0,0 +1,4 @@
+kind: manual
+
+depends:
+- subproject.bst:target.bst
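
Taken together, the include-complex fixtures wire the toplevel option through to the subproject: `project.conf` exposes `animal` as the `%{animal}` variable, the junction's `config: options:` block forwards it as the subproject's `target_animal` option, and `include.yml` resolves its `(?)` conditional against `target_animal` in the subproject's own context. `target_animal_variable` therefore tracks the command-line option regardless of whether the include is reached via the toplevel `project.conf` or via the subproject's own `target.bst`.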
diff --git a/tests/format/option-element-override/element.bst b/tests/format/option-element-override/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/option-element-override/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/option-element-override/project.conf b/tests/format/option-element-override/project.conf
new file mode 100644
index 0000000..63df718
--- /dev/null
+++ b/tests/format/option-element-override/project.conf
@@ -0,0 +1,24 @@
+name: test
+min-version: 2.0
+
+options:
+  animal:
+    type: enum
+    description: The kind of animal
+    values:
+    - pony
+    - horsy
+    default: pony
+
+variables:
+  result: "a sloppy joe"
+
+elements:
+  manual:
+    (?):
+    - animal == "pony":
+        variables:
+          result: "a pony"
+    - animal == "horsy":
+        variables:
+          result: "a horsy"
diff --git a/tests/format/option-project-root/element.bst b/tests/format/option-project-root/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/option-project-root/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/option-project-root/project.conf b/tests/format/option-project-root/project.conf
new file mode 100644
index 0000000..eb254bc
--- /dev/null
+++ b/tests/format/option-project-root/project.conf
@@ -0,0 +1,19 @@
+name: test
+min-version: 2.0
+
+options:
+  animal:
+    type: enum
+    description: The kind of animal
+    values:
+    - pony
+    - horsy
+    default: pony
+
+(?):
+- animal == "pony":
+    variables:
+      result: "a pony"
+- animal == "horsy":
+    variables:
+      result: "a horsy"
diff --git a/tests/format/option-restricted-name/element.bst b/tests/format/option-restricted-name/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/option-restricted-name/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/option-restricted-name/project.conf b/tests/format/option-restricted-name/project.conf
new file mode 100644
index 0000000..e3e7c00
--- /dev/null
+++ b/tests/format/option-restricted-name/project.conf
@@ -0,0 +1,17 @@
+name: test
+min-version: 2.0
+
+options:
+  animal:
+    type: enum
+    description: The kind of animal
+    values:
+    - pony
+    - horsy
+    default: pony
+
+(?):
+- animal == "pony":
+    name: ponyproject
+- animal == "horsy":
+    name: horsyproject
diff --git a/tests/format/option-restricted-options/element.bst b/tests/format/option-restricted-options/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/option-restricted-options/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/option-restricted-options/project.conf b/tests/format/option-restricted-options/project.conf
new file mode 100644
index 0000000..650bc44
--- /dev/null
+++ b/tests/format/option-restricted-options/project.conf
@@ -0,0 +1,25 @@
+name: test
+min-version: 2.0
+
+options:
+  animal:
+    type: enum
+    description: The kind of animal
+    values:
+    - pony
+    - horsy
+    default: pony
+
+(?):
+- animal == "pony":
+    options:
+      pony:
+        type: bool
+        description: Whether a pony or not
+        default: False
+- animal == "horsy":
+    options:
+      horsy:
+        type: bool
+        description: Whether a horsy or not
+        default: False
diff --git a/tests/format/optionprojectroot.py b/tests/format/optionprojectroot.py
new file mode 100644
index 0000000..8f26b4a
--- /dev/null
+++ b/tests/format/optionprojectroot.py
@@ -0,0 +1,68 @@
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+from buildstream import _yaml
+from buildstream.exceptions import ErrorDomain, LoadErrorReason
+from buildstream._testing.runcli import cli  # pylint: disable=unused-import
+
+# Project directory
+DATA_DIR = os.path.dirname(os.path.realpath(__file__))
+
+
+#
+# Test that project option conditionals can be resolved in the project root
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("value,expected", [("pony", "a pony"), ("horsy", "a horsy")], ids=["pony", "horsy"])
+def test_resolve_project_root_conditional(cli, datafiles, value, expected):
+    project = os.path.join(datafiles.dirname, datafiles.basename, "option-project-root")
+    result = cli.run(
+        project=project,
+        silent=True,
+        args=["--option", "animal", value, "show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+    )
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded.get_str("result") == expected
+
+
+#
+# Test that project option conditionals can be resolved in element overrides
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("value,expected", [("pony", "a pony"), ("horsy", "a horsy")], ids=["pony", "horsy"])
+def test_resolve_element_override_conditional(cli, datafiles, value, expected):
+    project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-override")
+    result = cli.run(
+        project=project,
+        silent=True,
+        args=["--option", "animal", value, "show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+    )
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded.get_str("result") == expected
+
+
+#
+# Test that restricted keys error out correctly if specified conditionally
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize(
+    "project_dir,provenance",
+    [
+        ("option-restricted-name", "project.conf [line 15 column 10]"),
+        ("option-restricted-options", "project.conf [line 16 column 6]"),
+    ],
+    ids=["name", "options"],
+)
+def test_restricted_conditionals(cli, datafiles, project_dir, provenance):
+    project = os.path.join(datafiles.dirname, datafiles.basename, project_dir)
+    result = cli.run(
+        project=project,
+        silent=True,
+        args=["show", "element.bst"],
+    )
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.ILLEGAL_COMPOSITE)
+    assert provenance in result.stderr
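
A note on the expected provenance strings above: each points into the default (`pony`) branch of the `(?)` block — line 15 of option-restricted-name/project.conf is the illegal `name` override, and line 16 of option-restricted-options/project.conf is the nested `options` key — so the test also verifies that the ILLEGAL_COMPOSITE error is reported at the offending composited key rather than at the top of the file.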
diff --git a/tests/plugins/junction-with-junction/element.bst b/tests/plugins/junction-with-junction/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/plugins/junction-with-junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/plugins/junction-with-junction/project.conf b/tests/plugins/junction-with-junction/project.conf
new file mode 100644
index 0000000..b64a6b1
--- /dev/null
+++ b/tests/plugins/junction-with-junction/project.conf
@@ -0,0 +1,11 @@
+name: test
+min-version: 2.0
+
+plugins:
+- origin: junction
+  junction: sample-plugins.bst
+  sources:
+  - git
+
+(@):
+- subproject.bst:variables.yml
diff --git a/tests/plugins/junction-with-junction/subproject/project.conf b/tests/plugins/junction-with-junction/subproject/project.conf
new file mode 100644
index 0000000..7d535b6
--- /dev/null
+++ b/tests/plugins/junction-with-junction/subproject/project.conf
@@ -0,0 +1,2 @@
+name: subproject-test
+min-version: 2.0
diff --git a/tests/plugins/junction-with-junction/subproject/target.bst b/tests/plugins/junction-with-junction/subproject/target.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/plugins/junction-with-junction/subproject/target.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/plugins/junction-with-junction/subproject/variables.yml b/tests/plugins/junction-with-junction/subproject/variables.yml
new file mode 100644
index 0000000..c5ade0f
--- /dev/null
+++ b/tests/plugins/junction-with-junction/subproject/variables.yml
@@ -0,0 +1,4 @@
+
+
+variables:
+  animal: pony
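
These fixtures tie the two junctions together: project.conf loads the `git` source plugin through the `sample-plugins.bst` junction (which the test below generates from a tar repo), and at the same time includes `variables.yml` across `subproject.bst` — itself a git junction that can only load once that plugin is available. This is the loader-ordering knot the regression test exercises.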
diff --git a/tests/plugins/loading.py b/tests/plugins/loading.py
index 2863133..4764649 100644
--- a/tests/plugins/loading.py
+++ b/tests/plugins/loading.py
@@ -12,8 +12,10 @@
 
 from buildstream.exceptions import ErrorDomain, LoadErrorReason
 from buildstream._testing import cli  # pylint: disable=unused-import
+from buildstream._testing import create_repo
 from buildstream import _yaml
 
+from tests.testutils.repo.git import Git
 from tests.testutils.site import pip_sample_packages  # pylint: disable=unused-import
 from tests.testutils.site import SAMPLE_PACKAGES_SKIP_REASON
 
@@ -823,3 +825,80 @@
     result = cli.run(project=project, args=["show", "element.bst"])
     result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
     assert provenance in result.stderr
+
+
+# Test scenario for junction plugin origins
+# =========================================
+#
+# This is a regression test which ensures that cross junction includes
+# at the project.conf level continue to work even in conjunction with
+# complex cross junction plugin loading scenarios.
+#
+#         main project
+#         /           \
+#        |             |
+#  junction (tar)      |
+#        |             | include a file across this junction
+#        |             |
+#        /             |
+#  git plugin           \
+#                        \
+#                  junction (git)
+#                         |
+#                         |
+#                     subproject
+#
+#
+# `bst source track subproject.bst`
+#
+#
+JUNCTION_DATA_DIR = os.path.dirname(os.path.realpath(__file__))
+
+
+@pytest.mark.datafiles(JUNCTION_DATA_DIR)
+def test_load_junction_via_junctioned_plugin(cli, datafiles, tmpdir):
+    sample_plugins_dir = os.path.join(str(datafiles), "sample-plugins")
+    project = os.path.join(str(datafiles), "junction-with-junction")
+    subproject = os.path.join(str(datafiles), "junction-with-junction", "subproject")
+
+    # Create a tar repo containing the sample plugins
+    #
+    repo = create_repo("tar", str(tmpdir))
+    ref = repo.create(sample_plugins_dir)
+
+    # Generate the junction to the sample plugins
+    #
+    element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
+    _yaml.roundtrip_dump(element, os.path.join(project, "sample-plugins.bst"))
+
+    # Create a git repo containing the subproject
+    #
+    subproject_repo = Git(str(tmpdir))
+    subproject_repo.create(subproject)
+
+    # Generate the subproject junction pointing to the git repo with the subproject
+    #
+    element = {"kind": "junction", "sources": [subproject_repo.source_config()]}
+    _yaml.roundtrip_dump(element, os.path.join(project, "subproject.bst"))
+
+    # Track the subproject
+    #
+    result = cli.run(project=project, args=["source", "track", "subproject.bst"])
+    result.assert_success()
+
+    # Check the included variable resolves in the element
+    #
+    result = cli.run(
+        project=project,
+        silent=True,
+        args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+    )
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded.get_str("animal") == "pony"
+
+    # Try accessing a subproject element directly on the command line,
+    # as this scenario has the potential to break it.
+    #
+    result = cli.run(project=project, args=["show", "subproject.bst:target.bst"])
+    result.assert_success()
diff --git a/tests/plugins/sample-plugins/project.conf b/tests/plugins/sample-plugins/project.conf
new file mode 100644
index 0000000..60fd372
--- /dev/null
+++ b/tests/plugins/sample-plugins/project.conf
@@ -0,0 +1,15 @@
+name: sample-plugins
+min-version: 2.0
+
+plugins:
+- origin: local
+  path: src/sample_plugins/elements
+  elements:
+  - autotools
+  - sample
+
+- origin: local
+  path: src/sample_plugins/sources
+  sources:
+  - git
+  - sample