Merge pull request #1442 from apache/tristan/optional-project

Stop requiring project.conf for all commands
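
With this change, commands which operate purely on artifact names (pull, log,
show, delete, checkout, list-contents) no longer require a project.conf to be
present, while commands which must load elements or workspaces now fail with a
"project-not-loaded" stream error instead of a load error.

A rough sketch of the expected behavior, based on the tests updated below
(the <key> placeholder stands for a real cache key, and pony.bst is just an
example element name from the test data):

    # Now works outside of any project, operating on artifact names directly
    bst artifact pull test/target/<key>
    bst artifact log test/target/<key>
    bst artifact show test/target/<key>

    # Still requires a project, reported as a "project-not-loaded" error
    bst show pony.bst
    bst workspace list
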
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 9937470..ce1610f 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -284,23 +284,24 @@
                     cli_options=self._main_options["option"],
                     default_mirror=self._main_options.get("default_mirror"),
                 )
-
-                self.stream.set_project(self.project)
             except LoadError as e:
 
-                # Help users that are new to BuildStream by suggesting 'init'.
-                # We don't want to slow down users that just made a mistake, so
-                # don't stop them with an offer to create a project for them.
-                if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
-                    click.echo("No project found. You can create a new project like so:", err=True)
-                    click.echo("", err=True)
-                    click.echo("    bst init", err=True)
-
-                self._error_exit(e, "Error loading project")
+                # If there was no project.conf at all, then there simply was no project found.
+                #
+                # Don't error out in this case, as Stream() supports some operations which
+                # do not require a project. If Stream() requires a project and it is missing,
+                # then it will raise an error.
+                #
+                if e.reason != LoadErrorReason.MISSING_PROJECT_CONF:
+                    self._error_exit(e, "Error loading project")
 
             except BstError as e:
                 self._error_exit(e, "Error loading project")
 
+            # Set the project on the Stream, this can be None if there is no project.
+            #
+            self.stream.set_project(self.project)
+
             # Run the body of the session here, once everything is loaded
             try:
                 yield
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 0d5379f..dfc340a 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -444,7 +444,7 @@
     # and so on.
     #
     # Args:
-    #    toplevel_project (Project): The toplevel project we were invoked from
+    #    toplevel_project (Project): The toplevel project we were invoked from, or None
     #    stream (Stream): The stream
     #    log_file (file): An optional file handle for additional logging
     #
@@ -460,7 +460,8 @@
         text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
         values = OrderedDict()
         values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
-        values["Project"] = "{} ({})".format(toplevel_project.name, toplevel_project.directory)
+        if toplevel_project:
+            values["Project"] = "{} ({})".format(toplevel_project.name, toplevel_project.directory)
         values["Targets"] = ", ".join([t.name for t in stream.targets])
         text += self._format_values(values)
 
@@ -483,7 +484,12 @@
 
         # Print information about each loaded project
         #
-        for project_info in toplevel_project.loaded_projects():
+        if toplevel_project:
+            loaded_projects = toplevel_project.loaded_projects()
+        else:
+            loaded_projects = []
+
+        for project_info in loaded_projects:
             project = project_info.project
 
             # Project title line
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 22feab9..6eb25e8 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -124,7 +124,8 @@
     def set_project(self, project):
         assert self._project is None
         self._project = project
-        self._project.load_context.set_fetch_subprojects(self._fetch_subprojects)
+        if self._project:
+            self._project.load_context.set_fetch_subprojects(self._fetch_subprojects)
 
     # load_selection()
     #
@@ -885,6 +886,8 @@
     #    remove_dir (bool): Whether to remove the associated directory
     #
     def workspace_close(self, element_name, *, remove_dir):
+        self._assert_project("Unable to locate workspaces")
+
         workspaces = self._context.get_workspaces()
         workspace = workspaces.get_workspace(element_name)
 
@@ -913,6 +916,7 @@
     #    soft (bool): Only set the workspace state to not prepared
     #
     def workspace_reset(self, targets, *, soft):
+        self._assert_project("Unable to locate workspaces")
 
         elements = self._load(targets, selection=_PipelineSelection.REDIRECT)
 
@@ -953,6 +957,8 @@
     # True if there are any existing workspaces.
     #
     def workspace_exists(self, element_name=None):
+        self._assert_project("Unable to locate workspaces")
+
         workspaces = self._context.get_workspaces()
         if element_name:
             workspace = workspaces.get_workspace(element_name)
@@ -968,6 +974,8 @@
     # Serializes the workspaces and dumps them in YAML to stdout.
     #
     def workspace_list(self):
+        self._assert_project("Unable to locate workspaces")
+
         workspaces = []
         for element_name, workspace_ in self._context.get_workspaces().list():
             workspace_detail = {
@@ -1106,6 +1114,22 @@
     #                    Private Methods                        #
     #############################################################
 
+    # _assert_project()
+    #
+    # Raises an error if a project was not loaded
+    #
+    # Args:
+    #    message: The user facing error message, e.g. "Unable to load elements"
+    #
+    # Raises:
+    #    A StreamError with reason "project-not-loaded" is raised if no project was loaded
+    #
+    def _assert_project(self, message: str) -> None:
+        if not self._project:
+            raise StreamError(
+                message, detail="No project.conf or active workspace was located", reason="project-not-loaded"
+            )
+
     # _fetch_subprojects()
     #
     # Fetch subprojects as part of the project and element loading process.
@@ -1210,7 +1234,12 @@
             targets, valid_artifact_names=valid_artifact_names
         )
 
-        self._project.load_context.set_rewritable(rewritable)
+        # We need a project in order to load elements
+        if element_names:
+            self._assert_project("Unable to load elements: {}".format(", ".join(element_names)))
+
+        if self._project:
+            self._project.load_context.set_rewritable(rewritable)
 
         # Load elements and except elements
         if element_names:
@@ -1493,7 +1522,11 @@
     def _resolve_elements(self, targets):
         with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
             # We need to go through the project to access the loader
-            if task:
+            #
+            # FIXME: We need to calculate the total elements to resolve differently so that
+            #        it can include artifact elements
+            #
+            if task and self._project:
                 task.set_maximum_progress(self._project.loader.loaded)
 
             # XXX: Now that Element._update_state() can trigger recursive update_state calls
@@ -1845,22 +1878,23 @@
             element_targets = initial_targets
 
         # Expand globs for elements
-        all_elements = []
-        element_path_length = len(self._project.element_path) + 1
-        for dirpath, _, filenames in os.walk(self._project.element_path):
-            for filename in filenames:
-                if filename.endswith(".bst"):
-                    element_path = os.path.join(dirpath, filename)
-                    element_path = element_path[element_path_length:]  # Strip out the element_path
-                    all_elements.append(element_path)
+        if self._project:
+            all_elements = []
+            element_path_length = len(self._project.element_path) + 1
+            for dirpath, _, filenames in os.walk(self._project.element_path):
+                for filename in filenames:
+                    if filename.endswith(".bst"):
+                        element_path = os.path.join(dirpath, filename)
+                        element_path = element_path[element_path_length:]  # Strip out the element_path
+                        all_elements.append(element_path)
 
-        for glob in globs:
-            matched = False
-            for element_path in utils.glob(all_elements, glob):
-                element_targets.append(element_path)
-                matched = True
-            if matched:
-                globs[glob] = globs[glob] + 1
+            for glob in globs:
+                matched = False
+                for element_path in utils.glob(all_elements, glob):
+                    element_targets.append(element_path)
+                    matched = True
+                if matched:
+                    globs[glob] = globs[glob] + 1
 
         # Expand globs for artifact names
         if valid_artifact_names:
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index e51be08..ebca148 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -313,10 +313,16 @@
 class Workspaces:
     def __init__(self, toplevel_project, workspace_project_cache):
         self._toplevel_project = toplevel_project
-        self._bst_directory = os.path.join(toplevel_project.directory, ".bst")
-        self._workspaces = self._load_config()
         self._workspace_project_cache = workspace_project_cache
 
+        # The toplevel project might not have a directory
+        if toplevel_project.directory:
+            self._bst_directory = os.path.join(toplevel_project.directory, ".bst")
+            self._workspaces = self._load_config()
+        else:
+            self._bst_directory = None
+            self._workspaces = {}
+
     # list()
     #
     # Generator function to enumerate workspaces.
diff --git a/tests/format/project.py b/tests/format/project.py
index d3de672..6e06176 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -15,10 +15,11 @@
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR))
-def test_missing_project_conf(cli, datafiles):
+@pytest.mark.parametrize("args", [["workspace", "list"], ["show", "pony.bst"]], ids=["list-workspace", "show-element"])
+def test_missing_project_conf(cli, datafiles, args):
     project = str(datafiles)
-    result = cli.run(project=project, args=["workspace", "list"])
-    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_PROJECT_CONF)
+    result = cli.run(project=project, args=args)
+    result.assert_main_error(ErrorDomain.STREAM, "project-not-loaded")
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR))
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index c243a89..1375bc3 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -32,7 +32,8 @@
     ],
     ids=["none", "build", "run", "all"],
 )
-def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist):
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, with_project):
     project = str(datafiles)
     checkout = os.path.join(cli.directory, "checkout")
 
@@ -56,6 +57,10 @@
         shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
         assert cli.get_element_state(project, "target-import.bst") != "cached"
 
+        # Delete the project.conf if we're going to try this without a project
+        if not with_project:
+            os.remove(os.path.join(project, "project.conf"))
+
         # Now checkout the artifact
         result = cli.run(
             project=project,
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 7b26a76..37b9731 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -50,7 +50,8 @@
 
 # Test that we can delete an artifact by specifying its ref.
 @pytest.mark.datafiles(DATA_DIR)
-def test_artifact_delete_artifact(cli, tmpdir, datafiles):
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_artifact_delete_artifact(cli, tmpdir, datafiles, with_project):
     project = str(datafiles)
     element = "target.bst"
 
@@ -69,6 +70,10 @@
     # Explicitly check that the ARTIFACT exists in the cache
     assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))
 
+    # Delete the project.conf if we're going to try this without a project
+    if not with_project:
+        os.remove(os.path.join(project, "project.conf"))
+
     # Delete the artifact
     result = cli.run(project=project, args=["artifact", "delete", artifact])
     result.assert_success()
diff --git a/tests/frontend/artifact_list_contents.py b/tests/frontend/artifact_list_contents.py
index ee129cc..5003755 100644
--- a/tests/frontend/artifact_list_contents.py
+++ b/tests/frontend/artifact_list_contents.py
@@ -22,6 +22,7 @@
 import pytest
 
 from buildstream.testing import cli  # pylint: disable=unused-import
+from buildstream.exceptions import ErrorDomain
 
 
 # Project directory
@@ -29,22 +30,9 @@
 
 
 @pytest.mark.datafiles(DATA_DIR)
-def test_artifact_list_exact_contents_element(cli, datafiles):
-    project = str(datafiles)
-
-    # Ensure we have an artifact to read
-    result = cli.run(project=project, args=["build", "import-bin.bst"])
-    assert result.exit_code == 0
-
-    # List the contents via the element name
-    result = cli.run(project=project, args=["artifact", "list-contents", "import-bin.bst"])
-    assert result.exit_code == 0
-    expected_output = "import-bin.bst:\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n"
-    assert expected_output in result.output
-
-
-@pytest.mark.datafiles(DATA_DIR)
-def test_artifact_list_exact_contents_ref(cli, datafiles):
+@pytest.mark.parametrize("target", ["element-name", "artifact-name"])
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_artifact_list_exact_contents(cli, datafiles, target, with_project):
     project = str(datafiles)
 
     # Get the cache key of our test element
@@ -52,13 +40,58 @@
 
     # Ensure we have an artifact to read
     result = cli.run(project=project, args=["build", "import-bin.bst"])
-    assert result.exit_code == 0
+    result.assert_success()
+
+    if target == "element-name":
+        arg = "import-bin.bst"
+    elif target == "artifact-name":
+        key = cli.get_element_key(project, "import-bin.bst")
+        arg = "test/import-bin/" + key
+
+    # Delete the project.conf if we're going to try this without a project
+    if not with_project:
+        os.remove(os.path.join(project, "project.conf"))
 
     # List the contents via the key
-    result = cli.run(project=project, args=["artifact", "list-contents", "test/import-bin/" + key])
+    result = cli.run(project=project, args=["artifact", "list-contents", arg])
+
+    # Expect to fail if we try to list by element name and there is no project
+    if target == "element-name" and not with_project:
+        result.assert_main_error(ErrorDomain.STREAM, "project-not-loaded")
+    else:
+        result.assert_success()
+
+        expected_output_template = "{target}:\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n"
+        expected_output = expected_output_template.format(target=arg)
+        assert expected_output in result.output
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("target", ["element-name", "artifact-name"])
+def test_artifact_list_exact_contents_long(cli, datafiles, target):
+    project = str(datafiles)
+
+    # Ensure we have an artifact to read
+    result = cli.run(project=project, args=["build", "import-bin.bst"])
     assert result.exit_code == 0
 
-    expected_output = "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
+    if target == "element-name":
+        arg = "import-bin.bst"
+    elif target == "artifact-name":
+        key = cli.get_element_key(project, "import-bin.bst")
+        arg = "test/import-bin/" + key
+
+    # List the contents via the element name or the artifact name
+    result = cli.run(project=project, args=["artifact", "list-contents", "--long", arg])
+    assert result.exit_code == 0
+    expected_output_template = (
+        "{target}:\n"
+        "\tdrwxr-xr-x  dir    1           usr\n"
+        "\tdrwxr-xr-x  dir    1           usr/bin\n"
+        "\t-rw-r--r--  reg    107         usr/bin/hello\n\n"
+    )
+    expected_output = expected_output_template.format(target=arg)
+
     assert expected_output in result.output
 
 
@@ -89,49 +122,3 @@
 
     for artifact in expected_artifacts:
         assert artifact in result.output
-
-
-@pytest.mark.datafiles(DATA_DIR)
-def test_artifact_list_exact_contents_element_long(cli, datafiles):
-    project = str(datafiles)
-
-    # Ensure we have an artifact to read
-    result = cli.run(project=project, args=["build", "import-bin.bst"])
-    assert result.exit_code == 0
-
-    # List the contents via the element name
-    result = cli.run(project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"])
-    assert result.exit_code == 0
-    expected_output = (
-        "import-bin.bst:\n"
-        "\tdrwxr-xr-x  dir    1           usr\n"
-        "\tdrwxr-xr-x  dir    1           usr/bin\n"
-        "\t-rw-r--r--  reg    107         usr/bin/hello\n\n"
-    )
-
-    assert expected_output in result.output
-
-
-@pytest.mark.datafiles(DATA_DIR)
-def test_artifact_list_exact_contents_ref_long(cli, datafiles):
-    project = str(datafiles)
-
-    # Get the cache key of our test element
-    key = cli.get_element_key(project, "import-bin.bst")
-
-    # Ensure we have an artifact to read
-    result = cli.run(project=project, args=["build", "import-bin.bst"])
-    assert result.exit_code == 0
-
-    # List the contents via the key
-    result = cli.run(project=project, args=["artifact", "list-contents", "-l", "test/import-bin/" + key])
-    assert result.exit_code == 0
-
-    expected_output = (
-        "  test/import-bin/" + key + ":\n"
-        "\tdrwxr-xr-x  dir    1           usr\n"
-        "\tdrwxr-xr-x  dir    1           usr/bin\n"
-        "\t-rw-r--r--  reg    107         usr/bin/hello\n\n"
-    )
-
-    assert expected_output in result.output
diff --git a/tests/frontend/artifact_log.py b/tests/frontend/artifact_log.py
index 8fd51ea..07efa56 100644
--- a/tests/frontend/artifact_log.py
+++ b/tests/frontend/artifact_log.py
@@ -30,7 +30,9 @@
 
 
 @pytest.mark.datafiles(DATA_DIR)
-def test_artifact_log(cli, datafiles):
+@pytest.mark.parametrize("target", ["artifact", "artifact-glob"])
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_artifact_log(cli, datafiles, target, with_project):
     project = str(datafiles)
 
     # Get the cache key of our test element
@@ -43,26 +45,28 @@
 
     # Ensure we have an artifact to read
     result = cli.run(project=project, args=["build", "target.bst"])
-    assert result.exit_code == 0
+    result.assert_success()
 
-    # Read the log via the element name
+    # Collect the log by running `bst artifact log` on the element name first
     result = cli.run(project=project, args=["artifact", "log", "target.bst"])
-    assert result.exit_code == 0
+    result.assert_success()
     log = result.output
-
-    # Assert that there actually was a log file
     assert log != ""
 
-    # Read the log via the key
-    result = cli.run(project=project, args=["artifact", "log", "test/target/" + key])
-    assert result.exit_code == 0
-    assert log == result.output
+    # Delete the project.conf if we're going to try this without a project
+    if not with_project:
+        os.remove(os.path.join(project, "project.conf"))
 
-    # Read the log via glob
-    result = cli.run(project=project, args=["artifact", "log", "test/target/*"])
-    assert result.exit_code == 0
-    # The artifact is cached under both a strong key and a weak key
-    assert log == result.output
+    args = ["artifact", "log"]
+    if target == "artifact":
+        args.append("test/target/{}".format(key))
+    elif target == "artifact-glob":
+        args.append("test/target/*")
+
+    # Run bst artifact log
+    result = cli.run(project=project, args=args)
+    result.assert_success()
+    assert result.output == log
 
 
 @pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index 6595106..4fa6b19 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -32,12 +32,17 @@
     ],
     ids=["none", "build", "run", "all"],
 )
-def test_pull(cli, tmpdir, datafiles, deps, expect_cached):
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
     project = str(datafiles)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        # Build the element to push it to cache
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+
+        # Build the element to push it to cache, and explicitly configure local cache so we can check it
+        local_cache = os.path.join(str(tmpdir), "cache")
+        cli.configure(
+            {"cachedir": local_cache, "artifacts": {"url": share.repo, "push": True},}
+        )
 
         # Build it
         result = cli.run(project=project, args=["build", "target.bst"])
@@ -50,11 +55,20 @@
         # Obtain the artifact name for pulling purposes
         artifact_name = cli.get_artifact_name(project, "test", "target.bst")
 
+        # Translate the expected element names into artifact names
+        expect_cached_artifacts = [
+            cli.get_artifact_name(project, "test", element_name) for element_name in expect_cached
+        ]
+
         # Discard the local cache
         shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
         shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
         assert cli.get_element_state(project, "target.bst") != "cached"
 
+        # Delete the project.conf if we're going to try this without a project
+        if not with_project:
+            os.remove(os.path.join(project, "project.conf"))
+
         # Now run our pull test
         result = cli.run(project=project, args=["artifact", "pull", "--deps", deps, artifact_name])
 
@@ -64,6 +78,8 @@
             result.assert_success()
 
         # After pulling, assert that we have the expected elements cached again.
-        states = cli.get_element_states(project, ["target.bst"])
-        for expect in expect_cached:
-            assert states[expect] == "cached"
+        #
+        # Note that we do not use cli.get_element_states() here because the project.conf
+        # might not be present, so we poke at the cache directly for this assertion.
+        for expect in expect_cached_artifacts:
+            assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", expect))
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 392a9e2..2a7131c 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -88,7 +88,8 @@
 
 # Test artifact show with artifact ref
 @pytest.mark.datafiles(DATA_DIR)
-def test_artifact_show_artifact_ref(cli, tmpdir, datafiles):
+@pytest.mark.parametrize("with_project", [True, False], ids=["with-project", "without-project"])
+def test_artifact_show_artifact_name(cli, tmpdir, datafiles, with_project):
     project = str(datafiles)
     element = "target.bst"
 
@@ -98,6 +99,10 @@
     cache_key = cli.get_element_key(project, element)
     artifact_ref = "test/target/" + cache_key
 
+    # Delete the project.conf if we're going to try this without a project
+    if not with_project:
+        os.remove(os.path.join(project, "project.conf"))
+
     result = cli.run(project=project, args=["artifact", "show", artifact_ref])
     result.assert_success()
     assert "cached {}".format(artifact_ref) in result.output