Merge pull request #1501 from apache/tristan/bst-1/update-news

Updating news for 1.6.3 release
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 1973c9a..fc873e4 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -495,8 +495,8 @@
     def initialize_remotes(self, *, on_failure=None):
         remote_specs = self.global_remote_specs
 
-        for project in self.project_remote_specs:
-            remote_specs += self.project_remote_specs[project]
+        for _, project_specs in self.project_remote_specs.items():
+            remote_specs += project_specs
 
         remote_specs = list(utils._deduplicate(remote_specs))
 
@@ -775,7 +775,7 @@
                     element.info("Remote ({}) does not have {} cached".format(
                         remote.spec.url, display_key
                     ))
-            except BlobNotFound as e:
+            except BlobNotFound:
                 element.info("Remote ({}) does not have {} cached".format(
                     remote.spec.url, display_key
                 ))
@@ -868,7 +868,7 @@
         if not os.path.exists(size_file_path):
             return None
 
-        with open(size_file_path, "r") as f:
+        with open(size_file_path, "r", encoding="utf-8") as f:
             size = f.read()
 
         try:
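
Two idioms recur in these hunks and throughout the patch: exception variables that are never used are dropped (the bare `except BlobNotFound:` form), and every text-mode open() gains an explicit encoding so decoding no longer depends on the locale (pylint's unspecified-encoding check, introduced in 2.10). A minimal sketch of the combined pattern; the function name and file layout are illustrative, not taken from the patch:

    import os

    def read_cache_size(size_file_path):
        """Return the recorded cache size, or None if absent or unparsable."""
        if not os.path.exists(size_file_path):
            return None
        # Explicit encoding: text decoding no longer depends on the locale.
        with open(size_file_path, "r", encoding="utf-8") as f:
            size = f.read()
        try:
            return int(size)
        except ValueError:
            # No "as e": the exception object itself is never used.
            return None
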
diff --git a/buildstream/_artifactcache/cascache.py b/buildstream/_artifactcache/cascache.py
index 3395470..50b0b9e 100644
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -442,7 +442,7 @@
                 os.makedirs(os.path.dirname(objpath), exist_ok=True)
                 os.link(tmp.name, objpath)
 
-        except FileExistsError as e:
+        except FileExistsError:
             # We can ignore the failed link() if the object is already in the repo.
             pass
 
@@ -991,7 +991,7 @@
     def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
         required_blobs = self._required_blobs(digest)
 
-        missing_blobs = dict()
+        missing_blobs = {}
         # Limit size of FindMissingBlobs request
         for required_blobs_group in _grouper(required_blobs, 512):
             request = remote_execution_pb2.FindMissingBlobsRequest()
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 08ef5fe..18fbdad 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -494,7 +494,7 @@
         directory = os.path.dirname(self._log_filename)
         os.makedirs(directory, exist_ok=True)
 
-        with open(self._log_filename, 'a') as logfile:
+        with open(self._log_filename, 'a', encoding='utf-8') as logfile:
 
             # Write one last line to the log and flush it to disk
             def flush_log():
diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index b35894c..e9c646f 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -358,7 +358,7 @@
             # us programmatically insert comments or whitespace at
             # the toplevel.
             try:
-                with open(project_path, 'w') as f:
+                with open(project_path, 'w', encoding='utf-8') as f:
                     f.write("# Unique project name\n" +
                             "name: {}\n\n".format(project_name) +
                             "# Required BuildStream format version\n" +
@@ -620,7 +620,7 @@
                     except BstError as e:
                         click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
                 elif choice == 'log':
-                    with open(failure.logfile, 'r') as logfile:
+                    with open(failure.logfile, 'r', encoding='utf-8') as logfile:
                         content = logfile.read()
                         click.echo_via_pager(content)
 
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index a4f412a..6721bea 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -706,7 +706,7 @@
         with ExitStack() as stack:
             # mmap handles low-level memory details, allowing for
             # faster searches
-            f = stack.enter_context(open(logfile, 'r+'))
+            f = stack.enter_context(open(logfile, 'r+', encoding='utf-8'))
             log = stack.enter_context(mmap(f.fileno(), os.path.getsize(f.name)))
 
             count = 0
diff --git a/buildstream/_fuse/hardlinks.py b/buildstream/_fuse/hardlinks.py
index 1386f14..b921a00 100644
--- a/buildstream/_fuse/hardlinks.py
+++ b/buildstream/_fuse/hardlinks.py
@@ -97,9 +97,9 @@
     ###########################################################
     #                     Fuse Methods                        #
     ###########################################################
-    def access(self, path, mode):
+    def access(self, path, amode):
         full_path = self._full_path(path)
-        if not os.access(full_path, mode):
+        if not os.access(full_path, amode):
             raise FuseOSError(errno.EACCES)
 
     def chmod(self, path, mode):
@@ -160,18 +160,18 @@
     def unlink(self, path):
         return os.unlink(self._full_path(path))
 
-    def symlink(self, name, target):
-        return os.symlink(target, self._full_path(name))
+    def symlink(self, target, source):
+        return os.symlink(source, self._full_path(target))
 
     def rename(self, old, new):
         return os.rename(self._full_path(old), self._full_path(new))
 
-    def link(self, target, name):
+    def link(self, target, source):
 
         # When creating a hard link here, should we ensure the original
         # file is not a hardlink itself first ?
         #
-        return os.link(self._full_path(name), self._full_path(target))
+        return os.link(self._full_path(source), self._full_path(target))
 
     def utimens(self, path, times=None):
         return os.utime(self._full_path(path), times)
@@ -185,24 +185,24 @@
 
         return os.open(full_path, flags)
 
-    def create(self, path, mode, flags):
+    def create(self, path, mode, fi=None):
         full_path = self._full_path(path)
 
         # If it already exists, ensure it's a copy first
         self._ensure_copy(full_path)
-        return os.open(full_path, flags, mode)
+        return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
 
-    def read(self, path, length, offset, fh):
+    def read(self, path, size, offset, fh):
         os.lseek(fh, offset, os.SEEK_SET)
-        return os.read(fh, length)
+        return os.read(fh, size)
 
-    def write(self, path, buf, offset, fh):
+    def write(self, path, data, offset, fh):
         os.lseek(fh, offset, os.SEEK_SET)
-        return os.write(fh, buf)
+        return os.write(fh, data)
 
     def truncate(self, path, length, fh=None):
         full_path = self._full_path(path)
-        with open(full_path, 'r+') as f:
+        with open(full_path, 'r+', encoding='utf-8') as f:
             f.truncate(length)
 
     def flush(self, path, fh):
@@ -211,5 +211,5 @@
     def release(self, path, fh):
         return os.close(fh)
 
-    def fsync(self, path, fdatasync, fh):
+    def fsync(self, path, datasync, fh):
         return self.flush(path, fh)
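
The parameter renames in hardlinks.py line the overrides up with the names declared on fusepy's Operations base class, which is what pylint's arguments-renamed check compares overrides against; behaviour is unchanged. Roughly, the base-class signatures being matched look like this (paraphrased from fusepy, not part of this patch):

    class Operations:                             # paraphrased fusepy interface
        def access(self, path, amode): ...
        def create(self, path, mode, fi=None): ...
        def read(self, path, size, offset, fh): ...
        def write(self, path, data, offset, fh): ...
        def symlink(self, target, source): ...    # target: new link path, source: what it points to
        def link(self, target, source): ...
        def fsync(self, path, datasync, fh): ...
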
diff --git a/buildstream/_scheduler/queues/queue.py b/buildstream/_scheduler/queues/queue.py
index ec1e813..aed6938 100644
--- a/buildstream/_scheduler/queues/queue.py
+++ b/buildstream/_scheduler/queues/queue.py
@@ -249,7 +249,7 @@
                     workspaces.save_config()
                 except BstError as e:
                     self._message(element, MessageType.ERROR, "Error saving workspaces", detail=str(e))
-                except Exception as e:   # pylint: disable=broad-except
+                except Exception:   # pylint: disable=broad-except
                     self._message(element, MessageType.BUG,
                                   "Unhandled exception while saving workspaces",
                                   detail=traceback.format_exc())
@@ -292,7 +292,7 @@
             #
             set_last_task_error(e.domain, e.reason)
 
-        except Exception as e:   # pylint: disable=broad-except
+        except Exception:   # pylint: disable=broad-except
 
             # Report unhandled exceptions and mark as failed
             #
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 2efd4c2..03de20b 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -701,7 +701,8 @@
         #
         # FIXME: A bit hackish
         try:
-            open(tar_location, mode="x")
+            with open(tar_location, mode="x") as _:  # pylint: disable=unspecified-encoding
+                pass
             os.remove(tar_location)
         except IOError as e:
             raise StreamError("Cannot write to {0}: {1}"
@@ -1132,7 +1133,7 @@
 
         script_path = os.path.join(directory, "build.sh")
 
-        with open(_site.build_all_template, "r") as f:
+        with open(_site.build_all_template, "r", encoding="utf-8") as f:
             script_template = f.read()
 
         with utils.save_file_atomic(script_path, "w") as script:
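
The tar_location probe now wraps open() in a with statement so the descriptor is always closed (pylint's consider-using-with); mode="x" still guarantees the probe fails when the file cannot be created. The same check, sketched standalone with illustrative names (the real code raises StreamError rather than RuntimeError):

    import os

    def assert_writable(tar_location):
        # Create-and-delete probe: "x" mode fails if the file already exists
        # or its directory is not writable.
        try:
            with open(tar_location, mode="x") as _:  # pylint: disable=unspecified-encoding
                pass
            os.remove(tar_location)
        except IOError as e:
            raise RuntimeError("Cannot write to {}: {}".format(tar_location, e)) from e
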
diff --git a/buildstream/_yaml.py b/buildstream/_yaml.py
index ce1232b..1c3e95d 100644
--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -30,8 +30,8 @@
 from ._exceptions import LoadError, LoadErrorReason
 
 # This overrides the ruamel constructor to treat everything as a string
-RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:int', RoundTripConstructor.construct_yaml_str)
-RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:float', RoundTripConstructor.construct_yaml_str)
+RoundTripConstructor.add_constructor('tag:yaml.org,2002:int', RoundTripConstructor.construct_yaml_str)
+RoundTripConstructor.add_constructor('tag:yaml.org,2002:float', RoundTripConstructor.construct_yaml_str)
 
 # We store information in the loaded yaml on a DictProvenance
 # stored in all dictionaries under this key
@@ -197,7 +197,7 @@
     file = ProvenanceFile(filename, shortname, project)
 
     try:
-        with open(filename) as f:
+        with open(filename, encoding="utf-8") as f:
             return load_data(f, file, copy_tree=copy_tree)
     except FileNotFoundError as e:
         raise LoadError(LoadErrorReason.MISSING_FILE,
diff --git a/buildstream/element.py b/buildstream/element.py
index 3be50b5..1d4208c 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1870,7 +1870,7 @@
     #
     # Writes a script to the given directory.
     def _write_script(self, directory):
-        with open(_site.build_module_template, "r") as f:
+        with open(_site.build_module_template, "r", encoding="utf-8") as f:
             script_template = f.read()
 
         variable_string = ""
diff --git a/buildstream/plugin.py b/buildstream/plugin.py
index 737ff31..f07b6fd 100644
--- a/buildstream/plugin.py
+++ b/buildstream/plugin.py
@@ -729,7 +729,7 @@
     def _output_file(self):
         log = self.__context.get_log_handle()
         if log is None:
-            with open(os.devnull, "w") as output:
+            with open(os.devnull, "w", encoding="utf-8") as output:
                 yield output
         else:
             yield log
diff --git a/buildstream/sandbox/_mount.py b/buildstream/sandbox/_mount.py
index 1540d9d..2fe968a 100644
--- a/buildstream/sandbox/_mount.py
+++ b/buildstream/sandbox/_mount.py
@@ -73,7 +73,7 @@
                 parent_dir = os.path.dirname(self.mount_source.rstrip('/'))
                 os.makedirs(parent_dir, exist_ok=True)
                 if not os.path.exists(self.mount_source):
-                    with open(self.mount_source, 'w'):
+                    with open(self.mount_source, 'w', encoding='utf-8'):
                         pass
 
     @contextmanager
diff --git a/buildstream/sandbox/_sandboxbwrap.py b/buildstream/sandbox/_sandboxbwrap.py
index 8afb0f1..cb570ef 100644
--- a/buildstream/sandbox/_sandboxbwrap.py
+++ b/buildstream/sandbox/_sandboxbwrap.py
@@ -163,10 +163,7 @@
         mount_source_overrides = self._get_mount_sources()
         for mark in marked_directories:
             mount_point = mark['directory']
-            if mount_point in mount_source_overrides:
-                mount_source = mount_source_overrides[mount_point]
-            else:
-                mount_source = mount_map.get_mount_source(mount_point)
+            mount_source = mount_source_overrides.get(mount_point, mount_map.get_mount_source(mount_point))
 
             # Use --dev-bind for all mounts; this is simply a bind mount which is
             # not restrictive about devices.
@@ -220,7 +217,7 @@
             if flags & SandboxFlags.INTERACTIVE:
                 stdin = sys.stdin
             else:
-                stdin = stack.enter_context(open(os.devnull, "r"))
+                stdin = stack.enter_context(open(os.devnull, "r"))  # pylint: disable=unspecified-encoding
 
             # Run bubblewrap !
             exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
diff --git a/buildstream/sandbox/_sandboxchroot.py b/buildstream/sandbox/_sandboxchroot.py
index f3f8c50..cf40355 100644
--- a/buildstream/sandbox/_sandboxchroot.py
+++ b/buildstream/sandbox/_sandboxchroot.py
@@ -96,7 +96,7 @@
             if flags & SandboxFlags.INTERACTIVE:
                 stdin = sys.stdin
             else:
-                stdin = stack.enter_context(open(os.devnull, 'r'))
+                stdin = stack.enter_context(open(os.devnull, 'r'))  # pylint: disable=unspecified-encoding
 
             # Ensure the cwd exists
             if cwd is not None:
@@ -264,10 +264,7 @@
         @contextmanager
         def mount_point(point, **kwargs):
             mount_source_overrides = self._get_mount_sources()
-            if point in mount_source_overrides:
-                mount_source = mount_source_overrides[point]
-            else:
-                mount_source = self.mount_map.get_mount_source(point)
+            mount_source = mount_source_overrides.get(point, self.mount_map.get_mount_source(point))
             mount_point = os.path.join(rootfs, point.lstrip(os.sep))
 
             with Mounter.bind_mount(mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs):
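
Both sandbox backends replace the if/else lookup with dict.get() and a default. One property of this idiom worth keeping in mind: the default expression is evaluated before get() is called, so get_mount_source() now runs even when an override exists, which is harmless only as long as that call has no side effects. A small illustration (hypothetical data, not from the patch):

    overrides = {"/buildstream": "/tmp/override"}

    def get_mount_source(point):
        print("computed default for", point)      # visible side effect
        return "/default" + point

    # dict.get(): the default is computed even though the override wins.
    source = overrides.get("/buildstream", get_mount_source("/buildstream"))

    # The old if/else form only computes the default on a miss.
    point = "/buildstream"
    if point in overrides:
        source = overrides[point]
    else:
        source = get_mount_source(point)
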
diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in
index 817127a..bba2fc5 100644
--- a/requirements/dev-requirements.in
+++ b/requirements/dev-requirements.in
@@ -1,5 +1,5 @@
 pep8
-pylint == 2.8.3
+pylint >= 2.10.0
 pytest >= 3.7
 pytest-datafiles
 pytest-env
diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt
index 08ae77e..c97a089 100644
--- a/requirements/dev-requirements.txt
+++ b/requirements/dev-requirements.txt
@@ -1,12 +1,12 @@
 pep8==1.7.1
-pylint==2.8.3
+pylint==2.10.2
 pytest==6.2.4
 pytest-datafiles==2.0
 pytest-env==0.6.2
 pytest-xdist==2.3.0
 pytest-timeout==1.4.2
 ## The following requirements were added by pip freeze:
-astroid==2.5.6
+astroid==2.7.2
 attrs==21.2.0
 execnet==1.9.0
 iniconfig==1.1.1
@@ -14,6 +14,7 @@
 lazy-object-proxy==1.6.0
 mccabe==0.6.1
 packaging==21.0
+platformdirs==2.2.0
 pluggy==0.13.1
 py==1.10.0
 pyparsing==2.4.7
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 2a8a505..b5de6e9 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -4,9 +4,9 @@
 pluginbase==1.0.1
 protobuf==3.17.3
 psutil==5.8.0
-ruamel.yaml==0.17.10
+ruamel.yaml==0.17.13
 setuptools==44.1.1
-ujson==4.0.2
+ujson==4.1.0
 ## The following requirements were added by pip freeze:
 MarkupSafe==2.0.1
 ruamel.yaml.clib==0.2.6
diff --git a/setup.py b/setup.py
index fb2ed7d..e174ee3 100755
--- a/setup.py
+++ b/setup.py
@@ -203,7 +203,7 @@
             for filename in files:
                 if filename.endswith('.py'):
                     path = os.path.join(root, filename)
-                    with open(path, 'r') as f:
+                    with open(path, 'r', encoding='utf-8') as f:
                         code = f.read()
 
                     # All protos are in buildstream._protos
@@ -213,7 +213,7 @@
                     code = re.sub(r'^from buildstream._protos.google.protobuf', r'from google.protobuf',
                                   code, flags=re.MULTILINE)
 
-                    with open(path, 'w') as f:
+                    with open(path, 'w', encoding='utf-8') as f:
                         f.write(code)
 
 
@@ -228,17 +228,17 @@
 #####################################################
 #               Gather requirements                 #
 #####################################################
-with open('requirements/dev-requirements.in') as dev_reqs:
+with open('requirements/dev-requirements.in', encoding='utf-8') as dev_reqs:
     dev_requires = dev_reqs.read().splitlines()
 
-with open('requirements/requirements.in') as install_reqs:
+with open('requirements/requirements.in', encoding='utf-8') as install_reqs:
     install_requires = install_reqs.read().splitlines()
 
 #####################################################
 #     Prepare package description from README       #
 #####################################################
 with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                       'README.rst')) as readme:
+                       'README.rst'), encoding='utf-8') as readme:
     long_description = readme.read()