Merge pull request #1812 from apache/abderrahim/nogrpc
elementsourcescache: don't try catching grpc.RpcError
diff --git a/.asf.yaml b/.asf.yaml
index 7483a9a..120de7b 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -33,6 +33,9 @@
# disable rebase button:
rebase: false
+ # Delete the source branch when a pull request is merged
+ del_branch_on_merge: true
+
# Enable pages publishing
ghp_branch: gh-pages
ghp_path: /
diff --git a/.github/common.env b/.github/common.env
index 2072f21..598896c 100644
--- a/.github/common.env
+++ b/.github/common.env
@@ -1,6 +1,6 @@
# Shared common variables
-CI_IMAGE_VERSION=master-533491591
-CI_TOXENV_MAIN=py37,py38-nocover,py39-nocover,py310-nocover
-CI_TOXENV_PLUGINS=py37-plugins,py38-plugins-nocover,py39-plugins-nocover,py310-plugins-nocover
+CI_IMAGE_VERSION=master-643533272
+CI_TOXENV_MAIN=py37,py38,py39,py310,py311
+CI_TOXENV_PLUGINS=py37-plugins,py38-plugins,py39-plugins,py310-plugins,py311-plugins
CI_TOXENV_ALL="${CI_TOXENV_MAIN},${CI_TOXENV_PLUGINS}"
diff --git a/.github/compose/ci.docker-compose.yml b/.github/compose/ci.docker-compose.yml
index e213aaf..19e0b2a 100644
--- a/.github/compose/ci.docker-compose.yml
+++ b/.github/compose/ci.docker-compose.yml
@@ -1,7 +1,7 @@
version: '3.4'
x-tests-template: &tests-template
- image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:35-${CI_IMAGE_VERSION:-latest}
+ image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:36-${CI_IMAGE_VERSION:-latest}
command: tox -vvvvv -- --color=yes --integration
environment:
TOXENV: ${CI_TOXENV_ALL}
@@ -22,14 +22,14 @@
services:
- fedora-35:
- <<: *tests-template
- image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:35-${CI_IMAGE_VERSION:-latest}
-
fedora-36:
<<: *tests-template
image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:36-${CI_IMAGE_VERSION:-latest}
+ fedora-37:
+ <<: *tests-template
+ image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:37-${CI_IMAGE_VERSION:-latest}
+
debian-10:
<<: *tests-template
image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-debian:10-${CI_IMAGE_VERSION:-latest}
diff --git a/.github/run-ci.sh b/.github/run-ci.sh
index 3227b32..bff9de8 100755
--- a/.github/run-ci.sh
+++ b/.github/run-ci.sh
@@ -102,7 +102,7 @@
if [ -z "${test_names}" ]; then
- for test_name in mypy debian-10 fedora-35 fedora-36 fedora-missing-deps; do
+ for test_name in mypy debian-10 fedora-36 fedora-37 fedora-missing-deps; do
if ! runTest "${test_name}"; then
echo "Tests failed"
exit 1
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ed9ee44..307bbb8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,8 +37,8 @@
# "../compose/ci.docker-compose.yml"
test-name:
- debian-10
- - fedora-35
- fedora-36
+ - fedora-37
- fedora-missing-deps
- lint
- mypy
diff --git a/requirements/cov-requirements.in b/requirements/cov-requirements.in
index d80d8f4..65bd5da 100644
--- a/requirements/cov-requirements.in
+++ b/requirements/cov-requirements.in
@@ -1,4 +1,4 @@
-coverage == 4.4.0
+coverage >= 6
pytest-cov >= 2.5.0
pytest >= 6.0.1
Cython
diff --git a/requirements/cov-requirements.txt b/requirements/cov-requirements.txt
index dc46a1b..9df1cf0 100644
--- a/requirements/cov-requirements.txt
+++ b/requirements/cov-requirements.txt
@@ -1,5 +1,5 @@
-coverage==4.4
-pytest-cov==2.10.1
+coverage==7.0.5
+pytest-cov==4.0.0
pytest==7.2.0
Cython==0.29.32
## The following requirements were added by pip freeze:
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index e8dd97c..3c621fb 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -1,5 +1,5 @@
click==8.1.3
-grpcio==1.46.0
+grpcio==1.51.1
Jinja2==3.1.2
pluginbase==1.0.1
protobuf==4.21.12
diff --git a/src/buildstream/downloadablefilesource.py b/src/buildstream/downloadablefilesource.py
index d36da67..2e261cb 100644
--- a/src/buildstream/downloadablefilesource.py
+++ b/src/buildstream/downloadablefilesource.py
@@ -97,22 +97,36 @@
if etag is not None:
request.add_header("If-None-Match", etag)
- with contextlib.closing(opener.open(request)) as response:
- info = response.info()
+ try:
+ with contextlib.closing(opener.open(request)) as response:
+ info = response.info()
- # some servers don't honor the 'If-None-Match' header
- if etag and info["ETag"] == etag:
- return None, None
+ # some servers don't honor the 'If-None-Match' header
+ if etag and info["ETag"] == etag:
+ return None, None, None
- etag = info["ETag"]
+ etag = info["ETag"]
- filename = info.get_filename(default_name)
- filename = os.path.basename(filename)
- local_file = os.path.join(directory, filename)
- with open(local_file, "wb") as dest:
- shutil.copyfileobj(response, dest)
+ filename = info.get_filename(default_name)
+ filename = os.path.basename(filename)
+ local_file = os.path.join(directory, filename)
+ with open(local_file, "wb") as dest:
+ shutil.copyfileobj(response, dest)
- return local_file, etag
+ except urllib.error.HTTPError as e:
+ if e.code == 304:
+ # 304 Not Modified.
+ # Because we only send the etag recorded for the current ref, a 304 means
+ # we would have downloaded exactly what the ref already refers to.
+ return None, None, None
+
+ return None, None, str(e)
+ except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError) as e:
+ # Note that urllib.request.Request in the try block may throw a
+ # ValueError for unknown url types, so we handle it here.
+ return None, None, str(e)
+
+ return local_file, etag, None
class DownloadableFileSource(Source):
@@ -206,50 +220,39 @@
def _ensure_mirror(self, activity_name: str):
# Downloads from the url and caches it according to its sha256sum.
- try:
- with self.tempdir() as td:
- # We do not use etag in case what we have in cache is
- # not matching ref in order to be able to recover from
- # corrupted download.
- if self.ref and not self.is_cached():
- # Do not re-download the file if the ETag matches.
- etag = self._get_etag(self.ref)
- else:
- etag = None
+ with self.tempdir() as td:
+ # We do not use the etag if what we have in cache does not
+ # match the ref, so that we can recover from a corrupted
+ # download.
+ if self.ref and not self.is_cached():
+ # Do not re-download the file if the ETag matches.
+ etag = self._get_etag(self.ref)
+ else:
+ etag = None
- local_file, new_etag = self.blocking_activity(
- _download_file, (self.__get_urlopener(), self.url, etag, td), activity_name
- )
+ local_file, new_etag, error = self.blocking_activity(
+ _download_file, (self.__get_urlopener(), self.url, etag, td), activity_name
+ )
- if local_file is None:
- return self.ref
+ if error:
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, error), temporary=True)
- # Make sure url-specific mirror dir exists.
- if not os.path.isdir(self._mirror_dir):
- os.makedirs(self._mirror_dir)
-
- # Store by sha256sum
- sha256 = utils.sha256sum(local_file)
- # Even if the file already exists, move the new file over.
- # In case the old file was corrupted somehow.
- os.rename(local_file, self._get_mirror_file(sha256))
-
- if new_etag:
- self._store_etag(sha256, new_etag)
- return sha256
-
- except urllib.error.HTTPError as e:
- if e.code == 304:
- # 304 Not Modified.
- # Because we use etag only for matching ref, currently specified ref is what
- # we would have downloaded.
+ if local_file is None:
return self.ref
- raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
- except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError) as e:
- # Note that urllib.request.Request in the try block may throw a
- # ValueError for unknown url types, so we handle it here.
- raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
+ # Make sure url-specific mirror dir exists.
+ if not os.path.isdir(self._mirror_dir):
+ os.makedirs(self._mirror_dir)
+
+ # Store by sha256sum
+ sha256 = utils.sha256sum(local_file)
+ # Even if the file already exists, move the new file over.
+ # In case the old file was corrupted somehow.
+ os.rename(local_file, self._get_mirror_file(sha256))
+
+ if new_etag:
+ self._store_etag(sha256, new_etag)
+ return sha256
def _get_mirror_file(self, sha=None):
if sha is not None:
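Note on the downloadablefilesource.py hunks above: all network error handling moves into _download_file(), which runs in a background process via blocking_activity(). Instead of letting urllib exceptions escape (and having to survive pickling back to the parent), the helper now returns a (local_file, etag, error) triple, and _ensure_mirror() raises SourceError in the main process when the error slot is set. A minimal standalone sketch of the same pattern, using hypothetical names fetch_url and run_in_subprocess rather than the real BuildStream APIs:

import multiprocessing
import urllib.error
import urllib.request


def fetch_url(url, dest):
    # Runs in a child process: convert every failure into data instead of
    # raising, so nothing exception-shaped has to be pickled back.
    try:
        with urllib.request.urlopen(url) as response, open(dest, "wb") as out:
            out.write(response.read())
        return dest, None
    except urllib.error.HTTPError as e:
        if e.code == 304:
            # 304 Not Modified: nothing new to download, but not an error either.
            return None, None
        return None, str(e)
    except (urllib.error.URLError, OSError, ValueError) as e:
        # urlopen() raises ValueError for unknown URL types.
        return None, str(e)


def run_in_subprocess(target, args):
    # Stand-in for Plugin.blocking_activity(): run target in another process
    # and hand back whatever picklable value it returned.
    with multiprocessing.get_context("spawn").Pool(1) as pool:
        return pool.apply(target, args)


if __name__ == "__main__":
    local_file, error = run_in_subprocess(fetch_url, ("https://example.com/", "page.html"))
    if error:
        # The exception is raised here, in the main process.
        raise RuntimeError("Error mirroring: {}".format(error))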
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index e1e7b1f..14e35b9 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -123,7 +123,6 @@
import multiprocessing.popen_forkserver # type: ignore
import os
-import pickle
import queue
import signal
import subprocess
@@ -184,13 +183,8 @@
try:
result = target(*args)
result_queue.put((None, result))
- except Exception as exc: # pylint: disable=broad-except
- try:
- # Here we send the result again, just in case it was a PickleError
- # in which case the same exception would be thrown down
- result_queue.put((exc, result))
- except pickle.PickleError:
- result_queue.put((traceback.format_exc(), None))
+ except Exception: # pylint: disable=broad-except
+ result_queue.put((traceback.format_exc(), None))
class Plugin:
@@ -606,7 +600,8 @@
in order to avoid starving the scheduler.
The function, its arguments and return value must all be pickleable,
- as it will be run in another process.
+ as it will be run in another process. The function should not raise
+ an exception.
This should be used whenever there is a potential for a blocking
syscall to not return in a reasonable (<1s) amount of time.
@@ -676,12 +671,7 @@
raise PluginError("Background process didn't exit after 15 seconds and got killed.")
if err is not None:
- if isinstance(err, str):
- # This was a pickle error, this is a bug
- raise PluginError(
- "An error happened while returning the result from a blocking activity", detail=err
- )
- raise err
+ raise PluginError("An error happened while running a blocking activity", detail=err)
return result
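The plugin.py hunks apply the same idea to blocking_activity() itself: the child no longer tries to put exception objects on the result queue (pickling them can fail in turn); it sends traceback.format_exc() as a plain string, and the parent wraps whatever string arrives in a PluginError. A rough sketch of that queue protocol, with a hypothetical RemoteError standing in for PluginError:

import multiprocessing
import traceback


class RemoteError(Exception):
    """Stands in for PluginError; carries the child's traceback as plain text."""


def _child(target, args, result_queue):
    try:
        result = target(*args)
        # (error, result): both halves are ordinary picklable values.
        result_queue.put((None, result))
    except Exception:  # pylint: disable=broad-except
        result_queue.put((traceback.format_exc(), None))


def run_blocking(target, args):
    ctx = multiprocessing.get_context("spawn")
    queue = ctx.Queue()
    process = ctx.Process(target=_child, args=(target, args, queue))
    process.start()
    err, result = queue.get()
    process.join()
    if err is not None:
        # Re-raised in the parent, with the child's traceback as the message body.
        raise RemoteError("An error happened while running a blocking activity:\n" + err)
    return result


if __name__ == "__main__":
    print(run_blocking(divmod, (7, 2)))  # -> (3, 1)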
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 7d80725..267cd51 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -70,6 +70,10 @@
_UMASK = os.umask(0o777)
os.umask(_UMASK)
+# Only some operating systems have os.copy_file_range and, even when present,
+# it might not work (e.g. on older kernels or for copies across filesystems).
+_USE_CP_FILE_RANGE = hasattr(os, "copy_file_range")
+
class UtilError(BstError):
"""Raised by utility functions when system calls fail.
@@ -357,6 +361,26 @@
return h.hexdigest()
+def _copy_file_range(src, dest):
+ global _USE_CP_FILE_RANGE # pylint: disable=global-statement
+ if not _USE_CP_FILE_RANGE:
+ return False
+ with open(src, "rb") as src_file, open(dest, "wb") as dest_file:
+ num_bytes = os.fstat(src_file.fileno()).st_size
+ while num_bytes > 0:
+ try:
+ bytes_read = os.copy_file_range(src_file.fileno(), dest_file.fileno(), num_bytes)
+ if not bytes_read:
+ return True
+ num_bytes -= bytes_read
+ except OSError as error:
+ if error.errno in (errno.ENOSYS, errno.EXDEV):
+ _USE_CP_FILE_RANGE = False
+ return False
+ raise error from None
+ return True
+
+
def safe_copy(src: str, dest: str, *, copystat: bool = True, result: Optional[FileListResult] = None) -> None:
"""Copy a file while optionally preserving attributes
@@ -381,7 +405,9 @@
raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
try:
- shutil.copyfile(src, dest)
+ ret = _copy_file_range(src, dest)
+ if not ret:
+ shutil.copyfile(src, dest)
except (OSError, shutil.Error) as e:
raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e)) from e
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 36c6990..753384b 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -11,13 +11,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import multiprocessing
import os
import shutil
import signal
from collections import namedtuple
from contextlib import ExitStack, contextmanager
from concurrent import futures
-from multiprocessing import Process, Queue
from urllib.parse import urlparse
import grpc
@@ -36,9 +36,11 @@
class BaseArtifactShare:
def __init__(self):
- q = Queue()
+ multiprocessing_context = multiprocessing.get_context("forkserver")
- self.process = Process(target=self.run, args=(q,))
+ q = multiprocessing_context.Queue()
+
+ self.process = multiprocessing_context.Process(target=self.run, args=(q,))
self.process.start()
# Retrieve port from server subprocess
@@ -74,14 +76,6 @@
server.stop(0)
- # Save collected coverage data
- try:
- from pytest_cov.embed import cleanup
- except ImportError:
- pass
- else:
- cleanup()
-
# _create_server()
#
# Create the server that will be run in the process
@@ -138,13 +132,14 @@
self.repodir = os.path.join(self.directory, "repo")
os.makedirs(self.repodir)
- self.cas = CASCache(self.repodir, casd=False)
-
self.quota = quota
self.index_only = index_only
super().__init__()
+ # Set after subprocess creation, since CASCache itself is not picklable
+ self.cas = CASCache(self.repodir, casd=False)
+
def _create_server(self):
return create_server(
self.repodir,
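The artifactshare.py test helper now builds its server subprocess from an explicit "forkserver" multiprocessing context rather than the default fork start method. With forkserver (as with spawn) the child does not inherit the test process's state wholesale; everything handed to Process() must be picklable, which is why the CASCache is only created after super().__init__() has started the subprocess. A minimal sketch of wiring a child process through an explicit context and a queue (forkserver is Unix-only):

import multiprocessing


def _serve(queue):
    # Runs in a fresh forkserver child: it does not inherit threads or other
    # runtime state from the test process, only what is pickled to it.
    queue.put(50051)  # report the listening port back to the parent


if __name__ == "__main__":
    context = multiprocessing.get_context("forkserver")
    queue = context.Queue()
    process = context.Process(target=_serve, args=(queue,))
    process.start()
    print("server listening on port", queue.get())
    process.join()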
diff --git a/tests/testutils/repo/git.py b/tests/testutils/repo/git.py
index 7882be1..050d10e 100644
--- a/tests/testutils/repo/git.py
+++ b/tests/testutils/repo/git.py
@@ -78,7 +78,7 @@
if url is not None:
submodule["url"] = url
self.submodules[subdir] = submodule
- self._run_git("submodule", "add", url, subdir)
+ self._run_git("-c", "protocol.file.allow=always", "submodule", "add", url, subdir)
self._run_git("commit", "-m", "Added the submodule")
return self.latest_commit()
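The git.py change accounts for the Git security releases of October 2022 (CVE-2022-39253), which disable the file:// transport for submodules by default; adding a local-path submodule in the test suite therefore needs protocol.file.allow=always passed as a one-off -c override. Roughly what the helper runs, expressed as a plain subprocess call with made-up paths:

import subprocess

# Made-up repository paths, purely for illustration.
subprocess.run(
    ["git", "-c", "protocol.file.allow=always", "submodule", "add", "/tmp/subrepo", "subdir"],
    cwd="/tmp/mainrepo",
    check=True,
)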
diff --git a/tox.ini b/tox.ini
index 4b2f54c..c6998a9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,7 +16,7 @@
# Tox global configuration
#
[tox]
-envlist = py37,py{38,39,310,311}-nocover
+envlist = py{37,38,39,310,311}
skip_missing_interpreters = true
isolated_build = true