|  | # | 
|  | #  Copyright (C) 2016 Codethink Limited | 
|  | #  Copyright (C) 2019 Bloomberg Finance LP | 
|  | # | 
|  | #  This program is free software; you can redistribute it and/or | 
|  | #  modify it under the terms of the GNU Lesser General Public | 
|  | #  License as published by the Free Software Foundation; either | 
|  | #  version 2 of the License, or (at your option) any later version. | 
|  | # | 
|  | #  This library is distributed in the hope that it will be useful, | 
|  | #  but WITHOUT ANY WARRANTY; without even the implied warranty of | 
|  | #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU | 
|  | #  Lesser General Public License for more details. | 
|  | # | 
|  | #  You should have received a copy of the GNU Lesser General Public | 
|  | #  License along with this library. If not, see <http://www.gnu.org/licenses/>. | 
|  | # | 
|  | #  Authors: | 
|  | #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk> | 
|  | """ | 
|  | Source - Base source class | 
|  | ========================== | 
|  |  | 
|  | .. _core_source_builtins: | 
|  |  | 
|  | Built-in functionality | 
|  | ---------------------- | 
|  |  | 
|  | The Source base class provides built in functionality that may be overridden | 
|  | by individual plugins. | 
|  |  | 
|  | * Directory | 
|  |  | 
|  | The ``directory`` variable can be set for all sources of a type in project.conf | 
or per source within an element.
|  |  | 
|  | This sets the location within the build root that the content of the source | 
|  | will be loaded in to. If the location does not exist, it will be created. | 
|  |  | 
|  | .. _core_source_abstract_methods: | 
|  |  | 
|  | Abstract Methods | 
|  | ---------------- | 
|  | For loading and configuration purposes, Sources must implement the | 
|  | :ref:`Plugin base class abstract methods <core_plugin_abstract_methods>`. | 
|  |  | 
|  | .. attention:: | 
|  |  | 
|  | In order to ensure that all configuration data is processed at | 
|  | load time, it is important that all URLs have been processed during | 
|  | :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>`. | 
|  |  | 
|  | Source implementations *must* either call | 
|  | :func:`Source.translate_url() <buildstream.source.Source.translate_url>` or | 
|  | :func:`Source.mark_download_url() <buildstream.source.Source.mark_download_url>` | 
|  | for every URL that has been specified in the configuration during | 
|  | :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` | 
|  |  | 
|  | Sources expose the following abstract methods. Unless explicitly mentioned, | 
|  | these methods are mandatory to implement. | 
|  |  | 
|  | * :func:`Source.get_consistency() <buildstream.source.Source.get_consistency>` | 
|  |  | 
Report the source's consistency state.
|  |  | 
|  | * :func:`Source.load_ref() <buildstream.source.Source.load_ref>` | 
|  |  | 
|  | Load the ref from a specific YAML node | 
|  |  | 
|  | * :func:`Source.get_ref() <buildstream.source.Source.get_ref>` | 
|  |  | 
|  | Fetch the source ref | 
|  |  | 
|  | * :func:`Source.set_ref() <buildstream.source.Source.set_ref>` | 
|  |  | 
|  | Set a new ref explicitly | 
|  |  | 
|  | * :func:`Source.track() <buildstream.source.Source.track>` | 
|  |  | 
|  | Automatically derive a new ref from a symbolic tracking branch | 
|  |  | 
|  | * :func:`Source.fetch() <buildstream.source.Source.fetch>` | 
|  |  | 
|  | Fetch the actual payload for the currently set ref | 
|  |  | 
|  | * :func:`Source.stage() <buildstream.source.Source.stage>` | 
|  |  | 
|  | Stage the sources for a given ref at a specified location | 
|  |  | 
|  | * :func:`Source.init_workspace() <buildstream.source.Source.init_workspace>` | 
|  |  | 
|  | Stage sources in a local directory for use as a workspace. | 
|  |  | 
|  | **Optional**: If left unimplemented, this will default to calling | 
|  | :func:`Source.stage() <buildstream.source.Source.stage>` | 
|  |  | 
|  | * :func:`Source.get_source_fetchers() <buildstream.source.Source.get_source_fetchers>` | 
|  |  | 
|  | Get the objects that are used for fetching. | 
|  |  | 
|  | **Optional**: This only needs to be implemented for sources that need to | 
|  | download from multiple URLs while fetching (e.g. a git repo and its | 
|  | submodules). For details on how to define a SourceFetcher, see | 
|  | :ref:`SourceFetcher <core_source_fetcher>`. | 
|  |  | 
|  | * :func:`Source.validate_cache() <buildstream.source.Source.validate_cache>` | 
|  |  | 
|  | Perform any validations which require the sources to be cached. | 
|  |  | 
|  | **Optional**: This is completely optional and will do nothing if left unimplemented. | 
|  |  | 
|  | Accessing previous sources | 
|  | -------------------------- | 
|  | *Since: 1.4* | 
|  |  | 
|  | In the general case, all sources are fetched and tracked independently of one | 
|  | another. In situations where a source needs to access previous source(s) in | 
order to perform its own track and/or fetch, the following attributes can be set to
|  | request access to previous sources: | 
|  |  | 
|  | * :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK` | 
|  |  | 
|  | Indicate that access to previous sources is required during track | 
|  |  | 
|  | * :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH` | 
|  |  | 
|  | Indicate that access to previous sources is required during fetch | 
|  |  | 
|  | The intended use of such plugins is to fetch external dependencies of other | 
|  | sources, typically using some kind of package manager, such that all the | 
|  | dependencies of the original source(s) are available at build time. | 
|  |  | 
|  | When implementing such a plugin, implementors should adhere to the following | 
|  | guidelines: | 
|  |  | 
|  | * Implementations must be able to store the obtained artifacts in a | 
|  | subdirectory. | 
|  |  | 
|  | * Implementations must be able to deterministically generate a unique ref, such | 
|  | that two refs are different if and only if they produce different outputs. | 
|  |  | 
|  | * Implementations must not introduce host contamination. | 
|  |  | 
|  |  | 
|  | .. _core_source_fetcher: | 
|  |  | 
|  | SourceFetcher - Object for fetching individual URLs | 
|  | =================================================== | 
|  |  | 
|  |  | 
|  | Abstract Methods | 
|  | ---------------- | 
|  | SourceFetchers expose the following abstract methods. Unless explicitly | 
|  | mentioned, these methods are mandatory to implement. | 
|  |  | 
|  | * :func:`SourceFetcher.fetch() <buildstream.source.SourceFetcher.fetch>` | 
|  |  | 
|  | Fetches the URL associated with this SourceFetcher, optionally taking an | 
|  | alias override. | 
|  |  | 
|  | Class Reference | 
|  | --------------- | 
|  | """ | 
|  |  | 
|  | import os | 
|  | from contextlib import contextmanager | 
|  | from typing import Iterable, Iterator, Optional, Tuple, TYPE_CHECKING | 
|  |  | 
|  | from . import _yaml, utils | 
|  | from .node import MappingNode | 
|  | from .plugin import Plugin | 
|  | from .types import Consistency, SourceRef, Union, List | 
|  | from ._exceptions import BstError, ImplError, PluginError, ErrorDomain | 
|  | from ._loader.metasource import MetaSource | 
|  | from ._projectrefs import ProjectRefStorage | 
|  | from ._cachekey import generate_key | 
|  | from .storage import CasBasedDirectory | 
|  | from .storage import FileBasedDirectory | 
|  | from .storage.directory import Directory, VirtualDirectoryError | 
|  |  | 
|  | if TYPE_CHECKING: | 
|  | from typing import Any, Dict, Set | 
|  |  | 
|  | # pylint: disable=cyclic-import | 
|  | from ._context import Context | 
|  | from ._project import Project | 
|  |  | 
|  | # pylint: enable=cyclic-import | 
|  |  | 
|  |  | 
class SourceError(BstError):
    """This exception should be raised by :class:`.Source` implementations
    to report errors to the user.

    Args:
        message: The brief error description to report to the user
        detail: A possibly multiline, more detailed error message
        reason: An optional machine readable reason string, used for test cases
        temporary: An indicator of whether the error may go away if the operation is run again. (*Since: 1.2*)
    """

    def __init__(
        self, message: str, *, detail: Optional[str] = None, reason: Optional[str] = None, temporary: bool = False
    ):
        # Route everything through BstError, pinning the SOURCE error domain
        super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
|  |  | 
|  |  | 
class SourceFetcher:
    """SourceFetcher()

    An interface through which a source that downloads from several
    locations (for example, a git source together with its submodules)
    fetches its payloads and has its aliases substituted consistently.

    *Since: 1.2*

    .. attention::

       Implementors of a SourceFetcher must remember to call
       :func:`Source.mark_download_url() <buildstream.source.Source.mark_download_url>`
       for each URL discovered in the configuration data at
       :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` time.
    """

    def __init__(self):
        # The alias extracted from the marked download URL, None until
        # mark_download_url() has been called.
        self.__alias = None

    #############################################################
    #                      Abstract Methods                     #
    #############################################################
    def fetch(self, alias_override: Optional[str] = None, **kwargs) -> None:
        """Fetch remote sources and mirror them locally, ensuring at least
        that the specific reference is cached locally.

        Args:
            alias_override: An alias to substitute for the default one
                defined by the :ref:`aliases <project_source_aliases>` field
                in the project's config.

        Raises:
            :class:`.SourceError`

        Implementors should raise :class:`.SourceError` on network
        failure or when the source reference cannot be matched.
        """
        raise ImplError(f"SourceFetcher '{type(self)}' does not implement fetch()")

    #############################################################
    #                       Public Methods                      #
    #############################################################
    def mark_download_url(self, url: str) -> None:
        """Identifies the URL that this SourceFetcher uses to download

        This must be called during the fetcher's initialization

        Args:
            url: The url used to download.
        """
        # Remember only the alias portion; the full URL is resolved later.
        self.__alias = _extract_alias(url)

    #############################################################
    #            Private Methods used in BuildStream            #
    #############################################################

    # _get_alias()
    #
    # Returns the alias used by this fetcher, or None when no URL
    # has been marked yet.
    #
    def _get_alias(self):
        return self.__alias
|  |  | 
|  |  | 
class Source(Plugin):
    """Source()

    Base Source class.

    All Sources derive from this class, this interface defines how
    the core will be interacting with Sources.
    """

    # The defaults from the project, shared by every source of a given kind
    __defaults = None  # type: Optional[Dict[str, Any]]

    BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
    """Whether access to previous sources is required during track

    When set to True:
      * all sources listed before this source in the given element will be
        fetched before this source is tracked
      * Source.track() will be called with an additional keyword argument
        `previous_sources_dir` where previous sources will be staged
      * this source can not be the first source for an element

    *Since: 1.4*
    """

    BST_REQUIRES_PREVIOUS_SOURCES_FETCH = False
    """Whether access to previous sources is required during fetch

    When set to True:
      * all sources listed before this source in the given element will be
        fetched before this source is fetched
      * Source.fetch() will be called with an additional keyword argument
        `previous_sources_dir` where previous sources will be staged
      * this source can not be the first source for an element

    *Since: 1.4*
    """

    BST_REQUIRES_PREVIOUS_SOURCES_STAGE = False
    """Whether access to previous sources is required when caching this source

    When set to True:
      * All sources listed before current source in the given element will be
        staged with the source when it's cached.
      * This source can not be the first source for an element.

    *Since: 1.4*
    """

    BST_STAGE_VIRTUAL_DIRECTORY = False
    """Whether we can stage this source directly to a virtual directory

    When set to true, virtual directories can be passed to the source to stage
    to.

    *Since: 1.4*
    """

    BST_KEY_REQUIRES_STAGE = False
    """Whether the source will require staging in order to efficiently generate
    a unique key.

    *Since: 1.91.2*
    """
|  |  | 
def __init__(
    self,
    context: "Context",
    project: "Project",
    meta: MetaSource,
    *,
    alias_override: Optional[Tuple[str, str]] = None,
    unique_id: Optional[int] = None
):
    # Construct a Source owned by an element.
    #
    # Args:
    #    context: The invocation context
    #    project: The project in which the owning element resides
    #    meta: The loader metadata describing this source
    #    alias_override: An (alias, uri) pair to substitute for one alias;
    #        only set on re-instantiated sources (see translate_url())
    #    unique_id: An optional unique id to reuse (used when unpickling)
    provenance = meta.config.get_provenance()
    # Set element_name member before parent init, as needed for debug messaging
    self.__element_name = meta.element_name  # The name of the element owning this source
    super().__init__(
        "{}-{}".format(meta.element_name, meta.element_index),
        context,
        project,
        provenance,
        "source",
        unique_id=unique_id,
    )

    self.__element_index = meta.element_index  # The index of the source in the owning element's source list
    self.__element_kind = meta.element_kind  # The kind of the element owning this source
    self.__directory = meta.directory  # Staging relative directory
    self.__consistency = Consistency.INCONSISTENT  # Cached consistency state
    self.__meta_kind = meta.kind  # The kind of this source, required for unpickling

    self.__key = None  # Cache key for source

    # The alias_override is only set on a re-instantiated Source
    self.__alias_override = alias_override  # Tuple of alias and its override to use instead
    self.__expected_alias = None  # The primary alias
    # Set of marked download URLs
    self.__marked_urls = set()  # type: Set[str]

    # Collect the composited element configuration and
    # ask the element to configure itself.
    self.__init_defaults(project, meta)
    self.__config = self.__extract_config(meta)
    self.__first_pass = meta.first_pass

    # cached values for commonly accessed values on the source
    self.__mirror_directory = None  # type: Optional[str]

    self._configure(self.__config)
    # CAS digest of the staged sources, computed lazily when
    # BST_KEY_REQUIRES_STAGE requires staging to generate a key
    self.__digest = None

COMMON_CONFIG_KEYS = ["kind", "directory"]
"""Common source config keys

Source config keys that must not be accessed in configure(), and
should be checked for using node.validate_keys().
"""
|  |  | 
|  | ############################################################# | 
|  | #                      Abstract Methods                     # | 
|  | ############################################################# | 
|  | def get_consistency(self) -> int: | 
|  | """Report whether the source has a resolved reference | 
|  |  | 
|  | Returns: | 
|  | (:class:`.Consistency`): The source consistency | 
|  | """ | 
|  | raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind())) | 
|  |  | 
def load_ref(self, node: MappingNode) -> None:
    """Loads the *ref* for this Source from the specified *node*.

    Args:
        node: The YAML node to load the ref from

    .. note::

       The *ref* for the Source is normally read at
       :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` time;
       this entry point only serves loading refs from locations other
       than the `element.bst` file in which the given Source object
       was declared.

    *Since: 1.2*
    """
    raise ImplError(f"Source plugin '{self.get_kind()}' does not implement load_ref()")
|  |  | 
def get_ref(self) -> SourceRef:
    """Fetch the internal ref, however it is represented

    Returns:
        (simple object): The internal source reference, or ``None``

    .. note::

       The reference is the user provided (or track resolved) value
       the plugin uses to represent a specific input, such as a VCS
       commit or a tarball's checksum. A reference is usually a string,
       though a plugin may choose another representation such as a tuple.

       Implementations *must* return a ``None`` value in the case that
       the ref was not loaded. E.g. a ``(None, None)`` tuple is not acceptable.
    """
    raise ImplError(f"Source plugin '{self.get_kind()}' does not implement get_ref()")
|  |  | 
def set_ref(self, ref: SourceRef, node: MappingNode) -> None:
    """Applies the internal ref, however it is represented

    Args:
        ref (simple object): The internal source reference to set, or ``None``
        node: The same dictionary which was previously passed
            to :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>`

    See :func:`Source.get_ref() <buildstream.source.Source.get_ref>`
    for a discussion on the *ref* parameter.

    .. note::

       Implementors must support the special ``None`` value here to
       allow clearing any existing ref.
    """
    raise ImplError(f"Source plugin '{self.get_kind()}' does not implement set_ref()")
|  |  | 
def track(self, **kwargs) -> SourceRef:
    """Resolve a new ref from the plugin's track option

    Args:
        previous_sources_dir (str): directory where previous sources are staged.
            Note that this keyword argument is available only when
            :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_TRACK`
            is set to True.

    Returns:
        (simple object): A new internal source reference, or None

    Backends which support resolving references from a symbolic
    tracking branch or tag should implement this to perform that
    task on behalf of :ref:`bst source track <invoking_source_track>`
    commands.

    Doing so usually requires fetching new content from a remote origin
    to see if a new ref has appeared for your branch or tag. Where the
    backend store supports querying a new ref from symbolic tracking
    data without downloading, that approach is preferable.

    See :func:`Source.get_ref() <buildstream.source.Source.get_ref>`
    for a discussion on the *ref* parameter.
    """
    # Tracking is optional; sources without tracking support simply
    # resolve no new ref.
    return None
|  |  | 
def fetch(self, **kwargs) -> None:
    """Fetch remote sources and mirror them locally, ensuring at least
    that the specific reference is cached locally.

    Args:
        previous_sources_dir (str): directory where previous sources are staged.
            Note that this keyword argument is available only when
            :attr:`~buildstream.source.Source.BST_REQUIRES_PREVIOUS_SOURCES_FETCH`
            is set to True.

    Raises:
        :class:`.SourceError`

    Implementors should raise :class:`.SourceError` on network
    failure or when the source reference cannot be matched.
    """
    raise ImplError(f"Source plugin '{self.get_kind()}' does not implement fetch()")
|  |  | 
def stage(self, directory: Union[str, Directory]) -> None:
    """Stage the sources to a directory

    Args:
        directory: Path to stage the source

    Raises:
        :class:`.SourceError`

    Implementors should assume that *directory* already exists
    and stage already cached sources to the passed directory.

    A :class:`.SourceError` should be raised when a system error
    is encountered.
    """
    raise ImplError(f"Source plugin '{self.get_kind()}' does not implement stage()")
|  |  | 
def init_workspace(self, directory: str) -> None:
    """Initialises a new workspace

    Args:
        directory: Path of the workspace to init

    Raises:
        :class:`.SourceError`

    The default implementation simply calls
    :func:`Source.stage() <buildstream.source.Source.stage>`.

    Implementors overriding this method should assume that *directory*
    already exists, and should raise :class:`.SourceError` when a
    system error is encountered.
    """
    # By default a workspace is just a staged copy of the sources.
    self.stage(directory)
|  |  | 
def get_source_fetchers(self) -> Iterable[SourceFetcher]:
    """Get the objects that are used for fetching

    Sources which do not download from multiple URLs are recommended
    to leave this unimplemented and fall back on the default behaviour.

    Returns:
        The Source's SourceFetchers, if any.

    .. note::

       Implementors can implement this as a generator.

       The :func:`SourceFetcher.fetch() <buildstream.source.SourceFetcher.fetch>`
       method will be called on the returned fetchers one by one,
       before consuming the next fetcher in the list.

    *Since: 1.2*
    """
    # By default a source has no separate fetcher objects.
    return []
|  |  | 
def validate_cache(self) -> None:
    """Implement any validations once we know the sources are cached

    This is guaranteed to be called only once for a given session
    once the sources are known to be
    :attr:`Consistency.CACHED <buildstream.types.Consistency.CACHED>`;
    if source tracking is enabled in the session for this source,
    then this will only be called if the sources become cached after
    tracking completes.

    *Since: 1.4*
    """
    # Default implementation performs no validation.
|  |  | 
|  | ############################################################# | 
|  | #                       Public Methods                      # | 
|  | ############################################################# | 
|  | def get_mirror_directory(self) -> str: | 
|  | """Fetches the directory where this source should store things | 
|  |  | 
|  | Returns: | 
|  | The directory belonging to this source | 
|  | """ | 
|  | if self.__mirror_directory is None: | 
|  | # Create the directory if it doesnt exist | 
|  | context = self._get_context() | 
|  | directory = os.path.join(context.sourcedir, self.get_kind()) | 
|  | os.makedirs(directory, exist_ok=True) | 
|  | self.__mirror_directory = directory | 
|  |  | 
|  | return self.__mirror_directory | 
|  |  | 
def translate_url(self, url: str, *, alias_override: Optional[str] = None, primary: bool = True) -> str:
    """Translates the given url which may be specified with an alias
    into a fully qualified url.

    Args:
        url: A URL, which may be using an alias
        alias_override: Optionally, a URI to override the alias with. (*Since: 1.2*)
        primary: Whether this is the primary URL for the source. (*Since: 1.2*)

    Returns:
        The fully qualified URL, with aliases resolved

    .. note::

       This must be called for every URL in the configuration during
       :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` if
       :func:`Source.mark_download_url() <buildstream.source.Source.mark_download_url>`
       is not called.
    """
    # Ensure that the download URL is also marked
    self.mark_download_url(url, primary=primary)

    # Alias overriding can happen explicitly (by command-line) or
    # implicitly (the Source being constructed with an __alias_override).
    if alias_override or self.__alias_override:
        url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
        if url_alias:
            if alias_override:
                url = alias_override + url_body
            else:
                # Implicit alias overrides may only be done for one
                # specific alias, so that sources that fetch from multiple
                # URLs and use different aliases default to only overriding
                # one alias, rather than getting confused.
                override_alias = self.__alias_override[0]  # type: ignore
                override_url = self.__alias_override[1]  # type: ignore
                if url_alias == override_alias:
                    url = override_url + url_body
        # A URL with no alias (or an alias that does not match the
        # implicit override) is returned unmodified here.
        return url
    else:
        # No override in play: let the project resolve the alias.
        project = self._get_project()
        return project.translate_url(url, first_pass=self.__first_pass)
|  |  | 
def mark_download_url(self, url: str, *, primary: bool = True) -> None:
    """Identifies the URL that this Source uses to download

    Args:
        url (str): The URL used to download
        primary (bool): Whether this is the primary URL for the source

    .. note::

       This must be called for every URL in the configuration during
       :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` if
       :func:`Source.translate_url() <buildstream.source.Source.translate_url>`
       is not called.

    *Since: 1.2*
    """
    # Only mark the Source level aliases on the main instance, not in
    # a reinstantiated instance in mirroring.
    if not self.__alias_override:
        if primary:
            expected_alias = _extract_alias(url)

            # The primary URL fixes the source's expected alias; marking
            # two primary URLs with different aliases is a plugin bug.
            assert (
                self.__expected_alias is None or self.__expected_alias == expected_alias
            ), "Primary URL marked twice with different URLs"

            self.__expected_alias = expected_alias

    # Enforce proper behaviour of plugins by ensuring that all
    # aliased URLs have been marked at Plugin.configure() time.
    #
    if self._get_configuring():
        # Record marked urls while configuring
        #
        self.__marked_urls.add(url)
    else:
        # If an unknown aliased URL is seen after configuring,
        # this is an error.
        #
        # It is still possible that a URL that was not mentioned
        # in the element configuration can be marked, this is
        # the case for git submodules which might be automatically
        # discovered.
        #
        assert url in self.__marked_urls or not _extract_alias(
            url
        ), "URL was not seen at configure time: {}".format(url)
|  |  | 
def get_project_directory(self) -> str:
    """Fetch the project base directory

    This is useful for sources which need to load resources
    stored somewhere inside the project.

    Returns:
        The project base directory
    """
    return self._get_project().directory
|  |  | 
@contextmanager
def tempdir(self) -> Iterator[str]:
    """Context manager for working in a temporary directory

    Yields:
        A path to a temporary directory

    Source plugins should use this rather than the tempfile module
    directly: it automatically cleans up on termination by catching
    the signal before os._exit(), and it places the directory inside
    the source's 'mirror directory' as expected.
    """
    with utils._tempdir(dir=self.get_mirror_directory()) as scratch:
        yield scratch
|  |  | 
|  | ############################################################# | 
|  | #       Private Abstract Methods used in BuildStream        # | 
|  | ############################################################# | 
|  |  | 
|  | # Returns the local path to the source | 
|  | # | 
|  | # If the source is locally available, this method returns the absolute | 
|  | # path. Otherwise, the return value is None. | 
|  | # | 
|  | # This is an optimization for local sources and optional to implement. | 
|  | # | 
|  | # Returns: | 
|  | #    (str): The local absolute path, or None | 
|  | # | 
|  | def _get_local_path(self): | 
|  | return None | 
|  |  | 
|  | ############################################################# | 
|  | #            Private Methods used in BuildStream            # | 
|  | ############################################################# | 
|  | # Stage files at the localpath into the cascache | 
|  | # | 
|  | # Returns: | 
|  | #   the hash of the cas directory | 
|  | def _stage_into_cas(self) -> str: | 
|  | # FIXME: this should not be called for sources with digests already set | 
|  | # since they will already have been staged into the cache. However, | 
|  | # _get_unique_key is sometimes called outside of _generate_key | 
|  | if self.__digest is None: | 
|  | cas_dir = CasBasedDirectory(self._get_context().get_cascache()) | 
|  | self.stage(cas_dir) | 
|  | digest = cas_dir._get_digest() | 
|  | self.__digest = digest | 
|  | else: | 
|  | # XXX: an assignment to please mypy | 
|  | digest = self.__digest | 
|  | return digest.hash | 
|  |  | 
# _preflight():
#
# Wrapper around the plugin provided preflight() method, prepending
# this source's provenance to any raised BstError.
#
def _preflight(self):
    try:
        self.preflight()
    except BstError as err:
        message = "{}: {}".format(self, err)
        raise SourceError(message, reason=err.reason) from err
|  |  | 
# _update_state():
#
# Update cached consistency for a source
#
# This must be called whenever the state of a source may have changed.
#
def _update_state(self):

    # Once a source is known to be cached there is nothing to update
    if self.__consistency < Consistency.CACHED:

        # Source consistency interrogations are silent.
        context = self._get_context()
        with context.messenger.silence():
            try:
                self.__consistency = self.get_consistency()  # pylint: disable=assignment-from-no-return
            except SourceError:
                # SourceErrors should be preserved so that the
                # plugin can communicate real error cases.
                raise
            except Exception as err:  # pylint: disable=broad-except
                # Generic errors point to bugs in the plugin, so
                # we need to catch them and make sure they do not
                # cause stacktraces
                raise PluginError(
                    "Source plugin '{}' failed to compute source consistency: {}".format(self.get_kind(), err),
                    reason="source-bug",
                )

        # Give the Source an opportunity to validate the cached
        # sources as soon as the Source becomes Consistency.CACHED.
        if self.__consistency == Consistency.CACHED:
            self.validate_cache()

# _get_consistency():
#
# Return cached consistency, as last computed by _update_state()
#
def _get_consistency(self):
    return self.__consistency
|  |  | 
|  | # Wrapper function around plugin provided fetch method | 
|  | # | 
|  | # Args: | 
|  | #   previous_sources (list): List of Sources listed prior to this source | 
|  | #   fetch_original (bool): whether to fetch full source, or use local CAS | 
|  | # | 
def _fetch(self, previous_sources):
    """Wrapper around the plugin provided fetch method.

    Args:
       previous_sources (list): List of Sources listed prior to this source
    """
    # Plugins which do not need previous sources take the simple path.
    if not self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
        self.__do_fetch()
        return

    self.__ensure_previous_sources(previous_sources)
    with self.__stage_previous_sources(previous_sources) as staging_directory:
        self.__do_fetch(previous_sources_dir=self.__ensure_directory(staging_directory))
|  |  | 
|  | # Wrapper for stage() api which gives the source | 
|  | # plugin a fully constructed path considering the | 
|  | # 'directory' option | 
|  | # | 
def _stage(self, directory):
    """Stage this source, giving the plugin a fully constructed path
    which honors the 'directory' source option."""
    target = self.__ensure_directory(directory)

    if not self.BST_KEY_REQUIRES_STAGE:
        self.stage(target)
        return

    # _get_unique_key() must have been called before _stage();
    # that is what populates the digest used here.
    assert self.__digest is not None
    cas_dir = CasBasedDirectory(self._get_context().get_cascache(), digest=self.__digest)
    target.import_files(cas_dir)
|  |  | 
|  | # Wrapper for init_workspace() | 
def _init_workspace(self, directory):
    """Wrapper for init_workspace(), constructing the staging directory."""
    if self.BST_STAGE_VIRTUAL_DIRECTORY:
        # Wrap the plain path into a virtual directory for plugins
        # which operate on virtual directories.
        directory = FileBasedDirectory(external_directory=directory)

    workspace_dir = self.__ensure_directory(directory)
    self.init_workspace(workspace_dir)
|  |  | 
|  | # _get_unique_key(): | 
|  | # | 
|  | # Wrapper for get_unique_key() api | 
|  | # | 
def _get_unique_key(self):
    """Wrapper for get_unique_key(), adding built-in configuration.

    Returns:
       (dict): The composed unique key for this source
    """
    if self.BST_KEY_REQUIRES_STAGE:
        unique = self._stage_into_cas()
    else:
        unique = self.get_unique_key()  # pylint: disable=assignment-from-no-return
    return {"directory": self.__directory, "unique": unique}
|  |  | 
|  | # _project_refs(): | 
|  | # | 
|  | # Gets the appropriate ProjectRefs object for this source, | 
|  | # which depends on whether the owning element is a junction | 
|  | # | 
|  | # Args: | 
|  | #    project (Project): The project to check | 
|  | # | 
def _project_refs(self, project):
    """Return the appropriate ProjectRefs object for this source.

    Junction elements store their refs separately from regular elements.

    Args:
       project (Project): The project to check
    """
    if self.__element_kind == "junction":
        return project.junction_refs
    return project.refs
|  |  | 
|  | # _load_ref(): | 
|  | # | 
|  | # Loads the ref for the said source. | 
|  | # | 
|  | # Raises: | 
|  | #    (SourceError): If the source does not implement load_ref() | 
|  | # | 
|  | # Returns: | 
|  | #    (ref): A redundant ref specified inline for a project.refs using project | 
|  | # | 
|  | # This is partly a wrapper around `Source.load_ref()`, it will decide | 
|  | # where to load the ref from depending on which project the source belongs | 
|  | # to and whether that project uses a project.refs file. | 
|  | # | 
|  | # Note the return value is used to construct a summarized warning in the | 
|  | # case that the toplevel project uses project.refs and also lists refs | 
|  | # which will be ignored. | 
|  | # | 
def _load_ref(self):
    """Load the ref for this source, from project.refs or inline.

    Decides where to load the ref from depending on which project the
    source belongs to and whether that project uses a project.refs file.

    Returns:
       (ref): A redundant ref specified inline for a project.refs using project

    Raises:
       (SourceError): If the source does not implement load_ref()
    """
    context = self._get_context()
    project = self._get_project()
    toplevel = context.get_toplevel_project()
    redundant_ref = None

    element_name = self.__element_name
    element_idx = self.__element_index

    def do_load_ref(node):
        # Fix: use the `node` parameter instead of reaching into the
        # enclosing scope's `ref_node`, which only happened to alias it.
        try:
            self.load_ref(node)
        except ImplError as e:
            raise SourceError(
                "{}: Storing refs in project.refs is not supported by '{}' sources".format(self, self.get_kind()),
                reason="unsupported-load-ref",
            ) from e

    # If the main project overrides the ref, use the override
    if project is not toplevel and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
        refs = self._project_refs(toplevel)
        ref_node = refs.lookup_ref(project.name, element_name, element_idx)
        if ref_node is not None:
            do_load_ref(ref_node)

    # If the project itself uses project.refs, clear the ref which
    # was already loaded via Source.configure(), as this would
    # violate the rule of refs being either in project.refs or in
    # the elements themselves.
    #
    elif project.ref_storage == ProjectRefStorage.PROJECT_REFS:

        # First warn if there is a ref already loaded, and reset it
        redundant_ref = self.get_ref()  # pylint: disable=assignment-from-no-return
        if redundant_ref is not None:
            self.set_ref(None, {})

        # Try to load the ref
        refs = self._project_refs(project)
        ref_node = refs.lookup_ref(project.name, element_name, element_idx)
        if ref_node is not None:
            do_load_ref(ref_node)

    return redundant_ref
|  |  | 
|  | # _set_ref() | 
|  | # | 
|  | # Persists the ref for this source. This will decide where to save the | 
|  | # ref, or refuse to persist it, depending on active ref-storage project | 
|  | # settings. | 
|  | # | 
|  | # Args: | 
#    new_ref (SourceRef): The new reference to save
|  | #    save (bool): Whether to write the new reference to file or not | 
|  | # | 
|  | # Returns: | 
|  | #    (bool): Whether the ref has changed | 
|  | # | 
|  | # Raises: | 
|  | #    (SourceError): In the case we encounter errors saving a file to disk | 
|  | # | 
def _set_ref(self, new_ref, *, save):
    """Persist the ref for this source.

    Decides where to save the ref, or refuses to persist it, depending
    on the active ref-storage project settings.

    Args:
       new_ref: The new reference to save
       save (bool): Whether to write the new reference to file or not

    Returns:
       (bool): Whether the ref has changed

    Raises:
       (SourceError): In the case we encounter errors saving a file to disk
    """
    context = self._get_context()
    project = self._get_project()
    toplevel = context.get_toplevel_project()
    toplevel_refs = self._project_refs(toplevel)
    provenance = self._get_provenance()

    element_name = self.__element_name
    element_idx = self.__element_index

    #
    # Step 1 - Obtain the node
    #
    # NOTE(review): if neither branch below replaces this placeholder
    # dict, the later strip_node_info() call would fail on a plain dict —
    # presumably one branch always assigns a real node; confirm.
    node = {}
    if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
        node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)

    if project is toplevel and not node:
        node = provenance._node

    # Ensure the node is not from a junction
    if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance._project is not toplevel:
        if provenance._project is project:
            # Junctioned projects are not writable from the toplevel;
            # warn rather than fail so tracking remains best-effort.
            self.warn("{}: Not persisting new reference in junctioned project".format(self))
        elif provenance._project is None:
            assert provenance._filename == ""
            assert provenance._shortname == ""
            raise SourceError("{}: Error saving source reference to synthetic node.".format(self))
        else:
            raise SourceError(
                "{}: Cannot track source in a fragment from a junction".format(provenance._shortname),
                reason="tracking-junction-fragment",
            )

    #
    # Step 2 - Set the ref in memory, and determine changed state
    #
    # TODO: we are working on dictionaries here, would be nicer to just work on the nodes themselves
    clean = node.strip_node_info()
    to_modify = node.strip_node_info()

    current_ref = self.get_ref()  # pylint: disable=assignment-from-no-return

    # Set the ref regardless of whether it changed, the
    # TrackQueue() will want to update a specific node with
    # the ref, regardless of whether the original has changed.
    self.set_ref(new_ref, to_modify)

    if current_ref == new_ref or not save:
        # Note: We do not look for and propagate changes at this point
        # which might result in desync depending if something changes about
        # tracking in the future.  For now, this is quite safe.
        return False

    # Compute a key-level diff between the clean snapshot and the
    # modified copy: "del" for removed keys, "mod" for changed values,
    # "add" for newly introduced keys.
    actions = {}
    for k, v in clean.items():
        if k not in to_modify:
            actions[k] = "del"
        else:
            if v != to_modify[k]:
                actions[k] = "mod"
    for k in to_modify.keys():
        if k not in clean:
            actions[k] = "add"

    # Descend into `container` along `path`, synthesising any
    # intermediate dicts/lists which do not yet exist, and return the
    # innermost container (the one holding the final string key).
    def walk_container(container, path):
        # For each step along path, synthesise if we need to.
        # If we're synthesising missing list entries, we know we're
        # doing this for project.refs so synthesise empty dicts for the
        # intervening entries too
        lpath = path.copy()
        lpath.append("")  # We know the last step will be a string key
        for step, next_step in zip(lpath, lpath[1:]):
            if type(step) is str:  # pylint: disable=unidiomatic-typecheck
                # handle dict container
                if step not in container:
                    # The type of the next step decides whether we
                    # synthesise a dict (string key) or a list (index).
                    if type(next_step) is str:  # pylint: disable=unidiomatic-typecheck
                        container[step] = {}
                    else:
                        container[step] = []
                container = container[step]
            else:
                # handle list container
                if len(container) <= step:
                    while len(container) <= step:
                        container.append({})
                container = container[step]
        return container

    # Apply a single diff action (del/mod/add) to the roundtrip data.
    def process_value(action, container, path, key, new_value):
        container = walk_container(container, path)
        if action == "del":
            del container[key]
        elif action == "mod":
            container[key] = new_value
        elif action == "add":
            container[key] = new_value
        else:
            assert False, "BUG: Unknown action: {}".format(action)

    # Cache of filename -> roundtrip-loaded YAML, so multiple changed
    # keys in the same file share one load and one dump.
    roundtrip_cache = {}
    for key, action in actions.items():
        # Obtain the top level node and its file
        if action == "add":
            # An added key has no provenance of its own yet; use the
            # containing node's provenance instead.
            provenance = node.get_provenance()
        else:
            provenance = node.get_node(key).get_provenance()

        toplevel_node = provenance._toplevel

        # Get the path to whatever changed
        if action == "add":
            path = toplevel_node._find(node)
        else:
            full_path = toplevel_node._find(node.get_node(key))
            # We want the path to the node containing the key, not to the key
            path = full_path[:-1]

        roundtrip_file = roundtrip_cache.get(provenance._filename)
        if not roundtrip_file:
            roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
                provenance._filename, allow_missing=True
            )

        # Get the value of the round trip file that we need to change
        process_value(action, roundtrip_file, path, key, to_modify.get(key))

    #
    # Step 3 - Apply the change in project data
    #
    for filename, data in roundtrip_cache.items():
        # This is our roundtrip dump from the track
        try:
            _yaml.roundtrip_dump(data, filename)
        except OSError as e:
            raise SourceError(
                "{}: Error saving source reference to '{}': {}".format(self, filename, e), reason="save-ref-error"
            ) from e

    return True
|  |  | 
|  | # Wrapper for track() | 
|  | # | 
|  | # Args: | 
|  | #   previous_sources (list): List of Sources listed prior to this source | 
|  | # | 
def _track(self, previous_sources: List["Source"]) -> SourceRef:
    """Wrapper for track(), handling previous sources and ref updates.

    Args:
       previous_sources (list): List of Sources listed prior to this source

    Returns:
       The newly tracked ref (or the current ref when tracking found nothing)
    """
    if self.BST_KEY_REQUIRES_STAGE:
        # These sources derive their key from staged content, so
        # tracking only needs to ensure the key exists.
        self._get_unique_key()
        return None

    if not self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
        new_ref = self.__do_track()
    else:
        self.__ensure_previous_sources(previous_sources)
        with self.__stage_previous_sources(previous_sources) as staging_directory:
            new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))

    current_ref = self.get_ref()  # pylint: disable=assignment-from-no-return

    # A plugin which tracked nothing keeps its current ref
    if new_ref is None:
        new_ref = current_ref

    if current_ref != new_ref:
        self.info("Found new revision: {}".format(new_ref))

    # Save ref in local process for subsequent sources
    self._set_ref(new_ref, save=False)

    return new_ref
|  |  | 
|  | # _is_trackable() | 
|  | # | 
|  | # Returns: | 
|  | #   (bool): Whether this source is trackable | 
|  | # | 
|  | def _is_trackable(self) -> bool: | 
|  | """Report whether this source can be tracked.""" | 
|  | # sources that require staging to generate keys cannot be tracked | 
|  | return not self.BST_KEY_REQUIRES_STAGE | 
|  |  | 
|  | # _requires_previous_sources() | 
|  | # | 
|  | # If a plugin requires access to previous sources at track or fetch time, | 
# then it cannot be the first source of an element.
|  | # | 
|  | # Returns: | 
|  | #   (bool): Whether this source requires access to previous sources | 
|  | # | 
|  | def _requires_previous_sources(self): | 
|  | return self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH | 
|  |  | 
|  | # Returns the alias if it's defined in the project | 
def _get_alias(self):
    """Return this source's alias, or None if the project does not define it."""
    alias = self.__expected_alias
    project = self._get_project()
    # The alias must already be defined in the project's aliases,
    # otherwise http://foo gets treated like it contains an alias.
    if not project.get_alias_uri(alias, first_pass=self.__first_pass):
        return None
    return alias
|  |  | 
def _generate_key(self, previous_sources):
    """Compute and cache this source's cache key."""
    key_components = [self._get_unique_key()]

    if self.BST_REQUIRES_PREVIOUS_SOURCES_STAGE:
        # Previous sources contribute to the key when staging depends on them
        key_components.extend(prev._get_unique_key() for prev in previous_sources)

    self.__key = generate_key(key_components)
|  |  | 
@property
def _key(self):
    """The cached cache key for this source."""
    return self.__key
|  |  | 
|  | # Gives a ref path that points to where sources are kept in the CAS | 
|  | def _get_source_name(self): | 
|  | # @ is used to prevent conflicts with project names | 
|  | return "{}/{}".format(self.get_kind(), self._key) | 
|  |  | 
|  | def _get_brief_display_key(self): | 
|  | context = self._get_context() | 
|  | key = self._key | 
|  |  | 
|  | length = min(len(key), context.log_key_length) | 
|  | return key[:length] | 
|  |  | 
@property
def _element_name(self):
    """Name of the element which owns this source."""
    return self.__element_name
|  |  | 
|  | # _get_args_for_child_job_pickling(self) | 
|  | # | 
|  | # Return data necessary to reconstruct this object in a child job process. | 
|  | # | 
|  | # Returns: | 
|  | #    (str, dict): A tuple of (meta_kind, state), where a factory can use | 
|  | #    `meta_kind` to create an instance of the same type as `self`. `state` | 
|  | #    is what we want `self.__dict__` to be restored to after instantiation | 
|  | #    in the child process. | 
|  | # | 
def _get_args_for_child_job_pickling(self):
    """Return data necessary to reconstruct this object in a child job process.

    Returns:
       (str, dict): A tuple of (meta_kind, state); a factory can use
       `meta_kind` to create an instance of the same type as `self`,
       and `state` is what `self.__dict__` should be restored to after
       instantiation in the child process.
    """
    # No copy of `self.__dict__` is needed here: CPython's default
    # `_PyObject_GetState` in `typeobject.c` currently works this way,
    # and the pickle documentation's sketch of instance pickling also
    # returns `self.__dict__` directly:
    # https://docs.python.org/3/library/pickle.html#pickling-class-instances
    return self.__meta_kind, self.__dict__
|  |  | 
|  | ############################################################# | 
|  | #                   Local Private Methods                   # | 
|  | ############################################################# | 
|  |  | 
|  | # __clone_for_uri() | 
|  | # | 
|  | # Clone the source with an alternative URI setup for the alias | 
|  | # which this source uses. | 
|  | # | 
|  | # This is used for iteration over source mirrors. | 
|  | # | 
|  | # Args: | 
|  | #    uri (str): The alternative URI for this source's alias | 
|  | # | 
|  | # Returns: | 
|  | #    (Source): A new clone of this Source, with the specified URI | 
|  | #              as the value of the alias this Source has marked as | 
|  | #              primary with either mark_download_url() or | 
|  | #              translate_url(). | 
|  | # | 
def __clone_for_uri(self, uri):
    """Clone this Source with `uri` substituted for its primary alias.

    This is used for iteration over source mirrors.

    Args:
       uri (str): The alternative URI for this source's alias

    Returns:
       (Source): A new clone of this Source, using `uri` as the value
                 of the alias this Source marked as primary with either
                 mark_download_url() or translate_url().
    """
    context = self._get_context()
    project = self._get_project()
    alias = self._get_alias()

    # Rebuild a MetaSource from the current element
    meta = MetaSource(
        self.__element_name,
        self.__element_index,
        self.__element_kind,
        self.get_kind(),
        self.__config,
        self.__directory,
    )
    meta.first_pass = self.__first_pass

    clone = type(self)(context, project, meta, alias_override=(alias, uri), unique_id=self._unique_id)

    # Post instantiation routines which would normally be driven
    # by the loading process.
    clone._preflight()
    clone._load_ref()
    clone._update_state()

    return clone
|  |  | 
|  | # Context manager that stages sources in a cas based or temporary file | 
|  | # based directory | 
# Context manager staging sources into a CAS-backed or plain
# temporary directory.
@contextmanager
def __stage_previous_sources(self, sources):
    """Stage `sources` into a temporary directory and yield it.

    Yields a virtual directory when this source stages virtually,
    otherwise the plain temporary path.
    """
    with self.tempdir() as tempdir:
        virtual_dir = FileBasedDirectory(external_directory=tempdir)

        for previous in sources:
            staging_target = virtual_dir if previous.BST_STAGE_VIRTUAL_DIRECTORY else tempdir
            previous._stage(staging_target)

        yield virtual_dir if self.BST_STAGE_VIRTUAL_DIRECTORY else tempdir
|  |  | 
|  | # Tries to call fetch for every mirror, stopping once it succeeds | 
def __do_fetch(self, **kwargs):
    """Try to fetch from every mirror URI, stopping once one succeeds.

    Raises:
       (SourceError): If an alias has no URIs configured at all
       (BstError): The last fetch failure, when every URI failed
    """
    project = self._get_project()
    context = self._get_context()

    # Silence the STATUS messages which might happen as a result
    # of checking the source fetchers.
    with context.messenger.silence():
        source_fetchers = self.get_source_fetchers()

    # Use the source fetchers if they are provided
    #
    if source_fetchers:

        # Use a contorted loop here, this is to allow us to
        # silence the messages which can result from consuming
        # the items of source_fetchers, if it happens to be a generator.
        #
        source_fetchers = iter(source_fetchers)

        while True:

            with context.messenger.silence():
                try:
                    fetcher = next(source_fetchers)
                except StopIteration:
                    # as per PEP479, we are not allowed to let StopIteration
                    # thrown from a context manager.
                    # Catching it here and breaking instead.
                    break

            alias = fetcher._get_alias()

            # Fix: initialize per fetcher. Previously `last_error` could
            # be unbound (NameError) when an alias had no URIs, or a
            # stale error from an earlier fetcher could be raised here.
            last_error = None
            for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
                try:
                    fetcher.fetch(uri)
                # FIXME: Need to consider temporary vs. permanent failures,
                #        and how this works with retries.
                except BstError as e:
                    last_error = e
                    continue

                # No error, we're done with this fetcher
                break

            else:
                # No break occurred: every URI failed, or none existed
                if last_error is None:
                    raise SourceError(
                        "{}: No URIs available for alias '{}'".format(self, alias), reason="no-alias-uris"
                    )
                raise last_error

    # Default codepath is to reinstantiate the Source
    #
    else:
        alias = self._get_alias()
        if self.__first_pass:
            mirrors = project.first_pass_config.mirrors
        else:
            mirrors = project.config.mirrors
        if not mirrors or not alias:
            self.fetch(**kwargs)
            return

        last_error = None
        for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
            new_source = self.__clone_for_uri(uri)
            try:
                new_source.fetch(**kwargs)
            # FIXME: Need to consider temporary vs. permanent failures,
            #        and how this works with retries.
            except BstError as e:
                last_error = e
                continue

            # No error, we're done here
            return

        # Every URI failed, or none existed; re-raise the last error
        if last_error is None:
            raise SourceError(
                "{}: No URIs available for alias '{}'".format(self, alias), reason="no-alias-uris"
            )
        raise last_error
|  |  | 
|  | # Tries to call track for every mirror, stopping once it succeeds | 
def __do_track(self, **kwargs):
    """Try to track against every mirror URI, stopping once one succeeds.

    Raises:
       (SourceError): If the alias has no URIs configured at all
       (BstError): The last tracking failure, when every URI failed
    """
    project = self._get_project()
    alias = self._get_alias()
    if self.__first_pass:
        mirrors = project.first_pass_config.mirrors
    else:
        mirrors = project.config.mirrors
    # If there are no mirrors, or no aliases to replace, there's nothing to do here.
    if not mirrors or not alias:
        return self.track(**kwargs)

    # NOTE: We are assuming here that tracking only requires substituting the
    #       first alias used
    #
    # Fix: initialize `last_error`. Previously the final raise could hit
    # a NameError when the alias had no URIs at all.
    last_error = None
    for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
        new_source = self.__clone_for_uri(uri)
        try:
            ref = new_source.track(**kwargs)  # pylint: disable=assignment-from-none
        # FIXME: Need to consider temporary vs. permanent failures,
        #        and how this works with retries.
        except BstError as e:
            last_error = e
            continue
        return ref

    if last_error is None:
        raise SourceError("{}: No URIs available for alias '{}'".format(self, alias), reason="no-alias-uris")
    raise last_error
|  |  | 
|  | # Ensures a fully constructed path and returns it | 
def __ensure_directory(self, directory):
    """Return a fully constructed staging path, creating it as needed.

    Honors the 'directory' source option for both plain paths and
    virtual Directory objects.
    """
    if isinstance(directory, Directory):
        if self.__directory is not None:
            try:
                steps = self.__directory.lstrip(os.sep).split(os.sep)
                directory = directory.descend(*steps, create=True)
            except VirtualDirectoryError as e:
                raise SourceError(
                    "Failed to descend into staging directory: {}".format(e), reason="ensure-stage-dir-fail"
                ) from e
        return directory

    # Plain filesystem path: append the configured subdirectory and
    # create it on disk.
    if self.__directory is not None:
        directory = os.path.join(directory, self.__directory.lstrip(os.sep))

    try:
        os.makedirs(directory, exist_ok=True)
    except OSError as e:
        raise SourceError(
            "Failed to create staging directory: {}".format(e), reason="ensure-stage-dir-fail"
        ) from e

    return directory
|  |  | 
@classmethod
def __init_defaults(cls, project, meta):
    """Lazily initialize the class-wide source defaults from the project."""
    if cls.__defaults is not None:
        return

    if meta.first_pass:
        overrides = project.first_pass_config.source_overrides
    else:
        overrides = project.source_overrides
    cls.__defaults = overrides.get_mapping(meta.kind, default={})
|  |  | 
|  | # This will resolve the final configuration to be handed | 
|  | # off to source.configure() | 
|  | # | 
# Resolve the final configuration to be handed off to source.configure()
#
@classmethod
def __extract_config(cls, meta):
    """Compose the project defaults with the element-provided config."""
    defaults = cls.__defaults.get_mapping("config", default={})
    config = defaults.clone()

    # Element-provided configuration is composited over the defaults
    meta.config._composite(config)
    config._assert_fully_composited()

    return config
|  |  | 
|  | # Ensures that previous sources have been tracked and fetched. | 
|  | # | 
def __ensure_previous_sources(self, previous_sources):
    """Ensure that all previous sources have been tracked and fetched."""
    for index, previous in enumerate(previous_sources):
        # BuildStream should track sources in the order they appear so
        # previous sources should never be in an inconsistent state
        assert previous.get_consistency() != Consistency.INCONSISTENT

        # Fetch anything which is resolved but not yet cached
        if previous.get_consistency() == Consistency.RESOLVED:
            previous._fetch(previous_sources[0:index])
|  |  | 
|  |  | 
def _extract_alias(url):
    """Return the alias portion of `url`, or "" when there is none.

    A prefix matching a known URI scheme (e.g. "http") is not treated
    as an alias.
    """
    prefix, separator, _ = url.partition(utils._ALIAS_SEPARATOR)
    if separator and prefix.lower() not in utils._URI_SCHEMES:
        return prefix
    return ""