diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index ef1fd76..061c8b3 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,409 +1,409 @@
 # Copyright (C) 2019 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import logging
 import tempfile
 import os
 
 from typing import Dict, Generator, List, Mapping, Optional, Sequence, Tuple
 
 from swh.core.tarball import uncompress
 from swh.core.config import SWHConfig
 from swh.model.from_disk import Directory
 from swh.model.identifiers import (
     revision_identifier, snapshot_identifier, identifier_to_bytes
 )
 from swh.storage import get_storage
 from swh.storage.algos.snapshot import snapshot_get_all_branches
 from swh.loader.core.converters import content_for_storage
 from swh.loader.package.utils import download
 
 
 logger = logging.getLogger(__name__)
 
 
 # Not implemented yet:
 # - clean up disk routines from previous killed workers (when OOMkilled)
 #   -> separation of concern would like this to be abstracted from the code
 #   -> experience tells us it's complicated to do as such (T903, T964, T982,
 #      etc...)
 #
 # - model: swh.model.merkle.from_disk should output swh.model.model.* objects
 #   to avoid this layer's conversion routine call
 #   -> Take this up within swh.model's current implementation
 
 
 class PackageLoader:
     # Origin visit type (str) set by the loader
     visit_type = ''
 
     def __init__(self, url):
         """Loader's constructor. This raises exception if the minimal required
            configuration is missing (cf. fn:`check` method).
 
         Args:
             url (str): Origin url to load data from
 
         """
         # This expects to use the environment variable SWH_CONFIG_FILENAME
         self.config = SWHConfig.parse_config_file()
         self._check_configuration()
         self.storage = get_storage(**self.config['storage'])
         self.url = url
         self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)
 
     def _check_configuration(self):
         """Checks the minimal configuration required is set for the loader.
 
         If some required configuration is missing, exception detailing the
         issue is raised.
 
         """
         if 'storage' not in self.config:
             raise ValueError(
                 'Misconfiguration, at least the storage key should be set')
 
     def get_versions(self) -> Sequence[str]:
         """Return the list of all published package versions.
 
         Returns:
             Sequence of published versions
 
         """
         return []
 
     def get_artifacts(self, version: str) -> Generator[
             Tuple[str, str, Dict], None, None]:
         """Given a release version of a package, retrieve the associated
            artifact information for such version.
 
         Args:
             version: Package version
 
         Returns:
             (artifact filename, artifact uri, raw artifact metadata)
 
         """
         yield from {}
 
     def build_revision(
             self, a_metadata: Dict, i_metadata: Dict) -> Dict:
         """Build the revision dict from the archive metadata (extrinsic
         artifact metadata) and the intrinsic metadata.
 
         Returns:
             SWH data dict
 
         """
         return {}
 
     def get_default_release(self) -> str:
         """Retrieve the latest release version
 
         Returns:
             Latest version
 
         """
         return ''
 
     def last_snapshot(self) -> Optional[Dict]:
         """Retrieve the last snapshot
 
         """
         visit = self.storage.origin_visit_get_latest(
             self.url, require_snapshot=True)
         if visit:
             return snapshot_get_all_branches(
                 self.storage, visit['snapshot']['id'])
 
     def known_artifacts(self, snapshot: Dict) -> [Dict]:
         """Retrieve the known releases/artifact for the origin.
 
         Args:
             snapshot: snapshot for the visit
 
         Returns:
             Dict of keys revision id (bytes), values a metadata Dict.
 
         """
         if not snapshot or 'branches' not in snapshot:
             return {}
         # retrieve only revisions (e.g the alias we do not want here)
         revs = [rev['target']
                 for rev in snapshot['branches'].values()
                 if rev and rev['target_type'] == 'revision']
         known_revisions = self.storage.revision_get(revs)
 
         ret = {}
         for revision in known_revisions:
             if not revision:  # revision_get can return None
                 continue
             ret[revision['id']] = revision['metadata']
 
         return ret
 
     def resolve_revision_from(
             self, known_artifacts: Dict, artifact_metadata: Dict) \
             -> Optional[bytes]:
         """Resolve the revision from a snapshot and an artifact metadata dict.
 
         If the artifact has already been downloaded, this will return the
         existing revision targeting that uncompressed artifact directory.
         Otherwise, this returns None.
 
         Args:
             snapshot: Snapshot
             artifact_metadata: Information dict
 
         Returns:
             None or revision identifier
 
         """
         return None
 
     def download_package(self, artifacts_package_info: Mapping[str, str],
                          tmpdir: str) -> Tuple[str, Dict]:
         """Download artifacts for a specific package. All downloads happen in
         the tmpdir folder.
 
         Default implementation expects the artifacts package info to be
         about one artifact per package.
 
         Note that most implementations have one artifact per package. But some
         implementations have multiple artifacts per package (debian), and some
         have none; the package is the artifact (gnu).
 
         Args:
             artifacts_package_info: Information on the package artifacts to
                 download (uri, filename, etc...)
             tmpdir: Location to retrieve such artifacts
 
         Note:
 
         """
         a_uri = artifacts_package_info['url']
         filename = artifacts_package_info.get('filename')
         return download(a_uri, dest=tmpdir, filename=filename)
 
     def read_intrinsic_metadata(
             self, a_metadata: Dict, a_uncompressed_path: str) -> Dict:
         """Read intrinsic metadata from either the a_metadata or the
         uncompressed path.
 
         Depending on the implementations, some extract directly from the
         artifacts to ingest (pypi, npm...), some use an api to access directly
         their intrinsic metadata (debian exposes a dsc through uri) and some
         have none (gnu).
 
         """
         return {}
 
     def uncompress(
             self, a_path: str, tmpdir: str, a_metadata: Dict) -> str:
         """Uncompress the artifact(s) stored at a_path to tmpdir.
 
         Optionally, this could need to use the a_metadata dict for some
         more information (debian).
 
         """
         uncompressed_path = os.path.join(tmpdir, 'src')
         uncompress(a_path, dest=uncompressed_path)
         return uncompressed_path
 
     def load(self) -> Dict:
         """Load for a specific origin the associated contents.
 
         for each package version of the origin
 
         1. Fetch the files for one package version By default, this can be
            implemented as a simple HTTP request. Loaders with more specific
            requirements can override this, e.g.: the PyPI loader checks the
            integrity of the downloaded files; the Debian loader has to
            download and check several files for one package version.
 
         2. Extract the downloaded files By default, this would be a universal
            archive/tarball extraction.
 
            Loaders for specific formats can override this method (for
            instance, the Debian loader uses dpkg-source -x).
 
         3. Convert the extracted directory to a set of Software Heritage
            objects Using swh.model.from_disk.
 
         4. Extract the metadata from the unpacked directories This would only
            be applicable for "smart" loaders like npm (parsing the
            package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
            debian/changelog and debian/control).
 
            On "minimal-metadata" sources such as the GNU archive, the lister
            should provide the minimal set of metadata needed to populate the
            revision/release objects (authors, dates) as an argument to the
            task.
 
         5. Generate the revision/release objects for the given version. From
            the data generated at steps 3 and 4.
 
         end for each
 
         6. Generate and load the snapshot for the visit
 
            Using the revisions/releases collected at step 5., and the branch
            information from step 0., generate a snapshot and load it into the
            Software Heritage archive
 
         """
         status_load = 'uneventful'  # either: eventful, uneventful, failed
         status_visit = 'full'       # either: partial, full
         tmp_revisions = {}  # type: Dict[str, List]
         snapshot = None
         try:
             # Prepare origin and origin_visit
             origin = {'url': self.url}
             self.storage.origin_add([origin])
             visit_id = self.storage.origin_visit_add(
                 origin=self.url,
                 date=self.visit_date,
                 type=self.visit_type)['visit']
             last_snapshot = self.last_snapshot()
             logger.debug('last snapshot: %s', last_snapshot)
             known_artifacts = self.known_artifacts(last_snapshot)
             logger.debug('known artifacts: %s', known_artifacts)
 
             # Retrieve the default release (the "latest" one)
             default_release = self.get_default_release()
             logger.debug('default release: %s', default_release)
 
             for version in self.get_versions():  # for each
                 logger.debug('version: %s', version)
                 tmp_revisions[version] = []
-                # `a_` stands for `artifact_`, `p_` stands for `package_`
+                # `a_` stands for `artifact(s)_`, `p_` stands for `package_`
                 for a_p_info, a_metadata in self.get_artifacts(version):
                     logger.debug('a_p_info: %s', a_p_info)
                     logger.debug('a_metadata: %s', a_metadata)
 
                     revision_id = self.resolve_revision_from(
                         known_artifacts, a_metadata)
                     if revision_id is None:
                         with tempfile.TemporaryDirectory() as tmpdir:
                             try:
                                 # a_c_: archive_computed_
                                 a_path, a_c_metadata = self.download_package(
                                     a_p_info, tmpdir)
                             except Exception:
                                 logger.exception('Unable to retrieve %s',
                                                  a_p_info['url'])
                                 status_visit = 'partial'
                                 continue
 
                             logger.debug('archive_path: %s', a_path)
                             logger.debug('archive_computed_metadata: %s',
                                          a_c_metadata)
 
                             uncompressed_path = self.uncompress(
                                 a_path, tmpdir, a_metadata)
                             logger.debug('uncompressed_path: %s',
                                          uncompressed_path)
 
                             directory = Directory.from_disk(
                                 path=uncompressed_path.encode('utf-8'), data=True)  # noqa
                             # FIXME: Try not to load the full raw content in
                             # memory
                             objects = directory.collect()
 
                             contents = objects['content'].values()
                             logger.debug('Number of contents: %s',
                                          len(contents))
 
                             self.storage.content_add(
                                 map(content_for_storage, contents))
                             status_load = 'eventful'
 
                             directories = objects['directory'].values()
                             logger.debug('Number of directories: %s',
                                          len(directories))
 
                             self.storage.directory_add(directories)
 
                             i_metadata = self.read_intrinsic_metadata(
                                 a_metadata, uncompressed_path)
 
                             # FIXME: This should be release. cf. D409
                             revision = self.build_revision(
                                 a_metadata, i_metadata)
                             revision.update({
                                 'synthetic': True,
                                 'directory': directory.hash,
                             })
                             revision['metadata'].update({
                                 'original_artifact': a_c_metadata,
                             })
                             revision['id'] = revision_id = identifier_to_bytes(
                                 revision_identifier(revision))
                             logger.debug('Revision: %s', revision)
 
                             self.storage.revision_add([revision])
 
                     tmp_revisions[version].append(
                         (a_p_info['filename'], revision_id))
 
             # Build and load the snapshot
             branches = {}
             for version, v_branches in tmp_revisions.items():
                 if len(v_branches) == 1:
                     branch_name = (
                         version if version == 'HEAD'
                         else 'releases/%s' % version).encode('utf-8')
                     if version == default_release:
                         branches[b'HEAD'] = {
                             'target_type': 'alias',
                             'target': branch_name,
                         }
 
                     branches[branch_name] = {
                         'target_type': 'revision',
                         'target': v_branches[0][1],
                     }
                 else:
                     for filename, target in v_branches:
                         branch_name = ('releases/%s/%s' % (
                             version, filename)).encode('utf-8')
                         branches[branch_name] = {
                             'target_type': 'revision',
                             'target': target,
                         }
             snapshot = {
                 'branches': branches
             }
             logger.debug('snapshot: %s', snapshot)
 
             snapshot['id'] = identifier_to_bytes(
                 snapshot_identifier(snapshot))
 
             logger.debug('snapshot: %s', snapshot)
             self.storage.snapshot_add([snapshot])
 
             if hasattr(self.storage, 'flush'):
                 self.storage.flush()
         except Exception:
             logger.exception('Fail to load %s' % self.url)
             status_visit = 'partial'
             status_load = 'failed'
         finally:
             self.storage.origin_visit_update(
                 origin=self.url, visit_id=visit_id,
                 status=status_visit, snapshot=snapshot)
 
         result = {
             'status': status_load,
         }
 
         if snapshot:
             result['snapshot_id'] = snapshot['id']
         return result
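
Usage sketch (illustrative only, not part of the diff above): a minimal concrete loader built on the hooks defined by PackageLoader. The class name, origin URL layout and metadata values are hypothetical; only the hook names, their signatures, and the conventions load() relies on (a (package info, artifact metadata) pair per artifact with 'url' and 'filename' keys, and a revision dict carrying a 'metadata' key) come from the module itself.

# Hypothetical example: a loader fed a static version -> artifact mapping.
from typing import Dict, Generator, Mapping, Sequence, Tuple

from swh.loader.package.loader import PackageLoader


class ExamplePackageLoader(PackageLoader):
    visit_type = 'example'

    def __init__(self, url: str, artifacts: Mapping[str, Dict]):
        super().__init__(url)
        # e.g. {'1.0.0': {'url': 'https://example.org/p-1.0.0.tar.gz',
        #                 'filename': 'p-1.0.0.tar.gz'}}
        self.artifacts = artifacts

    def get_versions(self) -> Sequence[str]:
        return list(self.artifacts)

    def get_default_release(self) -> str:
        # Naive notion of "latest"; a real loader would use the upstream's.
        return max(self.artifacts)

    def get_artifacts(self, version: str) -> Generator[
            Tuple[Dict, Dict], None, None]:
        # load() unpacks (artifact package info, raw artifact metadata) and
        # reads 'url' and 'filename' from the package info dict.
        a_metadata = self.artifacts[version]
        yield {
            'url': a_metadata['url'],
            'filename': a_metadata['filename'],
        }, a_metadata

    def build_revision(self, a_metadata: Dict, i_metadata: Dict) -> Dict:
        # Placeholder skeleton: load() merges 'original_artifact' into the
        # 'metadata' key and fills in 'directory'/'synthetic'/'id' itself;
        # the author/committer/date fields needed by revision_identifier
        # are left out here for brevity.
        return {
            'type': 'tar',
            'message': b'synthetic revision message',
            'parents': [],
            'metadata': {
                'intrinsic': i_metadata,
                'extrinsic': {'raw': a_metadata},
            },
        }

With the defaults kept for download_package, uncompress and read_intrinsic_metadata, such a loader only describes where its artifacts live and how to turn their metadata into a revision; the base class drives the per-version loop, object insertion and snapshot creation.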