diff --git a/mypy.ini b/mypy.ini index 81a986a..ad6a2ae 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,27 +1,42 @@ [mypy] namespace_packages = True warn_unused_ignores = True - # 3rd party libraries without stubs (yet) [mypy-celery.*] ignore_missing_imports = True [mypy-pkg_resources.*] ignore_missing_imports = True [mypy-psutil.*] ignore_missing_imports = True [mypy-psycopg2.*] ignore_missing_imports = True [mypy-pytest.*] ignore_missing_imports = True [mypy-requests_mock.*] ignore_missing_imports = True [mypy-retrying.*] ignore_missing_imports = True + +[mypy-chardet.*] +ignore_missing_imports = True + +[mypy-debian.*] +ignore_missing_imports = True + +[mypy-iso8601.*] +ignore_missing_imports = True + +[mypy-pkginfo.*] +ignore_missing_imports = True + +[mypy-swh.deposit.*] +ignore_missing_imports = True + diff --git a/swh/loader/package/__init__.py b/swh/loader/package/__init__.py index a8b4a14..4756635 100644 --- a/swh/loader/package/__init__.py +++ b/swh/loader/package/__init__.py @@ -1,19 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information try: - from swh.loader.core._version import __version__ + from swh.loader.core._version import __version__ # type: ignore except ImportError: __version__ = 'devel' DEFAULT_PARAMS = { 'headers': { 'User-Agent': 'Software Heritage Loader (%s)' % ( __version__ ) } } diff --git a/swh/loader/package/archive.py b/swh/loader/package/archive.py index 6c3557f..4d32228 100644 --- a/swh/loader/package/archive.py +++ b/swh/loader/package/archive.py @@ -1,135 +1,136 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import iso8601 import logging from os import path from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import release_name from swh.model.identifiers import normalize_timestamp logger = logging.getLogger(__name__) SWH_PERSON = { 'name': b'Software Heritage', 'fullname': b'Software Heritage', 'email': b'robot@softwareheritage.org' } REVISION_MESSAGE = b'swh-loader-package: synthetic revision message' class ArchiveLoader(PackageLoader): visit_type = 'tar' def __init__(self, url: str, artifacts: Sequence[Mapping[str, Any]], identity_artifact_keys: Optional[Sequence[str]] = None): """Loader constructor. For now, this is the lister's task output. 
Args: url: Origin url artifacts: List of artifact information with keys: **time**: last modification time as either isoformat date string or timestamp **url**: the artifact url to retrieve filename **artifact's filename version**: artifact's version length **length**: artifact's length identity_artifact_keys: Optional List of keys forming the "identity" of an artifact """ super().__init__(url=url) self.artifacts = artifacts # assume order is enforced in the lister if not identity_artifact_keys: # default keys for gnu identity_artifact_keys = ['time', 'url', 'length', 'version'] self.identity_artifact_keys = identity_artifact_keys def get_versions(self) -> Sequence[str]: versions = [] for archive in self.artifacts: v = archive.get('version') if v: versions.append(v) return versions def get_default_version(self) -> str: # It's the most recent, so for this loader, it's the last one return self.artifacts[-1]['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: for a_metadata in self.artifacts: url = a_metadata['url'] package_version = a_metadata['version'] if version == package_version: filename = a_metadata.get('filename') p_info = { 'url': url, 'filename': filename if filename else path.split(url)[-1], 'raw': a_metadata, } # FIXME: this code assumes we have only 1 artifact per # versioned package yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: identity = artifact_identity( artifact_metadata, id_keys=self.identity_artifact_keys) for rev_id, known_artifact in known_artifacts.items(): logging.debug('known_artifact: %s', known_artifact) reference_artifact = known_artifact['extrinsic']['raw'] known_identity = artifact_identity( reference_artifact, id_keys=self.identity_artifact_keys) if identity == known_identity: return rev_id + return None def build_revision(self, a_metadata: Mapping[str, Any], uncompressed_path: str) -> Dict: time = a_metadata['time'] # assume it's a timestamp if isinstance(time, str): # otherwise, assume it's a parsable date time = iso8601.parse_date(time) normalized_time = normalize_timestamp(time) return { 'type': 'tar', 'message': REVISION_MESSAGE, 'date': normalized_time, 'author': SWH_PERSON, 'committer': SWH_PERSON, 'committer_date': normalized_time, 'parents': [], 'metadata': { 'intrinsic': {}, 'extrinsic': { 'provider': self.url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, } def artifact_identity(d: Mapping[str, Any], id_keys: Sequence[str]) -> Sequence[Any]: """Compute the primary key for a dict using the id_keys as primary key composite. 
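A rough sketch of the identity comparison that resolve_revision_from relies on; the artifact dicts, URL and timestamp below are made up for illustration, and the helper mirrors artifact_identity as defined above:

    def artifact_identity(d, id_keys):
        # same behaviour as the helper defined above
        return [d.get(k) for k in id_keys]

    id_keys = ['time', 'url', 'length', 'version']  # default keys for gnu

    known = {'time': 1500000000, 'url': 'https://example.org/foo-1.0.tar.gz',
             'length': 221837, 'version': '1.0', 'filename': 'foo-1.0.tar.gz'}
    incoming = {'time': 1500000000, 'url': 'https://example.org/foo-1.0.tar.gz',
                'length': 221837, 'version': '1.0'}

    # extra keys such as 'filename' do not change the identity
    assert artifact_identity(known, id_keys) == artifact_identity(incoming, id_keys)

When the two identities match, the already-ingested revision id is reused instead of re-downloading the artifact.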
Args: d: A dict entry to compute the primary key on id_keys: Sequence of keys to use as primary key Returns: The identity for that dict entry """ return [d.get(k) for k in id_keys] diff --git a/swh/loader/package/debian.py b/swh/loader/package/debian.py index cd21630..cb76800 100644 --- a/swh/loader/package/debian.py +++ b/swh/loader/package/debian.py @@ -1,395 +1,394 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils import iso8601 import logging import re import subprocess from dateutil.parser import parse as parse_date from debian.changelog import Changelog from debian.deb822 import Dsc from os import path from typing import ( Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple ) from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import download, release_name logger = logging.getLogger(__name__) UPLOADERS_SPLIT = re.compile(r'(?<=\>)\s*,\s*') class DebianLoader(PackageLoader): """Load debian origins into swh archive. """ visit_type = 'debian' def __init__(self, url: str, date: str, packages: Mapping[str, Any]): """Debian Loader implementation. Args: url: Origin url (e.g. deb://Debian/packages/cicero) date: Ignored packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3' 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ super().__init__(url=url) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...) """ - return self.packages.keys() - - def get_default_version(self) -> str: - """No default version for debian so no HEAD alias in snapshot. 
- - """ - return None + return list(self.packages.keys()) def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: meta = self.packages[version] p_info = meta.copy() p_info['raw'] = meta yield release_name(version), p_info def resolve_revision_from( self, known_package_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: artifacts_to_fetch = artifact_metadata['files'] logger.debug('k_p_artifacts: %s', known_package_artifacts) logger.debug('artifacts_to_fetch: %s', artifacts_to_fetch) for rev_id, known_artifacts in known_package_artifacts.items(): logger.debug('Revision: %s', rev_id) logger.debug('Associated known_artifacts: %s', known_artifacts) known_artifacts = known_artifacts['extrinsic']['raw']['files'] rev_found = True for a_name, k_artifact in known_artifacts.items(): artifact_to_fetch = artifacts_to_fetch.get(a_name) logger.debug('artifact_to_fetch: %s', artifact_to_fetch) if artifact_to_fetch is None: # as soon as we do not see an artifact, we consider we need # to check the other revision rev_found = False if k_artifact['sha256'] != artifact_to_fetch['sha256']: # Hash is different, we consider we need to check the other # revisions rev_found = False if rev_found: logger.debug('Existing revision %s found for new artifacts.', rev_id) return rev_id - # if we pass here, we did not find any known artifacts logger.debug('No existing revision found for the new artifacts.') + return None def download_package(self, p_info: Mapping[str, Any], - tmpdir: str) -> [Tuple[str, Dict]]: + tmpdir: str) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `a_metadata` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. """ all_hashes = download_package(p_info, tmpdir) logger.debug('all_hashes: %s', all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) logger.debug('res: %s', res) return res - def uncompress(self, dl_artifacts: [Tuple[str, Dict]], dest: str) -> str: + def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], + dest: str) -> str: logger.debug('dl_artifacts: %s', dl_artifacts) return extract_package(dl_artifacts, dest=dest) def build_revision(self, a_metadata: Mapping[str, Any], uncompressed_path: str) -> Dict: dsc_url, dsc_name = dsc_information(a_metadata) + if not dsc_name: + raise ValueError( + 'dsc name for url %s should not be None' % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) i_metadata = get_package_metadata( a_metadata, dsc_path, uncompressed_path) logger.debug('i_metadata: %s', i_metadata) logger.debug('a_metadata: %s', a_metadata) msg = 'Synthetic revision for Debian source package %s version %s' % ( a_metadata['name'], a_metadata['version']) date = iso8601.parse_date(i_metadata['changelog']['date']) author = prepare_person(i_metadata['changelog']['person']) # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa return { 'type': 'dsc', 'message': msg.encode('utf-8'), 'author': author, 'date': date, 'committer': author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'dsc', 'raw': i_metadata, }, 'extrinsic': { 'provider': dsc_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } } def uid_to_person(uid: str) -> Mapping[str, str]: """Convert an uid to a person suitable for insertion. 
Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ logger.debug('uid: %s', uid) ret = { 'name': '', 'email': '', 'fullname': uid, } name, mail = email.utils.parseaddr(uid) if name and email: ret['name'] = name ret['email'] = mail else: ret['name'] = uid return ret def prepare_person(person: Mapping[str, str]) -> Mapping[str, bytes]: """Prepare person for swh serialization... Args: A person dict Returns: A person dict ready for storage """ ret = {} for key, value in person.items(): ret[key] = value.encode('utf-8') return ret def download_package( package: Mapping[str, Any], tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. Args: package: Dict defining the set of files representing a debian package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} for filename, fileinfo in package['files'].items(): uri = fileinfo['uri'] logger.debug('fileinfo: %s', fileinfo) extrinsic_hashes = {'sha256': fileinfo['sha256']} logger.debug('extrinsic_hashes(%s): %s', filename, extrinsic_hashes) filepath, hashes = download(uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes) all_hashes[filename] = hashes logger.debug('all_hashes: %s', all_hashes) return all_hashes -def dsc_information(package: Mapping[str, Any]) -> Tuple[str, str]: +def dsc_information(package: Mapping[str, Any]) -> Tuple[ + Optional[str], Optional[str]]: """Retrieve dsc information from a package. Args: package: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None for filename, fileinfo in package['files'].items(): if filename.endswith('.dsc'): if dsc_name: raise ValueError( 'Package %s_%s references several dsc files' % (package['name'], package['version']) ) dsc_url = fileinfo['uri'] dsc_name = filename return dsc_url, dsc_name -def extract_package(dl_artifacts: List[Tuple[str, Dict]], dest: str) -> str: +def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. 
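Using the cicero files mapping from the DebianLoader docstring above, dsc_information boils down to picking the single .dsc entry; a sketch, with the error path for several .dsc files omitted:

    files = {
        'cicero_0.7.2-3.diff.gz': {'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz'},
        'cicero_0.7.2-3.dsc': {'uri': 'http://d.d.o/cicero_0.7.2-3.dsc'},
        'cicero_0.7.2.orig.tar.gz': {'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz'},
    }
    dsc_url = dsc_name = None
    for filename, fileinfo in files.items():
        if filename.endswith('.dsc'):
            dsc_url, dsc_name = fileinfo['uri'], filename
    # dsc_name == 'cicero_0.7.2-3.dsc'
    # dsc_url  == 'http://d.d.o/cicero_0.7.2-3.dsc'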
Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] logger.debug('dl_artifacts: %s', dl_artifacts) for _, hashes in dl_artifacts: logger.debug('hashes: %s', hashes) filename = hashes['filename'] if filename.endswith('.dsc'): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) destdir = path.join(dest, 'extracted') logfile = path.join(dest, 'extract.log') logger.debug('extract Debian source package %s in %s' % (dsc_path, destdir), extra={ 'swh_type': 'deb_extract', 'swh_dsc': dsc_path, 'swh_destdir': destdir, }) cmd = ['dpkg-source', '--no-copy', '--no-check', '--ignore-bad-version', '-x', dsc_path, destdir] try: with open(logfile, 'w') as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logdata = open(logfile, 'r').read() raise ValueError('dpkg-source exited with code %s: %s' % (e.returncode, logdata)) from None return destdir def get_package_metadata(package: Mapping[str, Any], dsc_path: str, extracted_path: str) -> Mapping[str, Any]: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. Args: package: the package dict (with a dsc_path key) dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ with open(dsc_path, 'rb') as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information changelog_path = path.join(extracted_path, 'debian/changelog') with open(changelog_path, 'rb') as changelog: try: parsed_changelog = Changelog(changelog) except UnicodeDecodeError: logger.warning('Unknown encoding for changelog %s,' ' falling back to iso' % - changelog_path.decode('utf-8'), extra={ + changelog_path, extra={ 'swh_type': 'deb_changelog_encoding', 'swh_name': package['name'], 'swh_version': str(package['version']), - 'swh_changelog': changelog_path.decode('utf-8'), + 'swh_changelog': changelog_path, }) # need to reset as Changelog scrolls to the end of the file changelog.seek(0) parsed_changelog = Changelog(changelog, encoding='iso-8859-15') package_info = { 'name': package['name'], 'version': str(package['version']), 'changelog': { 'person': uid_to_person(parsed_changelog.author), 'date': parse_date(parsed_changelog.date).isoformat(), 'history': [(block.package, str(block.version)) for block in parsed_changelog][1:], } } maintainers = [ uid_to_person(parsed_dsc['Maintainer']), ] maintainers.extend( uid_to_person(person) for person in UPLOADERS_SPLIT.split(parsed_dsc.get('Uploaders', '')) ) package_info['maintainers'] = maintainers return package_info diff --git a/swh/loader/package/deposit.py b/swh/loader/package/deposit.py index 160819c..3177ad4 100644 --- a/swh/loader/package/deposit.py +++ b/swh/loader/package/deposit.py @@ -1,154 +1,154 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging -from typing import Dict, Generator, Mapping, Sequence, Tuple +from typing import Any, Dict, Generator, Mapping, Sequence, Tuple from swh.model.hashutil import hash_to_hex from swh.loader.package.loader import PackageLoader from 
swh.deposit.client import PrivateApiDepositClient as ApiClient logger = logging.getLogger(__name__) class DepositLoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. """ visit_type = 'deposit' def __init__(self, url: str, deposit_id: str): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity """ super().__init__(url=url) # For now build back existing api urls # archive_url: Private api url to retrieve archive artifact self.archive_url = '/%s/raw/' % deposit_id # metadata_url: Private api url to retrieve the deposit metadata self.metadata_url = '/%s/meta/' % deposit_id # deposit_update_url: Private api to push pids and status update on the # deposit id self.deposit_update_url = '/%s/update/' % deposit_id self.client = ApiClient() self._metadata = None @property def metadata(self): if self._metadata is None: self._metadata = self.client.metadata_get(self.metadata_url) return self._metadata def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ['HEAD'] def get_package_info(self, version: str) -> Generator[ - Tuple[Mapping[str, str], Dict], None, None]: + Tuple[str, Mapping[str, Any]], None, None]: p_info = { 'url': self.client.base_url + self.archive_url, 'filename': 'archive.zip', 'raw': self.metadata, } yield 'HEAD', p_info def build_revision( self, a_metadata: Dict, uncompressed_path: str) -> Dict: revision = a_metadata.pop('revision') metadata = { 'extrinsic': { 'provider': '%s/%s' % ( self.client.base_url, self.metadata_url), 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } # FIXME: the deposit no longer needs to build the revision revision['metadata'].update(metadata) revision['author'] = parse_author(revision['author']) revision['committer'] = parse_author(revision['committer']) revision['message'] = revision['message'].encode('utf-8') revision['type'] = 'tar' return revision def load(self) -> Dict: # Usual loading r = super().load() success = r['status'] != 'failed' if success: # Update archive with metadata information origin_metadata = self.metadata['origin_metadata'] logger.debug('origin_metadata: %s', origin_metadata) tools = self.storage.tool_add([origin_metadata['tool']]) logger.debug('tools: %s', tools) tool_id = tools[0]['id'] provider = origin_metadata['provider'] # FIXME: Shall we delete this info? 
provider_id = self.storage.metadata_provider_add( provider['provider_name'], provider['provider_type'], provider['provider_url'], metadata=None) metadata = origin_metadata['metadata'] self.storage.origin_metadata_add( self.url, self.visit_date, provider_id, tool_id, metadata) # Update deposit status try: if not success: self.client.status_update( self.deposit_update_url, status='failed') return r snapshot_id = r['snapshot_id'] branches = self.storage.snapshot_get(snapshot_id)['branches'] logger.debug('branches: %s', branches) if not branches: return r rev_id = branches[b'HEAD']['target'] revision = next(self.storage.revision_get([rev_id])) # Retrieve the revision identifier dir_id = revision['directory'] # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_update_url, status='done', revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(dir_id), origin_url=self.url) except Exception: logger.exception( 'Problem when trying to update the deposit\'s status') return {'status': 'failed'} return r def parse_author(author): """See prior fixme """ return { 'fullname': author['fullname'].encode('utf-8'), 'name': author['name'].encode('utf-8'), 'email': author['email'].encode('utf-8'), } diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 6e75cb3..4c7d462 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,389 +1,394 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import logging import tempfile import os from typing import ( Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple ) from swh.core.tarball import uncompress from swh.core.config import SWHConfig from swh.model.from_disk import Directory from swh.model.identifiers import ( revision_identifier, snapshot_identifier, identifier_to_bytes ) from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.loader.core.converters import content_for_storage from swh.loader.package.utils import download logger = logging.getLogger(__name__) # Not implemented yet: # - clean up disk routines from previous killed workers (when OOMkilled) # -> separation of concern would like this to be abstracted from the code # -> experience tells us it's complicated to do as such (T903, T964, T982, # etc...) # # - model: swh.model.merkle.from_disk should output swh.model.model.* objects # to avoid this layer's conversion routine call # -> Take this up within swh.model's current implementation class PackageLoader: # Origin visit type (str) set by the loader visit_type = '' def __init__(self, url): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). Args: url (str): Origin url to load data from """ # This expects to use the environment variable SWH_CONFIG_FILENAME self.config = SWHConfig.parse_config_file() self._check_configuration() self.storage = get_storage(**self.config['storage']) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) def _check_configuration(self): """Checks the minimal configuration required is set for the loader. If some required configuration is missing, exception detailing the issue is raised. 
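A minimal configuration sketch for the check performed by _check_configuration; the loader only hard-requires a 'storage' key, and the 'memory' backend below is just one illustrative storage specification, assuming it is accepted by swh.storage.get_storage in this version:

    # parsed from the file pointed to by SWH_CONFIG_FILENAME
    config = {
        'storage': {
            'cls': 'memory',
            'args': {},
        },
    }
    assert 'storage' in config   # what _check_configuration verifies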
""" if 'storage' not in self.config: raise ValueError( 'Misconfiguration, at least the storage key should be set') def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( - self, a_metadata: Dict, i_metadata: Dict) -> Dict: + self, a_metadata: Dict, uncompressed_path: str) -> Dict: """Build the revision dict from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. + Args: + a_metadata: Artifact metadata + uncompressed_path: Artifact uncompressed path on disk + Returns: SWH data dict """ return {} def get_default_version(self) -> str: - """Retrieve the latest release version + """Retrieve the latest release version if any. Returns: Latest version """ return '' def last_snapshot(self) -> Optional[Dict]: """Retrieve the last snapshot """ + snapshot = None visit = self.storage.origin_visit_get_latest( self.url, require_snapshot=True) if visit: - return snapshot_get_all_branches( + snapshot = snapshot_get_all_branches( self.storage, visit['snapshot']) + return snapshot - def known_artifacts(self, snapshot: Dict) -> [Dict]: + def known_artifacts(self, snapshot: Optional[Dict]) -> Dict: """Retrieve the known releases/artifact for the origin. Args snapshot: snapshot for the visit Returns: Dict of keys revision id (bytes), values a metadata Dict. """ if not snapshot or 'branches' not in snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) revs = [rev['target'] for rev in snapshot['branches'].values() if rev and rev['target_type'] == 'revision'] known_revisions = self.storage.revision_get(revs) ret = {} for revision in known_revisions: if not revision: # revision_get can return None continue ret[revision['id']] = revision['metadata'] return ret def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: """Resolve the revision from a snapshot and an artifact metadata dict. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None. Args: snapshot: Snapshot artifact_metadata: Information dict Returns: None or revision identifier """ return None def download_package(self, p_info: Mapping[str, Any], - tmpdir: str) -> [Tuple[str, Dict]]: + tmpdir: str) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ a_uri = p_info['url'] filename = p_info.get('filename') return [download(a_uri, dest=tmpdir, filename=filename)] def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str) -> str: """Uncompress the artifact(s) in the destination folder dest. 
Optionally, this could need to use the p_info dict for some more information (debian). """ uncompressed_path = os.path.join(dest, 'src') for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the branch information from step 0., generate a snapshot and load it into the Software Heritage archive """ status_load = 'uneventful' # either: eventful, uneventful, failed status_visit = 'full' # either: partial, full tmp_revisions = {} # type: Dict[str, List] snapshot = None try: # Prepare origin and origin_visit origin = {'url': self.url} self.storage.origin_add_one(origin) visit_id = self.storage.origin_visit_add( origin=self.url, date=self.visit_date, type=self.visit_type)['visit'] last_snapshot = self.last_snapshot() logger.debug('last snapshot: %s', last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) logger.debug('known artifacts: %s', known_artifacts) # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug('default version: %s', default_version) for version in self.get_versions(): # for each logger.debug('version: %s', version) tmp_revisions[version] = [] # `p_` stands for `package_` for branch_name, p_info in self.get_package_info(version): logger.debug('package_info: %s', p_info) revision_id = self.resolve_revision_from( known_artifacts, p_info['raw']) if revision_id is None: with tempfile.TemporaryDirectory() as tmpdir: try: dl_artifacts = self.download_package( p_info, tmpdir) except Exception: logger.exception('Unable to retrieve %s', p_info) status_visit = 'partial' continue uncompressed_path = self.uncompress( dl_artifacts, dest=tmpdir) logger.debug('uncompressed_path: %s', uncompressed_path) directory = Directory.from_disk( path=uncompressed_path.encode('utf-8'), data=True) # noqa # FIXME: Try not to load the full raw content in # memory objects = directory.collect() contents = objects['content'].values() logger.debug('Number of contents: %s', len(contents)) self.storage.content_add( [content_for_storage(x) for x in contents]) status_load = 'eventful' directories = 
list(objects['directory'].values()) logger.debug('Number of directories: %s', len(directories)) self.storage.directory_add(directories) # FIXME: This should be release. cf. D409 revision = self.build_revision( p_info['raw'], uncompressed_path) revision.update({ 'synthetic': True, 'directory': directory.hash, }) revision['metadata'].update({ 'original_artifact': [ hashes for _, hashes in dl_artifacts ], }) revision['id'] = revision_id = identifier_to_bytes( revision_identifier(revision)) logger.debug('Revision: %s', revision) self.storage.revision_add([revision]) tmp_revisions[version].append((branch_name, revision_id)) logger.debug('tmp_revisions: %s', tmp_revisions) # Build and load the snapshot - branches = {} + branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in tmp_revisions.items(): if version == default_version and \ len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) if branch_name != 'HEAD': branches[b'HEAD'] = { 'target_type': 'alias', 'target': branch_name.encode('utf-8'), } for branch_name, target in branch_name_revisions: - branch_name = branch_name.encode('utf-8') - branches[branch_name] = { + branches[branch_name.encode('utf-8')] = { 'target_type': 'revision', 'target': target, } snapshot = { 'branches': branches } logger.debug('snapshot: %s', snapshot) snapshot['id'] = identifier_to_bytes( snapshot_identifier(snapshot)) logger.debug('snapshot: %s', snapshot) self.storage.snapshot_add([snapshot]) if hasattr(self.storage, 'flush'): self.storage.flush() except Exception: logger.exception('Fail to load %s' % self.url) status_visit = 'partial' status_load = 'failed' finally: self.storage.origin_visit_update( origin=self.url, visit_id=visit_id, status=status_visit, snapshot=snapshot and snapshot['id']) result = { 'status': status_load, - } + } # type: Dict[str, Any] if snapshot: result['snapshot_id'] = snapshot['id'] return result diff --git a/swh/loader/package/npm.py b/swh/loader/package/npm.py index 0bc3ab8..cef1293 100644 --- a/swh/loader/package/npm.py +++ b/swh/loader/package/npm.py @@ -1,295 +1,296 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os import re from codecs import BOM_UTF8 from typing import Any, Dict, Generator, Mapping, Sequence, Tuple, Optional import chardet import iso8601 from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info, release_name logger = logging.getLogger(__name__) _EMPTY_AUTHOR = {'fullname': b'', 'name': None, 'email': None} # https://github.com/jonschlinkert/author-regex _author_regexp = r'([^<(]+?)?[ \t]*(?:<([^>(]+?)>)?[ \t]*(?:\(([^)]+?)\)|$)' class NpmLoader(PackageLoader): visit_type = 'npm' def __init__(self, package_name, package_url, package_metadata_url): super().__init__(url=package_url) self.provider_url = package_metadata_url self._info = None self._versions = None # if package_url is None: # package_url = 'https://www.npmjs.com/package/%s' % package_name # if package_metadata_url is None: # package_metadata_url = 'https://replicate.npmjs.com/%s/' %\ # quote(package_name, safe='') @property def info(self) -> Dict: """Return the project 
metadata information (fetched from npm registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return sorted(list(self.info['versions'].keys())) def get_default_version(self) -> str: return self.info['dist-tags'].get('latest', '') def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: meta = self.info['versions'][version] url = meta['dist']['tarball'] p_info = { 'url': url, 'filename': os.path.basename(url), 'raw': meta, } yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: shasum = artifact_metadata['dist']['shasum'] for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact['original_artifact'][0] if shasum == original_artifact['checksums']['sha1']: return rev_id + return None def build_revision( self, a_metadata: Dict, uncompressed_path: str) -> Dict: i_metadata = extract_intrinsic_metadata(uncompressed_path) # from intrinsic metadata author = extract_npm_package_author(i_metadata) # extrinsic metadata version = i_metadata['version'] date = self.info['time'][version] date = iso8601.parse_date(date) date = normalize_timestamp(int(date.timestamp())) message = version.encode('ascii') return { 'type': 'tar', 'message': message, 'author': author, 'date': date, 'committer': author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'package.json', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, } def parse_npm_package_author(author_str): """ Parse npm package author string. It works with a flexible range of formats, as detailed below:: name name (url) name (url) name (url) name(url) name (url) name (url) name(url) name(url) name (url) name(url) name name (url) (url) (url) (url) (url) Args: author_str (str): input author string Returns: dict: A dict that may contain the following keys: * name * email * url """ author = {} matches = re.findall(_author_regexp, author_str.replace('<>', '').replace('()', ''), re.M) for match in matches: if match[0].strip(): author['name'] = match[0].strip() if match[1].strip(): author['email'] = match[1].strip() if match[2].strip(): author['url'] = match[2].strip() return author def extract_npm_package_author(package_json): """ Extract package author from a ``package.json`` file content and return it in swh format. 
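As an illustration of what parse_npm_package_author returns for the formats listed above; the inputs are typical npm author strings and the outputs assume the regexp behaves as described:

    parse_npm_package_author('Barney Rubble <b@rubble.com> (http://barnyrubble.tumblr.com/)')
    # -> {'name': 'Barney Rubble',
    #     'email': 'b@rubble.com',
    #     'url': 'http://barnyrubble.tumblr.com/'}

    parse_npm_package_author('<b@rubble.com>')
    # -> {'email': 'b@rubble.com'}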
Args: package_json (dict): Dict holding the content of parsed ``package.json`` file Returns: dict: A dict with the following keys: * fullname * name * email """ def _author_str(author_data): if type(author_data) is dict: author_str = '' if 'name' in author_data: author_str += author_data['name'] if 'email' in author_data: author_str += ' <%s>' % author_data['email'] return author_str elif type(author_data) is list: return _author_str(author_data[0]) if len(author_data) > 0 else '' else: return author_data author_data = {} for author_key in ('author', 'authors'): if author_key in package_json: author_str = _author_str(package_json[author_key]) author_data = parse_npm_package_author(author_str) name = author_data.get('name') email = author_data.get('email') fullname = None if name and email: fullname = '%s <%s>' % (name, email) elif name: fullname = name if not fullname: return _EMPTY_AUTHOR if fullname: fullname = fullname.encode('utf-8') if name: name = name.encode('utf-8') if email: email = email.encode('utf-8') return {'fullname': fullname, 'name': name, 'email': email} def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom):] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode('utf-8') except UnicodeDecodeError: encoding = chardet.detect(json_bytes)['encoding'] if encoding: json_str = json_bytes.decode(encoding, 'replace') try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
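A couple of illustrative calls for load_json, covering the BOM-stripping path and the malformed-JSON fallback; the inputs are made up:

    from codecs import BOM_UTF8

    assert load_json(BOM_UTF8 + b'{"name": "zprint"}') == {'name': 'zprint'}
    assert load_json(b'not json at all') == {}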
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, 'package.json') if not os.path.exists(package_json_path): return {} with open(package_json_path, 'rb') as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/pypi.py b/swh/loader/package/pypi.py index dec6735..0386036 100644 --- a/swh/loader/package/pypi.py +++ b/swh/loader/package/pypi.py @@ -1,193 +1,197 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from urllib.parse import urlparse from pkginfo import UnpackedSDist import iso8601 from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info, release_name class PyPILoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. """ visit_type = 'pypi' def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return self.info['releases'].keys() def get_default_version(self) -> str: return self.info['info']['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: res = [] for meta in self.info['releases'][version]: filename = meta['filename'] p_info = { 'url': meta['url'], 'filename': filename, 'raw': meta, } res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info['filename']), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: sha256 = artifact_metadata['digests']['sha256'] for rev_id, known_artifact in known_artifacts.items(): for original_artifact in known_artifact['original_artifact']: if sha256 == original_artifact['checksums']['sha256']: return rev_id + return None def build_revision( self, a_metadata: Dict, uncompressed_path: str) -> Dict: i_metadata = extract_intrinsic_metadata(uncompressed_path) # from intrinsic metadata name = i_metadata['version'] _author = author(i_metadata) # from extrinsic metadata message = a_metadata.get('comment_text', '') message = '%s: %s' % (name, message) if message else name date = normalize_timestamp( int(iso8601.parse_date(a_metadata['upload_time']).timestamp())) return { 'type': 'tar', 'message': message.encode('utf-8'), 'author': _author, 'date': date, 'committer': _author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'PKG-INFO', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } } def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url 
(e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.split('/')[-1] url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO') if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop('filename') # this gets added with the ondisk location return raw def author(data: Dict) -> Dict: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. """ name = data.get('author') email = data.get('author_email') + fullname = None # type: Optional[str] if email: fullname = '%s <%s>' % (name, email) else: fullname = name if not fullname: return {'fullname': b'', 'name': None, 'email': None} - fullname = fullname.encode('utf-8') - if name is not None: name = name.encode('utf-8') if email is not None: email = email.encode('utf-8') - return {'fullname': fullname, 'name': name, 'email': email} + return { + 'fullname': fullname.encode('utf-8'), + 'name': name, + 'email': email + } diff --git a/swh/loader/package/tasks.py b/swh/loader/package/tasks.py index eeeb5ea..629c28a 100644 --- a/swh/loader/package/tasks.py +++ b/swh/loader/package/tasks.py @@ -1,43 +1,38 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.debian import DebianLoader from swh.loader.package.deposit import DepositLoader from swh.loader.package.npm import NpmLoader from swh.loader.package.pypi import PyPILoader from swh.loader.package.archive import ArchiveLoader @shared_task(name=__name__ + '.LoadArchive') def load_archive(url=None, artifacts=None, identity_artifact_keys=None): return ArchiveLoader(url, artifacts, identity_artifact_keys=identity_artifact_keys).load() @shared_task(name=__name__ + '.LoadDebian') def load_debian(*, url, date, packages): return DebianLoader(url, date, packages).load() @shared_task(name=__name__ + '.LoadDeposit') def load_deposit(*, url, deposit_id): return DepositLoader(url, deposit_id).load() -@shared_task(name=__name__ + '.LoadGNU') -def load_gnu(*, url, tarballs): - return GNULoader(url, tarballs).load() - - @shared_task(name=__name__ + '.LoadNpm') def load_npm(*, package_name, package_url, package_metadata_url): return NpmLoader(package_name, package_url, package_metadata_url).load() @shared_task(name=__name__ + '.LoadPyPI') def load_pypi(*, url=None): return PyPILoader(url).load() diff --git 
a/swh/loader/package/tests/common.py b/swh/loader/package/tests/common.py index 36cfc18..b9f1e12 100644 --- a/swh/loader/package/tests/common.py +++ b/swh/loader/package/tests/common.py @@ -1,127 +1,127 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from os import path import logging from typing import Dict, List, Tuple from swh.model.hashutil import hash_to_bytes, hash_to_hex logger = logging.getLogger(__file__) DATADIR = path.join(path.abspath(path.dirname(__file__)), 'resources') def decode_target(target): """Test helper to ease readability in test """ if not target: return target target_type = target['target_type'] if target_type == 'alias': decoded_target = target['target'].decode('utf-8') else: decoded_target = hash_to_hex(target['target']) return { 'target': decoded_target, 'target_type': target_type } def check_snapshot(expected_snapshot, storage): """Check for snapshot match. Provide the hashes as hexadecimal, the conversion is done within the method. Args: expected_snapshot (dict): full snapshot with hex ids storage (Storage): expected storage """ expected_snapshot_id = expected_snapshot['id'] expected_branches = expected_snapshot['branches'] snap = storage.snapshot_get(hash_to_bytes(expected_snapshot_id)) if snap is None: # display known snapshots instead if possible if hasattr(storage, '_snapshots'): # in-mem storage from pprint import pprint for snap_id, (_snap, _) in storage._snapshots.items(): snapd = _snap.to_dict() snapd['id'] = hash_to_hex(snapd['id']) branches = { branch.decode('utf-8'): decode_target(target) for branch, target in snapd['branches'].items() } snapd['branches'] = branches pprint(snapd) raise AssertionError('Snapshot is not found') branches = { branch.decode('utf-8'): decode_target(target) for branch, target in snap['branches'].items() } assert expected_branches == branches def check_metadata(metadata: Dict, key_path: str, raw_type: str): """Given a metadata dict, ensure the associated key_path value is of type raw_type. Args: metadata: Dict to check key_path: Path to check raw_type: Type to check the path with Raises: Assertion error in case of mismatch """ data = metadata keys = key_path.split('.') for k in keys: try: data = data[k] except (TypeError, KeyError) as e: # KeyError: because path too long # TypeError: data is not a dict raise AssertionError(e) - assert isinstance(data, raw_type) + assert isinstance(data, raw_type) # type: ignore def check_metadata_paths(metadata: Dict, paths: List[Tuple[str, str]]): """Given a metadata dict, ensure the keys are of expected types Args: metadata: Dict to check key_path: Path to check raw_type: Type to check the path with Raises: Assertion error in case of mismatch """ for key_path, raw_type in paths: check_metadata(metadata, key_path, raw_type) def get_stats(storage) -> Dict: """Adaptation utils to unify the stats counters across storage implementation. """ storage.refresh_stat_counters() stats = storage.stat_counters() keys = ['content', 'directory', 'origin', 'origin_visit', 'person', 'release', 'revision', 'skipped_content', 'snapshot'] return {k: stats.get(k) for k in keys}
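For instance, the metadata checks above can be exercised on a revision metadata dict shaped like the ones these loaders build; the values below are illustrative only:

    metadata = {
        'intrinsic': {'tool': 'PKG-INFO', 'raw': {'name': 'requests'}},
        'extrinsic': {'provider': 'https://pypi.org/pypi/requests/json',
                      'when': '2019-01-01T00:00:00+00:00',
                      'raw': {}},
    }
    check_metadata_paths(metadata, paths=[
        ('intrinsic.tool', str),
        ('extrinsic.provider', str),
        ('extrinsic.when', str),
    ])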