diff --git a/conftest.py b/conftest.py index 2006c7a..50822c3 100644 --- a/conftest.py +++ b/conftest.py @@ -1,76 +1,75 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest import yaml from typing import Any, Dict from swh.storage.tests.conftest import * # noqa from swh.scheduler.tests.conftest import * # noqa @pytest.fixture def swh_loader_config(swh_storage_postgresql) -> Dict[str, Any]: return { 'storage': { 'cls': 'pipeline', 'steps': [ - {'cls': 'validate'}, {'cls': 'retry'}, {'cls': 'filter'}, {'cls': 'buffer'}, { 'cls': 'local', 'args': { 'db': swh_storage_postgresql.dsn, 'objstorage': { 'cls': 'memory', 'args': {} }, } } ] }, 'deposit': { 'url': 'https://deposit.softwareheritage.org/1/private', 'auth': { 'username': 'user', 'password': 'pass', } }, } @pytest.fixture def swh_config(swh_loader_config, monkeypatch, tmp_path): conffile = os.path.join(str(tmp_path), 'loader.yml') with open(conffile, 'w') as f: f.write(yaml.dump(swh_loader_config)) monkeypatch.setenv('SWH_CONFIG_FILENAME', conffile) return conffile @pytest.fixture(autouse=True, scope='session') def swh_proxy(): """Automatically inject this fixture in all tests to ensure no outside connection takes place. """ os.environ['http_proxy'] = 'http://localhost:999' os.environ['https_proxy'] = 'http://localhost:999' @pytest.fixture(scope='session') # type: ignore # expected redefinition def celery_includes(): return [ 'swh.loader.package.archive.tasks', 'swh.loader.package.cran.tasks', 'swh.loader.package.debian.tasks', 'swh.loader.package.deposit.tasks', 'swh.loader.package.npm.tasks', 'swh.loader.package.pypi.tasks', ] diff --git a/requirements-swh.txt b/requirements-swh.txt index 4177d43..c58ed6e 100644 --- a/requirements-swh.txt +++ b/requirements-swh.txt @@ -1,4 +1,4 @@ swh.core >= 0.0.75 -swh.model >= 0.0.54 +swh.model >= 0.0.57 swh.scheduler swh.storage >= 0.0.163 diff --git a/swh/loader/core/tests/test_converters.py b/swh/loader/core/tests/test_converters.py index 5cf9222..1adab49 100644 --- a/swh/loader/core/tests/test_converters.py +++ b/swh/loader/core/tests/test_converters.py @@ -1,113 +1,112 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import tempfile from swh.loader.core import converters from swh.model import from_disk from swh.model.model import Content, SkippedContent def tmpfile_with_content(fromdir, contentfile): """Create a temporary file with content contentfile in directory fromdir. 
""" tmpfilepath = tempfile.mktemp( suffix='.swh', prefix='tmp-file-for-test', dir=str(fromdir)) with open(tmpfilepath, 'wb') as f: f.write(contentfile) return tmpfilepath def test_content_for_storage_path(tmpdir): # given data = b'temp file for testing content storage conversion' tmpfile = tmpfile_with_content(tmpdir, data) - obj = from_disk.Content.from_file(path=os.fsdecode(tmpfile), - save_path=True).get_data() + obj = from_disk.Content.from_file(path=os.fsdecode(tmpfile)).get_data() expected_content = obj.copy() expected_content['data'] = data expected_content['status'] = 'visible' del expected_content['path'] del expected_content['perms'] expected_content = Content.from_dict(expected_content) # when content = converters.content_for_storage(obj) # then assert content == expected_content def test_content_for_storage_data(tmpdir): # given data = b'temp file for testing content storage conversion' obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() del obj['perms'] expected_content = obj.copy() expected_content['status'] = 'visible' expected_content = Content.from_dict(expected_content) # when content = converters.content_for_storage(obj) # then assert content == expected_content def test_content_for_storage_too_long(tmpdir): # given data = b'temp file for testing content storage conversion' obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() del obj['perms'] expected_content = obj.copy() expected_content.pop('data') expected_content['status'] = 'absent' expected_content['origin'] = 'http://example.org/' expected_content['reason'] = 'Content too large' expected_content = SkippedContent.from_dict(expected_content) # when content = converters.content_for_storage( obj, max_content_size=len(data) - 1, origin_url=expected_content.origin, ) # then assert content == expected_content def test_prepare_contents(tmpdir): contents = [] data_fine = b'tmp file fine' max_size = len(data_fine) for data in [b'tmp file with too much data', data_fine]: obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() del obj['perms'] contents.append(obj) actual_contents, actual_skipped_contents = converters.prepare_contents( contents, max_content_size=max_size, origin_url='some-origin') assert len(actual_contents) == 1 assert len(actual_skipped_contents) == 1 actual_content = actual_contents[0] assert 'reason' not in actual_content assert actual_content['status'] == 'visible' actual_skipped_content = actual_skipped_contents[0] assert actual_skipped_content['reason'] == 'Content too large' assert actual_skipped_content['status'] == 'absent' assert actual_skipped_content['origin'] == 'some-origin' diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py index e689dff..cca2035 100644 --- a/swh/loader/package/archive/loader.py +++ b/swh/loader/package/archive/loader.py @@ -1,126 +1,131 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import iso8601 import logging from os import path from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import release_name, artifact_identity -from swh.model.identifiers import normalize_timestamp +from swh.model.model import ( + Sha1Git, Person, TimestampWithTimezone, Revision, RevisionType, +) logger = 
logging.getLogger(__name__) -SWH_PERSON = { - 'name': b'Software Heritage', - 'fullname': b'Software Heritage', - 'email': b'robot@softwareheritage.org' -} +SWH_PERSON = Person( + name=b'Software Heritage', + fullname=b'Software Heritage', + email=b'robot@softwareheritage.org' +) REVISION_MESSAGE = b'swh-loader-package: synthetic revision message' class ArchiveLoader(PackageLoader): """Load archive origin's artifact files into swh archive """ visit_type = 'tar' def __init__(self, url: str, artifacts: Sequence[Mapping[str, Any]], identity_artifact_keys: Optional[Sequence[str]] = None): """Loader constructor. For now, this is the lister's task output. Args: url: Origin url artifacts: List of artifact information with keys: - **time**: last modification time as either isoformat date string or timestamp - **url**: the artifact url to retrieve filename - **artifact's filename version**: artifact's version length - **length**: artifact's length identity_artifact_keys: Optional List of keys forming the "identity" of an artifact """ super().__init__(url=url) self.artifacts = artifacts # assume order is enforced in the lister if not identity_artifact_keys: # default keys for gnu identity_artifact_keys = ['time', 'url', 'length', 'version'] self.identity_artifact_keys = identity_artifact_keys def get_versions(self) -> Sequence[str]: versions = [] for archive in self.artifacts: v = archive.get('version') if v: versions.append(v) return versions def get_default_version(self) -> str: # It's the most recent, so for this loader, it's the last one return self.artifacts[-1]['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: for a_metadata in self.artifacts: url = a_metadata['url'] package_version = a_metadata['version'] if version == package_version: filename = a_metadata.get('filename') p_info = { 'url': url, 'filename': filename if filename else path.split(url)[-1], 'raw': a_metadata, } # FIXME: this code assumes we have only 1 artifact per # versioned package yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: identity = artifact_identity( artifact_metadata, id_keys=self.identity_artifact_keys) for rev_id, known_artifact in known_artifacts.items(): logging.debug('known_artifact: %s', known_artifact) reference_artifact = known_artifact['extrinsic']['raw'] known_identity = artifact_identity( reference_artifact, id_keys=self.identity_artifact_keys) if identity == known_identity: return rev_id return None - def build_revision(self, a_metadata: Mapping[str, Any], - uncompressed_path: str) -> Dict: + def build_revision( + self, a_metadata: Mapping[str, Any], uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: time = a_metadata['time'] # assume it's a timestamp if isinstance(time, str): # otherwise, assume it's a parsable date time = iso8601.parse_date(time) - normalized_time = normalize_timestamp(time) - return { - 'type': 'tar', - 'message': REVISION_MESSAGE, - 'date': normalized_time, - 'author': SWH_PERSON, - 'committer': SWH_PERSON, - 'committer_date': normalized_time, - 'parents': [], - 'metadata': { + normalized_time = TimestampWithTimezone.from_datetime(time) + return Revision( + type=RevisionType.TAR, + message=REVISION_MESSAGE, + date=normalized_time, + author=SWH_PERSON, + committer=SWH_PERSON, + committer_date=normalized_time, + parents=[], + directory=directory, + synthetic=True, + metadata={ 'intrinsic': {}, 'extrinsic': { 'provider': 
self.url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, - } + ) diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py index 239c653..1a8ca77 100644 --- a/swh/loader/package/cran/loader.py +++ b/swh/loader/package/cran/loader.py @@ -1,186 +1,194 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import dateutil.parser import datetime import os import logging import re from datetime import timezone from os import path from typing import Any, Generator, Dict, List, Mapping, Optional, Tuple from debian.deb822 import Deb822 from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import ( release_name, parse_author, swh_author, artifact_identity ) -from swh.model.identifiers import normalize_timestamp +from swh.model.model import ( + TimestampWithTimezone, Sha1Git, Revision, RevisionType, +) logger = logging.getLogger(__name__) DATE_PATTERN = re.compile(r'^(?P\d{4})-(?P\d{2})$') class CRANLoader(PackageLoader): visit_type = 'cran' def __init__(self, url: str, artifacts: List[Dict]): """Loader constructor. Args: url: Origin url to retrieve cran artifact(s) from artifacts: List of associated artifact for the origin url """ super().__init__(url=url) # explicit what we consider the artifact identity self.id_keys = ['url', 'version'] self.artifacts = artifacts def get_versions(self) -> List[str]: versions = [] for artifact in self.artifacts: versions.append(artifact['version']) return versions def get_default_version(self) -> str: return self.artifacts[-1]['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Dict[str, Any]], None, None]: for a_metadata in self.artifacts: url = a_metadata['url'] package_version = a_metadata['version'] if version == package_version: p_info = { 'url': url, 'filename': path.basename(url), 'raw': a_metadata, } yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Mapping[bytes, Mapping], artifact_metadata: Mapping[str, Any]) \ -> Optional[bytes]: """Given known_artifacts per revision, try to determine the revision for artifact_metadata """ new_identity = artifact_identity(artifact_metadata, self.id_keys) for rev_id, known_artifact_meta in known_artifacts.items(): logging.debug('known_artifact_meta: %s', known_artifact_meta) known_artifact = known_artifact_meta['extrinsic']['raw'] known_identity = artifact_identity(known_artifact, self.id_keys) if new_identity == known_identity: return rev_id return None def build_revision( self, a_metadata: Mapping[str, Any], - uncompressed_path: str) -> Dict[str, Any]: + uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: # a_metadata is empty metadata = extract_intrinsic_metadata(uncompressed_path) - normalized_date = normalize_timestamp(parse_date(metadata.get('Date'))) + date = parse_date(metadata.get('Date')) author = swh_author(parse_author(metadata.get('Maintainer', {}))) version = metadata.get('Version', a_metadata['version']) - return { - 'message': version.encode('utf-8'), - 'type': 'tar', - 'date': normalized_date, - 'author': author, - 'committer': author, - 'committer_date': normalized_date, - 'parents': [], - 'metadata': { + return Revision( + message=version.encode('utf-8'), + type=RevisionType.TAR, + date=date, + author=author, + committer=author, + committer_date=date, + parents=[], 
+ directory=directory, + synthetic=True, + metadata={ 'intrinsic': { 'tool': 'DESCRIPTION', 'raw': metadata, }, 'extrinsic': { 'provider': self.url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, - } + ) def parse_debian_control(filepath: str) -> Dict[str, Any]: """Parse debian control at filepath""" metadata: Dict = {} logger.debug('Debian control file %s', filepath) for paragraph in Deb822.iter_paragraphs(open(filepath, 'rb')): logger.debug('paragraph: %s', paragraph) metadata.update(**paragraph) logger.debug('metadata parsed: %s', metadata) return metadata def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]: """Given an uncompressed path holding the DESCRIPTION file, returns a DESCRIPTION parsed structure as a dict. Cran origins describes their intrinsic metadata within a DESCRIPTION file at the root tree of a tarball. This DESCRIPTION uses a simple file format called DCF, the Debian control format. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the DESCRIPTION parsed structure as a dict (or empty dict if missing) """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] description_path = os.path.join(dir_path, project_dirname, 'DESCRIPTION') if not os.path.exists(description_path): return {} return parse_debian_control(description_path) -def parse_date(date: Optional[str]) -> Optional[datetime.datetime]: +def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]: """Parse a date into a datetime """ assert not date or isinstance(date, str) dt: Optional[datetime.datetime] = None if not date: - return dt + return None try: specific_date = DATE_PATTERN.match(date) if specific_date: year = int(specific_date.group('year')) month = int(specific_date.group('month')) dt = datetime.datetime(year, month, 1) else: dt = dateutil.parser.parse(date) if not dt.tzinfo: # up for discussion the timezone needs to be set or # normalize_timestamp is not happy: ValueError: normalize_timestamp # received datetime without timezone: 2001-06-08 00:00:00 dt = dt.replace(tzinfo=timezone.utc) except Exception as e: logger.warning('Fail to parse date %s. 
Reason: %s', (date, e)) - return dt + if dt: + return TimestampWithTimezone.from_datetime(dt) + else: + return None diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py index 017c2fe..eae2c4a 100644 --- a/swh/loader/package/cran/tests/test_cran.py +++ b/swh/loader/package/cran/tests/test_cran.py @@ -1,325 +1,330 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest from datetime import datetime, timezone from dateutil.tz import tzlocal from os import path from swh.loader.package.cran.loader import ( extract_intrinsic_metadata, CRANLoader, parse_date, parse_debian_control ) from swh.core.tarball import uncompress +from swh.model.model import TimestampWithTimezone from swh.loader.package.tests.common import ( check_snapshot, get_stats ) def test_cran_parse_date(): data = [ # parsable, some have debatable results though ('2001-June-08', datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)), ('Tue Dec 27 15:06:08 PST 2011', datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc)), ('8-14-2013', datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)), ('2011-01', datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)), ('201109', datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)), ('04-12-2014', datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)), ('2018-08-24, 10:40:10', datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc)), ('2013-October-16', datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)), ('Aug 23, 2013', datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)), ('27-11-2014', datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)), ('2019-09-26,', datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)), ('9/25/2014', datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)), ('Fri Jun 27 17:23:53 2014', datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc)), ('28-04-2014', datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)), ('04-14-2014', datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)), ('2019-05-08 14:17:31 UTC', datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc)), ('Wed May 21 13:50:39 CEST 2014', datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal())), ('2018-04-10 00:01:04 KST', datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc)), ('2019-08-25 10:45', datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)), ('March 9, 2015', datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)), ('Aug. 
18, 2012', datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)), ('2014-Dec-17', datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)), ('March 01, 2013', datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)), ('2017-04-08.', datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)), ('2014-Apr-22', datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)), ('Mon Jan 12 19:54:04 2015', datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc)), ('May 22, 2014', datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)), ('2014-08-12 09:55:10 EDT', datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc)), # unparsable ('Fabruary 21, 2012', None), ('2019-05-28"', None), ('2017-03-01 today', None), ('2016-11-0110.1093/icesjms/fsw182', None), ('2019-07-010', None), ('2015-02.23', None), ('20013-12-30', None), ('2016-08-017', None), ('2019-02-07l', None), ('2018-05-010', None), ('2019-09-27 KST', None), ('$Date$', None), ('2019-09-27 KST', None), ('2019-06-22 $Date$', None), ('$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $', None), ('2015-7-013', None), ('2018-05-023', None), ("Check NEWS file for changes: news(package='simSummary')", None) ] for date, expected_date in data: - actual_date = parse_date(date) - assert actual_date == expected_date, f'input date to parse {date}' + actual_tstz = parse_date(date) + if expected_date is None: + assert actual_tstz is None, date + else: + expected_tstz = TimestampWithTimezone.from_datetime(expected_date) + assert actual_tstz == expected_tstz, date @pytest.mark.fs def test_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) # sample url # https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz # noqa archive_path = path.join( datadir, 'https_cran.r-project.org', 'src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz') uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { 'Package': 'KernSmooth', 'Priority': 'recommended', 'Version': '2.22-6', 'Date': '2001-June-08', 'Title': 'Functions for kernel smoothing for Wand & Jones (1995)', 'Author': 'S original by Matt Wand.\n\tR port by Brian Ripley .', # noqa 'Maintainer': 'Brian Ripley ', 'Description': 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. 
(1995) "Kernel Smoothing".', # noqa 'License': 'Unlimited use and distribution (see LICENCE).', 'URL': 'http://www.biostat.harvard.edu/~mwand' } assert actual_metadata == expected_metadata @pytest.mark.fs def test_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" # inexistent first level path assert extract_intrinsic_metadata('/something-inexistent') == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path existing_path_no_pkginfo = str(tmp_path / 'something') os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} def test_cran_one_visit(swh_config, requests_mock_datadir): version = '2.22-6' base_url = 'https://cran.r-project.org' origin_url = f'{base_url}/Packages/Recommended_KernSmooth/index.html' artifact_url = f'{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz' # noqa loader = CRANLoader(origin_url, artifacts=[{ 'url': artifact_url, 'version': version, }]) actual_load_status = loader.load() expected_snapshot_id = '920adcccc78aaeedd3cfa4459dd900d8c3431a21' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'HEAD': {'target': f'releases/{version}', 'target_type': 'alias'}, f'releases/{version}': { 'target': '42bdb16facd5140424359c8ce89a28ecfa1ce603', 'target_type': 'revision' } } } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(origin_url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'cran' visit_stats = get_stats(loader.storage) assert { 'content': 33, 'directory': 7, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 1, 'skipped_content': 0, 'snapshot': 1 } == visit_stats urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] # visited each artifact once across 2 visits assert len(urls) == 1 def test_cran_2_visits_same_origin( swh_config, requests_mock_datadir): """Multiple visits on the same origin, only 1 archive fetch""" version = '2.22-6' base_url = 'https://cran.r-project.org' origin_url = f'{base_url}/Packages/Recommended_KernSmooth/index.html' artifact_url = f'{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz' # noqa loader = CRANLoader(origin_url, artifacts=[{ 'url': artifact_url, 'version': version }]) # first visit actual_load_status = loader.load() expected_snapshot_id = '920adcccc78aaeedd3cfa4459dd900d8c3431a21' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'HEAD': {'target': f'releases/{version}', 'target_type': 'alias'}, f'releases/{version}': { 'target': '42bdb16facd5140424359c8ce89a28ecfa1ce603', 'target_type': 'revision' } } } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(origin_url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'cran' visit_stats = get_stats(loader.storage) assert { 'content': 33, 'directory': 7, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 1, 'skipped_content': 0, 'snapshot': 1 } == visit_stats # second visit actual_load_status2 = loader.load() assert actual_load_status2 == { 'status': 'uneventful', 'snapshot_id': expected_snapshot_id } origin_visit2 = 
next(loader.storage.origin_visit_get(origin_url)) assert origin_visit2['status'] == 'full' assert origin_visit2['type'] == 'cran' visit_stats2 = get_stats(loader.storage) visit_stats['origin_visit'] += 1 assert visit_stats2 == visit_stats, 'same stats as 1st visit, +1 visit' urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] assert len(urls) == 1, 'visited one time artifact url (across 2 visits)' def test_parse_debian_control(datadir): description_file = os.path.join(datadir, 'description', 'acepack') actual_metadata = parse_debian_control(description_file) assert actual_metadata == { 'Package': 'acepack', 'Maintainer': 'Shawn Garbett', 'Version': '1.4.1', 'Author': 'Phil Spector, Jerome Friedman, Robert Tibshirani...', 'Description': 'Two nonparametric methods for multiple regression...', 'Title': 'ACE & AVAS 4 Selecting Multiple Regression Transformations', 'License': 'MIT + file LICENSE', 'Suggests': 'testthat', 'Packaged': '2016-10-28 15:38:59 UTC; garbetsp', 'Repository': 'CRAN', 'Date/Publication': '2016-10-29 00:11:52', 'NeedsCompilation': 'yes' } def test_parse_debian_control_unicode_issue(datadir): # iso-8859-1 caused failure, now fixed description_file = os.path.join( datadir, 'description', 'KnownBR' ) actual_metadata = parse_debian_control(description_file) assert actual_metadata == { 'Package': 'KnowBR', 'Version': '2.0', 'Title': '''Discriminating Well Surveyed Spatial Units from Exhaustive Biodiversity Databases''', 'Author': 'Cástor Guisande González and Jorge M. Lobo', 'Maintainer': 'Cástor Guisande González ', 'Description': 'It uses species accumulation curves and diverse estimators...', 'License': 'GPL (>= 2)', 'Encoding': 'latin1', 'Depends': 'R (>= 3.0), fossil, mgcv, plotrix, sp, vegan', 'Suggests': 'raster, rgbif', 'NeedsCompilation': 'no', 'Packaged': '2019-01-30 13:27:29 UTC; castor', 'Repository': 'CRAN', 'Date/Publication': '2019-01-31 20:53:50 UTC' } diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py index fd919f7..c68f709 100644 --- a/swh/loader/package/debian/loader.py +++ b/swh/loader/package/debian/loader.py @@ -1,404 +1,410 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils -import iso8601 import logging +from os import path import re import subprocess from dateutil.parser import parse as parse_date from debian.changelog import Changelog from debian.deb822 import Dsc -from os import path from typing import ( - Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple + Any, Generator, List, Mapping, Optional, Sequence, Tuple ) from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import download, release_name +from swh.model.model import ( + Sha1Git, Person, Revision, RevisionType, TimestampWithTimezone +) logger = logging.getLogger(__name__) UPLOADERS_SPLIT = re.compile(r'(?<=\>)\s*,\s*') class DebianLoader(PackageLoader): """Load debian origins into swh archive. """ visit_type = 'deb' def __init__(self, url: str, date: str, packages: Mapping[str, Any]): """Debian Loader implementation. Args: url: Origin url (e.g. 
deb://Debian/packages/cicero) date: Ignored packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3' 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ super().__init__(url=url) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...) """ return list(self.packages.keys()) def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: meta = self.packages[version] p_info = meta.copy() p_info['raw'] = meta yield release_name(version), p_info def resolve_revision_from( self, known_package_artifacts: Mapping, artifact_metadata: Mapping) \ -> Optional[bytes]: return resolve_revision_from( known_package_artifacts, artifact_metadata) def download_package(self, p_info: Mapping[str, Any], tmpdir: str) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `a_metadata` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. 
""" all_hashes = download_package(p_info, tmpdir) logger.debug('all_hashes: %s', all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) logger.debug('res: %s', res) return res def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str) -> str: logger.debug('dl_artifacts: %s', dl_artifacts) return extract_package(dl_artifacts, dest=dest) - def build_revision(self, a_metadata: Mapping[str, Any], - uncompressed_path: str) -> Dict: + def build_revision( + self, a_metadata: Mapping[str, Any], uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: dsc_url, dsc_name = dsc_information(a_metadata) if not dsc_name: raise ValueError( 'dsc name for url %s should not be None' % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) i_metadata = get_package_metadata( a_metadata, dsc_path, uncompressed_path) logger.debug('i_metadata: %s', i_metadata) logger.debug('a_metadata: %s', a_metadata) msg = 'Synthetic revision for Debian source package %s version %s' % ( a_metadata['name'], a_metadata['version']) - date = iso8601.parse_date(i_metadata['changelog']['date']) + date = TimestampWithTimezone.from_iso8601( + i_metadata['changelog']['date']) author = prepare_person(i_metadata['changelog']['person']) # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa - return { - 'type': 'dsc', - 'message': msg.encode('utf-8'), - 'author': author, - 'date': date, - 'committer': author, - 'committer_date': date, - 'parents': [], - 'metadata': { + return Revision( + type=RevisionType.DSC, + message=msg.encode('utf-8'), + author=author, + date=date, + committer=author, + committer_date=date, + parents=[], + directory=directory, + synthetic=True, + metadata={ 'intrinsic': { 'tool': 'dsc', 'raw': i_metadata, }, 'extrinsic': { 'provider': dsc_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, - } - } + }, + ) def resolve_revision_from(known_package_artifacts: Mapping, artifact_metadata: Mapping) -> Optional[bytes]: """Given known package artifacts (resolved from the snapshot of previous visit) and the new artifact to fetch, try to solve the corresponding revision. """ artifacts_to_fetch = artifact_metadata.get('files') if not artifacts_to_fetch: return None def to_set(data): return frozenset([ (name, meta['sha256'], meta['size']) for name, meta in data['files'].items() ]) # what we want to avoid downloading back if we have them already set_new_artifacts = to_set(artifact_metadata) known_artifacts_revision_id = {} for rev_id, known_artifacts in known_package_artifacts.items(): extrinsic = known_artifacts.get('extrinsic') if not extrinsic: continue s = to_set(extrinsic['raw']) known_artifacts_revision_id[s] = rev_id return known_artifacts_revision_id.get(set_new_artifacts) def uid_to_person(uid: str) -> Mapping[str, str]: """Convert an uid to a person suitable for insertion. Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ logger.debug('uid: %s', uid) ret = { 'name': '', 'email': '', 'fullname': uid, } name, mail = email.utils.parseaddr(uid) if name and email: ret['name'] = name ret['email'] = mail else: ret['name'] = uid return ret -def prepare_person(person: Mapping[str, str]) -> Mapping[str, bytes]: +def prepare_person(person: Mapping[str, str]) -> Person: """Prepare person for swh serialization... 
Args: A person dict Returns: - A person dict ready for storage + A person ready for storage """ - ret = {} - for key, value in person.items(): - ret[key] = value.encode('utf-8') - return ret + return Person.from_dict({ + key: value.encode('utf-8') + for (key, value) in person.items() + }) def download_package( package: Mapping[str, Any], tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. Args: package: Dict defining the set of files representing a debian package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} for filename, fileinfo in package['files'].items(): uri = fileinfo['uri'] logger.debug('fileinfo: %s', fileinfo) extrinsic_hashes = {'sha256': fileinfo['sha256']} logger.debug('extrinsic_hashes(%s): %s', filename, extrinsic_hashes) filepath, hashes = download(uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes) all_hashes[filename] = hashes logger.debug('all_hashes: %s', all_hashes) return all_hashes def dsc_information(package: Mapping[str, Any]) -> Tuple[ Optional[str], Optional[str]]: """Retrieve dsc information from a package. Args: package: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None for filename, fileinfo in package['files'].items(): if filename.endswith('.dsc'): if dsc_name: raise ValueError( 'Package %s_%s references several dsc files.' % (package['name'], package['version']) ) dsc_url = fileinfo['uri'] dsc_name = filename return dsc_url, dsc_name def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] logger.debug('dl_artifacts: %s', dl_artifacts) for _, hashes in dl_artifacts: logger.debug('hashes: %s', hashes) filename = hashes['filename'] if filename.endswith('.dsc'): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) destdir = path.join(dest, 'extracted') logfile = path.join(dest, 'extract.log') logger.debug('extract Debian source package %s in %s' % (dsc_path, destdir), extra={ 'swh_type': 'deb_extract', 'swh_dsc': dsc_path, 'swh_destdir': destdir, }) cmd = ['dpkg-source', '--no-copy', '--no-check', '--ignore-bad-version', '-x', dsc_path, destdir] try: with open(logfile, 'w') as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logdata = open(logfile, 'r').read() raise ValueError('dpkg-source exited with code %s: %s' % (e.returncode, logdata)) from None return destdir def get_package_metadata(package: Mapping[str, Any], dsc_path: str, extracted_path: str) -> Mapping[str, Any]: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. 
Args: package: the package dict (with a dsc_path key) dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ with open(dsc_path, 'rb') as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information changelog_path = path.join(extracted_path, 'debian/changelog') with open(changelog_path, 'rb') as changelog: try: parsed_changelog = Changelog(changelog) except UnicodeDecodeError: logger.warning('Unknown encoding for changelog %s,' ' falling back to iso' % changelog_path, extra={ 'swh_type': 'deb_changelog_encoding', 'swh_name': package['name'], 'swh_version': str(package['version']), 'swh_changelog': changelog_path, }) # need to reset as Changelog scrolls to the end of the file changelog.seek(0) parsed_changelog = Changelog(changelog, encoding='iso-8859-15') package_info = { 'name': package['name'], 'version': str(package['version']), 'changelog': { 'person': uid_to_person(parsed_changelog.author), 'date': parse_date(parsed_changelog.date).isoformat(), 'history': [(block.package, str(block.version)) for block in parsed_changelog][1:], } } maintainers = [ uid_to_person(parsed_dsc['Maintainer']), ] maintainers.extend( uid_to_person(person) for person in UPLOADERS_SPLIT.split(parsed_dsc.get('Uploaders', '')) ) package_info['maintainers'] = maintainers return package_info diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index fd5b41e..f4e3307 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,462 +1,464 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import logging import pytest import random from os import path from swh.loader.package.debian.loader import ( DebianLoader, download_package, dsc_information, uid_to_person, prepare_person, get_package_metadata, extract_package ) from swh.loader.package.tests.common import check_snapshot, get_stats from swh.loader.package.debian.loader import resolve_revision_from +from swh.model.model import Person + logger = logging.getLogger(__name__) PACKAGE_FILES = { 'name': 'cicero', 'version': '0.7.2-3', 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c', # noqa 'size': 3964, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz' # noqa }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03', # noqa 'size': 1864, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc'}, # noqa 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786', # noqa 'size': 96527, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz' # noqa } }, } PACKAGE_FILES2 = { 'name': 'cicero', 'version': '0.7.2-4', 'files': { 'cicero_0.7.2-4.diff.gz': { 'md5sum': 
'1e7e6fc4a59d57c98082a3af78145734', 'name': 'cicero_0.7.2-4.diff.gz', 'sha256': '2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01', # noqa 'size': 4038, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz' # noqa }, 'cicero_0.7.2-4.dsc': { 'md5sum': '1a6c8855a73b4282bb31d15518f18cde', 'name': 'cicero_0.7.2-4.dsc', 'sha256': '913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b', # noqa 'size': 1881, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc'}, # noqa 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786', # noqa 'size': 96527, 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz' # noqa } } } PACKAGE_PER_VERSION = { 'stretch/contrib/0.7.2-3': PACKAGE_FILES, } PACKAGES_PER_VERSION = { 'stretch/contrib/0.7.2-3': PACKAGE_FILES, 'buster/contrib/0.7.2-4': PACKAGE_FILES2, } def test_debian_first_visit( swh_config, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader( url='deb://Debian/packages/cicero', date='2019-10-12T05:58:09.165557+00:00', packages=PACKAGE_PER_VERSION) actual_load_status = loader.load() expected_snapshot_id = '3b6b66e6ee4e7d903a379a882684a2a50480c0b4' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } stats = get_stats(loader.storage) assert { 'content': 42, 'directory': 2, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 1, # all artifacts under 1 revision 'skipped_content': 0, 'snapshot': 1 } == stats expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'releases/stretch/contrib/0.7.2-3': { 'target_type': 'revision', 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', } }, } # different than the previous loader as no release is done check_snapshot(expected_snapshot, loader.storage) def test_debian_first_visit_then_another_visit( swh_config, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ url = 'deb://Debian/packages/cicero' loader = DebianLoader( url=url, date='2019-10-12T05:58:09.165557+00:00', packages=PACKAGE_PER_VERSION) actual_load_status = loader.load() expected_snapshot_id = '3b6b66e6ee4e7d903a379a882684a2a50480c0b4' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'deb' stats = get_stats(loader.storage) assert { 'content': 42, 'directory': 2, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 1, # all artifacts under 1 revision 'skipped_content': 0, 'snapshot': 1 } == stats expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'releases/stretch/contrib/0.7.2-3': { 'target_type': 'revision', 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', } }, } # different than the previous loader as no release is done check_snapshot(expected_snapshot, loader.storage) # No change in between load actual_load_status2 = loader.load() assert actual_load_status2['status'] == 'uneventful' origin_visit2 = list(loader.storage.origin_visit_get(url)) assert origin_visit2[-1]['status'] == 'full' assert origin_visit2[-1]['type'] == 'deb' stats2 = get_stats(loader.storage) assert { 'content': 42 + 0, 'directory': 2 + 0, 'origin': 1, 'origin_visit': 1 + 
1, # a new visit occurred 'person': 1, 'release': 0, 'revision': 1, 'skipped_content': 0, 'snapshot': 1, # same snapshot across 2 visits } == stats2 urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith('http://deb.debian.org') ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) def test_uid_to_person(): uid = 'Someone Name ' actual_person = uid_to_person(uid) assert actual_person == { 'name': 'Someone Name', 'email': 'someone@orga.org', 'fullname': uid, } def test_prepare_person(): actual_author = prepare_person({ 'name': 'Someone Name', 'email': 'someone@orga.org', 'fullname': 'Someone Name ', }) - assert actual_author == { - 'name': b'Someone Name', - 'email': b'someone@orga.org', - 'fullname': b'Someone Name ', - } + assert actual_author == Person( + name=b'Someone Name', + email=b'someone@orga.org', + fullname=b'Someone Name ', + ) def test_download_package(datadir, tmpdir, requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) all_hashes = download_package(PACKAGE_FILES, tmpdir) assert all_hashes == { 'cicero_0.7.2-3.diff.gz': { 'checksums': { 'sha1': '0815282053f21601b0ec4adf7a8fe47eace3c0bc', 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c' # noqa }, 'filename': 'cicero_0.7.2-3.diff.gz', 'length': 3964}, 'cicero_0.7.2-3.dsc': { 'checksums': { 'sha1': 'abbec4e8efbbc80278236e1dd136831eac08accd', 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03' # noqa }, 'filename': 'cicero_0.7.2-3.dsc', 'length': 1864}, 'cicero_0.7.2.orig.tar.gz': { 'checksums': { 'sha1': 'a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786' # noqa }, 'filename': 'cicero_0.7.2.orig.tar.gz', 'length': 96527 } } def test_dsc_information_ok(): fname = 'cicero_0.7.2-3.dsc' dsc_url, dsc_name = dsc_information(PACKAGE_FILES) assert dsc_url == PACKAGE_FILES['files'][fname]['uri'] assert dsc_name == PACKAGE_FILES['files'][fname]['name'] def test_dsc_information_not_found(): fname = 'cicero_0.7.2-3.dsc' package_files = copy.deepcopy(PACKAGE_FILES) package_files['files'].pop(fname) dsc_url, dsc_name = dsc_information(package_files) assert dsc_url is None assert dsc_name is None def test_dsc_information_too_many_dsc_entries(): # craft an extra dsc file fname = 'cicero_0.7.2-3.dsc' package_files = copy.deepcopy(PACKAGE_FILES) data = package_files['files'][fname] fname2 = fname.replace('cicero', 'ciceroo') package_files['files'][fname2] = data with pytest.raises( ValueError, match='Package %s_%s references several dsc' % ( package_files['name'], package_files['version'])): dsc_information(package_files) def test_get_package_metadata(requests_mock_datadir, datadir, tmp_path): tmp_path = str(tmp_path) # py3.5 compat. 
package = PACKAGE_FILES logger.debug('package: %s', package) # download the packages all_hashes = download_package(package, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(package) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) actual_package_info = get_package_metadata( package, dsc_path, extracted_path) logger.debug('actual_package_info: %s', actual_package_info) assert actual_package_info == { 'changelog': { 'date': '2014-10-19T16:52:35+02:00', 'history': [ ('cicero', '0.7.2-2'), ('cicero', '0.7.2-1'), ('cicero', '0.7-1') ], 'person': { 'email': 'sthibault@debian.org', 'fullname': 'Samuel Thibault ', 'name': 'Samuel Thibault' } }, 'maintainers': [ { 'email': 'debian-accessibility@lists.debian.org', 'fullname': 'Debian Accessibility Team ' '', 'name': 'Debian Accessibility Team' }, { 'email': 'sthibault@debian.org', 'fullname': 'Samuel Thibault ', 'name': 'Samuel Thibault' } ], 'name': 'cicero', 'version': '0.7.2-3' } def test_debian_multiple_packages(swh_config, requests_mock_datadir): url = 'deb://Debian/packages/cicero' loader = DebianLoader( url=url, date='2019-10-12T05:58:09.165557+00:00', packages=PACKAGES_PER_VERSION) actual_load_status = loader.load() expected_snapshot_id = 'defc19021187f3727293121fcf6c5c82cb923604' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'deb' expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'releases/stretch/contrib/0.7.2-3': { 'target_type': 'revision', 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', }, 'releases/buster/contrib/0.7.2-4': { 'target_type': 'revision', 'target': '8224139c274c984147ef4b09aa0e462c55a10bd3', } }, } check_snapshot(expected_snapshot, loader.storage) def test_resolve_revision_from_edge_cases(): """Solving revision with empty data will result in unknown revision """ for package_artifacts in [{}, PACKAGE_FILES]: actual_revision = resolve_revision_from( package_artifacts, {}) assert actual_revision is None for known_artifacts in [{}, PACKAGE_FILES]: actual_revision = resolve_revision_from( {}, known_artifacts) assert actual_revision is None known_package_artifacts = { b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07": { 'extrinsic': { # empty }, # ... removed the unnecessary intermediary data } } assert not resolve_revision_from(known_package_artifacts, PACKAGE_FILES) def test_resolve_revision_from_edge_cases_hit_and_miss(): """Solving revision with inconsistent data will result in unknown revision """ artifact_metadata = PACKAGE_FILES2 expected_revision_id = b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07" # noqa known_package_artifacts = { expected_revision_id: { 'extrinsic': { 'raw': PACKAGE_FILES, }, # ... 
removed the unnecessary intermediary data } } actual_revision = resolve_revision_from( known_package_artifacts, artifact_metadata ) assert actual_revision is None def test_resolve_revision_from(): """Solving revision with consistent data will solve the revision """ artifact_metadata = PACKAGE_FILES expected_revision_id = b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" # noqa files = artifact_metadata['files'] # shuffling dict's keys keys = list(files.keys()) random.shuffle(keys) package_files = { 'files': {k: files[k] for k in keys} } known_package_artifacts = { expected_revision_id: { 'extrinsic': { 'raw': package_files, }, # ... removed the unnecessary intermediary data } } actual_revision = resolve_revision_from( known_package_artifacts, artifact_metadata ) assert actual_revision == expected_revision_id diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py index 4d50cf5..85d30ea 100644 --- a/swh/loader/package/deposit/loader.py +++ b/swh/loader/package/deposit/loader.py @@ -1,233 +1,245 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import requests import types from typing import ( Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple, Union ) from swh.model.hashutil import hash_to_hex, hash_to_bytes +from swh.model.model import ( + Person, Revision, RevisionType, TimestampWithTimezone, Sha1Git, +) from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import download logger = logging.getLogger(__name__) class DepositLoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
""" visit_type = 'deposit' def __init__(self, url: str, deposit_id: str): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity """ super().__init__(url=url) config_deposit = self.config['deposit'] self.deposit_id = deposit_id self.client = ApiClient(url=config_deposit['url'], auth=config_deposit['auth']) self._metadata = None @property def metadata(self): if self._metadata is None: self._metadata = self.client.metadata_get(self.deposit_id) return self._metadata def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ['HEAD'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: p_info = { 'filename': 'archive.zip', 'raw': self.metadata, } yield 'HEAD', p_info def download_package(self, p_info: Mapping[str, Any], tmpdir: str) -> List[Tuple[str, Mapping]]: """Override to allow use of the dedicated deposit client """ return [self.client.archive_get( self.deposit_id, tmpdir, p_info['filename'])] def build_revision( - self, a_metadata: Dict, uncompressed_path: str) -> Dict: - revision = a_metadata.pop('revision') - metadata = { + self, a_metadata: Dict, uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: + revision_data = a_metadata.pop('revision') + + # FIXME: the deposit no longer needs to build the revision + + date = TimestampWithTimezone.from_dict(revision_data['date']) + metadata = revision_data['metadata'] + metadata.update({ 'extrinsic': { 'provider': self.client.metadata_url(self.deposit_id), 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, - } - - # FIXME: the deposit no longer needs to build the revision - revision['metadata'].update(metadata) - revision['author'] = parse_author(revision['author']) - revision['committer'] = parse_author(revision['committer']) - revision['message'] = revision['message'].encode('utf-8') - revision['type'] = 'tar' - parents = revision.get('parents', []) - revision['parents'] = [hash_to_bytes(p) for p in parents] - - return revision + }) + + return Revision( + type=RevisionType.TAR, + message=revision_data['message'].encode('utf-8'), + author=parse_author(revision_data['author']), + date=date, + committer=parse_author(revision_data['committer']), + committer_date=date, + parents=[hash_to_bytes(p) + for p in revision_data.get('parents', [])], + directory=directory, + synthetic=True, + metadata=metadata, + ) def load(self) -> Dict: # Usual loading r = super().load() success = r['status'] != 'failed' if success: # Update archive with metadata information origin_metadata = self.metadata['origin_metadata'] logger.debug('origin_metadata: %s', origin_metadata) tools = self.storage.tool_add([origin_metadata['tool']]) logger.debug('tools: %s', tools) tool_id = tools[0]['id'] provider = origin_metadata['provider'] # FIXME: Shall we delete this info? 
provider_id = self.storage.metadata_provider_add( provider['provider_name'], provider['provider_type'], provider['provider_url'], metadata=None) metadata = origin_metadata['metadata'] self.storage.origin_metadata_add( self.url, self.visit_date, provider_id, tool_id, metadata) # Update deposit status try: if not success: self.client.status_update(self.deposit_id, status='failed') return r snapshot_id = hash_to_bytes(r['snapshot_id']) branches = self.storage.snapshot_get(snapshot_id)['branches'] logger.debug('branches: %s', branches) if not branches: return r rev_id = branches[b'HEAD']['target'] revisions = self.storage.revision_get([rev_id]) # FIXME: inconsistency between tests and production code if isinstance(revisions, types.GeneratorType): revisions = list(revisions) revision = revisions[0] # Retrieve the revision identifier dir_id = revision['directory'] # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_id, status='done', revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(dir_id), origin_url=self.url) except Exception: logger.exception( 'Problem when trying to update the deposit\'s status') return {'status': 'failed'} return r -def parse_author(author): +def parse_author(author) -> Person: """See prior fixme """ - return { - 'fullname': author['fullname'].encode('utf-8'), - 'name': author['name'].encode('utf-8'), - 'email': author['email'].encode('utf-8'), - } + return Person( + fullname=author['fullname'].encode('utf-8'), + name=author['name'].encode('utf-8'), + email=author['email'].encode('utf-8'), + ) class ApiClient: """Private Deposit Api client """ def __init__(self, url, auth: Optional[Mapping[str, str]]): self.base_url = url.rstrip('/') self.auth = None if not auth else (auth['username'], auth['password']) def do(self, method: str, url: str, *args, **kwargs): """Internal method to deal with requests, possibly with basic http authentication. Args: method (str): supported http methods as in get/post/put Returns: The request's execution output """ method_fn = getattr(requests, method) if self.auth: kwargs['auth'] = self.auth return method_fn(url, *args, **kwargs) def archive_get( self, deposit_id: Union[int, str], tmpdir: str, filename: str) -> Tuple[str, Dict]: """Retrieve deposit's archive artifact locally """ url = f'{self.base_url}/{deposit_id}/raw/' return download(url, dest=tmpdir, filename=filename, auth=self.auth) def metadata_url(self, deposit_id: Union[int, str]) -> str: return f'{self.base_url}/{deposit_id}/meta/' def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]: """Retrieve deposit's metadata artifact as json """ url = self.metadata_url(deposit_id) r = self.do('get', url) if r.ok: return r.json() msg = f'Problem when retrieving deposit metadata at {url}' logger.error(msg) raise ValueError(msg) def status_update(self, deposit_id: Union[int, str], status: str, revision_id: Optional[str] = None, directory_id: Optional[str] = None, origin_url: Optional[str] = None): """Update deposit's information including status, and persistent identifiers result of the loading. 
""" url = f'{self.base_url}/{deposit_id}/update/' payload = {'status': status} if revision_id: payload['revision_id'] = revision_id if directory_id: payload['directory_id'] = directory_id if origin_url: payload['origin_url'] = origin_url self.do('put', url, json=payload) diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 05d7cc7..d6f08a0 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,423 +1,432 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import logging import tempfile import os from typing import ( Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple ) +import attr + from swh.core.tarball import uncompress from swh.core.config import SWHConfig -from swh.model.from_disk import Directory +from swh.model import from_disk from swh.model.hashutil import hash_to_hex -from swh.model.identifiers import ( - revision_identifier, snapshot_identifier, identifier_to_bytes +from swh.model.model import ( + BaseModel, Sha1Git, + Content, SkippedContent, Directory, + Revision, + TargetType, Snapshot, + Origin ) -from swh.model.model import Sha1Git from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_all_branches -from swh.loader.core.converters import prepare_contents from swh.loader.package.utils import download logger = logging.getLogger(__name__) # Not implemented yet: # - clean up disk routines from previous killed workers (when OOMkilled) # -> separation of concern would like this to be abstracted from the code # -> experience tells us it's complicated to do as such (T903, T964, T982, # etc...) # # - model: swh.model.merkle.from_disk should output swh.model.model.* objects # to avoid this layer's conversion routine call # -> Take this up within swh.model's current implementation class PackageLoader: # Origin visit type (str) set by the loader visit_type = '' def __init__(self, url): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). Args: url (str): Origin url to load data from """ # This expects to use the environment variable SWH_CONFIG_FILENAME self.config = SWHConfig.parse_config_file() self._check_configuration() self.storage = get_storage(**self.config['storage']) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) self.max_content_size = self.config['max_content_size'] def _check_configuration(self): """Checks the minimal configuration required is set for the loader. If some required configuration is missing, exception detailing the issue is raised. """ if 'storage' not in self.config: raise ValueError( 'Misconfiguration, at least the storage key should be set') def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: """Given a release version of a package, retrieve the associated package information for such version. 
Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( - self, a_metadata: Dict, uncompressed_path: str) -> Dict: - """Build the revision dict from the archive metadata (extrinsic + self, a_metadata: Dict, uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: + """Build the revision from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. Args: a_metadata: Artifact metadata uncompressed_path: Artifact uncompressed path on disk Returns: SWH data dict """ - return {} + raise NotImplementedError('build_revision') def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ return '' - def last_snapshot(self) -> Optional[Dict]: + def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot """ snapshot = None visit = self.storage.origin_visit_get_latest( self.url, require_snapshot=True) - if visit: - snapshot = snapshot_get_all_branches( - self.storage, visit['snapshot']) + if visit and visit.get('snapshot'): + snapshot = Snapshot.from_dict(snapshot_get_all_branches( + self.storage, visit['snapshot'])) return snapshot - def known_artifacts(self, snapshot: Optional[Dict]) -> Dict: + def known_artifacts( + self, snapshot: Optional[Snapshot]) -> Dict[Sha1Git, BaseModel]: """Retrieve the known releases/artifact for the origin. Args snapshot: snapshot for the visit Returns: Dict of keys revision id (bytes), values a metadata Dict. """ - if not snapshot or 'branches' not in snapshot: + if not snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) - revs = [rev['target'] - for rev in snapshot['branches'].values() - if rev and rev['target_type'] == 'revision'] + revs = [rev.target + for rev in snapshot.branches.values() + if rev and rev.target_type == TargetType.REVISION] known_revisions = self.storage.revision_get(revs) ret = {} for revision in known_revisions: if not revision: # revision_get can return None continue ret[revision['id']] = revision['metadata'] return ret def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: """Resolve the revision from a snapshot and an artifact metadata dict. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None. Args: snapshot: Snapshot artifact_metadata: Information dict Returns: None or revision identifier """ return None def download_package(self, p_info: Mapping[str, Any], tmpdir: str) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ a_uri = p_info['url'] filename = p_info.get('filename') return [download(a_uri, dest=tmpdir, filename=filename)] def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian). 
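To make the hook contract above concrete, here is a minimal sketch of a hypothetical PackageLoader subclass. The 'example' index, its URLs and its metadata fields are all invented; known-artifact resolution and error handling are left out, and instantiation still relies on the usual loader configuration (SWH_CONFIG_FILENAME). EMPTY_AUTHOR is the helper used by the PyPI loader.

from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple

from swh.loader.package.loader import PackageLoader
from swh.loader.package.utils import release_name, EMPTY_AUTHOR
from swh.model.model import (
    Revision, RevisionType, Sha1Git, TimestampWithTimezone,
)


class ExampleLoader(PackageLoader):
    """Hypothetical loader for a made-up 'example' package index."""
    visit_type = 'example'

    def get_versions(self) -> Sequence[str]:
        # A real loader would fetch and parse an index document for self.url
        return ['1.0.0', '1.1.0']

    def get_default_version(self) -> str:
        return '1.1.0'

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        # 'url'/'filename' feed the default download_package(); 'raw' is the
        # extrinsic metadata later handed to build_revision()
        yield release_name(version), {
            'url': f'{self.url}/example-{version}.tar.gz',
            'filename': f'example-{version}.tar.gz',
            'raw': {'version': version,
                    'upload_time': '2020-01-01T00:00:00+00:00'},
        }

    def build_revision(self, a_metadata: Dict, uncompressed_path: str,
                       directory: Sha1Git) -> Optional[Revision]:
        date = TimestampWithTimezone.from_iso8601(a_metadata['upload_time'])
        return Revision(
            type=RevisionType.TAR,
            message=a_metadata['version'].encode('utf-8'),
            author=EMPTY_AUTHOR,
            date=date,
            committer=EMPTY_AUTHOR,
            committer_date=date,
            parents=[],
            directory=directory,
            synthetic=True,
            metadata={'extrinsic': {'provider': self.url,
                                    'when': self.visit_date.isoformat(),
                                    'raw': a_metadata}},
        )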
""" uncompressed_path = os.path.join(dest, 'src') for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the branch information from step 0., generate a snapshot and load it into the Software Heritage archive """ status_load = 'uneventful' # either: eventful, uneventful, failed status_visit = 'full' # either: partial, full tmp_revisions = {} # type: Dict[str, List] snapshot = None # Prepare origin and origin_visit - origin = {'url': self.url} + origin = Origin(url=self.url) try: self.storage.origin_add_one(origin) visit_id = self.storage.origin_visit_add( origin=self.url, date=self.visit_date, type=self.visit_type)['visit'] - except Exception as e: - logger.error( - 'Failed to create origin/origin_visit. 
Reason: %s', e) + except Exception: + logger.exception('Failed to create origin/origin_visit:') return {'status': 'failed'} try: last_snapshot = self.last_snapshot() logger.debug('last snapshot: %s', last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) logger.debug('known artifacts: %s', known_artifacts) # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug('default version: %s', default_version) for version in self.get_versions(): # for each logger.debug('version: %s', version) tmp_revisions[version] = [] # `p_` stands for `package_` for branch_name, p_info in self.get_package_info(version): logger.debug('package_info: %s', p_info) revision_id = self.resolve_revision_from( known_artifacts, p_info['raw']) if revision_id is None: (revision_id, loaded) = \ self._load_revision(p_info, origin) if loaded: status_load = 'eventful' else: status_visit = 'partial' if revision_id is None: continue tmp_revisions[version].append((branch_name, revision_id)) logger.debug('tmp_revisions: %s', tmp_revisions) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in tmp_revisions.items(): if version == default_version and \ len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) if branch_name != 'HEAD': branches[b'HEAD'] = { 'target_type': 'alias', 'target': branch_name.encode('utf-8'), } for branch_name, target in branch_name_revisions: branches[branch_name.encode('utf-8')] = { 'target_type': 'revision', 'target': target, } - snapshot = { + snapshot_data = { 'branches': branches } - logger.debug('snapshot: %s', snapshot) + logger.debug('snapshot: %s', snapshot_data) - snapshot['id'] = identifier_to_bytes( - snapshot_identifier(snapshot)) + snapshot = Snapshot.from_dict(snapshot_data) logger.debug('snapshot: %s', snapshot) self.storage.snapshot_add([snapshot]) if hasattr(self.storage, 'flush'): self.storage.flush() except Exception: logger.exception('Fail to load %s' % self.url) status_visit = 'partial' status_load = 'failed' finally: self.storage.origin_visit_update( origin=self.url, visit_id=visit_id, status=status_visit, - snapshot=snapshot and snapshot['id']) + snapshot=snapshot and snapshot.id) result = { 'status': status_load, } # type: Dict[str, Any] if snapshot: - result['snapshot_id'] = hash_to_hex(snapshot['id']) + result['snapshot_id'] = hash_to_hex(snapshot.id) return result def _load_revision(self, p_info, origin) -> Tuple[Optional[Sha1Git], bool]: """Does all the loading of a revision itself: * downloads a package and uncompresses it * loads it from disk * adds contents, directories, and revision to self.storage * returns (revision_id, loaded) """ with tempfile.TemporaryDirectory() as tmpdir: try: dl_artifacts = self.download_package(p_info, tmpdir) except Exception: logger.exception('Unable to retrieve %s', p_info) return (None, False) uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) logger.debug('uncompressed_path: %s', uncompressed_path) - directory = Directory.from_disk( + directory = from_disk.Directory.from_disk( path=uncompressed_path.encode('utf-8'), - data=True) # noqa - # FIXME: Try not to load the full raw content in - # memory - objects = directory.collect() - - contents, skipped_contents = prepare_contents( - objects.get('content', {}).values(), - max_content_size=self.max_content_size, - 
origin_url=origin['url']) - self.storage.skipped_content_add(skipped_contents) + max_content_length=self.max_content_size) + + contents: List[Content] = [] + skipped_contents: List[SkippedContent] = [] + directories: List[Directory] = [] + + for obj in directory.iter_tree(): + obj = obj.to_model() + if isinstance(obj, Content): + # FIXME: read the data from disk later (when the + # storage buffer is flushed). + obj = obj.with_data() + contents.append(obj) + elif isinstance(obj, SkippedContent): + skipped_contents.append(obj) + elif isinstance(obj, Directory): + directories.append(obj) + else: + raise TypeError( + f'Unexpected content type from disk: {obj}') + logger.debug('Number of skipped contents: %s', len(skipped_contents)) - self.storage.content_add(contents) + self.storage.skipped_content_add(skipped_contents) logger.debug('Number of contents: %s', len(contents)) + self.storage.content_add(contents) - directories = list( - objects.get('directory', {}).values()) logger.debug('Number of directories: %s', len(directories)) self.storage.directory_add(directories) # FIXME: This should be release. cf. D409 - revision = self.build_revision(p_info['raw'], uncompressed_path) + revision = self.build_revision( + p_info['raw'], uncompressed_path, directory=directory.hash) if not revision: # Some artifacts are missing intrinsic metadata # skipping those return (None, True) - revision.update({ - 'synthetic': True, - 'directory': directory.hash, - }) - - revision['metadata'].update({ + metadata = revision.metadata or {} + metadata.update({ 'original_artifact': [ hashes for _, hashes in dl_artifacts ], }) - - revision['id'] = identifier_to_bytes( - revision_identifier(revision)) + revision = attr.evolve(revision, metadata=metadata) logger.debug('Revision: %s', revision) self.storage.revision_add([revision]) - return (revision['id'], True) + return (revision.id, True) diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py index fe015d6..6815a2b 100644 --- a/swh/loader/package/npm/loader.py +++ b/swh/loader/package/npm/loader.py @@ -1,280 +1,287 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from codecs import BOM_UTF8 from typing import Any, Dict, Generator, Mapping, Sequence, Tuple, Optional +import attr import chardet -import iso8601 from urllib.parse import quote -from swh.model.identifiers import normalize_timestamp +from swh.model.model import ( + Person, RevisionType, Revision, TimestampWithTimezone, Sha1Git, +) + from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import ( api_info, release_name, parse_author, swh_author ) logger = logging.getLogger(__name__) class NpmLoader(PackageLoader): """Load npm origin's artifact releases into swh archive. """ visit_type = 'npm' def __init__(self, url: str): """Constructor Args str: origin url (e.g. 
https://www.npmjs.com/package/) """ super().__init__(url=url) package_name = url.split('https://www.npmjs.com/package/')[1] safe_name = quote(package_name, safe='') self.provider_url = f'https://replicate.npmjs.com/{safe_name}/' self._info: Dict[str, Any] = {} self._versions = None @property def info(self) -> Dict[str, Any]: """Return the project metadata information (fetched from npm registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return sorted(list(self.info['versions'].keys())) def get_default_version(self) -> str: return self.info['dist-tags'].get('latest', '') def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: meta = self.info['versions'][version] url = meta['dist']['tarball'] p_info = { 'url': url, 'filename': os.path.basename(url), 'raw': meta, } yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, artifact_metadata) def build_revision( - self, a_metadata: Dict, uncompressed_path: str) -> Dict: + self, a_metadata: Dict, uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: - return {} + return None # from intrinsic metadata author = extract_npm_package_author(i_metadata) message = i_metadata['version'].encode('ascii') # from extrinsic metadata # No date available in intrinsic metadata: retrieve it from the API # metadata, using the version number that the API claims this package # has. extrinsic_version = a_metadata['version'] if 'time' in self.info: date = self.info['time'][extrinsic_version] elif 'mtime' in a_metadata: date = a_metadata['mtime'] else: artifact_name = os.path.basename(a_metadata['dist']['tarball']) raise ValueError( 'Origin %s: Cannot determine upload time for artifact %s.' % (self.url, artifact_name) ) - date = iso8601.parse_date(date) - date = normalize_timestamp(int(date.timestamp())) - - return { - 'type': 'tar', - 'message': message, - 'author': author, - 'date': date, - 'committer': author, - 'committer_date': date, - 'parents': [], - 'metadata': { + date = TimestampWithTimezone.from_iso8601(date) + + # FIXME: this is to remain bug-compatible with earlier versions: + date = attr.evolve(date, timestamp=attr.evolve( + date.timestamp, microseconds=0)) + + r = Revision( + type=RevisionType.TAR, + message=message, + author=author, + date=date, + committer=author, + committer_date=date, + parents=[], + directory=directory, + synthetic=True, + metadata={ 'intrinsic': { 'tool': 'package.json', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, - } + ) + return r def artifact_to_revision_id( known_artifacts: Dict, artifact_metadata: Dict) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats: - old format sample:: { 'package_source': { 'sha1': '05181c12cd8c22035dd31155656826b85745da37', } } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... 
} """ shasum = artifact_metadata['dist']['shasum'] for rev_id, known_artifact in known_artifacts.items(): known_original_artifact = known_artifact.get('original_artifact') if not known_original_artifact: # previous loader-npm version kept original artifact elsewhere known_original_artifact = known_artifact.get('package_source') if not known_original_artifact: continue original_hash = known_original_artifact['sha1'] else: assert isinstance(known_original_artifact, list) original_hash = known_original_artifact[0]['checksums']['sha1'] if shasum == original_hash: return rev_id return None -def extract_npm_package_author(package_json): +def extract_npm_package_author(package_json) -> Person: """ Extract package author from a ``package.json`` file content and return it in swh format. Args: package_json (dict): Dict holding the content of parsed ``package.json`` file Returns: - dict: A dict with the following keys: - * fullname - * name - * email + Person """ def _author_str(author_data): if type(author_data) is dict: author_str = '' if 'name' in author_data: author_str += author_data['name'] if 'email' in author_data: author_str += ' <%s>' % author_data['email'] return author_str elif type(author_data) is list: return _author_str(author_data[0]) if len(author_data) > 0 else '' else: return author_data author_data = {} for author_key in ('author', 'authors'): if author_key in package_json: author_str = _author_str(package_json[author_key]) author_data = parse_author(author_str) return swh_author(author_data) def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom):] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode('utf-8') except UnicodeDecodeError: encoding = chardet.detect(json_bytes)['encoding'] if encoding: json_str = json_bytes.decode(encoding, 'replace') try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, 'package.json') if not os.path.exists(package_json_path): return {} with open(package_json_path, 'rb') as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py index 7a5010a..0348630 100644 --- a/swh/loader/package/npm/tests/test_npm.py +++ b/swh/loader/package/npm/tests/test_npm.py @@ -1,590 +1,591 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import pytest from swh.model.hashutil import hash_to_bytes +from swh.model.model import Person from swh.loader.package.npm.loader import ( NpmLoader, extract_npm_package_author, artifact_to_revision_id ) from swh.loader.package.tests.common import ( check_snapshot, check_metadata_paths, get_stats ) def test_extract_npm_package_author(datadir): package_metadata_filepath = os.path.join( datadir, 'https_replicate.npmjs.com', 'org_visit1') with open(package_metadata_filepath) as json_file: package_metadata = json.load(json_file) extract_npm_package_author(package_metadata['versions']['0.0.2']) == \ - { - 'fullname': b'mooz ', - 'name': b'mooz', - 'email': b'stillpedant@gmail.com' - } + Person( + fullname=b'mooz ', + name=b'mooz', + email=b'stillpedant@gmail.com' + ) assert ( extract_npm_package_author(package_metadata['versions']['0.0.3']) == - { - 'fullname': b'Masafumi Oyamada ', - 'name': b'Masafumi Oyamada', - 'email': b'stillpedant@gmail.com' - } + Person( + fullname=b'Masafumi Oyamada ', + name=b'Masafumi Oyamada', + email=b'stillpedant@gmail.com' + ) ) package_json = json.loads(''' { "name": "highlightjs-line-numbers.js", "version": "2.7.0", "description": "Highlight.js line numbers plugin.", "main": "src/highlightjs-line-numbers.js", "dependencies": {}, "devDependencies": { "gulp": "^4.0.0", "gulp-rename": "^1.4.0", "gulp-replace": "^0.6.1", "gulp-uglify": "^1.2.0" }, "repository": { "type": "git", "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git" }, "author": "Yauheni Pakala ", "license": "MIT", "bugs": { "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues" }, "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/" }''') # noqa assert extract_npm_package_author(package_json) == \ - { - 'fullname': b'Yauheni Pakala ', - 'name': b'Yauheni Pakala', - 'email': b'evgeniy.pakalo@gmail.com' - } + Person( + fullname=b'Yauheni Pakala ', + name=b'Yauheni Pakala', + email=b'evgeniy.pakalo@gmail.com' + ) package_json = json.loads(''' { "name": "3-way-diff", "version": "0.0.1", "description": "3-way diffing of JavaScript objects", "main": "index.js", "authors": [ { "name": "Shawn Walsh", "url": "https://github.com/shawnpwalsh" }, { "name": "Markham F Rollins IV", "url": "https://github.com/mrollinsiv" } ], "keywords": [ "3-way diff", "3 way diff", "three-way diff", "three way diff" ], "devDependencies": { "babel-core": "^6.20.0", "babel-preset-es2015": "^6.18.0", "mocha": "^3.0.2" }, "dependencies": { "lodash": "^4.15.0" } }''') assert extract_npm_package_author(package_json) == \ - { - 'fullname': b'Shawn Walsh', - 'name': 
b'Shawn Walsh', - 'email': None - } + Person( + fullname=b'Shawn Walsh', + name=b'Shawn Walsh', + email=None + ) package_json = json.loads(''' { "name": "yfe-ynpm", "version": "1.0.0", "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm", "repository": { "type": "git", "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git" }, "author": [ "fengmk2 (https://fengmk2.com)", "xufuzi (https://7993.org)" ], "license": "MIT" }''') assert extract_npm_package_author(package_json) == \ - { - 'fullname': b'fengmk2 ', - 'name': b'fengmk2', - 'email': b'fengmk2@gmail.com' - } + Person( + fullname=b'fengmk2 ', + name=b'fengmk2', + email=b'fengmk2@gmail.com' + ) package_json = json.loads(''' { "name": "umi-plugin-whale", "version": "0.0.8", "description": "Internal contract component", "authors": { "name": "xiaohuoni", "email": "448627663@qq.com" }, "repository": "alitajs/whale", "devDependencies": { "np": "^3.0.4", "umi-tools": "*" }, "license": "MIT" }''') assert extract_npm_package_author(package_json) == \ - { - 'fullname': b'xiaohuoni <448627663@qq.com>', - 'name': b'xiaohuoni', - 'email': b'448627663@qq.com' - } + Person( + fullname=b'xiaohuoni <448627663@qq.com>', + name=b'xiaohuoni', + email=b'448627663@qq.com' + ) def normalize_hashes(hashes): if isinstance(hashes, str): return hash_to_bytes(hashes) if isinstance(hashes, list): return [hash_to_bytes(x) for x in hashes] return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()} _expected_new_contents_first_visit = normalize_hashes([ '4ce3058e16ab3d7e077f65aabf855c34895bf17c', '858c3ceee84c8311adc808f8cdb30d233ddc9d18', '0fa33b4f5a4e0496da6843a38ff1af8b61541996', '85a410f8ef8eb8920f2c384a9555566ad4a2e21b', '9163ac8025923d5a45aaac482262893955c9b37b', '692cf623b8dd2c5df2c2998fd95ae4ec99882fb4', '18c03aac6d3e910efb20039c15d70ab5e0297101', '41265c42446aac17ca769e67d1704f99e5a1394d', '783ff33f5882813dca9239452c4a7cadd4dba778', 'b029cfb85107aee4590c2434a3329bfcf36f8fa1', '112d1900b4c2e3e9351050d1b542c9744f9793f3', '5439bbc4bd9a996f1a38244e6892b71850bc98fd', 'd83097a2f994b503185adf4e719d154123150159', 'd0939b4898e83090ee55fd9d8a60e312cfadfbaf', 'b3523a26f7147e4af40d9d462adaae6d49eda13e', 'cd065fb435d6fb204a8871bcd623d0d0e673088c', '2854a40855ad839a54f4b08f5cff0cf52fca4399', 'b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe', '0f73d56e1cf480bded8a1ecf20ec6fc53c574713', '0d9882b2dfafdce31f4e77fe307d41a44a74cefe', '585fc5caab9ead178a327d3660d35851db713df1', 'e8cd41a48d79101977e3036a87aeb1aac730686f', '5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7', '9c3cc2763bf9e9e37067d3607302c4776502df98', '3649a68410e354c83cd4a38b66bd314de4c8f5c9', 'e96ed0c091de1ebdf587104eaf63400d1974a1fe', '078ca03d2f99e4e6eab16f7b75fbb7afb699c86c', '38de737da99514de6559ff163c988198bc91367a', ]) _expected_new_directories_first_visit = normalize_hashes([ '3370d20d6f96dc1c9e50f083e2134881db110f4f', '42753c0c2ab00c4501b552ac4671c68f3cf5aece', 'd7895533ef5edbcffdea3f057d9fef3a1ef845ce', '80579be563e2ef3e385226fe7a3f079b377f142c', '3b0ddc6a9e58b4b53c222da4e27b280b6cda591c', 'bcad03ce58ac136f26f000990fc9064e559fe1c0', '5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca', 'e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd', '584b5b4b6cf7f038095e820b99386a9c232de931', '184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a', 'bb5f4ee143c970367eb409f2e4c1104898048b9d', '1b95491047add1103db0dfdfa84a9735dcb11e88', 'a00c6de13471a2d66e64aca140ddb21ef5521e62', '5ce6c1cd5cda2d546db513aaad8c72a44c7771e2', 'c337091e349b6ac10d38a49cdf8c2401ef9bb0f2', '202fafcd7c0f8230e89d5496ad7f44ab12b807bf', '775cc516543be86c15c1dc172f49c0d4e6e78235', 
'ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e', ]) _expected_new_revisions_first_visit = normalize_hashes({ 'd8a1c7474d2956ac598a19f0f27d52f7015f117e': '42753c0c2ab00c4501b552ac4671c68f3cf5aece', '5f9eb78af37ffd12949f235e86fac04898f9f72a': '3370d20d6f96dc1c9e50f083e2134881db110f4f', 'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a': 'd7895533ef5edbcffdea3f057d9fef3a1ef845ce'} ) def package_url(package): return 'https://www.npmjs.com/package/%s' % package def package_metadata_url(package): return 'https://replicate.npmjs.com/%s/' % package def test_revision_metadata_structure(swh_config, requests_mock_datadir): package = 'org' loader = NpmLoader(package_url(package)) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' assert actual_load_status['snapshot_id'] is not None expected_revision_id = hash_to_bytes( 'd8a1c7474d2956ac598a19f0f27d52f7015f117e') revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None check_metadata_paths(revision['metadata'], paths=[ ('intrinsic.tool', str), ('intrinsic.raw', dict), ('extrinsic.provider', str), ('extrinsic.when', str), ('extrinsic.raw', dict), ('original_artifact', list), ]) for original_artifact in revision['metadata']['original_artifact']: check_metadata_paths(original_artifact, paths=[ ('filename', str), ('length', int), ('checksums', dict), ]) def test_npm_loader_first_visit(swh_config, requests_mock_datadir): package = 'org' loader = NpmLoader(package_url(package)) actual_load_status = loader.load() expected_snapshot_id = 'd0587e1195aed5a8800411a008f2f2d627f18e2d' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } stats = get_stats(loader.storage) assert { 'content': len(_expected_new_contents_first_visit), 'directory': len(_expected_new_directories_first_visit), 'origin': 1, 'origin_visit': 1, 'person': 2, 'release': 0, 'revision': len(_expected_new_revisions_first_visit), 'skipped_content': 0, 'snapshot': 1, } == stats assert len(list(loader.storage.content_get( _expected_new_contents_first_visit))) == len( _expected_new_contents_first_visit) assert list(loader.storage.directory_missing( _expected_new_directories_first_visit)) == [] assert list(loader.storage.revision_missing( _expected_new_revisions_first_visit)) == [] expected_snapshot = { 'id': expected_snapshot_id, 'branches': { 'HEAD': { 'target': 'releases/0.0.4', 'target_type': 'alias' }, 'releases/0.0.2': { 'target': 'd8a1c7474d2956ac598a19f0f27d52f7015f117e', 'target_type': 'revision' }, 'releases/0.0.3': { 'target': '5f9eb78af37ffd12949f235e86fac04898f9f72a', 'target_type': 'revision' }, 'releases/0.0.4': { 'target': 'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a', 'target_type': 'revision' } } } check_snapshot(expected_snapshot, loader.storage) def test_npm_loader_incremental_visit( swh_config, requests_mock_datadir_visits): package = 'org' url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' assert actual_load_status['status'] is not None origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'npm' stats = get_stats(loader.storage) assert { 'content': len(_expected_new_contents_first_visit), 'directory': len(_expected_new_directories_first_visit), 'origin': 1, 'origin_visit': 1, 'person': 2, 'release': 0, 'revision': len(_expected_new_revisions_first_visit), 'skipped_content': 0, 'snapshot': 1, } == stats loader._info = None # 
reset loader internal state actual_load_status2 = loader.load() assert actual_load_status2['status'] == 'eventful' snap_id2 = actual_load_status2['snapshot_id'] assert snap_id2 is not None assert snap_id2 != actual_load_status['snapshot_id'] origin_visit2 = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit2['status'] == 'full' assert origin_visit2['type'] == 'npm' stats = get_stats(loader.storage) assert { # 3 new releases artifacts 'content': len(_expected_new_contents_first_visit) + 14, 'directory': len(_expected_new_directories_first_visit) + 15, 'origin': 1, 'origin_visit': 2, 'person': 2, 'release': 0, 'revision': len(_expected_new_revisions_first_visit) + 3, 'skipped_content': 0, 'snapshot': 2, } == stats urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith('https://registry.npmjs.org') ] assert len(urls) == len(set(urls)) # we visited each artifact once across @pytest.mark.usefixtures('requests_mock_datadir') def test_npm_loader_version_divergence(swh_config): package = '@aller_shared' url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' assert actual_load_status['status'] is not None origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'npm' stats = get_stats(loader.storage) assert { # 1 new releases artifacts 'content': 534, 'directory': 153, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 2, 'skipped_content': 0, 'snapshot': 1, } == stats expected_snapshot = { 'id': 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92', 'branches': { 'HEAD': { 'target_type': 'alias', 'target': 'releases/0.1.0' }, 'releases/0.1.0': { 'target_type': 'revision', 'target': '845673bfe8cbd31b1eaf757745a964137e6f9116', }, 'releases/0.1.1-alpha.14': { 'target_type': 'revision', 'target': '05181c12cd8c22035dd31155656826b85745da37', }, }, } check_snapshot(expected_snapshot, loader.storage) def test_npm_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ artifact_metadata = { 'dist': { 'shasum': '05181c12cd8c22035dd31155656826b85745da37', }, } known_artifacts = { 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92': {}, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_npm_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ artifact_metadata = { 'dist': { 'shasum': '05181c12cd8c22035dd31155656826b85745da37', } } known_artifacts = { hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { 'package_source': { 'sha1': "something-wrong" } }, hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { 'package_source': { 'sha1': '05181c12cd8c22035dd31155656826b85745da37', } } } assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ == hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116') def test_npm_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ artifact_metadata = { 'dist': { 'shasum': '05181c12cd8c22035dd31155656826b85745da37', } } known_artifacts = { hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { 'original_artifact': [{ 'checksums': { 'sha1': "05181c12cd8c22035dd31155656826b85745da37" }, }], }, hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { 'original_artifact': [{ 'checksums': { 'sha1': 'something-wrong' }, }], }, } assert 
artifact_to_revision_id(known_artifacts, artifact_metadata) \ == hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92') def test_npm_artifact_with_no_intrinsic_metadata( swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ package = 'nativescript-telerik-analytics' url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' # no branch as one artifact without any intrinsic metadata expected_snapshot = { 'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', 'branches': {}, } check_snapshot(expected_snapshot, loader.storage) origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'npm' def test_npm_artifact_with_no_upload_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ package = 'jammit-no-time' url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'failed' # no branch as one artifact without any intrinsic metadata expected_snapshot = { 'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', 'branches': {}, } check_snapshot(expected_snapshot, loader.storage) origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'partial' assert origin_visit['type'] == 'npm' def test_npm_artifact_use_mtime_if_no_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ package = 'jammit-express' url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' # artifact is used expected_snapshot = { 'id': 'd6e08e19159f77983242877c373c75222d5ae9dd', 'branches': { 'HEAD': { 'target_type': 'alias', 'target': 'releases/0.0.1' }, 'releases/0.0.1': { 'target_type': 'revision', 'target': '9e4dd2b40d1b46b70917c0949aa2195c823a648e', } } } check_snapshot(expected_snapshot, loader.storage) origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'npm' diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index aa330aa..f7fe35a 100644 --- a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,247 +1,250 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import logging from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from urllib.parse import urlparse from pkginfo import UnpackedSDist -import iso8601 +from swh.model.model import ( + Person, Sha1Git, TimestampWithTimezone, Revision, RevisionType +) -from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader -from swh.loader.package.utils import api_info, release_name +from swh.loader.package.utils import api_info, release_name, EMPTY_AUTHOR logger = logging.getLogger(__name__) class PyPILoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
""" visit_type = 'pypi' def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return self.info['releases'].keys() def get_default_version(self) -> str: return self.info['info']['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: res = [] for meta in self.info['releases'][version]: if meta['packagetype'] != 'sdist': continue filename = meta['filename'] p_info = { 'url': meta['url'], 'filename': filename, 'raw': meta, } res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info['filename']), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, artifact_metadata) def build_revision( - self, a_metadata: Dict, uncompressed_path: str) -> Dict: + self, a_metadata: Dict, uncompressed_path: str, + directory: Sha1Git) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: - return {} + return None # from intrinsic metadata name = i_metadata['version'] _author = author(i_metadata) # from extrinsic metadata message = a_metadata.get('comment_text', '') message = '%s: %s' % (name, message) if message else name - date = normalize_timestamp( - int(iso8601.parse_date(a_metadata['upload_time']).timestamp())) - - return { - 'type': 'tar', - 'message': message.encode('utf-8'), - 'author': _author, - 'date': date, - 'committer': _author, - 'committer_date': date, - 'parents': [], - 'metadata': { + date = TimestampWithTimezone.from_iso8601(a_metadata['upload_time']) + + return Revision( + type=RevisionType.TAR, + message=message.encode('utf-8'), + author=_author, + date=date, + committer=_author, + committer_date=date, + parents=[], + directory=directory, + synthetic=True, + metadata={ 'intrinsic': { 'tool': 'PKG-INFO', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } - } + ) def artifact_to_revision_id( known_artifacts: Dict, artifact_metadata: Dict) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats (column metadata in 'revision') - old format sample:: { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, ... } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... } """ sha256 = artifact_metadata['digests']['sha256'] for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact['original_artifact'] if isinstance(original_artifact, dict): # previous loader-pypi version stored metadata as dict original_sha256 = original_artifact['sha256'] if sha256 == original_sha256: return rev_id continue # new pypi loader actually store metadata dict differently... 
assert isinstance(original_artifact, list) # current loader-pypi stores metadata as list of dict for original_artifact in known_artifact['original_artifact']: if sha256 == original_artifact['checksums']['sha256']: return rev_id return None def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.rstrip('/').split('/')[-1] url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO') if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop('filename') # this gets added with the ondisk location return raw -def author(data: Dict) -> Dict: +def author(data: Dict) -> Person: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. 
""" name = data.get('author') email = data.get('author_email') fullname = None # type: Optional[str] if email: fullname = '%s <%s>' % (name, email) else: fullname = name if not fullname: - return {'fullname': b'', 'name': None, 'email': None} + return EMPTY_AUTHOR if name is not None: name = name.encode('utf-8') if email is not None: email = email.encode('utf-8') - return { - 'fullname': fullname.encode('utf-8'), - 'name': name, - 'email': email - } + return Person( + fullname=fullname.encode('utf-8'), + name=name, + email=email + ) diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py index f0cff08..116eede 100644 --- a/swh/loader/package/pypi/tests/test_pypi.py +++ b/swh/loader/package/pypi/tests/test_pypi.py @@ -1,830 +1,834 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from os import path import pytest from unittest.mock import patch from swh.core.tarball import uncompress from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.model.hashutil import hash_to_bytes +from swh.model.model import Person from swh.loader.package.pypi.loader import ( PyPILoader, pypi_api_url, author, extract_intrinsic_metadata, artifact_to_revision_id ) from swh.loader.package.tests.common import ( check_snapshot, check_metadata_paths, get_stats ) def test_author_basic(): data = { 'author': "i-am-groot", 'author_email': 'iam@groot.org', } actual_author = author(data) - expected_author = { - 'fullname': b'i-am-groot ', - 'name': b'i-am-groot', - 'email': b'iam@groot.org', - } + expected_author = Person( + fullname=b'i-am-groot ', + name=b'i-am-groot', + email=b'iam@groot.org', + ) assert actual_author == expected_author def test_author_empty_email(): data = { 'author': 'i-am-groot', 'author_email': '', } actual_author = author(data) - expected_author = { - 'fullname': b'i-am-groot', - 'name': b'i-am-groot', - 'email': b'', - } + expected_author = Person( + fullname=b'i-am-groot', + name=b'i-am-groot', + email=b'', + ) assert actual_author == expected_author def test_author_empty_name(): data = { 'author': "", 'author_email': 'iam@groot.org', } actual_author = author(data) - expected_author = { - 'fullname': b' ', - 'name': b'', - 'email': b'iam@groot.org', - } + expected_author = Person( + fullname=b' ', + name=b'', + email=b'iam@groot.org', + ) assert actual_author == expected_author def test_author_malformed(): data = { 'author': "['pierre', 'paul', 'jacques']", 'author_email': None, } actual_author = author(data) - expected_author = { - 'fullname': b"['pierre', 'paul', 'jacques']", - 'name': b"['pierre', 'paul', 'jacques']", - 'email': None, - } + expected_author = Person( + fullname=b"['pierre', 'paul', 'jacques']", + name=b"['pierre', 'paul', 'jacques']", + email=None, + ) assert actual_author == expected_author def test_author_malformed_2(): data = { 'author': '[marie, jeanne]', 'author_email': '[marie@some, jeanne@thing]', } actual_author = author(data) - expected_author = { - 'fullname': b'[marie, jeanne] <[marie@some, jeanne@thing]>', - 'name': b'[marie, jeanne]', - 'email': b'[marie@some, jeanne@thing]', - } + expected_author = Person( + fullname=b'[marie, jeanne] <[marie@some, jeanne@thing]>', + name=b'[marie, jeanne]', + email=b'[marie@some, jeanne@thing]', + ) assert actual_author == expected_author def 
test_author_malformed_3(): data = { 'author': '[marie, jeanne, pierre]', 'author_email': '[marie@somewhere.org, jeanne@somewhere.org]', } actual_author = author(data) - expected_author = { - 'fullname': b'[marie, jeanne, pierre] <[marie@somewhere.org, jeanne@somewhere.org]>', # noqa - 'name': b'[marie, jeanne, pierre]', - 'email': b'[marie@somewhere.org, jeanne@somewhere.org]', - } + expected_author = Person( + fullname=( + b'[marie, jeanne, pierre] ' + b'<[marie@somewhere.org, jeanne@somewhere.org]>' + ), + name=b'[marie, jeanne, pierre]', + email=b'[marie@somewhere.org, jeanne@somewhere.org]', + ) actual_author == expected_author # configuration error # def test_badly_configured_loader_raise(monkeypatch): """Badly configured loader should raise""" monkeypatch.delenv('SWH_CONFIG_FILENAME', raising=False) with pytest.raises(ValueError) as e: PyPILoader(url='some-url') assert 'Misconfiguration' in e.value.args[0] def test_pypi_api_url(): """Compute pypi api url from the pypi project url should be ok""" url = pypi_api_url('https://pypi.org/project/requests') assert url == 'https://pypi.org/pypi/requests/json' def test_pypi_api_url_with_slash(): """Compute pypi api url from the pypi project url should be ok""" url = pypi_api_url('https://pypi.org/project/requests/') assert url == 'https://pypi.org/pypi/requests/json' @pytest.mark.fs def test_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) archive_path = path.join( datadir, 'https_files.pythonhosted.org', '0805nexter-1.1.0.zip') uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { 'metadata_version': '1.0', 'name': '0805nexter', 'version': '1.1.0', 'summary': 'a simple printer of nested lest', 'home_page': 'http://www.hp.com', 'author': 'hgtkpython', 'author_email': '2868989685@qq.com', 'platforms': ['UNKNOWN'], } assert actual_metadata == expected_metadata @pytest.mark.fs def test_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue) # inexistent first level path assert extract_intrinsic_metadata('/something-inexistent') == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path existing_path_no_pkginfo = path.join(tmp_path, 'something') os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} # LOADER SCENARIO # # "edge" cases (for the same origin) # # no release artifact: # {visit full, status: uneventful, no contents, etc...} requests_mock_datadir_missing_all = requests_mock_datadir_factory(ignore_urls=[ 'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa 'https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip', # noqa ]) def test_no_release_artifact(swh_config, requests_mock_datadir_missing_all): """Load a pypi project with all artifacts missing ends up with no snapshot """ url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'uneventful' assert actual_load_status['snapshot_id'] is not None stats = get_stats(loader.storage) assert { 'content': 0, 
'directory': 0, 'origin': 1, 'origin_visit': 1, 'person': 0, 'release': 0, 'revision': 0, 'skipped_content': 0, 'snapshot': 1, } == stats origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'partial' assert origin_visit['type'] == 'pypi' # problem during loading: # {visit: partial, status: uneventful, no snapshot} def test_release_with_traceback(swh_config): url = 'https://pypi.org/project/0805nexter' with patch('swh.loader.package.pypi.loader.PyPILoader.get_default_version', side_effect=ValueError('Problem')): loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status == {'status': 'failed'} stats = get_stats(loader.storage) assert { 'content': 0, 'directory': 0, 'origin': 1, 'origin_visit': 1, 'person': 0, 'release': 0, 'revision': 0, 'skipped_content': 0, 'snapshot': 0, } == stats origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'partial' assert origin_visit['type'] == 'pypi' # problem during loading: failure early enough in between swh contents... # some contents (contents, directories, etc...) have been written in storage # {visit: partial, status: eventful, no snapshot} # problem during loading: failure late enough we can have snapshots (some # revisions are written in storage already) # {visit: partial, status: eventful, snapshot} # "normal" cases (for the same origin) # requests_mock_datadir_missing_one = requests_mock_datadir_factory(ignore_urls=[ 'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa ]) # some missing release artifacts: # {visit partial, status: eventful, 1 snapshot} def test_revision_metadata_structure(swh_config, requests_mock_datadir): url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status['status'] == 'eventful' assert actual_load_status['snapshot_id'] is not None expected_revision_id = hash_to_bytes( 'e445da4da22b31bfebb6ffc4383dbf839a074d21') revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None check_metadata_paths(revision['metadata'], paths=[ ('intrinsic.tool', str), ('intrinsic.raw', dict), ('extrinsic.provider', str), ('extrinsic.when', str), ('extrinsic.raw', dict), ('original_artifact', list), ]) for original_artifact in revision['metadata']['original_artifact']: check_metadata_paths(original_artifact, paths=[ ('filename', str), ('length', int), ('checksums', dict), ]) def test_visit_with_missing_artifact( swh_config, requests_mock_datadir_missing_one): """Load a pypi project with some missing artifacts ends up with 1 snapshot """ url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = 'dd0e4201a232b1c104433741dbf45895b8ac9355' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } stats = get_stats(loader.storage) assert { 'content': 3, 'directory': 2, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 1, 'skipped_content': 0, 'snapshot': 1 } == stats expected_contents = map(hash_to_bytes, [ '405859113963cb7a797642b45f171d6360425d16', 'e5686aa568fdb1d19d7f1329267082fe40482d31', '83ecf6ec1114fd260ca7a833a2d165e71258c338', ]) assert list(loader.storage.content_missing_per_sha1(expected_contents))\ == [] expected_dirs = map(hash_to_bytes, [ 'b178b66bd22383d5f16f4f5c923d39ca798861b4', 
'c3a58f8b57433a4b56caaa5033ae2e0931405338', ]) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { 'releases/1.2.0': { 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', 'target_type': 'revision', }, 'HEAD': { 'target': 'releases/1.2.0', 'target_type': 'alias', }, } expected_snapshot = { 'id': expected_snapshot_id, 'branches': expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'partial' assert origin_visit['type'] == 'pypi' def test_visit_with_1_release_artifact(swh_config, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } stats = get_stats(loader.storage) assert { 'content': 6, 'directory': 4, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 2, 'skipped_content': 0, 'snapshot': 1 } == stats expected_contents = map(hash_to_bytes, [ 'a61e24cdfdab3bb7817f6be85d37a3e666b34566', '938c33483285fd8ad57f15497f538320df82aeb8', 'a27576d60e08c94a05006d2e6d540c0fdb5f38c8', '405859113963cb7a797642b45f171d6360425d16', 'e5686aa568fdb1d19d7f1329267082fe40482d31', '83ecf6ec1114fd260ca7a833a2d165e71258c338', ]) assert list(loader.storage.content_missing_per_sha1(expected_contents))\ == [] expected_dirs = map(hash_to_bytes, [ '05219ba38bc542d4345d5638af1ed56c7d43ca7d', 'cf019eb456cf6f78d8c4674596f1c9a97ece8f44', 'b178b66bd22383d5f16f4f5c923d39ca798861b4', 'c3a58f8b57433a4b56caaa5033ae2e0931405338', ]) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { 'releases/1.1.0': { 'target': '4c99891f93b81450385777235a37b5e966dd1571', 'target_type': 'revision', }, 'releases/1.2.0': { 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', 'target_type': 'revision', }, 'HEAD': { 'target': 'releases/1.2.0', 'target_type': 'alias', }, } expected_snapshot = { 'id': expected_snapshot_id, 'branches': expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'pypi' def test_multiple_visits_with_no_change(swh_config, requests_mock_datadir): """Multiple visits with no changes results in 1 same snapshot """ url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) actual_load_status = loader.load() snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': snapshot_id, } stats = get_stats(loader.storage) assert { 'content': 6, 'directory': 4, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 
0, 'revision': 2, 'skipped_content': 0, 'snapshot': 1 } == stats expected_branches = { 'releases/1.1.0': { 'target': '4c99891f93b81450385777235a37b5e966dd1571', 'target_type': 'revision', }, 'releases/1.2.0': { 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', 'target_type': 'revision', }, 'HEAD': { 'target': 'releases/1.2.0', 'target_type': 'alias', }, } expected_snapshot = { 'id': snapshot_id, 'branches': expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'pypi' actual_load_status2 = loader.load() assert actual_load_status2 == { 'status': 'uneventful', 'snapshot_id': actual_load_status2['snapshot_id'] } stats2 = get_stats(loader.storage) expected_stats2 = stats.copy() expected_stats2['origin_visit'] = 1 + 1 assert expected_stats2 == stats2 # same snapshot actual_snapshot_id = origin_visit['snapshot'] assert actual_snapshot_id == hash_to_bytes(snapshot_id) def test_incremental_visit(swh_config, requests_mock_datadir_visits): """With prior visit, 2nd load will result with a different snapshot """ url = 'https://pypi.org/project/0805nexter' loader = PyPILoader(url) visit1_actual_load_status = loader.load() visit1_stats = get_stats(loader.storage) expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' assert visit1_actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id } origin_visit1 = next(loader.storage.origin_visit_get(url)) assert origin_visit1['status'] == 'full' assert origin_visit1['type'] == 'pypi' assert { 'content': 6, 'directory': 4, 'origin': 1, 'origin_visit': 1, 'person': 1, 'release': 0, 'revision': 2, 'skipped_content': 0, 'snapshot': 1 } == visit1_stats # Reset internal state loader._info = None visit2_actual_load_status = loader.load() visit2_stats = get_stats(loader.storage) assert visit2_actual_load_status['status'] == 'eventful' expected_snapshot_id2 = '2e5149a7b0725d18231a37b342e9b7c4e121f283' assert visit2_actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id2 } visits = list(loader.storage.origin_visit_get(url)) assert len(visits) == 2 assert visits[1]['status'] == 'full' assert visits[1]['type'] == 'pypi' assert { 'content': 6 + 1, # 1 more content 'directory': 4 + 2, # 2 more directories 'origin': 1, 'origin_visit': 1 + 1, 'person': 1, 'release': 0, 'revision': 2 + 1, # 1 more revision 'skipped_content': 0, 'snapshot': 1 + 1, # 1 more snapshot } == visit2_stats expected_contents = map(hash_to_bytes, [ 'a61e24cdfdab3bb7817f6be85d37a3e666b34566', '938c33483285fd8ad57f15497f538320df82aeb8', 'a27576d60e08c94a05006d2e6d540c0fdb5f38c8', '405859113963cb7a797642b45f171d6360425d16', 'e5686aa568fdb1d19d7f1329267082fe40482d31', '83ecf6ec1114fd260ca7a833a2d165e71258c338', '92689fa2b7fb4d4fc6fb195bf73a50c87c030639' ]) assert list(loader.storage.content_missing_per_sha1(expected_contents))\ == [] expected_dirs = map(hash_to_bytes, [ '05219ba38bc542d4345d5638af1ed56c7d43ca7d', 'cf019eb456cf6f78d8c4674596f1c9a97ece8f44', 'b178b66bd22383d5f16f4f5c923d39ca798861b4', 'c3a58f8b57433a4b56caaa5033ae2e0931405338', 'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a', '52604d46843b898f5a43208045d09fcf8731631b', ]) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa 
hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa hash_to_bytes('51247143b01445c9348afa9edfae31bf7c5d86b1'): hash_to_bytes('e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a'), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { 'releases/1.1.0': { 'target': '4c99891f93b81450385777235a37b5e966dd1571', 'target_type': 'revision', }, 'releases/1.2.0': { 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', 'target_type': 'revision', }, 'releases/1.3.0': { 'target': '51247143b01445c9348afa9edfae31bf7c5d86b1', 'target_type': 'revision', }, 'HEAD': { 'target': 'releases/1.3.0', 'target_type': 'alias', }, } expected_snapshot = { 'id': expected_snapshot_id2, 'branches': expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = list(loader.storage.origin_visit_get(url))[-1] assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'pypi' urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith('https://files.pythonhosted.org') ] # visited each artifact once across 2 visits assert len(urls) == len(set(urls)) # release artifact, no new artifact # {visit full, status uneventful, same snapshot as before} # release artifact, old artifact with different checksums # {visit full, status full, new snapshot with shared history and some new # different history} # release with multiple sdist artifacts per pypi "version" # snapshot branch output is different def test_visit_1_release_with_2_artifacts(swh_config, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = 'https://pypi.org/project/nexter' loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = 'a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id, } expected_branches = { 'releases/1.1.0/nexter-1.1.0.zip': { 'target': '4c99891f93b81450385777235a37b5e966dd1571', 'target_type': 'revision', }, 'releases/1.1.0/nexter-1.1.0.tar.gz': { 'target': '0bf88f5760cca7665d0af4d6575d9301134fe11a', 'target_type': 'revision', }, } expected_snapshot = { 'id': expected_snapshot_id, 'branches': expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'pypi' def test_pypi_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ artifact_metadata = { 'digests': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa }, } assert artifact_to_revision_id({}, artifact_metadata) is None known_artifacts = { 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92': { 'original_artifact': { 'sha256': 'something-irrelevant', }, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_pypi_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ artifact_metadata = { 'digests': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa } } known_artifacts = { hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { 'original_artifact': { 'sha256': "something-wrong", }, }, hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa 
}, } } assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ == hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116') def test_pypi_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ artifact_metadata = { 'digests': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa } } known_artifacts = { hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa }, }], }, hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { 'original_artifact': [{ 'checksums': { 'sha256': 'something-wrong' }, }], }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ == hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92') def test_pypi_artifact_to_revision_id_failures(): with pytest.raises(KeyError, match='sha256'): artifact_metadata = { 'digests': {}, } assert artifact_to_revision_id({}, artifact_metadata) with pytest.raises(KeyError, match='digests'): artifact_metadata = { 'something': 'wrong', } assert artifact_to_revision_id({}, artifact_metadata) def test_pypi_artifact_with_no_intrinsic_metadata( swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ url = 'https://pypi.org/project/upymenu' loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e' assert actual_load_status == { 'status': 'eventful', 'snapshot_id': expected_snapshot_id, } # no branch as one artifact without any intrinsic metadata expected_snapshot = { 'id': expected_snapshot_id, 'branches': {} } check_snapshot(expected_snapshot, loader.storage) origin_visit = next(loader.storage.origin_visit_get(url)) assert origin_visit['status'] == 'full' assert origin_visit['type'] == 'pypi' diff --git a/swh/loader/package/tests/test_common.py b/swh/loader/package/tests/test_common.py index c430bf9..83bffc1 100644 --- a/swh/loader/package/tests/test_common.py +++ b/swh/loader/package/tests/test_common.py @@ -1,188 +1,186 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from swh.model.hashutil import hash_to_bytes +from swh.model.model import Snapshot, SnapshotBranch, TargetType from swh.loader.package.tests.common import ( decode_target, check_snapshot, check_metadata, check_metadata_paths ) from swh.storage import get_storage hash_hex = '43e45d56f88993aae6a0198013efa80716fd8920' storage_config = { 'cls': 'pipeline', 'steps': [ - { - 'cls': 'validate', - }, { 'cls': 'memory', } ] } def test_decode_target_edge(): assert not decode_target(None) def test_decode_target(): actual_alias_decode_target = decode_target({ 'target_type': 'alias', 'target': b'something', }) assert actual_alias_decode_target == { 'target_type': 'alias', 'target': 'something', } actual_decode_target = decode_target({ 'target_type': 'revision', 'target': hash_to_bytes(hash_hex), }) assert actual_decode_target == { 'target_type': 'revision', 'target': hash_hex, } def test_check_snapshot(): storage = get_storage(**storage_config) snap_id = '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7' - snapshot = { - 'id': hash_to_bytes(snap_id), - 'branches': { - b'master': { - 'target': 
hash_to_bytes(hash_hex), - 'target_type': 'revision', - }, + snapshot = Snapshot( + id=hash_to_bytes(snap_id), + branches={ + b'master': SnapshotBranch( + target=hash_to_bytes(hash_hex), + target_type=TargetType.REVISION, + ), }, - } + ) s = storage.snapshot_add([snapshot]) assert s == { 'snapshot:add': 1, } expected_snapshot = { 'id': snap_id, 'branches': { 'master': { 'target': hash_hex, 'target_type': 'revision', } } } check_snapshot(expected_snapshot, storage) def test_check_snapshot_failure(): storage = get_storage(**storage_config) - snapshot = { - 'id': hash_to_bytes('2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'), - 'branches': { - b'master': { - 'target': hash_to_bytes(hash_hex), - 'target_type': 'revision', - }, + snapshot = Snapshot( + id=hash_to_bytes('2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'), + branches={ + b'master': SnapshotBranch( + target=hash_to_bytes(hash_hex), + target_type=TargetType.REVISION, + ), }, - } + ) s = storage.snapshot_add([snapshot]) assert s == { 'snapshot:add': 1, } unexpected_snapshot = { 'id': '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7', 'branches': { 'master': { 'target': hash_hex, 'target_type': 'release', # wrong value } } } with pytest.raises(AssertionError): check_snapshot(unexpected_snapshot, storage) def test_check_metadata(): metadata = { 'a': { 'raw': { 'time': 'something', }, }, 'b': [], 'c': 1, } for raw_path, raw_type in [ ('a.raw', dict), ('a.raw.time', str), ('b', list), ('c', int), ]: check_metadata(metadata, raw_path, raw_type) def test_check_metadata_ko(): metadata = { 'a': { 'raw': 'hello', }, 'b': [], 'c': 1, } for raw_path, raw_type in [ ('a.b', dict), ('a.raw.time', str), ]: with pytest.raises(AssertionError): check_metadata(metadata, raw_path, raw_type) def test_check_metadata_paths(): metadata = { 'a': { 'raw': { 'time': 'something', }, }, 'b': [], 'c': 1, } check_metadata_paths(metadata, [ ('a.raw', dict), ('a.raw.time', str), ('b', list), ('c', int), ]) def test_check_metadata_paths_ko(): metadata = { 'a': { 'raw': 'hello', }, 'b': [], 'c': 1, } with pytest.raises(AssertionError): check_metadata_paths(metadata, [ ('a.b', dict), ('a.raw.time', str), ]) diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index eff017f..a93ee49 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,213 +1,219 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import logging import os import requests import re from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple from swh.model.hashutil import MultiHash, HASH_BLOCK_SIZE +from swh.model.model import Person + from swh.loader.package import DEFAULT_PARAMS logger = logging.getLogger(__name__) DOWNLOAD_HASHES = set(['sha1', 'sha256', 'length']) # https://github.com/jonschlinkert/author-regex _author_regexp = r'([^<(]+?)?[ \t]*(?:<([^>(]+?)>)?[ \t]*(?:\(([^)]+?)\)|$)' -_EMPTY_AUTHOR = {'fullname': b'', 'name': None, 'email': None} +EMPTY_AUTHOR = Person( + fullname=b'', + name=None, + email=None, +) def api_info(url: str) -> Dict: """Basic api client to retrieve information on project. This deals with fetching json metadata about pypi projects. Args: url (str): The api url (e.g PyPI, npm, etc...) Raises: ValueError in case of query failures (for some reasons: 404, ...) 
    Returns:
        The associated response's information dict

    """
    response = requests.get(url, **DEFAULT_PARAMS)
    if response.status_code != 200:
        raise ValueError("Fail to query '%s'. Reason: %s" % (
            url, response.status_code))
    return response.json()


def download(url: str, dest: str, hashes: Dict = {},
             filename: Optional[str] = None,
             auth: Optional[Tuple[str, str]] = None) -> Tuple[str, Dict]:
    """Download a remote file from url and compute swh hashes on it.

    Args:
        url: Artifact uri to fetch and hash
        dest: Directory to write the archive to
        hashes: Dict of expected hashes (key is the hash algo) for the
            artifact to download (those hashes are expected to be hex string)
        auth: Optional tuple of login/password (for http authentication
            service, e.g. deposit)

    Raises:
        ValueError in case of any error when fetching/computing (length,
        checksums mismatched...)

    Returns:
        Tuple of (local filepath, extrinsic metadata dict with the computed
        hashes)

    """
    params = copy.deepcopy(DEFAULT_PARAMS)
    if auth is not None:
        params['auth'] = auth
    response = requests.get(url, **params, stream=True)
    if response.status_code != 200:
        raise ValueError("Fail to query '%s'. Reason: %s" % (
            url, response.status_code))

    filename = filename if filename else os.path.basename(url)
    logger.debug('filename: %s', filename)
    filepath = os.path.join(dest, filename)
    logger.debug('filepath: %s', filepath)

    h = MultiHash(hash_names=DOWNLOAD_HASHES)
    with open(filepath, 'wb') as f:
        for chunk in response.iter_content(chunk_size=HASH_BLOCK_SIZE):
            h.update(chunk)
            f.write(chunk)

    # Also check the expected hashes if provided
    if hashes:
        actual_hashes = h.hexdigest()
        for algo_hash in hashes.keys():
            actual_digest = actual_hashes[algo_hash]
            expected_digest = hashes[algo_hash]
            if actual_digest != expected_digest:
                raise ValueError(
                    'Failure when fetching %s. '
                    'Checksum mismatched: %s != %s' % (
                        url, expected_digest, actual_digest))

    computed_hashes = h.hexdigest()
    length = computed_hashes.pop('length')
    extrinsic_metadata = {
        'length': length,
        'filename': filename,
        'checksums': computed_hashes,
    }
    logger.debug('extrinsic_metadata: %s', extrinsic_metadata)

    return filepath, extrinsic_metadata


def release_name(version: str, filename: Optional[str] = None) -> str:
    if filename:
        return 'releases/%s/%s' % (version, filename)
    return 'releases/%s' % version


def parse_author(author_str: str) -> Dict[str, str]:
    """
    Parse npm package author string.
    It works with a flexible range of formats, as detailed below::

        name name (url) name (url) name (url) name(url) name (url) name (url) name(url) name(url) name (url) name(url) name name (url) (url) (url) (url) (url)

    Args:
        author_str (str): input author string

    Returns:
        dict: A dict that may contain the following keys:
            * name
            * email
            * url

    """
    author = {}
    matches = re.findall(_author_regexp,
                         author_str.replace('<>', '').replace('()', ''),
                         re.M)
    for match in matches:
        if match[0].strip():
            author['name'] = match[0].strip()
        if match[1].strip():
            author['email'] = match[1].strip()
        if match[2].strip():
            author['url'] = match[2].strip()
    return author


-def swh_author(author: Dict[str, str]) -> Dict[str, Optional[bytes]]:
+def swh_author(author: Dict[str, str]) -> Person:
    """Transform an author-like dict into a swh Person model object

    """
    name = author.get('name')
    email = author.get('email')

    fullname = None
    if name and email:
        fullname = '%s <%s>' % (name, email)
    elif name:
        fullname = name

    if not fullname:
-        r = _EMPTY_AUTHOR
+        r = EMPTY_AUTHOR
    else:
-        r = {
-            'fullname': fullname.encode('utf-8') if fullname else None,
-            'name': name.encode('utf-8') if name else None,
-            'email': email.encode('utf-8') if email else None
-        }
+        r = Person(
+            fullname=fullname.encode('utf-8') if fullname else b'',
+            name=name.encode('utf-8') if name else None,
+            email=email.encode('utf-8') if email else None
+        )
    return r


def artifact_identity(d: Mapping[str, Any],
                      id_keys: Sequence[str]) -> List[Any]:
    """Compute the primary key for a dict using the id_keys as primary key
       composite.

    Args:
        d: A dict entry to compute the primary key on
        id_keys: Sequence of keys to use as primary key

    Returns:
        The identity for that dict entry

    """
    return [d.get(k) for k in id_keys]
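
The download helper above both fetches an artifact and fingerprints it in one pass. As an illustrative sketch (not part of this patch), the snippet below shows how a loader-like caller might use it to enforce a known sha256 before ingestion; fetch_verified, the URL and the digest are hypothetical placeholders, not real artifacts.

import tempfile

from swh.loader.package.utils import download


def fetch_verified(url: str, expected_sha256: str):
    """Fetch ``url`` into a temporary directory; ``download`` raises
    ValueError if the computed sha256 does not match the expected one."""
    dest = tempfile.mkdtemp()
    filepath, extrinsic = download(
        url, dest, hashes={'sha256': expected_sha256})
    # ``extrinsic`` carries 'length', 'filename' and 'checksums', the same
    # kind of data the pypi tests above inspect under 'original_artifact'.
    return filepath, extrinsic['checksums']


# Hypothetical call (placeholders, no real network target implied):
# fetch_verified('https://files.pythonhosted.org/.../some-artifact.zip',
#                '<known sha256 hex digest>')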
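
Since swh_author now returns a Person model object rather than a plain dict, a small sanity check like the following illustrates the round trip from an npm-style author string to a Person. This is a sketch written against the code shown above, not a test from this patch; the test name and the example author string are made up.

from swh.model.model import Person

from swh.loader.package.utils import parse_author, swh_author, EMPTY_AUTHOR


def test_swh_author_from_npm_string():
    # npm-style "name <email> (url)" author string
    author = parse_author('John Doe <jdoe@example.com> (https://example.com)')
    assert author == {
        'name': 'John Doe',
        'email': 'jdoe@example.com',
        'url': 'https://example.com',
    }

    person = swh_author(author)
    assert person == Person(
        fullname=b'John Doe <jdoe@example.com>',
        name=b'John Doe',
        email=b'jdoe@example.com',
    )

    # An unparseable author string falls back to the empty Person
    assert swh_author(parse_author('')) == EMPTY_AUTHOR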
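
Similarly, release_name and artifact_identity are small pure helpers; the sketch below (again illustrative, with made-up artifact metadata, the sha256 borrowed from the pypi tests above) shows how loader-style callers can build branch names and de-duplicate artifacts across visits.

from swh.loader.package.utils import artifact_identity, release_name

# Made-up artifact metadata in the shape the pypi loader manipulates
artifact = {
    'filename': '0805nexter-1.2.0.zip',
    'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
    'version': '1.2.0',
}

# Branch names such as 'releases/1.1.0/nexter-1.1.0.zip' in the tests above
# are built this way:
assert release_name('1.2.0') == 'releases/1.2.0'
assert (release_name('1.2.0', '0805nexter-1.2.0.zip')
        == 'releases/1.2.0/0805nexter-1.2.0.zip')

# The identity is just the projection on the chosen key composite; missing
# keys come back as None, so the caller decides which keys are significant.
assert artifact_identity(artifact, id_keys=['filename', 'sha256']) == [
    '0805nexter-1.2.0.zip',
    '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',
]
assert artifact_identity(artifact, id_keys=['missing-key']) == [None]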