diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index ff1c8cb..b52811d 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,401 +1,406 @@
# Copyright (C) 2019  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import datetime
import logging
import tempfile
import os

from typing import (
    Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple
)

from swh.core.tarball import uncompress
from swh.core.config import SWHConfig
from swh.model.from_disk import Directory
from swh.model.hashutil import hash_to_hex
from swh.model.identifiers import (
    revision_identifier, snapshot_identifier, identifier_to_bytes
)
from swh.storage import get_storage
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.loader.core.converters import content_for_storage
from swh.loader.package.utils import download


logger = logging.getLogger(__name__)


# Not implemented yet:
# - clean up disk routines from previous killed workers (when OOMkilled)
#   -> separation of concern would like this to be abstracted from the code
#   -> experience tells us it's complicated to do as such (T903, T964, T982,
#      etc...)
#
# - model: swh.model.merkle.from_disk should output swh.model.model.* objects
#   to avoid this layer's conversion routine call
#   -> Take this up within swh.model's current implementation


class PackageLoader:
    # Origin visit type (str) set by the loader
    visit_type = ''

    def __init__(self, url):
        """Loader's constructor. This raises an exception if the minimal
           required configuration is missing (cf. the
           :func:`_check_configuration` method).

        Args:
            url (str): Origin url to load data from

        """
        # This expects to use the environment variable SWH_CONFIG_FILENAME
        self.config = SWHConfig.parse_config_file()
        self._check_configuration()
        self.storage = get_storage(**self.config['storage'])
        self.url = url
        self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)

    def _check_configuration(self):
        """Checks that the minimal configuration required for the loader is
           set. If some required configuration is missing, an exception
           detailing the issue is raised.

        """
        if 'storage' not in self.config:
            raise ValueError(
                'Misconfiguration, at least the storage key should be set')

    def get_versions(self) -> Sequence[str]:
        """Return the list of all published package versions.

        Returns:
            Sequence of published versions

        """
        return []

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        """Given a release version of a package, retrieve the associated
           package information for that version.

        Args:
            version: Package version

        Returns:
            (branch name, package metadata)

        """
        yield from {}

    def build_revision(
            self, a_metadata: Dict, uncompressed_path: str) -> Dict:
        """Build the revision dict from the archive metadata (extrinsic
           artifact metadata) and the intrinsic metadata.

        Args:
            a_metadata: Artifact metadata
            uncompressed_path: Artifact uncompressed path on disk

        Returns:
            SWH data dict

        """
        return {}

    def get_default_version(self) -> str:
        """Retrieve the latest release version if any.

        Returns:
            Latest version

        """
        return ''

    def last_snapshot(self) -> Optional[Dict]:
        """Retrieve the last snapshot

        """
        snapshot = None
        visit = self.storage.origin_visit_get_latest(
            self.url, require_snapshot=True)
        if visit:
            snapshot = snapshot_get_all_branches(
                self.storage, visit['snapshot'])
        return snapshot

    def known_artifacts(self, snapshot: Optional[Dict]) -> Dict:
        """Retrieve the known releases/artifacts for the origin.

        Args:
            snapshot: snapshot for the visit

        Returns:
            Dict mapping revision ids (bytes) to their metadata dict.

        """
        if not snapshot or 'branches' not in snapshot:
            return {}

        # retrieve only revisions (e.g the alias we do not want here)
        revs = [rev['target']
                for rev in snapshot['branches'].values()
                if rev and rev['target_type'] == 'revision']
        known_revisions = self.storage.revision_get(revs)

        ret = {}
        for revision in known_revisions:
            if not revision:  # revision_get can return None
                continue
            ret[revision['id']] = revision['metadata']
        return ret

    def resolve_revision_from(
            self, known_artifacts: Dict, artifact_metadata: Dict) \
            -> Optional[bytes]:
        """Resolve the revision from the known artifacts and an artifact
           metadata dict.

        If the artifact has already been downloaded, this will return the
        existing revision targeting that uncompressed artifact directory.
        Otherwise, this returns None.

        Args:
            known_artifacts: Mapping of revision id to artifact metadata
            artifact_metadata: Information dict

        Returns:
            None or revision identifier

        """
        return None

    def download_package(self, p_info: Mapping[str, Any],
                         tmpdir: str) -> List[Tuple[str, Mapping]]:
        """Download artifacts for a specific package. All downloads happen
           in the tmpdir folder.

        The default implementation expects the package info to describe one
        artifact per package.

        Note that most implementations have 1 artifact per package. But some
        implementations have multiple artifacts per package (debian), and
        some have none, the package being the artifact (gnu).

        Args:
            p_info: Information on the package artifact to download
                (url, filename, etc...)
            tmpdir: Location to retrieve such artifacts

        Returns:
            List of (path, computed hashes)

        """
        a_uri = p_info['url']
        filename = p_info.get('filename')
        return [download(a_uri, dest=tmpdir, filename=filename)]

    def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]],
                   dest: str) -> str:
        """Uncompress the artifact(s) in the destination folder dest.

        Optionally, this could need to use the p_info dict for some more
        information (debian).

        """
        uncompressed_path = os.path.join(dest, 'src')
        for a_path, _ in dl_artifacts:
            uncompress(a_path, dest=uncompressed_path)
        return uncompressed_path
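To make the hook contract above concrete, here is a minimal sketch of a loader subclass. Everything about the `toy` forge (its visit type, URL scheme, version list and metadata fields) is hypothetical; only the `PackageLoader` hooks and the `release_name`/`normalize_timestamp` helpers come from this codebase, and instantiating it still needs the usual `SWH_CONFIG_FILENAME` configuration with a `storage` key.

```python
from typing import Any, Dict, Generator, Mapping, Sequence, Tuple

from swh.model.identifiers import normalize_timestamp
from swh.loader.package.loader import PackageLoader
from swh.loader.package.utils import release_name


class ToyLoader(PackageLoader):
    """Hypothetical loader for a 'toy' forge, for illustration only."""
    visit_type = 'toy'

    def get_versions(self) -> Sequence[str]:
        # A real loader would fetch this from the forge's API.
        return ['1.0.0', '1.1.0']

    def get_default_version(self) -> str:
        return '1.1.0'

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        url = 'https://toy.example.org/dl/pkg-%s.tar.gz' % version
        p_info = {
            'url': url,
            'filename': 'pkg-%s.tar.gz' % version,
            'raw': {'url': url, 'version': version},  # extrinsic metadata
        }
        yield release_name(version), p_info

    def build_revision(
            self, a_metadata: Dict, uncompressed_path: str) -> Dict:
        # load() adds 'synthetic', 'directory', 'original_artifact' and the
        # revision id afterwards; 'metadata' must be a dict so load() can
        # update it in place.
        date = normalize_timestamp(0)  # a real loader derives actual dates
        person = {'fullname': b'', 'name': None, 'email': None}
        return {
            'type': 'tar',
            'message': a_metadata['version'].encode('utf-8'),
            'author': person,
            'date': date,
            'committer': person,
            'committer_date': date,
            'parents': [],
            'metadata': {'extrinsic': {'raw': a_metadata}},
        }
```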
On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the branch information from step 0., generate a snapshot and load it into the Software Heritage archive """ status_load = 'uneventful' # either: eventful, uneventful, failed status_visit = 'full' # either: partial, full tmp_revisions = {} # type: Dict[str, List] snapshot = None # Prepare origin and origin_visit origin = {'url': self.url} try: self.storage.origin_add_one(origin) visit_id = self.storage.origin_visit_add( origin=self.url, date=self.visit_date, type=self.visit_type)['visit'] except Exception as e: logger.error( 'Failed to create origin/origin_visit. Reason: %s', e) return {'status': 'failed'} try: last_snapshot = self.last_snapshot() logger.debug('last snapshot: %s', last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) logger.debug('known artifacts: %s', known_artifacts) # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug('default version: %s', default_version) for version in self.get_versions(): # for each logger.debug('version: %s', version) tmp_revisions[version] = [] # `p_` stands for `package_` for branch_name, p_info in self.get_package_info(version): logger.debug('package_info: %s', p_info) revision_id = self.resolve_revision_from( known_artifacts, p_info['raw']) if revision_id is None: with tempfile.TemporaryDirectory() as tmpdir: try: dl_artifacts = self.download_package( p_info, tmpdir) except Exception: logger.exception('Unable to retrieve %s', p_info) status_visit = 'partial' continue uncompressed_path = self.uncompress( dl_artifacts, dest=tmpdir) logger.debug('uncompressed_path: %s', uncompressed_path) directory = Directory.from_disk( path=uncompressed_path.encode('utf-8'), data=True) # noqa # FIXME: Try not to load the full raw content in # memory objects = directory.collect() contents = list( objects.get('content', {}).values()) logger.debug('Number of contents: %s', len(contents)) self.storage.content_add( [content_for_storage(x) for x in contents]) status_load = 'eventful' directories = list( objects.get('directory', {}).values()) logger.debug('Number of directories: %s', len(directories)) self.storage.directory_add(directories) # FIXME: This should be release. cf. 
D409 revision = self.build_revision( p_info['raw'], uncompressed_path) + if not revision: + # Some artifacts are missing intrinsic metadata + # skipping those + continue + revision.update({ 'synthetic': True, 'directory': directory.hash, }) revision['metadata'].update({ 'original_artifact': [ hashes for _, hashes in dl_artifacts ], }) revision['id'] = revision_id = identifier_to_bytes( revision_identifier(revision)) logger.debug('Revision: %s', revision) self.storage.revision_add([revision]) tmp_revisions[version].append((branch_name, revision_id)) logger.debug('tmp_revisions: %s', tmp_revisions) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in tmp_revisions.items(): if version == default_version and \ len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) if branch_name != 'HEAD': branches[b'HEAD'] = { 'target_type': 'alias', 'target': branch_name.encode('utf-8'), } for branch_name, target in branch_name_revisions: branches[branch_name.encode('utf-8')] = { 'target_type': 'revision', 'target': target, } snapshot = { 'branches': branches } logger.debug('snapshot: %s', snapshot) snapshot['id'] = identifier_to_bytes( snapshot_identifier(snapshot)) logger.debug('snapshot: %s', snapshot) self.storage.snapshot_add([snapshot]) if hasattr(self.storage, 'flush'): self.storage.flush() except Exception: logger.exception('Fail to load %s' % self.url) status_visit = 'partial' status_load = 'failed' finally: self.storage.origin_visit_update( origin=self.url, visit_id=visit_id, status=status_visit, snapshot=snapshot and snapshot['id']) result = { 'status': status_load, } # type: Dict[str, Any] if snapshot: result['snapshot_id'] = hash_to_hex(snapshot['id']) return result diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index 37415cc..aa330aa 100644 --- a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,245 +1,247 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import logging from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from urllib.parse import urlparse from pkginfo import UnpackedSDist import iso8601 from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info, release_name logger = logging.getLogger(__name__) class PyPILoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
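A condensed illustration of the snapshot assembly at the end of `load()`: one release branch plus the `HEAD` alias, then the intrinsic snapshot identifier. The revision id is borrowed from the test fixtures further down; the helpers are the same `swh.model.identifiers` functions imported at the top of this file.

```python
from swh.model.identifiers import identifier_to_bytes, snapshot_identifier

branches = {
    b'releases/1.2.0': {
        'target_type': 'revision',
        # 20-byte revision id (value borrowed from the tests below)
        'target': bytes.fromhex('e445da4da22b31bfebb6ffc4383dbf839a074d21'),
    },
    b'HEAD': {
        'target_type': 'alias',
        'target': b'releases/1.2.0',
    },
}
snapshot = {'branches': branches}
# intrinsic identifier computed over the branch mapping
snapshot['id'] = identifier_to_bytes(snapshot_identifier(snapshot))
```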
""" visit_type = 'pypi' def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return self.info['releases'].keys() def get_default_version(self) -> str: return self.info['info']['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: res = [] for meta in self.info['releases'][version]: if meta['packagetype'] != 'sdist': continue filename = meta['filename'] p_info = { 'url': meta['url'], 'filename': filename, 'raw': meta, } res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info['filename']), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, artifact_metadata) def build_revision( self, a_metadata: Dict, uncompressed_path: str) -> Dict: i_metadata = extract_intrinsic_metadata(uncompressed_path) + if not i_metadata: + return {} # from intrinsic metadata name = i_metadata['version'] _author = author(i_metadata) # from extrinsic metadata message = a_metadata.get('comment_text', '') message = '%s: %s' % (name, message) if message else name date = normalize_timestamp( int(iso8601.parse_date(a_metadata['upload_time']).timestamp())) return { 'type': 'tar', 'message': message.encode('utf-8'), 'author': _author, 'date': date, 'committer': _author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'PKG-INFO', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } } def artifact_to_revision_id( known_artifacts: Dict, artifact_metadata: Dict) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats (column metadata in 'revision') - old format sample:: { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, ... } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... } """ sha256 = artifact_metadata['digests']['sha256'] for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact['original_artifact'] if isinstance(original_artifact, dict): # previous loader-pypi version stored metadata as dict original_sha256 = original_artifact['sha256'] if sha256 == original_sha256: return rev_id continue # new pypi loader actually store metadata dict differently... 
assert isinstance(original_artifact, list) # current loader-pypi stores metadata as list of dict for original_artifact in known_artifact['original_artifact']: if sha256 == original_artifact['checksums']['sha256']: return rev_id return None def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.rstrip('/').split('/')[-1] url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO') if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop('filename') # this gets added with the ondisk location return raw def author(data: Dict) -> Dict: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. """ name = data.get('author') email = data.get('author_email') fullname = None # type: Optional[str] if email: fullname = '%s <%s>' % (name, email) else: fullname = name if not fullname: return {'fullname': b'', 'name': None, 'email': None} if name is not None: name = name.encode('utf-8') if email is not None: email = email.encode('utf-8') return { 'fullname': fullname.encode('utf-8'), 'name': name, 'email': email } diff --git a/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz b/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz new file mode 100644 index 0000000..069519e Binary files /dev/null and b/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz differ diff --git a/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json b/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json new file mode 100644 index 0000000..33eaf8b --- /dev/null +++ b/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json @@ -0,0 +1,113 @@ +{ + "info": { + "author": "Joost Plattel", + "author_email": "", + "bugtrack_url": null, + "classifiers": [], + "description": "# uPyMenu\n\nuPyMenu is a micropython menu implementation for LCD displays. 
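REPL-style sanity checks for the two helpers above; the `rev_id` and sha256 values are placeholders, not real identifiers.

```python
from swh.loader.package.pypi.loader import (
    artifact_to_revision_id, pypi_api_url
)

assert pypi_api_url('https://pypi.org/project/requests') == \
    'https://pypi.org/pypi/requests/json'

rev_id = b'\x00' * 20  # placeholder 20-byte revision id
artifact_metadata = {'digests': {'sha256': 'abc'}}
old_style = {rev_id: {'original_artifact': {'sha256': 'abc'}}}
new_style = {rev_id: {'original_artifact': [{'checksums': {'sha256': 'abc'}}]}}

# both storage formats resolve to the same revision id
assert artifact_to_revision_id(old_style, artifact_metadata) == rev_id
assert artifact_to_revision_id(new_style, artifact_metadata) == rev_id
```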
diff --git a/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz b/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz
new file mode 100644
index 0000000..069519e
Binary files /dev/null and b/swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz differ
diff --git a/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json b/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json
new file mode 100644
index 0000000..33eaf8b
--- /dev/null
+++ b/swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json
@@ -0,0 +1,113 @@
+{
+    "info": {
+        "author": "Joost Plattel",
+        "author_email": "",
+        "bugtrack_url": null,
+        "classifiers": [],
+        "description": "# uPyMenu\n\nuPyMenu is a micropython menu implementation for LCD displays. Coming from an Arduino experience, I was used with [LiquidCrystal](https://github.com/arduino-libraries/LiquidCrystal) and [LiquidMenu](https://github.com/VaSe7u/LiquidMenu), but they don't offer a python implementation. So I figured I could create one myself.\n\n# Development\n\nCurrent development is done based on when I have to to work on it when I want to. But feel free to fork it and add your functionality. If you find any bugs or have suggestions for features, please open up a issue (or pull-request if you create the feature yourself!).\n\n## TODO\n\n* Add arguments to callback actions in the menu\n\n# Usage\n\nThe example below renders a menu with a submenu and actions that have callbacks attached which you can use to run your own function. It requires one dependency, and that's [`python_lcd`](https://github.com/dhylands/python_lcd) for interacting with the LCD itself.\n\n```python\nfrom machine import Pin, I2C # Basics for creating an LCD interface\nfrom esp8266_i2c_lcd import I2cLcd # Example LCD interface used\nfrom upymenu import Menu, MenuAction, MenuNoop\n\ndef action_callback():\n    print(\"callback action chosen\")\n\nsubmenu = Menu(\"Submenu\")\nsubmenu_action_1 = MenuAction(\"Submenu Action\", callback=action_callback)\nsubmenu_action_2 = MenuAction(\"Submenu Action 1\", callback=action_callback)\nsubmenu.add_option(submenu_action_1)\nsubmenu.add_option(submenu_action_2)\n\nmenu_action = MenuAction(\"Action\", callback=action_callback)\nmenu = Menu(\"Main Menu\")\nmenu.add_option(submenu)\nmenu.add_option(menu_action)\nmenu.add_option(MenuNoop(\"Nothing here\"))\n\n# Example config for LCD via i2c, you will need this \n# for the menu to function, the screen size is required\n# to render the menu correctly on the screen.\ni2c = I2C(scl=Pin(5), sda=Pin(4), freq=400000)\nlcd = I2cLcd(i2c, 0x3F, 4, 20)\n\ncurrent_menu = menu.start(lcd) # Starts the menu on the LCD\n\nmenu.focus_next() # Focus on the next item in the menu \nmenu.focus_prev() # Focus on the previous item in the menu \n\n# Choose the focused item, if it's and action execute \n# the callback, or if it is a menu, render that menu.\nmenu = menu.choose()\n\n# If it's a submenu, you can use the parent() function\n# to navigate back up to the tree.\nmenu = menu.parent() \n```\n\n# Testing\n\nIf you have a `micropython` port running locally, you can run the test suite with it, ie: `micropython tests.py`. This is as close I can get with my current knowledge about micropython. Any pointers and suggestions towards improvement here would be appreciated!\n\n",
+        "description_content_type": "text/markdown",
+        "docs_url": null,
+        "download_url": "",
+        "downloads": {
+            "last_day": -1,
+            "last_month": -1,
+            "last_week": -1
+        },
+        "home_page": "https://github.com/jplattel/upymenu",
+        "keywords": "",
+        "license": "",
+        "maintainer": "",
+        "maintainer_email": "",
+        "name": "upymenu",
+        "package_url": "https://pypi.org/project/upymenu/",
+        "platform": "",
+        "project_url": "https://pypi.org/project/upymenu/",
+        "project_urls": {
+            "Homepage": "https://github.com/jplattel/upymenu"
+        },
+        "release_url": "https://pypi.org/project/upymenu/0.0.1/",
+        "requires_dist": null,
+        "requires_python": "",
+        "summary": "A micropython Menu for LCD Displays",
+        "version": "0.0.1"
+    },
+    "last_serial": 6514377,
+    "releases": {
+        "0.0.1": [
+            {
+                "comment_text": "",
+                "digests": {
+                    "md5": "9a71147f342cf1a2f224bc39bab467de",
+                    "sha256": "45159f7c599400fdb8ba69e898472481b2f4e72eb417c90bf425b1cea7bd0442"
+                },
+                "downloads": -1,
+                "filename": "upymenu-0.0.1-py3-none-any.whl",
+                "has_sig": false,
+                "md5_digest": "9a71147f342cf1a2f224bc39bab467de",
+                "packagetype": "bdist_wheel",
+                "python_version": "py3",
+                "requires_python": null,
+                "size": 4515,
+                "upload_time": "2020-01-24T15:47:22",
+                "upload_time_iso_8601": "2020-01-24T15:47:22.810597Z",
+                "url": "https://files.pythonhosted.org/packages/2a/1a/3dd2d22a22150a74c2d664163cd3739c91612838377128cc2d3da43285f7/upymenu-0.0.1-py3-none-any.whl"
+            },
+            {
+                "comment_text": "",
+                "digests": {
+                    "md5": "dc87480218b64094e9b8a4191bd6d9ef",
+                    "sha256": "05c86d8690d8fd8f331ddca03544608a1e7993226d5d14ce5a7bbb96d215cc18"
+                },
+                "downloads": -1,
+                "filename": "upymenu-0.0.1.tar.gz",
+                "has_sig": false,
+                "md5_digest": "dc87480218b64094e9b8a4191bd6d9ef",
+                "packagetype": "sdist",
+                "python_version": "source",
+                "requires_python": null,
+                "size": 2848,
+                "upload_time": "2020-01-24T15:47:25",
+                "upload_time_iso_8601": "2020-01-24T15:47:25.223650Z",
+                "url": "https://files.pythonhosted.org/packages/86/10/c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a/upymenu-0.0.1.tar.gz"
+            }
+        ]
+    },
+    "urls": [
+        {
+            "comment_text": "",
+            "digests": {
+                "md5": "9a71147f342cf1a2f224bc39bab467de",
+                "sha256": "45159f7c599400fdb8ba69e898472481b2f4e72eb417c90bf425b1cea7bd0442"
+            },
+            "downloads": -1,
+            "filename": "upymenu-0.0.1-py3-none-any.whl",
+            "has_sig": false,
+            "md5_digest": "9a71147f342cf1a2f224bc39bab467de",
+            "packagetype": "bdist_wheel",
+            "python_version": "py3",
+            "requires_python": null,
+            "size": 4515,
+            "upload_time": "2020-01-24T15:47:22",
+            "upload_time_iso_8601": "2020-01-24T15:47:22.810597Z",
+            "url": "https://files.pythonhosted.org/packages/2a/1a/3dd2d22a22150a74c2d664163cd3739c91612838377128cc2d3da43285f7/upymenu-0.0.1-py3-none-any.whl"
+        },
+        {
+            "comment_text": "",
+            "digests": {
+                "md5": "dc87480218b64094e9b8a4191bd6d9ef",
+                "sha256": "05c86d8690d8fd8f331ddca03544608a1e7993226d5d14ce5a7bbb96d215cc18"
+            },
+            "downloads": -1,
+            "filename": "upymenu-0.0.1.tar.gz",
+            "has_sig": false,
+            "md5_digest": "dc87480218b64094e9b8a4191bd6d9ef",
+            "packagetype": "sdist",
+            "python_version": "source",
+            "requires_python": null,
+            "size": 2848,
+            "upload_time": "2020-01-24T15:47:25",
+            "upload_time_iso_8601": "2020-01-24T15:47:25.223650Z",
+            "url": "https://files.pythonhosted.org/packages/86/10/c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a/upymenu-0.0.1.tar.gz"
+        }
+    ]
+}
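A sketch of what `PyPILoader.get_package_info()` keeps from the fixture above: only the sdist entry passes the `packagetype` filter, so at most one download is attempted for release 0.0.1. The file path assumes the test datadir layout from the diff header.

```python
import json

# hypothetical path; matches the datadir layout added by this diff
fixture = ('swh/loader/package/pypi/tests/data/'
           'https_pypi.org/pypi_upymenu_json')
with open(fixture) as f:
    info = json.load(f)

# same filter as get_package_info: the bdist_wheel entry is ignored
sdists = [meta for meta in info['releases']['0.0.1']
          if meta['packagetype'] == 'sdist']
assert [meta['filename'] for meta in sdists] == ['upymenu-0.0.1.tar.gz']
```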
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
index a4ddcac..f0cff08 100644
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -1,803 +1,830 @@
# Copyright (C) 2019  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import os
from os import path

import pytest

from unittest.mock import patch

from swh.core.tarball import uncompress
from swh.core.pytest_plugin import requests_mock_datadir_factory
from swh.model.hashutil import hash_to_bytes

from swh.loader.package.pypi.loader import (
    PyPILoader, pypi_api_url, author, extract_intrinsic_metadata,
    artifact_to_revision_id
)
from swh.loader.package.tests.common import (
    check_snapshot, check_metadata_paths, get_stats
)


def test_author_basic():
    data = {
        'author': "i-am-groot",
        'author_email': 'iam@groot.org',
    }
    actual_author = author(data)

    expected_author = {
        'fullname': b'i-am-groot <iam@groot.org>',
        'name': b'i-am-groot',
        'email': b'iam@groot.org',
    }

    assert actual_author == expected_author


def test_author_empty_email():
    data = {
        'author': 'i-am-groot',
        'author_email': '',
    }
    actual_author = author(data)

    expected_author = {
        'fullname': b'i-am-groot',
        'name': b'i-am-groot',
        'email': b'',
    }

    assert actual_author == expected_author


def test_author_empty_name():
    data = {
        'author': "",
        'author_email': 'iam@groot.org',
    }
    actual_author = author(data)

    expected_author = {
        'fullname': b' <iam@groot.org>',
        'name': b'',
        'email': b'iam@groot.org',
    }

    assert actual_author == expected_author


def test_author_malformed():
    data = {
        'author': "['pierre', 'paul', 'jacques']",
        'author_email': None,
    }

    actual_author = author(data)

    expected_author = {
        'fullname': b"['pierre', 'paul', 'jacques']",
        'name': b"['pierre', 'paul', 'jacques']",
        'email': None,
    }

    assert actual_author == expected_author


def test_author_malformed_2():
    data = {
        'author': '[marie, jeanne]',
        'author_email': '[marie@some, jeanne@thing]',
    }

    actual_author = author(data)

    expected_author = {
        'fullname': b'[marie, jeanne] <[marie@some, jeanne@thing]>',
        'name': b'[marie, jeanne]',
        'email': b'[marie@some, jeanne@thing]',
    }

    assert actual_author == expected_author


def test_author_malformed_3():
    data = {
        'author': '[marie, jeanne, pierre]',
        'author_email': '[marie@somewhere.org, jeanne@somewhere.org]',
    }

    actual_author = author(data)

    expected_author = {
        'fullname': b'[marie, jeanne, pierre] <[marie@somewhere.org, jeanne@somewhere.org]>',  # noqa
        'name': b'[marie, jeanne, pierre]',
        'email': b'[marie@somewhere.org, jeanne@somewhere.org]',
    }

    assert actual_author == expected_author


# configuration error #

def test_badly_configured_loader_raise(monkeypatch):
    """Badly configured loader should raise"""
    monkeypatch.delenv('SWH_CONFIG_FILENAME', raising=False)
    with pytest.raises(ValueError) as e:
        PyPILoader(url='some-url')

    assert 'Misconfiguration' in e.value.args[0]


def test_pypi_api_url():
    """Compute pypi api url from the pypi project url should be ok"""
    url = pypi_api_url('https://pypi.org/project/requests')
    assert url == 'https://pypi.org/pypi/requests/json'


def test_pypi_api_url_with_slash():
    """Compute pypi api url from the pypi project url should be ok"""
    url = pypi_api_url('https://pypi.org/project/requests/')
    assert url == 'https://pypi.org/pypi/requests/json'
@pytest.mark.fs
def test_extract_intrinsic_metadata(tmp_path, datadir):
    """Parsing an existing archive's PKG-INFO should yield results"""
    uncompressed_archive_path = str(tmp_path)
    archive_path = path.join(
        datadir, 'https_files.pythonhosted.org', '0805nexter-1.1.0.zip')
    uncompress(archive_path, dest=uncompressed_archive_path)

    actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
    expected_metadata = {
        'metadata_version': '1.0',
        'name': '0805nexter',
        'version': '1.1.0',
        'summary': 'a simple printer of nested lest',
        'home_page': 'http://www.hp.com',
        'author': 'hgtkpython',
        'author_email': '2868989685@qq.com',
        'platforms': ['UNKNOWN'],
    }

    assert actual_metadata == expected_metadata


@pytest.mark.fs
def test_extract_intrinsic_metadata_failures(tmp_path):
    """Parsing a nonexistent path/archive/PKG-INFO yields an empty dict"""
    tmp_path = str(tmp_path)  # py3.5 work around (PosixPath issue)
    # nonexistent first-level path
    assert extract_intrinsic_metadata('/something-inexistent') == {}
    # nonexistent second-level path (as expected by pypi archives)
    assert extract_intrinsic_metadata(tmp_path) == {}
    # nonexistent PKG-INFO within the second-level path
    existing_path_no_pkginfo = path.join(tmp_path, 'something')
    os.mkdir(existing_path_no_pkginfo)
    assert extract_intrinsic_metadata(tmp_path) == {}


# LOADER SCENARIO #

# "edge" cases (for the same origin) #


# no release artifact:
# {visit full, status: uneventful, no contents, etc...}
requests_mock_datadir_missing_all = requests_mock_datadir_factory(ignore_urls=[
    'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip',  # noqa
    'https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip',  # noqa
])


def test_no_release_artifact(swh_config, requests_mock_datadir_missing_all):
    """Load a pypi project with all artifacts missing ends up with an empty
       snapshot

    """
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    assert actual_load_status['status'] == 'uneventful'
    assert actual_load_status['snapshot_id'] is not None

    stats = get_stats(loader.storage)
    assert {
        'content': 0,
        'directory': 0,
        'origin': 1,
        'origin_visit': 1,
        'person': 0,
        'release': 0,
        'revision': 0,
        'skipped_content': 0,
        'snapshot': 1,
    } == stats

    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'partial'
    assert origin_visit['type'] == 'pypi'


# problem during loading:
# {visit: partial, status: uneventful, no snapshot}


def test_release_with_traceback(swh_config):
    url = 'https://pypi.org/project/0805nexter'
    with patch('swh.loader.package.pypi.loader.PyPILoader.get_default_version',
               side_effect=ValueError('Problem')):
        loader = PyPILoader(url)

        actual_load_status = loader.load()
        assert actual_load_status == {'status': 'failed'}

        stats = get_stats(loader.storage)

        assert {
            'content': 0,
            'directory': 0,
            'origin': 1,
            'origin_visit': 1,
            'person': 0,
            'release': 0,
            'revision': 0,
            'skipped_content': 0,
            'snapshot': 0,
        } == stats

        origin_visit = next(loader.storage.origin_visit_get(url))
        assert origin_visit['status'] == 'partial'
        assert origin_visit['type'] == 'pypi'
# problem during loading: failure early enough in between swh contents...
# some contents (contents, directories, etc...) have been written in storage
# {visit: partial, status: eventful, no snapshot}

# problem during loading: failure late enough we can have snapshots (some
# revisions are written in storage already)
# {visit: partial, status: eventful, snapshot}


# "normal" cases (for the same origin) #


requests_mock_datadir_missing_one = requests_mock_datadir_factory(ignore_urls=[
    'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip',  # noqa
])

# some missing release artifacts:
# {visit partial, status: eventful, 1 snapshot}


def test_revision_metadata_structure(swh_config, requests_mock_datadir):
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    assert actual_load_status['status'] == 'eventful'
    assert actual_load_status['snapshot_id'] is not None

    expected_revision_id = hash_to_bytes(
        'e445da4da22b31bfebb6ffc4383dbf839a074d21')
    revision = list(loader.storage.revision_get([expected_revision_id]))[0]
    assert revision is not None

    check_metadata_paths(revision['metadata'], paths=[
        ('intrinsic.tool', str),
        ('intrinsic.raw', dict),
        ('extrinsic.provider', str),
        ('extrinsic.when', str),
        ('extrinsic.raw', dict),
        ('original_artifact', list),
    ])

    for original_artifact in revision['metadata']['original_artifact']:
        check_metadata_paths(original_artifact, paths=[
            ('filename', str),
            ('length', int),
            ('checksums', dict),
        ])


def test_visit_with_missing_artifact(
        swh_config, requests_mock_datadir_missing_one):
    """Load a pypi project with some missing artifacts ends up with 1
       snapshot

    """
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    expected_snapshot_id = 'dd0e4201a232b1c104433741dbf45895b8ac9355'
    assert actual_load_status == {
        'status': 'eventful',
        'snapshot_id': expected_snapshot_id
    }

    stats = get_stats(loader.storage)

    assert {
        'content': 3,
        'directory': 2,
        'origin': 1,
        'origin_visit': 1,
        'person': 1,
        'release': 0,
        'revision': 1,
        'skipped_content': 0,
        'snapshot': 1
    } == stats

    expected_contents = map(hash_to_bytes, [
        '405859113963cb7a797642b45f171d6360425d16',
        'e5686aa568fdb1d19d7f1329267082fe40482d31',
        '83ecf6ec1114fd260ca7a833a2d165e71258c338',
    ])

    assert list(loader.storage.content_missing_per_sha1(expected_contents))\
        == []

    expected_dirs = map(hash_to_bytes, [
        'b178b66bd22383d5f16f4f5c923d39ca798861b4',
        'c3a58f8b57433a4b56caaa5033ae2e0931405338',
    ])

    assert list(loader.storage.directory_missing(expected_dirs)) == []

    # {revision hash: directory hash}
    expected_revs = {
        hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'),  # noqa
    }
    assert list(loader.storage.revision_missing(expected_revs)) == []

    expected_branches = {
        'releases/1.2.0': {
            'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
            'target_type': 'revision',
        },
        'HEAD': {
            'target': 'releases/1.2.0',
            'target_type': 'alias',
        },
    }
    expected_snapshot = {
        'id': expected_snapshot_id,
        'branches': expected_branches,
    }
    check_snapshot(expected_snapshot, storage=loader.storage)

    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'partial'
    assert origin_visit['type'] == 'pypi'


def test_visit_with_1_release_artifact(swh_config, requests_mock_datadir):
    """With no prior visit, load a pypi project ends up with 1 snapshot

    """
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a'
    assert actual_load_status == {
        'status': 'eventful',
        'snapshot_id': expected_snapshot_id
    }

    stats = get_stats(loader.storage)
    assert {
        'content': 6,
        'directory': 4,
        'origin': 1,
        'origin_visit': 1,
        'person': 1,
        'release': 0,
        'revision': 2,
        'skipped_content': 0,
        'snapshot': 1
    } == stats

    expected_contents = map(hash_to_bytes, [
        'a61e24cdfdab3bb7817f6be85d37a3e666b34566',
        '938c33483285fd8ad57f15497f538320df82aeb8',
        'a27576d60e08c94a05006d2e6d540c0fdb5f38c8',
        '405859113963cb7a797642b45f171d6360425d16',
        'e5686aa568fdb1d19d7f1329267082fe40482d31',
        '83ecf6ec1114fd260ca7a833a2d165e71258c338',
    ])

    assert list(loader.storage.content_missing_per_sha1(expected_contents))\
        == []

    expected_dirs = map(hash_to_bytes, [
        '05219ba38bc542d4345d5638af1ed56c7d43ca7d',
        'cf019eb456cf6f78d8c4674596f1c9a97ece8f44',
        'b178b66bd22383d5f16f4f5c923d39ca798861b4',
        'c3a58f8b57433a4b56caaa5033ae2e0931405338',
    ])

    assert list(loader.storage.directory_missing(expected_dirs)) == []

    # {revision hash: directory hash}
    expected_revs = {
        hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'),  # noqa
        hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'),  # noqa
    }
    assert list(loader.storage.revision_missing(expected_revs)) == []

    expected_branches = {
        'releases/1.1.0': {
            'target': '4c99891f93b81450385777235a37b5e966dd1571',
            'target_type': 'revision',
        },
        'releases/1.2.0': {
            'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
            'target_type': 'revision',
        },
        'HEAD': {
            'target': 'releases/1.2.0',
            'target_type': 'alias',
        },
    }
    expected_snapshot = {
        'id': expected_snapshot_id,
        'branches': expected_branches,
    }
    check_snapshot(expected_snapshot, loader.storage)

    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'full'
    assert origin_visit['type'] == 'pypi'


def test_multiple_visits_with_no_change(swh_config, requests_mock_datadir):
    """Multiple visits with no changes result in the same snapshot

    """
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a'
    assert actual_load_status == {
        'status': 'eventful',
        'snapshot_id': snapshot_id,
    }

    stats = get_stats(loader.storage)

    assert {
        'content': 6,
        'directory': 4,
        'origin': 1,
        'origin_visit': 1,
        'person': 1,
        'release': 0,
        'revision': 2,
        'skipped_content': 0,
        'snapshot': 1
    } == stats

    expected_branches = {
        'releases/1.1.0': {
            'target': '4c99891f93b81450385777235a37b5e966dd1571',
            'target_type': 'revision',
        },
        'releases/1.2.0': {
            'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
            'target_type': 'revision',
        },
        'HEAD': {
            'target': 'releases/1.2.0',
            'target_type': 'alias',
        },
    }
    expected_snapshot = {
        'id': snapshot_id,
        'branches': expected_branches,
    }
    check_snapshot(expected_snapshot, loader.storage)

    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'full'
    assert origin_visit['type'] == 'pypi'

    actual_load_status2 = loader.load()
    assert actual_load_status2 == {
        'status': 'uneventful',
        'snapshot_id': actual_load_status2['snapshot_id']
    }

    stats2 = get_stats(loader.storage)
    expected_stats2 = stats.copy()
    expected_stats2['origin_visit'] = 1 + 1
    assert expected_stats2 == stats2

    # same snapshot
    actual_snapshot_id = origin_visit['snapshot']
    assert actual_snapshot_id == hash_to_bytes(snapshot_id)


def test_incremental_visit(swh_config, requests_mock_datadir_visits):
    """With a prior visit, the 2nd load results in a different snapshot

    """
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)

    visit1_actual_load_status = loader.load()
    visit1_stats = get_stats(loader.storage)
    expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a'
    assert visit1_actual_load_status == {
        'status': 'eventful',
        'snapshot_id': expected_snapshot_id
    }

    origin_visit1 = next(loader.storage.origin_visit_get(url))
    assert origin_visit1['status'] == 'full'
    assert origin_visit1['type'] == 'pypi'

    assert {
        'content': 6,
        'directory': 4,
        'origin': 1,
        'origin_visit': 1,
        'person': 1,
        'release': 0,
        'revision': 2,
        'skipped_content': 0,
        'snapshot': 1
    } == visit1_stats

    # Reset internal state
    loader._info = None

    visit2_actual_load_status = loader.load()
    visit2_stats = get_stats(loader.storage)

    assert visit2_actual_load_status['status'] == 'eventful'
    expected_snapshot_id2 = '2e5149a7b0725d18231a37b342e9b7c4e121f283'
    assert visit2_actual_load_status == {
        'status': 'eventful',
        'snapshot_id': expected_snapshot_id2
    }

    visits = list(loader.storage.origin_visit_get(url))
    assert len(visits) == 2
    assert visits[1]['status'] == 'full'
    assert visits[1]['type'] == 'pypi'

    assert {
        'content': 6 + 1,          # 1 more content
        'directory': 4 + 2,        # 2 more directories
        'origin': 1,
        'origin_visit': 1 + 1,
        'person': 1,
        'release': 0,
        'revision': 2 + 1,         # 1 more revision
        'skipped_content': 0,
        'snapshot': 1 + 1,         # 1 more snapshot
    } == visit2_stats

    expected_contents = map(hash_to_bytes, [
        'a61e24cdfdab3bb7817f6be85d37a3e666b34566',
        '938c33483285fd8ad57f15497f538320df82aeb8',
        'a27576d60e08c94a05006d2e6d540c0fdb5f38c8',
        '405859113963cb7a797642b45f171d6360425d16',
        'e5686aa568fdb1d19d7f1329267082fe40482d31',
        '83ecf6ec1114fd260ca7a833a2d165e71258c338',
        '92689fa2b7fb4d4fc6fb195bf73a50c87c030639'
    ])

    assert list(loader.storage.content_missing_per_sha1(expected_contents))\
        == []

    expected_dirs = map(hash_to_bytes, [
        '05219ba38bc542d4345d5638af1ed56c7d43ca7d',
        'cf019eb456cf6f78d8c4674596f1c9a97ece8f44',
        'b178b66bd22383d5f16f4f5c923d39ca798861b4',
        'c3a58f8b57433a4b56caaa5033ae2e0931405338',
        'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a',
        '52604d46843b898f5a43208045d09fcf8731631b',
    ])

    assert list(loader.storage.directory_missing(expected_dirs)) == []

    # {revision hash: directory hash}
    expected_revs = {
        hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'),  # noqa
        hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'),  # noqa
        hash_to_bytes('51247143b01445c9348afa9edfae31bf7c5d86b1'): hash_to_bytes('e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a'),  # noqa
    }
    assert list(loader.storage.revision_missing(expected_revs)) == []

    expected_branches = {
        'releases/1.1.0': {
            'target': '4c99891f93b81450385777235a37b5e966dd1571',
            'target_type': 'revision',
        },
        'releases/1.2.0': {
            'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
            'target_type': 'revision',
        },
        'releases/1.3.0': {
            'target': '51247143b01445c9348afa9edfae31bf7c5d86b1',
            'target_type': 'revision',
        },
        'HEAD': {
            'target': 'releases/1.3.0',
            'target_type': 'alias',
        },
    }
    expected_snapshot = {
        'id': expected_snapshot_id2,
        'branches': expected_branches,
    }
    check_snapshot(expected_snapshot, loader.storage)

    origin_visit = list(loader.storage.origin_visit_get(url))[-1]
    assert origin_visit['status'] == 'full'
    assert origin_visit['type'] == 'pypi'

    urls = [
        m.url for m in requests_mock_datadir_visits.request_history
        if m.url.startswith('https://files.pythonhosted.org')
    ]
    # visited each artifact once across 2 visits
    assert len(urls) == len(set(urls))
# release artifact, no new artifact
# {visit full, status uneventful, same snapshot as before}

# release artifact, old artifact with different checksums
# {visit full, status full, new snapshot with shared history and some new
# different history}

# release with multiple sdist artifacts per pypi "version"
# snapshot branch output is different


def test_visit_1_release_with_2_artifacts(swh_config, requests_mock_datadir):
    """With no prior visit, load a pypi project ends up with 1 snapshot

    """
    url = 'https://pypi.org/project/nexter'
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    expected_snapshot_id = 'a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6'
    assert actual_load_status == {
        'status': 'eventful',
        'snapshot_id': expected_snapshot_id,
    }

    expected_branches = {
        'releases/1.1.0/nexter-1.1.0.zip': {
            'target': '4c99891f93b81450385777235a37b5e966dd1571',
            'target_type': 'revision',
        },
        'releases/1.1.0/nexter-1.1.0.tar.gz': {
            'target': '0bf88f5760cca7665d0af4d6575d9301134fe11a',
            'target_type': 'revision',
        },
    }
    expected_snapshot = {
        'id': expected_snapshot_id,
        'branches': expected_branches,
    }
    check_snapshot(expected_snapshot, loader.storage)

    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'full'
    assert origin_visit['type'] == 'pypi'


def test_pypi_artifact_to_revision_id_none():
    """Current loader version should bail out early if nothing can be found

    """
    artifact_metadata = {
        'digests': {
            'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
        },
    }

    assert artifact_to_revision_id({}, artifact_metadata) is None

    known_artifacts = {
        'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92': {
            'original_artifact': {
                'sha256': 'something-irrelevant',
            },
        },
    }

    assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None


def test_pypi_artifact_to_revision_id_old_loader_version():
    """Current loader version should resolve the old metadata scheme

    """
    artifact_metadata = {
        'digests': {
            'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
        }
    }
    known_artifacts = {
        hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): {
            'original_artifact': {
                'sha256': "something-wrong",
            },
        },
        hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): {
            'original_artifact': {
                'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
            },
        }
    }

    assert artifact_to_revision_id(known_artifacts, artifact_metadata) \
        == hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116')


def test_pypi_artifact_to_revision_id_current_loader_version():
    """Current loader version should resolve the current metadata scheme

    """
    artifact_metadata = {
        'digests': {
            'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
        }
    }
    known_artifacts = {
        hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): {
            'original_artifact': [{
                'checksums': {
                    'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec',  # noqa
                },
            }],
        },
        hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): {
            'original_artifact': [{
                'checksums': {
                    'sha256': 'something-wrong'
                },
            }],
        },
    }

    assert artifact_to_revision_id(known_artifacts, artifact_metadata) \
        == hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92')


def test_pypi_artifact_to_revision_id_failures():
    with pytest.raises(KeyError, match='sha256'):
        artifact_metadata = {
            'digests': {},
        }
        assert artifact_to_revision_id({}, artifact_metadata)

    with pytest.raises(KeyError, match='digests'):
        artifact_metadata = {
            'something': 'wrong',
        }
        assert artifact_to_revision_id({}, artifact_metadata)
+
+
+def test_pypi_artifact_with_no_intrinsic_metadata(
+        swh_config, requests_mock_datadir):
+    """Skip artifacts with no intrinsic metadata during ingestion
+
+    """
+    url = 'https://pypi.org/project/upymenu'
+    loader = PyPILoader(url)
+
+    actual_load_status = loader.load()
+    expected_snapshot_id = '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e'
+    assert actual_load_status == {
+        'status': 'eventful',
+        'snapshot_id': expected_snapshot_id,
+    }
+
+    # no branch: the only artifact lacks any intrinsic metadata
+    expected_snapshot = {
+        'id': expected_snapshot_id,
+        'branches': {}
+    }
+    check_snapshot(expected_snapshot, loader.storage)
+
+    origin_visit = next(loader.storage.origin_visit_get(url))
+    assert origin_visit['status'] == 'full'
+    assert origin_visit['type'] == 'pypi'
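The expected snapshot id in the new test is not arbitrary: with every artifact skipped, the loader serializes a snapshot with no branches, whose intrinsic identifier is the well-known empty-snapshot id. A quick cross-check with the same helper the loader uses:

```python
from swh.model.identifiers import snapshot_identifier

# the canonical empty snapshot, as asserted by the test above
assert snapshot_identifier({'branches': {}}) == \
    '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e'
```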