diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py
index 9c03b99..f716a65 100644
--- a/swh/loader/package/cran/tests/test_cran.py
+++ b/swh/loader/package/cran/tests/test_cran.py
@@ -1,374 +1,369 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 from datetime import datetime, timezone
 import os
 from os import path
 from unittest.mock import patch

 from dateutil.tz import tzlocal
 import pytest

 from swh.core.tarball import uncompress
 from swh.loader.package.cran.loader import (
     CRANLoader,
     extract_intrinsic_metadata,
     parse_date,
     parse_debian_control,
 )
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone

 SNAPSHOT = Snapshot(
     id=hash_to_bytes("920adcccc78aaeedd3cfa4459dd900d8c3431a21"),
     branches={
         b"HEAD": SnapshotBranch(
             target=b"releases/2.22-6", target_type=TargetType.ALIAS
         ),
         b"releases/2.22-6": SnapshotBranch(
             target=hash_to_bytes("42bdb16facd5140424359c8ce89a28ecfa1ce603"),
             target_type=TargetType.REVISION,
         ),
     },
 )


 def test_cran_parse_date():
     data = [
         # parsable, some have debatable results though
         ("2001-June-08", datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)),
         (
             "Tue Dec 27 15:06:08 PST 2011",
             datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc),
         ),
         ("8-14-2013", datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)),
         ("2011-01", datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)),
         ("201109", datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)),
         ("04-12-2014", datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)),
         (
             "2018-08-24, 10:40:10",
             datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc),
         ),
         ("2013-October-16", datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)),
         ("Aug 23, 2013", datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)),
         ("27-11-2014", datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)),
         ("2019-09-26,", datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)),
         ("9/25/2014", datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)),
         (
             "Fri Jun 27 17:23:53 2014",
             datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc),
         ),
         ("28-04-2014", datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)),
         ("04-14-2014", datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)),
         (
             "2019-05-08 14:17:31 UTC",
             datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc),
         ),
         (
             "Wed May 21 13:50:39 CEST 2014",
             datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal()),
         ),
         (
             "2018-04-10 00:01:04 KST",
             datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc),
         ),
         ("2019-08-25 10:45", datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)),
         ("March 9, 2015", datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)),
         ("Aug. 18, 2012", datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)),
         ("2014-Dec-17", datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)),
         ("March 01, 2013", datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)),
         ("2017-04-08.", datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)),
         ("2014-Apr-22", datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)),
         (
             "Mon Jan 12 19:54:04 2015",
             datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc),
         ),
         ("May 22, 2014", datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)),
         (
             "2014-08-12 09:55:10 EDT",
             datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc),
         ),
         # unparsable
         ("Fabruary 21, 2012", None),
         ('2019-05-28"', None),
         ("2017-03-01 today", None),
         ("2016-11-0110.1093/icesjms/fsw182", None),
         ("2019-07-010", None),
         ("2015-02.23", None),
         ("20013-12-30", None),
         ("2016-08-017", None),
         ("2019-02-07l", None),
         ("2018-05-010", None),
         ("2019-09-27 KST", None),
         ("$Date$", None),
         ("2019-06-22 $Date$", None),
         ("$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $", None),
         ("2015-7-013", None),
         ("2018-05-023", None),
         ("Check NEWS file for changes: news(package='simSummary')", None),
     ]
     for date, expected_date in data:
         actual_tstz = parse_date(date)
         if expected_date is None:
             assert actual_tstz is None, date
         else:
             expected_tstz = TimestampWithTimezone.from_datetime(expected_date)
             assert actual_tstz == expected_tstz, date


 @pytest.mark.fs
 def test_cran_extract_intrinsic_metadata(tmp_path, datadir):
     """Parsing an existing archive's DESCRIPTION file should yield results"""
     uncompressed_archive_path = str(tmp_path)
     # sample url
     # https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz  # noqa
     archive_path = path.join(
         datadir,
         "https_cran.r-project.org",
         "src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz",
     )
     uncompress(archive_path, dest=uncompressed_archive_path)

     actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)

     expected_metadata = {
         "Package": "KernSmooth",
         "Priority": "recommended",
         "Version": "2.22-6",
         "Date": "2001-June-08",
         "Title": "Functions for kernel smoothing for Wand & Jones (1995)",
         "Author": "S original by Matt Wand.\n\tR port by Brian Ripley <ripley@stats.ox.ac.uk>.",  # noqa
         "Maintainer": "Brian Ripley <ripley@stats.ox.ac.uk>",
         "Description": 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. (1995) "Kernel Smoothing".',  # noqa
         "License": "Unlimited use and distribution (see LICENCE).",
         "URL": "http://www.biostat.harvard.edu/~mwand",
     }
     assert actual_metadata == expected_metadata


 @pytest.mark.fs
 def test_cran_extract_intrinsic_metadata_failures(tmp_path):
     """Parsing an inexistent path/archive/DESCRIPTION yields an empty dict"""
     # inexistent first level path
     assert extract_intrinsic_metadata("/something-inexistent") == {}
     # inexistent second level path (as expected by cran archives)
     assert extract_intrinsic_metadata(tmp_path) == {}
     # inexistent DESCRIPTION file within the second level path
     existing_path_no_pkginfo = str(tmp_path / "something")
     os.mkdir(existing_path_no_pkginfo)
     assert extract_intrinsic_metadata(tmp_path) == {}


 def test_cran_one_visit(swh_storage, requests_mock_datadir):
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     loader = CRANLoader(
         swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version,}]
     )

     actual_load_status = loader.load()
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": SNAPSHOT.id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
     )

     check_snapshot(SNAPSHOT, swh_storage)

     visit_stats = get_stats(swh_storage)
     assert {
         "content": 33,
         "directory": 7,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit_stats

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith(base_url)
     ]
     # the artifact url was fetched only once during this single visit
     assert len(urls) == 1


 def test_cran_2_visits_same_origin(swh_storage, requests_mock_datadir):
     """Multiple visits on the same origin, only 1 archive fetch"""
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     loader = CRANLoader(
         swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version}]
     )

     # first visit
     actual_load_status = loader.load()
-    expected_snapshot_id = "920adcccc78aaeedd3cfa4459dd900d8c3431a21"
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": SNAPSHOT.id.hex(),
     }

     check_snapshot(SNAPSHOT, swh_storage)

     assert_last_visit_matches(
         swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
     )

     visit_stats = get_stats(swh_storage)
     assert {
         "content": 33,
         "directory": 7,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit_stats

     # second visit
     actual_load_status2 = loader.load()
     assert actual_load_status2 == {
         "status": "uneventful",
-        "snapshot_id": expected_snapshot_id,
+        "snapshot_id": SNAPSHOT.id.hex(),
     }

     assert_last_visit_matches(
-        swh_storage,
-        origin_url,
-        status="full",
-        type="cran",
-        snapshot=hash_to_bytes(expected_snapshot_id),
+        swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id,
     )

     visit_stats2 = get_stats(swh_storage)
     visit_stats["origin_visit"] += 1
     assert visit_stats2 == visit_stats, "same stats as 1st visit, +1 visit"

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith(base_url)
     ]
     assert len(urls) == 1, "artifact url fetched only once (across 2 visits)"


 def test_cran_parse_debian_control(datadir):
     description_file = os.path.join(datadir, "description", "acepack")
     actual_metadata = parse_debian_control(description_file)

     assert actual_metadata == {
         "Package": "acepack",
         "Maintainer": "Shawn Garbett",
         "Version": "1.4.1",
         "Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...",
         "Description": "Two nonparametric methods for multiple regression...",
         "Title": "ACE and AVAS for Selecting Multiple Regression Transformations",
         "License": "MIT + file LICENSE",
         "Suggests": "testthat",
         "Packaged": "2016-10-28 15:38:59 UTC; garbetsp",
         "Repository": "CRAN",
         "Date/Publication": "2016-10-29 00:11:52",
         "NeedsCompilation": "yes",
     }


 def test_cran_parse_debian_control_unicode_issue(datadir):
     # iso-8859-1 caused failure, now fixed
     description_file = os.path.join(datadir, "description", "KnownBR")
     actual_metadata = parse_debian_control(description_file)
     assert actual_metadata == {
         "Package": "KnowBR",
         "Version": "2.0",
         "Title": """Discriminating Well Surveyed Spatial Units from Exhaustive
        Biodiversity Databases""",
         "Author": "Cástor Guisande González and Jorge M. Lobo",
         "Maintainer": "Cástor Guisande González ",
         "Description": "It uses species accumulation curves and diverse estimators...",
         "License": "GPL (>= 2)",
         "Encoding": "latin1",
         "Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan",
         "Suggests": "raster, rgbif",
         "NeedsCompilation": "no",
         "Packaged": "2019-01-30 13:27:29 UTC; castor",
         "Repository": "CRAN",
         "Date/Publication": "2019-01-31 20:53:50 UTC",
     }


 @pytest.mark.parametrize(
     "method_name",
     ["build_extrinsic_snapshot_metadata", "build_extrinsic_origin_metadata",],
 )
 def test_cran_fail_to_build_or_load_extrinsic_metadata(
     method_name, swh_storage, requests_mock_datadir
 ):
     """problem during loading: {visit: partial, status: failed, snapshot present}

     """
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     full_method_name = f"swh.loader.package.cran.loader.CRANLoader.{method_name}"
     with patch(
         full_method_name,
         side_effect=ValueError("Fake to fail to build or load extrinsic metadata"),
     ):
         loader = CRANLoader(
             swh_storage,
             origin_url,
             artifacts=[{"url": artifact_url, "version": version}],
         )

         actual_load_status = loader.load()

         assert actual_load_status == {
             "status": "failed",
             "snapshot_id": SNAPSHOT.id.hex(),
         }

         visit_stats = get_stats(swh_storage)
         assert {
             "content": 33,
             "directory": 7,
             "origin": 1,
             "origin_visit": 1,
             "release": 0,
             "revision": 1,
             "skipped_content": 0,
             "snapshot": 1,
         } == visit_stats

         assert_last_visit_matches(
             swh_storage, origin_url, status="partial", type="cran", snapshot=SNAPSHOT.id
         )
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
index e8871a6..ef4e5d7 100644
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -1,887 +1,777 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import json
 import os
 from os import path
 from unittest.mock import patch

 import pytest

 from swh.core.pytest_plugin import requests_mock_datadir_factory
 from swh.core.tarball import uncompress
 from swh.loader.package import __version__
 from swh.loader.package.pypi.loader import (
     PyPILoader,
     PyPIPackageInfo,
     author,
     extract_intrinsic_metadata,
     pypi_api_url,
 )
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Person,
     RawExtrinsicMetadata,
     Snapshot,
     SnapshotBranch,
     TargetType,
 )
 from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
 from swh.storage.interface import PagedResult


 @pytest.fixture
 def _0805nexter_api_info(datadir) -> bytes:
     with open(
         os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb",
     ) as f:
         return f.read()


 def test_pypi_author_basic():
     data = {
         "author": "i-am-groot",
         "author_email": "iam@groot.org",
     }
     actual_author = author(data)

     expected_author = Person(
         fullname=b"i-am-groot <iam@groot.org>",
         name=b"i-am-groot",
         email=b"iam@groot.org",
     )

     assert actual_author == expected_author


 def test_pypi_author_empty_email():
     data = {
         "author": "i-am-groot",
         "author_email": "",
     }
     actual_author = author(data)

     expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",)

     assert actual_author == expected_author


 def test_pypi_author_empty_name():
     data = {
         "author": "",
         "author_email": "iam@groot.org",
     }
     actual_author = author(data)

     expected_author = Person(
         fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org",
     )

     assert actual_author == expected_author


 def test_pypi_author_malformed():
     data = {
         "author": "['pierre', 'paul', 'jacques']",
         "author_email": None,
     }

     actual_author = author(data)

     expected_author = Person(
         fullname=b"['pierre', 'paul', 'jacques']",
         name=b"['pierre', 'paul', 'jacques']",
         email=None,
     )

     assert actual_author == expected_author


 def test_pypi_author_malformed_2():
     data = {
         "author": "[marie, jeanne]",
         "author_email": "[marie@some, jeanne@thing]",
     }

     actual_author = author(data)

     expected_author = Person(
         fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>",
         name=b"[marie, jeanne]",
         email=b"[marie@some, jeanne@thing]",
     )

     assert actual_author == expected_author


 def test_pypi_author_malformed_3():
     data = {
         "author": "[marie, jeanne, pierre]",
         "author_email": "[marie@somewhere.org, jeanne@somewhere.org]",
     }

     actual_author = author(data)

     expected_author = Person(
         fullname=(
             b"[marie, jeanne, pierre] "
             b"<[marie@somewhere.org, jeanne@somewhere.org]>"
         ),
         name=b"[marie, jeanne, pierre]",
         email=b"[marie@somewhere.org, jeanne@somewhere.org]",
     )

     assert actual_author == expected_author


 # configuration error #


 def test_pypi_api_url():
     """Computing the pypi api url from the pypi project url should be ok"""
     url = pypi_api_url("https://pypi.org/project/requests")
     assert url == "https://pypi.org/pypi/requests/json"


 def test_pypi_api_url_with_slash():
     """Computing the pypi api url from the pypi project url should be ok"""
     url = pypi_api_url("https://pypi.org/project/requests/")
     assert url == "https://pypi.org/pypi/requests/json"


 @pytest.mark.fs
 def test_pypi_extract_intrinsic_metadata(tmp_path, datadir):
     """Parsing existing archive's PKG-INFO should yield results"""
     uncompressed_archive_path = str(tmp_path)
     archive_path = path.join(
         datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip"
     )
     uncompress(archive_path, dest=uncompressed_archive_path)

     actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
     expected_metadata = {
         "metadata_version": "1.0",
         "name": "0805nexter",
         "version": "1.1.0",
         "summary": "a simple printer of nested lest",
         "home_page": "http://www.hp.com",
         "author": "hgtkpython",
         "author_email": "2868989685@qq.com",
         "platforms": ["UNKNOWN"],
     }

     assert actual_metadata == expected_metadata


 @pytest.mark.fs
 def test_pypi_extract_intrinsic_metadata_failures(tmp_path):
     """Parsing inexistent path/archive/PKG-INFO yields an empty dict"""
     tmp_path = str(tmp_path)  # py3.5 work around (PosixPath issue)
     # inexistent first level path
     assert extract_intrinsic_metadata("/something-inexistent") == {}
     # inexistent second level path (as expected by pypi archives)
     assert extract_intrinsic_metadata(tmp_path) == {}
     # inexistent PKG-INFO within second level path
     existing_path_no_pkginfo = path.join(tmp_path, "something")
     os.mkdir(existing_path_no_pkginfo)
     assert extract_intrinsic_metadata(tmp_path) == {}


 # LOADER SCENARIO #

 # "edge" cases (for the same origin) #

 # no release artifact:
 # {visit full, status: uneventful, no contents, etc...}
 requests_mock_datadir_missing_all = requests_mock_datadir_factory(
     ignore_urls=[
         "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip",  # noqa
         "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip",  # noqa
     ]
 )


 def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all):
     """Load a pypi project with all artifacts missing ends up with an empty snapshot

     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "uneventful"
     assert actual_load_status["snapshot_id"] is not None

     empty_snapshot = Snapshot(branches={})

     assert_last_visit_matches(
         swh_storage, url, status="partial", type="pypi", snapshot=empty_snapshot.id
     )

     stats = get_stats(swh_storage)
     assert {
         "content": 0,
         "directory": 0,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats


 def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir):
     """problem during loading: {visit: failed, status: failed, no snapshot}

     """
     url = "https://pypi.org/project/0805nexter"
     with patch(
         "swh.loader.package.pypi.loader.PyPILoader._load_snapshot",
         side_effect=ValueError("Fake problem to fail visit"),
     ):
         loader = PyPILoader(swh_storage, url)
         actual_load_status = loader.load()
         assert actual_load_status == {"status": "failed"}

         assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")

         stats = get_stats(loader.storage)
         assert {
             "content": 6,
             "directory": 4,
             "origin": 1,
             "origin_visit": 1,
             "release": 0,
             "revision": 2,
             "skipped_content": 0,
             "snapshot": 0,
         } == stats


 # problem during loading:
 # {visit: failed, status: failed, no snapshot}
 def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir):
     url = "https://pypi.org/project/0805nexter"
     with patch(
         "swh.loader.package.pypi.loader.PyPILoader.last_snapshot",
         side_effect=ValueError("Fake problem to fail the visit"),
     ):
         loader = PyPILoader(swh_storage, url)
         actual_load_status = loader.load()
         assert actual_load_status == {"status": "failed"}

         assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")

         stats = get_stats(swh_storage)

         assert {
             "content": 0,
             "directory": 0,
             "origin": 1,
             "origin_visit": 1,
             "release": 0,
             "revision": 0,
             "skipped_content": 0,
             "snapshot": 0,
         } == stats


 # problem during loading: failure early enough in between swh contents...
 # some contents (contents, directories, etc...) have been written in storage
 # {visit: partial, status: eventful, no snapshot}

 # problem during loading: failure late enough we can have snapshots (some
 # revisions are written in storage already)
 # {visit: partial, status: eventful, snapshot}

 # "normal" cases (for the same origin) #

 requests_mock_datadir_missing_one = requests_mock_datadir_factory(
     ignore_urls=[
         "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip",  # noqa
     ]
 )

 # some missing release artifacts:
 # {visit partial, status: eventful, 1 snapshot}


 def test_pypi_revision_metadata_structure(
     swh_storage, requests_mock_datadir, _0805nexter_api_info
 ):
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None

     expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21")
     revision = swh_storage.revision_get([expected_revision_id])[0]
     assert revision is not None

     revision_swhid = CoreSWHID(
         object_type=ObjectType.REVISION, object_id=expected_revision_id
     )
     directory_swhid = ExtendedSWHID(
         object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
     )
     metadata_authority = MetadataAuthority(
         type=MetadataAuthorityType.FORGE, url="https://pypi.org/",
     )
     expected_metadata = [
         RawExtrinsicMetadata(
             target=directory_swhid,
             authority=metadata_authority,
             fetcher=MetadataFetcher(
                 name="swh.loader.package.pypi.loader.PyPILoader",
                 version=__version__,
             ),
             discovery_date=loader.visit_date,
             format="pypi-project-json",
             metadata=json.dumps(
                 json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0]
             ).encode(),
             origin=url,
             revision=revision_swhid,
         )
     ]
     assert swh_storage.raw_extrinsic_metadata_get(
         directory_swhid, metadata_authority,
     ) == PagedResult(next_page_token=None, results=expected_metadata,)


 def test_pypi_visit_with_missing_artifact(
     swh_storage, requests_mock_datadir_missing_one
 ):
     """Load a pypi project with some missing artifacts ends up with 1 snapshot

     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id,
     )

     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, storage=swh_storage)

-    expected_contents = map(
-        hash_to_bytes,
-        [
-            "405859113963cb7a797642b45f171d6360425d16",
-            "e5686aa568fdb1d19d7f1329267082fe40482d31",
-            "83ecf6ec1114fd260ca7a833a2d165e71258c338",
-        ],
-    )
-
-    assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
-
-    expected_dirs = map(
-        hash_to_bytes,
-        [
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4",
-            "c3a58f8b57433a4b56caaa5033ae2e0931405338",
-        ],
-    )
-
-    assert list(swh_storage.directory_missing(expected_dirs)) == []
-
-    # {revision hash: directory hash}
-    expected_revs = {
-        hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4"
-        ),  # noqa
-    }
-
-    assert list(swh_storage.revision_missing(expected_revs)) == []
-
     stats = get_stats(swh_storage)
     assert {
         "content": 3,
         "directory": 2,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 1,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats


 def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir):
     """With no prior visit, load a pypi project ends up with 1 snapshot

     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
     )

     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)

     stats = get_stats(swh_storage)
     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats

-    expected_contents = map(
-        hash_to_bytes,
-        [
-            "a61e24cdfdab3bb7817f6be85d37a3e666b34566",
-            "938c33483285fd8ad57f15497f538320df82aeb8",
-            "a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
-            "405859113963cb7a797642b45f171d6360425d16",
-            "e5686aa568fdb1d19d7f1329267082fe40482d31",
-            "83ecf6ec1114fd260ca7a833a2d165e71258c338",
-        ],
-    )
-
-    assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
-
-    expected_dirs = map(
-        hash_to_bytes,
-        [
-            "05219ba38bc542d4345d5638af1ed56c7d43ca7d",
-            "cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4",
-            "c3a58f8b57433a4b56caaa5033ae2e0931405338",
-        ],
-    )
-
-    assert list(swh_storage.directory_missing(expected_dirs)) == []
-
-    # {revision hash: directory hash}
-    expected_revs = {
-        hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
-            "05219ba38bc542d4345d5638af1ed56c7d43ca7d"
-        ),  # noqa
-        hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4"
-        ),  # noqa
-    }
-    assert list(swh_storage.revision_missing(expected_revs)) == []
-

 def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir):
     """Multiple visits with no changes result in the same snapshot

     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": snapshot_id.hex(),
     }
     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=snapshot_id
     )

     expected_snapshot = Snapshot(
         id=snapshot_id,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.2.0", target_type=TargetType.ALIAS,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)

     stats = get_stats(swh_storage)
     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats

     actual_load_status2 = loader.load()
     assert actual_load_status2 == {
         "status": "uneventful",
         "snapshot_id": snapshot_id.hex(),
     }

     visit_status2 = assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi"
     )

     stats2 = get_stats(swh_storage)
     expected_stats2 = stats.copy()
     expected_stats2["origin_visit"] = 1 + 1
     assert expected_stats2 == stats2

     # same snapshot
     assert visit_status2.snapshot == snapshot_id


 def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits):
     """With prior visit, 2nd load will result in a different snapshot

     """
     url = "https://pypi.org/project/0805nexter"
     loader = PyPILoader(swh_storage, url)

     visit1_actual_load_status = loader.load()
     visit1_stats = get_stats(swh_storage)
     expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
     assert visit1_actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
     )

     assert {
         "content": 6,
         "directory": 4,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 2,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit1_stats

     # Reset internal state
     del loader._cached__raw_info
     del loader._cached_info

     visit2_actual_load_status = loader.load()
     visit2_stats = get_stats(swh_storage)

     assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status
     expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283")
     assert visit2_actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id2.hex(),
     }

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2
     )

     expected_snapshot = Snapshot(
         id=expected_snapshot_id2,
         branches={
             b"releases/1.1.0": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.2.0": SnapshotBranch(
                 target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.3.0": SnapshotBranch(
                 target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
                 target_type=TargetType.REVISION,
             ),
             b"HEAD": SnapshotBranch(
                 target=b"releases/1.3.0", target_type=TargetType.ALIAS,
             ),
         },
     )

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
     )

     check_snapshot(expected_snapshot, swh_storage)

     assert {
         "content": 6 + 1,  # 1 more content
         "directory": 4 + 2,  # 2 more directories
         "origin": 1,
         "origin_visit": 1 + 1,
         "release": 0,
         "revision": 2 + 1,  # 1 more revision
         "skipped_content": 0,
         "snapshot": 1 + 1,  # 1 more snapshot
     } == visit2_stats

-    expected_contents = map(
-        hash_to_bytes,
-        [
-            "a61e24cdfdab3bb7817f6be85d37a3e666b34566",
-            "938c33483285fd8ad57f15497f538320df82aeb8",
-            "a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
-            "405859113963cb7a797642b45f171d6360425d16",
-            "e5686aa568fdb1d19d7f1329267082fe40482d31",
-            "83ecf6ec1114fd260ca7a833a2d165e71258c338",
-            "92689fa2b7fb4d4fc6fb195bf73a50c87c030639",
-        ],
-    )
-
-    assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
-
-    expected_dirs = map(
-        hash_to_bytes,
-        [
-            "05219ba38bc542d4345d5638af1ed56c7d43ca7d",
-            "cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4",
-            "c3a58f8b57433a4b56caaa5033ae2e0931405338",
-            "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a",
-            "52604d46843b898f5a43208045d09fcf8731631b",
-        ],
-    )
-
-    assert list(swh_storage.directory_missing(expected_dirs)) == []
-
-    # {revision hash: directory hash}
-    expected_revs = {
-        hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
-            "05219ba38bc542d4345d5638af1ed56c7d43ca7d"
-        ),  # noqa
-        hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
-            "b178b66bd22383d5f16f4f5c923d39ca798861b4"
-        ),  # noqa
-        hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes(
-            "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a"
-        ),  # noqa
-    }
-
-    assert list(swh_storage.revision_missing(expected_revs)) == []
-
     urls = [
         m.url
         for m in requests_mock_datadir_visits.request_history
         if m.url.startswith("https://files.pythonhosted.org")
     ]
     # visited each artifact once across 2 visits
     assert len(urls) == len(set(urls))


 # release artifact, no new artifact
 # {visit full, status uneventful, same snapshot as before}

 # release artifact, old artifact with different checksums
 # {visit full, status full, new snapshot with shared history and some new
 # different history}

 # release with multiple sdist artifacts per pypi "version"
 # snapshot branch output is different


 def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir):
     """With no prior visit, load a pypi project ends up with 1 snapshot

     """
     url = "https://pypi.org/project/nexter"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
     )

     expected_snapshot = Snapshot(
         id=expected_snapshot_id,
         branches={
             b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch(
                 target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                 target_type=TargetType.REVISION,
             ),
             b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch(
                 target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"),
                 target_type=TargetType.REVISION,
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)


 def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
     """Skip artifact with no intrinsic metadata during ingestion

     """
     url = "https://pypi.org/project/upymenu"
     loader = PyPILoader(swh_storage, url)

     actual_load_status = loader.load()
     expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e")
     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": expected_snapshot_id.hex(),
     }

     # no branch, as the only artifact lacks intrinsic metadata
     expected_snapshot = Snapshot(id=expected_snapshot_id, branches={})

     assert_last_visit_matches(
         swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
     )

     check_snapshot(expected_snapshot, swh_storage)


 def test_pypi_origin_not_found(swh_storage, requests_mock_datadir):
     url = "https://pypi.org/project/unknown"
     loader = PyPILoader(swh_storage, url)

     assert loader.load() == {"status": "failed"}

     assert_last_visit_matches(
         swh_storage, url, status="not_found", type="pypi", snapshot=None
     )


 def test_pypi_build_revision_missing_version_in_pkg_info(swh_storage, tmp_path):
     """Simulate revision build when Version field is missing in PKG-INFO file."""
     url = "https://pypi.org/project/GermlineFilter"
     # create package info
     p_info = PyPIPackageInfo(
         url=url,
filename="GermlineFilter-1.2.tar.gz", directory_extrinsic_metadata=[], raw_info={}, comment_text="", sha256="e4982353c544d94b34f02c5690ab3d3ebc93480d5b62fe6f3317f23c515acc05", upload_time="2015-02-18T20:39:13", ) # create PKG-INFO file with missing Version field package_path = tmp_path / "GermlineFilter-1.2" pkg_info_path = package_path / "PKG-INFO" package_path.mkdir() pkg_info_path.write_text( """Metadata-Version: 1.2 Name: germline_filter Home-page: Author: Cristian Caloian (OICR) Author-email: cristian.caloian@oicr.on.ca License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN""" ) directory = hash_to_bytes("8b864d66f356afe35033d58f8e03b7c23a66751f") # attempt to build revision loader = PyPILoader(swh_storage, url) revision = loader.build_revision(p_info, str(tmp_path), directory) # without comment_text and version in PKG-INFO, message should be empty assert revision.message == b"" def test_filter_out_invalid_sdists(swh_storage, requests_mock): project_name = "swh-test-sdist-filtering" version = "1.0.0" url = f"https://pypi.org/project/{project_name}" json_url = f"https://pypi.org/pypi/{project_name}/json" common_sdist_entries = { "url": "", "comment_text": "", "digests": {"sha256": ""}, "upload_time": "", "packagetype": "sdist", } requests_mock.get( json_url, json={ "releases": { version: [ { **common_sdist_entries, "filename": f"{project_name}-{version}.{ext}", } for ext in ("tar.gz", "deb", "egg", "rpm", "whl") ] }, }, ) loader = PyPILoader(swh_storage, url) packages = list(loader.get_package_info(version=version)) assert len(packages) == 1 assert packages[0][1].filename.endswith(".tar.gz")