diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py
index aeddcdf..bc148c2 100644
--- a/swh/loader/package/archive/tests/test_archive.py
+++ b/swh/loader/package/archive/tests/test_archive.py
@@ -1,465 +1,465 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import hashlib
 from io import BytesIO
 from pathlib import Path
 import string

 import attr
 import pytest
 from requests.exceptions import ContentDecodingError

 from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Snapshot, SnapshotBranch, TargetType

 URL = "https://ftp.gnu.org/gnu/8sync/"

 GNU_ARTIFACTS = [
     {
         "time": 944729610,
         "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
         "length": 221837,
         "filename": "8sync-0.1.0.tar.gz",
         "version": "0.1.0",
     },
     {
         "time": 1480991830,
         "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
         "length": 238466,
         "filename": "8sync-0.2.0.tar.gz",
         "version": "0.2.0",
     },
 ]

 _expected_new_contents_first_visit = [
     "e9258d81faf5881a2f96a77ba609396f82cb97ad",
     "1170cf105b04b7e2822a0e09d2acf71da7b9a130",
     "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac",
     "0057bec9b5422aff9256af240b177ac0e3ac2608",
     "2b8d0d0b43a1078fc708930c8ddc2956a86c566e",
     "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55",
     "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b",
     "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62",
     "edeb33282b2bffa0e608e9d2fd960fd08093c0ea",
     "d64e64d4c73679323f8d4cde2643331ba6c20af9",
     "7a756602914be889c0a2d3952c710144b3e64cb0",
     "84fb589b554fcb7f32b806951dcf19518d67b08f",
     "8624bcdae55baeef00cd11d5dfcfa60f68710a02",
     "e08441aeab02704cfbd435d6445f7c072f8f524e",
     "f67935bc3a83a67259cda4b2d43373bd56703844",
     "809788434b433eb2e3cfabd5d591c9a659d5e3d8",
     "7d7c6c8c5ebaeff879f61f37083a3854184f6c41",
     "b99fec102eb24bffd53ab61fc30d59e810f116a2",
     "7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68",
     "f0c97052e567948adf03e641301e9983c478ccff",
     "7fb724242e2b62b85ca64190c31dcae5303e19b3",
     "4f9709e64a9134fe8aefb36fd827b84d8b617ab5",
     "7350628ccf194c2c3afba4ac588c33e3f3ac778d",
     "0bb892d9391aa706dc2c3b1906567df43cbe06a2",
     "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c",
     "6b5cc594ac466351450f7f64a0b79fdaf4435ad3",
     "3046e5d1f70297e2a507b98224b6222c9688d610",
     "1572607d456d7f633bc6065a2b3048496d679a31",
 ]

 _expected_new_directories_first_visit = [
     "daabc65ec75d487b1335ffc101c0ac11c803f8fc",
     "263be23b4a8101d3ad0d9831319a3e0f2b065f36",
     "7f6e63ba6eb3e2236f65892cd822041f1a01dd5c",
     "4db0a3ecbc976083e2dac01a62f93729698429a3",
     "dfef1c80e1098dd5deda664bb44a9ab1f738af13",
     "eca971d346ea54d95a6e19d5051f900237fafdaa",
     "3aebc29ed1fccc4a6f2f2010fb8e57882406b528",
 ]

-_expected_new_revisions_first_visit = {
-    "44183488c0774ce3c957fa19ba695cf18a4a42b3": (
+_expected_new_releases_first_visit = {
+    "c9786c1e3b46f52779c727d3509d66ebf8948d88": (
         "3aebc29ed1fccc4a6f2f2010fb8e57882406b528"
     )
 }


 def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir):
     url = URL
     unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz"
     loader = ArchiveLoader(
         swh_storage,
         url,
         artifacts=[
             {
                 "time": 944729610,
                 "url": unknown_artifact_url,  # unknown artifact
                 "length": 221837,
                 "filename": "8sync-0.1.0.tar.gz",
                 "version": "0.1.0",
             }
         ],
     )

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "uneventful"
     assert actual_load_status["snapshot_id"] is not None

     stats = get_stats(swh_storage)

     assert {
         "content": 0,
         "directory": 0,
         "origin": 1,
         "origin_visit": 1,
         "release": 0,
         "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats

     assert_last_visit_matches(swh_storage, url, status="partial", type="tar")


 def test_archive_visit_with_release_artifact_no_prior_visit(
     swh_storage, requests_mock_datadir
 ):
     """With no prior visit, loading a gnu project ends up with 1 snapshot

     """
     loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS[:1])

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"

     expected_snapshot_first_visit_id = hash_to_bytes(
-        "c419397fd912039825ebdbea378bc6283f006bf5"
+        "cdf8f335fa0c81c8ad089870ec14f52b1980eb6c"
     )

     assert (
         hash_to_bytes(actual_load_status["snapshot_id"])
         == expected_snapshot_first_visit_id
     )

     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")

     stats = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
-        "release": 0,
-        "revision": len(_expected_new_revisions_first_visit),
+        "release": len(_expected_new_releases_first_visit),
+        "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats

     expected_snapshot = Snapshot(
         id=expected_snapshot_first_visit_id,
         branches={
             b"HEAD": SnapshotBranch(
                 target_type=TargetType.ALIAS, target=b"releases/0.1.0",
             ),
             b"releases/0.1.0": SnapshotBranch(
-                target_type=TargetType.REVISION,
-                target=hash_to_bytes(list(_expected_new_revisions_first_visit)[0]),
+                target_type=TargetType.RELEASE,
+                target=hash_to_bytes(list(_expected_new_releases_first_visit)[0]),
             ),
         },
     )
     check_snapshot(expected_snapshot, swh_storage)

     expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
     assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []

     expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
     assert list(swh_storage.directory_missing(expected_dirs)) == []

-    expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit)
-    assert list(swh_storage.revision_missing(expected_revs)) == []
+    expected_rels = map(hash_to_bytes, _expected_new_releases_first_visit)
+    assert list(swh_storage.release_missing(expected_rels)) == []


 def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir):
     """With no prior visit, loading a gnu project ends up with 1 snapshot

     """
     url = URL
     loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS[:1])

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "uneventful"
     assert actual_load_status2["snapshot_id"] is not None
     assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]

     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1


 def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir):
     """With no prior visit, loading a gnu project ends up with 1 snapshot

     """
     url = URL
     artifact1 = GNU_ARTIFACTS[0]
     loader = ArchiveLoader(swh_storage, url, [artifact1])

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     stats = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit),
         "directory": len(_expected_new_directories_first_visit),
         "origin": 1,
         "origin_visit": 1,
-        "release": 0,
-        "revision": len(_expected_new_revisions_first_visit),
+        "release": len(_expected_new_releases_first_visit),
+        "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == stats

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1

     artifact2 = GNU_ARTIFACTS[1]

     loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2])
     stats2 = get_stats(swh_storage)
     assert stats == stats2  # ensure we share the storage

     actual_load_status2 = loader2.load()
     assert actual_load_status2["status"] == "eventful"
     assert actual_load_status2["snapshot_id"] is not None

     stats2 = get_stats(swh_storage)
     assert {
         "content": len(_expected_new_contents_first_visit) + 14,
         "directory": len(_expected_new_directories_first_visit) + 8,
         "origin": 1,
         "origin_visit": 1 + 1,
-        "release": 0,
-        "revision": len(_expected_new_revisions_first_visit) + 1,
+        "release": len(_expected_new_releases_first_visit) + 1,
+        "revision": 0,
         "skipped_content": 0,
         "snapshot": 1 + 1,
     } == stats2

     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     # 1 artifact (2nd time no modification) + 1 new artifact
     assert len(urls) == 2


 def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir):
     """Loading a project archive (not gnu) ends up with 1 snapshot

     """
     url = "https://something.else.org/8sync/"
     artifacts = [  # this is not a gnu artifact
         {
             "time": "1999-12-09T09:53:30+00:00",  # it's also not a timestamp
             "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4",  # noqa
             # keep a gnu artifact reference to avoid adding other test files
             "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
             "length": 238466,
             "filename": "8sync-0.2.0.tar.gz",
             "version": "0.2.0",
         }
     ]

     # Here the loader defines the id_keys to use for existence in the snapshot
     # (not the default archive loader behaviour, which builds the manifest
     # from the artifact's time, length, version and url)
     loader = ArchiveLoader(
         swh_storage,
         url,
         artifacts=artifacts,
         extid_manifest_format="$sha256 $length $url",
     )

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     actual_load_status2 = loader.load()
     assert actual_load_status2["status"] == "uneventful"
     assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"]
     assert_last_visit_matches(swh_storage, url, status="full", type="tar")

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith("https://ftp.gnu.org")
     ]
     assert len(urls) == 1


 def test_archive_extid():
     """Computing the primary key should return the right identity

     """

     @attr.s
     class TestPackageInfo(ArchivePackageInfo):
         a = attr.ib()
         b = attr.ib()

     metadata = GNU_ARTIFACTS[0]

     p_info = TestPackageInfo(
         raw_info={**metadata, "a": 1, "b": 2}, a=1, b=2, **metadata,
     )

     for manifest_format, expected_manifest in [
         (string.Template("$a $b"), b"1 2"),
         (string.Template(""), b""),
         (None, "{time} {length} {version} {url}".format(**metadata).encode()),
     ]:
         actual_id = p_info.extid(manifest_format=manifest_format)
         assert actual_id == (
             "package-manifest-sha256",
             hashlib.sha256(expected_manifest).digest(),
         )

     with pytest.raises(KeyError):
         p_info.extid(manifest_format=string.Template("$a $unknown_key"))


 def test_archive_snapshot_append(swh_storage, requests_mock_datadir):
     # first loading with a first artifact
     artifact1 = GNU_ARTIFACTS[0]
     loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")

     # check expected snapshot
     snapshot = loader.last_snapshot()
     assert len(snapshot.branches) == 2
     branch_artifact1_name = f"releases/{artifact1['version']}".encode()
     assert b"HEAD" in snapshot.branches
     assert branch_artifact1_name in snapshot.branches
     assert snapshot.branches[b"HEAD"].target == branch_artifact1_name

     # second loading with a second artifact
     artifact2 = GNU_ARTIFACTS[1]
     loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")

     # check expected snapshot, should contain a new branch and the
     # branch for the first artifact
     snapshot = loader.last_snapshot()
     assert len(snapshot.branches) == 3
     branch_artifact2_name = f"releases/{artifact2['version']}".encode()
     assert b"HEAD" in snapshot.branches
     assert branch_artifact2_name in snapshot.branches
     assert branch_artifact1_name in snapshot.branches
     assert snapshot.branches[b"HEAD"].target == branch_artifact2_name


 def test_archive_snapshot_append_branch_override(swh_storage, requests_mock_datadir):
     # first loading for a first artifact
     artifact1 = GNU_ARTIFACTS[0]
     loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")

     # check expected snapshot
     snapshot = loader.last_snapshot()
     assert len(snapshot.branches) == 2
     branch_artifact1_name = f"releases/{artifact1['version']}".encode()
     assert branch_artifact1_name in snapshot.branches
     branch_target_first_visit = snapshot.branches[branch_artifact1_name].target

     # second loading for a second artifact with same version as the first one
     # but with different tarball content
     artifact2 = dict(GNU_ARTIFACTS[0])
     artifact2["url"] = GNU_ARTIFACTS[1]["url"]
     artifact2["time"] = GNU_ARTIFACTS[1]["time"]
     artifact2["length"] = GNU_ARTIFACTS[1]["length"]
     loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None
     assert_last_visit_matches(swh_storage, URL, status="full", type="tar")

     # check expected snapshot, should contain the same branch as previously
     # but with different target
     snapshot = loader.last_snapshot()
     assert len(snapshot.branches) == 2
     assert branch_artifact1_name in snapshot.branches
     branch_target_second_visit = snapshot.branches[branch_artifact1_name].target

     assert branch_target_first_visit != branch_target_second_visit


 @pytest.fixture
 def not_gzipped_tarball_bytes(datadir):
     return Path(datadir, "not_gzipped_tarball.tar.gz").read_bytes()


 def test_archive_not_gzipped_tarball(
     swh_storage, requests_mock, not_gzipped_tarball_bytes
 ):
     """Check that a tarball erroneously marked as gzip compressed can still
     be downloaded and processed.

     """
     filename = "not_gzipped_tarball.tar.gz"
     url = f"https://example.org/ftp/{filename}"
     requests_mock.get(
         url,
         [
             {"exc": ContentDecodingError,},
             {"body": BytesIO(not_gzipped_tarball_bytes),},
         ],
     )
     loader = ArchiveLoader(
         swh_storage,
         url,
         artifacts=[
             {
                 "time": 944729610,
                 "url": url,
                 "length": 221837,
                 "filename": filename,
                 "version": "0.1.0",
             }
         ],
     )

     actual_load_status = loader.load()
     assert actual_load_status["status"] == "eventful"
     assert actual_load_status["snapshot_id"] is not None

     snapshot = loader.last_snapshot()
     assert len(snapshot.branches) == 2
     assert b"releases/0.1.0" in snapshot.branches
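Editor's note: throughout this patch, `releases/<version>` branches switch from targeting synthetic revisions to targeting synthetic releases whose `target` is the root directory of the unpacked tarball. A minimal sketch of that new shape with `swh.model` (the `ObjectType` import is the same one this patch uses in `test_deposit.py`); the `name`, `message`, `author` and `date` values are illustrative assumptions, not the archive loader's exact ones, so the computed id is not expected to equal the `c9786c1e…` value above:

```python
from swh.model.hashutil import hash_to_bytes
from swh.model.model import ObjectType, Release

# A release targeting the 8sync-0.1.0 root directory listed in
# _expected_new_releases_first_visit; field values are illustrative only.
release = Release(
    name=b"0.1.0",
    message=b"Synthetic release for GNU source package 8sync version 0.1.0\n",
    target=hash_to_bytes("3aebc29ed1fccc4a6f2f2010fb8e57882406b528"),
    target_type=ObjectType.DIRECTORY,
    synthetic=True,
    author=None,
    date=None,
)
# swh.model computes the intrinsic identifier at construction time:
print(release.id.hex())
```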
diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py
index f716a65..fdf0bac 100644
--- a/swh/loader/package/cran/tests/test_cran.py
+++ b/swh/loader/package/cran/tests/test_cran.py
@@ -1,369 +1,369 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 from datetime import datetime, timezone
 import os
 from os import path
 from unittest.mock import patch

 from dateutil.tz import tzlocal
 import pytest

 from swh.core.tarball import uncompress
 from swh.loader.package.cran.loader import (
     CRANLoader,
     extract_intrinsic_metadata,
     parse_date,
     parse_debian_control,
 )
 from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone

 SNAPSHOT = Snapshot(
-    id=hash_to_bytes("920adcccc78aaeedd3cfa4459dd900d8c3431a21"),
+    id=hash_to_bytes("56ed00938d83892bd5b42f2f368ae38a1dbfa718"),
     branches={
         b"HEAD": SnapshotBranch(
             target=b"releases/2.22-6", target_type=TargetType.ALIAS
         ),
         b"releases/2.22-6": SnapshotBranch(
-            target=hash_to_bytes("42bdb16facd5140424359c8ce89a28ecfa1ce603"),
-            target_type=TargetType.REVISION,
+            target=hash_to_bytes("42993a72eac50a4a83523c9327a52be3593755a8"),
+            target_type=TargetType.RELEASE,
         ),
     },
 )


 def test_cran_parse_date():
     data = [
         # parsable, some have debatable results though
         ("2001-June-08", datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)),
         (
             "Tue Dec 27 15:06:08 PST 2011",
             datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc),
         ),
         ("8-14-2013", datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)),
         ("2011-01", datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)),
         ("201109", datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)),
         ("04-12-2014", datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)),
         (
             "2018-08-24, 10:40:10",
             datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc),
         ),
         ("2013-October-16", datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)),
         ("Aug 23, 2013", datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)),
         ("27-11-2014", datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)),
         ("2019-09-26,", datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)),
         ("9/25/2014", datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)),
         (
             "Fri Jun 27 17:23:53 2014",
             datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc),
         ),
         ("28-04-2014", datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)),
         ("04-14-2014", datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)),
         (
             "2019-05-08 14:17:31 UTC",
             datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc),
         ),
         (
             "Wed May 21 13:50:39 CEST 2014",
             datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal()),
         ),
         (
             "2018-04-10 00:01:04 KST",
             datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc),
         ),
         ("2019-08-25 10:45", datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)),
         ("March 9, 2015", datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)),
         ("Aug. 18, 2012", datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)),
         ("2014-Dec-17", datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)),
         ("March 01, 2013", datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)),
         ("2017-04-08.", datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)),
         ("2014-Apr-22", datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)),
         (
             "Mon Jan 12 19:54:04 2015",
             datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc),
         ),
         ("May 22, 2014", datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)),
         (
             "2014-08-12 09:55:10 EDT",
             datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc),
         ),
         # unparsable
         ("Fabruary 21, 2012", None),
         ('2019-05-28"', None),
         ("2017-03-01 today", None),
         ("2016-11-0110.1093/icesjms/fsw182", None),
         ("2019-07-010", None),
         ("2015-02.23", None),
         ("20013-12-30", None),
         ("2016-08-017", None),
         ("2019-02-07l", None),
         ("2018-05-010", None),
         ("2019-09-27 KST", None),
         ("$Date$", None),
         ("2019-09-27 KST", None),
         ("2019-06-22 $Date$", None),
         ("$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $", None),
         ("2015-7-013", None),
         ("2018-05-023", None),
         ("Check NEWS file for changes: news(package='simSummary')", None),
     ]
     for date, expected_date in data:
         actual_tstz = parse_date(date)
         if expected_date is None:
             assert actual_tstz is None, date
         else:
             expected_tstz = TimestampWithTimezone.from_datetime(expected_date)
             assert actual_tstz == expected_tstz, date


 @pytest.mark.fs
 def test_cran_extract_intrinsic_metadata(tmp_path, datadir):
     """Parsing an existing archive's PKG-INFO should yield results"""
     uncompressed_archive_path = str(tmp_path)
     # sample url
     # https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz  # noqa
     archive_path = path.join(
         datadir,
         "https_cran.r-project.org",
         "src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz",
     )
     uncompress(archive_path, dest=uncompressed_archive_path)

     actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)

     expected_metadata = {
         "Package": "KernSmooth",
         "Priority": "recommended",
         "Version": "2.22-6",
         "Date": "2001-June-08",
         "Title": "Functions for kernel smoothing for Wand & Jones (1995)",
         "Author": "S original by Matt Wand.\n\tR port by Brian Ripley <ripley@stats.ox.ac.uk>.",  # noqa
         "Maintainer": "Brian Ripley <ripley@stats.ox.ac.uk>",
         "Description": 'functions for kernel smoothing (and density estimation)\n  corresponding to the book: \n  Wand, M.P. and Jones, M.C. (1995) "Kernel Smoothing".',  # noqa
         "License": "Unlimited use and distribution (see LICENCE).",
         "URL": "http://www.biostat.harvard.edu/~mwand",
     }

     assert actual_metadata == expected_metadata


 @pytest.mark.fs
 def test_cran_extract_intrinsic_metadata_failures(tmp_path):
     """Parsing a nonexistent path/archive/PKG-INFO yields empty metadata"""
     # nonexistent first level path
     assert extract_intrinsic_metadata("/something-inexistent") == {}
     # nonexistent second level path (as expected by pypi archives)
     assert extract_intrinsic_metadata(tmp_path) == {}
     # nonexistent PKG-INFO within second level path
     existing_path_no_pkginfo = str(tmp_path / "something")
     os.mkdir(existing_path_no_pkginfo)
     assert extract_intrinsic_metadata(tmp_path) == {}


 def test_cran_one_visit(swh_storage, requests_mock_datadir):
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     loader = CRANLoader(
         swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version,}]
     )

     actual_load_status = loader.load()

     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": SNAPSHOT.id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
     )

     check_snapshot(SNAPSHOT, swh_storage)

     visit_stats = get_stats(swh_storage)
     assert {
         "content": 33,
         "directory": 7,
         "origin": 1,
         "origin_visit": 1,
-        "release": 0,
-        "revision": 1,
+        "release": 1,
+        "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit_stats

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith(base_url)
     ]
     # visited the artifact once
     assert len(urls) == 1


 def test_cran_2_visits_same_origin(swh_storage, requests_mock_datadir):
     """Multiple visits on the same origin, only 1 archive fetch"""
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     loader = CRANLoader(
         swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version}]
     )

     # first visit
     actual_load_status = loader.load()

     assert actual_load_status == {
         "status": "eventful",
         "snapshot_id": SNAPSHOT.id.hex(),
     }

     check_snapshot(SNAPSHOT, swh_storage)

     assert_last_visit_matches(
         swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
     )

     visit_stats = get_stats(swh_storage)
     assert {
         "content": 33,
         "directory": 7,
         "origin": 1,
         "origin_visit": 1,
-        "release": 0,
-        "revision": 1,
+        "release": 1,
+        "revision": 0,
         "skipped_content": 0,
         "snapshot": 1,
     } == visit_stats

     # second visit
     actual_load_status2 = loader.load()

     assert actual_load_status2 == {
         "status": "uneventful",
         "snapshot_id": SNAPSHOT.id.hex(),
     }

     assert_last_visit_matches(
         swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id,
     )

     visit_stats2 = get_stats(swh_storage)
     visit_stats["origin_visit"] += 1
     assert visit_stats2 == visit_stats, "same stats as 1st visit, +1 visit"

     urls = [
         m.url
         for m in requests_mock_datadir.request_history
         if m.url.startswith(base_url)
     ]
     assert len(urls) == 1, "visited one time artifact url (across 2 visits)"


 def test_cran_parse_debian_control(datadir):
     description_file = os.path.join(datadir, "description", "acepack")

     actual_metadata = parse_debian_control(description_file)

     assert actual_metadata == {
         "Package": "acepack",
         "Maintainer": "Shawn Garbett",
         "Version": "1.4.1",
         "Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...",
         "Description": "Two nonparametric methods for multiple regression...",
         "Title": "ACE & AVAS 4 Selecting Multiple Regression Transformations",
         "License": "MIT + file LICENSE",
         "Suggests": "testthat",
         "Packaged": "2016-10-28 15:38:59 UTC; garbetsp",
         "Repository": "CRAN",
         "Date/Publication": "2016-10-29 00:11:52",
         "NeedsCompilation": "yes",
     }


 def test_cran_parse_debian_control_unicode_issue(datadir):
     # iso-8859-1 caused failure, now fixed
     description_file = os.path.join(datadir, "description", "KnownBR")

     actual_metadata = parse_debian_control(description_file)

     assert actual_metadata == {
         "Package": "KnowBR",
         "Version": "2.0",
         "Title": """Discriminating Well Surveyed Spatial Units from Exhaustive
        Biodiversity Databases""",
         "Author": "Cástor Guisande González and Jorge M. Lobo",
         "Maintainer": "Cástor Guisande González",
         "Description": "It uses species accumulation curves and diverse estimators...",
         "License": "GPL (>= 2)",
         "Encoding": "latin1",
         "Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan",
         "Suggests": "raster, rgbif",
         "NeedsCompilation": "no",
         "Packaged": "2019-01-30 13:27:29 UTC; castor",
         "Repository": "CRAN",
         "Date/Publication": "2019-01-31 20:53:50 UTC",
     }


 @pytest.mark.parametrize(
     "method_name",
     ["build_extrinsic_snapshot_metadata", "build_extrinsic_origin_metadata",],
 )
 def test_cran_fail_to_build_or_load_extrinsic_metadata(
     method_name, swh_storage, requests_mock_datadir
 ):
     """problem during loading: {visit: failed, status: failed, no snapshot}

     """
     version = "2.22-6"
     base_url = "https://cran.r-project.org"
     origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
     artifact_url = (
         f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz"  # noqa
     )
     full_method_name = f"swh.loader.package.cran.loader.CRANLoader.{method_name}"
     with patch(
         full_method_name,
         side_effect=ValueError("Fake to fail to build or load extrinsic metadata"),
     ):
         loader = CRANLoader(
             swh_storage,
             origin_url,
             artifacts=[{"url": artifact_url, "version": version}],
         )

         actual_load_status = loader.load()

         assert actual_load_status == {
             "status": "failed",
             "snapshot_id": SNAPSHOT.id.hex(),
         }

         visit_stats = get_stats(swh_storage)
         assert {
             "content": 33,
             "directory": 7,
             "origin": 1,
             "origin_visit": 1,
-            "release": 0,
-            "revision": 1,
+            "release": 1,
+            "revision": 0,
             "skipped_content": 0,
             "snapshot": 1,
         } == visit_stats

         assert_last_visit_matches(
             swh_storage, origin_url, status="partial", type="cran", snapshot=SNAPSHOT.id
         )
"1.4.1", "Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...", "Description": "Two nonparametric methods for multiple regression...", "Title": "ACE & AVAS 4 Selecting Multiple Regression Transformations", "License": "MIT + file LICENSE", "Suggests": "testthat", "Packaged": "2016-10-28 15:38:59 UTC; garbetsp", "Repository": "CRAN", "Date/Publication": "2016-10-29 00:11:52", "NeedsCompilation": "yes", } def test_cran_parse_debian_control_unicode_issue(datadir): # iso-8859-1 caused failure, now fixed description_file = os.path.join(datadir, "description", "KnownBR") actual_metadata = parse_debian_control(description_file) assert actual_metadata == { "Package": "KnowBR", "Version": "2.0", "Title": """Discriminating Well Surveyed Spatial Units from Exhaustive Biodiversity Databases""", "Author": "Cástor Guisande González and Jorge M. Lobo", "Maintainer": "Cástor Guisande González ", "Description": "It uses species accumulation curves and diverse estimators...", "License": "GPL (>= 2)", "Encoding": "latin1", "Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan", "Suggests": "raster, rgbif", "NeedsCompilation": "no", "Packaged": "2019-01-30 13:27:29 UTC; castor", "Repository": "CRAN", "Date/Publication": "2019-01-31 20:53:50 UTC", } @pytest.mark.parametrize( "method_name", ["build_extrinsic_snapshot_metadata", "build_extrinsic_origin_metadata",], ) def test_cran_fail_to_build_or_load_extrinsic_metadata( method_name, swh_storage, requests_mock_datadir ): """problem during loading: {visit: failed, status: failed, no snapshot} """ version = "2.22-6" base_url = "https://cran.r-project.org" origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" artifact_url = ( f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa ) full_method_name = f"swh.loader.package.cran.loader.CRANLoader.{method_name}" with patch( full_method_name, side_effect=ValueError("Fake to fail to build or load extrinsic metadata"), ): loader = CRANLoader( swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version}], ) actual_load_status = loader.load() assert actual_load_status == { "status": "failed", "snapshot_id": SNAPSHOT.id.hex(), } visit_stats = get_stats(swh_storage) assert { "content": 33, "directory": 7, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == visit_stats assert_last_visit_matches( swh_storage, origin_url, status="partial", type="cran", snapshot=SNAPSHOT.id ) diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index 21a79e4..16a1acf 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,449 +1,449 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from copy import deepcopy import logging from os import path import pytest from swh.loader.package.debian.loader import ( DebianLoader, DebianPackageChangelog, DebianPackageInfo, IntrinsicPackageMetadata, download_package, dsc_information, extract_package, get_intrinsic_package_metadata, prepare_person, uid_to_person, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import Person, Snapshot, 
SnapshotBranch, TargetType logger = logging.getLogger(__name__) URL = "deb://Debian/packages/cicero" PACKAGE_FILES = { "name": "cicero", "version": "0.7.2-3", "files": { "cicero_0.7.2-3.diff.gz": { "md5sum": "a93661b6a48db48d59ba7d26796fc9ce", "name": "cicero_0.7.2-3.diff.gz", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa "size": 3964, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa }, "cicero_0.7.2-3.dsc": { "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a", "name": "cicero_0.7.2-3.dsc", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa "size": 1864, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_FILES2 = { "name": "cicero", "version": "0.7.2-4", "files": { "cicero_0.7.2-4.diff.gz": { "md5sum": "1e7e6fc4a59d57c98082a3af78145734", "name": "cicero_0.7.2-4.diff.gz", "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa "size": 4038, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", # noqa }, "cicero_0.7.2-4.dsc": { "md5sum": "1a6c8855a73b4282bb31d15518f18cde", "name": "cicero_0.7.2-4.dsc", "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa "size": 1881, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, } PACKAGES_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, "buster/contrib/0.7.2-4": PACKAGE_FILES2, } def test_debian_first_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() - expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" + expected_snapshot_id = "8bc5d12e2443ab216fdd2f969b25b39e96c20fef" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( - target_type=TargetType.REVISION, - target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), + target_type=TargetType.RELEASE, + target=hash_to_bytes("5a99736512d381700c5f54d7fdd6b46e136535a2"), ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, # all artifacts under 1 revision + "release": 1, # all artifacts under 1 release + "revision": 
0, "skipped_content": 0, "snapshot": 1, } == stats def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() - expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" + expected_snapshot_id = "8bc5d12e2443ab216fdd2f969b25b39e96c20fef" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( - target_type=TargetType.REVISION, - target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), + target_type=TargetType.RELEASE, + target=hash_to_bytes("5a99736512d381700c5f54d7fdd6b46e136535a2"), ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, # all artifacts under 1 revision + "release": 1, # all artifacts under 1 release + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats # No change in between load actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) stats2 = get_stats(swh_storage) assert { "content": 42 + 0, "directory": 2 + 0, "origin": 1, "origin_visit": 1 + 1, # a new visit occurred - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, # same snapshot across 2 visits } == stats2 urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("http://deb.debian.org") ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) def test_debian_uid_to_person(): uid = "Someone Name " actual_person = uid_to_person(uid) assert actual_person == { "name": "Someone Name", "email": "someone@orga.org", "fullname": uid, } def test_debian_prepare_person(): actual_author = prepare_person( { "name": "Someone Name", "email": "someone@orga.org", "fullname": "Someone Name ", } ) assert actual_author == Person( name=b"Someone Name", email=b"someone@orga.org", fullname=b"Someone Name ", ) def test_debian_download_package(datadir, tmpdir, requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) all_hashes = download_package(p_info, tmpdir) assert all_hashes == { "cicero_0.7.2-3.diff.gz": { "checksums": { "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa }, "filename": "cicero_0.7.2-3.diff.gz", "length": 3964, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2-3.diff.gz" ), }, "cicero_0.7.2-3.dsc": { "checksums": { "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa }, "filename": "cicero_0.7.2-3.dsc", "length": 1864, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc" ), }, "cicero_0.7.2.orig.tar.gz": 
{ "checksums": { "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa }, "filename": "cicero_0.7.2.orig.tar.gz", "length": 96527, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2.orig.tar.gz" ), }, } def test_debian_dsc_information_ok(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url == PACKAGE_FILES["files"][fname]["uri"] assert dsc_name == PACKAGE_FILES["files"][fname]["name"] def test_debian_dsc_information_not_found(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) p_info.files.pop(fname) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url is None assert dsc_name is None def test_debian_dsc_information_missing_md5sum(): package_files = deepcopy(PACKAGE_FILES) for package_metadata in package_files["files"].values(): del package_metadata["md5sum"] p_info = DebianPackageInfo.from_metadata(package_files, url=URL) for debian_file_metadata in p_info.files.values(): assert not debian_file_metadata.md5sum def test_debian_dsc_information_too_many_dsc_entries(): # craft an extra dsc file fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) data = p_info.files[fname] fname2 = fname.replace("cicero", "ciceroo") p_info.files[fname2] = data with pytest.raises( ValueError, match="Package %s_%s references several dsc" % (PACKAGE_FILES["name"], PACKAGE_FILES["version"]), ): dsc_information(p_info) def test_debian_get_intrinsic_package_metadata( requests_mock_datadir, datadir, tmp_path ): tmp_path = str(tmp_path) # py3.5 compat. p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) logger.debug("p_info: %s", p_info) # download the packages all_hashes = download_package(p_info, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(p_info) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) actual_package_info = get_intrinsic_package_metadata( p_info, dsc_path, extracted_path ) logger.debug("actual_package_info: %s", actual_package_info) assert actual_package_info == IntrinsicPackageMetadata( changelog=DebianPackageChangelog( date="2014-10-19T16:52:35+02:00", history=[ ("cicero", "0.7.2-2"), ("cicero", "0.7.2-1"), ("cicero", "0.7-1"), ], person={ "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ), maintainers=[ { "email": "debian-accessibility@lists.debian.org", "fullname": "Debian Accessibility Team " "", "name": "Debian Accessibility Team", }, { "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ], name="cicero", version="0.7.2-3", ) def test_debian_multiple_packages(swh_storage, requests_mock_datadir): loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGES_PER_VERSION, ) actual_load_status = loader.load() - expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604" + expected_snapshot_id = "3d26243c91eb084c350627a5a102cfe039c5b92a" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", 
diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py
index 3e63d05..551addb 100644
--- a/swh/loader/package/deposit/loader.py
+++ b/swh/loader/package/deposit/loader.py
@@ -1,383 +1,383 @@
 # Copyright (C) 2019-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import datetime
 from datetime import timezone
 import json
 import logging
 from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union

 import attr
 import requests

 from swh.core.config import load_from_envvar
 from swh.loader.core.loader import DEFAULT_CONFIG
 from swh.loader.package.loader import (
     BasePackageInfo,
     PackageLoader,
     RawExtrinsicMetadataCore,
 )
 from swh.loader.package.utils import cached_method, download
 from swh.model.hashutil import hash_to_bytes, hash_to_hex
 from swh.model.model import (
     MetadataAuthority,
     MetadataAuthorityType,
     MetadataFetcher,
     Person,
     Revision,
     RevisionType,
     Sha1Git,
     TimestampWithTimezone,
 )
 from swh.storage.algos.snapshot import snapshot_get_all_branches
 from swh.storage.interface import StorageInterface

 logger = logging.getLogger(__name__)


 def now() -> datetime.datetime:
     return datetime.datetime.now(tz=timezone.utc)


 @attr.s
 class DepositPackageInfo(BasePackageInfo):
     filename = attr.ib(type=str)  # instead of Optional[str]
     raw_info = attr.ib(type=Dict[str, Any])

     author_date = attr.ib(type=datetime.datetime)
     """codemeta:dateCreated if any, deposit completed_date otherwise"""
     commit_date = attr.ib(type=datetime.datetime)
     """codemeta:datePublished if any, deposit completed_date otherwise"""
     client = attr.ib(type=str)
     id = attr.ib(type=int)
     """Internal ID of the deposit in the deposit DB"""
     collection = attr.ib(type=str)
     """The collection in the deposit; see SWORD specification."""
     author = attr.ib(type=Person)
     committer = attr.ib(type=Person)

     @classmethod
     def from_metadata(
         cls, metadata: Dict[str, Any], url: str, filename: str
     ) -> "DepositPackageInfo":
         # Note:
         # `date` and `committer_date` are always transmitted by the deposit read api
         # which computes the values itself. The loader needs to use those to create the
-        # revision.
+        # release.

         all_metadata_raw: List[str] = metadata["metadata_raw"]
         raw_info = {
             "origin": metadata["origin"],
             "origin_metadata": {
                 "metadata": metadata["metadata_dict"],
                 "provider": metadata["provider"],
                 "tool": metadata["tool"],
             },
         }
         depo = metadata["deposit"]
         return cls(
             url=url,
             filename=filename,
             author_date=depo["author_date"],
             commit_date=depo["committer_date"],
             client=depo["client"],
             id=depo["id"],
             collection=depo["collection"],
             author=parse_author(depo["author"]),
             committer=parse_author(depo["committer"]),
             raw_info=raw_info,
             directory_extrinsic_metadata=[
                 RawExtrinsicMetadataCore(
                     discovery_date=now(),
                     metadata=raw_metadata.encode(),
                     format="sword-v2-atom-codemeta-v2",
                 )
                 for raw_metadata in all_metadata_raw
             ],
         )

     def extid(self) -> None:
         # For now, we don't try to deduplicate deposits. There is little point anyway,
         # as it only happens when the exact same tarball was deposited twice.
         return None


 class DepositLoader(PackageLoader[DepositPackageInfo]):
     """Load a deposited artifact into swh archive.

     """

     visit_type = "deposit"

     def __init__(
         self,
         storage: StorageInterface,
         url: str,
         deposit_id: str,
         deposit_client: "ApiClient",
         max_content_size: Optional[int] = None,
         default_filename: str = "archive.tar",
     ):
         """Constructor

         Args:
             url: Origin url to associate the artifacts/metadata to
             deposit_id: Deposit identifier
             deposit_client: Deposit api client

         """
         super().__init__(storage=storage, url=url, max_content_size=max_content_size)

         self.deposit_id = deposit_id
         self.client = deposit_client
         self.default_filename = default_filename

     @classmethod
     def from_configfile(cls, **kwargs: Any):
         """Instantiate a loader from the configuration loaded from the
         SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their
         value is not None.

         Args:
             kwargs: kwargs passed to the loader instantiation

         """
         config = dict(load_from_envvar(DEFAULT_CONFIG))
         config.update({k: v for k, v in kwargs.items() if v is not None})
         deposit_client = ApiClient(**config.pop("deposit"))
         return cls.from_config(deposit_client=deposit_client, **config)

     def get_versions(self) -> Sequence[str]:
         # only 1 branch 'HEAD' with no alias since we only have 1 snapshot
         # branch
         return ["HEAD"]

     def get_metadata_authority(self) -> MetadataAuthority:
         provider = self.metadata()["provider"]
         assert provider["provider_type"] == MetadataAuthorityType.DEPOSIT_CLIENT.value
         return MetadataAuthority(
             type=MetadataAuthorityType.DEPOSIT_CLIENT,
             url=provider["provider_url"],
             metadata={
                 "name": provider["provider_name"],
                 **(provider["metadata"] or {}),
             },
         )

     def get_metadata_fetcher(self) -> MetadataFetcher:
         tool = self.metadata()["tool"]
         return MetadataFetcher(
             name=tool["name"], version=tool["version"], metadata=tool["configuration"],
         )

     def get_package_info(
         self, version: str
     ) -> Iterator[Tuple[str, DepositPackageInfo]]:
         p_info = DepositPackageInfo.from_metadata(
             self.metadata(), url=self.url, filename=self.default_filename,
         )
         yield "HEAD", p_info

     def download_package(
         self, p_info: DepositPackageInfo, tmpdir: str
     ) -> List[Tuple[str, Mapping]]:
         """Override to allow use of the dedicated deposit client

         """
         return [self.client.archive_get(self.deposit_id, tmpdir, p_info.filename)]

     def build_revision(
         self, p_info: DepositPackageInfo, uncompressed_path: str, directory: Sha1Git
     ) -> Optional[Revision]:
         message = (
             f"{p_info.client}: Deposit {p_info.id} in collection {p_info.collection}"
         ).encode("utf-8")

         return Revision(
             type=RevisionType.TAR,
             message=message,
             author=p_info.author,
             date=TimestampWithTimezone.from_dict(p_info.author_date),
             committer=p_info.committer,
             committer_date=TimestampWithTimezone.from_dict(p_info.commit_date),
             directory=directory,
             synthetic=True,
         )

     def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
         metadata = self.metadata()
         all_metadata_raw: List[str] = metadata["metadata_raw"]
         origin_metadata = json.dumps(
             {
                 "metadata": all_metadata_raw,
                 "provider": metadata["provider"],
                 "tool": metadata["tool"],
             }
         ).encode()
         return [
             RawExtrinsicMetadataCore(
                 discovery_date=now(),
                 metadata=raw_meta.encode(),
                 format="sword-v2-atom-codemeta-v2",
             )
             for raw_meta in all_metadata_raw
         ] + [
             RawExtrinsicMetadataCore(
                 discovery_date=now(),
                 metadata=origin_metadata,
                 format="original-artifacts-json",
             )
         ]

     @cached_method
     def metadata(self):
         """Returns metadata from the deposit server"""
         return self.client.metadata_get(self.deposit_id)

     def load(self) -> Dict:
         # First making sure the deposit is known on the deposit's RPC server
         # prior to triggering a loading
         try:
             self.metadata()
         except ValueError:
             logger.error(f"Unknown deposit {self.deposit_id}, ignoring")
             return {"status": "failed"}

         # Then usual loading
         return super().load()

     def finalize_visit(
         self, status_visit: str, errors: Optional[List[str]] = None, **kwargs
     ) -> Dict[str, Any]:
         r = super().finalize_visit(status_visit=status_visit, **kwargs)
         success = status_visit == "full"

         # Update deposit status
         try:
             if not success:
                 self.client.status_update(
                     self.deposit_id, status="failed", errors=errors,
                 )
                 return r

             snapshot_id = hash_to_bytes(r["snapshot_id"])
             snapshot = snapshot_get_all_branches(self.storage, snapshot_id)
             if not snapshot:
                 return r
             branches = snapshot.branches
             logger.debug("branches: %s", branches)
             if not branches:
                 return r
-            rev_id = branches[b"HEAD"].target
+            rel_id = branches[b"HEAD"].target

-            revision = self.storage.revision_get([rev_id])[0]
-            if not revision:
+            release = self.storage.release_get([rel_id])[0]
+            if not release:
                 return r

             # update the deposit's status to success with its
-            # revision-id and directory-id
+            # release-id and directory-id
             self.client.status_update(
                 self.deposit_id,
                 status="done",
-                revision_id=hash_to_hex(rev_id),
-                directory_id=hash_to_hex(revision.directory),
+                release_id=hash_to_hex(rel_id),
+                directory_id=hash_to_hex(release.target),
                 snapshot_id=r["snapshot_id"],
                 origin_url=self.url,
             )
         except Exception:
             logger.exception("Problem when trying to update the deposit's status")
             return {"status": "failed"}
         return r


 def parse_author(author) -> Person:
     """See prior fixme

     """
     return Person(
         fullname=author["fullname"].encode("utf-8"),
         name=author["name"].encode("utf-8"),
         email=author["email"].encode("utf-8"),
     )


 class ApiClient:
     """Private Deposit Api client

     """

     def __init__(self, url, auth: Optional[Mapping[str, str]]):
         self.base_url = url.rstrip("/")
         self.auth = None if not auth else (auth["username"], auth["password"])

     def do(self, method: str, url: str, *args, **kwargs):
         """Internal method to deal with requests, possibly with basic http
            authentication.

         Args:
             method (str): supported http methods as in get/post/put

         Returns:
             The request's execution output

         """
         method_fn = getattr(requests, method)
         if self.auth:
             kwargs["auth"] = self.auth
         return method_fn(url, *args, **kwargs)

     def archive_get(
         self, deposit_id: Union[int, str], tmpdir: str, filename: str
     ) -> Tuple[str, Dict]:
         """Retrieve deposit's archive artifact locally

         """
         url = f"{self.base_url}/{deposit_id}/raw/"
         return download(url, dest=tmpdir, filename=filename, auth=self.auth)

     def metadata_url(self, deposit_id: Union[int, str]) -> str:
         return f"{self.base_url}/{deposit_id}/meta/"

     def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]:
         """Retrieve deposit's metadata artifact as json

         """
         url = self.metadata_url(deposit_id)
         r = self.do("get", url)
         if r.ok:
             return r.json()

         msg = f"Problem when retrieving deposit metadata at {url}"
         logger.error(msg)
         raise ValueError(msg)

     def status_update(
         self,
         deposit_id: Union[int, str],
         status: str,
         errors: Optional[List[str]] = None,
-        revision_id: Optional[str] = None,
+        release_id: Optional[str] = None,
         directory_id: Optional[str] = None,
         snapshot_id: Optional[str] = None,
         origin_url: Optional[str] = None,
     ):
         """Update deposit's information including status, and persistent
         identifiers result of the loading.

         """
         url = f"{self.base_url}/{deposit_id}/update/"
         payload: Dict[str, Any] = {"status": status}
-        if revision_id:
-            payload["revision_id"] = revision_id
+        if release_id:
+            payload["release_id"] = release_id
         if directory_id:
             payload["directory_id"] = directory_id
         if snapshot_id:
             payload["snapshot_id"] = snapshot_id
         if origin_url:
             payload["origin_url"] = origin_url
         if errors:
             payload["status_detail"] = {"loading": errors}

         self.do("put", url, json=payload)
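Editor's note: the core of the `finalize_visit` change above is that the deposit server is now told about a release id, and the deposit's directory is read from `Release.target` rather than `Revision.directory`. A standalone sketch of that resolution, assuming a storage instance and a snapshot id are at hand (the helper name is hypothetical):

```python
from typing import Dict

from swh.model.hashutil import hash_to_hex
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.storage.interface import StorageInterface


def deposit_status_payload(storage: StorageInterface, snapshot_id: bytes) -> Dict:
    """Hypothetical helper mirroring the finalize_visit logic above."""
    snapshot = snapshot_get_all_branches(storage, snapshot_id)
    # The deposit snapshot has a single HEAD branch targeting the release.
    rel_id = snapshot.branches[b"HEAD"].target
    release = storage.release_get([rel_id])[0]
    return {
        "status": "done",
        "release_id": hash_to_hex(rel_id),
        # The release targets the deposit's root directory.
        "directory_id": hash_to_hex(release.target),
        "snapshot_id": hash_to_hex(snapshot_id),
    }
```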
Args: method (str): supported http methods as in get/post/put Returns: The request's execution output """ method_fn = getattr(requests, method) if self.auth: kwargs["auth"] = self.auth return method_fn(url, *args, **kwargs) def archive_get( self, deposit_id: Union[int, str], tmpdir: str, filename: str ) -> Tuple[str, Dict]: """Retrieve deposit's archive artifact locally """ url = f"{self.base_url}/{deposit_id}/raw/" return download(url, dest=tmpdir, filename=filename, auth=self.auth) def metadata_url(self, deposit_id: Union[int, str]) -> str: return f"{self.base_url}/{deposit_id}/meta/" def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]: """Retrieve deposit's metadata artifact as json """ url = self.metadata_url(deposit_id) r = self.do("get", url) if r.ok: return r.json() msg = f"Problem when retrieving deposit metadata at {url}" logger.error(msg) raise ValueError(msg) def status_update( self, deposit_id: Union[int, str], status: str, errors: Optional[List[str]] = None, - revision_id: Optional[str] = None, + release_id: Optional[str] = None, directory_id: Optional[str] = None, snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, ): """Update deposit's information including status, and persistent identifiers result of the loading. """ url = f"{self.base_url}/{deposit_id}/update/" payload: Dict[str, Any] = {"status": status} - if revision_id: - payload["revision_id"] = revision_id + if release_id: + payload["release_id"] = release_id if directory_id: payload["directory_id"] = directory_id if snapshot_id: payload["snapshot_id"] = snapshot_id if origin_url: payload["origin_url"] = origin_url if errors: payload["status_detail"] = {"loading": errors} self.do("put", url, json=payload) diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py index f65cb5f..0923989 100644 --- a/swh/loader/package/deposit/tests/test_deposit.py +++ b/swh/loader/package/deposit/tests/test_deposit.py @@ -1,500 +1,494 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import re from typing import List import pytest from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.loader.package.deposit.loader import ApiClient, DepositLoader from swh.loader.package.loader import now from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.model import ( - MetadataAuthority, - MetadataAuthorityType, - MetadataFetcher, Origin, Person, RawExtrinsicMetadata, - Revision, - RevisionType, + Release, Snapshot, SnapshotBranch, TargetType, Timestamp, TimestampWithTimezone, ) +from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher +from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType DEPOSIT_URL = "https://deposit.softwareheritage.org/1/private" @pytest.fixture def requests_mock_datadir(requests_mock_datadir): """Enhance default mock data to mock put requests as the loader does some internal update queries there. 
""" requests_mock_datadir.put(re.compile("https")) return requests_mock_datadir def test_deposit_init_ok(swh_storage, deposit_client, swh_loader_config): url = "some-url" deposit_id = 999 loader = DepositLoader( swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip" ) # Something that does not exist assert loader.url == url assert loader.client is not None assert loader.client.base_url == swh_loader_config["deposit"]["url"] def test_deposit_from_configfile(swh_config): """Ensure the deposit instantiation is ok """ loader = DepositLoader.from_configfile( url="some-url", deposit_id="666", default_filename="archive.zip" ) assert isinstance(loader.client, ApiClient) def test_deposit_loading_unknown_deposit( swh_storage, deposit_client, requests_mock_datadir ): """Loading an unknown deposit should fail no origin, no visit, no snapshot """ # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/' url = "some-url" unknown_deposit_id = 667 loader = DepositLoader( swh_storage, url, unknown_deposit_id, deposit_client, default_filename="archive.zip", ) # does not exist actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 0, "origin_visit": 0, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats requests_mock_datadir_missing_one = requests_mock_datadir_factory( ignore_urls=[f"{DEPOSIT_URL}/666/raw/",] ) def test_deposit_loading_failure_to_retrieve_1_artifact( swh_storage, deposit_client, requests_mock_datadir_missing_one ): """Deposit with missing artifact ends up with an uneventful/partial visit """ # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/' url = "some-url-2" deposit_id = 666 requests_mock_datadir_missing_one.put(re.compile("https")) loader = DepositLoader( swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip" ) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(loader.storage, url, status="partial", type="deposit") stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats # Retrieve the information for deposit status update query to the deposit urls = [ m for m in requests_mock_datadir_missing_one.request_history if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/" ] assert len(urls) == 1 update_query = urls[0] body = update_query.json() expected_body = { "status": "failed", "status_detail": { "loading": [ "Failed to load branch HEAD for some-url-2: Fail to query " "'https://deposit.softwareheritage.org/1/private/666/raw/'. 
Reason: 404" ] }, } assert body == expected_body def test_deposit_loading_ok(swh_storage, deposit_client, requests_mock_datadir): url = "https://hal-test.archives-ouvertes.fr/some-external-id" deposit_id = 666 loader = DepositLoader( swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip" ) actual_load_status = loader.load() - expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd" + expected_snapshot_id = "1090aaadc9fd1a77798bf6187d309145cbd23c53" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( loader.storage, url, status="full", type="deposit", snapshot=hash_to_bytes(expected_snapshot_id), ) - revision_id_hex = "637318680351f5d78856d13264faebbd91efe9bb" - revision_id = hash_to_bytes(revision_id_hex) + release_id_hex = "77c127bff4f9137baf26774fe19e29d82a41f69d" + release_id = hash_to_bytes(release_id_hex) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ - b"HEAD": SnapshotBranch( - target=revision_id, target_type=TargetType.REVISION, - ), + b"HEAD": SnapshotBranch(target=release_id, target_type=TargetType.RELEASE,), }, ) check_snapshot(expected_snapshot, storage=loader.storage) - revision = loader.storage.revision_get([revision_id])[0] + release = loader.storage.release_get([release_id])[0] date = TimestampWithTimezone( timestamp=Timestamp(seconds=1507389428, microseconds=0), offset=0, negative_utc=False, ) person = Person( fullname=b"Software Heritage", name=b"Software Heritage", email=b"robot@softwareheritage.org", ) - assert revision == Revision( - id=revision_id, + assert release == Release( + id=release_id, + name=b"HEAD", message=b"hal: Deposit 666 in collection hal", author=person, - committer=person, date=date, - committer_date=date, - type=RevisionType.TAR, - directory=b"\xfd-\xf1-\xc5SL\x1d\xa1\xe9\x18\x0b\x91Q\x02\xfbo`\x1d\x19", + target_type=ModelObjectType.DIRECTORY, + target=b"\xfd-\xf1-\xc5SL\x1d\xa1\xe9\x18\x0b\x91Q\x02\xfbo`\x1d\x19", synthetic=True, metadata=None, - parents=(), - extra_headers=(), ) # check metadata fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",) authority = MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url="https://hal-test.archives-ouvertes.fr/", ) # Check origin metadata orig_meta = loader.storage.raw_extrinsic_metadata_get( Origin(url).swhid(), authority ) assert orig_meta.next_page_token is None raw_meta = loader.client.metadata_get(deposit_id) all_metadata_raw: List[str] = raw_meta["metadata_raw"] # 2 raw metadata xml + 1 json dict assert len(orig_meta.results) == len(all_metadata_raw) + 1 orig_meta0 = orig_meta.results[0] assert orig_meta0.authority == authority assert orig_meta0.fetcher == fetcher # Check directory metadata + assert release.target_type == ModelObjectType.DIRECTORY directory_swhid = CoreSWHID( - object_type=ObjectType.DIRECTORY, object_id=revision.directory + object_type=ObjectType.DIRECTORY, object_id=release.target ) actual_dir_meta = loader.storage.raw_extrinsic_metadata_get( directory_swhid, authority ) assert actual_dir_meta.next_page_token is None assert len(actual_dir_meta.results) == len(all_metadata_raw) for dir_meta in actual_dir_meta.results: assert dir_meta.authority == authority assert dir_meta.fetcher == fetcher assert dir_meta.metadata.decode() in all_metadata_raw # Retrieve the information for deposit status update query to the deposit urls = [ m for m in requests_mock_datadir.request_history if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/" ] assert 
len(urls) == 1 update_query = urls[0] body = update_query.json() expected_body = { "status": "done", - "revision_id": revision_id_hex, - "directory_id": hash_to_hex(revision.directory), + "release_id": release_id_hex, + "directory_id": hash_to_hex(release.target), "snapshot_id": expected_snapshot_id, "origin_url": url, } assert body == expected_body stats = get_stats(loader.storage) assert { "content": 303, "directory": 12, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_deposit_loading_ok_2(swh_storage, deposit_client, requests_mock_datadir): """Field dates should be set appropriately """ external_id = "some-external-id" url = f"https://hal-test.archives-ouvertes.fr/{external_id}" deposit_id = 777 loader = DepositLoader( swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip" ) actual_load_status = loader.load() - expected_snapshot_id = "3e68440fdd7c81d283f8f3aebb6f0c8657864192" + expected_snapshot_id = "f87b25c121d9ab3ff0219b04b92d83f8c6f368f4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( loader.storage, url, status="full", type="deposit", snapshot=hash_to_bytes(expected_snapshot_id), ) - revision_id = "564d18943d71be80d0d73b43a77cfb205bcde96c" + release_id = "c6891941d4033f4fb1dbf39b501c819ac618f957" expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"HEAD": SnapshotBranch( - target=hash_to_bytes(revision_id), target_type=TargetType.REVISION + target=hash_to_bytes(release_id), target_type=TargetType.RELEASE ) }, ) check_snapshot(expected_snapshot, storage=loader.storage) raw_meta = loader.client.metadata_get(deposit_id) - # Ensure the date fields are set appropriately in the revision + # Ensure the date fields are set appropriately in the release - # Retrieve the revision - revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0] - assert revision - assert revision.date.to_dict() == raw_meta["deposit"]["author_date"] - assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"] - assert not revision.metadata + # Retrieve the release + release = loader.storage.release_get([hash_to_bytes(release_id)])[0] + assert release + assert release.date.to_dict() == raw_meta["deposit"]["author_date"] + assert not release.metadata provider = { "provider_name": "hal", "provider_type": "deposit_client", "provider_url": "https://hal-test.archives-ouvertes.fr/", "metadata": None, } tool = { "name": "swh-deposit", "version": "0.0.1", "configuration": {"sword_version": "2"}, } fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",) authority = MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url="https://hal-test.archives-ouvertes.fr/", ) # Check the origin metadata swh side origin_extrinsic_metadata = loader.storage.raw_extrinsic_metadata_get( Origin(url).swhid(), authority ) assert origin_extrinsic_metadata.next_page_token is None all_metadata_raw: List[str] = raw_meta["metadata_raw"] # 1 raw metadata xml + 1 json dict assert len(origin_extrinsic_metadata.results) == len(all_metadata_raw) + 1 origin_swhid = Origin(url).swhid() expected_metadata = [] for idx, raw_meta in enumerate(all_metadata_raw): origin_meta = origin_extrinsic_metadata.results[idx] expected_metadata.append( RawExtrinsicMetadata( target=origin_swhid, discovery_date=origin_meta.discovery_date, metadata=raw_meta.encode(),
format="sword-v2-atom-codemeta-v2", authority=authority, fetcher=fetcher, ) ) origin_metadata = { "metadata": all_metadata_raw, "provider": provider, "tool": tool, } expected_metadata.append( RawExtrinsicMetadata( target=origin_swhid, discovery_date=origin_extrinsic_metadata.results[-1].discovery_date, metadata=json.dumps(origin_metadata).encode(), format="original-artifacts-json", authority=authority, fetcher=fetcher, ) ) assert sorted(origin_extrinsic_metadata.results) == sorted(expected_metadata) - # Check the revision metadata swh side + # Check the release metadata swh side + assert release.target_type == ModelObjectType.DIRECTORY directory_swhid = ExtendedSWHID( - object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) actual_directory_metadata = loader.storage.raw_extrinsic_metadata_get( directory_swhid, authority ) assert actual_directory_metadata.next_page_token is None assert len(actual_directory_metadata.results) == len(all_metadata_raw) - revision_swhid = CoreSWHID( - object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id) + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=hash_to_bytes(release_id) ) dir_metadata_template = RawExtrinsicMetadata( target=directory_swhid, format="sword-v2-atom-codemeta-v2", authority=authority, fetcher=fetcher, origin=url, - revision=revision_swhid, + release=release_swhid, # to satisfy the constructor discovery_date=now(), metadata=b"", ) expected_directory_metadata = [] for idx, raw_meta in enumerate(all_metadata_raw): dir_metadata = actual_directory_metadata.results[idx] expected_directory_metadata.append( RawExtrinsicMetadata.from_dict( { **{ k: v for (k, v) in dir_metadata_template.to_dict().items() if k != "id" }, "discovery_date": dir_metadata.discovery_date, "metadata": raw_meta.encode(), } ) ) assert sorted(actual_directory_metadata.results) == sorted( expected_directory_metadata ) # Retrieve the information for deposit status update query to the deposit urls = [ m for m in requests_mock_datadir.request_history if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/" ] assert len(urls) == 1 update_query = urls[0] body = update_query.json() expected_body = { "status": "done", - "revision_id": revision_id, - "directory_id": hash_to_hex(revision.directory), + "release_id": release_id, + "directory_id": hash_to_hex(release.target), "snapshot_id": expected_snapshot_id, "origin_url": url, } assert body == expected_body def test_deposit_loading_ok_3(swh_storage, deposit_client, requests_mock_datadir): """Deposit loading can happen on tarball artifacts as well The latest deposit changes introduce the internal change. 
""" external_id = "hal-123456" url = f"https://hal-test.archives-ouvertes.fr/{external_id}" deposit_id = 888 loader = DepositLoader(swh_storage, url, deposit_id, deposit_client) actual_load_status = loader.load() - expected_snapshot_id = "0ac7b54c042a026389f2087dc16f1d5c644ed0e4" + expected_snapshot_id = "212228fe041c763471c14545cf11dbec8003d6b4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( loader.storage, url, status="full", type="deposit", snapshot=hash_to_bytes(expected_snapshot_id), ) diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 674912d..93a4e42 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,965 +1,1074 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from itertools import islice import json import logging import os import string import sys import tempfile from typing import ( Any, Dict, Generic, Iterable, Iterator, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, ) +import warnings import attr from requests.exceptions import ContentDecodingError import sentry_sdk from swh.core.tarball import uncompress from swh.loader.core.loader import BaseLoader from swh.loader.exception import NotFound from swh.loader.package.utils import download from swh.model import from_disk from swh.model.hashutil import hash_to_hex from swh.model.model import ( ExtID, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, +) +from swh.model.model import ( Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, + Release, Revision, Sha1Git, Snapshot, ) +from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from swh.storage.utils import now logger = logging.getLogger(__name__) SWH_METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/", metadata={}, ) """Metadata authority for extrinsic metadata generated by Software Heritage. Used for metadata on "original artifacts", ie. length, filename, and checksums of downloaded archive files.""" PartialExtID = Tuple[str, bytes] """The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object.""" @attr.s class RawExtrinsicMetadataCore: """Contains the core of the metadata extracted by a loader, that will be used to build a full RawExtrinsicMetadata object by adding object identifier, context, and provenance information.""" format = attr.ib(type=str) metadata = attr.ib(type=bytes) discovery_date = attr.ib(type=Optional[datetime.datetime], default=None) """Defaults to the visit date.""" @attr.s class BasePackageInfo: """Compute the primary key for a dict using the id_keys as primary key composite. 
Args: d: A dict entry to compute the primary key on id_keys: Sequence of keys to use as primary key Returns: The identity for that dict entry """ url = attr.ib(type=str) filename = attr.ib(type=Optional[str]) MANIFEST_FORMAT: Optional[string.Template] = None """If not None, used by the default extid() implementation to format a manifest, before hashing it to produce an ExtID.""" EXTID_TYPE: str = "package-manifest-sha256" # The following attribute has kw_only=True in order to allow subclasses # to add attributes. Without kw_only, attributes without default values cannot # go after attributes with default values. # See directory_extrinsic_metadata = attr.ib( type=List[RawExtrinsicMetadataCore], default=[], kw_only=True, ) """:term:`extrinsic metadata` collected by the loader, that will be attached to the loaded directory and added to the Metadata storage.""" - # TODO: add support for metadata for revisions and contents + # TODO: add support for metadata for releases and contents def extid(self) -> Optional[PartialExtID]: """Returns a unique intrinsic identifier of this package info, or None if this package info is not 'deduplicatable' (meaning that we will always load it, instead of checking the ExtID storage to see if we already did)""" if self.MANIFEST_FORMAT is None: return None else: manifest = self.MANIFEST_FORMAT.substitute( {k: str(v) for (k, v) in attr.asdict(self).items()} ) return (self.EXTID_TYPE, hashlib.sha256(manifest.encode()).digest()) TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo) class PackageLoader(BaseLoader, Generic[TPackageInfo]): # Origin visit type (str) set by the loader visit_type = "" visit_date: datetime.datetime def __init__( self, storage: StorageInterface, url: str, max_content_size: Optional[int] = None, ): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). Args: storage: Storage instance url: Origin url to load data from """ super().__init__(storage=storage, max_content_size=max_content_size) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Raises: class:`swh.loader.exception.NotFound` error when failing to read the published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: """Build the revision from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. + This method is deprecated, :meth:`build_release` should be implemented instead. + Args: p_info: Package information uncompressed_path: Artifact uncompressed path on disk + """ + raise NotImplementedError("build_revision") - Returns: - Revision object + def build_release( + self, + version: str, + p_info: TPackageInfo, + uncompressed_path: str, + directory: Sha1Git, + ) -> Optional[Release]: + """Build the release from the archive metadata (extrinsic + artifact metadata) and the intrinsic metadata. 
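As an aside, the ExtID machinery defined here is small enough to exercise standalone: a subclass only declares MANIFEST_FORMAT, and extid() derives a stable (type, sha256-of-manifest) pair from the package info fields. A minimal sketch; DemoPackageInfo and its values are hypothetical:

import hashlib
import string

from swh.loader.package.loader import BasePackageInfo

class DemoPackageInfo(BasePackageInfo):
    # The template is filled from the attrs fields, then hashed into the ExtID.
    MANIFEST_FORMAT = string.Template("$url $filename")

p_info = DemoPackageInfo(url="https://example.org/pkg-1.0.tgz", filename="pkg-1.0.tgz")
extid_type, extid = p_info.extid()
assert extid_type == "package-manifest-sha256"
assert extid == hashlib.sha256(b"https://example.org/pkg-1.0.tgz pkg-1.0.tgz").digest()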
+ Args: + p_info: Package information + uncompressed_path: Artifact uncompressed path on disk """ - raise NotImplementedError("build_revision") + warnings.warn( + f"{self.get_loader_name()} is missing a build_release() method. " + f"Falling back to `build_revision` + automatic conversion to release.", + DeprecationWarning, + ) + + rev = self.build_revision(p_info, uncompressed_path, directory) + if rev is None: + return None + else: + return rev2rel(rev, version) def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot out of the last visit. """ return snapshot_get_latest(self.storage, self.url) def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]: return p_info.extid() def _get_known_extids( self, packages_info: List[TPackageInfo] ) -> Dict[PartialExtID, List[CoreSWHID]]: """Compute the ExtIDs from new PackageInfo objects, searches which are already loaded in the archive, and returns them if any.""" # Compute the ExtIDs of all the new packages, grouped by extid type new_extids: Dict[str, List[bytes]] = {} for p_info in packages_info: res = p_info.extid() if res is not None: (extid_type, extid_extid) = res new_extids.setdefault(extid_type, []).append(extid_extid) # For each extid type, call extid_get_from_extid() with all the extids of # that type, and store them in the '(type, extid) -> target' map. known_extids: Dict[PartialExtID, List[CoreSWHID]] = {} for (extid_type, extids) in new_extids.items(): for extid in self.storage.extid_get_from_extid(extid_type, extids): if extid is not None: key = (extid.extid_type, extid.extid) known_extids.setdefault(key, []).append(extid.target) return known_extids - def resolve_revision_from_extids( + def resolve_object_from_extids( self, known_extids: Dict[PartialExtID, List[CoreSWHID]], p_info: TPackageInfo, - revision_whitelist: Set[Sha1Git], - ) -> Optional[Sha1Git]: - """Resolve the revision from known ExtIDs and a package info object. + whitelist: Set[Sha1Git], + ) -> Optional[CoreSWHID]: + """Resolve the revision/release from known ExtIDs and a package info object. If the artifact has already been downloaded, this will return the - existing revision targeting that uncompressed artifact directory. + existing release (or revision) targeting that uncompressed artifact directory. Otherwise, this returns None. Args: known_extids: Dict built from a list of ExtID, with the target as value p_info: Package information - revision_whitelist: Any ExtID with target not in this set is filtered out + whitelist: Any ExtID with target not in this set is filtered out Returns: - None or revision identifier + None or release/revision SWHID """ new_extid = p_info.extid() if new_extid is None: return None + extid_targets = [] for extid_target in known_extids.get(new_extid, []): - if extid_target.object_id not in revision_whitelist: + if extid_target.object_id not in whitelist: # There is a known ExtID for this package, but its target is not # in the snapshot. # This can happen for three reasons: # # 1. a loader crashed after writing the ExtID, but before writing # the snapshot # 2. some other loader loaded the same artifact, but produced # a different revision, causing an additional ExtID object # to be written. We will probably find this loader's ExtID # in a future iteration of this loop. 
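What a loader is now expected to implement directly, instead of relying on the deprecation fallback above, is build_release(). A hedged sketch of a hypothetical minimal implementation, producing the same kind of synthetic, directory-targeting release that rev2rel() synthesizes for legacy loaders; all names below are illustrative, not part of the patch:

from typing import Optional

from swh.loader.package.loader import BasePackageInfo, PackageLoader
from swh.model.model import ObjectType, Person, Release, Sha1Git

class ExamplePackageLoader(PackageLoader[BasePackageInfo]):  # hypothetical
    visit_type = "example"

    def build_release(
        self, version: str, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git
    ) -> Optional[Release]:
        return Release(
            name=version.encode(),
            message=f"Synthetic release for version {version}\n".encode(),
            target=directory,  # package-loader releases target the root directory
            target_type=ObjectType.DIRECTORY,
            synthetic=True,
            author=Person.from_fullname(b"Example Bot <bot@example.org>"),
            date=None,
        )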
# Note that for now, this is impossible, as each loader has a # completely different extid_type, but this is an implementation # detail of each loader. # 3. we took a snapshot, then the package disappeared, # then we took another snapshot, and the package reappeared # # In case of 1, we must actually load the package now, # so let's do it. - # TODO: detect when we are in case 3 using revision_missing instead - # of the snapshot. + # TODO: detect when we are in case 3 using release_missing + # or revision_missing instead of the snapshot. continue - elif extid_target.object_type != ObjectType.REVISION: - # We only support revisions for now. + elif extid_target.object_type in (ObjectType.RELEASE, ObjectType.REVISION): + extid_targets.append(extid_target) + else: # Note that this case should never be reached unless there is a # collision between a revision hash and some non-revision object's # hash, but better safe than sorry. logger.warning( - "%s is in the revision whitelist, but is not a revision.", + "%s is in the whitelist, but is not a revision/release.", hash_to_hex(extid_target.object_id), ) - continue - return extid_target.object_id - return None + if extid_targets: + # This is a known package version, as we have an extid to reference it. + # Let's return one of them. + + # If there is a release extid, return it. + release_extid_targets = [ + extid_target + for extid_target in extid_targets + if extid_target.object_type == ObjectType.RELEASE + ] + if release_extid_targets: + assert len(release_extid_targets) == 1, release_extid_targets + return release_extid_targets[0] + + # If there is no release extid (ie. if the package was only loaded with + # older versions of this loader, which produced revision objects instead + # of releases), return a revision extid. + assert len(extid_targets) == 1, extid_targets + assert extid_targets[0].object_type == ObjectType.REVISION, extid_targets + return extid_targets[0] + else: + # No target found (this is probably a new package version) + return None def download_package( self, p_info: TPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in the tmpdir folder. The default implementation expects the package info to describe one artifact per package. Note that most implementations have one artifact per package, but some have multiple artifacts per package (debian), and some have none at all: the package is the artifact (gnu). Args: p_info: Information on the package artifact to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ try: return [download(p_info.url, dest=tmpdir, filename=p_info.filename)] except ContentDecodingError: # package might be erroneously marked as gzip compressed while it is not, # try to download its raw bytes again without attempting to uncompress # the input stream return [ download( p_info.url, dest=tmpdir, filename=p_info.filename, extra_request_headers={"Accept-Encoding": "identity"}, ) ] def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian).
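The release-over-revision preference implemented above can be illustrated in isolation: given both a legacy revision target and a release target for the same ExtID, the release wins. A sketch with made-up object ids:

from swh.model.swhids import CoreSWHID, ObjectType

targets = [
    CoreSWHID(object_type=ObjectType.REVISION, object_id=b"\x01" * 20),
    CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"\x02" * 20),
]
release_targets = [t for t in targets if t.object_type == ObjectType.RELEASE]
chosen = release_targets[0] if release_targets else targets[0]
assert chosen.object_type == ObjectType.RELEASE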
""" uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def finalize_visit( self, *, snapshot: Optional[Snapshot], visit: OriginVisit, status_visit: str, status_load: str, failed_branches: List[str], errors: Optional[List[str]] = None, ) -> Dict[str, Any]: """Finalize the visit: - flush eventual unflushed data to storage - update origin visit's status - return the task's status """ self.storage.flush() snapshot_id: Optional[bytes] = None if snapshot and snapshot.id: # to prevent the snapshot.id to b"" snapshot_id = snapshot.id assert visit.visit visit_status = OriginVisitStatus( origin=self.url, visit=visit.visit, type=self.visit_type, date=now(), status=status_visit, snapshot=snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) result: Dict[str, Any] = { "status": status_load, } if snapshot_id: result["snapshot_id"] = hash_to_hex(snapshot_id) if failed_branches: logger.warning("%d failed branches", len(failed_branches)) for i, urls in enumerate(islice(failed_branches, 50)): prefix_url = "Failed branches: " if i == 0 else "" logger.warning("%s%s", prefix_url, urls) return result def load(self) -> Dict: """Load for a specific origin the associated contents. 1. Get the list of versions in an origin. 2. Get the snapshot from the previous run of the loader, and filter out versions that were already loaded, if their :term:`extids ` match Then, for each remaining version in the origin 3. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 4. Extract the downloaded files. By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 5. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 6. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 7. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 8. 
Generate and load the snapshot for the visit Using the revisions/releases collected at step 7., and the branch information from step 2., generate a snapshot and load it into the Software Heritage archive """ status_load = "uneventful" # either: eventful, uneventful, failed status_visit = "full" # see swh.model.model.OriginVisitStatus snapshot = None failed_branches: List[str] = [] # Prepare origin and origin_visit origin = Origin(url=self.url) try: self.storage.origin_add([origin]) visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] except Exception as e: logger.exception("Failed to initialize origin_visit for %s", self.url) sentry_sdk.capture_exception(e) return {"status": "failed"} # Get the previous snapshot for this origin. It is then used to see which # of the package's versions are already loaded in the archive. try: last_snapshot = self.last_snapshot() logger.debug("last snapshot: %s", last_snapshot) except Exception as e: logger.exception("Failed to get previous state for %s", self.url) sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) load_exceptions: List[Exception] = [] # Get the list of all version names try: versions = self.get_versions() except NotFound as e: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="not_found", status_load="failed", errors=[str(e)], ) except Exception as e: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) # Get the metadata of each version's package packages_info: List[Tuple[str, str, TPackageInfo]] = [ (version, branch_name, p_info) for version in versions for (branch_name, p_info) in self.get_package_info(version) ] # Compute the ExtID of each of these packages known_extids = self._get_known_extids( [p_info for (_, _, p_info) in packages_info] ) if last_snapshot is None: last_snapshot_targets: Set[Sha1Git] = set() else: last_snapshot_targets = { branch.target for branch in last_snapshot.branches.values() } new_extids: Set[ExtID] = set() - tmp_revisions: Dict[str, List[Tuple[str, Sha1Git]]] = { + tmp_releases: Dict[str, List[Tuple[str, Sha1Git]]] = { version: [] for version in versions } errors = [] for (version, branch_name, p_info) in packages_info: logger.debug("package_info: %s", p_info) # Check if the package was already loaded, using its ExtID - revision_id = self.resolve_revision_from_extids( + swhid = self.resolve_object_from_extids( known_extids, p_info, last_snapshot_targets ) - if revision_id is None: - # No matching revision found in the last snapshot, load it. + if swhid is not None and swhid.object_type == ObjectType.REVISION: + # This package was already loaded, but by an older version + # of this loader, which produced revisions instead of releases. + # Let's fetch the revision's data, and "upgrade" it into a release. + (rev,) = self.storage.revision_get([swhid.object_id]) + if not rev: + logger.error( + "Failed to upgrade branch %s from revision to " + "release, %s is missing from the storage. " + "Falling back to re-loading from the origin.", + branch_name, + swhid, + ) + else: + rev = None + + if swhid is None or (swhid.object_type == ObjectType.REVISION and not rev): + # No matching revision or release found in the last snapshot, load it. 
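The per-package branching that follows boils down to three cases, depending on what resolve_object_from_extids() returned. A condensed restatement for orientation, not the verbatim loader body (the real code also re-loads the package when the stored revision turns out to be missing):

from typing import Optional

from swh.model.swhids import CoreSWHID, ObjectType

def plan_action(swhid: Optional[CoreSWHID]) -> str:
    if swhid is None:
        return "load"     # new package version: download it and build a release
    if swhid.object_type == ObjectType.REVISION:
        return "upgrade"  # loaded by an older loader: convert the stored
                          # revision to a release and write a new ExtID
    if swhid.object_type == ObjectType.RELEASE:
        return "reuse"    # already loaded as a release: nothing to do
    raise AssertionError(f"Unexpected object type: {swhid}")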
+ + release_id = None + try: - res = self._load_revision(p_info, origin) + res = self._load_release(version, p_info, origin) if res: - (revision_id, directory_id) = res - assert revision_id + (release_id, directory_id) = res + assert release_id assert directory_id self._load_extrinsic_directory_metadata( - p_info, revision_id, directory_id + p_info, release_id, directory_id ) self.storage.flush() status_load = "eventful" except Exception as e: self.storage.clear_buffers() load_exceptions.append(e) sentry_sdk.capture_exception(e) error = f"Failed to load branch {branch_name} for {self.url}" logger.exception(error) failed_branches.append(branch_name) errors.append(f"{error}: {e}") continue - if revision_id is None: + if release_id is None: continue + add_extid = True + elif swhid.object_type == ObjectType.REVISION: + # If 'rev' was None, the previous block would have run. + assert rev is not None + rel = rev2rel(rev, version) + self.storage.release_add([rel]) + logger.debug("Upgraded %s to %s", swhid, rel.swhid()) + release_id = rel.id + + # Create a new extid for this package, so the next run of this loader + # will be able to find the new release, and use it (instead of the + # old revision) + add_extid = True + elif swhid.object_type == ObjectType.RELEASE: + # This package was already loaded, nothing to do. + release_id = swhid.object_id + add_extid = False + else: + assert False, f"Unexpected object type: {swhid}" + + assert release_id is not None + + if add_extid: partial_extid = p_info.extid() if partial_extid is not None: (extid_type, extid) = partial_extid - revision_swhid = CoreSWHID( - object_type=ObjectType.REVISION, object_id=revision_id + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=release_id ) new_extids.add( - ExtID(extid_type=extid_type, extid=extid, target=revision_swhid) + ExtID(extid_type=extid_type, extid=extid, target=release_swhid) ) - tmp_revisions[version].append((branch_name, revision_id)) + tmp_releases[version].append((branch_name, release_id)) if load_exceptions: status_visit = "partial" - if not tmp_revisions: - # We could not load any revisions; fail completely + if not tmp_releases: + # We could not load any releases; fail completely return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=errors, ) try: # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug("default version: %s", default_version) # Retrieve extra branches extra_branches = self.extra_branches() logger.debug("extra branches: %s", extra_branches) snapshot = self._load_snapshot( - default_version, tmp_revisions, extra_branches + default_version, tmp_releases, extra_branches ) self.storage.flush() except Exception as e: error = f"Failed to build snapshot for origin {self.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "failed" status_load = "failed" if snapshot: try: metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id) self._load_metadata_objects(metadata_objects) except Exception as e: error = f"Failed to load extrinsic snapshot metadata for {self.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" try: metadata_objects = self.build_extrinsic_origin_metadata() self._load_metadata_objects(metadata_objects) except Exception as e: error = f"Failed to load 
extrinsic origin metadata for {self.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" self._load_extids(new_extids) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit=status_visit, status_load=status_load, errors=errors, ) def _load_directory( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str ) -> Tuple[str, from_disk.Directory]: uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) logger.debug("uncompressed_path: %s", uncompressed_path) directory = from_disk.Directory.from_disk( path=uncompressed_path.encode("utf-8"), max_content_length=self.max_content_size, ) contents, skipped_contents, directories = from_disk.iter_directory(directory) logger.debug("Number of skipped contents: %s", len(skipped_contents)) self.storage.skipped_content_add(skipped_contents) logger.debug("Number of contents: %s", len(contents)) self.storage.content_add(contents) logger.debug("Number of directories: %s", len(directories)) self.storage.directory_add(directories) return (uncompressed_path, directory) - def _load_revision( - self, p_info: TPackageInfo, origin + def _load_release( + self, version: str, p_info: TPackageInfo, origin ) -> Optional[Tuple[Sha1Git, Sha1Git]]: - """Does all the loading of a revision itself: + """Does all the loading of a release itself: * downloads a package and uncompresses it * loads it from disk - * adds contents, directories, and revision to self.storage - * returns (revision_id, directory_id) + * adds contents, directories, and release to self.storage + * returns (release_id, directory_id) Raises exception when unable to download or uncompress artifacts """ with tempfile.TemporaryDirectory() as tmpdir: dl_artifacts = self.download_package(p_info, tmpdir) (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir) # FIXME: This should be release. cf. 
D409 - revision = self.build_revision( - p_info, uncompressed_path, directory=directory.hash + release = self.build_release( + version, p_info, uncompressed_path, directory=directory.hash ) - if not revision: + if not release: # Some artifacts are missing intrinsic metadata # skipping those return None metadata = [metadata for (filepath, metadata) in dl_artifacts] + assert release.target is not None, release + assert release.target_type == ModelObjectType.DIRECTORY, release + metadata_target = ExtendedSWHID( + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target + ) original_artifact_metadata = RawExtrinsicMetadata( - target=ExtendedSWHID( - object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory - ), + target=metadata_target, discovery_date=self.visit_date, authority=SWH_METADATA_AUTHORITY, fetcher=self.get_metadata_fetcher(), format="original-artifacts-json", metadata=json.dumps(metadata).encode(), origin=self.url, - revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision.id), + release=release.swhid(), ) self._load_metadata_objects([original_artifact_metadata]) - logger.debug("Revision: %s", revision) + logger.debug("Release: %s", release) - self.storage.revision_add([revision]) + self.storage.release_add([release]) assert directory.hash - return (revision.id, directory.hash) + return (release.id, directory.hash) def _load_snapshot( self, default_version: str, - revisions: Dict[str, List[Tuple[str, bytes]]], + releases: Dict[str, List[Tuple[str, bytes]]], extra_branches: Dict[bytes, Mapping[str, Any]], ) -> Optional[Snapshot]: - """Build snapshot out of the current revisions stored and extra branches. + """Build snapshot out of the current releases stored and extra branches. Then load it in the storage. 
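Once releases flow through _load_snapshot() (whose body follows), the snapshot ends up with one release branch per version plus a HEAD alias pointing at the default version's branch. An illustration of that layout with a made-up target id; as in the method below, Snapshot.from_dict is given only the branches:

from swh.model.model import Snapshot

snapshot = Snapshot.from_dict(
    {
        "branches": {
            b"releases/1.0.0": {"target_type": "release", "target": b"\x01" * 20},
            b"HEAD": {"target_type": "alias", "target": b"releases/1.0.0"},
        }
    }
)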
""" - logger.debug("revisions: %s", revisions) + logger.debug("releases: %s", releases) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] - for version, branch_name_revisions in revisions.items(): - if version == default_version and len(branch_name_revisions) == 1: + for version, branch_name_releases in releases.items(): + if version == default_version and len(branch_name_releases) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' - branch_name, _ = branch_name_revisions[0] + branch_name, _ = branch_name_releases[0] # except for some corner case (deposit) if branch_name != "HEAD": branches[b"HEAD"] = { "target_type": "alias", "target": branch_name.encode("utf-8"), } - for branch_name, target in branch_name_revisions: + for branch_name, target in branch_name_releases: branches[branch_name.encode("utf-8")] = { - "target_type": "revision", + "target_type": "release", "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: logger.error("Extra branch '%s' has been ignored", name) else: branches[name] = branch_target snapshot_data = {"branches": branches} logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot def get_loader_name(self) -> str: """Returns a fully qualified name of this loader.""" return f"{self.__class__.__module__}.{self.__class__.__name__}" def get_loader_version(self) -> str: """Returns the version of the current loader.""" module_name = self.__class__.__module__ or "" module_name_parts = module_name.split(".") # Iterate rootward through the package hierarchy until we find a parent of this # loader's module with a __version__ attribute. for prefix_size in range(len(module_name_parts), 0, -1): package_name = ".".join(module_name_parts[0:prefix_size]) module = sys.modules[package_name] if hasattr(module, "__version__"): return module.__version__ # type: ignore # If this loader's class has no parent package with a __version__, # it should implement it itself. raise NotImplementedError( f"Could not dynamically find the version of {self.get_loader_name()}." ) def get_metadata_fetcher(self) -> MetadataFetcher: """Returns a MetadataFetcher instance representing this package loader; which is used to for adding provenance information to extracted extrinsic metadata, if any.""" return MetadataFetcher( name=self.get_loader_name(), version=self.get_loader_version(), metadata={}, ) def get_metadata_authority(self) -> MetadataAuthority: """For package loaders that get extrinsic metadata, returns the authority the metadata are coming from. """ raise NotImplementedError("get_metadata_authority") def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_origin_metadata.""" return [] def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_origin_metadata.""" metadata_items = self.get_extrinsic_origin_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=Origin(self.url).swhid(), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, ) ) return metadata_objects def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_snapshot_metadata.""" return [] def build_extrinsic_snapshot_metadata( self, snapshot_id: Sha1Git ) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_snapshot_metadata.""" metadata_items = self.get_extrinsic_snapshot_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.url, ) ) return metadata_objects def build_extrinsic_directory_metadata( - self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git, + self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: if not p_info.directory_extrinsic_metadata: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in p_info.directory_extrinsic_metadata: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.url, - revision=CoreSWHID( - object_type=ObjectType.REVISION, object_id=revision_id + release=CoreSWHID( + object_type=ObjectType.RELEASE, object_id=release_id ), ) ) return metadata_objects def _load_extrinsic_directory_metadata( - self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git, + self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> None: metadata_objects = self.build_extrinsic_directory_metadata( - p_info, revision_id, directory_id + p_info, release_id, directory_id ) self._load_metadata_objects(metadata_objects) def _load_metadata_objects( self, metadata_objects: List[RawExtrinsicMetadata] ) -> None: if not metadata_objects: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return self._create_authorities(mo.authority for mo in metadata_objects) self._create_fetchers(mo.fetcher for mo in metadata_objects) self.storage.raw_extrinsic_metadata_add(metadata_objects) def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None: deduplicated_authorities = { (authority.type, authority.url): authority for authority in authorities } if authorities: self.storage.metadata_authority_add(list(deduplicated_authorities.values())) def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None: deduplicated_fetchers = { (fetcher.name, fetcher.version): fetcher for fetcher in fetchers } if fetchers: self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values())) def _load_extids(self, extids: Set[ExtID]) -> None: if not extids: return try: self.storage.extid_add(list(extids)) except Exception as e: logger.exception("Failed to load new ExtIDs for %s", self.url) sentry_sdk.capture_exception(e) # No big deal, it just means the next visit will load the same versions # again. + + +def rev2rel(rev: Revision, version: str) -> Release: + """Converts a revision to a release.""" + return Release( + name=version.encode(), + message=rev.message, + target=rev.directory, + target_type=ModelObjectType.DIRECTORY, + synthetic=rev.synthetic, + author=rev.author, + date=rev.date, + ) diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py index 12fcf6b..3f830b7 100644 --- a/swh/loader/package/nixguix/tests/test_nixguix.py +++ b/swh/loader/package/nixguix/tests/test_nixguix.py @@ -1,621 +1,621 @@ # Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from typing import Dict, Optional, Tuple import pytest from swh.loader.package import __version__ from swh.loader.package.archive.loader import ArchiveLoader from swh.loader.package.nixguix.loader import ( NixGuixLoader, clean_sources, make_pattern_unsupported_file_extension, parse_sources, retrieve_sources, ) from swh.loader.package.utils import download from swh.loader.tests import assert_last_visit_matches from swh.loader.tests import check_snapshot as check_snapshot_full from swh.loader.tests import get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.model.swhids import ExtendedObjectType, ExtendedSWHID from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.storage.exc import HashCollision from swh.storage.interface import PagedResult, StorageInterface sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json" @pytest.fixture def raw_sources(datadir) -> bytes: with open( os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json" ), "rb", ) as f: return f.read() SNAPSHOT1 = Snapshot( - id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"), + id=hash_to_bytes("771d13ae4e799755c22d1e05da8fc39cf215de58"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( - 
target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), - target_type=TargetType.REVISION, + target=hash_to_bytes("24853190589d26d0ea2b6c0330b553ff39176e0c"), + target_type=TargetType.RELEASE, ), }, ) def check_snapshot(snapshot: Snapshot, storage: StorageInterface): # The `evaluation` branch is allowed to be unresolvable. It's possible at current # nixguix visit time, it is not yet visited (the git loader is in charge of its # visit for now). For more details, check the # swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring. check_snapshot_full( snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")] ) assert isinstance(snapshot, Snapshot) # then ensure the snapshot revisions are structurally as expected revision_ids = [] for name, branch in snapshot.branches.items(): if name == b"evaluation": continue # skipping that particular branch (cf. previous comment) if branch.target_type == TargetType.REVISION: revision_ids.append(branch.target) revisions = storage.revision_get(revision_ids) for rev in revisions: assert rev is not None metadata = rev.metadata assert not metadata def test_retrieve_sources(swh_storage, requests_mock_datadir): j = parse_sources(retrieve_sources(sources_url)) assert "sources" in j.keys() assert len(j["sources"]) == 2 def test_nixguix_url_not_found(swh_storage, requests_mock_datadir): """When failing to read from the url, the visit is marked as not_found. Here the sources url does not exist, so requests_mock_datadir returns a 404. Resulting in a NotFound raised within the package loader's main loop. This results in the task with status failed and a visit_status with status "not_found". """ unknown_url = "https://non-existing-url/" loader = NixGuixLoader(swh_storage, unknown_url) # during the retrieval step load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == unknown_url def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir): """Other errors during communication with the url, the visit is marked as failed requests_mock_datadir will intercept the requests to sources_url. Since the file exists, returns a 200 with the requested content of the query. As file.txt is no json, fails do decode and raises a JSONDecodeError. In effect failing the visit. 
""" sources_url = "https://example.com/file.txt" loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( swh_storage, sources_url, status="failed", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == sources_url def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir): sources = {} with pytest.raises(ValueError, match="sources structure invalid, missing: .*"): clean_sources(sources) def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir): for version_ok in [1, "1"]: # Check those versions are fine clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"}) for version_ko in [0, "0", 2, "2"]: # Check version != 1 raise an error with pytest.raises( ValueError, match="sources structure version .* is not supported" ): clean_sources( {"version": version_ko, "sources": [], "revision": "my-revision"} ) def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir): valid_sources = [ # 1 valid source {"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"}, ] sources = { "version": 1, "sources": valid_sources + [ # integrity is missing {"type": "url", "urls": ["my-url.tgz"],}, # urls is not a list {"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"}, # type is not url {"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"}, # missing fields which got double-checked nonetheless... {"integrity": "my-integrity"}, ], "revision": "my-revision", } clean = clean_sources(sources) assert len(clean["sources"]) == len(valid_sources) def test_make_pattern_unsupported_file_extension(): unsupported_extensions = ["el", "c", "txt"] supported_extensions = ["Z", "7z"] # for test actual_unsupported_pattern = make_pattern_unsupported_file_extension( unsupported_extensions ) for supported_ext in supported_extensions: assert supported_ext not in unsupported_extensions supported_filepath = f"anything.{supported_ext}" actual_match = actual_unsupported_pattern.match(supported_filepath) assert not actual_match for unsupported_ext in unsupported_extensions: unsupported_filepath = f"something.{unsupported_ext}" actual_match = actual_unsupported_pattern.match(unsupported_filepath) assert actual_match def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir): unsupported_file_extensions = [ "iso", "whl", "gem", "pom", "msi", "pod", "png", "rock", "ttf", "jar", "c", "el", "rpm", "diff", "patch", ] supported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in [ "known-unknown-but-ok", # this is fine as well with the current approach "zip", "tar.gz", "tgz", "tar.bz2", "tbz", "tbz2", "tar.xz", "tar", "zip", "7z", "Z", ] ] unsupported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in unsupported_file_extensions ] sources = { "version": 1, "sources": supported_sources + unsupported_sources, "revision": "my-revision", } clean = clean_sources(sources, unsupported_file_extensions) assert len(clean["sources"]) == len(supported_sources) def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources): loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, 
"origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats # The visit is partial because urls pointing to non tarball file # are not handled yet assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix" ) visit_status = origin_get_latest_visit_status(swh_storage, sources_url) snapshot_swhid = ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=visit_status.snapshot ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=sources_url, ) expected_metadata = [ RawExtrinsicMetadata( target=snapshot_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.nixguix.loader.NixGuixLoader", version=__version__, ), discovery_date=loader.visit_date, format="nixguix-sources-json", metadata=raw_sources, origin=sources_url, ) ] assert swh_storage.raw_extrinsic_metadata_get( snapshot_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) def test_uncompress_failure(swh_storage, requests_mock_datadir): """Non tarball files are currently not supported and the uncompress function fails on such kind of files. However, even in this case of failure (because of the url https://example.com/file.txt), a snapshot and a visit has to be created (with a status partial since all files are not archived). """ loader = NixGuixLoader(swh_storage, sources_url) loader_status = loader.load() sources = loader.supported_sources()["sources"] urls = [s["urls"][0] for s in sources] assert "https://example.com/file.txt" in urls assert loader_status["status"] == "eventful" # The visit is partial because urls pointing to non tarball files # are not handled yet assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix" ) def test_loader_incremental(swh_storage, requests_mock_datadir): """Ensure a second visit do not download artifact already downloaded by the previous visit. """ loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) urls = [ m.url for m in requests_mock_datadir.request_history if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz") ] # The artifact # 'https://github.com/owner-1/repository-1/revision-1.tgz' is only # visited one time assert len(urls) == 1 def test_loader_two_visits(swh_storage, requests_mock_datadir_visits): """To ensure there is only one origin, but two visits, two revisions and two snapshots are created. The first visit creates a snapshot containing one tarball. The second visit creates a snapshot containing the same tarball and another tarball. 
""" loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() - expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97" + expected_snapshot_id_hex = "c5bba84fd5ac3342566effb86190619092d34e79" expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex) assert load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id_hex, } assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=expected_snapshot_id, ) # This ensures visits are incremental. Indeed, if we request a # second time an url, because of the requests_mock_datadir_visits # fixture, the file has to end with `_visit1`. expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( - target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), - target_type=TargetType.REVISION, + target=hash_to_bytes("24853190589d26d0ea2b6c0330b553ff39176e0c"), + target_type=TargetType.RELEASE, ), b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch( - target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"), - target_type=TargetType.REVISION, + target=hash_to_bytes("3d44fbe814ba802cfd77f83975e45766d3a2ba85"), + target_type=TargetType.RELEASE, ), }, ) check_snapshot(expected_snapshot, storage=swh_storage) stats = get_stats(swh_storage) assert { "content": 2, "directory": 5, "origin": 1, "origin_visit": 2, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 2, } == stats def test_evaluation_branch(swh_storage, requests_mock_datadir): loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) def test_eoferror(swh_storage, requests_mock_datadir): """Load a truncated archive which is invalid to make the uncompress function raising the exception EOFError. We then check if a snapshot is created, meaning this error is well managed. """ sources = ( "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa ) loader = NixGuixLoader(swh_storage, sources) loader.load() expected_snapshot = Snapshot( id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), }, ) check_snapshot(expected_snapshot, storage=swh_storage) def fake_download( url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None, auth: Optional[Tuple[str, str]] = None, ) -> Tuple[str, Dict]: """Fake download which raises HashCollision (for the sake of test simpliciy, let's accept that makes sense) For tests purpose only. 
""" if url == "https://example.com/file.txt": # instead of failing because it's a file not dealt with by the nix guix # loader, make it raise a hash collision raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", []) return download(url, dest, hashes, filename, auth) def test_raise_exception(swh_storage, requests_mock_datadir, mocker): mock_download = mocker.patch("swh.loader.package.loader.download") mock_download.side_effect = fake_download loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res == { "status": "eventful", "snapshot_id": SNAPSHOT1.id.hex(), } # The visit is partial because some artifact downloads failed assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) assert len(mock_download.mock_calls) == 2 def test_load_nixguix_one_common_artifact_from_other_loader( swh_storage, datadir, requests_mock_datadir_visits, caplog ): """Misformatted revision should be caught and logged, then loading continues """ caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader") # 1. first ingest with for example the archive loader gnu_url = "https://ftp.gnu.org/gnu/8sync/" release = "0.1.0" artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz" gnu_artifacts = [ { "time": 944729610, "url": artifact_url, "length": 221837, "filename": f"8sync-{release}.tar.gz", "version": release, } ] archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts) actual_load_status = archive_loader.load() - expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5" + expected_snapshot_id = "cdf8f335fa0c81c8ad089870ec14f52b1980eb6c" assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] == expected_snapshot_id # noqa assert_last_visit_matches( archive_loader.storage, gnu_url, status="full", type="tar", snapshot=hash_to_bytes(expected_snapshot_id), ) # 2. 
Then ingest with the nixguix loader which lists the same artifact within its # sources.json # ensure test setup is ok data_sources = os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json" ) all_sources = json.loads(open(data_sources).read()) found = False for source in all_sources["sources"]: if source["urls"][0] == artifact_url: found = True assert ( found is True ), f"test setup error: {artifact_url} must be in {data_sources}" # first visit with a snapshot, ok sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json" loader = NixGuixLoader(swh_storage, sources_url) actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" snapshot_id = actual_load_status2["snapshot_id"] assert_last_visit_matches( swh_storage, sources_url, status="full", type="nixguix", snapshot=hash_to_bytes(snapshot_id), ) snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id)) assert snapshot diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py index 000fb15..796f58c 100644 --- a/swh/loader/package/npm/tests/test_npm.py +++ b/swh/loader/package/npm/tests/test_npm.py @@ -1,620 +1,620 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import pytest from swh.loader.package import __version__ from swh.loader.package.npm.loader import ( NpmLoader, _author_str, extract_npm_package_author, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( - MetadataAuthority, - MetadataAuthorityType, - MetadataFetcher, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) +from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher +from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.interface import PagedResult @pytest.fixture def org_api_info(datadir) -> bytes: with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f: return f.read() def test_npm_author_str(): for author, expected_author in [ ("author", "author"), ( ["Al from quantum leap", "hal from 2001 space odyssey"], "Al from quantum leap", ), ([], ""), ({"name": "groot", "email": "groot@galaxy.org",}, "groot <groot@galaxy.org>"), ({"name": "somebody",}, "somebody"), ({"email": "no@one.org"}, " <no@one.org>"), # note first elt is an extra blank ({"name": "no one", "email": None,}, "no one"), ({"email": None,}, ""), ({"name": None}, ""), ({"name": None, "email": None,}, ""), ({}, ""), (None, None), ({"name": []}, "",), ( {"name": ["Susan McSween", "William H.
Bonney", "Doc Scurlock",]}, "Susan McSween", ), (None, None), ]: assert _author_str(author) == expected_author def test_npm_extract_npm_package_author(datadir): package_metadata_filepath = os.path.join( datadir, "https_replicate.npmjs.com", "org_visit1" ) with open(package_metadata_filepath) as json_file: package_metadata = json.load(json_file) extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person( fullname=b"mooz ", name=b"mooz", email=b"stillpedant@gmail.com", ) assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person( fullname=b"Masafumi Oyamada ", name=b"Masafumi Oyamada", email=b"stillpedant@gmail.com", ) package_json = json.loads( """ { "name": "highlightjs-line-numbers.js", "version": "2.7.0", "description": "Highlight.js line numbers plugin.", "main": "src/highlightjs-line-numbers.js", "dependencies": {}, "devDependencies": { "gulp": "^4.0.0", "gulp-rename": "^1.4.0", "gulp-replace": "^0.6.1", "gulp-uglify": "^1.2.0" }, "repository": { "type": "git", "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git" }, "author": "Yauheni Pakala ", "license": "MIT", "bugs": { "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues" }, "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Yauheni Pakala ", name=b"Yauheni Pakala", email=b"evgeniy.pakalo@gmail.com", ) package_json = json.loads( """ { "name": "3-way-diff", "version": "0.0.1", "description": "3-way diffing of JavaScript objects", "main": "index.js", "authors": [ { "name": "Shawn Walsh", "url": "https://github.com/shawnpwalsh" }, { "name": "Markham F Rollins IV", "url": "https://github.com/mrollinsiv" } ], "keywords": [ "3-way diff", "3 way diff", "three-way diff", "three way diff" ], "devDependencies": { "babel-core": "^6.20.0", "babel-preset-es2015": "^6.18.0", "mocha": "^3.0.2" }, "dependencies": { "lodash": "^4.15.0" } }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None ) package_json = json.loads( """ { "name": "yfe-ynpm", "version": "1.0.0", "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm", "repository": { "type": "git", "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git" }, "author": [ "fengmk2 (https://fengmk2.com)", "xufuzi (https://7993.org)" ], "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"fengmk2 (https://fengmk2.com)", name=b"fengmk2", email=b"fengmk2@gmail.com", ) package_json = json.loads( """ { "name": "umi-plugin-whale", "version": "0.0.8", "description": "Internal contract component", "authors": { "name": "xiaohuoni", "email": "448627663@qq.com" }, "repository": "alitajs/whale", "devDependencies": { "np": "^3.0.4", "umi-tools": "*" }, "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"xiaohuoni <448627663@qq.com>", name=b"xiaohuoni", email=b"448627663@qq.com", ) package_json_no_authors = json.loads( """{ "authors": null, "license": "MIT" }""" ) assert extract_npm_package_author(package_json_no_authors) == Person( fullname=b"", name=None, email=None ) def normalize_hashes(hashes): if isinstance(hashes, str): return hash_to_bytes(hashes) if isinstance(hashes, list): return [hash_to_bytes(x) for x in hashes] return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()} _expected_new_contents_first_visit = normalize_hashes( [ "4ce3058e16ab3d7e077f65aabf855c34895bf17c", 
"858c3ceee84c8311adc808f8cdb30d233ddc9d18", "0fa33b4f5a4e0496da6843a38ff1af8b61541996", "85a410f8ef8eb8920f2c384a9555566ad4a2e21b", "9163ac8025923d5a45aaac482262893955c9b37b", "692cf623b8dd2c5df2c2998fd95ae4ec99882fb4", "18c03aac6d3e910efb20039c15d70ab5e0297101", "41265c42446aac17ca769e67d1704f99e5a1394d", "783ff33f5882813dca9239452c4a7cadd4dba778", "b029cfb85107aee4590c2434a3329bfcf36f8fa1", "112d1900b4c2e3e9351050d1b542c9744f9793f3", "5439bbc4bd9a996f1a38244e6892b71850bc98fd", "d83097a2f994b503185adf4e719d154123150159", "d0939b4898e83090ee55fd9d8a60e312cfadfbaf", "b3523a26f7147e4af40d9d462adaae6d49eda13e", "cd065fb435d6fb204a8871bcd623d0d0e673088c", "2854a40855ad839a54f4b08f5cff0cf52fca4399", "b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe", "0f73d56e1cf480bded8a1ecf20ec6fc53c574713", "0d9882b2dfafdce31f4e77fe307d41a44a74cefe", "585fc5caab9ead178a327d3660d35851db713df1", "e8cd41a48d79101977e3036a87aeb1aac730686f", "5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7", "9c3cc2763bf9e9e37067d3607302c4776502df98", "3649a68410e354c83cd4a38b66bd314de4c8f5c9", "e96ed0c091de1ebdf587104eaf63400d1974a1fe", "078ca03d2f99e4e6eab16f7b75fbb7afb699c86c", "38de737da99514de6559ff163c988198bc91367a", ] ) _expected_new_directories_first_visit = normalize_hashes( [ "3370d20d6f96dc1c9e50f083e2134881db110f4f", "42753c0c2ab00c4501b552ac4671c68f3cf5aece", "d7895533ef5edbcffdea3f057d9fef3a1ef845ce", "80579be563e2ef3e385226fe7a3f079b377f142c", "3b0ddc6a9e58b4b53c222da4e27b280b6cda591c", "bcad03ce58ac136f26f000990fc9064e559fe1c0", "5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca", "e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd", "584b5b4b6cf7f038095e820b99386a9c232de931", "184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a", "bb5f4ee143c970367eb409f2e4c1104898048b9d", "1b95491047add1103db0dfdfa84a9735dcb11e88", "a00c6de13471a2d66e64aca140ddb21ef5521e62", "5ce6c1cd5cda2d546db513aaad8c72a44c7771e2", "c337091e349b6ac10d38a49cdf8c2401ef9bb0f2", "202fafcd7c0f8230e89d5496ad7f44ab12b807bf", "775cc516543be86c15c1dc172f49c0d4e6e78235", "ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e", ] ) -_expected_new_revisions_first_visit = normalize_hashes( +_expected_new_releases_first_visit = normalize_hashes( { - "d8a1c7474d2956ac598a19f0f27d52f7015f117e": ( + "d25e722a32c145b3eb88b416049dd35d27759a87": ( "42753c0c2ab00c4501b552ac4671c68f3cf5aece" ), - "5f9eb78af37ffd12949f235e86fac04898f9f72a": ( + "3522e846b97c0b8434c565fe891c0f082a357e5d": ( "3370d20d6f96dc1c9e50f083e2134881db110f4f" ), - "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": ( + "54f6c1711c6aedb6de3cf2d6347b9f772e343784": ( "d7895533ef5edbcffdea3f057d9fef3a1ef845ce" ), } ) def package_url(package): return "https://www.npmjs.com/package/%s" % package def package_metadata_url(package): return "https://replicate.npmjs.com/%s/" % package def test_npm_loader_first_visit(swh_storage, requests_mock_datadir, org_api_info): package = "org" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") + expected_snapshot_id = hash_to_bytes("ddaad89b0b4edb7eefe7c92e9b1166caa776ebbc") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) versions = [ - ("0.0.2", "d8a1c7474d2956ac598a19f0f27d52f7015f117e"), - ("0.0.3", "5f9eb78af37ffd12949f235e86fac04898f9f72a"), - ("0.0.4", "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"), + ("0.0.2", 
"d25e722a32c145b3eb88b416049dd35d27759a87"), + ("0.0.3", "3522e846b97c0b8434c565fe891c0f082a357e5d"), + ("0.0.4", "54f6c1711c6aedb6de3cf2d6347b9f772e343784"), ] expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target=b"releases/0.0.4", target_type=TargetType.ALIAS ), **{ b"releases/" + version_name.encode(): SnapshotBranch( - target=hash_to_bytes(version_id), target_type=TargetType.REVISION, + target=hash_to_bytes(version_id), target_type=TargetType.RELEASE, ) for (version_name, version_id) in versions }, }, ) check_snapshot(expected_snapshot, swh_storage) contents = swh_storage.content_get(_expected_new_contents_first_visit) count = sum(0 if content is None else 1 for content in contents) assert count == len(_expected_new_contents_first_visit) assert ( list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == [] ) - assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == [] + assert list(swh_storage.release_missing(_expected_new_releases_first_visit)) == [] metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", ) - for (version_name, revision_id) in versions: - revision = swh_storage.revision_get([hash_to_bytes(revision_id)])[0] - directory_id = revision.directory + for (version_name, release_id) in versions: + release = swh_storage.release_get([hash_to_bytes(release_id)])[0] + assert release.target_type == ModelObjectType.DIRECTORY + directory_id = release.target directory_swhid = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id, ) - revision_swhid = CoreSWHID( - object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id), + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=hash_to_bytes(release_id), ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.npm.loader.NpmLoader", version=__version__, ), discovery_date=loader.visit_date, format="replicate-npm-package-json", metadata=json.dumps( json.loads(org_api_info)["versions"][version_name] ).encode(), origin="https://www.npmjs.com/package/org", - revision=revision_swhid, + release=release_swhid, ) ] assert swh_storage.raw_extrinsic_metadata_get( directory_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, - "release": 0, - "revision": len(_expected_new_revisions_first_visit), + "release": len(_expected_new_releases_first_visit), + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_npm_loader_incremental_visit(swh_storage, requests_mock_datadir_visits): package = "org" url = package_url(package) loader = NpmLoader(swh_storage, url) - expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") + expected_snapshot_id = hash_to_bytes("ddaad89b0b4edb7eefe7c92e9b1166caa776ebbc") actual_load_status = loader.load() assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, - 
"release": 0, - "revision": len(_expected_new_revisions_first_visit), + "release": len(_expected_new_releases_first_visit), + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats # reset loader internal state del loader._cached_info del loader._cached__raw_info actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" snap_id2 = actual_load_status2["snapshot_id"] assert snap_id2 is not None assert snap_id2 != actual_load_status["snapshot_id"] assert_last_visit_matches(swh_storage, url, status="full", type="npm") stats = get_stats(swh_storage) assert { # 3 new releases artifacts "content": len(_expected_new_contents_first_visit) + 14, "directory": len(_expected_new_directories_first_visit) + 15, "origin": 1, "origin_visit": 2, - "release": 0, - "revision": len(_expected_new_revisions_first_visit) + 3, + "release": len(_expected_new_releases_first_visit) + 3, + "revision": 0, "skipped_content": 0, "snapshot": 2, } == stats urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith("https://registry.npmjs.org") ] assert len(urls) == len(set(urls)) # we visited each artifact once across @pytest.mark.usefixtures("requests_mock_datadir") def test_npm_loader_version_divergence(swh_storage): package = "@aller_shared" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92") + expected_snapshot_id = hash_to_bytes("7a89bc3cb51ff1d3213b2151c745d82c3b9d69b1") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.0" ), b"releases/0.1.0": SnapshotBranch( - target_type=TargetType.REVISION, - target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"), + target_type=TargetType.RELEASE, + target=hash_to_bytes("103fa6d0a1abb405468e3590dcf634bcb77f67be"), ), b"releases/0.1.1-alpha.14": SnapshotBranch( - target_type=TargetType.REVISION, - target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"), + target_type=TargetType.RELEASE, + target=hash_to_bytes("c00b54143582a4e963e0b86e8dfa58eedd260020"), ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { # 1 new releases artifacts "content": 534, "directory": 153, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_npm_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ package = "nativescript-telerik-analytics" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot.id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) def test_npm_artifact_with_no_upload_time(swh_storage, requests_mock_datadir): """With no time upload, artifact is skipped """ package = 
"jammit-no-time" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "uneventful", "snapshot_id": expected_snapshot.id.hex(), } assert_last_visit_matches( swh_storage, url, status="partial", type="npm", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) def test_npm_artifact_use_mtime_if_no_time(swh_storage, requests_mock_datadir): """With no time upload, artifact is skipped """ package = "jammit-express" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd") + expected_snapshot_id = hash_to_bytes("7f5e591dd3c4754abca4db1cc18355671e2c014c") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # artifact is used expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.0.1" ), b"releases/0.0.1": SnapshotBranch( - target_type=TargetType.REVISION, - target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"), + target_type=TargetType.RELEASE, + target=hash_to_bytes("199bf0ad020617357d608655e6549e526a65dc36"), ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) def test_npm_no_artifact(swh_storage, requests_mock_datadir): """If no artifacts at all is found for origin, the visit fails completely """ package = "catify" url = package_url(package) loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == { "status": "failed", } assert_last_visit_matches(swh_storage, url, status="failed", type="npm") def test_npm_origin_not_found(swh_storage, requests_mock_datadir): url = package_url("non-existent-url") loader = NpmLoader(swh_storage, url) assert loader.load() == {"status": "failed"} assert_last_visit_matches( swh_storage, url, status="not_found", type="npm", snapshot=None ) diff --git a/swh/loader/package/opam/tests/test_opam.py b/swh/loader/package/opam/tests/test_opam.py index b73dbeb..15bf035 100644 --- a/swh/loader/package/opam/tests/test_opam.py +++ b/swh/loader/package/opam/tests/test_opam.py @@ -1,318 +1,347 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.loader.package import __version__ from swh.loader.package.loader import RawExtrinsicMetadataCore from swh.loader.package.opam.loader import OpamLoader, OpamPackageInfo from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.interface import PagedResult OCB_METADATA = b"""\ opam-version: "2.0" name: "ocb" version: "0.1" synopsis: "SVG badge generator" description: "An OCaml library for SVG badge generation. 
There\'s also a command-line tool provided." maintainer: "OCamlPro " authors: "OCamlPro " license: "ISC" homepage: "https://ocamlpro.github.io/ocb/" doc: "https://ocamlpro.github.io/ocb/api/" bug-reports: "https://github.com/OCamlPro/ocb/issues" depends: [ "ocaml" {>= "4.05"} "dune" {>= "2.0"} "odoc" {with-doc} ] build: [ ["dune" "subst"] {dev} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/OCamlPro/ocb.git" url { src: "https://github.com/OCamlPro/ocb/archive/0.1.tar.gz" checksum: [ "sha256=aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46" "sha512=1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b" ] } """ # noqa def test_opam_loader_no_opam_repository_fails(swh_storage, tmpdir, datadir): """Running the opam loader without a prepared opam repository fails""" opam_url = f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "agrid" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=False, # The opam directory must be present ) # The loader does not initialize the opam root directory, so the opam root # does not exist and the loading fails. That's the expected setup for the # production workers (whose opam_root maintenance will be externally managed). actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} def test_opam_loader_one_version(tmpdir, requests_mock_datadir, datadir, swh_storage): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "agrid" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("4e4bf977312460329d7f769b0be89937c9827efc") + expected_snapshot_id = hash_to_bytes("50b5961c27dd4f8b138acce8bac4f90d1e33081f") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } - target = b"S\x8c\x8aq\xdcy\xa4/0\xa0\xb2j\xeb\xc1\x16\xad\xce\x06\xeaV" - expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch(target=b"agrid.0.1", target_type=TargetType.ALIAS,), b"agrid.0.1": SnapshotBranch( - target=target, target_type=TargetType.REVISION, + target=hash_to_bytes("efcb9ef9d0f2a85312463251732b42f9e45a5c12"), + target_type=TargetType.RELEASE, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 18, "directory": 8, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_opam_loader_many_version(tmpdir, requests_mock_datadir, datadir, swh_storage): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "directories" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("1b49be175dcf17c0f568bcd7aac3d4faadc41249") + expected_snapshot_id =
hash_to_bytes("f0a974e47999e74d323f1fb9604fde72527bda28") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target=b"directories.0.3", target_type=TargetType.ALIAS, ), b"directories.0.1": SnapshotBranch( - target=b"N\x92jA\xb2\x892\xeb\xcc\x9c\xa9\xb3\xea\xa7kz\xb08\xa6V", - target_type=TargetType.REVISION, + target=hash_to_bytes("1f839cb1f4720d6b33fdd856e3ff1119497979d9"), + target_type=TargetType.RELEASE, ), b"directories.0.2": SnapshotBranch( - target=b"yj\xc9\x1a\x8f\xe0\xaa\xff[\x88\xffz" - b"\x91C\xcc\x96\xb7\xd4\xf65", - target_type=TargetType.REVISION, + target=hash_to_bytes("4133834d966381804347efbc41e35dd2bdd48962"), + target_type=TargetType.RELEASE, ), b"directories.0.3": SnapshotBranch( - target=b"hA \xc4\xb5\x18A8\xb8C\x12\xa3\xa5T\xb7/v\x85X\xcb", - target_type=TargetType.REVISION, + target=hash_to_bytes("2f20cabfbacfe447b80dc2a4eb14d461775100c8"), + target_type=TargetType.RELEASE, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id ) check_snapshot(expected_snapshot, swh_storage) -def test_opam_revision(tmpdir, requests_mock_datadir, swh_storage, datadir): +def test_opam_release(tmpdir, requests_mock_datadir, swh_storage, datadir): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "ocb" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("398df115b9feb2f463efd21941d69b7d59cd9025") + expected_snapshot_id = hash_to_bytes("987425c6fe94d3972c4c4e97ee27a6a7c8b68e82") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } info_iter = loader.get_package_info("0.1") branch_name, package_info = next(info_iter) expected_branch_name = "ocb.0.1" expected_package_info = OpamPackageInfo( url="https://github.com/OCamlPro/ocb/archive/0.1.tar.gz", filename=None, author=Person( fullname=b"OCamlPro ", name=None, email=None ), committer=Person( fullname=b"OCamlPro ", name=None, email=None ), version="0.1", directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( metadata=OCB_METADATA, format="opam-package-definition", ) ], ) assert branch_name == expected_branch_name assert package_info == expected_package_info - revision_id = b"o\xad\x7f=\x07\xbb\xaah\xdbI(\xb0'\x10z\xfc\xff\x06x\x1b" + release_id = hash_to_bytes("8d0612cdf172e5dff3d876ca2bbc0f6003cc36cc") + + expected_snapshot = Snapshot( + id=hash_to_bytes(actual_load_status["snapshot_id"]), + branches={ + b"HEAD": SnapshotBranch(target=b"ocb.0.1", target_type=TargetType.ALIAS,), + b"ocb.0.1": SnapshotBranch( + target=release_id, target_type=TargetType.RELEASE, + ), + }, + ) + + assert_last_visit_matches( + swh_storage, url, status="full", type="opam", snapshot=expected_snapshot.id + ) + + check_snapshot(expected_snapshot, swh_storage) - revision = swh_storage.revision_get([revision_id])[0] + release = swh_storage.release_get([release_id])[0] - assert revision is not None + assert release is not None - assert revision.author == expected_package_info.author - assert revision.committer == expected_package_info.committer + assert release.author == expected_package_info.author def test_opam_metadata(tmpdir, requests_mock_datadir, swh_storage, datadir): opam_url = 
f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "ocb" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" - expected_revision_id = b"o\xad\x7f=\x07\xbb\xaah\xdbI(\xb0'\x10z\xfc\xff\x06x\x1b" + expected_release_id = hash_to_bytes("8d0612cdf172e5dff3d876ca2bbc0f6003cc36cc") + + expected_snapshot = Snapshot( + id=hash_to_bytes(actual_load_status["snapshot_id"]), + branches={ + b"HEAD": SnapshotBranch(target=b"ocb.0.1", target_type=TargetType.ALIAS,), + b"ocb.0.1": SnapshotBranch( + target=expected_release_id, target_type=TargetType.RELEASE, + ), + }, + ) + + assert_last_visit_matches( + swh_storage, url, status="full", type="opam", snapshot=expected_snapshot.id + ) + + check_snapshot(expected_snapshot, swh_storage) - revision = swh_storage.revision_get([expected_revision_id])[0] - assert revision is not None + release = swh_storage.release_get([expected_release_id])[0] + assert release is not None - revision_swhid = CoreSWHID( - object_type=ObjectType.REVISION, object_id=expected_revision_id + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=expected_release_id ) directory_swhid = ExtendedSWHID( - object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=opam_url, ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.opam.loader.OpamLoader", version=__version__, ), discovery_date=loader.visit_date, format="opam-package-definition", metadata=OCB_METADATA, origin=url, - revision=revision_swhid, + release=release_swhid, ) ] assert swh_storage.raw_extrinsic_metadata_get( directory_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py index ef4e5d7..df6b397 100644 --- a/swh/loader/package/pypi/tests/test_pypi.py +++ b/swh/loader/package/pypi/tests/test_pypi.py @@ -1,777 +1,801 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os from os import path from unittest.mock import patch import pytest from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.core.tarball import uncompress from swh.loader.package import __version__ from swh.loader.package.pypi.loader import ( PyPILoader, PyPIPackageInfo, author, extract_intrinsic_metadata, pypi_api_url, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.interface import PagedResult @pytest.fixture def _0805nexter_api_info(datadir) -> bytes: with open( os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb", ) as f: 
return f.read() def test_pypi_author_basic(): data = { "author": "i-am-groot", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b"i-am-groot <iam@groot.org>", name=b"i-am-groot", email=b"iam@groot.org", ) assert actual_author == expected_author def test_pypi_author_empty_email(): data = { "author": "i-am-groot", "author_email": "", } actual_author = author(data) expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",) assert actual_author == expected_author def test_pypi_author_empty_name(): data = { "author": "", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org", ) assert actual_author == expected_author def test_pypi_author_malformed(): data = { "author": "['pierre', 'paul', 'jacques']", "author_email": None, } actual_author = author(data) expected_author = Person( fullname=b"['pierre', 'paul', 'jacques']", name=b"['pierre', 'paul', 'jacques']", email=None, ) assert actual_author == expected_author def test_pypi_author_malformed_2(): data = { "author": "[marie, jeanne]", "author_email": "[marie@some, jeanne@thing]", } actual_author = author(data) expected_author = Person( fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>", name=b"[marie, jeanne]", email=b"[marie@some, jeanne@thing]", ) assert actual_author == expected_author def test_pypi_author_malformed_3(): data = { "author": "[marie, jeanne, pierre]", "author_email": "[marie@somewhere.org, jeanne@somewhere.org]", } actual_author = author(data) expected_author = Person( fullname=( b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>" ), name=b"[marie, jeanne, pierre]", email=b"[marie@somewhere.org, jeanne@somewhere.org]", ) assert actual_author == expected_author # configuration error # def test_pypi_api_url(): """Computing the pypi api url from the pypi project url should be ok""" url = pypi_api_url("https://pypi.org/project/requests") assert url == "https://pypi.org/pypi/requests/json" def test_pypi_api_url_with_slash(): """Computing the pypi api url from the pypi project url should be ok""" url = pypi_api_url("https://pypi.org/project/requests/") assert url == "https://pypi.org/pypi/requests/json" @pytest.mark.fs def test_pypi_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) archive_path = path.join( datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip" ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { "metadata_version": "1.0", "name": "0805nexter", "version": "1.1.0", "summary": "a simple printer of nested lest", "home_page": "http://www.hp.com", "author": "hgtkpython", "author_email": "2868989685@qq.com", "platforms": ["UNKNOWN"], } assert actual_metadata == expected_metadata @pytest.mark.fs def test_pypi_extract_intrinsic_metadata_failures(tmp_path): """Parsing a nonexistent path/archive/PKG-INFO yields an empty dict""" tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue) # nonexistent first-level path assert extract_intrinsic_metadata("/something-inexistent") == {} # nonexistent second-level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # no PKG-INFO within the second-level path existing_path_no_pkginfo = path.join(tmp_path, "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} #
LOADER SCENARIO # # "edge" cases (for the same origin) # # no release artifact: # {visit full, status: uneventful, no contents, etc...} requests_mock_datadir_missing_all = requests_mock_datadir_factory( ignore_urls=[ "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa ] ) def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all): """Loading a pypi project with all artifacts missing ends up with an empty snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None empty_snapshot = Snapshot(branches={}) assert_last_visit_matches( swh_storage, url, status="partial", type="pypi", snapshot=empty_snapshot.id ) stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir): """problem during loading: {visit: failed, status: failed, no snapshot} """ url = "https://pypi.org/project/0805nexter" with patch( "swh.loader.package.pypi.loader.PyPILoader._load_snapshot", side_effect=ValueError("Fake problem to fail visit"), ): loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} assert_last_visit_matches(swh_storage, url, status="failed", type="pypi") stats = get_stats(loader.storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats # problem during loading: # {visit: partial, status: uneventful, no snapshot} def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir): url = "https://pypi.org/project/0805nexter" with patch( "swh.loader.package.pypi.loader.PyPILoader.last_snapshot", side_effect=ValueError("Fake problem to fail the visit"), ): loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} assert_last_visit_matches(swh_storage, url, status="failed", type="pypi") stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats # problem during loading: failure early enough in between swh contents... # some contents (contents, directories, etc...)
have been written in storage # {visit: partial, status: eventful, no snapshot} # problem during loading: failure late enough we can have snapshots (some # revisions are written in storage already) # {visit: partial, status: eventful, snapshot} # "normal" cases (for the same origin) # requests_mock_datadir_missing_one = requests_mock_datadir_factory( ignore_urls=[ "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa ] ) # some missing release artifacts: # {visit partial, status: eventful, 1 snapshot} -def test_pypi_revision_metadata_structure( +def test_pypi_release_metadata_structure( swh_storage, requests_mock_datadir, _0805nexter_api_info ): url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None - expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21") - revision = swh_storage.revision_get([expected_revision_id])[0] - assert revision is not None + expected_release_id = hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c") + + expected_snapshot = Snapshot( + id=hash_to_bytes(actual_load_status["snapshot_id"]), + branches={ + b"HEAD": SnapshotBranch( + target=b"releases/1.2.0", target_type=TargetType.ALIAS, + ), + b"releases/1.1.0": SnapshotBranch( + target=hash_to_bytes("9478c9981887fdf5ada3f1fcb20c81069cdf4c44"), + target_type=TargetType.RELEASE, + ), + b"releases/1.2.0": SnapshotBranch( + target=hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c"), + target_type=TargetType.RELEASE, + ), + }, + ) - revision_swhid = CoreSWHID( - object_type=ObjectType.REVISION, object_id=expected_revision_id + assert_last_visit_matches( + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id + ) + + check_snapshot(expected_snapshot, swh_storage) + + release = swh_storage.release_get([expected_release_id])[0] + assert release is not None + + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=expected_release_id ) directory_swhid = ExtendedSWHID( - object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://pypi.org/", ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.pypi.loader.PyPILoader", version=__version__, ), discovery_date=loader.visit_date, format="pypi-project-json", metadata=json.dumps( json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0] ).encode(), origin=url, - revision=revision_swhid, + release=release_swhid, ) ] assert swh_storage.raw_extrinsic_metadata_get( directory_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) def test_pypi_visit_with_missing_artifact( swh_storage, requests_mock_datadir_missing_one ): """Load a pypi project with some missing artifacts ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355") + expected_snapshot_id = hash_to_bytes("eee24d5b0c156ebb4ece0c810c9dce636ebe881f") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } 
assert_last_visit_matches( swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id, ) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/1.2.0": SnapshotBranch( - target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), - target_type=TargetType.REVISION, + target=hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c"), + target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, storage=swh_storage) stats = get_stats(swh_storage) assert { "content": 3, "directory": 2, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 1, + "release": 1, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") + expected_snapshot_id = hash_to_bytes("62d957f2b5cdc515bea0a46252a3ab29ee271636") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"releases/1.1.0": SnapshotBranch( - target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), - target_type=TargetType.REVISION, + target=hash_to_bytes("9478c9981887fdf5ada3f1fcb20c81069cdf4c44"), + target_type=TargetType.RELEASE, ), b"releases/1.2.0": SnapshotBranch( - target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), - target_type=TargetType.REVISION, + target=hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c"), + target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir): """Multiple visits with no changes results in 1 same snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() - snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") + snapshot_id = hash_to_bytes("62d957f2b5cdc515bea0a46252a3ab29ee271636") assert actual_load_status == { "status": "eventful", "snapshot_id": snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=snapshot_id ) expected_snapshot = Snapshot( id=snapshot_id, branches={ b"releases/1.1.0": SnapshotBranch( - target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), - target_type=TargetType.REVISION, + target=hash_to_bytes("9478c9981887fdf5ada3f1fcb20c81069cdf4c44"), + target_type=TargetType.RELEASE, ), b"releases/1.2.0": SnapshotBranch( - target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), - target_type=TargetType.REVISION, + target=hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c"), + target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", 
target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats actual_load_status2 = loader.load() assert actual_load_status2 == { "status": "uneventful", "snapshot_id": actual_load_status2["snapshot_id"], } visit_status2 = assert_last_visit_matches( swh_storage, url, status="full", type="pypi" ) stats2 = get_stats(swh_storage) expected_stats2 = stats.copy() expected_stats2["origin_visit"] = 1 + 1 assert expected_stats2 == stats2 # same snapshot assert visit_status2.snapshot == snapshot_id def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits): """With prior visit, 2nd load will result with a different snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(swh_storage, url) visit1_actual_load_status = loader.load() visit1_stats = get_stats(swh_storage) - expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") + expected_snapshot_id = hash_to_bytes("62d957f2b5cdc515bea0a46252a3ab29ee271636") assert visit1_actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, - "release": 0, - "revision": 2, + "release": 2, + "revision": 0, "skipped_content": 0, "snapshot": 1, } == visit1_stats # Reset internal state del loader._cached__raw_info del loader._cached_info visit2_actual_load_status = loader.load() visit2_stats = get_stats(swh_storage) assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status - expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283") + expected_snapshot_id2 = hash_to_bytes("6a8a84e7f765bed4362315fb054adb2466598636") assert visit2_actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id2.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2 ) expected_snapshot = Snapshot( id=expected_snapshot_id2, branches={ b"releases/1.1.0": SnapshotBranch( - target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), - target_type=TargetType.REVISION, + target=hash_to_bytes("9478c9981887fdf5ada3f1fcb20c81069cdf4c44"), + target_type=TargetType.RELEASE, ), b"releases/1.2.0": SnapshotBranch( - target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), - target_type=TargetType.REVISION, + target=hash_to_bytes("a1e10745d375be66c1b65e55c0c15fe98776b53c"), + target_type=TargetType.RELEASE, ), b"releases/1.3.0": SnapshotBranch( - target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"), - target_type=TargetType.REVISION, + target=hash_to_bytes("d46442e99bb6e05df5f75a7f0f7f61a4f2098147"), + target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.3.0", target_type=TargetType.ALIAS, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) assert { "content": 6 + 1, # 1 more content "directory": 4 + 2, # 2 more directories "origin": 1, "origin_visit": 1 + 1, - "release": 0, - "revision": 2 + 1, # 1 more revision + "release": 2 + 1, # 1 more release + "revision": 0, "skipped_content": 0, "snapshot": 1 + 1, # 1 more 
snapshot } == visit2_stats urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith("https://files.pythonhosted.org") ] # visited each artifact once across 2 visits assert len(urls) == len(set(urls)) # release artifact, no new artifact # {visit full, status uneventful, same snapshot as before} # release artifact, old artifact with different checksums # {visit full, status full, new snapshot with shared history and some new # different history} # release with multiple sdist artifacts per pypi "version" # snapshot branch output is different def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = "https://pypi.org/project/nexter" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() - expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6") + expected_snapshot_id = hash_to_bytes("a136ee226316276c347d7be3da07df5828605927") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch( - target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), - target_type=TargetType.REVISION, + target=hash_to_bytes("9478c9981887fdf5ada3f1fcb20c81069cdf4c44"), + target_type=TargetType.RELEASE, ), b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch( - target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"), - target_type=TargetType.REVISION, + target=hash_to_bytes("b3391cb4007fb6872c4dfab476a7cfe7443a1bb4"), + target_type=TargetType.RELEASE, ), }, ) check_snapshot(expected_snapshot, swh_storage) def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ url = "https://pypi.org/project/upymenu" loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot(id=expected_snapshot_id, branches={}) assert_last_visit_matches( swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) def test_pypi_origin_not_found(swh_storage, requests_mock_datadir): url = "https://pypi.org/project/unknown" loader = PyPILoader(swh_storage, url) assert loader.load() == {"status": "failed"} assert_last_visit_matches( swh_storage, url, status="not_found", type="pypi", snapshot=None ) -def test_pypi_build_revision_missing_version_in_pkg_info(swh_storage, tmp_path): - """Simulate revision build when Version field is missing in PKG-INFO file.""" +def test_pypi_build_release_missing_version_in_pkg_info(swh_storage, tmp_path): + """Simulate release build when Version field is missing in PKG-INFO file.""" url = "https://pypi.org/project/GermlineFilter" # create package info p_info = PyPIPackageInfo( url=url, filename="GermlineFilter-1.2.tar.gz", directory_extrinsic_metadata=[], raw_info={}, comment_text="", sha256="e4982353c544d94b34f02c5690ab3d3ebc93480d5b62fe6f3317f23c515acc05", upload_time="2015-02-18T20:39:13", ) # create PKG-INFO file with missing Version field 
package_path = tmp_path / "GermlineFilter-1.2" pkg_info_path = package_path / "PKG-INFO" package_path.mkdir() pkg_info_path.write_text( """Metadata-Version: 1.2 Name: germline_filter Home-page: Author: Cristian Caloian (OICR) Author-email: cristian.caloian@oicr.on.ca License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN""" ) directory = hash_to_bytes("8b864d66f356afe35033d58f8e03b7c23a66751f") - # attempt to build revision + # attempt to build release loader = PyPILoader(swh_storage, url) - revision = loader.build_revision(p_info, str(tmp_path), directory) + release = loader.build_release("1.2", p_info, str(tmp_path), directory) # without comment_text and version in PKG-INFO, message should be empty - assert revision.message == b"" + assert release.message == b"" def test_filter_out_invalid_sdists(swh_storage, requests_mock): project_name = "swh-test-sdist-filtering" version = "1.0.0" url = f"https://pypi.org/project/{project_name}" json_url = f"https://pypi.org/pypi/{project_name}/json" common_sdist_entries = { "url": "", "comment_text": "", "digests": {"sha256": ""}, "upload_time": "", "packagetype": "sdist", } requests_mock.get( json_url, json={ "releases": { version: [ { **common_sdist_entries, "filename": f"{project_name}-{version}.{ext}", } for ext in ("tar.gz", "deb", "egg", "rpm", "whl") ] }, }, ) loader = PyPILoader(swh_storage, url) packages = list(loader.get_package_info(version=version)) assert len(packages) == 1 assert packages[0][1].filename.endswith(".tar.gz") diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py index 6f345f4..d57db7d 100644 --- a/swh/loader/package/tests/test_loader.py +++ b/swh/loader/package/tests/test_loader.py @@ -1,318 +1,451 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib +import logging import string from unittest.mock import Mock, call, patch import attr import pytest from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.model.model import ( - ExtID, Origin, OriginVisit, OriginVisitStatus, + Person, + Release, + Revision, + RevisionType, Snapshot, SnapshotBranch, TargetType, + TimestampWithTimezone, ) +from swh.model.model import ExtID +from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ObjectType from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_latest class FakeStorage: def origin_add(self, origins): raise ValueError("We refuse to add an origin") def origin_visit_get_latest(self, origin): return None class FakeStorage2(FakeStorage): def origin_add(self, origins): pass def origin_visit_add(self, visits): raise ValueError("We refuse to add an origin visit") class StubPackageInfo(BasePackageInfo): pass class StubPackageLoader(PackageLoader[StubPackageInfo]): def get_versions(self): return ["v1.0", "v2.0", "v3.0", "v4.0"] def get_package_info(self, version): p_info = StubPackageInfo("http://example.org", f"example-{version}.tar") extid_type = "extid-type1" if version in ("v1.0", "v2.0") else "extid-type2" # Versions 1.0 and 2.0 have an extid of a given type, v3.0 has an extid # of a different type patch.object( p_info, "extid", return_value=(extid_type, f"extid-of-{version}".encode()), autospec=True, ).start() yield (f"branch-{version}", p_info) - def 
_load_revision(self, p_info, origin): + def _load_release(self, version, p_info, origin): return None def test_loader_origin_visit_failure(swh_storage): """Failure to add an origin or origin visit should fail the load immediately """ loader = PackageLoader(swh_storage, "some-url") loader.storage = FakeStorage() actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} loader.storage = FakeStorage2() actual_load_status2 = loader.load() assert actual_load_status2 == {"status": "failed"} -def test_resolve_revision_from_extids() -> None: +def test_resolve_object_from_extids() -> None: loader = PackageLoader(None, None) # type: ignore p_info = Mock(wraps=BasePackageInfo(None, None)) # type: ignore # The PackageInfo does not support extids p_info.extid.return_value = None known_extids = { ("extid-type", b"extid-of-aaaa"): [ - CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20), + CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"a" * 20), ] } - revision_whitelist = {b"unused"} - assert ( - loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist) - is None - ) + whitelist = {b"unused"} + assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is not one of them (ie. cache miss) p_info.extid.return_value = ("extid-type", b"extid-of-cccc") - assert ( - loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist) - is None - ) + assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is one of them (ie. cache hit), - # but the target revision was not in the previous snapshot + # but the target release was not in the previous snapshot p_info.extid.return_value = ("extid-type", b"extid-of-aaaa") - assert ( - loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist) - is None - ) + assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is one of them (ie.
+    whitelist = {b"unused"}
+    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extid, and the PackageInfo is not one of them (ie. cache miss)
    p_info.extid.return_value = ("extid-type", b"extid-of-cccc")
-    assert (
-        loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
-        is None
-    )
+    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extid, and the PackageInfo is one of them (ie. cache hit),
-    # but the target revision was not in the previous snapshot
+    # but the target release was not in the previous snapshot
    p_info.extid.return_value = ("extid-type", b"extid-of-aaaa")
-    assert (
-        loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
-        is None
-    )
+    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extid, and the PackageInfo is one of them (ie. cache hit),
-    # and the target revision was in the previous snapshot
-    revision_whitelist = {b"a" * 20}
-    assert (
-        loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
-        == b"a" * 20
-    )
+    # and the target release was in the previous snapshot
+    whitelist = {b"a" * 20}
+    assert loader.resolve_object_from_extids(
+        known_extids, p_info, whitelist
+    ) == CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"a" * 20)

    # Same as before, but there is more than one extid, and only one is an allowed
-    # revision
-    revision_whitelist = {b"a" * 20}
+    # release
+    whitelist = {b"a" * 20}
    known_extids = {
        ("extid-type", b"extid-of-aaaa"): [
-            CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20),
-            CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20),
+            CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"b" * 20),
+            CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"a" * 20),
        ]
    }
-    assert (
-        loader.resolve_revision_from_extids(known_extids, p_info, revision_whitelist)
-        == b"a" * 20
-    )
+    assert loader.resolve_object_from_extids(
+        known_extids, p_info, whitelist
+    ) == CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"a" * 20)


def test_load_get_known_extids() -> None:
    """Checks PackageLoader.load() fetches known extids efficiently"""
    storage = Mock(wraps=get_storage("memory"))

    loader = StubPackageLoader(storage, "http://example.org")

    loader.load()

    # Calls should be grouped by extid type
    storage.extid_get_from_extid.assert_has_calls(
        [
            call("extid-type1", [b"extid-of-v1.0", b"extid-of-v2.0"]),
            call("extid-type2", [b"extid-of-v3.0", b"extid-of-v4.0"]),
        ],
        any_order=True,
    )


def test_load_extids() -> None:
    """Checks PackageLoader.load() skips iff it should, and writes (only)
    the new ExtIDs"""
    storage = get_storage("memory")

    origin = "http://example.org"

-    rev1_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"a" * 20)
-    rev2_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20)
-    rev3_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"c" * 20)
-    rev4_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"d" * 20)
+    rel1_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"a" * 20)
+    rel2_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"b" * 20)
+    rel3_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"c" * 20)
+    rel4_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"d" * 20)
    dir_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"e" * 20)

    # Results of a previous load
    storage.extid_add(
        [
-            ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid),
-            ExtID("extid-type2", b"extid-of-v2.0", rev2_swhid),
+            ExtID("extid-type1", b"extid-of-v1.0", rel1_swhid),
+            ExtID("extid-type2", b"extid-of-v2.0", rel2_swhid),
        ]
    )
    last_snapshot = Snapshot(
        branches={
            b"v1.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev1_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel1_swhid.object_id
            ),
            b"v2.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev2_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel2_swhid.object_id
            ),
            b"v3.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev3_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel3_swhid.object_id
            ),
        }
    )
    storage.snapshot_add([last_snapshot])
    date = datetime.datetime.now(tz=datetime.timezone.utc)
    storage.origin_add([Origin(url=origin)])
    storage.origin_visit_add(
        [OriginVisit(origin="http://example.org", visit=1, date=date, type="tar")]
    )
    storage.origin_visit_status_add(
        [
            OriginVisitStatus(
                origin=origin,
                visit=1,
                status="full",
                date=date,
                snapshot=last_snapshot.id,
            )
        ]
    )

    loader = StubPackageLoader(storage, "http://example.org")
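+    # The mocked _load_release stands in for the whole artifact-loading step:
+    # every package it is asked to load yields the same (rel4, dir) pair.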
    patch.object(
        loader,
-        "_load_revision",
-        return_value=(rev4_swhid.object_id, dir_swhid.object_id),
+        "_load_release",
+        return_value=(rel4_swhid.object_id, dir_swhid.object_id),
        autospec=True,
    ).start()

    loader.load()

-    assert loader._load_revision.mock_calls == [  # type: ignore
-        # v1.0: not loaded because there is already its (extid_type, extid, rev)
+    assert loader._load_release.mock_calls == [  # type: ignore
+        # v1.0: not loaded because there is already its (extid_type, extid, rel)
        # in the storage.
        # v2.0: loaded, because there is already a similar extid, but different type
-        call(StubPackageInfo(origin, "example-v2.0.tar"), Origin(url=origin)),
+        call("v2.0", StubPackageInfo(origin, "example-v2.0.tar"), Origin(url=origin)),
        # v3.0: loaded despite having an (extid_type, extid) in storage, because
        # the target of the extid is not in the previous snapshot
-        call(StubPackageInfo(origin, "example-v3.0.tar"), Origin(url=origin)),
+        call("v3.0", StubPackageInfo(origin, "example-v3.0.tar"), Origin(url=origin)),
        # v4.0: loaded, because there isn't its extid
-        call(StubPackageInfo(origin, "example-v4.0.tar"), Origin(url=origin)),
+        call("v4.0", StubPackageInfo(origin, "example-v4.0.tar"), Origin(url=origin)),
    ]

    # then check the snapshot has all the branches.
-    # versions 2.0 to 4.0 all point to rev4_swhid (instead of the value of the last
+    # versions 2.0 to 4.0 all point to rel4_swhid (instead of the value of the last
    # snapshot), because they had to be loaded (mismatched extid), and the mocked
-    # _load_revision always returns rev4_swhid.
+    # _load_release always returns rel4_swhid.
    snapshot = Snapshot(
        branches={
            b"branch-v1.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev1_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel1_swhid.object_id
            ),
            b"branch-v2.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev4_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
            b"branch-v3.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev4_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
            b"branch-v4.0": SnapshotBranch(
-                target_type=TargetType.REVISION, target=rev4_swhid.object_id
+                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
        }
    )
    assert snapshot_get_latest(storage, origin) == snapshot

    extids = storage.extid_get_from_target(
-        ObjectType.REVISION,
+        ObjectType.RELEASE,
        [
-            rev1_swhid.object_id,
-            rev2_swhid.object_id,
-            rev3_swhid.object_id,
-            rev4_swhid.object_id,
+            rel1_swhid.object_id,
+            rel2_swhid.object_id,
+            rel3_swhid.object_id,
+            rel4_swhid.object_id,
        ],
    )

    assert set(extids) == {
        # What we inserted at the beginning of the test:
-        ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid),
-        ExtID("extid-type2", b"extid-of-v2.0", rev2_swhid),
+        ExtID("extid-type1", b"extid-of-v1.0", rel1_swhid),
+        ExtID("extid-type2", b"extid-of-v2.0", rel2_swhid),
        # Added by the loader:
-        ExtID("extid-type1", b"extid-of-v2.0", rev4_swhid),
-        ExtID("extid-type2", b"extid-of-v3.0", rev4_swhid),
-        ExtID("extid-type2", b"extid-of-v4.0", rev4_swhid),
+        ExtID("extid-type1", b"extid-of-v2.0", rel4_swhid),
+        ExtID("extid-type2", b"extid-of-v3.0", rel4_swhid),
+        ExtID("extid-type2", b"extid-of-v4.0", rel4_swhid),
+    }
+
+
+def test_load_upgrade_from_revision_extids(caplog):
+    """Tests that, when loading incrementally based on a snapshot made by an old
+    version of the loader, the loader will convert revisions to releases
+    and add them to the storage.
+
+    Also checks that, if an extid exists pointing to a non-existent revision
+    (which should never happen, but you never know...), the release is loaded from
+    scratch."""
+
+    storage = get_storage("memory")
+
+    origin = "http://example.org"
+    dir1_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"d" * 20)
+    dir2_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"e" * 20)
+
+    date = TimestampWithTimezone.from_datetime(
+        datetime.datetime.now(tz=datetime.timezone.utc)
+    )
+    person = Person.from_fullname(b"Jane Doe ")
+
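+    # rev1 is what an old, revision-based run of the loader would have written;
+    # rel1 is the release the upgraded loader is expected to derive from it.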
+    rev1 = Revision(
+        message=b"blah",
+        author=person,
+        date=date,
+        committer=person,
+        committer_date=date,
+        directory=dir1_swhid.object_id,
+        type=RevisionType.TAR,
+        synthetic=True,
+    )
+
+    rel1 = Release(
+        name=b"v1.0",
+        message=b"blah",
+        author=person,
+        date=date,
+        target=dir1_swhid.object_id,
+        target_type=ModelObjectType.DIRECTORY,
+        synthetic=True,
+    )
+
+    rev1_swhid = rev1.swhid()
+    rel1_swhid = rel1.swhid()
+    rev2_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20)
+    rel2_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"c" * 20)
+
+    # Results of a previous load
+    storage.extid_add(
+        [
+            ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid),
+            ExtID("extid-type1", b"extid-of-v2.0", rev2_swhid),
+        ]
+    )
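+    # Only rev1 is actually written to the storage; rev2 is deliberately left
+    # missing, to exercise the extid-pointing-at-a-missing-revision error path.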
+    storage.revision_add([rev1])
+    last_snapshot = Snapshot(
+        branches={
+            b"v1.0": SnapshotBranch(
+                target_type=TargetType.REVISION, target=rev1_swhid.object_id
+            ),
+            b"v2.0": SnapshotBranch(
+                target_type=TargetType.REVISION, target=rev2_swhid.object_id
+            ),
+        }
+    )
+    storage.snapshot_add([last_snapshot])
+    date = datetime.datetime.now(tz=datetime.timezone.utc)
+    storage.origin_add([Origin(url=origin)])
+    storage.origin_visit_add(
+        [OriginVisit(origin="http://example.org", visit=1, date=date, type="tar")]
+    )
+    storage.origin_visit_status_add(
+        [
+            OriginVisitStatus(
+                origin=origin,
+                visit=1,
+                status="full",
+                date=date,
+                snapshot=last_snapshot.id,
+            )
+        ]
+    )
+
+    loader = StubPackageLoader(storage, "http://example.org")
+    patch.object(
+        loader,
+        "_load_release",
+        return_value=(rel2_swhid.object_id, dir2_swhid.object_id),
+        autospec=True,
+    ).start()
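+    # get_versions is restricted to three versions: one per upgrade case
+    # checked below (converted, reloaded, and newly loaded).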
ExtID("extid-type1", b"extid-of-v2.0", rel2_swhid), + ExtID("extid-type2", b"extid-of-v3.0", rel2_swhid), } def test_manifest_extid(): """Compute primary key should return the right identity """ @attr.s class TestPackageInfo(BasePackageInfo): a = attr.ib() b = attr.ib() length = attr.ib() filename = attr.ib() version = attr.ib() MANIFEST_FORMAT = string.Template("$a $b") p_info = TestPackageInfo( url="http://example.org/", a=1, b=2, length=221837, filename="8sync-0.1.0.tar.gz", version="0.1.0", ) actual_id = p_info.extid() assert actual_id == ("package-manifest-sha256", hashlib.sha256(b"1 2").digest()) def test_no_env_swh_config_filename_raise(monkeypatch): """No SWH_CONFIG_FILENAME environment variable makes package loader init raise """ class DummyPackageLoader(PackageLoader): """A dummy package loader for test purpose""" pass monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False) with pytest.raises( AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined" ): DummyPackageLoader.from_configfile(url="some-url") diff --git a/swh/loader/package/tests/test_loader_metadata.py b/swh/loader/package/tests/test_loader_metadata.py index 845018c..1f08ca3 100644 --- a/swh/loader/package/tests/test_loader_metadata.py +++ b/swh/loader/package/tests/test_loader_metadata.py @@ -1,217 +1,218 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from typing import Iterator, List, Sequence, Tuple import attr from swh.loader.package import __version__ from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Origin, Person, RawExtrinsicMetadata, Revision, RevisionType, Sha1Git, ) from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType EMPTY_SNAPSHOT_ID = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e" -FULL_SNAPSHOT_ID = "4a9b608c9f01860a627237dd2409d1d50ec4b054" +FULL_SNAPSHOT_ID = "4ac5730a9393f5099b63a35a17b6c33d36d70c3a" AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="http://example.org/", ) ORIGIN_URL = "http://example.org/archive.tgz" ORIGIN_SWHID = Origin(ORIGIN_URL).swhid() REVISION_ID = hash_to_bytes("8ff44f081d43176474b267de5451f2c2e88089d0") -REVISION_SWHID = CoreSWHID(object_type=ObjectType.REVISION, object_id=REVISION_ID) +RELEASE_ID = hash_to_bytes("9477a708196b44e59efb4e47b7d979a4146bd428") +RELEASE_SWHID = CoreSWHID(object_type=ObjectType.RELEASE, object_id=RELEASE_ID) DIRECTORY_ID = hash_to_bytes("aa" * 20) DIRECTORY_SWHID = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=DIRECTORY_ID ) FETCHER = MetadataFetcher( name="swh.loader.package.tests.test_loader_metadata.MetadataTestLoader", version=__version__, ) DISCOVERY_DATE = datetime.datetime.now(tz=datetime.timezone.utc) DIRECTORY_METADATA = [ RawExtrinsicMetadata( target=DIRECTORY_SWHID, discovery_date=DISCOVERY_DATE, authority=AUTHORITY, fetcher=FETCHER, format="test-format1", metadata=b"foo bar", origin=ORIGIN_URL, - revision=REVISION_SWHID, + release=RELEASE_SWHID, ), RawExtrinsicMetadata( target=DIRECTORY_SWHID, discovery_date=DISCOVERY_DATE + datetime.timedelta(seconds=1), authority=AUTHORITY, fetcher=FETCHER, format="test-format2", metadata=b"bar baz", 
    actual_id = p_info.extid()
    assert actual_id == ("package-manifest-sha256", hashlib.sha256(b"1 2").digest())


def test_no_env_swh_config_filename_raise(monkeypatch):
    """No SWH_CONFIG_FILENAME environment variable makes package loader init raise

    """

    class DummyPackageLoader(PackageLoader):
        """A dummy package loader for test purpose"""

        pass

    monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False)

    with pytest.raises(
        AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined"
    ):
        DummyPackageLoader.from_configfile(url="some-url")
diff --git a/swh/loader/package/tests/test_loader_metadata.py b/swh/loader/package/tests/test_loader_metadata.py
index 845018c..1f08ca3 100644
--- a/swh/loader/package/tests/test_loader_metadata.py
+++ b/swh/loader/package/tests/test_loader_metadata.py
@@ -1,217 +1,218 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import datetime
from typing import Iterator, List, Sequence, Tuple

import attr

from swh.loader.package import __version__
from swh.loader.package.loader import (
    BasePackageInfo,
    PackageLoader,
    RawExtrinsicMetadataCore,
)
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
    MetadataAuthority,
    MetadataAuthorityType,
    MetadataFetcher,
    Origin,
    Person,
    RawExtrinsicMetadata,
    Revision,
    RevisionType,
    Sha1Git,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType

EMPTY_SNAPSHOT_ID = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"
-FULL_SNAPSHOT_ID = "4a9b608c9f01860a627237dd2409d1d50ec4b054"
+FULL_SNAPSHOT_ID = "4ac5730a9393f5099b63a35a17b6c33d36d70c3a"

AUTHORITY = MetadataAuthority(
    type=MetadataAuthorityType.FORGE, url="http://example.org/",
)
ORIGIN_URL = "http://example.org/archive.tgz"
ORIGIN_SWHID = Origin(ORIGIN_URL).swhid()

REVISION_ID = hash_to_bytes("8ff44f081d43176474b267de5451f2c2e88089d0")
-REVISION_SWHID = CoreSWHID(object_type=ObjectType.REVISION, object_id=REVISION_ID)
+RELEASE_ID = hash_to_bytes("9477a708196b44e59efb4e47b7d979a4146bd428")
+RELEASE_SWHID = CoreSWHID(object_type=ObjectType.RELEASE, object_id=RELEASE_ID)

DIRECTORY_ID = hash_to_bytes("aa" * 20)
DIRECTORY_SWHID = ExtendedSWHID(
    object_type=ExtendedObjectType.DIRECTORY, object_id=DIRECTORY_ID
)

FETCHER = MetadataFetcher(
    name="swh.loader.package.tests.test_loader_metadata.MetadataTestLoader",
    version=__version__,
)

DISCOVERY_DATE = datetime.datetime.now(tz=datetime.timezone.utc)

DIRECTORY_METADATA = [
    RawExtrinsicMetadata(
        target=DIRECTORY_SWHID,
        discovery_date=DISCOVERY_DATE,
        authority=AUTHORITY,
        fetcher=FETCHER,
        format="test-format1",
        metadata=b"foo bar",
        origin=ORIGIN_URL,
-        revision=REVISION_SWHID,
+        release=RELEASE_SWHID,
    ),
    RawExtrinsicMetadata(
        target=DIRECTORY_SWHID,
        discovery_date=DISCOVERY_DATE + datetime.timedelta(seconds=1),
        authority=AUTHORITY,
        fetcher=FETCHER,
        format="test-format2",
        metadata=b"bar baz",
        origin=ORIGIN_URL,
-        revision=REVISION_SWHID,
+        release=RELEASE_SWHID,
    ),
]

ORIGIN_METADATA = [
    RawExtrinsicMetadata(
        target=ORIGIN_SWHID,
        discovery_date=datetime.datetime.now(tz=datetime.timezone.utc),
        authority=AUTHORITY,
        fetcher=FETCHER,
        format="test-format3",
        metadata=b"baz qux",
    ),
]


class MetadataTestLoader(PackageLoader[BasePackageInfo]):
    def get_versions(self) -> Sequence[str]:
        return ["v1.0.0"]

    def _load_directory(self, dl_artifacts, tmpdir):
        class directory:
            hash = DIRECTORY_ID

-        return (None, directory)  # just enough for _load_revision to work
+        return (None, directory)  # just enough for _load_release to work

    def download_package(self, p_info: BasePackageInfo, tmpdir: str):
        return [("path", {"artifact_key": "value", "length": 0})]

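+    # NOTE: this loader still implements build_revision() rather than
+    # build_release(); the RELEASE_ID constant above is presumably the id of
+    # the release the base loader derives from this synthetic revision.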
    def build_revision(
        self, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git
    ):
        return Revision(
            id=REVISION_ID,
            message=b"",
            author=Person.from_fullname(b""),
            committer=Person.from_fullname(b""),
            date=None,
            committer_date=None,
            type=RevisionType.TAR,
            directory=DIRECTORY_ID,
            synthetic=False,
        )

    def get_metadata_authority(self):
        return attr.evolve(AUTHORITY, metadata={})

    def get_package_info(self, version: str) -> Iterator[Tuple[str, BasePackageInfo]]:
        m0 = DIRECTORY_METADATA[0]
        m1 = DIRECTORY_METADATA[1]
        p_info = BasePackageInfo(
            url=ORIGIN_URL,
            filename="archive.tgz",
            directory_extrinsic_metadata=[
                RawExtrinsicMetadataCore(m0.format, m0.metadata, m0.discovery_date),
                RawExtrinsicMetadataCore(m1.format, m1.metadata, m1.discovery_date),
            ],
        )
        yield (version, p_info)

    def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
        m = ORIGIN_METADATA[0]
        return [RawExtrinsicMetadataCore(m.format, m.metadata, m.discovery_date)]


def test_load_artifact_metadata(swh_storage, caplog):
    loader = MetadataTestLoader(swh_storage, ORIGIN_URL)

    load_status = loader.load()
    assert load_status == {
        "status": "eventful",
        "snapshot_id": FULL_SNAPSHOT_ID,
    }

    authority = MetadataAuthority(
        type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/",
    )

    result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, authority,)
    assert result.next_page_token is None
    assert len(result.results) == 1
    assert result.results[0] == RawExtrinsicMetadata(
        target=DIRECTORY_SWHID,
        discovery_date=result.results[0].discovery_date,
        authority=authority,
        fetcher=FETCHER,
        format="original-artifacts-json",
        metadata=b'[{"artifact_key": "value", "length": 0}]',
        origin=ORIGIN_URL,
-        revision=REVISION_SWHID,
+        release=RELEASE_SWHID,
    )


def test_load_metadata(swh_storage, caplog):
    loader = MetadataTestLoader(swh_storage, ORIGIN_URL)

    load_status = loader.load()
    assert load_status == {
        "status": "eventful",
        "snapshot_id": FULL_SNAPSHOT_ID,
    }

    result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
    assert result.next_page_token is None
    assert result.results == DIRECTORY_METADATA

    result = swh_storage.raw_extrinsic_metadata_get(ORIGIN_SWHID, AUTHORITY,)
    assert result.next_page_token is None
    assert result.results == ORIGIN_METADATA

    assert caplog.text == ""


def test_existing_authority(swh_storage, caplog):
    loader = MetadataTestLoader(swh_storage, ORIGIN_URL)

    load_status = loader.load()
    assert load_status == {
        "status": "eventful",
        "snapshot_id": FULL_SNAPSHOT_ID,
    }

    result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
    assert result.next_page_token is None
    assert result.results == DIRECTORY_METADATA

    assert caplog.text == ""


def test_existing_fetcher(swh_storage, caplog):
    loader = MetadataTestLoader(swh_storage, ORIGIN_URL)

    load_status = loader.load()
    assert load_status == {
        "status": "eventful",
        "snapshot_id": FULL_SNAPSHOT_ID,
    }

    result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
    assert result.next_page_token is None
    assert result.results == DIRECTORY_METADATA

    assert caplog.text == ""