
diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py
index a592826..aeddcdf 100644
--- a/swh/loader/package/archive/tests/test_archive.py
+++ b/swh/loader/package/archive/tests/test_archive.py
@@ -1,465 +1,465 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import hashlib
from io import BytesIO
from pathlib import Path
import string
import attr
import pytest
from requests.exceptions import ContentDecodingError
from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import Snapshot, SnapshotBranch, TargetType
URL = "https://ftp.gnu.org/gnu/8sync/"
GNU_ARTIFACTS = [
{
"time": 944729610,
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
"length": 221837,
"filename": "8sync-0.1.0.tar.gz",
"version": "0.1.0",
},
{
"time": 1480991830,
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
"length": 238466,
"filename": "8sync-0.2.0.tar.gz",
"version": "0.2.0",
},
]
_expected_new_contents_first_visit = [
"e9258d81faf5881a2f96a77ba609396f82cb97ad",
"1170cf105b04b7e2822a0e09d2acf71da7b9a130",
"fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac",
"0057bec9b5422aff9256af240b177ac0e3ac2608",
"2b8d0d0b43a1078fc708930c8ddc2956a86c566e",
"27de3b3bc6545d2a797aeeb4657c0e215a0c2e55",
"2e6db43f5cd764e677f416ff0d0c78c7a82ef19b",
"ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62",
"edeb33282b2bffa0e608e9d2fd960fd08093c0ea",
"d64e64d4c73679323f8d4cde2643331ba6c20af9",
"7a756602914be889c0a2d3952c710144b3e64cb0",
"84fb589b554fcb7f32b806951dcf19518d67b08f",
"8624bcdae55baeef00cd11d5dfcfa60f68710a02",
"e08441aeab02704cfbd435d6445f7c072f8f524e",
"f67935bc3a83a67259cda4b2d43373bd56703844",
"809788434b433eb2e3cfabd5d591c9a659d5e3d8",
"7d7c6c8c5ebaeff879f61f37083a3854184f6c41",
"b99fec102eb24bffd53ab61fc30d59e810f116a2",
"7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68",
"f0c97052e567948adf03e641301e9983c478ccff",
"7fb724242e2b62b85ca64190c31dcae5303e19b3",
"4f9709e64a9134fe8aefb36fd827b84d8b617ab5",
"7350628ccf194c2c3afba4ac588c33e3f3ac778d",
"0bb892d9391aa706dc2c3b1906567df43cbe06a2",
"49d4c0ce1a16601f1e265d446b6c5ea6b512f27c",
"6b5cc594ac466351450f7f64a0b79fdaf4435ad3",
"3046e5d1f70297e2a507b98224b6222c9688d610",
"1572607d456d7f633bc6065a2b3048496d679a31",
]
_expected_new_directories_first_visit = [
"daabc65ec75d487b1335ffc101c0ac11c803f8fc",
"263be23b4a8101d3ad0d9831319a3e0f2b065f36",
"7f6e63ba6eb3e2236f65892cd822041f1a01dd5c",
"4db0a3ecbc976083e2dac01a62f93729698429a3",
"dfef1c80e1098dd5deda664bb44a9ab1f738af13",
"eca971d346ea54d95a6e19d5051f900237fafdaa",
"3aebc29ed1fccc4a6f2f2010fb8e57882406b528",
]
_expected_new_revisions_first_visit = {
"44183488c0774ce3c957fa19ba695cf18a4a42b3": (
"3aebc29ed1fccc4a6f2f2010fb8e57882406b528"
)
}
def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir):
url = URL
unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz"
loader = ArchiveLoader(
swh_storage,
url,
artifacts=[
{
"time": 944729610,
"url": unknown_artifact_url, # unknown artifact
"length": 221837,
"filename": "8sync-0.1.0.tar.gz",
"version": "0.1.0",
}
],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert_last_visit_matches(swh_storage, url, status="partial", type="tar")
def test_archive_visit_with_release_artifact_no_prior_visit(
swh_storage, requests_mock_datadir
):
"""With no prior visit, load a gnu project ends up with 1 snapshot
"""
loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS[:1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_snapshot_first_visit_id = hash_to_bytes(
"c419397fd912039825ebdbea378bc6283f006bf5"
)
assert (
hash_to_bytes(actual_load_status["snapshot_id"])
== expected_snapshot_first_visit_id
)
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit),
"skipped_content": 0,
"snapshot": 1,
} == stats
- expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
- assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
-
- expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
- assert list(swh_storage.directory_missing(expected_dirs)) == []
-
- expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit)
- assert list(swh_storage.revision_missing(expected_revs)) == []
-
expected_snapshot = Snapshot(
id=expected_snapshot_first_visit_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS, target=b"releases/0.1.0",
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.REVISION,
- target=hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3"),
+ target=hash_to_bytes(list(_expected_new_revisions_first_visit)[0]),
),
},
)
check_snapshot(expected_snapshot, swh_storage)
+ expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
+ assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
+
+ expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
+ assert list(swh_storage.directory_missing(expected_dirs)) == []
+
+ expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit)
+ assert list(swh_storage.revision_missing(expected_revs)) == []
+
def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project ends up with 1 snapshot
"""
url = URL
loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS[:1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
assert actual_load_status2["snapshot_id"] is not None
assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project ends up with 1 snapshot
"""
url = URL
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, url, [artifact1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit),
"skipped_content": 0,
"snapshot": 1,
} == stats
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
artifact2 = GNU_ARTIFACTS[1]
loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2])
stats2 = get_stats(swh_storage)
assert stats == stats2 # ensure we share the storage
actual_load_status2 = loader2.load()
assert actual_load_status2["status"] == "eventful"
assert actual_load_status2["snapshot_id"] is not None
stats2 = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit) + 14,
"directory": len(_expected_new_directories_first_visit) + 8,
"origin": 1,
"origin_visit": 1 + 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit) + 1,
"skipped_content": 0,
"snapshot": 1 + 1,
} == stats2
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
# 1 artifact (2nd time no modification) + 1 new artifact
assert len(urls) == 2
def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir):
"""Load a project archive (not gnu) ends up with 1 snapshot
"""
url = "https://something.else.org/8sync/"
artifacts = [ # this is not a gnu artifact
{
"time": "1999-12-09T09:53:30+00:00", # it's also not a timestamp
"sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4", # noqa
# keep a gnu artifact reference to avoid adding other test files
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
"length": 238466,
"filename": "8sync-0.2.0.tar.gz",
"version": "0.2.0",
}
]
# Here the loader defines the extid manifest format used to check artifact
# existence, instead of the default archive loader format
loader = ArchiveLoader(
swh_storage,
url,
artifacts=artifacts,
extid_manifest_format="$sha256 $length $url",
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
def test_archive_extid():
"""Compute primary key should return the right identity
"""
@attr.s
class TestPackageInfo(ArchivePackageInfo):
a = attr.ib()
b = attr.ib()
metadata = GNU_ARTIFACTS[0]
p_info = TestPackageInfo(
raw_info={**metadata, "a": 1, "b": 2}, a=1, b=2, **metadata,
)
for manifest_format, expected_manifest in [
(string.Template("$a $b"), b"1 2"),
(string.Template(""), b""),
(None, "{time} {length} {version} {url}".format(**metadata).encode()),
]:
actual_id = p_info.extid(manifest_format=manifest_format)
assert actual_id == (
"package-manifest-sha256",
hashlib.sha256(expected_manifest).digest(),
)
with pytest.raises(KeyError):
p_info.extid(manifest_format=string.Template("$a $unknown_key"))
def test_archive_snapshot_append(swh_storage, requests_mock_datadir):
# first loading with a first artifact
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
branch_artifact1_name = f"releases/{artifact1['version']}".encode()
assert b"HEAD" in snapshot.branches
assert branch_artifact1_name in snapshot.branches
assert snapshot.branches[b"HEAD"].target == branch_artifact1_name
# second loading with a second artifact
artifact2 = GNU_ARTIFACTS[1]
loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot, should contain a new branch and the
# branch for the first artifact
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 3
branch_artifact2_name = f"releases/{artifact2['version']}".encode()
assert b"HEAD" in snapshot.branches
assert branch_artifact2_name in snapshot.branches
assert branch_artifact1_name in snapshot.branches
assert snapshot.branches[b"HEAD"].target == branch_artifact2_name
def test_archive_snapshot_append_branch_override(swh_storage, requests_mock_datadir):
# first loading for a first artifact
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
branch_artifact1_name = f"releases/{artifact1['version']}".encode()
assert branch_artifact1_name in snapshot.branches
branch_target_first_visit = snapshot.branches[branch_artifact1_name].target
# second loading for a second artifact with same version as the first one
# but with different tarball content
artifact2 = dict(GNU_ARTIFACTS[0])
artifact2["url"] = GNU_ARTIFACTS[1]["url"]
artifact2["time"] = GNU_ARTIFACTS[1]["time"]
artifact2["length"] = GNU_ARTIFACTS[1]["length"]
loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot, should contain the same branch as previously
# but with different target
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
assert branch_artifact1_name in snapshot.branches
branch_target_second_visit = snapshot.branches[branch_artifact1_name].target
assert branch_target_first_visit != branch_target_second_visit
@pytest.fixture
def not_gzipped_tarball_bytes(datadir):
return Path(datadir, "not_gzipped_tarball.tar.gz").read_bytes()
def test_archive_not_gzipped_tarball(
swh_storage, requests_mock, not_gzipped_tarball_bytes
):
"""Check that a tarball erroneously marked as gzip compressed can still
be downloaded and processed.
"""
filename = "not_gzipped_tarball.tar.gz"
url = f"https://example.org/ftp/{filename}"
requests_mock.get(
url,
[
{"exc": ContentDecodingError,},
{"body": BytesIO(not_gzipped_tarball_bytes),},
],
)
loader = ArchiveLoader(
swh_storage,
url,
artifacts=[
{
"time": 944729610,
"url": url,
"length": 221837,
"filename": filename,
"version": "0.1.0",
}
],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
assert b"releases/0.1.0" in snapshot.branches
diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py
index 1b2d28d..9c03b99 100644
--- a/swh/loader/package/cran/tests/test_cran.py
+++ b/swh/loader/package/cran/tests/test_cran.py
@@ -1,364 +1,374 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from datetime import datetime, timezone
import os
from os import path
from unittest.mock import patch
from dateutil.tz import tzlocal
import pytest
from swh.core.tarball import uncompress
from swh.loader.package.cran.loader import (
CRANLoader,
extract_intrinsic_metadata,
parse_date,
parse_debian_control,
)
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone
SNAPSHOT = Snapshot(
id=hash_to_bytes("920adcccc78aaeedd3cfa4459dd900d8c3431a21"),
branches={
b"HEAD": SnapshotBranch(
target=b"releases/2.22-6", target_type=TargetType.ALIAS
),
b"releases/2.22-6": SnapshotBranch(
target=hash_to_bytes("42bdb16facd5140424359c8ce89a28ecfa1ce603"),
target_type=TargetType.REVISION,
),
},
)
def test_cran_parse_date():
data = [
# parsable, some have debatable results though
("2001-June-08", datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)),
(
"Tue Dec 27 15:06:08 PST 2011",
datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc),
),
("8-14-2013", datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)),
("2011-01", datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)),
("201109", datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)),
("04-12-2014", datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)),
(
"2018-08-24, 10:40:10",
datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc),
),
("2013-October-16", datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)),
("Aug 23, 2013", datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)),
("27-11-2014", datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)),
("2019-09-26,", datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)),
("9/25/2014", datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)),
(
"Fri Jun 27 17:23:53 2014",
datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc),
),
("28-04-2014", datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)),
("04-14-2014", datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)),
(
"2019-05-08 14:17:31 UTC",
datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc),
),
(
"Wed May 21 13:50:39 CEST 2014",
datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal()),
),
(
"2018-04-10 00:01:04 KST",
datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc),
),
("2019-08-25 10:45", datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)),
("March 9, 2015", datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)),
("Aug. 18, 2012", datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)),
("2014-Dec-17", datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)),
("March 01, 2013", datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)),
("2017-04-08.", datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)),
("2014-Apr-22", datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)),
(
"Mon Jan 12 19:54:04 2015",
datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc),
),
("May 22, 2014", datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)),
(
"2014-08-12 09:55:10 EDT",
datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc),
),
# unparsable
("Fabruary 21, 2012", None),
('2019-05-28"', None),
("2017-03-01 today", None),
("2016-11-0110.1093/icesjms/fsw182", None),
("2019-07-010", None),
("2015-02.23", None),
("20013-12-30", None),
("2016-08-017", None),
("2019-02-07l", None),
("2018-05-010", None),
("2019-09-27 KST", None),
("$Date$", None),
("2019-09-27 KST", None),
("2019-06-22 $Date$", None),
("$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $", None),
("2015-7-013", None),
("2018-05-023", None),
("Check NEWS file for changes: news(package='simSummary')", None),
]
for date, expected_date in data:
actual_tstz = parse_date(date)
if expected_date is None:
assert actual_tstz is None, date
else:
expected_tstz = TimestampWithTimezone.from_datetime(expected_date)
assert actual_tstz == expected_tstz, date
@pytest.mark.fs
def test_cran_extract_intrinsic_metadata(tmp_path, datadir):
"""Parsing existing archive's PKG-INFO should yield results"""
uncompressed_archive_path = str(tmp_path)
# sample url
# https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz # noqa
archive_path = path.join(
datadir,
"https_cran.r-project.org",
"src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz",
)
uncompress(archive_path, dest=uncompressed_archive_path)
actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
expected_metadata = {
"Package": "KernSmooth",
"Priority": "recommended",
"Version": "2.22-6",
"Date": "2001-June-08",
"Title": "Functions for kernel smoothing for Wand & Jones (1995)",
"Author": "S original by Matt Wand.\n\tR port by Brian Ripley <ripley@stats.ox.ac.uk>.", # noqa
"Maintainer": "Brian Ripley <ripley@stats.ox.ac.uk>",
"Description": 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. (1995) "Kernel Smoothing".', # noqa
"License": "Unlimited use and distribution (see LICENCE).",
"URL": "http://www.biostat.harvard.edu/~mwand",
}
assert actual_metadata == expected_metadata
@pytest.mark.fs
def test_cran_extract_intrinsic_metadata_failures(tmp_path):
"""Parsing inexistent path/archive/PKG-INFO yield None"""
# inexistent first level path
assert extract_intrinsic_metadata("/something-inexistent") == {}
# inexistent second level path (as expected by pypi archives)
assert extract_intrinsic_metadata(tmp_path) == {}
# inexistent PKG-INFO within second level path
existing_path_no_pkginfo = str(tmp_path / "something")
os.mkdir(existing_path_no_pkginfo)
assert extract_intrinsic_metadata(tmp_path) == {}
def test_cran_one_visit(swh_storage, requests_mock_datadir):
version = "2.22-6"
base_url = "https://cran.r-project.org"
origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
artifact_url = (
f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa
)
loader = CRANLoader(
swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version,}]
)
actual_load_status = loader.load()
assert actual_load_status == {
"status": "eventful",
"snapshot_id": SNAPSHOT.id.hex(),
}
- check_snapshot(SNAPSHOT, swh_storage)
+ assert_last_visit_matches(
+ swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
+ )
- assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran")
+ check_snapshot(SNAPSHOT, swh_storage)
visit_stats = get_stats(swh_storage)
assert {
"content": 33,
"directory": 7,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == visit_stats
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith(base_url)
]
# visited the single artifact once
assert len(urls) == 1
def test_cran_2_visits_same_origin(swh_storage, requests_mock_datadir):
"""Multiple visits on the same origin, only 1 archive fetch"""
version = "2.22-6"
base_url = "https://cran.r-project.org"
origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
artifact_url = (
f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa
)
loader = CRANLoader(
swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version}]
)
# first visit
actual_load_status = loader.load()
expected_snapshot_id = "920adcccc78aaeedd3cfa4459dd900d8c3431a21"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": SNAPSHOT.id.hex(),
}
check_snapshot(SNAPSHOT, swh_storage)
- assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran")
+ assert_last_visit_matches(
+ swh_storage, origin_url, status="full", type="cran", snapshot=SNAPSHOT.id
+ )
visit_stats = get_stats(swh_storage)
assert {
"content": 33,
"directory": 7,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == visit_stats
# second visit
actual_load_status2 = loader.load()
assert actual_load_status2 == {
"status": "uneventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran")
+ assert_last_visit_matches(
+ swh_storage,
+ origin_url,
+ status="full",
+ type="cran",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
visit_stats2 = get_stats(swh_storage)
visit_stats["origin_visit"] += 1
assert visit_stats2 == visit_stats, "same stats as 1st visit, +1 visit"
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith(base_url)
]
assert len(urls) == 1, "visited one time artifact url (across 2 visits)"
def test_cran_parse_debian_control(datadir):
description_file = os.path.join(datadir, "description", "acepack")
actual_metadata = parse_debian_control(description_file)
assert actual_metadata == {
"Package": "acepack",
"Maintainer": "Shawn Garbett",
"Version": "1.4.1",
"Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...",
"Description": "Two nonparametric methods for multiple regression...",
"Title": "ACE & AVAS 4 Selecting Multiple Regression Transformations",
"License": "MIT + file LICENSE",
"Suggests": "testthat",
"Packaged": "2016-10-28 15:38:59 UTC; garbetsp",
"Repository": "CRAN",
"Date/Publication": "2016-10-29 00:11:52",
"NeedsCompilation": "yes",
}
def test_cran_parse_debian_control_unicode_issue(datadir):
# iso-8859-1 caused failure, now fixed
description_file = os.path.join(datadir, "description", "KnownBR")
actual_metadata = parse_debian_control(description_file)
assert actual_metadata == {
"Package": "KnowBR",
"Version": "2.0",
"Title": """Discriminating Well Surveyed Spatial Units from Exhaustive
Biodiversity Databases""",
"Author": "Cástor Guisande González and Jorge M. Lobo",
"Maintainer": "Cástor Guisande González <castor@email.es>",
"Description": "It uses species accumulation curves and diverse estimators...",
"License": "GPL (>= 2)",
"Encoding": "latin1",
"Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan",
"Suggests": "raster, rgbif",
"NeedsCompilation": "no",
"Packaged": "2019-01-30 13:27:29 UTC; castor",
"Repository": "CRAN",
"Date/Publication": "2019-01-31 20:53:50 UTC",
}
@pytest.mark.parametrize(
"method_name",
["build_extrinsic_snapshot_metadata", "build_extrinsic_origin_metadata",],
)
def test_cran_fail_to_build_or_load_extrinsic_metadata(
method_name, swh_storage, requests_mock_datadir
):
"""problem during loading: {visit: failed, status: failed, no snapshot}
"""
version = "2.22-6"
base_url = "https://cran.r-project.org"
origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html"
artifact_url = (
f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa
)
full_method_name = f"swh.loader.package.cran.loader.CRANLoader.{method_name}"
with patch(
full_method_name,
side_effect=ValueError("Fake to fail to build or load extrinsic metadata"),
):
loader = CRANLoader(
swh_storage,
origin_url,
artifacts=[{"url": artifact_url, "version": version}],
)
actual_load_status = loader.load()
assert actual_load_status == {
"status": "failed",
"snapshot_id": SNAPSHOT.id.hex(),
}
visit_stats = get_stats(swh_storage)
assert {
"content": 33,
"directory": 7,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == visit_stats
assert_last_visit_matches(
- swh_storage, origin_url, status="partial", type="cran"
+ swh_storage, origin_url, status="partial", type="cran", snapshot=SNAPSHOT.id
)
diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py
index 04a35dd..21a79e4 100644
--- a/swh/loader/package/debian/tests/test_debian.py
+++ b/swh/loader/package/debian/tests/test_debian.py
@@ -1,425 +1,449 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from copy import deepcopy
import logging
from os import path
import pytest
from swh.loader.package.debian.loader import (
DebianLoader,
DebianPackageChangelog,
DebianPackageInfo,
IntrinsicPackageMetadata,
download_package,
dsc_information,
extract_package,
get_intrinsic_package_metadata,
prepare_person,
uid_to_person,
)
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import Person, Snapshot, SnapshotBranch, TargetType
logger = logging.getLogger(__name__)
URL = "deb://Debian/packages/cicero"
PACKAGE_FILES = {
"name": "cicero",
"version": "0.7.2-3",
"files": {
"cicero_0.7.2-3.diff.gz": {
"md5sum": "a93661b6a48db48d59ba7d26796fc9ce",
"name": "cicero_0.7.2-3.diff.gz",
"sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa
"size": 3964,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa
},
"cicero_0.7.2-3.dsc": {
"md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a",
"name": "cicero_0.7.2-3.dsc",
"sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa
"size": 1864,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa
}, # noqa
"cicero_0.7.2.orig.tar.gz": {
"md5sum": "4353dede07c5728319ba7f5595a7230a",
"name": "cicero_0.7.2.orig.tar.gz",
"sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa
"size": 96527,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa
},
},
}
PACKAGE_FILES2 = {
"name": "cicero",
"version": "0.7.2-4",
"files": {
"cicero_0.7.2-4.diff.gz": {
"md5sum": "1e7e6fc4a59d57c98082a3af78145734",
"name": "cicero_0.7.2-4.diff.gz",
"sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa
"size": 4038,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", # noqa
},
"cicero_0.7.2-4.dsc": {
"md5sum": "1a6c8855a73b4282bb31d15518f18cde",
"name": "cicero_0.7.2-4.dsc",
"sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa
"size": 1881,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa
}, # noqa
"cicero_0.7.2.orig.tar.gz": {
"md5sum": "4353dede07c5728319ba7f5595a7230a",
"name": "cicero_0.7.2.orig.tar.gz",
"sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa
"size": 96527,
"uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa
},
},
}
PACKAGE_PER_VERSION = {
"stretch/contrib/0.7.2-3": PACKAGE_FILES,
}
PACKAGES_PER_VERSION = {
"stretch/contrib/0.7.2-3": PACKAGE_FILES,
"buster/contrib/0.7.2-4": PACKAGE_FILES2,
}
def test_debian_first_visit(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project ends up with 1 snapshot
"""
loader = DebianLoader(
swh_storage,
URL,
date="2019-10-12T05:58:09.165557+00:00",
packages=PACKAGE_PER_VERSION,
)
actual_load_status = loader.load()
expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
-
- stats = get_stats(swh_storage)
- assert {
- "content": 42,
- "directory": 2,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 1, # all artifacts under 1 revision
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
+ assert_last_visit_matches(
+ swh_storage,
+ URL,
+ status="full",
+ type="deb",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
)
},
) # different than the previous loader as no release is done
check_snapshot(expected_snapshot, swh_storage)
+ stats = get_stats(swh_storage)
+ assert {
+ "content": 42,
+ "directory": 2,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 1, # all artifacts under 1 revision
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir):
"""With no prior visit, load a debian project ends up with 1 snapshot
"""
loader = DebianLoader(
swh_storage,
URL,
date="2019-10-12T05:58:09.165557+00:00",
packages=PACKAGE_PER_VERSION,
)
actual_load_status = loader.load()
expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
-
- stats = get_stats(swh_storage)
- assert {
- "content": 42,
- "directory": 2,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 1, # all artifacts under 1 revision
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
+ assert_last_visit_matches(
+ swh_storage,
+ URL,
+ status="full",
+ type="deb",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
)
},
) # different than the previous loader as no release is done
check_snapshot(expected_snapshot, swh_storage)
+ stats = get_stats(swh_storage)
+ assert {
+ "content": 42,
+ "directory": 2,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 1, # all artifacts under 1 revision
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
# No change in between loads
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
- assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
+ assert_last_visit_matches(
+ swh_storage,
+ URL,
+ status="full",
+ type="deb",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
stats2 = get_stats(swh_storage)
assert {
"content": 42 + 0,
"directory": 2 + 0,
"origin": 1,
"origin_visit": 1 + 1, # a new visit occurred
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1, # same snapshot across 2 visits
} == stats2
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("http://deb.debian.org")
]
# each package artifact url was fetched only once across the 2 visits
assert len(urls) == len(set(urls))
def test_debian_uid_to_person():
uid = "Someone Name <someone@orga.org>"
actual_person = uid_to_person(uid)
assert actual_person == {
"name": "Someone Name",
"email": "someone@orga.org",
"fullname": uid,
}
def test_debian_prepare_person():
actual_author = prepare_person(
{
"name": "Someone Name",
"email": "someone@orga.org",
"fullname": "Someone Name <someone@orga.org>",
}
)
assert actual_author == Person(
name=b"Someone Name",
email=b"someone@orga.org",
fullname=b"Someone Name <someone@orga.org>",
)
def test_debian_download_package(datadir, tmpdir, requests_mock_datadir):
tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue)
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
all_hashes = download_package(p_info, tmpdir)
assert all_hashes == {
"cicero_0.7.2-3.diff.gz": {
"checksums": {
"sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc",
"sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa
},
"filename": "cicero_0.7.2-3.diff.gz",
"length": 3964,
"url": (
"http://deb.debian.org/debian/pool/contrib/c/cicero/"
"cicero_0.7.2-3.diff.gz"
),
},
"cicero_0.7.2-3.dsc": {
"checksums": {
"sha1": "abbec4e8efbbc80278236e1dd136831eac08accd",
"sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa
},
"filename": "cicero_0.7.2-3.dsc",
"length": 1864,
"url": (
"http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc"
),
},
"cicero_0.7.2.orig.tar.gz": {
"checksums": {
"sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43",
"sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa
},
"filename": "cicero_0.7.2.orig.tar.gz",
"length": 96527,
"url": (
"http://deb.debian.org/debian/pool/contrib/c/cicero/"
"cicero_0.7.2.orig.tar.gz"
),
},
}
def test_debian_dsc_information_ok():
fname = "cicero_0.7.2-3.dsc"
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
dsc_url, dsc_name = dsc_information(p_info)
assert dsc_url == PACKAGE_FILES["files"][fname]["uri"]
assert dsc_name == PACKAGE_FILES["files"][fname]["name"]
def test_debian_dsc_information_not_found():
fname = "cicero_0.7.2-3.dsc"
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
p_info.files.pop(fname)
dsc_url, dsc_name = dsc_information(p_info)
assert dsc_url is None
assert dsc_name is None
def test_debian_dsc_information_missing_md5sum():
package_files = deepcopy(PACKAGE_FILES)
for package_metadata in package_files["files"].values():
del package_metadata["md5sum"]
p_info = DebianPackageInfo.from_metadata(package_files, url=URL)
for debian_file_metadata in p_info.files.values():
assert not debian_file_metadata.md5sum
def test_debian_dsc_information_too_many_dsc_entries():
# craft an extra dsc file
fname = "cicero_0.7.2-3.dsc"
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
data = p_info.files[fname]
fname2 = fname.replace("cicero", "ciceroo")
p_info.files[fname2] = data
with pytest.raises(
ValueError,
match="Package %s_%s references several dsc"
% (PACKAGE_FILES["name"], PACKAGE_FILES["version"]),
):
dsc_information(p_info)
def test_debian_get_intrinsic_package_metadata(
requests_mock_datadir, datadir, tmp_path
):
tmp_path = str(tmp_path) # py3.5 compat.
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL)
logger.debug("p_info: %s", p_info)
# download the packages
all_hashes = download_package(p_info, tmp_path)
# Retrieve information from package
_, dsc_name = dsc_information(p_info)
dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()]
# Extract information from package
extracted_path = extract_package(dl_artifacts, tmp_path)
# Retrieve information on package
dsc_path = path.join(path.dirname(extracted_path), dsc_name)
actual_package_info = get_intrinsic_package_metadata(
p_info, dsc_path, extracted_path
)
logger.debug("actual_package_info: %s", actual_package_info)
assert actual_package_info == IntrinsicPackageMetadata(
changelog=DebianPackageChangelog(
date="2014-10-19T16:52:35+02:00",
history=[
("cicero", "0.7.2-2"),
("cicero", "0.7.2-1"),
("cicero", "0.7-1"),
],
person={
"email": "sthibault@debian.org",
"fullname": "Samuel Thibault <sthibault@debian.org>",
"name": "Samuel Thibault",
},
),
maintainers=[
{
"email": "debian-accessibility@lists.debian.org",
"fullname": "Debian Accessibility Team "
"<debian-accessibility@lists.debian.org>",
"name": "Debian Accessibility Team",
},
{
"email": "sthibault@debian.org",
"fullname": "Samuel Thibault <sthibault@debian.org>",
"name": "Samuel Thibault",
},
],
name="cicero",
version="0.7.2-3",
)
def test_debian_multiple_packages(swh_storage, requests_mock_datadir):
loader = DebianLoader(
swh_storage,
URL,
date="2019-10-12T05:58:09.165557+00:00",
packages=PACKAGES_PER_VERSION,
)
actual_load_status = loader.load()
expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(swh_storage, URL, status="full", type="deb")
+ assert_last_visit_matches(
+ swh_storage,
+ URL,
+ status="full",
+ type="deb",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"releases/stretch/contrib/0.7.2-3": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"),
),
b"releases/buster/contrib/0.7.2-4": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("8224139c274c984147ef4b09aa0e462c55a10bd3"),
),
},
)
check_snapshot(expected_snapshot, swh_storage)
diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py
index 80a4ff7..f65cb5f 100644
--- a/swh/loader/package/deposit/tests/test_deposit.py
+++ b/swh/loader/package/deposit/tests/test_deposit.py
@@ -1,482 +1,500 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import re
from typing import List
import pytest
from swh.core.pytest_plugin import requests_mock_datadir_factory
from swh.loader.package.deposit.loader import ApiClient, DepositLoader
from swh.loader.package.loader import now
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
Origin,
Person,
RawExtrinsicMetadata,
Revision,
RevisionType,
Snapshot,
SnapshotBranch,
TargetType,
Timestamp,
TimestampWithTimezone,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
DEPOSIT_URL = "https://deposit.softwareheritage.org/1/private"
@pytest.fixture
def requests_mock_datadir(requests_mock_datadir):
"""Enhance default mock data to mock put requests as the loader does some
internal update queries there.
"""
requests_mock_datadir.put(re.compile("https"))
return requests_mock_datadir
def test_deposit_init_ok(swh_storage, deposit_client, swh_loader_config):
url = "some-url"
deposit_id = 999
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
) # Something that does not exist
assert loader.url == url
assert loader.client is not None
assert loader.client.base_url == swh_loader_config["deposit"]["url"]
def test_deposit_from_configfile(swh_config):
"""Ensure the deposit instantiation is ok
"""
loader = DepositLoader.from_configfile(
url="some-url", deposit_id="666", default_filename="archive.zip"
)
assert isinstance(loader.client, ApiClient)
def test_deposit_loading_unknown_deposit(
swh_storage, deposit_client, requests_mock_datadir
):
"""Loading an unknown deposit should fail
no origin, no visit, no snapshot
"""
# private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
url = "some-url"
unknown_deposit_id = 667
loader = DepositLoader(
swh_storage,
url,
unknown_deposit_id,
deposit_client,
default_filename="archive.zip",
) # does not exist
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
stats = get_stats(loader.storage)
assert {
"content": 0,
"directory": 0,
"origin": 0,
"origin_visit": 0,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 0,
} == stats
requests_mock_datadir_missing_one = requests_mock_datadir_factory(
ignore_urls=[f"{DEPOSIT_URL}/666/raw/",]
)
def test_deposit_loading_failure_to_retrieve_1_artifact(
swh_storage, deposit_client, requests_mock_datadir_missing_one
):
"""Deposit with missing artifact ends up with an uneventful/partial visit
"""
# private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
url = "some-url-2"
deposit_id = 666
requests_mock_datadir_missing_one.put(re.compile("https"))
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(loader.storage, url, status="partial", type="deposit")
stats = get_stats(loader.storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir_missing_one.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "failed",
"status_detail": {
"loading": [
"Failed to load branch HEAD for some-url-2: Fail to query "
"'https://deposit.softwareheritage.org/1/private/666/raw/'. Reason: 404"
]
},
}
assert body == expected_body
def test_deposit_loading_ok(swh_storage, deposit_client, requests_mock_datadir):
url = "https://hal-test.archives-ouvertes.fr/some-external-id"
deposit_id = 666
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
)
actual_load_status = loader.load()
expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
-
- stats = get_stats(loader.storage)
- assert {
- "content": 303,
- "directory": 12,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 1,
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
+ assert_last_visit_matches(
+ loader.storage,
+ url,
+ status="full",
+ type="deposit",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
revision_id_hex = "637318680351f5d78856d13264faebbd91efe9bb"
revision_id = hash_to_bytes(revision_id_hex)
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"HEAD": SnapshotBranch(
target=revision_id, target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=loader.storage)
revision = loader.storage.revision_get([revision_id])[0]
date = TimestampWithTimezone(
timestamp=Timestamp(seconds=1507389428, microseconds=0),
offset=0,
negative_utc=False,
)
person = Person(
fullname=b"Software Heritage",
name=b"Software Heritage",
email=b"robot@softwareheritage.org",
)
assert revision == Revision(
id=revision_id,
message=b"hal: Deposit 666 in collection hal",
author=person,
committer=person,
date=date,
committer_date=date,
type=RevisionType.TAR,
directory=b"\xfd-\xf1-\xc5SL\x1d\xa1\xe9\x18\x0b\x91Q\x02\xfbo`\x1d\x19",
synthetic=True,
metadata=None,
parents=(),
extra_headers=(),
)
# check metadata
fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
authority = MetadataAuthority(
type=MetadataAuthorityType.DEPOSIT_CLIENT,
url="https://hal-test.archives-ouvertes.fr/",
)
# Check origin metadata
orig_meta = loader.storage.raw_extrinsic_metadata_get(
Origin(url).swhid(), authority
)
assert orig_meta.next_page_token is None
raw_meta = loader.client.metadata_get(deposit_id)
all_metadata_raw: List[str] = raw_meta["metadata_raw"]
# 2 raw metadata xml + 1 json dict
assert len(orig_meta.results) == len(all_metadata_raw) + 1
orig_meta0 = orig_meta.results[0]
assert orig_meta0.authority == authority
assert orig_meta0.fetcher == fetcher
# Check directory metadata
directory_swhid = CoreSWHID(
object_type=ObjectType.DIRECTORY, object_id=revision.directory
)
actual_dir_meta = loader.storage.raw_extrinsic_metadata_get(
directory_swhid, authority
)
assert actual_dir_meta.next_page_token is None
assert len(actual_dir_meta.results) == len(all_metadata_raw)
for dir_meta in actual_dir_meta.results:
assert dir_meta.authority == authority
assert dir_meta.fetcher == fetcher
assert dir_meta.metadata.decode() in all_metadata_raw
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "done",
"revision_id": revision_id_hex,
"directory_id": hash_to_hex(revision.directory),
"snapshot_id": expected_snapshot_id,
"origin_url": url,
}
assert body == expected_body
+ stats = get_stats(loader.storage)
+ assert {
+ "content": 303,
+ "directory": 12,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 1,
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
def test_deposit_loading_ok_2(swh_storage, deposit_client, requests_mock_datadir):
"""Field dates should be se appropriately
"""
external_id = "some-external-id"
url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
deposit_id = 777
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client, default_filename="archive.zip"
)
actual_load_status = loader.load()
expected_snapshot_id = "3e68440fdd7c81d283f8f3aebb6f0c8657864192"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
+ assert_last_visit_matches(
+ loader.storage,
+ url,
+ status="full",
+ type="deposit",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
revision_id = "564d18943d71be80d0d73b43a77cfb205bcde96c"
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"HEAD": SnapshotBranch(
target=hash_to_bytes(revision_id), target_type=TargetType.REVISION
)
},
)
check_snapshot(expected_snapshot, storage=loader.storage)
raw_meta = loader.client.metadata_get(deposit_id)
# Ensure the date fields are set appropriately in the revision
# Retrieve the revision
revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0]
assert revision
assert revision.date.to_dict() == raw_meta["deposit"]["author_date"]
assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"]
assert not revision.metadata
provider = {
"provider_name": "hal",
"provider_type": "deposit_client",
"provider_url": "https://hal-test.archives-ouvertes.fr/",
"metadata": None,
}
tool = {
"name": "swh-deposit",
"version": "0.0.1",
"configuration": {"sword_version": "2"},
}
fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
authority = MetadataAuthority(
type=MetadataAuthorityType.DEPOSIT_CLIENT,
url="https://hal-test.archives-ouvertes.fr/",
)
# Check the origin metadata swh side
origin_extrinsic_metadata = loader.storage.raw_extrinsic_metadata_get(
Origin(url).swhid(), authority
)
assert origin_extrinsic_metadata.next_page_token is None
all_metadata_raw: List[str] = raw_meta["metadata_raw"]
# 1 raw metadata xml + 1 json dict
assert len(origin_extrinsic_metadata.results) == len(all_metadata_raw) + 1
origin_swhid = Origin(url).swhid()
expected_metadata = []
for idx, raw_meta in enumerate(all_metadata_raw):
origin_meta = origin_extrinsic_metadata.results[idx]
expected_metadata.append(
RawExtrinsicMetadata(
target=origin_swhid,
discovery_date=origin_meta.discovery_date,
metadata=raw_meta.encode(),
format="sword-v2-atom-codemeta-v2",
authority=authority,
fetcher=fetcher,
)
)
origin_metadata = {
"metadata": all_metadata_raw,
"provider": provider,
"tool": tool,
}
expected_metadata.append(
RawExtrinsicMetadata(
target=origin_swhid,
discovery_date=origin_extrinsic_metadata.results[-1].discovery_date,
metadata=json.dumps(origin_metadata).encode(),
format="original-artifacts-json",
authority=authority,
fetcher=fetcher,
)
)
assert sorted(origin_extrinsic_metadata.results) == sorted(expected_metadata)
# Check the revision metadata swh side
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
)
actual_directory_metadata = loader.storage.raw_extrinsic_metadata_get(
directory_swhid, authority
)
assert actual_directory_metadata.next_page_token is None
assert len(actual_directory_metadata.results) == len(all_metadata_raw)
revision_swhid = CoreSWHID(
object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id)
)
dir_metadata_template = RawExtrinsicMetadata(
target=directory_swhid,
format="sword-v2-atom-codemeta-v2",
authority=authority,
fetcher=fetcher,
origin=url,
revision=revision_swhid,
# to satisfy the constructor
discovery_date=now(),
metadata=b"",
)
expected_directory_metadata = []
for idx, raw_meta in enumerate(all_metadata_raw):
dir_metadata = actual_directory_metadata.results[idx]
expected_directory_metadata.append(
RawExtrinsicMetadata.from_dict(
{
**{
k: v
for (k, v) in dir_metadata_template.to_dict().items()
if k != "id"
},
"discovery_date": dir_metadata.discovery_date,
"metadata": raw_meta.encode(),
}
)
)
assert sorted(actual_directory_metadata.results) == sorted(
expected_directory_metadata
)
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "done",
"revision_id": revision_id,
"directory_id": hash_to_hex(revision.directory),
"snapshot_id": expected_snapshot_id,
"origin_url": url,
}
assert body == expected_body
def test_deposit_loading_ok_3(swh_storage, deposit_client, requests_mock_datadir):
"""Deposit loading can happen on tarball artifacts as well
The latest deposit changes introduced this internal change.
"""
external_id = "hal-123456"
url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
deposit_id = 888
loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
actual_load_status = loader.load()
expected_snapshot_id = "0ac7b54c042a026389f2087dc16f1d5c644ed0e4"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
- assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
+ assert_last_visit_matches(
+ loader.storage,
+ url,
+ status="full",
+ type="deposit",
+ snapshot=hash_to_bytes(expected_snapshot_id),
+ )
diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py
index 5148708..12fcf6b 100644
--- a/swh/loader/package/nixguix/tests/test_nixguix.py
+++ b/swh/loader/package/nixguix/tests/test_nixguix.py
@@ -1,606 +1,621 @@
# Copyright (C) 2020-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import logging
import os
from typing import Dict, Optional, Tuple
import pytest
from swh.loader.package import __version__
from swh.loader.package.archive.loader import ArchiveLoader
from swh.loader.package.nixguix.loader import (
NixGuixLoader,
clean_sources,
make_pattern_unsupported_file_extension,
parse_sources,
retrieve_sources,
)
from swh.loader.package.utils import download
from swh.loader.tests import assert_last_visit_matches
from swh.loader.tests import check_snapshot as check_snapshot_full
from swh.loader.tests import get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.model.swhids import ExtendedObjectType, ExtendedSWHID
from swh.storage.algos.origin import origin_get_latest_visit_status
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.storage.exc import HashCollision
from swh.storage.interface import PagedResult, StorageInterface
sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json"
@pytest.fixture
def raw_sources(datadir) -> bytes:
with open(
os.path.join(
datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json"
),
"rb",
) as f:
return f.read()
SNAPSHOT1 = Snapshot(
id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"),
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
target_type=TargetType.REVISION,
),
},
)
def check_snapshot(snapshot: Snapshot, storage: StorageInterface):
# The `evaluation` branch is allowed to be unresolvable. It's possible at current
# nixguix visit time, it is not yet visited (the git loader is in charge of its
# visit for now). For more details, check the
# swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring.
check_snapshot_full(
snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")]
)
assert isinstance(snapshot, Snapshot)
# then ensure the snapshot revisions are structurally as expected
revision_ids = []
for name, branch in snapshot.branches.items():
if name == b"evaluation":
continue # skipping that particular branch (cf. previous comment)
if branch.target_type == TargetType.REVISION:
revision_ids.append(branch.target)
revisions = storage.revision_get(revision_ids)
for rev in revisions:
assert rev is not None
metadata = rev.metadata
assert not metadata
def test_retrieve_sources(swh_storage, requests_mock_datadir):
j = parse_sources(retrieve_sources(sources_url))
assert "sources" in j.keys()
assert len(j["sources"]) == 2
def test_nixguix_url_not_found(swh_storage, requests_mock_datadir):
"""When failing to read from the url, the visit is marked as not_found.
Here the sources url does not exist, so requests_mock_datadir returns a 404.
A NotFound exception is then raised within the package loader's main loop,
resulting in the task ending with status "failed" and a visit_status with
status "not_found".
"""
unknown_url = "https://non-existing-url/"
loader = NixGuixLoader(swh_storage, unknown_url)
# during the retrieval step
load_status = loader.load()
assert load_status == {"status": "failed"}
assert_last_visit_matches(
swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None
)
assert len(requests_mock_datadir.request_history) == 1
assert requests_mock_datadir.request_history[0].url == unknown_url
def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir):
"""Other errors during communication with the url, the visit is marked as failed
requests_mock_datadir will intercept the requests to sources_url. Since the file
exists, returns a 200 with the requested content of the query. As file.txt is no
json, fails do decode and raises a JSONDecodeError. In effect failing the visit.
"""
sources_url = "https://example.com/file.txt"
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
assert load_status == {"status": "failed"}
assert_last_visit_matches(
swh_storage, sources_url, status="failed", type="nixguix", snapshot=None
)
assert len(requests_mock_datadir.request_history) == 1
assert requests_mock_datadir.request_history[0].url == sources_url
def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir):
sources = {}
with pytest.raises(ValueError, match="sources structure invalid, missing: .*"):
clean_sources(sources)
def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir):
for version_ok in [1, "1"]: # Check those versions are fine
clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"})
for version_ko in [0, "0", 2, "2"]: # Check that version != 1 raises an error
with pytest.raises(
ValueError, match="sources structure version .* is not supported"
):
clean_sources(
{"version": version_ko, "sources": [], "revision": "my-revision"}
)
def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir):
valid_sources = [
# 1 valid source
{"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"},
]
sources = {
"version": 1,
"sources": valid_sources
+ [
# integrity is missing
{"type": "url", "urls": ["my-url.tgz"],},
# urls is not a list
{"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"},
# type is not url
{"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"},
# missing fields, which get double-checked nonetheless...
{"integrity": "my-integrity"},
],
"revision": "my-revision",
}
clean = clean_sources(sources)
assert len(clean["sources"]) == len(valid_sources)
def test_make_pattern_unsupported_file_extension():
unsupported_extensions = ["el", "c", "txt"]
supported_extensions = ["Z", "7z"] # for test
actual_unsupported_pattern = make_pattern_unsupported_file_extension(
unsupported_extensions
)
for supported_ext in supported_extensions:
assert supported_ext not in unsupported_extensions
supported_filepath = f"anything.{supported_ext}"
actual_match = actual_unsupported_pattern.match(supported_filepath)
assert not actual_match
for unsupported_ext in unsupported_extensions:
unsupported_filepath = f"something.{unsupported_ext}"
actual_match = actual_unsupported_pattern.match(unsupported_filepath)
assert actual_match
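# Note: the assertions above only rely on the pattern matching filenames that end
# with one of the unsupported extensions (e.g. "something.el") and not matching the
# others; the exact regex built by make_pattern_unsupported_file_extension is an
# implementation detail of the loader.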
def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir):
unsupported_file_extensions = [
"iso",
"whl",
"gem",
"pom",
"msi",
"pod",
"png",
"rock",
"ttf",
"jar",
"c",
"el",
"rpm",
"diff",
"patch",
]
supported_sources = [
{
"type": "url",
"urls": [f"https://server.org/my-url.{ext}"],
"integrity": "my-integrity",
}
for ext in [
"known-unknown-but-ok", # this is fine as well with the current approach
"zip",
"tar.gz",
"tgz",
"tar.bz2",
"tbz",
"tbz2",
"tar.xz",
"tar",
"zip",
"7z",
"Z",
]
]
unsupported_sources = [
{
"type": "url",
"urls": [f"https://server.org/my-url.{ext}"],
"integrity": "my-integrity",
}
for ext in unsupported_file_extensions
]
sources = {
"version": 1,
"sources": supported_sources + unsupported_sources,
"revision": "my-revision",
}
clean = clean_sources(sources, unsupported_file_extensions)
assert len(clean["sources"]) == len(supported_sources)
def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources):
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res["status"] == "eventful"
stats = get_stats(swh_storage)
assert {
"content": 1,
"directory": 3,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
# The visit is partial because urls pointing to non-tarball files
# are not handled yet
assert_last_visit_matches(
swh_storage, sources_url, status="partial", type="nixguix"
)
visit_status = origin_get_latest_visit_status(swh_storage, sources_url)
snapshot_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.SNAPSHOT, object_id=visit_status.snapshot
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url=sources_url,
)
expected_metadata = [
RawExtrinsicMetadata(
target=snapshot_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.nixguix.loader.NixGuixLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="nixguix-sources-json",
metadata=raw_sources,
origin=sources_url,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
snapshot_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
def test_uncompress_failure(swh_storage, requests_mock_datadir):
"""Non tarball files are currently not supported and the uncompress
function fails on such kind of files.
However, even in this case of failure (because of the url
https://example.com/file.txt), a snapshot and a visit has to be
created (with a status partial since all files are not archived).
"""
loader = NixGuixLoader(swh_storage, sources_url)
loader_status = loader.load()
sources = loader.supported_sources()["sources"]
urls = [s["urls"][0] for s in sources]
assert "https://example.com/file.txt" in urls
assert loader_status["status"] == "eventful"
# The visit is partial because urls pointing to non-tarball files
# are not handled yet
assert_last_visit_matches(
swh_storage, sources_url, status="partial", type="nixguix"
)
def test_loader_incremental(swh_storage, requests_mock_datadir):
"""Ensure a second visit do not download artifact already
downloaded by the previous visit.
"""
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
loader.load()
assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz")
]
# The artifact
# 'https://github.com/owner-1/repository-1/revision-1.tgz' is only
# visited once
assert len(urls) == 1
def test_loader_two_visits(swh_storage, requests_mock_datadir_visits):
"""To ensure there is only one origin, but two visits, two revisions
and two snapshots are created.
The first visit creates a snapshot containing one tarball. The
second visit creates a snapshot containing the same tarball and
another tarball.
"""
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 1,
"directory": 3,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97"
expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex)
assert load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id_hex,
}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=expected_snapshot_id,
)
# This ensures visits are incremental. Indeed, if we request an url
# a second time, because of the requests_mock_datadir_visits
# fixture, the served file has to end with `_visit1`.
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 2,
"directory": 5,
"origin": 1,
"origin_visit": 2,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 2,
} == stats
def test_evaluation_branch(swh_storage, requests_mock_datadir):
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res["status"] == "eventful"
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
def test_eoferror(swh_storage, requests_mock_datadir):
"""Load a truncated archive which is invalid to make the uncompress
function raising the exception EOFError. We then check if a
snapshot is created, meaning this error is well managed.
"""
sources = (
"https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa
)
loader = NixGuixLoader(swh_storage, sources)
loader.load()
expected_snapshot = Snapshot(
id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"),
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=swh_storage)
def fake_download(
url: str,
dest: str,
hashes: Dict = {},
filename: Optional[str] = None,
auth: Optional[Tuple[str, str]] = None,
) -> Tuple[str, Dict]:
"""Fake download which raises HashCollision (for the sake of test simpliciy,
let's accept that makes sense)
For tests purpose only.
"""
if url == "https://example.com/file.txt":
# instead of failing because it's a file not dealt with by the nix guix
# loader, make it raise a hash collision
raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", [])
return download(url, dest, hashes, filename, auth)
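# fake_download keeps the same signature as swh.loader.package.utils.download so it
# can be used as a side_effect for the patched swh.loader.package.loader.download in
# test_raise_exception below; only the file.txt url is turned into a HashCollision,
# other urls are fetched through the regular (mocked) download path.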
def test_raise_exception(swh_storage, requests_mock_datadir, mocker):
mock_download = mocker.patch("swh.loader.package.loader.download")
mock_download.side_effect = fake_download
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res == {
"status": "eventful",
"snapshot_id": SNAPSHOT1.id.hex(),
}
- check_snapshot(SNAPSHOT1, storage=swh_storage)
-
- assert len(mock_download.mock_calls) == 2
-
# The visit is partial because some artifact downloads failed
assert_last_visit_matches(
- swh_storage, sources_url, status="partial", type="nixguix"
+ swh_storage,
+ sources_url,
+ status="partial",
+ type="nixguix",
+ snapshot=SNAPSHOT1.id,
)
+ check_snapshot(SNAPSHOT1, storage=swh_storage)
+
+ assert len(mock_download.mock_calls) == 2
+
def test_load_nixguix_one_common_artifact_from_other_loader(
swh_storage, datadir, requests_mock_datadir_visits, caplog
):
"""Misformatted revision should be caught and logged, then loading continues
"""
caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader")
# 1. first ingest with for example the archive loader
gnu_url = "https://ftp.gnu.org/gnu/8sync/"
release = "0.1.0"
artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz"
gnu_artifacts = [
{
"time": 944729610,
"url": artifact_url,
"length": 221837,
"filename": f"8sync-{release}.tar.gz",
"version": release,
}
]
archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts)
actual_load_status = archive_loader.load()
expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5"
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] == expected_snapshot_id # noqa
assert_last_visit_matches(
- archive_loader.storage, gnu_url, status="full", type="tar"
+ archive_loader.storage,
+ gnu_url,
+ status="full",
+ type="tar",
+ snapshot=hash_to_bytes(expected_snapshot_id),
)
# 2. Then ingest with the nixguix loader which lists the same artifact within its
# sources.json
# ensure test setup is ok
data_sources = os.path.join(
datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json"
)
all_sources = json.loads(open(data_sources).read())
found = False
for source in all_sources["sources"]:
if source["urls"][0] == artifact_url:
found = True
assert (
found is True
), f"test setup error: {artifact_url} must be in {data_sources}"
# first visit with a snapshot, ok
sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json"
loader = NixGuixLoader(swh_storage, sources_url)
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "eventful"
- assert_last_visit_matches(swh_storage, sources_url, status="full", type="nixguix")
-
snapshot_id = actual_load_status2["snapshot_id"]
+
+ assert_last_visit_matches(
+ swh_storage,
+ sources_url,
+ status="full",
+ type="nixguix",
+ snapshot=hash_to_bytes(snapshot_id),
+ )
+
snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id))
assert snapshot
diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py
index 25f10e8..000fb15 100644
--- a/swh/loader/package/npm/tests/test_npm.py
+++ b/swh/loader/package/npm/tests/test_npm.py
@@ -1,619 +1,620 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import os
import pytest
from swh.loader.package import __version__
from swh.loader.package.npm.loader import (
NpmLoader,
_author_str,
extract_npm_package_author,
)
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
Person,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.interface import PagedResult
@pytest.fixture
def org_api_info(datadir) -> bytes:
with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f:
return f.read()
def test_npm_author_str():
for author, expected_author in [
("author", "author"),
(
["Al from quantum leap", "hal from 2001 space odyssey"],
"Al from quantum leap",
),
([], ""),
({"name": "groot", "email": "groot@galaxy.org",}, "groot <groot@galaxy.org>"),
({"name": "somebody",}, "somebody"),
({"email": "no@one.org"}, " <no@one.org>"), # note first elt is an extra blank
({"name": "no one", "email": None,}, "no one"),
({"email": None,}, ""),
({"name": None}, ""),
({"name": None, "email": None,}, ""),
({}, ""),
(None, None),
({"name": []}, "",),
(
{"name": ["Susan McSween", "William H. Bonney", "Doc Scurlock",]},
"Susan McSween",
),
(None, None),
]:
assert _author_str(author) == expected_author
def test_npm_extract_npm_package_author(datadir):
package_metadata_filepath = os.path.join(
datadir, "https_replicate.npmjs.com", "org_visit1"
)
with open(package_metadata_filepath) as json_file:
package_metadata = json.load(json_file)
assert extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person(
fullname=b"mooz <stillpedant@gmail.com>",
name=b"mooz",
email=b"stillpedant@gmail.com",
)
assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person(
fullname=b"Masafumi Oyamada <stillpedant@gmail.com>",
name=b"Masafumi Oyamada",
email=b"stillpedant@gmail.com",
)
package_json = json.loads(
"""
{
"name": "highlightjs-line-numbers.js",
"version": "2.7.0",
"description": "Highlight.js line numbers plugin.",
"main": "src/highlightjs-line-numbers.js",
"dependencies": {},
"devDependencies": {
"gulp": "^4.0.0",
"gulp-rename": "^1.4.0",
"gulp-replace": "^0.6.1",
"gulp-uglify": "^1.2.0"
},
"repository": {
"type": "git",
"url": "https://github.com/wcoder/highlightjs-line-numbers.js.git"
},
"author": "Yauheni Pakala <evgeniy.pakalo@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues"
},
"homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"Yauheni Pakala <evgeniy.pakalo@gmail.com>",
name=b"Yauheni Pakala",
email=b"evgeniy.pakalo@gmail.com",
)
package_json = json.loads(
"""
{
"name": "3-way-diff",
"version": "0.0.1",
"description": "3-way diffing of JavaScript objects",
"main": "index.js",
"authors": [
{
"name": "Shawn Walsh",
"url": "https://github.com/shawnpwalsh"
},
{
"name": "Markham F Rollins IV",
"url": "https://github.com/mrollinsiv"
}
],
"keywords": [
"3-way diff",
"3 way diff",
"three-way diff",
"three way diff"
],
"devDependencies": {
"babel-core": "^6.20.0",
"babel-preset-es2015": "^6.18.0",
"mocha": "^3.0.2"
},
"dependencies": {
"lodash": "^4.15.0"
}
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None
)
package_json = json.loads(
"""
{
"name": "yfe-ynpm",
"version": "1.0.0",
"homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm",
"repository": {
"type": "git",
"url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git"
},
"author": [
"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
"xufuzi <xufuzi@ywwl.com> (https://7993.org)"
],
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
name=b"fengmk2",
email=b"fengmk2@gmail.com",
)
package_json = json.loads(
"""
{
"name": "umi-plugin-whale",
"version": "0.0.8",
"description": "Internal contract component",
"authors": {
"name": "xiaohuoni",
"email": "448627663@qq.com"
},
"repository": "alitajs/whale",
"devDependencies": {
"np": "^3.0.4",
"umi-tools": "*"
},
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"xiaohuoni <448627663@qq.com>",
name=b"xiaohuoni",
email=b"448627663@qq.com",
)
package_json_no_authors = json.loads(
"""{
"authors": null,
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json_no_authors) == Person(
fullname=b"", name=None, email=None
)
def normalize_hashes(hashes):
if isinstance(hashes, str):
return hash_to_bytes(hashes)
if isinstance(hashes, list):
return [hash_to_bytes(x) for x in hashes]
return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()}
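# normalize_hashes accepts the three shapes used below: a single hex string, a list
# of hex strings, or a {revision: directory} mapping, and converts every hex hash to
# bytes.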
_expected_new_contents_first_visit = normalize_hashes(
[
"4ce3058e16ab3d7e077f65aabf855c34895bf17c",
"858c3ceee84c8311adc808f8cdb30d233ddc9d18",
"0fa33b4f5a4e0496da6843a38ff1af8b61541996",
"85a410f8ef8eb8920f2c384a9555566ad4a2e21b",
"9163ac8025923d5a45aaac482262893955c9b37b",
"692cf623b8dd2c5df2c2998fd95ae4ec99882fb4",
"18c03aac6d3e910efb20039c15d70ab5e0297101",
"41265c42446aac17ca769e67d1704f99e5a1394d",
"783ff33f5882813dca9239452c4a7cadd4dba778",
"b029cfb85107aee4590c2434a3329bfcf36f8fa1",
"112d1900b4c2e3e9351050d1b542c9744f9793f3",
"5439bbc4bd9a996f1a38244e6892b71850bc98fd",
"d83097a2f994b503185adf4e719d154123150159",
"d0939b4898e83090ee55fd9d8a60e312cfadfbaf",
"b3523a26f7147e4af40d9d462adaae6d49eda13e",
"cd065fb435d6fb204a8871bcd623d0d0e673088c",
"2854a40855ad839a54f4b08f5cff0cf52fca4399",
"b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe",
"0f73d56e1cf480bded8a1ecf20ec6fc53c574713",
"0d9882b2dfafdce31f4e77fe307d41a44a74cefe",
"585fc5caab9ead178a327d3660d35851db713df1",
"e8cd41a48d79101977e3036a87aeb1aac730686f",
"5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7",
"9c3cc2763bf9e9e37067d3607302c4776502df98",
"3649a68410e354c83cd4a38b66bd314de4c8f5c9",
"e96ed0c091de1ebdf587104eaf63400d1974a1fe",
"078ca03d2f99e4e6eab16f7b75fbb7afb699c86c",
"38de737da99514de6559ff163c988198bc91367a",
]
)
_expected_new_directories_first_visit = normalize_hashes(
[
"3370d20d6f96dc1c9e50f083e2134881db110f4f",
"42753c0c2ab00c4501b552ac4671c68f3cf5aece",
"d7895533ef5edbcffdea3f057d9fef3a1ef845ce",
"80579be563e2ef3e385226fe7a3f079b377f142c",
"3b0ddc6a9e58b4b53c222da4e27b280b6cda591c",
"bcad03ce58ac136f26f000990fc9064e559fe1c0",
"5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca",
"e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd",
"584b5b4b6cf7f038095e820b99386a9c232de931",
"184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a",
"bb5f4ee143c970367eb409f2e4c1104898048b9d",
"1b95491047add1103db0dfdfa84a9735dcb11e88",
"a00c6de13471a2d66e64aca140ddb21ef5521e62",
"5ce6c1cd5cda2d546db513aaad8c72a44c7771e2",
"c337091e349b6ac10d38a49cdf8c2401ef9bb0f2",
"202fafcd7c0f8230e89d5496ad7f44ab12b807bf",
"775cc516543be86c15c1dc172f49c0d4e6e78235",
"ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e",
]
)
_expected_new_revisions_first_visit = normalize_hashes(
{
"d8a1c7474d2956ac598a19f0f27d52f7015f117e": (
"42753c0c2ab00c4501b552ac4671c68f3cf5aece"
),
"5f9eb78af37ffd12949f235e86fac04898f9f72a": (
"3370d20d6f96dc1c9e50f083e2134881db110f4f"
),
"ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": (
"d7895533ef5edbcffdea3f057d9fef3a1ef845ce"
),
}
)
def package_url(package):
return "https://www.npmjs.com/package/%s" % package
def package_metadata_url(package):
return "https://replicate.npmjs.com/%s/" % package
def test_npm_loader_first_visit(swh_storage, requests_mock_datadir, org_api_info):
package = "org"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
- stats = get_stats(swh_storage)
-
- assert {
- "content": len(_expected_new_contents_first_visit),
- "directory": len(_expected_new_directories_first_visit),
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": len(_expected_new_revisions_first_visit),
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
-
- contents = swh_storage.content_get(_expected_new_contents_first_visit)
- count = sum(0 if content is None else 1 for content in contents)
- assert count == len(_expected_new_contents_first_visit)
-
- assert (
- list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == []
- )
-
- assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == []
-
versions = [
("0.0.2", "d8a1c7474d2956ac598a19f0f27d52f7015f117e"),
("0.0.3", "5f9eb78af37ffd12949f235e86fac04898f9f72a"),
("0.0.4", "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"),
]
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target=b"releases/0.0.4", target_type=TargetType.ALIAS
),
**{
b"releases/"
+ version_name.encode(): SnapshotBranch(
target=hash_to_bytes(version_id), target_type=TargetType.REVISION,
)
for (version_name, version_id) in versions
},
},
)
check_snapshot(expected_snapshot, swh_storage)
+ contents = swh_storage.content_get(_expected_new_contents_first_visit)
+ count = sum(0 if content is None else 1 for content in contents)
+ assert count == len(_expected_new_contents_first_visit)
+
+ assert (
+ list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == []
+ )
+
+ assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == []
+
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="https://npmjs.com/",
)
for (version_name, revision_id) in versions:
revision = swh_storage.revision_get([hash_to_bytes(revision_id)])[0]
directory_id = revision.directory
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id,
)
revision_swhid = CoreSWHID(
object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id),
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.npm.loader.NpmLoader", version=__version__,
),
discovery_date=loader.visit_date,
format="replicate-npm-package-json",
metadata=json.dumps(
json.loads(org_api_info)["versions"][version_name]
).encode(),
origin="https://www.npmjs.com/package/org",
revision=revision_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
directory_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
+ stats = get_stats(swh_storage)
+
+ assert {
+ "content": len(_expected_new_contents_first_visit),
+ "directory": len(_expected_new_directories_first_visit),
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": len(_expected_new_revisions_first_visit),
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
def test_npm_loader_incremental_visit(swh_storage, requests_mock_datadir_visits):
package = "org"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
actual_load_status = loader.load()
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit),
"skipped_content": 0,
"snapshot": 1,
} == stats
# reset loader internal state
del loader._cached_info
del loader._cached__raw_info
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "eventful"
snap_id2 = actual_load_status2["snapshot_id"]
assert snap_id2 is not None
assert snap_id2 != actual_load_status["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="npm")
stats = get_stats(swh_storage)
assert { # 3 new release artifacts
"content": len(_expected_new_contents_first_visit) + 14,
"directory": len(_expected_new_directories_first_visit) + 15,
"origin": 1,
"origin_visit": 2,
"release": 0,
"revision": len(_expected_new_revisions_first_visit) + 3,
"skipped_content": 0,
"snapshot": 2,
} == stats
urls = [
m.url
for m in requests_mock_datadir_visits.request_history
if m.url.startswith("https://registry.npmjs.org")
]
assert len(urls) == len(set(urls)) # we visited each artifact only once across both visits
@pytest.mark.usefixtures("requests_mock_datadir")
def test_npm_loader_version_divergence(swh_storage):
package = "@aller_shared"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
- stats = get_stats(swh_storage)
-
- assert { # 1 new releases artifacts
- "content": 534,
- "directory": 153,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 2,
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
-
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS, target=b"releases/0.1.0"
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"),
),
b"releases/0.1.1-alpha.14": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"),
),
},
)
check_snapshot(expected_snapshot, swh_storage)
+ stats = get_stats(swh_storage)
+
+ assert { # 1 new releases artifacts
+ "content": 534,
+ "directory": 153,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 2,
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
def test_npm_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
"""Skip artifact with no intrinsic metadata during ingestion
"""
package = "nativescript-telerik-analytics"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
# no branch, as the only artifact has no intrinsic metadata
expected_snapshot = Snapshot(
id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
)
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot.id.hex(),
}
- check_snapshot(expected_snapshot, swh_storage)
-
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
def test_npm_artifact_with_no_upload_time(swh_storage, requests_mock_datadir):
"""With no time upload, artifact is skipped
"""
package = "jammit-no-time"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
# no branch, as the only artifact has no upload time
expected_snapshot = Snapshot(
id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
)
assert actual_load_status == {
"status": "uneventful",
"snapshot_id": expected_snapshot.id.hex(),
}
- check_snapshot(expected_snapshot, swh_storage)
-
assert_last_visit_matches(
swh_storage, url, status="partial", type="npm", snapshot=expected_snapshot.id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
def test_npm_artifact_use_mtime_if_no_time(swh_storage, requests_mock_datadir):
"""With no time upload, artifact is skipped
"""
package = "jammit-express"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
# artifact is used
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS, target=b"releases/0.0.1"
),
b"releases/0.0.1": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"),
),
},
)
- check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
def test_npm_no_artifact(swh_storage, requests_mock_datadir):
"""If no artifacts at all is found for origin, the visit fails completely
"""
package = "catify"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {
"status": "failed",
}
assert_last_visit_matches(swh_storage, url, status="failed", type="npm")
def test_npm_origin_not_found(swh_storage, requests_mock_datadir):
url = package_url("non-existent-url")
loader = NpmLoader(swh_storage, url)
assert loader.load() == {"status": "failed"}
assert_last_visit_matches(
swh_storage, url, status="not_found", type="npm", snapshot=None
)
diff --git a/swh/loader/package/opam/tests/test_opam.py b/swh/loader/package/opam/tests/test_opam.py
index 4f99e82..b73dbeb 100644
--- a/swh/loader/package/opam/tests/test_opam.py
+++ b/swh/loader/package/opam/tests/test_opam.py
@@ -1,317 +1,318 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.loader.package import __version__
from swh.loader.package.loader import RawExtrinsicMetadataCore
from swh.loader.package.opam.loader import OpamLoader, OpamPackageInfo
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
Person,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.interface import PagedResult
OCB_METADATA = b"""\
opam-version: "2.0"
name: "ocb"
version: "0.1"
synopsis: "SVG badge generator"
description:
"An OCaml library for SVG badge generation. There\'s also a command-line tool provided."
maintainer: "OCamlPro <contact@ocamlpro.com>"
authors: "OCamlPro <contact@ocamlpro.com>"
license: "ISC"
homepage: "https://ocamlpro.github.io/ocb/"
doc: "https://ocamlpro.github.io/ocb/api/"
bug-reports: "https://github.com/OCamlPro/ocb/issues"
depends: [
"ocaml" {>= "4.05"}
"dune" {>= "2.0"}
"odoc" {with-doc}
]
build: [
["dune" "subst"] {dev}
[
"dune"
"build"
"-p"
name
"-j"
jobs
"@install"
"@runtest" {with-test}
"@doc" {with-doc}
]
]
dev-repo: "git+https://github.com/OCamlPro/ocb.git"
url {
src: "https://github.com/OCamlPro/ocb/archive/0.1.tar.gz"
checksum: [
"sha256=aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46"
"sha512=1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b"
]
}
""" # noqa
def test_opam_loader_no_opam_repository_fails(swh_storage, tmpdir, datadir):
"""Running opam loader without a prepared opam repository fails"""
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "agrid"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=False, # The opam directory must be present
)
# The loader does not initialize the opam root directory. So, as the opam root does
# not exist, the loading fails. That's the expected use for the production workers
# (whose opam_root maintenance will be externally managed).
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
def test_opam_loader_one_version(tmpdir, requests_mock_datadir, datadir, swh_storage):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "agrid"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("4e4bf977312460329d7f769b0be89937c9827efc")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
target = b"S\x8c\x8aq\xdcy\xa4/0\xa0\xb2j\xeb\xc1\x16\xad\xce\x06\xeaV"
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(target=b"agrid.0.1", target_type=TargetType.ALIAS,),
b"agrid.0.1": SnapshotBranch(
target=target, target_type=TargetType.REVISION,
),
},
)
- check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
stats = get_stats(swh_storage)
assert {
"content": 18,
"directory": 8,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
def test_opam_loader_many_version(tmpdir, requests_mock_datadir, datadir, swh_storage):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "directories"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("1b49be175dcf17c0f568bcd7aac3d4faadc41249")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target=b"directories.0.3", target_type=TargetType.ALIAS,
),
b"directories.0.1": SnapshotBranch(
target=b"N\x92jA\xb2\x892\xeb\xcc\x9c\xa9\xb3\xea\xa7kz\xb08\xa6V",
target_type=TargetType.REVISION,
),
b"directories.0.2": SnapshotBranch(
target=b"yj\xc9\x1a\x8f\xe0\xaa\xff[\x88\xffz"
b"\x91C\xcc\x96\xb7\xd4\xf65",
target_type=TargetType.REVISION,
),
b"directories.0.3": SnapshotBranch(
target=b"hA \xc4\xb5\x18A8\xb8C\x12\xa3\xa5T\xb7/v\x85X\xcb",
target_type=TargetType.REVISION,
),
},
)
- check_snapshot(expected_snapshot, swh_storage)
-
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
def test_opam_revision(tmpdir, requests_mock_datadir, swh_storage, datadir):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "ocb"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("398df115b9feb2f463efd21941d69b7d59cd9025")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
info_iter = loader.get_package_info("0.1")
branch_name, package_info = next(info_iter)
expected_branch_name = "ocb.0.1"
expected_package_info = OpamPackageInfo(
url="https://github.com/OCamlPro/ocb/archive/0.1.tar.gz",
filename=None,
author=Person(
fullname=b"OCamlPro <contact@ocamlpro.com>", name=None, email=None
),
committer=Person(
fullname=b"OCamlPro <contact@ocamlpro.com>", name=None, email=None
),
version="0.1",
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
metadata=OCB_METADATA, format="opam-package-definition",
)
],
)
assert branch_name == expected_branch_name
assert package_info == expected_package_info
revision_id = b"o\xad\x7f=\x07\xbb\xaah\xdbI(\xb0'\x10z\xfc\xff\x06x\x1b"
revision = swh_storage.revision_get([revision_id])[0]
assert revision is not None
assert revision.author == expected_package_info.author
assert revision.committer == expected_package_info.committer
def test_opam_metadata(tmpdir, requests_mock_datadir, swh_storage, datadir):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "ocb"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_revision_id = b"o\xad\x7f=\x07\xbb\xaah\xdbI(\xb0'\x10z\xfc\xff\x06x\x1b"
revision = swh_storage.revision_get([expected_revision_id])[0]
assert revision is not None
revision_swhid = CoreSWHID(
object_type=ObjectType.REVISION, object_id=expected_revision_id
)
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url=opam_url,
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.opam.loader.OpamLoader", version=__version__,
),
discovery_date=loader.visit_date,
format="opam-package-definition",
metadata=OCB_METADATA,
origin=url,
revision=revision_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
directory_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
index 3298864..e8871a6 100644
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -1,882 +1,887 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import os
from os import path
from unittest.mock import patch
import pytest
from swh.core.pytest_plugin import requests_mock_datadir_factory
from swh.core.tarball import uncompress
from swh.loader.package import __version__
from swh.loader.package.pypi.loader import (
PyPILoader,
PyPIPackageInfo,
author,
extract_intrinsic_metadata,
pypi_api_url,
)
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
Person,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.interface import PagedResult
@pytest.fixture
def _0805nexter_api_info(datadir) -> bytes:
with open(
os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb",
) as f:
return f.read()
def test_pypi_author_basic():
data = {
"author": "i-am-groot",
"author_email": "iam@groot.org",
}
actual_author = author(data)
expected_author = Person(
fullname=b"i-am-groot <iam@groot.org>",
name=b"i-am-groot",
email=b"iam@groot.org",
)
assert actual_author == expected_author
def test_pypi_author_empty_email():
data = {
"author": "i-am-groot",
"author_email": "",
}
actual_author = author(data)
expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",)
assert actual_author == expected_author
def test_pypi_author_empty_name():
data = {
"author": "",
"author_email": "iam@groot.org",
}
actual_author = author(data)
expected_author = Person(
fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org",
)
assert actual_author == expected_author
def test_pypi_author_malformed():
data = {
"author": "['pierre', 'paul', 'jacques']",
"author_email": None,
}
actual_author = author(data)
expected_author = Person(
fullname=b"['pierre', 'paul', 'jacques']",
name=b"['pierre', 'paul', 'jacques']",
email=None,
)
assert actual_author == expected_author
def test_pypi_author_malformed_2():
data = {
"author": "[marie, jeanne]",
"author_email": "[marie@some, jeanne@thing]",
}
actual_author = author(data)
expected_author = Person(
fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>",
name=b"[marie, jeanne]",
email=b"[marie@some, jeanne@thing]",
)
assert actual_author == expected_author
def test_pypi_author_malformed_3():
data = {
"author": "[marie, jeanne, pierre]",
"author_email": "[marie@somewhere.org, jeanne@somewhere.org]",
}
actual_author = author(data)
expected_author = Person(
fullname=(
b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>"
),
name=b"[marie, jeanne, pierre]",
email=b"[marie@somewhere.org, jeanne@somewhere.org]",
)
assert actual_author == expected_author
# configuration error #
def test_pypi_api_url():
"""Compute pypi api url from the pypi project url should be ok"""
url = pypi_api_url("https://pypi.org/project/requests")
assert url == "https://pypi.org/pypi/requests/json"
def test_pypi_api_url_with_slash():
"""Compute pypi api url from the pypi project url should be ok"""
url = pypi_api_url("https://pypi.org/project/requests/")
assert url == "https://pypi.org/pypi/requests/json"
@pytest.mark.fs
def test_pypi_extract_intrinsic_metadata(tmp_path, datadir):
"""Parsing existing archive's PKG-INFO should yield results"""
uncompressed_archive_path = str(tmp_path)
archive_path = path.join(
datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip"
)
uncompress(archive_path, dest=uncompressed_archive_path)
actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
expected_metadata = {
"metadata_version": "1.0",
"name": "0805nexter",
"version": "1.1.0",
"summary": "a simple printer of nested lest",
"home_page": "http://www.hp.com",
"author": "hgtkpython",
"author_email": "2868989685@qq.com",
"platforms": ["UNKNOWN"],
}
assert actual_metadata == expected_metadata
@pytest.mark.fs
def test_pypi_extract_intrinsic_metadata_failures(tmp_path):
"""Parsing inexistent path/archive/PKG-INFO yield None"""
tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue)
# inexistent first level path
assert extract_intrinsic_metadata("/something-inexistent") == {}
# inexistent second level path (as expected by pypi archives)
assert extract_intrinsic_metadata(tmp_path) == {}
# inexistent PKG-INFO within second level path
existing_path_no_pkginfo = path.join(tmp_path, "something")
os.mkdir(existing_path_no_pkginfo)
assert extract_intrinsic_metadata(tmp_path) == {}
# LOADER SCENARIO #
# "edge" cases (for the same origin) #
# no release artifact:
# {visit full, status: uneventful, no contents, etc...}
requests_mock_datadir_missing_all = requests_mock_datadir_factory(
ignore_urls=[
"https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa
"https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa
]
)
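# Both release archives are ignored by this mocked datadir, so every artifact
# download fails and the visit below ends up partial with an empty snapshot.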
def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all):
"""Load a pypi project with all artifacts missing ends up with no snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
+ empty_snapshot = Snapshot(branches={})
+
+ assert_last_visit_matches(
+ swh_storage, url, status="partial", type="pypi", snapshot=empty_snapshot.id
+ )
+
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
- assert_last_visit_matches(swh_storage, url, status="partial", type="pypi")
-
def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir):
"""problem during loading: {visit: failed, status: failed, no snapshot}
"""
url = "https://pypi.org/project/0805nexter"
with patch(
"swh.loader.package.pypi.loader.PyPILoader._load_snapshot",
side_effect=ValueError("Fake problem to fail visit"),
):
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
+ assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
+
stats = get_stats(loader.storage)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 0,
} == stats
- assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
-
# problem during loading:
# {visit: partial, status: uneventful, no snapshot}
def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir):
url = "https://pypi.org/project/0805nexter"
with patch(
"swh.loader.package.pypi.loader.PyPILoader.last_snapshot",
side_effect=ValueError("Fake problem to fail the visit"),
):
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
+ assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
+
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 0,
} == stats
- assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
-
# problem during loading: failure early enough in between swh contents...
# some contents (contents, directories, etc...) have been written in storage
# {visit: partial, status: eventful, no snapshot}
# problem during loading: failure late enough we can have snapshots (some
# revisions are written in storage already)
# {visit: partial, status: eventful, snapshot}
# "normal" cases (for the same origin) #
requests_mock_datadir_missing_one = requests_mock_datadir_factory(
ignore_urls=[
"https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa
]
)
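# Here only the 1.1.0 archive is ignored: the 1.2.0 release can still be loaded,
# which is what the "missing artifact" test below relies on.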
# some missing release artifacts:
# {visit partial, status: eventful, 1 snapshot}
def test_pypi_revision_metadata_structure(
swh_storage, requests_mock_datadir, _0805nexter_api_info
):
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21")
revision = swh_storage.revision_get([expected_revision_id])[0]
assert revision is not None
revision_swhid = CoreSWHID(
object_type=ObjectType.REVISION, object_id=expected_revision_id
)
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="https://pypi.org/",
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.pypi.loader.PyPILoader", version=__version__,
),
discovery_date=loader.visit_date,
format="pypi-project-json",
metadata=json.dumps(
json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0]
).encode(),
origin=url,
revision=revision_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
directory_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
def test_pypi_visit_with_missing_artifact(
swh_storage, requests_mock_datadir_missing_one
):
"""Load a pypi project with some missing artifacts ends up with 1 snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
- stats = get_stats(swh_storage)
+ assert_last_visit_matches(
+ swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id,
+ )
- assert {
- "content": 3,
- "directory": 2,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 1,
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
+ expected_snapshot = Snapshot(
+ id=hash_to_bytes(expected_snapshot_id),
+ branches={
+ b"releases/1.2.0": SnapshotBranch(
+ target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
+ target_type=TargetType.REVISION,
+ ),
+ b"HEAD": SnapshotBranch(
+ target=b"releases/1.2.0", target_type=TargetType.ALIAS,
+ ),
+ },
+ )
+ check_snapshot(expected_snapshot, storage=swh_storage)
expected_contents = map(
hash_to_bytes,
[
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
- expected_snapshot = Snapshot(
- id=hash_to_bytes(expected_snapshot_id),
- branches={
- b"releases/1.2.0": SnapshotBranch(
- target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
- target_type=TargetType.REVISION,
- ),
- b"HEAD": SnapshotBranch(
- target=b"releases/1.2.0", target_type=TargetType.ALIAS,
- ),
- },
- )
- check_snapshot(expected_snapshot, storage=swh_storage)
+ stats = get_stats(swh_storage)
- assert_last_visit_matches(
- swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id,
- )
+ assert {
+ "content": 3,
+ "directory": 2,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 1,
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir):
"""With no prior visit, load a pypi project ends up with 1 snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
+ assert_last_visit_matches(
+ swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
+ )
+
+ expected_snapshot = Snapshot(
+ id=expected_snapshot_id,
+ branches={
+ b"releases/1.1.0": SnapshotBranch(
+ target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
+ target_type=TargetType.REVISION,
+ ),
+ b"releases/1.2.0": SnapshotBranch(
+ target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
+ target_type=TargetType.REVISION,
+ ),
+ b"HEAD": SnapshotBranch(
+ target=b"releases/1.2.0", target_type=TargetType.ALIAS,
+ ),
+ },
+ )
+ check_snapshot(expected_snapshot, swh_storage)
+
stats = get_stats(swh_storage)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == stats
expected_contents = map(
hash_to_bytes,
[
"a61e24cdfdab3bb7817f6be85d37a3e666b34566",
"938c33483285fd8ad57f15497f538320df82aeb8",
"a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"05219ba38bc542d4345d5638af1ed56c7d43ca7d",
"cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
"05219ba38bc542d4345d5638af1ed56c7d43ca7d"
), # noqa
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
- expected_snapshot = Snapshot(
- id=expected_snapshot_id,
- branches={
- b"releases/1.1.0": SnapshotBranch(
- target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
- target_type=TargetType.REVISION,
- ),
- b"releases/1.2.0": SnapshotBranch(
- target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
- target_type=TargetType.REVISION,
- ),
- b"HEAD": SnapshotBranch(
- target=b"releases/1.2.0", target_type=TargetType.ALIAS,
- ),
- },
- )
- check_snapshot(expected_snapshot, swh_storage)
-
- assert_last_visit_matches(
- swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
- )
-
def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir):
"""Multiple visits with no changes results in 1 same snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=snapshot_id
)
- stats = get_stats(swh_storage)
-
- assert {
- "content": 6,
- "directory": 4,
- "origin": 1,
- "origin_visit": 1,
- "release": 0,
- "revision": 2,
- "skipped_content": 0,
- "snapshot": 1,
- } == stats
-
expected_snapshot = Snapshot(
id=snapshot_id,
branches={
b"releases/1.1.0": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.2.0": SnapshotBranch(
target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
target_type=TargetType.REVISION,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.2.0", target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
+ stats = get_stats(swh_storage)
+
+ assert {
+ "content": 6,
+ "directory": 4,
+ "origin": 1,
+ "origin_visit": 1,
+ "release": 0,
+ "revision": 2,
+ "skipped_content": 0,
+ "snapshot": 1,
+ } == stats
+
actual_load_status2 = loader.load()
assert actual_load_status2 == {
"status": "uneventful",
"snapshot_id": actual_load_status2["snapshot_id"],
}
visit_status2 = assert_last_visit_matches(
swh_storage, url, status="full", type="pypi"
)
stats2 = get_stats(swh_storage)
expected_stats2 = stats.copy()
expected_stats2["origin_visit"] = 1 + 1
assert expected_stats2 == stats2
# same snapshot
assert visit_status2.snapshot == snapshot_id
def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits):
"""With prior visit, 2nd load will result with a different snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
visit1_actual_load_status = loader.load()
visit1_stats = get_stats(swh_storage)
expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert visit1_actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == visit1_stats
# Reset internal state
del loader._cached__raw_info
del loader._cached_info
visit2_actual_load_status = loader.load()
visit2_stats = get_stats(swh_storage)
assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status
expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283")
assert visit2_actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id2.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2
)
+ expected_snapshot = Snapshot(
+ id=expected_snapshot_id2,
+ branches={
+ b"releases/1.1.0": SnapshotBranch(
+ target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
+ target_type=TargetType.REVISION,
+ ),
+ b"releases/1.2.0": SnapshotBranch(
+ target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
+ target_type=TargetType.REVISION,
+ ),
+ b"releases/1.3.0": SnapshotBranch(
+ target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
+ target_type=TargetType.REVISION,
+ ),
+ b"HEAD": SnapshotBranch(
+ target=b"releases/1.3.0", target_type=TargetType.ALIAS,
+ ),
+ },
+ )
+
+ assert_last_visit_matches(
+ swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
+ )
+
+ check_snapshot(expected_snapshot, swh_storage)
+
assert {
"content": 6 + 1, # 1 more content
"directory": 4 + 2, # 2 more directories
"origin": 1,
"origin_visit": 1 + 1,
"release": 0,
"revision": 2 + 1, # 1 more revision
"skipped_content": 0,
"snapshot": 1 + 1, # 1 more snapshot
} == visit2_stats
expected_contents = map(
hash_to_bytes,
[
"a61e24cdfdab3bb7817f6be85d37a3e666b34566",
"938c33483285fd8ad57f15497f538320df82aeb8",
"a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
"92689fa2b7fb4d4fc6fb195bf73a50c87c030639",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"05219ba38bc542d4345d5638af1ed56c7d43ca7d",
"cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
"e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a",
"52604d46843b898f5a43208045d09fcf8731631b",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
"05219ba38bc542d4345d5638af1ed56c7d43ca7d"
), # noqa
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes(
"e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
- expected_snapshot = Snapshot(
- id=expected_snapshot_id2,
- branches={
- b"releases/1.1.0": SnapshotBranch(
- target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
- target_type=TargetType.REVISION,
- ),
- b"releases/1.2.0": SnapshotBranch(
- target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
- target_type=TargetType.REVISION,
- ),
- b"releases/1.3.0": SnapshotBranch(
- target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
- target_type=TargetType.REVISION,
- ),
- b"HEAD": SnapshotBranch(
- target=b"releases/1.3.0", target_type=TargetType.ALIAS,
- ),
- },
- )
-
- check_snapshot(expected_snapshot, swh_storage)
-
- assert_last_visit_matches(
- swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
- )
-
urls = [
m.url
for m in requests_mock_datadir_visits.request_history
if m.url.startswith("https://files.pythonhosted.org")
]
# visited each artifact once across 2 visits
assert len(urls) == len(set(urls))
# release artifact, no new artifact
# {visit full, status uneventful, same snapshot as before}
# release artifact, old artifact with different checksums
# {visit full, status full, new snapshot with shared history and some new
# different history}
# release with multiple sdist artifacts per pypi "version"
# snapshot branch output is different
def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir):
"""With no prior visit, load a pypi project ends up with 1 snapshot
"""
url = "https://pypi.org/project/nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
+ assert_last_visit_matches(
+ swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
+ )
+
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch(
target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
- assert_last_visit_matches(
- swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
- )
-
def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
"""Skip artifact with no intrinsic metadata during ingestion
"""
url = "https://pypi.org/project/upymenu"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
# no branches, since the only artifact has no intrinsic metadata
expected_snapshot = Snapshot(id=expected_snapshot_id, branches={})
- check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
)
+ check_snapshot(expected_snapshot, swh_storage)
+
def test_pypi_origin_not_found(swh_storage, requests_mock_datadir):
url = "https://pypi.org/project/unknown"
loader = PyPILoader(swh_storage, url)
assert loader.load() == {"status": "failed"}
assert_last_visit_matches(
swh_storage, url, status="not_found", type="pypi", snapshot=None
)
def test_pypi_build_revision_missing_version_in_pkg_info(swh_storage, tmp_path):
"""Simulate revision build when Version field is missing in PKG-INFO file."""
url = "https://pypi.org/project/GermlineFilter"
# create package info
p_info = PyPIPackageInfo(
url=url,
filename="GermlineFilter-1.2.tar.gz",
directory_extrinsic_metadata=[],
raw_info={},
comment_text="",
sha256="e4982353c544d94b34f02c5690ab3d3ebc93480d5b62fe6f3317f23c515acc05",
upload_time="2015-02-18T20:39:13",
)
# create PKG-INFO file with missing Version field
package_path = tmp_path / "GermlineFilter-1.2"
pkg_info_path = package_path / "PKG-INFO"
package_path.mkdir()
pkg_info_path.write_text(
"""Metadata-Version: 1.2
Name: germline_filter
Home-page:
Author: Cristian Caloian (OICR)
Author-email: cristian.caloian@oicr.on.ca
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN"""
)
directory = hash_to_bytes("8b864d66f356afe35033d58f8e03b7c23a66751f")
# attempt to build revision
loader = PyPILoader(swh_storage, url)
revision = loader.build_revision(p_info, str(tmp_path), directory)
# without comment_text and version in PKG-INFO, message should be empty
assert revision.message == b""
def test_filter_out_invalid_sdists(swh_storage, requests_mock):
project_name = "swh-test-sdist-filtering"
version = "1.0.0"
url = f"https://pypi.org/project/{project_name}"
json_url = f"https://pypi.org/pypi/{project_name}/json"
common_sdist_entries = {
"url": "",
"comment_text": "",
"digests": {"sha256": ""},
"upload_time": "",
"packagetype": "sdist",
}
requests_mock.get(
json_url,
json={
"releases": {
version: [
{
**common_sdist_entries,
"filename": f"{project_name}-{version}.{ext}",
}
for ext in ("tar.gz", "deb", "egg", "rpm", "whl")
]
},
},
)
loader = PyPILoader(swh_storage, url)
packages = list(loader.get_package_info(version=version))
assert len(packages) == 1
assert packages[0][1].filename.endswith(".tar.gz")
diff --git a/swh/loader/tests/__init__.py b/swh/loader/tests/__init__.py
index a9601ff..fc574fc 100644
--- a/swh/loader/tests/__init__.py
+++ b/swh/loader/tests/__init__.py
@@ -1,261 +1,263 @@
# Copyright (C) 2018-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from collections import defaultdict
import os
from pathlib import PosixPath
import subprocess
from typing import Dict, Iterable, List, Optional, Tuple, Union
from swh.model.hashutil import hash_to_bytes
from swh.model.model import OriginVisitStatus, Snapshot, TargetType
from swh.storage.algos.origin import origin_get_latest_visit_status
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.storage.interface import StorageInterface
def assert_last_visit_matches(
storage,
url: str,
status: str,
type: Optional[str] = None,
snapshot: Optional[bytes] = None,
) -> OriginVisitStatus:
"""This retrieves the last visit and visit_status which are expected to exist.
This also checks that the {visit|visit_status} have their respective properties
correctly set.
This returns the last visit_status for that given origin.
Args:
url: Origin url
status: Check that the visit status has the given status
type: Check that the returned visit has the given type
snapshot: Check that the visit status points to the given snapshot
Raises:
AssertionError in case visit or visit status is not found, or any of the type,
status and snapshot mismatch
Returns:
the visit status for further check during the remaining part of the test.
"""
visit_status = origin_get_latest_visit_status(storage, url)
assert visit_status is not None, f"Origin {url} has no visits"
if type:
assert (
visit_status.type == type
), f"Visit has type {visit_status.type} instead of {type}"
assert (
visit_status.status == status
), f"Visit_status has status {visit_status.status} instead of {status}"
if snapshot is not None:
assert visit_status.snapshot is not None
assert visit_status.snapshot == snapshot, (
f"Visit_status points to snapshot {visit_status.snapshot.hex()} "
f"instead of {snapshot.hex()}"
)
return visit_status
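# Editor's illustrative sketch (not part of the diff): a typical call pattern
# for assert_last_visit_matches in a loader test. The origin URL, visit type
# and snapshot id below are hypothetical placeholders.
def _example_assert_last_visit_matches_usage(swh_storage):
    visit_status = assert_last_visit_matches(
        swh_storage,
        "https://example.org/some/origin",  # hypothetical origin URL
        status="full",
        type="pypi",  # hypothetical visit type
        snapshot=hash_to_bytes("cafe" * 10),  # hypothetical snapshot id
    )
    # The returned OriginVisitStatus can be reused for further assertions.
    assert visit_status.snapshot is not None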
def prepare_repository_from_archive(
archive_path: str,
filename: Optional[str] = None,
tmp_path: Union[PosixPath, str] = "/tmp",
) -> str:
"""Given an existing archive_path, uncompress it.
Returns a file:// repository URL which can be used as an origin URL.
This does not deal with the case where the archive passed along does not exist.
"""
if not isinstance(tmp_path, str):
tmp_path = str(tmp_path)
# uncompress folder/repositories/dump for the loader to ingest
subprocess.check_output(["tar", "xf", archive_path, "-C", tmp_path])
# build the origin url (or some derivative form)
_fname = filename if filename else os.path.basename(archive_path)
repo_url = f"file://{tmp_path}/{_fname}"
return repo_url
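# Editor's illustrative sketch (not part of the diff): expected behaviour of
# prepare_repository_from_archive, assuming a "repo.tar.gz" archive exists in
# datadir and extracts to a "repo" folder (both names are hypothetical).
def _example_prepare_repository_from_archive_usage(datadir, tmp_path):
    archive_path = os.path.join(str(datadir), "repo.tar.gz")
    repo_url = prepare_repository_from_archive(
        archive_path, filename="repo", tmp_path=str(tmp_path)
    )
    # The returned origin URL points at the extracted folder under tmp_path.
    assert repo_url == f"file://{tmp_path}/repo"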
def encode_target(target: Dict) -> Dict:
"""Test helper to ease readability in test
"""
if not target:
return target
target_type = target["target_type"]
target_data = target["target"]
if target_type == "alias" and isinstance(target_data, str):
encoded_target = target_data.encode("utf-8")
elif isinstance(target_data, str):
encoded_target = hash_to_bytes(target_data)
else:
encoded_target = target_data
return {"target": encoded_target, "target_type": target_type}
class InconsistentAliasBranchError(AssertionError):
"""When an alias branch targets an inexistent branch."""
pass
class InexistentObjectsError(AssertionError):
"""When a targeted branch reference does not exist in the storage"""
pass
def check_snapshot(
- snapshot: Snapshot,
+ expected_snapshot: Snapshot,
storage: StorageInterface,
allowed_empty: Iterable[Tuple[TargetType, bytes]] = [],
) -> Snapshot:
"""Check that:
- the snapshot exists in the storage and matches the expected one
- each object reference up to the revision/release targets exists
Args:
- snapshot: full snapshot to check for existence and consistency
+ expected_snapshot: full snapshot to check for existence and consistency
storage: storage to lookup information into
allowed_empty: Iterable of branches we allow to be empty (some edge-case
loaders allow this to happen; nixguix, for example, allows the "evaluation"
branch to target the nixpkgs git commit reference, which may not yet be
resolvable at loading time)
Returns:
the snapshot stored in the storage for further test assertion if any is
needed.
"""
- if not isinstance(snapshot, Snapshot):
- raise AssertionError(f"variable 'snapshot' must be a snapshot: {snapshot!r}")
+ if not isinstance(expected_snapshot, Snapshot):
+ raise AssertionError(
+ f"argument 'expected_snapshot' must be a snapshot: {expected_snapshot!r}"
+ )
- expected_snapshot = snapshot_get_all_branches(storage, snapshot.id)
- if expected_snapshot is None:
- raise AssertionError(f"Snapshot {snapshot.id.hex()} is not found")
+ snapshot = snapshot_get_all_branches(storage, expected_snapshot.id)
+ if snapshot is None:
+ raise AssertionError(f"Snapshot {expected_snapshot.id.hex()} is not found")
assert snapshot == expected_snapshot
objects_by_target_type = defaultdict(list)
object_to_branch = {}
for branch, target in expected_snapshot.branches.items():
if (target.target_type, branch) in allowed_empty:
# safe for those elements to not be checked for existence
continue
objects_by_target_type[target.target_type].append(target.target)
object_to_branch[target.target] = branch
# check that alias references target something that exists, otherwise raise
aliases: List[bytes] = objects_by_target_type.get(TargetType.ALIAS, [])
for alias in aliases:
if alias not in expected_snapshot.branches:
raise InconsistentAliasBranchError(
f"Alias branch {alias.decode('utf-8')} "
f"should be in {list(expected_snapshot.branches)}"
)
revs = objects_by_target_type.get(TargetType.REVISION)
if revs:
revisions = storage.revision_get(revs)
not_found = [rev_id for rev_id, rev in zip(revs, revisions) if rev is None]
if not_found:
missing_objs = ", ".join(
str((object_to_branch[rev], rev.hex())) for rev in not_found
)
raise InexistentObjectsError(
f"Branch/Revision(s) {missing_objs} should exist in storage"
)
# retrieve information from revision
for revision in revisions:
assert revision is not None
objects_by_target_type[TargetType.DIRECTORY].append(revision.directory)
object_to_branch[revision.directory] = revision.id
rels = objects_by_target_type.get(TargetType.RELEASE)
if rels:
not_found = list(storage.release_missing(rels))
if not_found:
missing_objs = ", ".join(
str((object_to_branch[rel], rel.hex())) for rel in not_found
)
raise InexistentObjectsError(
f"Branch/Release(s) {missing_objs} should exist in storage"
)
# first level dirs exist?
dirs = objects_by_target_type.get(TargetType.DIRECTORY)
if dirs:
not_found = list(storage.directory_missing(dirs))
if not_found:
missing_objs = ", ".join(
str((object_to_branch[dir_].hex(), dir_.hex())) for dir_ in not_found
)
raise InexistentObjectsError(
f"Missing directories {missing_objs}: "
"(revision exists, directory target does not)"
)
for dir_ in dirs: # retrieve new objects to check for existence
paths = storage.directory_ls(dir_, recursive=True)
for path in paths:
if path["type"] == "dir":
target_type = TargetType.DIRECTORY
else:
target_type = TargetType.CONTENT
target = path["target"]
objects_by_target_type[target_type].append(target)
object_to_branch[target] = dir_
# check nested directories
dirs = objects_by_target_type.get(TargetType.DIRECTORY)
if dirs:
not_found = list(storage.directory_missing(dirs))
if not_found:
missing_objs = ", ".join(
str((object_to_branch[dir_].hex(), dir_.hex())) for dir_ in not_found
)
raise InexistentObjectsError(
f"Missing directories {missing_objs}: "
"(revision exists, directory target does not)"
)
# check contents directories
cnts = objects_by_target_type.get(TargetType.CONTENT)
if cnts:
not_found = list(storage.content_missing_per_sha1_git(cnts))
if not_found:
missing_objs = ", ".join(
str((object_to_branch[cnt].hex(), cnt.hex())) for cnt in not_found
)
raise InexistentObjectsError(f"Missing contents {missing_objs}")
return snapshot
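# Editor's illustrative sketch (not part of the diff): typical call pattern for
# check_snapshot; expected_snapshot is assumed to be a fully populated Snapshot
# built by the test, and swh_storage the storage the loader wrote into.
def _example_check_snapshot_usage(expected_snapshot, swh_storage):
    # Raises if the snapshot is missing or mismatched, if an alias targets a
    # missing branch, or if any referenced revision/release/directory/content
    # is absent from the storage. Branches listed in allowed_empty may
    # legitimately target objects that are not in the storage.
    stored = check_snapshot(
        expected_snapshot,
        swh_storage,
        allowed_empty=[(TargetType.REVISION, b"evaluation")],  # hypothetical branch
    )
    assert stored == expected_snapshot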
def get_stats(storage) -> Dict:
"""Adaptation utils to unify the stats counters across storage
implementation.
"""
storage.refresh_stat_counters()
stats = storage.stat_counters()
keys = [
"content",
"directory",
"origin",
"origin_visit",
"release",
"revision",
"skipped_content",
"snapshot",
]
return {k: stats.get(k) for k in keys}
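# Editor's illustrative sketch (not part of the diff): comparing the unified
# counters against the exact numbers a test expects (values are hypothetical).
def _example_get_stats_usage(swh_storage):
    stats = get_stats(swh_storage)
    assert stats == {
        "content": 6,
        "directory": 4,
        "origin": 1,
        "origin_visit": 1,
        "release": 0,
        "revision": 2,
        "skipped_content": 0,
        "snapshot": 1,
    }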
diff --git a/swh/loader/tests/test_init.py b/swh/loader/tests/test_init.py
index 78226b4..8f660e7 100644
--- a/swh/loader/tests/test_init.py
+++ b/swh/loader/tests/test_init.py
@@ -1,516 +1,518 @@
# Copyright (C) 2020-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import os
import subprocess
import attr
import pytest
from swh.loader.tests import (
InconsistentAliasBranchError,
InexistentObjectsError,
assert_last_visit_matches,
check_snapshot,
encode_target,
prepare_repository_from_archive,
)
from swh.model.from_disk import DentryPerms
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
Content,
Directory,
DirectoryEntry,
ObjectType,
OriginVisit,
OriginVisitStatus,
Person,
Release,
Revision,
RevisionType,
Snapshot,
SnapshotBranch,
TargetType,
Timestamp,
TimestampWithTimezone,
)
hash_hex = "43e45d56f88993aae6a0198013efa80716fd8920"
ORIGIN_VISIT = OriginVisit(
origin="some-url",
visit=1,
date=datetime.datetime.now(tz=datetime.timezone.utc),
type="archive",
)
ORIGIN_VISIT_STATUS = OriginVisitStatus(
origin="some-url",
visit=1,
type="archive",
date=datetime.datetime.now(tz=datetime.timezone.utc),
status="full",
snapshot=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"),
metadata=None,
)
CONTENT = Content(
data=b"42\n",
length=3,
sha1=hash_to_bytes("34973274ccef6ab4dfaaf86599792fa9c3fe4689"),
sha1_git=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"),
sha256=hash_to_bytes(
"673650f936cb3b0a2f93ce09d81be10748b1b203c19e8176b4eefc1964a0cf3a"
),
blake2s256=hash_to_bytes(
"d5fe1939576527e42cfd76a9455a2432fe7f56669564577dd93c4280e76d661d"
),
status="visible",
)
DIRECTORY = Directory(
id=hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"),
entries=tuple(
[
DirectoryEntry(
name=b"foo",
type="file",
target=CONTENT.sha1_git,
perms=DentryPerms.content,
)
]
),
)
REVISION = Revision(
id=hash_to_bytes("066b1b62dbfa033362092af468bf6cfabec230e7"),
message=b"hello",
author=Person(
name=b"Nicolas Dandrimont",
email=b"nicolas@example.com",
fullname=b"Nicolas Dandrimont <nicolas@example.com> ",
),
date=TimestampWithTimezone(
timestamp=Timestamp(seconds=1234567890, microseconds=0),
offset=120,
negative_utc=False,
),
committer=Person(
name=b"St\xc3fano Zacchiroli",
email=b"stefano@example.com",
fullname=b"St\xc3fano Zacchiroli <stefano@example.com>",
),
committer_date=TimestampWithTimezone(
timestamp=Timestamp(seconds=1123456789, microseconds=0),
offset=0,
negative_utc=True,
),
parents=(),
type=RevisionType.GIT,
directory=DIRECTORY.id,
metadata={
"checksums": {"sha1": "tarball-sha1", "sha256": "tarball-sha256",},
"signed-off-by": "some-dude",
},
extra_headers=(
(b"gpgsig", b"test123"),
(b"mergetag", b"foo\\bar"),
(b"mergetag", b"\x22\xaf\x89\x80\x01\x00"),
),
synthetic=True,
)
RELEASE = Release(
id=hash_to_bytes("3e9050196aa288264f2a9d279d6abab8b158448b"),
name=b"v0.0.2",
author=Person(
name=b"tony", email=b"tony@ardumont.fr", fullname=b"tony <tony@ardumont.fr>",
),
date=TimestampWithTimezone(
timestamp=Timestamp(seconds=1634336813, microseconds=0),
offset=0,
negative_utc=False,
),
target=REVISION.id,
target_type=ObjectType.REVISION,
message=b"yet another synthetic release",
synthetic=True,
)
SNAPSHOT = Snapshot(
id=hash_to_bytes("2498dbf535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"release/0.1.0": SnapshotBranch(
target=RELEASE.id, target_type=TargetType.RELEASE,
),
b"HEAD": SnapshotBranch(target=REVISION.id, target_type=TargetType.REVISION,),
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
b"evaluation": SnapshotBranch( # branch dedicated to not exist in storage
target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
target_type=TargetType.REVISION,
),
},
)
@pytest.fixture
def swh_storage_backend_config(swh_storage_postgresql):
return {
"cls": "postgresql",
"db": swh_storage_postgresql.dsn,
"objstorage": {"cls": "memory"},
}
@pytest.fixture
def mock_storage(mocker):
mock_storage = mocker.patch("swh.loader.tests.origin_get_latest_visit_status")
mock_storage.return_value = ORIGIN_VISIT_STATUS
return mock_storage
def test_assert_last_visit_matches_raise(mock_storage, mocker):
"""Not finding origin visit_and_statu should raise
"""
# overwrite so we raise because we do not find the right visit
mock_storage.return_value = None
with pytest.raises(AssertionError, match="Origin url has no visits"):
assert_last_visit_matches(mock_storage, "url", status="full")
assert mock_storage.called is True
def test_assert_last_visit_matches_wrong_status(mock_storage, mocker):
"""Wrong visit detected should raise AssertionError
"""
expected_status = "partial"
assert ORIGIN_VISIT_STATUS.status != expected_status
with pytest.raises(AssertionError, match="Visit_status has status"):
assert_last_visit_matches(mock_storage, "url", status=expected_status)
assert mock_storage.called is True
def test_assert_last_visit_matches_wrong_type(mock_storage, mocker):
"""Wrong visit detected should raise AssertionError
"""
expected_type = "git"
assert ORIGIN_VISIT.type != expected_type
with pytest.raises(AssertionError, match="Visit has type"):
assert_last_visit_matches(
mock_storage,
"url",
status=ORIGIN_VISIT_STATUS.status,
type=expected_type, # mismatched type will raise
)
assert mock_storage.called is True
def test_assert_last_visit_matches_wrong_snapshot(mock_storage, mocker):
"""Wrong visit detected should raise AssertionError
"""
expected_snapshot_id = hash_to_bytes("e92cc0710eb6cf9efd5b920a8453e1e07157b6cd")
assert ORIGIN_VISIT_STATUS.snapshot != expected_snapshot_id
with pytest.raises(AssertionError, match="Visit_status points to snapshot"):
assert_last_visit_matches(
mock_storage,
"url",
status=ORIGIN_VISIT_STATUS.status,
snapshot=expected_snapshot_id, # mismatched snapshot will raise
)
assert mock_storage.called is True
def test_assert_last_visit_matches(mock_storage, mocker):
"""Correct visit detected should return the visit_status
"""
visit_type = ORIGIN_VISIT.type
visit_status = ORIGIN_VISIT_STATUS.status
visit_snapshot = ORIGIN_VISIT_STATUS.snapshot
actual_visit_status = assert_last_visit_matches(
mock_storage,
"url",
type=visit_type,
status=visit_status,
snapshot=visit_snapshot,
)
assert actual_visit_status == ORIGIN_VISIT_STATUS
assert mock_storage.called is True
def test_prepare_repository_from_archive_failure():
# does not deal with a nonexistent archive, so it raises
assert os.path.exists("unknown-archive") is False
with pytest.raises(subprocess.CalledProcessError, match="exit status 2"):
prepare_repository_from_archive("unknown-archive")
def test_prepare_repository_from_archive(datadir, tmp_path):
archive_name = "0805nexter-1.1.0"
archive_path = os.path.join(str(datadir), f"{archive_name}.tar.gz")
assert os.path.exists(archive_path) is True
tmp_path = str(tmp_path) # deals with path string
repo_url = prepare_repository_from_archive(
archive_path, filename=archive_name, tmp_path=tmp_path
)
expected_uncompressed_archive_path = os.path.join(tmp_path, archive_name)
assert repo_url == f"file://{expected_uncompressed_archive_path}"
assert os.path.exists(expected_uncompressed_archive_path)
def test_prepare_repository_from_archive_no_filename(datadir, tmp_path):
archive_name = "0805nexter-1.1.0"
archive_path = os.path.join(str(datadir), f"{archive_name}.tar.gz")
assert os.path.exists(archive_path) is True
# deals with path as posix path (for tmp_path)
repo_url = prepare_repository_from_archive(archive_path, tmp_path=tmp_path)
tmp_path = str(tmp_path)
expected_uncompressed_archive_path = os.path.join(tmp_path, archive_name)
expected_repo_url = os.path.join(tmp_path, f"{archive_name}.tar.gz")
assert repo_url == f"file://{expected_repo_url}"
# passing along the filename does not influence the on-disk extraction
# just the repo-url computation
assert os.path.exists(expected_uncompressed_archive_path)
def test_encode_target():
assert encode_target(None) is None
for target_alias in ["something", b"something"]:
target = {
"target_type": "alias",
"target": target_alias,
}
actual_alias_encode_target = encode_target(target)
assert actual_alias_encode_target == {
"target_type": "alias",
"target": b"something",
}
for hash_ in [hash_hex, hash_to_bytes(hash_hex)]:
target = {"target_type": "revision", "target": hash_}
actual_encode_target = encode_target(target)
assert actual_encode_target == {
"target_type": "revision",
"target": hash_to_bytes(hash_hex),
}
def test_check_snapshot(swh_storage):
"""Everything should be fine when snapshot is found and the snapshot reference up to the
revision exist in the storage.
"""
# Create a consistent snapshot arborescence tree in storage
found = False
for entry in DIRECTORY.entries:
if entry.target == CONTENT.sha1_git:
found = True
break
assert found is True
assert REVISION.directory == DIRECTORY.id
assert RELEASE.target == REVISION.id
for branch, target in SNAPSHOT.branches.items():
if branch == b"alias":
assert target.target in SNAPSHOT.branches
elif branch == b"evaluation":
# this one does not exist and we are safelisting its check below
continue
else:
assert target.target in [REVISION.id, RELEASE.id]
swh_storage.content_add([CONTENT])
swh_storage.directory_add([DIRECTORY])
swh_storage.revision_add([REVISION])
swh_storage.release_add([RELEASE])
s = swh_storage.snapshot_add([SNAPSHOT])
assert s == {
"snapshot:add": 1,
}
# all should be fine!
check_snapshot(
SNAPSHOT, swh_storage, allowed_empty=[(TargetType.REVISION, b"evaluation")]
)
def test_check_snapshot_failures(swh_storage):
"""Failure scenarios:
0. snapshot parameter is not a snapshot
1. snapshot id is correct but branches mismatched
2. snapshot id is not correct, it's not found in the storage
3. snapshot reference an alias which does not exist
4. snapshot is found in storage, targeted revision does not exist
5. snapshot is found in storage, targeted revision exists but the directory the
revision targets does not exist
6. snapshot is found in storage, target revision exists, targeted directory by the
revision exist. Content targeted by the directory does not exist.
7. snapshot is found in storage, targeted release does not exist
"""
snap_id_hex = "2498dbf535f882bc7f9a18fb16c9ad27fda7bab7"
snapshot = Snapshot(
id=hash_to_bytes(snap_id_hex),
branches={
b"master": SnapshotBranch(
target=hash_to_bytes(hash_hex), target_type=TargetType.REVISION,
),
},
)
s = swh_storage.snapshot_add([snapshot])
assert s == {
"snapshot:add": 1,
}
unexpected_snapshot = Snapshot(
branches={
b"tip": SnapshotBranch( # wrong branch
target=hash_to_bytes(hash_hex), target_type=TargetType.RELEASE
)
},
)
# 0. not a Snapshot object, raise!
- with pytest.raises(AssertionError, match="variable 'snapshot' must be a snapshot"):
+ with pytest.raises(
+ AssertionError, match="argument 'expected_snapshot' must be a snapshot"
+ ):
check_snapshot(ORIGIN_VISIT, swh_storage)
# 1. snapshot id is correct but branches mismatched
with pytest.raises(AssertionError): # sadly the debian build raises only a plain AssertionError
check_snapshot(attr.evolve(unexpected_snapshot, id=snapshot.id), swh_storage)
# 2. snapshot id is not correct, it's not found in the storage
wrong_snap_id = hash_to_bytes("999666f535f882bc7f9a18fb16c9ad27fda7bab7")
with pytest.raises(AssertionError, match="is not found"):
check_snapshot(attr.evolve(unexpected_snapshot, id=wrong_snap_id), swh_storage)
# 3. snapshot references an inexistent alias
snapshot0 = Snapshot(
id=hash_to_bytes("123666f535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
},
)
swh_storage.snapshot_add([snapshot0])
with pytest.raises(InconsistentAliasBranchError, match="Alias branch HEAD"):
check_snapshot(snapshot0, swh_storage)
# 4. snapshot is found in storage, targeted revision does not exist
rev_not_found = list(swh_storage.revision_missing([REVISION.id]))
assert len(rev_not_found) == 1
snapshot1 = Snapshot(
id=hash_to_bytes("456666f535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
b"HEAD": SnapshotBranch(
target=REVISION.id, target_type=TargetType.REVISION,
),
},
)
swh_storage.snapshot_add([snapshot1])
with pytest.raises(InexistentObjectsError, match="Branch/Revision"):
check_snapshot(snapshot1, swh_storage)
# 5. snapshot is found in storage, targeted revision exists but the directory the
# revision targets does not exist
swh_storage.revision_add([REVISION])
dir_not_found = list(swh_storage.directory_missing([REVISION.directory]))
assert len(dir_not_found) == 1
snapshot2 = Snapshot(
id=hash_to_bytes("987123f535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
b"HEAD": SnapshotBranch(
target=REVISION.id, target_type=TargetType.REVISION,
),
},
)
swh_storage.snapshot_add([snapshot2])
with pytest.raises(InexistentObjectsError, match="Missing directories"):
check_snapshot(snapshot2, swh_storage)
assert DIRECTORY.id == REVISION.directory
swh_storage.directory_add([DIRECTORY])
# 6. snapshot is found in storage, target revision exists, targeted directory by the
# revision exist. Content targeted by the directory does not exist.
assert DIRECTORY.entries[0].target == CONTENT.sha1_git
not_found = list(swh_storage.content_missing_per_sha1_git([CONTENT.sha1_git]))
assert len(not_found) == 1
swh_storage.directory_add([DIRECTORY])
snapshot3 = Snapshot(
id=hash_to_bytes("091456f535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
b"HEAD": SnapshotBranch(
target=REVISION.id, target_type=TargetType.REVISION,
),
},
)
swh_storage.snapshot_add([snapshot3])
with pytest.raises(InexistentObjectsError, match="Missing content(s)"):
check_snapshot(snapshot3, swh_storage)
# 7. snapshot is found in storage, targeted release does not exist
# release targets the revisions which exists
assert RELEASE.target == REVISION.id
snapshot4 = Snapshot(
id=hash_to_bytes("789666f535f882bc7f9a18fb16c9ad27fda7bab7"),
branches={
b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,),
b"HEAD": SnapshotBranch(
target=REVISION.id, target_type=TargetType.REVISION,
),
b"release/0.1.0": SnapshotBranch(
target=RELEASE.id, target_type=TargetType.RELEASE,
),
},
)
swh_storage.snapshot_add([snapshot4])
with pytest.raises(InexistentObjectsError, match="Branch/Release"):
check_snapshot(snapshot4, swh_storage)
