
diff --git a/PKG-INFO b/PKG-INFO
index 59d0c1b..c3c81ca 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,32 +1,32 @@
Metadata-Version: 2.1
Name: swh.loader.core
-Version: 0.19.0
+Version: 0.20.0
Summary: Software Heritage Base Loader
Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE
Author: Software Heritage developers
Author-email: swh-devel@inria.fr
License: UNKNOWN
Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest
Project-URL: Funding, https://www.softwareheritage.org/donate
Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core
Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/
Description: SWH-loader-core
===============
The Software Heritage Core Loader provides low-level loading utilities and
helpers used by other loaders.
The main entry points are classes:
- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn)
- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...)
- :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...)
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 3
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Operating System :: OS Independent
Classifier: Development Status :: 5 - Production/Stable
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Provides-Extra: testing
diff --git a/requirements-swh.txt b/requirements-swh.txt
index 01bb27c..3294103 100644
--- a/requirements-swh.txt
+++ b/requirements-swh.txt
@@ -1,5 +1,5 @@
swh.core >= 0.3
-swh.model >= 0.7.2
+swh.model >= 1.0.0
swh.objstorage >= 0.2.2
swh.scheduler >= 0.4.0
swh.storage >= 0.13.1
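Note: the swh.model bump to >= 1.0.0 tracks the SWHID rework this diff migrates to. A minimal sketch of the identifier classes involved, assuming swh.model >= 1.0.0 is installed; the hashes and the origin URL are placeholders:

from swh.model.hashutil import hash_to_bytes
from swh.model.identifiers import (
    CoreSWHID,
    ExtendedObjectType,
    ExtendedSWHID,
    ObjectType,
)
from swh.model.model import Origin

revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb")
directory_id = hash_to_bytes("b2b327b33dc85818bd23c3ccda8b7e675a66ecbd")

# CoreSWHID identifies core archive objects (content, directory, revision, ...)
revision_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=revision_id)

# ExtendedSWHID additionally covers origins and snapshots; it replaces the
# removed MetadataTargetType enum as the target of raw extrinsic metadata
directory_swhid = ExtendedSWHID(
    object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id
)
origin_swhid = Origin(url="https://example.org/some-origin").swhid()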
diff --git a/swh.loader.core.egg-info/PKG-INFO b/swh.loader.core.egg-info/PKG-INFO
index 59d0c1b..c3c81ca 100644
--- a/swh.loader.core.egg-info/PKG-INFO
+++ b/swh.loader.core.egg-info/PKG-INFO
@@ -1,32 +1,32 @@
Metadata-Version: 2.1
Name: swh.loader.core
-Version: 0.19.0
+Version: 0.20.0
Summary: Software Heritage Base Loader
Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE
Author: Software Heritage developers
Author-email: swh-devel@inria.fr
License: UNKNOWN
Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest
Project-URL: Funding, https://www.softwareheritage.org/donate
Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core
Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/
Description: SWH-loader-core
===============
The Software Heritage Core Loader provides low-level loading utilities and
helpers used by other loaders.
The main entry points are classes:
- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn)
- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...)
- :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...)
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 3
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Operating System :: OS Independent
Classifier: Development Status :: 5 - Production/Stable
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Provides-Extra: testing
diff --git a/swh.loader.core.egg-info/requires.txt b/swh.loader.core.egg-info/requires.txt
index a164afa..fa39d22 100644
--- a/swh.loader.core.egg-info/requires.txt
+++ b/swh.loader.core.egg-info/requires.txt
@@ -1,19 +1,19 @@
psutil
requests
iso8601
pkginfo
python-debian
python-dateutil
swh.core>=0.3
-swh.model>=0.7.2
+swh.model>=1.0.0
swh.objstorage>=0.2.2
swh.scheduler>=0.4.0
swh.storage>=0.13.1
[testing]
pytest
pytest-mock
requests_mock
swh-core[testing]
swh-scheduler[testing]>=0.5.0
swh-storage[testing]>=0.10.6
diff --git a/swh/loader/core/loader.py b/swh/loader/core/loader.py
index 4f6e746..ba503de 100644
--- a/swh/loader/core/loader.py
+++ b/swh/loader/core/loader.py
@@ -1,465 +1,463 @@
# Copyright (C) 2015-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import hashlib
import logging
import os
from typing import Any, Dict, Iterable, Optional
from swh.core.config import load_from_envvar
from swh.loader.exception import NotFound
from swh.model.model import (
BaseContent,
Content,
Directory,
Origin,
OriginVisit,
OriginVisitStatus,
Release,
Revision,
Sha1Git,
SkippedContent,
Snapshot,
)
from swh.storage import get_storage
from swh.storage.interface import StorageInterface
from swh.storage.utils import now
DEFAULT_CONFIG: Dict[str, Any] = {
"max_content_size": 100 * 1024 * 1024,
}
class BaseLoader:
"""Base class for (D)VCS loaders (e.g Svn, Git, Mercurial, ...) or PackageLoader (e.g
PyPI, Npm, CRAN, ...)
A loader retrieves origin information (git/mercurial/svn repositories, pypi/npm/...
package artifacts), ingests the contents/directories/revisions/releases/snapshot
read from those artifacts and send them to the archive through the storage backend.
The main entry point for the loader is the :func:`load` function.
2 static methods (:func:`from_config`, :func:`from_configfile`) centralizes and
eases the loader instantiation from either configuration dict or configuration file.
Some class examples:
- :class:`SvnLoader`
- :class:`GitLoader`
- :class:`PyPILoader`
- :class:`NpmLoader`
"""
def __init__(
self,
storage: StorageInterface,
logging_class: Optional[str] = None,
save_data_path: Optional[str] = None,
max_content_size: Optional[int] = None,
):
super().__init__()
self.storage = storage
self.max_content_size = int(max_content_size) if max_content_size else None
if logging_class is None:
logging_class = "%s.%s" % (
self.__class__.__module__,
self.__class__.__name__,
)
self.log = logging.getLogger(logging_class)
_log = logging.getLogger("requests.packages.urllib3.connectionpool")
_log.setLevel(logging.WARN)
# possibly overridden in self.prepare method
self.visit_date: Optional[datetime.datetime] = None
self.origin: Optional[Origin] = None
if not hasattr(self, "visit_type"):
self.visit_type: Optional[str] = None
self.origin_metadata: Dict[str, Any] = {}
self.loaded_snapshot_id: Optional[Sha1Git] = None
if save_data_path:
path = save_data_path
os.stat(path)
if not os.access(path, os.R_OK | os.W_OK):
raise PermissionError("Permission denied: %r" % path)
self.save_data_path = save_data_path
@classmethod
def from_config(cls, storage: Dict[str, Any], **config: Any):
"""Instantiate a loader from a configuration dict.
This is basically a backwards-compatibility shim for the CLI.
Args:
storage: instantiation config for the storage
config: the configuration dict for the loader, with the following keys:
- credentials (optional): credentials list for the scheduler
- any other kwargs passed to the loader.
Returns:
the instantiated loader
"""
# Drop the legacy config keys which aren't used for this generation of loader.
for legacy_key in ("storage", "celery"):
config.pop(legacy_key, None)
# Instantiate the storage
storage_instance = get_storage(**storage)
return cls(storage=storage_instance, **config)
@classmethod
def from_configfile(cls, **kwargs: Any):
"""Instantiate a loader from the configuration loaded from the
SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their
value is not None.
Args:
kwargs: kwargs passed to the loader instantiation
"""
config = dict(load_from_envvar(DEFAULT_CONFIG))
config.update({k: v for k, v in kwargs.items() if v is not None})
return cls.from_config(**config)
def save_data(self) -> None:
"""Save the data associated to the current load"""
raise NotImplementedError
def get_save_data_path(self) -> str:
"""The path to which we archive the loader's raw data"""
if not hasattr(self, "__save_data_path"):
year = str(self.visit_date.year) # type: ignore
assert self.origin
url = self.origin.url.encode("utf-8")
origin_url_hash = hashlib.sha1(url).hexdigest()
path = "%s/sha1:%s/%s/%s" % (
self.save_data_path,
origin_url_hash[0:2],
origin_url_hash,
year,
)
os.makedirs(path, exist_ok=True)
self.__save_data_path = path
return self.__save_data_path
def flush(self) -> None:
"""Flush any potential buffered data not sent to swh-storage.
"""
self.storage.flush()
def cleanup(self) -> None:
"""Last step executed by the loader.
"""
raise NotImplementedError
def prepare_origin_visit(self) -> None:
"""First step executed by the loader to prepare origin and visit
references. Set/update self.origin, and
optionally self.origin_url, self.visit_date.
"""
raise NotImplementedError
def _store_origin_visit(self) -> None:
"""Store origin and visit references. Sets the self.visit references.
"""
assert self.origin
self.storage.origin_add([self.origin])
if not self.visit_date: # now as default visit_date if not provided
self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)
assert isinstance(self.visit_date, datetime.datetime)
assert isinstance(self.visit_type, str)
self.visit = list(
self.storage.origin_visit_add(
[
OriginVisit(
origin=self.origin.url,
date=self.visit_date,
type=self.visit_type,
)
]
)
)[0]
def prepare(self) -> None:
"""Second step executed by the loader to prepare some state needed by
the loader.
Raises
NotFound exception if the origin to ingest is not found.
"""
raise NotImplementedError
def get_origin(self) -> Origin:
"""Get the origin that is currently being loaded.
self.origin should be set in :func:`prepare_origin`
Returns:
dict: an origin ready to be sent to storage by
:func:`origin_add`.
"""
assert self.origin
return self.origin
def fetch_data(self) -> bool:
"""Fetch the data from the source the loader is currently loading
(ex: git/hg/svn/... repository).
Returns:
a value that is interpreted as a boolean. If True, fetch_data needs
to be called again to complete loading.
"""
raise NotImplementedError
def store_data(self):
"""Store fetched data in the database.
Should call the :func:`maybe_load_xyz` methods, which handle the
bundles sent to storage, rather than sending them directly.
"""
raise NotImplementedError
def store_metadata(self) -> None:
"""Store fetched metadata in the database.
For more information, see implementation in :class:`DepositLoader`.
"""
pass
def load_status(self) -> Dict[str, str]:
"""Detailed loading status.
Defaults to logging an eventful load.
Returns: a dictionary that is eventually passed back as the task's
result to the scheduler, allowing tuning of the task recurrence
mechanism.
"""
return {
"status": "eventful",
}
def post_load(self, success: bool = True) -> None:
"""Permit the loader to do some additional actions according to status
after the loading is done. The flag success indicates the
loading's status.
Defaults to doing nothing.
This is up to the implementer of this method to make sure this
does not break.
Args:
success (bool): the success status of the loading
"""
pass
def visit_status(self) -> str:
"""Detailed visit status.
Defaults to logging a full visit.
"""
return "full"
def pre_cleanup(self) -> None:
"""As a first step, will try and check for dangling data to cleanup.
This should do its best to avoid raising issues.
"""
pass
def load(self) -> Dict[str, str]:
r"""Loading logic for the loader to follow:
- 1. Call :meth:`prepare_origin_visit` to prepare the
origin and visit we will associate loading data to
- 2. Store the actual ``origin_visit`` to storage
- 3. Call :meth:`prepare` to prepare any eventual state
- 4. Call :meth:`get_origin` to get the origin we work with and store
- while True:
- 5. Call :meth:`fetch_data` to fetch the data to store
- 6. Call :meth:`store_data` to store the data
- 7. Call :meth:`cleanup` to clean up any eventual state put in place
in :meth:`prepare` method.
"""
try:
self.pre_cleanup()
except Exception:
msg = "Cleaning up dangling data failed! Continue loading."
self.log.warning(msg)
self.prepare_origin_visit()
self._store_origin_visit()
assert (
self.origin
), "The method `prepare_origin_visit` call should set the origin (Origin)"
assert (
self.visit.visit
), "The method `_store_origin_visit` should set the visit (OriginVisit)"
self.log.info(
"Load origin '%s' with type '%s'", self.origin.url, self.visit.type
)
try:
self.prepare()
while True:
more_data_to_fetch = self.fetch_data()
self.store_data()
if not more_data_to_fetch:
break
self.store_metadata()
visit_status = OriginVisitStatus(
origin=self.origin.url,
visit=self.visit.visit,
type=self.visit_type,
date=now(),
status=self.visit_status(),
snapshot=self.loaded_snapshot_id,
)
self.storage.origin_visit_status_add([visit_status])
self.post_load()
except Exception as e:
if isinstance(e, NotFound):
status = "not_found"
task_status = "uneventful"
else:
status = "partial" if self.loaded_snapshot_id else "failed"
task_status = "failed"
self.log.exception(
"Loading failure, updating to `%s` status",
status,
extra={
"swh_task_args": [],
- "swh_task_kwargs": {
- "origin": self.origin.url
- },
+ "swh_task_kwargs": {"origin": self.origin.url},
},
)
visit_status = OriginVisitStatus(
origin=self.origin.url,
visit=self.visit.visit,
type=self.visit_type,
date=now(),
status=status,
snapshot=self.loaded_snapshot_id,
)
self.storage.origin_visit_status_add([visit_status])
self.post_load(success=False)
return {"status": task_status}
finally:
self.flush()
self.cleanup()
return self.load_status()
class DVCSLoader(BaseLoader):
"""This base class is a pattern for dvcs loaders (e.g. git, mercurial).
Those loaders are able to load all the data in one go. For example, the
loader defined in swh-loader-git :class:`BulkUpdater`.
For other loaders (stateful one, (e.g :class:`SWHSvnLoader`),
inherit directly from :class:`BaseLoader`.
"""
def cleanup(self) -> None:
"""Clean up an eventual state installed for computations."""
pass
def has_contents(self) -> bool:
"""Checks whether we need to load contents"""
return True
def get_contents(self) -> Iterable[BaseContent]:
"""Get the contents that need to be loaded"""
raise NotImplementedError
def has_directories(self) -> bool:
"""Checks whether we need to load directories"""
return True
def get_directories(self) -> Iterable[Directory]:
"""Get the directories that need to be loaded"""
raise NotImplementedError
def has_revisions(self) -> bool:
"""Checks whether we need to load revisions"""
return True
def get_revisions(self) -> Iterable[Revision]:
"""Get the revisions that need to be loaded"""
raise NotImplementedError
def has_releases(self) -> bool:
"""Checks whether we need to load releases"""
return True
def get_releases(self) -> Iterable[Release]:
"""Get the releases that need to be loaded"""
raise NotImplementedError
def get_snapshot(self) -> Snapshot:
"""Get the snapshot that needs to be loaded"""
raise NotImplementedError
def eventful(self) -> bool:
"""Whether the load was eventful"""
raise NotImplementedError
def store_data(self) -> None:
assert self.origin
if self.save_data_path:
self.save_data()
if self.has_contents():
for obj in self.get_contents():
if isinstance(obj, Content):
self.storage.content_add([obj])
elif isinstance(obj, SkippedContent):
self.storage.skipped_content_add([obj])
else:
raise TypeError(f"Unexpected content type: {obj}")
if self.has_directories():
for directory in self.get_directories():
self.storage.directory_add([directory])
if self.has_revisions():
for revision in self.get_revisions():
self.storage.revision_add([revision])
if self.has_releases():
for release in self.get_releases():
self.storage.release_add([release])
snapshot = self.get_snapshot()
self.storage.snapshot_add([snapshot])
self.flush()
self.loaded_snapshot_id = snapshot.id
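As an aside, a hypothetical minimal subclass may help read the load() flow above. The class name, its no-op bodies, and the "memory" storage configuration are illustrative only, not part of this change:

import datetime

from swh.loader.core.loader import BaseLoader
from swh.model.model import Origin, Snapshot


class TrivialLoader(BaseLoader):
    """Illustrative loader that records an empty snapshot for an origin."""

    visit_type = "trivial"  # hypothetical visit type

    def __init__(self, storage, url: str, **kwargs):
        super().__init__(storage=storage, **kwargs)
        self.url = url

    def prepare_origin_visit(self) -> None:
        # step 1: set self.origin (and optionally self.visit_date)
        self.origin = Origin(url=self.url)
        self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)

    def prepare(self) -> None:
        # step 3: set up any state needed by fetch_data/store_data
        pass

    def fetch_data(self) -> bool:
        # step 5: fetch data; return True if fetch_data must be called again
        return False

    def store_data(self) -> None:
        # step 6: write objects to self.storage and record the snapshot id
        snapshot = Snapshot.from_dict({"branches": {}})
        self.storage.snapshot_add([snapshot])
        self.loaded_snapshot_id = snapshot.id

    def cleanup(self) -> None:
        # step 7: tear down state created in prepare()
        pass


# Either entry point described in the docstrings above can instantiate it:
# TrivialLoader.from_config(storage={"cls": "memory"}, url="https://example.org/repo").load()
# TrivialLoader.from_configfile(url="https://example.org/repo").load()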
diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py
index e6a05ab..94f59e4 100644
--- a/swh/loader/package/deposit/tests/test_deposit.py
+++ b/swh/loader/package/deposit/tests/test_deposit.py
@@ -1,491 +1,499 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import re
from typing import List
import attr
import pytest
from swh.core.pytest_plugin import requests_mock_datadir_factory
from swh.loader.package.deposit.loader import ApiClient, DepositLoader
from swh.loader.package.loader import now
from swh.loader.package.tests.common import check_metadata_paths
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes, hash_to_hex
-from swh.model.identifiers import SWHID
+from swh.model.identifiers import (
+ CoreSWHID,
+ ExtendedObjectType,
+ ExtendedSWHID,
+ ObjectType,
+)
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
+ Origin,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
DEPOSIT_URL = "https://deposit.softwareheritage.org/1/private"
@pytest.fixture
def requests_mock_datadir(requests_mock_datadir):
"""Enhance default mock data to mock put requests as the loader does some
internal update queries there.
"""
requests_mock_datadir.put(re.compile("https"))
return requests_mock_datadir
def test_deposit_init_ok(swh_storage, deposit_client, swh_loader_config):
url = "some-url"
deposit_id = 999
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client
) # Something that does not exist
assert loader.url == url
assert loader.client is not None
assert loader.client.base_url == swh_loader_config["deposit"]["url"]
def test_deposit_from_configfile(swh_config):
"""Ensure the deposit instantiation is ok
"""
loader = DepositLoader.from_configfile(url="some-url", deposit_id="666")
assert isinstance(loader.client, ApiClient)
def test_deposit_loading_unknown_deposit(
swh_storage, deposit_client, requests_mock_datadir
):
"""Loading an unknown deposit should fail
no origin, no visit, no snapshot
"""
# private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
url = "some-url"
unknown_deposit_id = 667
loader = DepositLoader(
swh_storage, url, unknown_deposit_id, deposit_client
) # does not exist
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
stats = get_stats(loader.storage)
assert {
"content": 0,
"directory": 0,
"origin": 0,
"origin_visit": 0,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 0,
} == stats
requests_mock_datadir_missing_one = requests_mock_datadir_factory(
ignore_urls=[f"{DEPOSIT_URL}/666/raw/",]
)
def test_deposit_loading_failure_to_retrieve_1_artifact(
swh_storage, deposit_client, requests_mock_datadir_missing_one
):
"""Deposit with missing artifact ends up with an uneventful/partial visit
"""
# private api url form: 'https://deposit.s.o/1/private/hal/666/raw/'
url = "some-url-2"
deposit_id = 666
requests_mock_datadir_missing_one.put(re.compile("https"))
loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(loader.storage, url, status="partial", type="deposit")
stats = get_stats(loader.storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir_missing_one.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "failed",
}
assert body == expected_body
def test_deposit_revision_metadata_structure(
swh_storage, deposit_client, requests_mock_datadir
):
url = "https://hal-test.archives-ouvertes.fr/some-external-id"
deposit_id = 666
loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb")
revision = loader.storage.revision_get([expected_revision_id])[0]
assert revision is not None
check_metadata_paths(
revision.metadata,
paths=[
("extrinsic.provider", str),
("extrinsic.when", str),
("extrinsic.raw", dict),
("original_artifact", list),
],
)
# Only 2 top-level keys now
assert set(revision.metadata.keys()) == {"extrinsic", "original_artifact"}
for original_artifact in revision.metadata["original_artifact"]:
check_metadata_paths(
original_artifact,
paths=[("filename", str), ("length", int), ("checksums", dict),],
)
def test_deposit_loading_ok(swh_storage, deposit_client, requests_mock_datadir):
url = "https://hal-test.archives-ouvertes.fr/some-external-id"
deposit_id = 666
loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
actual_load_status = loader.load()
expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
stats = get_stats(loader.storage)
assert {
"content": 303,
"directory": 12,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
revision_id_hex = "637318680351f5d78856d13264faebbd91efe9bb"
revision_id = hash_to_bytes(revision_id_hex)
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"HEAD": SnapshotBranch(
target=revision_id, target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=loader.storage)
revision = loader.storage.revision_get([revision_id])[0]
assert revision is not None
# check metadata
fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
authority = MetadataAuthority(
type=MetadataAuthorityType.DEPOSIT_CLIENT,
url="https://hal-test.archives-ouvertes.fr/",
)
# Check origin metadata
orig_meta = loader.storage.raw_extrinsic_metadata_get(
- MetadataTargetType.ORIGIN, url, authority
+ Origin(url).swhid(), authority
)
assert orig_meta.next_page_token is None
raw_meta = loader.client.metadata_get(deposit_id)
all_metadata_raw: List[str] = raw_meta["metadata_raw"]
# 2 raw metadata xml + 1 json dict
assert len(orig_meta.results) == len(all_metadata_raw) + 1
orig_meta0 = orig_meta.results[0]
assert orig_meta0.authority == authority
assert orig_meta0.fetcher == fetcher
# Check directory metadata
- directory_id = hash_to_hex(revision.directory)
- directory_swhid = SWHID(object_type="directory", object_id=directory_id)
+ directory_swhid = CoreSWHID(
+ object_type=ObjectType.DIRECTORY, object_id=revision.directory
+ )
actual_dir_meta = loader.storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, directory_swhid, authority
+ directory_swhid, authority
)
assert actual_dir_meta.next_page_token is None
assert len(actual_dir_meta.results) == len(all_metadata_raw)
for dir_meta in actual_dir_meta.results:
assert dir_meta.authority == authority
assert dir_meta.fetcher == fetcher
assert dir_meta.metadata.decode() in all_metadata_raw
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "done",
"revision_id": revision_id_hex,
"directory_id": hash_to_hex(revision.directory),
"snapshot_id": expected_snapshot_id,
"origin_url": url,
}
assert body == expected_body
def test_deposit_loading_ok_2(swh_storage, deposit_client, requests_mock_datadir):
"""Field dates should be se appropriately
"""
external_id = "some-external-id"
url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
deposit_id = 777
loader = DepositLoader(swh_storage, url, deposit_id, deposit_client)
actual_load_status = loader.load()
expected_snapshot_id = "3e68440fdd7c81d283f8f3aebb6f0c8657864192"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
revision_id = "564d18943d71be80d0d73b43a77cfb205bcde96c"
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"HEAD": SnapshotBranch(
target=hash_to_bytes(revision_id), target_type=TargetType.REVISION
)
},
)
check_snapshot(expected_snapshot, storage=loader.storage)
raw_meta = loader.client.metadata_get(deposit_id)
# Ensure the date fields are set appropriately in the revision
# Retrieve the revision
revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0]
assert revision
assert revision.date.to_dict() == raw_meta["deposit"]["author_date"]
assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"]
read_api = f"{DEPOSIT_URL}/{deposit_id}/meta/"
provider = {
"provider_name": "hal",
"provider_type": "deposit_client",
"provider_url": "https://hal-test.archives-ouvertes.fr/",
"metadata": None,
}
tool = {
"name": "swh-deposit",
"version": "0.0.1",
"configuration": {"sword_version": "2"},
}
assert revision.metadata == {
"extrinsic": {
"provider": read_api,
"raw": {
"origin": {"type": "deposit", "url": url,},
"origin_metadata": {
"metadata": raw_meta["metadata_dict"],
"provider": provider,
"tool": tool,
},
},
"when": revision.metadata["extrinsic"]["when"], # dynamic
},
"original_artifact": [
{
"checksums": {
"sha1": "f8c63d7c890a7453498e6cf9fef215d85ec6801d",
"sha256": "474bf646aeeff6d945eb752b1a9f8a40f3d81a88909ee7bd2d08cc822aa361e6", # noqa
},
"filename": "archive.zip",
"length": 956830,
"url": "https://deposit.softwareheritage.org/1/private/777/raw/",
}
],
}
fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",)
authority = MetadataAuthority(
type=MetadataAuthorityType.DEPOSIT_CLIENT,
url="https://hal-test.archives-ouvertes.fr/",
)
# Check the origin metadata swh side
origin_extrinsic_metadata = loader.storage.raw_extrinsic_metadata_get(
- MetadataTargetType.ORIGIN, url, authority
+ Origin(url).swhid(), authority
)
assert origin_extrinsic_metadata.next_page_token is None
all_metadata_raw: List[str] = raw_meta["metadata_raw"]
# 1 raw metadata xml + 1 json dict
assert len(origin_extrinsic_metadata.results) == len(all_metadata_raw) + 1
+ origin_swhid = Origin(url).swhid()
+
expected_metadata = []
for idx, raw_meta in enumerate(all_metadata_raw):
origin_meta = origin_extrinsic_metadata.results[idx]
expected_metadata.append(
RawExtrinsicMetadata(
- type=MetadataTargetType.ORIGIN,
- target=url,
+ target=origin_swhid,
discovery_date=origin_meta.discovery_date,
metadata=raw_meta.encode(),
format="sword-v2-atom-codemeta-v2",
authority=authority,
fetcher=fetcher,
)
)
origin_metadata = {
"metadata": all_metadata_raw,
"provider": provider,
"tool": tool,
}
expected_metadata.append(
RawExtrinsicMetadata(
- type=MetadataTargetType.ORIGIN,
- target=url,
+ target=origin_swhid,
discovery_date=origin_extrinsic_metadata.results[-1].discovery_date,
metadata=json.dumps(origin_metadata).encode(),
format="original-artifacts-json",
authority=authority,
fetcher=fetcher,
)
)
assert len(origin_extrinsic_metadata.results) == len(expected_metadata)
for orig_meta in origin_extrinsic_metadata.results:
assert orig_meta in expected_metadata
# Check the revision metadata swh side
- directory_id = hash_to_hex(revision.directory)
- directory_swhid = SWHID(object_type="directory", object_id=directory_id)
+ directory_swhid = ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
+ )
actual_directory_metadata = loader.storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, directory_swhid, authority
+ directory_swhid, authority
)
assert actual_directory_metadata.next_page_token is None
assert len(actual_directory_metadata.results) == len(all_metadata_raw)
- revision_swhid = SWHID(object_type="revision", object_id=revision_id)
+ revision_swhid = CoreSWHID(
+ object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id)
+ )
dir_metadata_template = RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=directory_swhid,
format="sword-v2-atom-codemeta-v2",
authority=authority,
fetcher=fetcher,
origin=url,
revision=revision_swhid,
# to satisfy the constructor
discovery_date=now(),
metadata=b"",
)
expected_directory_metadata = []
for idx, raw_meta in enumerate(all_metadata_raw):
dir_metadata = actual_directory_metadata.results[idx]
expected_directory_metadata.append(
attr.evolve(
dir_metadata_template,
discovery_date=dir_metadata.discovery_date,
metadata=raw_meta.encode(),
)
)
assert actual_directory_metadata.results == expected_directory_metadata
# Retrieve the information for deposit status update query to the deposit
urls = [
m
for m in requests_mock_datadir.request_history
if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/"
]
assert len(urls) == 1
update_query = urls[0]
body = update_query.json()
expected_body = {
"status": "done",
"revision_id": revision_id,
"directory_id": hash_to_hex(revision.directory),
"snapshot_id": expected_snapshot_id,
"origin_url": url,
}
assert body == expected_body
def test_deposit_loading_ok_3(swh_storage, deposit_client, requests_mock_datadir):
"""Deposit loading can happen on tarball artifacts as well
The latest deposit changes introduced this internal change.
"""
external_id = "hal-123456"
url = f"https://hal-test.archives-ouvertes.fr/{external_id}"
deposit_id = 888
loader = DepositLoader(
swh_storage, url, deposit_id, deposit_client, default_filename="archive.tar"
)
actual_load_status = loader.load()
expected_snapshot_id = "0ac7b54c042a026389f2087dc16f1d5c644ed0e4"
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id,
}
assert_last_visit_matches(loader.storage, url, status="full", type="deposit")
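Illustrative note: the assertions above now pass SWHIDs straight to raw_extrinsic_metadata_get instead of a (MetadataTargetType, target) pair. A hedged, self-contained sketch of the new call shape, against an in-memory storage and with placeholder values:

from swh.model.hashutil import hash_to_bytes
from swh.model.identifiers import ExtendedObjectType, ExtendedSWHID
from swh.model.model import MetadataAuthority, MetadataAuthorityType, Origin
from swh.storage import get_storage

storage = get_storage(cls="memory")
authority = MetadataAuthority(
    type=MetadataAuthorityType.DEPOSIT_CLIENT,
    url="https://hal-test.archives-ouvertes.fr/",
)
storage.metadata_authority_add([authority])

origin_swhid = Origin("https://hal-test.archives-ouvertes.fr/some-external-id").swhid()
directory_swhid = ExtendedSWHID(
    object_type=ExtendedObjectType.DIRECTORY,
    object_id=hash_to_bytes("b2b327b33dc85818bd23c3ccda8b7e675a66ecbd"),
)

# Previously: raw_extrinsic_metadata_get(MetadataTargetType.ORIGIN, url, authority)
orig_meta = storage.raw_extrinsic_metadata_get(origin_swhid, authority)
# Previously: raw_extrinsic_metadata_get(MetadataTargetType.DIRECTORY, directory_swhid, authority)
dir_meta = storage.raw_extrinsic_metadata_get(directory_swhid, authority)
assert orig_meta.results == [] and dir_meta.results == []  # empty storage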
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index 6825b30..3638527 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,834 +1,840 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from itertools import islice
import json
import logging
import os
import sys
import tempfile
from typing import (
Any,
Dict,
Generic,
Iterable,
Iterator,
List,
Mapping,
Optional,
Sequence,
Tuple,
TypeVar,
)
import attr
import sentry_sdk
from swh.core.tarball import uncompress
from swh.loader.core.loader import BaseLoader
from swh.loader.exception import NotFound
from swh.loader.package.utils import download
from swh.model import from_disk
from swh.model.collections import ImmutableDict
from swh.model.hashutil import hash_to_hex
-from swh.model.identifiers import SWHID
+from swh.model.identifiers import (
+ CoreSWHID,
+ ExtendedObjectType,
+ ExtendedSWHID,
+ ObjectType,
+)
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
Origin,
OriginVisit,
OriginVisitStatus,
RawExtrinsicMetadata,
Revision,
Sha1Git,
Snapshot,
TargetType,
)
from swh.storage.algos.snapshot import snapshot_get_latest
from swh.storage.interface import StorageInterface
from swh.storage.utils import now
logger = logging.getLogger(__name__)
SWH_METADATA_AUTHORITY = MetadataAuthority(
type=MetadataAuthorityType.REGISTRY,
url="https://softwareheritage.org/",
metadata={},
)
"""Metadata authority for extrinsic metadata generated by Software Heritage.
Used for metadata on "original artifacts", i.e. length, filename, and checksums
of downloaded archive files."""
@attr.s
class RawExtrinsicMetadataCore:
"""Contains the core of the metadata extracted by a loader, that will be
used to build a full RawExtrinsicMetadata object by adding object identifier,
context, and provenance information."""
format = attr.ib(type=str)
metadata = attr.ib(type=bytes)
discovery_date = attr.ib(type=Optional[datetime.datetime], default=None)
"""Defaults to the visit date."""
@attr.s
class BasePackageInfo:
"""Compute the primary key for a dict using the id_keys as primary key
composite.
Args:
d: A dict entry to compute the primary key on
id_keys: Sequence of keys to use as primary key
Returns:
The identity for that dict entry
"""
url = attr.ib(type=str)
filename = attr.ib(type=Optional[str])
# The following attribute has kw_only=True in order to allow subclasses
# to add attributes. Without kw_only, attributes without default values cannot
# go after attributes with default values.
# See <https://github.com/python-attrs/attrs/issues/38>
directory_extrinsic_metadata = attr.ib(
type=List[RawExtrinsicMetadataCore], default=[], kw_only=True,
)
# TODO: add support for metadata for directories and contents
@property
def ID_KEYS(self):
raise NotImplementedError(f"{self.__class__.__name__} is missing ID_KEYS")
def artifact_identity(self):
return [getattr(self, k) for k in self.ID_KEYS]
TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)
class PackageLoader(BaseLoader, Generic[TPackageInfo]):
# Origin visit type (str) set by the loader
visit_type = ""
visit_date: datetime.datetime
def __init__(
self,
storage: StorageInterface,
url: str,
max_content_size: Optional[int] = None,
):
"""Loader's constructor. This raises exception if the minimal required
configuration is missing (cf. fn:`check` method).
Args:
storage: Storage instance
url: Origin url to load data from
"""
super().__init__(storage=storage, max_content_size=max_content_size)
self.url = url
self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)
def get_versions(self) -> Sequence[str]:
"""Return the list of all published package versions.
Raises:
`class:swh.loader.exception.NotFound` error when failing to read the
published package versions.
Returns:
Sequence of published versions
"""
return []
def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]:
"""Given a release version of a package, retrieve the associated
package information for such version.
Args:
version: Package version
Returns:
(branch name, package metadata)
"""
yield from {}
def build_revision(
self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Revision]:
"""Build the revision from the archive metadata (extrinsic
artifact metadata) and the intrinsic metadata.
Args:
p_info: Package information
uncompressed_path: Artifact uncompressed path on disk
Returns:
Revision object
"""
raise NotImplementedError("build_revision")
def get_default_version(self) -> str:
"""Retrieve the latest release version if any.
Returns:
Latest version
"""
return ""
def last_snapshot(self) -> Optional[Snapshot]:
"""Retrieve the last snapshot out of the last visit.
"""
return snapshot_get_latest(self.storage, self.url)
def known_artifacts(
self, snapshot: Optional[Snapshot]
) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
"""Retrieve the known releases/artifact for the origin.
Args
snapshot: snapshot for the visit
Returns:
Dict of keys revision id (bytes), values a metadata Dict.
"""
if not snapshot:
return {}
# retrieve only revisions (e.g the alias we do not want here)
revs = [
rev.target
for rev in snapshot.branches.values()
if rev and rev.target_type == TargetType.REVISION
]
known_revisions = self.storage.revision_get(revs)
return {
revision.id: revision.metadata for revision in known_revisions if revision
}
def resolve_revision_from(
self, known_artifacts: Dict, p_info: TPackageInfo,
) -> Optional[bytes]:
"""Resolve the revision from a snapshot and an artifact metadata dict.
If the artifact has already been downloaded, this will return the
existing revision targeting that uncompressed artifact directory.
Otherwise, this returns None.
Args:
snapshot: Snapshot
p_info: Package information
Returns:
None or revision identifier
"""
return None
def download_package(
self, p_info: TPackageInfo, tmpdir: str
) -> List[Tuple[str, Mapping]]:
"""Download artifacts for a specific package. All downloads happen in
in the tmpdir folder.
Default implementation expects the artifacts package info to be
about one artifact per package.
Note that most implementation have 1 artifact per package. But some
implementation have multiple artifacts per package (debian), some have
none, the package is the artifact (gnu).
Args:
artifacts_package_info: Information on the package artifacts to
download (url, filename, etc...)
tmpdir: Location to retrieve such artifacts
Returns:
List of (path, computed hashes)
"""
return [download(p_info.url, dest=tmpdir, filename=p_info.filename)]
def uncompress(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
) -> str:
"""Uncompress the artifact(s) in the destination folder dest.
Optionally, this could need to use the p_info dict for some more
information (debian).
"""
uncompressed_path = os.path.join(dest, "src")
for a_path, _ in dl_artifacts:
uncompress(a_path, dest=uncompressed_path)
return uncompressed_path
def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
"""Return an extra dict of branches that are used to update the set of
branches.
"""
return {}
def finalize_visit(
self,
*,
snapshot: Optional[Snapshot],
visit: OriginVisit,
status_visit: str,
status_load: str,
failed_branches: List[str],
) -> Dict[str, Any]:
"""Finalize the visit:
- flush any unflushed data to storage
- update origin visit's status
- return the task's status
"""
self.storage.flush()
snapshot_id: Optional[bytes] = None
if snapshot and snapshot.id: # to prevent the snapshot.id to b""
snapshot_id = snapshot.id
assert visit.visit
visit_status = OriginVisitStatus(
origin=self.url,
visit=visit.visit,
type=self.visit_type,
date=now(),
status=status_visit,
snapshot=snapshot_id,
)
self.storage.origin_visit_status_add([visit_status])
result: Dict[str, Any] = {
"status": status_load,
}
if snapshot_id:
result["snapshot_id"] = hash_to_hex(snapshot_id)
if failed_branches:
logger.warning("%d failed branches", len(failed_branches))
for i, urls in enumerate(islice(failed_branches, 50)):
prefix_url = "Failed branches: " if i == 0 else ""
logger.warning("%s%s", prefix_url, urls)
return result
def load(self) -> Dict:
"""Load for a specific origin the associated contents.
for each package version of the origin
1. Fetch the files for one package version By default, this can be
implemented as a simple HTTP request. Loaders with more specific
requirements can override this, e.g.: the PyPI loader checks the
integrity of the downloaded files; the Debian loader has to download
and check several files for one package version.
2. Extract the downloaded files By default, this would be a universal
archive/tarball extraction.
Loaders for specific formats can override this method (for instance,
the Debian loader uses dpkg-source -x).
3. Convert the extracted directory to a set of Software Heritage
objects Using swh.model.from_disk.
4. Extract the metadata from the unpacked directories This would only
be applicable for "smart" loaders like npm (parsing the
package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
debian/changelog and debian/control).
On "minimal-metadata" sources such as the GNU archive, the lister
should provide the minimal set of metadata needed to populate the
revision/release objects (authors, dates) as an argument to the
task.
5. Generate the revision/release objects for the given version. From
the data generated at steps 3 and 4.
end for each
6. Generate and load the snapshot for the visit
Using the revisions/releases collected at step 5., and the branch
information from step 0., generate a snapshot and load it into the
Software Heritage archive
"""
status_load = "uneventful" # either: eventful, uneventful, failed
status_visit = "full" # see swh.model.model.OriginVisitStatus
tmp_revisions = {} # type: Dict[str, List]
snapshot = None
failed_branches: List[str] = []
# Prepare origin and origin_visit
origin = Origin(url=self.url)
try:
self.storage.origin_add([origin])
visit = list(
self.storage.origin_visit_add(
[
OriginVisit(
origin=self.url, date=self.visit_date, type=self.visit_type,
)
]
)
)[0]
except Exception as e:
logger.exception("Failed to initialize origin_visit for %s", self.url)
sentry_sdk.capture_exception(e)
return {"status": "failed"}
try:
last_snapshot = self.last_snapshot()
logger.debug("last snapshot: %s", last_snapshot)
known_artifacts = self.known_artifacts(last_snapshot)
logger.debug("known artifacts: %s", known_artifacts)
except Exception as e:
logger.exception("Failed to get previous state for %s", self.url)
sentry_sdk.capture_exception(e)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
)
load_exceptions: List[Exception] = []
try:
versions = self.get_versions()
except NotFound:
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="not_found",
status_load="failed",
)
except Exception:
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
)
for version in versions:
logger.debug("version: %s", version)
tmp_revisions[version] = []
# `p_` stands for `package_`
for branch_name, p_info in self.get_package_info(version):
logger.debug("package_info: %s", p_info)
revision_id = self.resolve_revision_from(known_artifacts, p_info)
if revision_id is None:
try:
res = self._load_revision(p_info, origin)
if res:
(revision_id, directory_id) = res
assert revision_id
assert directory_id
self._load_extrinsic_directory_metadata(
p_info, revision_id, directory_id
)
self.storage.flush()
status_load = "eventful"
except Exception as e:
self.storage.clear_buffers()
load_exceptions.append(e)
sentry_sdk.capture_exception(e)
logger.exception(
"Failed loading branch %s for %s", branch_name, self.url
)
failed_branches.append(branch_name)
continue
if revision_id is None:
continue
tmp_revisions[version].append((branch_name, revision_id))
if load_exceptions:
status_visit = "partial"
if not tmp_revisions:
# We could not load any revisions; fail completely
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
)
try:
# Retrieve the default release version (the "latest" one)
default_version = self.get_default_version()
logger.debug("default version: %s", default_version)
# Retrieve extra branches
extra_branches = self.extra_branches()
logger.debug("extra branches: %s", extra_branches)
snapshot = self._load_snapshot(
default_version, tmp_revisions, extra_branches
)
self.storage.flush()
except Exception as e:
logger.exception("Failed to build snapshot for origin %s", self.url)
sentry_sdk.capture_exception(e)
status_visit = "failed"
status_load = "failed"
if snapshot:
try:
metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
self._load_metadata_objects(metadata_objects)
except Exception as e:
logger.exception(
"Failed to load extrinsic snapshot metadata for %s", self.url
)
sentry_sdk.capture_exception(e)
status_visit = "partial"
status_load = "failed"
try:
metadata_objects = self.build_extrinsic_origin_metadata()
self._load_metadata_objects(metadata_objects)
except Exception as e:
logger.exception(
"Failed to load extrinsic origin metadata for %s", self.url
)
sentry_sdk.capture_exception(e)
status_visit = "partial"
status_load = "failed"
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit=status_visit,
status_load=status_load,
)
def _load_directory(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
) -> Tuple[str, from_disk.Directory]:
uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
logger.debug("uncompressed_path: %s", uncompressed_path)
directory = from_disk.Directory.from_disk(
path=uncompressed_path.encode("utf-8"),
max_content_length=self.max_content_size,
)
contents, skipped_contents, directories = from_disk.iter_directory(directory)
logger.debug("Number of skipped contents: %s", len(skipped_contents))
self.storage.skipped_content_add(skipped_contents)
logger.debug("Number of contents: %s", len(contents))
self.storage.content_add(contents)
logger.debug("Number of directories: %s", len(directories))
self.storage.directory_add(directories)
return (uncompressed_path, directory)
def _load_revision(
self, p_info: TPackageInfo, origin
) -> Optional[Tuple[Sha1Git, Sha1Git]]:
"""Does all the loading of a revision itself:
* downloads a package and uncompresses it
* loads it from disk
* adds contents, directories, and revision to self.storage
* returns (revision_id, directory_id)
Raises
exception when unable to download or uncompress artifacts
"""
with tempfile.TemporaryDirectory() as tmpdir:
dl_artifacts = self.download_package(p_info, tmpdir)
(uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)
# FIXME: This should be release. cf. D409
revision = self.build_revision(
p_info, uncompressed_path, directory=directory.hash
)
if not revision:
# Some artifacts are missing intrinsic metadata
# skipping those
return None
metadata = [metadata for (filepath, metadata) in dl_artifacts]
extra_metadata: Tuple[str, Any] = (
"original_artifact",
metadata,
)
if revision.metadata is not None:
full_metadata = list(revision.metadata.items()) + [extra_metadata]
else:
full_metadata = [extra_metadata]
# TODO: don't add these extrinsic metadata to the revision.
revision = attr.evolve(revision, metadata=ImmutableDict(full_metadata))
original_artifact_metadata = RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
- target=SWHID(object_type="directory", object_id=revision.directory),
+ target=ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
+ ),
discovery_date=self.visit_date,
authority=SWH_METADATA_AUTHORITY,
fetcher=self.get_metadata_fetcher(),
format="original-artifacts-json",
metadata=json.dumps(metadata).encode(),
origin=self.url,
- revision=SWHID(object_type="revision", object_id=revision.id),
+ revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision.id),
)
self._load_metadata_objects([original_artifact_metadata])
logger.debug("Revision: %s", revision)
self.storage.revision_add([revision])
assert directory.hash
return (revision.id, directory.hash)
def _load_snapshot(
self,
default_version: str,
revisions: Dict[str, List[Tuple[str, bytes]]],
extra_branches: Dict[bytes, Mapping[str, Any]],
) -> Optional[Snapshot]:
"""Build snapshot out of the current revisions stored and extra branches.
Then load it in the storage.
"""
logger.debug("revisions: %s", revisions)
# Build and load the snapshot
branches = {} # type: Dict[bytes, Mapping[str, Any]]
for version, branch_name_revisions in revisions.items():
if version == default_version and len(branch_name_revisions) == 1:
# only 1 branch (no ambiguity), we can create an alias
# branch 'HEAD'
branch_name, _ = branch_name_revisions[0]
# except for some corner case (deposit)
if branch_name != "HEAD":
branches[b"HEAD"] = {
"target_type": "alias",
"target": branch_name.encode("utf-8"),
}
for branch_name, target in branch_name_revisions:
branches[branch_name.encode("utf-8")] = {
"target_type": "revision",
"target": target,
}
# Deal with extra-branches
for name, branch_target in extra_branches.items():
if name in branches:
logger.error("Extra branch '%s' has been ignored", name)
else:
branches[name] = branch_target
snapshot_data = {"branches": branches}
logger.debug("snapshot: %s", snapshot_data)
snapshot = Snapshot.from_dict(snapshot_data)
logger.debug("snapshot: %s", snapshot)
self.storage.snapshot_add([snapshot])
return snapshot
def get_loader_name(self) -> str:
"""Returns a fully qualified name of this loader."""
return f"{self.__class__.__module__}.{self.__class__.__name__}"
def get_loader_version(self) -> str:
"""Returns the version of the current loader."""
module_name = self.__class__.__module__ or ""
module_name_parts = module_name.split(".")
# Iterate rootward through the package hierarchy until we find a parent of this
# loader's module with a __version__ attribute.
for prefix_size in range(len(module_name_parts), 0, -1):
package_name = ".".join(module_name_parts[0:prefix_size])
module = sys.modules[package_name]
if hasattr(module, "__version__"):
return module.__version__ # type: ignore
# If this loader's class has no parent package with a __version__,
# it should implement it itself.
raise NotImplementedError(
f"Could not dynamically find the version of {self.get_loader_name()}."
)
def get_metadata_fetcher(self) -> MetadataFetcher:
"""Returns a MetadataFetcher instance representing this package loader;
which is used to for adding provenance information to extracted
extrinsic metadata, if any."""
return MetadataFetcher(
name=self.get_loader_name(), version=self.get_loader_version(), metadata={},
)
def get_metadata_authority(self) -> MetadataAuthority:
"""For package loaders that get extrinsic metadata, returns the authority
the metadata are coming from.
"""
raise NotImplementedError("get_metadata_authority")
def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
"""Returns metadata items, used by build_extrinsic_origin_metadata."""
return []
def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]:
"""Builds a list of full RawExtrinsicMetadata objects, using
metadata returned by get_extrinsic_origin_metadata."""
metadata_items = self.get_extrinsic_origin_metadata()
if not metadata_items:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in metadata_items:
metadata_objects.append(
RawExtrinsicMetadata(
- type=MetadataTargetType.ORIGIN,
- target=self.url,
+ target=Origin(self.url).swhid(),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
)
)
return metadata_objects
def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]:
"""Returns metadata items, used by build_extrinsic_snapshot_metadata."""
return []
def build_extrinsic_snapshot_metadata(
self, snapshot_id: Sha1Git
) -> List[RawExtrinsicMetadata]:
"""Builds a list of full RawExtrinsicMetadata objects, using
metadata returned by get_extrinsic_snapshot_metadata."""
metadata_items = self.get_extrinsic_snapshot_metadata()
if not metadata_items:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in metadata_items:
metadata_objects.append(
RawExtrinsicMetadata(
- type=MetadataTargetType.SNAPSHOT,
- target=SWHID(object_type="snapshot", object_id=snapshot_id),
+ target=ExtendedSWHID(
+ object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id
+ ),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
origin=self.url,
)
)
return metadata_objects
def build_extrinsic_directory_metadata(
self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
) -> List[RawExtrinsicMetadata]:
if not p_info.directory_extrinsic_metadata:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in p_info.directory_extrinsic_metadata:
metadata_objects.append(
RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
- target=SWHID(object_type="directory", object_id=directory_id),
+ target=ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id
+ ),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
origin=self.url,
- revision=SWHID(
- object_type="revision", object_id=hash_to_hex(revision_id)
+ revision=CoreSWHID(
+ object_type=ObjectType.REVISION, object_id=revision_id
),
)
)
return metadata_objects
def _load_extrinsic_directory_metadata(
self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
) -> None:
metadata_objects = self.build_extrinsic_directory_metadata(
p_info, revision_id, directory_id
)
self._load_metadata_objects(metadata_objects)
def _load_metadata_objects(
self, metadata_objects: List[RawExtrinsicMetadata]
) -> None:
if not metadata_objects:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return
self._create_authorities(mo.authority for mo in metadata_objects)
self._create_fetchers(mo.fetcher for mo in metadata_objects)
self.storage.raw_extrinsic_metadata_add(metadata_objects)
def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None:
deduplicated_authorities = {
(authority.type, authority.url): authority for authority in authorities
}
if authorities:
self.storage.metadata_authority_add(list(deduplicated_authorities.values()))
def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None:
deduplicated_fetchers = {
(fetcher.name, fetcher.version): fetcher for fetcher in fetchers
}
if fetchers:
self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values()))
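A hedged sketch of the new RawExtrinsicMetadata construction that build_extrinsic_directory_metadata and friends now emit: the `type=` argument is gone, the target is an ExtendedSWHID, and the revision context is a CoreSWHID. The authority, fetcher, hashes, and origin URL below are placeholders, not values from this change:

import datetime

from swh.model.hashutil import hash_to_bytes
from swh.model.identifiers import (
    CoreSWHID,
    ExtendedObjectType,
    ExtendedSWHID,
    ObjectType,
)
from swh.model.model import (
    MetadataAuthority,
    MetadataAuthorityType,
    MetadataFetcher,
    RawExtrinsicMetadata,
)

directory_id = hash_to_bytes("b2b327b33dc85818bd23c3ccda8b7e675a66ecbd")
revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb")

metadata = RawExtrinsicMetadata(
    target=ExtendedSWHID(
        object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id
    ),
    discovery_date=datetime.datetime.now(tz=datetime.timezone.utc),
    authority=MetadataAuthority(
        type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/"
    ),
    fetcher=MetadataFetcher(name="swh.loader.core", version="0.20.0"),
    format="original-artifacts-json",
    metadata=b"[]",
    # context fields, as in the directory metadata built above
    origin="https://example.org/some-origin",
    revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision_id),
)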
diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py
index 240e509..fc171db 100644
--- a/swh/loader/package/nixguix/tests/test_nixguix.py
+++ b/swh/loader/package/nixguix/tests/test_nixguix.py
@@ -1,707 +1,705 @@
# Copyright (C) 2020-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import logging
import os
from typing import Dict, Optional, Tuple
from unittest.mock import patch
import attr
import pytest
from swh.loader.package import __version__
from swh.loader.package.archive.loader import ArchiveLoader
from swh.loader.package.nixguix.loader import (
NixGuixLoader,
NixGuixPackageInfo,
clean_sources,
make_pattern_unsupported_file_extension,
parse_sources,
retrieve_sources,
)
from swh.loader.package.utils import download
from swh.loader.tests import assert_last_visit_matches
from swh.loader.tests import check_snapshot as check_snapshot_full
from swh.loader.tests import get_stats
from swh.model.hashutil import hash_to_bytes, hash_to_hex
-from swh.model.identifiers import SWHID
+from swh.model.identifiers import ExtendedObjectType, ExtendedSWHID
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.storage.algos.origin import origin_get_latest_visit_status
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.storage.exc import HashCollision
from swh.storage.interface import PagedResult, StorageInterface
sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json"
@pytest.fixture
def raw_sources(datadir) -> bytes:
with open(
os.path.join(
datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json"
),
"rb",
) as f:
return f.read()
SNAPSHOT1 = Snapshot(
id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"),
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
target_type=TargetType.REVISION,
),
},
)
def check_snapshot(snapshot: Snapshot, storage: StorageInterface):
# The `evaluation` branch is allowed to be unresolvable. It is possible that, at
# nixguix visit time, it has not yet been visited (the git loader is in charge of its
# visit for now). For more details, check the
# swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring.
check_snapshot_full(
snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")]
)
assert isinstance(snapshot, Snapshot)
# then ensure the snapshot revisions are structurally as expected
revision_ids = []
for name, branch in snapshot.branches.items():
if name == b"evaluation":
continue # skipping that particular branch (cf. previous comment)
if branch.target_type == TargetType.REVISION:
revision_ids.append(branch.target)
revisions = storage.revision_get(revision_ids)
for rev in revisions:
assert rev is not None
metadata = rev.metadata
assert metadata is not None
raw = metadata["extrinsic"]["raw"]
assert "url" in raw
assert "integrity" in raw
def test_retrieve_sources(swh_storage, requests_mock_datadir):
j = parse_sources(retrieve_sources(sources_url))
assert "sources" in j.keys()
assert len(j["sources"]) == 2
def test_nixguix_url_not_found(swh_storage, requests_mock_datadir):
"""When failing to read from the url, the visit is marked as not_found.
Here the sources url does not exist, so requests_mock_datadir returns a 404.
Resulting in a NotFound raised within the package loader's main loop.
This results in the task with status failed and a visit_status with status
"not_found".
"""
unknown_url = "https://non-existing-url/"
loader = NixGuixLoader(swh_storage, unknown_url)
# during the retrieval step
load_status = loader.load()
assert load_status == {"status": "failed"}
assert_last_visit_matches(
swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None
)
assert len(requests_mock_datadir.request_history) == 1
assert requests_mock_datadir.request_history[0].url == unknown_url
def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir):
"""Other errors during communication with the url, the visit is marked as failed
requests_mock_datadir will intercept the requests to sources_url. Since the file
exists, returns a 200 with the requested content of the query. As file.txt is no
json, fails do decode and raises a JSONDecodeError. In effect failing the visit.
"""
sources_url = "https://example.com/file.txt"
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
assert load_status == {"status": "failed"}
assert_last_visit_matches(
swh_storage, sources_url, status="failed", type="nixguix", snapshot=None
)
assert len(requests_mock_datadir.request_history) == 1
assert requests_mock_datadir.request_history[0].url == sources_url
def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir):
sources = {}
with pytest.raises(ValueError, match="sources structure invalid, missing: .*"):
clean_sources(sources)
def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir):
for version_ok in [1, "1"]: # Check those versions are fine
clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"})
for version_ko in [0, "0", 2, "2"]:  # Check version != 1 raises an error
with pytest.raises(
ValueError, match="sources structure version .* is not supported"
):
clean_sources(
{"version": version_ko, "sources": [], "revision": "my-revision"}
)
def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir):
valid_sources = [
# 1 valid source
{"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"},
]
sources = {
"version": 1,
"sources": valid_sources
+ [
# integrity is missing
{"type": "url", "urls": ["my-url.tgz"],},
# urls is not a list
{"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"},
# type is not url
{"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"},
# missing fields which got double-checked nonetheless...
{"integrity": "my-integrity"},
],
"revision": "my-revision",
}
clean = clean_sources(sources)
assert len(clean["sources"]) == len(valid_sources)
def test_make_pattern_unsupported_file_extension():
unsupported_extensions = ["el", "c", "txt"]
supported_extensions = ["Z", "7z"] # for test
actual_unsupported_pattern = make_pattern_unsupported_file_extension(
unsupported_extensions
)
for supported_ext in supported_extensions:
assert supported_ext not in unsupported_extensions
supported_filepath = f"anything.{supported_ext}"
actual_match = actual_unsupported_pattern.match(supported_filepath)
assert not actual_match
for unsupported_ext in unsupported_extensions:
unsupported_filepath = f"something.{unsupported_ext}"
actual_match = actual_unsupported_pattern.match(unsupported_filepath)
assert actual_match
def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir):
unsupported_file_extensions = [
"iso",
"whl",
"gem",
"pom",
"msi",
"pod",
"png",
"rock",
"ttf",
"jar",
"c",
"el",
"rpm",
"diff",
"patch",
]
supported_sources = [
{
"type": "url",
"urls": [f"https://server.org/my-url.{ext}"],
"integrity": "my-integrity",
}
for ext in [
"known-unknown-but-ok", # this is fine as well with the current approach
"zip",
"tar.gz",
"tgz",
"tar.bz2",
"tbz",
"tbz2",
"tar.xz",
"tar",
"zip",
"7z",
"Z",
]
]
unsupported_sources = [
{
"type": "url",
"urls": [f"https://server.org/my-url.{ext}"],
"integrity": "my-integrity",
}
for ext in unsupported_file_extensions
]
sources = {
"version": 1,
"sources": supported_sources + unsupported_sources,
"revision": "my-revision",
}
clean = clean_sources(sources, unsupported_file_extensions)
assert len(clean["sources"]) == len(supported_sources)
def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources):
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res["status"] == "eventful"
stats = get_stats(swh_storage)
assert {
"content": 1,
"directory": 3,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
# The visit is partial because urls pointing to non-tarball files
# are not handled yet
assert_last_visit_matches(
swh_storage, sources_url, status="partial", type="nixguix"
)
visit_status = origin_get_latest_visit_status(swh_storage, sources_url)
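# With the new swh.model API, the snapshot SWHID is built with ExtendedSWHID and
# ExtendedObjectType, and takes the snapshot id as raw bytes (no hash_to_hex needed).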
- snapshot_swhid = SWHID(
- object_type="snapshot", object_id=hash_to_hex(visit_status.snapshot)
+ snapshot_swhid = ExtendedSWHID(
+ object_type=ExtendedObjectType.SNAPSHOT, object_id=visit_status.snapshot
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url=sources_url,
)
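# RawExtrinsicMetadata no longer takes a MetadataTargetType: the object type is
# carried by the target SWHID itself, and raw_extrinsic_metadata_get below is
# queried with that SWHID directly.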
expected_metadata = [
RawExtrinsicMetadata(
- type=MetadataTargetType.SNAPSHOT,
target=snapshot_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.nixguix.loader.NixGuixLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="nixguix-sources-json",
metadata=raw_sources,
origin=sources_url,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.SNAPSHOT, snapshot_swhid, metadata_authority,
+ snapshot_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
def test_uncompress_failure(swh_storage, requests_mock_datadir):
"""Non tarball files are currently not supported and the uncompress
function fails on such kind of files.
However, even in this case of failure (because of the url
https://example.com/file.txt), a snapshot and a visit has to be
created (with a status partial since all files are not archived).
"""
loader = NixGuixLoader(swh_storage, sources_url)
loader_status = loader.load()
sources = loader.supported_sources()["sources"]
urls = [s["urls"][0] for s in sources]
assert "https://example.com/file.txt" in urls
assert loader_status["status"] == "eventful"
# The visit is partial because urls pointing to non-tarball files
# are not handled yet
assert_last_visit_matches(
swh_storage, sources_url, status="partial", type="nixguix"
)
def test_loader_incremental(swh_storage, requests_mock_datadir):
"""Ensure a second visit do not download artifact already
downloaded by the previous visit.
"""
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
loader.load()
assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz")
]
# The artifact
# 'https://github.com/owner-1/repository-1/revision-1.tgz' is only
# visited one time
assert len(urls) == 1
def test_loader_two_visits(swh_storage, requests_mock_datadir_visits):
"""To ensure there is only one origin, but two visits, two revisions
and two snapshots are created.
The first visit creates a snapshot containing one tarball. The
second visit creates a snapshot containing the same tarball and
another tarball.
"""
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 1,
"directory": 3,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
loader = NixGuixLoader(swh_storage, sources_url)
load_status = loader.load()
expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97"
expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex)
assert load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id_hex,
}
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=expected_snapshot_id,
)
# This ensures visits are incremental. Indeed, if we request an url a
# second time, because of the requests_mock_datadir_visits
# fixture, the file has to end with `_visit1`.
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"),
target_type=TargetType.REVISION,
),
b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch(
target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 2,
"directory": 5,
"origin": 1,
"origin_visit": 2,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 2,
} == stats
def test_resolve_revision_from(swh_storage, requests_mock_datadir, datadir):
loader = NixGuixLoader(swh_storage, sources_url)
known_artifacts = {
"id1": {"extrinsic": {"raw": {"url": "url1", "integrity": "integrity1"}}},
"id2": {"extrinsic": {"raw": {"url": "url2", "integrity": "integrity2"}}},
}
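# resolve_revision_from matches a package on the (url, integrity) pair recorded in
# the known artifacts' extrinsic.raw metadata: a known pair yields the revision id,
# an unknown one yields None.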
p_info = NixGuixPackageInfo.from_metadata(
{"url": "url1", "integrity": "integrity1"}
)
assert loader.resolve_revision_from(known_artifacts, p_info) == "id1"
p_info = NixGuixPackageInfo.from_metadata(
{"url": "url3", "integrity": "integrity3"}
)
assert loader.resolve_revision_from(known_artifacts, p_info) == None # noqa
def test_evaluation_branch(swh_storage, requests_mock_datadir):
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res["status"] == "eventful"
assert_last_visit_matches(
swh_storage,
sources_url,
status="partial",
type="nixguix",
snapshot=SNAPSHOT1.id,
)
check_snapshot(SNAPSHOT1, storage=swh_storage)
def test_eoferror(swh_storage, requests_mock_datadir):
"""Load a truncated archive which is invalid to make the uncompress
function raising the exception EOFError. We then check if a
snapshot is created, meaning this error is well managed.
"""
sources = (
"https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa
)
loader = NixGuixLoader(swh_storage, sources)
loader.load()
expected_snapshot = Snapshot(
id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"),
branches={
b"evaluation": SnapshotBranch(
target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, storage=swh_storage)
def fake_download(
url: str,
dest: str,
hashes: Dict = {},
filename: Optional[str] = None,
auth: Optional[Tuple[str, str]] = None,
) -> Tuple[str, Dict]:
"""Fake download which raises HashCollision (for the sake of test simpliciy,
let's accept that makes sense)
For tests purpose only.
"""
if url == "https://example.com/file.txt":
# instead of failing because it's a file not dealt with by the nix guix
# loader, make it raise a hash collision
raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", [])
return download(url, dest, hashes, filename, auth)
def test_raise_exception(swh_storage, requests_mock_datadir, mocker):
mock_download = mocker.patch("swh.loader.package.loader.download")
mock_download.side_effect = fake_download
loader = NixGuixLoader(swh_storage, sources_url)
res = loader.load()
assert res == {
"status": "eventful",
"snapshot_id": SNAPSHOT1.id.hex(),
}
check_snapshot(SNAPSHOT1, storage=swh_storage)
assert len(mock_download.mock_calls) == 2
# The visit is partial because some artifact downloads failed
assert_last_visit_matches(
swh_storage, sources_url, status="partial", type="nixguix"
)
def test_load_nixguix_one_common_artifact_from_other_loader(
swh_storage, datadir, requests_mock_datadir_visits, caplog
):
"""Misformatted revision should be caught and logged, then loading continues
"""
caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader")
# 1. First ingest with, for example, the archive loader
gnu_url = "https://ftp.gnu.org/gnu/8sync/"
release = "0.1.0"
artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz"
gnu_artifacts = [
{
"time": 944729610,
"url": artifact_url,
"length": 221837,
"filename": f"8sync-{release}.tar.gz",
"version": release,
}
]
archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts)
actual_load_status = archive_loader.load()
expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5"
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] == expected_snapshot_id # noqa
assert_last_visit_matches(
archive_loader.storage, gnu_url, status="full", type="tar"
)
gnu_snapshot: Snapshot = snapshot_get_all_branches(
archive_loader.storage, hash_to_bytes(expected_snapshot_id)
)
first_revision = gnu_snapshot.branches[f"releases/{release}".encode("utf-8")]
# 2. Then ingest with the nixguix loader which lists the same artifact within its
# sources.json
# ensure test setup is ok
data_sources = os.path.join(
datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json"
)
all_sources = json.loads(open(data_sources).read())
found = False
for source in all_sources["sources"]:
if source["urls"][0] == artifact_url:
found = True
assert (
found is True
), f"test setup error: {artifact_url} must be in {data_sources}"
# first visit with a snapshot, ok
sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json"
loader = NixGuixLoader(swh_storage, sources_url)
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "eventful"
assert_last_visit_matches(swh_storage, sources_url, status="full", type="nixguix")
snapshot_id = actual_load_status2["snapshot_id"]
snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id))
assert snapshot
# simulate a snapshot already seen with a revision with the wrong metadata structure
# This revision should be skipped, thus causing the artifact to be ingested again.
with patch(
"swh.loader.package.loader.PackageLoader.last_snapshot"
) as last_snapshot:
# mutate the snapshot to target a revision with the wrong metadata structure
# snapshot["branches"][artifact_url.encode("utf-8")] = first_revision
old_revision = swh_storage.revision_get([first_revision.target])[0]
# assert that revision is not in the right format
assert old_revision.metadata["extrinsic"]["raw"].get("integrity", {}) == {}
# mutate snapshot to create a clash
snapshot = attr.evolve(
snapshot,
branches={
**snapshot.branches,
artifact_url.encode("utf-8"): SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes(old_revision.id),
),
},
)
# modify the snapshot to actually change the revision metadata structure, so we
# simulate a revision written by somebody else (with a different structure)
last_snapshot.return_value = snapshot
loader = NixGuixLoader(swh_storage, sources_url)
actual_load_status3 = loader.load()
assert last_snapshot.called
assert actual_load_status3["status"] == "eventful"
assert_last_visit_matches(
swh_storage, sources_url, status="full", type="nixguix"
)
new_snapshot_id = "32ff641e510aceefc3a6d0dcbf208b2854d2e965"
assert actual_load_status3["snapshot_id"] == new_snapshot_id
last_snapshot = snapshot_get_all_branches(
swh_storage, hash_to_bytes(new_snapshot_id)
)
new_revision_branch = last_snapshot.branches[artifact_url.encode("utf-8")]
assert new_revision_branch.target_type == TargetType.REVISION
new_revision = swh_storage.revision_get([new_revision_branch.target])[0]
# the new revision has the correct structure, so it got ingested alright by the
# new run
assert new_revision.metadata["extrinsic"]["raw"]["integrity"] is not None
nb_detections = 0
actual_detection: Dict
for record in caplog.records:
logtext = record.getMessage()
if "Unexpected metadata revision structure detected:" in logtext:
nb_detections += 1
actual_detection = record.args["context"]
assert actual_detection
# as many detections as there are sources listed in the sources.json
assert nb_detections == len(all_sources["sources"])
assert actual_detection == {
"revision": hash_to_hex(old_revision.id),
"reason": "'integrity'",
"known_artifact": old_revision.metadata,
}
diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py
index baa43a3..dee9e2a 100644
--- a/swh/loader/package/npm/tests/test_npm.py
+++ b/swh/loader/package/npm/tests/test_npm.py
@@ -1,711 +1,718 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import os
import pytest
from swh.loader.package import __version__
from swh.loader.package.npm.loader import (
NpmLoader,
_author_str,
artifact_to_revision_id,
extract_npm_package_author,
)
from swh.loader.package.tests.common import check_metadata_paths
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
-from swh.model.identifiers import SWHID
+from swh.model.identifiers import (
+ CoreSWHID,
+ ExtendedObjectType,
+ ExtendedSWHID,
+ ObjectType,
+)
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
Person,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.storage.interface import PagedResult
@pytest.fixture
def org_api_info(datadir) -> bytes:
with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f:
return f.read()
def test_npm_author_str():
for author, expected_author in [
("author", "author"),
(
["Al from quantum leap", "hal from 2001 space odyssey"],
"Al from quantum leap",
),
([], ""),
({"name": "groot", "email": "groot@galaxy.org",}, "groot <groot@galaxy.org>"),
({"name": "somebody",}, "somebody"),
({"email": "no@one.org"}, " <no@one.org>"), # note first elt is an extra blank
({"name": "no one", "email": None,}, "no one"),
({"email": None,}, ""),
({"name": None}, ""),
({"name": None, "email": None,}, ""),
({}, ""),
(None, None),
({"name": []}, "",),
(
{"name": ["Susan McSween", "William H. Bonney", "Doc Scurlock",]},
"Susan McSween",
),
(None, None),
]:
assert _author_str(author) == expected_author
def test_npm_extract_npm_package_author(datadir):
package_metadata_filepath = os.path.join(
datadir, "https_replicate.npmjs.com", "org_visit1"
)
with open(package_metadata_filepath) as json_file:
package_metadata = json.load(json_file)
assert extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person(
fullname=b"mooz <stillpedant@gmail.com>",
name=b"mooz",
email=b"stillpedant@gmail.com",
)
assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person(
fullname=b"Masafumi Oyamada <stillpedant@gmail.com>",
name=b"Masafumi Oyamada",
email=b"stillpedant@gmail.com",
)
package_json = json.loads(
"""
{
"name": "highlightjs-line-numbers.js",
"version": "2.7.0",
"description": "Highlight.js line numbers plugin.",
"main": "src/highlightjs-line-numbers.js",
"dependencies": {},
"devDependencies": {
"gulp": "^4.0.0",
"gulp-rename": "^1.4.0",
"gulp-replace": "^0.6.1",
"gulp-uglify": "^1.2.0"
},
"repository": {
"type": "git",
"url": "https://github.com/wcoder/highlightjs-line-numbers.js.git"
},
"author": "Yauheni Pakala <evgeniy.pakalo@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues"
},
"homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"Yauheni Pakala <evgeniy.pakalo@gmail.com>",
name=b"Yauheni Pakala",
email=b"evgeniy.pakalo@gmail.com",
)
package_json = json.loads(
"""
{
"name": "3-way-diff",
"version": "0.0.1",
"description": "3-way diffing of JavaScript objects",
"main": "index.js",
"authors": [
{
"name": "Shawn Walsh",
"url": "https://github.com/shawnpwalsh"
},
{
"name": "Markham F Rollins IV",
"url": "https://github.com/mrollinsiv"
}
],
"keywords": [
"3-way diff",
"3 way diff",
"three-way diff",
"three way diff"
],
"devDependencies": {
"babel-core": "^6.20.0",
"babel-preset-es2015": "^6.18.0",
"mocha": "^3.0.2"
},
"dependencies": {
"lodash": "^4.15.0"
}
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None
)
package_json = json.loads(
"""
{
"name": "yfe-ynpm",
"version": "1.0.0",
"homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm",
"repository": {
"type": "git",
"url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git"
},
"author": [
"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
"xufuzi <xufuzi@ywwl.com> (https://7993.org)"
],
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
name=b"fengmk2",
email=b"fengmk2@gmail.com",
)
package_json = json.loads(
"""
{
"name": "umi-plugin-whale",
"version": "0.0.8",
"description": "Internal contract component",
"authors": {
"name": "xiaohuoni",
"email": "448627663@qq.com"
},
"repository": "alitajs/whale",
"devDependencies": {
"np": "^3.0.4",
"umi-tools": "*"
},
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json) == Person(
fullname=b"xiaohuoni <448627663@qq.com>",
name=b"xiaohuoni",
email=b"448627663@qq.com",
)
package_json_no_authors = json.loads(
"""{
"authors": null,
"license": "MIT"
}"""
)
assert extract_npm_package_author(package_json_no_authors) == Person(
fullname=b"", name=None, email=None
)
def normalize_hashes(hashes):
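# Accept a single hex digest, a list of digests, or a {hex: hex} mapping, and
# convert every value to bytes.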
if isinstance(hashes, str):
return hash_to_bytes(hashes)
if isinstance(hashes, list):
return [hash_to_bytes(x) for x in hashes]
return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()}
_expected_new_contents_first_visit = normalize_hashes(
[
"4ce3058e16ab3d7e077f65aabf855c34895bf17c",
"858c3ceee84c8311adc808f8cdb30d233ddc9d18",
"0fa33b4f5a4e0496da6843a38ff1af8b61541996",
"85a410f8ef8eb8920f2c384a9555566ad4a2e21b",
"9163ac8025923d5a45aaac482262893955c9b37b",
"692cf623b8dd2c5df2c2998fd95ae4ec99882fb4",
"18c03aac6d3e910efb20039c15d70ab5e0297101",
"41265c42446aac17ca769e67d1704f99e5a1394d",
"783ff33f5882813dca9239452c4a7cadd4dba778",
"b029cfb85107aee4590c2434a3329bfcf36f8fa1",
"112d1900b4c2e3e9351050d1b542c9744f9793f3",
"5439bbc4bd9a996f1a38244e6892b71850bc98fd",
"d83097a2f994b503185adf4e719d154123150159",
"d0939b4898e83090ee55fd9d8a60e312cfadfbaf",
"b3523a26f7147e4af40d9d462adaae6d49eda13e",
"cd065fb435d6fb204a8871bcd623d0d0e673088c",
"2854a40855ad839a54f4b08f5cff0cf52fca4399",
"b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe",
"0f73d56e1cf480bded8a1ecf20ec6fc53c574713",
"0d9882b2dfafdce31f4e77fe307d41a44a74cefe",
"585fc5caab9ead178a327d3660d35851db713df1",
"e8cd41a48d79101977e3036a87aeb1aac730686f",
"5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7",
"9c3cc2763bf9e9e37067d3607302c4776502df98",
"3649a68410e354c83cd4a38b66bd314de4c8f5c9",
"e96ed0c091de1ebdf587104eaf63400d1974a1fe",
"078ca03d2f99e4e6eab16f7b75fbb7afb699c86c",
"38de737da99514de6559ff163c988198bc91367a",
]
)
_expected_new_directories_first_visit = normalize_hashes(
[
"3370d20d6f96dc1c9e50f083e2134881db110f4f",
"42753c0c2ab00c4501b552ac4671c68f3cf5aece",
"d7895533ef5edbcffdea3f057d9fef3a1ef845ce",
"80579be563e2ef3e385226fe7a3f079b377f142c",
"3b0ddc6a9e58b4b53c222da4e27b280b6cda591c",
"bcad03ce58ac136f26f000990fc9064e559fe1c0",
"5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca",
"e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd",
"584b5b4b6cf7f038095e820b99386a9c232de931",
"184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a",
"bb5f4ee143c970367eb409f2e4c1104898048b9d",
"1b95491047add1103db0dfdfa84a9735dcb11e88",
"a00c6de13471a2d66e64aca140ddb21ef5521e62",
"5ce6c1cd5cda2d546db513aaad8c72a44c7771e2",
"c337091e349b6ac10d38a49cdf8c2401ef9bb0f2",
"202fafcd7c0f8230e89d5496ad7f44ab12b807bf",
"775cc516543be86c15c1dc172f49c0d4e6e78235",
"ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e",
]
)
_expected_new_revisions_first_visit = normalize_hashes(
{
"d8a1c7474d2956ac598a19f0f27d52f7015f117e": (
"42753c0c2ab00c4501b552ac4671c68f3cf5aece"
),
"5f9eb78af37ffd12949f235e86fac04898f9f72a": (
"3370d20d6f96dc1c9e50f083e2134881db110f4f"
),
"ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": (
"d7895533ef5edbcffdea3f057d9fef3a1ef845ce"
),
}
)
def package_url(package):
return "https://www.npmjs.com/package/%s" % package
def package_metadata_url(package):
return "https://replicate.npmjs.com/%s/" % package
def test_npm_revision_metadata_structure(swh_storage, requests_mock_datadir):
package = "org"
loader = NpmLoader(swh_storage, package_url(package))
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e")
revision = swh_storage.revision_get([expected_revision_id])[0]
assert revision is not None
check_metadata_paths(
revision.metadata,
paths=[
("intrinsic.tool", str),
("intrinsic.raw", dict),
("extrinsic.provider", str),
("extrinsic.when", str),
("extrinsic.raw", dict),
("original_artifact", list),
],
)
for original_artifact in revision.metadata["original_artifact"]:
check_metadata_paths(
original_artifact,
paths=[("filename", str), ("length", int), ("checksums", dict),],
)
def test_npm_loader_first_visit(swh_storage, requests_mock_datadir, org_api_info):
package = "org"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit),
"skipped_content": 0,
"snapshot": 1,
} == stats
contents = swh_storage.content_get(_expected_new_contents_first_visit)
count = sum(0 if content is None else 1 for content in contents)
assert count == len(_expected_new_contents_first_visit)
assert (
list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == []
)
assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == []
versions = [
("0.0.2", "d8a1c7474d2956ac598a19f0f27d52f7015f117e"),
("0.0.3", "5f9eb78af37ffd12949f235e86fac04898f9f72a"),
("0.0.4", "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"),
]
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target=b"releases/0.0.4", target_type=TargetType.ALIAS
),
**{
b"releases/"
+ version_name.encode(): SnapshotBranch(
target=hash_to_bytes(version_id), target_type=TargetType.REVISION,
)
for (version_name, version_id) in versions
},
},
)
check_snapshot(expected_snapshot, swh_storage)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="https://npmjs.com/",
)
for (version_name, revision_id) in versions:
revision = swh_storage.revision_get([hash_to_bytes(revision_id)])[0]
directory_id = revision.directory
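# The directory (the metadata target) is identified by an ExtendedSWHID, while the
# revision anchor is a CoreSWHID; both now take the object id as raw bytes.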
- directory_swhid = SWHID(object_type="directory", object_id=directory_id,)
- revision_swhid = SWHID(object_type="revision", object_id=revision_id,)
+ directory_swhid = ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id,
+ )
+ revision_swhid = CoreSWHID(
+ object_type=ObjectType.REVISION, object_id=hash_to_bytes(revision_id),
+ )
expected_metadata = [
RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.npm.loader.NpmLoader", version=__version__,
),
discovery_date=loader.visit_date,
format="replicate-npm-package-json",
metadata=json.dumps(
json.loads(org_api_info)["versions"][version_name]
).encode(),
origin="https://www.npmjs.com/package/org",
revision=revision_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, directory_swhid, metadata_authority,
+ directory_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
def test_npm_loader_incremental_visit(swh_storage, requests_mock_datadir_visits):
package = "org"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d")
actual_load_status = loader.load()
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": len(_expected_new_revisions_first_visit),
"skipped_content": 0,
"snapshot": 1,
} == stats
# reset loader internal state
del loader._cached_info
del loader._cached__raw_info
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "eventful"
snap_id2 = actual_load_status2["snapshot_id"]
assert snap_id2 is not None
assert snap_id2 != actual_load_status["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="npm")
stats = get_stats(swh_storage)
assert {  # 3 new release artifacts
"content": len(_expected_new_contents_first_visit) + 14,
"directory": len(_expected_new_directories_first_visit) + 15,
"origin": 1,
"origin_visit": 2,
"release": 0,
"revision": len(_expected_new_revisions_first_visit) + 3,
"skipped_content": 0,
"snapshot": 2,
} == stats
urls = [
m.url
for m in requests_mock_datadir_visits.request_history
if m.url.startswith("https://registry.npmjs.org")
]
assert len(urls) == len(set(urls))  # we visited each artifact once across both visits
@pytest.mark.usefixtures("requests_mock_datadir")
def test_npm_loader_version_divergence(swh_storage):
package = "@aller_shared"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id
)
stats = get_stats(swh_storage)
assert {  # 1 new release artifact
"content": 534,
"directory": 153,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == stats
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS, target=b"releases/0.1.0"
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"),
),
b"releases/0.1.1-alpha.14": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"),
),
},
)
check_snapshot(expected_snapshot, swh_storage)
def test_npm_artifact_to_revision_id_none():
"""Current loader version should stop soon if nothing can be found
"""
class artifact_metadata:
shasum = "05181c12cd8c22035dd31155656826b85745da37"
known_artifacts = {
"b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None
def test_npm_artifact_to_revision_id_old_loader_version():
"""Current loader version should solve old metadata scheme
"""
class artifact_metadata:
shasum = "05181c12cd8c22035dd31155656826b85745da37"
known_artifacts = {
hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): {
"package_source": {"sha1": "something-wrong"}
},
hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): {
"package_source": {"sha1": "05181c12cd8c22035dd31155656826b85745da37",}
},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes(
"845673bfe8cbd31b1eaf757745a964137e6f9116"
)
def test_npm_artifact_to_revision_id_current_loader_version():
"""Current loader version should be able to solve current metadata scheme
"""
class artifact_metadata:
shasum = "05181c12cd8c22035dd31155656826b85745da37"
known_artifacts = {
hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): {
"original_artifact": [
{"checksums": {"sha1": "05181c12cd8c22035dd31155656826b85745da37"},}
],
},
hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): {
"original_artifact": [{"checksums": {"sha1": "something-wrong"},}],
},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes(
"b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"
)
def test_npm_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
"""Skip artifact with no intrinsic metadata during ingestion
"""
package = "nativescript-telerik-analytics"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
# no branch, as the only artifact has no intrinsic metadata
expected_snapshot = Snapshot(
id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
)
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot.id.hex(),
}
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
)
def test_npm_artifact_with_no_upload_time(swh_storage, requests_mock_datadir):
"""With no time upload, artifact is skipped
"""
package = "jammit-no-time"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
# no branch, as the only artifact has no upload time and is skipped
expected_snapshot = Snapshot(
id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={},
)
assert actual_load_status == {
"status": "uneventful",
"snapshot_id": expected_snapshot.id.hex(),
}
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="partial", type="npm", snapshot=expected_snapshot.id
)
def test_npm_artifact_use_mtime_if_no_time(swh_storage, requests_mock_datadir):
"""With no time upload, artifact is skipped
"""
package = "jammit-express"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
# artifact is used
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS, target=b"releases/0.0.1"
),
b"releases/0.0.1": SnapshotBranch(
target_type=TargetType.REVISION,
target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"),
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id
)
def test_npm_no_artifact(swh_storage, requests_mock_datadir):
"""If no artifacts at all is found for origin, the visit fails completely
"""
package = "catify"
url = package_url(package)
loader = NpmLoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {
"status": "failed",
}
assert_last_visit_matches(swh_storage, url, status="failed", type="npm")
def test_npm_origin_not_found(swh_storage, requests_mock_datadir):
url = package_url("non-existent-url")
loader = NpmLoader(swh_storage, url)
assert loader.load() == {"status": "failed"}
assert_last_visit_matches(
swh_storage, url, status="not_found", type="npm", snapshot=None
)
diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py
index 836f0c5..5415f78 100644
--- a/swh/loader/package/pypi/tests/test_pypi.py
+++ b/swh/loader/package/pypi/tests/test_pypi.py
@@ -1,899 +1,902 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import os
from os import path
from unittest.mock import patch
import pytest
from swh.core.pytest_plugin import requests_mock_datadir_factory
from swh.core.tarball import uncompress
from swh.loader.package import __version__
from swh.loader.package.pypi.loader import (
PyPILoader,
artifact_to_revision_id,
author,
extract_intrinsic_metadata,
pypi_api_url,
)
from swh.loader.package.tests.common import check_metadata_paths
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
-from swh.model.hashutil import hash_to_bytes, hash_to_hex
-from swh.model.identifiers import SWHID
+from swh.model.hashutil import hash_to_bytes
+from swh.model.identifiers import (
+ CoreSWHID,
+ ExtendedObjectType,
+ ExtendedSWHID,
+ ObjectType,
+)
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
Person,
RawExtrinsicMetadata,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.storage.interface import PagedResult
@pytest.fixture
def _0805nexter_api_info(datadir) -> bytes:
with open(
os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb",
) as f:
return f.read()
def test_pypi_author_basic():
data = {
"author": "i-am-groot",
"author_email": "iam@groot.org",
}
actual_author = author(data)
expected_author = Person(
fullname=b"i-am-groot <iam@groot.org>",
name=b"i-am-groot",
email=b"iam@groot.org",
)
assert actual_author == expected_author
def test_pypi_author_empty_email():
data = {
"author": "i-am-groot",
"author_email": "",
}
actual_author = author(data)
expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",)
assert actual_author == expected_author
def test_pypi_author_empty_name():
data = {
"author": "",
"author_email": "iam@groot.org",
}
actual_author = author(data)
expected_author = Person(
fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org",
)
assert actual_author == expected_author
def test_pypi_author_malformed():
data = {
"author": "['pierre', 'paul', 'jacques']",
"author_email": None,
}
actual_author = author(data)
expected_author = Person(
fullname=b"['pierre', 'paul', 'jacques']",
name=b"['pierre', 'paul', 'jacques']",
email=None,
)
assert actual_author == expected_author
def test_pypi_author_malformed_2():
data = {
"author": "[marie, jeanne]",
"author_email": "[marie@some, jeanne@thing]",
}
actual_author = author(data)
expected_author = Person(
fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>",
name=b"[marie, jeanne]",
email=b"[marie@some, jeanne@thing]",
)
assert actual_author == expected_author
def test_pypi_author_malformed_3():
data = {
"author": "[marie, jeanne, pierre]",
"author_email": "[marie@somewhere.org, jeanne@somewhere.org]",
}
actual_author = author(data)
expected_author = Person(
fullname=(
b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>"
),
name=b"[marie, jeanne, pierre]",
email=b"[marie@somewhere.org, jeanne@somewhere.org]",
)
assert actual_author == expected_author
# configuration error #
def test_pypi_api_url():
"""Compute pypi api url from the pypi project url should be ok"""
url = pypi_api_url("https://pypi.org/project/requests")
assert url == "https://pypi.org/pypi/requests/json"
def test_pypi_api_url_with_slash():
"""Compute pypi api url from the pypi project url should be ok"""
url = pypi_api_url("https://pypi.org/project/requests/")
assert url == "https://pypi.org/pypi/requests/json"
@pytest.mark.fs
def test_pypi_extract_intrinsic_metadata(tmp_path, datadir):
"""Parsing existing archive's PKG-INFO should yield results"""
uncompressed_archive_path = str(tmp_path)
archive_path = path.join(
datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip"
)
uncompress(archive_path, dest=uncompressed_archive_path)
actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
expected_metadata = {
"metadata_version": "1.0",
"name": "0805nexter",
"version": "1.1.0",
"summary": "a simple printer of nested lest",
"home_page": "http://www.hp.com",
"author": "hgtkpython",
"author_email": "2868989685@qq.com",
"platforms": ["UNKNOWN"],
}
assert actual_metadata == expected_metadata
@pytest.mark.fs
def test_pypi_extract_intrinsic_metadata_failures(tmp_path):
"""Parsing inexistent path/archive/PKG-INFO yield None"""
tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue)
# inexistent first level path
assert extract_intrinsic_metadata("/something-inexistent") == {}
# inexistent second level path (as expected by pypi archives)
assert extract_intrinsic_metadata(tmp_path) == {}
# inexistent PKG-INFO within second level path
existing_path_no_pkginfo = path.join(tmp_path, "something")
os.mkdir(existing_path_no_pkginfo)
assert extract_intrinsic_metadata(tmp_path) == {}
# LOADER SCENARIO #
# "edge" cases (for the same origin) #
# no release artifact:
# {visit full, status: uneventful, no contents, etc...}
requests_mock_datadir_missing_all = requests_mock_datadir_factory(
ignore_urls=[
"https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa
"https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa
]
)
def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all):
"""Load a pypi project with all artifacts missing ends up with no snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert_last_visit_matches(swh_storage, url, status="partial", type="pypi")
def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir):
"""problem during loading: {visit: failed, status: failed, no snapshot}
"""
url = "https://pypi.org/project/0805nexter"
with patch(
"swh.loader.package.pypi.loader.PyPILoader._load_snapshot",
side_effect=ValueError("Fake problem to fail visit"),
):
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
stats = get_stats(loader.storage)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 0,
} == stats
assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
# problem during loading:
# {visit: partial, status: uneventful, no snapshot}
def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir):
url = "https://pypi.org/project/0805nexter"
with patch(
"swh.loader.package.pypi.loader.PyPILoader.last_snapshot",
side_effect=ValueError("Fake problem to fail the visit"),
):
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 0,
} == stats
assert_last_visit_matches(swh_storage, url, status="failed", type="pypi")
# problem during loading: failure early enough in between swh contents...
# some contents (contents, directories, etc...) have been written in storage
# {visit: partial, status: eventful, no snapshot}
# problem during loading: failure late enough we can have snapshots (some
# revisions are written in storage already)
# {visit: partial, status: eventful, snapshot}
# "normal" cases (for the same origin) #
requests_mock_datadir_missing_one = requests_mock_datadir_factory(
ignore_urls=[
"https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa
]
)
# some missing release artifacts:
# {visit partial, status: eventful, 1 snapshot}
def test_pypi_revision_metadata_structure(
swh_storage, requests_mock_datadir, _0805nexter_api_info
):
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21")
revision = swh_storage.revision_get([expected_revision_id])[0]
assert revision is not None
check_metadata_paths(
revision.metadata,
paths=[
("intrinsic.tool", str),
("intrinsic.raw", dict),
("extrinsic.provider", str),
("extrinsic.when", str),
("extrinsic.raw", dict),
("original_artifact", list),
],
)
for original_artifact in revision.metadata["original_artifact"]:
check_metadata_paths(
original_artifact,
paths=[("filename", str), ("length", int), ("checksums", dict),],
)
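# CoreSWHID and ExtendedSWHID take the object id as raw bytes, which is why the
# hash_to_hex import could be dropped from this module.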
- revision_swhid = SWHID(
- object_type="revision", object_id=hash_to_hex(expected_revision_id)
+ revision_swhid = CoreSWHID(
+ object_type=ObjectType.REVISION, object_id=expected_revision_id
)
- directory_swhid = SWHID(
- object_type="directory", object_id=hash_to_hex(revision.directory)
+ directory_swhid = ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="https://pypi.org/",
)
expected_metadata = [
RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.pypi.loader.PyPILoader", version=__version__,
),
discovery_date=loader.visit_date,
format="pypi-project-json",
metadata=json.dumps(
json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0]
).encode(),
origin=url,
revision=revision_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, directory_swhid, metadata_authority,
+ directory_swhid, metadata_authority,
) == PagedResult(next_page_token=None, results=expected_metadata,)
def test_pypi_visit_with_missing_artifact(
swh_storage, requests_mock_datadir_missing_one
):
"""Load a pypi project with some missing artifacts ends up with 1 snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
stats = get_stats(swh_storage)
assert {
"content": 3,
"directory": 2,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 1,
"skipped_content": 0,
"snapshot": 1,
} == stats
expected_contents = map(
hash_to_bytes,
[
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
expected_snapshot = Snapshot(
id=hash_to_bytes(expected_snapshot_id),
branches={
b"releases/1.2.0": SnapshotBranch(
target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
target_type=TargetType.REVISION,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.2.0", target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, storage=swh_storage)
assert_last_visit_matches(
swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id,
)
def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir):
"""With no prior visit, load a pypi project ends up with 1 snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
stats = get_stats(swh_storage)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == stats
expected_contents = map(
hash_to_bytes,
[
"a61e24cdfdab3bb7817f6be85d37a3e666b34566",
"938c33483285fd8ad57f15497f538320df82aeb8",
"a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"05219ba38bc542d4345d5638af1ed56c7d43ca7d",
"cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
"05219ba38bc542d4345d5638af1ed56c7d43ca7d"
), # noqa
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"releases/1.1.0": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.2.0": SnapshotBranch(
target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
target_type=TargetType.REVISION,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.2.0", target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
)
def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir):
"""Multiple visits with no changes results in 1 same snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=snapshot_id
)
stats = get_stats(swh_storage)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == stats
expected_snapshot = Snapshot(
id=snapshot_id,
branches={
b"releases/1.1.0": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.2.0": SnapshotBranch(
target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
target_type=TargetType.REVISION,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.2.0", target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
actual_load_status2 = loader.load()
assert actual_load_status2 == {
"status": "uneventful",
"snapshot_id": actual_load_status2["snapshot_id"],
}
visit_status2 = assert_last_visit_matches(
swh_storage, url, status="full", type="pypi"
)
stats2 = get_stats(swh_storage)
expected_stats2 = stats.copy()
expected_stats2["origin_visit"] = 1 + 1
assert expected_stats2 == stats2
# same snapshot
assert visit_status2.snapshot == snapshot_id
def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits):
"""With prior visit, 2nd load will result with a different snapshot
"""
url = "https://pypi.org/project/0805nexter"
loader = PyPILoader(swh_storage, url)
visit1_actual_load_status = loader.load()
visit1_stats = get_stats(swh_storage)
expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
assert visit1_actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
)
assert {
"content": 6,
"directory": 4,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 2,
"skipped_content": 0,
"snapshot": 1,
} == visit1_stats
# Reset internal state
del loader._cached__raw_info
del loader._cached_info
visit2_actual_load_status = loader.load()
visit2_stats = get_stats(swh_storage)
assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status
expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283")
assert visit2_actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id2.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2
)
assert {
"content": 6 + 1, # 1 more content
"directory": 4 + 2, # 2 more directories
"origin": 1,
"origin_visit": 1 + 1,
"release": 0,
"revision": 2 + 1, # 1 more revision
"skipped_content": 0,
"snapshot": 1 + 1, # 1 more snapshot
} == visit2_stats
expected_contents = map(
hash_to_bytes,
[
"a61e24cdfdab3bb7817f6be85d37a3e666b34566",
"938c33483285fd8ad57f15497f538320df82aeb8",
"a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
"405859113963cb7a797642b45f171d6360425d16",
"e5686aa568fdb1d19d7f1329267082fe40482d31",
"83ecf6ec1114fd260ca7a833a2d165e71258c338",
"92689fa2b7fb4d4fc6fb195bf73a50c87c030639",
],
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(
hash_to_bytes,
[
"05219ba38bc542d4345d5638af1ed56c7d43ca7d",
"cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
"b178b66bd22383d5f16f4f5c923d39ca798861b4",
"c3a58f8b57433a4b56caaa5033ae2e0931405338",
"e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a",
"52604d46843b898f5a43208045d09fcf8731631b",
],
)
assert list(swh_storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
"05219ba38bc542d4345d5638af1ed56c7d43ca7d"
), # noqa
hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
"b178b66bd22383d5f16f4f5c923d39ca798861b4"
), # noqa
hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes(
"e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a"
), # noqa
}
assert list(swh_storage.revision_missing(expected_revs)) == []
expected_snapshot = Snapshot(
id=expected_snapshot_id2,
branches={
b"releases/1.1.0": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.2.0": SnapshotBranch(
target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
target_type=TargetType.REVISION,
),
b"releases/1.3.0": SnapshotBranch(
target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
target_type=TargetType.REVISION,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.3.0", target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
)
urls = [
m.url
for m in requests_mock_datadir_visits.request_history
if m.url.startswith("https://files.pythonhosted.org")
]
# visited each artifact once across 2 visits
assert len(urls) == len(set(urls))
# Scenarios:
# - release artifact, no new artifact
#   -> visit full, load status uneventful, same snapshot as before
# - release artifact, old artifact with different checksums
#   -> visit full, load status eventful, new snapshot sharing part of the
#      previous history plus some new, different history
# - release with multiple sdist artifacts per pypi "version"
#   -> the snapshot branch layout is different (one branch per artifact)
def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir):
"""With no prior visit, load a pypi project ends up with 1 snapshot
"""
url = "https://pypi.org/project/nexter"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch(
target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
target_type=TargetType.REVISION,
),
b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch(
target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"),
target_type=TargetType.REVISION,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
)
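# The two tests above illustrate the snapshot branch naming: a version with a
# single sdist maps to a b"releases/<version>" branch, while a version with
# several artifacts gets one branch per artifact, keyed by filename. A small,
# hypothetical sketch of that naming rule (illustration only, not the loader's
# actual code; the helper name is made up):
def release_branch_name(version: str, filename: str, multiple_artifacts: bool) -> bytes:
    """Derive the snapshot branch name for a pypi release artifact."""
    if multiple_artifacts:
        return f"releases/{version}/{filename}".encode()
    return f"releases/{version}".encode()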
def test_pypi_artifact_to_revision_id_none():
"""Current loader version should stop soon if nothing can be found
"""
class artifact_metadata:
sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec"
assert artifact_to_revision_id({}, artifact_metadata) is None
known_artifacts = {
"b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {
"original_artifact": {"sha256": "something-irrelevant",},
},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None
def test_pypi_artifact_to_revision_id_old_loader_version():
"""Current loader version should solve old metadata scheme
"""
class artifact_metadata:
sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec"
known_artifacts = {
hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): {
"original_artifact": {"sha256": "something-wrong",},
},
hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): {
"original_artifact": {
"sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa
},
},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes(
"845673bfe8cbd31b1eaf757745a964137e6f9116"
)
def test_pypi_artifact_to_revision_id_current_loader_version():
"""Current loader version should be able to solve current metadata scheme
"""
class artifact_metadata:
sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec"
known_artifacts = {
hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): {
"original_artifact": [
{
"checksums": {
"sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa
},
}
],
},
hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): {
"original_artifact": [{"checksums": {"sha256": "something-wrong"},}],
},
}
assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes(
"b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"
)
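# The artifact_to_revision_id tests above cover both the old metadata scheme
# ("original_artifact" stored as a single dict of checksums) and the current
# one (a list of dicts carrying a "checksums" mapping). A minimal lookup
# consistent with what these tests assert (an illustrative sketch, not
# necessarily the loader's exact implementation):
def find_revision_for_sha256(known_artifacts: dict, sha256: str):
    for rev_id, known in known_artifacts.items():
        original = known["original_artifact"]
        if isinstance(original, dict):
            # old scheme: flat checksum dict
            if original.get("sha256") == sha256:
                return rev_id
        else:
            # current scheme: list of artifacts, each with a "checksums" mapping
            for artifact in original:
                if artifact["checksums"].get("sha256") == sha256:
                    return rev_id
    return None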
def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir):
"""Skip artifact with no intrinsic metadata during ingestion
"""
url = "https://pypi.org/project/upymenu"
loader = PyPILoader(swh_storage, url)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
# no branch, since the only artifact has no intrinsic metadata
expected_snapshot = Snapshot(id=expected_snapshot_id, branches={})
check_snapshot(expected_snapshot, swh_storage)
assert_last_visit_matches(
swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
)
def test_pypi_origin_not_found(swh_storage, requests_mock_datadir):
url = "https://pypi.org/project/unknown"
loader = PyPILoader(swh_storage, url)
assert loader.load() == {"status": "failed"}
assert_last_visit_matches(
swh_storage, url, status="not_found", type="pypi", snapshot=None
)
diff --git a/swh/loader/package/tests/test_loader_metadata.py b/swh/loader/package/tests/test_loader_metadata.py
index 411c40e..88e53cb 100644
--- a/swh/loader/package/tests/test_loader_metadata.py
+++ b/swh/loader/package/tests/test_loader_metadata.py
@@ -1,228 +1,222 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from typing import Iterator, List, Sequence, Tuple
import attr
from swh.loader.package import __version__
from swh.loader.package.loader import (
BasePackageInfo,
PackageLoader,
RawExtrinsicMetadataCore,
)
from swh.model.hashutil import hash_to_bytes
-from swh.model.identifiers import SWHID
+from swh.model.identifiers import (
+ CoreSWHID,
+ ExtendedObjectType,
+ ExtendedSWHID,
+ ObjectType,
+)
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
- MetadataTargetType,
+ Origin,
Person,
RawExtrinsicMetadata,
Revision,
RevisionType,
Sha1Git,
)
EMPTY_SNAPSHOT_ID = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"
FULL_SNAPSHOT_ID = "4a9b608c9f01860a627237dd2409d1d50ec4b054"
AUTHORITY = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="http://example.org/",
)
ORIGIN_URL = "http://example.org/archive.tgz"
+ORIGIN_SWHID = Origin(ORIGIN_URL).swhid()
REVISION_ID = hash_to_bytes("8ff44f081d43176474b267de5451f2c2e88089d0")
-REVISION_SWHID = SWHID(object_type="revision", object_id=REVISION_ID)
+REVISION_SWHID = CoreSWHID(object_type=ObjectType.REVISION, object_id=REVISION_ID)
DIRECTORY_ID = hash_to_bytes("aa" * 20)
-DIRECTORY_SWHID = SWHID(object_type="directory", object_id=DIRECTORY_ID)
+DIRECTORY_SWHID = ExtendedSWHID(
+ object_type=ExtendedObjectType.DIRECTORY, object_id=DIRECTORY_ID
+)
FETCHER = MetadataFetcher(
name="swh.loader.package.tests.test_loader_metadata.MetadataTestLoader",
version=__version__,
)
DISCOVERY_DATE = datetime.datetime.now(tz=datetime.timezone.utc)
DIRECTORY_METADATA = [
RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=DIRECTORY_SWHID,
discovery_date=DISCOVERY_DATE,
authority=AUTHORITY,
fetcher=FETCHER,
format="test-format1",
metadata=b"foo bar",
origin=ORIGIN_URL,
revision=REVISION_SWHID,
),
RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=DIRECTORY_SWHID,
discovery_date=DISCOVERY_DATE + datetime.timedelta(seconds=1),
authority=AUTHORITY,
fetcher=FETCHER,
format="test-format2",
metadata=b"bar baz",
origin=ORIGIN_URL,
revision=REVISION_SWHID,
),
]
ORIGIN_METADATA = [
RawExtrinsicMetadata(
- type=MetadataTargetType.ORIGIN,
- target=ORIGIN_URL,
+ target=ORIGIN_SWHID,
discovery_date=datetime.datetime.now(tz=datetime.timezone.utc),
authority=AUTHORITY,
fetcher=FETCHER,
format="test-format3",
metadata=b"baz qux",
),
]
class MetadataTestLoader(PackageLoader[BasePackageInfo]):
def get_versions(self) -> Sequence[str]:
return ["v1.0.0"]
def _load_directory(self, dl_artifacts, tmpdir):
class directory:
hash = DIRECTORY_ID
return (None, directory) # just enough for _load_revision to work
def download_package(self, p_info: BasePackageInfo, tmpdir: str):
return [("path", {"artifact_key": "value", "length": 0})]
def build_revision(
self, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git
):
return Revision(
id=REVISION_ID,
message=b"",
author=Person.from_fullname(b""),
committer=Person.from_fullname(b""),
date=None,
committer_date=None,
type=RevisionType.TAR,
directory=DIRECTORY_ID,
synthetic=False,
)
def get_metadata_authority(self):
return attr.evolve(AUTHORITY, metadata={})
def get_package_info(self, version: str) -> Iterator[Tuple[str, BasePackageInfo]]:
m0 = DIRECTORY_METADATA[0]
m1 = DIRECTORY_METADATA[1]
p_info = BasePackageInfo(
url=ORIGIN_URL,
filename="archive.tgz",
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(m0.format, m0.metadata, m0.discovery_date),
RawExtrinsicMetadataCore(m1.format, m1.metadata, m1.discovery_date),
],
)
yield (version, p_info)
def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
m = ORIGIN_METADATA[0]
return [RawExtrinsicMetadataCore(m.format, m.metadata, m.discovery_date)]
def test_load_artifact_metadata(swh_storage, caplog):
loader = MetadataTestLoader(swh_storage, ORIGIN_URL)
load_status = loader.load()
assert load_status == {
"status": "eventful",
"snapshot_id": FULL_SNAPSHOT_ID,
}
authority = MetadataAuthority(
type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/",
)
- result = swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, authority,
- )
+ result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, authority,)
assert result.next_page_token is None
assert len(result.results) == 1
assert result.results[0] == RawExtrinsicMetadata(
- type=MetadataTargetType.DIRECTORY,
target=DIRECTORY_SWHID,
discovery_date=result.results[0].discovery_date,
authority=authority,
fetcher=FETCHER,
format="original-artifacts-json",
metadata=b'[{"artifact_key": "value", "length": 0}]',
origin=ORIGIN_URL,
revision=REVISION_SWHID,
)
def test_load_metadata(swh_storage, caplog):
loader = MetadataTestLoader(swh_storage, ORIGIN_URL)
load_status = loader.load()
assert load_status == {
"status": "eventful",
"snapshot_id": FULL_SNAPSHOT_ID,
}
- result = swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, AUTHORITY,
- )
+ result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
assert result.next_page_token is None
assert result.results == DIRECTORY_METADATA
- result = swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.ORIGIN, ORIGIN_URL, AUTHORITY,
- )
+ result = swh_storage.raw_extrinsic_metadata_get(ORIGIN_SWHID, AUTHORITY,)
assert result.next_page_token is None
assert result.results == ORIGIN_METADATA
assert caplog.text == ""
def test_existing_authority(swh_storage, caplog):
loader = MetadataTestLoader(swh_storage, ORIGIN_URL)
load_status = loader.load()
assert load_status == {
"status": "eventful",
"snapshot_id": FULL_SNAPSHOT_ID,
}
- result = swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, AUTHORITY,
- )
+ result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
assert result.next_page_token is None
assert result.results == DIRECTORY_METADATA
assert caplog.text == ""
def test_existing_fetcher(swh_storage, caplog):
loader = MetadataTestLoader(swh_storage, ORIGIN_URL)
load_status = loader.load()
assert load_status == {
"status": "eventful",
"snapshot_id": FULL_SNAPSHOT_ID,
}
- result = swh_storage.raw_extrinsic_metadata_get(
- MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, AUTHORITY,
- )
+ result = swh_storage.raw_extrinsic_metadata_get(DIRECTORY_SWHID, AUTHORITY,)
assert result.next_page_token is None
assert result.results == DIRECTORY_METADATA
assert caplog.text == ""
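# Note on the migration this hunk reflects: with swh.model >= 1.0.0,
# RawExtrinsicMetadata no longer takes a MetadataTargetType; the target itself
# is a SWHID (ExtendedSWHID also covers origins), and
# raw_extrinsic_metadata_get() takes only (target, authority). A minimal sketch
# of the new target construction, using the classes imported above:
from swh.model.hashutil import hash_to_bytes
from swh.model.identifiers import CoreSWHID, ObjectType
from swh.model.model import Origin

origin_target = Origin("http://example.org/archive.tgz").swhid()  # swh:1:ori:...
revision_target = CoreSWHID(
    object_type=ObjectType.REVISION,
    object_id=hash_to_bytes("8ff44f081d43176474b267de5451f2c2e88089d0"),
)
# e.g. swh_storage.raw_extrinsic_metadata_get(origin_target, AUTHORITY)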
diff --git a/swh/loader/tests/test_cli.py b/swh/loader/tests/test_cli.py
index b584deb..638fb99 100644
--- a/swh/loader/tests/test_cli.py
+++ b/swh/loader/tests/test_cli.py
@@ -1,148 +1,149 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import os
from click.formatting import HelpFormatter
from click.testing import CliRunner
import pytest
import yaml
from swh.loader.cli import SUPPORTED_LOADERS, get_loader
from swh.loader.cli import loader as loader_cli
from swh.loader.package.loader import PackageLoader
def test_get_loader_wrong_input(swh_config):
"""Unsupported loader should raise
"""
loader_type = "unknown"
assert loader_type not in SUPPORTED_LOADERS
with pytest.raises(ValueError, match="Invalid loader"):
get_loader(loader_type, url="db-url")
def test_get_loader(swh_loader_config):
"""Instantiating a supported loader should be ok
"""
loader_input = {
"archive": {"url": "some-url", "artifacts": []},
"debian": {"url": "some-url", "date": "something", "packages": [],},
"npm": {"url": "https://www.npmjs.com/package/onepackage",},
"pypi": {"url": "some-url",},
}
for loader_type, kwargs in loader_input.items():
kwargs["storage"] = swh_loader_config["storage"]
loader = get_loader(loader_type, **kwargs)
assert isinstance(loader, PackageLoader)
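# get_loader() instantiates a loader from its short name plus keyword
# arguments; the mapping above lists per-loader arguments. A standalone usage
# sketch (the URL and storage configuration are illustrative values only):
from swh.loader.cli import get_loader

pypi_loader = get_loader(
    "pypi",
    url="https://pypi.org/project/example",
    storage={"cls": "memory"},
)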
def _write_usage(command, args, max_width=80):
hf = HelpFormatter(width=max_width)
hf.write_usage(command, args)
return hf.getvalue()[:-1]
def test_run_help(swh_config):
"""Help message should be ok
"""
runner = CliRunner()
result = runner.invoke(loader_cli, ["run", "-h"])
assert result.exit_code == 0
usage_prefix = _write_usage(
"loader", f"run [OPTIONS] [{'|'.join(SUPPORTED_LOADERS)}]\n"
)
assert result.output.startswith(usage_prefix)
def test_run_with_configuration_failure(tmp_path):
"""Triggering a load should fail since configuration is incomplete
"""
runner = CliRunner()
conf_path = os.path.join(str(tmp_path), "cli.yml")
with open(conf_path, "w") as f:
f.write(yaml.dump({}))
with pytest.raises(ValueError, match="Missing storage"):
runner.invoke(
- loader_cli, ["-C", conf_path, "run", "pypi", "url=https://some-url",],
- catch_exceptions=False
+ loader_cli,
+ ["-C", conf_path, "run", "pypi", "url=https://some-url",],
+ catch_exceptions=False,
)
def test_run_pypi(mocker, swh_config):
"""Triggering a load should be ok
"""
mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load")
runner = CliRunner()
result = runner.invoke(
loader_cli, ["-C", swh_config, "run", "pypi", "url=https://some-url",]
)
assert result.exit_code == 0
mock_loader.assert_called_once_with()
def test_run_with_visit_date(mocker, swh_config):
"""iso visit_date parameter should be parsed as datetime
"""
mock_loader = mocker.patch("swh.loader.cli.get_loader")
runner = CliRunner()
input_date = "2016-05-03 15:16:32+00"
result = runner.invoke(
loader_cli, ["run", "npm", "https://some-url", f"visit_date='{input_date}'"]
)
assert result.exit_code == 0
expected_parsed_date = datetime.datetime(
2016, 5, 3, 15, 16, 32, tzinfo=datetime.timezone.utc
)
mock_loader.assert_called_once_with(
"npm",
storage={"cls": "memory"},
url="https://some-url",
visit_date=expected_parsed_date,
)
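# The test above implies the CLI converts the "visit_date=..." key/value
# argument into a timezone-aware datetime. A minimal sketch of such a
# conversion using the iso8601 package (an assumption about the parsing
# backend, not a claim about swh.loader.cli's exact code):
import datetime
import iso8601

def parse_visit_date(value: str) -> datetime.datetime:
    """Parse an ISO-8601 date such as '2016-05-03 15:16:32+00'."""
    return iso8601.parse_date(value)

assert parse_visit_date("2016-05-03 15:16:32+00") == datetime.datetime(
    2016, 5, 3, 15, 16, 32, tzinfo=datetime.timezone.utc
)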
def test_list_help(mocker, swh_config):
"""Triggering a load should be ok
"""
runner = CliRunner()
result = runner.invoke(loader_cli, ["list", "--help"])
assert result.exit_code == 0
usage_prefix = _write_usage("loader", "list [OPTIONS]\n")
expected_help_msg = f"""{usage_prefix}
[[{'|'.join(['all'] + SUPPORTED_LOADERS)}]]
List supported loaders and optionally their arguments
Options:
-h, --help Show this message and exit.
"""
assert result.output.startswith(expected_help_msg)
def test_list_help_npm(mocker, swh_config):
"""Triggering a load should be ok
"""
runner = CliRunner()
result = runner.invoke(loader_cli, ["list", "npm"])
assert result.exit_code == 0
expected_help_msg = """
Loader: Load npm origin's artifact releases into swh archive.
"""
assert result.output.startswith(expected_help_msg[1:])
