
diff --git a/requirements-swh.txt b/requirements-swh.txt
index 30b3fcc..4f0fc56 100644
--- a/requirements-swh.txt
+++ b/requirements-swh.txt
@@ -1,5 +1,5 @@
swh.core >= 2.12
-swh.model >= 4.4.0
+swh.model >= 6.5.1
swh.objstorage >= 0.2.2
swh.scheduler >= 0.4.0
swh.storage >= 0.29.0
diff --git a/swh/loader/package/arch/loader.py b/swh/loader/package/arch/loader.py
index 7ab9fc2..6753966 100644
--- a/swh/loader/package/arch/loader.py
+++ b/swh/loader/package/arch/loader.py
@@ -1,141 +1,142 @@
# Copyright (C) 2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from distutils.version import LooseVersion
from pathlib import Path
import re
from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple
import attr
from swh.loader.package.loader import BasePackageInfo, PackageLoader
from swh.loader.package.utils import release_name
from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone
from swh.storage.interface import StorageInterface
@attr.s
class ArchPackageInfo(BasePackageInfo):
name = attr.ib(type=str)
"""Name of the package"""
version = attr.ib(type=str)
"""Current version"""
last_modified = attr.ib(type=str)
"""File last modified date as release date"""
def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]:
"""Extract intrinsic metadata from .PKGINFO file at dir_path.
Each Arch Linux package has a .PKGINFO file at the root of the archive.
Args:
dir_path: A directory on disk where a package has been extracted
Returns:
A dict mapping .PKGINFO keys (lowercased) to their string values
"""
pkginfo_path = Path(dir_path, ".PKGINFO")
rex = re.compile(r"^(\w+)\s=\s(.*)$", re.M)
with pkginfo_path.open("rb") as content:
parsed = rex.findall(content.read().decode())
data = {entry[0].lower(): entry[1] for entry in parsed}
if "url" in data.keys():
data["project_url"] = data["url"]
return data
class ArchLoader(PackageLoader[ArchPackageInfo]):
visit_type = "arch"
def __init__(
self,
storage: StorageInterface,
url: str,
artifacts: List[Dict[str, Any]],
arch_metadata: List[Dict[str, Any]],
**kwargs,
):
super().__init__(storage=storage, url=url, **kwargs)
self.url = url
self.artifacts: Dict[str, Dict] = {
artifact["version"]: artifact for artifact in artifacts
}
self.arch_metadata: Dict[str, Dict] = {
metadata["version"]: metadata for metadata in arch_metadata
}
def get_versions(self) -> Sequence[str]:
"""Get all released versions of an Arch Linux package
Returns:
A sequence of versions
Example::
["0.1.1", "0.10.2"]
"""
versions = list(self.artifacts.keys())
versions.sort(key=LooseVersion)
return versions
def get_default_version(self) -> str:
"""Get the newest release version of an Arch Linux package
Returns:
A string representing a version
Example::
"0.1.2"
"""
return self.get_versions()[-1]
def get_package_info(self, version: str) -> Iterator[Tuple[str, ArchPackageInfo]]:
"""Get release name and package information from version
Args:
version: arch version (e.g: "0.1.0")
Returns:
Iterator of tuple (release_name, p_info)
"""
artifact = self.artifacts[version]
metadata = self.arch_metadata[version]
assert version == artifact["version"] == metadata["version"]
p_info = ArchPackageInfo(
name=metadata["name"],
filename=artifact["filename"],
url=artifact["url"],
version=version,
last_modified=metadata["last_modified"],
+ checksums=artifact["checksums"],
)
yield release_name(version, artifact["filename"]), p_info
def build_release(
self, p_info: ArchPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
intrinsic_metadata = extract_intrinsic_metadata(Path(uncompressed_path))
author = Person.from_fullname(intrinsic_metadata["packager"].encode())
description = intrinsic_metadata["pkgdesc"]
message = (
f"Synthetic release for Arch Linux source package {p_info.name} "
f"version {p_info.version}\n\n"
f"{description}\n"
)
return Release(
name=p_info.version.encode(),
author=author,
date=TimestampWithTimezone.from_iso8601(p_info.last_modified),
message=message.encode(),
target_type=ObjectType.DIRECTORY,
target=directory,
synthetic=True,
)
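The loader change above is small but central to this diff: the checksums that the lister already collects for each artifact are now carried on ArchPackageInfo and handed to the download step, which verifies the fetched tarball against them (see the hashes argument added to download() in swh/loader/package/loader.py further down). As a rough sketch of what that verification amounts to, using only the standard library; verify_checksums is an illustrative helper, not part of this diff:

import hashlib

def verify_checksums(path: str, checksums: dict) -> bool:
    # checksums follows the shape used by the lister and the tests below,
    # e.g. {"length": 440, "md5": "...", "sha256": "..."}
    with open(path, "rb") as f:
        data = f.read()
    for algo, expected in checksums.items():
        if algo == "length":
            if len(data) != expected:
                return False
        elif hashlib.new(algo, data).hexdigest() != expected:
            return False
    return True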
diff --git a/swh/loader/package/arch/tests/test_arch.py b/swh/loader/package/arch/tests/test_arch.py
index 3180f9d..e061ce3 100644
--- a/swh/loader/package/arch/tests/test_arch.py
+++ b/swh/loader/package/arch/tests/test_arch.py
@@ -1,253 +1,271 @@
# Copyright (C) 2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
+
+# flake8: noqa: B950
+
import pytest
from swh.loader.package.arch.loader import ArchLoader
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
ObjectType,
Person,
Release,
Snapshot,
SnapshotBranch,
TargetType,
TimestampWithTimezone,
)
EXPECTED_PACKAGES = [
{
"url": "https://archive.archlinux.org/packages/d/dialog/",
"artifacts": [
{
- "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz", # noqa: B950
+ "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz",
"version": "1:1.3_20190211-1",
- "length": 180000,
+ "length": 440,
"filename": "dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz",
+ "checksums": {
+ "length": 440,
+ "md5": "ce66c053ded0d51e5610368d85242684",
+ "sha256": "27c6a7af005cd2214fd63f7498bf51e3bff332df33a9b8f7ed07934823f7ba43",
+ },
},
{
- "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst", # noqa: B950
+ "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst",
"version": "1:1.3_20220414-1",
- "length": 198000,
+ "length": 371,
"filename": "dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst",
+ "checksums": {
+ "length": 371,
+ "md5": "5687f6bfc3b6975fdd073deb7075ec09",
+ "sha256": "b002d18d1e1f356410f73b08170f0bd52f0d83b37b71ccd938594e7d486c4e8a",
+ },
},
],
"arch_metadata": [
{
"arch": "x86_64",
"repo": "core",
"name": "dialog",
"version": "1:1.3_20190211-1",
"last_modified": "2019-02-13T08:36:00",
},
{
"arch": "x86_64",
"repo": "core",
"name": "dialog",
"version": "1:1.3_20220414-1",
"last_modified": "2022-04-16T03:59:00",
},
],
},
{
"url": "https://archlinuxarm.org/packages/aarch64/gzip",
"artifacts": [
{
- "url": "https://uk.mirror.archlinuxarm.org/aarch64/core/gzip-1.12-1-aarch64.pkg.tar.xz", # noqa: B950
- "length": 79640,
+ "url": "https://uk.mirror.archlinuxarm.org/aarch64/core/gzip-1.12-1-aarch64.pkg.tar.xz",
+ "length": 472,
"version": "1.12-1",
"filename": "gzip-1.12-1-aarch64.pkg.tar.xz",
+ "checksums": {
+ "length": 472,
+ "md5": "0b96fa72ae35c097ec78132ed2f05a57",
+ "sha256": "8d45b871283e2c37513833f6327ebcdd96c6c3b335588945f873cb809b1e6d2b",
+ },
}
],
"arch_metadata": [
{
"arch": "aarch64",
"name": "gzip",
"repo": "core",
"version": "1.12-1",
"last_modified": "2022-04-07T21:08:14",
}
],
},
]
def test_get_versions(swh_storage):
loader = ArchLoader(
swh_storage,
url=EXPECTED_PACKAGES[0]["url"],
artifacts=EXPECTED_PACKAGES[0]["artifacts"],
arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"],
)
assert loader.get_versions() == [
"1:1.3_20190211-1",
"1:1.3_20220414-1",
]
def test_get_default_version(requests_mock_datadir, swh_storage):
loader = ArchLoader(
swh_storage,
url=EXPECTED_PACKAGES[0]["url"],
artifacts=EXPECTED_PACKAGES[0]["artifacts"],
arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"],
)
assert loader.get_default_version() == "1:1.3_20220414-1"
def test_arch_loader_load_one_version(datadir, requests_mock_datadir, swh_storage):
loader = ArchLoader(
swh_storage,
url=EXPECTED_PACKAGES[1]["url"],
artifacts=EXPECTED_PACKAGES[1]["artifacts"],
arch_metadata=EXPECTED_PACKAGES[1]["arch_metadata"],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_snapshot_id = "4020d0a278027550e336b5481a4159a913c91aa4"
expected_release_id = "7681098c9e381f9cc8bd1724d57eeee2182982dc"
assert expected_snapshot_id == actual_load_status["snapshot_id"]
expected_snapshot = Snapshot(
id=hash_to_bytes(actual_load_status["snapshot_id"]),
branches={
b"releases/1.12-1/gzip-1.12-1-aarch64.pkg.tar.xz": SnapshotBranch(
target=hash_to_bytes(expected_release_id),
target_type=TargetType.RELEASE,
),
b"HEAD": SnapshotBranch(
target=b"releases/1.12-1/gzip-1.12-1-aarch64.pkg.tar.xz",
target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 1,
"directory": 1,
"origin": 1,
"origin_visit": 1,
"release": 1,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] == Release(
name=b"1.12-1",
message=b"Synthetic release for Arch Linux source package gzip version "
b"1.12-1\n\nGNU compression utility\n",
target=hash_to_bytes("bd742aaf422953a1f7a5e084ec4a7477491d63fb"),
target_type=ObjectType.DIRECTORY,
synthetic=True,
author=Person.from_fullname(
b"Arch Linux ARM Build System <builder+seattle@archlinuxarm.org>"
),
date=TimestampWithTimezone.from_iso8601("2022-04-07T21:08:14+00:00"),
id=hash_to_bytes(expected_release_id),
)
assert_last_visit_matches(
swh_storage,
url=EXPECTED_PACKAGES[1]["url"],
status="full",
type="arch",
snapshot=expected_snapshot.id,
)
def test_arch_loader_load_n_versions(datadir, requests_mock_datadir, swh_storage):
loader = ArchLoader(
swh_storage,
url=EXPECTED_PACKAGES[0]["url"],
artifacts=EXPECTED_PACKAGES[0]["artifacts"],
arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
expected_snapshot_id = "832139d69a91edffcc3a96cca11deaf9255041c3"
assert expected_snapshot_id == actual_load_status["snapshot_id"]
expected_snapshot = Snapshot(
id=hash_to_bytes(actual_load_status["snapshot_id"]),
branches={
b"releases/1:1.3_20190211-1/"
b"dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz": SnapshotBranch(
target=hash_to_bytes("37efb727ff8bb8fbf92518aa8fe5fff2ad427d06"),
target_type=TargetType.RELEASE,
),
b"releases/1:1.3_20220414-1/"
b"dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst": SnapshotBranch(
target=hash_to_bytes("020d3f5627df7474f257fd04f1ede4415296e265"),
target_type=TargetType.RELEASE,
),
b"HEAD": SnapshotBranch(
target=b"releases/1:1.3_20220414-1/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst",
target_type=TargetType.ALIAS,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 2,
"directory": 2,
"origin": 1,
"origin_visit": 1,
"release": 2,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert_last_visit_matches(
swh_storage,
url=EXPECTED_PACKAGES[0]["url"],
status="full",
type="arch",
snapshot=expected_snapshot.id,
)
def test_arch_invalid_origin_archive_not_found(swh_storage, requests_mock_datadir):
url = "https://nowhere/packages/42"
loader = ArchLoader(
swh_storage,
url,
artifacts=[
{
"filename": "42-0.0.1.pkg.xz",
"url": "https://mirror2.nowhere/pkg/42-0.0.1.pkg.xz",
"version": "0.0.1",
"length": 42,
},
],
arch_metadata=[
{
"version": "0.0.1",
"arch": "aarch64",
"name": "42",
"repo": "community",
"last_modified": "2022-04-07T21:08:14",
},
],
)
with pytest.raises(Exception):
assert loader.load() == {"status": "failed"}
assert_last_visit_matches(
swh_storage, url, status="not_found", type="arch", snapshot=None
)
diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py
index b96cad6..feef63b 100644
--- a/swh/loader/package/archive/loader.py
+++ b/swh/loader/package/archive/loader.py
@@ -1,168 +1,171 @@
# Copyright (C) 2019-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
+from __future__ import annotations
+
import datetime
import hashlib
import logging
from os import path
import string
from typing import Any, Dict, Iterator, Mapping, Optional, Sequence, Tuple, Union
import attr
import iso8601
from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID
from swh.loader.package.utils import EMPTY_AUTHOR, release_name
from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone
from swh.storage.interface import StorageInterface
logger = logging.getLogger(__name__)
@attr.s
class ArchivePackageInfo(BasePackageInfo):
raw_info = attr.ib(type=Dict[str, Any])
length = attr.ib(type=int)
"""Size of the archive file"""
time = attr.ib(type=Union[str, datetime.datetime])
"""Timestamp of the archive file on the server"""
# default format for gnu
MANIFEST_FORMAT = string.Template("$time $length $version $url")
def extid(self, manifest_format: Optional[string.Template] = None) -> PartialExtID:
"""Returns a unique intrinsic identifier of this package info
``manifest_format`` allows overriding the class' default MANIFEST_FORMAT"""
manifest_format = manifest_format or self.MANIFEST_FORMAT
# TODO: use parsed attributes instead of self.raw_info
manifest = manifest_format.substitute(
{k: str(v) for (k, v) in self.raw_info.items()}
)
return (
self.EXTID_TYPE,
self.EXTID_VERSION,
hashlib.sha256(manifest.encode()).digest(),
)
@classmethod
- def from_metadata(cls, a_metadata: Dict[str, Any]) -> "ArchivePackageInfo":
+ def from_metadata(cls, a_metadata: Dict[str, Any]) -> ArchivePackageInfo:
url = a_metadata["url"]
filename = a_metadata.get("filename")
return cls(
url=url,
filename=filename if filename else path.split(url)[-1],
raw_info=a_metadata,
length=a_metadata["length"],
time=a_metadata["time"],
version=a_metadata["version"],
+ checksums={"length": a_metadata["length"]},
)
class ArchiveLoader(PackageLoader[ArchivePackageInfo]):
"""Load archive origin's artifact files into swh archive"""
visit_type = "tar"
def __init__(
self,
storage: StorageInterface,
url: str,
artifacts: Sequence[Dict[str, Any]],
extid_manifest_format: Optional[str] = None,
snapshot_append: bool = False,
**kwargs: Any,
):
f"""Loader constructor.
For now, this is the lister's task output.
Args:
url: Origin url
artifacts: List of artifact information with keys:
- **time**: last modification time as either isoformat date
string or timestamp
- **url**: the artifact url to retrieve filename
- **filename**: optionally, the file's name
- **version**: artifact's version
- **length**: artifact's length
extid_manifest_format: template string used to format a manifest,
which is hashed to get the extid of a package.
Defaults to {ArchivePackageInfo.MANIFEST_FORMAT!r}
snapshot_append: if :const:`True`, append latest snapshot content to
the new snapshot created by the loader
"""
super().__init__(storage=storage, url=url, **kwargs)
self.artifacts = artifacts # assume order is enforced in the lister
self.extid_manifest_format = (
None
if extid_manifest_format is None
else string.Template(extid_manifest_format)
)
self.snapshot_append = snapshot_append
def get_versions(self) -> Sequence[str]:
versions = []
for archive in self.artifacts:
v = archive.get("version")
if v:
versions.append(v)
return versions
def get_default_version(self) -> str:
# It's the most recent, so for this loader, it's the last one
return self.artifacts[-1]["version"]
def get_package_info(
self, version: str
) -> Iterator[Tuple[str, ArchivePackageInfo]]:
for a_metadata in self.artifacts:
p_info = ArchivePackageInfo.from_metadata(a_metadata)
if version == p_info.version:
# FIXME: this code assumes we have only 1 artifact per
# versioned package
yield release_name(version), p_info
def new_packageinfo_to_extid(
self, p_info: ArchivePackageInfo
) -> Optional[PartialExtID]:
return p_info.extid(manifest_format=self.extid_manifest_format)
def build_release(
self, p_info: ArchivePackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
time = p_info.time # assume it's a timestamp
if isinstance(time, str): # otherwise, assume it's a parsable date
parsed_time = iso8601.parse_date(time)
else:
parsed_time = time
normalized_time = (
TimestampWithTimezone.from_datetime(parsed_time)
if parsed_time is not None
else None
)
msg = f"Synthetic release for archive at {p_info.url}\n"
return Release(
name=p_info.version.encode(),
message=msg.encode(),
date=normalized_time,
author=EMPTY_AUTHOR,
target=directory,
target_type=ObjectType.DIRECTORY,
synthetic=True,
)
def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
if not self.snapshot_append:
return {}
last_snapshot = self.last_snapshot()
return last_snapshot.to_dict()["branches"] if last_snapshot else {}
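The extid machinery above is what lets the archive loader skip artifacts it has already ingested: ArchivePackageInfo.extid renders MANIFEST_FORMAT ("$time $length $version $url") with the raw artifact metadata and hashes the result with sha256. A standalone sketch of that computation, using the first GNU artifact from the tests below as sample input:

import hashlib
import string

artifact = {
    "time": 944729610,
    "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
    "length": 221837,
    "version": "0.1.0",
}
# Same template as ArchivePackageInfo.MANIFEST_FORMAT
manifest_format = string.Template("$time $length $version $url")
manifest = manifest_format.substitute({k: str(v) for k, v in artifact.items()})
# (extid_type, extid_version, extid) as checked in test_archive_extid below
extid = ("package-manifest-sha256", 0, hashlib.sha256(manifest.encode()).digest())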
diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py
index a000d3d..1aaefae 100644
--- a/swh/loader/package/archive/tests/test_archive.py
+++ b/swh/loader/package/archive/tests/test_archive.py
@@ -1,625 +1,625 @@
# Copyright (C) 2019-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import copy
import datetime
import hashlib
from io import BytesIO
from pathlib import Path
import string
import attr
import pytest
from requests.exceptions import ContentDecodingError
from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.model.model import (
ObjectType,
Person,
Release,
Snapshot,
SnapshotBranch,
TargetType,
TimestampWithTimezone,
)
URL = "https://ftp.gnu.org/gnu/8sync/"
GNU_ARTIFACTS = [
{
"time": 944729610,
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
"length": 221837,
"filename": "8sync-0.1.0.tar.gz",
"version": "0.1.0",
},
{
"time": 1480991830,
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
"length": 238466,
"filename": "8sync-0.2.0.tar.gz",
"version": "0.2.0",
},
]
_expected_new_contents_first_visit = [
"e9258d81faf5881a2f96a77ba609396f82cb97ad",
"1170cf105b04b7e2822a0e09d2acf71da7b9a130",
"fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac",
"0057bec9b5422aff9256af240b177ac0e3ac2608",
"2b8d0d0b43a1078fc708930c8ddc2956a86c566e",
"27de3b3bc6545d2a797aeeb4657c0e215a0c2e55",
"2e6db43f5cd764e677f416ff0d0c78c7a82ef19b",
"ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62",
"edeb33282b2bffa0e608e9d2fd960fd08093c0ea",
"d64e64d4c73679323f8d4cde2643331ba6c20af9",
"7a756602914be889c0a2d3952c710144b3e64cb0",
"84fb589b554fcb7f32b806951dcf19518d67b08f",
"8624bcdae55baeef00cd11d5dfcfa60f68710a02",
"e08441aeab02704cfbd435d6445f7c072f8f524e",
"f67935bc3a83a67259cda4b2d43373bd56703844",
"809788434b433eb2e3cfabd5d591c9a659d5e3d8",
"7d7c6c8c5ebaeff879f61f37083a3854184f6c41",
"b99fec102eb24bffd53ab61fc30d59e810f116a2",
"7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68",
"f0c97052e567948adf03e641301e9983c478ccff",
"7fb724242e2b62b85ca64190c31dcae5303e19b3",
"4f9709e64a9134fe8aefb36fd827b84d8b617ab5",
"7350628ccf194c2c3afba4ac588c33e3f3ac778d",
"0bb892d9391aa706dc2c3b1906567df43cbe06a2",
"49d4c0ce1a16601f1e265d446b6c5ea6b512f27c",
"6b5cc594ac466351450f7f64a0b79fdaf4435ad3",
"3046e5d1f70297e2a507b98224b6222c9688d610",
"1572607d456d7f633bc6065a2b3048496d679a31",
]
_expected_new_directories_first_visit = [
"daabc65ec75d487b1335ffc101c0ac11c803f8fc",
"263be23b4a8101d3ad0d9831319a3e0f2b065f36",
"7f6e63ba6eb3e2236f65892cd822041f1a01dd5c",
"4db0a3ecbc976083e2dac01a62f93729698429a3",
"dfef1c80e1098dd5deda664bb44a9ab1f738af13",
"eca971d346ea54d95a6e19d5051f900237fafdaa",
"3aebc29ed1fccc4a6f2f2010fb8e57882406b528",
]
_expected_new_releases_first_visit = {
"c92b2ad9e70ef1dce455e8fe1d8e41b92512cc08": (
"3aebc29ed1fccc4a6f2f2010fb8e57882406b528"
)
}
@pytest.fixture(autouse=True, scope="function")
def lower_sample_rate(mocker):
"""Lower the number of entries per discovery sample so the minimum threshold
for discovery is hit in tests without creating huge test data"""
mocker.patch("swh.loader.package.loader.discovery.SAMPLE_SIZE", 1)
def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir):
url = URL
unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz"
loader = ArchiveLoader(
swh_storage,
url,
artifacts=[
{
"time": 944729610,
"url": unknown_artifact_url, # unknown artifact
"length": 221837,
"filename": "8sync-0.1.0.tar.gz",
"version": "0.1.0",
}
],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
stats = get_stats(swh_storage)
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert_last_visit_matches(swh_storage, url, status="partial", type="tar")
def test_archive_visit_with_skipped_content(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project and set the max content size
to something low to check that the loader skips "big" content."""
loader = ArchiveLoader(
swh_storage, URL, artifacts=GNU_ARTIFACTS[:1], max_content_size=10 * 1024
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_snapshot_first_visit_id = hash_to_bytes(
"9efecc835e8f99254934f256b5301b94f348fd17"
)
assert actual_load_status["snapshot_id"] == hash_to_hex(
expected_snapshot_first_visit_id
)
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
_expected_new_non_skipped_contents_first_visit = [
"ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62",
"809788434b433eb2e3cfabd5d591c9a659d5e3d8",
"1572607d456d7f633bc6065a2b3048496d679a31",
"27de3b3bc6545d2a797aeeb4657c0e215a0c2e55",
"fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac",
"4f9709e64a9134fe8aefb36fd827b84d8b617ab5",
"84fb589b554fcb7f32b806951dcf19518d67b08f",
"3046e5d1f70297e2a507b98224b6222c9688d610",
"e08441aeab02704cfbd435d6445f7c072f8f524e",
"49d4c0ce1a16601f1e265d446b6c5ea6b512f27c",
"7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68",
"f0c97052e567948adf03e641301e9983c478ccff",
"2e6db43f5cd764e677f416ff0d0c78c7a82ef19b",
"e9258d81faf5881a2f96a77ba609396f82cb97ad",
"7350628ccf194c2c3afba4ac588c33e3f3ac778d",
"0057bec9b5422aff9256af240b177ac0e3ac2608",
"6b5cc594ac466351450f7f64a0b79fdaf4435ad3",
]
_expected_new_skipped_contents_first_visit = [
"1170cf105b04b7e2822a0e09d2acf71da7b9a130",
"2b8d0d0b43a1078fc708930c8ddc2956a86c566e",
"edeb33282b2bffa0e608e9d2fd960fd08093c0ea",
"d64e64d4c73679323f8d4cde2643331ba6c20af9",
"7a756602914be889c0a2d3952c710144b3e64cb0",
"8624bcdae55baeef00cd11d5dfcfa60f68710a02",
"f67935bc3a83a67259cda4b2d43373bd56703844",
"7d7c6c8c5ebaeff879f61f37083a3854184f6c41",
"b99fec102eb24bffd53ab61fc30d59e810f116a2",
"7fb724242e2b62b85ca64190c31dcae5303e19b3",
"0bb892d9391aa706dc2c3b1906567df43cbe06a2",
]
# Check that the union of both sets make up the original set (without skipping)
union = set(_expected_new_non_skipped_contents_first_visit) | set(
_expected_new_skipped_contents_first_visit
)
assert union == set(_expected_new_contents_first_visit)
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_non_skipped_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": len(_expected_new_releases_first_visit),
"revision": 0,
"skipped_content": len(_expected_new_skipped_contents_first_visit),
"snapshot": 1,
} == stats
release_id = hash_to_bytes(list(_expected_new_releases_first_visit)[0])
expected_snapshot = Snapshot(
id=expected_snapshot_first_visit_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS,
target=b"releases/0.1.0",
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.RELEASE,
target=release_id,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert swh_storage.release_get([release_id])[0] == Release(
id=release_id,
name=b"0.1.0",
message=(
b"Synthetic release for archive at "
b"https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz\n"
),
target=hash_to_bytes("3aebc29ed1fccc4a6f2f2010fb8e57882406b528"),
target_type=ObjectType.DIRECTORY,
synthetic=True,
author=Person.from_fullname(b""),
date=TimestampWithTimezone.from_datetime(
datetime.datetime(1999, 12, 9, 8, 53, 30, tzinfo=datetime.timezone.utc)
),
)
expected_contents = map(
hash_to_bytes, _expected_new_non_skipped_contents_first_visit
)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
assert list(swh_storage.directory_missing(expected_dirs)) == []
expected_rels = map(hash_to_bytes, _expected_new_releases_first_visit)
assert list(swh_storage.release_missing(expected_rels)) == []
def test_archive_visit_with_release_artifact_no_prior_visit(
swh_storage, requests_mock_datadir
):
"""With no prior visit, load a gnu project ends up with 1 snapshot"""
loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS[:1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_snapshot_first_visit_id = hash_to_bytes(
"9efecc835e8f99254934f256b5301b94f348fd17"
)
assert actual_load_status["snapshot_id"] == hash_to_hex(
expected_snapshot_first_visit_id
)
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": len(_expected_new_releases_first_visit),
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
release_id = hash_to_bytes(list(_expected_new_releases_first_visit)[0])
expected_snapshot = Snapshot(
id=expected_snapshot_first_visit_id,
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS,
target=b"releases/0.1.0",
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.RELEASE,
target=release_id,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert swh_storage.release_get([release_id])[0] == Release(
id=release_id,
name=b"0.1.0",
message=(
b"Synthetic release for archive at "
b"https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz\n"
),
target=hash_to_bytes("3aebc29ed1fccc4a6f2f2010fb8e57882406b528"),
target_type=ObjectType.DIRECTORY,
synthetic=True,
author=Person.from_fullname(b""),
date=TimestampWithTimezone.from_datetime(
datetime.datetime(1999, 12, 9, 8, 53, 30, tzinfo=datetime.timezone.utc)
),
)
expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
assert list(swh_storage.directory_missing(expected_dirs)) == []
expected_rels = map(hash_to_bytes, _expected_new_releases_first_visit)
assert list(swh_storage.release_missing(expected_rels)) == []
def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project ends up with 1 snapshot"""
url = URL
loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS[:1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
assert actual_load_status2["snapshot_id"] is not None
assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir):
"""With no prior visit, load a gnu project ends up with 1 snapshot"""
url = URL
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, url, [artifact1])
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
stats = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit),
"directory": len(_expected_new_directories_first_visit),
"origin": 1,
"origin_visit": 1,
"release": len(_expected_new_releases_first_visit),
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
artifact2 = GNU_ARTIFACTS[1]
loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2])
stats2 = get_stats(swh_storage)
assert stats == stats2 # ensure we share the storage
actual_load_status2 = loader2.load()
assert actual_load_status2["status"] == "eventful"
assert actual_load_status2["snapshot_id"] is not None
stats2 = get_stats(swh_storage)
assert {
"content": len(_expected_new_contents_first_visit) + 14,
"directory": len(_expected_new_directories_first_visit) + 8,
"origin": 1,
"origin_visit": 1 + 1,
"release": len(_expected_new_releases_first_visit) + 1,
"revision": 0,
"skipped_content": 0,
"snapshot": 1 + 1,
} == stats2
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
# 1 artifact (2nd time no modification) + 1 new artifact
assert len(urls) == 2
def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir):
"""Load a project archive (not gnu) ends up with 1 snapshot"""
url = "https://something.else.org/8sync/"
artifacts = [ # this is not a gnu artifact
{
"time": "1999-12-09T09:53:30+00:00", # it's also not a timestamp
"sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4", # noqa
# keep a gnu artifact reference to avoid adding other test files
"url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz",
"length": 238466,
"filename": "8sync-0.2.0.tar.gz",
"version": "0.2.0",
}
]
# Here the loader defines the extid manifest format used to check for existence
# in the snapshot; it is not the archive loader's default format.
loader = ArchiveLoader(
swh_storage,
url,
artifacts=artifacts,
extid_manifest_format="$sha256 $length $url",
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"]
assert_last_visit_matches(swh_storage, url, status="full", type="tar")
urls = [
m.url
for m in requests_mock_datadir.request_history
if m.url.startswith("https://ftp.gnu.org")
]
assert len(urls) == 1
def test_archive_extid():
"""Compute primary key should return the right identity"""
@attr.s
class TestPackageInfo(ArchivePackageInfo):
a = attr.ib()
b = attr.ib()
metadata = GNU_ARTIFACTS[0]
p_info = TestPackageInfo(
raw_info={**metadata, "a": 1, "b": 2},
a=1,
b=2,
**metadata,
)
for manifest_format, expected_manifest in [
(string.Template("$a $b"), b"1 2"),
(string.Template(""), b""),
(None, "{time} {length} {version} {url}".format(**metadata).encode()),
]:
actual_id = p_info.extid(manifest_format=manifest_format)
assert actual_id == (
"package-manifest-sha256",
0,
hashlib.sha256(expected_manifest).digest(),
)
with pytest.raises(KeyError):
p_info.extid(manifest_format=string.Template("$a $unknown_key"))
def test_archive_snapshot_append(swh_storage, requests_mock_datadir):
# first loading with a first artifact
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
branch_artifact1_name = f"releases/{artifact1['version']}".encode()
assert b"HEAD" in snapshot.branches
assert branch_artifact1_name in snapshot.branches
assert snapshot.branches[b"HEAD"].target == branch_artifact1_name
# second loading with a second artifact
artifact2 = GNU_ARTIFACTS[1]
loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot, should contain a new branch and the
# branch for the first artifact
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 3
branch_artifact2_name = f"releases/{artifact2['version']}".encode()
assert b"HEAD" in snapshot.branches
assert branch_artifact2_name in snapshot.branches
assert branch_artifact1_name in snapshot.branches
assert snapshot.branches[b"HEAD"].target == branch_artifact2_name
def test_archive_snapshot_append_branch_override(swh_storage, requests_mock_datadir):
# first loading for a first artifact
artifact1 = GNU_ARTIFACTS[0]
loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
branch_artifact1_name = f"releases/{artifact1['version']}".encode()
assert branch_artifact1_name in snapshot.branches
branch_target_first_visit = snapshot.branches[branch_artifact1_name].target
# second loading for a second artifact with same version as the first one
# but with different tarball content
artifact2 = dict(GNU_ARTIFACTS[0])
artifact2["url"] = GNU_ARTIFACTS[1]["url"]
artifact2["time"] = GNU_ARTIFACTS[1]["time"]
artifact2["length"] = GNU_ARTIFACTS[1]["length"]
loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
# check expected snapshot, should contain the same branch as previously
# but with different target
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
assert branch_artifact1_name in snapshot.branches
branch_target_second_visit = snapshot.branches[branch_artifact1_name].target
assert branch_target_first_visit != branch_target_second_visit
@pytest.fixture
def not_gzipped_tarball_bytes(datadir):
return Path(datadir, "not_gzipped_tarball.tar.gz").read_bytes()
def test_archive_not_gzipped_tarball(
swh_storage, requests_mock, not_gzipped_tarball_bytes
):
"""Check that a tarball erroneously marked as gzip compressed can still
be downloaded and processed.
"""
filename = "not_gzipped_tarball.tar.gz"
url = f"https://example.org/ftp/{filename}"
requests_mock.get(
url,
[
{
"exc": ContentDecodingError,
},
{
"body": BytesIO(not_gzipped_tarball_bytes),
},
],
)
loader = ArchiveLoader(
swh_storage,
url,
artifacts=[
{
"time": 944729610,
"url": url,
- "length": 221837,
+ "length": 778240,
"filename": filename,
"version": "0.1.0",
}
],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] is not None
snapshot = loader.last_snapshot()
assert len(snapshot.branches) == 2
assert b"releases/0.1.0" in snapshot.branches
def test_archive_visit_no_time_for_tarball(swh_storage, requests_mock_datadir):
artifacts = copy.deepcopy(GNU_ARTIFACTS)
for artifact in artifacts:
artifact["time"] = None
loader = ArchiveLoader(swh_storage, URL, artifacts=artifacts)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert_last_visit_matches(swh_storage, URL, status="full", type="tar")
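test_archive_not_gzipped_tarball above exercises the loader's fallback for servers that mislabel a plain tar as gzip: the first request fails with ContentDecodingError, and the file is then fetched again with an explicit identity Accept-Encoding (see the download_package change in swh/loader/package/loader.py below). A rough standalone sketch of that retry pattern with requests; fetch_with_fallback is an illustrative name, not part of the codebase:

import requests
from requests.exceptions import ContentDecodingError

def fetch_with_fallback(url: str) -> bytes:
    # First try a normal request, letting requests decode the body.
    try:
        response = requests.get(url)
        response.raise_for_status()
        return response.content
    except ContentDecodingError:
        # The server mislabelled the payload (e.g. a plain tar served as
        # gzip): ask for the identity encoding and keep the raw bytes.
        response = requests.get(url, headers={"Accept-Encoding": "identity"})
        response.raise_for_status()
        return response.content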
diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py
index b7383d6..93997fe 100644
--- a/swh/loader/package/cran/loader.py
+++ b/swh/loader/package/cran/loader.py
@@ -1,175 +1,176 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from datetime import timezone
import logging
import os
from os import path
import re
import string
from typing import Any, Dict, Iterator, List, Optional, Tuple
import attr
import dateutil.parser
from debian.deb822 import Deb822
from swh.loader.package.loader import BasePackageInfo, PackageLoader
from swh.loader.package.utils import release_name
from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone
from swh.storage.interface import StorageInterface
logger = logging.getLogger(__name__)
DATE_PATTERN = re.compile(r"^(?P<year>\d{4})-(?P<month>\d{2})$")
@attr.s
class CRANPackageInfo(BasePackageInfo):
raw_info = attr.ib(type=Dict[str, Any])
name = attr.ib(type=str)
EXTID_TYPE = "cran-sha256"
MANIFEST_FORMAT = string.Template("$version $url")
@classmethod
def from_metadata(cls, a_metadata: Dict[str, Any]) -> "CRANPackageInfo":
url = a_metadata["url"]
return CRANPackageInfo(
url=url,
filename=path.basename(url),
raw_info=a_metadata,
name=a_metadata["package"],
version=a_metadata["version"],
+ checksums=a_metadata.get("checksums", {}),
)
class CRANLoader(PackageLoader[CRANPackageInfo]):
visit_type = "cran"
def __init__(
self, storage: StorageInterface, url: str, artifacts: List[Dict], **kwargs: Any
):
"""Loader constructor.
Args:
url: Origin url to retrieve cran artifact(s) from
artifacts: List of associated artifact for the origin url
"""
super().__init__(storage=storage, url=url, **kwargs)
# make explicit what we consider the artifact identity
self.artifacts = artifacts
def get_versions(self) -> List[str]:
versions = []
for artifact in self.artifacts:
versions.append(artifact["version"])
return versions
def get_default_version(self) -> str:
return self.artifacts[-1]["version"]
def get_package_info(self, version: str) -> Iterator[Tuple[str, CRANPackageInfo]]:
for a_metadata in self.artifacts:
p_info = CRANPackageInfo.from_metadata(a_metadata)
if version == p_info.version:
yield release_name(version), p_info
def build_release(
self, p_info: CRANPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
# a_metadata is empty
metadata = extract_intrinsic_metadata(uncompressed_path)
date = parse_date(metadata.get("Date"))
author = Person.from_fullname(metadata.get("Maintainer", "").encode())
msg = (
f"Synthetic release for CRAN source package {p_info.name} "
f"version {p_info.version}\n"
)
return Release(
name=p_info.version.encode(),
message=msg.encode(),
date=date,
author=author,
target_type=ObjectType.DIRECTORY,
target=directory,
synthetic=True,
)
def parse_debian_control(filepath: str) -> Dict[str, Any]:
"""Parse debian control at filepath"""
metadata: Dict = {}
logger.debug("Debian control file %s", filepath)
for paragraph in Deb822.iter_paragraphs(open(filepath, "rb")):
logger.debug("paragraph: %s", paragraph)
metadata.update(**paragraph)
logger.debug("metadata parsed: %s", metadata)
return metadata
def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]:
"""Given an uncompressed path holding the DESCRIPTION file, returns a
DESCRIPTION parsed structure as a dict.
CRAN origins describe their intrinsic metadata within a DESCRIPTION file
at the root tree of a tarball. This DESCRIPTION uses a simple file format
called DCF, the Debian control format.
The release artifact contains a single folder at its root. For example:
$ tar tvf zprint-0.0.6.tar.gz
drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
...
Args:
dir_path (str): Path to the uncompressed directory
representing a release artifact from CRAN.
Returns:
the DESCRIPTION parsed structure as a dict (or empty dict if missing)
"""
# Retrieve the root folder of the archive
if not os.path.exists(dir_path):
return {}
lst = os.listdir(dir_path)
if len(lst) != 1:
return {}
project_dirname = lst[0]
description_path = os.path.join(dir_path, project_dirname, "DESCRIPTION")
if not os.path.exists(description_path):
return {}
return parse_debian_control(description_path)
def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]:
"""Parse a date into a datetime"""
assert not date or isinstance(date, str)
dt: Optional[datetime.datetime] = None
if not date:
return None
try:
specific_date = DATE_PATTERN.match(date)
if specific_date:
year = int(specific_date.group("year"))
month = int(specific_date.group("month"))
dt = datetime.datetime(year, month, 1)
else:
dt = dateutil.parser.parse(date)
if not dt.tzinfo:
# up for discussion the timezone needs to be set or
# normalize_timestamp is not happy: ValueError: normalize_timestamp
# received datetime without timezone: 2001-06-08 00:00:00
dt = dt.replace(tzinfo=timezone.utc)
except Exception as e:
logger.warning("Fail to parse date %s. Reason: %s", date, e)
if dt:
return TimestampWithTimezone.from_datetime(dt)
else:
return None
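parse_date above takes either a bare "YYYY-MM" string (matched by DATE_PATTERN) or anything dateutil can parse, and normalizes the result to a timezone-aware datetime before wrapping it in a TimestampWithTimezone. A short standalone illustration of the two code paths, assuming python-dateutil is installed:

import datetime
import re

import dateutil.parser

DATE_PATTERN = re.compile(r"^(?P<year>\d{4})-(?P<month>\d{2})$")

for raw in ("2001-06", "2001-06-08"):
    specific_date = DATE_PATTERN.match(raw)
    if specific_date:
        dt = datetime.datetime(
            int(specific_date.group("year")), int(specific_date.group("month")), 1
        )
    else:
        dt = dateutil.parser.parse(raw)
    if not dt.tzinfo:
        dt = dt.replace(tzinfo=datetime.timezone.utc)
    print(raw, "->", dt.isoformat())
# 2001-06 -> 2001-06-01T00:00:00+00:00
# 2001-06-08 -> 2001-06-08T00:00:00+00:00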
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index 4d5add9..4af88a0 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,1123 +1,1137 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import asyncio
import datetime
import hashlib
from itertools import islice
import json
import logging
import os
import string
import sys
import tempfile
from typing import (
Any,
Dict,
Generic,
Iterator,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
TypeVar,
)
import attr
from requests.exceptions import ContentDecodingError
import sentry_sdk
from swh.core.tarball import uncompress
from swh.loader.core import discovery
from swh.loader.core.loader import BaseLoader
from swh.loader.exception import NotFound
from swh.loader.package.utils import download
from swh.model import from_disk
from swh.model.hashutil import hash_to_hex
from swh.model.model import (
ExtID,
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
)
from swh.model.model import (
Origin,
OriginVisit,
OriginVisitStatus,
RawExtrinsicMetadata,
Release,
Revision,
Sha1Git,
Snapshot,
)
from swh.model.model import ObjectType as ModelObjectType
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.algos.snapshot import snapshot_get_latest
from swh.storage.interface import StorageInterface
from swh.storage.utils import now
logger = logging.getLogger(__name__)
SWH_METADATA_AUTHORITY = MetadataAuthority(
type=MetadataAuthorityType.REGISTRY,
url="https://softwareheritage.org/",
metadata={},
)
"""Metadata authority for extrinsic metadata generated by Software Heritage.
Used for metadata on "original artifacts", ie. length, filename, and checksums
of downloaded archive files."""
PartialExtID = Tuple[str, int, bytes]
"""The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object."""
@attr.s
class RawExtrinsicMetadataCore:
"""Contains the core of the metadata extracted by a loader, that will be
used to build a full RawExtrinsicMetadata object by adding object identifier,
context, and provenance information."""
format = attr.ib(type=str)
metadata = attr.ib(type=bytes)
discovery_date = attr.ib(type=Optional[datetime.datetime], default=None)
"""Defaults to the visit date."""
@attr.s
class BasePackageInfo:
"""Compute the primary key for a dict using the id_keys as primary key
composite.
Args:
d: A dict entry to compute the primary key on
id_keys: Sequence of keys to use as primary key
Returns:
The identity for that dict entry
"""
url = attr.ib(type=str)
filename = attr.ib(type=Optional[str])
version = attr.ib(type=str)
"""Version name/number."""
MANIFEST_FORMAT: Optional[string.Template] = None
"""If not None, used by the default extid() implementation to format a manifest,
before hashing it to produce an ExtID."""
EXTID_TYPE: str = "package-manifest-sha256"
EXTID_VERSION: int = 0
# The following attribute has kw_only=True in order to allow subclasses
# to add attributes. Without kw_only, attributes without default values cannot
# go after attributes with default values.
# See <https://github.com/python-attrs/attrs/issues/38>
directory_extrinsic_metadata = attr.ib(
type=List[RawExtrinsicMetadataCore],
default=[],
kw_only=True,
)
""":term:`extrinsic metadata` collected by the loader, that will be attached to the
loaded directory and added to the Metadata storage."""
+ checksums = attr.ib(type=Dict[str, str], default={}, kw_only=True)
+ """Dictionary holding package tarball checksums for integrity check after
+ download, keys are hash algorithm names and values are checksums in
+ hexadecimal format. The supported algorithms are defined in the
+ :data:`swh.model.hashutil.ALGORITHMS` set."""
+
# TODO: add support for metadata for releases and contents
def extid(self) -> Optional[PartialExtID]:
"""Returns a unique intrinsic identifier of this package info,
or None if this package info is not 'deduplicatable' (meaning that
we will always load it, instead of checking the ExtID storage
to see if we already did)"""
if self.MANIFEST_FORMAT is None:
return None
else:
manifest = self.MANIFEST_FORMAT.substitute(
{k: str(v) for (k, v) in attr.asdict(self).items()}
)
return (
self.EXTID_TYPE,
self.EXTID_VERSION,
hashlib.sha256(manifest.encode()).digest(),
)
TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)
class PackageLoader(BaseLoader, Generic[TPackageInfo]):
def __init__(self, storage: StorageInterface, url: str, **kwargs: Any):
"""Loader's constructor. This raises exception if the minimal required
configuration is missing (cf. fn:`check` method).
Args:
storage: Storage instance
url: Origin url to load data from
"""
super().__init__(storage=storage, origin_url=url, **kwargs)
self.status_load = ""
self.status_visit = ""
def load_status(self) -> Dict[str, str]:
"""Detailed loading status."""
return {
"status": self.status_load,
}
def visit_status(self) -> str:
"""Detailed visit status."""
return self.status_visit
def get_versions(self) -> Sequence[str]:
"""Return the list of all published package versions.
Raises:
class:`swh.loader.exception.NotFound` error when failing to read the
published package versions.
Returns:
Sequence of published versions
"""
return []
def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]:
"""Given a release version of a package, retrieve the associated
package information for such version.
Args:
version: Package version
Returns:
(branch name, package metadata)
"""
yield from {}
def build_release(
self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
"""Build the release from the archive metadata (extrinsic
artifact metadata) and the intrinsic metadata.
Args:
p_info: Package information
uncompressed_path: Artifact uncompressed path on disk
"""
raise NotImplementedError("build_release")
def get_default_version(self) -> str:
"""Retrieve the latest release version if any.
Returns:
Latest version
"""
return ""
def last_snapshot(self) -> Optional[Snapshot]:
"""Retrieve the last snapshot out of the last visit."""
return snapshot_get_latest(self.storage, self.origin.url)
def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]:
return p_info.extid()
def _get_known_extids(
self, packages_info: List[TPackageInfo]
) -> Dict[PartialExtID, List[CoreSWHID]]:
"""Compute the ExtIDs from new PackageInfo objects, searches which are already
loaded in the archive, and returns them if any."""
# Compute the ExtIDs of all the new packages, grouped by extid type
new_extids: Dict[Tuple[str, int], List[bytes]] = {}
for p_info in packages_info:
res = p_info.extid()
if res is not None:
(extid_type, extid_version, extid_extid) = res
new_extids.setdefault((extid_type, extid_version), []).append(
extid_extid
)
# For each extid type, call extid_get_from_extid() with all the extids of
# that type, and store them in the '(type, extid) -> target' map.
known_extids: Dict[PartialExtID, List[CoreSWHID]] = {}
for ((extid_type, extid_version), extids) in new_extids.items():
for extid in self.storage.extid_get_from_extid(
extid_type, extids, version=extid_version
):
if extid is not None:
key = (extid.extid_type, extid_version, extid.extid)
known_extids.setdefault(key, []).append(extid.target)
return known_extids
def resolve_object_from_extids(
self,
known_extids: Dict[PartialExtID, List[CoreSWHID]],
p_info: TPackageInfo,
whitelist: Set[Sha1Git],
) -> Optional[CoreSWHID]:
"""Resolve the revision/release from known ExtIDs and a package info object.
If the artifact has already been downloaded, this will return the
existing release (or revision) targeting that uncompressed artifact directory.
Otherwise, this returns None.
Args:
known_extids: Dict built from a list of ExtID, with the target as value
p_info: Package information
whitelist: Any ExtID with target not in this set is filtered out
Returns:
None or release/revision SWHID
"""
new_extid = p_info.extid()
if new_extid is None:
return None
extid_targets = set()
for extid_target in known_extids.get(new_extid, []):
if extid_target.object_id not in whitelist:
# There is a known ExtID for this package, but its target is not
# in the snapshot.
# This can happen for three reasons:
#
# 1. a loader crashed after writing the ExtID, but before writing
# the snapshot
# 2. some other loader loaded the same artifact, but produced
# a different revision, causing an additional ExtID object
# to be written. We will probably find this loader's ExtID
# in a future iteration of this loop.
# Note that for now, this is impossible, as each loader has a
# completely different extid_type, but this is an implementation
# detail of each loader.
# 3. we took a snapshot, then the package disappeared,
# then we took another snapshot, and the package reappeared
#
# In case of 1, we must actually load the package now,
# so let's do it.
# TODO: detect when we are in case 3 using release_missing
# or revision_missing instead of the snapshot.
continue
elif extid_target.object_type in (ObjectType.RELEASE, ObjectType.REVISION):
extid_targets.add(extid_target)
else:
# Note that this case should never be reached unless there is a
# collision between a revision hash and some non-revision object's
# hash, but better safe than sorry.
logger.warning(
"%s is in the whitelist, but is not a revision/release.",
hash_to_hex(extid_target.object_type),
)
if extid_targets:
# This is a known package version, as we have an extid to reference it.
# Let's return one of them.
# If there is a release extid, return it.
release_extid_targets = {
extid_target
for extid_target in extid_targets
if extid_target.object_type == ObjectType.RELEASE
}
# Exclude missing targets
missing_releases = {
CoreSWHID(object_type=ObjectType.RELEASE, object_id=id_)
for id_ in self.storage.release_missing(
[swhid.object_id for swhid in release_extid_targets]
)
}
if missing_releases:
err_message = "Found ExtIDs pointing to missing releases"
logger.error(err_message + ": %s", missing_releases)
with sentry_sdk.push_scope() as scope:
scope.set_extra(
"missing_releases", [str(x) for x in missing_releases]
)
sentry_sdk.capture_message(err_message, "error")
release_extid_targets -= missing_releases
extid_target2 = self.select_extid_target(p_info, release_extid_targets)
if extid_target2:
return extid_target2
# If there is no release extid (ie. if the package was only loaded with
# older versions of this loader, which produced revision objects instead
# of releases), return a revision extid when possible.
revision_extid_targets = {
extid_target
for extid_target in extid_targets
if extid_target.object_type == ObjectType.REVISION
}
if revision_extid_targets:
assert len(extid_targets) == 1, extid_targets
extid_target = list(extid_targets)[0]
return extid_target
# No target found (this is probably a new package version)
return None
def select_extid_target(
self, p_info: TPackageInfo, extid_targets: Set[CoreSWHID]
) -> Optional[CoreSWHID]:
"""Given a list of release extid targets, choses one appropriate for the
given package info.
Package loaders shyould implement this if their ExtIDs may map to multiple
releases, so they can fetch releases from the storage and inspect their fields
to select the right one for this ``p_info``.
"""
if extid_targets:
# The base package loader does not have the domain-specific knowledge
# to select the right release -> crash if there is more than one.
assert len(extid_targets) == 1, extid_targets
return list(extid_targets)[0]
return None
def download_package(
self, p_info: TPackageInfo, tmpdir: str
) -> List[Tuple[str, Mapping]]:
"""Download artifacts for a specific package. All downloads happen in
in the tmpdir folder.
Default implementation expects the artifacts package info to be
about one artifact per package.
Note that most implementations have 1 artifact per package. But some
implementations have multiple artifacts per package (debian), and some have
none; the package is the artifact (gnu).
Args:
p_info: Information on the package artifacts to
download (url, filename, etc...)
tmpdir: Location to retrieve such artifacts
Returns:
List of (path, computed hashes)
"""
try:
- return [download(p_info.url, dest=tmpdir, filename=p_info.filename)]
+ return [
+ download(
+ p_info.url,
+ dest=tmpdir,
+ filename=p_info.filename,
+ hashes=p_info.checksums,
+ )
+ ]
except ContentDecodingError:
# package might be erroneously marked as gzip compressed while it is not,
# try to download its raw bytes again without attempting to uncompress
# the input stream
return [
download(
p_info.url,
dest=tmpdir,
filename=p_info.filename,
+ hashes=p_info.checksums,
extra_request_headers={"Accept-Encoding": "identity"},
)
]
def uncompress(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
) -> str:
"""Uncompress the artifact(s) in the destination folder dest.
Optionally, this could need to use the p_info dict for some more
information (debian).
"""
uncompressed_path = os.path.join(dest, "src")
for a_path, _ in dl_artifacts:
uncompress(a_path, dest=uncompressed_path)
return uncompressed_path
def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
"""Return an extra dict of branches that are used to update the set of
branches.
"""
return {}
def finalize_visit(
self,
*,
snapshot: Optional[Snapshot],
visit: OriginVisit,
status_visit: str,
status_load: str,
failed_branches: List[str],
errors: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""Finalize the visit:
- flush eventual unflushed data to storage
- update origin visit's status
- return the task's status
"""
self.status_load = status_load
self.status_visit = status_visit
self.storage.flush()
snapshot_id: Optional[bytes] = None
if snapshot and snapshot.id: # to prevent the snapshot.id to b""
snapshot_id = snapshot.id
assert visit.visit
visit_status = OriginVisitStatus(
origin=self.origin.url,
visit=visit.visit,
type=self.visit_type,
date=now(),
status=status_visit,
snapshot=snapshot_id,
)
self.storage.origin_visit_status_add([visit_status])
result: Dict[str, Any] = {
"status": status_load,
}
if snapshot_id:
result["snapshot_id"] = hash_to_hex(snapshot_id)
if failed_branches:
logger.warning("%d failed branches", len(failed_branches))
for i, urls in enumerate(islice(failed_branches, 50)):
prefix_url = "Failed branches: " if i == 0 else ""
logger.warning("%s%s", prefix_url, urls)
return result
def load(self) -> Dict:
"""Load for a specific origin the associated contents.
1. Get the list of versions in an origin.
2. Get the snapshot from the previous run of the loader,
and filter out versions that were already loaded, if their
:term:`extids <extid>` match
Then, for each remaining version in the origin
3. Fetch the files for one package version. By default, this can be
implemented as a simple HTTP request. Loaders with more specific
requirements can override this, e.g.: the PyPI loader checks the
integrity of the downloaded files; the Debian loader has to download
and check several files for one package version.
4. Extract the downloaded files. By default, this would be a universal
archive/tarball extraction.
Loaders for specific formats can override this method (for instance,
the Debian loader uses dpkg-source -x).
5. Convert the extracted directory to a set of Software Heritage
objects Using swh.model.from_disk.
6. Extract the metadata from the unpacked directories This would only
be applicable for "smart" loaders like npm (parsing the
package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
debian/changelog and debian/control).
On "minimal-metadata" sources such as the GNU archive, the lister
should provide the minimal set of metadata needed to populate the
revision/release objects (authors, dates) as an argument to the
task.
7. Generate the revision/release objects for the given version. From
the data generated at steps 3 and 4.
end for each
8. Generate and load the snapshot for the visit
Using the revisions/releases collected at step 7., and the branch
information from step 2., generate a snapshot and load it into the
Software Heritage archive
"""
self.status_load = "uneventful" # either: eventful, uneventful, failed
self.status_visit = "full" # see swh.model.model.OriginVisitStatus
snapshot = None
failed_branches: List[str] = []
# Prepare origin and origin_visit
origin = Origin(url=self.origin.url)
try:
self.storage.origin_add([origin])
visit = list(
self.storage.origin_visit_add(
[
OriginVisit(
origin=self.origin.url,
date=self.visit_date,
type=self.visit_type,
)
]
)
)[0]
except Exception as e:
logger.exception(
"Failed to initialize origin_visit for %s", self.origin.url
)
sentry_sdk.capture_exception(e)
self.status_load = self.status_visit = "failed"
return {"status": "failed"}
# Get the previous snapshot for this origin. It is then used to see which
# of the package's versions are already loaded in the archive.
try:
last_snapshot = self.last_snapshot()
logger.debug("last snapshot: %s", last_snapshot)
except Exception as e:
logger.exception("Failed to get previous state for %s", self.origin.url)
sentry_sdk.capture_exception(e)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=[str(e)],
)
load_exceptions: List[Exception] = []
# Get the list of all version names
try:
versions = self.get_versions()
except NotFound as e:
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="not_found",
status_load="failed",
errors=[str(e)],
)
except Exception as e:
logger.exception("Failed to get list of versions for %s", self.origin.url)
sentry_sdk.capture_exception(e)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=[str(e)],
)
errors = []
# Get the metadata of each version's package
packages_info: List[Tuple[str, TPackageInfo]] = []
for version in versions:
try:
for branch_name, p_info in self.get_package_info(version):
packages_info.append((branch_name, p_info))
except Exception as e:
load_exceptions.append(e)
sentry_sdk.capture_exception(e)
error = f"Failed to get package info for version {version} of {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
# Compute the ExtID of each of these packages
known_extids = self._get_known_extids([p_info for (_, p_info) in packages_info])
if last_snapshot is None:
last_snapshot_targets: Set[Sha1Git] = set()
else:
last_snapshot_targets = {
branch.target for branch in last_snapshot.branches.values()
}
new_extids: Set[ExtID] = set()
tmp_releases: Dict[str, List[Tuple[str, Sha1Git]]] = {
version: [] for version in versions
}
for (branch_name, p_info) in packages_info:
logger.debug("package_info: %s", p_info)
# Check if the package was already loaded, using its ExtID
swhid = self.resolve_object_from_extids(
known_extids, p_info, last_snapshot_targets
)
if swhid is not None and swhid.object_type == ObjectType.REVISION:
# This package was already loaded, but by an older version
# of this loader, which produced revisions instead of releases.
# Let's fetch the revision's data, and "upgrade" it into a release.
(rev,) = self.storage.revision_get([swhid.object_id])
if not rev:
logger.error(
"Failed to upgrade branch %s from revision to "
"release, %s is missing from the storage. "
"Falling back to re-loading from the origin.",
branch_name,
swhid,
)
else:
rev = None
if swhid is None or (swhid.object_type == ObjectType.REVISION and not rev):
# No matching revision or release found in the last snapshot, load it.
release_id = None
try:
res = self._load_release(p_info, origin)
if res:
(release_id, directory_id) = res
assert release_id
assert directory_id
self._load_extrinsic_directory_metadata(
p_info, release_id, directory_id
)
self.storage.flush()
self.status_load = "eventful"
except Exception as e:
self.storage.clear_buffers()
load_exceptions.append(e)
sentry_sdk.capture_exception(e)
error = f"Failed to load branch {branch_name} for {self.origin.url}"
logger.exception(error)
failed_branches.append(branch_name)
errors.append(f"{error}: {e}")
continue
if release_id is None:
continue
add_extid = True
elif swhid.object_type == ObjectType.REVISION:
# If 'rev' was None, the previous block would have run.
assert rev is not None
rel = rev2rel(rev, p_info.version)
self.storage.release_add([rel])
logger.debug("Upgraded %s to %s", swhid, rel.swhid())
release_id = rel.id
# Create a new extid for this package, so the next run of this loader
# will be able to find the new release, and use it (instead of the
# old revision)
add_extid = True
elif swhid.object_type == ObjectType.RELEASE:
# This package was already loaded, nothing to do.
release_id = swhid.object_id
add_extid = False
else:
assert False, f"Unexpected object type: {swhid}"
assert release_id is not None
if add_extid:
partial_extid = p_info.extid()
if partial_extid is not None:
(extid_type, extid_version, extid) = partial_extid
release_swhid = CoreSWHID(
object_type=ObjectType.RELEASE, object_id=release_id
)
new_extids.add(
ExtID(
extid_type=extid_type,
extid_version=extid_version,
extid=extid,
target=release_swhid,
)
)
tmp_releases[p_info.version].append((branch_name, release_id))
if load_exceptions:
self.status_visit = "partial"
if not tmp_releases:
# We could not load any releases; fail completely
logger.error("Failed to load any release for %s", self.origin.url)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=errors,
)
try:
# Retrieve the default release version (the "latest" one)
default_version = self.get_default_version()
logger.debug("default version: %s", default_version)
# Retrieve extra branches
extra_branches = self.extra_branches()
logger.debug("extra branches: %s", extra_branches)
snapshot = self._load_snapshot(
default_version, tmp_releases, extra_branches
)
self.storage.flush()
except Exception as e:
error = f"Failed to build snapshot for origin {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "failed"
self.status_load = "failed"
if snapshot:
try:
metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
self.load_metadata_objects(metadata_objects)
except Exception as e:
error = (
f"Failed to load extrinsic snapshot metadata for {self.origin.url}"
)
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "partial"
self.status_load = "failed"
try:
metadata_objects = self.build_extrinsic_origin_metadata()
self.load_metadata_objects(metadata_objects)
except Exception as e:
error = f"Failed to load extrinsic origin metadata for {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "partial"
self.status_load = "failed"
if self.status_load != "failed":
self._load_extids(new_extids)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit=self.status_visit,
status_load=self.status_load,
errors=errors,
)
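# Minimal subclass outline (illustrative only; the visit type, URLs and
# versions below are hypothetical, and release_name / EMPTY_AUTHOR would come
# from swh.loader.package.utils):
#
#     class MyLoader(PackageLoader[BasePackageInfo]):
#         visit_type = "my-forge"
#
#         def get_versions(self):
#             return ["1.0.0", "1.1.0"]
#
#         def get_default_version(self):
#             return self.get_versions()[-1]
#
#         def get_package_info(self, version):
#             yield release_name(version), BasePackageInfo(
#                 url=f"https://example.org/pkg-{version}.tar.gz",
#                 filename=f"pkg-{version}.tar.gz",
#                 version=version,
#             )
#
#         def build_release(self, p_info, uncompressed_path, directory):
#             return Release(
#                 name=p_info.version.encode(),
#                 message=b"Synthetic release\n",
#                 author=EMPTY_AUTHOR,
#                 date=None,
#                 target=directory,
#                 target_type=ModelObjectType.DIRECTORY,
#                 synthetic=True,
#             )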
def _load_directory(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
) -> Tuple[str, from_disk.Directory]:
uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
logger.debug("uncompressed_path: %s", uncompressed_path)
directory = from_disk.Directory.from_disk(
path=uncompressed_path.encode("utf-8"),
max_content_length=self.max_content_size,
)
contents, skipped_contents, directories = from_disk.iter_directory(directory)
# Instead of sending everything from the bottom up to the storage,
# use a Merkle graph discovery algorithm to filter out known objects.
contents, skipped_contents, directories = asyncio.run(
discovery.filter_known_objects(
discovery.DiscoveryStorageConnection(
contents, skipped_contents, directories, self.storage
),
)
)
logger.debug("Number of skipped contents: %s", len(skipped_contents))
self.storage.skipped_content_add(skipped_contents)
logger.debug("Number of contents: %s", len(contents))
self.storage.content_add(contents)
logger.debug("Number of directories: %s", len(directories))
self.storage.directory_add(directories)
return (uncompressed_path, directory)
def _load_release(
self, p_info: TPackageInfo, origin
) -> Optional[Tuple[Sha1Git, Sha1Git]]:
"""Does all the loading of a release itself:
* downloads a package and uncompresses it
* loads it from disk
* adds contents, directories, and release to self.storage
* returns (release_id, directory_id)
Raises:
an exception when unable to download or uncompress artifacts
"""
with tempfile.TemporaryDirectory() as tmpdir:
dl_artifacts = self.download_package(p_info, tmpdir)
(uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)
# FIXME: This should be release. cf. D409
release = self.build_release(
p_info, uncompressed_path, directory=directory.hash
)
if not release:
# Some artifacts are missing intrinsic metadata
# skipping those
return None
metadata = [metadata for (filepath, metadata) in dl_artifacts]
assert release.target is not None, release
assert release.target_type == ModelObjectType.DIRECTORY, release
metadata_target = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=release.target
)
original_artifact_metadata = RawExtrinsicMetadata(
target=metadata_target,
discovery_date=self.visit_date,
authority=SWH_METADATA_AUTHORITY,
fetcher=self.get_metadata_fetcher(),
format="original-artifacts-json",
metadata=json.dumps(metadata).encode(),
origin=self.origin.url,
release=release.swhid(),
)
self.load_metadata_objects([original_artifact_metadata])
logger.debug("Release: %s", release)
self.storage.release_add([release])
assert directory.hash
return (release.id, directory.hash)
def _load_snapshot(
self,
default_version: str,
releases: Dict[str, List[Tuple[str, bytes]]],
extra_branches: Dict[bytes, Mapping[str, Any]],
) -> Optional[Snapshot]:
"""Build snapshot out of the current releases stored and extra branches.
Then load it in the storage.
"""
logger.debug("releases: %s", releases)
# Build and load the snapshot
branches = {} # type: Dict[bytes, Mapping[str, Any]]
for version, branch_name_releases in releases.items():
if version == default_version and len(branch_name_releases) == 1:
# only 1 branch (no ambiguity), we can create an alias
# branch 'HEAD'
branch_name, _ = branch_name_releases[0]
# except for some corner cases (deposit)
if branch_name != "HEAD":
branches[b"HEAD"] = {
"target_type": "alias",
"target": branch_name.encode("utf-8"),
}
for branch_name, target in branch_name_releases:
branches[branch_name.encode("utf-8")] = {
"target_type": "release",
"target": target,
}
# Deal with extra-branches
for name, branch_target in extra_branches.items():
if name in branches:
error_message = f"Extra branch '{name!r}' has been ignored"
logger.error(error_message)
sentry_sdk.capture_message(error_message, "error")
else:
branches[name] = branch_target
snapshot_data = {"branches": branches}
logger.debug("snapshot: %s", snapshot_data)
snapshot = Snapshot.from_dict(snapshot_data)
logger.debug("snapshot: %s", snapshot)
self.storage.snapshot_add([snapshot])
return snapshot
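# For reference, the branches dict built above ends up shaped like this
# (branch names and targets are illustrative):
#     {
#         b"HEAD": {"target_type": "alias", "target": b"releases/1.2.0"},
#         b"releases/1.2.0": {"target_type": "release", "target": <release id>},
#     }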
def get_loader_name(self) -> str:
"""Returns a fully qualified name of this loader."""
return f"{self.__class__.__module__}.{self.__class__.__name__}"
def get_loader_version(self) -> str:
"""Returns the version of the current loader."""
module_name = self.__class__.__module__ or ""
module_name_parts = module_name.split(".")
# Iterate rootward through the package hierarchy until we find a parent of this
# loader's module with a __version__ attribute.
for prefix_size in range(len(module_name_parts), 0, -1):
package_name = ".".join(module_name_parts[0:prefix_size])
module = sys.modules[package_name]
if hasattr(module, "__version__"):
return module.__version__
# If this loader's class has no parent package with a __version__,
# it should implement it itself.
raise NotImplementedError(
f"Could not dynamically find the version of {self.get_loader_name()}."
)
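# Example of the lookup above: for a loader defined in
# "swh.loader.package.npm.loader", the candidates tried are, in order,
# "swh.loader.package.npm.loader", "swh.loader.package.npm",
# "swh.loader.package", "swh.loader", "swh"; the first one exposing a
# __version__ attribute wins (in practice swh.loader.package defines one, as
# used in the tests below).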
def get_metadata_fetcher(self) -> MetadataFetcher:
"""Returns a MetadataFetcher instance representing this package loader;
which is used to for adding provenance information to extracted
extrinsic metadata, if any."""
return MetadataFetcher(
name=self.get_loader_name(),
version=self.get_loader_version(),
metadata={},
)
def get_metadata_authority(self) -> MetadataAuthority:
"""For package loaders that get extrinsic metadata, returns the authority
the metadata are coming from.
"""
raise NotImplementedError("get_metadata_authority")
def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
"""Returns metadata items, used by build_extrinsic_origin_metadata."""
return []
def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]:
"""Builds a list of full RawExtrinsicMetadata objects, using
metadata returned by get_extrinsic_origin_metadata."""
metadata_items = self.get_extrinsic_origin_metadata()
if not metadata_items:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in metadata_items:
metadata_objects.append(
RawExtrinsicMetadata(
target=self.origin.swhid(),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
)
)
return metadata_objects
def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]:
"""Returns metadata items, used by build_extrinsic_snapshot_metadata."""
return []
def build_extrinsic_snapshot_metadata(
self, snapshot_id: Sha1Git
) -> List[RawExtrinsicMetadata]:
"""Builds a list of full RawExtrinsicMetadata objects, using
metadata returned by get_extrinsic_snapshot_metadata."""
metadata_items = self.get_extrinsic_snapshot_metadata()
if not metadata_items:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in metadata_items:
metadata_objects.append(
RawExtrinsicMetadata(
target=ExtendedSWHID(
object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id
),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
origin=self.origin.url,
)
)
return metadata_objects
def build_extrinsic_directory_metadata(
self,
p_info: TPackageInfo,
release_id: Sha1Git,
directory_id: Sha1Git,
) -> List[RawExtrinsicMetadata]:
if not p_info.directory_extrinsic_metadata:
# If this package loader doesn't write metadata, no need to require
# an implementation for get_metadata_authority.
return []
authority = self.get_metadata_authority()
fetcher = self.get_metadata_fetcher()
metadata_objects = []
for item in p_info.directory_extrinsic_metadata:
metadata_objects.append(
RawExtrinsicMetadata(
target=ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id
),
discovery_date=item.discovery_date or self.visit_date,
authority=authority,
fetcher=fetcher,
format=item.format,
metadata=item.metadata,
origin=self.origin.url,
release=CoreSWHID(
object_type=ObjectType.RELEASE, object_id=release_id
),
)
)
return metadata_objects
def _load_extrinsic_directory_metadata(
self,
p_info: TPackageInfo,
release_id: Sha1Git,
directory_id: Sha1Git,
) -> None:
metadata_objects = self.build_extrinsic_directory_metadata(
p_info, release_id, directory_id
)
self.load_metadata_objects(metadata_objects)
def _load_extids(self, extids: Set[ExtID]) -> None:
if not extids:
return
try:
self.storage.extid_add(list(extids))
except Exception as e:
logger.exception("Failed to load new ExtIDs for %s", self.origin.url)
sentry_sdk.capture_exception(e)
# No big deal, it just means the next visit will load the same versions
# again.
def rev2rel(rev: Revision, version: str) -> Release:
"""Converts a revision to a release."""
message = rev.message
if message and not message.endswith(b"\n"):
message += b"\n"
return Release(
name=version.encode(),
message=message,
target=rev.directory,
target_type=ModelObjectType.DIRECTORY,
synthetic=rev.synthetic,
author=rev.author,
date=rev.date,
)
diff --git a/swh/loader/package/maven/loader.py b/swh/loader/package/maven/loader.py
index 140a703..ef5e456 100644
--- a/swh/loader/package/maven/loader.py
+++ b/swh/loader/package/maven/loader.py
@@ -1,206 +1,212 @@
# Copyright (C) 2021-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from __future__ import annotations
from datetime import datetime, timezone
import json
import logging
from os import path
import string
from typing import Any, Iterator, List, Optional, Sequence, Tuple
import attr
import iso8601
import requests
from typing_extensions import TypedDict
from swh.loader.package.loader import (
BasePackageInfo,
PackageLoader,
RawExtrinsicMetadataCore,
)
-from swh.loader.package.utils import EMPTY_AUTHOR, release_name
+from swh.loader.package.utils import EMPTY_AUTHOR, get_url_body, release_name
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
ObjectType,
RawExtrinsicMetadata,
Release,
Sha1Git,
TimestampWithTimezone,
)
from swh.storage.interface import StorageInterface
logger = logging.getLogger(__name__)
class ArtifactDict(TypedDict):
"""Data about a Maven artifact, passed by the Maven Lister."""
time: str
"""the time of the last update of jar file on the server as an iso8601 date string
"""
url: str
"""the artifact url to retrieve filename"""
filename: Optional[str]
"""optionally, the file's name"""
gid: str
"""artifact's groupId"""
aid: str
"""artifact's artifactId"""
version: str
"""artifact's version"""
base_url: str
"""root URL of the Maven instance"""
@attr.s
class MavenPackageInfo(BasePackageInfo):
time = attr.ib(type=datetime)
"""Timestamp of the last update of jar file on the server."""
gid = attr.ib(type=str)
"""Group ID of the maven artifact"""
aid = attr.ib(type=str)
"""Artifact ID of the maven artifact"""
version = attr.ib(type=str)
"""Version of the maven artifact"""
base_url = attr.ib(type=str)
"""Root URL of the Maven instance"""
# default format for maven artifacts
MANIFEST_FORMAT = string.Template("$gid $aid $version $url $time")
EXTID_TYPE = "maven-jar"
EXTID_VERSION = 0
@classmethod
def from_metadata(cls, a_metadata: ArtifactDict) -> MavenPackageInfo:
time = iso8601.parse_date(a_metadata["time"]).astimezone(tz=timezone.utc)
url = a_metadata["url"]
+ checksums = {}
+ try:
+ checksums["sha1"] = get_url_body(url + ".sha1").decode()
+ except requests.HTTPError:
+ pass
return cls(
url=url,
filename=a_metadata.get("filename") or path.split(url)[-1],
time=time,
gid=a_metadata["gid"],
aid=a_metadata["aid"],
version=a_metadata["version"],
base_url=a_metadata["base_url"],
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
format="maven-json",
metadata=json.dumps(a_metadata).encode(),
),
],
+ checksums=checksums,
)
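# Note (illustrative): the checksum retrieved above relies on the usual Maven
# ".sha1" sidecar file published next to the artifact, e.g. for
# ".../sprova4j-0.1.0-sources.jar" the loader fetches
# ".../sprova4j-0.1.0-sources.jar.sha1" and ends up with
# checksums == {"sha1": "6976e186000753610a63713677f42f0228f04e64"}; when the
# sidecar is missing (HTTPError), checksums stays empty and no verification
# happens at download time.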
class MavenLoader(PackageLoader[MavenPackageInfo]):
"""Load source code jar origin's artifact files into swh archive"""
visit_type = "maven"
def __init__(
self,
storage: StorageInterface,
url: str,
artifacts: Sequence[ArtifactDict],
**kwargs: Any,
):
"""Loader constructor.
For now, the artifacts argument is the lister's task output.
There is one, and only one, artifact (jar or zip) per version, as guaranteed by
the Maven coordinates system.
Args:
url: Origin url
artifacts: List of single artifact information
"""
super().__init__(storage=storage, url=url, **kwargs)
self.artifacts = artifacts # assume order is enforced in the lister
self.version_artifact = {
jar["version"]: jar for jar in artifacts if jar["version"]
}
if artifacts:
base_urls = {jar["base_url"] for jar in artifacts}
try:
(self.base_url,) = base_urls
except ValueError:
raise ValueError(
"Artifacts originate from more than one Maven instance: "
+ ", ".join(base_urls)
) from None
else:
# There is no artifact, so self.metadata_authority won't be called,
# so self.base_url won't be accessed.
pass
def get_versions(self) -> Sequence[str]:
return list(self.version_artifact)
def get_default_version(self) -> str:
# Default version is the one of the most recent artifact
return max(self.artifacts, key=lambda a: a["time"])["version"]
def get_metadata_authority(self):
return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.base_url)
def build_extrinsic_directory_metadata(
self,
p_info: MavenPackageInfo,
release_id: Sha1Git,
directory_id: Sha1Git,
) -> List[RawExtrinsicMetadata]:
# Rebuild POM URL.
pom_url = path.dirname(p_info.url)
pom_url = f"{pom_url}/{p_info.aid}-{p_info.version}.pom"
r = requests.get(pom_url, allow_redirects=True)
if r.status_code == 200:
metadata_pom = r.content
else:
metadata_pom = b""
p_info.directory_extrinsic_metadata.append(
RawExtrinsicMetadataCore(
format="maven-pom",
metadata=metadata_pom,
)
)
return super().build_extrinsic_directory_metadata(
p_info=p_info,
release_id=release_id,
directory_id=directory_id,
)
def get_package_info(self, version: str) -> Iterator[Tuple[str, MavenPackageInfo]]:
a_metadata = self.version_artifact[version]
rel_name = release_name(a_metadata["version"])
yield rel_name, MavenPackageInfo.from_metadata(a_metadata)
def build_release(
self, p_info: MavenPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
msg = f"Synthetic release for archive at {p_info.url}\n".encode("utf-8")
normalized_time = TimestampWithTimezone.from_datetime(p_info.time)
return Release(
name=p_info.version.encode(),
message=msg,
date=normalized_time,
author=EMPTY_AUTHOR,
target=directory,
target_type=ObjectType.DIRECTORY,
synthetic=True,
)
diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1 b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1
new file mode 100644
index 0000000..9b4204d
--- /dev/null
+++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1
@@ -0,0 +1 @@
+6976e186000753610a63713677f42f0228f04e64
\ No newline at end of file
diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1 b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1
new file mode 100644
index 0000000..065821c
--- /dev/null
+++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1
@@ -0,0 +1 @@
+10c61786a119470096b8d1884e43d5880d99ec7e
\ No newline at end of file
diff --git a/swh/loader/package/maven/tests/test_maven.py b/swh/loader/package/maven/tests/test_maven.py
index 36de2a7..5bc6db6 100644
--- a/swh/loader/package/maven/tests/test_maven.py
+++ b/swh/loader/package/maven/tests/test_maven.py
@@ -1,475 +1,499 @@
# Copyright (C) 2019-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import hashlib
from itertools import chain
import json
import os
from pathlib import Path
import pytest
from swh.core.tarball import uncompress
from swh.loader.package import __version__
from swh.loader.package.maven.loader import MavenLoader, MavenPackageInfo
from swh.loader.package.utils import EMPTY_AUTHOR
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.from_disk import Directory, iter_directory
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
RawExtrinsicMetadata,
Release,
Snapshot,
SnapshotBranch,
TargetType,
TimestampWithTimezone,
)
from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher
from swh.model.model import ObjectType as ModelObjectType
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.algos.snapshot import snapshot_get_all_branches
REPO_BASE_URL = "https://repo1.maven.org/maven2/"
MVN_ORIGIN_URL = f"{REPO_BASE_URL}al/aldi/sprova4j"
MVN_ARTIFACTS = [
{
"time": "2021-07-12 19:06:59.335000",
"gid": "al.aldi",
"aid": "sprova4j",
"filename": "sprova4j-0.1.0-sources.jar",
"version": "0.1.0",
"base_url": REPO_BASE_URL,
"url": f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.0/sprova4j-0.1.0-sources.jar",
},
{
"time": "2021-07-12 19:37:05.534000",
"gid": "al.aldi",
"aid": "sprova4j",
"filename": "sprova4j-0.1.1-sources.jar",
"version": "0.1.1",
"base_url": REPO_BASE_URL,
"url": f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.1/sprova4j-0.1.1-sources.jar",
},
]
MVN_ARTIFACTS_POM = [
f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.0/sprova4j-0.1.0.pom",
f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.1/sprova4j-0.1.1.pom",
]
REL_MSGS = (
b"Synthetic release for archive at https://repo1.maven.org/maven2/al/aldi/"
b"sprova4j/0.1.0/sprova4j-0.1.0-sources.jar\n",
b"Synthetic release for archive at https://repo1.maven.org/maven2/al/aldi/"
b"sprova4j/0.1.1/sprova4j-0.1.1-sources.jar\n",
)
REL_DATES = (
TimestampWithTimezone.from_datetime(
datetime.datetime(2021, 7, 12, 19, 6, 59, 335000, tzinfo=datetime.timezone.utc)
),
TimestampWithTimezone.from_datetime(
datetime.datetime(2021, 7, 12, 19, 37, 5, 534000, tzinfo=datetime.timezone.utc)
),
)
@pytest.fixture
def data_jar_1(datadir):
content = Path(
datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar"
).read_bytes()
return content
+@pytest.fixture
+def data_jar_1_sha1(datadir):
+ content = Path(
+ datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar.sha1"
+ ).read_bytes()
+ return content
+
+
@pytest.fixture
def data_pom_1(datadir):
content = Path(datadir, "https_maven.org", "sprova4j-0.1.0.pom").read_bytes()
return content
@pytest.fixture
def data_jar_2(datadir):
content = Path(
datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar"
).read_bytes()
return content
+@pytest.fixture
+def data_jar_2_sha1(datadir):
+ content = Path(
+ datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar.sha1"
+ ).read_bytes()
+ return content
+
+
@pytest.fixture
def data_pom_2(datadir):
content = Path(datadir, "https_maven.org", "sprova4j-0.1.1.pom").read_bytes()
return content
@pytest.fixture
def jar_dirs(datadir, tmp_path):
jar_1_path = os.path.join(datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar")
jar_2_path = os.path.join(datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar")
jar_1_extract_path = os.path.join(tmp_path, "jar_1")
jar_2_extract_path = os.path.join(tmp_path, "jar_2")
uncompress(jar_1_path, jar_1_extract_path)
uncompress(jar_2_path, jar_2_extract_path)
jar_1_dir = Directory.from_disk(path=jar_1_extract_path.encode())
jar_2_dir = Directory.from_disk(path=jar_2_extract_path.encode())
return [jar_1_dir, jar_2_dir]
@pytest.fixture
def expected_contents_and_directories(jar_dirs):
jar_1_cnts, _, jar_1_dirs = iter_directory(jar_dirs[0])
jar_2_cnts, _, jar_2_dirs = iter_directory(jar_dirs[1])
contents = {cnt.sha1 for cnt in chain(jar_1_cnts, jar_2_cnts)}
directories = {dir.id for dir in chain(jar_1_dirs, jar_2_dirs)}
return contents, directories
@pytest.fixture
def expected_releases(jar_dirs):
return [
Release(
name=b"0.1.0",
message=REL_MSGS[0],
author=EMPTY_AUTHOR,
date=REL_DATES[0],
target_type=ModelObjectType.DIRECTORY,
target=jar_dirs[0].hash,
synthetic=True,
metadata=None,
),
Release(
name=b"0.1.1",
message=REL_MSGS[1],
author=EMPTY_AUTHOR,
date=REL_DATES[1],
target_type=ModelObjectType.DIRECTORY,
target=jar_dirs[1].hash,
synthetic=True,
metadata=None,
),
]
@pytest.fixture
def expected_snapshot(expected_releases):
return Snapshot(
branches={
b"HEAD": SnapshotBranch(
target_type=TargetType.ALIAS,
target=b"releases/0.1.1",
),
b"releases/0.1.0": SnapshotBranch(
target_type=TargetType.RELEASE,
target=expected_releases[0].id,
),
b"releases/0.1.1": SnapshotBranch(
target_type=TargetType.RELEASE,
target=expected_releases[1].id,
),
},
)
@pytest.fixture
def expected_json_metadata():
return MVN_ARTIFACTS
@pytest.fixture
def expected_pom_metadata(data_pom_1, data_pom_2):
return [data_pom_1, data_pom_2]
@pytest.fixture(autouse=True)
def network_requests_mock(
requests_mock,
data_jar_1,
+ data_jar_1_sha1,
data_pom_1,
data_jar_2,
+ data_jar_2_sha1,
data_pom_2,
):
requests_mock.get(MVN_ARTIFACTS[0]["url"], content=data_jar_1)
+ requests_mock.get(MVN_ARTIFACTS[0]["url"] + ".sha1", content=data_jar_1_sha1)
requests_mock.get(MVN_ARTIFACTS_POM[0], content=data_pom_1)
requests_mock.get(MVN_ARTIFACTS[1]["url"], content=data_jar_2)
+ requests_mock.get(MVN_ARTIFACTS[1]["url"] + ".sha1", content=data_jar_2_sha1)
requests_mock.get(MVN_ARTIFACTS_POM[1], content=data_pom_2)
def test_maven_loader_visit_with_no_artifact_found(swh_storage, requests_mock_datadir):
origin_url = "https://ftp.g.o/unknown"
unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz"
loader = MavenLoader(
swh_storage,
origin_url,
artifacts=[
{
"time": "2021-07-18 08:05:05.187000",
"url": unknown_artifact_url, # unknown artifact
"filename": "8sync-0.1.0.tar.gz",
"gid": "al/aldi",
"aid": "sprova4j",
"version": "0.1.0",
"base_url": "https://repo1.maven.org/maven2/",
}
],
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "uneventful"
assert actual_load_status["snapshot_id"] is not None
expected_snapshot_id = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"
assert actual_load_status["snapshot_id"] == expected_snapshot_id
stats = get_stats(swh_storage)
assert_last_visit_matches(swh_storage, origin_url, status="partial", type="maven")
assert {
"content": 0,
"directory": 0,
"origin": 1,
"origin_visit": 1,
"release": 0,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
def test_maven_loader_jar_visit_inconsistent_base_url(
swh_storage, requests_mock, data_jar_1, data_pom_1
):
"""With no prior visit, loading a jar ends up with 1 snapshot"""
with pytest.raises(ValueError, match="more than one Maven instance"):
MavenLoader(
swh_storage,
MVN_ORIGIN_URL,
artifacts=[
MVN_ARTIFACTS[0],
{**MVN_ARTIFACTS[1], "base_url": "http://maven.example/"},
],
)
def test_maven_loader_first_visit(
swh_storage, expected_contents_and_directories, expected_snapshot, expected_releases
):
"""With no prior visit, loading a jar ends up with 1 snapshot"""
loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
actual_snapshot = snapshot_get_all_branches(
swh_storage, hash_to_bytes(actual_load_status["snapshot_id"])
)
assert actual_load_status["snapshot_id"] == expected_snapshot.id.hex()
check_snapshot(expected_snapshot, swh_storage)
stats = get_stats(swh_storage)
assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven")
expected_contents, expected_directories = expected_contents_and_directories
assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
assert list(swh_storage.directory_missing(expected_directories)) == []
rel_id = actual_snapshot.branches[b"releases/0.1.0"].target
rel2_id = actual_snapshot.branches[b"releases/0.1.1"].target
releases = swh_storage.release_get([rel_id, rel2_id])
assert releases == expected_releases
assert {
"content": len(expected_contents),
"directory": len(expected_directories),
"origin": 1,
"origin_visit": 1,
"release": 2,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
def test_maven_loader_2_visits_without_change(
swh_storage, requests_mock, expected_snapshot
):
"""With no prior visit, load a maven project ends up with 1 snapshot"""
loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
assert actual_load_status["snapshot_id"] == expected_snapshot.id.hex()
assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven")
actual_load_status2 = loader.load()
assert actual_load_status2["status"] == "uneventful"
assert actual_load_status2["snapshot_id"] is not None
assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]
assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven")
# Make sure the request history contains exactly the expected sha1, jar
# and pom fetches, and that they're correct.
urls_history = [str(req.url) for req in list(requests_mock.request_history)]
assert urls_history == [
+ MVN_ARTIFACTS[0]["url"] + ".sha1",
+ MVN_ARTIFACTS[1]["url"] + ".sha1",
MVN_ARTIFACTS[0]["url"],
MVN_ARTIFACTS_POM[0],
MVN_ARTIFACTS[1]["url"],
MVN_ARTIFACTS_POM[1],
+ MVN_ARTIFACTS[0]["url"] + ".sha1",
+ MVN_ARTIFACTS[1]["url"] + ".sha1",
]
def test_maven_loader_extrinsic_metadata(
swh_storage, expected_releases, expected_json_metadata, expected_pom_metadata
):
"""With no prior visit, loading a jar ends up with 1 snapshot.
Extrinsic metadata is the pom file associated with the source jar.
"""
loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
for i, expected_release in enumerate(expected_releases):
expected_release_id = expected_release.id
release = swh_storage.release_get([expected_release_id])[0]
assert release is not None
release_swhid = CoreSWHID(
object_type=ObjectType.RELEASE, object_id=expected_release_id
)
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=release.target
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE,
url=REPO_BASE_URL,
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.maven.loader.MavenLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="maven-pom",
metadata=expected_pom_metadata[i],
origin=MVN_ORIGIN_URL,
release=release_swhid,
),
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.maven.loader.MavenLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="maven-json",
metadata=json.dumps(expected_json_metadata[i]).encode(),
origin=MVN_ORIGIN_URL,
release=release_swhid,
),
]
res = swh_storage.raw_extrinsic_metadata_get(
directory_swhid, metadata_authority
)
assert res.next_page_token is None
assert set(res.results) == set(expected_metadata)
def test_maven_loader_extrinsic_metadata_no_pom(
swh_storage, requests_mock, expected_releases, expected_json_metadata
):
"""With no prior visit, loading a jar ends up with 1 snapshot.
The pom extrinsic metadata is empty if the pom file cannot be retrieved.
"""
requests_mock.get(MVN_ARTIFACTS_POM[0], status_code=404)
loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_release_id = expected_releases[0].id
release = swh_storage.release_get([expected_release_id])[0]
assert release is not None
release_swhid = CoreSWHID(
object_type=ObjectType.RELEASE, object_id=expected_release_id
)
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=release.target
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE,
url=REPO_BASE_URL,
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.maven.loader.MavenLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="maven-pom",
metadata=b"",
origin=MVN_ORIGIN_URL,
release=release_swhid,
),
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.maven.loader.MavenLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="maven-json",
metadata=json.dumps(expected_json_metadata[0]).encode(),
origin=MVN_ORIGIN_URL,
release=release_swhid,
),
]
res = swh_storage.raw_extrinsic_metadata_get(directory_swhid, metadata_authority)
assert res.next_page_token is None
assert set(res.results) == set(expected_metadata)
def test_maven_loader_jar_extid():
"""Compute primary key should return the right identity"""
metadata = MVN_ARTIFACTS[0]
p_info = MavenPackageInfo(**metadata)
expected_manifest = "{gid} {aid} {version} {url} {time}".format(**metadata).encode()
actual_id = p_info.extid()
assert actual_id == (
"maven-jar",
0,
hashlib.sha256(expected_manifest).digest(),
)
diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py
index a44e22d..1e2dd76 100644
--- a/swh/loader/package/npm/loader.py
+++ b/swh/loader/package/npm/loader.py
@@ -1,300 +1,301 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from codecs import BOM_UTF8
import json
import logging
import os
import string
from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union
from urllib.parse import quote
import attr
import chardet
from swh.loader.package.loader import (
BasePackageInfo,
PackageLoader,
RawExtrinsicMetadataCore,
)
from swh.loader.package.utils import cached_method, get_url_body, release_name
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
ObjectType,
Person,
Release,
Sha1Git,
TimestampWithTimezone,
)
from swh.storage.interface import StorageInterface
logger = logging.getLogger(__name__)
EMPTY_PERSON = Person.from_fullname(b"")
@attr.s
class NpmPackageInfo(BasePackageInfo):
raw_info = attr.ib(type=Dict[str, Any])
package_name = attr.ib(type=str)
date = attr.ib(type=Optional[str])
shasum = attr.ib(type=str)
"""sha1 checksum"""
# we cannot rely only on $shasum, as it is technically possible for two versions
# of the same package to have the exact same tarball.
# But the release data (message and date) are extrinsic to the content of the
# package, so they differ between versions.
# So we need every attribute used to build the release object to be part of the
# manifest.
MANIFEST_FORMAT = string.Template(
"date $date\nname $package_name\nshasum $shasum\nurl $url\nversion $version"
)
EXTID_TYPE = "npm-manifest-sha256"
EXTID_VERSION = 0
@classmethod
def from_metadata(
cls, project_metadata: Dict[str, Any], version: str
) -> "NpmPackageInfo":
package_metadata = project_metadata["versions"][version]
url = package_metadata["dist"]["tarball"]
assert package_metadata["name"] == project_metadata["name"]
# No date available in intrinsic metadata: retrieve it from the API
# metadata, using the version number that the API claims this package
# has.
extrinsic_version = package_metadata["version"]
if "time" in project_metadata:
date = project_metadata["time"][extrinsic_version]
elif "mtime" in package_metadata:
date = package_metadata["mtime"]
else:
date = None
return cls(
package_name=package_metadata["name"],
url=url,
filename=os.path.basename(url),
date=date,
shasum=package_metadata["dist"]["shasum"],
version=extrinsic_version,
raw_info=package_metadata,
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
format="replicate-npm-package-json",
metadata=json.dumps(package_metadata).encode(),
)
],
+ checksums={"sha1": package_metadata["dist"]["shasum"]},
)
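# For reference, the "dist" object in the replicate.npmjs.com version metadata
# looks like this (values illustrative):
#     {"tarball": "https://registry.npmjs.org/foo/-/foo-1.0.0.tgz",
#      "shasum": "<40-char hex sha1 of the tarball>"}
# so the sha1 recorded in `checksums` lets download() verify the tarball.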
class NpmLoader(PackageLoader[NpmPackageInfo]):
"""Load npm origin's artifact releases into swh archive."""
visit_type = "npm"
def __init__(self, storage: StorageInterface, url: str, **kwargs: Any):
"""Constructor
Args
str: origin url (e.g. https://www.npmjs.com/package/<package-name>)
"""
super().__init__(storage=storage, url=url, **kwargs)
self.package_name = url.split("https://www.npmjs.com/package/")[1]
safe_name = quote(self.package_name, safe="")
self.provider_url = f"https://replicate.npmjs.com/{safe_name}/"
self._info: Dict[str, Any] = {}
self._versions = None
@cached_method
def _raw_info(self) -> bytes:
return get_url_body(self.provider_url)
@cached_method
def info(self) -> Dict:
"""Return the project metadata information (fetched from npm registry)"""
return json.loads(self._raw_info())
def get_versions(self) -> Sequence[str]:
return sorted(list(self.info()["versions"].keys()))
def get_default_version(self) -> str:
return self.info()["dist-tags"].get("latest", "")
def get_metadata_authority(self):
return MetadataAuthority(
type=MetadataAuthorityType.FORGE,
url="https://npmjs.com/",
metadata={},
)
def get_package_info(self, version: str) -> Iterator[Tuple[str, NpmPackageInfo]]:
p_info = NpmPackageInfo.from_metadata(
project_metadata=self.info(), version=version
)
yield release_name(version), p_info
def build_release(
self, p_info: NpmPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
# Metadata from NPM is not intrinsic to tarballs.
# This means two package versions can have the same tarball, but different
# metadata. To avoid mixing up releases, every field used to build the
# release object must be part of NpmPackageInfo.MANIFEST_FORMAT.
i_metadata = extract_intrinsic_metadata(uncompressed_path)
if not i_metadata:
return None
author = extract_npm_package_author(i_metadata)
assert self.package_name == p_info.package_name
msg = (
f"Synthetic release for NPM source package {p_info.package_name} "
f"version {p_info.version}\n"
)
if p_info.date is None:
url = p_info.url
artifact_name = os.path.basename(url)
raise ValueError(
"Origin %s: Cannot determine upload time for artifact %s."
% (p_info.url, artifact_name)
)
date = TimestampWithTimezone.from_iso8601(p_info.date)
# FIXME: this is to remain bug-compatible with earlier versions:
date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0))
r = Release(
name=p_info.version.encode(),
message=msg.encode(),
author=author,
date=date,
target=directory,
target_type=ObjectType.DIRECTORY,
synthetic=True,
)
return r
def _author_str(author_data: Union[Dict, List, str]) -> str:
"""Parse author from package.json author fields"""
if isinstance(author_data, dict):
author_str = ""
name = author_data.get("name")
if name is not None:
if isinstance(name, str):
author_str += name
elif isinstance(name, list):
author_str += _author_str(name[0]) if len(name) > 0 else ""
email = author_data.get("email")
if email is not None:
author_str += f" <{email}>"
result = author_str
elif isinstance(author_data, list):
result = _author_str(author_data[0]) if len(author_data) > 0 else ""
else:
result = author_data
return result
def extract_npm_package_author(package_json: Dict[str, Any]) -> Person:
"""
Extract package author from a ``package.json`` file content and
return it in swh format.
Args:
package_json: Dict holding the content of parsed
``package.json`` file
Returns:
Person
"""
for author_key in ("author", "authors"):
if author_key in package_json:
author_data = package_json[author_key]
if author_data is None:
return EMPTY_PERSON
author_str = _author_str(author_data)
return Person.from_fullname(author_str.encode())
return EMPTY_PERSON
def _lstrip_bom(s, bom=BOM_UTF8):
if s.startswith(bom):
return s[len(bom) :]
else:
return s
def load_json(json_bytes):
"""
Try to load JSON from bytes and return a dictionary.
First try to decode from utf-8. If the decoding failed,
try to detect the encoding and decode again with replace
error handling.
If JSON is malformed, an empty dictionary will be returned.
Args:
json_bytes (bytes): binary content of a JSON file
Returns:
dict: JSON data loaded in a dictionary
"""
json_data = {}
try:
json_str = _lstrip_bom(json_bytes).decode("utf-8")
except UnicodeDecodeError:
encoding = chardet.detect(json_bytes)["encoding"]
if encoding:
json_str = json_bytes.decode(encoding, "replace")
try:
json_data = json.loads(json_str)
except json.decoder.JSONDecodeError:
pass
return json_data
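# Illustrative behaviour of load_json (inputs hypothetical):
#     load_json(b'\xef\xbb\xbf{"name": "foo"}')  -> {"name": "foo"}  (BOM stripped)
#     load_json(b"not json")                     -> {}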
def extract_intrinsic_metadata(dir_path: str) -> Dict:
"""Given an uncompressed path holding the pkginfo file, returns a
pkginfo parsed structure as a dict.
The release artifact contains at their root one folder. For example:
$ tar tvf zprint-0.0.6.tar.gz
drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
...
Args:
dir_path (str): Path to the uncompressed directory
representing a release artifact from npm.
Returns:
the pkginfo parsed structure as a dict if any or None if
none was present.
"""
# Retrieve the root folder of the archive
if not os.path.exists(dir_path):
return {}
lst = os.listdir(dir_path)
if len(lst) == 0:
return {}
project_dirname = lst[0]
package_json_path = os.path.join(dir_path, project_dirname, "package.json")
if not os.path.exists(package_json_path):
return {}
with open(package_json_path, "rb") as package_json_file:
package_json_bytes = package_json_file.read()
return load_json(package_json_bytes)
diff --git a/swh/loader/package/opam/loader.py b/swh/loader/package/opam/loader.py
index d192695..0c39bf0 100644
--- a/swh/loader/package/opam/loader.py
+++ b/swh/loader/package/opam/loader.py
@@ -1,265 +1,273 @@
# Copyright (C) 2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import io
import os
from subprocess import PIPE, Popen, call
from typing import Any, Iterator, List, Optional, Tuple
import attr
from swh.loader.package.loader import (
BasePackageInfo,
PackageLoader,
RawExtrinsicMetadataCore,
)
from swh.loader.package.utils import cached_method
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
ObjectType,
Person,
Release,
Sha1Git,
)
from swh.storage.interface import StorageInterface
@attr.s
class OpamPackageInfo(BasePackageInfo):
author = attr.ib(type=Person)
committer = attr.ib(type=Person)
def opam_read(
cmd: List[str], init_error_msg_if_any: Optional[str] = None
) -> Optional[str]:
"""This executes an opam command and returns the first line of the output.
Args:
cmd: Opam command to execute as a list of string
init_error_msg_if_any: Error message to raise in case a problem occurs
during initialization
Raises:
ValueError with the init_error_msg_if_any content in case stdout is not
consumable and the variable is provided with a non-empty value.
Returns:
the first line of the executed command output
"""
with Popen(cmd, stdout=PIPE) as proc:
if proc.stdout is not None:
for line in io.TextIOWrapper(proc.stdout):
# we only care about the first line of output (mostly blank-separated
# values; callers will deal with parsing the line)
return line
elif init_error_msg_if_any:
raise ValueError(init_error_msg_if_any)
return None
class OpamLoader(PackageLoader[OpamPackageInfo]):
"""Load all versions of a given package in a given opam repository.
The state of the opam repository is stored in a directory called an opam root. This
folder is a prerequisite for the opam binary to list information on packages.
When initialize_opam_root is False (the default for production workers), the opam
root must already have been configured outside of the loading process. If not, an
error is raised, failing the loading.
For standalone workers, initialize_opam_root must be set to True, so the ingestion
can take care of installing the required opam root properly.
The rest of the ingestion uses the opam binary to list the versions of the given
package. Then, for each version, the loader uses the opam binary to find the tarball
url to fetch and ingest.
"""
visit_type = "opam"
def __init__(
self,
storage: StorageInterface,
url: str,
opam_root: str,
opam_instance: str,
opam_url: str,
opam_package: str,
initialize_opam_root: bool = False,
**kwargs: Any,
):
super().__init__(storage=storage, url=url, **kwargs)
self.opam_root = opam_root
self.opam_instance = opam_instance
self.opam_url = opam_url
self.opam_package = opam_package
self.initialize_opam_root = initialize_opam_root
def get_package_dir(self) -> str:
return (
f"{self.opam_root}/repo/{self.opam_instance}/packages/{self.opam_package}"
)
def get_package_name(self, version: str) -> str:
return f"{self.opam_package}.{version}"
def get_package_file(self, version: str) -> str:
return f"{self.get_package_dir()}/{self.get_package_name(version)}/opam"
def get_metadata_authority(self):
return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.opam_url)
@cached_method
def _compute_versions(self) -> List[str]:
"""Compute the versions using opam internals
Raises:
ValueError in case the lister is not able to determine the list of versions
Returns:
The list of versions for the package
"""
# TODO: use `opam show` instead of this workaround when it supports the `--repo`
# flag
package_dir = self.get_package_dir()
if not os.path.exists(package_dir):
raise ValueError(
f"can't get versions for package {self.opam_package} "
f"(at url {self.origin.url})."
)
versions = [
".".join(version.split(".")[1:]) for version in os.listdir(package_dir)
]
if not versions:
raise ValueError(
f"can't get versions for package {self.opam_package} "
f"(at url {self.origin.url})"
)
versions.sort()
return versions
def get_versions(self) -> List[str]:
"""First initialize the opam root directory if needed then start listing the
package versions.
Raises:
ValueError in case the lister is not able to determine the list of
versions or if the opam root directory is invalid.
"""
if self.initialize_opam_root:
# for standalone loaders (e.g. docker), the loader must initialize the opam root
# folder
call(
[
"opam",
"init",
"--reinit",
"--bare",
"--no-setup",
"--root",
self.opam_root,
self.opam_instance,
self.opam_url,
]
)
else:
# for standard/production loaders, no need to initialize the opam root
# folder. It must be present though so check for it, if not present, raise
if not os.path.isfile(os.path.join(self.opam_root, "config")):
# so if not correctly setup, raise immediately
raise ValueError("Invalid opam root")
return self._compute_versions()
def get_default_version(self) -> str:
"""Return the most recent version of the package as default."""
return self._compute_versions()[-1]
def _opam_show_args(self, version: str):
package_file = self.get_package_file(version)
return [
"opam",
"show",
"--color",
"never",
"--safe",
"--normalise",
"--root",
self.opam_root,
"--file",
package_file,
]
def get_enclosed_single_line_field(self, field, version) -> Optional[str]:
result = opam_read(self._opam_show_args(version) + ["--field", field])
# Sanitize the result if any (remove trailing \n and enclosing ")
return result.strip().strip('"') if result else None
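# Example of the full command run above (paths illustrative, matching the
# layout produced by get_package_file):
#     opam show --color never --safe --normalise --root /srv/opam-root \
#         --file /srv/opam-root/repo/loadertest/packages/ocb/ocb.0.1/opam \
#         --field url.src: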
def get_package_info(self, version: str) -> Iterator[Tuple[str, OpamPackageInfo]]:
url = self.get_enclosed_single_line_field("url.src:", version)
if url is None:
raise ValueError(
f"can't get field url.src: for version {version} of package"
f" {self.opam_package} (at url {self.origin.url}) from `opam show`"
)
+ checksums_str = self.get_enclosed_single_line_field("url.checksum:", version)
+ checksums = {}
+ if checksums_str:
+ for c in checksums_str.strip("[]").split(" "):
+ algo, hash = c.strip('"').split("=")
+ checksums[algo] = hash
+
authors_field = self.get_enclosed_single_line_field("authors:", version)
fullname = b"" if authors_field is None else str.encode(authors_field)
author = Person.from_fullname(fullname)
maintainer_field = self.get_enclosed_single_line_field("maintainer:", version)
fullname = b"" if maintainer_field is None else str.encode(maintainer_field)
committer = Person.from_fullname(fullname)
with Popen(self._opam_show_args(version) + ["--raw"], stdout=PIPE) as proc:
assert proc.stdout is not None
metadata = proc.stdout.read()
yield self.get_package_name(version), OpamPackageInfo(
url=url,
filename=None,
author=author,
committer=committer,
version=version,
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
metadata=metadata,
format="opam-package-definition",
)
],
+ checksums=checksums,
)
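# Example (based on the OCB_METADATA test data further below): an opam "url"
# section whose checksum field reads
#     checksum: ["sha256=aa27..." "sha512=1260..."]
# is presumably emitted on a single line by `opam show --normalise`, and the
# parsing of url.checksum: above turns it into
#     {"sha256": "aa27...", "sha512": "1260..."}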
def build_release(
self,
p_info: OpamPackageInfo,
uncompressed_path: str,
directory: Sha1Git,
) -> Optional[Release]:
msg = (
f"Synthetic release for OPAM source package {self.opam_package} "
f"version {p_info.version}\n"
)
return Release(
name=p_info.version.encode(),
author=p_info.author,
message=msg.encode(),
date=None,
target=directory,
target_type=ObjectType.DIRECTORY,
synthetic=True,
)
diff --git a/swh/loader/package/opam/tests/test_opam.py b/swh/loader/package/opam/tests/test_opam.py
index 1ab1cdc..1b052b5 100644
--- a/swh/loader/package/opam/tests/test_opam.py
+++ b/swh/loader/package/opam/tests/test_opam.py
@@ -1,414 +1,421 @@
# Copyright (C) 2019-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from os.path import exists
import shutil
import pytest
from swh.loader.package import __version__
from swh.loader.package.loader import RawExtrinsicMetadataCore
from swh.loader.package.opam.loader import OpamLoader, OpamPackageInfo
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
Person,
RawExtrinsicMetadata,
Release,
Snapshot,
SnapshotBranch,
TargetType,
)
from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher
from swh.model.model import ObjectType as ModelObjectType
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.interface import PagedResult
OCB_METADATA = b"""\
opam-version: "2.0"
name: "ocb"
version: "0.1"
synopsis: "SVG badge generator"
description:
"An OCaml library for SVG badge generation. There\'s also a command-line tool provided."
maintainer: "OCamlPro <contact@ocamlpro.com>"
authors: "OCamlPro <contact@ocamlpro.com>"
license: "ISC"
homepage: "https://ocamlpro.github.io/ocb/"
doc: "https://ocamlpro.github.io/ocb/api/"
bug-reports: "https://github.com/OCamlPro/ocb/issues"
depends: [
"ocaml" {>= "4.05"}
"dune" {>= "2.0"}
"odoc" {with-doc}
]
build: [
["dune" "subst"] {dev}
[
"dune"
"build"
"-p"
name
"-j"
jobs
"@install"
"@runtest" {with-test}
"@doc" {with-doc}
]
]
dev-repo: "git+https://github.com/OCamlPro/ocb.git"
url {
src: "https://github.com/OCamlPro/ocb/archive/0.1.tar.gz"
checksum: [
"sha256=aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46"
"sha512=1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b"
]
}
""" # noqa
@pytest.fixture
def fake_opam_root(mocker, tmpdir, datadir):
"""Fixture to initialize the actual opam in test context. It mocks the actual opam init
calls and installs a fake opam root out of the one present in datadir.
"""
# inhibits the real `subprocess.call` which prepares the required internal opam
# state
module_name = "swh.loader.package.opam.loader"
mock_init = mocker.patch(f"{module_name}.call", return_value=None)
# Installs the fake opam root for the tests to use
fake_opam_root_src = f"{datadir}/fake_opam_repo"
fake_opam_root_dst = f"{tmpdir}/opam"
# older Python versions do not support dirs_exist_ok...
# TypeError: copytree() got an unexpected keyword argument 'dirs_exist_ok'
# see: https://docs.python.org/3.7/library/shutil.html
if exists(fake_opam_root_dst):
shutil.rmtree(fake_opam_root_dst)
shutil.copytree(fake_opam_root_src, fake_opam_root_dst)
yield fake_opam_root_dst
# loaders are initialized with `initialize_opam_root=True` so this should be called
assert mock_init.called, "This should be called when loaders use this fixture"
def test_opam_loader_no_opam_repository_fails(swh_storage, tmpdir, datadir):
"""Running opam loader without a prepared opam repository fails"""
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = tmpdir
opam_instance = "loadertest"
opam_package = "agrid"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=False, # The opam directory must be present and no init...
)
# The loader does not initialize the opam root directory, so, as the opam root does
# not exist, the loading fails. That is the expected setup for production workers
# (whose opam_root maintenance will be externally managed).
actual_load_status = loader.load()
assert actual_load_status == {"status": "failed"}
def test_opam_loader_one_version(
tmpdir, requests_mock_datadir, fake_opam_root, datadir, swh_storage
):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = fake_opam_root
opam_instance = "loadertest"
opam_package = "agrid"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True, # go through the initialization while mocking it
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("e1159446b00745ba4daa7ee26d74fbd81ecc081c")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id
)
release_id = hash_to_bytes("d4d8d3df4f34609a3eeabd48aea49002c5f54f41")
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target=b"agrid.0.1",
target_type=TargetType.ALIAS,
),
b"agrid.0.1": SnapshotBranch(
target=release_id,
target_type=TargetType.RELEASE,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
assert swh_storage.release_get([release_id])[0] == Release(
name=b"0.1",
message=b"Synthetic release for OPAM source package agrid version 0.1\n",
target=hash_to_bytes("00412ee5bc601deb462e55addd1004715116785e"),
target_type=ModelObjectType.DIRECTORY,
synthetic=True,
author=Person.from_fullname(b"OCamlPro <contact@ocamlpro.com>"),
date=None,
id=release_id,
)
stats = get_stats(swh_storage)
assert {
"content": 18,
"directory": 8,
"origin": 1,
"origin_visit": 1,
"release": 1,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
def test_opam_loader_many_version(
tmpdir, requests_mock_datadir, fake_opam_root, datadir, swh_storage
):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = fake_opam_root
opam_instance = "loadertest"
opam_package = "directories"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("f498f7f3b0edbce5cf5834b487a4f8360f6a6a43")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
expected_snapshot = Snapshot(
id=expected_snapshot_id,
branches={
b"HEAD": SnapshotBranch(
target=b"directories.0.3",
target_type=TargetType.ALIAS,
),
b"directories.0.1": SnapshotBranch(
target=hash_to_bytes("1c88d466b3d57a619e296999322d096fa37bb1c2"),
target_type=TargetType.RELEASE,
),
b"directories.0.2": SnapshotBranch(
target=hash_to_bytes("d6f30684039ad485511a138e2ae504ff67a13075"),
target_type=TargetType.RELEASE,
),
b"directories.0.3": SnapshotBranch(
target=hash_to_bytes("6cf92c0ff052074e69ac18809a9c8198bcc2e746"),
target_type=TargetType.RELEASE,
),
},
)
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id
)
check_snapshot(expected_snapshot, swh_storage)
def test_opam_release(
tmpdir, requests_mock_datadir, fake_opam_root, swh_storage, datadir
):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = fake_opam_root
opam_instance = "loadertest"
opam_package = "ocb"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
expected_snapshot_id = hash_to_bytes("8ba39f050243a72ca667c5587a87413240cbaa47")
assert actual_load_status == {
"status": "eventful",
"snapshot_id": expected_snapshot_id.hex(),
}
info_iter = loader.get_package_info("0.1")
branch_name, package_info = next(info_iter)
expected_branch_name = "ocb.0.1"
expected_package_info = OpamPackageInfo(
url="https://github.com/OCamlPro/ocb/archive/0.1.tar.gz",
filename=None,
author=Person.from_fullname(b"OCamlPro <contact@ocamlpro.com>"),
committer=Person.from_fullname(b"OCamlPro <contact@ocamlpro.com>"),
version="0.1",
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
metadata=OCB_METADATA,
format="opam-package-definition",
)
],
+ checksums={
+ "sha256": "aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46",
+ "sha512": (
+ "1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f"
+ "37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b"
+ ),
+ },
)
assert branch_name == expected_branch_name
assert package_info == expected_package_info
release_id = hash_to_bytes("c231e541eb29c712635ada394b04127ac69e9fb0")
expected_snapshot = Snapshot(
id=hash_to_bytes(actual_load_status["snapshot_id"]),
branches={
b"HEAD": SnapshotBranch(
target=b"ocb.0.1",
target_type=TargetType.ALIAS,
),
b"ocb.0.1": SnapshotBranch(
target=release_id,
target_type=TargetType.RELEASE,
),
},
)
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot.id
)
check_snapshot(expected_snapshot, swh_storage)
release = swh_storage.release_get([release_id])[0]
assert release is not None
assert release.author == expected_package_info.author
def test_opam_metadata(
tmpdir, requests_mock_datadir, fake_opam_root, swh_storage, datadir
):
opam_url = f"file://{datadir}/fake_opam_repo"
opam_root = fake_opam_root
opam_instance = "loadertest"
opam_package = "ocb"
url = f"opam+{opam_url}/packages/{opam_package}"
loader = OpamLoader(
swh_storage,
url,
opam_root,
opam_instance,
opam_url,
opam_package,
initialize_opam_root=True,
)
actual_load_status = loader.load()
assert actual_load_status["status"] == "eventful"
expected_release_id = hash_to_bytes("c231e541eb29c712635ada394b04127ac69e9fb0")
expected_snapshot = Snapshot(
id=hash_to_bytes(actual_load_status["snapshot_id"]),
branches={
b"HEAD": SnapshotBranch(
target=b"ocb.0.1",
target_type=TargetType.ALIAS,
),
b"ocb.0.1": SnapshotBranch(
target=expected_release_id,
target_type=TargetType.RELEASE,
),
},
)
assert_last_visit_matches(
swh_storage, url, status="full", type="opam", snapshot=expected_snapshot.id
)
check_snapshot(expected_snapshot, swh_storage)
release = swh_storage.release_get([expected_release_id])[0]
assert release is not None
release_swhid = CoreSWHID(
object_type=ObjectType.RELEASE, object_id=expected_release_id
)
directory_swhid = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=release.target
)
metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE,
url=opam_url,
)
expected_metadata = [
RawExtrinsicMetadata(
target=directory_swhid,
authority=metadata_authority,
fetcher=MetadataFetcher(
name="swh.loader.package.opam.loader.OpamLoader",
version=__version__,
),
discovery_date=loader.visit_date,
format="opam-package-definition",
metadata=OCB_METADATA,
origin=url,
release=release_swhid,
)
]
assert swh_storage.raw_extrinsic_metadata_get(
directory_swhid,
metadata_authority,
) == PagedResult(
next_page_token=None,
results=expected_metadata,
)
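Note on the new `checksums` field exercised by test_opam_release above: the two "algo=digest" entries in the `url { checksum: [...] }` block of OCB_METADATA are what end up in the dict passed as `checksums=`. A minimal, self-contained sketch of that mapping (an illustration only, not the loader's own parsing code):

# Turn opam-style "algo=digest" entries into the dict shape used by checksums=.
def checksums_from_opam_entries(entries):
    result = {}
    for entry in entries:
        algo, _, digest = entry.partition("=")
        result[algo] = digest
    return result

assert checksums_from_opam_entries(["sha256=aa27...", "sha512=1260..."]) == {
    "sha256": "aa27...",
    "sha512": "1260...",
}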
diff --git a/swh/loader/package/pubdev/loader.py b/swh/loader/package/pubdev/loader.py
index 608457a..d78fe9b 100644
--- a/swh/loader/package/pubdev/loader.py
+++ b/swh/loader/package/pubdev/loader.py
@@ -1,194 +1,195 @@
# Copyright (C) 2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
from pathlib import Path
from typing import Any, Dict, Iterator, Optional, Sequence, Tuple
import attr
from packaging.version import parse as parse_version
import yaml
from swh.loader.package.loader import BasePackageInfo, PackageLoader
from swh.loader.package.utils import (
EMPTY_AUTHOR,
Person,
cached_method,
get_url_body,
release_name,
)
from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone
from swh.storage.interface import StorageInterface
@attr.s
class PubDevPackageInfo(BasePackageInfo):
name = attr.ib(type=str)
"""Name of the package"""
version = attr.ib(type=str)
"""Current version"""
last_modified = attr.ib(type=str)
"""Last modified date as release date"""
author = attr.ib(type=Person)
"""Author"""
description = attr.ib(type=str)
"""Description"""
def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]:
"""Extract intrinsic metadata from pubspec.yaml file at dir_path.
Each pub.dev package version has a pubspec.yaml file at the root of the archive.
See https://dart.dev/tools/pub/pubspec for pubspec specifications.
Args:
dir_path: A directory on disk where a pubspec.yaml must be present
Returns:
A dict, as returned by the yaml parser
"""
pubspec_path = dir_path / "pubspec.yaml"
return yaml.safe_load(pubspec_path.read_text())
class PubDevLoader(PackageLoader[PubDevPackageInfo]):
visit_type = "pubdev"
PUBDEV_BASE_URL = "https://pub.dev/"
def __init__(
self,
storage: StorageInterface,
url: str,
**kwargs,
):
super().__init__(storage=storage, url=url, **kwargs)
self.url = url
assert url.startswith(self.PUBDEV_BASE_URL)
self.package_info_url = url.replace(
self.PUBDEV_BASE_URL, f"{self.PUBDEV_BASE_URL}api/"
)
def _raw_info(self) -> bytes:
return get_url_body(self.package_info_url)
@cached_method
def info(self) -> Dict:
"""Return the project metadata information (fetched from pub.dev registry)"""
# Use strict=False to correctly handle cases where \n is present in a string
info = json.loads(self._raw_info(), strict=False)
# Arrange versions list as a new dict with `version` as key
versions = {v["version"]: v for v in info["versions"]}
info["versions"] = versions
return info
def get_versions(self) -> Sequence[str]:
"""Get all released versions of a PubDev package
Returns:
A sequence of versions
Example::
["0.1.1", "0.10.2"]
"""
versions = list(self.info()["versions"].keys())
versions.sort(key=parse_version)
return versions
def get_default_version(self) -> str:
"""Get the newest release version of a PubDev package
Returns:
A string representing a version
Example::
"0.1.2"
"""
latest = self.info()["latest"]
return latest["version"]
def get_package_info(self, version: str) -> Iterator[Tuple[str, PubDevPackageInfo]]:
"""Get release name and package information from version
Package info comes from extrinsic metadata (from self.info())
Args:
version: Package version (e.g: "0.1.0")
Returns:
Iterator of tuple (release_name, p_info)
"""
v = self.info()["versions"][version]
assert v["version"] == version
url = v["archive_url"]
name = v["pubspec"]["name"]
filename = f"{name}-{version}.tar.gz"
last_modified = v["published"]
if "authors" in v["pubspec"]:
# TODO: here we have a list of authors, see T3887
author = Person.from_fullname(v["pubspec"]["authors"][0].encode())
elif "author" in v["pubspec"] and v["pubspec"]["author"] is not None:
author = Person.from_fullname(v["pubspec"]["author"].encode())
else:
author = EMPTY_AUTHOR
description = v["pubspec"]["description"]
p_info = PubDevPackageInfo(
name=name,
filename=filename,
url=url,
version=version,
last_modified=last_modified,
author=author,
description=description,
+ checksums={"sha256": v["archive_sha256"]},
)
yield release_name(version), p_info
def build_release(
self, p_info: PubDevPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
# Extract intrinsic metadata from uncompressed_path/pubspec.yaml
intrinsic_metadata = extract_intrinsic_metadata(Path(uncompressed_path))
name: str = intrinsic_metadata["name"]
version: str = intrinsic_metadata["version"]
assert version == p_info.version
# author from intrinsic_metadata should not take precedence over the one
# returned by the api, see https://dart.dev/tools/pub/pubspec#authorauthors
author: Person = p_info.author
if "description" in intrinsic_metadata and intrinsic_metadata["description"]:
description = intrinsic_metadata["description"]
else:
description = p_info.description
message = (
f"Synthetic release for pub.dev source package {name} "
f"version {version}\n\n"
f"{description}\n"
)
return Release(
name=version.encode(),
author=author,
date=TimestampWithTimezone.from_iso8601(p_info.last_modified),
message=message.encode(),
target_type=ObjectType.DIRECTORY,
target=directory,
synthetic=True,
)
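As a quick illustration of extract_intrinsic_metadata() above (assuming the loader module is importable, as in the test environment), it simply returns the parsed pubspec.yaml:

from pathlib import Path
import tempfile

from swh.loader.package.pubdev.loader import extract_intrinsic_metadata

with tempfile.TemporaryDirectory() as tmp:
    # a minimal, purely illustrative pubspec.yaml
    Path(tmp, "pubspec.yaml").write_text(
        "name: pdf\nversion: 3.8.2\ndescription: A pdf producer for Dart.\n"
    )
    metadata = extract_intrinsic_metadata(Path(tmp))

assert metadata["name"] == "pdf"
assert metadata["version"] == "3.8.2"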
diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker
index 824af41..1c1d1b9 100644
--- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker
+++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker
@@ -1,29 +1,31 @@
{
"name": "Autolinker",
"latest": {
"version": "0.1.1",
"pubspec": {
"version": "0.1.1",
"homepage": "https://github.com/hackcave",
"description": "Port of Autolinker.js to dart",
"name": "Autolinker",
"author": "hackcave <hackers@hackcave.org>"
},
"archive_url": "https://pub.dartlang.org/packages/Autolinker/versions/0.1.1.tar.gz",
+ "archive_sha256": "ca6149c2bb566b07beaf731930ade8b77fad86055b3f37b6eb2f17aca2fbc1b1",
"published": "2014-12-24T22:34:02.534090Z"
},
"versions": [
{
"version": "0.1.1",
"pubspec": {
"version": "0.1.1",
"homepage": "https://github.com/hackcave",
"description": "Port of Autolinker.js to dart",
"name": "Autolinker",
"author": "hackcave <hackers@hackcave.org>"
},
"archive_url": "https://pub.dartlang.org/packages/Autolinker/versions/0.1.1.tar.gz",
+ "archive_sha256": "ca6149c2bb566b07beaf731930ade8b77fad86055b3f37b6eb2f17aca2fbc1b1",
"published": "2014-12-24T22:34:02.534090Z"
}
]
-}
+}
\ No newline at end of file
diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication
index 25ca01a..f1f6a45 100644
--- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication
+++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication
@@ -1,77 +1,77 @@
{
"name": "authentication",
"latest": {
"version": "0.0.1",
"pubspec": {
"name": "authentication",
"description": "Persistent user authentication for Flutter with optional backend API integration.",
"version": "0.0.1",
"author": null,
"homepage": null,
"environment": {
"sdk": ">=2.7.0 <3.0.0",
"flutter": ">=1.17.0 <2.0.0"
},
"dependencies": {
"flutter": {
"sdk": "flutter"
}
},
"dev_dependencies": {
"flutter_test": {
"sdk": "flutter"
}
},
"flutter": {
"plugin": {
"platforms": {
"some_platform": {
"pluginClass": "somePluginClass"
}
}
}
}
},
"archive_url": "https://pub.dartlang.org/packages/authentication/versions/0.0.1.tar.gz",
- "archive_sha256": "0179334b346cb67e4e6e3c905e5cc5c8e488a45ebd99fd2be3a7e0476d620d99",
+ "archive_sha256": "bb8296bce47a5fe587b8d902ff87490593e8b86f736b38c6f9259c958b1f9b21",
"published": "2020-08-13T04:53:34.134687Z"
},
"versions": [
{
"version": "0.0.1",
"pubspec": {
"name": "authentication",
"description": "Persistent user authentication for Flutter with optional backend API integration.",
"version": "0.0.1",
"author": null,
"homepage": null,
"environment": {
"sdk": ">=2.7.0 <3.0.0",
"flutter": ">=1.17.0 <2.0.0"
},
"dependencies": {
"flutter": {
"sdk": "flutter"
}
},
"dev_dependencies": {
"flutter_test": {
"sdk": "flutter"
}
},
"flutter": {
"plugin": {
"platforms": {
"some_platform": {
"pluginClass": "somePluginClass"
}
}
}
}
},
"archive_url": "https://pub.dartlang.org/packages/authentication/versions/0.0.1.tar.gz",
- "archive_sha256": "0179334b346cb67e4e6e3c905e5cc5c8e488a45ebd99fd2be3a7e0476d620d99",
+ "archive_sha256": "bb8296bce47a5fe587b8d902ff87490593e8b86f736b38c6f9259c958b1f9b21",
"published": "2020-08-13T04:53:34.134687Z"
}
]
-}
+}
\ No newline at end of file
diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier
index dacdd55..38cc16a 100644
--- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier
+++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier
@@ -1,55 +1,55 @@
{
"name": "bezier",
"latest": {
"version": "1.1.5",
"pubspec": {
"name": "bezier",
"version": "1.1.5",
"authors": [
"Aaron Barrett <aaron@aaronbarrett.com>",
"Isaac Barrett <ikebart9999@gmail.com>"
],
"description": "A 2D Bézier curve math library. Based heavily on the work of @TheRealPomax <pomax.github.io/bezierjs>. Live examples can be found at <www.dartographer.com/bezier>.",
"homepage": "https://github.com/aab29/bezier.dart",
"environment": {
"sdk": ">=2.0.0 <3.0.0"
},
"dependencies": {
"vector_math": "^2.0.0"
},
"dev_dependencies": {
"test": "^1.0.0"
}
},
"archive_url": "https://pub.dartlang.org/packages/bezier/versions/1.1.5.tar.gz",
- "archive_sha256": "cc5da2fa927b5d347550f78d456cd984b7df78a7f0405119cdab12111e2f9ee8",
+ "archive_sha256": "d8f2a8f75732e7f7c3c0295801c95970301536eee205d4532cb3bc1d720cb1bf",
"published": "2019-12-22T03:17:30.805225Z"
},
"versions": [
{
"version": "1.1.5",
"pubspec": {
"name": "bezier",
"version": "1.1.5",
"authors": [
"Aaron Barrett <aaron@aaronbarrett.com>",
"Isaac Barrett <ikebart9999@gmail.com>"
],
"description": "A 2D Bézier curve math library. Based heavily on the work of @TheRealPomax <pomax.github.io/bezierjs>.Live examples can be found at <www.dartographer.com/bezier>.",
"homepage": "https://github.com/aab29/bezier.dart",
"environment": {
"sdk": ">=2.0.0 <3.0.0"
},
"dependencies": {
"vector_math": "^2.0.0"
},
"dev_dependencies": {
"test": "^1.0.0"
}
},
"archive_url": "https://pub.dartlang.org/packages/bezier/versions/1.1.5.tar.gz",
- "archive_sha256": "cc5da2fa927b5d347550f78d456cd984b7df78a7f0405119cdab12111e2f9ee8",
+ "archive_sha256": "d8f2a8f75732e7f7c3c0295801c95970301536eee205d4532cb3bc1d720cb1bf",
"published": "2019-12-22T03:17:30.805225Z"
}
]
-}
+}
\ No newline at end of file
diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf
index 1541536..2afc490 100644
--- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf
+++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf
@@ -1,88 +1,91 @@
{
"name": "pdf",
"latest": {
"version": "3.8.2",
"pubspec": {
"name": "pdf",
"description": "A pdf producer for Dart. It can create pdf files for both web or flutter.",
"homepage": "https://github.com/DavBfr/dart_pdf/tree/master/pdf",
"repository": "https://github.com/DavBfr/dart_pdf",
"issue_tracker": "https://github.com/DavBfr/dart_pdf/issues",
"version": "3.8.2",
"environment": {
"sdk": ">=2.12.0 <3.0.0"
},
"dependencies": {
"archive": "^3.1.0",
"barcode": ">=2.2.0 <3.0.0",
"crypto": "^3.0.0",
"image": ">=3.0.1 <4.0.0",
"meta": ">=1.3.0 <2.0.0",
"path_parsing": ">=0.2.0 <2.0.0",
"vector_math": "^2.1.0",
"xml": ">=5.1.0 <7.0.0"
},
"dev_dependencies": {
"flutter_lints": "^1.0.4",
"test": ">=1.16.0 <2.0.0"
}
},
"archive_url": "https://pub.dartlang.org/packages/pdf/versions/3.8.2.tar.gz",
+ "archive_sha256": "b69a47f10620b5639bfcf51cd9acd1083e7e856dfc4a23f49df89445d1d27692",
"published": "2022-07-25T11:38:25.983876Z"
},
"versions": [
{
"version": "1.0.0",
"pubspec": {
"version": "1.0.0",
"name": "pdf",
"dependencies": {
"ttf_parser": "^1.0.0",
"vector_math": "^2.0.7",
"meta": "^1.1.5"
},
"author": "David PHAM-VAN <dev.nfet.net@gmail.com>",
"description": "A pdf producer for Dart",
"homepage": "https://github.com/davbfr/dart_pdf",
"environment": {
"sdk": ">=1.8.0 <2.0.0"
},
"dev_dependencies": {
"test": "any"
}
},
"archive_url": "https://pub.dartlang.org/packages/pdf/versions/1.0.0.tar.gz",
+ "archive_sha256": "54f1b1c4d519c3bad61ca63b53b46e7e9eabc3b7fb9a4707525520215152e4e1",
"published": "2018-07-16T21:12:28.894137Z"
},
{
"version": "3.8.2",
"pubspec": {
"name": "pdf",
"description": "A pdf producer for Dart. It can create pdf files for both web or flutter.",
"homepage": "https://github.com/DavBfr/dart_pdf/tree/master/pdf",
"repository": "https://github.com/DavBfr/dart_pdf",
"issue_tracker": "https://github.com/DavBfr/dart_pdf/issues",
"version": "3.8.2",
"environment": {
"sdk": ">=2.12.0 <3.0.0"
},
"dependencies": {
"archive": "^3.1.0",
"barcode": ">=2.2.0 <3.0.0",
"crypto": "^3.0.0",
"image": ">=3.0.1 <4.0.0",
"meta": ">=1.3.0 <2.0.0",
"path_parsing": ">=0.2.0 <2.0.0",
"vector_math": "^2.1.0",
"xml": ">=5.1.0 <7.0.0"
},
"dev_dependencies": {
"flutter_lints": "^1.0.4",
"test": ">=1.16.0 <2.0.0"
}
},
"archive_url": "https://pub.dartlang.org/packages/pdf/versions/3.8.2.tar.gz",
+ "archive_sha256": "b69a47f10620b5639bfcf51cd9acd1083e7e856dfc4a23f49df89445d1d27692",
"published": "2022-07-25T11:38:25.983876Z"
}
]
-}
+}
\ No newline at end of file
diff --git a/swh/loader/package/puppet/loader.py b/swh/loader/package/puppet/loader.py
index fd4da49..6f0221e 100644
--- a/swh/loader/package/puppet/loader.py
+++ b/swh/loader/package/puppet/loader.py
@@ -1,152 +1,153 @@
# Copyright (C) 2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from datetime import datetime
import json
from pathlib import Path
from typing import Any, Dict, Iterator, Optional, Sequence, Tuple
import attr
import iso8601
from packaging.version import parse as parse_version
from swh.loader.package.loader import BasePackageInfo, PackageLoader
from swh.loader.package.utils import Person, release_name
from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone
from swh.storage.interface import StorageInterface
@attr.s
class PuppetPackageInfo(BasePackageInfo):
name = attr.ib(type=str)
"""Name of the package"""
filename = attr.ib(type=str)
"""Archive (tar.gz) file name"""
version = attr.ib(type=str)
"""Current version"""
last_modified = attr.ib(type=datetime)
"""Module last update date as release date"""
def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]:
"""Extract intrinsic metadata from metadata.json file at dir_path.
Each Puppet module version has a metadata.json file at the root of the archive.
See ``https://puppet.com/docs/puppet/7/modules_metadata.html`` for metadata specifications.
Args:
dir_path: A directory on disk where a metadata.json file must be present
Returns:
A dict, as returned by the json parser
"""
meta_json_path = dir_path / "metadata.json"
metadata: Dict[str, Any] = json.loads(meta_json_path.read_text())
return metadata
class PuppetLoader(PackageLoader[PuppetPackageInfo]):
visit_type = "puppet"
def __init__(
self,
storage: StorageInterface,
url: str,
artifacts: Dict[str, Any],
**kwargs,
):
super().__init__(storage=storage, url=url, **kwargs)
self.url = url
self.artifacts = artifacts
def get_versions(self) -> Sequence[str]:
"""Get all released versions of a Puppet module
Returns:
A sequence of versions
Example::
["0.1.1", "0.10.2"]
"""
versions = list(self.artifacts.keys())
versions.sort(key=parse_version)
return versions
def get_default_version(self) -> str:
"""Get the newest release version of a Puppet module
Returns:
A string representing a version
Example::
"0.10.2"
"""
return self.get_versions()[-1]
def get_package_info(self, version: str) -> Iterator[Tuple[str, PuppetPackageInfo]]:
"""Get release name and package information from version
Args:
version: Package version (e.g: "0.1.0")
Returns:
Iterator of tuple (release_name, p_info)
"""
data = self.artifacts[version]
assert data["filename"].endswith(f"-{version}.tar.gz")
pkgname: str = data["filename"].split(f"-{version}.tar.gz")[0]
url: str = data["url"]
filename: str = data["filename"]
last_modified: datetime = iso8601.parse_date(data["last_update"])
p_info = PuppetPackageInfo(
name=pkgname,
filename=filename,
url=url,
version=version,
last_modified=last_modified,
+ checksums=data["checksums"],
)
yield release_name(version), p_info
def build_release(
self, p_info: PuppetPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
# compute extracted module directory name
dirname = p_info.filename.split(".tar.gz")[0]
# Extract intrinsic metadata from uncompressed_path/{dirname}/metadata.json
intrinsic_metadata = extract_intrinsic_metadata(
Path(uncompressed_path) / f"{dirname}"
)
version: str = intrinsic_metadata["version"]
assert version == p_info.version
description = intrinsic_metadata["summary"]
author = Person.from_fullname(intrinsic_metadata["author"].encode())
message = (
f"Synthetic release for Puppet source package {p_info.name} "
f"version {version}\n\n"
f"{description}\n"
)
return Release(
name=version.encode(),
author=author,
date=TimestampWithTimezone.from_datetime(p_info.last_modified),
message=message.encode(),
target_type=ObjectType.DIRECTORY,
target=directory,
synthetic=True,
)
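The package name derivation in get_package_info() above relies only on the artifact filename and version; a self-contained sketch with the saz-memcached artifact used in the tests below:

# Sketch of the filename -> package name split done in get_package_info().
filename = "saz-memcached-8.1.0.tar.gz"
version = "8.1.0"
assert filename.endswith(f"-{version}.tar.gz")
pkgname = filename.split(f"-{version}.tar.gz")[0]
assert pkgname == "saz-memcached"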
diff --git a/swh/loader/package/puppet/tests/test_puppet.py b/swh/loader/package/puppet/tests/test_puppet.py
index 709689a..07cf0ac 100644
--- a/swh/loader/package/puppet/tests/test_puppet.py
+++ b/swh/loader/package/puppet/tests/test_puppet.py
@@ -1,118 +1,125 @@
# Copyright (C) 2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.loader.package.puppet.loader import PuppetLoader
from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
ObjectType,
Person,
Release,
Snapshot,
SnapshotBranch,
TargetType,
TimestampWithTimezone,
)
ORIGINS = {
"url": "https://forge.puppet.com/modules/saz/memcached",
"artifacts": {
"1.0.0": {
"url": "https://forgeapi.puppet.com/v3/files/saz-memcached-1.0.0.tar.gz", # noqa: B950
"version": "1.0.0",
"filename": "saz-memcached-1.0.0.tar.gz",
"last_update": "2011-11-20T13:40:30-08:00",
+ "checksums": {
+ "length": 763,
+ },
},
"8.1.0": {
"url": "https://forgeapi.puppet.com/v3/files/saz-memcached-8.1.0.tar.gz", # noqa: B950
"version": "8.1.0",
"filename": "saz-memcached-8.1.0.tar.gz",
"last_update": "2022-07-11T03:34:55-07:00",
+ "checksums": {
+ "md5": "5313e8fff0af08d63681daf955e7a604",
+ "sha256": "0dbb1470c64435700767e9887d0cf70203b1ae59445c401d5d200f2dabb3226e", # noqa: B950
+ },
},
},
}
def test_get_versions(requests_mock_datadir, swh_storage):
loader = PuppetLoader(
swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"]
)
assert loader.get_versions() == ["1.0.0", "8.1.0"]
def test_get_default_version(requests_mock_datadir, swh_storage):
loader = PuppetLoader(
swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"]
)
assert loader.get_default_version() == "8.1.0"
def test_puppet_loader_load_multiple_version(
datadir, requests_mock_datadir, swh_storage
):
loader = PuppetLoader(
swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"]
)
load_status = loader.load()
assert load_status["status"] == "eventful"
assert load_status["snapshot_id"] is not None
expected_snapshot_id = "9a8e76a8a6eae5285059d9f6d5083a99317727cf"
assert expected_snapshot_id == load_status["snapshot_id"]
expected_snapshot = Snapshot(
id=hash_to_bytes(load_status["snapshot_id"]),
branches={
b"HEAD": SnapshotBranch(
target=b"releases/8.1.0",
target_type=TargetType.ALIAS,
),
b"releases/1.0.0": SnapshotBranch(
target=hash_to_bytes("50eb560bb5322cd149359b9cc8debc78834bcfad"),
target_type=TargetType.RELEASE,
),
b"releases/8.1.0": SnapshotBranch(
target=hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22"),
target_type=TargetType.RELEASE,
),
},
)
check_snapshot(expected_snapshot, swh_storage)
stats = get_stats(swh_storage)
assert {
"content": 1 + 1,
"directory": 2 + 2,
"origin": 1,
"origin_visit": 1,
"release": 1 + 1,
"revision": 0,
"skipped_content": 0,
"snapshot": 1,
} == stats
assert swh_storage.release_get(
[hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22")]
)[0] == Release(
name=b"8.1.0",
message=b"Synthetic release for Puppet source package saz-memcached version 8.1.0\n\n"
b"Manage memcached via Puppet\n",
target=hash_to_bytes("1b9a2dbc80f954e1ba4b2f1c6344d1ce4e84ab7c"),
target_type=ObjectType.DIRECTORY,
synthetic=True,
author=Person(fullname=b"saz", name=b"saz", email=None),
date=TimestampWithTimezone.from_iso8601("2022-07-11T03:34:55-07:00"),
id=hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22"),
)
assert_last_visit_matches(
swh_storage,
url=ORIGINS["url"],
status="full",
type="puppet",
snapshot=expected_snapshot.id,
)
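The per-artifact `checksums` dicts above mix a plain byte length (for 1.0.0) with md5/sha256 digests (for 8.1.0). A standalone sketch of checking a downloaded file against such a dict, using only hashlib; this illustrates the dict's shape and is not the loaders' actual verification code:

import hashlib

def verify_artifact(path, checksums):
    # Compare a local file against a dict that may mix a byte length
    # with hexadecimal hash digests, as in ORIGINS above.
    data = open(path, "rb").read()
    for algo, expected in checksums.items():
        if algo == "length":
            assert len(data) == expected, "length mismatch"
        else:
            assert hashlib.new(algo, data).hexdigest() == expected, f"{algo} mismatch"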
diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py
index fe814f7..19e26e0 100644
--- a/swh/loader/package/pypi/loader.py
+++ b/swh/loader/package/pypi/loader.py
@@ -1,248 +1,251 @@
-# Copyright (C) 2019-2021 The Software Heritage developers
+# Copyright (C) 2019-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
+from __future__ import annotations
+
import json
import logging
import os
from typing import Any, Dict, Iterator, Optional, Sequence, Tuple
from urllib.parse import urlparse
import attr
from pkginfo import UnpackedSDist
from swh.loader.package.loader import (
BasePackageInfo,
PackageLoader,
PartialExtID,
RawExtrinsicMetadataCore,
)
from swh.loader.package.utils import (
EMPTY_AUTHOR,
cached_method,
get_url_body,
release_name,
)
from swh.model.hashutil import hash_to_bytes
from swh.model.model import (
MetadataAuthority,
MetadataAuthorityType,
ObjectType,
Person,
Release,
Sha1Git,
TimestampWithTimezone,
)
from swh.storage.interface import StorageInterface
logger = logging.getLogger(__name__)
EXTID_TYPE = "pypi-archive-sha256"
EXTID_VERSION = 0
@attr.s
class PyPIPackageInfo(BasePackageInfo):
raw_info = attr.ib(type=Dict[str, Any])
name = attr.ib(type=str)
comment_text = attr.ib(type=Optional[str])
sha256 = attr.ib(type=str)
upload_time = attr.ib(type=str)
@classmethod
def from_metadata(
cls, metadata: Dict[str, Any], name: str, version: str
- ) -> "PyPIPackageInfo":
+ ) -> PyPIPackageInfo:
return cls(
url=metadata["url"],
filename=metadata["filename"],
version=version,
raw_info=metadata,
name=name,
comment_text=metadata.get("comment_text"),
sha256=metadata["digests"]["sha256"],
upload_time=metadata["upload_time"],
directory_extrinsic_metadata=[
RawExtrinsicMetadataCore(
format="pypi-project-json",
metadata=json.dumps(metadata).encode(),
)
],
+ checksums={"sha256": metadata["digests"]["sha256"]},
)
def extid(self) -> PartialExtID:
return (EXTID_TYPE, EXTID_VERSION, hash_to_bytes(self.sha256))
class PyPILoader(PackageLoader[PyPIPackageInfo]):
"""Load pypi origin's artifact releases into swh archive."""
visit_type = "pypi"
def __init__(self, storage: StorageInterface, url: str, **kwargs):
super().__init__(storage=storage, url=url, **kwargs)
self.provider_url = pypi_api_url(self.origin.url)
@cached_method
def _raw_info(self) -> bytes:
return get_url_body(self.provider_url)
@cached_method
def info(self) -> Dict:
"""Return the project metadata information (fetched from pypi registry)"""
return json.loads(self._raw_info())
def get_versions(self) -> Sequence[str]:
return self.info()["releases"].keys()
def get_default_version(self) -> str:
return self.info()["info"]["version"]
def get_metadata_authority(self):
p_url = urlparse(self.origin.url)
return MetadataAuthority(
type=MetadataAuthorityType.FORGE,
url=f"{p_url.scheme}://{p_url.netloc}/",
metadata={},
)
def get_package_info(self, version: str) -> Iterator[Tuple[str, PyPIPackageInfo]]:
res = []
for meta in self.info()["releases"][version]:
# process only standard sdist archives
if meta["packagetype"] != "sdist" or meta["filename"].lower().endswith(
(".deb", ".egg", ".rpm", ".whl")
):
continue
p_info = PyPIPackageInfo.from_metadata(
meta, name=self.info()["info"]["name"], version=version
)
res.append((version, p_info))
if len(res) == 1:
version, p_info = res[0]
yield release_name(version), p_info
else:
for version, p_info in res:
yield release_name(version, p_info.filename), p_info
def build_release(
self, p_info: PyPIPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
i_metadata = extract_intrinsic_metadata(uncompressed_path)
if not i_metadata:
return None
# from intrinsic metadata
version_ = i_metadata.get("version", p_info.version)
author_ = author(i_metadata)
if p_info.comment_text:
msg = p_info.comment_text
else:
msg = (
f"Synthetic release for PyPI source package {p_info.name} "
f"version {version_}\n"
)
date = TimestampWithTimezone.from_iso8601(p_info.upload_time)
return Release(
name=p_info.version.encode(),
message=msg.encode(),
author=author_,
date=date,
target=directory,
target_type=ObjectType.DIRECTORY,
synthetic=True,
)
def pypi_api_url(url: str) -> str:
"""Compute api url from a project url
Args:
url (str): PyPI instance's url (e.g: https://pypi.org/project/requests)
This deals with correctly transforming the project url into its api url (e.g
https://pypi.org/pypi/requests/json)
Returns:
api url
"""
p_url = urlparse(url)
project_name = p_url.path.rstrip("/").split("/")[-1]
url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name)
return url
def extract_intrinsic_metadata(dir_path: str) -> Dict:
"""Given an uncompressed path holding the pkginfo file, returns a
pkginfo parsed structure as a dict.
Release artifacts contain a single folder at their root. For example:
$ tar tvf zprint-0.0.6.tar.gz
drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
...
Args:
dir_path (str): Path to the uncompressed directory
representing a release artifact from pypi.
Returns:
the parsed pkginfo structure as a dict, or an empty dict if
none was present.
"""
# Retrieve the root folder of the archive
if not os.path.exists(dir_path):
return {}
lst = os.listdir(dir_path)
if len(lst) != 1:
return {}
project_dirname = lst[0]
pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO")
if not os.path.exists(pkginfo_path):
return {}
pkginfo = UnpackedSDist(pkginfo_path)
raw = pkginfo.__dict__
raw.pop("filename") # this gets added with the ondisk location
return raw
def author(data: Dict) -> Person:
"""Given a dict of project/release artifact information (coming from
PyPI), returns an author subset.
Args:
data (dict): Representing either artifact information or
release information.
Returns:
a swh-model Person object representing the author.
"""
name = data.get("author")
email = data.get("author_email")
fullname = None # type: Optional[str]
if email:
fullname = "%s <%s>" % (name, email)
else:
fullname = name
if not fullname:
return EMPTY_AUTHOR
if name is not None:
name = name.encode("utf-8")
if email is not None:
email = email.encode("utf-8")
return Person(fullname=fullname.encode("utf-8"), name=name, email=email)
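Usage sketch for pypi_api_url() above (the import path matches the file in this diff); the project name is taken from the last path component of the project page URL:

from swh.loader.package.pypi.loader import pypi_api_url

assert pypi_api_url("https://pypi.org/project/requests") == "https://pypi.org/pypi/requests/json"
assert pypi_api_url("https://pypi.org/project/requests/") == "https://pypi.org/pypi/requests/json"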
