diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py index 5fa45d7..986b2ce 100644 --- a/swh/loader/package/deposit/loader.py +++ b/swh/loader/package/deposit/loader.py @@ -1,324 +1,328 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import json import logging import requests from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union import types import attr from swh.model.hashutil import hash_to_hex, hash_to_bytes from swh.model.model import ( Person, Revision, RevisionType, TimestampWithTimezone, Sha1Git, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, ) from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) -from swh.loader.package.utils import download +from swh.loader.package.utils import cached_method, download logger = logging.getLogger(__name__) @attr.s class DepositPackageInfo(BasePackageInfo): filename = attr.ib(type=str) # instead of Optional[str] raw_info = attr.ib(type=Dict[str, Any]) author_date = attr.ib(type=datetime.datetime) """codemeta:dateCreated if any, deposit completed_date otherwise""" commit_date = attr.ib(type=datetime.datetime) """codemeta:datePublished if any, deposit completed_date otherwise""" client = attr.ib(type=str) id = attr.ib(type=int) """Internal ID of the deposit in the deposit DB""" collection = attr.ib(type=str) """The collection in the deposit; see SWORD specification.""" author = attr.ib(type=Person) committer = attr.ib(type=Person) revision_parents = attr.ib(type=Tuple[Sha1Git, ...]) """Revisions created from previous deposits, that will be used as parents of the revision created for this deposit.""" @classmethod def from_metadata( cls, metadata: Dict[str, Any], url: str, filename: str ) -> "DepositPackageInfo": # Note: # `date` and `committer_date` are always transmitted by the deposit read api # which computes itself the values. The loader needs to use those to create the # revision. raw_metadata_from_origin = json.dumps( metadata["origin_metadata"]["metadata"] ).encode() metadata = metadata.copy() # FIXME: this removes information from 'raw' metadata depo = metadata.pop("deposit") return cls( url=url, filename=filename, author_date=depo["author_date"], commit_date=depo["committer_date"], client=depo["client"], id=depo["id"], collection=depo["collection"], author=parse_author(depo["author"]), committer=parse_author(depo["committer"]), revision_parents=tuple(hash_to_bytes(p) for p in depo["revision_parents"]), raw_info=metadata, revision_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="sword-v2-atom-codemeta-v2-in-json", metadata=raw_metadata_from_origin, ), ], ) class DepositLoader(PackageLoader[DepositPackageInfo]): """Load pypi origin's artifact releases into swh archive. 
""" visit_type = "deposit" def __init__(self, url: str, deposit_id: str): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity """ super().__init__(url=url) config_deposit = self.config["deposit"] self.deposit_id = deposit_id self.client = ApiClient(url=config_deposit["url"], auth=config_deposit["auth"]) - self.metadata: Dict[str, Any] = {} def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ["HEAD"] def get_metadata_authority(self) -> MetadataAuthority: - provider = self.metadata["origin_metadata"]["provider"] + provider = self.metadata()["origin_metadata"]["provider"] assert provider["provider_type"] == "deposit_client" return MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url=provider["provider_url"], metadata={ "name": provider["provider_name"], **(provider["metadata"] or {}), }, ) def get_metadata_fetcher(self) -> MetadataFetcher: - tool = self.metadata["origin_metadata"]["tool"] + tool = self.metadata()["origin_metadata"]["tool"] return MetadataFetcher( name=tool["name"], version=tool["version"], metadata=tool["configuration"], ) def get_package_info( self, version: str ) -> Iterator[Tuple[str, DepositPackageInfo]]: p_info = DepositPackageInfo.from_metadata( - self.metadata, url=self.url, filename="archive.zip", + self.metadata(), url=self.url, filename="archive.zip", ) yield "HEAD", p_info def download_package( self, p_info: DepositPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Override to allow use of the dedicated deposit client """ return [self.client.archive_get(self.deposit_id, tmpdir, p_info.filename)] def build_revision( self, p_info: DepositPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: message = ( f"{p_info.client}: Deposit {p_info.id} in collection {p_info.collection}" ).encode("utf-8") return Revision( type=RevisionType.TAR, message=message, author=p_info.author, date=TimestampWithTimezone.from_dict(p_info.author_date), committer=p_info.committer, committer_date=TimestampWithTimezone.from_dict(p_info.commit_date), parents=p_info.revision_parents, directory=directory, synthetic=True, metadata={ "extrinsic": { "provider": self.client.metadata_url(self.deposit_id), "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: - origin_metadata = self.metadata["origin_metadata"] + origin_metadata = self.metadata()["origin_metadata"] return [ RawExtrinsicMetadataCore( format="sword-v2-atom-codemeta-v2-in-json", metadata=json.dumps(origin_metadata["metadata"]).encode(), ) ] + @cached_method + def metadata(self): + """Returns metadata from the deposit server""" + return self.client.metadata_get(self.deposit_id) + def load(self) -> Dict: # First making sure the deposit is known prior to trigger a loading try: - self.metadata = self.client.metadata_get(self.deposit_id) + self.metadata() except ValueError: logger.error(f"Unknown deposit {self.deposit_id}, ignoring") return {"status": "failed"} # Then usual loading r = super().load() success = r["status"] != "failed" # Update deposit status try: if not success: self.client.status_update(self.deposit_id, status="failed") return r snapshot_id = hash_to_bytes(r["snapshot_id"]) branches = self.storage.snapshot_get(snapshot_id)["branches"] logger.debug("branches: %s", branches) if not branches: return r rev_id = branches[b"HEAD"]["target"] revisions = 
self.storage.revision_get([rev_id]) # FIXME: inconsistency between tests and production code if isinstance(revisions, types.GeneratorType): revisions = list(revisions) revision = revisions[0] # Retrieve the revision identifier dir_id = revision["directory"] # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_id, status="done", revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(dir_id), snapshot_id=r["snapshot_id"], origin_url=self.url, ) except Exception: logger.exception("Problem when trying to update the deposit's status") return {"status": "failed"} return r def parse_author(author) -> Person: """See prior fixme """ return Person( fullname=author["fullname"].encode("utf-8"), name=author["name"].encode("utf-8"), email=author["email"].encode("utf-8"), ) class ApiClient: """Private Deposit Api client """ def __init__(self, url, auth: Optional[Mapping[str, str]]): self.base_url = url.rstrip("/") self.auth = None if not auth else (auth["username"], auth["password"]) def do(self, method: str, url: str, *args, **kwargs): """Internal method to deal with requests, possibly with basic http authentication. Args: method (str): supported http methods as in get/post/put Returns: The request's execution output """ method_fn = getattr(requests, method) if self.auth: kwargs["auth"] = self.auth return method_fn(url, *args, **kwargs) def archive_get( self, deposit_id: Union[int, str], tmpdir: str, filename: str ) -> Tuple[str, Dict]: """Retrieve deposit's archive artifact locally """ url = f"{self.base_url}/{deposit_id}/raw/" return download(url, dest=tmpdir, filename=filename, auth=self.auth) def metadata_url(self, deposit_id: Union[int, str]) -> str: return f"{self.base_url}/{deposit_id}/meta/" def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]: """Retrieve deposit's metadata artifact as json """ url = self.metadata_url(deposit_id) r = self.do("get", url) if r.ok: return r.json() msg = f"Problem when retrieving deposit metadata at {url}" logger.error(msg) raise ValueError(msg) def status_update( self, deposit_id: Union[int, str], status: str, revision_id: Optional[str] = None, directory_id: Optional[str] = None, snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, ): """Update deposit's information including status, and persistent identifiers result of the loading. 
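A minimal usage sketch of the private `ApiClient` above, assuming a reachable deposit server; the base URL, credentials, deposit id and hex identifiers below are placeholders, not values from this patch:

from swh.loader.package.deposit.loader import ApiClient

client = ApiClient(
    url="https://deposit.example.org/1/private",          # hypothetical base URL
    auth={"username": "loader", "password": "secret"},    # or None for no basic auth
)

metadata = client.metadata_get(deposit_id=123)  # raises ValueError if the deposit is unknown
# ... archive retrieval, directory/revision creation and snapshotting happen here ...
client.status_update(
    deposit_id=123,
    status="done",
    revision_id="0" * 40,      # placeholder hex sha1_git values
    directory_id="1" * 40,
    snapshot_id="2" * 40,
    origin_url="https://hal.example.org/hal-0000000",
)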
""" url = f"{self.base_url}/{deposit_id}/update/" payload = {"status": status} if revision_id: payload["revision_id"] = revision_id if directory_id: payload["directory_id"] = directory_id if snapshot_id: payload["snapshot_id"] = snapshot_id if origin_url: payload["origin_url"] = origin_url self.do("put", url, json=payload) diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py index f38e34a..6861061 100644 --- a/swh/loader/package/npm/loader.py +++ b/swh/loader/package/npm/loader.py @@ -1,342 +1,343 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from codecs import BOM_UTF8 from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union import attr import chardet from urllib.parse import quote from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, Person, RevisionType, Revision, TimestampWithTimezone, Sha1Git, ) from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) -from swh.loader.package.utils import api_info, release_name +from swh.loader.package.utils import api_info, cached_method, release_name logger = logging.getLogger(__name__) EMPTY_PERSON = Person(fullname=b"", name=None, email=None) @attr.s class NpmPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) date = attr.ib(type=Optional[str]) shasum = attr.ib(type=str) """sha1 checksum""" version = attr.ib(type=str) @classmethod def from_metadata( cls, project_metadata: Dict[str, Any], version: str ) -> "NpmPackageInfo": package_metadata = project_metadata["versions"][version] url = package_metadata["dist"]["tarball"] # No date available in intrinsic metadata: retrieve it from the API # metadata, using the version number that the API claims this package # has. extrinsic_version = package_metadata["version"] if "time" in project_metadata: date = project_metadata["time"][extrinsic_version] elif "mtime" in package_metadata: date = package_metadata["mtime"] else: date = None return cls( url=url, filename=os.path.basename(url), date=date, shasum=package_metadata["dist"]["shasum"], version=extrinsic_version, raw_info=package_metadata, # FIXME: loses some of the project metadata ) class NpmLoader(PackageLoader[NpmPackageInfo]): """Load npm origin's artifact releases into swh archive. """ visit_type = "npm" def __init__(self, url: str): """Constructor Args str: origin url (e.g. 
https://www.npmjs.com/package/) """ super().__init__(url=url) package_name = url.split("https://www.npmjs.com/package/")[1] safe_name = quote(package_name, safe="") self.provider_url = f"https://replicate.npmjs.com/{safe_name}/" self._info: Dict[str, Any] = {} self._versions = None - @property - def info(self) -> Dict[str, Any]: + @cached_method + def _raw_info(self) -> bytes: + return api_info(self.provider_url) + + @cached_method + def info(self) -> Dict: """Return the project metadata information (fetched from npm registry) """ - if not self._info: - self._raw_info = api_info(self.provider_url) - self._info = json.loads(self._raw_info) - return self._info + return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: - return sorted(list(self.info["versions"].keys())) + return sorted(list(self.info()["versions"].keys())) def get_default_version(self) -> str: - return self.info["dist-tags"].get("latest", "") + return self.info()["dist-tags"].get("latest", "") def get_metadata_authority(self): return MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", metadata={}, ) def get_extrinsic_snapshot_metadata(self): return [ RawExtrinsicMetadataCore( - format="replicate-npm-package-json", metadata=self._raw_info, + format="replicate-npm-package-json", metadata=self._raw_info(), ), ] def get_package_info(self, version: str) -> Iterator[Tuple[str, NpmPackageInfo]]: p_info = NpmPackageInfo.from_metadata( - project_metadata=self.info, version=version + project_metadata=self.info(), version=version ) yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, p_info: NpmPackageInfo ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, p_info) def build_revision( self, p_info: NpmPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None author = extract_npm_package_author(i_metadata) message = i_metadata["version"].encode("ascii") if p_info.date is None: url = p_info.url artifact_name = os.path.basename(url) raise ValueError( "Origin %s: Cannot determine upload time for artifact %s." % (p_info.url, artifact_name) ) date = TimestampWithTimezone.from_iso8601(p_info.date) # FIXME: this is to remain bug-compatible with earlier versions: date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0)) r = Revision( type=RevisionType.TAR, message=message, author=author, date=date, committer=author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "package.json", "raw": i_metadata,}, "extrinsic": { "provider": self.provider_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) return r def artifact_to_revision_id( known_artifacts: Dict, p_info: NpmPackageInfo ) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats: - old format sample:: { 'package_source': { 'sha1': '05181c12cd8c22035dd31155656826b85745da37', } } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... 
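The cached `NpmLoader.info()` document drives every other hook in that class. Below is a minimal, made-up document of the shape `info()` returns after `json.loads()` (real replicate.npmjs.com answers carry many more fields), together with how `NpmPackageInfo.from_metadata()` reads it; all values are placeholders:

from swh.loader.package.npm.loader import NpmPackageInfo

project_metadata = {
    "dist-tags": {"latest": "0.0.3"},
    "time": {"0.0.3": "2014-02-01T10:00:00.000Z"},
    "versions": {
        "0.0.3": {
            "version": "0.0.3",
            "dist": {
                "tarball": "https://registry.npmjs.org/org/-/org-0.0.3.tgz",
                "shasum": "0000000000000000000000000000000000000000",
            },
        },
    },
}

p_info = NpmPackageInfo.from_metadata(project_metadata, version="0.0.3")
assert p_info.filename == "org-0.0.3.tgz"              # basename of the tarball URL
assert p_info.date == "2014-02-01T10:00:00.000Z"       # upload time from the "time" map
assert p_info.shasum == "0000000000000000000000000000000000000000"

`get_versions()` sorts the keys of `versions` and `get_default_version()` follows `dist-tags["latest"]`, so this document would yield the single version "0.0.3".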
} """ shasum = p_info.shasum for rev_id, known_artifact in known_artifacts.items(): known_original_artifact = known_artifact.get("original_artifact") if not known_original_artifact: # previous loader-npm version kept original artifact elsewhere known_original_artifact = known_artifact.get("package_source") if not known_original_artifact: continue original_hash = known_original_artifact["sha1"] else: assert isinstance(known_original_artifact, list) original_hash = known_original_artifact[0]["checksums"]["sha1"] if shasum == original_hash: return rev_id return None def _author_str(author_data: Union[Dict, List, str]) -> str: """Parse author from package.json author fields """ if isinstance(author_data, dict): author_str = "" name = author_data.get("name") if name is not None: if isinstance(name, str): author_str += name elif isinstance(name, list): author_str += _author_str(name[0]) if len(name) > 0 else "" email = author_data.get("email") if email is not None: author_str += f" <{email}>" result = author_str elif isinstance(author_data, list): result = _author_str(author_data[0]) if len(author_data) > 0 else "" else: result = author_data return result def extract_npm_package_author(package_json: Dict[str, Any]) -> Person: """ Extract package author from a ``package.json`` file content and return it in swh format. Args: package_json: Dict holding the content of parsed ``package.json`` file Returns: Person """ for author_key in ("author", "authors"): if author_key in package_json: author_data = package_json[author_key] if author_data is None: return EMPTY_PERSON author_str = _author_str(author_data) return Person.from_fullname(author_str.encode()) return EMPTY_PERSON def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom) :] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode("utf-8") except UnicodeDecodeError: encoding = chardet.detect(json_bytes)["encoding"] if encoding: json_str = json_bytes.decode(encoding, "replace") try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, "package.json") if not os.path.exists(package_json_path): return {} with open(package_json_path, "rb") as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py index 8d8a02a..8011f3c 100644 --- a/swh/loader/package/npm/tests/test_npm.py +++ b/swh/loader/package/npm/tests/test_npm.py @@ -1,705 +1,708 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import pytest from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.interface import PagedResult from swh.loader.package import __version__ from swh.loader.package.npm.loader import ( _author_str, NpmLoader, extract_npm_package_author, artifact_to_revision_id, ) from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import ( assert_last_visit_matches, check_snapshot, get_stats, ) @pytest.fixture def org_api_info(datadir) -> bytes: with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f: return f.read() def test_npm_author_str(): for author, expected_author in [ ("author", "author"), ( ["Al from quantum leap", "hal from 2001 space odyssey"], "Al from quantum leap", ), ([], ""), ({"name": "groot", "email": "groot@galaxy.org",}, "groot "), ({"name": "somebody",}, "somebody"), ({"email": "no@one.org"}, " "), # note first elt is an extra blank ({"name": "no one", "email": None,}, "no one"), ({"email": None,}, ""), ({"name": None}, ""), ({"name": None, "email": None,}, ""), ({}, ""), (None, None), ({"name": []}, "",), ( {"name": ["Susan McSween", "William H. 
Bonney", "Doc Scurlock",]}, "Susan McSween", ), (None, None), ]: assert _author_str(author) == expected_author def test_extract_npm_package_author(datadir): package_metadata_filepath = os.path.join( datadir, "https_replicate.npmjs.com", "org_visit1" ) with open(package_metadata_filepath) as json_file: package_metadata = json.load(json_file) extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person( fullname=b"mooz ", name=b"mooz", email=b"stillpedant@gmail.com", ) assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person( fullname=b"Masafumi Oyamada ", name=b"Masafumi Oyamada", email=b"stillpedant@gmail.com", ) package_json = json.loads( """ { "name": "highlightjs-line-numbers.js", "version": "2.7.0", "description": "Highlight.js line numbers plugin.", "main": "src/highlightjs-line-numbers.js", "dependencies": {}, "devDependencies": { "gulp": "^4.0.0", "gulp-rename": "^1.4.0", "gulp-replace": "^0.6.1", "gulp-uglify": "^1.2.0" }, "repository": { "type": "git", "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git" }, "author": "Yauheni Pakala ", "license": "MIT", "bugs": { "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues" }, "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Yauheni Pakala ", name=b"Yauheni Pakala", email=b"evgeniy.pakalo@gmail.com", ) package_json = json.loads( """ { "name": "3-way-diff", "version": "0.0.1", "description": "3-way diffing of JavaScript objects", "main": "index.js", "authors": [ { "name": "Shawn Walsh", "url": "https://github.com/shawnpwalsh" }, { "name": "Markham F Rollins IV", "url": "https://github.com/mrollinsiv" } ], "keywords": [ "3-way diff", "3 way diff", "three-way diff", "three way diff" ], "devDependencies": { "babel-core": "^6.20.0", "babel-preset-es2015": "^6.18.0", "mocha": "^3.0.2" }, "dependencies": { "lodash": "^4.15.0" } }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None ) package_json = json.loads( """ { "name": "yfe-ynpm", "version": "1.0.0", "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm", "repository": { "type": "git", "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git" }, "author": [ "fengmk2 (https://fengmk2.com)", "xufuzi (https://7993.org)" ], "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"fengmk2 (https://fengmk2.com)", name=b"fengmk2", email=b"fengmk2@gmail.com", ) package_json = json.loads( """ { "name": "umi-plugin-whale", "version": "0.0.8", "description": "Internal contract component", "authors": { "name": "xiaohuoni", "email": "448627663@qq.com" }, "repository": "alitajs/whale", "devDependencies": { "np": "^3.0.4", "umi-tools": "*" }, "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"xiaohuoni <448627663@qq.com>", name=b"xiaohuoni", email=b"448627663@qq.com", ) package_json_no_authors = json.loads( """{ "authors": null, "license": "MIT" }""" ) assert extract_npm_package_author(package_json_no_authors) == Person( fullname=b"", name=None, email=None ) def normalize_hashes(hashes): if isinstance(hashes, str): return hash_to_bytes(hashes) if isinstance(hashes, list): return [hash_to_bytes(x) for x in hashes] return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()} _expected_new_contents_first_visit = normalize_hashes( [ "4ce3058e16ab3d7e077f65aabf855c34895bf17c", 
"858c3ceee84c8311adc808f8cdb30d233ddc9d18", "0fa33b4f5a4e0496da6843a38ff1af8b61541996", "85a410f8ef8eb8920f2c384a9555566ad4a2e21b", "9163ac8025923d5a45aaac482262893955c9b37b", "692cf623b8dd2c5df2c2998fd95ae4ec99882fb4", "18c03aac6d3e910efb20039c15d70ab5e0297101", "41265c42446aac17ca769e67d1704f99e5a1394d", "783ff33f5882813dca9239452c4a7cadd4dba778", "b029cfb85107aee4590c2434a3329bfcf36f8fa1", "112d1900b4c2e3e9351050d1b542c9744f9793f3", "5439bbc4bd9a996f1a38244e6892b71850bc98fd", "d83097a2f994b503185adf4e719d154123150159", "d0939b4898e83090ee55fd9d8a60e312cfadfbaf", "b3523a26f7147e4af40d9d462adaae6d49eda13e", "cd065fb435d6fb204a8871bcd623d0d0e673088c", "2854a40855ad839a54f4b08f5cff0cf52fca4399", "b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe", "0f73d56e1cf480bded8a1ecf20ec6fc53c574713", "0d9882b2dfafdce31f4e77fe307d41a44a74cefe", "585fc5caab9ead178a327d3660d35851db713df1", "e8cd41a48d79101977e3036a87aeb1aac730686f", "5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7", "9c3cc2763bf9e9e37067d3607302c4776502df98", "3649a68410e354c83cd4a38b66bd314de4c8f5c9", "e96ed0c091de1ebdf587104eaf63400d1974a1fe", "078ca03d2f99e4e6eab16f7b75fbb7afb699c86c", "38de737da99514de6559ff163c988198bc91367a", ] ) _expected_new_directories_first_visit = normalize_hashes( [ "3370d20d6f96dc1c9e50f083e2134881db110f4f", "42753c0c2ab00c4501b552ac4671c68f3cf5aece", "d7895533ef5edbcffdea3f057d9fef3a1ef845ce", "80579be563e2ef3e385226fe7a3f079b377f142c", "3b0ddc6a9e58b4b53c222da4e27b280b6cda591c", "bcad03ce58ac136f26f000990fc9064e559fe1c0", "5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca", "e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd", "584b5b4b6cf7f038095e820b99386a9c232de931", "184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a", "bb5f4ee143c970367eb409f2e4c1104898048b9d", "1b95491047add1103db0dfdfa84a9735dcb11e88", "a00c6de13471a2d66e64aca140ddb21ef5521e62", "5ce6c1cd5cda2d546db513aaad8c72a44c7771e2", "c337091e349b6ac10d38a49cdf8c2401ef9bb0f2", "202fafcd7c0f8230e89d5496ad7f44ab12b807bf", "775cc516543be86c15c1dc172f49c0d4e6e78235", "ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e", ] ) _expected_new_revisions_first_visit = normalize_hashes( { "d8a1c7474d2956ac598a19f0f27d52f7015f117e": ( "42753c0c2ab00c4501b552ac4671c68f3cf5aece" ), "5f9eb78af37ffd12949f235e86fac04898f9f72a": ( "3370d20d6f96dc1c9e50f083e2134881db110f4f" ), "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": ( "d7895533ef5edbcffdea3f057d9fef3a1ef845ce" ), } ) def package_url(package): return "https://www.npmjs.com/package/%s" % package def package_metadata_url(package): return "https://replicate.npmjs.com/%s/" % package def test_revision_metadata_structure(swh_config, requests_mock_datadir): package = "org" loader = NpmLoader(package_url(package)) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None check_metadata_paths( revision["metadata"], paths=[ ("intrinsic.tool", str), ("intrinsic.raw", dict), ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) for original_artifact in revision["metadata"]["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) def test_npm_loader_first_visit(swh_config, requests_mock_datadir, org_api_info): package = "org" url = package_url(package) loader = NpmLoader(url) 
actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) stats = get_stats(loader.storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "person": 2, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats assert len( list(loader.storage.content_get(_expected_new_contents_first_visit)) ) == len(_expected_new_contents_first_visit) assert ( list(loader.storage.directory_missing(_expected_new_directories_first_visit)) == [] ) assert ( list(loader.storage.revision_missing(_expected_new_revisions_first_visit)) == [] ) expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target=b"releases/0.0.4", target_type=TargetType.ALIAS ), b"releases/0.0.2": SnapshotBranch( target=hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e"), target_type=TargetType.REVISION, ), b"releases/0.0.3": SnapshotBranch( target=hash_to_bytes("5f9eb78af37ffd12949f235e86fac04898f9f72a"), target_type=TargetType.REVISION, ), b"releases/0.0.4": SnapshotBranch( target=hash_to_bytes("ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"), target_type=TargetType.REVISION, ), }, ) check_snapshot(expected_snapshot, loader.storage) snapshot_swhid = SWHID( object_type="snapshot", object_id=hash_to_hex(expected_snapshot_id) ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", ) expected_metadata = [ RawExtrinsicMetadata( type=MetadataTargetType.SNAPSHOT, id=snapshot_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.npm.loader.NpmLoader", version=__version__, ), discovery_date=loader.visit_date, format="replicate-npm-package-json", metadata=org_api_info, origin="https://www.npmjs.com/package/org", ) ] assert loader.storage.raw_extrinsic_metadata_get( type=MetadataTargetType.SNAPSHOT, id=snapshot_swhid, authority=metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) def test_npm_loader_incremental_visit(swh_config, requests_mock_datadir_visits): package = "org" url = package_url(package) loader = NpmLoader(url) expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") actual_load_status = loader.load() assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) stats = get_stats(loader.storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "person": 2, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats - loader._info = None # reset loader internal state + # reset loader internal state + del loader._cached_info + del loader._cached__raw_info + actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" snap_id2 = actual_load_status2["snapshot_id"] assert snap_id2 is not None assert snap_id2 != actual_load_status["snapshot_id"] assert_last_visit_matches(loader.storage, url, status="full", type="npm") stats = 
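Since `cached_method` memoizes into per-instance `_cached_*` attributes instead of the eagerly assigned `_info` field, the incremental-visit test above now resets the loader with `del loader._cached_info` / `del loader._cached__raw_info` rather than `loader._info = None`. A hypothetical helper making that reset generic could look like this (illustrative only, not part of the patch):

def reset_loader_cache(loader):
    # Drop every attribute created by @cached_method so the next load()
    # re-fetches the registry document instead of reusing the first answer.
    for attr_name in [a for a in vars(loader) if a.startswith("_cached_")]:
        delattr(loader, attr_name)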
get_stats(loader.storage) assert { # 3 new releases artifacts "content": len(_expected_new_contents_first_visit) + 14, "directory": len(_expected_new_directories_first_visit) + 15, "origin": 1, "origin_visit": 2, "person": 2, "release": 0, "revision": len(_expected_new_revisions_first_visit) + 3, "skipped_content": 0, "snapshot": 2, } == stats urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith("https://registry.npmjs.org") ] assert len(urls) == len(set(urls)) # we visited each artifact once across @pytest.mark.usefixtures("requests_mock_datadir") def test_npm_loader_version_divergence(swh_config): package = "@aller_shared" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) stats = get_stats(loader.storage) assert { # 1 new releases artifacts "content": 534, "directory": 153, "origin": 1, "origin_visit": 1, "person": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.0" ), b"releases/0.1.0": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"), ), b"releases/0.1.1-alpha.14": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"), ), }, ) check_snapshot(expected_snapshot, loader.storage) def test_npm_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {}, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_npm_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "package_source": {"sha1": "something-wrong"} }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "package_source": {"sha1": "05181c12cd8c22035dd31155656826b85745da37",} }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "845673bfe8cbd31b1eaf757745a964137e6f9116" ) def test_npm_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": [ {"checksums": {"sha1": "05181c12cd8c22035dd31155656826b85745da37"},} ], }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": [{"checksums": {"sha1": "something-wrong"},}], }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" ) def test_npm_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ package = "nativescript-telerik-analytics" url = 
package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot.id.hex(), } check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) def test_npm_artifact_with_no_upload_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ package = "jammit-no-time" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "uneventful", "snapshot_id": expected_snapshot.id.hex(), } check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, url, status="partial", type="npm", snapshot=expected_snapshot.id ) def test_npm_artifact_use_mtime_if_no_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ package = "jammit-express" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # artifact is used expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.0.1" ), b"releases/0.0.1": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"), ), }, ) check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) def test_npm_no_artifact(swh_config, requests_mock_datadir): """If no artifacts at all is found for origin, the visit fails completely """ package = "catify" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status == { "status": "failed", } assert_last_visit_matches(loader.storage, url, status="partial", type="npm") diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index a7b6066..7389090 100644 --- a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,287 +1,292 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import json import os from typing import Any, Dict, Iterator, Optional, Sequence, Tuple from urllib.parse import urlparse import attr from pkginfo import UnpackedSDist from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, Person, Sha1Git, TimestampWithTimezone, Revision, RevisionType, ) from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) -from swh.loader.package.utils import api_info, release_name, EMPTY_AUTHOR +from swh.loader.package.utils import ( + api_info, + cached_method, + release_name, + EMPTY_AUTHOR, +) logger = logging.getLogger(__name__) @attr.s class 
PyPIPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) comment_text = attr.ib(type=Optional[str]) sha256 = attr.ib(type=str) upload_time = attr.ib(type=str) @classmethod def from_metadata(cls, metadata: Dict[str, Any]) -> "PyPIPackageInfo": return cls( url=metadata["url"], filename=metadata["filename"], raw_info=metadata, comment_text=metadata.get("comment_text"), sha256=metadata["digests"]["sha256"], upload_time=metadata["upload_time"], ) class PyPILoader(PackageLoader[PyPIPackageInfo]): """Load pypi origin's artifact releases into swh archive. """ visit_type = "pypi" def __init__(self, url): super().__init__(url=url) - self._info = None self.provider_url = pypi_api_url(self.url) - @property + @cached_method + def _raw_info(self) -> bytes: + return api_info(self.provider_url) + + @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ - if not self._info: - self._raw_info = api_info(self.provider_url) - self._info = json.loads(self._raw_info) - return self._info + return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: - return self.info["releases"].keys() + return self.info()["releases"].keys() def get_default_version(self) -> str: - return self.info["info"]["version"] + return self.info()["info"]["version"] def get_metadata_authority(self): p_url = urlparse(self.url) return MetadataAuthority( type=MetadataAuthorityType.FORGE, url=f"{p_url.scheme}://{p_url.netloc}/", metadata={}, ) def get_extrinsic_snapshot_metadata(self): return [ RawExtrinsicMetadataCore( - format="pypi-project-json", metadata=self._raw_info, + format="pypi-project-json", metadata=self._raw_info(), ), ] def get_package_info(self, version: str) -> Iterator[Tuple[str, PyPIPackageInfo]]: res = [] - for meta in self.info["releases"][version]: + for meta in self.info()["releases"][version]: if meta["packagetype"] != "sdist": continue p_info = PyPIPackageInfo.from_metadata(meta) res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info.filename), p_info def resolve_revision_from( self, known_artifacts: Dict, p_info: PyPIPackageInfo ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, p_info) def build_revision( self, p_info: PyPIPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None # from intrinsic metadata name = i_metadata["version"] _author = author(i_metadata) # from extrinsic metadata message = p_info.comment_text or "" message = "%s: %s" % (name, message) if message else name date = TimestampWithTimezone.from_iso8601(p_info.upload_time) return Revision( type=RevisionType.TAR, message=message.encode("utf-8"), author=_author, date=date, committer=_author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "PKG-INFO", "raw": i_metadata,}, "extrinsic": { "provider": self.provider_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def artifact_to_revision_id( known_artifacts: Dict, p_info: PyPIPackageInfo ) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. 
The following code allows to deal with 2 metadata formats (column metadata in 'revision') - old format sample:: { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, ... } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... } """ sha256 = p_info.sha256 for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact["original_artifact"] if isinstance(original_artifact, dict): # previous loader-pypi version stored metadata as dict original_sha256 = original_artifact["sha256"] if sha256 == original_sha256: return rev_id continue # new pypi loader actually store metadata dict differently... assert isinstance(original_artifact, list) # current loader-pypi stores metadata as list of dict for original_artifact in known_artifact["original_artifact"]: if sha256 == original_artifact["checksums"]["sha256"]: return rev_id return None def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.rstrip("/").split("/")[-1] url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO") if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop("filename") # this gets added with the ondisk location return raw def author(data: Dict) -> Person: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. 
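A small, self-contained illustration of the two `known_artifacts` layouts that `artifact_to_revision_id()` above accepts, in the same spirit as the class-based fixtures used in the npm tests; every identifier and checksum below is made up:

from swh.loader.package.pypi.loader import artifact_to_revision_id

old_style = {b"rev-old": {"original_artifact": {"sha256": "aaa111"}}}
new_style = {b"rev-new": {"original_artifact": [{"checksums": {"sha256": "bbb222"}}]}}

class fake_package_info:      # only the .sha256 attribute is consulted
    sha256 = "aaa111"

assert artifact_to_revision_id(old_style, fake_package_info) == b"rev-old"

fake_package_info.sha256 = "bbb222"
assert artifact_to_revision_id(new_style, fake_package_info) == b"rev-new"

fake_package_info.sha256 = "ccc333"   # unknown artifact: no previous revision is reused
assert artifact_to_revision_id(new_style, fake_package_info) is None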
""" name = data.get("author") email = data.get("author_email") fullname = None # type: Optional[str] if email: fullname = "%s <%s>" % (name, email) else: fullname = name if not fullname: return EMPTY_AUTHOR if name is not None: name = name.encode("utf-8") if email is not None: email = email.encode("utf-8") return Person(fullname=fullname.encode("utf-8"), name=name, email=email) diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py index c070aab..71f0363 100644 --- a/swh/loader/package/pypi/tests/test_pypi.py +++ b/swh/loader/package/pypi/tests/test_pypi.py @@ -1,885 +1,886 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from os import path import pytest from unittest.mock import patch from swh.core.tarball import uncompress from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.interface import PagedResult from swh.loader.package import __version__ from swh.loader.package.pypi.loader import ( PyPILoader, pypi_api_url, author, extract_intrinsic_metadata, artifact_to_revision_id, ) from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import ( assert_last_visit_matches, check_snapshot, get_stats, ) @pytest.fixture def _0805nexter_api_info(datadir) -> bytes: with open( os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb", ) as f: return f.read() def test_author_basic(): data = { "author": "i-am-groot", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b"i-am-groot ", name=b"i-am-groot", email=b"iam@groot.org", ) assert actual_author == expected_author def test_author_empty_email(): data = { "author": "i-am-groot", "author_email": "", } actual_author = author(data) expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",) assert actual_author == expected_author def test_author_empty_name(): data = { "author": "", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b" ", name=b"", email=b"iam@groot.org", ) assert actual_author == expected_author def test_author_malformed(): data = { "author": "['pierre', 'paul', 'jacques']", "author_email": None, } actual_author = author(data) expected_author = Person( fullname=b"['pierre', 'paul', 'jacques']", name=b"['pierre', 'paul', 'jacques']", email=None, ) assert actual_author == expected_author def test_author_malformed_2(): data = { "author": "[marie, jeanne]", "author_email": "[marie@some, jeanne@thing]", } actual_author = author(data) expected_author = Person( fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>", name=b"[marie, jeanne]", email=b"[marie@some, jeanne@thing]", ) assert actual_author == expected_author def test_author_malformed_3(): data = { "author": "[marie, jeanne, pierre]", "author_email": "[marie@somewhere.org, jeanne@somewhere.org]", } actual_author = author(data) expected_author = Person( fullname=( b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>" ), name=b"[marie, jeanne, 
pierre]", email=b"[marie@somewhere.org, jeanne@somewhere.org]", ) actual_author == expected_author # configuration error # def test_badly_configured_loader_raise(monkeypatch): """Badly configured loader should raise""" monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False) with pytest.raises(ValueError) as e: PyPILoader(url="some-url") assert "Misconfiguration" in e.value.args[0] def test_pypi_api_url(): """Compute pypi api url from the pypi project url should be ok""" url = pypi_api_url("https://pypi.org/project/requests") assert url == "https://pypi.org/pypi/requests/json" def test_pypi_api_url_with_slash(): """Compute pypi api url from the pypi project url should be ok""" url = pypi_api_url("https://pypi.org/project/requests/") assert url == "https://pypi.org/pypi/requests/json" @pytest.mark.fs def test_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) archive_path = path.join( datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip" ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { "metadata_version": "1.0", "name": "0805nexter", "version": "1.1.0", "summary": "a simple printer of nested lest", "home_page": "http://www.hp.com", "author": "hgtkpython", "author_email": "2868989685@qq.com", "platforms": ["UNKNOWN"], } assert actual_metadata == expected_metadata @pytest.mark.fs def test_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue) # inexistent first level path assert extract_intrinsic_metadata("/something-inexistent") == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path existing_path_no_pkginfo = path.join(tmp_path, "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} # LOADER SCENARIO # # "edge" cases (for the same origin) # # no release artifact: # {visit full, status: uneventful, no contents, etc...} requests_mock_datadir_missing_all = requests_mock_datadir_factory( ignore_urls=[ "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa ] ) def test_no_release_artifact(swh_config, requests_mock_datadir_missing_all): """Load a pypi project with all artifacts missing ends up with no snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "person": 0, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert_last_visit_matches(loader.storage, url, status="partial", type="pypi") # problem during loading: # {visit: partial, status: uneventful, no snapshot} def test_release_with_traceback(swh_config, requests_mock_datadir): url = "https://pypi.org/project/0805nexter" with patch( "swh.loader.package.pypi.loader.PyPILoader.last_snapshot", side_effect=ValueError("Fake problem to fail the visit"), ): loader 
= PyPILoader(url) actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "person": 0, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats assert_last_visit_matches(loader.storage, url, status="partial", type="pypi") # problem during loading: failure early enough in between swh contents... # some contents (contents, directories, etc...) have been written in storage # {visit: partial, status: eventful, no snapshot} # problem during loading: failure late enough we can have snapshots (some # revisions are written in storage already) # {visit: partial, status: eventful, snapshot} # "normal" cases (for the same origin) # requests_mock_datadir_missing_one = requests_mock_datadir_factory( ignore_urls=[ "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa ] ) # some missing release artifacts: # {visit partial, status: eventful, 1 snapshot} def test_revision_metadata_structure(swh_config, requests_mock_datadir): url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None check_metadata_paths( revision["metadata"], paths=[ ("intrinsic.tool", str), ("intrinsic.raw", dict), ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) for original_artifact in revision["metadata"]["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) def test_snapshot_metadata(swh_config, requests_mock_datadir, _0805nexter_api_info): url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None snapshot_swhid = SWHID( object_type="snapshot", object_id=hash_to_hex(actual_load_status["snapshot_id"]) ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://pypi.org/", ) expected_metadata = [ RawExtrinsicMetadata( type=MetadataTargetType.SNAPSHOT, id=snapshot_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.pypi.loader.PyPILoader", version=__version__, ), discovery_date=loader.visit_date, format="pypi-project-json", metadata=_0805nexter_api_info, origin=url, ) ] assert loader.storage.raw_extrinsic_metadata_get( type=MetadataTargetType.SNAPSHOT, id=snapshot_swhid, authority=metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) def test_visit_with_missing_artifact(swh_config, requests_mock_datadir_missing_one): """Load a pypi project with some missing artifacts ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } stats = get_stats(loader.storage) assert { "content": 3, "directory": 2, "origin": 1, "origin_visit": 1, "person": 1, "release": 0, 
"revision": 1, "skipped_content": 0, "snapshot": 1, } == stats expected_contents = map( hash_to_bytes, [ "405859113963cb7a797642b45f171d6360425d16", "e5686aa568fdb1d19d7f1329267082fe40482d31", "83ecf6ec1114fd260ca7a833a2d165e71258c338", ], ) assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map( hash_to_bytes, [ "b178b66bd22383d5f16f4f5c923d39ca798861b4", "c3a58f8b57433a4b56caaa5033ae2e0931405338", ], ) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( "b178b66bd22383d5f16f4f5c923d39ca798861b4" ), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, storage=loader.storage) assert_last_visit_matches( loader.storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id, ) def test_visit_with_1_release_artifact(swh_config, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } stats = get_stats(loader.storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, "person": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == stats expected_contents = map( hash_to_bytes, [ "a61e24cdfdab3bb7817f6be85d37a3e666b34566", "938c33483285fd8ad57f15497f538320df82aeb8", "a27576d60e08c94a05006d2e6d540c0fdb5f38c8", "405859113963cb7a797642b45f171d6360425d16", "e5686aa568fdb1d19d7f1329267082fe40482d31", "83ecf6ec1114fd260ca7a833a2d165e71258c338", ], ) assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map( hash_to_bytes, [ "05219ba38bc542d4345d5638af1ed56c7d43ca7d", "cf019eb456cf6f78d8c4674596f1c9a97ece8f44", "b178b66bd22383d5f16f4f5c923d39ca798861b4", "c3a58f8b57433a4b56caaa5033ae2e0931405338", ], ) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes( "05219ba38bc542d4345d5638af1ed56c7d43ca7d" ), # noqa hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( "b178b66bd22383d5f16f4f5c923d39ca798861b4" ), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"releases/1.1.0": SnapshotBranch( target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), target_type=TargetType.REVISION, ), b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) def test_multiple_visits_with_no_change(swh_config, 
def test_multiple_visits_with_no_change(swh_config, requests_mock_datadir):
    """Multiple visits with no changes result in the same snapshot

    """
    url = "https://pypi.org/project/0805nexter"
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
    assert actual_load_status == {
        "status": "eventful",
        "snapshot_id": snapshot_id.hex(),
    }
    assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi", snapshot=snapshot_id
    )

    stats = get_stats(loader.storage)

    assert {
        "content": 6,
        "directory": 4,
        "origin": 1,
        "origin_visit": 1,
        "person": 1,
        "release": 0,
        "revision": 2,
        "skipped_content": 0,
        "snapshot": 1,
    } == stats

    expected_snapshot = Snapshot(
        id=snapshot_id,
        branches={
            b"releases/1.1.0": SnapshotBranch(
                target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                target_type=TargetType.REVISION,
            ),
            b"releases/1.2.0": SnapshotBranch(
                target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                target_type=TargetType.REVISION,
            ),
            b"HEAD": SnapshotBranch(
                target=b"releases/1.2.0", target_type=TargetType.ALIAS,
            ),
        },
    )
    check_snapshot(expected_snapshot, loader.storage)

    actual_load_status2 = loader.load()
    assert actual_load_status2 == {
        "status": "uneventful",
        "snapshot_id": actual_load_status2["snapshot_id"],
    }

    visit_status2 = assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi"
    )

    stats2 = get_stats(loader.storage)
    expected_stats2 = stats.copy()
    expected_stats2["origin_visit"] = 1 + 1
    assert expected_stats2 == stats2

    # same snapshot
    assert visit_status2.snapshot == snapshot_id


def test_incremental_visit(swh_config, requests_mock_datadir_visits):
    """With a prior visit, the second load results in a different snapshot

    """
    url = "https://pypi.org/project/0805nexter"
    loader = PyPILoader(url)

    visit1_actual_load_status = loader.load()
    visit1_stats = get_stats(loader.storage)
    expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a")
    assert visit1_actual_load_status == {
        "status": "eventful",
        "snapshot_id": expected_snapshot_id.hex(),
    }

    assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id
    )

    assert {
        "content": 6,
        "directory": 4,
        "origin": 1,
        "origin_visit": 1,
        "person": 1,
        "release": 0,
        "revision": 2,
        "skipped_content": 0,
        "snapshot": 1,
    } == visit1_stats

    # Reset internal state
-    loader._info = None
+    del loader._cached__raw_info
+    del loader._cached_info

    visit2_actual_load_status = loader.load()
    visit2_stats = get_stats(loader.storage)

-    assert visit2_actual_load_status["status"] == "eventful"
+    assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status
    expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283")
    assert visit2_actual_load_status == {
        "status": "eventful",
        "snapshot_id": expected_snapshot_id2.hex(),
    }

    assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2
    )

    assert {
        "content": 6 + 1,  # 1 more content
        "directory": 4 + 2,  # 2 more directories
        "origin": 1,
        "origin_visit": 1 + 1,
        "person": 1,
        "release": 0,
        "revision": 2 + 1,  # 1 more revision
        "skipped_content": 0,
        "snapshot": 1 + 1,  # 1 more snapshot
    } == visit2_stats

    expected_contents = map(
        hash_to_bytes,
        [
            "a61e24cdfdab3bb7817f6be85d37a3e666b34566",
            "938c33483285fd8ad57f15497f538320df82aeb8",
            "a27576d60e08c94a05006d2e6d540c0fdb5f38c8",
            "405859113963cb7a797642b45f171d6360425d16",
            "e5686aa568fdb1d19d7f1329267082fe40482d31",
            "83ecf6ec1114fd260ca7a833a2d165e71258c338",
            "92689fa2b7fb4d4fc6fb195bf73a50c87c030639",
        ],
    )

    assert list(loader.storage.content_missing_per_sha1(expected_contents)) == []

    expected_dirs = map(
        hash_to_bytes,
        [
            "05219ba38bc542d4345d5638af1ed56c7d43ca7d",
            "cf019eb456cf6f78d8c4674596f1c9a97ece8f44",
            "b178b66bd22383d5f16f4f5c923d39ca798861b4",
            "c3a58f8b57433a4b56caaa5033ae2e0931405338",
            "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a",
            "52604d46843b898f5a43208045d09fcf8731631b",
        ],
    )

    assert list(loader.storage.directory_missing(expected_dirs)) == []

    # {revision hash: directory hash}
    expected_revs = {
        hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes(
            "05219ba38bc542d4345d5638af1ed56c7d43ca7d"
        ),  # noqa
        hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes(
            "b178b66bd22383d5f16f4f5c923d39ca798861b4"
        ),  # noqa
        hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes(
            "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a"
        ),  # noqa
    }
    assert list(loader.storage.revision_missing(expected_revs)) == []

    expected_snapshot = Snapshot(
        id=expected_snapshot_id2,
        branches={
            b"releases/1.1.0": SnapshotBranch(
                target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                target_type=TargetType.REVISION,
            ),
            b"releases/1.2.0": SnapshotBranch(
                target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"),
                target_type=TargetType.REVISION,
            ),
            b"releases/1.3.0": SnapshotBranch(
                target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"),
                target_type=TargetType.REVISION,
            ),
            b"HEAD": SnapshotBranch(
                target=b"releases/1.3.0", target_type=TargetType.ALIAS,
            ),
        },
    )
    check_snapshot(expected_snapshot, loader.storage)

    assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
    )

    urls = [
        m.url
        for m in requests_mock_datadir_visits.request_history
        if m.url.startswith("https://files.pythonhosted.org")
    ]
    # visited each artifact once across 2 visits
    assert len(urls) == len(set(urls))


# release artifact, no new artifact
# {visit full, status uneventful, same snapshot as before}

# release artifact, old artifact with different checksums
# {visit full, status full, new snapshot with shared history and some new
# different history}

# release with multiple sdist artifacts per pypi "version"
# snapshot branch output is different


def test_visit_1_release_with_2_artifacts(swh_config, requests_mock_datadir):
    """With no prior visit, loading a pypi project ends up with 1 snapshot

    """
    url = "https://pypi.org/project/nexter"
    loader = PyPILoader(url)

    actual_load_status = loader.load()
    expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6")
    assert actual_load_status == {
        "status": "eventful",
        "snapshot_id": expected_snapshot_id.hex(),
    }

    expected_snapshot = Snapshot(
        id=expected_snapshot_id,
        branches={
            b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch(
                target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"),
                target_type=TargetType.REVISION,
            ),
            b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch(
                target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"),
                target_type=TargetType.REVISION,
            ),
        },
    )
    check_snapshot(expected_snapshot, loader.storage)

    assert_last_visit_matches(
        loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id
    )
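# Illustrative sketch only — the real artifact_to_revision_id lives in the pypi loader
# module. The three tests below expect a lookup over known_artifacts that matches the
# artifact's sha256 against either the old metadata scheme ("original_artifact" as a
# single dict) or the current one (a list of artifacts with per-artifact "checksums"):
def artifact_to_revision_id_sketch(known_artifacts, artifact_metadata):
    sha256 = artifact_metadata.sha256
    for rev_id, known in known_artifacts.items():
        original = known.get("original_artifact", {})
        if isinstance(original, dict):  # old scheme
            if original.get("sha256") == sha256:
                return rev_id
        else:  # current scheme: list of artifact dicts
            for artifact in original:
                if artifact.get("checksums", {}).get("sha256") == sha256:
                    return rev_id
    return None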
"original_artifact": {"sha256": "something-irrelevant",}, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_pypi_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ class artifact_metadata: sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": {"sha256": "something-wrong",}, }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": { "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa }, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "845673bfe8cbd31b1eaf757745a964137e6f9116" ) def test_pypi_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ class artifact_metadata: sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": [ { "checksums": { "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa }, } ], }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": [{"checksums": {"sha256": "something-wrong"},}], }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" ) def test_pypi_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ url = "https://pypi.org/project/upymenu" loader = PyPILoader(url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot(id=expected_snapshot_id, branches={}) check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id ) diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index e83cf64..b6189a1 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,123 +1,144 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy +import functools import logging import os import requests -from typing import Dict, Optional, Tuple +from typing import Callable, Dict, Optional, Tuple, TypeVar from swh.model.hashutil import MultiHash, HASH_BLOCK_SIZE from swh.model.model import Person from swh.loader.package import DEFAULT_PARAMS logger = logging.getLogger(__name__) DOWNLOAD_HASHES = set(["sha1", "sha256", "length"]) EMPTY_AUTHOR = Person(fullname=b"", name=None, email=None,) def api_info(url: str, **extra_params) -> bytes: """Basic api client to retrieve information on project. This deals with fetching json metadata about pypi projects. Args: url (str): The api url (e.g PyPI, npm, etc...) Raises: ValueError in case of query failures (for some reasons: 404, ...) 
diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py
index e83cf64..b6189a1 100644
--- a/swh/loader/package/utils.py
+++ b/swh/loader/package/utils.py
@@ -1,123 +1,144 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import copy
+import functools
import logging
import os

import requests

-from typing import Dict, Optional, Tuple
+from typing import Callable, Dict, Optional, Tuple, TypeVar

from swh.model.hashutil import MultiHash, HASH_BLOCK_SIZE
from swh.model.model import Person

from swh.loader.package import DEFAULT_PARAMS


logger = logging.getLogger(__name__)


DOWNLOAD_HASHES = set(["sha1", "sha256", "length"])


EMPTY_AUTHOR = Person(fullname=b"", name=None, email=None,)


def api_info(url: str, **extra_params) -> bytes:
    """Basic api client to retrieve information on project.

    This deals with fetching json metadata about pypi projects.

    Args:
        url (str): The api url (e.g. PyPI, npm, etc...)

    Raises:
        ValueError in case of query failures (404, ...)

    Returns:
        The associated response's information

    """
    response = requests.get(url, **{**DEFAULT_PARAMS, **extra_params})
    if response.status_code != 200:
        raise ValueError("Fail to query '%s'. Reason: %s" % (url, response.status_code))
    return response.content


def download(
    url: str,
    dest: str,
    hashes: Dict = {},
    filename: Optional[str] = None,
    auth: Optional[Tuple[str, str]] = None,
) -> Tuple[str, Dict]:
    """Download a remote tarball from url and compute swh hashes on it.

    Args:
        url: Artifact uri to fetch and hash
        dest: Directory to write the archive to
        hashes: Dict of expected hashes (key is the hash algo) for the artifact
            to download (those hashes are expected to be hex string)
        auth: Optional tuple of login/password (for http authentication
            service, e.g. deposit)

    Raises:
        ValueError in case of any error when fetching/computing (length,
        checksums mismatched...)

    Returns:
        Tuple of local (filepath, hashes of filepath)

    """
    params = copy.deepcopy(DEFAULT_PARAMS)
    if auth is not None:
        params["auth"] = auth
    # so the connection does not hang indefinitely (read/connection timeout)
    timeout = params.get("timeout", 60)
    response = requests.get(url, **params, timeout=timeout, stream=True)
    if response.status_code != 200:
        raise ValueError("Fail to query '%s'. Reason: %s" % (url, response.status_code))

    filename = filename if filename else os.path.basename(url)
    logger.debug("filename: %s", filename)
    filepath = os.path.join(dest, filename)
    logger.debug("filepath: %s", filepath)

    h = MultiHash(hash_names=DOWNLOAD_HASHES)
    with open(filepath, "wb") as f:
        for chunk in response.iter_content(chunk_size=HASH_BLOCK_SIZE):
            h.update(chunk)
            f.write(chunk)

    # Also check the expected hashes if provided
    if hashes:
        actual_hashes = h.hexdigest()
        for algo_hash in hashes.keys():
            actual_digest = actual_hashes[algo_hash]
            expected_digest = hashes[algo_hash]
            if actual_digest != expected_digest:
                raise ValueError(
                    "Failure when fetching %s. "
                    "Checksum mismatched: %s != %s"
                    % (url, expected_digest, actual_digest)
                )

    computed_hashes = h.hexdigest()
    length = computed_hashes.pop("length")
    extrinsic_metadata = {
        "length": length,
        "filename": filename,
        "checksums": computed_hashes,
    }
    logger.debug("extrinsic_metadata: %s", extrinsic_metadata)

    return filepath, extrinsic_metadata


def release_name(version: str, filename: Optional[str] = None) -> str:
    if filename:
        return "releases/%s/%s" % (version, filename)
    return "releases/%s" % version
+
+
+TReturn = TypeVar("TReturn")
+TSelf = TypeVar("TSelf")
+
+_UNDEFINED = object()
+
+
+def cached_method(f: Callable[[TSelf], TReturn]) -> Callable[[TSelf], TReturn]:
+    cache_name = f"_cached_{f.__name__}"
+
+    @functools.wraps(f)
+    def newf(self):
+        value = getattr(self, cache_name, _UNDEFINED)
+        if value is _UNDEFINED:
+            value = f(self)
+            setattr(self, cache_name, value)
+        return value
+
+    return newf
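# A minimal usage sketch of the cached_method decorator added above; the class below is
# made up for illustration. The result is memoized on the instance under
# "_cached_<method name>", which is why the incremental-visit test resets state with
# `del loader._cached_info` (and `del loader._cached__raw_info`, matching a method
# named `_raw_info`):
from swh.loader.package.utils import cached_method


class ExampleLoader:
    def __init__(self, url):
        self.url = url
        self.calls = 0

    @cached_method
    def info(self):
        self.calls += 1  # the expensive work (e.g. an HTTP request) runs only once
        return {"url": self.url}


loader = ExampleLoader("https://example.org")
assert loader.info() is loader.info()  # second call hits the cache
assert loader.calls == 1
del loader._cached_info  # dropping the attribute resets the memoization
loader.info()
assert loader.calls == 2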