diff --git a/swh/__init__.py b/swh/__init__.py
index 8d9f151..b36383a 100644
--- a/swh/__init__.py
+++ b/swh/__init__.py
@@ -1,4 +1,3 @@
 from pkgutil import extend_path
-from typing import List
 
-__path__: List[str] = extend_path(__path__, __name__)
+__path__ = extend_path(__path__, __name__)
diff --git a/swh/indexer/origin_head.py b/swh/indexer/origin_head.py
index 61c495a..fec42c8 100644
--- a/swh/indexer/origin_head.py
+++ b/swh/indexer/origin_head.py
@@ -1,154 +1,154 @@
 # Copyright (C) 2018-2020  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import logging
 import re
 from typing import Any, Dict, List, Tuple, Union
 
 import click
 
 from swh.indexer.indexer import OriginIndexer
 from swh.model.model import SnapshotBranch, TargetType
 from swh.storage.algos.origin import origin_get_latest_visit_status
 from swh.storage.algos.snapshot import snapshot_get_all_branches
 
 
 class OriginHeadIndexer(OriginIndexer[Dict]):
     """Origin-level indexer.
 
     This indexer is in charge of looking up the revision that acts as the
     "head" of an origin.
 
     In git, this is usually the commit pointed to by the 'master' branch."""
 
     USE_TOOLS = False
 
     def persist_index_computations(self, results: Any) -> Dict[str, int]:
         """Do nothing. The indexer's results are not persistent, they
         should only be piped to another indexer."""
         return {}
 
     # Dispatch
 
     def index(self, id: str, data: None = None, **kwargs) -> List[Dict]:
         origin_url = id
         visit_status = origin_get_latest_visit_status(
             self.storage, origin_url, allowed_statuses=["full"], require_snapshot=True
         )
         if not visit_status:
             return []
         assert visit_status.snapshot is not None
         snapshot = snapshot_get_all_branches(self.storage, visit_status.snapshot)
         if snapshot is None:
             return []
         method = getattr(
             self, "_try_get_%s_head" % visit_status.type, self._try_get_head_generic
         )
 
-        rev_id = method(snapshot.branches)
+        rev_id = method(snapshot.branches)  # type: ignore
         if rev_id is not None:
             return [{"origin_url": origin_url, "revision_id": rev_id,}]
 
         # could not find a head revision
         return []
 
     # Tarballs
 
     _archive_filename_re = re.compile(
         rb"^"
         rb"(?P<pkgname>.*)[-_]"
         rb"(?P<version>[0-9]+(\.[0-9])*)"
         rb"(?P<preversion>[-+][a-zA-Z0-9.~]+?)?"
rb"(?P(\.[a-zA-Z0-9]+)+)" rb"$" ) @classmethod def _parse_version(cls: Any, filename: bytes) -> Tuple[Union[float, int], ...]: """Extracts the release version from an archive filename, to get an ordering whose maximum is likely to be the last version of the software >>> OriginHeadIndexer._parse_version(b'foo') (-inf,) >>> OriginHeadIndexer._parse_version(b'foo.tar.gz') (-inf,) >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1.tar.gz') (0, 0, 1, 0) >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1-beta2.tar.gz') (0, 0, 1, -1, 'beta2') >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1+foobar.tar.gz') (0, 0, 1, 1, 'foobar') """ res = cls._archive_filename_re.match(filename) if res is None: return (float("-infinity"),) version = [int(n) for n in res.group("version").decode().split(".")] if res.group("preversion") is None: version.append(0) else: preversion = res.group("preversion").decode() if preversion.startswith("-"): version.append(-1) version.append(preversion[1:]) elif preversion.startswith("+"): version.append(1) version.append(preversion[1:]) else: assert False, res.group("preversion") return tuple(version) def _try_get_ftp_head(self, branches: Dict[bytes, SnapshotBranch]) -> Any: archive_names = list(branches) max_archive_name = max(archive_names, key=self._parse_version) r = self._try_resolve_target(branches, max_archive_name) return r # Generic def _try_get_head_generic(self, branches: Dict[bytes, SnapshotBranch]) -> Any: # Works on 'deposit', 'pypi', and VCSs. return self._try_resolve_target(branches, b"HEAD") or self._try_resolve_target( branches, b"master" ) def _try_resolve_target( self, branches: Dict[bytes, SnapshotBranch], branch_name: bytes ) -> Any: try: branch = branches[branch_name] if branch is None: return None while branch.target_type == TargetType.ALIAS: branch = branches[branch.target] if branch is None: return None if branch.target_type == TargetType.REVISION: return branch.target elif branch.target_type == TargetType.CONTENT: return None # TODO elif branch.target_type == TargetType.DIRECTORY: return None # TODO elif branch.target_type == TargetType.RELEASE: return None # TODO else: assert False, branch except KeyError: return None @click.command() @click.option( "--origins", "-i", help='Origins to lookup, in the "type+url" format', multiple=True ) def main(origins: List[str]) -> None: rev_metadata_indexer = OriginHeadIndexer() rev_metadata_indexer.run(origins) if __name__ == "__main__": logging.basicConfig(level=logging.INFO) main() diff --git a/swh/indexer/storage/model.py b/swh/indexer/storage/model.py index 3eace16..ad3cbed 100644 --- a/swh/indexer/storage/model.py +++ b/swh/indexer/storage/model.py @@ -1,138 +1,138 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Classes used internally by the in-memory idx-storage, and will be used for the interface of the idx-storage in the near future.""" from __future__ import annotations from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar import attr from typing_extensions import Final from swh.model.model import Sha1Git, dictify TSelf = TypeVar("TSelf") @attr.s class BaseRow: UNIQUE_KEY_FIELDS: Tuple = ("id", "indexer_configuration_id") id = attr.ib(type=Any) indexer_configuration_id = attr.ib(type=Optional[int], default=None, kw_only=True) tool = attr.ib(type=Optional[Dict], default=None, 
 
     def __attrs_post_init__(self):
         if self.indexer_configuration_id is None and self.tool is None:
             raise TypeError(
                 "Either indexer_configuration_id or tool must be not None."
             )
         if self.indexer_configuration_id is not None and self.tool is not None:
             raise TypeError(
                 "indexer_configuration_id and tool are mutually exclusive; "
                 "only one may be not None."
             )
 
     def anonymize(self: TSelf) -> Optional[TSelf]:
         # Needed to implement swh.journal.writer.ValueProtocol
         return None
 
     def to_dict(self) -> Dict[str, Any]:
         """Wrapper of `attr.asdict` that can be overridden by subclasses
         that have special handling of some of the fields."""
         d = dictify(attr.asdict(self, recurse=False))
 
         if d["indexer_configuration_id"] is None:
             del d["indexer_configuration_id"]
         if d["tool"] is None:
             del d["tool"]
 
         return d
 
     @classmethod
     def from_dict(cls: Type[TSelf], d) -> TSelf:
-        return cls(**d)  # type: ignore
+        return cls(**d)
 
     def unique_key(self) -> Dict:
         obj = self
 
         # tool["id"] and obj.indexer_configuration_id are the same value, but
         # only one of them is set for any given object
         if obj.indexer_configuration_id is None:
             assert obj.tool  # constructors ensures tool XOR indexer_configuration_id
             obj = attr.evolve(obj, indexer_configuration_id=obj.tool["id"], tool=None)
 
         return {key: getattr(obj, key) for key in self.UNIQUE_KEY_FIELDS}
 
 
 @attr.s
 class ContentMimetypeRow(BaseRow):
     object_type: Final = "content_mimetype"
 
     id = attr.ib(type=Sha1Git)
     mimetype = attr.ib(type=str)
     encoding = attr.ib(type=str)
 
 
 @attr.s
 class ContentLanguageRow(BaseRow):
     object_type: Final = "content_language"
 
     id = attr.ib(type=Sha1Git)
     lang = attr.ib(type=str)
 
 
 @attr.s
 class ContentCtagsRow(BaseRow):
     object_type: Final = "content_ctags"
     UNIQUE_KEY_FIELDS = (
         "id",
         "indexer_configuration_id",
         "name",
         "kind",
         "line",
         "lang",
     )
 
     id = attr.ib(type=Sha1Git)
     name = attr.ib(type=str)
     kind = attr.ib(type=str)
     line = attr.ib(type=int)
     lang = attr.ib(type=str)
 
 
 @attr.s
 class ContentLicenseRow(BaseRow):
     object_type: Final = "content_fossology_license"
     UNIQUE_KEY_FIELDS = ("id", "indexer_configuration_id", "license")
 
     id = attr.ib(type=Sha1Git)
     license = attr.ib(type=str)
 
 
 @attr.s
 class ContentMetadataRow(BaseRow):
     object_type: Final = "content_metadata"
 
     id = attr.ib(type=Sha1Git)
     metadata = attr.ib(type=Dict[str, Any])
 
 
 @attr.s
 class RevisionIntrinsicMetadataRow(BaseRow):
     object_type: Final = "revision_intrinsic_metadata"
 
     id = attr.ib(type=Sha1Git)
     metadata = attr.ib(type=Dict[str, Any])
     mappings = attr.ib(type=List[str])
 
 
 @attr.s
 class OriginIntrinsicMetadataRow(BaseRow):
     object_type: Final = "origin_intrinsic_metadata"
 
     id = attr.ib(type=str)
     metadata = attr.ib(type=Dict[str, Any])
     from_revision = attr.ib(type=Sha1Git)
     mappings = attr.ib(type=List[str])
diff --git a/tox.ini b/tox.ini
index 3afda02..0af6d83 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,78 +1,78 @@
 [tox]
 envlist=black,flake8,mypy,py3
 
 [testenv]
 extras =
   testing
 deps =
   pytest-cov
   swh-scheduler[testing] >= 0.5.0
   swh-storage[testing] >= 0.10.0
   dev: pdbpp
 commands =
   pytest --doctest-modules \
          !slow: --hypothesis-profile=fast \
          slow: --hypothesis-profile=slow \
          {envsitepackagesdir}/swh/indexer \
          --cov={envsitepackagesdir}/swh/indexer \
          --cov-branch {posargs}
 
 [testenv:black]
 skip_install = true
 deps =
   black==19.10b0
 commands =
   {envpython} -m black --check swh
 
 [testenv:flake8]
 skip_install = true
 deps =
   flake8
 commands =
   {envpython} -m flake8
 
 [testenv:mypy]
 extras =
   testing
 deps =
-  mypy
+  mypy==0.920
 commands =
   mypy swh
 
 # build documentation outside swh-environment using the current
 # git HEAD of swh-docs, is executed on CI for each diff to prevent
 # breaking doc build
 [testenv:sphinx]
 whitelist_externals = make
 usedevelop = true
 extras =
   testing
 deps =
   # fetch and install swh-docs in develop mode
   -e git+https://forge.softwareheritage.org/source/swh-docs#egg=swh.docs
 
 setenv =
   SWH_PACKAGE_DOC_TOX_BUILD = 1
   # turn warnings into errors
   SPHINXOPTS = -W
 commands =
   make -I ../.tox/sphinx/src/swh-docs/swh/ -C docs
 
 
 # build documentation only inside swh-environment using local state
 # of swh-docs package
 [testenv:sphinx-dev]
 whitelist_externals = make
 usedevelop = true
 extras =
   testing
 deps =
   # install swh-docs in develop mode
   -e ../swh-docs
 
 setenv =
   SWH_PACKAGE_DOC_TOX_BUILD = 1
   # turn warnings into errors
   SPHINXOPTS = -W
 commands =
   make -I ../.tox/sphinx-dev/src/swh-docs/swh/ -C docs
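
Note (illustrative, not part of the diff): a minimal sketch of how the tarball ("ftp") head selection in swh/indexer/origin_head.py is expected to behave, using _parse_version as the max() key over branch names, in line with the doctests in that file. The archive filenames below are made-up examples.

    # Illustrative only: _parse_version maps archive filenames to orderable
    # tuples, so max() picks the name most likely to be the latest release.
    from swh.indexer.origin_head import OriginHeadIndexer

    names = [
        b"gnu-hello-0.0.1.tar.gz",
        b"gnu-hello-0.0.2-beta1.tar.gz",
        b"gnu-hello-0.0.2.tar.gz",
    ]
    print(max(names, key=OriginHeadIndexer._parse_version))
    # -> b'gnu-hello-0.0.2.tar.gz' (the final release sorts above its beta,
    #    because the pre-release marker contributes a -1 to the tuple)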
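Also illustrative, not part of the diff: the unique_key() normalization in swh/indexer/storage/model.py folds tool["id"] into indexer_configuration_id (per the in-code comment, both carry the same value), so a row built either way yields the same key. The tool dict and hash below are made-up values for the sketch.

    # Illustrative only: one row built with a tool dict, one with the id
    # directly; unique_key() should normalize both to the same key.
    from swh.indexer.storage.model import ContentMimetypeRow

    sha1 = b"\x01" * 20  # made-up content sha1_git
    with_tool = ContentMimetypeRow(
        id=sha1,
        mimetype="text/plain",
        encoding="us-ascii",
        tool={"id": 42, "name": "file", "version": "5.38", "configuration": {}},
    )
    with_config_id = ContentMimetypeRow(
        id=sha1,
        mimetype="text/plain",
        encoding="us-ascii",
        indexer_configuration_id=42,
    )
    assert with_tool.unique_key() == with_config_id.unique_key()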