diff --git a/swh/indexer/origin_head.py b/swh/indexer/origin_head.py
index 53d4770..440fcf9 100644
--- a/swh/indexer/origin_head.py
+++ b/swh/indexer/origin_head.py
@@ -1,154 +1,159 @@
 # Copyright (C) 2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import re

 import click
 import logging

 from swh.indexer.indexer import OriginIndexer


 class OriginHeadIndexer(OriginIndexer):
     """Origin-level indexer.

     This indexer is in charge of looking up the revision that acts as the
     "head" of an origin.

     In git, this is usually the commit pointed to by the 'master' branch."""

     USE_TOOLS = False

     def persist_index_computations(self, results, policy_update):
         """Do nothing. The indexer's results are not persistent, they
         should only be piped to another indexer."""
         pass

     # Dispatch

     def index(self, origin):
         origin_id = origin['id']
         latest_snapshot = self.storage.snapshot_get_latest(origin_id)
         if latest_snapshot is None:
             return None
         method = getattr(self, '_try_get_%s_head' % origin['type'], None)
         if method is None:
             method = self._try_get_head_generic
         rev_id = method(latest_snapshot)
         if rev_id is None:
             return None
         result = {
                 'origin_id': origin_id,
                 'revision_id': rev_id,
                 }
         return result

     # VCSs

     def _try_get_vcs_head(self, snapshot):
         try:
             branches = snapshot['branches']
             if branches[b'HEAD']['target_type'] == 'revision':
                 return branches[b'HEAD']['target']
         except KeyError:
             return None

     _try_get_hg_head = _try_get_git_head = _try_get_vcs_head

     # Tarballs

     _archive_filename_re = re.compile(
             rb'^'
             rb'(?P<pkgname>.*)[-_]'
             rb'(?P<version>[0-9]+(\.[0-9])*)'
             rb'(?P<preversion>[-+][a-zA-Z0-9.~]+?)?'
             rb'(?P<extension>(\.[a-zA-Z0-9]+)+)'
             rb'$')

     @classmethod
     def _parse_version(cls, filename):
         """Extracts the release version from an archive filename,
         to get an ordering whose maximum is likely to be the last
         version of the software

         >>> OriginHeadIndexer._parse_version(b'foo')
         (-inf,)
         >>> OriginHeadIndexer._parse_version(b'foo.tar.gz')
         (-inf,)
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1.tar.gz')
         (0, 0, 1, 0)
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1-beta2.tar.gz')
         (0, 0, 1, -1, 'beta2')
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1+foobar.tar.gz')
         (0, 0, 1, 1, 'foobar')
         """
         res = cls._archive_filename_re.match(filename)
         if res is None:
             return (float('-infinity'),)
         version = [int(n) for n in res.group('version').decode().split('.')]
         if res.group('preversion') is None:
             version.append(0)
         else:
             preversion = res.group('preversion').decode()
             if preversion.startswith('-'):
                 version.append(-1)
                 version.append(preversion[1:])
             elif preversion.startswith('+'):
                 version.append(1)
                 version.append(preversion[1:])
             else:
                 assert False, res.group('preversion')
         return tuple(version)

     def _try_get_ftp_head(self, snapshot):
         archive_names = list(snapshot['branches'])
         max_archive_name = max(archive_names, key=self._parse_version)
         r = self._try_resolve_target(snapshot['branches'], max_archive_name)
         return r

     # Generic

     def _try_get_head_generic(self, snapshot):
         # Works on 'deposit', 'svn', and 'pypi'.
         try:
             branches = snapshot['branches']
         except KeyError:
             return None
         else:
             return (
                     self._try_resolve_target(branches, b'HEAD') or
                     self._try_resolve_target(branches, b'master')
                     )

     def _try_resolve_target(self, branches, target_name):
         try:
             target = branches[target_name]
+            if target is None:
+                return None
             while target['target_type'] == 'alias':
                 target = branches[target['target']]
+                if target is None:
+                    return None
+
             if target['target_type'] == 'revision':
                 return target['target']
             elif target['target_type'] == 'content':
                 return None  # TODO
             elif target['target_type'] == 'directory':
                 return None  # TODO
             elif target['target_type'] == 'release':
                 return None  # TODO
             else:
                 assert False
         except KeyError:
             return None


 @click.command()
 @click.option('--origins', '-i',
               help='Origins to lookup, in the "type+url" format',
               multiple=True)
 def main(origins):
     rev_metadata_indexer = OriginHeadIndexer()
     rev_metadata_indexer.run(origins)


 if __name__ == '__main__':
     logging.basicConfig(level=logging.INFO)
     main()
diff --git a/swh/indexer/tests/test_origin_head.py b/swh/indexer/tests/test_origin_head.py
index 8067f46..8b90afa 100644
--- a/swh/indexer/tests/test_origin_head.py
+++ b/swh/indexer/tests/test_origin_head.py
@@ -1,125 +1,145 @@
 # Copyright (C) 2017-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import unittest

 from swh.indexer.origin_head import OriginHeadIndexer
 from swh.indexer.tests.utils import (
     BASE_TEST_CONFIG, fill_storage
 )

 ORIGIN_HEAD_CONFIG = {
     **BASE_TEST_CONFIG,
     'tools': {
         'name': 'origin-metadata',
         'version': '0.0.1',
         'configuration': {},
     },
     'tasks': {
         'revision_metadata': None,
         'origin_intrinsic_metadata': None,
     }
 }


 class OriginHeadTestIndexer(OriginHeadIndexer):
     """Specific indexer whose configuration is enough to satisfy the
        indexing tests.
""" def parse_config_file(self, *args, **kwargs): return ORIGIN_HEAD_CONFIG def persist_index_computations(self, results, policy_update): self.results = results class OriginHead(unittest.TestCase): def setUp(self): self.indexer = OriginHeadTestIndexer() self.indexer.catch_exceptions = False fill_storage(self.indexer.storage) def _get_origin_id(self, type_, url): origin = self.indexer.storage.origin_get({ 'type': type_, 'url': url}) return origin['id'] def test_git(self): self.indexer.run( ['git+https://github.com/SoftwareHeritage/swh-storage']) origin_id = self._get_origin_id( 'git', 'https://github.com/SoftwareHeritage/swh-storage') self.assertEqual(self.indexer.results, [{ 'revision_id': b'8K\x12\x00d\x03\xcc\xe4]bS\xe3\x8f{' b'\xd7}\xac\xefrm', 'origin_id': origin_id}]) def test_vcs_missing_snapshot(self): self.indexer.storage.origin_add([{ 'type': 'git', 'url': 'https://github.com/SoftwareHeritage/swh-indexer', }]) self.indexer.run( ['git+https://github.com/SoftwareHeritage/swh-indexer']) self.assertEqual(self.indexer.results, []) + def test_pypi_missing_branch(self): + origin_id = self.indexer.storage.origin_add_one({ + 'type': 'pypi', + 'url': 'https://pypi.org/project/abcdef/', + }) + visit = self.indexer.storage.origin_visit_add( + origin_id, '2019-02-27') + self.indexer.storage.snapshot_add(origin_id, visit['visit'], { + 'id': 'foo', + 'branches': { + b'foo': None, + b'HEAD': { + 'target_type': 'alias', + 'target': b'foo', + } + } + }) + self.indexer.run(['pypi+https://pypi.org/project/abcdef/']) + self.assertEqual(self.indexer.results, []) + def test_ftp(self): self.indexer.run( ['ftp+rsync://ftp.gnu.org/gnu/3dldf']) origin_id = self._get_origin_id( 'ftp', 'rsync://ftp.gnu.org/gnu/3dldf') self.assertEqual(self.indexer.results, [{ 'revision_id': b'\x8e\xa9\x8e/\xea}\x9feF\xf4\x9f\xfd\xee' b'\xcc\x1a\xb4`\x8c\x8by', 'origin_id': origin_id}]) def test_ftp_missing_snapshot(self): self.indexer.storage.origin_add([{ 'type': 'ftp', 'url': 'rsync://ftp.gnu.org/gnu/foobar', }]) self.indexer.run( ['ftp+rsync://ftp.gnu.org/gnu/foobar']) self.assertEqual(self.indexer.results, []) def test_deposit(self): self.indexer.run( ['deposit+https://forge.softwareheritage.org/source/' 'jesuisgpl/']) origin_id = self._get_origin_id( 'deposit', 'https://forge.softwareheritage.org/source/jesuisgpl/') self.assertEqual(self.indexer.results, [{ 'revision_id': b'\xe7n\xa4\x9c\x9f\xfb\xb7\xf76\x11\x08{' b'\xa6\xe9\x99\xb1\x9e]q\xeb', 'origin_id': origin_id}]) def test_deposit_missing_snapshot(self): self.indexer.storage.origin_add([{ 'type': 'deposit', 'url': 'https://forge.softwareheritage.org/source/foobar', }]) self.indexer.run( ['deposit+https://forge.softwareheritage.org/source/foobar']) self.assertEqual(self.indexer.results, []) def test_pypi(self): self.indexer.run( ['pypi+https://pypi.org/project/limnoria/']) origin_id = self._get_origin_id( 'pypi', 'https://pypi.org/project/limnoria/') self.assertEqual(self.indexer.results, [{ 'revision_id': b'\x83\xb9\xb6\xc7\x05\xb1%\xd0\xfem\xd8k' b'A\x10\x9d\xc5\xfa2\xf8t', 'origin_id': origin_id}]) def test_svn(self): self.indexer.run( ['svn+http://0-512-md.googlecode.com/svn/']) origin_id = self._get_origin_id( 'svn', 'http://0-512-md.googlecode.com/svn/') self.assertEqual(self.indexer.results, [{ 'revision_id': b'\xe4?r\xe1,\x88\xab\xec\xe7\x9a\x87\xb8' b'\xc9\xad#.\x1bw=\x18', 'origin_id': origin_id}])