diff --git a/swh/indexer/origin_head.py b/swh/indexer/origin_head.py
index 54123ac..6a1ca96 100644
--- a/swh/indexer/origin_head.py
+++ b/swh/indexer/origin_head.py
@@ -1,217 +1,221 @@
 # Copyright (C) 2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import re
 
 import click
 import logging
 
 from swh.scheduler import get_scheduler
 from swh.scheduler.utils import create_task_dict
 
 from swh.indexer.indexer import OriginIndexer
 
 
 class OriginHeadIndexer(OriginIndexer):
     """Origin-level indexer.
 
     This indexer is in charge of looking up the revision that acts as the
     "head" of an origin.
 
     In git, this is usually the commit pointed to by the 'master' branch."""
 
     ADDITIONAL_CONFIG = {
         'tools': ('dict', {
             'name': 'origin-metadata',
             'version': '0.0.1',
             'configuration': {},
         }),
+        'tasks': ('dict', {
+            'revision_metadata': 'revision_metadata',
+            'origin_intrinsic_metadata': 'origin_metadata',
+        })
     }
 
     CONFIG_BASE_FILENAME = 'indexer/origin_head'
 
-    revision_metadata_task = 'revision_metadata'
-    origin_intrinsic_metadata_task = 'origin_metadata'
-
     def filter(self, ids):
         yield from ids
 
     def persist_index_computations(self, results, policy_update):
         """Do nothing. The indexer's results are not persistent, they
         should only be piped to another indexer."""
         pass
 
     def next_step(self, results, task):
         """Once the head is found, call the RevisionMetadataIndexer
         on these revisions, then call the OriginMetadataIndexer with
         both the origin_id and the revision metadata, so it can copy the
         revision metadata to the origin's metadata.
 
         Args:
             results (Iterable[dict]): Iterable of return values from `index`.
 
         """
         super().next_step(results, task)
-        if self.revision_metadata_task is None and \
-                self.origin_intrinsic_metadata_task is None:
+        revision_metadata_task = self.config['tasks']['revision_metadata']
+        origin_intrinsic_metadata_task = self.config['tasks'][
+            'origin_intrinsic_metadata']
+        if revision_metadata_task is None and \
+                origin_intrinsic_metadata_task is None:
             return
-        assert self.revision_metadata_task is not None
-        assert self.origin_intrinsic_metadata_task is not None
+        assert revision_metadata_task is not None
+        assert origin_intrinsic_metadata_task is not None
 
         # Second task to run after this one: copy the revision's metadata
         # to the origin
         sub_task = create_task_dict(
-            self.origin_intrinsic_metadata_task,
+            origin_intrinsic_metadata_task,
             'oneshot',
             origin_head={
                 str(result['origin_id']): result['revision_id'].decode()
                 for result in results},
             policy_update='update-dups',
             )
         del sub_task['next_run']  # Not json-serializable
 
         # First task to run after this one: index the metadata of the
         # revision
         task = create_task_dict(
-            self.revision_metadata_task,
+            revision_metadata_task,
             'oneshot',
             ids=[res['revision_id'].decode() for res in results],
             policy_update='update-dups',
             next_step={
                 **sub_task,
                 'result_name': 'revisions_metadata'},
             )
         if getattr(self, 'scheduler', None):
             scheduler = self.scheduler
         else:
             scheduler = get_scheduler(**self.config['scheduler'])
         scheduler.create_tasks([task])
 
     # Dispatch
 
     def index(self, origin):
         origin_id = origin['id']
         latest_snapshot = self.storage.snapshot_get_latest(origin_id)
         method = getattr(self, '_try_get_%s_head' % origin['type'], None)
         if method is None:
             method = self._try_get_head_generic
         rev_id = method(latest_snapshot)
         if rev_id is None:
             return None
         result = {
             'origin_id': origin_id,
             'revision_id': rev_id,
         }
         return result
 
     # VCSs
 
     def _try_get_vcs_head(self, snapshot):
         try:
             if isinstance(snapshot, dict):
                 branches = snapshot['branches']
                 if branches[b'HEAD']['target_type'] == 'revision':
                     return branches[b'HEAD']['target']
         except KeyError:
             return None
 
     _try_get_hg_head = _try_get_git_head = _try_get_vcs_head
 
     # Tarballs
 
     _archive_filename_re = re.compile(
         rb'^'
         rb'(?P<pkgname>.*)[-_]'
         rb'(?P<version>[0-9]+(\.[0-9])*)'
         rb'(?P<preversion>[-+][a-zA-Z0-9.~]+?)?'
         rb'(?P<extension>(\.[a-zA-Z0-9]+)+)'
         rb'$')
 
     @classmethod
     def _parse_version(cls, filename):
         """Extracts the release version from an archive filename,
         to get an ordering whose maximum is likely to be the last
         version of the software
 
         >>> OriginHeadIndexer._parse_version(b'foo')
         (-inf,)
         >>> OriginHeadIndexer._parse_version(b'foo.tar.gz')
         (-inf,)
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1.tar.gz')
         (0, 0, 1, 0)
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1-beta2.tar.gz')
         (0, 0, 1, -1, 'beta2')
         >>> OriginHeadIndexer._parse_version(b'gnu-hello-0.0.1+foobar.tar.gz')
         (0, 0, 1, 1, 'foobar')
         """
         res = cls._archive_filename_re.match(filename)
         if res is None:
             return (float('-infinity'),)
         version = [int(n) for n in res.group('version').decode().split('.')]
         if res.group('preversion') is None:
             version.append(0)
         else:
             preversion = res.group('preversion').decode()
             if preversion.startswith('-'):
                 version.append(-1)
                 version.append(preversion[1:])
             elif preversion.startswith('+'):
                 version.append(1)
                 version.append(preversion[1:])
             else:
                 assert False, res.group('preversion')
         return tuple(version)
 
     def _try_get_ftp_head(self, snapshot):
         archive_names = list(snapshot['branches'])
         max_archive_name = max(archive_names, key=self._parse_version)
         r = self._try_resolve_target(snapshot['branches'], max_archive_name)
         return r
 
     # Generic
 
     def _try_get_head_generic(self, snapshot):
         # Works on 'deposit', 'svn', and 'pypi'.
         try:
             if isinstance(snapshot, dict):
                 branches = snapshot['branches']
         except KeyError:
             return None
         else:
             return (
                 self._try_resolve_target(branches, b'HEAD') or
                 self._try_resolve_target(branches, b'master')
             )
 
     def _try_resolve_target(self, branches, target_name):
         try:
             target = branches[target_name]
             while target['target_type'] == 'alias':
                 target = branches[target['target']]
             if target['target_type'] == 'revision':
                 return target['target']
             elif target['target_type'] == 'content':
                 return None  # TODO
             elif target['target_type'] == 'directory':
                 return None  # TODO
             elif target['target_type'] == 'release':
                 return None  # TODO
             else:
                 assert False
         except KeyError:
             return None
 
 
 @click.command()
 @click.option('--origins', '-i',
               help='Origins to lookup, in the "type+url" format',
               multiple=True)
 def main(origins):
     rev_metadata_indexer = OriginHeadIndexer()
     rev_metadata_indexer.run(origins, 'update-dups', parse_ids=True)
 
 
 if __name__ == '__main__':
     logging.basicConfig(level=logging.INFO)
     main()
diff --git a/swh/indexer/tests/test_origin_head.py b/swh/indexer/tests/test_origin_head.py
index 335ced7..f7e07a1 100644
--- a/swh/indexer/tests/test_origin_head.py
+++ b/swh/indexer/tests/test_origin_head.py
@@ -1,91 +1,91 @@
-# Copyright (C) 2017 The Software Heritage developers
+# Copyright (C) 2017-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import unittest
 import logging
 
 from swh.indexer.origin_head import OriginHeadIndexer
 from swh.indexer.tests.test_utils import MockIndexerStorage, MockStorage
 
 
 class OriginHeadTestIndexer(OriginHeadIndexer):
     """Specific indexer whose configuration is enough to satisfy the
        indexing tests.
""" - - revision_metadata_task = None - origin_intrinsic_metadata_task = None - def prepare(self): self.config = { 'tools': { 'name': 'origin-metadata', 'version': '0.0.1', 'configuration': {}, }, + 'tasks': { + 'revision_metadata': None, + 'origin_intrinsic_metadata': None, + } } self.storage = MockStorage() self.idx_storage = MockIndexerStorage() self.log = logging.getLogger('swh.indexer') self.objstorage = None self.tools = self.register_tools(self.config['tools']) self.tool = self.tools[0] self.results = None def persist_index_computations(self, results, policy_update): self.results = results class OriginHead(unittest.TestCase): def test_git(self): indexer = OriginHeadTestIndexer() indexer.run( ['git+https://github.com/SoftwareHeritage/swh-storage'], 'update-dups', parse_ids=True) self.assertEqual(indexer.results, [{ 'revision_id': b'8K\x12\x00d\x03\xcc\xe4]bS\xe3\x8f{' b'\xd7}\xac\xefrm', 'origin_id': 52189575}]) def test_ftp(self): indexer = OriginHeadTestIndexer() indexer.run( ['ftp+rsync://ftp.gnu.org/gnu/3dldf'], 'update-dups', parse_ids=True) self.assertEqual(indexer.results, [{ 'revision_id': b'\x8e\xa9\x8e/\xea}\x9feF\xf4\x9f\xfd\xee' b'\xcc\x1a\xb4`\x8c\x8by', 'origin_id': 4423668}]) def test_deposit(self): indexer = OriginHeadTestIndexer() indexer.run( ['deposit+https://forge.softwareheritage.org/source/' 'jesuisgpl/'], 'update-dups', parse_ids=True) self.assertEqual(indexer.results, [{ 'revision_id': b'\xe7n\xa4\x9c\x9f\xfb\xb7\xf76\x11\x08{' b'\xa6\xe9\x99\xb1\x9e]q\xeb', 'origin_id': 77775770}]) def test_pypi(self): indexer = OriginHeadTestIndexer() indexer.run( ['pypi+https://pypi.org/project/limnoria/'], 'update-dups', parse_ids=True) self.assertEqual(indexer.results, [{ 'revision_id': b'\x83\xb9\xb6\xc7\x05\xb1%\xd0\xfem\xd8k' b'A\x10\x9d\xc5\xfa2\xf8t', 'origin_id': 85072327}]) def test_svn(self): indexer = OriginHeadTestIndexer() indexer.run( ['svn+http://0-512-md.googlecode.com/svn/'], 'update-dups', parse_ids=True) self.assertEqual(indexer.results, [{ 'revision_id': b'\xe4?r\xe1,\x88\xab\xec\xe7\x9a\x87\xb8' b'\xc9\xad#.\x1bw=\x18', 'origin_id': 49908349}]) diff --git a/swh/indexer/tests/test_origin_metadata.py b/swh/indexer/tests/test_origin_metadata.py index 1ed3024..7166434 100644 --- a/swh/indexer/tests/test_origin_metadata.py +++ b/swh/indexer/tests/test_origin_metadata.py @@ -1,126 +1,130 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import time import logging import unittest from celery import task from swh.indexer.metadata import OriginMetadataIndexer from swh.indexer.tests.test_utils import MockObjStorage, MockStorage from swh.indexer.tests.test_utils import MockIndexerStorage from swh.indexer.tests.test_origin_head import OriginHeadTestIndexer from swh.indexer.tests.test_metadata import RevisionMetadataTestIndexer from swh.scheduler.tests.scheduler_testing import SchedulerTestFixture class OriginMetadataTestIndexer(OriginMetadataIndexer): def prepare(self): self.config = { 'storage': { 'cls': 'remote', 'args': { 'url': 'http://localhost:9999', } }, 'tools': { 'name': 'origin-metadata', 'version': '0.0.1', 'configuration': {} } } self.storage = MockStorage() self.idx_storage = MockIndexerStorage() self.log = logging.getLogger('swh.indexer') self.objstorage = MockObjStorage() self.tools = self.register_tools(self.config['tools']) self.tool = self.tools[0] 
         self.results = []
 
 
 @task
 def revision_metadata_test_task(*args, **kwargs):
     indexer = RevisionMetadataTestIndexer()
     indexer.run(*args, **kwargs)
     return indexer.results
 
 
 @task
 def origin_intrinsic_metadata_test_task(*args, **kwargs):
     indexer = OriginMetadataTestIndexer()
     indexer.run(*args, **kwargs)
     return indexer.results
 
 
 class OriginHeadTestIndexer(OriginHeadTestIndexer):
-    revision_metadata_task = 'revision_metadata_test_task'
-    origin_intrinsic_metadata_task = 'origin_intrinsic_metadata_test_task'
+    def prepare(self):
+        super().prepare()
+        self.config['tasks'] = {
+            'revision_metadata': 'revision_metadata_test_task',
+            'origin_intrinsic_metadata': 'origin_intrinsic_metadata_test_task',
+        }
 
 
 class TestOriginMetadata(SchedulerTestFixture, unittest.TestCase):
     def setUp(self):
         super().setUp()
         self.maxDiff = None
         MockIndexerStorage.added_data = []
         self.add_scheduler_task_type(
             'revision_metadata_test_task',
             'swh.indexer.tests.test_origin_metadata.'
             'revision_metadata_test_task')
         self.add_scheduler_task_type(
             'origin_intrinsic_metadata_test_task',
             'swh.indexer.tests.test_origin_metadata.'
             'origin_intrinsic_metadata_test_task')
         RevisionMetadataTestIndexer.scheduler = self.scheduler
 
     def tearDown(self):
         del RevisionMetadataTestIndexer.scheduler
         super().tearDown()
 
     def test_pipeline(self):
         indexer = OriginHeadTestIndexer()
         indexer.scheduler = self.scheduler
         indexer.run(
             ["git+https://github.com/librariesio/yarn-parser"],
             policy_update='update-dups',
             parse_ids=True)
 
         self.run_ready_tasks()  # Run the first task
         time.sleep(0.1)  # Give it time to complete and schedule the 2nd one
         self.run_ready_tasks()  # Run the second task
 
         metadata = {
             '@context': 'https://doi.org/10.5063/schema/codemeta-2.0',
             'url':
                 'https://github.com/librariesio/yarn-parser#readme',
             'schema:codeRepository':
                 'git+https://github.com/librariesio/yarn-parser.git',
             'schema:author': 'Andrew Nesbitt',
             'license': 'AGPL-3.0',
             'version': '1.0.0',
             'description':
                 'Tiny web service for parsing yarn.lock files',
             'codemeta:issueTracker':
                 'https://github.com/librariesio/yarn-parser/issues',
             'name': 'yarn-parser',
             'keywords': ['yarn', 'parse', 'lock', 'dependencies'],
         }
         rev_metadata = {
             'id': '8dbb6aeb036e7fd80664eb8bfd1507881af1ba9f',
             'translated_metadata': metadata,
             'indexer_configuration_id': 7,
         }
         origin_metadata = {
             'origin_id': 54974445,
             'from_revision': '8dbb6aeb036e7fd80664eb8bfd1507881af1ba9f',
             'metadata': metadata,
             'indexer_configuration_id': 7,
         }
         expected_results = [
             ('origin_intrinsic_metadata', True, [origin_metadata]),
             ('revision_metadata', True, [rev_metadata])]
 
         results = list(indexer.idx_storage.added_data)
         self.assertCountEqual(expected_results, results)
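
Reviewer note (not part of the patch): with the follow-up task names moved from
class attributes into ADDITIONAL_CONFIG, disabling the downstream scheduling no
longer requires a subclass attribute, only a configuration value. A minimal
sketch of that usage; the subclass name here is hypothetical and simply mirrors
the prepare() override pattern used by the test indexers above:

    from swh.indexer.origin_head import OriginHeadIndexer

    class StandaloneOriginHeadIndexer(OriginHeadIndexer):
        # Hypothetical subclass: overrides prepare() the same way the
        # test indexers do, to turn off follow-up tasks via configuration.
        def prepare(self):
            super().prepare()
            # With both names set to None, next_step() returns before
            # creating any scheduler task.
            self.config['tasks'] = {
                'revision_metadata': None,
                'origin_intrinsic_metadata': None,
            }

    StandaloneOriginHeadIndexer().run(
        ['git+https://github.com/SoftwareHeritage/swh-storage'],
        'update-dups', parse_ids=True)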
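
On the ftp head selection kept unchanged by this patch: _try_get_ftp_head picks
the branch whose _parse_version tuple compares greatest, which is what the
doctests above encode. A quick illustration with made-up archive names:

    from swh.indexer.origin_head import OriginHeadIndexer

    # Made-up tarball names; _parse_version maps each to a version tuple,
    # e.g. b'3DLDF-2.0.3.tar.gz' -> (2, 0, 3, 0), and max() picks the newest.
    names = [b'3DLDF-1.1.4.tar.gz', b'3DLDF-2.0.2.tar.gz', b'3DLDF-2.0.3.tar.gz']
    assert max(names, key=OriginHeadIndexer._parse_version) == b'3DLDF-2.0.3.tar.gz'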