diff --git a/swh/lister/core/abstractattribute.py b/swh/lister/core/abstractattribute.py
index afd244c..17db1a8 100644
--- a/swh/lister/core/abstractattribute.py
+++ b/swh/lister/core/abstractattribute.py
@@ -1,24 +1,26 @@
 # Copyright (C) 2017 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 
 class AbstractAttribute:
     """AbstractAttributes in a base class must be overridden by the subclass.
 
-    It's like the @abc.abstractmethod decorator, but for things that are
-    explicitly attributes/properties, not methods, without the need for
-    empty method def boilerplate. Like abc.abstractmethod, the class
-    containing AbstractAttributes must inherit abc.ABC or use the
-    abc.ABCMeta metaclass.
+    It's like the :py:func:`abc.abstractmethod` decorator, but for things that
+    are explicitly attributes/properties, not methods, without the need for
+    empty method def boilerplate. Like abc.abstractmethod, the class containing
+    AbstractAttributes must inherit from :py:class:`abc.ABC` or use the
+    :py:class:`abc.ABCMeta` metaclass.
+
+    Usage example::
 
-    Usage Example:
         import abc
         class ClassContainingAnAbstractAttribute(abc.ABC):
             foo = AbstractAttribute('descriptive docstring for foo')
+
     """
     __isabstractmethod__ = True
 
     def __init__(self, docstring=None):
         if docstring is not None:
             self.__doc__ = 'AbstractAttribute: ' + docstring
diff --git a/swh/lister/core/indexing_lister.py b/swh/lister/core/indexing_lister.py
index 6d121c4..df8c31e 100644
--- a/swh/lister/core/indexing_lister.py
+++ b/swh/lister/core/indexing_lister.py
@@ -1,210 +1,212 @@
 # Copyright (C) 2015-2017 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import abc
 import logging
 
 from sqlalchemy import func
 
 from .lister_transports import SWHListerHttpTransport
 from .lister_base import SWHListerBase
 
 
 class SWHIndexingLister(SWHListerBase):
     """Lister* intermediate class for any service that follows the pattern:
 
-    -- The service must report at least one stable unique identifier,
-       known herein as the UID value, for every listed repository.
-    -- If the service splits the list of repositories into sublists,
-       it must report at least one stable and sorted index identifier
-       for every listed repository, known herein as the indexable value,
-       which can be used as part of the service endpoint query to request
-       a sublist beginning from that index. This might be the UID if the
-       UID is monotonic.
-    -- Client sends a request to list repositories starting from a given
-       index.
-    -- Client receives structured (json/xml/etc) response with information
-       about a sequential series of repositories starting from that index
-       and, if necessary/available, some indication of the URL or index
-       for fetching the next series of repository data.
-
-    * - See swh.lister.core.lister_base.SWHListerBase for more details.
+
+    - The service must report at least one stable unique identifier, known
+      herein as the UID value, for every listed repository.
+    - If the service splits the list of repositories into sublists, it must
+      report at least one stable and sorted index identifier for every listed
+      repository, known herein as the indexable value, which can be used as
+      part of the service endpoint query to request a sublist beginning from
+      that index. This might be the UID if the UID is monotonic.
+    - Client sends a request to list repositories starting from a given
+      index.
+    - Client receives structured (json/xml/etc) response with information about
+      a sequential series of repositories starting from that index and, if
+      necessary/available, some indication of the URL or index for fetching the
+      next series of repository data.
+
+    See :py:class:`swh.lister.core.lister_base.SWHListerBase` for more details.
 
     This class cannot be instantiated. To create a new Lister for a source
     code listing service that follows the model described above, you must
     subclass this class and provide the required overrides in addition to
     any unmet implementation/override requirements of this class's base.
     (see parent class and member docstrings for details)
 
-    Required Overrides:
+    Required Overrides::
+
         def get_next_target_from_response
+
     """
 
     @abc.abstractmethod
     def get_next_target_from_response(self, response):
         """Find the next server endpoint identifier given the entire response.
 
         Implementation of this method depends on the server API spec and the
         shape of the network response object returned by the
         transport_request method.
 
         Args:
             response (transport response): response page from the server
         Returns:
             index of next page, possibly extracted from a next href url
         """
         pass
 
     # You probably don't need to override anything below this line.
 
     def filter_before_inject(self, models_list):
         """Overrides SWHListerBase.filter_before_inject
 
         Bounds query results by this Lister's set max_index.
         """
         models_list = [
             m for m in models_list
             if self.is_within_bounds(m['indexable'], None, self.max_index)
         ]
         return models_list
 
     def db_query_range(self, start, end):
         """Look in the db for a range of repositories with indexable
         values in the range [start, end]
 
         Args:
             start (model indexable type): start of desired indexable range
             end (model indexable type): end of desired indexable range
         Returns:
             a list of sqlalchemy.ext.declarative.declarative_base objects
             with indexable values within the given range
         """
         retlist = self.db_session.query(self.MODEL)
         if start is not None:
             retlist = retlist.filter(self.MODEL.indexable >= start)
         if end is not None:
             retlist = retlist.filter(self.MODEL.indexable <= end)
         return retlist
 
     def db_partition_indices(self, partition_size):
         """Describe an index-space compartmentalization of the db table
         in equal sized chunks. This is used to describe min&max bounds for
         parallelizing fetch tasks.
 
         Args:
             partition_size (int): desired size to make each partition
         Returns:
             a list of tuples (begin, end) of indexable value that declare
             approximately equal-sized ranges of existing repos
         """
         n = self.db_num_entries()
 
         partitions = []
         partition_size = min(partition_size, n)
         prev_index = None
         for i in range(0, n-1, partition_size):
             # indexable column from the ith row
             index = self.db_session.query(self.MODEL.indexable) \
                 .order_by(self.MODEL.indexable).offset(i).first()
 
             if index is not None and prev_index is not None:
                 partitions.append((prev_index, index))
             prev_index = index
 
         partitions.append((prev_index, self.db_last_index()))
         return partitions
 
     def db_last_index(self):
         """Look in the db for the largest indexable value
 
         Returns:
             the largest indexable value of all repos in the db
         """
         t = self.db_session.query(func.max(self.MODEL.indexable)).first()
         if t:
             return t[0]
         else:
             return None
 
     def disable_deleted_repo_tasks(self, start, end, keep_these):
         """Disable tasks for repos that no longer exist between start and end.
 
         Args:
             start: beginning of range to disable
             end: end of range to disable
             keep_these (uid list): do not disable repos with uids in this list
         """
         if end is None:
             end = self.db_last_index()
 
         if not self.is_within_bounds(end, None, self.max_index):
             end = self.max_index
 
         deleted_repos = self.winnow_models(
             self.db_query_range(start, end), self.MODEL.uid, keep_these
         )
         tasks_to_disable = [repo.task_id for repo in deleted_repos
                             if repo.task_id is not None]
         if tasks_to_disable:
             self.scheduler.disable_tasks(tasks_to_disable)
         for repo in deleted_repos:
             repo.task_id = None
 
     def run(self, min_index=None, max_index=None):
         """Main entry function. Sequentially fetches repository data from
         the service according to the basic outline in the class docstring,
         continually fetching sublists until either there is no next index
         reference given or the given next index is greater than the desired
         max_index.
 
         Args:
             min_index (indexable type): optional index to start from
             max_index (indexable type): optional index to stop at
         Returns:
             nothing
         """
         index = min_index or ''
         loop_count = 0
         self.min_index = min_index
         self.max_index = max_index
 
         while self.is_within_bounds(index, self.min_index, self.max_index):
             logging.info('listing repos starting at %s' % index)
 
             response, injected_repos = self.ingest_data(index)
             next_index = self.get_next_target_from_response(response)
 
             # Determine if any repos were deleted, and disable their tasks.
             keep_these = [k for k in injected_repos.keys()]
             self.disable_deleted_repo_tasks(index, next_index, keep_these)
 
             # termination condition
             if (next_index is None) or (next_index == index):
                 logging.info('stopping after index %s, no next link found' %
                              index)
                 break
             else:
                 index = next_index
 
             loop_count += 1
             if loop_count == 20:
                 logging.info('flushing updates')
                 loop_count = 0
                 self.db_session.commit()
                 self.db_session = self.mk_session()
 
         self.db_session.commit()
         self.db_session = self.mk_session()
 
 
 class SWHIndexingHttpLister(SWHListerHttpTransport, SWHIndexingLister):
     """Convenience class for ensuring right lookup and init order when
     combining SWHIndexingLister and SWHListerHttpTransport."""
     def __init__(self, lister_name=None, api_baseurl=None,
                  override_config=None):
         SWHListerHttpTransport.__init__(self, api_baseurl=api_baseurl)
         SWHIndexingLister.__init__(self, lister_name=lister_name,
                                    override_config=override_config)
diff --git a/swh/lister/core/tasks.py b/swh/lister/core/tasks.py
index 4ab610a..8a3a7a0 100644
--- a/swh/lister/core/tasks.py
+++ b/swh/lister/core/tasks.py
@@ -1,71 +1,73 @@
 # Copyright (C) 2017 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import abc
 import random
 
 from celery import group
 from celery.app.task import TaskType
 
 from swh.scheduler.task import Task
 
 from .abstractattribute import AbstractAttribute
 
 
 class AbstractTaskMeta(abc.ABCMeta, TaskType):
     pass
 
 
 class ListerTaskBase(Task, metaclass=AbstractTaskMeta):
     """Lister Tasks define the process of periodically requesting batches of
     repository information from source code hosting services. They
     instantiate Listers to do batches of work at periodic intervals.
 
     There are two main kinds of lister tasks:
 
-    1) Discovering new repositories.
-    2) Refreshing the list of already discovered repositories.
+    1. Discovering new repositories.
+    2. Refreshing the list of already discovered repositories.
 
     If the hosting service is indexable (according to the requirements of
-    SWHIndexingLister), then we can optionally partition the set of known
-    repositories into sub-sets to distribute the work.
+    :py:class:`SWHIndexingLister`), then we can optionally partition the
+    set of known repositories into sub-sets to distribute the work.
 
     This means that there is a third possible Task type for Indexing Listers:
-    3) Discover or refresh a specific range of indices.
+
+    3. Discover or refresh a specific range of indices.
+
     """
     task_queue = AbstractAttribute('Celery Task queue name')
 
     @abc.abstractmethod
     def new_lister(self):
         """Return a new lister of the appropriate type.
         """
         pass
 
     @abc.abstractmethod
     def run_task(self):
         pass
 
 
 class IndexingDiscoveryListerTask(ListerTaskBase):
     def run_task(self):
         lister = self.new_lister()
         return lister.run(min_index=lister.db_last_index(), max_index=None)
 
 
 class IndexingRangeListerTask(ListerTaskBase):
     def run_task(self, start, end):
         lister = self.new_lister()
         return lister.run(min_index=start, max_index=end)
 
 
 class IndexingRefreshListerTask(ListerTaskBase):
     GROUP_SPLIT = 10000
 
     def run_task(self):
         lister = self.new_lister()
         ranges = lister.db_partition_indices(self.GROUP_SPLIT)
         random.shuffle(ranges)
         range_task = IndexingRangeListerTask()
         group(range_task.s(minv, maxv) for minv, maxv in ranges)()
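
For orientation, here is a rough sketch (not part of the patch) of how a concrete lister and its discovery task might wire together the override points named in the docstrings above. The service name, the numeric 'id' index field, and the assumption that the transport response exposes a .json() method are hypothetical, and the base classes (SWHListerBase, SWHListerHttpTransport) impose further requirements not shown in this diff::

    # Hypothetical sketch only -- illustrates the overrides discussed above.
    from swh.lister.core.indexing_lister import SWHIndexingHttpLister
    from swh.lister.core.tasks import IndexingDiscoveryListerTask


    class ExampleLister(SWHIndexingHttpLister):
        # Further abstract attributes required by the base classes
        # (model class, request templates, ...) are omitted here.

        def get_next_target_from_response(self, response):
            # Assumes the service returns JSON records sorted by a numeric
            # 'id' field that doubles as the indexable value; the last id on
            # the page is the index to resume from, or None to stop.
            repos = response.json()
            return repos[-1]['id'] if repos else None


    class ExampleDiscoveryTask(IndexingDiscoveryListerTask):
        # Overrides the task_queue AbstractAttribute declared on ListerTaskBase.
        task_queue = 'example_lister_discovery'

        def new_lister(self):
            return ExampleLister(lister_name='example',
                                 api_baseurl='https://example.com/api')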