diff --git a/PKG-INFO b/PKG-INFO
index 7626a5b..93b07dc 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,28 +1,28 @@
 Metadata-Version: 2.1
 Name: swh.scheduler
-Version: 0.0.57
+Version: 0.0.58
 Summary: Software Heritage Scheduler
 Home-page: https://forge.softwareheritage.org/diffusion/DSCH/
 Author: Software Heritage developers
 Author-email: swh-devel@inria.fr
 License: UNKNOWN
 Project-URL: Funding, https://www.softwareheritage.org/donate
 Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest
 Project-URL: Source, https://forge.softwareheritage.org/source/swh-scheduler
 Description: swh-scheduler
         =============
 
         Job scheduler for the Software Heritage project.
 
         Task manager for asynchronous/delayed tasks, used for both recurrent (e.g.,
         listing a forge, loading new stuff from a Git repository) and one-off
         activities (e.g., loading a specific version of a source package).
 
 Platform: UNKNOWN
 Classifier: Programming Language :: Python :: 3
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
 Classifier: Operating System :: OS Independent
 Classifier: Development Status :: 5 - Production/Stable
 Description-Content-Type: text/markdown
 Provides-Extra: testing
diff --git a/requirements-swh.txt b/requirements-swh.txt
index 6d30cae..fcd030d 100644
--- a/requirements-swh.txt
+++ b/requirements-swh.txt
@@ -1,2 +1,2 @@
-swh.core[db,http] >= 0.0.61
+swh.core[db,http] >= 0.0.65
 swh.storage >= 0.0.129
diff --git a/swh.scheduler.egg-info/PKG-INFO b/swh.scheduler.egg-info/PKG-INFO
index 7626a5b..93b07dc 100644
--- a/swh.scheduler.egg-info/PKG-INFO
+++ b/swh.scheduler.egg-info/PKG-INFO
@@ -1,28 +1,28 @@
 Metadata-Version: 2.1
 Name: swh.scheduler
-Version: 0.0.57
+Version: 0.0.58
 Summary: Software Heritage Scheduler
 Home-page: https://forge.softwareheritage.org/diffusion/DSCH/
 Author: Software Heritage developers
 Author-email: swh-devel@inria.fr
 License: UNKNOWN
 Project-URL: Funding, https://www.softwareheritage.org/donate
 Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest
 Project-URL: Source, https://forge.softwareheritage.org/source/swh-scheduler
 Description: swh-scheduler
         =============
 
         Job scheduler for the Software Heritage project.
 
         Task manager for asynchronous/delayed tasks, used for both recurrent (e.g.,
         listing a forge, loading new stuff from a Git repository) and one-off
         activities (e.g., loading a specific version of a source package).
Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Description-Content-Type: text/markdown Provides-Extra: testing diff --git a/swh.scheduler.egg-info/requires.txt b/swh.scheduler.egg-info/requires.txt index 9048ea4..1949565 100644 --- a/swh.scheduler.egg-info/requires.txt +++ b/swh.scheduler.egg-info/requires.txt @@ -1,16 +1,16 @@ arrow celery>=4 Click elasticsearch>5.4 flask psycopg2 pyyaml vcversioner -swh.core[db,http]>=0.0.61 +swh.core[db,http]>=0.0.65 swh.storage>=0.0.129 [testing] pytest<4 pytest-postgresql celery>=4 hypothesis>=3.11.0 diff --git a/swh/scheduler/api/client.py b/swh/scheduler/api/client.py index f720132..7c07cc4 100644 --- a/swh/scheduler/api/client.py +++ b/swh/scheduler/api/client.py @@ -1,106 +1,106 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -from swh.core.api import SWHRemoteAPI +from swh.core.api import RPCClient -class RemoteScheduler(SWHRemoteAPI): +class RemoteScheduler(RPCClient): """Proxy to a remote scheduler API """ def close_connection(self): return self.post('close_connection', {}) def set_status_tasks(self, task_ids, status='disabled', next_run=None): return self.post('set_status_tasks', dict( task_ids=task_ids, status=status, next_run=next_run)) def create_task_type(self, task_type): return self.post('create_task_type', {'task_type': task_type}) def get_task_type(self, task_type_name): return self.post('get_task_type', {'task_type_name': task_type_name}) def get_task_types(self): return self.post('get_task_types', {}) def create_tasks(self, tasks): return self.post('create_tasks', {'tasks': tasks}) def disable_tasks(self, task_ids): return self.post('disable_tasks', {'task_ids': task_ids}) def get_tasks(self, task_ids): return self.post('get_tasks', {'task_ids': task_ids}) def get_task_runs(self, task_ids, limit=None): return self.post( 'get_task_runs', {'task_ids': task_ids, 'limit': limit}) def search_tasks(self, task_id=None, task_type=None, status=None, priority=None, policy=None, before=None, after=None, limit=None): return self.post('search_tasks', dict( task_id=task_id, task_type=task_type, status=status, priority=priority, policy=policy, before=before, after=after, limit=limit)) def peek_ready_tasks(self, task_type, timestamp=None, num_tasks=None, num_tasks_priority=None): return self.post('peek_ready_tasks', { 'task_type': task_type, 'timestamp': timestamp, 'num_tasks': num_tasks, 'num_tasks_priority': num_tasks_priority, }) def grab_ready_tasks(self, task_type, timestamp=None, num_tasks=None, num_tasks_priority=None): return self.post('grab_ready_tasks', { 'task_type': task_type, 'timestamp': timestamp, 'num_tasks': num_tasks, 'num_tasks_priority': num_tasks_priority, }) def schedule_task_run(self, task_id, backend_id, metadata=None, timestamp=None): return self.post('schedule_task_run', { 'task_id': task_id, 'backend_id': backend_id, 'metadata': metadata, 'timestamp': timestamp, }) def mass_schedule_task_runs(self, task_runs): return self.post('mass_schedule_task_runs', {'task_runs': task_runs}) def start_task_run(self, backend_id, metadata=None, timestamp=None): return self.post('start_task_run', { 
'backend_id': backend_id, 'metadata': metadata, 'timestamp': timestamp, }) def end_task_run(self, backend_id, status, metadata=None, timestamp=None): return self.post('end_task_run', { 'backend_id': backend_id, 'status': status, 'metadata': metadata, 'timestamp': timestamp, }) def filter_task_to_archive(self, after_ts, before_ts, limit=10, last_id=-1): return self.post('filter_task_to_archive', { 'after_ts': after_ts, 'before_ts': before_ts, 'limit': limit, 'last_id': last_id, }) def delete_archived_tasks(self, task_ids): return self.post('delete_archived_tasks', {'task_ids': task_ids}) diff --git a/swh/scheduler/celery_backend/config.py b/swh/scheduler/celery_backend/config.py index 7d3a3dd..0f275b8 100644 --- a/swh/scheduler/celery_backend/config.py +++ b/swh/scheduler/celery_backend/config.py @@ -1,271 +1,282 @@ # Copyright (C) 2015-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import os +import pkg_resources import urllib.parse from celery import Celery from celery.signals import setup_logging, celeryd_after_setup from celery.utils.log import ColorFormatter from celery.worker.control import Panel from kombu import Exchange, Queue from kombu.five import monotonic as _monotonic import requests from swh.scheduler import CONFIG as SWH_CONFIG from swh.core.config import load_named_config, merge_configs from swh.core.logger import JournalHandler DEFAULT_CONFIG_NAME = 'worker' CONFIG_NAME_ENVVAR = 'SWH_WORKER_INSTANCE' CONFIG_NAME_TEMPLATE = 'worker/%s' DEFAULT_CONFIG = { 'task_broker': ('str', 'amqp://guest@localhost//'), 'task_modules': ('list[str]', []), 'task_queues': ('list[str]', []), 'task_soft_time_limit': ('int', 0), } logger = logging.getLogger(__name__) @setup_logging.connect def setup_log_handler(loglevel=None, logfile=None, format=None, colorize=None, log_console=None, log_journal=None, **kwargs): """Setup logging according to Software Heritage preferences. We use the command-line loglevel for tasks only, as we never really care about the debug messages from celery. 
""" if loglevel is None: loglevel = logging.DEBUG if isinstance(loglevel, str): loglevel = logging._nameToLevel[loglevel] formatter = logging.Formatter(format) root_logger = logging.getLogger('') root_logger.setLevel(logging.INFO) log_target = os.environ.get('SWH_LOG_TARGET', 'console') if log_target == 'console': log_console = True elif log_target == 'journal': log_journal = True # this looks for log levels *higher* than DEBUG if loglevel <= logging.DEBUG and log_console is None: log_console = True if log_console: color_formatter = ColorFormatter(format) if colorize else formatter console = logging.StreamHandler() console.setLevel(logging.DEBUG) console.setFormatter(color_formatter) root_logger.addHandler(console) if log_journal: systemd_journal = JournalHandler() systemd_journal.setLevel(logging.DEBUG) systemd_journal.setFormatter(formatter) root_logger.addHandler(systemd_journal) logging.getLogger('celery').setLevel(logging.INFO) # Silence amqp heartbeat_tick messages logger = logging.getLogger('amqp') logger.addFilter(lambda record: not record.msg.startswith( 'heartbeat_tick')) logger.setLevel(loglevel) # Silence useless "Starting new HTTP connection" messages logging.getLogger('urllib3').setLevel(logging.WARNING) logging.getLogger('swh').setLevel(loglevel) # get_task_logger makes the swh tasks loggers children of celery.task logging.getLogger('celery.task').setLevel(loglevel) return loglevel @celeryd_after_setup.connect def setup_queues_and_tasks(sender, instance, **kwargs): """Signal called on worker start. This automatically registers swh.scheduler.task.Task subclasses as available celery tasks. This also subscribes the worker to the "implicit" per-task queues defined for these task classes. """ logger.info('Setup Queues & Tasks for %s', sender) instance.app.conf['worker_name'] = sender @Panel.register def monotonic(state): """Get the current value for the monotonic clock""" return {'monotonic': _monotonic()} def route_for_task(name, args, kwargs, options, task=None, **kw): """Route tasks according to the task_queue attribute in the task class""" if name is not None and name.startswith('swh.'): return {'queue': name} def get_queue_stats(app, queue_name): """Get the statistics regarding a queue on the broker. Arguments: queue_name: name of the queue to check Returns a dictionary raw from the RabbitMQ management API; or `None` if the current configuration does not use RabbitMQ. Interesting keys: - Consumers (number of consumers for the queue) - messages (number of messages in queue) - messages_unacknowledged (number of messages currently being processed) Documentation: https://www.rabbitmq.com/management.html#http-api """ conn_info = app.connection().info() if conn_info['transport'] == 'memory': # We're running in a test environment, without RabbitMQ. 
         return None
 
     url = 'http://{hostname}:{port}/api/queues/{vhost}/{queue}'.format(
         hostname=conn_info['hostname'],
         port=conn_info['port'] + 10000,
         vhost=urllib.parse.quote(conn_info['virtual_host'], safe=''),
         queue=urllib.parse.quote(queue_name, safe=''),
     )
     credentials = (conn_info['userid'], conn_info['password'])
     r = requests.get(url, auth=credentials)
     if r.status_code == 404:
         return {}
     if r.status_code != 200:
         raise ValueError('Got error %s when reading queue stats: %s' % (
             r.status_code, r.json()))
     return r.json()
 
 
 def get_queue_length(app, queue_name):
     """Shortcut to get a queue's length"""
     stats = get_queue_stats(app, queue_name)
     if stats:
         return stats.get('messages')
 
 
 def register_task_class(app, name, cls):
     """Register a class-based task under the given name"""
     if name in app.tasks:
         return
 
     task_instance = cls()
     task_instance.name = name
     app.register_task(task_instance)
 
 
 INSTANCE_NAME = os.environ.get(CONFIG_NAME_ENVVAR)
 CONFIG_NAME = os.environ.get('SWH_CONFIG_FILENAME')
 CONFIG = {}
 if CONFIG_NAME:
     # load the celery config from the main config file given as
     # SWH_CONFIG_FILENAME environment variable.
     # This is expected to have a [celery] section in which we have the
     # celery specific configuration.
     SWH_CONFIG.clear()
     SWH_CONFIG.update(load_named_config(CONFIG_NAME))
     CONFIG = SWH_CONFIG.get('celery')
 
 if not CONFIG:
     # otherwise, back to compat config loading mechanism
     if INSTANCE_NAME:
         CONFIG_NAME = CONFIG_NAME_TEMPLATE % INSTANCE_NAME
     else:
         CONFIG_NAME = DEFAULT_CONFIG_NAME
 
     # Load the Celery config
     CONFIG = load_named_config(CONFIG_NAME, DEFAULT_CONFIG)
 
+CONFIG.setdefault('task_modules', [])
+# load task modules declared as plugin entry points
+for entrypoint in pkg_resources.iter_entry_points('swh.workers'):
+    worker_registrer_fn = entrypoint.load()
+    # Each registry function is expected to return a dict with a
+    # 'task_modules' key: a list of names of the Python modules in which
+    # celery tasks are defined.
+    task_modules = worker_registrer_fn().get('task_modules', [])
+    CONFIG['task_modules'].extend(task_modules)
+
 # Celery Queues
 CELERY_QUEUES = [Queue('celery', Exchange('celery'), routing_key='celery')]
 
 CELERY_DEFAULT_CONFIG = dict(
     # Timezone configuration: all in UTC
     enable_utc=True,
     timezone='UTC',
     # Imported modules
     imports=CONFIG.get('task_modules', []),
     # Time (in seconds, or a timedelta object) for when after stored task
     # tombstones will be deleted. None means to never expire results.
     result_expires=None,
     # A string identifying the default serialization method to use. Can
     # be json (default), pickle, yaml, msgpack, or any custom
     # serialization methods that have been registered with
     task_serializer='msgpack',
     # Result serialization format
     result_serializer='msgpack',
     # Late ack means the task messages will be acknowledged after the task has
     # been executed, not just before, which is the default behavior.
     task_acks_late=True,
     # A string identifying the default serialization method to use.
     # Can be pickle (default), json, yaml, msgpack or any custom serialization
     # methods that have been registered with kombu.serialization.registry
     accept_content=['msgpack', 'json'],
     # If True the task will report its status as “started”
     # when the task is executed by a worker.
     task_track_started=True,
     # Default compression used for task messages. Can be gzip, bzip2
     # (if available), or any custom compression schemes registered
     # in the Kombu compression registry.
# result_compression='bzip2', # task_compression='bzip2', # Disable all rate limits, even if tasks has explicit rate limits set. # (Disabling rate limits altogether is recommended if you don’t have any # tasks using them.) worker_disable_rate_limits=True, # Task routing task_routes=route_for_task, # Allow pool restarts from remote worker_pool_restarts=True, # Do not prefetch tasks worker_prefetch_multiplier=1, # Send events worker_send_task_events=True, # Do not send useless task_sent events task_send_sent_event=False, ) def build_app(config=None): config = merge_configs( {k: v for (k, (_, v)) in DEFAULT_CONFIG.items()}, config or {}) config['task_queues'] = CELERY_QUEUES + [ Queue(queue, Exchange(queue), routing_key=queue) for queue in config.get('task_queues', ())] logger.debug('Creating a Celery app with %s', config) # Instantiate the Celery app app = Celery(broker=config['task_broker'], task_cls='swh.scheduler.task:SWHTask') app.add_defaults(CELERY_DEFAULT_CONFIG) app.add_defaults(config) return app app = build_app(CONFIG) # XXX for BW compat Celery.get_queue_length = get_queue_length diff --git a/swh/scheduler/cli/task.py b/swh/scheduler/cli/task.py index ddc8604..e6d0cdf 100644 --- a/swh/scheduler/cli/task.py +++ b/swh/scheduler/cli/task.py @@ -1,564 +1,564 @@ # Copyright (C) 2016-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import json import itertools import locale import logging import arrow import csv import click from . import cli locale.setlocale(locale.LC_ALL, '') ARROW_LOCALE = locale.getlocale(locale.LC_TIME)[0] class DateTimeType(click.ParamType): name = 'time and date' def convert(self, value, param, ctx): if not isinstance(value, arrow.Arrow): value = arrow.get(value) return value DATETIME = DateTimeType() CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) def format_dict(d): ret = {} for k, v in d.items(): if isinstance(v, (arrow.Arrow, datetime.date, datetime.datetime)): v = arrow.get(v).format() elif isinstance(v, dict): v = format_dict(v) ret[k] = v return ret def pretty_print_list(list, indent=0): """Pretty-print a list""" return ''.join('%s%r\n' % (' ' * indent, item) for item in list) def pretty_print_dict(dict, indent=0): """Pretty-print a list""" return ''.join('%s%s: %r\n' % (' ' * indent, click.style(key, bold=True), value) for key, value in sorted(dict.items())) def pretty_print_run(run, indent=4): fmt = ('{indent}{backend_id} [{status}]\n' '{indent} scheduled: {scheduled} [{started}:{ended}]') return fmt.format(indent=' '*indent, **format_dict(run)) def pretty_print_task(task, full=False): """Pretty-print a task If 'full' is True, also print the status and priority fields. >>> task = { ... 'id': 1234, ... 'arguments': { ... 'args': ['foo', 'bar', True], ... 'kwargs': {'key': 'value', 'key2': 42}, ... }, ... 'current_interval': datetime.timedelta(hours=1), ... 'next_run': datetime.datetime(2019, 2, 21, 13, 52, 35, 407818), ... 'policy': 'oneshot', ... 'priority': None, ... 'status': 'next_run_not_scheduled', ... 'type': 'test_task', ... } >>> print(click.unstyle(pretty_print_task(task))) Task 1234 Next run: ... (2019-02-21 13:52:35+00:00) Interval: 1:00:00 Type: test_task Policy: oneshot Args: 'foo' 'bar' True Keyword args: key: 'value' key2: 42 >>> print(click.unstyle(pretty_print_task(task, full=True))) Task 1234 Next run: ... 
(2019-02-21 13:52:35+00:00) Interval: 1:00:00 Type: test_task Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: 'foo' 'bar' True Keyword args: key: 'value' key2: 42 """ next_run = arrow.get(task['next_run']) lines = [ '%s %s\n' % (click.style('Task', bold=True), task['id']), click.style(' Next run: ', bold=True), "%s (%s)" % (next_run.humanize(locale=ARROW_LOCALE), next_run.format()), '\n', click.style(' Interval: ', bold=True), str(task['current_interval']), '\n', click.style(' Type: ', bold=True), task['type'] or '', '\n', click.style(' Policy: ', bold=True), task['policy'] or '', '\n', ] if full: lines += [ click.style(' Status: ', bold=True), task['status'] or '', '\n', click.style(' Priority: ', bold=True), task['priority'] or '', '\n', ] lines += [ click.style(' Args:\n', bold=True), pretty_print_list(task['arguments']['args'], indent=4), click.style(' Keyword args:\n', bold=True), pretty_print_dict(task['arguments']['kwargs'], indent=4), ] return ''.join(lines) @cli.group('task') @click.pass_context def task(ctx): """Manipulate tasks.""" pass @task.command('schedule') @click.option('--columns', '-c', multiple=True, default=['type', 'args', 'kwargs', 'next_run'], type=click.Choice([ 'type', 'args', 'kwargs', 'policy', 'next_run']), help='columns present in the CSV file') @click.option('--delimiter', '-d', default=',') @click.argument('file', type=click.File(encoding='utf-8')) @click.pass_context def schedule_tasks(ctx, columns, delimiter, file): """Schedule tasks from a CSV input file. The following columns are expected, and can be set through the -c option: - type: the type of the task to be scheduled (mandatory) - args: the arguments passed to the task (JSON list, defaults to an empty list) - kwargs: the keyword arguments passed to the task (JSON object, defaults to an empty dict) - next_run: the date at which the task should run (datetime, defaults to now) The CSV can be read either from a named file, or from stdin (use - as filename). Use sample: cat scheduling-task.txt | \ python3 -m swh.scheduler.cli \ --database 'service=swh-scheduler-dev' \ task schedule \ --columns type --columns kwargs --columns policy \ --delimiter ';' - """ tasks = [] now = arrow.utcnow() scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') reader = csv.reader(file, delimiter=delimiter) for line in reader: task = dict(zip(columns, line)) args = json.loads(task.pop('args', '[]')) kwargs = json.loads(task.pop('kwargs', '{}')) task['arguments'] = { 'args': args, 'kwargs': kwargs, } task['next_run'] = DATETIME.convert(task.get('next_run', now), None, None) tasks.append(task) created = scheduler.create_tasks(tasks) output = [ 'Created %d tasks\n' % len(created), ] for task in created: output.append(pretty_print_task(task)) click.echo_via_pager('\n'.join(output)) @task.command('add') @click.argument('type', nargs=1, required=True) @click.argument('options', nargs=-1) @click.option('--policy', '-p', default='recurring', type=click.Choice(['recurring', 'oneshot'])) @click.option('--priority', '-P', default=None, type=click.Choice(['low', 'normal', 'high'])) @click.option('--next-run', '-n', default=None) @click.pass_context def schedule_task(ctx, type, options, policy, priority, next_run): """Schedule one task from arguments. The first argument is the name of the task type, further ones are positional and keyword argument(s) of the task, in YAML format. Keyword args are of the form key=value. 
Usage sample: swh-scheduler --database 'service=swh-scheduler' \ task add list-pypi swh-scheduler --database 'service=swh-scheduler' \ task add list-debian-distribution --policy=oneshot distribution=stretch Note: if the priority is not given, the task won't have the priority set, which is considered as the lowest priority level. """ from .utils import parse_options scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') now = arrow.utcnow() (args, kw) = parse_options(options) task = {'type': type, 'policy': policy, 'priority': priority, 'arguments': { 'args': args, 'kwargs': kw, }, 'next_run': DATETIME.convert(next_run or now, None, None), } created = scheduler.create_tasks([task]) output = [ 'Created %d tasks\n' % len(created), ] for task in created: output.append(pretty_print_task(task)) click.echo('\n'.join(output)) @task.command('schedule_origins') @click.argument('type', nargs=1, required=True) @click.argument('options', nargs=-1) @click.option('--batch-size', '-b', 'origin_batch_size', default=10, show_default=True, type=int, help="Number of origins per task") @click.option('--min-id', default=0, show_default=True, type=int, help="Only schedule tasks for origins whose ID is greater") @click.option('--max-id', default=None, type=int, help="Only schedule tasks for origins whose ID is lower") @click.option('--storage-url', '-g', help="URL of the (graph) storage API") @click.option('--dry-run/--no-dry-run', is_flag=True, default=False, help='List only what would be scheduled.') @click.pass_context def schedule_origin_metadata_index( ctx, type, options, storage_url, origin_batch_size, min_id, max_id, dry_run): """Schedules tasks for origins that are already known. The first argument is the name of the task type, further ones are keyword argument(s) of the task in the form key=value, where value is in YAML format. Usage sample: swh-scheduler --database 'service=swh-scheduler' \ task schedule_origins index-origin-metadata """ from swh.storage import get_storage from swh.storage.algos.origin import iter_origins from .utils import parse_options, schedule_origin_batches scheduler = ctx.obj['scheduler'] storage = get_storage('remote', {'url': storage_url}) if dry_run: scheduler = None (args, kw) = parse_options(options) if args: raise click.ClickException('Only keywords arguments are allowed.') origins = iter_origins(storage, origin_from=min_id, origin_to=max_id) - origin_ids = (origin['id'] for origin in origins) + origin_urls = (origin['url'] for origin in origins) schedule_origin_batches( - scheduler, type, origin_ids, origin_batch_size, kw) + scheduler, type, origin_urls, origin_batch_size, kw) @task.command('list-pending') @click.argument('task-types', required=True, nargs=-1) @click.option('--limit', '-l', required=False, type=click.INT, help='The maximum number of tasks to fetch') @click.option('--before', '-b', required=False, type=DATETIME, help='List all jobs supposed to run before the given date') @click.pass_context def list_pending_tasks(ctx, task_types, limit, before): """List the tasks that are going to be run. 
You can override the number of tasks to fetch """ from swh.scheduler import compute_nb_tasks_from scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') num_tasks, num_tasks_priority = compute_nb_tasks_from(limit) output = [] for task_type in task_types: pending = scheduler.peek_ready_tasks( task_type, timestamp=before, num_tasks=num_tasks, num_tasks_priority=num_tasks_priority) output.append('Found %d %s tasks\n' % ( len(pending), task_type)) for task in pending: output.append(pretty_print_task(task)) click.echo('\n'.join(output)) @task.command('list') @click.option('--task-id', '-i', default=None, multiple=True, metavar='ID', help='List only tasks whose id is ID.') @click.option('--task-type', '-t', default=None, multiple=True, metavar='TYPE', help='List only tasks of type TYPE') @click.option('--limit', '-l', required=False, type=click.INT, help='The maximum number of tasks to fetch.') @click.option('--status', '-s', multiple=True, metavar='STATUS', default=None, help='List tasks whose status is STATUS.') @click.option('--policy', '-p', default=None, type=click.Choice(['recurring', 'oneshot']), help='List tasks whose policy is POLICY.') @click.option('--priority', '-P', default=None, multiple=True, type=click.Choice(['all', 'low', 'normal', 'high']), help='List tasks whose priority is PRIORITY.') @click.option('--before', '-b', required=False, type=DATETIME, metavar='DATETIME', help='Limit to tasks supposed to run before the given date.') @click.option('--after', '-a', required=False, type=DATETIME, metavar='DATETIME', help='Limit to tasks supposed to run after the given date.') @click.option('--list-runs', '-r', is_flag=True, default=False, help='Also list past executions of each task.') @click.pass_context def list_tasks(ctx, task_id, task_type, limit, status, policy, priority, before, after, list_runs): """List tasks. """ scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') if not task_type: task_type = [x['type'] for x in scheduler.get_task_types()] # if task_id is not given, default value for status is # 'next_run_not_scheduled' # if task_id is given, default status is 'all' if task_id is None and status is None: status = ['next_run_not_scheduled'] if status and 'all' in status: status = None if priority and 'all' in priority: priority = None output = [] tasks = scheduler.search_tasks( task_id=task_id, task_type=task_type, status=status, priority=priority, policy=policy, before=before, after=after, limit=limit) if list_runs: runs = {t['id']: [] for t in tasks} for r in scheduler.get_task_runs([task['id'] for task in tasks]): runs[r['task']].append(r) else: runs = {} output.append('Found %d tasks\n' % ( len(tasks))) for task in tasks: output.append(pretty_print_task(task, full=True)) if runs.get(task['id']): output.append(click.style(' Executions:', bold=True)) for run in runs[task['id']]: output.append(pretty_print_run(run, indent=4)) click.echo('\n'.join(output)) @task.command('respawn') @click.argument('task-ids', required=True, nargs=-1) @click.option('--next-run', '-n', required=False, type=DATETIME, metavar='DATETIME', default=None, help='Re spawn the selected tasks at this date') @click.pass_context def respawn_tasks(ctx, task_ids, next_run): """Respawn tasks. Respawn tasks given by their ids (see the 'task list' command to find task ids) at the given date (immediately by default). Eg. 
swh-scheduler task respawn 1 3 12 """ scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') if next_run is None: next_run = arrow.utcnow() output = [] scheduler.set_status_tasks( task_ids, status='next_run_not_scheduled', next_run=next_run) output.append('Respawn tasks %s\n' % (task_ids,)) click.echo('\n'.join(output)) @task.command('archive') @click.option('--before', '-b', default=None, help='''Task whose ended date is anterior will be archived. Default to current month's first day.''') @click.option('--after', '-a', default=None, help='''Task whose ended date is after the specified date will be archived. Default to prior month's first day.''') @click.option('--batch-index', default=1000, type=click.INT, help='Batch size of tasks to read from db to archive') @click.option('--bulk-index', default=200, type=click.INT, help='Batch size of tasks to bulk index') @click.option('--batch-clean', default=1000, type=click.INT, help='Batch size of task to clean after archival') @click.option('--dry-run/--no-dry-run', is_flag=True, default=False, help='Default to list only what would be archived.') @click.option('--verbose', is_flag=True, default=False, help='Verbose mode') @click.option('--cleanup/--no-cleanup', is_flag=True, default=True, help='Clean up archived tasks (default)') @click.option('--start-from', type=click.INT, default=-1, help='(Optional) default task id to start from. Default is -1.') @click.pass_context def archive_tasks(ctx, before, after, batch_index, bulk_index, batch_clean, dry_run, verbose, cleanup, start_from): """Archive task/task_run whose (task_type is 'oneshot' and task_status is 'completed') or (task_type is 'recurring' and task_status is 'disabled'). With --dry-run flag set (default), only list those. """ from swh.core.utils import grouper from swh.scheduler.backend_es import SWHElasticSearchClient scheduler = ctx.obj['scheduler'] if not scheduler: raise ValueError('Scheduler class (local/remote) must be instantiated') es_client = SWHElasticSearchClient() logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO) log = logging.getLogger('swh.scheduler.cli.archive') logging.getLogger('urllib3').setLevel(logging.WARN) logging.getLogger('elasticsearch').setLevel(logging.WARN) if dry_run: log.info('**DRY-RUN** (only reading db)') if not cleanup: log.info('**NO CLEANUP**') now = arrow.utcnow() # Default to archive tasks from a rolling month starting the week # prior to the current one if not before: before = now.shift(weeks=-1).format('YYYY-MM-DD') if not after: after = now.shift(weeks=-1).shift(months=-1).format('YYYY-MM-DD') log.debug('index: %s; cleanup: %s; period: [%s ; %s]' % ( not dry_run, not dry_run and cleanup, after, before)) def group_by_index_name(data, es_client=es_client): """Given a data record, determine the index's name through its ending date. This varies greatly depending on the task_run's status. 
""" date = data.get('started') if not date: date = data['scheduled'] return es_client.compute_index_name(date.year, date.month) def index_data(before, last_id, batch_index): tasks_in = scheduler.filter_task_to_archive( after, before, last_id=last_id, limit=batch_index) for index_name, tasks_group in itertools.groupby( tasks_in, key=group_by_index_name): log.debug('Index tasks to %s' % index_name) if dry_run: for task in tasks_group: yield task continue yield from es_client.streaming_bulk( index_name, tasks_group, source=['task_id', 'task_run_id'], chunk_size=bulk_index, log=log) gen = index_data(before, last_id=start_from, batch_index=batch_index) if cleanup: for task_ids in grouper(gen, n=batch_clean): task_ids = list(task_ids) log.info('Clean up %s tasks: [%s, ...]' % ( len(task_ids), task_ids[0])) if dry_run: # no clean up continue ctx.obj['scheduler'].delete_archived_tasks(task_ids) else: for task_ids in grouper(gen, n=batch_index): task_ids = list(task_ids) log.info('Indexed %s tasks: [%s, ...]' % ( len(task_ids), task_ids[0])) diff --git a/swh/scheduler/sql/50-swh-data.sql b/swh/scheduler/sql/50-swh-data.sql index 512a844..f3cb04e 100644 --- a/swh/scheduler/sql/50-swh-data.sql +++ b/swh/scheduler/sql/50-swh-data.sql @@ -1,390 +1,416 @@ insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-svn-from-archive', 'Loading svn repositories from svn dump', 'swh.loader.svn.tasks.MountAndLoadSvnRepository', '1 day', '1 day', '1 day', 1, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-svn', 'Create dump of a remote svn repository, mount it and load it', 'swh.loader.svn.tasks.DumpMountAndLoadSvnRepository', '1 day', '1 day', '1 day', 1, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, num_retries, max_queue_length) values ( 'load-deposit', 'Loading deposit archive into swh through swh-loader-tar', 'swh.deposit.loader.tasks.LoadDepositArchiveTsk', '1 day', '1 day', '1 day', 1, 3, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, num_retries, max_queue_length) values ( 'check-deposit', 'Pre-checking deposit step before loading into swh archive', 'swh.deposit.loader.tasks.ChecksDepositTsk', '1 day', '1 day', '1 day', 1, 3, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'cook-vault-bundle', 'Cook a Vault bundle', 'swh.vault.cooking_tasks.SWHCookingTask', '1 day', '1 day', '1 day', 1, 10000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-hg', 'Loading mercurial repository swh-loader-mercurial', 'swh.loader.mercurial.tasks.LoadMercurial', '1 day', '1 day', '1 day', 1, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-hg-from-archive', 'Loading archive mercurial repository swh-loader-mercurial', 'swh.loader.mercurial.tasks.LoadArchiveMercurial', '1 day', '1 day', '1 day', 1, 1000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-git', 
'Update an origin of type git', 'swh.loader.git.tasks.UpdateGitRepository', '64 days', '12:00:00', '64 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-bitbucket-incremental', 'Incrementally list BitBucket', 'swh.lister.bitbucket.tasks.IncrementalBitBucketLister', '1 day', '1 day', '1 day', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-bitbucket-full', 'Full update of Bitbucket repos list', 'swh.lister.bitbucket.tasks.FullBitBucketRelister', '90 days', '90 days', '90 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-github-incremental', 'Incrementally list GitHub', 'swh.lister.github.tasks.IncrementalGitHubLister', '1 day', '1 day', '1 day', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-github-full', 'Full update of GitHub repos list', 'swh.lister.github.tasks.FullGitHubRelister', '90 days', '90 days', '90 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-debian-distribution', 'List a Debian distribution', 'swh.lister.debian.tasks.DebianListerTask', '1 day', '1 day', '1 day', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length, num_retries, retry_delay) values ( 'load-debian-package', 'Load a Debian package', 'swh.loader.debian.tasks.LoadDebianPackage', NULL, NULL, NULL, NULL, 5000, 5, '1 hour'); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-gitlab-incremental', 'Incrementally list a Gitlab instance', 'swh.lister.gitlab.tasks.IncrementalGitLabLister', '1 day', '1 day', '1 day', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-gitlab-full', 'Full update of a Gitlab instance''s repos list', 'swh.lister.gitlab.tasks.FullGitLabRelister', '90 days', '90 days', '90 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-pypi', 'Full pypi lister', 'swh.lister.pypi.tasks.PyPIListerTask', '1 days', '1 days', '1 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-pypi', 'Load Pypi origin', 'swh.loader.pypi.tasks.LoadPyPI', '64 days', '12:00:00', '64 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-mimetype', 'Mimetype indexer task', 'swh.indexer.tasks.ContentMimetype', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-mimetype-for-range', 'Mimetype Range indexer task', 'swh.indexer.tasks.ContentRangeMimetype', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-fossology-license', 
'Fossology license indexer task', 'swh.indexer.tasks.ContentFossologyLicense', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-fossology-license-for-range', 'Fossology license range indexer task', 'swh.indexer.tasks.ContentRangeFossologyLicense', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-origin-head', 'Origin Head indexer task', 'swh.indexer.tasks.OriginHead', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-revision-metadata', 'Revision Metadata indexer task', 'swh.indexer.tasks.RevisionMetadata', '1 day', '12:00:00', '1 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'index-origin-metadata', 'Origin Metadata indexer task', 'swh.indexer.tasks.OriginMetadata', '1 day', '12:00:00', '1 days', 2, 20000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-npm-full', 'Full npm lister', 'swh.lister.npm.tasks.NpmListerTask', '1 week', '1 week', '1 week', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor, max_queue_length) values ( 'load-npm', 'Load npm origin', 'swh.loader.npm.tasks.LoadNpm', '64 days', '12:00:00', '64 days', 2, 5000); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-gnu-full', 'Full gnu lister', 'swh.lister.gnu.tasks.GNUListerTask', '90 days', '90 days', '90 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-phabricator-full', 'Full Phabricator instance lister', 'swh.lister.phabricator.tasks.FullPhabricatorLister', '90 days', '90 days', '90 days', 1); insert into task_type( type, description, backend_name, default_interval, min_interval, max_interval, backoff_factor) values ( 'list-phabricator-incremental', 'Incremental Phabricator instance lister', 'swh.lister.phabricator.tasks.IncrementalPhabricatorLister', '1 week', '1 week', '1 week', 1); + +insert into task_type( + type, + description, + backend_name, + default_interval, min_interval, max_interval, backoff_factor) +values ( + 'list-cgit', + 'CGit instance lister', + 'swh.lister.cgit.tasks.CGitListerTask', + '64 days', + '64 days', + '64 days', 1); + +insert into task_type( + type, + description, + backend_name, + default_interval, min_interval, max_interval, backoff_factor, num_retries) +values ( + 'load-tar', + 'Load Tar from gnu', + 'swh.loader.package.tasks.LoadGNU', + '64 days', + '64 days', + '64 days', 1, 3); diff --git a/swh/scheduler/tests/conftest.py b/swh/scheduler/tests/conftest.py index db13bb9..e4e9e05 100644 --- a/swh/scheduler/tests/conftest.py +++ b/swh/scheduler/tests/conftest.py @@ -1,96 +1,100 @@ import os import pytest import glob from datetime import timedelta +import pkg_resources from swh.core.utils import numfile_sortkey as sortkey from swh.scheduler import get_scheduler from swh.scheduler.tests import SQL_DIR # make sure we are not 
fooled by CELERY_ config environment vars for var in [x for x in os.environ.keys() if x.startswith('CELERY')]: os.environ.pop(var) import swh.scheduler.celery_backend.config # noqa # this import is needed here to enforce creation of the celery current app # BEFORE the swh_app fixture is called, otherwise the Celery app instance from # celery_backend.config becomes the celery.current_app # test_cli tests depends on a en/C locale, so ensure it os.environ['LC_ALL'] = 'C.UTF-8' DUMP_FILES = os.path.join(SQL_DIR, '*.sql') # celery tasks for testing purpose; tasks themselves should be # in swh/scheduler/tests/tasks.py TASK_NAMES = ['ping', 'multiping', 'add', 'error'] @pytest.fixture(scope='session') def celery_enable_logging(): return True @pytest.fixture(scope='session') def celery_includes(): - return [ + task_modules = [ 'swh.scheduler.tests.tasks', ] + for entrypoint in pkg_resources.iter_entry_points('swh.workers'): + task_modules.extend(entrypoint.load()().get('task_modules', [])) + return task_modules @pytest.fixture(scope='session') def celery_parameters(): return { 'task_cls': 'swh.scheduler.task:SWHTask', } @pytest.fixture(scope='session') def celery_config(): return { 'accept_content': ['application/x-msgpack', 'application/json'], 'task_serializer': 'msgpack', 'result_serializer': 'json', } # override the celery_session_app fixture to monkeypatch the 'main' # swh.scheduler.celery_backend.config.app Celery application # with the test application. @pytest.fixture(scope='session') def swh_app(celery_session_app): swh.scheduler.celery_backend.config.app = celery_session_app yield celery_session_app @pytest.fixture def swh_scheduler(request, postgresql_proc, postgresql): scheduler_config = { 'db': 'postgresql://{user}@{host}:{port}/{dbname}'.format( host=postgresql_proc.host, port=postgresql_proc.port, user='postgres', dbname='tests') } all_dump_files = sorted(glob.glob(DUMP_FILES), key=sortkey) cursor = postgresql.cursor() for fname in all_dump_files: with open(fname) as fobj: cursor.execute(fobj.read()) postgresql.commit() scheduler = get_scheduler('local', scheduler_config) for taskname in TASK_NAMES: scheduler.create_task_type({ 'type': 'swh-test-{}'.format(taskname), 'description': 'The {} testing task'.format(taskname), 'backend_name': 'swh.scheduler.tests.tasks.{}'.format(taskname), 'default_interval': timedelta(days=1), 'min_interval': timedelta(hours=6), 'max_interval': timedelta(days=12), }) return scheduler diff --git a/swh/scheduler/tests/test_cli.py b/swh/scheduler/tests/test_cli.py index 3498e1e..b413ec5 100644 --- a/swh/scheduler/tests/test_cli.py +++ b/swh/scheduler/tests/test_cli.py @@ -1,671 +1,684 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import re import tempfile from unittest.mock import patch import logging from click.testing import CliRunner import pytest from swh.storage.in_memory import Storage from swh.scheduler.cli import cli from swh.scheduler.utils import create_task_dict CLI_CONFIG = ''' scheduler: cls: foo args: {} ''' def invoke(scheduler, catch_exceptions, args): runner = CliRunner() with patch('swh.scheduler.get_scheduler') as get_scheduler_mock, \ tempfile.NamedTemporaryFile('a', suffix='.yml') as config_fd: config_fd.write(CLI_CONFIG) config_fd.seek(0) get_scheduler_mock.return_value = scheduler args = ['-C' + config_fd.name, ] + args 
result = runner.invoke(cli, args, obj={'log_level': logging.WARNING}) if not catch_exceptions and result.exception: print(result.output) raise result.exception return result def test_schedule_tasks(swh_scheduler): csv_data = ( b'swh-test-ping;[["arg1", "arg2"]];{"key": "value"};' + datetime.datetime.utcnow().isoformat().encode() + b'\n' + b'swh-test-ping;[["arg3", "arg4"]];{"key": "value"};' + datetime.datetime.utcnow().isoformat().encode() + b'\n') with tempfile.NamedTemporaryFile(suffix='.csv') as csv_fd: csv_fd.write(csv_data) csv_fd.seek(0) result = invoke(swh_scheduler, False, [ 'task', 'schedule', '-d', ';', csv_fd.name ]) expected = r''' Created 2 tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: recurring Args: \['arg1', 'arg2'\] Keyword args: key: 'value' Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: recurring Args: \['arg3', 'arg4'\] Keyword args: key: 'value' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_schedule_tasks_columns(swh_scheduler): with tempfile.NamedTemporaryFile(suffix='.csv') as csv_fd: csv_fd.write( b'swh-test-ping;oneshot;["arg1", "arg2"];{"key": "value"}\n') csv_fd.seek(0) result = invoke(swh_scheduler, False, [ 'task', 'schedule', '-c', 'type', '-c', 'policy', '-c', 'args', '-c', 'kwargs', '-d', ';', csv_fd.name ]) expected = r''' Created 1 tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: 'arg1' 'arg2' Keyword args: key: 'value' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_schedule_task(swh_scheduler): result = invoke(swh_scheduler, False, [ 'task', 'add', 'swh-test-ping', 'arg1', 'arg2', 'key=value', ]) expected = r''' Created 1 tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: recurring Args: 'arg1' 'arg2' Keyword args: key: 'value' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_pending_tasks_none(swh_scheduler): result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', ]) expected = r''' Found 0 swh-test-ping tasks '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_pending_tasks(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task2['next_run'] += datetime.timedelta(days=1) swh_scheduler.create_tasks([task1, task2]) result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', ]) expected = r''' Found 1 swh-test-ping tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value1' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output swh_scheduler.grab_ready_tasks('swh-test-ping') result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', ]) expected = r''' Found 0 swh-test-ping tasks '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_pending_tasks_filter(swh_scheduler): task = 
create_task_dict('swh-test-multiping', 'oneshot', key='value') swh_scheduler.create_tasks([task]) result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', ]) expected = r''' Found 0 swh-test-ping tasks '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_pending_tasks_filter_2(swh_scheduler): swh_scheduler.create_tasks([ create_task_dict('swh-test-multiping', 'oneshot', key='value'), create_task_dict('swh-test-ping', 'oneshot', key='value2'), ]) result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', ]) expected = r''' Found 1 swh-test-ping tasks Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output # Fails because "task list-pending --limit 3" only returns 2 tasks, because # of how compute_nb_tasks_from works. @pytest.mark.xfail def test_list_pending_tasks_limit(swh_scheduler): swh_scheduler.create_tasks([ create_task_dict('swh-test-ping', 'oneshot', key='value%d' % i) for i in range(10) ]) result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', '--limit', '3', ]) expected = r''' Found 2 swh-test-ping tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value0' Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value1' Task 3 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_pending_tasks_before(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task1['next_run'] += datetime.timedelta(days=3) task2['next_run'] += datetime.timedelta(days=1) swh_scheduler.create_tasks([task1, task2]) result = invoke(swh_scheduler, False, [ 'task', 'list-pending', 'swh-test-ping', '--before', (datetime.date.today() + datetime.timedelta(days=2)).isoformat() ]) expected = r''' Found 1 swh-test-ping tasks Task 2 Next run: in a day \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task1['next_run'] += datetime.timedelta(days=3, hours=2) swh_scheduler.create_tasks([task1, task2]) swh_scheduler.grab_ready_tasks('swh-test-ping') result = invoke(swh_scheduler, False, [ 'task', 'list', ]) expected = r''' Found 2 tasks Task 1 Next run: in 3 days \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value1' Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_scheduled Priority:\x20 Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, 
re.MULTILINE), result.output def test_list_tasks_id(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task3 = create_task_dict('swh-test-ping', 'oneshot', key='value3') swh_scheduler.create_tasks([task1, task2, task3]) result = invoke(swh_scheduler, False, [ 'task', 'list', '--task-id', '2', ]) expected = r''' Found 1 tasks Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks_id_2(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task3 = create_task_dict('swh-test-ping', 'oneshot', key='value3') swh_scheduler.create_tasks([task1, task2, task3]) result = invoke(swh_scheduler, False, [ 'task', 'list', '--task-id', '2', '--task-id', '3' ]) expected = r''' Found 2 tasks Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value2' Task 3 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value3' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks_type(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-multiping', 'oneshot', key='value2') task3 = create_task_dict('swh-test-ping', 'oneshot', key='value3') swh_scheduler.create_tasks([task1, task2, task3]) result = invoke(swh_scheduler, False, [ 'task', 'list', '--task-type', 'swh-test-ping' ]) expected = r''' Found 2 tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value1' Task 3 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value3' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks_limit(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task3 = create_task_dict('swh-test-ping', 'oneshot', key='value3') swh_scheduler.create_tasks([task1, task2, task3]) result = invoke(swh_scheduler, False, [ 'task', 'list', '--limit', '2', ]) expected = r''' Found 2 tasks Task 1 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value1' Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks_before(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', 
key='value2') task1['next_run'] += datetime.timedelta(days=3, hours=2) swh_scheduler.create_tasks([task1, task2]) swh_scheduler.grab_ready_tasks('swh-test-ping') result = invoke(swh_scheduler, False, [ 'task', 'list', '--before', (datetime.date.today() + datetime.timedelta(days=2)).isoformat() ]) expected = r''' Found 1 tasks Task 2 Next run: just now \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_scheduled Priority:\x20 Args: Keyword args: key: 'value2' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def test_list_tasks_after(swh_scheduler): task1 = create_task_dict('swh-test-ping', 'oneshot', key='value1') task2 = create_task_dict('swh-test-ping', 'oneshot', key='value2') task1['next_run'] += datetime.timedelta(days=3, hours=2) swh_scheduler.create_tasks([task1, task2]) swh_scheduler.grab_ready_tasks('swh-test-ping') result = invoke(swh_scheduler, False, [ 'task', 'list', '--after', (datetime.date.today() + datetime.timedelta(days=2)).isoformat() ]) expected = r''' Found 1 tasks Task 1 Next run: in 3 days \(.*\) Interval: 1 day, 0:00:00 Type: swh-test-ping Policy: oneshot Status: next_run_not_scheduled Priority:\x20 Args: Keyword args: key: 'value1' '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), result.output def _fill_storage_with_origins(storage, nb_origins): - storage.origin_add([ + origins = [ { 'type': 'type{}'.format(i), 'url': 'http://example.com/{}'.format(i), } for i in range(nb_origins) - ]) + ] + storage.origin_add(origins) + return origins @pytest.fixture def storage(): """An instance of swh.storage.in_memory.Storage that gets injected into the CLI functions.""" storage = Storage() with patch('swh.storage.get_storage') as get_storage_mock: get_storage_mock.return_value = storage yield storage @patch('swh.scheduler.cli.utils.TASK_BATCH_SIZE', 3) def test_task_schedule_origins_dry_run( swh_scheduler, storage): """Tests the scheduling when origin_batch_size*task_batch_size is a divisor of nb_origins.""" _fill_storage_with_origins(storage, 90) result = invoke(swh_scheduler, False, [ 'task', 'schedule_origins', '--dry-run', 'swh-test-ping', ]) # Check the output expected = r''' Scheduled 3 tasks \(30 origins\). Scheduled 6 tasks \(60 origins\). Scheduled 9 tasks \(90 origins\). Done. 
'''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), \ repr(result.output) # Check scheduled tasks tasks = swh_scheduler.search_tasks() assert len(tasks) == 0 +def _assert_origin_tasks_constraints( + tasks, max_tasks, max_task_size, expected_origins): + # check there are not too many tasks + assert len(tasks) <= max_tasks + + # check tasks are not too large + assert all(len(task['arguments']['args'][0]) <= max_task_size + for task in tasks) + + # check the tasks are exhaustive + assert sum([len(task['arguments']['args'][0]) for task in tasks]) == \ + len(expected_origins) + assert \ + set.union(*(set(task['arguments']['args'][0]) for task in tasks)) == \ + {origin['url'] for origin in expected_origins} + + @patch('swh.scheduler.cli.utils.TASK_BATCH_SIZE', 3) def test_task_schedule_origins(swh_scheduler, storage): """Tests the scheduling when neither origin_batch_size nor task_batch_size is a divisor of nb_origins.""" - _fill_storage_with_origins(storage, 70) + origins = _fill_storage_with_origins(storage, 70) result = invoke(swh_scheduler, False, [ 'task', 'schedule_origins', 'swh-test-ping', '--batch-size', '20', ]) # Check the output expected = r''' Scheduled 3 tasks \(60 origins\). Scheduled 4 tasks \(70 origins\). Done. '''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), \ repr(result.output) - # Check scheduled tasks + # Check tasks tasks = swh_scheduler.search_tasks() - assert len(tasks) == 4 - assert tasks[0]['arguments']['args'] == [list(range(1, 21))] - assert tasks[1]['arguments']['args'] == [list(range(21, 41))] - assert tasks[2]['arguments']['args'] == [list(range(41, 61))] - assert tasks[3]['arguments']['args'] == [list(range(61, 71))] + _assert_origin_tasks_constraints(tasks, 4, 20, origins) assert all(task['arguments']['kwargs'] == {} for task in tasks) def test_task_schedule_origins_kwargs(swh_scheduler, storage): """Tests support of extra keyword-arguments.""" - _fill_storage_with_origins(storage, 30) + origins = _fill_storage_with_origins(storage, 30) result = invoke(swh_scheduler, False, [ 'task', 'schedule_origins', 'swh-test-ping', '--batch-size', '20', 'key1="value1"', 'key2="value2"', ]) # Check the output expected = r''' Scheduled 2 tasks \(30 origins\). Done. 
'''.lstrip() assert result.exit_code == 0, result.output assert re.fullmatch(expected, result.output, re.MULTILINE), \ repr(result.output) - # Check scheduled tasks + # Check tasks tasks = swh_scheduler.search_tasks() - assert len(tasks) == 2 - assert tasks[0]['arguments']['args'] == [list(range(1, 21))] - assert tasks[1]['arguments']['args'] == [list(range(21, 31))] + _assert_origin_tasks_constraints(tasks, 2, 20, origins) assert all(task['arguments']['kwargs'] == {'key1': 'value1', 'key2': 'value2'} for task in tasks) diff --git a/swh/scheduler/updater/backend.py b/swh/scheduler/updater/backend.py index d03e48a..919cb0b 100644 --- a/swh/scheduler/updater/backend.py +++ b/swh/scheduler/updater/backend.py @@ -1,113 +1,113 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from arrow import utcnow import psycopg2.pool import psycopg2.extras from swh.core.db import BaseDb from swh.core.db.common import db_transaction, db_transaction_generator from swh.scheduler.backend import format_query class SchedulerUpdaterBackend: CONFIG_BASE_FILENAME = 'backend/scheduler-updater' # 'cache_read_limit': ('int', 1000), def __init__(self, db, cache_read_limit=1000, min_pool_conns=1, max_pool_conns=10): """ Args: db: either a libpq connection string, or a psycopg2 connection """ if isinstance(db, psycopg2.extensions.connection): self._pool = None self._db = BaseDb(db) else: self._pool = psycopg2.pool.ThreadedConnectionPool( min_pool_conns, max_pool_conns, db, cursor_factory=psycopg2.extras.RealDictCursor, ) self._db = None self.limit = cache_read_limit def get_db(self): if self._db: return self._db return BaseDb.from_pool(self._pool) def put_db(self, db): if db is not self._db: db.put_conn() cache_put_keys = ['url', 'cnt', 'last_seen', 'origin_type'] @db_transaction() def cache_put(self, events, timestamp=None, db=None, cur=None): """Write new events in the backend. """ cur.execute('select swh_mktemp_cache()') db.copy_to(prepare_events(events, timestamp), 'tmp_cache', self.cache_put_keys, cur=cur) cur.execute('select swh_cache_put()') cache_read_keys = ['id', 'url', 'origin_type', 'cnt', 'first_seen', 'last_seen'] @db_transaction_generator() def cache_read(self, timestamp=None, limit=None, db=None, cur=None): """Read events from the cache prior to timestamp. Note that limit=None does not mean 'no limit' but uses the default limit (see cache_read_limit constructor argument). 
""" if not timestamp: timestamp = utcnow() if not limit: limit = self.limit q = format_query('select {keys} from swh_cache_read(%s, %s)', self.cache_read_keys) cur.execute(q, (timestamp, limit)) yield from cur.fetchall() @db_transaction() def cache_remove(self, entries, db=None, cur=None): """Clean events from the cache """ q = 'delete from cache where url in (%s)' % ( ', '.join(("'%s'" % e for e in entries)), ) cur.execute(q) def prepare_events(events, timestamp=None): if timestamp is None: timestamp = utcnow() outevents = [] urls = [] for e in events: event = e.get() url = event['url'].strip() if event['last_seen'] is None: event['last_seen'] = timestamp event['url'] = url if url in urls: idx = urls.index(url) urls.append(urls.pop(idx)) - pevent = outevents.pop(idx) - event['cnt'] += pevent['cnt'] + prev_event = outevents.pop(idx) + event['cnt'] += prev_event['cnt'] event['last_seen'] = max( - event['last_seen'], pevent['last_seen']) + event['last_seen'], prev_event['last_seen']) else: urls.append(url) outevents.append(event) return outevents diff --git a/version.txt b/version.txt index 96f7eb4..fa319cd 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -v0.0.57-0-g09e7245 \ No newline at end of file +v0.0.58-0-g9c2b329 \ No newline at end of file