diff --git a/requirements-test.txt b/requirements-test.txt
index 04a85a6c..489af9b0 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -1,5 +1,7 @@
+hypothesis
 pytest
 pytest-django
-hypothesis
+pytest-mock
+requests-mock
 swh.core[http] >= 0.0.61
 swh.loader.git >= 0.0.47
diff --git a/swh/web/common/origin_save.py b/swh/web/common/origin_save.py
index 909e5475..940d5c17 100644
--- a/swh/web/common/origin_save.py
+++ b/swh/web/common/origin_save.py
@@ -1,405 +1,535 @@
 # Copyright (C) 2018-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
+import json
+import logging
+
 from bisect import bisect_right
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
+
+import requests
 
 from django.core.exceptions import ObjectDoesNotExist
 from django.core.exceptions import ValidationError
 from django.core.validators import URLValidator
 from django.utils.html import escape
 
 from swh.web import config
 from swh.web.common import service
 from swh.web.common.exc import BadInputExc, ForbiddenExc, NotFoundExc
 from swh.web.common.models import (
     SaveUnauthorizedOrigin, SaveAuthorizedOrigin, SaveOriginRequest,
     SAVE_REQUEST_ACCEPTED, SAVE_REQUEST_REJECTED, SAVE_REQUEST_PENDING,
     SAVE_TASK_NOT_YET_SCHEDULED, SAVE_TASK_SCHEDULED,
     SAVE_TASK_SUCCEED, SAVE_TASK_FAILED, SAVE_TASK_RUNNING
 )
 from swh.web.common.origin_visits import get_origin_visits
 from swh.web.common.utils import parse_timestamp
 
 from swh.scheduler.utils import create_oneshot_task_dict
 
 scheduler = config.scheduler()
 
+logger = logging.getLogger(__name__)
+
 
 def get_origin_save_authorized_urls():
     """
     Get the list of origin url prefixes authorized to be
     immediately loaded into the archive (whitelist).
 
     Returns:
         list: The list of authorized origin url prefixes
     """
     return [origin.url
             for origin in SaveAuthorizedOrigin.objects.all()]
 
 
 def get_origin_save_unauthorized_urls():
     """
     Get the list of origin url prefixes forbidden to be
     loaded into the archive (blacklist).
 
     Returns:
         list: the list of unauthorized origin url prefixes
     """
     return [origin.url
             for origin in SaveUnauthorizedOrigin.objects.all()]
 
 
 def can_save_origin(origin_url):
     """
     Check if a software origin can be saved into the archive.
 
     Based on the origin url, the save request will be either:
 
       * immediately accepted if the url is whitelisted
       * rejected if the url is blacklisted
       * put in pending state for manual review otherwise
 
     Args:
         origin_url (str): the software origin url to check
 
     Returns:
         str: the origin save request status, either **accepted**,
         **rejected** or **pending**
     """
     # origin url may be blacklisted
     for url_prefix in get_origin_save_unauthorized_urls():
         if origin_url.startswith(url_prefix):
             return SAVE_REQUEST_REJECTED
 
     # if the origin url is in the white list, it can be immediately saved
     for url_prefix in get_origin_save_authorized_urls():
         if origin_url.startswith(url_prefix):
             return SAVE_REQUEST_ACCEPTED
 
     # otherwise, the origin url needs to be manually verified
     return SAVE_REQUEST_PENDING
 
 
 # map origin type to scheduler task
 # TODO: do not hardcode the task name here (T1157)
 _origin_type_task = {
     'git': 'load-git',
     'hg': 'load-hg',
     'svn': 'load-svn'
 }
 
 
 # map scheduler task status to origin save status
 _save_task_status = {
     'next_run_not_scheduled': SAVE_TASK_NOT_YET_SCHEDULED,
     'next_run_scheduled': SAVE_TASK_SCHEDULED,
     'completed': SAVE_TASK_SUCCEED,
     'disabled': SAVE_TASK_FAILED
 }
 
 
 def get_savable_origin_types():
     """
     Get the list of software origin types that can be loaded
     through a save request.
 
     Returns:
         list: the list of savable origin types
     """
     return sorted(list(_origin_type_task.keys()))
 
 
 def _check_origin_type_savable(origin_type):
     """
     Get the list of software origin types that can be loaded
     through a save request.
 
     Returns:
         list: the list of saveable origin types
     """
     allowed_origin_types = ', '.join(get_savable_origin_types())
     if origin_type not in _origin_type_task:
         raise BadInputExc('Origin of type %s cannot be saved! '
                           'Allowed types are the following: %s' %
                           (origin_type, allowed_origin_types))
 
 
 _validate_url = URLValidator(schemes=['http', 'https', 'svn', 'git'])
 
 
 def _check_origin_url_valid(origin_url):
     try:
         _validate_url(origin_url)
     except ValidationError:
         raise BadInputExc('The provided origin url (%s) is not valid!' %
                           escape(origin_url))
 
 
 def _get_visit_info_for_save_request(save_request):
     visit_date = None
     visit_status = None
     try:
         origin = {'type': save_request.origin_type,
                   'url': save_request.origin_url}
         origin_info = service.lookup_origin(origin)
         origin_visits = get_origin_visits(origin_info)
         visit_dates = [parse_timestamp(v['date'])
                        for v in origin_visits]
         i = bisect_right(visit_dates, save_request.request_date)
         if i != len(visit_dates):
             visit_date = visit_dates[i]
             visit_status = origin_visits[i]['status']
             if origin_visits[i]['status'] == 'ongoing':
                 visit_date = None
     except Exception:
         pass
     return visit_date, visit_status
 
 
 def _check_visit_update_status(save_request, save_task_status):
     visit_date, visit_status = _get_visit_info_for_save_request(save_request)
     save_request.visit_date = visit_date
     # visit has been performed, mark the saving task as succeeded
     if visit_date and visit_status is not None:
         save_task_status = SAVE_TASK_SUCCEED
     elif visit_status == 'ongoing':
         save_task_status = SAVE_TASK_RUNNING
     else:
         time_now = datetime.now(tz=timezone.utc)
         time_delta = time_now - save_request.request_date
         # consider the task as failed if it is still in scheduled state
         # 30 days after its submission
         if time_delta.days > 30:
             save_task_status = SAVE_TASK_FAILED
     return visit_date, save_task_status
 
 
 def _save_request_dict(save_request, task=None):
     must_save = False
     visit_date = save_request.visit_date
     # save task still in scheduler db
     if task:
         save_task_status = _save_task_status[task['status']]
         # Consider a request for which a visit date has already been
         # found as succeeded, to avoid retrieving it again
         if save_task_status == SAVE_TASK_SCHEDULED and visit_date:
             save_task_status = SAVE_TASK_SUCCEED
         if save_task_status in (SAVE_TASK_FAILED, SAVE_TASK_SUCCEED) \
                 and not visit_date:
             visit_date, _ = _get_visit_info_for_save_request(save_request)
             save_request.visit_date = visit_date
             must_save = True
         # Ensure last origin visit is available in database
         # before reporting the task execution as successful
         if save_task_status == SAVE_TASK_SUCCEED and not visit_date:
             save_task_status = SAVE_TASK_SCHEDULED
         # Check tasks still marked as scheduled / not yet scheduled
         if save_task_status in (SAVE_TASK_SCHEDULED,
                                 SAVE_TASK_NOT_YET_SCHEDULED):
             visit_date, save_task_status = _check_visit_update_status(
                 save_request, save_task_status)
 
     # save task may have been archived
     else:
         save_task_status = save_request.loading_task_status
         if save_task_status in (SAVE_TASK_SCHEDULED,
                                 SAVE_TASK_NOT_YET_SCHEDULED):
             visit_date, save_task_status = _check_visit_update_status(
                 save_request, save_task_status)
 
     if save_request.loading_task_status != save_task_status:
         save_request.loading_task_status = save_task_status
         must_save = True
 
     if must_save:
         save_request.save()
 
     return {'id': save_request.id,
             'origin_type': save_request.origin_type,
             'origin_url': save_request.origin_url,
             'save_request_date': save_request.request_date.isoformat(),
             'save_request_status': save_request.status,
             'save_task_status': save_task_status,
             'visit_date': visit_date.isoformat() if visit_date else None}
 
 
 def create_save_origin_request(origin_type, origin_url):
     """
     Create a loading task to save a software origin into the archive.
 
     This function aims to create a software origin loading task
     through the use of the swh-scheduler component.
 
     First, some checks are performed to see if the origin type and
     url are valid and if the save request can be accepted.
     If those checks pass, the loading task is then created.
     Otherwise, the save request is put in pending or rejected state.
 
     All the submitted save requests are logged into the swh-web
     database to keep track of them.
 
     Args:
         origin_type (str): the type of origin to save (one of ``git``,
             ``hg`` or ``svn``)
         origin_url (str): the url of the origin to save
 
     Raises:
         BadInputExc: the origin type or url is invalid
         ForbiddenExc: the provided origin url is blacklisted
 
     Returns:
         dict: A dict describing the save request with the following keys:
 
             * **origin_type**: the type of the origin to save
             * **origin_url**: the url of the origin
             * **save_request_date**: the date the request was submitted
             * **save_request_status**: the request status, either **accepted**,
               **rejected** or **pending**
             * **save_task_status**: the origin loading task status, either
               **not created**, **not yet scheduled**, **scheduled**,
               **succeed** or **failed**
 
 
     """
     _check_origin_type_savable(origin_type)
     _check_origin_url_valid(origin_url)
     save_request_status = can_save_origin(origin_url)
     task = None
 
     # if the origin save request is accepted, create a scheduler
     # task to load it into the archive
     if save_request_status == SAVE_REQUEST_ACCEPTED:
         # create a task with high priority
         kwargs = {'priority': 'high'}
         # set task parameters according to the origin type
         if origin_type == 'git':
             kwargs['repo_url'] = origin_url
         elif origin_type == 'hg':
             kwargs['origin_url'] = origin_url
         elif origin_type == 'svn':
             kwargs['origin_url'] = origin_url
             kwargs['svn_url'] = origin_url
 
         sor = None
         # get the list of previously submitted save requests
         current_sors = \
             list(SaveOriginRequest.objects.filter(origin_type=origin_type,
                                                   origin_url=origin_url))
 
         can_create_task = False
         # if no save requests previously submitted, create the scheduler task
         if not current_sors:
             can_create_task = True
         else:
             # get the latest submitted save request
             sor = current_sors[0]
             # if it was in pending state, we need to create the scheduler task
             # and update the save request info in the database
             if sor.status == SAVE_REQUEST_PENDING:
                 can_create_task = True
             # a task has already been created to load the origin
             elif sor.loading_task_id != -1:
                 # get the scheduler task and its status
                 tasks = scheduler.get_tasks([sor.loading_task_id])
                 task = tasks[0] if tasks else None
                 task_status = _save_request_dict(sor, task)['save_task_status']
                 # create a new scheduler task only if the previous one has been
                 # already executed
                 if task_status == SAVE_TASK_FAILED or \
                    task_status == SAVE_TASK_SUCCEED:
                     can_create_task = True
                     sor = None
                 else:
                     can_create_task = False
 
         if can_create_task:
             # effectively create the scheduler task
             task_dict = create_oneshot_task_dict(
                 _origin_type_task[origin_type], **kwargs)
             task = scheduler.create_tasks([task_dict])[0]
 
             # pending save request has been accepted
             if sor:
                 sor.status = SAVE_REQUEST_ACCEPTED
                 sor.loading_task_id = task['id']
                 sor.save()
             else:
                 sor = SaveOriginRequest.objects.create(origin_type=origin_type,
                                                        origin_url=origin_url,
                                                        status=save_request_status, # noqa
                                                        loading_task_id=task['id']) # noqa
     # save request must be manually reviewed for acceptance
     elif save_request_status == SAVE_REQUEST_PENDING:
         # check if such a save request has already been submitted,
         # no need to add it to the database in that case
         try:
             sor = SaveOriginRequest.objects.get(origin_type=origin_type,
                                                 origin_url=origin_url,
                                                 status=save_request_status)
     # if not, add it to the database
         except ObjectDoesNotExist:
             sor = SaveOriginRequest.objects.create(origin_type=origin_type,
                                                    origin_url=origin_url,
                                                    status=save_request_status)
     # origin can not be saved as its url is blacklisted,
     # log the request to the database anyway
     else:
         sor = SaveOriginRequest.objects.create(origin_type=origin_type,
                                                origin_url=origin_url,
                                                status=save_request_status)
 
     if save_request_status == SAVE_REQUEST_REJECTED:
         raise ForbiddenExc('The origin url is blacklisted and will not be '
                            'loaded into the archive.')
 
     return _save_request_dict(sor, task)
 
 
 def get_save_origin_requests_from_queryset(requests_queryset):
     """
     Get all save requests from a SaveOriginRequest queryset.
 
     Args:
         requests_queryset (django.db.models.QuerySet): input
             SaveOriginRequest queryset
 
     Returns:
         list: A list of save origin requests dict as described in
         :func:`swh.web.common.origin_save.create_save_origin_request`
     """
     task_ids = []
     for sor in requests_queryset:
         task_ids.append(sor.loading_task_id)
-    requests = []
+    save_requests = []
     if task_ids:
         tasks = scheduler.get_tasks(task_ids)
         tasks = {task['id']: task for task in tasks}
         for sor in requests_queryset:
             sr_dict = _save_request_dict(sor, tasks.get(sor.loading_task_id))
-            requests.append(sr_dict)
-    return requests
+            save_requests.append(sr_dict)
+    return save_requests
 
 
 def get_save_origin_requests(origin_type, origin_url):
     """
     Get all save requests for a given software origin.
 
     Args:
         origin_type (str): the type of the origin
         origin_url (str): the url of the origin
 
     Raises:
         BadInputExc: the origin type or url is invalid
         NotFoundExc: no save requests can be found for the given origin
 
     Returns:
         list: A list of save origin requests dict as described in
         :func:`swh.web.common.origin_save.create_save_origin_request`
     """
     _check_origin_type_savable(origin_type)
     _check_origin_url_valid(origin_url)
     sors = SaveOriginRequest.objects.filter(origin_type=origin_type,
                                             origin_url=origin_url)
     if sors.count() == 0:
         raise NotFoundExc(('No save requests found for origin with type '
                            '%s and url %s.') % (origin_type, origin_url))
     return get_save_origin_requests_from_queryset(sors)
+
+
+def get_save_origin_task_info(save_request_id):
+    """
+    Get detailed information about an accepted save origin request
+    and its associated loading task.
+
+    If the associated loading task info is archived and removed
+    from the scheduler database, returns an empty dictionary.
+
+    Args:
+        save_request_id (int): identifier of a save origin request
+
+    Returns:
+        dict: A dictionary with the following keys:
+            - **type**: loading task type
+            - **arguments**: loading task arguments
+            - **id**: loading task database identifier
+            - **backend_id**: loading task celery identifier
+            - **scheduled**: loading task scheduling date
+            - **ended**: loading task termination date
+            - **status**: loading task execution status
+        Depending on the availability of the task logs in the Elasticsearch
+        cluster of Software Heritage, the returned dictionary may also
+        contain the following keys:
+            - **name**: associated celery task name
+            - **message**: relevant log message from task execution
+            - **duration**: task execution time (only if it succeeded)
+            - **worker**: name of the worker that executed the task
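+
+        Example of returned dictionary (illustrative values only,
+        assuming a failed load task whose logs were found in
+        Elasticsearch)::
+
+            {
+                'type': 'load-git',
+                'arguments': {'args': [],
+                              'kwargs': {'repo_url': 'https://...'}},
+                'id': 42,
+                'backend_id': 'aaf225dc-baa5-4c8d-b30c-d4b5ec8500cd',
+                'scheduled': datetime(2019, 8, 30, 23, 8, 34),
+                'ended': datetime(2019, 8, 30, 23, 18, 13),
+                'status': 'failed',
+                'name': 'swh.loader.git.tasks.UpdateGitRepository',
+                'message': '[...] Loading failure [...]',
+                'worker': 'worker03'
+            }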
+    """
+    try:
+        save_request = SaveOriginRequest.objects.get(id=save_request_id)
+    except ObjectDoesNotExist:
+        return {}
+
+    task = scheduler.get_tasks([save_request.loading_task_id])
+    task = task[0] if task else None
+    if task is None:
+        return {}
+
+    task_run = scheduler.get_task_runs([task['id']])
+    task_run = task_run[0] if task_run else None
+    if task_run is None:
+        return {}
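+    # only keep the task run fields relevant for display: expose the
+    # scheduler task identifier under 'id' and drop the raw bookkeeping
+    # fields ('task', 'metadata', 'started')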
+    task_run['type'] = task['type']
+    task_run['arguments'] = task['arguments']
+    task_run['id'] = task_run['task']
+    del task_run['task']
+    del task_run['metadata']
+    del task_run['started']
+
+    es_workers_index_url = config.get_config()['es_workers_index_url']
+    if not es_workers_index_url:
+        return task_run
+    es_workers_index_url += '/_search'
+
+    if save_request.visit_date:
+        min_ts = save_request.visit_date
+        max_ts = min_ts + timedelta(days=7)
+    else:
+        min_ts = save_request.request_date
+        max_ts = min_ts + timedelta(days=30)
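+    # Elasticsearch 'epoch_millis' timestamps are expressed in
+    # milliseconds, hence the conversions below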
+    min_ts = int(min_ts.timestamp()) * 1000
+    max_ts = int(max_ts.timestamp()) * 1000
+
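+    # the workers' logs come from journald; a failed load is expected
+    # to be reported with syslog priority 3 (error) while a successful
+    # one ends with an informational message (priority 6)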
+    save_task_status = _save_task_status[task['status']]
+    priority = '3' if save_task_status == SAVE_TASK_FAILED else '6'
+
+    query = {
+        'bool': {
+            'must': [
+                {
+                    'match_phrase': {
+                        'priority': {
+                            'query': priority
+                        }
+                    }
+                },
+                {
+                    'match_phrase': {
+                        'swh_task_id': {
+                            'query': task_run['backend_id']
+                        }
+                    }
+                },
+                {
+                    'range': {
+                        '@timestamp': {
+                            'gte': min_ts,
+                            'lte': max_ts,
+                            'format': 'epoch_millis'
+                        }
+                    }
+                }
+            ]
+        }
+    }
+
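+    # retrieve the log entry matching the task's celery identifier
+    # within the computed time window; the most recent hit holds the
+    # relevant execution details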
+    try:
+        response = requests.post(es_workers_index_url,
+                                 json={'query': query,
+                                       'sort': ['@timestamp']})
+        results = json.loads(response.text)
+        if results['hits']['total'] >= 1:
+            task_run_info = results['hits']['hits'][-1]['_source']
+            if 'swh_logging_args_runtime' in task_run_info:
+                duration = task_run_info['swh_logging_args_runtime']
+                task_run['duration'] = duration
+            if 'message' in task_run_info:
+                task_run['message'] = task_run_info['message']
+            if 'swh_logging_args_name' in task_run_info:
+                task_run['name'] = task_run_info['swh_logging_args_name']
+            elif 'swh_task_name' in task_run_info:
+                task_run['name'] = task_run_info['swh_task_name']
+            if 'hostname' in task_run_info:
+                task_run['worker'] = task_run_info['hostname']
+            elif 'host' in task_run_info:
+                task_run['worker'] = task_run_info['host']
+    except Exception as e:
+        logger.warning('Request to Elasticsearch failed\n%s', e)
+
+    return task_run
diff --git a/swh/web/config.py b/swh/web/config.py
index 635d5f24..d90350a2 100644
--- a/swh/web/config.py
+++ b/swh/web/config.py
@@ -1,156 +1,157 @@
 # Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import os
 
 from swh.core import config
 from swh.indexer.storage import get_indexer_storage
 from swh.scheduler import get_scheduler
 from swh.storage import get_storage
 from swh.vault import get_vault
 from swh.web import settings
 
 SETTINGS_DIR = os.path.dirname(settings.__file__)
 
 DEFAULT_CONFIG = {
     'allowed_hosts': ('list', []),
     'storage': ('dict', {
         'cls': 'remote',
         'args': {
             'url': 'http://127.0.0.1:5002/',
             'timeout': 10,
         },
     }),
     'indexer_storage': ('dict', {
         'cls': 'remote',
         'args': {
             'url': 'http://127.0.0.1:5007/',
             'timeout': 1,
         }
     }),
     'log_dir': ('string', '/tmp/swh/log'),
     'debug': ('bool', False),
     'serve_assets': ('bool', False),
     'host': ('string', '127.0.0.1'),
     'port': ('int', 5004),
     'secret_key': ('string', 'development key'),
     # do not display code highlighting for content > 1MB
     'content_display_max_size': ('int', 5 * 1024 * 1024),
     'snapshot_content_max_size': ('int', 1000),
     'throttling': ('dict', {
         'cache_uri': None,  # production: memcached as cache (127.0.0.1:11211)
                             # development: in-memory cache so None
         'scopes': {
             'swh_api': {
                 'limiter_rate': {
                     'default': '120/h'
                 },
                 'exempted_networks': ['127.0.0.0/8']
             },
             'swh_vault_cooking': {
                 'limiter_rate': {
                     'default': '120/h',
                     'GET': '60/m'
                 },
                 'exempted_networks': ['127.0.0.0/8']
             },
             'swh_save_origin': {
                 'limiter_rate': {
                     'default': '120/h',
                     'POST': '10/h'
                 },
                 'exempted_networks': ['127.0.0.0/8']
             },
             'swh_api_origin_visit_latest': {
                 'limiter_rate': {
                     'default': '700/m'
                 },
                 'exempted_networks': ['127.0.0.0/8'],
             },
         }
     }),
     'vault': ('dict', {
         'cls': 'remote',
         'args': {
             'url': 'http://127.0.0.1:5005/',
         }
     }),
     'scheduler': ('dict', {
         'cls': 'remote',
         'args': {
             'url': 'http://127.0.0.1:5008/'
         }
     }),
     'development_db': ('string', os.path.join(SETTINGS_DIR, 'db.sqlite3')),
     'test_db': ('string', os.path.join(SETTINGS_DIR, 'testdb.sqlite3')),
     'production_db': ('string', '/var/lib/swh/web.sqlite3'),
     'deposit': ('dict', {
         'private_api_url': 'https://deposit.softwareheritage.org/1/private/',
         'private_api_user': 'swhworker',
         'private_api_password': ''
     }),
     'coverage_count_origins': ('bool', False),
-    'e2e_tests_mode': ('bool', False)
+    'e2e_tests_mode': ('bool', False),
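+    # URL of the Elasticsearch index storing the workers' logs; when
+    # empty, the logs lookup in get_save_origin_task_info is skipped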
+    'es_workers_index_url': ('string', ''),
 }
 
 swhweb_config = {}
 
 
 def get_config(config_file='web/web'):
     """Read the configuration file `config_file`.
 
        If an environment variable SWH_CONFIG_FILENAME is defined, this
        takes precedence over the config_file parameter.
 
        In any case, update the app with parameters (secret_key, conf)
        and return the parsed configuration as a dict.
 
        If no configuration file is provided, return a default
        configuration.
 
     """
 
     if not swhweb_config:
         config_filename = os.environ.get('SWH_CONFIG_FILENAME')
         if config_filename:
             config_file = config_filename
         cfg = config.load_named_config(config_file, DEFAULT_CONFIG)
         swhweb_config.update(cfg)
         config.prepare_folders(swhweb_config, 'log_dir')
         swhweb_config['storage'] = get_storage(**swhweb_config['storage'])
         swhweb_config['vault'] = get_vault(**swhweb_config['vault'])
         swhweb_config['indexer_storage'] = \
             get_indexer_storage(**swhweb_config['indexer_storage'])
         swhweb_config['scheduler'] = get_scheduler(
             **swhweb_config['scheduler'])
     return swhweb_config
 
 
 def storage():
     """Return the current application's storage.
 
     """
     return get_config()['storage']
 
 
 def vault():
     """Return the current application's vault.
 
     """
     return get_config()['vault']
 
 
 def indexer_storage():
     """Return the current application's indexer storage.
 
     """
     return get_config()['indexer_storage']
 
 
 def scheduler():
     """Return the current application's scheduler.
 
     """
     return get_config()['scheduler']
diff --git a/swh/web/tests/common/test_origin_save.py b/swh/web/tests/common/test_origin_save.py
new file mode 100644
index 00000000..118a79d9
--- /dev/null
+++ b/swh/web/tests/common/test_origin_save.py
@@ -0,0 +1,127 @@
+# Copyright (C) 2019  The Software Heritage developers
+# See the AUTHORS file at the top-level directory of this distribution
+# License: GNU Affero General Public License version 3, or any later version
+# See top-level LICENSE file for more information
+
+import json
+import os
+
+from datetime import datetime, timedelta, timezone
+
+import pytest
+import requests_mock
+
+from swh.web.common.models import SaveOriginRequest
+from swh.web.common.origin_save import get_save_origin_task_info
+from swh.web.config import get_config
+
+
+_RESOURCES_PATH = os.path.join(os.path.dirname(__file__), '../resources')
+
+_es_url = 'http://esnode1.internal.softwareheritage.org:9200'
+_es_workers_index_url = '%s/swh_workers-*' % _es_url
+
+
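+# This helper exercises get_save_origin_task_info against mocked
+# scheduler and Elasticsearch backends, covering the cases where the
+# loading task has been archived and where the Elasticsearch logs are
+# available or not.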
+def _get_save_origin_task_info_test(mocker, task_archived=False,
+                                    es_available=True):
+
+    swh_web_config = get_config()
+
+    if es_available:
+        swh_web_config.update({'es_workers_index_url': _es_workers_index_url})
+    else:
+        swh_web_config.update({'es_workers_index_url': ''})
+
+    sor_id = 4473
+
+    SaveOriginRequest.objects.create(
+        id=sor_id,
+        request_date=datetime(2019, 8, 30, 23, 7, 3, 474294,
+                              tzinfo=timezone.utc),
+        origin_type='git',
+        origin_url='https://gitlab.com/inkscape/inkscape',
+        status='accepted',
+        loading_task_id=203525448,
+        visit_date=datetime(2019, 8, 30, 23, 18, 11, 54341,
+                            tzinfo=timezone.utc)
+    )
+
+    mock_scheduler = mocker.patch('swh.web.common.origin_save.scheduler')
+    task = {
+        'arguments': {
+            'args': [],
+            'kwargs': {
+                'repo_url': 'https://gitlab.com/inkscape/inkscape'
+            }
+        },
+        'current_interval': timedelta(days=64),
+        'id': 203525448,
+        'next_run': datetime(2019, 8, 30, 23, 7, 1, 614823),
+        'policy': 'oneshot',
+        'priority': 'high',
+        'retries_left': 0,
+        'status': 'disabled',
+        'type': 'load-git'
+    } if not task_archived else None
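+    # the scheduler reports the oneshot task as 'disabled', which maps
+    # to the 'failed' save task status and thus to log entries with
+    # syslog priority 3 in the Elasticsearch response fixture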
+    mock_scheduler.get_tasks.return_value = [task]
+
+    task_run = {
+        'backend_id': 'f00c712c-e820-41ce-a07c-9bf8df914205',
+        'ended': datetime(2019, 8, 30, 23, 18, 13, 770800),
+        'id': 654270631,
+        'metadata': {},
+        'scheduled': datetime(2019, 8, 30, 23, 8, 34, 282021),
+        'started': None,
+        'status': 'failed',
+        'task': 203525448
+    }
+    mock_scheduler.get_task_runs.return_value = [task_run]
+
+    es_response_file = os.path.join(_RESOURCES_PATH,
+                                    'json/es_task_info_response.json')
+    with open(es_response_file) as json_fd:
+        es_response = json.load(json_fd)
+
+    task_exec_data = es_response['hits']['hits'][-1]['_source']
+
+    with requests_mock.Mocker() as requests_mocker:
+        requests_mocker.register_uri('POST',
+                                     _es_workers_index_url + '/_search',
+                                     json=es_response)
+
+        sor_task_info = get_save_origin_task_info(sor_id)
+
+    expected_result = {
+        'type': task['type'],
+        'arguments': task['arguments'],
+        'id': task['id'],
+        'backend_id': task_run['backend_id'],
+        'scheduled': task_run['scheduled'],
+        'ended': task_run['ended'],
+        'status': task_run['status'],
+    } if not task_archived else {}
+
+    if es_available and not task_archived:
+        expected_result.update({
+            'message': task_exec_data['message'],
+            'name': task_exec_data['swh_task_name'],
+            'worker': task_exec_data['hostname']
+        })
+
+    assert sor_task_info == expected_result
+
+
+@pytest.mark.django_db
+def test_get_save_origin_archived_task_info(mocker):
+    _get_save_origin_task_info_test(mocker, task_archived=True)
+
+
+@pytest.mark.django_db
+def test_get_save_origin_task_info_with_es(mocker):
+    _get_save_origin_task_info_test(mocker, es_available=True)
+
+
+@pytest.mark.django_db
+def test_get_save_origin_task_info_without_es(mocker):
+    _get_save_origin_task_info_test(mocker, es_available=False)
diff --git a/swh/web/tests/resources/json/es_task_info_response.json b/swh/web/tests/resources/json/es_task_info_response.json
new file mode 100644
index 00000000..853654f3
--- /dev/null
+++ b/swh/web/tests/resources/json/es_task_info_response.json
@@ -0,0 +1,62 @@
+{
+  "took": 19,
+  "timed_out": false,
+  "_shards": {
+    "total": 194,
+    "successful": 194,
+    "skipped": 186,
+    "failed": 0
+  },
+  "hits": {
+    "total": 1,
+    "max_score": null,
+    "hits": [{
+      "_index": "swh_workers-2019.08.30",
+      "_type": "doc",
+      "_id": "uHrS5GwBjk15w1A-eZNK",
+      "_score": null,
+      "_source": {
+        "comm": "python3",
+        "code_line": "909",
+        "type": "journal",
+        "code_func": "load",
+        "transport": "journal",
+        "swh_task_name": "swh.loader.git.tasks.UpdateGitRepository",
+        "logger": "swh.loader.git.BulkLoader",
+        "swh_task_args_0": "https://gitlab.com/inkscape/inkscape",
+        "source_realtime_timestamp": "1567207093348189",
+        "code_file": "/usr/lib/python3/dist-packages/swh/loader/core/loader.py",
+        "systemd_slice": "system-swh\\x2dworker.slice",
+        "@version": "1",
+        "cap_effective": "0",
+        "boot_id": "b82af8ba13ee48258109a7dfd5058e53",
+        "machine_id": "563ec85b8bcd4ec289b9af4f52b6fa41",
+        "swh_task_id": "f00c712c-e820-41ce-a07c-9bf8df914205",
+        "gid": "1004",
+        "beat": {
+          "name": "worker13",
+          "version": "5.5.0",
+          "hostname": "worker13"
+        },
+        "priority": "3",
+        "systemd_invocation_id": "18bb45cd515d4e1794ddd4d391389045",
+        "@realtime_timestamp": 1567207093348366,
+        "pid": "675",
+        "exe": "/usr/bin/python3.5",
+        "@timestamp": "2019-08-30T23:18:13.348Z",
+        "systemd_unit": "swh-worker@loader_git.service",
+        "tags": ["beats_input_codec_plain_applied"],
+        "systemd_cgroup": "/system.slice/system-swh\\x2dworker.slice/swh-worker@loader_git.service",
+        "host": "worker13",
+        "thread_name": "MainThread",
+        "message": "[2019-08-30 23:18:13,342: ERROR/ForkPoolWorker-64335] Loading failure, updating to `partial` status\nTraceback (most recent call last):\n  File \"/usr/lib/python3/dist-packages/swh/loader/core/loader.py\", line 895, in load\n    more_data_to_fetch = self.fetch_data()\n  File \"/usr/lib/python3/dist-packages/swh/loader/git/loader.py\", line 311, in fetch_data\n    do_progress)\n  File \"/usr/lib/python3/dist-packages/swh/loader/git/loader.py\", line 243, in fetch_pack_from_origin\n    progress=do_activity).refs\n  File \"/usr/lib/python3/dist-packages/dulwich/client.py\", line 1557, in fetch_pack\n    \"git-upload-pack\", url, data=req_data.getvalue())\n  File \"/usr/lib/python3/dist-packages/dulwich/client.py\", line 1467, in _smart_request\n    resp, read = self._http_request(url, headers, data)\n  File \"/usr/lib/python3/dist-packages/dulwich/client.py\", line 1402, in _http_request\n    raise NotGitRepository()\ndulwich.errors.NotGitRepository",
+        "uid": "1004",
+        "syslog_identifier": "python3",
+        "swh_task_kwargs_base_url": "None",
+        "hostname": "worker13",
+        "cmdline": "/usr/bin/python3 -m celery worker --app=swh.scheduler.celery_backend.config.app --pool=prefork --events --concurrency=1 --maxtasksperchild=5 -Ofair --loglevel=info --without-gossip --without-mingle --without-heartbeat -n loader_git.%h"
+      },
+      "sort": [1567207093348]
+    }]
+  }
+}
\ No newline at end of file