diff --git a/swh/journal/publisher.py b/swh/journal/publisher.py
index ff0e0f4..9774c44 100644
--- a/swh/journal/publisher.py
+++ b/swh/journal/publisher.py
@@ -1,194 +1,198 @@
 # Copyright (C) 2016-2019 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from collections import defaultdict
 import logging
 
 from kafka import KafkaProducer, KafkaConsumer
 
 from swh.storage import get_storage
 from swh.storage.algos import snapshot
 
 from .serializers import kafka_to_key, key_to_kafka
 
+logger = logging.getLogger(__name__)
+
 
 class JournalPublisher:
     """The journal publisher is a layer in charge of:
 
     - consuming messages from topics (1 topic per object_type)
     - reifying the object ids read from those topics (using the storage)
     - producing those reified objects to output topics (1 topic per
       object type)
 
     The main entry point for this class is the 'poll' method.
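+
+    A minimal configuration sketch (the key names are the ones this
+    class reads; the values are only illustrative)::
+
+        publisher = JournalPublisher({
+            'brokers': ['localhost:9092'],
+            'temporary_prefix': 'swh.tmp_journal.new',
+            'final_prefix': 'swh.journal.objects',
+            'consumer_id': 'swh.journal.publisher.consumer',
+            'publisher_id': 'swh.journal.publisher',
+            'object_types': ['content', 'revision', 'release'],
+            'max_messages': 100,
+            'storage': {'cls': 'remote',
+                        'args': {'url': 'http://localhost:5002/'}},
+        })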
 
     """
     def __init__(self, config):
         self.config = config
         self._prepare_storage(config)
         self._prepare_journal(config)
         self.max_messages = self.config['max_messages']
 
     def _prepare_journal(self, config):
         """Prepare the consumer and subscriber instances for the publisher to
            actually be able to discuss with the journal.
 
         """
         # yes, the temporary topics contain values that are actually _keys_
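+        # (e.g. a message on '<temporary_prefix>.content' is expected to
+        # deserialize to a key dict such as {b'sha1': <sha1 bytes>},
+        # not to a full content object; see process_contents below)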
         self.consumer = KafkaConsumer(
             bootstrap_servers=config['brokers'],
             value_deserializer=kafka_to_key,
             auto_offset_reset='earliest',
             enable_auto_commit=False,
             group_id=config['consumer_id'],
         )
         self.producer = KafkaProducer(
             bootstrap_servers=config['brokers'],
             key_serializer=key_to_kafka,
             value_serializer=key_to_kafka,
             client_id=config['publisher_id'],
         )
 
-        logging.debug('Subscribing to object types event: %s' % (
-            config['object_types'], ))
+        logger.info('Subscribing to object types: %s',
+                    config['object_types'])
         self.consumer.subscribe(
             topics=['%s.%s' % (config['temporary_prefix'], object_type)
                     for object_type in config['object_types']],
         )
 
     def _prepare_storage(self, config):
         """Prepare the storage instance needed for the publisher to be able to
            discuss with the storage to retrieve the objects.
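+
+           Example (a sketch, assuming a remote storage service and the
+           cls/args form expected by get_storage)::
+
+               config['storage'] = {'cls': 'remote',
+                                    'args': {'url': 'http://localhost:5002/'}}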
 
         """
         self.storage = get_storage(**config['storage'])
 
     def poll(self, max_messages=None):
         """Process a batch of messages from the consumer's topics. Use the
            storage to reify those ids. Produces back those reified
            objects to the production topics.
 
            This method polls a given amount of message then stops.
            The number of messages to consume is either provided or
            configured as fallback.
 
            The following method is expected to be called from within a
            loop.
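+
+           Example (sketch)::
+
+               publisher = JournalPublisher(config)
+               while True:
+                   publisher.poll()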
 
         """
         messages = defaultdict(list)
         if max_messages is None:
             max_messages = self.max_messages
 
         for num, message in enumerate(self.consumer):
             object_type = message.topic.split('.')[-1]
-            logging.debug('num: %s, object_type: %s, message: %s' % (
-                num, object_type, message))
+            logger.debug('num: %s, object_type: %s, message: %s',
+                         num, object_type, message)
             messages[object_type].append(message.value)
-            if num + 1 >= self.max_messages:
+            if num + 1 >= max_messages:
                 break
 
+        logger.debug('number of messages: %s',
+                     sum(len(values) for values in messages.values()))
+
         new_objects = self.process_objects(messages)
         self.produce_messages(new_objects)
         self.consumer.commit()
 
     def process_objects(self, messages):
         """Given a dict of messages {object type: [object id]}, reify those
            ids to swh object from the storage and returns a
            corresponding dict.
 
         Args:
             messages (dict): Dict of {object_type: [id-as-bytes]}
 
         Returns:
             Dict of {object_type: [tuple]}.
 
                 object_type (str): content, revision, release
                 tuple (bytes, dict): object id as bytes, object as swh dict.
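+
+        Example (sketch; ids abbreviated)::
+
+            >>> publisher.process_objects({'content': [{b'sha1': id0}]})
+            {'content': [(id0, {'sha1': id0, ...})]}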
 
         """
         processors = {
             'content': self.process_contents,
             'revision': self.process_revisions,
             'release': self.process_releases,
             'snapshot': self.process_snapshots,
             'origin': self.process_origins,
             'origin_visit': self.process_origin_visits,
         }
 
         return {
             key: processors[key](value)
             for key, value in messages.items()
         }
 
     def produce_messages(self, messages):
         """Produce new swh object to the producer topic.
 
         Args:
             messages ([dict]): Dict of {object_type: [tuple]}.
 
                 object_type (str): content, revision, release
                 tuple (bytes, dict): object id as bytes, object as swh dict.
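+
+        Example (sketch): with a final_prefix of 'swh.journal.objects',
+        an entry {'content': [(key, value)]} results in
+        producer.send('swh.journal.objects.content', key=key, value=value).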
 
         """
         for object_type, objects in messages.items():
             topic = '%s.%s' % (self.config['final_prefix'], object_type)
-            for key, object in objects:
-                logging.debug('topic: %s, key: %s, value: %s' % (
-                    topic, key, object))
-                self.producer.send(topic, key=key, value=object)
+            for key, obj in objects:
+                logger.debug('topic: %s, key: %s, value: %s',
+                             topic, key, obj)
+                self.producer.send(topic, key=key, value=obj)
 
         self.producer.flush()
 
     def process_contents(self, content_objs):
-        logging.debug('contents: %s' % content_objs)
+        logger.debug('contents: %s', content_objs)
         metadata = self.storage.content_get_metadata(
             (c[b'sha1'] for c in content_objs))
         return [(content['sha1'], content) for content in metadata]
 
     def process_revisions(self, revision_objs):
-        logging.debug('revisions: %s' % revision_objs)
+        logger.debug('revisions: %s', revision_objs)
         metadata = self.storage.revision_get((r[b'id'] for r in revision_objs))
         return [(revision['id'], revision)
                 for revision in metadata if revision]
 
     def process_releases(self, release_objs):
-        logging.debug('releases: %s' % release_objs)
+        logger.debug('releases: %s', release_objs)
         metadata = self.storage.release_get((r[b'id'] for r in release_objs))
         return [(release['id'], release) for release in metadata]
 
     def process_origins(self, origin_objs):
-        logging.debug('origins: %s' % origin_objs)
+        logger.debug('origins: %s', origin_objs)
         r = []
         for o in origin_objs:
             origin = {'url': o[b'url'], 'type': o[b'type']}
             r.append((origin, origin))
         return r
 
     def process_origin_visits(self, origin_visits):
-        logging.debug('origin_visits: %s' % origin_visits)
+        logger.debug('origin_visits: %s', origin_visits)
         metadata = []
         for ov in origin_visits:
             origin_visit = self.storage.origin_visit_get_by(
                 ov[b'origin'], ov[b'visit'])
             if origin_visit:
                 pk = ov[b'origin'], ov[b'visit']
                 origin_visit['date'] = str(origin_visit['date'])
                 metadata.append((pk, origin_visit))
         return metadata
 
     def process_snapshots(self, snapshot_objs):
-        logging.debug('snapshots: %s' % snapshot_objs)
+        logger.debug('snapshots: %s', snapshot_objs)
         metadata = []
         for snap in snapshot_objs:
             full_obj = snapshot.snapshot_get_all_branches(
                 self.storage, snap[b'id'])
-            metadata.append((full_obj['id'], full_obj))
+            # skip snapshots unknown to the storage, mirroring the
+            # filtering done in process_revisions
+            if full_obj:
+                metadata.append((full_obj['id'], full_obj))
 
         return metadata
 
 
 if __name__ == '__main__':
     print('Please use the "swh-journal publisher run" command')