diff --git a/swh/journal/cli.py b/swh/journal/cli.py
--- a/swh/journal/cli.py
+++ b/swh/journal/cli.py
@@ -191,7 +191,7 @@
         if map_.size() % SHA1_SIZE != 0:
             ctx.fail('--exclude-sha1 must link to a file whose size is an '
                      'exact multiple of %d bytes.' % SHA1_SIZE)
-        nb_excluded_hashes = int(map_.size()/SHA1_SIZE)
+        nb_excluded_hashes = int(map_.size() / SHA1_SIZE)
 
         def exclude_fn(obj):
             return is_hash_in_bytearray(obj['sha1'], map_, nb_excluded_hashes)
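
The `--exclude-sha1` option maps a file of concatenated binary SHA1 digests, so the hash count falls straight out of the file size, hence the requirement that the size divide evenly by SHA1_SIZE. A minimal sketch of that load-and-validate step, using a hypothetical `load_excluded_hashes` helper (the CLI wires this up from the option value instead):

import mmap

SHA1_SIZE = 20  # width of one binary SHA1 digest

def load_excluded_hashes(path):
    # Map the whole file read-only; its size must be an exact multiple
    # of SHA1_SIZE for the buffer to contain only whole digests.
    f = open(path, 'rb')
    map_ = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
    if map_.size() % SHA1_SIZE != 0:
        raise ValueError(
            'file size must be an exact multiple of %d bytes' % SHA1_SIZE)
    return map_, map_.size() // SHA1_SIZE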
diff --git a/swh/journal/client.py b/swh/journal/client.py
--- a/swh/journal/client.py
+++ b/swh/journal/client.py
@@ -225,7 +225,7 @@
 
                 # clamp batch size to avoid overrunning stop_after_objects
                 batch_size = min(
-                    self.stop_after_objects-total_objects_processed,
+                    self.stop_after_objects - total_objects_processed,
                     batch_size,
                 )
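
The clamp keeps the final poll from fetching past `stop_after_objects`. The same logic as a standalone function (the name and signature are illustrative; in the client these are instance attributes):

def clamp_batch_size(batch_size, stop_after_objects, total_objects_processed):
    # Shrink the batch so processing stops exactly at the configured limit.
    if stop_after_objects is not None:
        remaining = stop_after_objects - total_objects_processed
        batch_size = min(remaining, batch_size)
    return batch_size

# With 950 of 1000 objects done, a 200-object batch is clamped to 50:
assert clamp_batch_size(200, 1000, 950) == 50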
 
diff --git a/swh/journal/replay.py b/swh/journal/replay.py
--- a/swh/journal/replay.py
+++ b/swh/journal/replay.py
@@ -174,13 +174,13 @@
         raise ValueError('hash_ does not match the provided hash_size.')
 
     def get_hash(position):
-        return array[position*hash_size:(position+1)*hash_size]
+        return array[position * hash_size:(position + 1) * hash_size]
 
     # Regular dichotomy:
     left = 0
     right = nb_hashes
-    while left < right-1:
-        middle = int((right+left)/2)
+    while left < right - 1:
+        middle = int((right + left) / 2)
         pivot = get_hash(middle)
         if pivot == hash_:
             return True
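
`is_hash_in_bytearray` is a binary search run directly over the packed buffer: `get_hash` slices out the fixed-width record at a given index, and the loop narrows the candidate interval until one slot remains. A self-contained sketch, assuming `array` holds `nb_hashes` digests of `hash_size` bytes each, concatenated in ascending order:

def is_hash_in_bytearray(hash_, array, nb_hashes, hash_size=20):
    # Return whether hash_ occurs in a sorted, packed buffer of digests.
    if len(hash_) != hash_size:
        raise ValueError('hash_ does not match the provided hash_size.')

    def get_hash(position):
        return array[position * hash_size:(position + 1) * hash_size]

    left = 0
    right = nb_hashes
    while left < right - 1:
        middle = (left + right) // 2
        pivot = get_hash(middle)
        if pivot == hash_:
            return True
        elif pivot < hash_:   # bytes compare lexicographically in Python
            left = middle
        else:
            right = middle
    return get_hash(left) == hash_

Searching the raw buffer keeps the exclusion list out of Python object memory: membership costs O(log n) slice comparisons and allocates no per-hash objects up front.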
@@ -384,8 +384,8 @@
         'processed %s content objects in %.1fsec '
         '(%.1f obj/sec, %.1fMB/sec) - %d failed - %d skipped',
         len(vol), dt,
-        len(vol)/dt,
-        sum(vol)/1024/1024/dt,
+        len(vol) / dt,
+        sum(vol) / 1024 / 1024 / dt,
         nb_failures,
         nb_skipped)
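
The summary line derives both rates from `vol`, the list of per-object byte sizes, and `dt`, the elapsed seconds. The same arithmetic as a tiny helper (hypothetical name):

def content_stats(vol, dt):
    # (objects per second, megabytes per second) for one replayed batch.
    return len(vol) / dt, sum(vol) / 1024 / 1024 / dt

# 1000 contents of 1 KiB replayed in 2s -> 500.0 obj/sec, ~0.49 MB/sec:
objs_per_sec, mb_per_sec = content_stats([1024] * 1000, 2.0)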
 
diff --git a/swh/journal/tests/conftest.py b/swh/journal/tests/conftest.py
--- a/swh/journal/tests/conftest.py
+++ b/swh/journal/tests/conftest.py
@@ -91,10 +91,10 @@
         'message': b'hello',
         'date': DATES[0],
         'committer': COMMITTERS[0],
-        'author':  COMMITTERS[0],
+        'author': COMMITTERS[0],
         'committer_date': DATES[0],
         'type': 'git',
-        'directory': b'\x01'*20,
+        'directory': b'\x01' * 20,
         'synthetic': False,
         'metadata': None,
         'parents': [],
@@ -104,10 +104,10 @@
         'message': b'hello again',
         'date': DATES[1],
         'committer': COMMITTERS[1],
-        'author':  COMMITTERS[1],
+        'author': COMMITTERS[1],
         'committer_date': DATES[1],
         'type': 'hg',
-        'directory': b'\x02'*20,
+        'directory': b'\x02' * 20,
         'synthetic': False,
         'metadata': None,
         'parents': [],
@@ -128,7 +128,7 @@
         },
         'author': COMMITTERS[0],
         'target_type': 'revision',
-        'target': b'\x04'*20,
+        'target': b'\x04' * 20,
         'message': b'foo',
         'synthetic': False,
     },
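
The fixtures fabricate object identifiers by repeating a single byte twenty times, which yields valid SHA1-length values that stay readable in test failure output:

# One distinct byte value per fixture object, always 20 bytes long:
assert len(b'\x04' * 20) == 20
assert b'\x01' * 20 != b'\x02' * 20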
diff --git a/swh/journal/tests/test_cli.py b/swh/journal/tests/test_cli.py
--- a/swh/journal/tests/test_cli.py
+++ b/swh/journal/tests/test_cli.py
@@ -99,11 +99,11 @@
     snapshot = {'id': b'foo', 'branches': {
         b'HEAD': {
             'target_type': 'revision',
-            'target': b'\x01'*20,
+            'target': b'\x01' * 20,
         }
     }}  # type: Dict[str, Any]
     producer.produce(
-        topic=kafka_prefix+'.snapshot',
+        topic=kafka_prefix + '.snapshot',
         key=key_to_kafka(snapshot['id']),
         value=value_to_kafka(snapshot),
     )
@@ -160,11 +160,11 @@
 
     contents = {}
     for i in range(NUM_CONTENTS):
-        content = b'\x00'*19 + bytes([i])
+        content = b'\x00' * 19 + bytes([i])
         sha1 = objstorages['src'].add(content)
         contents[sha1] = content
         producer.produce(
-            topic=kafka_prefix+'.content',
+            topic=kafka_prefix + '.content',
             key=key_to_kafka(sha1),
             value=key_to_kafka({
                 'sha1': sha1,
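
The loop mints NUM_CONTENTS distinct payloads by varying only the trailing byte, then publishes each one's sha1 on the `<prefix>.content` topic. The payload construction in isolation (the constant's value here is illustrative):

NUM_CONTENTS = 10

# 19 zero bytes plus a unique final byte per object:
contents = [b'\x00' * 19 + bytes([i]) for i in range(NUM_CONTENTS)]
assert len(set(contents)) == NUM_CONTENTS   # all payloads distinct
assert all(len(c) == 20 for c in contents)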
diff --git a/swh/journal/tests/test_replay.py b/swh/journal/tests/test_replay.py
--- a/swh/journal/tests/test_replay.py
+++ b/swh/journal/tests/test_replay.py
@@ -109,7 +109,7 @@
             [rel['id'] for rel in OBJECT_TYPE_KEYS['release'][1]]))
 
     origins = list(storage.origin_get(
-            [orig for orig in OBJECT_TYPE_KEYS['origin'][1]]))
+        [orig for orig in OBJECT_TYPE_KEYS['origin'][1]]))
     assert OBJECT_TYPE_KEYS['origin'][1] == \
         [{'url': orig['url']} for orig in origins]
     for origin in origins:
@@ -131,7 +131,7 @@
 
     input_contents = OBJECT_TYPE_KEYS['content'][1]
     contents = storage.content_get_metadata(
-            [cont['sha1'] for cont in input_contents])
+        [cont['sha1'] for cont in input_contents])
     assert len(contents) == len(input_contents)
     assert contents == {cont['sha1']: [cont] for cont in input_contents}
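
Both content assertions lean on the return shape of `content_get_metadata`: a dict keyed by each queried sha1, mapping to the list of matching metadata rows. Illustratively (field names other than `sha1` are assumptions):

sha1 = b'\x00' * 20
metadata = {sha1: [{'sha1': sha1, 'length': 42, 'status': 'visible'}]}

so the equality `contents == {cont['sha1']: [cont] for cont in input_contents}` holds exactly when every input content comes back once, under its own key.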
 
@@ -225,7 +225,7 @@
             [rel['id'] for rel in OBJECT_TYPE_KEYS['release'][1]]))
 
     origins = list(storage.origin_get(
-            [orig for orig in OBJECT_TYPE_KEYS['origin'][1]]))
+        [orig for orig in OBJECT_TYPE_KEYS['origin'][1]]))
     assert OBJECT_TYPE_KEYS['origin'][1] == \
         [{'url': orig['url']} for orig in origins]
     for origin in origins:
@@ -247,7 +247,7 @@
 
     input_contents = OBJECT_TYPE_KEYS['content'][1]
     contents = storage.content_get_metadata(
-            [cont['sha1'] for cont in input_contents])
+        [cont['sha1'] for cont in input_contents])
     assert len(contents) == len(input_contents)
     assert contents == {cont['sha1']: [cont] for cont in input_contents}