diff --git a/swh/storage/tests/__init__.py b/swh/storage/tests/__init__.py
--- a/swh/storage/tests/__init__.py
+++ b/swh/storage/tests/__init__.py
@@ -1,5 +1,55 @@
+# Copyright (C) 2015-2018 The Software Heritage developers
+# See the AUTHORS file at the top-level directory of this distribution
+# License: GNU General Public License version 3, or any later version
+# See top-level LICENSE file for more information
+
 from os import path

 import swh.storage

+from hypothesis.strategies import (binary, composite, lists)
+
+from swh.model.hashutil import MultiHash
+
 SQL_DIR = path.join(path.dirname(swh.storage.__file__), 'sql')
+
+
+def gen_raw_content():
+    """Generate raw content binary.
+
+    """
+    return binary(min_size=20, max_size=100)
+
+
+@composite
+def gen_contents(draw, *, min_size=0, max_size=100):
+    """Generate valid and consistent content.
+
+    Context: Test purposes
+
+    Args:
+        **draw**: Used by hypothesis to generate data
+        **min_size** (int): Minimal number of elements to generate
+                            (default: 0)
+        **max_size** (int): Maximal number of elements to generate
+                            (default: 100)
+
+    Returns:
+        List of swh.model contents whose size is between [min_size:max_size].
+
+    """
+    raw_contents = draw(lists(
+        gen_raw_content(), min_size=min_size, max_size=max_size))
+
+    contents = []
+    for raw_content in raw_contents:
+        content = {
+            'data': raw_content,
+            'length': len(raw_content),
+            'status': 'visible',
+        }
+        hashes = MultiHash.from_data(raw_content).digest()
+        content.update(hashes)
+        contents.append(content)
+
+    return contents
diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py
--- a/swh/storage/tests/test_storage.py
+++ b/swh/storage/tests/test_storage.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2015-2017 The Software Heritage developers
+# Copyright (C) 2015-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
@@ -13,10 +13,14 @@
 import psycopg2
 import pytest

+from hypothesis import given
+
 from swh.model import from_disk, identifiers
 from swh.model.hashutil import hash_to_bytes
 from swh.storage.tests.storage_testing import StorageTestFixture

+from . import gen_contents
+

 @pytest.mark.db
 class BaseTestStorage(StorageTestFixture):
@@ -1842,6 +1846,55 @@
         self.assertIsNotNone(o_m1)


+@pytest.mark.property_based
+class PropBasedTestStorage(BaseTestStorage, unittest.TestCase):
+    def assert_contents_ok(self, expected_contents, actual_contents,
+                           keys_to_check={'sha1', 'data'}):
+        """Assert that a given list of contents matches on a given set of keys.
+ + """ + for k in keys_to_check: + expected_list = [c[k] for c in expected_contents] + expected_list.sort() + actual_list = [c[k] for c in actual_contents] + actual_list.sort() + + self.assertEqual(actual_list, expected_list) + + @given(gen_contents(min_size=1, max_size=4)) + def test_generate_content_get(self, contents): + # add contents to storage + self.storage.content_add(contents) + + # input the list of sha1s we want from storage + get_sha1s = [c['sha1'] for c in contents] + + # retrieve contents + actual_contents = list(self.storage.content_get(get_sha1s)) + + self.assert_contents_ok(contents, actual_contents) + + @given(gen_contents(min_size=1, max_size=4)) + def test_generate_content_get_metadata(self, contents): + # add contents to storage + self.storage.content_add(contents) + + # input the list of sha1s we want from storage + get_sha1s = [c['sha1'] for c in contents] + + # retrieve contents + actual_contents = list(self.storage.content_get_metadata(get_sha1s)) + + self.assertEquals(len(actual_contents), len(contents)) + + # will check that all contents are retrieved correctly + one_content = contents[0] + # content_get_metadata does not return data + keys_to_check = set(one_content.keys()) - {'data'} + self.assert_contents_ok(contents, actual_contents, + keys_to_check=keys_to_check) + + class TestLocalStorage(CommonTestStorage, unittest.TestCase): """Test the local storage"""