diff --git a/swh/loader/pypi/tests/test_loader.py b/swh/loader/pypi/tests/test_loader.py
index e9ef705..7d22f24 100644
--- a/swh/loader/pypi/tests/test_loader.py
+++ b/swh/loader/pypi/tests/test_loader.py
@@ -1,471 +1,508 @@
 # Copyright (C) 2016-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import json
 import shutil
 import tempfile

 from nose.plugins.attrib import attr
 from nose.tools import istest
 from unittest import TestCase

 from swh.model import hashutil

 from swh.loader.pypi.client import PyPIProject
 from swh.loader.pypi.loader import PyPILoader

 from .common import PyPIClientWithCache, RESOURCES_PATH, LoaderNoStorage


 class TestPyPILoader(LoaderNoStorage, PyPILoader):
     """Real PyPILoader for test purposes (storage and pypi interactions
        inhibited)

     """
     def __init__(self, project_name, json_filename=None):
         if not json_filename:  # defaulting to using same name as project
             json_filename = '%s.json' % project_name
         project_metadata_file = '%s/%s' % (RESOURCES_PATH, json_filename)
         project_metadata_url = 'https://pypi.org/pypi/%s/json' % project_name
         with open(project_metadata_file) as f:
             data = json.load(f)

         temp_dir = tempfile.mkdtemp(
             dir='/tmp/', prefix='swh.loader.pypi.tests-')
         # Will use the pypi with cache
         client = PyPIClientWithCache(
             temp_directory=temp_dir, cache_dir=RESOURCES_PATH)
         super().__init__(client=client)
         self.project = PyPIProject(
             client=client,
             project=project_name,
             project_metadata_url=project_metadata_url,
             data=data)

     def prepare(self, project_name, origin_url, origin_metadata_url=None):
         self.project_name = project_name
         self.origin_url = origin_url
         self.origin_metadata_url = origin_metadata_url
         self.visit = 1  # first visit
         self._prepare_state()


 @attr('fs')
 class BaseLoaderITest(TestCase):
     """Loader Test Mixin to prepare the pypi to 'load' in a test context.

     In this setup, the loader uses the cache to load data so no
     network interaction (no storage, no pypi).
""" def setUp(self, project_name='0805nexter', dummy_pypi_instance='https://dummy.org'): self.tmp_root_path = tempfile.mkdtemp() self._project = project_name self._origin_url = '%s/pypi/%s/' % (dummy_pypi_instance, project_name) self._project_metadata_url = '%s/pypi/%s/json' % ( dummy_pypi_instance, project_name) def tearDown(self): shutil.rmtree(self.tmp_root_path) def assertContentsOk(self, expected_contents): contents = self.loader.all_contents self.assertEquals(len(contents), len(expected_contents)) for content in contents: content_id = hashutil.hash_to_hex(content['sha1']) self.assertIn(content_id, expected_contents) def assertDirectoriesOk(self, expected_directories): directories = self.loader.all_directories self.assertEquals(len(directories), len(expected_directories)) for _dir in directories: _dir_id = hashutil.hash_to_hex(_dir['id']) self.assertIn(_dir_id, expected_directories) - def assertSnapshotOk(self, expected_snapshot, expected_revisions): + def assertSnapshotOk(self, expected_snapshot, expected_branches): snapshots = self.loader.all_snapshots self.assertEqual(len(snapshots), 1) snap = snapshots[0] snap_id = hashutil.hash_to_hex(snap['id']) self.assertEqual(snap_id, expected_snapshot) - branches = snap['branches'] - self.assertEqual(len(expected_revisions), len(branches)) - - for branch, target in branches.items(): - rev_id = hashutil.hash_to_hex(target['target']) - self.assertIn(rev_id, expected_revisions) - self.assertEqual('revision', target['target_type']) + branches = { + branch.decode('utf-8'): { + 'target': hashutil.hash_to_hex(t['target']), + 'target_type': t['target_type'], + } if t else t + for branch, t in snap['branches'].items() + } + self.assertEqual(expected_branches, branches) def assertRevisionsOk(self, expected_revisions): # noqa: N802 """Check the loader's revisions match the expected revisions. Expects self.loader to be instantiated and ready to be inspected (meaning the loading took place). Args: expected_revisions (dict): Dict with key revision id, value the targeted directory id. """ # The last revision being the one used later to start back from for rev in self.loader.all_revisions: rev_id = hashutil.hash_to_hex(rev['id']) directory_id = hashutil.hash_to_hex(rev['directory']) self.assertEquals(expected_revisions[rev_id], directory_id) # Define loaders with no storage # They'll just accumulate the data in place # Only for testing purposes. 
 class PyPILoaderNoSnapshot(TestPyPILoader):
     """Same as TestPyPILoader with no prior snapshot seen

     """
     def _last_snapshot(self):
         return None


 class LoaderITest(BaseLoaderITest):
     def setUp(self, project_name='0805nexter',
               dummy_pypi_instance='https://dummy.org'):
         super().setUp(project_name, dummy_pypi_instance)
         self.loader = PyPILoaderNoSnapshot(project_name=project_name)

     @istest
     def load(self):
         """Load a pypi origin

         """
         # when
         self.loader.load(
             self._project, self._origin_url, self._project_metadata_url)

         # then
         self.assertEquals(len(self.loader.all_contents), 6,
                           '3 contents per release artifact files (2)')
         self.assertEquals(len(self.loader.all_directories), 4)
         self.assertEquals(len(self.loader.all_revisions), 2,
                           '2 releases so 2 revisions should be created')
         self.assertEquals(len(self.loader.all_releases), 0,
                           'No release is created in the pypi loader')
         self.assertEquals(len(self.loader.all_snapshots), 1,
                           'Only 1 snapshot targetting all revisions')

         expected_contents = [
             'a61e24cdfdab3bb7817f6be85d37a3e666b34566',
             '938c33483285fd8ad57f15497f538320df82aeb8',
             'a27576d60e08c94a05006d2e6d540c0fdb5f38c8',
             '405859113963cb7a797642b45f171d6360425d16',
             'e5686aa568fdb1d19d7f1329267082fe40482d31',
             '83ecf6ec1114fd260ca7a833a2d165e71258c338',
         ]

         self.assertContentsOk(expected_contents)

         expected_directories = [
             '05219ba38bc542d4345d5638af1ed56c7d43ca7d',
             'cf019eb456cf6f78d8c4674596f1c9a97ece8f44',
             'b178b66bd22383d5f16f4f5c923d39ca798861b4',
             'c3a58f8b57433a4b56caaa5033ae2e0931405338',
         ]

         self.assertDirectoriesOk(expected_directories)

         # {revision hash: directory hash}
         expected_revisions = {
             '4c99891f93b81450385777235a37b5e966dd1571': '05219ba38bc542d4345d5638af1ed56c7d43ca7d',  # noqa
             'e445da4da22b31bfebb6ffc4383dbf839a074d21': 'b178b66bd22383d5f16f4f5c923d39ca798861b4',  # noqa
         }

         self.assertRevisionsOk(expected_revisions)

+        expected_branches = {
+            '0805nexter-1.1.0.zip': {
+                'target': '4c99891f93b81450385777235a37b5e966dd1571',
+                'target_type': 'revision',
+            },
+            '0805nexter-1.2.0.zip': {
+                'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
+                'target_type': 'revision',
+            },
+        }
+
         self.assertSnapshotOk('f456b03e8bf1920d64b00df234b1efedc25b6c93',
-                              expected_revisions)
+                              expected_branches)


 class PyPILoaderWithSnapshot(TestPyPILoader):
     """This loader provides a snapshot and lists corresponding seen
        release artifacts.

     """
     def _last_snapshot(self):
         """Return last visited snapshot"""
         return {
             'id': b'\xf4V\xb0>\x8b\xf1\x92\rd\xb0\r\xf24\xb1\xef\xed\xc2[l\x93',  # noqa
             'branches': {
                 b'0805nexter-1.1.0.zip': {
                     'target': b'L\x99\x89\x1f\x93\xb8\x14P'
                               b'8Ww#Z7\xb5\xe9f\xdd\x15q',
                     'target_type': 'revision'
                 },
                 b'0805nexter-1.2.0.zip': {
                     'target': b'\xe4E\xdaM\xa2+1\xbf'
                               b'\xeb\xb6\xff\xc48=\xbf\x83'
                               b'\x9a\x07M!',
                     'target_type': 'revision'
                 },
             },
         }

     def _known_artifacts(self, last_snapshot):
         """List corresponding seen release artifacts"""
         return {
             (
                 '0805nexter-1.1.0.zip',
                 '52cd128ad3afe539478abc7440d4b043384295fbe6b0958a237cb6d926465035'  # noqa
             ): b'L\x99\x89\x1f\x93\xb8\x14P8Ww#Z7\xb5\xe9f\xdd\x15q',
             (
                 '0805nexter-1.2.0.zip',
                 '49785c6ae39ea511b3c253d7621c0b1b6228be2f965aca8a491e6b84126d0709'  # noqa
             ): b'\xe4E\xdaM\xa2+1\xbf\xeb\xb6\xff\xc48=\xbf\x83\x9a\x07M!',
         }


 class LoaderNoNewChangesSinceLastVisitITest(BaseLoaderITest):
     """This scenario makes use of the incremental nature of the loader.

     If nothing changes in between visits, the snapshot for the visit
     must stay the same as the first visit.
""" def setUp(self, project_name='0805nexter', dummy_pypi_instance='https://dummy.org'): super().setUp(project_name, dummy_pypi_instance) self.loader = PyPILoaderWithSnapshot(project_name=project_name) @istest def load(self): """Load a PyPI origin without new changes results in 1 same snapshot """ # when self.loader.load( self._project, self._origin_url, self._project_metadata_url) # then self.assertEquals(len(self.loader.all_contents), 0) self.assertEquals(len(self.loader.all_directories), 0) self.assertEquals(len(self.loader.all_revisions), 0) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_snapshots), 1) self.assertContentsOk([]) self.assertDirectoriesOk([]) self.assertRevisionsOk(expected_revisions={}) - expected_revisions = { - '4c99891f93b81450385777235a37b5e966dd1571': '05219ba38bc542d4345d5638af1ed56c7d43ca7d', # noqa - 'e445da4da22b31bfebb6ffc4383dbf839a074d21': 'b178b66bd22383d5f16f4f5c923d39ca798861b4', # noqa - } expected_snapshot_id = 'f456b03e8bf1920d64b00df234b1efedc25b6c93' - self.assertSnapshotOk('f456b03e8bf1920d64b00df234b1efedc25b6c93', - expected_revisions) + expected_branches = { + '0805nexter-1.1.0.zip': { + 'target': '4c99891f93b81450385777235a37b5e966dd1571', + 'target_type': 'revision', + }, + '0805nexter-1.2.0.zip': { + 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', + 'target_type': 'revision', + }, + } + self.assertSnapshotOk(expected_snapshot_id, expected_branches) _id = hashutil.hash_to_hex(self.loader._last_snapshot()['id']) self.assertEquals(expected_snapshot_id, _id) class LoaderNewChangesSinceLastVisitITest(BaseLoaderITest): """In this scenario, a visit has already taken place. An existing snapshot exists. This time, the PyPI project has changed, a new release (with 1 new release artifact) has been uploaded. The old releases did not change. The visit results in a new snapshot. The new snapshot shares the same history as prior visit's snapshot. It holds a new branch targetting the new revision. 
""" def setUp(self, project_name='0805nexter', dummy_pypi_instance='https://dummy.org'): super().setUp(project_name, dummy_pypi_instance) self.loader = PyPILoaderWithSnapshot( project_name=project_name, json_filename='0805nexter+new-made-up-release.json') @istest def load(self): """Load a PyPI origin with changes results in 1 new snapshot """ # when self.loader.load( self._project, self._origin_url, self._project_metadata_url) # then self.assertEquals( len(self.loader.all_contents), 4, "3 + 1 new content (only change between 1.2.0 and 1.3.0 archives)") self.assertEquals(len(self.loader.all_directories), 2) self.assertEquals( len(self.loader.all_revisions), 1, "This results in 1 new revision targetting that new directory id") self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_snapshots), 1) expected_contents = [ '92689fa2b7fb4d4fc6fb195bf73a50c87c030639', # new one '405859113963cb7a797642b45f171d6360425d16', '83ecf6ec1114fd260ca7a833a2d165e71258c338', 'e5686aa568fdb1d19d7f1329267082fe40482d31', ] self.assertContentsOk(expected_contents) expected_directories = [ 'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a', '52604d46843b898f5a43208045d09fcf8731631b', ] self.assertDirectoriesOk(expected_directories) expected_revisions = { 'fb46e49605b0bbe69f8c53d315e89370e7c6cb5d': 'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a', # noqa } self.assertRevisionsOk(expected_revisions) old_revisions = { '4c99891f93b81450385777235a37b5e966dd1571': '05219ba38bc542d4345d5638af1ed56c7d43ca7d', # noqa 'e445da4da22b31bfebb6ffc4383dbf839a074d21': 'b178b66bd22383d5f16f4f5c923d39ca798861b4', # noqa } for rev, dir_id in old_revisions.items(): expected_revisions[rev] = dir_id expected_snapshot_id = 'e5beda90e9ddbc8672734ed172246b06fcbc6827' - self.assertSnapshotOk(expected_snapshot_id, expected_revisions) + expected_branches = { + '0805nexter-1.1.0.zip': { + 'target': '4c99891f93b81450385777235a37b5e966dd1571', + 'target_type': 'revision', + }, + '0805nexter-1.2.0.zip': { + 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', + 'target_type': 'revision', + }, + '0805nexter-1.3.0.zip': { + 'target': 'fb46e49605b0bbe69f8c53d315e89370e7c6cb5d', + 'target_type': 'revision', + }, + } + + self.assertSnapshotOk(expected_snapshot_id, expected_branches) _id = hashutil.hash_to_hex(self.loader._last_snapshot()['id']) self.assertNotEqual(expected_snapshot_id, _id) class PyPILoaderWithSnapshot2(TestPyPILoader): """This loader provides a snapshot and lists corresponding seen release artifacts. 
""" def _last_snapshot(self): """Return last visited snapshot""" return { 'id': b"\xe5\xbe\xda\x90\xe9\xdd\xbc\x86rsN\xd1r$k\x06\xfc\xbch'", 'branches': { b'0805nexter-1.1.0.zip': { 'target': b'L\x99\x89\x1f\x93\xb8\x14P8Ww#Z7\xb5\xe9f\xdd\x15q', # noqa 'target_type': 'revision' }, b'0805nexter-1.2.0.zip': { 'target': b'\xe4E\xdaM\xa2+1\xbf\xeb\xb6\xff\xc48=\xbf\x83\x9a\x07M!', # noqa 'target_type': 'revision' }, b'0805nexter-1.3.0.zip': { 'target': b'\xfbF\xe4\x96\x05\xb0\xbb\xe6\x9f\x8cS\xd3\x15\xe8\x93p\xe7\xc6\xcb]', # noqa 'target_type': 'revision' } } } def _known_artifacts(self, last_snapshot): """Map previously seen release artifacts to their revision""" return { ( '0805nexter-1.1.0.zip', '52cd128ad3afe539478abc7440d4b043384295fbe6b0958a237cb6d926465035' # noqa ): b'L\x99\x89\x1f\x93\xb8\x14P8Ww#Z7\xb5\xe9f\xdd\x15q', ( '0805nexter-1.2.0.zip', '49785c6ae39ea511b3c253d7621c0b1b6228be2f965aca8a491e6b84126d0709' # noqa ): b'\xe4E\xdaM\xa2+1\xbf\xeb\xb6\xff\xc48=\xbf\x83\x9a\x07M!', ( '0805nexter-1.3.0.zip', '7097c49fb8ec24a7aaab54c3dbfbb5a6ca1431419d9ee0f6c363d9ad01d2b8b1' # noqa ): b'\xfbF\xe4\x96\x05\xb0\xbb\xe6\x9f\x8cS\xd3\x15\xe8\x93p\xe7\xc6\xcb]', # noqa } class LoaderChangesOldReleaseArtifactRemovedSinceLastVisit(BaseLoaderITest): """In this scenario, a visit has already taken place. An existing snapshot exists. The PyPI project has changed: - a new release has been uploaded - an older one has been removed The visit should result in a new snapshot. Such snapshot shares some of the same branches as prior visit (but not all): - new release artifact branch exists - old release artifact branch has been removed - the other unchanged release artifact branches are left unchanged """ def setUp(self, project_name='0805nexter', dummy_pypi_instance='https://dummy.org'): super().setUp(project_name, dummy_pypi_instance) self.loader = PyPILoaderWithSnapshot2( project_name=project_name, json_filename='0805nexter-unpublished-release.json') @istest def load(self): """Load PyPI origin with removed artifact + changes ~> 1 new snapshot """ # when self.loader.load( self._project, self._origin_url, self._project_metadata_url) # then self.assertEquals( len(self.loader.all_contents), 4, "3 + 1 new content (only change between 1.3.0 and 1.4.0 archives)") self.assertEquals(len(self.loader.all_directories), 2) self.assertEquals( len(self.loader.all_revisions), 1, "This results in 1 new revision targetting that new directory id") self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_snapshots), 1) expected_contents = [ 'e2d68a197e3a3ad0fc6de28749077892c2148043', # new one '405859113963cb7a797642b45f171d6360425d16', '83ecf6ec1114fd260ca7a833a2d165e71258c338', 'e5686aa568fdb1d19d7f1329267082fe40482d31', ] self.assertContentsOk(expected_contents) expected_directories = [ 'a2b7621f3e52eb3632657f6e3436bd08202db56f', # new one '770e21215ecac53cea331d8ea4dc0ffc9d979367', ] self.assertDirectoriesOk(expected_directories) expected_revisions = { # 1.4.0 '5e91875f096ac48c98d74acf307439a3490f2827': '770e21215ecac53cea331d8ea4dc0ffc9d979367', # noqa } self.assertRevisionsOk(expected_revisions) - old_revisions = { - # 1.2.0 - 'e445da4da22b31bfebb6ffc4383dbf839a074d21': 'b178b66bd22383d5f16f4f5c923d39ca798861b4', # noqa - # 1.3.0 - 'fb46e49605b0bbe69f8c53d315e89370e7c6cb5d': 'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a', # noqa - } - for rev, dir_id in old_revisions.items(): - expected_revisions[rev] = dir_id - expected_snapshot_id = 'fb192f35397812776377fa758e0ba4cf20a4cf5d' - 
+        expected_branches = {
+            '0805nexter-1.2.0.zip': {
+                'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
+                'target_type': 'revision',
+            },
+            '0805nexter-1.3.0.zip': {
+                'target': 'fb46e49605b0bbe69f8c53d315e89370e7c6cb5d',
+                'target_type': 'revision',
+            },
+            '0805nexter-1.4.0.zip': {
+                'target': '5e91875f096ac48c98d74acf307439a3490f2827',
+                'target_type': 'revision',
+            },
+        }
+        self.assertSnapshotOk(expected_snapshot_id, expected_branches)

         _id = hashutil.hash_to_hex(self.loader._last_snapshot()['id'])
         self.assertNotEqual(expected_snapshot_id, _id)
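
# ----------------------------------------------------------------------
# Editor's sketch (not part of the patch above): the reworked
# assertSnapshotOk no longer counts revisions; it rebuilds the snapshot's
# branch mapping with decoded branch names and hex-encoded targets, then
# compares it against an expected_branches dict. The standalone snippet
# below mirrors that normalization, assuming plain bytes.hex() in place
# of swh.model.hashutil.hash_to_hex and using made-up sample data.

def normalize_branches(snapshot):
    """Decode branch names and hex-encode targets, keeping dangling
    (None) branches as-is, as the new assertSnapshotOk does."""
    return {
        branch.decode('utf-8'): {
            'target': target['target'].hex(),
            'target_type': target['target_type'],
        } if target else target
        for branch, target in snapshot['branches'].items()
    }


if __name__ == '__main__':
    raw_snapshot = {  # hypothetical loader output
        'branches': {
            b'0805nexter-1.1.0.zip': {
                'target': bytes.fromhex(
                    '4c99891f93b81450385777235a37b5e966dd1571'),
                'target_type': 'revision',
            },
        },
    }
    expected_branches = {
        '0805nexter-1.1.0.zip': {
            'target': '4c99891f93b81450385777235a37b5e966dd1571',
            'target_type': 'revision',
        },
    }
    assert normalize_branches(raw_snapshot) == expected_branches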