diff --git a/swh/loader/svn/loader.py b/swh/loader/svn/loader.py index fcae4bd..6214e1e 100644 --- a/swh/loader/svn/loader.py +++ b/swh/loader/svn/loader.py @@ -1,427 +1,454 @@ # Copyright (C) 2015-2016 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Loader in charge of injecting either new or existing svn mirrors to swh-storage. """ import datetime from swh.core import utils from swh.model import git, hashutil from swh.model.git import GitType from swh.loader.core.loader import SWHLoader from . import svn, converters class SvnLoaderException(ValueError): """A wrapper exception to transit the swh_revision onto which the loading failed. """ def __init__(self, e, swh_revision): super().__init__(e) self.swh_revision = swh_revision class BaseSvnLoader(SWHLoader): """Base Svn loader to load one svn repository. There exists 2 different policies: - git-svn one (not for production): cf. GitSvnSvnLoader - SWH one: cf. SWHSvnLoader The main entry point of this is (no need to override it) - def load(self): Inherit this class and then override the following functions: - def build_swh_revision(self, rev, commit, dir_id, parents) This is in charge of converting an svn revision to a compliant swh revision - def process_repository(self) This is in charge of processing the actual svn repository and store the result to swh storage. """ CONFIG_BASE_FILENAME = 'loader/svn.ini' def __init__(self, svn_url, destination_path, origin): super().__init__(origin['id'], logging_class='swh.loader.svn.SvnLoader') self.origin = origin def build_swh_revision(self, rev, commit, dir_id, parents): """Convert an svn revision to an swh one according to the loader's policy (git-svn or swh). 
Args: rev: the svn revision number commit: dictionary with keys 'author_name', 'author_date', 'rev', 'message' dir_id: the hash tree computation parents: the revision's parents Returns: The swh revision """ raise NotImplementedError('This should be overriden by subclass') def process_repository(self): """The main idea of this function is to: - iterate over the svn commit logs - extract the svn commit log metadata - compute the hashes from the current directory down to the file - compute the equivalent swh revision - send all those objects for storage - create an swh occurrence pointing to the last swh revision seen - send that occurrence for storage in swh-storage. """ raise NotImplementedError('This should be implemented in subclass.') def process_svn_revisions(self, svnrepo, revision_start, revision_end, revision_parents): """Process revisions from revision_start to revision_end and send to swh for storage. At each svn revision, checkout the repository, compute the tree hash and blobs and send for swh storage to store. Then computes and yields the swh revision. 
Yields: swh revision """ gen_revs = svnrepo.swh_hash_data_per_revision( revision_start, revision_end) try: swh_revision = {} for rev, nextrev, commit, objects_per_path in gen_revs: # Send the associated contents/directories self.maybe_load_contents( git.objects_per_type(GitType.BLOB, objects_per_path)) self.maybe_load_directories( git.objects_per_type(GitType.TREE, objects_per_path)) # compute the fs tree's checksums dir_id = objects_per_path[b'']['checksums']['sha1_git'] swh_revision = self.build_swh_revision( rev, commit, dir_id, revision_parents[rev]) swh_revision['id'] = git.compute_revision_sha1_git( swh_revision) self.log.debug('rev: %s, swhrev: %s, dir: %s' % ( rev, hashutil.hash_to_hex(swh_revision['id']), hashutil.hash_to_hex(dir_id))) if nextrev: revision_parents[nextrev] = [swh_revision['id']] yield swh_revision except Exception as e: # Wrap the exception with the needed revision raise SvnLoaderException(e, swh_revision={ 'id': swh_revision['id'], 'parents': swh_revision['parents'], 'metadata': swh_revision.get('metadata') }) def process_swh_revisions(self, svnrepo, revision_start, revision_end, revision_parents): """Process and store revision to swh (sent by by blocks of 'revision_packet_size') Returns: The latest revision stored. """ for revisions in utils.grouper( self.process_svn_revisions(svnrepo, revision_start, revision_end, revision_parents), self.config['revision_packet_size']): revs = list(revisions) self.log.info('Processed %s revisions: [%s, ...]' % ( len(revs), hashutil.hash_to_hex(revs[0]['id']))) self.maybe_load_revisions(revs) return revs[-1] def process_swh_occurrence(self, revision, origin): """Process and load the occurrence pointing to the latest revision. """ occ = converters.build_swh_occurrence(revision['id'], origin['id'], datetime.datetime.utcnow()) self.log.debug('occ: %s' % occ) self.maybe_load_occurrences([occ]) def load(self, known_state=None): """Load a svn repository in swh. 
Checkout the svn repository locally in destination_path. Args: - svn_url: svn repository url to import - origin: Dictionary origin - id: origin's id - url: url origin we fetched - type: type of the origin Returns: Dictionary with the following keys: - status: mandatory, the status result as a boolean - stderr: optional when status is True, mandatory otherwise """ try: self.process_repository(known_state) finally: # flush eventual remaining data self.flush() self.svnrepo.clean_fs() return {'status': True} class GitSvnSvnLoader(BaseSvnLoader): """Git-svn like loader (compute hashes a-la git-svn) Notes: This implementation is: - NOT for production - NOT able to deal with update. Default policy: Its default policy is to enrich (or even alter) information at each svn revision. It will: - truncate the timestamp of the svn commit date - alter the user to be an email using the repository's uuid as mailserver (user -> user@) - fills in the gap for empty author with '(no author)' name - remove empty folder (thus not counting them during hash computation) The equivalent git command is: `git svn clone -q --no-metadata` """ def __init__(self, svn_url, destination_path, origin): super().__init__(svn_url, destination_path, origin) # We don't want to persist result in git-svn policy self.config['send_contents'] = False self.config['send_directories'] = False self.config['send_revisions'] = False self.config['send_releases'] = False self.config['send_occurrences'] = False self.svnrepo = svn.GitSvnSvnRepo( svn_url, origin['id'], self.storage, destination_path=destination_path) def build_swh_revision(self, rev, commit, dir_id, parents): """Build the swh revision a-la git-svn. Args: rev: the svn revision commit: the commit metadata dir_id: the upper tree's hash identifier parents: the parents' identifiers Returns: The swh revision corresponding to the svn revision without any extra headers. 
""" return converters.build_gitsvn_swh_revision(rev, commit, dir_id, parents) def process_repository(self, known_state=None): """Load the repository's svn commits and process them as swh hashes. This does not: - deal with update - nor with the potential known state. """ origin = self.origin svnrepo = self.svnrepo # default configuration revision_start = 1 revision_parents = { revision_start: [] } revision_end = svnrepo.head_revision() self.log.info('[revision_start-revision_end]: [%s-%s]' % ( revision_start, revision_end)) if revision_start == revision_end and revision_start is not 1: self.log.info('%s@%s already injected.' % ( svnrepo.remote_url, revision_end)) return {'status': True} self.log.info('Processing %s.' % svnrepo) # process and store revision to swh (sent by by blocks of # 'revision_packet_size') latest_rev = self.process_swh_revisions(svnrepo, revision_start, revision_end, revision_parents) self.process_swh_occurrence(latest_rev, origin) class SWHSvnLoader(BaseSvnLoader): """Swh svn loader is the main implementation destined for production. This implementation is able to deal with update on known svn repository. Default policy: It's to not add any information and be as close as possible from the svn data the server sent its way. The only thing that are added are the swh's revision 'extra_header' to be able to deal with update. """ def __init__(self, svn_url, destination_path, origin): super().__init__(svn_url, destination_path, origin) self.svnrepo = svn.SWHSvnRepo( svn_url, origin['id'], self.storage, destination_path=destination_path) def swh_previous_revision(self): """Retrieve swh's previous revision if any. """ return self.svnrepo.swh_previous_revision() def check_history_not_altered(self, svnrepo, revision_start, swh_rev): """Given a svn repository, check if the history was not tampered with. 
""" revision_id = swh_rev['id'] parents = swh_rev['parents'] hash_data_per_revs = svnrepo.swh_hash_data_at_revision(revision_start) rev = revision_start rev, _, commit, objects_per_path = list(hash_data_per_revs)[0] dir_id = objects_per_path[b'']['checksums']['sha1_git'] swh_revision = self.build_swh_revision(rev, commit, dir_id, parents) swh_revision_id = git.compute_revision_sha1_git(swh_revision) return swh_revision_id == revision_id def build_swh_revision(self, rev, commit, dir_id, parents): """Build the swh revision dictionary. This adds: - the 'synthetic' flag to true - the 'extra_headers' containing the repository's uuid and the svn revision number. Args: rev: the svn revision commit: the commit metadata dir_id: the upper tree's hash identifier parents: the parents' identifiers Returns: The swh revision corresponding to the svn revision. """ return converters.build_swh_revision(rev, commit, self.svnrepo.uuid, dir_id, parents) + def init_from(self, partial_swh_revision, previous_swh_revision): + """Function to determine from where to start from. + + Args: + - partial_swh_revision: A known revision from which + the previous loading did not finish. + - known_previous_revision: A known revision from which the + previous loading did finish. + + Returns: + The revision from which to start or None if nothing (fresh + start). 
+ + """ + if partial_swh_revision and not previous_swh_revision: + return partial_swh_revision + if not partial_swh_revision and previous_swh_revision: + return previous_swh_revision + if partial_swh_revision and previous_swh_revision: + # will determine from which to start from + extra_headers1 = dict( + partial_swh_revision['metadata']['extra_headers']) + extra_headers2 = dict( + previous_swh_revision['metadata']['extra_headers']) + rev_start1 = int(extra_headers1['svn_revision']) + rev_start2 = int(extra_headers2['svn_revision']) + if rev_start1 <= rev_start2: + return previous_swh_revision + return partial_swh_revision + + return None + def process_repository(self, known_state=None): svnrepo = self.svnrepo origin = self.origin # default configuration revision_start = 1 revision_parents = { revision_start: [] } - if known_state: # Deal with a potential known state (it's a revision) - # In some edge case, svn repository with lots of svn - # commits for example, the loader can break. Thus, a - # rescheduling can take place which will try and load - # again the same repository but from a known state. - swh_rev = known_state - else: - # Deal with update - swh_rev = self.swh_previous_revision() + # Check if we already know a previous revision for that origin + swh_rev = self.swh_previous_revision() + # Determine from which known revision to start + swh_rev = self.init_from(known_state, previous_swh_revision=swh_rev) if swh_rev: # Yes, we do. Try and update it. extra_headers = dict(swh_rev['metadata']['extra_headers']) revision_start = int(extra_headers['svn_revision']) revision_parents = { revision_start: swh_rev['parents'], } self.log.debug('svn export --ignore-keywords %s@%s' % ( svnrepo.remote_url, revision_start)) if swh_rev and not self.check_history_not_altered( svnrepo, revision_start, swh_rev): msg = 'History of svn %s@%s history modified. Skipping...' 
% ( # noqa svnrepo.remote_url, revision_start) self.log.warning(msg) return {'status': False, 'stderr': msg} else: # now we know history is ok, we start at next revision revision_start = revision_start + 1 # and the parent becomes the latest known revision for # that repository revision_parents[revision_start] = [swh_rev['id']] revision_end = svnrepo.head_revision() self.log.info('[revision_start-revision_end]: [%s-%s]' % ( revision_start, revision_end)) if revision_start >= revision_end and revision_start != 1: self.log.info('%s@%s already injected.' % ( svnrepo.remote_url, revision_end)) return {'status': True} self.log.info('Processing %s.' % svnrepo) # process and store revision to swh (sent by blocks of # 'revision_packet_size') latest_rev = self.process_swh_revisions(svnrepo, revision_start, revision_end, revision_parents) self.process_swh_occurrence(latest_rev, origin) diff --git a/swh/loader/svn/tests/test_loader.py b/swh/loader/svn/tests/test_loader.py index c1a4fe7..ec3a62c 100644 --- a/swh/loader/svn/tests/test_loader.py +++ b/swh/loader/svn/tests/test_loader.py @@ -1,381 +1,608 @@ # Copyright (C) 2016 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import shutil import subprocess import tempfile import unittest from nose.tools import istest from swh.core import hashutil from swh.loader.svn.loader import GitSvnSvnLoader, SWHSvnLoader # Define loaders with no storage # They'll just accumulate the data in place # Only for testing purposes. class TestSvnLoader: """Mixin class to inhibit the persistence and keep in memory the data sent for storage. cf.
GitSvnLoaderNoStorage, SWHSvnLoaderNoStorage """ def __init__(self, svn_url, destination_path, origin): super().__init__(svn_url, destination_path, origin) # We don't want to persist any result in this test context self.config['send_contents'] = False self.config['send_directories'] = False self.config['send_revisions'] = False self.config['send_releases'] = False self.config['send_occurrences'] = False # Init the state self.all_contents = [] self.all_directories = [] self.all_revisions = [] self.all_releases = [] self.all_occurrences = [] def maybe_load_contents(self, all_contents): self.all_contents.extend(all_contents) def maybe_load_directories(self, all_directories): self.all_directories.extend(all_directories) def maybe_load_revisions(self, all_revisions): self.all_revisions.extend(all_revisions) def maybe_load_releases(self, releases): raise ValueError('If called, the test must break.') def maybe_load_occurrences(self, all_occurrences): self.all_occurrences.extend(all_occurrences) class GitSvnLoaderNoStorage(TestSvnLoader, GitSvnSvnLoader): """A GitSvnLoader with no persistence. Context: Load an svn repository using the git-svn policy. """ def __init__(self, svn_url, destination_path, origin): super().__init__(svn_url, destination_path, origin) class SWHSvnLoaderNoStorage(TestSvnLoader, SWHSvnLoader): """An SWHSVNLoader with no persistence. Context: Load a new svn repository using the swh policy (so no update). """ def swh_previous_revision(self): """We do not know this repository so no revision. """ return None class SWHSvnLoaderUpdateNoStorage(TestSvnLoader, SWHSvnLoader): """An SWHSVNLoader with no persistence. Context: Load a known svn repository using the swh policy. 
We can either: - do nothing since it does not contain any new commit (so no change) - either check its history is not altered and update in consequence by loading the new revision """ def swh_previous_revision(self): """Avoid the storage persistence call and return the expected previous revision for that repository. Check the following for explanation about the hashes: - test_loader.org for (swh policy). - cf. SWHSvnLoaderITTest """ return { 'id': hashutil.hex_to_hash( '4876cb10aec6f708f7466dddf547567b65f6c39c'), 'parents': [hashutil.hex_to_hash( 'a3a577948fdbda9d1061913b77a1588695eadb41')], 'directory': hashutil.hex_to_hash( '0deab3023ac59398ae467fc4bff5583008af1ee2'), 'target_type': 'revision', 'metadata': { 'extra_headers': [ ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], ['svn_revision', '6'] ] } } class SWHSvnLoaderUpdateHistoryAlteredNoStorage(TestSvnLoader, SWHSvnLoader): """An SWHSVNLoader with no persistence. Context: Load a known svn repository using the swh policy with its history altered so we do not update it. """ def swh_previous_revision(self): """Avoid the storage persistence call and return the expected previous revision for that repository. Check the following for explanation about the hashes: - test_loader.org for (swh policy). - cf. SWHSvnLoaderITTest """ return { # Changed the revision id's hash to simulate history altered 'id': hashutil.hex_to_hash( 'badbadbadbadf708f7466dddf547567b65f6c39d'), 'parents': [hashutil.hex_to_hash( 'a3a577948fdbda9d1061913b77a1588695eadb41')], 'directory': hashutil.hex_to_hash( '0deab3023ac59398ae467fc4bff5583008af1ee2'), 'target_type': 'revision', 'metadata': { 'extra_headers': [ ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], ['svn_revision', b'6'] ] } } class BaseTestLoader(unittest.TestCase): """Base test loader class. In its setup, it's uncompressing a local svn mirror to /tmp. 
""" def setUp(self, archive_name='pkg-gourmet.tgz', filename='pkg-gourmet'): self.tmp_root_path = tempfile.mkdtemp() start_path = os.path.dirname(__file__) svn_mirror_repo = os.path.join(start_path, '../../../../..', 'swh-storage-testdata', 'svn-folders', archive_name) # uncompress the sample folder subprocess.check_output( ['tar', 'xvf', svn_mirror_repo, '-C', self.tmp_root_path], ) self.svn_mirror_url = 'file://' + self.tmp_root_path + '/' + filename self.destination_path = os.path.join( self.tmp_root_path, 'working-copy') def tearDown(self): shutil.rmtree(self.tmp_root_path) class GitSvnLoaderITTest(BaseTestLoader): def setUp(self): super().setUp() self.origin = {'id': 1, 'type': 'svn', 'url': 'file:///dev/null'} self.loader = GitSvnLoaderNoStorage( svn_url=self.svn_mirror_url, destination_path=self.destination_path, origin=self.origin) @istest def process_repository(self): """Process a repository with gitsvn policy should be ok.""" # when self.loader.process_repository() # then self.assertEquals(len(self.loader.all_revisions), 6) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_occurrences), 1) last_revision = 'bad4a83737f337d47e0ba681478214b07a707218' # cf. 
test_loader.org for explaining from where those hashes # come from expected_revisions = { # revision hash | directory hash # noqa '22c0fa5195a53f2e733ec75a9b6e9d1624a8b771': '4b825dc642cb6eb9a060e54bf8d69288fbee4904', # noqa '17a631d474f49bbebfdf3d885dcde470d7faafd7': '4b825dc642cb6eb9a060e54bf8d69288fbee4904', # noqa 'c8a9172b2a615d461154f61158180de53edc6070': '4b825dc642cb6eb9a060e54bf8d69288fbee4904', # noqa '7c8f83394b6e8966eb46f0d3416c717612198a4b': '4b825dc642cb6eb9a060e54bf8d69288fbee4904', # noqa '852547b3b2bb76c8582cee963e8aa180d552a15c': 'ab047e38d1532f61ff5c3621202afc3e763e9945', # noqa last_revision: '9bcfc25001b71c333b4b5a89224217de81c56e2e', # noqa } for rev in self.loader.all_revisions: rev_id = hashutil.hash_to_hex(rev['id']) directory_id = hashutil.hash_to_hex(rev['directory']) self.assertEquals(expected_revisions[rev_id], directory_id) occ = self.loader.all_occurrences[0] self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) self.assertEquals(occ['origin'], self.origin['id']) class SWHSvnLoaderNewRepositoryITTest(BaseTestLoader): def setUp(self): super().setUp() self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} self.loader = SWHSvnLoaderNoStorage( svn_url=self.svn_mirror_url, destination_path=self.destination_path, origin=self.origin) @istest def process_repository(self): """Process a new repository with swh policy should be ok. """ # when self.loader.process_repository() # then self.assertEquals(len(self.loader.all_revisions), 6) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_occurrences), 1) last_revision = '4876cb10aec6f708f7466dddf547567b65f6c39c' # cf. 
test_loader.org for explaining from where those hashes # come from expected_revisions = { # revision hash | directory hash '0d7dd5f751cef8fe17e8024f7d6b0e3aac2cfd71': '669a71cce6c424a81ba42b7dc5d560d32252f0ca', # noqa '95edacc8848369d6fb1608e887d6d2474fd5224f': '008ac97a1118560797c50e3392fa1443acdaa349', # noqa 'fef26ea45a520071711ba2b9d16a2985ee837021': '3780effbe846a26751a95a8c95c511fb72be15b4', # noqa '3f51abf3b3d466571be0855dfa67e094f9ceff1b': 'ffcca9b09c5827a6b8137322d4339c8055c3ee1e', # noqa 'a3a577948fdbda9d1061913b77a1588695eadb41': '7dc52cc04c3b8bd7c085900d60c159f7b846f866', # noqa last_revision: '0deab3023ac59398ae467fc4bff5583008af1ee2', # noqa } for rev in self.loader.all_revisions: rev_id = hashutil.hash_to_hex(rev['id']) directory_id = hashutil.hash_to_hex(rev['directory']) self.assertEquals(expected_revisions[rev_id], directory_id) occ = self.loader.all_occurrences[0] self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) self.assertEquals(occ['origin'], self.origin['id']) class SWHSvnLoaderUpdateWithNoChangeITTest(BaseTestLoader): def setUp(self): super().setUp() self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} self.loader = SWHSvnLoaderUpdateNoStorage( svn_url=self.svn_mirror_url, destination_path=self.destination_path, origin=self.origin) @istest def process_repository(self): """Process a known repository with swh policy and no new data should be ok. 
""" # when self.loader.process_repository() # then self.assertEquals(len(self.loader.all_revisions), 0) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_occurrences), 0) class SWHSvnLoaderUpdateWithHistoryAlteredITTest(BaseTestLoader): def setUp(self): # the svn repository pkg-gourmet has been updated with changes super().setUp(archive_name='pkg-gourmet-with-updates.tgz') self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} self.loader = SWHSvnLoaderUpdateHistoryAlteredNoStorage( svn_url=self.svn_mirror_url, destination_path=self.destination_path, origin=self.origin) @istest def process_repository(self): """Process a known repository with swh policy and history altered should stop and do nothing. """ # when self.loader.process_repository() # then # we got the previous run's last revision (rev 6) # so 2 news + 1 old self.assertEquals(len(self.loader.all_revisions), 0) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_occurrences), 0) class SWHSvnLoaderUpdateWithChangesITTest(BaseTestLoader): def setUp(self): # the svn repository pkg-gourmet has been updated with changes super().setUp(archive_name='pkg-gourmet-with-updates.tgz') self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} self.loader = SWHSvnLoaderUpdateNoStorage( svn_url=self.svn_mirror_url, destination_path=self.destination_path, origin=self.origin) @istest def process_repository(self): """Process a known repository with swh policy and new data should yield new revisions and occurrence. """ # when self.loader.process_repository() # then # we got the previous run's last revision (rev 6) # so 2 new self.assertEquals(len(self.loader.all_revisions), 2) self.assertEquals(len(self.loader.all_releases), 0) self.assertEquals(len(self.loader.all_occurrences), 1) last_revision = '38d81702cb28db4f1a6821e64321e5825d1f7fd6' # cf. 
test_loader.org for explaining from where those hashes # come from expected_revisions = { # revision hash | directory hash '7f5bc909c29d4e93d8ccfdda516e51ed44930ee1': '752c52134dcbf2fff13c7be1ce4e9e5dbf428a59', # noqa last_revision: '39c813fb4717a4864bacefbd90b51a3241ae4140', # noqa } for rev in self.loader.all_revisions: rev_id = hashutil.hash_to_hex(rev['id']) directory_id = hashutil.hash_to_hex(rev['directory']) self.assertEquals(expected_revisions[rev_id], directory_id) occ = self.loader.all_occurrences[0] self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) self.assertEquals(occ['origin'], self.origin['id']) + + +class SWHSvnLoaderUpdateWithUnfinishedLoadingChangesITTest(BaseTestLoader): + def setUp(self): + super().setUp(archive_name='pkg-gourmet-with-updates.tgz') + + self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} + + self.loader = SWHSvnLoaderNoStorage( + svn_url=self.svn_mirror_url, + destination_path=self.destination_path, + origin=self.origin) + + @istest + def process_repository(self): + """Process a known repository with swh policy, the previous run did + not finish, so this finishes the loading + + """ + previous_unfinished_revision = { + 'id': hashutil.hex_to_hash( + '4876cb10aec6f708f7466dddf547567b65f6c39c'), + 'parents': [hashutil.hex_to_hash( + 'a3a577948fdbda9d1061913b77a1588695eadb41')], + 'directory': hashutil.hex_to_hash( + '0deab3023ac59398ae467fc4bff5583008af1ee2'), + 'target_type': 'revision', + 'metadata': { + 'extra_headers': [ + ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], + ['svn_revision', '6'] + ] + } + } + # when + self.loader.process_repository( + known_state=previous_unfinished_revision) + + # then + # we got the previous run's last revision (rev 6) + # so 2 new + self.assertEquals(len(self.loader.all_revisions), 2) + self.assertEquals(len(self.loader.all_releases), 0) + self.assertEquals(len(self.loader.all_occurrences), 1) + + last_revision = 
'38d81702cb28db4f1a6821e64321e5825d1f7fd6' + # cf. test_loader.org for explaining from where those hashes + # come from + expected_revisions = { + # revision hash | directory hash + '7f5bc909c29d4e93d8ccfdda516e51ed44930ee1': '752c52134dcbf2fff13c7be1ce4e9e5dbf428a59', # noqa + last_revision: '39c813fb4717a4864bacefbd90b51a3241ae4140', # noqa + } + + for rev in self.loader.all_revisions: + rev_id = hashutil.hash_to_hex(rev['id']) + directory_id = hashutil.hash_to_hex(rev['directory']) + + self.assertEquals(expected_revisions[rev_id], directory_id) + + occ = self.loader.all_occurrences[0] + self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) + self.assertEquals(occ['origin'], self.origin['id']) + + +class SWHSvnLoaderUpdateWithUnfinishedLoadingChangesButOccurrenceDoneITTest( + BaseTestLoader): + def setUp(self): + super().setUp(archive_name='pkg-gourmet-with-updates.tgz') + + self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} + + self.loader = SWHSvnLoaderUpdateNoStorage( + svn_url=self.svn_mirror_url, + destination_path=self.destination_path, + origin=self.origin) + + @istest + def process_repository(self): + """known repository, swh policy, unfinished revision is less recent + than occurrence, we start from last occurrence. 
+ + """ + previous_unfinished_revision = { + 'id': hashutil.hex_to_hash( + 'a3a577948fdbda9d1061913b77a1588695eadb41'), + 'parents': [hashutil.hex_to_hash( + '3f51abf3b3d466571be0855dfa67e094f9ceff1b')], + 'directory': hashutil.hex_to_hash( + '7dc52cc04c3b8bd7c085900d60c159f7b846f866'), + 'target_type': 'revision', + 'metadata': { + 'extra_headers': [ + ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], + ['svn_revision', '5'] + ] + } + } + + # when + self.loader.process_repository( + known_state=previous_unfinished_revision) + + # then + # we got the previous run's last revision (rev 6) + # so 2 new + self.assertEquals(len(self.loader.all_revisions), 2) + self.assertEquals(len(self.loader.all_releases), 0) + self.assertEquals(len(self.loader.all_occurrences), 1) + + last_revision = '38d81702cb28db4f1a6821e64321e5825d1f7fd6' + # cf. test_loader.org for explaining from where those hashes + # come from + expected_revisions = { + # revision hash | directory hash + '7f5bc909c29d4e93d8ccfdda516e51ed44930ee1': '752c52134dcbf2fff13c7be1ce4e9e5dbf428a59', # noqa + last_revision: '39c813fb4717a4864bacefbd90b51a3241ae4140', # noqa + } + + for rev in self.loader.all_revisions: + rev_id = hashutil.hash_to_hex(rev['id']) + directory_id = hashutil.hash_to_hex(rev['directory']) + + self.assertEquals(expected_revisions[rev_id], directory_id) + + occ = self.loader.all_occurrences[0] + self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) + self.assertEquals(occ['origin'], self.origin['id']) + + +class SWHSvnLoaderUpdateLessRecentNoStorage(TestSvnLoader, SWHSvnLoader): + """An SWHSVNLoader with no persistence. + + Context: + Load a known svn repository using the swh policy. + The last occurrence seen is less recent than a previous + unfinished crawl. + + """ + def swh_previous_revision(self): + """Avoid the storage persistence call and return the expected previous + revision for that repository. 
+ + Check the following for explanation about the hashes: + - test_loader.org for (swh policy). + - cf. SWHSvnLoaderITTest + + """ + return { + 'id': hashutil.hex_to_hash( + 'a3a577948fdbda9d1061913b77a1588695eadb41'), + 'parents': [hashutil.hex_to_hash( + '3f51abf3b3d466571be0855dfa67e094f9ceff1b')], + 'directory': hashutil.hex_to_hash( + '7dc52cc04c3b8bd7c085900d60c159f7b846f866'), + 'target_type': 'revision', + 'metadata': { + 'extra_headers': [ + ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], + ['svn_revision', '5'] + ] + } + } + + +class SWHSvnLoaderUnfinishedLoadingChangesSinceLastOccurrenceITTest( + BaseTestLoader): + def setUp(self): + super().setUp(archive_name='pkg-gourmet-with-updates.tgz') + + self.origin = {'id': 2, 'type': 'svn', 'url': 'file:///dev/null'} + + self.loader = SWHSvnLoaderUpdateLessRecentNoStorage( + svn_url=self.svn_mirror_url, + destination_path=self.destination_path, + origin=self.origin) + + @istest + def process_repository(self): + """known repository, swh policy, unfinished revision is more recent + than the last occurrence, we start from the unfinished revision. + + """ + previous_unfinished_revision = { + 'id': hashutil.hex_to_hash( + '4876cb10aec6f708f7466dddf547567b65f6c39c'), + 'parents': [hashutil.hex_to_hash( + 'a3a577948fdbda9d1061913b77a1588695eadb41')], + 'directory': hashutil.hex_to_hash( + '0deab3023ac59398ae467fc4bff5583008af1ee2'), + 'target_type': 'revision', + 'metadata': { + 'extra_headers': [ + ['svn_repo_uuid', '3187e211-bb14-4c82-9596-0b59d67cd7f4'], + ['svn_revision', '6'] + ] + } + } + # when + self.loader.process_repository( + known_state=previous_unfinished_revision) + + # then + # we got the previous run's last revision (rev 6) + # so 2 new + self.assertEquals(len(self.loader.all_revisions), 2) + self.assertEquals(len(self.loader.all_releases), 0) + self.assertEquals(len(self.loader.all_occurrences), 1) + + last_revision = '38d81702cb28db4f1a6821e64321e5825d1f7fd6' + # cf.
test_loader.org for explaining from where those hashes + # come from + expected_revisions = { + # revision hash | directory hash + '7f5bc909c29d4e93d8ccfdda516e51ed44930ee1': '752c52134dcbf2fff13c7be1ce4e9e5dbf428a59', # noqa + last_revision: '39c813fb4717a4864bacefbd90b51a3241ae4140', # noqa + } + + for rev in self.loader.all_revisions: + rev_id = hashutil.hash_to_hex(rev['id']) + directory_id = hashutil.hash_to_hex(rev['directory']) + + self.assertEquals(expected_revisions[rev_id], directory_id) + + occ = self.loader.all_occurrences[0] + self.assertEquals(hashutil.hash_to_hex(occ['target']), last_revision) + self.assertEquals(occ['origin'], self.origin['id'])