diff --git a/swh/loader/svn/loader.py b/swh/loader/svn/loader.py
index 0423615..31dc42a 100644
--- a/swh/loader/svn/loader.py
+++ b/swh/loader/svn/loader.py
@@ -1,813 +1,812 @@
# Copyright (C) 2015-2020  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

"""Loader in charge of injecting either new or existing svn mirrors to
swh-storage.

"""
import os
import pty
import re
import shutil
import tempfile

from mmap import mmap, ACCESS_WRITE
from subprocess import Popen
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

from swh.model import hashutil
from swh.model.model import (
    Content,
    Directory,
    Origin,
    SkippedContent,
    Revision,
    Snapshot,
    SnapshotBranch,
    TargetType,
)
from swh.model import from_disk
from swh.loader.core.loader import BaseLoader
from swh.loader.core.utils import clean_dangling_folders
from swh.storage.algos.snapshot import snapshot_get_latest

from . import svn, converters
from .utils import (
    init_svn_repo_from_dump,
    init_svn_repo_from_archive_dump,
    OutputStream,
)
from .exception import SvnLoaderUneventful
from .exception import SvnLoaderHistoryAltered

DEFAULT_BRANCH = b"HEAD"


def build_swh_snapshot(revision_id, branch=DEFAULT_BRANCH):
    """Build a swh snapshot from the revision id and the branch under
    which to store it.

    """
    return Snapshot(
        branches={
            branch: SnapshotBranch(target=revision_id, target_type=TargetType.REVISION)
        }
    )
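# Illustrative sketch of the snapshot built above (hypothetical revision id,
# mirroring test_build_swh_snapshot in the test suite):
#
#     rev_id = hashutil.hash_to_bytes("3f51abf3b3d466571be0855dfa67e094f9ceff1b")
#     build_swh_snapshot(rev_id)
#     # -> Snapshot(branches={b"HEAD": SnapshotBranch(
#     #        target=rev_id, target_type=TargetType.REVISION)})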
TEMPORARY_DIR_PREFIX_PATTERN = "swh.loader.svn."


class SvnLoader(BaseLoader):
    """Swh svn loader.

    The repository is either remote or local. The loader deals with
    updates of an already previously loaded repository.

    """

    CONFIG_BASE_FILENAME = "loader/svn"

    ADDITIONAL_CONFIG = {
        "temp_directory": ("str", "/tmp"),
        "debug": ("bool", False),  # NOT FOR PRODUCTION; must be False in production
        "check_revision": (
            "dict",
            {
                "status": False,  # do we check the revision?
                "limit": 1000,  # at which pace do we check it?
            },
        ),
    }

    visit_type = "svn"

    def __init__(
        self,
        url,
        origin_url=None,
        visit_date=None,
        destination_path=None,
        swh_revision=None,
        start_from_scratch=False,
    ):
        super().__init__(logging_class="swh.loader.svn.SvnLoader")
        # technical svn uri to act on svn repository
        self.svn_url = url
        # origin url as unique identifier for origin in swh archive
        self.origin_url = origin_url if origin_url else self.svn_url
        self.debug = self.config["debug"]
        self.last_seen_revision = None
        self.temp_directory = self.config["temp_directory"]
        self.done = False
        self.svnrepo = None
        # Revision check is configurable
        check_revision = self.config["check_revision"]
        if check_revision["status"]:
            self.check_revision = check_revision["limit"]
        else:
            self.check_revision = None
        # internal state used to store swh objects
        self._contents = []
        self._skipped_contents = []
        self._directories = []
        self._revisions = []
-        self._snapshot = None
+        self._snapshot: Optional[Snapshot] = None
        self._last_revision = None
        self._visit_status = "full"
        self._load_status = "uneventful"
        self.visit_date = visit_date
        self.destination_path = destination_path
        self.start_from_scratch = start_from_scratch
        self.swh_revision = swh_revision
        self.max_content_length = self.config["max_content_size"]
        self.snapshot = None

    def pre_cleanup(self):
        """Cleanup potential dangling files from prior runs (e.g. OOM killed
        tasks)

        """
        clean_dangling_folders(
            self.temp_directory,
            pattern_check=TEMPORARY_DIR_PREFIX_PATTERN,
            log=self.log,
        )

    def cleanup(self):
        """Clean up the svn repository's working representation on disk.

        """
        if not self.svnrepo:  # could happen if `prepare` fails
            return
        if self.debug:
            self.log.error(
                """NOT FOR PRODUCTION - debug flag activated
Local repository not cleaned up for investigation: %s"""
                % (self.svnrepo.local_url.decode("utf-8"),)
            )
            return
        self.svnrepo.clean_fs()

    def swh_revision_hash_tree_at_svn_revision(self, revision):
        """Compute and return the hash tree at a given svn revision.

        Args:
            revision (int): the svn revision we want to check

        Returns:
            The hash tree directory as bytes.

        """
        local_dirname, local_url = self.svnrepo.export_temporary(revision)
        h = from_disk.Directory.from_disk(path=local_url).hash
        self.svnrepo.clean_fs(local_dirname)
        return h

    def swh_latest_snapshot_revision(
        self,
        origin_url: str,
        previous_swh_revision: Optional[Union[bytes, dict]] = None,
    ) -> Dict[str, Any]:
        """Look for the latest snapshot revision and return it if any.

        Args:
            origin_url: Origin identifier
            previous_swh_revision: possible previous swh revision (either a
                dict or a revision identifier)

        Returns:
            dict: The latest known point in time as a dict with keys (if any):

                'revision': latest visited revision
                'snapshot': latest snapshot

            If no snapshot matching the criteria is found, returns an empty
            dict.

        """
        storage = self.storage
+        latest_snapshot: Optional[Snapshot] = None
-        latest_snapshot_d = {}
        if not previous_swh_revision:
            latest_snapshot = snapshot_get_latest(storage, origin_url)
            if not latest_snapshot:
                return {}
-            latest_snapshot_d = latest_snapshot.to_dict()
            branches = latest_snapshot.branches
            if not branches:
                return {}
            branch = branches.get(DEFAULT_BRANCH)
            if not branch:
                return {}
            target_type = branch.target_type.value
            if target_type != "revision":
                return {}
            previous_swh_revision = branch.target

        if isinstance(previous_swh_revision, dict):
            swh_id = previous_swh_revision["id"]
        else:
            swh_id = previous_swh_revision

        revs = list(storage.revision_get([swh_id]))
        if revs:
-            return {"snapshot": latest_snapshot_d, "revision": revs[0]}
+            return {"snapshot": latest_snapshot, "revision": revs[0]}
        return {}
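    # Sketch of a non-empty return value of swh_latest_snapshot_revision
    # (field values are hypothetical):
    #
    #     {"snapshot": Snapshot(...),   # latest Snapshot object
    #      "revision": {"id": b"...", "parents": (...), "metadata": {...}}}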
    def build_swh_revision(self, rev, commit, dir_id, parents):
        """Build the swh revision dictionary.

        This adds:

        - the 'synthetic' flag set to true
        - the 'extra_headers' containing the repository's uuid and the
          svn revision number

        Args:
            rev (dict): the svn revision
            commit (dict): the commit metadata
            dir_id (bytes): the upper tree's hash identifier
            parents ([bytes]): the parents' identifiers

        Returns:
            The swh revision corresponding to the svn revision.

        """
        return converters.build_swh_revision(
            rev, commit, self.svnrepo.uuid, dir_id, parents
        )

    def check_history_not_altered(self, svnrepo, revision_start, swh_rev):
        """Given a svn repository, check if the history was not tampered with.

        """
        revision_id = swh_rev["id"]
        parents = swh_rev["parents"]
        hash_data_per_revs = svnrepo.swh_hash_data_at_revision(revision_start)

        rev = revision_start
        rev, _, commit, _, root_dir = list(hash_data_per_revs)[0]

        dir_id = root_dir.hash
        swh_revision = self.build_swh_revision(rev, commit, dir_id, parents)
        swh_revision_id = swh_revision.id

        return swh_revision_id == revision_id

    def _init_from(self, partial_swh_revision, previous_swh_revision):
        """Determine from which revision to start.

        Args:
            partial_swh_revision (dict): A known revision from which the
                previous loading did not finish.
            previous_swh_revision (dict): A known revision from which the
                previous loading did finish.

        Returns:
            The revision from which to start, or None for a fresh start.

        """
        if partial_swh_revision and not previous_swh_revision:
            return partial_swh_revision
        if not partial_swh_revision and previous_swh_revision:
            return previous_swh_revision
        if partial_swh_revision and previous_swh_revision:
            # determine which revision to start from
            extra_headers1 = dict(partial_swh_revision["metadata"]["extra_headers"])
            extra_headers2 = dict(previous_swh_revision["metadata"]["extra_headers"])
            rev_start1 = int(extra_headers1["svn_revision"])
            rev_start2 = int(extra_headers2["svn_revision"])
            if rev_start1 <= rev_start2:
                return previous_swh_revision
            return partial_swh_revision
        return None

    def start_from(self, last_known_swh_revision=None, start_from_scratch=False):
        """Determine from where to start the loading.

        Args:
            last_known_swh_revision (dict): Last known swh revision or None
            start_from_scratch (bool): To start loading from scratch or not

        Returns:
            tuple (revision_start, revision_end, revision_parents)

        Raises:
            SvnLoaderHistoryAltered: When a hash divergence has been
                detected (should not happen)
            SvnLoaderUneventful: Nothing changed since last visit

        """
        revision_head = self.svnrepo.head_revision()
        if revision_head == 0:  # empty repository case
            revision_start = 0
            revision_end = 0
        else:  # default configuration
            revision_start = self.svnrepo.initial_revision()
            revision_end = revision_head

        revision_parents = {revision_start: []}

        if not start_from_scratch:
            # Check if we already know a previous revision for that origin
            if self.latest_snapshot:
                swh_rev = self.latest_snapshot["revision"]
            else:
                swh_rev = None

            # Determine from which known revision to start
            swh_rev = self._init_from(
                last_known_swh_revision, previous_swh_revision=swh_rev
            )

            if swh_rev:  # Yes, we know a previous revision. Try and update it.
                extra_headers = dict(swh_rev["metadata"]["extra_headers"])
                revision_start = int(extra_headers["svn_revision"])
                revision_parents = {
                    revision_start: swh_rev["parents"],
                }

                self.log.debug(
                    "svn export --ignore-keywords %s@%s"
                    % (self.svnrepo.remote_url, revision_start)
                )

                if swh_rev and not self.check_history_not_altered(
                    self.svnrepo, revision_start, swh_rev
                ):
                    msg = "History of svn %s@%s altered. Skipping..." % (
                        self.svnrepo.remote_url,
                        revision_start,
                    )
                    raise SvnLoaderHistoryAltered(msg)

                # now we know history is ok, we start at the next revision
                revision_start = revision_start + 1
                # and the parent becomes the latest known revision for
                # that repository
                revision_parents[revision_start] = [swh_rev["id"]]

        if revision_start > revision_end and revision_start != 1:
            msg = "%s@%s already injected." % (self.svnrepo.remote_url, revision_end)
            raise SvnLoaderUneventful(msg)

        self.log.info(
            "Processing revisions [%s-%s] for %s"
            % (revision_start, revision_end, self.svnrepo)
        )

        return revision_start, revision_end, revision_parents
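    # Contract sketch for start_from (ids are hypothetical): for a repository
    # previously loaded up to r6 whose head is now r11, it returns
    # (7, 11, {7: [<swh id of the revision that loaded r6>]}).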
    def _check_revision_divergence(self, count, rev, dir_id):
        """Check for hash revision computation divergence.

        The rationale behind this is that svn can trigger unknown edge cases
        (mixed CRLF, svn properties, etc.). Those are not always easy to
        spot. Adding a check will help in spotting missing edge cases.

        Args:
            count (int): The number of revisions done so far
            rev (dict): The actual revision we are computing from
            dir_id (bytes): The actual directory for the given revision

        Returns:
            None if no hash divergence is detected

        Raises:
            ValueError if a hash divergence is detected

        """
        if (count % self.check_revision) == 0:  # hash computation check
            self.log.debug("Checking hash computations on revision %s..." % rev)
            checked_dir_id = self.swh_revision_hash_tree_at_svn_revision(rev)
            if checked_dir_id != dir_id:
                err = (
                    "Hash tree computation divergence detected "
                    "(%s != %s), stopping!"
                    % (
                        hashutil.hash_to_hex(dir_id),
                        hashutil.hash_to_hex(checked_dir_id),
                    )
                )
                raise ValueError(err)

    def process_svn_revisions(
        self, svnrepo, revision_start, revision_end, revision_parents
    ) -> Iterator[
        Tuple[List[Content], List[SkippedContent], List[Directory], Revision]
    ]:
        """Process svn revisions from revision_start to revision_end.

        At each svn revision, apply new diffs and simultaneously
        compute swh hashes. This yields those computed swh hashes as
        a tuple (contents, directories, revision).

        Note that at every `self.check_revision`, a supplementary
        check takes place to check for hash-tree divergence (related
        T570).

        Yields:
            tuple (contents, directories, revision) of dict as a
            dictionary with keys sha1_git, sha1, etc...

        Raises:
            ValueError in case of a hash divergence detection

        """
        gen_revs = svnrepo.swh_hash_data_per_revision(revision_start, revision_end)
        swh_revision = None
        count = 0
        for rev, nextrev, commit, new_objects, root_directory in gen_revs:
            count += 1
            # Send the associated contents/directories
            _contents, _skipped_contents, _directories = new_objects

            # compute the fs tree's checksums
            dir_id = root_directory.hash
            swh_revision = self.build_swh_revision(
                rev, commit, dir_id, revision_parents[rev]
            )

            self.log.debug(
                "rev: %s, swhrev: %s, dir: %s"
                % (
                    rev,
                    hashutil.hash_to_hex(swh_revision.id),
                    hashutil.hash_to_hex(dir_id),
                )
            )

            if self.check_revision:
                self._check_revision_divergence(count, rev, dir_id)

            if nextrev:
                revision_parents[nextrev] = [swh_revision.id]

            yield _contents, _skipped_contents, _directories, swh_revision

    def prepare_origin_visit(self, *args, **kwargs):
        self.origin = Origin(url=self.origin_url if self.origin_url else self.svn_url)

    def prepare(self, *args, **kwargs):
        if self.swh_revision:
            self.last_known_swh_revision = self.swh_revision
        else:
            self.last_known_swh_revision = None

        self.latest_snapshot = self.swh_latest_snapshot_revision(
            self.origin_url, self.last_known_swh_revision
        )

        if self.destination_path:
            local_dirname = self.destination_path
        else:
            local_dirname = tempfile.mkdtemp(
                suffix="-%s" % os.getpid(),
                prefix=TEMPORARY_DIR_PREFIX_PATTERN,
                dir=self.temp_directory,
            )

        self.svnrepo = svn.SvnRepo(
            self.svn_url, self.origin_url, local_dirname, self.max_content_length
        )
        try:
            revision_start, revision_end, revision_parents = self.start_from(
                self.last_known_swh_revision, self.start_from_scratch
            )
            self.swh_revision_gen = self.process_svn_revisions(
                self.svnrepo, revision_start, revision_end, revision_parents
            )
        except SvnLoaderUneventful as e:
            self.log.warning(e)
            if self.latest_snapshot and "snapshot" in self.latest_snapshot:
                self._snapshot = self.latest_snapshot["snapshot"]
            self.done = True
        except SvnLoaderHistoryAltered as e:
            self.log.error(e)
            self.done = True
            self._visit_status = "partial"
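    # Note (assumed driver behavior from swh-loader-core): BaseLoader.load()
    # alternates calls to fetch_data() and store_data() below in a loop,
    # stopping once fetch_data() returns False.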
    def fetch_data(self):
        """Fetch svn revision information.

        This will apply svn revisions as patches on disk and, at the same
        time, compute the swh hashes.

        In effect, fetch_data fetches those data and computes the
        necessary swh objects. They are then stored in the internal state
        instance variables (initialized in `__init__`).

        It is up to `store_data` to actually send those objects to the
        storage.

        Returns:
            bool: True to continue fetching data (next svn revision),
            False to stop.

        """
        data = None
        if self.done:
            return False

        try:
            data = next(self.swh_revision_gen)
            self._load_status = "eventful"
        except StopIteration:
            self.done = True
            self._visit_status = "full"
            return False  # Stopping iteration
        except Exception as e:  # svn:external, hash divergence, i/o error...
            self.log.exception(e)
            self.done = True
            self._visit_status = "partial"
            return False  # Stopping iteration

        self._contents, self._skipped_contents, self._directories, rev = data
        if rev:
            self._last_revision = rev
            self._revisions.append(rev)
        return True  # next svn revision

    def store_data(self):
        """We store the data accumulated in the internal instance variables.

        If the iteration over the svn revisions is done, we create the
        snapshot and flush the data to storage.

        This also resets the internal instance variable state.

        """
        self.storage.skipped_content_add(self._skipped_contents)
        self.storage.content_add(self._contents)
        self.storage.directory_add(self._directories)
        self.storage.revision_add(self._revisions)

        if self.done:  # finish line, snapshot!
            self.snapshot = self.generate_and_load_snapshot(
                revision=self._last_revision, snapshot=self._snapshot
            )
            self.flush()
            self.loaded_snapshot_id = self.snapshot.id

        # reset internal state for next iteration
        self._revisions = []

    def generate_and_load_snapshot(
        self, revision: Optional[Revision] = None, snapshot: Optional[Snapshot] = None
    ) -> Snapshot:
        """Create the snapshot either from the existing revision or snapshot.

        The (supposedly new) revision has priority over the (supposedly
        existing) snapshot.

        Args:
            revision (Optional[Revision]): Last revision seen if any (None
                by default)
            snapshot (Optional[Snapshot]): Snapshot to use if any (None by
                default)

        Returns:
            Snapshot: The newly created snapshot

        """
        if revision:  # Priority to the revision
            snap = build_swh_snapshot(revision.id)
        elif snapshot:  # Fallback to prior snapshot
            snap = snapshot
        else:
            raise ValueError(
                "generate_and_load_snapshot called with null revision and snapshot!"
            )
        self.log.debug("snapshot: %s" % snap)
        self.storage.snapshot_add([snap])
        return snap

    def load_status(self):
        return {
            "status": self._load_status,
        }

    def visit_status(self):
        return self._visit_status
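# Minimal usage sketch (assumed driver code; the configuration described by
# CONFIG_BASE_FILENAME/ADDITIONAL_CONFIG above must be in place):
#
#     loader = SvnLoader("svn://example.org/repo")
#     result = loader.load()  # e.g. {'status': 'eventful'}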
""" def __init__( self, url, archive_path, origin_url=None, destination_path=None, swh_revision=None, start_from_scratch=None, visit_date=None, ): super().__init__( url, origin_url=origin_url, destination_path=destination_path, swh_revision=swh_revision, start_from_scratch=start_from_scratch, visit_date=visit_date, ) self.archive_path = archive_path self.temp_dir = None self.repo_path = None def prepare(self, *args, **kwargs): self.log.info("Archive to mount and load %s" % self.archive_path) self.temp_dir, self.repo_path = init_svn_repo_from_archive_dump( self.archive_path, prefix=TEMPORARY_DIR_PREFIX_PATTERN, suffix="-%s" % os.getpid(), root_dir=self.temp_directory, ) super().prepare(*args, **kwargs) def cleanup(self): super().cleanup() if self.temp_dir and os.path.exists(self.temp_dir): msg = "Clean up temporary directory dump %s for project %s" % ( self.temp_dir, os.path.basename(self.repo_path), ) self.log.debug(msg) shutil.rmtree(self.temp_dir) class SvnLoaderFromRemoteDump(SvnLoader): """ Create a subversion repository dump using the svnrdump utility, mount it locally and load the repository from it. """ def __init__( self, url, origin_url=None, destination_path=None, swh_revision=None, start_from_scratch=False, visit_date=None, ): super().__init__( url, origin_url=origin_url, destination_path=destination_path, swh_revision=swh_revision, start_from_scratch=start_from_scratch, visit_date=visit_date, ) self.temp_dir = tempfile.mkdtemp(dir=self.temp_directory) self.repo_path = None self.truncated_dump = False def get_last_loaded_svn_rev(self, svn_url): """ Check if the svn repository has already been visited and return the last loaded svn revision number or -1 otherwise. """ last_loaded_svn_rev = -1 try: origin = self.storage.origin_get({"url": svn_url}) last_swh_rev = self.swh_latest_snapshot_revision(origin["url"])["revision"] last_swh_rev_headers = dict(last_swh_rev["metadata"]["extra_headers"]) last_loaded_svn_rev = int(last_swh_rev_headers["svn_revision"]) except Exception: pass return last_loaded_svn_rev def dump_svn_revisions(self, svn_url, last_loaded_svn_rev=-1): """ Generate a subversion dump file using the svnrdump tool. If the svnrdump command failed somehow, the produced dump file is analyzed to determine if a partial loading is still feasible. """ # Build the svnrdump command line svnrdump_cmd = ["svnrdump", "dump", svn_url] # Launch the svnrdump command while capturing stderr as # successfully dumped revision numbers are printed to it dump_temp_dir = tempfile.mkdtemp(dir=self.temp_dir) dump_name = "".join(c for c in svn_url if c.isalnum()) dump_path = "%s/%s.svndump" % (dump_temp_dir, dump_name) stderr_lines = [] self.log.debug("Executing %s" % " ".join(svnrdump_cmd)) with open(dump_path, "wb") as dump_file: stderr_r, stderr_w = pty.openpty() svnrdump = Popen(svnrdump_cmd, stdout=dump_file, stderr=stderr_w) os.close(stderr_w) stderr_stream = OutputStream(stderr_r) readable = True while readable: lines, readable = stderr_stream.read_lines() stderr_lines += lines for line in lines: self.log.debug(line) svnrdump.wait() os.close(stderr_r) if svnrdump.returncode == 0: return dump_path # There was an error but it does not mean that no revisions # can be loaded. 
        # Get the stderr line with the latest dumped revision
        last_dumped_rev = None
        if len(stderr_lines) > 1:
            last_dumped_rev = stderr_lines[-2]

        if last_dumped_rev:
            # Get the latest dumped revision number
            matched_rev = re.search(".*revision ([0-9]+)", last_dumped_rev)
            last_dumped_rev = int(matched_rev.group(1)) if matched_rev else -1
            # Check if revisions inside the dump file can be loaded anyway
            if last_dumped_rev > last_loaded_svn_rev:
                self.log.debug(
                    (
                        "svnrdump did not dump all expected revisions "
                        "but revisions range %s:%s are available in "
                        "the generated dump file and will be loaded "
                        "into the archive."
                    )
                    % (last_loaded_svn_rev + 1, last_dumped_rev)
                )
                # Truncate the dump file after the last successfully dumped
                # revision to avoid the loading of corrupted data
                self.log.debug(
                    (
                        "Truncating dump file after the last "
                        "successfully dumped revision (%s) to avoid "
                        "the loading of corrupted data"
                    )
                    % last_dumped_rev
                )

                with open(dump_path, "r+b") as f:
                    with mmap(f.fileno(), 0, access=ACCESS_WRITE) as s:
                        pattern = (
                            "Revision-number: %s" % (last_dumped_rev + 1)
                        ).encode()
                        n = s.rfind(pattern)
                        if n != -1:
                            s.resize(n)
                self.truncated_dump = True
                return dump_path
            elif last_dumped_rev != -1:
                raise Exception(
                    (
                        "Last dumped subversion revision (%s) is "
                        "less than the last one loaded into the "
                        "archive (%s)."
                    )
                    % (last_dumped_rev, last_loaded_svn_rev)
                )

        raise Exception(
            "An error occurred when running svnrdump and "
            "no exploitable dump file has been generated."
        )

    def prepare(self, *args, **kwargs):
        # First, check if previous revisions have been loaded for the
        # subversion origin, and get the number of the last one
        last_loaded_svn_rev = self.get_last_loaded_svn_rev(self.svn_url)

        # Then try to generate a dump file containing relevant svn revisions
        # to load; an exception will be thrown if something wrong happened
        dump_path = self.dump_svn_revisions(self.svn_url, last_loaded_svn_rev)

        # Finally, mount the dump and load the repository
        self.log.debug('Mounting dump file with "svnadmin load".')
        _, self.repo_path = init_svn_repo_from_dump(
            dump_path,
            prefix=TEMPORARY_DIR_PREFIX_PATTERN,
            suffix="-%s" % os.getpid(),
            root_dir=self.temp_dir,
        )
        self.svn_url = "file://%s" % self.repo_path
        super().prepare(*args, **kwargs)

    def cleanup(self):
        super().cleanup()
        if self.temp_dir and os.path.exists(self.temp_dir):
            shutil.rmtree(self.temp_dir)

    def visit_status(self):
        if self.truncated_dump:
            return "partial"
        else:
            return super().visit_status()
diff --git a/swh/loader/svn/tests/test_loader.py b/swh/loader/svn/tests/test_loader.py
index f1d4c3f..4010ae2 100644
--- a/swh/loader/svn/tests/test_loader.py
+++ b/swh/loader/svn/tests/test_loader.py
@@ -1,817 +1,828 @@
# Copyright (C) 2016-2020  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import copy
import os
import subprocess

from typing import Optional

from swh.loader.core.tests import BaseLoaderTest
from swh.loader.tests.common import assert_last_visit_matches
from swh.loader.package.tests.common import check_snapshot, get_stats

from swh.loader.svn.loader import (
    DEFAULT_BRANCH,
    SvnLoader,
    SvnLoaderFromRemoteDump,
    build_swh_snapshot,
)
from swh.model import hashutil
from swh.model.model import Origin, Snapshot


def test_build_swh_snapshot():
    rev_id = hashutil.hash_to_bytes("3f51abf3b3d466571be0855dfa67e094f9ceff1b")
    snap = build_swh_snapshot(rev_id)

    assert isinstance(snap, Snapshot)
    expected_snapshot = Snapshot.from_dict(
        {"branches": {DEFAULT_BRANCH: {"target": rev_id, "target_type": "revision",}}}
    )
    assert snap == expected_snapshot


_LOADER_TEST_CONFIG = {
    "check_revision": {"limit": 100, "status": False},
    "debug": False,
    "log_db": "dbname=softwareheritage-log",
    "save_data": False,
    "save_data_path": "",
    "temp_directory": "/tmp",
    "max_content_size": 100 * 1024 * 1024,
    "storage": {
        "cls": "pipeline",
        "steps": [
            {"cls": "retry",},
            {"cls": "filter",},
            {
                "cls": "buffer",
                "min_batch_size": {
                    "content": 10000,
                    "content_bytes": 1073741824,
                    "directory": 2500,
                    "revision": 10,
                    "release": 100,
                },
            },
            {"cls": "memory"},
        ],
    },
}

GOURMET_SNAPSHOT = hashutil.hash_to_bytes("889cacc2731e3312abfb2b1a0c18ade82a949e07")

GOURMET_FLAG_SNAPSHOT = hashutil.hash_to_bytes(
    "0011223344556677889900112233445566778899"
)

GOURMET_UPDATES_SNAPSHOT = hashutil.hash_to_bytes(
    "11086d15317014e43d2438b7ffc712c44f1b8afe"
)

GOURMET_EXTERNALS_SNAPSHOT = hashutil.hash_to_bytes(
    "19cb68d0a3f22372e2b7017ea5e2a2ea5ae3e09a"
)

GOURMET_EDGE_CASES_SNAPSHOT = hashutil.hash_to_bytes(
    "18e60982fe521a2546ab8c3c73a535d80462d9d0"
)

GOURMET_WRONG_LINKS_SNAPSHOT = hashutil.hash_to_bytes(
    "b17f38acabb90f066dedd30c29f01a02af88a5c4"
)

MEDIAWIKI_SNAPSHOT = hashutil.hash_to_bytes("d6d6e9703f157c5702d9a4a5dec878926ed4ab76")

PYANG_SNAPSHOT = hashutil.hash_to_bytes("6d9590de11b00a5801de0ff3297c5b44bbbf7d24")


class SvnLoaderTest(SvnLoader):
    """An SvnLoader with no persistence.

    Context:
        Load a new svn repository using the swh policy (so no update).

    """

    def __init__(
        self,
        url,
        last_snp_rev={},
        destination_path=None,
        start_from_scratch=False,
        swh_revision=None,
    ):
        super().__init__(
            url,
            destination_path=destination_path,
            start_from_scratch=start_from_scratch,
            swh_revision=swh_revision,
        )
        self.origin = Origin(url=url)
        self.last_snp_rev = last_snp_rev

    def parse_config_file(self, *args, **kwargs):
        return _LOADER_TEST_CONFIG

    def swh_latest_snapshot_revision(self, origin_url, prev_swh_revision=None):
        """Avoid the storage persistence call and return the expected
        previous revision for that repository.

        Check the following for an explanation about the hashes:

        - test_loader.org (swh policy)
        - cf. SvnLoaderTest

        """
        return self.last_snp_rev
""" def setUp( self, archive_name="pkg-gourmet.tgz", filename="pkg-gourmet", loader=None, snapshot=None, type="default", start_from_scratch=False, swh_revision=None, ): super().setUp( archive_name=archive_name, filename=filename, prefix_tmp_folder_name="swh.loader.svn.", start_path=os.path.dirname(__file__), ) self.svn_mirror_url = self.repo_url if type == "default": loader_test_class = SvnLoaderTest else: loader_test_class = SvnLoaderTestFromRemoteDump if loader: self.loader = loader elif snapshot: self.loader = loader_test_class( self.svn_mirror_url, destination_path=self.destination_path, start_from_scratch=start_from_scratch, swh_revision=swh_revision, last_snp_rev=snapshot, ) else: self.loader = loader_test_class( self.svn_mirror_url, destination_path=self.destination_path, start_from_scratch=start_from_scratch, swh_revision=swh_revision, ) self.storage = self.loader.storage def prepare_repository_from_archive( archive_path: str, filename: Optional[str] = None, tmp_path: str = "/tmp" ) -> str: # uncompress folder/repositories/dump for the loader to ingest subprocess.check_output(["tar", "xf", archive_path, "-C", tmp_path]) # build the origin url (or some derivative form) _fname = filename if filename else os.path.basename(archive_path) repo_url = f"file://{tmp_path}/{_fname}" return repo_url def test_loader_svn_new_visit(swh_config, datadir, tmp_path): """Eventful visit should yield 1 snapshot""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(repo_url, destination_path=tmp_path) assert loader.load() == {"status": "eventful"} assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT, ) stats = get_stats(loader.storage) assert stats == { "content": 19, "directory": 17, "origin": 1, "origin_visit": 1, "person": 1, "release": 0, "revision": 6, "skipped_content": 0, "snapshot": 1, } expected_snapshot = { "id": GOURMET_SNAPSHOT, "branches": { "HEAD": { "target": "4876cb10aec6f708f7466dddf547567b65f6c39c", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) def test_loader_svn_2_visits_no_change(swh_config, datadir, tmp_path): """Visit multiple times a repository with no change should yield the same snapshot """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(repo_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT, ) # FIXME: This should be uneventful here as there is no change in between visits... assert loader.load() == {"status": "eventful"} visit_status2 = assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot == visit_status2.snapshot stats = get_stats(loader.storage) assert stats["origin_visit"] == 1 + 1 # computed twice the same snapshot assert stats["snapshot"] == 1 # even starting from previous revision... 
def test_loader_svn_new_visit(swh_config, datadir, tmp_path):
    """Eventful visit should yield 1 snapshot"""
    archive_name = "pkg-gourmet"
    archive_path = os.path.join(datadir, f"{archive_name}.tgz")
    repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path)

    loader = SvnLoader(repo_url, destination_path=tmp_path)

    assert loader.load() == {"status": "eventful"}

    assert_last_visit_matches(
        loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT,
    )

    stats = get_stats(loader.storage)
    assert stats == {
        "content": 19,
        "directory": 17,
        "origin": 1,
        "origin_visit": 1,
        "person": 1,
        "release": 0,
        "revision": 6,
        "skipped_content": 0,
        "snapshot": 1,
    }

    expected_snapshot = {
        "id": GOURMET_SNAPSHOT,
        "branches": {
            "HEAD": {
                "target": "4876cb10aec6f708f7466dddf547567b65f6c39c",
                "target_type": "revision",
            }
        },
    }
    check_snapshot(expected_snapshot, loader.storage)


def test_loader_svn_2_visits_no_change(swh_config, datadir, tmp_path):
    """Visiting a repository multiple times with no change in between should
    yield the same snapshot

    """
    archive_name = "pkg-gourmet"
    archive_path = os.path.join(datadir, f"{archive_name}.tgz")
    repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path)

    loader = SvnLoader(repo_url)

    assert loader.load() == {"status": "eventful"}
    visit_status1 = assert_last_visit_matches(
        loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT,
    )

    # FIXME: This should be uneventful here as there is no change in between visits...
    assert loader.load() == {"status": "eventful"}
    visit_status2 = assert_last_visit_matches(
        loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT,
    )
    assert visit_status1.date < visit_status2.date
    assert visit_status1.snapshot == visit_status2.snapshot

    stats = get_stats(loader.storage)
    assert stats["origin_visit"] == 1 + 1  # computed twice the same snapshot
    assert stats["snapshot"] == 1

    # even starting from a previous revision...
    revs = list(
        loader.storage.revision_get(
            [hashutil.hash_to_bytes("95edacc8848369d6fb1608e887d6d2474fd5224f")]
        )
    )
    start_revision = revs[0]
    assert start_revision is not None

    loader = SvnLoader(repo_url, swh_revision=start_revision)
    assert loader.load() == {"status": "eventful"}

    stats = get_stats(loader.storage)
    assert stats["origin_visit"] == 2 + 1
    # ... with no change in the repository, this yields the same snapshot
    assert stats["snapshot"] == 1

    assert_last_visit_matches(
        loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT,
    )


_LAST_SNP_REV = {
    "snapshot": Snapshot.from_dict({"id": GOURMET_FLAG_SNAPSHOT, "branches": {}}),
    "revision": {
        "id": hashutil.hash_to_bytes("4876cb10aec6f708f7466dddf547567b65f6c39c"),
        "parents": (
            hashutil.hash_to_bytes("a3a577948fdbda9d1061913b77a1588695eadb41"),
        ),
        "directory": hashutil.hash_to_bytes("0deab3023ac59398ae467fc4bff5583008af1ee2"),
        "target_type": "revision",
        "metadata": {
            "extra_headers": [
                ["svn_repo_uuid", "3187e211-bb14-4c82-9596-0b59d67cd7f4"],
                ["svn_revision", "6"],
            ]
        },
    },
}


class SvnLoaderTest3(BaseSvnLoaderTest):
    """In this scenario, the dump has been tampered with to modify the
    commit log. This results in a hash divergence, which is detected at
    startup: the loading stops and nothing is ingested.

    """

    def setUp(self):
        last_snp_rev = copy.deepcopy(_LAST_SNP_REV)
        last_snp_rev["snapshot"] = None
        # Changed the revision id's hash to simulate history altered
        last_snp_rev["revision"]["id"] = hashutil.hash_to_bytes(
            "badbadbadbadf708f7466dddf547567b65f6c39d"
        )
        # the svn repository pkg-gourmet has been updated with changes
        super().setUp(
            archive_name="pkg-gourmet-with-updates.tgz", snapshot=last_snp_rev
        )

    def test_load(self):
        """Loading a known repository with altered history should do nothing

        """
        # when
        assert self.loader.load() == {"status": "failed"}

        # then
        # we got the previous run's last revision (rev 6)
        # so 2 new + 1 old
        self.assertCountContents(0)
        self.assertCountDirectories(0)
        self.assertCountRevisions(0)
        self.assertCountReleases(0)
        self.assertCountSnapshots(0)

        self.assertEqual(self.loader.visit_status(), "partial")

        visit_status = assert_last_visit_matches(
            self.storage, self.repo_url, status="partial", type="svn"
        )
        assert visit_status.snapshot is None
""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_initial_url = prepare_repository_from_archive( archive_path, archive_name, tmp_path ) # repo_initial_url becomes the origin_url we want to visit some more below loader = SvnLoader(repo_initial_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_initial_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT, ) archive_path = os.path.join(datadir, "pkg-gourmet-with-updates.tgz") repo_updated_url = prepare_repository_from_archive( archive_path, "pkg-gourmet", tmp_path ) loader = SvnLoader(repo_updated_url, origin_url=repo_initial_url,) assert loader.load() == {"status": "eventful"} visit_status2 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot != visit_status2.snapshot stats = get_stats(loader.storage) assert stats == { "content": 22, "directory": 28, "origin": 1, "origin_visit": 2, "person": 2, "release": 0, "revision": 11, "skipped_content": 0, "snapshot": 2, } expected_snapshot = { "id": GOURMET_UPDATES_SNAPSHOT, "branches": { "HEAD": { "target": "171dc35522bfd17dda4e90a542a0377fb2fc707a", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) # Start from scratch loading yields the same result loader = SvnLoader( repo_updated_url, origin_url=repo_initial_url, start_from_scratch=True ) assert loader.load() == {"status": "eventful"} visit_status3 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT, ) assert visit_status2.date < visit_status3.date assert visit_status3.snapshot == visit_status2.snapshot check_snapshot(expected_snapshot, loader.storage) stats = get_stats(loader.storage) assert stats["origin"] == 1 # always the same visit assert stats["origin_visit"] == 2 + 1 # 1 more visit assert stats["snapshot"] == 2 # no new snapshot def test_loader_svn_visit_start_from_revision(swh_config, datadir, tmp_path): """Starting from existing revision, next visit on changed repo should yield 1 new snapshot. 
""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_initial_url = prepare_repository_from_archive( archive_path, archive_name, tmp_path ) # repo_initial_url becomes the origin_url we want to visit some more below loader = SvnLoader(repo_initial_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_initial_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT, ) revs = list( loader.storage.revision_get( [hashutil.hash_to_bytes("95edacc8848369d6fb1608e887d6d2474fd5224f")] ) ) start_revision = revs[0] assert start_revision is not None archive_path = os.path.join(datadir, "pkg-gourmet-with-updates.tgz") repo_updated_url = prepare_repository_from_archive( archive_path, "pkg-gourmet", tmp_path ) # we'll start from start_revision loader = SvnLoader( repo_updated_url, origin_url=repo_initial_url, swh_revision=start_revision ) assert loader.load() == {"status": "eventful"} # nonetheless, we obtain the same snapshot (as previous tests on that repository) visit_status2 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot != visit_status2.snapshot stats = get_stats(loader.storage) assert stats == { "content": 22, "directory": 28, "origin": 1, "origin_visit": 2, "person": 2, "release": 0, "revision": 11, "skipped_content": 0, "snapshot": 2, } expected_snapshot = { "id": GOURMET_UPDATES_SNAPSHOT, "branches": { "HEAD": { "target": "171dc35522bfd17dda4e90a542a0377fb2fc707a", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) def test_loader_svn_visit_with_eol_style(swh_config, datadir, tmp_path): """Check that a svn repo containing a versioned file with CRLF line endings with svn:eol-style property set to 'native' (this is a violation of svn specification as the file should have been stored with LF line endings) can be loaded anyway. """ archive_name = "mediawiki-repo-r407-eol-native-crlf" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(repo_url) assert loader.load() == {"status": "eventful"} expected_snapshot = { "id": MEDIAWIKI_SNAPSHOT, "branches": { "HEAD": { "target": "7da4975c363101b819756d33459f30a866d01b1b", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=MEDIAWIKI_SNAPSHOT, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 def test_loader_svn_visit_with_mixed_crlf_lf(swh_config, datadir, tmp_path): """Check that a svn repo containing a versioned file with mixed CRLF/LF line endings with svn:eol-style property set to 'native' (this is a violation of svn specification as mixed line endings for textual content should not be stored when the svn:eol-style property is set) can be loaded anyway. 
""" archive_name = "pyang-repo-r343-eol-native-mixed-lf-crlf" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(repo_url) assert loader.load() == {"status": "eventful"} expected_snapshot = { "id": PYANG_SNAPSHOT, "branches": { "HEAD": { "target": "9c6962eeb9164a636c374be700672355e34a98a7", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=PYANG_SNAPSHOT, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 def test_loader_svn_with_external_properties(swh_config, datadir, tmp_path): """Repository with svn:external properties cannot be fully ingested yet """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, "pkg-gourmet-with-external-id.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(repo_url) assert loader.load() == {"status": "eventful"} # repositoy holds 21 revisions, but the last commit holds an 'svn:externals' # property which will make the loader-svn stops at the last revision prior to the # bad one expected_snapshot = { "id": GOURMET_EXTERNALS_SNAPSHOT, "branches": { "HEAD": { "target": "82a7a4a09f9549223429143ba36ad77375e33c5c", "target_type": "revision", } }, } check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="partial", type="svn", snapshot=GOURMET_EXTERNALS_SNAPSHOT, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 assert stats["revision"] == 21 - 1 # commit with the svn:external property def test_loader_svn_with_symlink(swh_config, datadir, tmp_path): """Repository with symlinks should be ingested ok Edge case: - first create a file and commit it. Remove it, then add folder holding the same name, commit. 
def test_loader_svn_with_symlink(swh_config, datadir, tmp_path):
    """Repositories with symlinks should be ingested ok

    Edge case:

    - first create a file and commit it; remove it, then add a folder
      holding the same name, and commit
    - do the same scenario with a symbolic link (instead of a file)

    """
    archive_name = "pkg-gourmet"
    archive_path = os.path.join(
        datadir, "pkg-gourmet-with-edge-case-links-and-files.tgz"
    )
    repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path)

    loader = SvnLoader(repo_url)

    assert loader.load() == {"status": "eventful"}
    expected_snapshot = {
        "id": GOURMET_EDGE_CASES_SNAPSHOT,
        "branches": {
            "HEAD": {
                "target": "3f43af2578fccf18b0d4198e48563da7929dc608",
                "target_type": "revision",
            }
        },
    }
    check_snapshot(expected_snapshot, loader.storage)

    assert_last_visit_matches(
        loader.storage,
        repo_url,
        status="full",
        type="svn",
        snapshot=GOURMET_EDGE_CASES_SNAPSHOT,
    )

    stats = get_stats(loader.storage)
    assert stats["origin"] == 1
    assert stats["origin_visit"] == 1
    assert stats["snapshot"] == 1
    assert stats["revision"] == 19


def test_loader_svn_with_wrong_symlinks(swh_config, datadir, tmp_path):
    """Repositories with wrong symlinks should be ingested ok nonetheless

    Edge case:

    - wrong symbolic link
    - wrong symbolic link with empty space names

    """
    archive_name = "pkg-gourmet"
    archive_path = os.path.join(datadir, "pkg-gourmet-with-wrong-link-cases.tgz")
    repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path)

    loader = SvnLoader(repo_url)

    assert loader.load() == {"status": "eventful"}
    expected_snapshot = {
        "id": GOURMET_WRONG_LINKS_SNAPSHOT,
        "branches": {
            "HEAD": {
                "target": "cf30d3bb9d5967d0a2bbeacc405f10a5dd9b138a",
                "target_type": "revision",
            }
        },
    }
    check_snapshot(expected_snapshot, loader.storage)

    assert_last_visit_matches(
        loader.storage,
        repo_url,
        status="full",
        type="svn",
        snapshot=GOURMET_WRONG_LINKS_SNAPSHOT,
    )

    stats = get_stats(loader.storage)
    assert stats["origin"] == 1
    assert stats["origin_visit"] == 1
    assert stats["snapshot"] == 1
    assert stats["revision"] == 21


-class SvnLoaderTestFromRemoteDump(SvnLoaderTest, SvnLoaderFromRemoteDump):
-    pass
+def test_loader_svn_loader_from_dump_archive(swh_config, datadir, tmp_path):
+    """Check that loading a repository through a remote dump
+    (SvnLoaderFromRemoteDump) yields the same snapshot as loading it with
+    the base svn loader.
-class SvnLoaderFromRemoteDumpTest(BaseSvnLoaderTest):
-    """
-    Check that the results obtained with the remote svn dump loader
-    and the base svn loader are the same.
""" + archive_name = "pkg-gourmet" + archive_path = os.path.join(datadir, f"{archive_name}.tgz") + repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) - def setUp(self): - _LOADER_TEST_CONFIG["debug"] = True # to avoid cleanup in between load - super().setUp(archive_name="pkg-gourmet.tgz", type="remote") + loaderFromDump = SvnLoaderFromRemoteDump(repo_url) + assert loaderFromDump.load() == {"status": "eventful"} + assert_last_visit_matches( + loaderFromDump.storage, + repo_url, + status="full", + type="svn", + snapshot=GOURMET_SNAPSHOT, + ) - def test_load(self): - """ - Compare results of remote dump loader and base loader - """ - dump_loader = self.loader - dump_loader.load() + origin_url = repo_url + "2" # rename to another origin + loader = SvnLoader(repo_url, origin_url=origin_url) + assert loader.load() == {"status": "eventful"} # because are working on new origin + assert_last_visit_matches( + loader.storage, origin_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT + ) + + expected_snapshot = { + "id": GOURMET_SNAPSHOT, + "branches": { + "HEAD": { + "target": "4876cb10aec6f708f7466dddf547567b65f6c39c", + "target_type": "revision", + } + }, + } + check_snapshot(expected_snapshot, loader.storage) - self.assertCountContents(19) - self.assertCountDirectories(17) - self.assertCountRevisions(6) - self.assertCountSnapshots(1) + stats = get_stats(loader.storage) + assert stats["origin"] == 2 # created one more origin + assert stats["origin_visit"] == 2 + assert stats["snapshot"] == 1 - base_loader = SvnLoaderTest(self.svn_mirror_url) - base_loader.load() + loader = SvnLoader(repo_url) # no change on the origin-url + assert loader.load() == {"status": "uneventful"} + assert_last_visit_matches( + loader.storage, origin_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT + ) - dump_storage_stat = dump_loader.storage.stat_counters() - base_storage_stat = base_loader.storage.stat_counters() - self.assertEqual(dump_storage_stat, base_storage_stat) + stats = get_stats(loader.storage) + assert stats["origin"] == 2 + assert stats["origin_visit"] == 3 + assert stats["snapshot"] == 1 - assert_last_visit_matches( - self.storage, - self.repo_url, - status="full", - type="svn", - snapshot=GOURMET_SNAPSHOT, - ) - assert_last_visit_matches( - base_loader.storage, - self.repo_url, - status="full", - type="svn", - snapshot=GOURMET_SNAPSHOT, - ) +class SvnLoaderTestFromRemoteDump(SvnLoaderTest, SvnLoaderFromRemoteDump): + pass class SvnLoaderTest14(BaseSvnLoaderTest): """Edge cases: The repository held some user defined svn-properties with special encodings, this prevented the repository from being loaded even though we do not ingest those information. 
""" def setUp(self): super().setUp(archive_name="httthttt.tgz", filename="httthttt") def test_load(self): """Decoding user defined svn properties error should not fail loading """ # when assert self.loader.load() == {"status": "eventful"} self.assertCountRevisions(7, "7 svn commits") self.assertCountReleases(0) last_revision = "604a17dbb15e8d7ecb3e9f3768d09bf493667a93" expected_revisions = { "e6ae8487c6d14df9e6cb7196c6aac045798fd5be": "75ed58f260bfa4102d0e09657803511f5f0ab372", # noqa "e1e3314e0e9c9d17e6a3f60d6662f48f0e3c2fa3": "7bfb95cef68c1affe8d7f786353213d92abbb2b7", # noqa "1632fd38a8653e9b607c00feb93a41faddfb544c": "cd6de65c84d9405e7ca45fead02aa10162e30727", # noqa "0ad1ebbb92d00721644b0a46d6322d18dbcba848": "cd6de65c84d9405e7ca45fead02aa10162e30727", # noqa "94b87c97697d178a9311b018daa5179f7d4ba31e": "c2128108adecb59a0144339c2e701cd8118cff5a", # noqa "bd741cf22f0642d88cd0d8b545e8896b898c692d": "c2128108adecb59a0144339c2e701cd8118cff5a", # noqa last_revision: "f051d60256b2d89a0ca2704d6f91ad1b0ab44e02", } self.assertRevisionsContain(expected_revisions) expected_snapshot_id = "70487267f682c07e52a2371061369b6cf5bffa47" expected_branches = { "HEAD": {"target": last_revision, "target_type": "revision"} } self.assertSnapshotEqual(expected_snapshot_id, expected_branches) self.assertEqual(self.loader.visit_status(), "full") assert_last_visit_matches( self.storage, self.repo_url, status="full", type="svn", snapshot=hashutil.hash_to_bytes(expected_snapshot_id), )