diff --git a/swh/loader/svn/loader.py b/swh/loader/svn/loader.py index d9364f9..701806d 100644 --- a/swh/loader/svn/loader.py +++ b/swh/loader/svn/loader.py @@ -1,795 +1,795 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Loader in charge of injecting either new or existing svn mirrors to swh-storage. """ from datetime import datetime from mmap import ACCESS_WRITE, mmap import os import pty import re import shutil from subprocess import Popen import tempfile from typing import Dict, Iterator, List, Optional, Tuple from subvertpy import SubversionException from swh.loader.core.loader import BaseLoader from swh.loader.core.utils import clean_dangling_folders from swh.loader.exception import NotFound from swh.loader.svn.svn import SvnRepo from swh.model import from_disk, hashutil from swh.model.model import ( Content, Directory, Origin, Revision, SkippedContent, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from . import converters from .exception import SvnLoaderHistoryAltered, SvnLoaderUneventful from .utils import ( OutputStream, init_svn_repo_from_archive_dump, init_svn_repo_from_dump, ) DEFAULT_BRANCH = b"HEAD" TEMPORARY_DIR_PREFIX_PATTERN = "swh.loader.svn." SUBVERSION_ERROR = re.compile(r".*(E[0-9]{6}):.*") SUBVERSION_NOT_FOUND = "E170013" class SvnLoader(BaseLoader): """Swh svn loader. The repository is either remote or local. The loader deals with update on an already previously loaded repository. """ visit_type = "svn" def __init__( self, storage: StorageInterface, url: str, origin_url: Optional[str] = None, visit_date: Optional[datetime] = None, destination_path: Optional[str] = None, swh_revision: Optional[str] = None, incremental: bool = True, temp_directory: str = "/tmp", debug: bool = False, check_revision: int = 0, max_content_size: Optional[int] = None, ): """Load an svn repository. Args: ... incremental: If True, the default, starts from the last snapshot (if any). Otherwise, starts from the initial commit of the repository. """ super().__init__( storage=storage, logging_class="swh.loader.svn.SvnLoader", max_content_size=max_content_size, ) # technical svn uri to act on svn repository self.svn_url = url # origin url as unique identifier for origin in swh archive self.origin_url = origin_url if origin_url else self.svn_url self.debug = debug self.temp_directory = temp_directory self.done = False self.svnrepo = None # Revision check is configurable self.check_revision = check_revision # internal state used to store swh objects self._contents: List[Content] = [] self._skipped_contents: List[SkippedContent] = [] self._directories: List[Directory] = [] self._revisions: List[Revision] = [] self._snapshot: Optional[Snapshot] = None # internal state, current visit self._last_revision = None self._visit_status = "full" self._load_status = "uneventful" self.visit_date = visit_date self.destination_path = destination_path self.incremental = incremental - self.snapshot = None + self.snapshot: Optional[Snapshot] = None # state from previous visit self.latest_snapshot = None self.latest_revision = None def pre_cleanup(self): """Cleanup potential dangling files from prior runs (e.g. 
OOM killed tasks) """ clean_dangling_folders( self.temp_directory, pattern_check=TEMPORARY_DIR_PREFIX_PATTERN, log=self.log, ) def cleanup(self): """Clean up the svn repository's working representation on disk. """ if not self.svnrepo: # could happen if `prepare` fails return if self.debug: self.log.error( """NOT FOR PRODUCTION - debug flag activated Local repository not cleaned up for investigation: %s""", self.svnrepo.local_url.decode("utf-8"), ) return self.svnrepo.clean_fs() def swh_revision_hash_tree_at_svn_revision(self, revision): """Compute and return the hash tree at a given svn revision. Args: revision (int): the svn revision we want to check Returns: The hash tree directory as bytes. """ local_dirname, local_url = self.svnrepo.export_temporary(revision) h = from_disk.Directory.from_disk(path=local_url).hash self.svnrepo.clean_fs(local_dirname) return h def _latest_snapshot_revision( self, origin_url: str, ) -> Optional[Tuple[Snapshot, Revision]]: """Look for the latest snapshot revision and return it if any. Args: origin_url: Origin identifier Returns: Tuple of the latest Snapshot from the previous visit and its targeted revision if any or None otherwise. """ storage = self.storage latest_snapshot = snapshot_get_latest(storage, origin_url) if not latest_snapshot: return None branches = latest_snapshot.branches if not branches: return None branch = branches.get(DEFAULT_BRANCH) if not branch: return None if branch.target_type != TargetType.REVISION: return None swh_id = branch.target revision = storage.revision_get([swh_id])[0] if not revision: return None return latest_snapshot, revision def build_swh_revision(self, rev, commit, dir_id, parents): """Build the swh revision. This adds: - the ``synthetic`` flag set to true - the ``extra_headers`` containing the repository's uuid and the svn revision number. Args: rev (int): the svn revision number commit (dict): the commit data: revision id, date, author, and message dir_id (bytes): the upper tree's hash identifier parents ([bytes]): the parents' identifiers Returns: The swh revision corresponding to the svn revision. """ return converters.build_swh_revision( rev, commit, self.svnrepo.uuid, dir_id, parents ) def check_history_not_altered( self, svnrepo, revision_start: int, swh_rev: Revision ) -> bool: """Given a svn repository, check if the history was modified in between visits. """ revision_id = swh_rev.id parents = swh_rev.parents hash_data_per_revs = svnrepo.swh_hash_data_at_revision(revision_start) rev, _, commit, _, root_dir = list(hash_data_per_revs)[0] dir_id = root_dir.hash swh_revision = self.build_swh_revision(rev, commit, dir_id, parents) swh_revision_id = swh_revision.id return swh_revision_id == revision_id def start_from(self) -> Tuple[int, int, Dict[int, Tuple[bytes, ...]]]: """Determine from where to start the loading.
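For incremental runs, the revision to resume from is recovered from the ``extra_headers`` of the last loaded revision, roughly as sketched below (illustration only; the actual lookup happens in the body of this method)::

    extra_headers = dict(latest_revision.extra_headers)
    revision_start = int(extra_headers[b"svn_revision"]) + 1  # resume right after it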
Returns: tuple (revision_start, revision_end, revision_parents) Raises: SvnLoaderHistoryAltered: When a hash divergence has been detected (should not happen) SvnLoaderUneventful: Nothing changed since last visit """ assert self.svnrepo is not None, "svnrepo initialized in the `prepare` method" revision_head = self.svnrepo.head_revision() if revision_head == 0: # empty repository case revision_start = 0 revision_end = 0 else: # default configuration revision_start = self.svnrepo.initial_revision() revision_end = revision_head revision_parents: Dict[int, Tuple[bytes, ...]] = {revision_start: ()} # start from a previous revision if any if self.incremental and self.latest_revision is not None: extra_headers = dict(self.latest_revision.extra_headers) revision_start = int(extra_headers[b"svn_revision"]) revision_parents = { revision_start: self.latest_revision.parents, } self.log.debug( "svn export --ignore-keywords %s@%s", self.svnrepo.remote_url, revision_start, ) if not self.check_history_not_altered( self.svnrepo, revision_start, self.latest_revision ): msg = "History of svn %s@%s altered. Skipping..." % ( self.svnrepo.remote_url, revision_start, ) raise SvnLoaderHistoryAltered(msg) # now we know history is ok, we start at next revision revision_start = revision_start + 1 # and the parent become the latest know revision for # that repository revision_parents[revision_start] = (self.latest_revision.id,) if revision_start > revision_end: msg = "%s@%s already injected." % (self.svnrepo.remote_url, revision_end) raise SvnLoaderUneventful(msg) self.log.info( "Processing revisions [%s-%s] for %s", revision_start, revision_end, self.svnrepo, ) return revision_start, revision_end, revision_parents def _check_revision_divergence(self, count, rev, dir_id): """Check for hash revision computation divergence. The Rationale behind this is that svn can trigger unknown edge cases (mixed CRLF, svn properties, etc...). Those are not always easy to spot. Adding a check will help in spotting missing edge cases. Args: count (int): The number of revisions done so far rev (dict): The actual revision we are computing from dir_id (bytes): The actual directory for the given revision Returns: False if no hash divergence detected Raises ValueError if a hash divergence is detected """ # hash computation check if (self.check_revision != 0 and count % self.check_revision) == 0: self.log.debug("Checking hash computations on revision %s...", rev) checked_dir_id = self.swh_revision_hash_tree_at_svn_revision(rev) if checked_dir_id != dir_id: err = ( "Hash tree computation divergence detected " "(%s != %s), stopping!" % ( hashutil.hash_to_hex(dir_id), hashutil.hash_to_hex(checked_dir_id), ) ) raise ValueError(err) def process_svn_revisions( self, svnrepo, revision_start, revision_end, revision_parents ) -> Iterator[ Tuple[List[Content], List[SkippedContent], List[Directory], Revision] ]: """Process svn revisions from revision_start to revision_end. At each svn revision, apply new diffs and simultaneously compute swh hashes. This yields those computed swh hashes as a tuple (contents, directories, revision). Note that at every `self.check_revision`, a supplementary check takes place to check for hash-tree divergence (related T570). Yields: tuple (contents, directories, revision) of dict as a dictionary with keys, sha1_git, sha1, etc... 
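A minimal consumption sketch (illustration only; in practice ``fetch_data`` and ``store_data`` drive this generator)::

    for contents, skipped_contents, directories, revision in gen_revs:
        storage.content_add(contents)
        storage.skipped_content_add(skipped_contents)
        storage.directory_add(directories)
        storage.revision_add([revision])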
Raises: ValueError in case a hash divergence is detected """ gen_revs = svnrepo.swh_hash_data_per_revision(revision_start, revision_end) swh_revision = None count = 0 for rev, nextrev, commit, new_objects, root_directory in gen_revs: count += 1 # Unpack the contents/directories associated to that revision _contents, _skipped_contents, _directories = new_objects # compute the fs tree's checksums dir_id = root_directory.hash swh_revision = self.build_swh_revision( rev, commit, dir_id, revision_parents[rev] ) self.log.debug( "rev: %s, swhrev: %s, dir: %s", rev, hashutil.hash_to_hex(swh_revision.id), hashutil.hash_to_hex(dir_id), ) if self.check_revision: self._check_revision_divergence(count, rev, dir_id) if nextrev: revision_parents[nextrev] = (swh_revision.id,) yield _contents, _skipped_contents, _directories, swh_revision def prepare_origin_visit(self): self.origin = Origin(url=self.origin_url if self.origin_url else self.svn_url) def prepare(self): latest_snapshot_revision = self._latest_snapshot_revision(self.origin_url) if latest_snapshot_revision: self.latest_snapshot, self.latest_revision = latest_snapshot_revision if self.destination_path: local_dirname = self.destination_path else: local_dirname = tempfile.mkdtemp( suffix="-%s" % os.getpid(), prefix=TEMPORARY_DIR_PREFIX_PATTERN, dir=self.temp_directory, ) try: self.svnrepo = SvnRepo( self.svn_url, self.origin_url, local_dirname, self.max_content_size ) except SubversionException as e: error_msgs = [ "Unable to connect to a repository at URL", "Unknown URL type", ] for msg in error_msgs: if msg in e.args[0]: self._load_status = "uneventful" raise NotFound(e) raise try: revision_start, revision_end, revision_parents = self.start_from() self.swh_revision_gen = self.process_svn_revisions( self.svnrepo, revision_start, revision_end, revision_parents ) except SvnLoaderUneventful as e: self.log.warning(e) if self.latest_snapshot: self._snapshot = self.latest_snapshot self.done = True self._load_status = "uneventful" except SvnLoaderHistoryAltered as e: self.log.error(e) self.done = True self._visit_status = "partial" def fetch_data(self): """Fetch svn revision information. This applies the next svn revision as a patch on disk and, at the same time, computes the swh hashes. In effect, fetch_data fetches those data and computes the necessary swh objects, which are stored in the internal state instance variables (initialized in `__init__`). It is then up to `store_data` to actually send those objects to the storage. Returns: bool: True to continue fetching data (next svn revision), False to stop. """ data = None if self.done: return False try: data = next(self.swh_revision_gen) self._load_status = "eventful" except StopIteration: self.done = True self._visit_status = "full" return False # Stopping iteration except Exception as e: # svn:external, hash divergence, i/o error... self.log.exception(e) self.done = True self._visit_status = "partial" return False # Stopping iteration self._contents, self._skipped_contents, self._directories, rev = data if rev: self._last_revision = rev self._revisions.append(rev) return True # next svn revision def store_data(self): """Store the data accumulated in the internal instance variables. If the iteration over the svn revisions is done, we create the snapshot and flush the data to storage. This also resets the internal instance variable state.
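Once the iteration is done, the resulting snapshot targets the last loaded revision under the default ``HEAD`` branch, i.e. roughly (sketch)::

    Snapshot(branches={DEFAULT_BRANCH: SnapshotBranch(
        target=last_revision.id, target_type=TargetType.REVISION)})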
""" self.storage.skipped_content_add(self._skipped_contents) self.storage.content_add(self._contents) self.storage.directory_add(self._directories) self.storage.revision_add(self._revisions) if self.done: # finish line, snapshot! self.snapshot = self.generate_and_load_snapshot( revision=self._last_revision, snapshot=self._snapshot ) self.flush() self.loaded_snapshot_id = self.snapshot.id # reset internal state for next iteration self._revisions = [] def generate_and_load_snapshot( self, revision: Optional[Revision] = None, snapshot: Optional[Snapshot] = None ) -> Snapshot: """Create the snapshot either from existing revision or snapshot. Revision (supposedly new) has priority over the snapshot (supposedly existing one). Args: revision (dict): Last revision seen if any (None by default) snapshot (dict): Snapshot to use if any (None by default) Returns: Optional[Snapshot] The newly created snapshot """ if revision: # Priority to the revision snap = Snapshot( branches={ DEFAULT_BRANCH: SnapshotBranch( target=revision.id, target_type=TargetType.REVISION ) } ) elif snapshot: # Fallback to prior snapshot snap = snapshot else: raise ValueError( "generate_and_load_snapshot called with null revision and snapshot!" ) self.log.debug("snapshot: %s", snap) self.storage.snapshot_add([snap]) return snap def load_status(self): return { "status": self._load_status, } def visit_status(self): return self._visit_status class SvnLoaderFromDumpArchive(SvnLoader): """Uncompress an archive containing an svn dump, mount the svn dump as an svn repository and load said repository. """ def __init__( self, storage: StorageInterface, url: str, archive_path: str, origin_url: Optional[str] = None, destination_path: Optional[str] = None, swh_revision: Optional[str] = None, incremental: bool = False, visit_date: Optional[datetime] = None, temp_directory: str = "/tmp", debug: bool = False, check_revision: int = 0, max_content_size: Optional[int] = None, ): super().__init__( storage=storage, url=url, origin_url=origin_url, destination_path=destination_path, swh_revision=swh_revision, incremental=incremental, visit_date=visit_date, temp_directory=temp_directory, debug=debug, check_revision=check_revision, max_content_size=max_content_size, ) self.archive_path = archive_path self.temp_dir = None self.repo_path = None def prepare(self): self.log.info("Archive to mount and load %s", self.archive_path) self.temp_dir, self.repo_path = init_svn_repo_from_archive_dump( self.archive_path, prefix=TEMPORARY_DIR_PREFIX_PATTERN, suffix="-%s" % os.getpid(), root_dir=self.temp_directory, ) self.svn_url = f"file://{self.repo_path}" super().prepare() def cleanup(self): super().cleanup() if self.temp_dir and os.path.exists(self.temp_dir): self.log.debug( "Clean up temporary directory dump %s for project %s", self.temp_dir, os.path.basename(self.repo_path), ) shutil.rmtree(self.temp_dir) class SvnLoaderFromRemoteDump(SvnLoader): """ Create a subversion repository dump using the svnrdump utility, mount it locally and load the repository from it. 
""" def __init__( self, storage: StorageInterface, url: str, origin_url: Optional[str] = None, destination_path: Optional[str] = None, swh_revision: Optional[str] = None, incremental: bool = True, visit_date: Optional[datetime] = None, temp_directory: str = "/tmp", debug: bool = False, check_revision: int = 0, max_content_size: Optional[int] = None, ): super().__init__( storage=storage, url=url, origin_url=origin_url, destination_path=destination_path, swh_revision=swh_revision, incremental=incremental, visit_date=visit_date, temp_directory=temp_directory, debug=debug, check_revision=check_revision, max_content_size=max_content_size, ) self.temp_dir = tempfile.mkdtemp(dir=self.temp_directory) self.repo_path = None self.truncated_dump = False def get_last_loaded_svn_rev(self, svn_url: str) -> int: """Check if the svn repository has already been visited and return the last loaded svn revision number or -1 otherwise. """ origin = list(self.storage.origin_get([svn_url]))[0] if not origin: return -1 svn_revision = -1 try: latest_snapshot_revision = self._latest_snapshot_revision(origin.url) if latest_snapshot_revision: _, latest_revision = latest_snapshot_revision latest_revision_headers = dict(latest_revision.extra_headers) svn_revision = int(latest_revision_headers[b"svn_revision"]) except Exception: pass return svn_revision def dump_svn_revisions(self, svn_url, last_loaded_svn_rev=-1): """ Generate a subversion dump file using the svnrdump tool. If the svnrdump command failed somehow, the produced dump file is analyzed to determine if a partial loading is still feasible. Raises: NotFound when the repository is no longer found at url """ # Build the svnrdump command line svnrdump_cmd = ["svnrdump", "dump", svn_url] # Launch the svnrdump command while capturing stderr as # successfully dumped revision numbers are printed to it dump_temp_dir = tempfile.mkdtemp(dir=self.temp_dir) dump_name = "".join(c for c in svn_url if c.isalnum()) dump_path = "%s/%s.svndump" % (dump_temp_dir, dump_name) stderr_lines = [] self.log.debug("Executing %s", " ".join(svnrdump_cmd)) with open(dump_path, "wb") as dump_file: stderr_r, stderr_w = pty.openpty() svnrdump = Popen(svnrdump_cmd, stdout=dump_file, stderr=stderr_w) os.close(stderr_w) stderr_stream = OutputStream(stderr_r) readable = True error_codes: List[str] = [] error_messages: List[str] = [] while readable: lines, readable = stderr_stream.read_lines() stderr_lines += lines for line in lines: self.log.debug(line) match = SUBVERSION_ERROR.search(line) if match: error_codes.append(match.group(1)) error_messages.append(line) svnrdump.wait() os.close(stderr_r) if svnrdump.returncode == 0: return dump_path # There was an error but it does not mean that no revisions # can be loaded. # Get the stderr line with latest dumped revision last_dumped_rev = None if len(stderr_lines) > 1: last_dumped_rev = stderr_lines[-2] if last_dumped_rev: # Get the latest dumped revision number matched_rev = re.search(".*revision ([0-9]+)", last_dumped_rev) last_dumped_rev = int(matched_rev.group(1)) if matched_rev else -1 # Check if revisions inside the dump file can be loaded anyway if last_dumped_rev > last_loaded_svn_rev: self.log.debug( ( "svnrdump did not dump all expected revisions " "but revisions range %s:%s are available in " "the generated dump file and will be loaded " "into the archive." 
) % (last_loaded_svn_rev + 1, last_dumped_rev) ) # Truncate the dump file after the last successfully dumped # revision to avoid the loading of corrupted data self.log.debug( ( "Truncating dump file after the last " "successfully dumped revision (%s) to avoid " "the loading of corrupted data" ) % last_dumped_rev ) with open(dump_path, "r+b") as f: with mmap(f.fileno(), 0, access=ACCESS_WRITE) as s: pattern = ( "Revision-number: %s" % (last_dumped_rev + 1) ).encode() n = s.rfind(pattern) if n != -1: s.resize(n) self.truncated_dump = True return dump_path elif last_dumped_rev != -1: raise Exception( ( "Last dumped subversion revision (%s) is " "less than the last one loaded into the " "archive (%s)." ) % (last_dumped_rev, last_loaded_svn_rev) ) if SUBVERSION_NOT_FOUND in error_codes: raise NotFound( f"{SUBVERSION_NOT_FOUND}: Repository never existed or disappeared" ) raise Exception( "An error occurred when running svnrdump and " "no exploitable dump file has been generated.\n" + "\n".join(error_messages) ) def prepare(self): # First, check if previous revisions have been loaded for the # subversion origin and get the number of the last one last_loaded_svn_rev = self.get_last_loaded_svn_rev(self.svn_url) # Then try to generate a dump file containing relevant svn revisions # to load; an exception will be raised if something goes wrong dump_path = self.dump_svn_revisions(self.svn_url, last_loaded_svn_rev) # Finally, mount the dump and load the repository self.log.debug('Mounting dump file with "svnadmin load".') _, self.repo_path = init_svn_repo_from_dump( dump_path, prefix=TEMPORARY_DIR_PREFIX_PATTERN, suffix="-%s" % os.getpid(), root_dir=self.temp_dir, ) self.svn_url = "file://%s" % self.repo_path super().prepare() def cleanup(self): super().cleanup() if self.temp_dir and os.path.exists(self.temp_dir): shutil.rmtree(self.temp_dir) def visit_status(self): if self.truncated_dump: return "partial" else: return super().visit_status() diff --git a/swh/loader/svn/ra.py b/swh/loader/svn/ra.py index 1d869ce..f91123b 100644 --- a/swh/loader/svn/ra.py +++ b/swh/loader/svn/ra.py @@ -1,541 +1,542 @@ # Copyright (C) 2016-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Remote Access client to svn server. """ import codecs import os import shutil import tempfile from typing import List, Tuple import click from subvertpy import delta, properties from subvertpy.ra import Auth, RemoteAccess, get_username_provider from swh.model import from_disk, hashutil from swh.model.model import Content, Directory, SkippedContent _eol_style = {"native": b"\n", "CRLF": b"\r\n", "LF": b"\n", "CR": b"\r"} def _normalize_line_endings(lines, eol_style="native"): r"""Normalize line endings to unix (\\n), windows (\\r\\n) or mac (\\r). Args: lines (bytes): The lines to normalize eol_style (str): The line ending format as defined for svn:eol-style property.
Acceptable values are 'native', 'CRLF', 'LF' and 'CR' Returns: bytes: lines with endings normalized """ - lines = lines.replace(_eol_style["CRLF"], _eol_style["LF"]).replace( - _eol_style["CR"], _eol_style["LF"] - ) - if _eol_style[eol_style] != _eol_style["LF"]: - lines = lines.replace(_eol_style["LF"], _eol_style[eol_style]) + if eol_style in _eol_style: + lines = lines.replace(_eol_style["CRLF"], _eol_style["LF"]).replace( + _eol_style["CR"], _eol_style["LF"] + ) + if _eol_style[eol_style] != _eol_style["LF"]: + lines = lines.replace(_eol_style["LF"], _eol_style[eol_style]) return lines def apply_txdelta_handler(sbuf, target_stream): """Return a function that can be called repeatedly with txdelta windows. When done, closes the target_stream. Adapted from subvertpy.delta.apply_txdelta_handler to close the stream when done. Args: sbuf: Source buffer target_stream: Target stream to write to. Returns: Function to be called to apply txdelta windows """ def apply_window(window, sbuf=sbuf, target_stream=target_stream): if window is None: target_stream.close() return # Last call patch = delta.apply_txdelta_window(sbuf, window) target_stream.write(patch) return apply_window def read_svn_link(data): """Read the svn link's content. Args: data (bytes): svn link's raw content Returns: The tuple of (filetype, destination path) """ split_byte = b" " filetype, *src = data.split(split_byte) src = split_byte.join(src) return filetype, src def is_file_an_svnlink_p(fullpath): """Determine if a filepath is an svnlink or something else. Args: fullpath (str/bytes): Full path to the potential symlink to check Returns: boolean value to determine if it's indeed a symlink (as per svn) or not. """ with open(fullpath, "rb") as f: filetype, src = read_svn_link(f.read()) return filetype == b"link", src def _ra_codecs_error_handler(e): """Subvertpy may fail to decode to utf-8 the user svn properties. As they are not used by the loader, return an empty string instead of the decoded content. Args: e (UnicodeDecodeError): exception raised during the svn properties decoding. """ return "", e.end DEFAULT_FLAG = 0 EXEC_FLAG = 1 NOEXEC_FLAG = 2 SVN_PROPERTY_EOL = "svn:eol-style" # EOL state check mess EOL_STYLE = {} class FileEditor: """File Editor in charge of updating file on disk and memory objects. """ __slots__ = ["directory", "path", "fullpath", "executable", "link"] def __init__(self, directory, rootpath, path): self.directory = directory self.path = path # default value: 0, 1: set the flag, 2: remove the exec flag self.executable = DEFAULT_FLAG self.link = None self.fullpath = os.path.join(rootpath, path) def change_prop(self, key, value): if key == properties.PROP_EXECUTABLE: if value is None: # bit flip off self.executable = NOEXEC_FLAG else: self.executable = EXEC_FLAG elif key == properties.PROP_SPECIAL: # Possibly a symbolic link. We cannot check further at # that moment though, patch(s) not being applied yet self.link = value is not None elif key == SVN_PROPERTY_EOL: # backup end of line style for file EOL_STYLE[self.fullpath] = value def __make_symlink(self, src): """Convert the svnlink to a symlink on disk. This function expects self.fullpath to be a svn link. Args: src (bytes): Path to the link's source Return: tuple: The svnlink's data tuple: - type (should be only 'link') - """ os.remove(self.fullpath) os.symlink(src=src, dst=self.fullpath) def __make_svnlink(self): """Convert the symlink to a svnlink on disk. 
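For example, a symlink pointing at ``hello.py`` becomes a regular file whose content is ``b"link hello.py"``, mirroring what the code below writes::

    sbuf = b"link " + os.readlink(self.fullpath)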
Return: The symlink's svnlink data (``b'type '``) """ # we replace the symlink by a svnlink # to be able to patch the file on future commits src = os.readlink(self.fullpath) os.remove(self.fullpath) sbuf = b"link " + src with open(self.fullpath, "wb") as f: f.write(sbuf) return sbuf def apply_textdelta(self, base_checksum): if os.path.lexists(self.fullpath): if os.path.islink(self.fullpath): # svn does not deal with symlink so we transform into # real svn symlink for potential patching in later # commits sbuf = self.__make_svnlink() self.link = True else: with open(self.fullpath, "rb") as f: sbuf = f.read() else: sbuf = b"" t = open(self.fullpath, "wb") return apply_txdelta_handler(sbuf, target_stream=t) def close(self): """When done with the file, this is called. So the file exists and is updated, we can: - adapt accordingly its execution flag if any - compute the objects' checksums - replace the svnlink with a real symlink (for disk computation purposes) """ is_link = None if self.link: # can only check now that the link is a real one # since patch has been applied is_link, src = is_file_an_svnlink_p(self.fullpath) if is_link: self.__make_symlink(src) else: # not a real link... self.link = False elif os.path.islink(self.fullpath): # path was a symbolic link in previous revision but got the property # svn:special unset in current one, revert its content to svn link format self.__make_svnlink() if not is_link: # if a link, do nothing regarding flag if self.executable == EXEC_FLAG: os.chmod(self.fullpath, 0o755) elif self.executable == NOEXEC_FLAG: os.chmod(self.fullpath, 0o644) # And now compute file's checksums eol_style = EOL_STYLE.get(self.fullpath, None) if eol_style and not is_link: # ensure to normalize line endings as defined by svn:eol-style # property to get the same file checksum as after an export # or checkout operation with subversion with open(self.fullpath, "rb") as f: data = f.read() data = _normalize_line_endings(data, eol_style) mode = os.lstat(self.fullpath).st_mode self.directory[self.path] = from_disk.Content.from_bytes( mode=mode, data=data ) else: self.directory[self.path] = from_disk.Content.from_file(path=self.fullpath) class BaseDirEditor: """Base class implementation of dir editor. see :class:`DirEditor` for an implementation that hashes every directory encountered. Instantiate a new class inheriting from this class and define the following functions:: def update_checksum(self): # Compute the checksums at current state def open_directory(self, *args): # Update an existing folder. def add_directory(self, *args): # Add a new one. """ __slots__ = ["directory", "rootpath"] def __init__(self, directory, rootpath): self.directory = directory self.rootpath = rootpath # build directory on init os.makedirs(rootpath, exist_ok=True) def remove_child(self, path): """Remove a path from the current objects. The path can be resolved as link, file or directory. This function takes also care of removing the link between the child and the parent. Args: path: to remove from the current objects. 
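Note that ``path`` is expected as bytes, relative to ``self.rootpath``; ``delete_entry`` below encodes it with ``path.encode("utf-8")`` before calling this method.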
""" try: entry_removed = self.directory[path] except KeyError: entry_removed = None else: del self.directory[path] fpath = os.path.join(self.rootpath, path) if isinstance(entry_removed, from_disk.Directory): shutil.rmtree(fpath) else: os.remove(fpath) # when deleting a directory ensure to remove any eol style setting for the # file it contains as they can be added again later in another revision # without the svn:eol-style property set fullpath = os.path.join(self.rootpath, path) for eol_path in list(EOL_STYLE): if eol_path.startswith(fullpath): del EOL_STYLE[eol_path] def update_checksum(self): raise NotImplementedError("This should be implemented.") def open_directory(self, *args): raise NotImplementedError("This should be implemented.") def add_directory(self, *args): raise NotImplementedError("This should be implemented.") def open_file(self, *args): """Updating existing file. """ path = os.fsencode(args[0]) self.directory[path] = from_disk.Content() return FileEditor(self.directory, rootpath=self.rootpath, path=path) def add_file(self, path, copyfrom_path=None, copyfrom_rev=-1): """Creating a new file. """ path = os.fsencode(path) self.directory[path] = from_disk.Content() return FileEditor(self.directory, self.rootpath, path) def change_prop(self, key, value): """Change property callback on directory. """ if key == properties.PROP_EXTERNALS: raise ValueError("Property '%s' detected. Not implemented yet." % key) def delete_entry(self, path, revision): """Remove a path. """ self.remove_child(path.encode("utf-8")) def close(self): """Function called when we finish walking a repository. """ self.update_checksum() class DirEditor(BaseDirEditor): """Directory Editor in charge of updating directory hashes computation. This implementation includes empty folder in the hash computation. """ def update_checksum(self): """Update the root path self.path's checksums according to the children's objects. This function is expected to be called when the folder has been completely 'walked'. """ pass def open_directory(self, *args): """Updating existing directory. """ return self def add_directory(self, path, copyfrom_path=None, copyfrom_rev=-1): """Adding a new directory. """ path = os.fsencode(path) os.makedirs(os.path.join(self.rootpath, path), exist_ok=True) self.directory[path] = from_disk.Directory() return self class Editor: """Editor in charge of replaying svn events and computing objects along. This implementation accounts for empty folder during hash computations. """ def __init__(self, rootpath, directory): self.rootpath = rootpath self.directory = directory def set_target_revision(self, revnum): pass def abort(self): pass def close(self): pass def open_root(self, base_revnum): return DirEditor(self.directory, rootpath=self.rootpath) class Replay: """Replay class. """ def __init__(self, conn, rootpath, directory=None): self.conn = conn self.rootpath = rootpath if directory is None: directory = from_disk.Directory() self.directory = directory self.editor = Editor(rootpath=rootpath, directory=directory) def replay(self, rev): """Replay svn actions between rev and rev+1. This method updates in place the self.editor.directory, as well as the filesystem. 
Returns: The updated root directory """ codecs.register_error("strict", _ra_codecs_error_handler) self.conn.replay(rev, rev + 1, self.editor) codecs.register_error("strict", codecs.strict_errors) return self.editor.directory def compute_objects( self, rev: int ) -> Tuple[List[Content], List[SkippedContent], List[Directory]]: """Compute objects at revisions rev. Expects the state to be at previous revision's objects. Args: rev: The revision to start the replay from. Returns: The updated objects between rev and rev+1. Beware that this mutates the filesystem at rootpath accordingly. """ self.replay(rev) return from_disk.iter_directory(self.directory) @click.command() @click.option("--local-url", default="/tmp", help="local svn working copy") @click.option( "--svn-url", default="file:///home/storage/svn/repos/pkg-fox", help="svn repository's url.", ) @click.option( "--revision-start", default=1, type=click.INT, help="svn repository's starting revision.", ) @click.option( "--revision-end", default=-1, type=click.INT, help="svn repository's ending revision.", ) @click.option( "--debug/--nodebug", default=True, help="Indicates if the server should run in debug mode.", ) @click.option( "--cleanup/--nocleanup", default=True, help="Indicates whether to cleanup disk when done or not.", ) def main(local_url, svn_url, revision_start, revision_end, debug, cleanup): """Script to present how to use Replay class. """ conn = RemoteAccess(svn_url.encode("utf-8"), auth=Auth([get_username_provider()])) os.makedirs(local_url, exist_ok=True) rootpath = tempfile.mkdtemp( prefix=local_url, suffix="-" + os.path.basename(svn_url) ) rootpath = os.fsencode(rootpath) # Do not go beyond the repository's latest revision revision_end_max = conn.get_latest_revnum() if revision_end == -1: revision_end = revision_end_max revision_end = min(revision_end, revision_end_max) try: replay = Replay(conn, rootpath) for rev in range(revision_start, revision_end + 1): contents, skipped_contents, directories = replay.compute_objects(rev) print( "r%s %s (%s new contents, %s new directories)" % ( rev, hashutil.hash_to_hex(replay.directory.hash), len(contents) + len(skipped_contents), len(directories), ) ) if debug: print("%s" % rootpath.decode("utf-8")) finally: if cleanup: if os.path.exists(rootpath): shutil.rmtree(rootpath) if __name__ == "__main__": main() diff --git a/swh/loader/svn/tests/test_loader.py b/swh/loader/svn/tests/test_loader.py index 26c1fa7..f833e74 100644 --- a/swh/loader/svn/tests/test_loader.py +++ b/swh/loader/svn/tests/test_loader.py @@ -1,1078 +1,1124 @@ # Copyright (C) 2016-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from enum import Enum from io import BytesIO import os import subprocess -from typing import Dict, List +from typing import Any, Dict, List import pytest from subvertpy import SubversionException, delta, repos from subvertpy.ra import Auth, RemoteAccess, get_username_provider from typing_extensions import TypedDict from swh.loader.svn.loader import ( SvnLoader, SvnLoaderFromDumpArchive, SvnLoaderFromRemoteDump, ) from swh.loader.svn.utils import init_svn_repo_from_dump from swh.loader.tests import ( assert_last_visit_matches, check_snapshot, get_stats, prepare_repository_from_archive, ) from swh.model.from_disk import DentryPerms from swh.model.hashutil import hash_to_bytes from swh.model.model import Snapshot, 
SnapshotBranch, TargetType GOURMET_SNAPSHOT = Snapshot( id=hash_to_bytes("889cacc2731e3312abfb2b1a0c18ade82a949e07"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("4876cb10aec6f708f7466dddf547567b65f6c39c"), target_type=TargetType.REVISION, ) }, ) GOURMET_UPDATES_SNAPSHOT = Snapshot( id=hash_to_bytes("11086d15317014e43d2438b7ffc712c44f1b8afe"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("171dc35522bfd17dda4e90a542a0377fb2fc707a"), target_type=TargetType.REVISION, ) }, ) def test_loader_svn_not_found_no_mock(swh_storage, tmp_path): """Given an unknown repository, the loader visit ends up in status not_found""" repo_url = "unknown-repository" loader = SvnLoader(swh_storage, repo_url, destination_path=tmp_path) assert loader.load() == {"status": "uneventful"} assert_last_visit_matches( swh_storage, repo_url, status="not_found", type="svn", ) @pytest.mark.parametrize( "exception_msg", ["Unable to connect to a repository at URL", "Unknown URL type",] ) def test_loader_svn_not_found(swh_storage, tmp_path, exception_msg, mocker): """Given unknown repository issues, the loader visit ends up in status not_found""" mock = mocker.patch("swh.loader.svn.loader.SvnRepo") mock.side_effect = SubversionException(exception_msg, 0) unknown_repo_url = "unknown-repository" loader = SvnLoader(swh_storage, unknown_repo_url, destination_path=tmp_path) assert loader.load() == {"status": "uneventful"} assert_last_visit_matches( swh_storage, unknown_repo_url, status="not_found", type="svn", ) @pytest.mark.parametrize( "exception", [ SubversionException("Irrelevant message, considered a failure", 10), SubversionException("Present but fails to read, considered a failure", 20), ValueError("considered a failure"), ], ) def test_loader_svn_failures(swh_storage, tmp_path, exception, mocker): """Given any errors raised, the loader visit ends up in status failed""" mock = mocker.patch("swh.loader.svn.loader.SvnRepo") mock.side_effect = exception existing_repo_url = "existing-repo-url" loader = SvnLoader(swh_storage, existing_repo_url, destination_path=tmp_path) assert loader.load() == {"status": "failed"} assert_last_visit_matches( swh_storage, existing_repo_url, status="failed", type="svn", ) def test_loader_svnrdump_not_found(swh_storage, tmp_path, mocker): """Loading from remote dump which does not exist should end up as not_found visit""" unknown_repo_url = "file:///tmp/svn.code.sf.net/p/white-rats-studios/svn" loader = SvnLoaderFromRemoteDump( swh_storage, unknown_repo_url, destination_path=tmp_path ) assert loader.load() == {"status": "uneventful"} assert_last_visit_matches( swh_storage, unknown_repo_url, status="not_found", type="svn", ) def test_loader_svnrdump_no_such_revision(swh_storage, tmp_path, datadir): """Visit multiple times an origin with the remote loader should not raise. It used to fail the ingestion on the second visit with a "No such revision x, 160006" message. 
""" archive_dump = os.path.join(datadir, "penguinsdbtools2018.dump.gz") loading_path = str(tmp_path / "loading") # Prepare the dump as a local svn repository for test purposes temp_dir, repo_path = init_svn_repo_from_dump( archive_dump, root_dir=tmp_path, gzip=True ) repo_url = f"file://{repo_path}" loader = SvnLoaderFromRemoteDump( swh_storage, repo_url, destination_path=loading_path ) assert loader.load() == {"status": "eventful"} actual_visit = assert_last_visit_matches( swh_storage, repo_url, status="full", type="svn", ) loader2 = SvnLoaderFromRemoteDump( swh_storage, repo_url, destination_path=loading_path ) # Visiting a second time the same repository should be uneventful... assert loader2.load() == {"status": "uneventful"} actual_visit2 = assert_last_visit_matches( swh_storage, repo_url, status="full", type="svn", ) assert actual_visit.snapshot is not None # ... with the same snapshot as the first visit assert actual_visit2.snapshot == actual_visit.snapshot def test_loader_svn_new_visit(swh_storage, datadir, tmp_path): """Eventful visit should yield 1 snapshot""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url, destination_path=tmp_path) assert loader.load() == {"status": "eventful"} assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) stats = get_stats(loader.storage) assert stats == { "content": 19, "directory": 17, "origin": 1, "origin_visit": 1, "release": 0, "revision": 6, "skipped_content": 0, "snapshot": 1, } check_snapshot(GOURMET_SNAPSHOT, loader.storage) def test_loader_svn_2_visits_no_change(swh_storage, datadir, tmp_path): """Visit multiple times a repository with no change should yield the same snapshot """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) assert loader.load() == {"status": "uneventful"} visit_status2 = assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot == visit_status2.snapshot stats = get_stats(loader.storage) assert stats["origin_visit"] == 1 + 1 # computed twice the same snapshot assert stats["snapshot"] == 1 # even starting from previous revision... start_revision = loader.storage.revision_get( [hash_to_bytes("95edacc8848369d6fb1608e887d6d2474fd5224f")] )[0] assert start_revision is not None loader = SvnLoader(swh_storage, repo_url, swh_revision=start_revision) assert loader.load() == {"status": "uneventful"} stats = get_stats(loader.storage) assert stats["origin_visit"] == 2 + 1 # ... with no change in repository, this yields the same snapshot assert stats["snapshot"] == 1 assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) def test_loader_tampered_repository(swh_storage, datadir, tmp_path): """In this scenario, the dump has been tampered with to modify the commit log [1]. This results in a hash divergence which is detected at startup after a new run for the same origin. 
In effect, that stops the loading and nothing further is done. [1] Tampering with the revision 6 log message as follows: ``` tar xvf pkg-gourmet.tgz # initial repository ingested cd pkg-gourmet/ echo "Tampering with commit log message for fun and profit" > log.txt svnadmin setlog . -r 6 log.txt --bypass-hooks tar cvf pkg-gourmet-tampered-rev6-log.tgz pkg-gourmet/ ``` """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} check_snapshot(GOURMET_SNAPSHOT, loader.storage) archive_path2 = os.path.join(datadir, "pkg-gourmet-tampered-rev6-log.tgz") repo_tampered_url = prepare_repository_from_archive( archive_path2, archive_name, tmp_path ) loader2 = SvnLoader(swh_storage, repo_tampered_url, origin_url=repo_url) assert loader2.load() == {"status": "failed"} assert_last_visit_matches( loader2.storage, repo_url, status="failed", type="svn", snapshot=None, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 2 assert stats["snapshot"] == 1 def test_loader_svn_visit_with_changes(swh_storage, datadir, tmp_path): """In this scenario, the repository has been updated with new changes. The loading visit should result in new objects stored and 1 new snapshot. """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_initial_url = prepare_repository_from_archive( archive_path, archive_name, tmp_path ) # repo_initial_url becomes the origin_url we want to visit some more below loader = SvnLoader(swh_storage, repo_initial_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_initial_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) archive_path = os.path.join(datadir, "pkg-gourmet-with-updates.tgz") repo_updated_url = prepare_repository_from_archive( archive_path, "pkg-gourmet", tmp_path ) loader = SvnLoader(swh_storage, repo_updated_url, origin_url=repo_initial_url,) assert loader.load() == {"status": "eventful"} visit_status2 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT.id, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot != visit_status2.snapshot stats = get_stats(loader.storage) assert stats == { "content": 22, "directory": 28, "origin": 1, "origin_visit": 2, "release": 0, "revision": 11, "skipped_content": 0, "snapshot": 2, } check_snapshot(GOURMET_UPDATES_SNAPSHOT, loader.storage) # Let's start the ingestion from the start, this should yield the same result loader = SvnLoader( swh_storage, repo_updated_url, origin_url=repo_initial_url, incremental=False, ) assert loader.load() == {"status": "eventful"} visit_status3 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT.id, ) assert visit_status2.date < visit_status3.date assert visit_status3.snapshot == visit_status2.snapshot check_snapshot(GOURMET_UPDATES_SNAPSHOT, loader.storage) stats = get_stats(loader.storage) assert stats["origin"] == 1 # always the same origin assert stats["origin_visit"] == 2 + 1 # 1 more visit assert stats["snapshot"] == 2 # no new snapshot def test_loader_svn_visit_start_from_revision(swh_storage, datadir, tmp_path): """Starting from an existing revision, the next visit on a changed repo should yield 1 new snapshot.
""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_initial_url = prepare_repository_from_archive( archive_path, archive_name, tmp_path ) # repo_initial_url becomes the origin_url we want to visit some more below loader = SvnLoader(swh_storage, repo_initial_url) assert loader.load() == {"status": "eventful"} visit_status1 = assert_last_visit_matches( loader.storage, repo_initial_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) start_revision = loader.storage.revision_get( [hash_to_bytes("95edacc8848369d6fb1608e887d6d2474fd5224f")] )[0] assert start_revision is not None archive_path = os.path.join(datadir, "pkg-gourmet-with-updates.tgz") repo_updated_url = prepare_repository_from_archive( archive_path, "pkg-gourmet", tmp_path ) # we'll start from start_revision loader = SvnLoader( swh_storage, repo_updated_url, origin_url=repo_initial_url, swh_revision=start_revision, ) assert loader.load() == {"status": "eventful"} # nonetheless, we obtain the same snapshot (as previous tests on that repository) visit_status2 = assert_last_visit_matches( loader.storage, repo_updated_url, status="full", type="svn", snapshot=GOURMET_UPDATES_SNAPSHOT.id, ) assert visit_status1.date < visit_status2.date assert visit_status1.snapshot != visit_status2.snapshot stats = get_stats(loader.storage) assert stats == { "content": 22, "directory": 28, "origin": 1, "origin_visit": 2, "release": 0, "revision": 11, "skipped_content": 0, "snapshot": 2, } check_snapshot(GOURMET_UPDATES_SNAPSHOT, loader.storage) def test_loader_svn_visit_with_eol_style(swh_storage, datadir, tmp_path): """Check that a svn repo containing a versioned file with CRLF line endings with svn:eol-style property set to 'native' (this is a violation of svn specification as the file should have been stored with LF line endings) can be loaded anyway. """ archive_name = "mediawiki-repo-r407-eol-native-crlf" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} mediawiki_snapshot = Snapshot( id=hash_to_bytes("d6d6e9703f157c5702d9a4a5dec878926ed4ab76"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("7da4975c363101b819756d33459f30a866d01b1b"), target_type=TargetType.REVISION, ) }, ) check_snapshot(mediawiki_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=mediawiki_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 def test_loader_svn_visit_with_mixed_crlf_lf(swh_storage, datadir, tmp_path): """Check that a svn repo containing a versioned file with mixed CRLF/LF line endings with svn:eol-style property set to 'native' (this is a violation of svn specification as mixed line endings for textual content should not be stored when the svn:eol-style property is set) can be loaded anyway. 
""" archive_name = "pyang-repo-r343-eol-native-mixed-lf-crlf" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} pyang_snapshot = Snapshot( id=hash_to_bytes("6d9590de11b00a5801de0ff3297c5b44bbbf7d24"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("9c6962eeb9164a636c374be700672355e34a98a7"), target_type=TargetType.REVISION, ) }, ) check_snapshot(pyang_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=pyang_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 def test_loader_svn_with_external_properties(swh_storage, datadir, tmp_path): """Repository with svn:external properties cannot be fully ingested yet """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, "pkg-gourmet-with-external-id.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} gourmet_externals_snapshot = Snapshot( id=hash_to_bytes("19cb68d0a3f22372e2b7017ea5e2a2ea5ae3e09a"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("82a7a4a09f9549223429143ba36ad77375e33c5c"), target_type=TargetType.REVISION, ) }, ) check_snapshot(gourmet_externals_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="partial", type="svn", snapshot=gourmet_externals_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 # repository holds 21 revisions, but the last commit holds an 'svn:externals' # property which will make the loader-svn stops at the last revision prior to the # bad one assert stats["revision"] == 21 - 1 # commit with the svn:external property def test_loader_svn_with_symlink(swh_storage, datadir, tmp_path): """Repository with symlinks should be ingested ok Edge case: - first create a file and commit it. Remove it, then add folder holding the same name, commit. 
- do the same scenario with symbolic link (instead of file) """ archive_name = "pkg-gourmet" archive_path = os.path.join( datadir, "pkg-gourmet-with-edge-case-links-and-files.tgz" ) repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} gourmet_edge_cases_snapshot = Snapshot( id=hash_to_bytes("18e60982fe521a2546ab8c3c73a535d80462d9d0"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("3f43af2578fccf18b0d4198e48563da7929dc608"), target_type=TargetType.REVISION, ) }, ) check_snapshot(gourmet_edge_cases_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=gourmet_edge_cases_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 assert stats["revision"] == 19 def test_loader_svn_with_wrong_symlinks(swh_storage, datadir, tmp_path): """Repository with wrong symlinks should be ingested ok nonetheless Edge case: - wrong symbolic link - wrong symbolic link with empty space names """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, "pkg-gourmet-with-wrong-link-cases.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} gourmet_wrong_links_snapshot = Snapshot( id=hash_to_bytes("b17f38acabb90f066dedd30c29f01a02af88a5c4"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("cf30d3bb9d5967d0a2bbeacc405f10a5dd9b138a"), target_type=TargetType.REVISION, ) }, ) check_snapshot(gourmet_wrong_links_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=gourmet_wrong_links_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 assert stats["revision"] == 21 def test_loader_svn_loader_from_remote_dump(swh_storage, datadir, tmp_path): """Loading a repository from a remote dump should yield the same snapshot as a direct load, including on subsequent visits and under renamed origins """ archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loaderFromDump = SvnLoaderFromRemoteDump(swh_storage, repo_url) assert loaderFromDump.load() == {"status": "eventful"} assert_last_visit_matches( loaderFromDump.storage, repo_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) origin_url = repo_url + "2" # rename to another origin loader = SvnLoader(swh_storage, repo_url, origin_url=origin_url) assert loader.load() == {"status": "eventful"} # because we are working on a new origin assert_last_visit_matches( loader.storage, origin_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) check_snapshot(GOURMET_SNAPSHOT, loader.storage) stats = get_stats(loader.storage) assert stats["origin"] == 2 # created one more origin assert stats["origin_visit"] == 2 assert stats["snapshot"] == 1 loader = SvnLoader(swh_storage, repo_url) # no change on the origin-url assert loader.load() == {"status": "uneventful"} assert_last_visit_matches( loader.storage, origin_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 2 assert stats["origin_visit"] == 3 assert stats["snapshot"] == 1 # second visit
from the dump should be uneventful loaderFromDump = SvnLoaderFromRemoteDump(swh_storage, repo_url) assert loaderFromDump.load() == {"status": "uneventful"} def test_loader_user_defined_svn_properties(swh_storage, datadir, tmp_path): """Edge cases: The repository held some user defined svn-properties with special encodings, this prevented the repository from being loaded even though we do not ingest those information. """ archive_name = "httthttt" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url) assert loader.load() == {"status": "eventful"} expected_snapshot = Snapshot( id=hash_to_bytes("70487267f682c07e52a2371061369b6cf5bffa47"), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes("604a17dbb15e8d7ecb3e9f3768d09bf493667a93"), target_type=TargetType.REVISION, ) }, ) check_snapshot(expected_snapshot, loader.storage) assert_last_visit_matches( loader.storage, repo_url, status="full", type="svn", snapshot=expected_snapshot.id, ) stats = get_stats(loader.storage) assert stats["origin"] == 1 assert stats["origin_visit"] == 1 assert stats["snapshot"] == 1 assert stats["revision"] == 7 def test_loader_svn_dir_added_then_removed(swh_storage, datadir, tmp_path): """Loader should handle directory removal when processing a commit""" archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}-add-remove-dir.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) loader = SvnLoader(swh_storage, repo_url, destination_path=tmp_path) assert loader.load() == {"status": "eventful"} assert loader.visit_status() == "full" def test_loader_svn_loader_from_dump_archive(swh_storage, datadir, tmp_path): archive_name = "pkg-gourmet" archive_path = os.path.join(datadir, f"{archive_name}.tgz") repo_url = prepare_repository_from_archive(archive_path, archive_name, tmp_path) origin_url = f"svn://{archive_name}" dump_filename = f"{archive_name}.dump" with open(os.path.join(tmp_path, dump_filename), "wb") as dump_file: # create compressed dump file of pkg-gourmet repo subprocess.run(["svnrdump", "dump", repo_url], stdout=dump_file) subprocess.run(["gzip", dump_filename], cwd=tmp_path) # load svn repo from that compressed dump file loader = SvnLoaderFromDumpArchive( swh_storage, url=origin_url, archive_path=os.path.join(tmp_path, f"{dump_filename}.gz"), ) assert loader.load() == {"status": "eventful"} assert_last_visit_matches( loader.storage, origin_url, status="full", type="svn", snapshot=GOURMET_SNAPSHOT.id, ) check_snapshot(GOURMET_SNAPSHOT, loader.storage) assert get_stats(loader.storage) == { "content": 19, "directory": 17, "origin": 1, "origin_visit": 1, "release": 0, "revision": 6, "skipped_content": 0, "snapshot": 1, } class CommitChangeType(Enum): AddOrUpdate = 1 Delete = 2 class CommitChange(TypedDict, total=False): change_type: CommitChangeType path: str properties: Dict[str, str] data: bytes def add_commit(repo_url: str, message: str, changes: List[CommitChange]) -> None: conn = RemoteAccess(repo_url, auth=Auth([get_username_provider()])) editor = conn.get_commit_editor({"svn:log": message}) root = editor.open_root() for change in changes: if change["change_type"] == CommitChangeType.Delete: root.delete_entry(change["path"].rstrip("/")) else: dir_change = change["path"].endswith("/") split_path = change["path"].rstrip("/").split("/") for i in range(len(split_path)): path = "/".join(split_path[0 : i + 1]) if i < 


def test_loader_eol_style_file_property_handling_edge_case(swh_storage, tmp_path):
    # create a repository
    repo_path = os.path.join(tmp_path, "tmprepo")
    repos.create(repo_path)
    repo_url = f"file://{repo_path}"

    # first commit
    add_commit(
        repo_url,
        (
            "Add a directory containing a file with CRLF end of line "
            "and set the svn:eol-style property to native so CRLF will be "
            "replaced by LF in the file when exporting the revision"
        ),
        [
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="directory/file_with_crlf_eol.txt",
                properties={"svn:eol-style": "native"},
                data=b"Hello world!\r\n",
            )
        ],
    )

    # second commit
    add_commit(
        repo_url,
        "Remove previously added directory and file",
        [CommitChange(change_type=CommitChangeType.Delete, path="directory/",)],
    )

    # third commit
    add_commit(
        repo_url,
        (
            "Add again the same directory containing the same file with CRLF "
            "end of line but do not set the svn:eol-style property so CRLF "
            "will not be replaced by LF when exporting the revision"
        ),
        [
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="directory/file_with_crlf_eol.txt",
                data=b"Hello world!\r\n",
            )
        ],
    )

    # instantiate an svn loader checking, after each processed revision, that
    # the repository filesystem it reconstructed does not differ from a
    # subversion export of that revision
    loader = SvnLoader(
        swh_storage, repo_url, destination_path=tmp_path, check_revision=1
    )

    assert loader.load() == {"status": "eventful"}
    assert loader.visit_status() == "full"

    assert get_stats(loader.storage) == {
        "content": 2,
        "directory": 5,
        "origin": 1,
        "origin_visit": 1,
        "release": 0,
        "revision": 3,
        "skipped_content": 0,
        "snapshot": 1,
    }
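

# Hedged sketch (not part of the original patch) of the consistency check
# that check_revision=1 requests above: after each processed revision, hash
# the directory tree of a fresh subversion export and compare it with the
# tree the loader reconstructed incrementally. The exporting method exists
# on SvnLoader; wiring it this way is an illustrative assumption, not the
# loader's actual code path.
def _example_check_revision(
    loader: SvnLoader, revision: int, reconstructed_tree_hash: bytes
) -> None:
    exported_tree_hash = loader.swh_revision_hash_tree_at_svn_revision(revision)
    assert exported_tree_hash == reconstructed_tree_hash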


+def get_head_revision_paths_info(loader: SvnLoader) -> Dict[bytes, Dict[str, Any]]:
+    assert loader.snapshot is not None
+    revision_id = loader.snapshot.branches[b"HEAD"].target
+    revision = loader.storage.revision_get([revision_id])[0]
+    assert revision is not None
+
+    paths = {}
+    for entry in loader.storage.directory_ls(revision.directory, recursive=True):
+        paths[entry["name"]] = entry
+    return paths
+
+
def test_loader_eol_style_on_svn_link_handling(swh_storage, tmp_path):
    # create a repository
    repo_path = os.path.join(tmp_path, "tmprepo")
    repos.create(repo_path)
    repo_url = f"file://{repo_path}"

    # first commit
    add_commit(
        repo_url,
        (
            "Add a regular file, a directory and a link to the regular file "
            "in the directory. Set the svn:eol-style property on the regular "
            "file and the link. Set the svn:special property on the link."
        ),
        [
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="file_with_crlf_eol.txt",
                properties={"svn:eol-style": "native"},
                data=b"Hello world!\r\n",
            ),
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="directory/file_with_crlf_eol.txt",
                properties={"svn:eol-style": "native", "svn:special": "*"},
                data=b"link ../file_with_crlf_eol.txt",
            ),
        ],
    )

    # instantiate an svn loader checking, after each processed revision, that
    # the repository filesystem it reconstructed does not differ from a
    # subversion export of that revision
    loader = SvnLoader(
        swh_storage, repo_url, destination_path=tmp_path, check_revision=1
    )

    assert loader.load() == {"status": "eventful"}
    assert loader.visit_status() == "full"

    # check loaded objects are those expected
    assert get_stats(loader.storage) == {
        "content": 2,
        "directory": 2,
        "origin": 1,
        "origin_visit": 1,
        "release": 0,
        "revision": 1,
        "skipped_content": 0,
        "snapshot": 1,
    }

-    root_dir = loader.snapshot.branches[b"HEAD"].target
-    revision = loader.storage.revision_get([root_dir])[0]
-
-    paths = {}
-    for entry in loader.storage.directory_ls(revision.directory, recursive=True):
-        paths[entry["name"]] = entry
+    paths = get_head_revision_paths_info(loader)

    assert (
        loader.storage.content_get_data(paths[b"file_with_crlf_eol.txt"]["sha1"])
        == b"Hello world!\n"
    )

    assert paths[b"directory/file_with_crlf_eol.txt"]["perms"] == DentryPerms.symlink
    assert (
        loader.storage.content_get_data(
            paths[b"directory/file_with_crlf_eol.txt"]["sha1"]
        )
        == b"../file_with_crlf_eol.txt"
    )
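

# Hedged sketch (not part of the original patch) of the svn:special
# convention the assertions above rely on: a versioned file carrying the
# svn:special property stores "link <target>" as its content, and an export
# turns it into a symbolic link; without the property, the same bytes remain
# a regular file.
def _example_svn_special_to_symlink(data: bytes, svn_special: bool):
    if svn_special and data.startswith(b"link "):
        # becomes a symlink whose target is the path after the "link " prefix
        return DentryPerms.symlink, data[len(b"link ") :]
    # stays a regular file, content untouched
    return DentryPerms.content, data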


def test_loader_svn_special_property_unset(swh_storage, tmp_path):
    # create a repository
    repo_path = os.path.join(tmp_path, "tmprepo")
    repos.create(repo_path)
    repo_url = f"file://{repo_path}"

    # first commit
    add_commit(
        repo_url,
        (
            "Create a regular file, a link to a file and a link to an "
            "external file. Set the svn:special property on the links."
        ),
        [
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="file.txt",
                data=b"Hello world!\n",
            ),
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="link.txt",
                properties={"svn:special": "*"},
                data=b"link ./file.txt",
            ),
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="external_link.txt",
                properties={"svn:special": "*"},
                data=b"link /home/user/data.txt",
            ),
        ],
    )

    # second commit
    add_commit(
        repo_url,
        "Unset the svn:special property on the links.",
        [
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="link.txt",
                properties={"svn:special": None},
            ),
            CommitChange(
                change_type=CommitChangeType.AddOrUpdate,
                path="external_link.txt",
                properties={"svn:special": None},
            ),
        ],
    )

    # instantiate an svn loader checking, after each processed revision, that
    # the repository filesystem it reconstructed does not differ from a
    # subversion export of that revision
    loader = SvnLoader(
        swh_storage, repo_url, destination_path=tmp_path, check_revision=1
    )

    assert loader.load() == {"status": "eventful"}
    assert loader.visit_status() == "full"

    # check loaded objects are those expected
    assert get_stats(loader.storage) == {
        "content": 5,
        "directory": 2,
        "origin": 1,
        "origin_visit": 1,
        "release": 0,
        "revision": 2,
        "skipped_content": 0,
        "snapshot": 1,
    }

-    root_dir = loader.snapshot.branches[b"HEAD"].target
-    revision = loader.storage.revision_get([root_dir])[0]
-
-    paths = {}
-    for entry in loader.storage.directory_ls(revision.directory, recursive=True):
-        paths[entry["name"]] = entry
+    paths = get_head_revision_paths_info(loader)

    assert paths[b"link.txt"]["perms"] == DentryPerms.content
    assert (
        loader.storage.content_get_data(paths[b"link.txt"]["sha1"])
        == b"link ./file.txt"
    )

    assert paths[b"external_link.txt"]["perms"] == DentryPerms.content
    assert (
        loader.storage.content_get_data(paths[b"external_link.txt"]["sha1"])
        == b"link /home/user/data.txt"
    )
+
+
+def test_loader_invalid_svn_eol_style_property_value(swh_storage, tmp_path):
+    # create a repository
+    repo_path = os.path.join(tmp_path, "tmprepo")
+    repos.create(repo_path)
+    repo_url = f"file://{repo_path}"
+
+    filename = "file_with_crlf_eol.txt"
+    file_content = b"Hello world!\r\n"
+
+    # first commit
+    add_commit(
+        repo_url,
+        (
+            "Add a file with CRLF end of line and set the svn:eol-style "
+            "property to an invalid value."
+        ),
+        [
+            CommitChange(
+                change_type=CommitChangeType.AddOrUpdate,
+                path=filename,
+                properties={"svn:eol-style": "foo"},
+                data=file_content,
+            )
+        ],
+    )
+
+    # instantiate an svn loader checking, after each processed revision, that
+    # the repository filesystem it reconstructed does not differ from a
+    # subversion export of that revision
+    loader = SvnLoader(
+        swh_storage, repo_url, destination_path=tmp_path, check_revision=1
+    )
+
+    assert loader.load() == {"status": "eventful"}
+    assert loader.visit_status() == "full"
+
+    paths = get_head_revision_paths_info(loader)
+    # end of lines should not have been processed
+    assert (
+        loader.storage.content_get_data(paths[filename.encode()]["sha1"])
+        == file_content
+    )
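

# Hedged sketch (not part of the original patch) of the end-of-line dispatch
# the last assertion exercises: only the recognized svn:eol-style values
# ("native", "LF", "CRLF", "CR") trigger a rewrite on export; any other
# value, such as the "foo" used above, leaves the content bytes untouched.
def _example_eol_style_export(data: bytes, eol_style: str) -> bytes:
    eols = {
        "native": b"\n",  # assumes a Unix platform, where native means LF
        "LF": b"\n",
        "CRLF": b"\r\n",
        "CR": b"\r",
    }
    eol = eols.get(eol_style)
    if eol is None:
        # unknown value: no end-of-line processing, as the test asserts
        return data
    # normalize all line-ending conventions, then apply the requested one
    return data.replace(b"\r\n", b"\n").replace(b"\r", b"\n").replace(b"\n", eol)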