# Copyright (C) 2015-2022  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

"""SVN client in charge of iterating over svn logs and yield commit
representations including the hash tree/content computations per svn commit.
"""

import logging
import os
import shutil
import tempfile
from typing import Dict, Iterator, List, Optional, Tuple, Union

from subvertpy import SubversionException, client, properties, wc
from subvertpy.ra import Auth, RemoteAccess, get_username_provider

from swh.model.from_disk import Directory as DirectoryFromDisk
from swh.model.model import (
    Content,
    Directory,
    Person,
    SkippedContent,
    TimestampWithTimezone,
)

from . import converters, replay
from .svn_retry import svn_retry
from .utils import is_recursive_external, parse_external_definition

# When log message contains empty data
DEFAULT_AUTHOR_MESSAGE = ""

logger = logging.getLogger(__name__)


class SvnRepo:
    """Svn repository representation.

    Args:
        remote_url: Remove svn repository url
        origin_url: Associated origin identifier
        local_dirname: Path to write intermediary svn action results

    """

    def __init__(
        self,
        remote_url: str,
        origin_url: str,
        local_dirname: str,
        max_content_length: int,
        from_dump: bool = False,
    ):
        self.remote_url = remote_url.rstrip("/")
        self.origin_url = origin_url
        self.from_dump = from_dump

        auth = Auth([get_username_provider()])
        # one connection for log iteration
        self.conn_log = self.remote_access(auth)
        # another for replay
        self.conn = self.remote_access(auth)
        # one client for update operation
        self.client = client.Client(auth=auth)

        self.local_dirname = local_dirname
        local_name = os.path.basename(self.remote_url)
        self.local_url = os.path.join(self.local_dirname, local_name).encode("utf-8")

        self.uuid = self.conn.get_uuid().encode("utf-8")
        self.swhreplay = replay.Replay(
            conn=self.conn,
            rootpath=self.local_url,
            svnrepo=self,
            temp_dir=local_dirname,
        )
        self.max_content_length = max_content_length
        self.has_relative_externals = False
        self.has_recursive_externals = False
        self.replay_started = False

        # compute root directory path from the remote repository URL, required to
        # properly load the sub-tree of a repository mounted from a dump file
        repos_root_url = self.info(origin_url).repos_root_url
        self.root_directory = origin_url.replace(repos_root_url, "", 1)

    def __str__(self):
        return str(
            {
                "swh-origin": self.origin_url,
                "remote_url": self.remote_url,
                "local_url": self.local_url,
                "uuid": self.uuid,
            }
        )

    def head_revision(self) -> int:
        """Retrieve current head revision."""
        return self.conn.get_latest_revnum()

    def initial_revision(self) -> int:
        """Retrieve the initial revision from which the remote url appeared."""
        return 1

    def convert_commit_message(self, msg: Union[str, bytes]) -> bytes:
        """Simply encode the commit message.

        Args:
            msg: the commit message to convert.

        Returns:
            The transformed message as bytes.

        """
        if isinstance(msg, bytes):
            return msg
        return msg.encode("utf-8")

    def convert_commit_date(self, date: bytes) -> TimestampWithTimezone:
        """Convert the message commit date into a timestamp in swh format.
        The precision is kept.

        Args:
            date: the commit date to convert.

        Returns:
            The transformed date.

        """
        return converters.svn_date_to_swh_date(date)

    def convert_commit_author(self, author: Optional[bytes]) -> Person:
        """Convert the commit author into an swh person.

        Args:
            author: the commit author to convert.

        Returns:
            Person as model object

        """
        return converters.svn_author_to_swh_person(author)

    def __to_entry(self, log_entry: Tuple) -> Dict:
        changed_paths, rev, revprops, has_children = log_entry

        author_date = self.convert_commit_date(
            revprops.get(properties.PROP_REVISION_DATE)
        )

        author = self.convert_commit_author(
            revprops.get(properties.PROP_REVISION_AUTHOR)
        )

        message = self.convert_commit_message(
            revprops.get(properties.PROP_REVISION_LOG, DEFAULT_AUTHOR_MESSAGE)
        )

        # when loading from a dump file, only revisions touching the loaded
        # sub-tree (root_directory) are considered to hold changes
        has_changes = (
            not self.from_dump
            or changed_paths is not None
            and any(
                changed_path.startswith(self.root_directory)
                for changed_path in changed_paths.keys()
            )
        )

        return {
            "rev": rev,
            "author_date": author_date,
            "author_name": author,
            "message": message,
            "has_changes": has_changes,
        }

    def logs(self, revision_start: int, revision_end: int) -> Iterator[Dict]:
        """Stream svn logs between revision_start and revision_end.

        Yields revision and associated revision information between the
        revision start and revision_end.

        Args:
            revision_start: the svn revision starting bound
            revision_end: the svn revision ending bound

        Yields:
            dictionaries of log entries with the following keys:

            - rev: revision number
            - author_date: date of the commit
            - author_name: name of the author
            - message: commit message
            - has_changes: whether the revision touches the loaded tree

        """
        for log_entry in self.conn_log.iter_log(
            paths=None,
            start=revision_start,
            end=revision_end,
            discover_changed_paths=self.from_dump,
        ):
            yield self.__to_entry(log_entry)

    @svn_retry()
    def remote_access(self, auth: Auth) -> RemoteAccess:
        """Simple wrapper around subvertpy.ra.RemoteAccess creation
        enabling to retry the operation if a network error occurs."""
        return RemoteAccess(self.remote_url, auth=auth)

    @svn_retry()
    def info(self, origin_url: str):
        """Simple wrapper around subvertpy.client.Client.info enabling to retry
        the command if a network error occurs."""
        info = self.client.info(origin_url.rstrip("/"))
        # info maps path -> entry; a single URL target yields a single entry
        return next(iter(info.values()))

    @svn_retry()
    def export(
        self,
        url: str,
        to: str,
        rev: Optional[int] = None,
        peg_rev: Optional[int] = None,
        recurse: bool = True,
        ignore_externals: bool = False,
        overwrite: bool = False,
        ignore_keywords: bool = False,
    ) -> int:
        """Simple wrapper around subvertpy.client.Client.export enabling to retry
        the command if a network error occurs.

        See documentation of svn_client_export5 function from subversion C API
        to get details about parameters.
        """
        # remove export path as command can be retried
        if os.path.isfile(to) or os.path.islink(to):
            os.remove(to)
        elif os.path.isdir(to):
            shutil.rmtree(to)
        options = []
        if rev is not None:
            options.append(f"-r {rev}")
        if recurse:
            options.append("--depth infinity")
        if ignore_externals:
            options.append("--ignore-externals")
        if overwrite:
            options.append("--force")
        if ignore_keywords:
            options.append("--ignore-keywords")
        logger.debug(
            "svn export %s %s%s %s",
            " ".join(options),
            url,
            f"@{peg_rev}" if peg_rev else "",
            to,
        )
        return self.client.export(
            url,
            to=to,
            rev=rev,
            peg_rev=peg_rev,
            recurse=recurse,
            ignore_externals=ignore_externals,
            overwrite=overwrite,
            ignore_keywords=ignore_keywords,
        )

    @svn_retry()
    def checkout(
        self,
        url: str,
        path: str,
        rev: Optional[int] = None,
        peg_rev: Optional[int] = None,
        recurse: bool = True,
        ignore_externals: bool = False,
        allow_unver_obstructions: bool = False,
    ) -> int:
        """Simple wrapper around subvertpy.client.Client.checkout enabling to retry
        the command if a network error occurs.

        See documentation of svn_client_checkout3 function from subversion C API
        to get details about parameters.
        """
        if os.path.isdir(os.path.join(path, ".svn")):
            # cleanup checkout path as command can be retried and svn working copy
            # might be locked
            wc.cleanup(path)
        elif os.path.isdir(path):
            # recursively remove checkout path otherwise if it is not a svn
            # working copy
            shutil.rmtree(path)
        options = []
        if rev is not None:
            options.append(f"-r {rev}")
        if recurse:
            options.append("--depth infinity")
        if ignore_externals:
            options.append("--ignore-externals")
        logger.debug(
            "svn checkout %s %s%s %s",
            " ".join(options),
            self.remote_url,
            f"@{peg_rev}" if peg_rev else "",
            path,
        )
        return self.client.checkout(
            url,
            path=path,
            rev=rev,
            peg_rev=peg_rev,
            recurse=recurse,
            ignore_externals=ignore_externals,
            allow_unver_obstructions=allow_unver_obstructions,
        )

    @svn_retry()
    def propget(
        self,
        name: str,
        target: str,
        peg_rev: Optional[int],
        rev: Optional[int] = None,
        recurse: bool = False,
    ):
        """Simple wrapper around subvertpy.client.Client.propget enabling to retry
        the command if a network error occurs.

        See documentation of svn_client_propget5 function from subversion C API
        to get details about parameters.
        """
        return self.client.propget(name, target, peg_rev, rev, recurse)

    def export_temporary(self, revision: int) -> Tuple[str, bytes]:
        """Export the repository to a given revision in a temporary location.
        This is up to the caller of this function to clean up the temporary
        location when done (cf. self.clean_fs method)

        Args:
            revision: Revision to export at

        Returns:
            The tuple local_dirname the temporary location root folder,
            local_url where the repository was exported.

        """
        local_dirname = tempfile.mkdtemp(
            dir=self.local_dirname, prefix=f"check-revision-{revision}."
        )

        local_name = os.path.basename(self.remote_url)
        local_url = os.path.join(local_dirname, local_name)

        url = self.remote_url
        # if some paths have external URLs relative to the repository URL but
        # targeting paths outside it, we need to export from the origin URL as
        # the remote URL can target a dump mounted on the local filesystem
        if self.replay_started and self.has_relative_externals:
            # externals detected while replaying revisions
            url = self.origin_url
        elif not self.replay_started:
            # revisions replay has not started, we need to check if svn:externals
            # properties are set from a checkout of the revision and if some
            # external URLs are relative to pick the right export URL,
            # recursive externals are also checked
            with tempfile.TemporaryDirectory(
                dir=self.local_dirname, prefix=f"checkout-revision-{revision}."
            ) as co_dirname:
                self.checkout(
                    self.remote_url, co_dirname, revision, ignore_externals=True
                )
                # get all svn:externals properties recursively
                externals = self.propget("svn:externals", co_dirname, None, None, True)
                self.has_relative_externals = False
                self.has_recursive_externals = False
                for path, external_defs in externals.items():
                    if self.has_relative_externals or self.has_recursive_externals:
                        break
                    path = path.replace(self.remote_url.rstrip("/") + "/", "")
                    for external_def in os.fsdecode(external_defs).split("\n"):
                        # skip empty line or comment
                        if not external_def or external_def.startswith("#"):
                            continue
                        (
                            external_path,
                            external_url,
                            _,
                            relative_url,
                        ) = parse_external_definition(
                            external_def.rstrip("\r"), path, self.origin_url
                        )
                        if is_recursive_external(
                            self.origin_url,
                            path,
                            external_path,
                            external_url,
                        ):
                            self.has_recursive_externals = True
                            url = self.remote_url
                            break
                        if relative_url:
                            self.has_relative_externals = True
                            url = self.origin_url
                            break

        try:
            url = url.rstrip("/")
            self.export(
                url,
                to=local_url,
                rev=revision,
                ignore_keywords=True,
                ignore_externals=self.has_recursive_externals,
            )
        except SubversionException as se:
            if se.args[0].startswith(
                (
                    "Error parsing svn:externals property",
                    "Unrecognized format for the relative external URL",
                )
            ):
                pass
            else:
                raise

        if self.from_dump:
            # when exporting a subpath of a subversion repository mounted from
            # a dump file generated by svnrdump, exported paths are relative to
            # the repository root path while they are relative to the subpath
            # otherwise, so we need to adjust the URL of the exported filesystem
            root_dir_local_url = os.path.join(local_url, self.root_directory.strip("/"))
            # check that root directory of a subproject did not get removed in
            # revision
            if os.path.exists(root_dir_local_url):
                local_url = root_dir_local_url

        return local_dirname, os.fsencode(local_url)

    def swh_hash_data_per_revision(
        self, start_revision: int, end_revision: int
    ) -> Iterator[
        Tuple[
            int,
            Dict,
            Tuple[List[Content], List[SkippedContent], List[Directory]],
            DirectoryFromDisk,
        ],
    ]:
        """Compute swh hash data per each revision between start_revision and
        end_revision.

        Args:
            start_revision: starting revision
            end_revision: ending revision

        Yields:
            Tuple (rev, commit, objects, root_dir):

            - rev: current revision
            - commit: commit data (author, date, message) for such revision
            - objects: Tuple of lists of objects between start_revision and
              end_revision
            - complete Directory representation

        """
        # even in incremental loading mode, we need to replay the whole set of
        # path modifications from first revision to restore possible file states
        # induced by setting svn properties on those files (end of line style for
        # instance)
        self.replay_started = True
        first_revision = 1 if start_revision else 0  # handle empty repository edge case
        for commit in self.logs(first_revision, end_revision):
            rev = commit["rev"]
            objects = self.swhreplay.compute_objects(rev)

            if rev >= start_revision:
                # start yielding new data to archive once we reached the revision
                # to resume the loading from
                if commit["has_changes"] or start_revision == 0:
                    # yield data only if commit has changes or if repository
                    # is empty
                    root_dir_path = self.root_directory.encode()[1:]
                    if not root_dir_path or root_dir_path in self.swhreplay.directory:
                        root_dir = self.swhreplay.directory[root_dir_path]
                    else:
                        # root directory of subproject got removed in revision,
                        # return empty directory for that edge case
                        root_dir = DirectoryFromDisk()
                    yield rev, commit, objects, root_dir

    def swh_hash_data_at_revision(
        self, revision: int
    ) -> Tuple[Dict, DirectoryFromDisk]:
        """Compute the information at a given svn revision. This is expected to be
        used for checks only.

        Yields:
            The tuple (commit dictionary, targeted directory object).

        """
        # Update disk representation of the repository at revision id
        local_dirname, local_url = self.export_temporary(revision)
        # Compute the current hashes on disk
        directory = DirectoryFromDisk.from_disk(
            path=local_url, max_content_length=self.max_content_length
        )

        # Retrieve the commit information for revision
        commit = list(self.logs(revision, revision))[0]

        # Clean export directory
        self.clean_fs(local_dirname)

        return commit, directory

    def clean_fs(self, local_dirname: Optional[str] = None) -> None:
        """Clean up the local working copy.

        Args:
            local_dirname: Path to remove recursively if provided. Otherwise,
                remove the temporary upper root tree used for svn repository
                loading.

        """
        dirname = local_dirname or self.local_dirname
        if os.path.exists(dirname):
            logger.debug("cleanup %s", dirname)
            shutil.rmtree(dirname)
""" dirname = local_dirname or self.local_dirname if os.path.exists(dirname): logger.debug("cleanup %s", dirname) shutil.rmtree(dirname) diff --git a/swh/loader/svn/tests/conftest.py b/swh/loader/svn/tests/conftest.py index 1790084..fac2bb3 100644 --- a/swh/loader/svn/tests/conftest.py +++ b/swh/loader/svn/tests/conftest.py @@ -1,57 +1,58 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict import pytest from swh.loader.svn.loader import SvnRepo from .utils import create_repo @pytest.fixture def swh_storage_backend_config(swh_storage_backend_config): """Basic pg storage configuration with no journal collaborator (to avoid pulling optional dependency on clients of this fixture) """ return { "cls": "filter", "storage": { "cls": "buffer", "min_batch_size": { "content": 10000, "content_bytes": 1073741824, "directory": 2500, "revision": 10, "release": 100, }, "storage": swh_storage_backend_config, }, } @pytest.fixture def swh_loader_config(swh_storage_backend_config) -> Dict[str, Any]: return { "storage": swh_storage_backend_config, "check_revision": 100, "temp_directory": "/tmp", } @pytest.fixture def repo_url(tmpdir_factory): # create a repository return create_repo(tmpdir_factory.mktemp("repos")) @pytest.fixture(autouse=True) def svn_retry_sleep_mocker(mocker): mocker.patch.object(SvnRepo.export.retry, "sleep") mocker.patch.object(SvnRepo.checkout.retry, "sleep") mocker.patch.object(SvnRepo.propget.retry, "sleep") mocker.patch.object(SvnRepo.remote_access.retry, "sleep") + mocker.patch.object(SvnRepo.info.retry, "sleep") diff --git a/swh/loader/svn/tests/test_svn_retry.py b/swh/loader/svn/tests/test_svn_retry.py index f0c93ca..6c5c58f 100644 --- a/swh/loader/svn/tests/test_svn_retry.py +++ b/swh/loader/svn/tests/test_svn_retry.py @@ -1,292 
# Copyright (C) 2022  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import os

import pytest
from subvertpy import SubversionException
from subvertpy.ra import Auth, RemoteAccess, get_username_provider

from swh.loader.svn.svn import SvnRepo
from swh.loader.svn.svn_retry import SVN_RETRY_MAX_ATTEMPTS, SVN_RETRY_WAIT_EXP_BASE
from swh.loader.tests import prepare_repository_from_archive


def _get_repo_url(archive_name, datadir, tmp_path):
    archive_path = os.path.join(datadir, f"{archive_name}.tgz")
    return prepare_repository_from_archive(archive_path, "pkg-gourmet", tmp_path)


@pytest.fixture()
def sample_repo_url(datadir, tmp_path):
    return _get_repo_url("pkg-gourmet", datadir, tmp_path)


@pytest.fixture()
def sample_repo_with_externals_url(datadir, tmp_path):
    return _get_repo_url("pkg-gourmet-with-external-id", datadir, tmp_path)


class SVNClientWrapper:
    """Methods of subvertpy.client.Client cannot be patched by mocker fixture
    as they are read only attributes due to subvertpy.client module being a
    C extension module. So we use that wrapper class instead to simulate
    mocking behavior.
    """

    def __init__(self, client, exception, nb_failed_calls):
        self.client = client
        self.exception = exception
        self.nb_failed_calls = nb_failed_calls
        self.nb_calls = 0

    def _wrapped_svn_cmd(self, svn_cmd, *args, **kwargs):
        # raise the configured exception for the first nb_failed_calls calls,
        # then delegate to the real client command
        self.nb_calls = self.nb_calls + 1
        if self.nb_calls <= self.nb_failed_calls:
            raise self.exception
        else:
            return svn_cmd(*args, **kwargs)

    def export(self, *args, **kwargs):
        return self._wrapped_svn_cmd(self.client.export, *args, **kwargs)

    def checkout(self, *args, **kwargs):
        return self._wrapped_svn_cmd(self.client.checkout, *args, **kwargs)

    def propget(self, *args, **kwargs):
        return self._wrapped_svn_cmd(self.client.propget, *args, **kwargs)

    def info(self, *args, **kwargs):
        return self._wrapped_svn_cmd(self.client.info, *args, **kwargs)


def assert_sleep_calls(mock_sleep, mocker, nb_failures):
    # retry waits follow an exponential backoff: base**0, base**1, ...
    mock_sleep.assert_has_calls(
        [
            mocker.call(param)
            for param in [SVN_RETRY_WAIT_EXP_BASE**i for i in range(nb_failures)]
        ]
    )


RETRYABLE_EXCEPTIONS = [
    SubversionException(
        "Error running context: The server unexpectedly closed the connection.",
        120108,
    ),
    SubversionException("Connection timed out", 175012),
    SubversionException("Unable to connect to a repository at URL", 170013),
    ConnectionResetError(),
    TimeoutError(),
]


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_export_retry_success(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.export.retry, "sleep")

    nb_failed_calls = 2
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    export_path = os.path.join(tmp_path, "export")
    svnrepo.export(sample_repo_url, export_path)
    assert os.path.exists(export_path)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_export_retry_failure(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.export.retry, "sleep")

    nb_failed_calls = SVN_RETRY_MAX_ATTEMPTS
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    with pytest.raises(type(exception_to_retry)):
        export_path = os.path.join(tmp_path, "export")
        svnrepo.export(sample_repo_url, export_path)

    assert not os.path.exists(export_path)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls - 1)
@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_checkout_retry_success(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.checkout.retry, "sleep")

    nb_failed_calls = 2
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    checkout_path = os.path.join(tmp_path, "checkout")
    svnrepo.checkout(sample_repo_url, checkout_path, svnrepo.head_revision())
    assert os.path.exists(checkout_path)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_checkout_retry_failure(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.checkout.retry, "sleep")

    nb_failed_calls = SVN_RETRY_MAX_ATTEMPTS
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    checkout_path = os.path.join(tmp_path, "checkout")
    with pytest.raises(type(exception_to_retry)):
        svnrepo.checkout(sample_repo_url, checkout_path, svnrepo.head_revision())

    assert not os.path.exists(checkout_path)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls - 1)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_propget_retry_success(
    mocker, tmp_path, sample_repo_with_externals_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_with_externals_url,
        sample_repo_with_externals_url,
        tmp_path,
        max_content_length=100000,
    )

    checkout_path = os.path.join(tmp_path, "checkout")
    svnrepo.checkout(
        sample_repo_with_externals_url,
        checkout_path,
        svnrepo.head_revision(),
        ignore_externals=True,
    )

    mock_sleep = mocker.patch.object(svnrepo.propget.retry, "sleep")

    nb_failed_calls = 2
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    externals = svnrepo.propget("svn:externals", checkout_path, None, None, True)

    assert externals

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_propget_retry_failure(
    mocker, tmp_path, sample_repo_with_externals_url, exception_to_retry
):
    svnrepo = SvnRepo(
        sample_repo_with_externals_url,
        sample_repo_with_externals_url,
        tmp_path,
        max_content_length=100000,
    )

    checkout_path = os.path.join(tmp_path, "checkout")
    svnrepo.checkout(
        sample_repo_with_externals_url,
        checkout_path,
        svnrepo.head_revision(),
        ignore_externals=True,
    )

    mock_sleep = mocker.patch.object(svnrepo.propget.retry, "sleep")

    nb_failed_calls = SVN_RETRY_MAX_ATTEMPTS
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    with pytest.raises(type(exception_to_retry)):
        svnrepo.propget("svn:externals", checkout_path, None, None, True)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls - 1)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_remote_access_retry_success(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    nb_failed_calls = 2
    mock_ra = mocker.patch("swh.loader.svn.svn.RemoteAccess")
    remote_access = RemoteAccess(sample_repo_url, auth=Auth([get_username_provider()]))
    # SvnRepo creates two RemoteAccess connections, each must fail then succeed
    mock_ra.side_effect = (
        [exception_to_retry] * nb_failed_calls
        + [remote_access]
        + [exception_to_retry] * nb_failed_calls
        + [remote_access]
    )

    mock_sleep = mocker.patch.object(SvnRepo.remote_access.retry, "sleep")

    SvnRepo(
        sample_repo_url,
        sample_repo_url,
        tmp_path,
        max_content_length=100000,
    )

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls)
@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_remote_access_retry_failure(
    mocker, tmp_path, sample_repo_url, exception_to_retry
):
    nb_failed_calls = SVN_RETRY_MAX_ATTEMPTS
    mock_ra = mocker.patch("swh.loader.svn.svn.RemoteAccess")
    remote_access = RemoteAccess(sample_repo_url, auth=Auth([get_username_provider()]))
    mock_ra.side_effect = (
        [exception_to_retry] * nb_failed_calls
        + [remote_access]
        + [exception_to_retry] * nb_failed_calls
        + [remote_access]
    )

    mock_sleep = mocker.patch.object(SvnRepo.remote_access.retry, "sleep")

    with pytest.raises(type(exception_to_retry)):
        SvnRepo(
            sample_repo_url,
            sample_repo_url,
            tmp_path,
            max_content_length=100000,
        )

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls - 1)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_info_retry_success(mocker, tmp_path, sample_repo_url, exception_to_retry):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.info.retry, "sleep")

    nb_failed_calls = 2
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    info = svnrepo.info(sample_repo_url)
    assert info

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls)


@pytest.mark.parametrize("exception_to_retry", RETRYABLE_EXCEPTIONS)
def test_svn_info_retry_failure(mocker, tmp_path, sample_repo_url, exception_to_retry):
    svnrepo = SvnRepo(
        sample_repo_url, sample_repo_url, tmp_path, max_content_length=100000
    )

    mock_sleep = mocker.patch.object(svnrepo.info.retry, "sleep")

    nb_failed_calls = SVN_RETRY_MAX_ATTEMPTS
    svnrepo.client = SVNClientWrapper(
        svnrepo.client, exception_to_retry, nb_failed_calls
    )

    with pytest.raises(type(exception_to_retry)):
        svnrepo.info(sample_repo_url)

    assert_sleep_calls(mock_sleep, mocker, nb_failed_calls - 1)