diff --git a/swh/model/git.py b/swh/model/git.py index 3fea28d..842ecd8 100644 --- a/swh/model/git.py +++ b/swh/model/git.py @@ -1,794 +1,563 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from enum import Enum from swh.model import hashutil, identifiers ROOT_TREE_KEY = b'' class GitType(Enum): BLOB = b'blob' TREE = b'tree' EXEC = b'exec' LINK = b'link' COMM = b'commit' RELE = b'release' REFS = b'ref' class GitPerm(Enum): BLOB = b'100644' TREE = b'40000' EXEC = b'100755' LINK = b'120000' def _compute_directory_git_sha1(hashes): """Compute a directory git sha1 from hashes. Args: hashes: list of tree entries with keys: - sha1_git: the tree entry's sha1 - name: file or subdir's name - perms: the tree entry's sha1 permissions Returns: the binary sha1 of the dictionary's identifier Assumes: Every path exists in hashes. """ directory = { 'entries': [ { 'name': entry['name'], 'perms': int(entry['perms'].value, 8), 'target': entry['sha1_git'], 'type': 'dir' if entry['perms'] == GitPerm.TREE else 'file', } for entry in hashes ] } return hashutil.hash_to_bytes(identifiers.directory_identifier(directory)) def compute_directory_git_sha1(dirpath, hashes): """Compute a directory git sha1 for a dirpath. Args: dirpath: the directory's absolute path hashes: list of tree entries with keys: - sha1_git: the tree entry's sha1 - name: file or subdir's name - perms: the tree entry's sha1 permissions Returns: the binary sha1 of the dictionary's identifier Assumes: Every path exists in hashes. """ return _compute_directory_git_sha1(hashes[dirpath]) def compute_revision_sha1_git(revision): """Compute a revision sha1 git from its dict representation. Args: revision: Additional dictionary information needed to compute a synthetic revision. Following keys are expected: - author - date - committer - committer_date - message - type - directory: binary form of the tree hash Returns: revision sha1 in bytes # FIXME: beware, bytes output from storage api """ return hashutil.hash_to_bytes(identifiers.revision_identifier(revision)) def compute_release_sha1_git(release): """Compute a release sha1 git from its dict representation. Args: release: Additional dictionary information needed to compute a synthetic release. Following keys are expected: - name - message - date - author - revision: binary form of the sha1_git revision targeted by this Returns: release sha1 in bytes """ return hashutil.hash_to_bytes(identifiers.release_identifier(release)) def compute_link_metadata(linkpath): """Given a linkpath, compute the git metadata. Args: linkpath: absolute pathname of the link Returns: Dictionary of values: - data: link's content - length: link's content length - name: basename of the link - perms: git permission for link - type: git type for link - path: absolute path to the link on filesystem """ data = os.readlink(linkpath) link_metadata = hashutil.hash_data(data) link_metadata.update({ 'data': data, 'length': len(data), 'name': os.path.basename(linkpath), 'perms': GitPerm.LINK, 'type': GitType.BLOB, 'path': linkpath }) return link_metadata def compute_blob_metadata(filepath): """Given a filepath, compute the git metadata. Args: filepath: absolute pathname of the file. 
Returns: Dictionary of values: - name: basename of the file - perms: git permission for file - type: git type for file - path: absolute filepath on filesystem """ blob_metadata = hashutil.hash_path(filepath) perms = GitPerm.EXEC if os.access(filepath, os.X_OK) else GitPerm.BLOB blob_metadata.update({ 'name': os.path.basename(filepath), 'perms': perms, 'type': GitType.BLOB, 'path': filepath }) return blob_metadata def _compute_tree_metadata(dirname, hashes): """Given a dirname, compute the git metadata. Args: dirname: absolute pathname of the directory. hashes: list of tree dirname's entries with keys: - sha1_git: the tree entry's sha1 - name: file or subdir's name - perms: the tree entry's sha1 permissions Returns: Dictionary of values: - sha1_git: tree's sha1 git - name: basename of the directory - perms: git permission for directory - type: git type for directory - path: absolute path to directory on filesystem """ return { 'sha1_git': _compute_directory_git_sha1(hashes), 'name': os.path.basename(dirname), 'perms': GitPerm.TREE, 'type': GitType.TREE, 'path': dirname } def compute_tree_metadata(dirname, ls_hashes): """Given a dirname, compute the git metadata. Args: dirname: absolute pathname of the directory. ls_hashes: dictionary of path, hashes Returns: Dictionary of values: - sha1_git: tree's sha1 git - name: basename of the directory - perms: git permission for directory - type: git type for directory - path: absolute path to directory on filesystem """ return _compute_tree_metadata(dirname, ls_hashes[dirname]) def default_validation_dir(dirpath): """Default validation function. This is the equivalent of the identity function. Args: dirpath: Path to validate Returns: True """ return True def __walk(rootdir, dir_ok_fn=default_validation_dir, remove_empty_folder=False): """Walk the filesystem and yields a 3 tuples (dirpath, dirnames as set of absolute paths, filenames as set of abslute paths) Ignore files which won't pass the dir_ok_fn validation. If remove_empty_folder is True, remove and ignore any encountered empty folder. Args: - rootdir: starting walk root directory path - dir_ok_fn: validation function. if folder encountered are not ok, they are ignored. Default to default_validation_dir which does nothing. - remove_empty_folder: Flag to remove and ignore any encountered empty folders. Yields: 3 tuples dirpath, set of absolute children dirname paths, set of absolute filename paths. """ def basic_gen_dir(rootdir): for dp, dns, fns in os.walk(rootdir, topdown=False): yield (dp, set((os.path.join(dp, dn) for dn in dns)), set((os.path.join(dp, fn) for fn in fns))) if dir_ok_fn == default_validation_dir: if not remove_empty_folder: # os.walk yield from basic_gen_dir(rootdir) else: # os.walk + empty dir cleanup empty_folders = set() for dp, dns, fns in basic_gen_dir(rootdir): if not dns and not fns: empty_folders.add(dp) # need to remove it because folder of empty folder # is an empty folder!!! 
if os.path.islink(dp): os.remove(dp) else: os.rmdir(dp) parent = os.path.dirname(dp) # edge case about parent containing one empty # folder which become an empty one while not os.listdir(parent): empty_folders.add(parent) if os.path.islink(parent): os.remove(parent) else: os.rmdir(parent) parent = os.path.dirname(parent) continue yield (dp, dns - empty_folders, fns) else: def filtfn(dirnames): return set(filter(dir_ok_fn, dirnames)) gen_dir = ((dp, dns, fns) for dp, dns, fns in basic_gen_dir(rootdir) if dir_ok_fn(dp)) if not remove_empty_folder: # os.walk + filtering for dp, dns, fns in gen_dir: yield (dp, filtfn(dns), fns) else: # os.walk + filtering + empty dir cleanup empty_folders = set() for dp, dns, fns in gen_dir: dps = filtfn(dns) if not dps and not fns: empty_folders.add(dp) # need to remove it because folder of empty folder # is an empty folder!!! if os.path.islink(dp): os.remove(dp) else: os.rmdir(dp) parent = os.path.dirname(dp) # edge case about parent containing one empty # folder which become an empty one while not os.listdir(parent): empty_folders.add(parent) if os.path.islink(parent): os.remove(parent) else: os.rmdir(parent) parent = os.path.dirname(parent) continue yield dp, dps - empty_folders, fns def walk_and_compute_sha1_from_directory(rootdir, dir_ok_fn=default_validation_dir, with_root_tree=True, remove_empty_folder=False): """(Deprecated) TODO migrate the code to compute_hashes_from_directory. Compute git sha1 from directory rootdir. Args: - rootdir: Root directory from which beginning the git hash computation - dir_ok_fn: Filter function to filter directory according to rules defined in the function. By default, all folders are ok. Example override: dir_ok_fn = lambda dirpath: b'svn' not in dirpath - with_root_tree: Determine if we compute the upper root tree's checksums. As a default, we want it. One possible use case where this is not useful is the update (cf. `update_checksums_from`) Returns: Dictionary of entries with keys and as values a list of directory entries. Those are list of dictionary with keys: - 'perms' - 'type' - 'name' - 'sha1_git' - and specifically content: 'sha1', 'sha256', ... Note: One special key is ROOT_TREE_KEY to indicate the upper root of the directory (this is the revision's directory). Raises: Nothing If something is raised, this is a programmatic error. 
""" ls_hashes = {} all_links = set() if rootdir.endswith(b'/'): rootdir = rootdir.rstrip(b'/') for dirpath, dirnames, filenames in __walk( rootdir, dir_ok_fn, remove_empty_folder): hashes = [] links = (file for file in filenames.union(dirnames) if os.path.islink(file)) for linkpath in links: all_links.add(linkpath) m_hashes = compute_link_metadata(linkpath) hashes.append(m_hashes) for filepath in (file for file in filenames if file not in all_links): m_hashes = compute_blob_metadata(filepath) hashes.append(m_hashes) ls_hashes[dirpath] = hashes dir_hashes = [] for fulldirname in (dir for dir in dirnames if dir not in all_links): tree_hash = _compute_tree_metadata(fulldirname, ls_hashes[fulldirname]) dir_hashes.append(tree_hash) ls_hashes[dirpath].extend(dir_hashes) if with_root_tree: # compute the current directory hashes root_hash = { 'sha1_git': _compute_directory_git_sha1(ls_hashes[rootdir]), 'path': rootdir, 'name': os.path.basename(rootdir), 'perms': GitPerm.TREE, 'type': GitType.TREE } ls_hashes[ROOT_TREE_KEY] = [root_hash] return ls_hashes def compute_hashes_from_directory(rootdir, dir_ok_fn=default_validation_dir, remove_empty_folder=False): """Compute git sha1 from directory rootdir. Args: - rootdir: Root directory from which beginning the git hash computation - dir_ok_fn: Filter function to filter directory according to rules defined in the function. By default, all folders are ok. Example override: dir_ok_fn = lambda dirpath: b'svn' not in dirpath Returns: Dictionary of entries with keys absolute path name. Path-name can be a file/link or directory. The associated value is a dictionary with: - checksums: the dictionary with the hashes for the link/file/dir Those are list of dictionary with keys: - 'perms' - 'type' - 'name' - 'sha1_git' - and specifically content: 'sha1', 'sha256', ... - children: Only for a directory, the set of children paths Note: One special key is the / which indicates the upper root of the directory (this is the revision's directory). Raises: Nothing If something is raised, this is a programmatic error. """ def __get_dict_from_dirpath(_dict, path): """Retrieve the default associated value for key path. """ return _dict.get(path, dict(children=set(), checksums=None)) def __get_dict_from_filepath(_dict, path): """Retrieve the default associated value for key path. 
""" return _dict.get(path, dict(checksums=None)) ls_hashes = {} all_links = set() if rootdir.endswith(b'/'): rootdir = rootdir.rstrip(b'/') for dirpath, dirnames, filenames in __walk( rootdir, dir_ok_fn, remove_empty_folder): dir_entry = __get_dict_from_dirpath(ls_hashes, dirpath) children = dir_entry['children'] links = (file for file in filenames.union(dirnames) if os.path.islink(file)) for linkpath in links: all_links.add(linkpath) m_hashes = compute_link_metadata(linkpath) d = __get_dict_from_filepath(ls_hashes, linkpath) d['checksums'] = m_hashes ls_hashes[linkpath] = d children.add(linkpath) for filepath in (file for file in filenames if file not in all_links): m_hashes = compute_blob_metadata(filepath) d = __get_dict_from_filepath(ls_hashes, filepath) d['checksums'] = m_hashes ls_hashes[filepath] = d children.add(filepath) for fulldirname in (dir for dir in dirnames if dir not in all_links): d_hashes = __get_dict_from_dirpath(ls_hashes, fulldirname) tree_hash = _compute_tree_metadata( fulldirname, (ls_hashes[p]['checksums'] for p in d_hashes['children']) ) d = __get_dict_from_dirpath(ls_hashes, fulldirname) d['checksums'] = tree_hash ls_hashes[fulldirname] = d children.add(fulldirname) dir_entry['children'] = children ls_hashes[dirpath] = dir_entry # compute the current directory hashes d_hashes = __get_dict_from_dirpath(ls_hashes, rootdir) root_hash = { 'sha1_git': _compute_directory_git_sha1( (ls_hashes[p]['checksums'] for p in d_hashes['children']) ), 'path': rootdir, 'name': os.path.basename(rootdir), 'perms': GitPerm.TREE, 'type': GitType.TREE } d_hashes['checksums'] = root_hash ls_hashes[rootdir] = d_hashes return ls_hashes -def recompute_sha1_in_memory(root, deeper_rootdir, objects): - """TODO: Use git.walk_and_compute_sha1_from_directory_2 - - Recompute git sha1 from directory deeper_rootdir to root. - - This function relies exclusively on `objects` for hashes. It - expects the deeper_rootdir and every key below that path to be - already updated. - - Args: - - root: Upper root directory (so same as - objects[ROOT_TREE_KEY][0]['path']) - - - deeper_rootdir: Upper root directory from which the git hash - computation has alredy been updated. - - - objects: objects dictionary as per returned by - `walk_and_compute_sha1_from_directory` - - Returns: - Dictionary of entries with keys and as values a list of - directory entries. - Those are list of dictionary with keys: - - 'perms' - - 'type' - - 'name' - - 'sha1_git' - - and specifically content: 'sha1', 'sha256', ... - - Note: - One special key is ROOT_TREE_KEY to indicate the upper root of the - directory (this is the revision's target directory). - - Raises: - Nothing - If something is raised, this is a programmatic error. - - """ - # list of paths to update from bottom to top - upper_root = os.path.dirname(root) - rootdir = os.path.dirname(deeper_rootdir) - while rootdir != upper_root: - files = objects[rootdir] - ls_hashes = [] - for hashfile in files: - fulldirname = hashfile['path'] - if hashfile['type'] == GitType.TREE: - tree_hash = compute_tree_metadata(fulldirname, objects) - ls_hashes.append(tree_hash) - else: - ls_hashes.append(hashfile) - - objects[rootdir] = ls_hashes - - parent = os.path.dirname(rootdir) - rootdir = parent - - # update root - - root_tree_hash = compute_directory_git_sha1(root, objects) - objects[ROOT_TREE_KEY][0]['sha1_git'] = root_tree_hash - return objects - - -def commonpath(paths): - """Given a sequence of path names, returns the longest common sub-path. 
- - Copied from Python3.5 - - """ - - if not paths: - raise ValueError('commonpath() arg is an empty sequence') - - if isinstance(paths[0], bytes): - sep = b'/' - curdir = b'.' - else: - sep = '/' - curdir = '.' - - try: - split_paths = [path.split(sep) for path in paths] - - try: - isabs, = set(p[:1] == sep for p in paths) - except ValueError: - raise ValueError("Can't mix absolute and relative paths") - - split_paths = [ - [c for c in s if c and c != curdir] for s in split_paths] - s1 = min(split_paths) - s2 = max(split_paths) - common = s1 - for i, c in enumerate(s1): - if c != s2[i]: - common = s1[:i] - break - - prefix = sep if isabs else sep[:0] - return prefix + sep.join(common) - except (TypeError, AttributeError): - raise - - -def __remove_paths_from_objects(objects, rootpaths, - dir_ok_fn=default_validation_dir): - """Given top paths to remove, remove all paths and descendants from - objects. - - Args: - objects: The dictionary of paths to clean up. - rootpaths: The rootpaths to remove from objects. - - dir_ok_fn: Validation function on folder/file names. - Default to accept all. - - Returns: - Objects dictionary without the rootpaths and their descendants. - - """ - dirpaths_to_clean = set() - for path in rootpaths: - path_list = objects.pop(path, None) - if path_list: # need to remove the children directories too - for child in path_list: - if child['type'] == GitType.TREE: - dirpaths_to_clean.add(child['path']) - - parent = os.path.dirname(path) - # Is the parent still ok? (e.g. not an empty dir for example) - parent_check = dir_ok_fn(parent) - if not parent_check and parent not in dirpaths_to_clean: - dirpaths_to_clean.add(parent) - else: - # we need to pop the reference to path in the parent list - if objects.get(parent): - objects[parent] = filter( - lambda p: p != path, - objects.get(parent, [])) - - if dirpaths_to_clean: - objects = __remove_paths_from_objects(objects, - dirpaths_to_clean, - dir_ok_fn) - - return objects - - -def update_checksums_from(changed_paths, objects, - dir_ok_fn=default_validation_dir, - remove_empty_folder=False): - """Given a list of changed paths, recompute the checksums only where - needed. - - Args: - changed_paths: Dictionary list representing path changes. - A dictionary has the form: - - path: the full path to the file Added, Modified or Deleted - - action: A, M or D - objects: dictionary returned by `walk_and_compute_sha1_from_directory`. - - dir_ok_fn: Validation function on folder/file names. - Default to accept all. - - Returns: - Dictionary returned by `walk_and_compute_sha1_from_directory` - updated (mutated) according to latest filesystem modifications. - - """ - root = objects[ROOT_TREE_KEY][0]['path'] - if root.endswith(b'/'): - root = root.rstrip(b'/') - - paths = set() # contain the list of impacted paths (A, D, M) - paths_to_remove = set() # will contain the list of deletion paths (only D) - # a first round-trip to ensure we don't need to... - for changed_path in changed_paths: - path = changed_path['path'] - - parent = os.path.dirname(path) - if parent == root: # ... 
recompute everything anyway - return walk_and_compute_sha1_from_directory( - root, - dir_ok_fn=dir_ok_fn, - remove_empty_folder=remove_empty_folder) - - if changed_path['action'] == 'D': # (D)elete - paths_to_remove.add(path) - - paths.add(parent) - - # no modification on paths (paths also contain deletion paths if any) - if not paths: - return objects - - rootdir = commonpath(list(paths)) - - if paths_to_remove: - # Now we can remove the deleted directories from objects dictionary - objects = __remove_paths_from_objects(objects, - paths_to_remove, - dir_ok_fn) - - # Recompute from disk the checksums from impacted common ancestor - # rootdir changes. - while not objects.get(rootdir, None): - # it could happened that the path is not found. - # In the case of an ignored folder for example. - # So we'll find the next existing parent - rootdir = os.path.dirname(rootdir) - - if rootdir == root: # fallback, if we hit root, walk - # everything anyway - return walk_and_compute_sha1_from_directory( - root, - dir_ok_fn=dir_ok_fn, - remove_empty_folder=remove_empty_folder) - - hashes = walk_and_compute_sha1_from_directory( - rootdir, - dir_ok_fn=dir_ok_fn, - with_root_tree=False, - remove_empty_folder=remove_empty_folder) - - # Then update the original objects with new - # checksums for the arborescence tree below rootdir - objects.update(hashes) - - # Recompute hashes in memory from rootdir to root - return recompute_sha1_in_memory(root, rootdir, objects) - - def objects_per_type(filter_type, objects_per_path): """Given an object dictionary returned by `swh.model.git.compute_hashes_from_directory`, yields corresponding element type's hashes Args: filter_type: one of GitType enum objects_per_path: Yields: Elements of type filter_type's hashes """ def __children_hash(objects, children): for p in children: c = objects.get(p, None) if c: h = c.get('checksums', None) if h: yield h for path, obj in objects_per_path.items(): o = obj['checksums'] if o['type'] == filter_type: if 'children' in obj: # for trees if obj['children']: o['children'] = __children_hash(objects_per_path, obj['children']) else: o['children'] = [] yield o diff --git a/swh/model/tests/test_git.py b/swh/model/tests/test_git.py index 4d1470d..10003e1 100644 --- a/swh/model/tests/test_git.py +++ b/swh/model/tests/test_git.py @@ -1,728 +1,245 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import shutil import subprocess import tempfile import unittest from nose.plugins.attrib import attr from nose.tools import istest from swh.model import git class GitHashlib(unittest.TestCase): def setUp(self): self.tree_data = b''.join([b'40000 barfoo\0', bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087'), b'100644 blah\0', bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20'), b'100644 hello\0', bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')]) self.commit_data = """tree 1c61f7259dcb770f46b194d941df4f08ff0a3970 author Antoine R. Dumont (@ardumont) 1444054085 +0200 committer Antoine R. Dumont (@ardumont) 1444054085 +0200 initial """.encode('utf-8') # NOQA self.tag_data = """object 24d012aaec0bc5a4d2f62c56399053d6cc72a241 type commit tag 0.0.1 tagger Antoine R. 
Dumont (@ardumont) 1444225145 +0200 blah """.encode('utf-8') # NOQA self.checksums = { 'tree_sha1_git': bytes.fromhex('ac212302c45eada382b27bfda795db' '121dacdb1c'), 'commit_sha1_git': bytes.fromhex('e960570b2e6e2798fa4cfb9af2c399' 'd629189653'), 'tag_sha1_git': bytes.fromhex('bc2b99ba469987bcf1272c189ed534' 'e9e959f120'), } @istest def compute_directory_git_sha1(self): # given dirpath = 'some-dir-path' hashes = { dirpath: [{'perms': git.GitPerm.TREE, 'type': git.GitType.TREE, 'name': b'barfoo', 'sha1_git': bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'hello', 'sha1_git': bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'blah', 'sha1_git': bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20')}] } # when checksum = git.compute_directory_git_sha1(dirpath, hashes) # then self.assertEqual(checksum, self.checksums['tree_sha1_git']) @istest def compute_revision_sha1_git(self): # given tree_hash = bytes.fromhex('1c61f7259dcb770f46b194d941df4f08ff0a3970') revision = { 'author': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444054085, 'offset': 120, }, 'committer': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'committer_date': { 'timestamp': 1444054085, 'offset': 120, }, 'message': b'initial\n', 'type': 'tar', 'directory': tree_hash, 'parents': [], } # when checksum = git.compute_revision_sha1_git(revision) # then self.assertEqual(checksum, self.checksums['commit_sha1_git']) @istest def compute_release_sha1_git(self): # given revision_hash = bytes.fromhex('24d012aaec0bc5a4d2f62c56399053' 'd6cc72a241') release = { 'name': b'0.0.1', 'author': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444225145, 'offset': 120, }, 'message': b'blah\n', 'target_type': 'revision', 'target': revision_hash, } # when checksum = git.compute_release_sha1_git(release) # then self.assertEqual(checksum, self.checksums['tag_sha1_git']) @attr('fs') class GitHashWalkArborescenceTree(unittest.TestCase): """Root class to ease walk and git hash testing without side-effecty problems. """ def setUp(self): self.tmp_root_path = tempfile.mkdtemp().encode('utf-8') self.maxDiff = None start_path = os.path.dirname(__file__).encode('utf-8') sample_folder = os.path.join(start_path, b'../../../..', b'swh-storage-testdata', b'dir-folders', b'sample-folder.tgz') self.root_path = os.path.join(self.tmp_root_path, b'sample-folder') # uncompress the sample folder subprocess.check_output( ['tar', 'xvf', sample_folder, '-C', self.tmp_root_path]) def tearDown(self): if os.path.exists(self.tmp_root_path): shutil.rmtree(self.tmp_root_path) class GitHashFromScratch(GitHashWalkArborescenceTree): """Test the main `walk_and_compute_sha1_from_directory` algorithm that scans and compute the disk for checksums. 
""" @istest def walk_and_compute_sha1_from_directory(self): # make a temporary arborescence tree to hash without ignoring anything # same as previous behavior walk0 = git.walk_and_compute_sha1_from_directory(self.tmp_root_path) keys0 = list(walk0.keys()) path_excluded = os.path.join(self.tmp_root_path, b'sample-folder', b'foo') self.assertTrue(path_excluded in keys0) # it is not excluded here # make the same temporary arborescence tree to hash with ignoring one # folder foo walk1 = git.walk_and_compute_sha1_from_directory( self.tmp_root_path, dir_ok_fn=lambda dirpath: b'sample-folder/foo' not in dirpath) keys1 = list(walk1.keys()) self.assertTrue(path_excluded not in keys1) # remove the keys that can't be the same (due to hash definition) # Those are the top level folders keys_diff = [self.tmp_root_path, os.path.join(self.tmp_root_path, b'sample-folder'), git.ROOT_TREE_KEY] for k in keys_diff: self.assertNotEquals(walk0[k], walk1[k]) # The remaining keys (bottom path) should have exactly the same hashes # as before keys = set(keys1) - set(keys_diff) actual_walk1 = {} for k in keys: self.assertEquals(walk0[k], walk1[k]) actual_walk1[k] = walk1[k] expected_checksums = { os.path.join(self.tmp_root_path, b'sample-folder/empty-folder'): [], # noqa os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'): [{ # noqa 'type': git.GitType.BLOB, # noqa 'length': 72, 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa 'name': b'another-quote.org', # noqa 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo/another-quote.org'), # noqa 'perms': git.GitPerm.BLOB, # noqa 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68'}], # noqa os.path.join(self.tmp_root_path, b'sample-folder/bar'): [{ # noqa 'type': git.GitType.TREE, # noqa 'perms': git.GitPerm.TREE, # noqa 'name': b'barfoo', # noqa 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'), # noqa 'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87'}]} # noqa self.assertEquals(actual_walk1, expected_checksums) @istest def walk_and_compute_sha1_from_directory_without_root_tree(self): # compute the full checksums expected_hashes = git.walk_and_compute_sha1_from_directory( self.tmp_root_path) # except for the key on that round actual_hashes = git.walk_and_compute_sha1_from_directory( self.tmp_root_path, with_root_tree=False) # then, removing the root tree hash from the first round del expected_hashes[git.ROOT_TREE_KEY] # should give us the same checksums as the second round self.assertEquals(actual_hashes, expected_hashes) - - -class GitHashUpdate(GitHashWalkArborescenceTree): - """Test `walk and git hash only on modified fs` functions. 
- - """ - @istest - def update_checksums_from_add_new_file(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (add a new file) - # update the actual git checksums from the deeper tree modified - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # update the existing file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # walk1 (this will be our expectation) - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_from_add_new_file_with_validation(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (add a new file) - # update the actual git checksums from the deeper tree modified - # + Add some validation on some file to ignore - - def dir_ok_fn(dirpath): - return b'empty-folder' not in dirpath - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, dir_ok_fn=dir_ok_fn) - - # update the existing file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # walk1 (this will be our expectation) - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, dir_ok_fn=dir_ok_fn) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_from_add_new_file_remove_empty_folder(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (add a new file) - # update the actual git checksums from the deeper tree modified - # + Add some validation on some file to ignore - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, remove_empty_folder=True) - - # update the existing file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # walk1 (this will be our expectation) - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, remove_empty_folder=True) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_new_file_with_validation_and_ignore_empty_dir(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (add a new file) - # update the actual git checksums from the deeper tree modified - # + Add some validation on some file to ignore - # + ignore empty folder - - def dir_ok_fn(dirpath): - return b'some-binary' not in dirpath - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, dir_ok_fn=dir_ok_fn, remove_empty_folder=True) - - # update the existing file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # walk1 (this will be our expectation) - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, 
dir_ok_fn=dir_ok_fn, remove_empty_folder=True) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_from_modify_existing_file(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way () - # update the actual git checksums where only the modification is needed - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # update existing file - changed_path = os.path.join( - self.tmp_root_path, - b'sample-folder/bar/barfoo/another-quote.org') - with open(changed_path, 'wb+') as f: - f.write(b'I have a dream') - - # walk1 (this will be our expectation) - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'M'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_no_change(self): - # when - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # nothing changes on disk - - # then - actual_dict = git.update_checksums_from([], expected_dict) - - self.assertEquals(actual_dict, expected_dict) - - @istest - def update_checksums_delete_existing_file(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (delete a file) - # update the actual git checksums from the deeper tree modified - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # Remove folder - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo') - shutil.rmtree(changed_path) - - # Actually walking the fs will be the resulting expectation - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'D'}], - objects) - - self.assertEquals(actual_dict, expected_dict) - - @istest - def update_checksums_from_multiple_fs_modifications(self): - # make a temporary arborescence tree to hash without ignoring anything - # update the disk in some way (modify a file, add a new, delete one) - # update the actual git checksums from the deeper tree modified - - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # Actions on disk (imagine a checkout of some form) - - # 1. Create a new file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # 2. update the existing file - changed_path1 = os.path.join( - self.tmp_root_path, - b'sample-folder/bar/barfoo/another-quote.org') - with open(changed_path1, 'wb') as f: - f.write(b'new line') - - # 3. 
Remove some folder - changed_path2 = os.path.join(self.tmp_root_path, - b'sample-folder/foo') - shutil.rmtree(changed_path2) - - # Actually walking the fs will be the resulting expectation - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}, - {'path': changed_path1, 'action': 'M'}, - {'path': changed_path2, 'action': 'D'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_from_common_ancestor(self): - # when - # Add some new arborescence below a folder destined to be removed - # want to check that old keys does not remain - future_folder_to_remove = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo') - - # add .../barfoo/hello/world under (.../barfoo which will be destroyed) - new_folder = os.path.join(future_folder_to_remove, b'hello') - os.makedirs(new_folder, exist_ok=True) - with open(os.path.join(future_folder_to_remove, b'world'), 'wb') as f: - f.write(b"i'm sad 'cause i'm destined to be removed...") - - # now we scan the disk - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - assert objects[future_folder_to_remove] - - # Actions on disk (to simulate a checkout of some sort) - - # 1. Create a new file - changed_path = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo/new') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # 2. update the existing file - changed_path1 = os.path.join( - self.tmp_root_path, - b'sample-folder/bar/barfoo/another-quote.org') - with open(changed_path1, 'wb') as f: - f.write(b'new line') - - # 3. Remove folder - shutil.rmtree(future_folder_to_remove) - - # Actually walking the fs will be the resulting expectation - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}, - {'path': changed_path1, 'action': 'M'}, - {'path': future_folder_to_remove, 'action': 'D'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def update_checksums_detects_recomputation_from_all_is_needed(self): - # when - objects = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # Actions on disk (imagine a checkout of some form) - - # 1. Create a new file - changed_path = os.path.join(self.tmp_root_path, - b'new-file-at-root') - with open(changed_path, 'wb') as f: - f.write(b'new line') - - # 2. update the existing file - changed_path1 = os.path.join( - self.tmp_root_path, - b'sample-folder/bar/barfoo/another-quote.org') - with open(changed_path1, 'wb') as f: - f.write(b'new line') - - # 3. Remove some folder - changed_path2 = os.path.join(self.tmp_root_path, - b'sample-folder/foo') - - # 3. 
Remove some folder - changed_path2 = os.path.join(self.tmp_root_path, - b'sample-folder/bar/barfoo') - shutil.rmtree(changed_path2) - - # Actually walking the fs will be the resulting expectation - expected_dict = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path) - - # then - actual_dict = git.update_checksums_from( - [{'path': changed_path, 'action': 'A'}, - {'path': changed_path1, 'action': 'M'}, - {'path': changed_path2, 'action': 'D'}], - objects) - - self.assertEquals(expected_dict, actual_dict) - - @istest - def commonpath(self): - paths = ['r/0/h', - 'r/1/d', 'r/1/i/a', 'r/1/i/b', 'r/1/i/c', - 'r/2/e', 'r/2/f', 'r/2/g'] - self.assertEquals(git.commonpath(paths), 'r') - - paths = ['r/1/d', 'r/1/i/a', 'r/1/i/b', 'r/1/i/c'] - self.assertEquals(git.commonpath(paths), 'r/1') - - paths = ['/a/r/2/g', '/a/r/1/i/c', '/a/r/0/h'] - self.assertEquals(git.commonpath(paths), '/a/r') - - paths = [b'/a/r/2/g', b'/b/r/1/i/c', b'/c/r/0/h'] - self.assertEquals(git.commonpath(paths), b'/') - - paths = ['a/z', 'a/z', 'a/z'] - self.assertEquals(git.commonpath(paths), 'a/z') - - paths = ['0'] - self.assertEquals(git.commonpath(paths), '0') - - -def untar(archive, dest): - # cleanup - shutil.rmtree(dest) - os.mkdir(dest) - # untar - cmd = [b'tar', b'xf', archive, b'-C', dest] - subprocess.check_output(cmd) - - -def ignore_svn_folder(dirpath): - return b'.svn' not in dirpath - - -@attr('fs') -class GitHashUpdateRealUseCase(unittest.TestCase): - """Test `walk and git hash only on modified fs` functions. - - """ - def setUp(self): - self.tmp_root_path = tempfile.mkdtemp().encode('utf-8') - - archives_folder = os.path.join( - os.path.dirname(__file__).encode('utf-8'), - b'../../../..', - b'swh-storage-testdata', - b'svn-folders') - - self.pkg_doc_linux_r10 = os.path.join(archives_folder, - b'pkg-doc-linux-r10.tgz') - self.pkg_doc_linux_r11 = os.path.join(archives_folder, - b'pkg-doc-linux-r11.tgz') - self.pkg_doc_linux_r12 = os.path.join(archives_folder, - b'pkg-doc-linux-r12.tgz') - - def tearDown(self): - if os.path.exists(self.tmp_root_path): - shutil.rmtree(self.tmp_root_path) - - @istest - def use_case_1_r10_r11(self): - # given - # untar the svn revision 10 - untar(self.pkg_doc_linux_r10, self.tmp_root_path) - - objects_r10 = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, - ignore_svn_folder) - - # untar the svn revision 11 - untar(self.pkg_doc_linux_r11, self.tmp_root_path) - - objects_r11 = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, - ignore_svn_folder) - - assert objects_r10 != objects_r11 - - changes = [ - {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights/non-free/Kiosk')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable/Kiosk')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable')}, # noqa - {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights/non-free/UPS')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable/UPS')} # noqa - ] - - # when - # update from objects from previous revision (r10) with - # actual changes from r10 to r11 - actual_objects = git.update_checksums_from(changes, - objects_r10, - ignore_svn_folder) - - # then - self.assertEquals(actual_objects, objects_r11) - - @istest - def use_case_2_r11_r12(self): - # given - # untar the svn revision 11 - untar(self.pkg_doc_linux_r11, self.tmp_root_path) - - objects_r11 = git.walk_and_compute_sha1_from_directory( - 
self.tmp_root_path, - ignore_svn_folder) - - # untar the svn revision 12 - untar(self.pkg_doc_linux_r12, self.tmp_root_path) - - objects_r12 = git.walk_and_compute_sha1_from_directory( - self.tmp_root_path, - ignore_svn_folder) - - assert objects_r11 != objects_r12 - changes = [ - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk')}, # noqa - {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/copyright.head')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/split-package')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-base.faq')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/make-copyright')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.menu')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/redirect.patch')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.overrides')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.prerm')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/README.updating')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.preinst')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.dirs')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/changelog')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.README.Debian')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/html2docs')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/rules')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.postrm')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/make-omf')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.preinst')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.postinst')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/copyrights')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/control')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.dirs')}, # noqa - {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.README.Debian')} # noqa - ] - - # when - # update from objects from previous revision (r11) with - # actual changes from r11 to r12 - actual_objects = git.update_checksums_from(changes, - objects_r11, - ignore_svn_folder) - - # then - self.assertEquals(actual_objects, objects_r12)
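For reference, a minimal usage sketch of the API retained by this diff (`compute_hashes_from_directory` and `objects_per_type`); the root path and the filter lambda below are hypothetical examples, not values taken from the test data:

# Minimal sketch, assuming a directory checked out at b'/tmp/sample-folder'
# (hypothetical path). swh.model.git handles all paths as bytes.
from swh.model import git

objects = git.compute_hashes_from_directory(
    b'/tmp/sample-folder',
    dir_ok_fn=lambda dirpath: b'.svn' not in dirpath,  # same idea as ignore_svn_folder above
    remove_empty_folder=False)

# Each key is an absolute path; its value holds the computed checksums and,
# for directories, the set of children paths.
for blob in git.objects_per_type(git.GitType.BLOB, objects):
    print(blob['path'], blob['sha1_git'])

for tree in git.objects_per_type(git.GitType.TREE, objects):
    print(tree['path'], tree['sha1_git'])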