diff --git a/swh/model/from_disk.py b/swh/model/from_disk.py
index 8795b1f..8bd7f5d 100644
--- a/swh/model/from_disk.py
+++ b/swh/model/from_disk.py
@@ -1,592 +1,592 @@
-# Copyright (C) 2017-2020 The Software Heritage developers
+# Copyright (C) 2017-2022 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 """Conversion from filesystem tree to SWH objects.

 This module allows reading a tree of directories and files from a local
 filesystem, and converting them to in-memory data structures, which can
 then be exported to SWH data model objects, as defined in
 :mod:`swh.model.model`.
 """

 import datetime
 import enum
 import fnmatch
 import glob
 import os
 import re
 import stat
 from typing import Any, Iterable, Iterator, List, Optional, Pattern, Tuple

 import attr
 from attrs_strict import type_validator
 from typing_extensions import Final

 from . import model
 from .exceptions import InvalidDirectoryPath
 from .git_objects import directory_entry_sort_key
 from .hashutil import MultiHash, hash_to_hex
 from .merkle import MerkleLeaf, MerkleNode
 from .swhids import CoreSWHID, ObjectType


 @attr.s(frozen=True, slots=True)
 class DiskBackedContent(model.BaseContent):
     """Content-like class, which allows lazy-loading data from the disk."""

     object_type: Final = "content_file"

     sha1 = attr.ib(type=bytes, validator=type_validator())
     sha1_git = attr.ib(type=model.Sha1Git, validator=type_validator())
     sha256 = attr.ib(type=bytes, validator=type_validator())
     blake2s256 = attr.ib(type=bytes, validator=type_validator())

     length = attr.ib(type=int, validator=type_validator())

     status = attr.ib(
         type=str,
         validator=attr.validators.in_(["visible", "hidden"]),
         default="visible",
     )

     ctime = attr.ib(
         type=Optional[datetime.datetime],
         validator=type_validator(),
         default=None,
         eq=False,
     )

     path = attr.ib(type=Optional[bytes], default=None)

     @classmethod
     def from_dict(cls, d):
         return cls(**d)

     def __attrs_post_init__(self):
         if self.path is None:
             raise TypeError("path must not be None.")

     def with_data(self) -> model.Content:
         args = self.to_dict()
         del args["path"]
         assert self.path is not None
         with open(self.path, "rb") as fd:
             return model.Content.from_dict({**args, "data": fd.read()})


 class DentryPerms(enum.IntEnum):
     """Admissible permissions for directory entries."""

     content = 0o100644
     """Content"""
     executable_content = 0o100755
     """Executable content (e.g. executable script)"""
     symlink = 0o120000
     """Symbolic link"""
     directory = 0o040000
     """Directory"""
     revision = 0o160000
     """Revision (e.g. submodule)"""


 def mode_to_perms(mode):
     """Convert a file mode to a permission compatible with Software Heritage
     directory entries

     Args:
       mode (int): a file mode as returned by :func:`os.stat` in
                   :attr:`os.stat_result.st_mode`

     Returns:
       DentryPerms: one of the following values:
         :const:`DentryPerms.content`: plain file
         :const:`DentryPerms.executable_content`: executable file
         :const:`DentryPerms.symlink`: symbolic link
         :const:`DentryPerms.directory`: directory
     """
     if stat.S_ISLNK(mode):
         return DentryPerms.symlink
     if stat.S_ISDIR(mode):
         return DentryPerms.directory
     else:
         # file is executable in any way
         if mode & (0o111):
             return DentryPerms.executable_content
         else:
             return DentryPerms.content
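For orientation, a small sketch of what mode_to_perms yields for typical st_mode values (illustrative only, not part of the patch):

    import stat
    from swh.model.from_disk import DentryPerms, mode_to_perms

    assert mode_to_perms(0o100644) == DentryPerms.content
    assert mode_to_perms(0o100755) == DentryPerms.executable_content
    assert mode_to_perms(stat.S_IFLNK | 0o777) == DentryPerms.symlink
    assert mode_to_perms(stat.S_IFDIR | 0o755) == DentryPerms.directory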
 class Content(MerkleLeaf):
     """Representation of a Software Heritage content as a node in a Merkle
     tree.

     The current Merkle hash for the Content nodes is the `sha1_git`, which
     makes it consistent with what :class:`Directory` uses for its own hash
     computation.
     """

     __slots__ = []  # type: List[str]

     object_type: Final = "content"

     @classmethod
     def from_bytes(cls, *, mode, data):
         """Convert data (raw :class:`bytes`) to a Software Heritage content entry

         Args:
           mode (int): a file mode (passed to :func:`mode_to_perms`)
           data (bytes): raw contents of the file
         """
         ret = MultiHash.from_data(data).digest()
         ret["length"] = len(data)
         ret["perms"] = mode_to_perms(mode)
         ret["data"] = data
         ret["status"] = "visible"

         return cls(ret)

     @classmethod
     def from_symlink(cls, *, path, mode):
         """Convert a symbolic link to a Software Heritage content entry"""
         return cls.from_bytes(mode=mode, data=os.readlink(path))

     @classmethod
     def from_file(cls, *, path, max_content_length=None):
         """Compute the Software Heritage content entry corresponding to an
         on-disk file.

         The returned content node's data contains keys useful both for:

         - loading the content in the archive (hashes, `length`)
         - using the content as a directory entry in a directory

         Args:
           path (bytes): path to the file on disk
           max_content_length (Optional[int]): if given, all contents larger
                                               than this will be skipped.
         """
         file_stat = os.lstat(path)
         mode = file_stat.st_mode
         length = file_stat.st_size
         too_large = max_content_length is not None and length > max_content_length

         if stat.S_ISLNK(mode):
             # Symbolic link: return a file whose contents are the link target

             if too_large:
                 # Unlike large contents, we can't stream symlinks to
                 # MultiHash, and we don't want to fit them in memory if
                 # they exceed max_content_length either.
                 # Thankfully, this should not happen for reasonable values of
                 # max_content_length because of OS/filesystem limitations,
                 # so let's just raise an error.
                 raise Exception(f"Symlink too large ({length} bytes)")

             return cls.from_symlink(path=path, mode=mode)
         elif not stat.S_ISREG(mode):
             # not a regular file: return the empty file instead
             return cls.from_bytes(mode=mode, data=b"")

         if too_large:
             skip_reason = "Content too large"
         else:
             skip_reason = None

         hashes = MultiHash.from_path(path).digest()
         if skip_reason:
             ret = {
                 **hashes,
                 "status": "absent",
                 "reason": skip_reason,
             }
         else:
             ret = {
                 **hashes,
                 "status": "visible",
             }

         ret["path"] = path
         ret["perms"] = mode_to_perms(mode)
         ret["length"] = length

         obj = cls(ret)
         return obj

     def swhid(self) -> CoreSWHID:
         """Return node identifier as a SWHID"""
         return CoreSWHID(object_type=ObjectType.CONTENT, object_id=self.hash)

     def __repr__(self):
         return "Content(id=%s)" % hash_to_hex(self.hash)

     def compute_hash(self):
         return self.data["sha1_git"]

     def to_model(self) -> model.BaseContent:
         """Builds a `model.BaseContent` object based on this leaf."""
         data = self.get_data().copy()
         data.pop("perms", None)
         if data["status"] == "absent":
             data.pop("path", None)
             return model.SkippedContent.from_dict(data)
         elif "data" in data:
             return model.Content.from_dict(data)
         else:
             return DiskBackedContent.from_dict(data)
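A usage sketch for the Content API above (illustrative only; the mode and payload are made up):

    from swh.model.from_disk import Content

    content = Content.from_bytes(mode=0o100644, data=b"hello world\n")
    print(content.swhid())              # swh:1:cnt:<sha1_git of the data>
    model_content = content.to_model()  # a model.Content, data kept in memory

Content.from_file behaves the same but hashes from disk, and yields a DiskBackedContent-backed node, or a skipped content when max_content_length is exceeded.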
raise Exception(f"Symlink too large ({length} bytes)") return cls.from_symlink(path=path, mode=mode) elif not stat.S_ISREG(mode): # not a regular file: return the empty file instead return cls.from_bytes(mode=mode, data=b"") if too_large: skip_reason = "Content too large" else: skip_reason = None hashes = MultiHash.from_path(path).digest() if skip_reason: ret = { **hashes, "status": "absent", "reason": skip_reason, } else: ret = { **hashes, "status": "visible", } ret["path"] = path ret["perms"] = mode_to_perms(mode) ret["length"] = length obj = cls(ret) return obj def swhid(self) -> CoreSWHID: """Return node identifier as a SWHID""" return CoreSWHID(object_type=ObjectType.CONTENT, object_id=self.hash) def __repr__(self): return "Content(id=%s)" % hash_to_hex(self.hash) def compute_hash(self): return self.data["sha1_git"] def to_model(self) -> model.BaseContent: """Builds a `model.BaseContent` object based on this leaf.""" data = self.get_data().copy() data.pop("perms", None) if data["status"] == "absent": data.pop("path", None) return model.SkippedContent.from_dict(data) elif "data" in data: return model.Content.from_dict(data) else: return DiskBackedContent.from_dict(data) def accept_all_directories(dirpath: str, dirname: str, entries: Iterable[Any]) -> bool: """Default filter for :func:`Directory.from_disk` accepting all directories Args: dirname (bytes): directory name entries (list): directory entries """ return True def ignore_empty_directories( dirpath: str, dirname: str, entries: Iterable[Any] ) -> bool: """Filter for :func:`directory_to_objects` ignoring empty directories Args: dirname (bytes): directory name entries (list): directory entries Returns: True if the directory is not empty, false if the directory is empty """ return bool(entries) def ignore_named_directories(names, *, case_sensitive=True): """Filter for :func:`directory_to_objects` to ignore directories named one of names. Args: names (list of bytes): names to ignore case_sensitive (bool): whether to do the filtering in a case sensitive way Returns: a directory filter for :func:`directory_to_objects` """ if not case_sensitive: names = [name.lower() for name in names] def named_filter( dirpath: str, dirname: str, entries: Iterable[Any], names: Iterable[Any] = names, case_sensitive: bool = case_sensitive, ): if case_sensitive: return dirname not in names else: return dirname.lower() not in names return named_filter # TODO: `extract_regex_objs` has been copied and adapted from `swh.scanner`. # In the future `swh.scanner` should use the `swh.model` version and remove its own. def extract_regex_objs( root_path: bytes, patterns: Iterable[bytes] ) -> Iterator[Pattern[bytes]]: """Generates a regex object for each pattern given in input and checks if the path is a subdirectory or relative to the root path. Args: root_path (bytes): path to the root directory patterns (list of byte): shell patterns to match Yields: an SRE_Pattern object """ absolute_root_path = os.path.abspath(root_path) for pattern in patterns: if os.path.isabs(pattern): pattern = os.path.relpath(pattern, root_path) # python 3.10 has a `root_dir` argument for glob, but not the previous # version. 
 class Directory(MerkleNode):
     """Representation of a Software Heritage directory as a node in a Merkle
     Tree.

     This class can be used to generate, from an on-disk directory, all the
     objects that need to be sent to the Software Heritage archive.

     The :func:`from_disk` constructor allows you to generate the data
     structure from a directory on disk. The resulting :class:`Directory` can
     then be manipulated as a dictionary, using the path as key.

     The :func:`collect` method is used to retrieve all the objects that need
     to be added to the Software Heritage archive since the last collection,
     by class (contents and directories).

     When using the dict-like methods to update the contents of the directory,
     the affected levels of hierarchy are reset and can be collected again
     using the same method. This enables the efficient collection of updated
     nodes, for instance when the client is applying diffs.
     """

     __slots__ = ["__entries", "__model_object"]
     object_type: Final = "directory"

     @classmethod
     def from_disk(
         cls, *, path, dir_filter=accept_all_directories, max_content_length=None
     ):
         """Compute the Software Heritage objects for a given directory tree

         Args:
           path (bytes): the directory to traverse
           dir_filter (function): a filter to ignore some directories by
             name or contents. Takes three arguments: dirpath, dirname and
             entries, and returns True if the directory should be added,
             False if the directory should be ignored.
           max_content_length (Optional[int]): if given, all contents larger
             than this will be skipped.
         """
         top_path = path
         dirs = {}

         for root, dentries, fentries in os.walk(top_path, topdown=False):
             entries = {}
             # Join fentries and dentries in the same processing, as symbolic
             # links to directories appear in dentries...
             for name in fentries + dentries:
                 path = os.path.join(root, name)
                 if not os.path.isdir(path) or os.path.islink(path):
                     content = Content.from_file(
                         path=path, max_content_length=max_content_length
                     )
                     entries[name] = content
                 else:
                     if dir_filter(path, name, dirs[path].entries):
                         entries[name] = dirs[path]

             dirs[root] = cls({"name": os.path.basename(root), "path": root})
             dirs[root].update(entries)

         return dirs[top_path]

     def __init__(self, data=None):
         super().__init__(data=data)
         self.__entries = None
         self.__model_object = None

     def invalidate_hash(self):
         self.__entries = None
         self.__model_object = None
         super().invalidate_hash()

     @staticmethod
     def child_to_directory_entry(name, child):
         if child.object_type == "directory":
             return {
                 "type": "dir",
                 "perms": DentryPerms.directory,
                 "target": child.hash,
                 "name": name,
             }
         elif child.object_type == "content":
             return {
                 "type": "file",
                 "perms": child.data["perms"],
                 "target": child.hash,
                 "name": name,
             }
         else:
             raise ValueError(f"unknown child {child}")
""" __slots__ = ["__entries", "__model_object"] object_type: Final = "directory" @classmethod def from_disk( cls, *, path, dir_filter=accept_all_directories, max_content_length=None ): """Compute the Software Heritage objects for a given directory tree Args: path (bytes): the directory to traverse data (bool): whether to add the data to the content objects save_path (bool): whether to add the path to the content objects dir_filter (function): a filter to ignore some directories by name or contents. Takes two arguments: dirname and entries, and returns True if the directory should be added, False if the directory should be ignored. max_content_length (Optional[int]): if given, all contents larger than this will be skipped. """ top_path = path dirs = {} for root, dentries, fentries in os.walk(top_path, topdown=False): entries = {} # Join fentries and dentries in the same processing, as symbolic # links to directories appear in dentries... for name in fentries + dentries: path = os.path.join(root, name) if not os.path.isdir(path) or os.path.islink(path): content = Content.from_file( path=path, max_content_length=max_content_length ) entries[name] = content else: if dir_filter(path, name, dirs[path].entries): entries[name] = dirs[path] dirs[root] = cls({"name": os.path.basename(root), "path": root}) dirs[root].update(entries) return dirs[top_path] def __init__(self, data=None): super().__init__(data=data) self.__entries = None self.__model_object = None def invalidate_hash(self): self.__entries = None self.__model_object = None super().invalidate_hash() @staticmethod def child_to_directory_entry(name, child): if child.object_type == "directory": return { "type": "dir", "perms": DentryPerms.directory, "target": child.hash, "name": name, } elif child.object_type == "content": return { "type": "file", "perms": child.data["perms"], "target": child.hash, "name": name, } else: raise ValueError(f"unknown child {child}") def get_data(self, **kwargs): return { "id": self.hash, "entries": self.entries, } @property def entries(self): """Child nodes, sorted by name in the same way :func:`swh.model.git_objects.directory_git_object` does.""" if self.__entries is None: self.__entries = sorted( ( self.child_to_directory_entry(name, child) for name, child in self.items() ), key=directory_entry_sort_key, ) return self.__entries def swhid(self) -> CoreSWHID: """Return node identifier as a SWHID""" return CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=self.hash) def compute_hash(self): return self.to_model().id def to_model(self) -> model.Directory: """Builds a `model.Directory` object based on this node; ignoring its children.""" if self.__model_object is None: DirectoryEntry = model.DirectoryEntry entries = [] for name, child in self.items(): if child.object_type == "directory": e = DirectoryEntry( type="dir", perms=DentryPerms.directory, target=child.hash, name=name, ) elif child.object_type == "content": e = DirectoryEntry( type="file", perms=child.data["perms"], target=child.hash, name=name, ) else: raise ValueError(f"unknown child {child}") entries.append(e) entries.sort(key=directory_entry_sort_key) self.__model_object = model.Directory(entries=tuple(entries)) return self.__model_object def __getitem__(self, key): if not isinstance(key, bytes): raise ValueError("Can only get a bytes from Directory") # Convenience shortcut if key == b"": return self if b"/" not in key: return super().__getitem__(key) else: key1, key2 = key.split(b"/", 1) return self.__getitem__(key1)[key2] def __setitem__(self, key, 
     def __setitem__(self, key, value):
         if not isinstance(key, bytes):
             raise ValueError("Can only set a bytes Directory entry")
         if not isinstance(value, (Content, Directory)):
             raise ValueError("Can only set a Directory entry to a Content or " "Directory")

         if key == b"":
             raise ValueError("Directory entry must have a name")
         if b"\x00" in key:
             raise ValueError("Directory entry name must not contain nul bytes")

         if b"/" not in key:
             return super().__setitem__(key, value)
         else:
             key1, key2 = key.rsplit(b"/", 1)
             self[key1].__setitem__(key2, value)

     def __delitem__(self, key):
         if not isinstance(key, bytes):
             raise ValueError("Can only delete a bytes Directory entry")

         if b"/" not in key:
             super().__delitem__(key)
         else:
             key1, key2 = key.rsplit(b"/", 1)
             del self[key1][key2]

     def __contains__(self, key):
         if b"/" not in key:
             return super().__contains__(key)
         else:
             key1, key2 = key.split(b"/", 1)
             return super().__contains__(key1) and self[key1].__contains__(key2)

     def __repr__(self):
         return "Directory(id=%s, entries=[%s])" % (
             hash_to_hex(self.hash),
             ", ".join(str(entry) for entry in self),
         )
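An end-to-end sketch of the from_disk API as a whole (illustrative only; the paths are made up):

    from swh.model.from_disk import Directory, iter_directory

    directory = Directory.from_disk(path=b"/tmp/example-repo")
    print(directory.swhid())            # swh:1:dir:...

    # dict-like access takes byte paths; nested paths use b"/"
    subdir = directory[b"docs"]

    contents, skipped_contents, directories = iter_directory(directory)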
diff --git a/swh/model/merkle.py b/swh/model/merkle.py
index ab6b8ea..b224840 100644
--- a/swh/model/merkle.py
+++ b/swh/model/merkle.py
@@ -1,315 +1,233 @@
-# Copyright (C) 2017-2020 The Software Heritage developers
+# Copyright (C) 2017-2022 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 """Merkle tree data structure"""

-import abc
-from collections.abc import Mapping
-from typing import Dict, Iterator, List, Set
-
-
-def deep_update(left, right):
-    """Recursively update the left mapping with deeply nested values from the right
-    mapping.
-
-    This function is useful to merge the results of several calls to
-    :func:`MerkleNode.collect`.
-
-    Arguments:
-      left: a mapping (modified by the update operation)
-      right: a mapping
-
-    Returns:
-      the left mapping, updated with nested values from the right mapping
-
-    Example:
-        >>> a = {
-        ...     'key1': {
-        ...         'key2': {
-        ...             'key3': 'value1/2/3',
-        ...         },
-        ...     },
-        ... }
-        >>> deep_update(a, {
-        ...     'key1': {
-        ...         'key2': {
-        ...             'key4': 'value1/2/4',
-        ...         },
-        ...     },
-        ... }) == {
-        ...     'key1': {
-        ...         'key2': {
-        ...             'key3': 'value1/2/3',
-        ...             'key4': 'value1/2/4',
-        ...         },
-        ...     },
-        ... }
-        True
-        >>> deep_update(a, {
-        ...     'key1': {
-        ...         'key2': {
-        ...             'key3': 'newvalue1/2/3',
-        ...         },
-        ...     },
-        ... }) == {
-        ...     'key1': {
-        ...         'key2': {
-        ...             'key3': 'newvalue1/2/3',
-        ...             'key4': 'value1/2/4',
-        ...         },
-        ...     },
-        ... }
-        True
+from __future__ import annotations

-    """
-    for key, rvalue in right.items():
-        if isinstance(rvalue, Mapping):
-            new_lvalue = deep_update(left.get(key, {}), rvalue)
-            left[key] = new_lvalue
-        else:
-            left[key] = rvalue
-    return left
+import abc
+from typing import Any, Dict, Iterator, List, Set


 class MerkleNode(dict, metaclass=abc.ABCMeta):
     """Representation of a node in a Merkle Tree.

     A (generalized) `Merkle Tree`_ is a tree in which every node is labeled
     with a hash of its own data and the hash of its children.

     .. _Merkle Tree: https://en.wikipedia.org/wiki/Merkle_tree

     In pseudocode::

         node.hash = hash(node.data
                          + sum(child.hash for child in node.children))

     This class efficiently implements the Merkle Tree data structure on top
     of a Python :class:`dict`, minimizing hash computations and new data
     collections when updating nodes.

     Node data is stored in the :attr:`data` attribute, while (named) children
     are stored as items of the underlying dictionary.

     Addition, update and removal of objects are instrumented to automatically
     invalidate the hashes of the current node as well as its registered
     parents; it also resets the collection status of the objects so the
     updated objects can be collected.

     The collection of updated data from the tree is implemented through the
     :func:`collect` function and associated helpers.
     """

     __slots__ = ["parents", "data", "__hash", "collected"]

     data: Dict
     """data associated to the current node"""

     parents: List
     """known parents of the current node"""

     collected: bool
     """whether the current node has been collected"""

     def __init__(self, data=None):
         super().__init__()
         self.parents = []
         self.data = data
         self.__hash = None
         self.collected = False

     def __eq__(self, other):
         return (
             isinstance(other, MerkleNode)
             and super().__eq__(other)
             and self.data == other.data
         )

     def __ne__(self, other):
         return not self.__eq__(other)

     def invalidate_hash(self):
         """Invalidate the cached hash of the current node."""
         if not self.__hash:
             return

         self.__hash = None
         self.collected = False
         for parent in self.parents:
             parent.invalidate_hash()

-    def update_hash(self, *, force=False):
+    def update_hash(self, *, force=False) -> Any:
         """Recursively compute the hash of the current node.

         Args:
           force (bool): invalidate the cache and force the computation for
                         this node and all children.
         """
         if self.__hash and not force:
             return self.__hash

         if force:
             self.invalidate_hash()

         for child in self.values():
             child.update_hash(force=force)

         self.__hash = self.compute_hash()
         return self.__hash

     @property
-    def hash(self):
+    def hash(self) -> Any:
         """The hash of the current node, as calculated by
         :func:`compute_hash`.
         """
         return self.update_hash()

+    def __hash__(self):
+        return hash(self.hash)
+
     @abc.abstractmethod
-    def compute_hash(self):
+    def compute_hash(self) -> Any:
         """Compute the hash of the current node.

         The hash should depend on the data of the node, as well as on hashes
         of the children nodes.
         """
         raise NotImplementedError("Must implement compute_hash method")

     def __setitem__(self, name, new_child):
         """Add a child, invalidating the current hash"""
         self.invalidate_hash()

         super().__setitem__(name, new_child)

         new_child.parents.append(self)

     def __delitem__(self, name):
         """Remove a child, invalidating the current hash"""
         if name in self:
             self.invalidate_hash()
             self[name].parents.remove(self)
             super().__delitem__(name)
         else:
             raise KeyError(name)

     def update(self, new_children):
         """Add several named children from a dictionary"""
         if not new_children:
             return

         self.invalidate_hash()

         for name, new_child in new_children.items():
             new_child.parents.append(self)
             if name in self:
                 self[name].parents.remove(self)

         super().update(new_children)

     def get_data(self, **kwargs):
         """Retrieve and format the collected data for the current node, for
         use by :func:`collect`.

         Can be overridden, for instance when you want the collected data to
         contain information about the child nodes.

         Arguments:
           kwargs: allow subclasses to alter behaviour depending on how
                   :func:`collect` is called.

         Returns:
           data formatted for :func:`collect`
         """
         return self.data
- """ + def collect_node(self) -> Set[MerkleNode]: + """Collect the current node if it has not been yet, for use by :func:`collect`.""" if not self.collected: self.collected = True - return {self.object_type: {self.hash: self.get_data(**kwargs)}} + return {self} else: - return {} + return set() - def collect(self, **kwargs): - """Collect the data for all nodes in the subtree rooted at `self`. - - The data is deduplicated by type and by hash. - - Arguments: - kwargs: passed as-is to :func:`get_data`. + def collect(self) -> Set[MerkleNode]: + """Collect the added and modified nodes in the subtree rooted at `self` + since the last collect operation. Returns: - A :class:`dict` with the following structure:: - - { - 'typeA': { - node1.hash: node1.get_data(), - node2.hash: node2.get_data(), - }, - 'typeB': { - node3.hash: node3.get_data(), - ... - }, - ... - } + A :class:`set` of collected nodes """ - ret = self.collect_node(**kwargs) + ret = self.collect_node() for child in self.values(): - deep_update(ret, child.collect(**kwargs)) + ret.update(child.collect()) return ret def reset_collect(self): """Recursively unmark collected nodes in the subtree rooted at `self`. This lets the caller use :func:`collect` again. """ self.collected = False for child in self.values(): child.reset_collect() - def iter_tree(self, dedup=True) -> Iterator["MerkleNode"]: + def iter_tree(self, dedup=True) -> Iterator[MerkleNode]: """Yields all children nodes, recursively. Common nodes are deduplicated by default (deduplication can be turned off setting the given argument 'dedup' to False). """ yield from self._iter_tree(set(), dedup) - def _iter_tree(self, seen: Set[bytes], dedup) -> Iterator["MerkleNode"]: + def _iter_tree(self, seen: Set[bytes], dedup) -> Iterator[MerkleNode]: if self.hash not in seen: if dedup: seen.add(self.hash) yield self for child in self.values(): yield from child._iter_tree(seen=seen, dedup=dedup) class MerkleLeaf(MerkleNode): """A leaf to a Merkle tree. A Merkle leaf is simply a Merkle node with children disabled. """ __slots__ = [] # type: List[str] def __setitem__(self, name, child): raise ValueError("%s is a leaf" % self.__class__.__name__) def __getitem__(self, name): raise ValueError("%s is a leaf" % self.__class__.__name__) def __delitem__(self, name): raise ValueError("%s is a leaf" % self.__class__.__name__) def update(self, new_children): """Children update operation. 
diff --git a/swh/model/tests/test_from_disk.py b/swh/model/tests/test_from_disk.py
index b7674d4..c07fef6 100644
--- a/swh/model/tests/test_from_disk.py
+++ b/swh/model/tests/test_from_disk.py
@@ -1,1001 +1,1008 @@
-# Copyright (C) 2017-2020 The Software Heritage developers
+# Copyright (C) 2017-2022 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 from collections import defaultdict
 import os
 import tarfile
 import tempfile
 from typing import ClassVar, Optional
 import unittest

 import pytest

 from swh.model import from_disk, model
 from swh.model.from_disk import Content, DentryPerms, Directory, DiskBackedContent
 from swh.model.hashutil import DEFAULT_ALGORITHMS, hash_to_bytes, hash_to_hex

 TEST_DATA = os.path.join(os.path.dirname(__file__), "data")


 class ModeToPerms(unittest.TestCase):
     def setUp(self):
         super().setUp()

         # Generate a full permissions map
         self.perms_map = {}

         # Symlinks
         for i in range(0o120000, 0o127777 + 1):
             self.perms_map[i] = DentryPerms.symlink

         # Directories
         for i in range(0o040000, 0o047777 + 1):
             self.perms_map[i] = DentryPerms.directory

         # Other file types: socket, regular file, block device, character
         # device, fifo all map to regular files
         for ft in [0o140000, 0o100000, 0o060000, 0o020000, 0o010000]:
             for i in range(ft, ft + 0o7777 + 1):
                 if i & 0o111:
                     # executable bits are set
                     self.perms_map[i] = DentryPerms.executable_content
                 else:
                     self.perms_map[i] = DentryPerms.content

     def test_exhaustive_mode_to_perms(self):
         for fmode, perm in self.perms_map.items():
             self.assertEqual(perm, from_disk.mode_to_perms(fmode))


 class TestDiskBackedContent(unittest.TestCase):
     def test_with_data(self):
         expected_content = model.Content(
             length=42,
             status="visible",
             data=b"foo bar",
             sha1=b"foo",
             sha1_git=b"bar",
             sha256=b"baz",
             blake2s256=b"qux",
         )
         with tempfile.NamedTemporaryFile(mode="w+b") as fd:
             content = DiskBackedContent(
                 length=42,
                 status="visible",
                 path=fd.name,
                 sha1=b"foo",
                 sha1_git=b"bar",
                 sha256=b"baz",
                 blake2s256=b"qux",
             )
             fd.write(b"foo bar")
             fd.seek(0)
             content_with_data = content.with_data()

         assert expected_content == content_with_data

     def test_lazy_data(self):
         with tempfile.NamedTemporaryFile(mode="w+b") as fd:
             fd.write(b"foo")
             fd.seek(0)
             content = DiskBackedContent(
                 length=42,
                 status="visible",
                 path=fd.name,
                 sha1=b"foo",
                 sha1_git=b"bar",
                 sha256=b"baz",
                 blake2s256=b"qux",
             )
             fd.write(b"bar")
             fd.seek(0)
             content_with_data = content.with_data()
             fd.write(b"baz")
             fd.seek(0)

         assert content_with_data.data == b"bar"

     def test_with_data_cannot_read(self):
         with tempfile.NamedTemporaryFile(mode="w+b") as fd:
             content = DiskBackedContent(
                 length=42,
                 status="visible",
                 path=fd.name,
                 sha1=b"foo",
                 sha1_git=b"bar",
                 sha256=b"baz",
                 blake2s256=b"qux",
             )

         with pytest.raises(OSError):
             content.with_data()

     def test_missing_path(self):
         with pytest.raises(TypeError):
             DiskBackedContent(
                 length=42,
                 status="visible",
                 sha1=b"foo",
                 sha1_git=b"bar",
                 sha256=b"baz",
                 blake2s256=b"qux",
             )

         with pytest.raises(TypeError):
             DiskBackedContent(
                 length=42,
                 status="visible",
                 path=None,
                 sha1=b"foo",
                 sha1_git=b"bar",
                 sha256=b"baz",
                 blake2s256=b"qux",
             )


 class DataMixin:
     maxDiff = None  # type: ClassVar[Optional[int]]

     def setUp(self):
         self.tmpdir = tempfile.TemporaryDirectory(prefix="swh.model.from_disk")
         self.tmpdir_name = os.fsencode(self.tmpdir.name)
"data": b"42\n", "sha1": hash_to_bytes("34973274ccef6ab4dfaaf86599792fa9c3fe4689"), "sha256": hash_to_bytes( "084c799cd551dd1d8d5c5f9a5d593b2e" "931f5e36122ee5c793c1d08a19839cc0" ), "sha1_git": hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"), "blake2s256": hash_to_bytes( "d5fe1939576527e42cfd76a9455a2432" "fe7f56669564577dd93c4280e76d661d" ), "length": 3, "mode": 0o100644, }, } self.symlinks = { b"symlink": { "data": b"target", "blake2s256": hash_to_bytes( "595d221b30fdd8e10e2fdf18376e688e" "9f18d56fd9b6d1eb6a822f8c146c6da6" ), "sha1": hash_to_bytes("0e8a3ad980ec179856012b7eecf4327e99cd44cd"), "sha1_git": hash_to_bytes("1de565933b05f74c75ff9a6520af5f9f8a5a2f1d"), "sha256": hash_to_bytes( "34a04005bcaf206eec990bd9637d9fdb" "6725e0a0c0d4aebf003f17f4c956eb5c" ), "length": 6, "perms": DentryPerms.symlink, } } self.specials = { b"fifo": os.mkfifo, } self.empty_content = { "data": b"", "length": 0, "blake2s256": hash_to_bytes( "69217a3079908094e11121d042354a7c" "1f55b6482ca1a51e1b250dfd1ed0eef9" ), "sha1": hash_to_bytes("da39a3ee5e6b4b0d3255bfef95601890afd80709"), "sha1_git": hash_to_bytes("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), "sha256": hash_to_bytes( "e3b0c44298fc1c149afbf4c8996fb924" "27ae41e4649b934ca495991b7852b855" ), "perms": DentryPerms.content, } self.empty_directory = { "id": hash_to_bytes("4b825dc642cb6eb9a060e54bf8d69288fbee4904"), "entries": [], } # Generated with generate_testdata_from_disk self.tarball_contents = { b"": { "entries": [ { "name": b"bar", "perms": DentryPerms.directory, "target": hash_to_bytes( "3c1f578394f4623f74a0ba7fe761729f59fc6ec4" ), "type": "dir", }, { "name": b"empty-folder", "perms": DentryPerms.directory, "target": hash_to_bytes( "4b825dc642cb6eb9a060e54bf8d69288fbee4904" ), "type": "dir", }, { "name": b"foo", "perms": DentryPerms.directory, "target": hash_to_bytes( "2b41c40f0d1fbffcba12497db71fba83fcca96e5" ), "type": "dir", }, { "name": b"link-to-another-quote", "perms": DentryPerms.symlink, "target": hash_to_bytes( "7d5c08111e21c8a9f71540939998551683375fad" ), "type": "file", }, { "name": b"link-to-binary", "perms": DentryPerms.symlink, "target": hash_to_bytes( "e86b45e538d9b6888c969c89fbd22a85aa0e0366" ), "type": "file", }, { "name": b"link-to-foo", "perms": DentryPerms.symlink, "target": hash_to_bytes( "19102815663d23f8b75a47e7a01965dcdc96468c" ), "type": "file", }, { "name": b"some-binary", "perms": DentryPerms.executable_content, "target": hash_to_bytes( "68769579c3eaadbe555379b9c3538e6628bae1eb" ), "type": "file", }, ], "id": hash_to_bytes("e8b0f1466af8608c8a3fb9879db172b887e80759"), }, b"bar": { "entries": [ { "name": b"barfoo", "perms": DentryPerms.directory, "target": hash_to_bytes( "c3020f6bf135a38c6df3afeb5fb38232c5e07087" ), "type": "dir", } ], "id": hash_to_bytes("3c1f578394f4623f74a0ba7fe761729f59fc6ec4"), }, b"bar/barfoo": { "entries": [ { "name": b"another-quote.org", "perms": DentryPerms.content, "target": hash_to_bytes( "133693b125bad2b4ac318535b84901ebb1f6b638" ), "type": "file", } ], "id": hash_to_bytes("c3020f6bf135a38c6df3afeb5fb38232c5e07087"), }, b"bar/barfoo/another-quote.org": { "blake2s256": hash_to_bytes( "d26c1cad82d43df0bffa5e7be11a60e3" "4adb85a218b433cbce5278b10b954fe8" ), "length": 72, "perms": DentryPerms.content, "sha1": hash_to_bytes("90a6138ba59915261e179948386aa1cc2aa9220a"), "sha1_git": hash_to_bytes("133693b125bad2b4ac318535b84901ebb1f6b638"), "sha256": hash_to_bytes( "3db5ae168055bcd93a4d08285dc99ffe" "e2883303b23fac5eab850273a8ea5546" ), }, b"empty-folder": { "entries": [], "id": 
hash_to_bytes("4b825dc642cb6eb9a060e54bf8d69288fbee4904"), }, b"foo": { "entries": [ { "name": b"barfoo", "perms": DentryPerms.symlink, "target": hash_to_bytes( "8185dfb2c0c2c597d16f75a8a0c37668567c3d7e" ), "type": "file", }, { "name": b"quotes.md", "perms": DentryPerms.content, "target": hash_to_bytes( "7c4c57ba9ff496ad179b8f65b1d286edbda34c9a" ), "type": "file", }, { "name": b"rel-link-to-barfoo", "perms": DentryPerms.symlink, "target": hash_to_bytes( "acac326ddd63b0bc70840659d4ac43619484e69f" ), "type": "file", }, ], "id": hash_to_bytes("2b41c40f0d1fbffcba12497db71fba83fcca96e5"), }, b"foo/barfoo": { "blake2s256": hash_to_bytes( "e1252f2caa4a72653c4efd9af871b62b" "f2abb7bb2f1b0e95969204bd8a70d4cd" ), "data": b"bar/barfoo", "length": 10, "perms": DentryPerms.symlink, "sha1": hash_to_bytes("9057ee6d0162506e01c4d9d5459a7add1fedac37"), "sha1_git": hash_to_bytes("8185dfb2c0c2c597d16f75a8a0c37668567c3d7e"), "sha256": hash_to_bytes( "29ad3f5725321b940332c78e403601af" "ff61daea85e9c80b4a7063b6887ead68" ), }, b"foo/quotes.md": { "blake2s256": hash_to_bytes( "bf7ce4fe304378651ee6348d3e9336ed" "5ad603d33e83c83ba4e14b46f9b8a80b" ), "length": 66, "perms": DentryPerms.content, "sha1": hash_to_bytes("1bf0bb721ac92c18a19b13c0eb3d741cbfadebfc"), "sha1_git": hash_to_bytes("7c4c57ba9ff496ad179b8f65b1d286edbda34c9a"), "sha256": hash_to_bytes( "caca942aeda7b308859eb56f909ec96d" "07a499491690c453f73b9800a93b1659" ), }, b"foo/rel-link-to-barfoo": { "blake2s256": hash_to_bytes( "d9c327421588a1cf61f316615005a2e9" "c13ac3a4e96d43a24138d718fa0e30db" ), "data": b"../bar/barfoo", "length": 13, "perms": DentryPerms.symlink, "sha1": hash_to_bytes("dc51221d308f3aeb2754db48391b85687c2869f4"), "sha1_git": hash_to_bytes("acac326ddd63b0bc70840659d4ac43619484e69f"), "sha256": hash_to_bytes( "8007d20db2af40435f42ddef4b8ad76b" "80adbec26b249fdf0473353f8d99df08" ), }, b"link-to-another-quote": { "blake2s256": hash_to_bytes( "2d0e73cea01ba949c1022dc10c8a43e6" "6180639662e5dc2737b843382f7b1910" ), "data": b"bar/barfoo/another-quote.org", "length": 28, "perms": DentryPerms.symlink, "sha1": hash_to_bytes("cbeed15e79599c90de7383f420fed7acb48ea171"), "sha1_git": hash_to_bytes("7d5c08111e21c8a9f71540939998551683375fad"), "sha256": hash_to_bytes( "e6e17d0793aa750a0440eb9ad5b80b25" "8076637ef0fb68f3ac2e59e4b9ac3ba6" ), }, b"link-to-binary": { "blake2s256": hash_to_bytes( "9ce18b1adecb33f891ca36664da676e1" "2c772cc193778aac9a137b8dc5834b9b" ), "data": b"some-binary", "length": 11, "perms": DentryPerms.symlink, "sha1": hash_to_bytes("d0248714948b3a48a25438232a6f99f0318f59f1"), "sha1_git": hash_to_bytes("e86b45e538d9b6888c969c89fbd22a85aa0e0366"), "sha256": hash_to_bytes( "14126e97d83f7d261c5a6889cee73619" "770ff09e40c5498685aba745be882eff" ), }, b"link-to-foo": { "blake2s256": hash_to_bytes( "08d6cad88075de8f192db097573d0e82" "9411cd91eb6ec65e8fc16c017edfdb74" ), "data": b"foo", "length": 3, "perms": DentryPerms.symlink, "sha1": hash_to_bytes("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"), "sha1_git": hash_to_bytes("19102815663d23f8b75a47e7a01965dcdc96468c"), "sha256": hash_to_bytes( "2c26b46b68ffc68ff99b453c1d304134" "13422d706483bfa0f98a5e886266e7ae" ), }, b"some-binary": { "blake2s256": hash_to_bytes( "922e0f7015035212495b090c27577357" "a740ddd77b0b9e0cd23b5480c07a18c6" ), "length": 5, "perms": DentryPerms.executable_content, "sha1": hash_to_bytes("0bbc12d7f4a2a15b143da84617d95cb223c9b23c"), "sha1_git": hash_to_bytes("68769579c3eaadbe555379b9c3538e6628bae1eb"), "sha256": hash_to_bytes( "bac650d34a7638bb0aeb5342646d24e3" 
"b9ad6b44c9b383621faa482b990a367d" ), }, } def tearDown(self): self.tmpdir.cleanup() def assertContentEqual(self, left, right, *, check_path=False): # noqa if not isinstance(left, Content): raise ValueError("%s is not a Content" % left) if isinstance(right, Content): right = right.get_data() # Compare dictionaries keys = DEFAULT_ALGORITHMS | { "length", "perms", } if check_path: keys |= {"path"} failed = [] for key in keys: try: lvalue = left.data[key] if key == "perms" and "perms" not in right: rvalue = from_disk.mode_to_perms(right["mode"]) else: rvalue = right[key] except KeyError: failed.append(key) continue if lvalue != rvalue: failed.append(key) if failed: raise self.failureException( "Content mismatched:\n" + "\n".join( "content[%s] = %r != %r" % (key, left.data.get(key), right.get(key)) for key in failed ) ) def assertDirectoryEqual(self, left, right): # NoQA if not isinstance(left, Directory): raise ValueError("%s is not a Directory" % left) if isinstance(right, Directory): right = right.get_data() assert left.entries == right["entries"] assert left.hash == right["id"] assert left.to_model() == model.Directory.from_dict(right) def make_contents(self, directory): for filename, content in self.contents.items(): path = os.path.join(directory, filename) with open(path, "wb") as f: f.write(content["data"]) os.chmod(path, content["mode"]) def make_symlinks(self, directory): for filename, symlink in self.symlinks.items(): path = os.path.join(directory, filename) os.symlink(symlink["data"], path) def make_specials(self, directory): for filename, fn in self.specials.items(): path = os.path.join(directory, filename) fn(path) def make_from_tarball(self, directory): tarball = os.path.join(TEST_DATA, "dir-folders", "sample-folder.tgz") with tarfile.open(tarball, "r:gz") as f: f.extractall(os.fsdecode(directory)) class TestContent(DataMixin, unittest.TestCase): def setUp(self): super().setUp() def test_data_to_content(self): for filename, content in self.contents.items(): conv_content = Content.from_bytes( mode=content["mode"], data=content["data"] ) self.assertContentEqual(conv_content, content) self.assertIn(hash_to_hex(conv_content.hash), repr(conv_content)) def test_content_swhid(self): for _, content in self.contents.items(): content_res = Content.from_bytes(mode=content["mode"], data=content["data"]) content_swhid = "swh:1:cnt:" + hash_to_hex(content["sha1_git"]) assert str(content_res.swhid()) == content_swhid class TestDirectory(DataMixin, unittest.TestCase): def setUp(self): super().setUp() def test_directory_swhid(self): directory_swhid = "swh:1:dir:" + hash_to_hex(self.empty_directory["id"]) directory = Directory.from_disk(path=self.tmpdir_name) assert str(directory.swhid()) == directory_swhid class SymlinkToContent(DataMixin, unittest.TestCase): def setUp(self): super().setUp() self.make_symlinks(self.tmpdir_name) def test_symlink_to_content(self): for filename, symlink in self.symlinks.items(): path = os.path.join(self.tmpdir_name, filename) perms = 0o120000 conv_content = Content.from_symlink(path=path, mode=perms) self.assertContentEqual(conv_content, symlink) def test_symlink_to_base_model(self): for filename, symlink in self.symlinks.items(): path = os.path.join(self.tmpdir_name, filename) perms = 0o120000 model_content = Content.from_symlink(path=path, mode=perms).to_model() right = symlink.copy() for key in ("perms", "path", "mode"): right.pop(key, None) right["status"] = "visible" assert model_content == model.Content.from_dict(right) class FileToContent(DataMixin, 
 class FileToContent(DataMixin, unittest.TestCase):
     def setUp(self):
         super().setUp()
         self.make_contents(self.tmpdir_name)
         self.make_symlinks(self.tmpdir_name)
         self.make_specials(self.tmpdir_name)

     def test_symlink_to_content(self):
         for filename, symlink in self.symlinks.items():
             path = os.path.join(self.tmpdir_name, filename)
             conv_content = Content.from_file(path=path)
             self.assertContentEqual(conv_content, symlink)

     def test_file_to_content(self):
         for filename, content in self.contents.items():
             path = os.path.join(self.tmpdir_name, filename)
             conv_content = Content.from_file(path=path)
             self.assertContentEqual(conv_content, content)

     def test_special_to_content(self):
         for filename in self.specials:
             path = os.path.join(self.tmpdir_name, filename)
             conv_content = Content.from_file(path=path)
             self.assertContentEqual(conv_content, self.empty_content)

         for path in ["/dev/null", "/dev/zero"]:
             conv_content = Content.from_file(path=path)
             self.assertContentEqual(conv_content, self.empty_content)

     def test_symlink_to_content_model(self):
         for filename, symlink in self.symlinks.items():
             path = os.path.join(self.tmpdir_name, filename)
             model_content = Content.from_file(path=path).to_model()

             right = symlink.copy()
             for key in ("perms", "path", "mode"):
                 right.pop(key, None)
             right["status"] = "visible"
             assert model_content == model.Content.from_dict(right)

     def test_file_to_content_model(self):
         for filename, content in self.contents.items():
             path = os.path.join(self.tmpdir_name, filename)
             model_content = Content.from_file(path=path).to_model()

             right = content.copy()
             for key in ("perms", "mode"):
                 right.pop(key, None)
             assert model_content.with_data() == model.Content.from_dict(right)

             right["path"] = path
             del right["data"]
             assert model_content == DiskBackedContent.from_dict(right)

     def test_special_to_content_model(self):
         for filename in self.specials:
             path = os.path.join(self.tmpdir_name, filename)
             model_content = Content.from_file(path=path).to_model()

             right = self.empty_content.copy()
             for key in ("perms", "path", "mode"):
                 right.pop(key, None)
             right["status"] = "visible"
             assert model_content == model.Content.from_dict(right)

         for path in ["/dev/null", "/dev/zero"]:
             model_content = Content.from_file(path=path).to_model()

             right = self.empty_content.copy()
             for key in ("perms", "path", "mode"):
                 right.pop(key, None)
             right["status"] = "visible"
             assert model_content == model.Content.from_dict(right)

     def test_symlink_max_length(self):
         for max_content_length in [4, 10]:
             for filename, symlink in self.symlinks.items():
                 path = os.path.join(self.tmpdir_name, filename)
                 content = Content.from_file(path=path)
                 if content.data["length"] > max_content_length:
                     with pytest.raises(Exception, match="too large"):
                         Content.from_file(
                             path=path, max_content_length=max_content_length
                         )
                 else:
                     limited_content = Content.from_file(
                         path=path, max_content_length=max_content_length
                     )
                     assert content == limited_content

     def test_file_max_length(self):
         for max_content_length in [2, 4]:
             for filename, content in self.contents.items():
                 path = os.path.join(self.tmpdir_name, filename)
                 content = Content.from_file(path=path)
                 limited_content = Content.from_file(
                     path=path, max_content_length=max_content_length
                 )
                 assert content.data["length"] == limited_content.data["length"]
                 assert content.data["status"] == "visible"
                 if content.data["length"] > max_content_length:
                     assert limited_content.data["status"] == "absent"
                     assert limited_content.data["reason"] == "Content too large"
                 else:
                     assert limited_content.data["status"] == "visible"
     def test_special_file_max_length(self):
         for max_content_length in [None, 0, 1]:
             for filename in self.specials:
                 path = os.path.join(self.tmpdir_name, filename)
                 content = Content.from_file(path=path)
                 limited_content = Content.from_file(
                     path=path, max_content_length=max_content_length
                 )
                 assert limited_content == content

     def test_file_to_content_with_path(self):
         for filename, content in self.contents.items():
             content_w_path = content.copy()
             path = os.path.join(self.tmpdir_name, filename)
             content_w_path["path"] = path
             conv_content = Content.from_file(path=path)
             self.assertContentEqual(conv_content, content_w_path, check_path=True)


 @pytest.mark.fs
 class DirectoryToObjects(DataMixin, unittest.TestCase):
     def setUp(self):
         super().setUp()
         contents = os.path.join(self.tmpdir_name, b"contents")
         os.mkdir(contents)
         self.make_contents(contents)

         symlinks = os.path.join(self.tmpdir_name, b"symlinks")
         os.mkdir(symlinks)
         self.make_symlinks(symlinks)

         specials = os.path.join(self.tmpdir_name, b"specials")
         os.mkdir(specials)
         self.make_specials(specials)

         empties = os.path.join(self.tmpdir_name, b"empty1", b"empty2")
         os.makedirs(empties)

+    def check_collect(
+        self, directory, expected_directory_count, expected_content_count
+    ):
+        objs = directory.collect()
+        contents = []
+        directories = []
+        for obj in objs:
+            if isinstance(obj, Content):
+                contents.append(obj)
+            elif isinstance(obj, Directory):
+                directories.append(obj)
+
+        self.assertEqual(len(directories), expected_directory_count)
+        self.assertEqual(len(contents), expected_content_count)
+
     def test_directory_to_objects(self):
         directory = Directory.from_disk(path=self.tmpdir_name)

         for name, value in self.contents.items():
             self.assertContentEqual(directory[b"contents/" + name], value)

         for name, value in self.symlinks.items():
             self.assertContentEqual(directory[b"symlinks/" + name], value)

         for name in self.specials:
             self.assertContentEqual(
                 directory[b"specials/" + name],
                 self.empty_content,
             )

         self.assertEqual(
             directory[b"empty1/empty2"].get_data(),
             self.empty_directory,
         )

         # Raise on non existent file
         with self.assertRaisesRegex(KeyError, "b'nonexistent'"):
             directory[b"empty1/nonexistent"]

         # Raise on non existent directory
         with self.assertRaisesRegex(KeyError, "b'nonexistentdir'"):
             directory[b"nonexistentdir/file"]

-        objs = directory.collect()
-
-        self.assertCountEqual(["content", "directory"], objs)
-
-        self.assertEqual(len(objs["directory"]), 6)
-        self.assertEqual(
-            len(objs["content"]), len(self.contents) + len(self.symlinks) + 1
+        self.check_collect(
+            directory,
+            expected_directory_count=6,
+            expected_content_count=len(self.contents) + len(self.symlinks) + 1,
         )

     def test_directory_to_objects_ignore_empty(self):
         directory = Directory.from_disk(
             path=self.tmpdir_name, dir_filter=from_disk.ignore_empty_directories
         )

         for name, value in self.contents.items():
             self.assertContentEqual(directory[b"contents/" + name], value)

         for name, value in self.symlinks.items():
             self.assertContentEqual(directory[b"symlinks/" + name], value)

         for name in self.specials:
             self.assertContentEqual(
                 directory[b"specials/" + name],
                 self.empty_content,
             )

         # empty directories have been ignored recursively
         with self.assertRaisesRegex(KeyError, "b'empty1'"):
             directory[b"empty1"]
         with self.assertRaisesRegex(KeyError, "b'empty1'"):
             directory[b"empty1/empty2"]

-        objs = directory.collect()
-
-        self.assertCountEqual(["content", "directory"], objs)
-
-        self.assertEqual(len(objs["directory"]), 4)
-        self.assertEqual(
-            len(objs["content"]), len(self.contents) + len(self.symlinks) + 1
+        self.check_collect(
+            directory,
+            expected_directory_count=4,
+            expected_content_count=len(self.contents) + len(self.symlinks) + 1,
         )
     def test_directory_to_objects_ignore_name(self):
         directory = Directory.from_disk(
             path=self.tmpdir_name,
             dir_filter=from_disk.ignore_named_directories([b"symlinks"]),
         )
         for name, value in self.contents.items():
             self.assertContentEqual(directory[b"contents/" + name], value)

         for name in self.specials:
             self.assertContentEqual(
                 directory[b"specials/" + name],
                 self.empty_content,
             )

         self.assertEqual(
             directory[b"empty1/empty2"].get_data(),
             self.empty_directory,
         )

         with self.assertRaisesRegex(KeyError, "b'symlinks'"):
             directory[b"symlinks"]

-        objs = directory.collect()
-
-        self.assertCountEqual(["content", "directory"], objs)
-
-        self.assertEqual(len(objs["directory"]), 5)
-        self.assertEqual(len(objs["content"]), len(self.contents) + 1)
+        self.check_collect(
+            directory,
+            expected_directory_count=5,
+            expected_content_count=len(self.contents) + 1,
+        )

     def test_directory_to_objects_ignore_name_case(self):
         directory = Directory.from_disk(
             path=self.tmpdir_name,
             dir_filter=from_disk.ignore_named_directories(
                 [b"symLiNks"], case_sensitive=False
             ),
         )
         for name, value in self.contents.items():
             self.assertContentEqual(directory[b"contents/" + name], value)

         for name in self.specials:
             self.assertContentEqual(
                 directory[b"specials/" + name],
                 self.empty_content,
             )

         self.assertEqual(
             directory[b"empty1/empty2"].get_data(),
             self.empty_directory,
         )

         with self.assertRaisesRegex(KeyError, "b'symlinks'"):
             directory[b"symlinks"]

-        objs = directory.collect()
-
-        self.assertCountEqual(["content", "directory"], objs)
-
-        self.assertEqual(len(objs["directory"]), 5)
-        self.assertEqual(len(objs["content"]), len(self.contents) + 1)
+        self.check_collect(
+            directory,
+            expected_directory_count=5,
+            expected_content_count=len(self.contents) + 1,
+        )

     def test_directory_entry_order(self):
         with tempfile.TemporaryDirectory() as dirname:
             dirname = os.fsencode(dirname)
             open(os.path.join(dirname, b"foo."), "a")
             open(os.path.join(dirname, b"foo0"), "a")
             os.mkdir(os.path.join(dirname, b"foo"))
             directory = Directory.from_disk(path=dirname)

         assert [entry["name"] for entry in directory.entries] == [
             b"foo.",
             b"foo",
             b"foo0",
         ]


 @pytest.mark.fs
 class TarballTest(DataMixin, unittest.TestCase):
     def setUp(self):
         super().setUp()
         self.make_from_tarball(self.tmpdir_name)

     def test_contents_match(self):
         directory = Directory.from_disk(
             path=os.path.join(self.tmpdir_name, b"sample-folder")
         )

         for name, expected in self.tarball_contents.items():
             obj = directory[name]
             if isinstance(obj, Content):
                 self.assertContentEqual(obj, expected)
             elif isinstance(obj, Directory):
                 self.assertDirectoryEqual(obj, expected)
             else:
                 raise self.failureException("Unknown type for %s" % obj)


 class TarballIterDirectory(DataMixin, unittest.TestCase):
     def setUp(self):
         super().setUp()
         self.make_from_tarball(self.tmpdir_name)

     def test_iter_directory(self):
         """Iter from_disk.directory should yield the full directory tree"""
         directory = Directory.from_disk(
             path=os.path.join(self.tmpdir_name, b"sample-folder")
         )
         contents, skipped_contents, directories = from_disk.iter_directory(directory)

         expected_nb = defaultdict(int)
         for name in self.tarball_contents.keys():
             obj = directory[name]
             expected_nb[obj.object_type] += 1

         assert len(contents) == expected_nb["content"] and len(contents) > 0
         assert len(skipped_contents) == 0
         assert len(directories) == expected_nb["directory"] and len(directories) > 0
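For the curious, test_directory_entry_order above holds because directory_entry_sort_key follows git's tree ordering, which compares directory names as if they ended with a slash (illustrative comparison):

    assert b"foo." < b"foo/" < b"foo0"   # 0x2e < 0x2f < 0x30,
                                         # hence foo. < foo (a dir) < foo0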
 class DirectoryManipulation(DataMixin, unittest.TestCase):
     def test_directory_access_nested(self):
         d = Directory()
         d[b"a"] = Directory()
         d[b"a/b"] = Directory()

         self.assertEqual(d[b"a/b"].get_data(), self.empty_directory)

     def test_directory_del_nested(self):
         d = Directory()
         d[b"a"] = Directory()
         d[b"a/b"] = Directory()

         with self.assertRaisesRegex(KeyError, "b'c'"):
             del d[b"a/b/c"]

         with self.assertRaisesRegex(KeyError, "b'level2'"):
             del d[b"a/level2/c"]

         del d[b"a/b"]
         self.assertEqual(d[b"a"].get_data(), self.empty_directory)

     def test_directory_access_self(self):
         d = Directory()
         self.assertIs(d, d[b""])
         self.assertIs(d, d[b"/"])
         self.assertIs(d, d[b"//"])

     def test_directory_access_wrong_type(self):
         d = Directory()
         with self.assertRaisesRegex(ValueError, "bytes from Directory"):
             d["foo"]
         with self.assertRaisesRegex(ValueError, "bytes from Directory"):
             d[42]

     def test_directory_repr(self):
         entries = [b"a", b"b", b"c"]
         d = Directory()
         for entry in entries:
             d[entry] = Directory()

         r = repr(d)
         self.assertIn(hash_to_hex(d.hash), r)

         for entry in entries:
             self.assertIn(str(entry), r)

     def test_directory_set_wrong_type_name(self):
         d = Directory()
         with self.assertRaisesRegex(ValueError, "bytes Directory entry"):
             d["foo"] = Directory()
         with self.assertRaisesRegex(ValueError, "bytes Directory entry"):
             d[42] = Directory()

     def test_directory_set_nul_in_name(self):
         d = Directory()

         with self.assertRaisesRegex(ValueError, "nul bytes"):
             d[b"\x00\x01"] = Directory()

     def test_directory_set_empty_name(self):
         d = Directory()
         with self.assertRaisesRegex(ValueError, "must have a name"):
             d[b""] = Directory()
         with self.assertRaisesRegex(ValueError, "must have a name"):
             d[b"/"] = Directory()

     def test_directory_set_wrong_type(self):
         d = Directory()
         with self.assertRaisesRegex(ValueError, "Content or Directory"):
             d[b"entry"] = object()

     def test_directory_del_wrong_type(self):
         d = Directory()
         with self.assertRaisesRegex(ValueError, "bytes Directory entry"):
             del d["foo"]
         with self.assertRaisesRegex(ValueError, "bytes Directory entry"):
             del d[42]

     def test_directory_contains(self):
         d = Directory()
         d[b"a"] = Directory()
         d[b"a/b"] = Directory()
         d[b"a/b/c"] = Directory()
         d[b"a/b/c/d"] = Content()

         self.assertIn(b"a", d)
         self.assertIn(b"a/b", d)
         self.assertIn(b"a/b/c", d)
         self.assertIn(b"a/b/c/d", d)

         self.assertNotIn(b"b", d)
         self.assertNotIn(b"b/c", d)
         self.assertNotIn(b"b/c/d", d)
diff --git a/swh/model/tests/test_merkle.py b/swh/model/tests/test_merkle.py
index 52edb2c..a852541 100644
--- a/swh/model/tests/test_merkle.py
+++ b/swh/model/tests/test_merkle.py
@@ -1,267 +1,262 @@
-# Copyright (C) 2017-2020 The Software Heritage developers
+# Copyright (C) 2017-2022 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import unittest

 from swh.model import merkle


 class MerkleTestNode(merkle.MerkleNode):
     object_type = "tested_merkle_node_type"

     def __init__(self, data):
         super().__init__(data)
         self.compute_hash_called = 0

-    def compute_hash(self):
+    def compute_hash(self) -> bytes:
         self.compute_hash_called += 1
         child_data = [child + b"=" + self[child].hash for child in sorted(self)]
-
-        return b"hash(" + b", ".join([self.data["value"]] + child_data) + b")"
+        return b"hash(" + b", ".join([self.data.get("value", b"")] + child_data) + b")"


 class MerkleTestLeaf(merkle.MerkleLeaf):
     object_type = "tested_merkle_leaf_type"

     def __init__(self, data):
         super().__init__(data)
         self.compute_hash_called = 0
     def compute_hash(self):
         self.compute_hash_called += 1
-        return b"hash(" + self.data["value"] + b")"
+        return b"hash(" + self.data.get("value", b"") + b")"


 class TestMerkleLeaf(unittest.TestCase):
     def setUp(self):
         self.data = {"value": b"value"}
         self.instance = MerkleTestLeaf(self.data)

     def test_equality(self):
         leaf1 = MerkleTestLeaf(self.data)
         leaf2 = MerkleTestLeaf(self.data)
         leaf3 = MerkleTestLeaf({})

         self.assertEqual(leaf1, leaf2)
         self.assertNotEqual(leaf1, leaf3)

     def test_hash(self):
         self.assertEqual(self.instance.compute_hash_called, 0)

         instance_hash = self.instance.hash
         self.assertEqual(self.instance.compute_hash_called, 1)

         instance_hash2 = self.instance.hash
         self.assertEqual(self.instance.compute_hash_called, 1)

         self.assertEqual(instance_hash, instance_hash2)

     def test_data(self):
         self.assertEqual(self.instance.get_data(), self.data)

     def test_collect(self):
         collected = self.instance.collect()
         self.assertEqual(
             collected,
-            {
-                self.instance.object_type: {
-                    self.instance.hash: self.instance.get_data(),
-                },
-            },
+            {self.instance},
         )

         collected2 = self.instance.collect()
-        self.assertEqual(collected2, {})
+        self.assertEqual(collected2, set())

         self.instance.reset_collect()
         collected3 = self.instance.collect()
         self.assertEqual(collected, collected3)

     def test_leaf(self):
         with self.assertRaisesRegex(ValueError, "is a leaf"):
             self.instance[b"key1"] = "Test"

         with self.assertRaisesRegex(ValueError, "is a leaf"):
             del self.instance[b"key1"]

         with self.assertRaisesRegex(ValueError, "is a leaf"):
             self.instance[b"key1"]

         with self.assertRaisesRegex(ValueError, "is a leaf"):
             self.instance.update(self.data)


 class TestMerkleNode(unittest.TestCase):
     maxDiff = None

     def setUp(self):
         self.root = MerkleTestNode({"value": b"root"})
         self.nodes = {b"root": self.root}
         for i in (b"a", b"b", b"c"):
             value = b"root/" + i
             node = MerkleTestNode(
                 {
                     "value": value,
                 }
             )
             self.root[i] = node
             self.nodes[value] = node
             for j in (b"a", b"b", b"c"):
                 value2 = value + b"/" + j
                 node2 = MerkleTestNode(
                     {
                         "value": value2,
                     }
                 )
                 node[j] = node2
                 self.nodes[value2] = node2
                 for k in (b"a", b"b", b"c"):
                     value3 = value2 + b"/" + k
                     node3 = MerkleTestNode(
                         {
                             "value": value3,
                         }
                     )
                     node2[k] = node3
                     self.nodes[value3] = node3

     def test_equality(self):
-        node1 = merkle.MerkleNode({"foo": b"bar"})
-        node2 = merkle.MerkleNode({"foo": b"bar"})
-        node3 = merkle.MerkleNode({})
+        node1 = MerkleTestNode({"value": b"bar"})
+        node2 = MerkleTestNode({"value": b"bar"})
+        node3 = MerkleTestNode({})

         self.assertEqual(node1, node2)
         self.assertNotEqual(node1, node3, node1 == node3)

-        node1["foo"] = node3
+        node1[b"a"] = node3
         self.assertNotEqual(node1, node2)

-        node2["foo"] = node3
+        node2[b"a"] = node3
         self.assertEqual(node1, node2)

     def test_hash(self):
         for node in self.nodes.values():
             self.assertEqual(node.compute_hash_called, 0)

         # Root hash will compute hash for all the nodes
         hash = self.root.hash
         for node in self.nodes.values():
             self.assertEqual(node.compute_hash_called, 1)
             self.assertIn(node.data["value"], hash)

         # Should use the cached value
         hash2 = self.root.hash
         self.assertEqual(hash, hash2)
         for node in self.nodes.values():
             self.assertEqual(node.compute_hash_called, 1)

         # Should still use the cached value
         hash3 = self.root.update_hash(force=False)
         self.assertEqual(hash, hash3)
         for node in self.nodes.values():
             self.assertEqual(node.compute_hash_called, 1)

         # Force update of the cached value for a deeply nested node
         self.root[b"a"][b"b"].update_hash(force=True)
         for key, node in self.nodes.items():
             # update_hash rehashes all children
             if key.startswith(b"root/a/b"):
                 self.assertEqual(node.compute_hash_called, 2)
             else:
                 self.assertEqual(node.compute_hash_called, 1)

         hash4 = self.root.hash
         self.assertEqual(hash, hash4)
         for key, node in self.nodes.items():
             # update_hash also invalidates all parents
             if key in (b"root", b"root/a") or key.startswith(b"root/a/b"):
                 self.assertEqual(node.compute_hash_called, 2)
             else:
                 self.assertEqual(node.compute_hash_called, 1)

     def test_collect(self):
         collected = self.root.collect()
-        self.assertEqual(len(collected[self.root.object_type]), len(self.nodes))
+        self.assertEqual(collected, set(self.nodes.values()))
         for node in self.nodes.values():
             self.assertTrue(node.collected)
         collected2 = self.root.collect()
-        self.assertEqual(collected2, {})
+        self.assertEqual(collected2, set())

     def test_iter_tree_with_deduplication(self):
         nodes = list(self.root.iter_tree())
         self.assertCountEqual(nodes, self.nodes.values())

     def test_iter_tree_without_deduplication(self):
         # duplicate existing hash in merkle tree
         self.root[b"d"] = MerkleTestNode({"value": b"root/c/c/c"})
         nodes_dedup = list(self.root.iter_tree())
         nodes = list(self.root.iter_tree(dedup=False))
         assert nodes != nodes_dedup
         assert len(nodes) == len(nodes_dedup) + 1

     def test_get(self):
         for key in (b"a", b"b", b"c"):
             self.assertEqual(self.root[key], self.nodes[b"root/" + key])

         with self.assertRaisesRegex(KeyError, "b'nonexistent'"):
             self.root[b"nonexistent"]

     def test_del(self):
         hash_root = self.root.hash
         hash_a = self.nodes[b"root/a"].hash
         del self.root[b"a"][b"c"]
         hash_root2 = self.root.hash
         hash_a2 = self.nodes[b"root/a"].hash

         self.assertNotEqual(hash_root, hash_root2)
         self.assertNotEqual(hash_a, hash_a2)

         self.assertEqual(self.nodes[b"root/a/c"].parents, [])

         with self.assertRaisesRegex(KeyError, "b'nonexistent'"):
             del self.root[b"nonexistent"]

     def test_update(self):
         hash_root = self.root.hash
         hash_b = self.root[b"b"].hash
         new_children = {
             b"c": MerkleTestNode({"value": b"root/b/new_c"}),
             b"d": MerkleTestNode({"value": b"root/b/d"}),
         }

         # collect all nodes
         self.root.collect()

         self.root[b"b"].update(new_children)

         # Ensure everyone got reparented
         self.assertEqual(new_children[b"c"].parents, [self.root[b"b"]])
         self.assertEqual(new_children[b"d"].parents, [self.root[b"b"]])
         self.assertEqual(self.nodes[b"root/b/c"].parents, [])

         hash_root2 = self.root.hash
         self.assertNotEqual(hash_root, hash_root2)
         self.assertIn(b"root/b/new_c", hash_root2)
         self.assertIn(b"root/b/d", hash_root2)

         hash_b2 = self.root[b"b"].hash
         self.assertNotEqual(hash_b, hash_b2)

         for key, node in self.nodes.items():
             if key in (b"root", b"root/b"):
                 self.assertEqual(node.compute_hash_called, 2)
             else:
                 self.assertEqual(node.compute_hash_called, 1)

         # Ensure we collected root, root/b, and both new children
         collected_after_update = self.root.collect()
-        self.assertCountEqual(
-            collected_after_update[MerkleTestNode.object_type],
-            [
-                self.nodes[b"root"].hash,
-                self.nodes[b"root/b"].hash,
-                new_children[b"c"].hash,
-                new_children[b"d"].hash,
-            ],
+        self.assertEqual(
+            collected_after_update,
+            {
+                self.nodes[b"root"],
+                self.nodes[b"root/b"],
+                new_children[b"c"],
+                new_children[b"d"],
+            },
         )

         # test that noop updates don't invalidate anything
         self.root[b"a"][b"b"].update({})
-        self.assertEqual(self.root.collect(), {})
+        self.assertEqual(self.root.collect(), set())
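A closing sketch of the invalidation and collection flow these tests exercise, using MerkleTestNode from this file (illustrative only):

    root = MerkleTestNode({"value": b"root"})
    root[b"a"] = MerkleTestNode({"value": b"root/a"})

    h1 = root.hash                       # computes and caches all hashes
    root[b"a"][b"new"] = MerkleTestNode({"value": b"root/a/new"})
    h2 = root.hash                       # parents were invalidated, recomputed
    assert h1 != h2
    assert root.collect() == {root, root[b"a"], root[b"a"][b"new"]}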