diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..f5d2a49cd --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "restructuredtext.workspaceRoot": "/home/antoine/swh/swh-environment/swh-storage" +} \ No newline at end of file diff --git a/swh/storage/algos/__init__.py b/swh/storage/algos/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/swh/storage/algos/diff.py b/swh/storage/algos/diff.py new file mode 100644 index 000000000..75c53f11e --- /dev/null +++ b/swh/storage/algos/diff.py @@ -0,0 +1,402 @@ +# Copyright (C) 2018 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +# Utility module to efficiently compute the list of changed files +# between two directory trees. +# The implementation is inspired from the work of Alberto Cortés +# for the go-git project. For more details, you can refer to: +# - this blog post: https://blog.sourced.tech/post/difftree/ +# - the reference implementation in go: +# https://github.com/src-d/go-git/tree/master/utils/merkletrie + + +import collections + +from swh.model.identifiers import directory_identifier + +from .dir_iterators import ( + DirectoryIterator, DoubleDirectoryIterator, Remaining +) + +# get the hash identifier for an empty directory +_empty_dir_hash = directory_identifier({'entries': []}) + + +def _get_rev(storage, rev_id): + """ + Return revision data from swh storage. + """ + return list(storage.revision_get([rev_id]))[0] + + +class _RevisionChangesList(object): + """ + Helper class to track the changes between two + revision directories. + """ + + def __init__(self, storage, track_renaming): + """ + Args: + storage: instance of swh storage + track_renaming (bool): whether to track or not files renaming + """ + self.storage = storage + self.track_renaming = track_renaming + self.result = [] + # dicts used to track file renaming based on hash value + # we use a list instead of a single entry to handle the corner + # case when a repository contains multiple instance of + # the same file in different directories and a commit + # renames all of them + self.inserted_hash_idx = collections.defaultdict(list) + self.deleted_hash_idx = collections.defaultdict(list) + + def add_insert(self, it_to): + """ + Add a file insertion in the to directory. + + Args: + it_to (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on the to directory + """ + to_hash = it_to.current_hash() + # if the current file hash has been previously marked as deleted, + # the file has been renamed + if self.track_renaming and self.deleted_hash_idx[to_hash]: + # pop the delete change index in the same order it was inserted + change = self.result[self.deleted_hash_idx[to_hash].pop(0)] + # change the delete change as a rename one + change['type'] = 'rename' + change['to'] = it_to.current() + change['to_path'] = it_to.current_path() + else: + # add the insert change in the list + self.result.append({'type': 'insert', + 'from': None, + 'from_path': None, + 'to': it_to.current(), + 'to_path': it_to.current_path()}) + # if rename tracking is activated, add the change index in + # the inserted_hash_idx dict + if self.track_renaming: + self.inserted_hash_idx[to_hash].append(len(self.result) - 1) + + def add_delete(self, it_from): + """ + Add a file deletion in the from directory. 
+ + Args: + it_from (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on the from directory + """ + from_hash = it_from.current_hash() + # if the current file has been previously marked as inserted, + # the file has been renamed + if self.track_renaming and self.inserted_hash_idx[from_hash]: + # pop the insert change index in the same order it was inserted + change = self.result[self.inserted_hash_idx[from_hash].pop(0)] + # change the insert change as a rename one + change['type'] = 'rename' + change['from'] = it_from.current() + change['from_path'] = it_from.current_path() + else: + # add the delete change in the list + self.result.append({'type': 'delete', + 'from': it_from.current(), + 'from_path': it_from.current_path(), + 'to': None, + 'to_path': None}) + # if rename tracking is activated, add the change index in + # the deleted_hash_idx dict + if self.track_renaming: + self.deleted_hash_idx[from_hash].append(len(self.result) - 1) + + def add_modify(self, it_from, it_to): + """ + Add a file modification in the to directory. + + Args: + it_from (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on the from directory + it_to (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on the to directory + """ + self.result.append({'type': 'modify', + 'from': it_from.current(), + 'from_path': it_from.current_path(), + 'to': it_to.current(), + 'to_path': it_to.current_path()}) + + def add_recursive(self, it, insert): + """ + Recursively add changes from a directory. + + Args: + it (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on a directory + insert (bool): the type of changes to add (insertion + or deletion) + """ + # current iterated element is a regular file, + # simply add adequate change in the list + if not it.current_is_dir(): + if insert: + self.add_insert(it) + else: + self.add_delete(it) + return + # current iterated element is a directory, + dir_id = it.current_hash() + # handle empty dir insertion/deletion as the swh model allows + # such objects, unlike git + if dir_id == _empty_dir_hash: + if insert: + self.add_insert(it) + else: + self.add_delete(it) + # iterate on files reachable from it and add + # adequate changes in the list + else: + sub_it = DirectoryIterator(self.storage, dir_id, + it.current_path() + b'/') + sub_it_current = sub_it.step() + while sub_it_current: + if not sub_it.current_is_dir(): + if insert: + self.add_insert(sub_it) + else: + self.add_delete(sub_it) + sub_it_current = sub_it.step() + + def add_recursive_insert(self, it_to): + """ + Recursively add file insertions from a to directory. + + Args: + it_to (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on a to directory + """ + self.add_recursive(it_to, True) + + def add_recursive_delete(self, it_from): + """ + Recursively add file deletions from a from directory. + + Args: + it_from (swh.storage.algos.dir_iterators.DirectoryIterator): + iterator on a from directory + """ + self.add_recursive(it_from, False) + + +def _diff_elts_same_name(changes, it): + """ + Compare two directory entries with the same name and add adequate + changes if any.
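+ Depending on the comparison status, a modify change is added when both entries are regular files that differ, a file/directory mismatch is recorded as a recursive delete plus insert, and both iterators descend one level when the entries are directories that still need to be compared.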
+ + Args: + changes (_RevisionChangesList): the list of changes between + two revisions + it (swh.storage.algos.dir_iterators.DoubleDirectoryIterator): + the iterator traversing two revision directories at the same time + """ + # compare the two current directory elements of the iterator + status = it.compare() + # elements have same hash and same permissions: + # no changes to add and call next on the two iterators + if status['same_hash'] and status['same_perms']: + it.next_both() + # elements are regular files and have been modified: + # insert the modification change in the list and + # call next on the two iterators + elif status['both_are_files']: + changes.add_modify(it.it_from, it.it_to) + it.next_both() + # one element is a regular file, the other a directory: + # recursively add delete/insert changes and call next + # on the two iterators + elif status['file_and_dir']: + changes.add_recursive_delete(it.it_from) + changes.add_recursive_insert(it.it_to) + it.next_both() + # both elements are directories: + elif status['both_are_dirs']: + # from directory is empty: + # recursively add insert changes in the to directory + # and call next on the two iterators + if status['from_is_empty_dir']: + changes.add_recursive_insert(it.it_to) + it.next_both() + # to directory is empty: + # recursively add delete changes in the from directory + # and call next on the two iterators + elif status['to_is_empty_dir']: + changes.add_recursive_delete(it.it_from) + it.next_both() + # both directories are not empty: + # call step on the two iterators to descend further in + # the directory trees. + elif not status['from_is_empty_dir'] and not status['to_is_empty_dir']: + it.step_both() + + +def _compare_paths(path1, path2): + """ + Compare paths in lexicographic depth-first order. + For instance, it returns: + - "a" < "b" + - "b" < "b/c/d" + - "c/foo.txt" < "c.txt" + """ + path1_parts = path1.split(b'/') + path2_parts = path2.split(b'/') + i = 0 + while True: + if len(path1_parts) == len(path2_parts) and i == len(path1_parts): + return 0 + elif len(path2_parts) == i: + return 1 + elif len(path1_parts) == i: + return -1 + else: + if path2_parts[i] > path1_parts[i]: + return -1 + elif path2_parts[i] < path1_parts[i]: + return 1 + i = i + 1 + + +def _diff_elts(changes, it): + """ + Compare two directory entries. + + Args: + changes (_RevisionChangesList): the list of changes between + two revisions + it (swh.storage.algos.dir_iterators.DoubleDirectoryIterator): + the iterator traversing two revision directories at the same time + """ + # compare current from and to paths in depth-first lexicographic order + c = _compare_paths(it.it_from.current_path(), it.it_to.current_path()) + # current from path is lower than the current to path: + # the from path has been deleted + if c < 0: + changes.add_recursive_delete(it.it_from) + it.next_from() + # current from path is greater than the current to path: + # the to path has been inserted + elif c > 0: + changes.add_recursive_insert(it.it_to) + it.next_to() + # paths are the same and need more processing + else: + _diff_elts_same_name(changes, it) + + +def diff_directories(storage, from_dir, to_dir, track_renaming=False): + """ + Compute the differential between two directories, i.e. the list of + file changes (insertion / deletion / modification / renaming) + between them.
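+ When *track_renaming* is set, a deletion and an insertion that share the same content hash are reported as a single rename change instead of two separate changes.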
+ + Args: + storage (swh.storage.storage.Storage): instance of a swh + storage (either local or remote, for optimal performance + the use of a local storage is recommended) + from_dir (bytes): the swh identifier of the directory to compare from + to_dir (bytes): the swh identifier of the directory to compare to + track_renaming (bool): whether or not to track files renaming + + Returns: + list: A list of dict representing the changes between the two + revisions. Each dict contains the following entries: + + - *type*: a string describing the type of change + ('insert' / 'delete' / 'modify' / 'rename') + + - *from*: a dict containing the directory entry metadata in the + from revision (None in case of an insertion) + + - *from_path*: bytes string corresponding to the absolute path + of the from revision entry (None in case of an insertion) + + - *to*: a dict containing the directory entry metadata in the + to revision (None in case of a deletion) + + - *to_path*: bytes string corresponding to the absolute path + of the to revision entry (None in case of a deletion) + + The returned list is sorted in lexicographic depth-first order + according to the value of the *to_path* field. + + """ + changes = _RevisionChangesList(storage, track_renaming) + it = DoubleDirectoryIterator(storage, from_dir, to_dir) + while True: + r = it.remaining() + if r == Remaining.NoMoreFiles: + break + elif r == Remaining.OnlyFromFilesRemain: + changes.add_recursive_delete(it.it_from) + it.next_from() + elif r == Remaining.OnlyToFilesRemain: + changes.add_recursive_insert(it.it_to) + it.next_to() + else: + _diff_elts(changes, it) + return changes.result + + +def diff_revisions(storage, from_rev, to_rev, track_renaming=False): + """ + Compute the differential between two revisions, + i.e. the list of file changes between the two associated directories. + + Args: + storage (swh.storage.storage.Storage): instance of a swh + storage (either local or remote, for optimal performance + the use of a local storage is recommended) + from_rev (bytes): the identifier of the revision to compare from + to_rev (bytes): the identifier of the revision to compare to + track_renaming (bool): whether or not to track files renaming + + Returns: + list: A list of dict describing the introduced file changes + (see :func:`swh.storage.algos.diff.diff_directories`). + + """ + from_dir = None + if from_rev: + from_dir = _get_rev(storage, from_rev)['directory'] + to_dir = _get_rev(storage, to_rev)['directory'] + return diff_directories(storage, from_dir, to_dir, track_renaming) + + +def diff_revision(storage, revision, track_renaming=False): + """ + Computes the differential between a revision and its first parent. + If the revision has no parents, the directory to compare from + is considered as empty. + In other words, it computes the file changes introduced in a + specific revision. + + Args: + storage (swh.storage.storage.Storage): instance of a swh + storage (either local or remote, for optimal performance + the use of a local storage is recommended) + revision (bytes): the identifier of the revision from which to + compute the introduced changes. + track_renaming (bool): whether or not to track files renaming + + Returns: + list: A list of dict describing the introduced file changes + (see :func:`swh.storage.algos.diff.diff_directories`). 
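+
+        A minimal usage sketch (the storage configuration mirrors the
+        server defaults shipped in this patch; ``revision_id`` stands for
+        a known revision identifier and is purely illustrative)::
+
+            from swh.storage import get_storage
+            from swh.storage.algos.diff import diff_revision
+
+            # requires a reachable local database and object storage
+            storage = get_storage(cls='local', args={
+                'db': 'dbname=softwareheritage-dev',
+                'objstorage': {'cls': 'pathslicing',
+                               'args': {'root': '/srv/softwareheritage/objects',
+                                        'slicing': '0:2/2:4/4:6'}}})
+
+            for change in diff_revision(storage, revision_id,
+                                        track_renaming=True):
+                print(change['type'], change['from_path'], change['to_path'])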
+ """ + rev_data = _get_rev(storage, revision) + parent = None + if rev_data['parents']: + parent = rev_data['parents'][0] + return diff_revisions(storage, parent, revision, track_renaming) diff --git a/swh/storage/algos/dir_iterators.py b/swh/storage/algos/dir_iterators.py new file mode 100644 index 000000000..798644d5f --- /dev/null +++ b/swh/storage/algos/dir_iterators.py @@ -0,0 +1,347 @@ +# Copyright (C) 2018 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +# Utility module to iterate on directory trees. +# The implementation is inspired from the work of Alberto Cortés +# for the go-git project. For more details, you can refer to: +# - this blog post: https://blog.sourced.tech/post/difftree/ +# - the reference implementation in go: +# https://github.com/src-d/go-git/tree/master/utils/merkletrie + + +from enum import Enum + +from swh.model.identifiers import directory_identifier + +# get the hash identifier for an empty directory +_empty_dir_hash = directory_identifier({'entries': []}) + + +def _get_dir(storage, dir_id): + """ + Return directory data from swh storage. + """ + return storage.directory_ls(dir_id) if dir_id else [] + + +class DirectoryIterator(object): + """ + Helper class used to iterate on a directory tree in a depth-first search + way with some additionnal features: + - sibling nodes are iterated in lexicographic order by name + - it is possible to skip the visit of sub-directories nodes + for efficency reasons when comparing two trees (no need to + go deeper if two directories have the same hash) + """ + + def __init__(self, storage, dir_id, base_path=b''): + """ + Args: + storage (swh.storage.storage.Storage): instance of swh storage + (either local or remote) + dir_id (bytes): identifier of a root directory + base_path (bytes): optional base path used when traversing + a sub-directory + """ + self.storage = storage + self.root_dir_id = dir_id + self.base_path = base_path + self.restart() + + def restart(self): + """ + Restart the iteration at the beginning. + """ + # stack of frames representing currently visited directories: + # the root directory is at the bottom while the current one + # is at the top + self.frames = [] + self._push_dir_frame(self.root_dir_id) + self.has_started = False + + def _push_dir_frame(self, dir_id): + """ + Visit a sub-directory by pushing a new frame to the stack. + Each frame is itself a stack of directory entries. + + Args: + dir_id (bytes): identifier of a root directory + """ + if dir_id: + if dir_id == _empty_dir_hash: + self.frames.append([]) + else: + # get directory entries + dir_data = _get_dir(self.storage, dir_id) + # sort them in lexicographical order + dir_data = sorted(dir_data, key=lambda e: e['name']) + # reverse the ordering in order to unstack the "smallest" + # entry each time the iterator advances + dir_data.reverse() + # push the directory frame to the main stack + self.frames.append(dir_data) + + def top(self): + """ + Returns: + list: The top frame of the main directories stack + """ + if not self.frames: + return None + return self.frames[-1] + + def current(self): + """ + Returns: + dict: The current visited directory entry, i.e. 
the + top element from the top frame + """ + top_frame = self.top() + if not top_frame: + return None + return top_frame[-1] + + def current_hash(self): + """ + Returns: + bytes: The hash value of the currently visited directory + entry + """ + return self.current()['target'] + + def current_perms(self): + """ + Returns: + int: The permissions value of the currently visited directory + entry + """ + return self.current()['perms'] + + def current_path(self): + """ + Returns: + str: The absolute path from the root directory of + the currently visited directory entry + """ + top_frame = self.top() + if not top_frame: + return None + path = [] + for frame in self.frames: + path.append(frame[-1]['name']) + return self.base_path + b'/'.join(path) + + def current_is_dir(self): + """ + Returns: + bool: If the currently visited directory entry is + a directory + """ + return self.current()['type'] == 'dir' + + def _advance(self, descend): + """ + Advance in the tree iteration. + + Args: + descend (bool): whether or not to push a new frame + if the currently visited element is a sub-directory + + Returns: + dict: The description of the newly visited directory entry + """ + current = self.current() + if not self.has_started or not current: + self.has_started = True + return current + + if descend and self.current_is_dir(): + self._push_dir_frame(current['target']) + else: + self.drop() + + return self.current() + + def next(self): + """ + Advance the tree iteration by dropping the current visited + directory entry from the top frame. If the top frame ends up empty, + the operation is recursively applied to remove all empty frames + as the tree is climbed up towards its root. + + Returns: + dict: The description of the newly visited directory entry + """ + return self._advance(False) + + def step(self): + """ + Advance the tree iteration like the next operation with the + difference that if the current visited element is a sub-directory + a new frame representing its content is pushed to the main stack. + + Returns: + dict: The description of the newly visited directory entry + """ + return self._advance(True) + + def drop(self): + """ + Drop the current visited element from the top frame. + If the frame ends up empty, the operation is recursively + applied. + """ + frame = self.top() + if not frame: + return + frame.pop() + if not frame: + self.frames.pop() + self.drop() + + +class Remaining(Enum): + """ + Enum to represent the current state when iterating + on both directory trees at the same time. + """ + NoMoreFiles = 0 + OnlyToFilesRemain = 1 + OnlyFromFilesRemain = 2 + BothHaveFiles = 3 + + +class DoubleDirectoryIterator(object): + """ + Helper class to traverse two directory trees at the same + time and compare their contents to detect changes between them. + """ + + def __init__(self, storage, dir_from, dir_to): + """ + Args: + storage: instance of swh storage + dir_from (bytes): hash identifier of the from directory + dir_to (bytes): hash identifier of the to directory + """ + self.storage = storage + self.dir_from = dir_from + self.dir_to = dir_to + self.restart() + + def restart(self): + """ + Restart the double iteration at the beginning. + """ + # initialize custom dfs iterators for the two directories + self.it_from = DirectoryIterator(self.storage, self.dir_from) + self.it_to = DirectoryIterator(self.storage, self.dir_to) + # grab the first element of each iterator + self.it_from.next() + self.it_to.next() + + def next_from(self): + """ + Apply the next operation on the from iterator. 
+ """ + self.it_from.next() + + def next_to(self): + """ + Apply the next operation on the to iterator. + """ + self.it_to.next() + + def next_both(self): + """ + Apply the next operation on both iterators. + """ + self.next_from() + self.next_to() + + def step_from(self): + """ + Apply the step operation on the from iterator. + """ + self.it_from.step() + + def step_to(self): + """ + Apply the step operation on the from iterator. + """ + self.it_to.step() + + def step_both(self): + """ + Apply the step operation on the both iterators. + """ + self.step_from() + self.step_to() + + def remaining(self): + """ + Returns: + Remaining: the current state of the double iteration + """ + from_current = self.it_from.current() + to_current = self.it_to.current() + # no more files to iterate in both iterators + if not from_current and not to_current: + return Remaining.NoMoreFiles + # still some files to iterate in the to iterator + elif not from_current and to_current: + return Remaining.OnlyToFilesRemain + # still some files to iterate in the from iterator + elif from_current and not to_current: + return Remaining.OnlyFromFilesRemain + # still files to iterate in the both iterators + else: + return Remaining.BothHaveFiles + + def compare(self): + """ + Compare the current iterated directory entries in both iterators + and return the comparison status. + + Returns: + dict: The status of the comparison with the following bool values: + * *same_hash*: indicates if the two entries have the same hash + * *same_perms*: indicates if the two entries have the same + permissions + * *both_are_dirs*: indicates if the two entries are directories + * *both_are_files*: indicates if the two entries are regular + files + * *file_and_dir*: indicates if one of the entry is a directory + and the other a regular file + * *from_is_empty_dir*: indicates if the from entry is the + empty directory + * *from_is_empty_dir*: indicates if the to entry is the + empty directory + """ + from_current_hash = self.it_from.current_hash() + to_current_hash = self.it_to.current_hash() + from_current_perms = self.it_from.current_perms() + to_current_perms = self.it_to.current_perms() + from_is_dir = self.it_from.current_is_dir() + to_is_dir = self.it_to.current_is_dir() + status = {} + # compare hash + status['same_hash'] = from_current_hash == to_current_hash + # compare permissions + status['same_perms'] = from_current_perms == to_current_perms + # check if both elements are directories + status['both_are_dirs'] = from_is_dir and to_is_dir + # check if both elements are regular files + status['both_are_files'] = not from_is_dir and not to_is_dir + # check if one element is a directory, the other a regular file + status['file_and_dir'] = (not status['both_are_dirs'] and + not status['both_are_files']) + # check if the from element is the empty directory + status['from_is_empty_dir'] = (from_is_dir and + from_current_hash == _empty_dir_hash) + # check if the to element is the empty directory + status['to_is_empty_dir'] = (to_is_dir and + to_current_hash == _empty_dir_hash) + return status diff --git a/swh/storage/api/client.py b/swh/storage/api/client.py index 6f0048c10..e2157a616 100644 --- a/swh/storage/api/client.py +++ b/swh/storage/api/client.py @@ -1,220 +1,237 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.core.api 
import SWHRemoteAPI from ..exc import StorageAPIError class RemoteStorage(SWHRemoteAPI): """Proxy to a remote storage API""" def __init__(self, url): super().__init__(api_exception=StorageAPIError, url=url) def check_config(self, *, check_write): return self.post('check_config', {'check_write': check_write}) def content_add(self, content): return self.post('content/add', {'content': content}) def content_update(self, content, keys=[]): return self.post('content/update', {'content': content, 'keys': keys}) def content_missing(self, content, key_hash='sha1'): return self.post('content/missing', {'content': content, 'key_hash': key_hash}) def content_missing_per_sha1(self, contents): return self.post('content/missing/sha1', {'contents': contents}) def content_get(self, content): return self.post('content/data', {'content': content}) def content_get_metadata(self, content): return self.post('content/metadata', {'content': content}) def content_find(self, content): return self.post('content/present', {'content': content}) def directory_add(self, directories): return self.post('directory/add', {'directories': directories}) def directory_missing(self, directories): return self.post('directory/missing', {'directories': directories}) def directory_get(self, directories): return self.post('directory', dict(directories=directories)) def directory_ls(self, directory, recursive=False): return self.get('directory/ls', {'directory': directory, 'recursive': recursive}) def revision_get(self, revisions): return self.post('revision', {'revisions': revisions}) def revision_get_by(self, origin_id, branch_name, timestamp, limit=None): return self.post('revision/by', dict(origin_id=origin_id, branch_name=branch_name, timestamp=timestamp, limit=limit)) def revision_log(self, revisions, limit=None): return self.post('revision/log', {'revisions': revisions, 'limit': limit}) def revision_log_by(self, origin_id, branch_name, timestamp, limit=None): return self.post('revision/logby', {'origin_id': origin_id, 'branch_name': branch_name, 'timestamp': timestamp, 'limit': limit}) def revision_shortlog(self, revisions, limit=None): return self.post('revision/shortlog', {'revisions': revisions, 'limit': limit}) def revision_add(self, revisions): return self.post('revision/add', {'revisions': revisions}) def revision_missing(self, revisions): return self.post('revision/missing', {'revisions': revisions}) def release_add(self, releases): return self.post('release/add', {'releases': releases}) def release_get(self, releases): return self.post('release', {'releases': releases}) def release_get_by(self, origin_id, limit=None): return self.post('release/by', dict(origin_id=origin_id, limit=limit)) def release_missing(self, releases): return self.post('release/missing', {'releases': releases}) def object_find_by_sha1_git(self, ids): return self.post('object/find_by_sha1_git', {'ids': ids}) def occurrence_get(self, origin_id): return self.post('occurrence', {'origin_id': origin_id}) def occurrence_add(self, occurrences): return self.post('occurrence/add', {'occurrences': occurrences}) def snapshot_add(self, origin, visit, snapshot, back_compat=False): return self.post('snapshot/add', { 'origin': origin, 'visit': visit, 'snapshot': snapshot, 'back_compat': back_compat}) def snapshot_get(self, snapshot_id): return self.post('snapshot', {'snapshot_id': snapshot_id}) def snapshot_get_by_origin_visit(self, origin, visit): return self.post('snapshot/by_origin_visit', {'origin': origin, 'visit': visit}) def snapshot_get_latest(self, 
origin, allowed_statuses=None): return self.post('snapshot/latest', { 'origin': origin, 'allowed_statuses': allowed_statuses }) def origin_get(self, origin): return self.post('origin/get', {'origin': origin}) def origin_search(self, url_pattern, offset=0, limit=50, regexp=False): return self.post('origin/search', {'url_pattern': url_pattern, 'offset': offset, 'limit': limit, 'regexp': regexp}) def origin_add(self, origins): return self.post('origin/add_multi', {'origins': origins}) def origin_add_one(self, origin): return self.post('origin/add', {'origin': origin}) def origin_visit_add(self, origin, ts): return self.post('origin/visit/add', {'origin': origin, 'ts': ts}) def origin_visit_update(self, origin, visit_id, status, metadata=None): return self.post('origin/visit/update', {'origin': origin, 'visit_id': visit_id, 'status': status, 'metadata': metadata}) def origin_visit_get(self, origin, last_visit=None, limit=None): return self.post('origin/visit/get', { 'origin': origin, 'last_visit': last_visit, 'limit': limit}) def origin_visit_get_by(self, origin, visit): return self.post('origin/visit/getby', {'origin': origin, 'visit': visit}) def person_get(self, person): return self.post('person', {'person': person}) def fetch_history_start(self, origin_id): return self.post('fetch_history/start', {'origin_id': origin_id}) def fetch_history_end(self, fetch_history_id, data): return self.post('fetch_history/end', {'fetch_history_id': fetch_history_id, 'data': data}) def fetch_history_get(self, fetch_history_id): return self.get('fetch_history', {'id': fetch_history_id}) def entity_add(self, entities): return self.post('entity/add', {'entities': entities}) def entity_get(self, uuid): return self.post('entity/get', {'uuid': uuid}) def entity_get_one(self, uuid): return self.get('entity', {'uuid': uuid}) def entity_get_from_lister_metadata(self, entities): return self.post('entity/from_lister_metadata', {'entities': entities}) def stat_counters(self): return self.get('stat/counters') def directory_entry_get_by_path(self, directory, paths): return self.post('directory/path', dict(directory=directory, paths=paths)) def tool_add(self, tools): return self.post('tool/add', {'tools': tools}) def tool_get(self, tool): return self.post('tool/data', {'tool': tool}) def origin_metadata_add(self, origin_id, ts, provider, tool, metadata): return self.post('origin/metadata/add', {'origin_id': origin_id, 'ts': ts, 'provider': provider, 'tool': tool, 'metadata': metadata}) def origin_metadata_get_by(self, origin_id, provider_type=None): return self.post('origin/metadata/get', { 'origin_id': origin_id, 'provider_type': provider_type }) def metadata_provider_add(self, provider_name, provider_type, provider_url, metadata): return self.post('provider/add', {'provider_name': provider_name, 'provider_type': provider_type, 'provider_url': provider_url, 'metadata': metadata}) def metadata_provider_get(self, provider_id): return self.post('provider/get', {'provider_id': provider_id}) def metadata_provider_get_by(self, provider): return self.post('provider/getby', {'provider': provider}) + + def diff_directories(self, from_dir, to_dir, track_renaming=False): + return self.post('algos/diff_directories', + {'from_dir': from_dir, + 'to_dir': to_dir, + 'track_renaming': track_renaming}) + + def diff_revisions(self, from_rev, to_rev, track_renaming=False): + return self.post('algos/diff_revisions', + {'from_rev': from_rev, + 'to_rev': to_rev, + 'track_renaming': track_renaming}) + + def diff_revision(self, revision, 
track_renaming=False): + return self.post('algos/diff_revision', + {'revision': revision, + 'track_renaming': track_renaming}) diff --git a/swh/storage/api/server.py b/swh/storage/api/server.py index 0db44adba..23c48cc43 100644 --- a/swh/storage/api/server.py +++ b/swh/storage/api/server.py @@ -1,374 +1,389 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import click from flask import g, request from swh.core import config from swh.storage import get_storage from swh.core.api import (SWHServerAPIApp, decode_request, error_handler, encode_data_server as encode_data) DEFAULT_CONFIG_PATH = 'storage/storage' DEFAULT_CONFIG = { 'storage': ('dict', { 'cls': 'local', 'args': { 'db': 'dbname=softwareheritage-dev', 'objstorage': { 'cls': 'pathslicing', 'args': { 'root': '/srv/softwareheritage/objects', 'slicing': '0:2/2:4/4:6', }, }, }, }) } app = SWHServerAPIApp(__name__) @app.errorhandler(Exception) def my_error_handler(exception): return error_handler(exception, encode_data) @app.before_request def before_request(): g.storage = get_storage(**app.config['storage']) @app.route('/') def index(): return 'SWH Storage API server' @app.route('/check_config', methods=['POST']) def check_config(): return encode_data(g.storage.check_config(**decode_request(request))) @app.route('/content/missing', methods=['POST']) def content_missing(): return encode_data(g.storage.content_missing(**decode_request(request))) @app.route('/content/missing/sha1', methods=['POST']) def content_missing_per_sha1(): return encode_data(g.storage.content_missing_per_sha1( **decode_request(request))) @app.route('/content/present', methods=['POST']) def content_find(): return encode_data(g.storage.content_find(**decode_request(request))) @app.route('/content/add', methods=['POST']) def content_add(): return encode_data(g.storage.content_add(**decode_request(request))) @app.route('/content/update', methods=['POST']) def content_update(): return encode_data(g.storage.content_update(**decode_request(request))) @app.route('/content/data', methods=['POST']) def content_get(): return encode_data(g.storage.content_get(**decode_request(request))) @app.route('/content/metadata', methods=['POST']) def content_get_metadata(): return encode_data(g.storage.content_get_metadata( **decode_request(request))) @app.route('/directory', methods=['POST']) def directory_get(): return encode_data(g.storage.directory_get(**decode_request(request))) @app.route('/directory/missing', methods=['POST']) def directory_missing(): return encode_data(g.storage.directory_missing(**decode_request(request))) @app.route('/directory/add', methods=['POST']) def directory_add(): return encode_data(g.storage.directory_add(**decode_request(request))) @app.route('/directory/path', methods=['POST']) def directory_entry_get_by_path(): return encode_data(g.storage.directory_entry_get_by_path( **decode_request(request))) @app.route('/directory/ls', methods=['GET']) def directory_ls(): dir = request.args['directory'].encode('utf-8', 'surrogateescape') rec = json.loads(request.args.get('recursive', 'False').lower()) return encode_data(g.storage.directory_ls(dir, recursive=rec)) @app.route('/revision/add', methods=['POST']) def revision_add(): return encode_data(g.storage.revision_add(**decode_request(request))) @app.route('/revision', methods=['POST']) 
def revision_get(): return encode_data(g.storage.revision_get(**decode_request(request))) @app.route('/revision/by', methods=['POST']) def revision_get_by(): return encode_data(g.storage.revision_get_by(**decode_request(request))) @app.route('/revision/log', methods=['POST']) def revision_log(): return encode_data(g.storage.revision_log(**decode_request(request))) @app.route('/revision/logby', methods=['POST']) def revision_log_by(): return encode_data(g.storage.revision_log_by(**decode_request(request))) @app.route('/revision/shortlog', methods=['POST']) def revision_shortlog(): return encode_data(g.storage.revision_shortlog(**decode_request(request))) @app.route('/revision/missing', methods=['POST']) def revision_missing(): return encode_data(g.storage.revision_missing(**decode_request(request))) @app.route('/release/add', methods=['POST']) def release_add(): return encode_data(g.storage.release_add(**decode_request(request))) @app.route('/release', methods=['POST']) def release_get(): return encode_data(g.storage.release_get(**decode_request(request))) @app.route('/release/by', methods=['POST']) def release_get_by(): return encode_data(g.storage.release_get_by(**decode_request(request))) @app.route('/release/missing', methods=['POST']) def release_missing(): return encode_data(g.storage.release_missing(**decode_request(request))) @app.route('/object/find_by_sha1_git', methods=['POST']) def object_find_by_sha1_git(): return encode_data(g.storage.object_find_by_sha1_git( **decode_request(request))) @app.route('/occurrence', methods=['POST']) def occurrence_get(): return encode_data(g.storage.occurrence_get(**decode_request(request))) @app.route('/occurrence/add', methods=['POST']) def occurrence_add(): return encode_data(g.storage.occurrence_add(**decode_request(request))) @app.route('/snapshot/add', methods=['POST']) def snapshot_add(): return encode_data(g.storage.snapshot_add(**decode_request(request))) @app.route('/snapshot', methods=['POST']) def snapshot_get(): return encode_data(g.storage.snapshot_get(**decode_request(request))) @app.route('/snapshot/by_origin_visit', methods=['POST']) def snapshot_get_by_origin_visit(): return encode_data(g.storage.snapshot_get_by_origin_visit( **decode_request(request))) @app.route('/snapshot/latest', methods=['POST']) def snapshot_get_latest(): return encode_data(g.storage.snapshot_get_latest( **decode_request(request))) @app.route('/origin/get', methods=['POST']) def origin_get(): return encode_data(g.storage.origin_get(**decode_request(request))) @app.route('/origin/search', methods=['POST']) def origin_search(): return encode_data(g.storage.origin_search(**decode_request(request))) @app.route('/origin/add_multi', methods=['POST']) def origin_add(): return encode_data(g.storage.origin_add(**decode_request(request))) @app.route('/origin/add', methods=['POST']) def origin_add_one(): return encode_data(g.storage.origin_add_one(**decode_request(request))) @app.route('/origin/visit/get', methods=['POST']) def origin_visit_get(): return encode_data(g.storage.origin_visit_get(**decode_request(request))) @app.route('/origin/visit/getby', methods=['POST']) def origin_visit_get_by(): return encode_data( g.storage.origin_visit_get_by(**decode_request(request))) @app.route('/origin/visit/add', methods=['POST']) def origin_visit_add(): return encode_data(g.storage.origin_visit_add(**decode_request(request))) @app.route('/origin/visit/update', methods=['POST']) def origin_visit_update(): return encode_data(g.storage.origin_visit_update( 
**decode_request(request))) @app.route('/person', methods=['POST']) def person_get(): return encode_data(g.storage.person_get(**decode_request(request))) @app.route('/fetch_history', methods=['GET']) def fetch_history_get(): return encode_data(g.storage.fetch_history_get(request.args['id'])) @app.route('/fetch_history/start', methods=['POST']) def fetch_history_start(): return encode_data( g.storage.fetch_history_start(**decode_request(request))) @app.route('/fetch_history/end', methods=['POST']) def fetch_history_end(): return encode_data( g.storage.fetch_history_end(**decode_request(request))) @app.route('/entity/add', methods=['POST']) def entity_add(): return encode_data( g.storage.entity_add(**decode_request(request))) @app.route('/entity/get', methods=['POST']) def entity_get(): return encode_data( g.storage.entity_get(**decode_request(request))) @app.route('/entity', methods=['GET']) def entity_get_one(): return encode_data(g.storage.entity_get_one(request.args['uuid'])) @app.route('/entity/from_lister_metadata', methods=['POST']) def entity_from_lister_metadata(): return encode_data( g.storage.entity_get_from_lister_metadata(**decode_request(request))) @app.route('/tool/data', methods=['POST']) def tool_get(): return encode_data(g.storage.tool_get( **decode_request(request))) @app.route('/tool/add', methods=['POST']) def tool_add(): return encode_data(g.storage.tool_add( **decode_request(request))) @app.route('/origin/metadata/add', methods=['POST']) def origin_metadata_add(): return encode_data(g.storage.origin_metadata_add(**decode_request( request))) @app.route('/origin/metadata/get', methods=['POST']) def origin_metadata_get_by(): return encode_data(g.storage.origin_metadata_get_by(**decode_request( request))) @app.route('/provider/add', methods=['POST']) def metadata_provider_add(): return encode_data(g.storage.metadata_provider_add(**decode_request( request))) @app.route('/provider/get', methods=['POST']) def metadata_provider_get(): return encode_data(g.storage.metadata_provider_get(**decode_request( request))) @app.route('/provider/getby', methods=['POST']) def metadata_provider_get_by(): return encode_data(g.storage.metadata_provider_get_by(**decode_request( request))) @app.route('/stat/counters', methods=['GET']) def stat_counters(): return encode_data(g.storage.stat_counters()) +@app.route('/algos/diff_directories', methods=['POST']) +def diff_directories(): + return encode_data(g.storage.diff_directories(**decode_request(request))) + + +@app.route('/algos/diff_revisions', methods=['POST']) +def diff_revisions(): + return encode_data(g.storage.diff_revisions(**decode_request(request))) + + +@app.route('/algos/diff_revision', methods=['POST']) +def diff_revision(): + return encode_data(g.storage.diff_revision(**decode_request(request))) + + def run_from_webserver(environ, start_response, config_path=DEFAULT_CONFIG_PATH): """Run the WSGI app from the webserver, loading the configuration.""" cfg = config.load_named_config(config_path, DEFAULT_CONFIG) app.config.update(cfg) handler = logging.StreamHandler() app.logger.addHandler(handler) return app(environ, start_response) @click.command() @click.argument('config-path', required=1) @click.option('--host', default='0.0.0.0', help="Host to run the server") @click.option('--port', default=5002, type=click.INT, help="Binding port of the server") @click.option('--debug/--nodebug', default=True, help="Indicates if the server should run in debug mode") def launch(config_path, host, port, debug): 
app.config.update(config.read(config_path, DEFAULT_CONFIG)) app.run(host, port=int(port), debug=bool(debug)) if __name__ == '__main__': launch() diff --git a/swh/storage/storage.py b/swh/storage/storage.py index 022f4a777..d58d1efb1 100644 --- a/swh/storage/storage.py +++ b/swh/storage/storage.py @@ -1,1588 +1,1638 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import defaultdict import datetime import itertools import json import dateutil.parser import psycopg2 from . import converters from .common import db_transaction_generator, db_transaction from .db import Db from .exc import StorageDBError +from .algos import diff from swh.model.hashutil import ALGORITHMS from swh.objstorage import get_objstorage from swh.objstorage.exc import ObjNotFoundError # Max block size of contents to return BULK_BLOCK_CONTENT_LEN_MAX = 10000 CONTENT_HASH_KEYS = ['sha1', 'sha1_git', 'sha256', 'blake2s256'] class Storage(): """SWH storage proxy, encompassing DB and object storage """ def __init__(self, db, objstorage): """ Args: db_conn: either a libpq connection string, or a psycopg2 connection obj_root: path to the root of the object storage """ try: if isinstance(db, psycopg2.extensions.connection): self.db = Db(db) else: self.db = Db.connect(db) except psycopg2.OperationalError as e: raise StorageDBError(e) self.objstorage = get_objstorage(**objstorage) def check_config(self, *, check_write): """Check that the storage is configured and ready to go.""" if not self.objstorage.check_config(check_write=check_write): return False # Check permissions on one of the tables with self.db.transaction() as cur: if check_write: check = 'INSERT' else: check = 'SELECT' cur.execute( "select has_table_privilege(current_user, 'content', %s)", (check,) ) return cur.fetchone()[0] return True def content_add(self, content): """Add content blobs to the storage Note: in case of DB errors, objects might have already been added to the object storage and will not be removed. Since addition to the object storage is idempotent, that should not be a problem. Args: content (iterable): iterable of dictionaries representing individual pieces of content to add. Each dictionary has the following keys: - data (bytes): the actual content - length (int): content length (default: -1) - one key for each checksum algorithm in :data:`swh.model.hashutil.ALGORITHMS`, mapped to the corresponding checksum - status (str): one of visible, hidden, absent - reason (str): if status = absent, the reason why - origin (int): if status = absent, the origin we saw the content in """ db = self.db def _unique_key(hash, keys=CONTENT_HASH_KEYS): """Given a hash (tuple or dict), return a unique key from the aggregation of keys. 
""" if isinstance(hash, tuple): return hash return tuple([hash[k] for k in keys]) content_by_status = defaultdict(list) for d in content: if 'status' not in d: d['status'] = 'visible' if 'length' not in d: d['length'] = -1 content_by_status[d['status']].append(d) content_with_data = content_by_status['visible'] content_without_data = content_by_status['absent'] missing_content = set(self.content_missing(content_with_data)) missing_skipped = set(_unique_key(hashes) for hashes in self.skipped_content_missing( content_without_data)) with db.transaction() as cur: if missing_content: # create temporary table for metadata injection db.mktemp('content', cur) def add_to_objstorage(cont): self.objstorage.add(cont['data'], obj_id=cont['sha1']) content_filtered = (cont for cont in content_with_data if cont['sha1'] in missing_content) db.copy_to(content_filtered, 'tmp_content', db.content_get_metadata_keys, cur, item_cb=add_to_objstorage) # move metadata in place db.content_add_from_temp(cur) if missing_skipped: missing_filtered = (cont for cont in content_without_data if _unique_key(cont) in missing_skipped) db.mktemp('skipped_content', cur) db.copy_to(missing_filtered, 'tmp_skipped_content', db.skipped_content_keys, cur) # move metadata in place db.skipped_content_add_from_temp(cur) @db_transaction def content_update(self, content, keys=[], cur=None): """Update content blobs to the storage. Does nothing for unknown contents or skipped ones. Args: content (iterable): iterable of dictionaries representing individual pieces of content to update. Each dictionary has the following keys: - data (bytes): the actual content - length (int): content length (default: -1) - one key for each checksum algorithm in :data:`swh.model.hashutil.ALGORITHMS`, mapped to the corresponding checksum - status (str): one of visible, hidden, absent keys (list): List of keys (str) whose values needs an update, e.g., new hash column """ db = self.db # TODO: Add a check on input keys. How to properly implement # this? We don't know yet the new columns. db.mktemp('content') select_keys = list(set(db.content_get_metadata_keys).union(set(keys))) db.copy_to(content, 'tmp_content', select_keys, cur) db.content_update_from_temp(keys_to_update=keys, cur=cur) def content_get(self, content): """Retrieve in bulk contents and their data. Args: content: iterables of sha1 Yields: dict: Generates streams of contents as dict with their raw data: - sha1: sha1's content - data: bytes data of the content Raises: ValueError in case of too much contents are required. cf. BULK_BLOCK_CONTENT_LEN_MAX """ # FIXME: Improve on server module to slice the result if len(content) > BULK_BLOCK_CONTENT_LEN_MAX: raise ValueError( "Send at maximum %s contents." 
% BULK_BLOCK_CONTENT_LEN_MAX) for obj_id in content: try: data = self.objstorage.get(obj_id) except ObjNotFoundError: yield None continue yield {'sha1': obj_id, 'data': data} @db_transaction_generator def content_get_metadata(self, content, cur=None): """Retrieve content metadata in bulk Args: content: iterable of content identifiers (sha1) Returns: an iterable with content metadata corresponding to the given ids """ db = self.db db.store_tmp_bytea(content, cur) for content_metadata in db.content_get_metadata_from_temp(cur): yield dict(zip(db.content_get_metadata_keys, content_metadata)) @db_transaction_generator def content_missing(self, content, key_hash='sha1', cur=None): """List content missing from storage Args: content ([dict]): iterable of dictionaries containing one key for each checksum algorithm in :data:`swh.model.hashutil.ALGORITHMS`, mapped to the corresponding checksum, and a length key mapped to the content length. key_hash (str): name of the column to use as hash id result (default: 'sha1') Returns: iterable ([bytes]): missing content ids (as per the key_hash column) Raises: TODO: an exception when we get a hash collision. """ db = self.db keys = CONTENT_HASH_KEYS if key_hash not in CONTENT_HASH_KEYS: raise ValueError("key_hash should be one of %s" % keys) key_hash_idx = keys.index(key_hash) # Create temporary table for metadata injection db.mktemp('content', cur) db.copy_to(content, 'tmp_content', keys + ['length'], cur) for obj in db.content_missing_from_temp(cur): yield obj[key_hash_idx] @db_transaction_generator def content_missing_per_sha1(self, contents, cur=None): """List content missing from storage based only on sha1. Args: contents: Iterable of sha1 to check for absence. Returns: iterable: missing ids Raises: TODO: an exception when we get a hash collision. """ db = self.db db.store_tmp_bytea(contents, cur) for obj in db.content_missing_per_sha1_from_temp(cur): yield obj[0] @db_transaction_generator def skipped_content_missing(self, content, cur=None): """List skipped_content missing from storage Args: content: iterable of dictionaries containing the data for each checksum algorithm. Returns: iterable: missing signatures """ keys = CONTENT_HASH_KEYS db = self.db db.mktemp('skipped_content', cur) db.copy_to(content, 'tmp_skipped_content', keys + ['length', 'reason'], cur) yield from db.skipped_content_missing_from_temp(cur) @db_transaction def content_find(self, content, cur=None): """Find a content hash in db. Args: content: a dictionary representing one content hash, mapping checksum algorithm names (see swh.model.hashutil.ALGORITHMS) to checksum values Returns: a triplet (sha1, sha1_git, sha256) if the content exist or None otherwise. Raises: ValueError: in case the key of the dictionary is not sha1, sha1_git nor sha256. """ db = self.db if not set(content).intersection(ALGORITHMS): raise ValueError('content keys must contain at least one of: ' 'sha1, sha1_git, sha256, blake2s256') c = db.content_find(sha1=content.get('sha1'), sha1_git=content.get('sha1_git'), sha256=content.get('sha256'), blake2s256=content.get('blake2s256'), cur=cur) if c: return dict(zip(db.content_find_cols, c)) return None def directory_add(self, directories): """Add directories to the storage Args: directories (iterable): iterable of dictionaries representing the individual directories to add. Each dict has the following keys: - id (sha1_git): the id of the directory to add - entries (list): list of dicts for each entry in the directory. 
Each dict has the following keys: - name (bytes) - type (one of 'file', 'dir', 'rev'): type of the directory entry (file, directory, revision) - target (sha1_git): id of the object pointed at by the directory entry - perms (int): entry permissions """ dirs = set() dir_entries = { 'file': defaultdict(list), 'dir': defaultdict(list), 'rev': defaultdict(list), } for cur_dir in directories: dir_id = cur_dir['id'] dirs.add(dir_id) for src_entry in cur_dir['entries']: entry = src_entry.copy() entry['dir_id'] = dir_id dir_entries[entry['type']][dir_id].append(entry) dirs_missing = set(self.directory_missing(dirs)) if not dirs_missing: return db = self.db with db.transaction() as cur: # Copy directory ids dirs_missing_dict = ({'id': dir} for dir in dirs_missing) db.mktemp('directory', cur) db.copy_to(dirs_missing_dict, 'tmp_directory', ['id'], cur) # Copy entries for entry_type, entry_list in dir_entries.items(): entries = itertools.chain.from_iterable( entries_for_dir for dir_id, entries_for_dir in entry_list.items() if dir_id in dirs_missing) db.mktemp_dir_entry(entry_type) db.copy_to( entries, 'tmp_directory_entry_%s' % entry_type, ['target', 'name', 'perms', 'dir_id'], cur, ) # Do the final copy db.directory_add_from_temp(cur) @db_transaction_generator def directory_missing(self, directories, cur): """List directories missing from storage Args: directories (iterable): an iterable of directory ids Yields: missing directory ids """ db = self.db # Create temporary table for metadata injection db.mktemp('directory', cur) directories_dicts = ({'id': dir} for dir in directories) db.copy_to(directories_dicts, 'tmp_directory', ['id'], cur) for obj in db.directory_missing_from_temp(cur): yield obj[0] @db_transaction_generator def directory_get(self, directories, cur=None): """Get information on directories. Args: - directories: an iterable of directory ids Returns: List of directories as dict with keys and associated values. """ db = self.db keys = ('id', 'dir_entries', 'file_entries', 'rev_entries') db.mktemp('directory', cur) db.copy_to(({'id': dir_id} for dir_id in directories), 'tmp_directory', ['id'], cur) dirs = db.directory_get_from_temp(cur) for line in dirs: yield dict(zip(keys, line)) @db_transaction_generator def directory_ls(self, directory, recursive=False, cur=None): """Get entries for one directory. Args: - directory: the directory to list entries from. - recursive: if flag on, this list recursively from this directory. Returns: List of entries for such directory. """ db = self.db if recursive: res_gen = db.directory_walk(directory, cur=cur) else: res_gen = db.directory_walk_one(directory, cur=cur) for line in res_gen: yield dict(zip(db.directory_ls_cols, line)) @db_transaction def directory_entry_get_by_path(self, directory, paths, cur=None): """Get the directory entry (either file or dir) from directory with path. Args: - directory: sha1 of the top level directory - paths: path to lookup from the top level directory. From left (top) to right (bottom). Returns: The corresponding directory entry if found, None otherwise. """ db = self.db res = db.directory_entry_get_by_path(directory, paths, cur) if res: return dict(zip(db.directory_ls_cols, res)) def revision_add(self, revisions): """Add revisions to the storage Args: revisions (iterable): iterable of dictionaries representing the individual revisions to add. 
Each dict has the following keys: - id (sha1_git): id of the revision to add - date (datetime.DateTime): date the revision was written - date_offset (int): offset from UTC in minutes the revision was written - date_neg_utc_offset (boolean): whether a null date_offset represents a negative UTC offset - committer_date (datetime.DateTime): date the revision got added to the origin - committer_date_offset (int): offset from UTC in minutes the revision was added to the origin - committer_date_neg_utc_offset (boolean): whether a null committer_date_offset represents a negative UTC offset - type (one of 'git', 'tar'): type of the revision added - directory (sha1_git): the directory the revision points at - message (bytes): the message associated with the revision - author_name (bytes): the name of the revision author - author_email (bytes): the email of the revision author - committer_name (bytes): the name of the revision committer - committer_email (bytes): the email of the revision committer - metadata (jsonb): extra information as dictionary - synthetic (bool): revision's nature (tarball, directory creates synthetic revision) - parents (list of sha1_git): the parents of this revision """ db = self.db revisions_missing = set(self.revision_missing( set(revision['id'] for revision in revisions))) if not revisions_missing: return with db.transaction() as cur: db.mktemp_revision(cur) revisions_filtered = ( converters.revision_to_db(revision) for revision in revisions if revision['id'] in revisions_missing) parents_filtered = [] db.copy_to( revisions_filtered, 'tmp_revision', db.revision_add_cols, cur, lambda rev: parents_filtered.extend(rev['parents'])) db.revision_add_from_temp(cur) db.copy_to(parents_filtered, 'revision_history', ['id', 'parent_id', 'parent_rank'], cur) @db_transaction_generator def revision_missing(self, revisions, cur=None): """List revisions missing from storage Args: revisions (iterable): revision ids Yields: missing revision ids """ db = self.db db.store_tmp_bytea(revisions, cur) for obj in db.revision_missing_from_temp(cur): yield obj[0] @db_transaction_generator def revision_get(self, revisions, cur): """Get all revisions from storage Args: revisions: an iterable of revision ids Returns: iterable: an iterable of revisions as dictionaries (or None if the revision doesn't exist) """ db = self.db db.store_tmp_bytea(revisions, cur) for line in self.db.revision_get_from_temp(cur): data = converters.db_to_revision( dict(zip(db.revision_get_cols, line)) ) if not data['type']: yield None continue yield data @db_transaction_generator def revision_log(self, revisions, limit=None, cur=None): """Fetch revision entry from the given root revisions. Args: revisions: array of root revision to lookup limit: limitation on the output result. Default to None. Yields: List of revision log from such revisions root. """ db = self.db for line in db.revision_log(revisions, limit, cur): data = converters.db_to_revision( dict(zip(db.revision_get_cols, line)) ) if not data['type']: yield None continue yield data @db_transaction_generator def revision_shortlog(self, revisions, limit=None, cur=None): """Fetch the shortlog for the given revisions Args: revisions: list of root revisions to lookup limit: depth limitation for the output Yields: a list of (id, parents) tuples. 
""" db = self.db yield from db.revision_shortlog(revisions, limit, cur) @db_transaction_generator def revision_log_by(self, origin_id, branch_name=None, timestamp=None, limit=None, cur=None): """Fetch revision entry from the actual origin_id's latest revision. Args: origin_id: the origin id from which deriving the revision branch_name: (optional) occurrence's branch name timestamp: (optional) occurrence's time limit: (optional) depth limitation for the output. Default to None. Yields: The revision log starting from the revision derived from the (origin, branch_name, timestamp) combination if any. Returns: None if no revision matching this combination is found. """ db = self.db # Retrieve the revision by criterion revisions = list(db.revision_get_by( origin_id, branch_name, timestamp, limit=1)) if not revisions: return None revision_id = revisions[0][0] # otherwise, retrieve the revision log from that revision yield from self.revision_log([revision_id], limit) def release_add(self, releases): """Add releases to the storage Args: releases (iterable): iterable of dictionaries representing the individual releases to add. Each dict has the following keys: - id (sha1_git): id of the release to add - revision (sha1_git): id of the revision the release points to - date (datetime.DateTime): the date the release was made - date_offset (int): offset from UTC in minutes the release was made - date_neg_utc_offset (boolean): whether a null date_offset represents a negative UTC offset - name (bytes): the name of the release - comment (bytes): the comment associated with the release - author_name (bytes): the name of the release author - author_email (bytes): the email of the release author """ db = self.db release_ids = set(release['id'] for release in releases) releases_missing = set(self.release_missing(release_ids)) if not releases_missing: return with db.transaction() as cur: db.mktemp_release(cur) releases_filtered = ( converters.release_to_db(release) for release in releases if release['id'] in releases_missing ) db.copy_to(releases_filtered, 'tmp_release', db.release_add_cols, cur) db.release_add_from_temp(cur) @db_transaction_generator def release_missing(self, releases, cur=None): """List releases missing from storage Args: releases: an iterable of release ids Returns: a list of missing release ids """ db = self.db # Create temporary table for metadata injection db.store_tmp_bytea(releases, cur) for obj in db.release_missing_from_temp(cur): yield obj[0] @db_transaction_generator def release_get(self, releases, cur=None): """Given a list of sha1, return the releases's information Args: releases: list of sha1s Yields: releases: list of releases as dicts with the following keys: - id: origin's id - revision: origin's type - url: origin's url - lister: lister's uuid - project: project's uuid (FIXME, retrieve this information) Raises: ValueError: if the keys does not match (url and type) nor id. 
""" db = self.db # Create temporary table for metadata injection db.store_tmp_bytea(releases, cur) for release in db.release_get_from_temp(cur): yield converters.db_to_release( dict(zip(db.release_get_cols, release)) ) @db_transaction def snapshot_add(self, origin, visit, snapshot, back_compat=False, cur=None): """Add a snapshot for the given origin/visit couple Args: origin (int): id of the origin visit (int): id of the visit snapshot (dict): the snapshot to add to the visit, containing the following keys: - **id** (:class:`bytes`): id of the snapshot - **branches** (:class:`dict`): branches the snapshot contains, mapping the branch name (:class:`bytes`) to the branch target, itself a :class:`dict` (or ``None`` if the branch points to an unknown object) - **target_type** (:class:`str`): one of ``content``, ``directory``, ``revision``, ``release``, ``snapshot``, ``alias`` - **target** (:class:`bytes`): identifier of the target (currently a ``sha1_git`` for all object kinds, or the name of the target branch for aliases) back_compat (bool): whether to add the occurrences for backwards-compatibility """ db = self.db if not db.snapshot_exists(snapshot['id'], cur): db.mktemp_snapshot_branch(cur) db.copy_to( ( { 'name': name, 'target': info['target'] if info else None, 'target_type': info['target_type'] if info else None, } for name, info in snapshot['branches'].items() ), 'tmp_snapshot_branch', ['name', 'target', 'target_type'], cur, ) db.snapshot_add(origin, visit, snapshot['id'], cur) if not back_compat: return # TODO: drop this compat feature occurrences = [] for name, info in snapshot['branches'].items(): if not info: target = b'\x00' * 20 target_type = 'revision' elif info['target_type'] == 'alias': continue else: target = info['target'] target_type = info['target_type'] occurrences.append({ 'origin': origin, 'visit': visit, 'branch': name, 'target': target, 'target_type': target_type, }) self.occurrence_add(occurrences) @db_transaction def snapshot_get(self, snapshot_id, cur=None): """Get the snapshot with the given id Args: snapshot_id (bytes): id of the snapshot Returns: dict: a snapshot with two keys: id:: identifier for the snapshot branches:: a list of branches contained by the snapshot """ db = self.db branches = {} for branch in db.snapshot_get_by_id(snapshot_id, cur): branch = dict(zip(db.snapshot_get_cols, branch)) del branch['snapshot_id'] name = branch.pop('name') if branch == {'target': None, 'target_type': None}: branch = None branches[name] = branch if branches: return {'id': snapshot_id, 'branches': branches} if db.snapshot_exists(snapshot_id, cur): # empty snapshot return {'id': snapshot_id, 'branches': {}} return None @db_transaction def snapshot_get_by_origin_visit(self, origin, visit, cur=None): """Get the snapshot for the given origin visit Args: origin (int): the origin identifier visit (int): the visit identifier Returns: dict: a snapshot with two keys: id:: identifier for the snapshot branches:: a dictionary containing the snapshot branch information """ db = self.db snapshot_id = db.snapshot_get_by_origin_visit(origin, visit, cur) if snapshot_id: return self.snapshot_get(snapshot_id, cur=cur) else: # compatibility code during the snapshot migration origin_visit_info = self.origin_visit_get_by(origin, visit, cur=cur) if origin_visit_info is None: return None ret = {'id': None} ret['branches'] = origin_visit_info['occurrences'] return ret return None @db_transaction def snapshot_get_latest(self, origin, allowed_statuses=None, cur=None): """Get the latest snapshot for 
the given origin, optionally only from visits that have one of the given allowed_statuses. Args: origin (int): the origin identifier allowed_statuses (list of str): list of visit statuses considered to find the latest snapshot for the visit. For instance, ``allowed_statuses=['full']`` will only consider visits that have successfully run to completion. Returns: dict: a snapshot with two keys: id:: identifier for the snapshot branches:: a dictionary containing the snapshot branch information """ db = self.db origin_visit = db.origin_visit_get_latest_snapshot( origin, allowed_statuses=allowed_statuses, cur=cur) if origin_visit: origin_visit = dict(zip(db.origin_visit_get_cols, origin_visit)) return self.snapshot_get(origin_visit['snapshot'], cur=cur) @db_transaction def occurrence_add(self, occurrences, cur=None): """Add occurrences to the storage Args: occurrences: iterable of dictionaries representing the individual occurrences to add. Each dict has the following keys: - origin (int): id of the origin corresponding to the occurrence - visit (int): id of the visit corresponding to the occurrence - branch (str): the reference name of the occurrence - target (sha1_git): the id of the object pointed to by the occurrence - target_type (str): the type of object pointed to by the occurrence """ db = self.db db.mktemp_occurrence_history(cur) db.copy_to(occurrences, 'tmp_occurrence_history', ['origin', 'branch', 'target', 'target_type', 'visit'], cur) db.occurrence_history_add_from_temp(cur) @db_transaction_generator def occurrence_get(self, origin_id, cur=None): """Retrieve occurrence information per origin_id. Args: origin_id: The occurrence's origin. Yields: List of occurrences matching criterion. """ db = self.db for line in db.occurrence_get(origin_id, cur): yield { 'origin': line[0], 'branch': line[1], 'target': line[2], 'target_type': line[3], } @db_transaction def origin_visit_add(self, origin, ts, cur=None): """Add an origin_visit for the origin at ts with status 'ongoing'. Args: origin: Visited Origin id ts: timestamp of such visit Returns: dict: dictionary with keys origin and visit where: - origin: origin identifier - visit: the visit identifier for the new visit occurrence - ts (datetime.DateTime): the visit date """ if isinstance(ts, str): ts = dateutil.parser.parse(ts) return { 'origin': origin, 'visit': self.db.origin_visit_add(origin, ts, cur) } @db_transaction def origin_visit_update(self, origin, visit_id, status, metadata=None, cur=None): """Update an origin_visit's status. Args: origin: Visited Origin id visit_id: Visit's id status: Visit's new status metadata: Data associated to the visit Returns: None """ return self.db.origin_visit_update(origin, visit_id, status, metadata, cur) @db_transaction_generator def origin_visit_get(self, origin, last_visit=None, limit=None, cur=None): """Retrieve all the origin's visit's information. Args: origin (int): The occurrence's origin (identifier). last_visit (int): Starting point from which listing the next visits Default to None limit (int): Number of results to return from the last visit. Default to None Yields: List of visits. """ db = self.db for line in db.origin_visit_get_all( origin, last_visit=last_visit, limit=limit, cur=cur): data = dict(zip(self.db.origin_visit_get_cols, line)) yield data @db_transaction def origin_visit_get_by(self, origin, visit, cur=None): """Retrieve origin visit's information. Args: origin: The occurrence's origin (identifier). 
visit: The visit identifier. Returns: The information on that particular (origin, visit) """ db = self.db ori_visit = db.origin_visit_get(origin, visit, cur) if not ori_visit: return None ori_visit = dict(zip(self.db.origin_visit_get_cols, ori_visit)) if ori_visit['snapshot']: ori_visit['occurrences'] = self.snapshot_get(ori_visit['snapshot'], cur=cur)['branches'] return ori_visit # TODO: remove Backwards compatibility after snapshot migration occs = {} for occ in db.occurrence_by_origin_visit(origin, visit): _, branch_name, target, target_type = occ occs[branch_name] = { 'target': target, 'target_type': target_type } ori_visit['occurrences'] = occs return ori_visit @db_transaction_generator def revision_get_by(self, origin_id, branch_name=None, timestamp=None, limit=None, cur=None): """Given an origin id, retrieve the revisions matching the given criteria. Args: origin_id: The origin to filter on. branch_name: (optional) branch name. timestamp: (optional) time. limit: (optional) limit Yields: Revisions matching the criteria, or None if nothing is found. """ for line in self.db.revision_get_by(origin_id, branch_name, timestamp, limit=limit, cur=cur): data = converters.db_to_revision( dict(zip(self.db.revision_get_cols, line)) ) if not data['type']: yield None continue yield data def release_get_by(self, origin_id, limit=None): """Given an origin id, return all the release objects pointing to heads of origin_id. Args: origin_id: the origin to filter on. limit: (optional) limit. Default to None. Yields: Releases matching the criteria, or None if nothing is found. """ for line in self.db.release_get_by(origin_id, limit=limit): data = converters.db_to_release( dict(zip(self.db.release_get_cols, line)) ) yield data @db_transaction def object_find_by_sha1_git(self, ids, cur=None): """Return the objects found with the given ids. Args: ids: a generator of sha1_gits Returns: dict: a mapping from id to the list of objects found. Each object found is itself a dict with keys: - sha1_git: the input id - type: the type of object found - id: the id of the object found - object_id: the numeric id of the object found. """ db = self.db ret = {id: [] for id in ids} for retval in db.object_find_by_sha1_git(ids): if retval[1]: ret[retval[0]].append(dict(zip(db.object_find_by_sha1_git_cols, retval))) return ret origin_keys = ['id', 'type', 'url', 'lister', 'project'] @db_transaction def origin_get(self, origin, cur=None): """Return the origin either identified by its id or its tuple (type, url). Args: origin: dictionary representing the individual origin to find. This dict has either the keys type and url: - type (FIXME: enum TBD): the origin type ('git', 'wget', ...) - url (bytes): the url the origin points to or the id: - id: the origin id Returns: dict: the origin dictionary with the keys: - id: origin's id - type: origin's type - url: origin's url - lister: lister's uuid - project: project's uuid (FIXME, retrieve this information) Raises: ValueError: if the origin dict provides neither an 'id' key nor both 'type' and 'url' keys.
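A lookup sketch (hypothetical origin values, assuming a configured ``storage`` instance)::

    origin = storage.origin_get({'type': 'git',
                                 'url': 'https://example.org/repo.git'})
    # or, equivalently, by internal identifier:
    origin = storage.origin_get({'id': 42})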
""" db = self.db origin_id = origin.get('id') if origin_id: # check lookup per id first ori = db.origin_get(origin_id, cur) elif 'type' in origin and 'url' in origin: # or lookup per type, url ori = db.origin_get_with(origin['type'], origin['url'], cur) else: # unsupported lookup raise ValueError('Origin must have either id or (type and url).') if ori: return dict(zip(self.origin_keys, ori)) return None @db_transaction_generator def origin_search(self, url_pattern, offset=0, limit=50, regexp=False, cur=None): """Search for origins whose urls contain a provided string pattern or match a provided regular expression. The search is performed in a case insensitive way. Args: url_pattern: the string pattern to search for in origin urls offset: number of found origins to skip before returning results limit: the maximum number of found origins to return regexp: if True, consider the provided pattern as a regular expression and return origins whose urls match it Returns: An iterable of dict containing origin information as returned by :meth:`swh.storage.storage.Storage.origin_get`. """ db = self.db for origin in db.origin_search(url_pattern, offset, limit, regexp, cur): yield dict(zip(self.origin_keys, origin)) @db_transaction def _person_add(self, person, cur=None): """Add a person in storage. Note: Internal function for now, do not use outside of this module. Do not do anything fancy in case a person already exists. Please adapt code if more checks are needed. Args: person: dictionary with keys name and email. Returns: Id of the new person. """ db = self.db return db.person_add(person) @db_transaction_generator def person_get(self, person, cur=None): """Return the persons identified by their ids. Args: person: array of ids. Returns: The array of persons corresponding of the ids. """ db = self.db for person in db.person_get(person): yield dict(zip(db.person_get_cols, person)) @db_transaction def origin_add(self, origins, cur=None): """Add origins to the storage Args: origins: list of dictionaries representing the individual origins, with the following keys: - type: the origin type ('git', 'svn', 'deb', ...) - url (bytes): the url the origin points to Returns: list: ids corresponding to the given origins """ ret = [] for origin in origins: ret.append(self.origin_add_one(origin, cur=cur)) return ret @db_transaction def origin_add_one(self, origin, cur=None): """Add origin to the storage Args: origin: dictionary representing the individual origin to add. This dict has the following keys: - type (FIXME: enum TBD): the origin type ('git', 'wget', ...) - url (bytes): the url the origin points to Returns: the id of the added origin, or of the identical one that already exists. """ db = self.db data = db.origin_get_with(origin['type'], origin['url'], cur) if data: return data[0] return db.origin_add(origin['type'], origin['url'], cur) @db_transaction def fetch_history_start(self, origin_id, cur=None): """Add an entry for origin origin_id in fetch_history. Returns the id of the added fetch_history entry """ fetch_history = { 'origin': origin_id, 'date': datetime.datetime.now(tz=datetime.timezone.utc), } return self.db.create_fetch_history(fetch_history, cur) @db_transaction def fetch_history_end(self, fetch_history_id, data, cur=None): """Close the fetch_history entry with id `fetch_history_id`, replacing its data with `data`. 
""" now = datetime.datetime.now(tz=datetime.timezone.utc) fetch_history = self.db.get_fetch_history(fetch_history_id, cur) if not fetch_history: raise ValueError('No fetch_history with id %d' % fetch_history_id) fetch_history['duration'] = now - fetch_history['date'] fetch_history.update(data) self.db.update_fetch_history(fetch_history, cur) @db_transaction def fetch_history_get(self, fetch_history_id, cur=None): """Get the fetch_history entry with id `fetch_history_id`. """ return self.db.get_fetch_history(fetch_history_id, cur) @db_transaction def entity_add(self, entities, cur=None): """Add the given entitites to the database (in entity_history). Args: entities (iterable): iterable of dictionaries with the following keys: - uuid (uuid): id of the entity - parent (uuid): id of the parent entity - name (str): name of the entity - type (str): type of entity (one of 'organization', 'group_of_entities', 'hosting', 'group_of_persons', 'person', 'project') - description (str, optional): description of the entity - homepage (str): url of the entity's homepage - active (bool): whether the entity is active - generated (bool): whether the entity was generated - lister_metadata (dict): lister-specific entity metadata - metadata (dict): other metadata for the entity - validity (datetime.DateTime array): timestamps at which we listed the entity. """ db = self.db cols = list(db.entity_history_cols) cols.remove('id') db.mktemp_entity_history() db.copy_to(entities, 'tmp_entity_history', cols, cur) db.entity_history_add_from_temp() @db_transaction_generator def entity_get_from_lister_metadata(self, entities, cur=None): """Fetch entities from the database, matching with the lister and associated metadata. Args: entities (iterable): dictionaries containing the lister metadata to look for. Useful keys are 'lister', 'type', 'id', ... Yields: fetched entities with all their attributes. If no match was found, the returned entity is None. """ db = self.db db.mktemp_entity_lister(cur) mapped_entities = [] for i, entity in enumerate(entities): mapped_entity = { 'id': i, 'lister_metadata': entity, } mapped_entities.append(mapped_entity) db.copy_to(mapped_entities, 'tmp_entity_lister', ['id', 'lister_metadata'], cur) cur.execute('''select id, %s from swh_entity_from_tmp_entity_lister() order by id''' % ','.join(db.entity_cols)) for id, *entity_vals in cur: fetched_entity = dict(zip(db.entity_cols, entity_vals)) if fetched_entity['uuid']: yield fetched_entity else: yield { 'uuid': None, 'lister_metadata': entities[i], } @db_transaction_generator def entity_get(self, uuid, cur=None): """Returns the list of entity per its uuid identifier and also its parent hierarchy. Args: uuid: entity's identifier Returns: List of entities starting with entity with uuid and the parent hierarchy from such entity. """ db = self.db for entity in db.entity_get(uuid, cur): yield dict(zip(db.entity_cols, entity)) @db_transaction def entity_get_one(self, uuid, cur=None): """Returns one entity using its uuid identifier. 
Args: uuid: entity's identifier Returns: the object corresponding to the given entity """ db = self.db entity = db.entity_get_one(uuid, cur) if entity: return dict(zip(db.entity_cols, entity)) else: return None @db_transaction def stat_counters(self, cur=None): """compute statistics about the number of tuples in various tables Returns: dict: a dictionary mapping textual labels (e.g., content) to integer values (e.g., the number of tuples in table content) """ return {k: v for (k, v) in self.db.stat_counters()} @db_transaction def origin_metadata_add(self, origin_id, ts, provider, tool, metadata, cur=None): """ Add an origin_metadata for the origin at ts with provenance and metadata. Args: origin_id (int): the origin's id for which the metadata is added ts (datetime): timestamp of the found metadata provider (int): the provider of metadata (ex:'hal') tool (int): tool used to extract metadata metadata (jsonb): the metadata retrieved at the time and location Returns: id (int): the origin_metadata unique id """ if isinstance(ts, str): ts = dateutil.parser.parse(ts) return self.db.origin_metadata_add(origin_id, ts, provider, tool, metadata, cur) @db_transaction_generator def origin_metadata_get_by(self, origin_id, provider_type=None, cur=None): """Retrieve list of all origin_metadata entries for the origin_id Args: origin_id (int): the unique origin identifier provider_type (str): (optional) type of provider Returns: list of dicts: the origin_metadata dictionary with the keys: - id (int): origin_metadata's id - origin_id (int): origin's id - discovery_date (datetime): timestamp of discovery - tool_id (int): metadata's extracting tool - metadata (jsonb) - provider_id (int): metadata's provider - provider_name (str) - provider_type (str) - provider_url (str) """ db = self.db for line in db.origin_metadata_get_by(origin_id, provider_type, cur): yield dict(zip(db.origin_metadata_get_cols, line)) @db_transaction_generator def tool_add(self, tools, cur=None): """Add new tools to the storage. Args: tools (iterable of :class:`dict`): Tool information to add to storage. Each tool is a :class:`dict` with the following keys: - name (:class:`str`): name of the tool - version (:class:`str`): version of the tool - configuration (:class:`dict`): configuration of the tool, must be json-encodable Returns: `iterable` of :class:`dict`: All the tools inserted in storage (including the internal ``id``). The order of the list is not guaranteed to match the order of the initial list. """ db = self.db db.mktemp_tool(cur) db.copy_to(tools, 'tmp_tool', ['name', 'version', 'configuration'], cur) tools = db.tool_add_from_temp(cur) for line in tools: yield dict(zip(db.tool_cols, line)) @db_transaction def tool_get(self, tool, cur=None): """Retrieve tool information. Args: tool (dict): Tool information we want to retrieve from storage. The dicts have the same keys as those used in :func:`tool_add`. Returns: dict: The full tool information if it exists (``id`` included), None otherwise. 
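A lookup sketch (hypothetical tool description, assuming a configured ``storage`` instance)::

    tool = storage.tool_get({'name': 'file',
                             'version': '5.22',
                             'configuration': {}})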
""" db = self.db tool_conf = tool['configuration'] if isinstance(tool_conf, dict): tool_conf = json.dumps(tool_conf) idx = db.tool_get(tool['name'], tool['version'], tool_conf) if not idx: return None return dict(zip(self.db.tool_cols, idx)) @db_transaction def metadata_provider_add(self, provider_name, provider_type, provider_url, metadata, cur=None): db = self.db return db.metadata_provider_add(provider_name, provider_type, provider_url, metadata, cur) @db_transaction def metadata_provider_get(self, provider_id, cur=None): db = self.db result = db.metadata_provider_get(provider_id) if not result: return None return dict(zip(self.db.metadata_provider_cols, result)) @db_transaction def metadata_provider_get_by(self, provider, cur=None): db = self.db result = db.metadata_provider_get_by(provider['provider_name'], provider['provider_url']) if not result: return None return dict(zip(self.db.metadata_provider_cols, result)) + + def diff_directories(self, from_dir, to_dir, track_renaming=False): + """Compute the list of file changes introduced between two arbitrary + directories (insertion / deletion / modification / renaming of files). + + Args: + from_dir (bytes): identifier of the directory to compare from + to_dir (bytes): identifier of the directory to compare to + track_renaming (bool): whether or not to track files renaming + + Returns: + A list of dict describing the introduced file changes + (see :func:`swh.storage.algos.diff.diff_directories` + for more details). + """ + return diff.diff_directories(self, from_dir, to_dir, track_renaming) + + def diff_revisions(self, from_rev, to_rev, track_renaming=False): + """Compute the list of file changes introduced between two arbitrary + revisions (insertion / deletion / modification / renaming of files). + + Args: + from_rev (bytes): identifier of the revision to compare from + to_rev (bytes): identifier of the revision to compare to + track_renaming (bool): whether or not to track files renaming + + Returns: + A list of dict describing the introduced file changes + (see :func:`swh.storage.algos.diff.diff_directories` + for more details). + """ + return diff.diff_revisions(self, from_rev, to_rev, track_renaming) + + def diff_revision(self, revision, track_renaming=False): + """Compute the list of file changes introduced by a specific revision + (insertion / deletion / modification / renaming of files) by comparing + it against its first parent. + + Args: + revision (bytes): identifier of the revision from which to + compute the list of files changes + track_renaming (bool): whether or not to track files renaming + + Returns: + A list of dict describing the introduced file changes + (see :func:`swh.storage.algos.diff.diff_directories` + for more details). 
+ """ + return diff.diff_revision(self, revision, track_renaming) diff --git a/swh/storage/tests/algos/__init__.py b/swh/storage/tests/algos/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/swh/storage/tests/algos/test_diff.py b/swh/storage/tests/algos/test_diff.py new file mode 100644 index 000000000..0a2f6d660 --- /dev/null +++ b/swh/storage/tests/algos/test_diff.py @@ -0,0 +1,368 @@ +# Copyright (C) 2018 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +# flake8: noqa + +import unittest + +from nose.tools import istest, nottest +from unittest.mock import patch + +from swh.model.identifiers import directory_identifier +from swh.storage.algos import diff + + +class DirectoryModel(object): + """ + Quick and dirty directory model to ease the writing + of revision trees differential tests. + """ + def __init__(self, name=''): + self.data = {} + self.data['name'] = name + self.data['perms'] = 16384 + self.data['type'] = 'dir' + self.data['entries'] = [] + self.data['entry_idx'] = {} + + def __getitem__(self, item): + if item == 'target': + return directory_identifier(self) + else: + return self.data[item] + + def add_file(self, path, sha1=None): + path_parts = path.split(b'/') + if len(path_parts) == 1: + self['entry_idx'][path] = len(self['entries']) + self['entries'].append({ + 'target': sha1, + 'name': path, + 'perms': 33188, + 'type': 'file' + }) + else: + if not path_parts[0] in self['entry_idx']: + self['entry_idx'][path_parts[0]] = len(self['entries']) + self['entries'].append(DirectoryModel(path_parts[0])) + if path_parts[1]: + dir_idx = self['entry_idx'][path_parts[0]] + self['entries'][dir_idx].add_file(b'/'.join(path_parts[1:]), sha1) + + def get_hash_data(self, entry_hash): + if self['target'] == entry_hash: + ret = [] + for e in self['entries']: + ret.append({ + 'target': e['target'], + 'name': e['name'], + 'perms': e['perms'], + 'type': e['type'] + }) + return ret + else: + for e in self['entries']: + if e['type'] == 'file' and e['target'] == entry_hash: + return e + elif e['type'] == 'dir': + data = e.get_hash_data(entry_hash) + if data: + return data + return None + + def get_path_data(self, path): + path_parts = path.split(b'/') + entry_idx = self['entry_idx'][path_parts[0]] + entry = self['entries'][entry_idx] + if len(path_parts) == 1: + return { + 'target': entry['target'], + 'name': entry['name'], + 'perms': entry['perms'], + 'type': entry['type'] + } + else: + return entry.get_path_data(b'/'.join(path_parts[1:])) + + +@patch('swh.storage.algos.diff._get_rev') +@patch('swh.storage.algos.dir_iterators._get_dir') +class TestDiffRevisions(unittest.TestCase): + + @nottest + def diff_revisions(self, rev_from, rev_to, from_dir_model, to_dir_model, + expected_changes, mock_get_dir, mock_get_rev): + + def _get_rev(*args, **kwargs): + if args[1] == rev_from: + return {'directory': from_dir_model['target']} + else: + return {'directory': to_dir_model['target']} + + def _get_dir(*args, **kwargs): + return from_dir_model.get_hash_data(args[1]) or \ + to_dir_model.get_hash_data(args[1]) + + mock_get_rev.side_effect = _get_rev + mock_get_dir.side_effect = _get_dir + + changes = diff.diff_revisions(None, rev_from, rev_to, track_renaming=True) + + self.assertEqual(changes, expected_changes) + + @istest + def test_insert_delete(self, mock_get_dir, mock_get_rev): + rev_from = 
'898ff03e1e7925ecde3da66327d3cdc7e07625ba' + rev_to = '647c3d381e67490e82cdbbe6c96e46d5e1628ce2' + + from_dir_model = DirectoryModel() + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + to_dir_model.add_file(b'file2', '3e5faecb3836ffcadf82cc160787e35d4e2bec6a') + to_dir_model.add_file(b'file3', '2ae33b2984974d35eababe4890d37fbf4bce6b2c') + + expected_changes = \ + [{ + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'file1'), + 'to_path': b'file1' + }, + { + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'file2'), + 'to_path': b'file2' + }, + { + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'file3'), + 'to_path': b'file3' + }] + + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + from_dir_model.add_file(b'file2', '3e5faecb3836ffcadf82cc160787e35d4e2bec6a') + from_dir_model.add_file(b'file3', '2ae33b2984974d35eababe4890d37fbf4bce6b2c') + + to_dir_model = DirectoryModel() + + expected_changes = \ + [{ + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'file1'), + 'from_path': b'file1', + 'to': None, + 'to_path': None + }, + { + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'file2'), + 'from_path': b'file2', + 'to': None, + 'to_path': None + }, + { + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'file3'), + 'from_path': b'file3', + 'to': None, + 'to_path': None + }] + + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + @istest + def test_onelevel_diff(self, mock_get_dir, mock_get_rev): + rev_from = '898ff03e1e7925ecde3da66327d3cdc7e07625ba' + rev_to = '647c3d381e67490e82cdbbe6c96e46d5e1628ce2' + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + from_dir_model.add_file(b'file2', 'f4a96b2000be83b61254d107046fa9777b17eb34') + from_dir_model.add_file(b'file3', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'file2', '3ee0f38ee0ea23cc2c8c0b9d66b27be4596b002b') + to_dir_model.add_file(b'file3', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + to_dir_model.add_file(b'file4', '40460b9653b1dc507e1b6eb333bd4500634bdffc') + + expected_changes = \ + [{ + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'file1'), + 'from_path': b'file1', + 'to': None, + 'to_path': None}, + { + 'type': 'modify', + 'from': from_dir_model.get_path_data(b'file2'), + 'from_path': b'file2', + 'to': to_dir_model.get_path_data(b'file2'), + 'to_path': b'file2'}, + { + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'file4'), + 'to_path': b'file4' + }] + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + @istest + def test_twolevels_diff(self, mock_get_dir, mock_get_rev): + rev_from = '898ff03e1e7925ecde3da66327d3cdc7e07625ba' + rev_to = '647c3d381e67490e82cdbbe6c96e46d5e1628ce2' + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + from_dir_model.add_file(b'dir1/file1', '8335fca266811bac7ae5c8e1621476b4cf4156b6') + 
from_dir_model.add_file(b'dir1/file2', 'a6127d909e79f1fcb28bbf220faf86e7be7831e5') + from_dir_model.add_file(b'dir1/file3', '18049b8d067ce1194a7e1cce26cfa3ae4242a43d') + from_dir_model.add_file(b'file2', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'file1', '3ee0f38ee0ea23cc2c8c0b9d66b27be4596b002b') + to_dir_model.add_file(b'dir1/file2', 'de3548b32a8669801daa02143a66dae21fe852fd') + to_dir_model.add_file(b'dir1/file3', '18049b8d067ce1194a7e1cce26cfa3ae4242a43d') + to_dir_model.add_file(b'dir1/file4', 'f5c3f42aec5fe7b92276196c350cbadaf4c51f87') + to_dir_model.add_file(b'file2', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + + expected_changes = \ + [{ + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'dir1/file1'), + 'from_path': b'dir1/file1', + 'to': None, + 'to_path': None + }, + { + 'type': 'modify', + 'from': from_dir_model.get_path_data(b'dir1/file2'), + 'from_path': b'dir1/file2', + 'to': to_dir_model.get_path_data(b'dir1/file2'), + 'to_path': b'dir1/file2' + }, + { + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'dir1/file4'), + 'to_path': b'dir1/file4' + }, + { + 'type': 'modify', + 'from': from_dir_model.get_path_data(b'file1'), + 'from_path': b'file1', + 'to': to_dir_model.get_path_data(b'file1'), + 'to_path': b'file1' + }] + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + @istest + def test_insert_delete_empty_dirs(self, mock_get_dir, mock_get_rev): + rev_from = '898ff03e1e7925ecde3da66327d3cdc7e07625ba' + rev_to = '647c3d381e67490e82cdbbe6c96e46d5e1628ce2' + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'dir3/file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'dir3/file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + to_dir_model.add_file(b'dir3/dir1/') + + expected_changes = \ + [{ + 'type': 'insert', + 'from': None, + 'from_path': None, + 'to': to_dir_model.get_path_data(b'dir3/dir1'), + 'to_path': b'dir3/dir1' + }] + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'dir1/dir2/') + from_dir_model.add_file(b'dir1/file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'dir1/file1', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + + expected_changes = \ + [{ + 'type': 'delete', + 'from': from_dir_model.get_path_data(b'dir1/dir2'), + 'from_path': b'dir1/dir2', + 'to': None, + 'to_path': None + }] + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev) + + @istest + def test_track_renaming(self, mock_get_dir, mock_get_rev): + rev_from = '898ff03e1e7925ecde3da66327d3cdc7e07625ba' + rev_to = '647c3d381e67490e82cdbbe6c96e46d5e1628ce2' + + from_dir_model = DirectoryModel() + from_dir_model.add_file(b'file1_oldname', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + from_dir_model.add_file(b'dir1/file1_oldname', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + from_dir_model.add_file(b'file2_oldname', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + + to_dir_model = DirectoryModel() + to_dir_model.add_file(b'dir1/file1_newname', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + to_dir_model.add_file(b'dir2/file1_newname', 'ea15f54ca215e7920c60f564315ebb7f911a5204') + 
to_dir_model.add_file(b'file2_newname', 'd3c00f9396c6d0277727cec522ff6ad1ea0bc2da') + + expected_changes = \ + [{ + 'type': 'rename', + 'from': from_dir_model.get_path_data(b'dir1/file1_oldname'), + 'from_path': b'dir1/file1_oldname', + 'to': to_dir_model.get_path_data(b'dir1/file1_newname'), + 'to_path': b'dir1/file1_newname' + }, + { + 'type': 'rename', + 'from': from_dir_model.get_path_data(b'file1_oldname'), + 'from_path': b'file1_oldname', + 'to': to_dir_model.get_path_data(b'dir2/file1_newname'), + 'to_path': b'dir2/file1_newname' + }, + { + 'type': 'rename', + 'from': from_dir_model.get_path_data(b'file2_oldname'), + 'from_path': b'file2_oldname', + 'to': to_dir_model.get_path_data(b'file2_newname'), + 'to_path': b'file2_newname' + }] + + self.diff_revisions(rev_from, rev_to, from_dir_model, + to_dir_model, expected_changes, + mock_get_dir, mock_get_rev)