diff --git a/PKG-INFO b/PKG-INFO index bbdc5c0c..98294c35 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.web.ui -Version: 0.0.28 +Version: 0.0.29 Summary: Software Heritage Web UI Home-page: https://forge.softwareheritage.org/diffusion/DWUI/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/requirements.txt b/requirements.txt index ecaa2878..d9283bc6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,17 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names. For the full spec or # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html # Runtime dependencies Flask Flask-API -swh.core +swh.core >= 0.0.20 swh.storage >= 0.0.33 dateutil # Test dependencies #Flask-Testing #blinker # Non-Python dependencies # libjs-cryptojs diff --git a/swh.web.ui.egg-info/PKG-INFO b/swh.web.ui.egg-info/PKG-INFO index bbdc5c0c..98294c35 100644 --- a/swh.web.ui.egg-info/PKG-INFO +++ b/swh.web.ui.egg-info/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.web.ui -Version: 0.0.28 +Version: 0.0.29 Summary: Software Heritage Web UI Home-page: https://forge.softwareheritage.org/diffusion/DWUI/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/swh.web.ui.egg-info/requires.txt b/swh.web.ui.egg-info/requires.txt index 404fedb6..638e2b70 100644 --- a/swh.web.ui.egg-info/requires.txt +++ b/swh.web.ui.egg-info/requires.txt @@ -1,5 +1,5 @@ Flask Flask-API dateutil -swh.core +swh.core>=0.0.20 swh.storage>=0.0.33 diff --git a/swh/web/ui/backend.py b/swh/web/ui/backend.py index b16dc6b7..819d9d15 100644 --- a/swh/web/ui/backend.py +++ b/swh/web/ui/backend.py @@ -1,223 +1,253 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import os from swh.web.ui import main def content_get(sha1_bin): """Lookup the content designed by {algo: hash_bin}. Args: sha1_bin: content's binary sha1. Returns: Content as dict with 'sha1' and 'data' keys. data representing its raw data. """ contents = main.storage().content_get([sha1_bin]) if contents and len(contents) >= 1: return contents[0] return None def content_find(algo, hash_bin): """Retrieve the content with binary hash hash_bin Args: algo: nature of the hash hash_bin. hash_bin: content's hash searched for. Returns: A triplet (sha1, sha1_git, sha256) if the content exist or None otherwise. """ return main.storage().content_find({algo: hash_bin}) def content_find_occurrence(algo, hash_bin): """Find the content's occurrence. Args: algo: nature of the hash hash_bin. hash_bin: content's hash searched for. Returns: The occurrence of the content. """ return main.storage().content_find_occurrence({algo: hash_bin}) def content_missing_per_sha1(sha1list): """List content missing from storage based on sha1 Args: sha1s: Iterable of sha1 to check for absence Returns: an iterable of sha1s missing from the storage """ return main.storage().content_missing_per_sha1(sha1list) def directory_get(sha1_bin): """Retrieve information on one directory. Args: sha1_bin: Directory's identifier Returns: The directory's information. 
""" res = main.storage().directory_get([sha1_bin]) if res and len(res) >= 1: return res[0] def origin_get(origin_id): """Return information about the origin with id origin_id. Args: origin_id: origin's identifier Returns: Origin information as dict. """ return main.storage().origin_get({'id': origin_id}) def person_get(person_id): """Return information about the person with id person_id. Args: person_id: person's identifier.v Returns: Person information as dict. """ res = main.storage().person_get([person_id]) if res and len(res) >= 1: return res[0] def directory_ls(sha1_git_bin, recursive=False): """Return information about the directory with id sha1_git. Args: sha1_git: directory's identifier. recursive: Optional recursive flag default to False Returns: Directory information as dict. """ directory_entries = main.storage().directory_ls(sha1_git_bin, recursive) if not directory_entries: return [] return directory_entries def release_get(sha1_git_bin): """Return information about the release with sha1 sha1_git_bin. Args: sha1_git_bin: The release's sha1 as bytes. Returns: Release information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ res = main.storage().release_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_get(sha1_git_bin): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. Returns: Revision information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ res = main.storage().revision_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_log(sha1_git_bin, limit=100): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. limit: the maximum number of revisions returned. Returns: Revision information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ return main.storage().revision_log([sha1_git_bin], limit) +def revision_log_by(origin_id, branch_name, ts, limit=100): + """Return information about the revision matching the timestamp + ts, from origin origin_id, in branch branch_name. + + Args: + origin_id: origin of the revision + - branch_name: revision's branch. + - timestamp: revision's time frame. + + Returns: + Information for the revision matching the criterions. + + """ + # Disable pending RemoteStorage opening revision_log_by + """ + if not ts and branch_name == 'refs/heads/master': + return main.storage().revision_log_by(origin_id) + """ + + rev = main.storage().revision_get_by(origin_id, + branch_name, + timestamp=ts, + limit=1) + if not rev: + return None + + rev_sha1s_bin = [revision['id'] for revision in rev] + return main.storage().revision_log(rev_sha1s_bin, limit) + + def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ return main.storage().stat_counters() def revision_get_by(origin_id, branch_name, timestamp): """Return occurrence information matching the criterions origin_id, branch_name, ts. """ res = main.storage().revision_get_by(origin_id, branch_name, timestamp=timestamp, limit=1) if not res: return None return res[0] def directory_entry_get_by_path(directory, path): """Return a directory entry by its path. 
""" paths = path.strip(os.path.sep).split(os.path.sep) return main.storage().directory_entry_get_by_path( directory, list(map(lambda p: p.encode('utf-8'), paths))) def entity_get(uuid): """Retrieve the entity per its uuid. """ return main.storage().entity_get(uuid) diff --git a/swh/web/ui/converters.py b/swh/web/ui/converters.py index 521fb786..656ac16a 100644 --- a/swh/web/ui/converters.py +++ b/swh/web/ui/converters.py @@ -1,207 +1,228 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from swh.core import hashutil +from swh.core.utils import decode_with_escape from swh.web.ui import utils def from_swh(dict_swh, hashess={}, bytess={}, dates={}, blacklist={}, convert={}, convert_fn=lambda x: x): """Convert from an swh dictionary to something reasonably json serializable. Args: - dict_swh: the origin dictionary needed to be transformed - hashess: list/set of keys representing hashes values (sha1, sha256, sha1_git, etc...) as bytes. Those need to be transformed in hexadecimal string - bytess: list/set of keys representing bytes values which needs to be decoded - blacklist: set of keys to filter out from the conversion - convert: set of keys whose associated values need to be converted using convert_fn - convert_fn: the conversion function to apply on the value of key in 'convert' The remaining keys are copied as is in the output. Returns: dictionary equivalent as dict_swh only with its keys `converted`. """ def convert_hashes_bytes(v): """v is supposedly a hash as bytes, returns it converted in hex. """ if v and isinstance(v, bytes): return hashutil.hash_to_hex(v) return v def convert_bytes(v): """v is supposedly a bytes string, decode as utf-8. FIXME: Improve decoding policy. If not utf-8, break! """ if v and isinstance(v, bytes): return v.decode('utf-8') return v def convert_date(v): """v is a dict with three keys: timestamp offset negative_utc We convert it to a human-readable string """ tz = datetime.timezone(datetime.timedelta(minutes=v['offset'])) date = datetime.datetime.fromtimestamp(v['timestamp'], tz=tz) datestr = date.isoformat() if v['offset'] == 0 and v['negative_utc']: # remove the rightmost + and replace it with a - return '-'.join(datestr.rsplit('+', 1)) return datestr if not dict_swh: return dict_swh new_dict = {} for key, value in dict_swh.items(): if key in blacklist: continue elif key in dates: new_dict[key] = convert_date(value) elif isinstance(value, dict): new_dict[key] = from_swh(value, hashess, bytess, dates, blacklist, convert, convert_fn) elif key in hashess: new_dict[key] = utils.fmap(convert_hashes_bytes, value) elif key in bytess: - new_dict[key] = utils.fmap(convert_bytes, value) + try: + new_dict[key] = utils.fmap(convert_bytes, value) + except UnicodeDecodeError: + if 'decoding_failures' not in new_dict: + new_dict['decoding_failures'] = [key] + else: + new_dict['decoding_failures'].append(key) + new_dict[key] = utils.fmap(decode_with_escape, value) elif key in convert: new_dict[key] = convert_fn(value) else: new_dict[key] = value return new_dict def from_origin(origin): """Convert from an SWH origin to an origin dictionary. """ return from_swh(origin, hashess=set(['revision']), bytess=set(['path'])) def from_release(release): """Convert from an SWH release to a json serializable release dictionary. 
Args: release: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - revision: identifier of the revision the release points to (sha1 in bytes) - comment: release's comment message (bytes) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) Returns: Release dictionary with the following keys: - id: hexadecimal sha1 (string) - revision: hexadecimal sha1 (string) - comment: release's comment message (string) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) """ return from_swh( release, hashess=set(['id', 'target']), - bytess=set(['message', 'name', 'email']), + bytess=set(['message', 'name', 'fullname', 'email']), dates={'date'}, ) def from_revision(revision): """Convert from an SWH revision to a json serializable revision dictionary. Args: revision: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - directory: identifier of the directory the revision points to (sha1 in bytes) - author_name, author_email: author's revision name and email - committer_name, committer_email: committer's revision name and email - message: revision's message - date, date_offset: revision's author date - committer_date, committer_date_offset: revision's commit date - parents: list of parents for such revision - synthetic: revision's property nature - type: revision's type (git, tar or dsc at the moment) - metadata: if the revision is synthetic, this can reference dynamic properties. Returns: Revision dictionary with the same keys as inputs, only: - sha1s are in hexadecimal strings (id, directory) - bytes are decoded in string (author_name, committer_name, - author_email, committer_email, message) + author_email, committer_email) - remaining keys are left as is """ - return from_swh(revision, - hashess=set(['id', 'directory', 'parents', 'children']), - bytess=set(['name', - 'email', - 'message']), - dates={'date', 'committer_date'}) + revision = from_swh(revision, + hashess=set(['id', + 'directory', + 'parents', + 'children']), + bytess=set(['name', + 'fullname', + 'email']), + dates={'date', 'committer_date'}) + + if revision: + if 'message' in revision: + try: + revision['message'] = revision['message'].decode('utf-8') + except UnicodeDecodeError: + revision['message_decoding_failed'] = True + revision['message'] = None + + return revision def from_content(content): """Convert swh content to serializable content dictionary. """ return from_swh(content, hashess={'sha1', 'sha1_git', 'sha256'}, bytess={}, blacklist={}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) def from_person(person): """Convert swh person to serializable person dictionary. """ return from_swh(person, hashess=set(), - bytess=set(['name', 'email'])) + bytess=set(['name', 'fullname', 'email'])) def from_directory_entry(dir_entry): """Convert swh person to serializable person dictionary. 
""" return from_swh(dir_entry, hashess=set(['dir_id', 'sha1_git', 'sha1', 'sha256', 'target']), bytess=set(['name']), blacklist={}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) diff --git a/swh/web/ui/service.py b/swh/web/ui/service.py index b3afa313..4a0ab6ea 100644 --- a/swh/web/ui/service.py +++ b/swh/web/ui/service.py @@ -1,554 +1,623 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import defaultdict from swh.core import hashutil from swh.web.ui import converters, query, upload, backend from swh.web.ui.exc import NotFoundExc def lookup_multiple_hashes(hashes): """Lookup the passed hashes in a single DB connection, using batch processing. Args: An array of {filename: X, sha1: Y}, string X, hex sha1 string Y. Returns: The same array with elements updated with elem['found'] = true if the hash is present in storage, elem['found'] = false if not. """ hashlist = [hashutil.hex_to_hash(elem['sha1']) for elem in hashes] content_missing = backend.content_missing_per_sha1(hashlist) missing = [hashutil.hash_to_hex(x) for x in content_missing] for x in hashes: x.update({'found': True}) for h in hashes: if h['sha1'] in missing: h['found'] = False return hashes def hash_and_search(filepath): """Hash the filepath's content as sha1, then search in storage if it exists. Args: Filepath of the file to hash and search. Returns: Tuple (hex sha1, found as True or false). The found boolean, according to whether the sha1 of the file is present or not. """ h = hashutil.hashfile(filepath) c = backend.content_find('sha1', h['sha1']) if c: r = converters.from_content(c) r['found'] = True return r else: return {'sha1': hashutil.hash_to_hex(h['sha1']), 'found': False} def upload_and_search(file): """Upload a file and compute its hash. """ tmpdir, filename, filepath = upload.save_in_upload_folder(file) res = {'filename': filename} try: content = hash_and_search(filepath) res.update(content) return res finally: # clean up if tmpdir: upload.cleanup(tmpdir) def lookup_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form - Returns: Dict with key found to True or False, according to - whether the checksum is present or not + Returns: Dict with key found containing the hash info if the + hash is present, None if not. """ algo, hash = query.parse_hash(q) found = backend.content_find(algo, hash) return {'found': found, 'algo': algo} +def search_hash(q): + """Checks if the storage contains a given content checksum + + Args: query string of the form + + Returns: Dict with key found to True or False, according to + whether the checksum is present or not + + """ + algo, hash = query.parse_hash(q) + found = backend.content_find(algo, hash) + return {'found': found is not None} + + def lookup_hash_origin(q): """Return information about the checksum contained in the query q. Args: query string of the form Returns: origin as dictionary if found for the given content. """ algo, hash = query.parse_hash(q) origin = backend.content_find_occurrence(algo, hash) return converters.from_origin(origin) def lookup_origin(origin_id): """Return information about the origin with id origin_id. Args: origin_id as string Returns: origin information as dict. 
""" return backend.origin_get(origin_id) def lookup_person(person_id): """Return information about the person with id person_id. Args: person_id as string Returns: person information as dict. """ person = backend.person_get(person_id) return converters.from_person(person) def lookup_directory(sha1_git): """Return information about the directory with id sha1_git. Args: sha1_git as string Returns: directory information as dict. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], # HACK: sha1_git really 'Only sha1_git is supported.') dir = backend.directory_get(sha1_git_bin) if not dir: return None directory_entries = backend.directory_ls(sha1_git_bin) return map(converters.from_directory_entry, directory_entries) def lookup_directory_with_path(directory_sha1_git, path_string): """Return directory information for entry with path path_string w.r.t. root directory pointed by directory_sha1_git Args: - directory_sha1_git: sha1_git corresponding to the directory to which we append paths to (hopefully) find the entry - the relative path to the entry starting from the directory pointed by directory_sha1_git Raises: NotFoundExc if the directory entry is not found """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( directory_sha1_git, ['sha1'], 'Only sha1_git is supported.') queried_dir = backend.directory_entry_get_by_path( sha1_git_bin, path_string) if not queried_dir: raise NotFoundExc(('Directory entry with path %s from %s not found') % (path_string, directory_sha1_git)) return converters.from_directory_entry(queried_dir) def lookup_release(release_sha1_git): """Return information about the release with sha1 release_sha1_git. Args: release_sha1_git: The release's sha1 as hexadecimal Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( release_sha1_git, ['sha1'], 'Only sha1_git is supported.') res = backend.release_get(sha1_git_bin) return converters.from_release(res) def lookup_revision(rev_sha1_git): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') - res = backend.revision_get(sha1_git_bin) - return converters.from_revision(res) + revision = backend.revision_get(sha1_git_bin) + return converters.from_revision(revision) + + +def lookup_revision_message(rev_sha1_git): + """Return the raw message of the revision with sha1 revision_sha1_git. + + Args: + revision_sha1_git: The revision's sha1 as hexadecimal + + Returns: + Decoded revision message as dict {'message': } + + Raises: + ValueError if the identifier provided is not of sha1 nature. + NotFoundExc if the revision is not found, or if it has no message + + """ + _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( + rev_sha1_git, + ['sha1'], + 'Only sha1_git is supported.') + + revision = backend.revision_get(sha1_git_bin) + if not revision: + raise NotFoundExc('Revision with sha1_git %s not found.' + % rev_sha1_git) + if 'message' not in revision: + raise NotFoundExc('No message for revision with sha1_git %s.' 
+ % rev_sha1_git) + res = {'message': revision['message']} + return res def lookup_revision_by(origin_id, branch_name="refs/heads/master", timestamp=None): """Lookup revisions by origin_id, branch_name and timestamp. If: - branch_name is not provided, lookup using 'refs/heads/master' as default. - ts is not provided, use the most recent Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. Yields: The revisions matching the criterions. """ res = backend.revision_get_by(origin_id, branch_name, timestamp) return converters.from_revision(res) def lookup_revision_log(rev_sha1_git, limit=100): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal limit: the maximum number of revisions returned Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision_entries = backend.revision_log(sha1_git_bin, limit) return map(converters.from_revision, revision_entries) +def lookup_revision_log_by(origin_id, branch_name, timestamp): + """Return information about the revision with sha1 revision_sha1_git. + + Args: + origin_id: origin of the revision + branch_name: revision's branch + timestamp: revision's time frame + limit: the maximum number of revisions returned + + Returns: + Revision information as dict. + + Raises: + NotFoundExc if no revision corresponds to the criterion + NotFoundExc if the corresponding revision has no log + + """ + revision_entries = backend.revision_log_by(origin_id, + branch_name, + timestamp) + if not revision_entries: + return None + return map(converters.from_revision, revision_entries) + + def lookup_revision_with_context_by(origin_id, branch_name, ts, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. sha1_git_root being resolved through the lookup of a revision by origin_id, branch_name and ts. In other words, sha1_git is an ancestor of sha1_git_root. Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. - sha1_git: one of sha1_git_root's ancestors. - limit: limit the lookup to 100 revisions back. Returns: Pair of (root_revision, revision). Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ rev_root = backend.revision_get_by(origin_id, branch_name, ts) if not rev_root: raise NotFoundExc('Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' % (origin_id, branch_name, ts)) return (converters.from_revision(rev_root), lookup_revision_with_context(rev_root, sha1_git, limit)) def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision. The type is either a sha1 (as an hex string) or a non converted dict. 
sha1_git: one of sha1_git_root's ancestors limit: limit the lookup to 100 revisions back Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) if isinstance(sha1_git_root, str): _, sha1_git_root_bin = query.parse_hash_with_algorithms_or_throws( sha1_git_root, ['sha1'], 'Only sha1_git is supported.') revision_root = backend.revision_get(sha1_git_root_bin) if not revision_root: raise NotFoundExc('Revision root %s not found' % sha1_git_root) else: sha1_git_root_bin = sha1_git_root['id'] revision_log = backend.revision_log(sha1_git_root_bin, limit) parents = {} children = defaultdict(list) for rev in revision_log: rev_id = rev['id'] parents[rev_id] = [] for parent_id in rev['parents']: parents[rev_id].append(parent_id) children[parent_id].append(rev_id) if revision['id'] not in parents: raise NotFoundExc('Revision %s is not an ancestor of %s' % (sha1_git, sha1_git_root)) revision['children'] = children[revision['id']] return converters.from_revision(revision) def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. Otherwise, display the directory pointed by dir_path (if it exists). Args: sha1_git: revision's hash. dir_path: optional directory pointed to by that revision. with_data: boolean that indicates to retrieve the raw data if the path resolves to a content. Default to False (for the api) Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc either if the revision is not found or the path referenced does not exist. NotImplementedError in case of dir_path exists but do not reference a type 'dir' or 'file'. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) dir_sha1_git_bin = revision['directory'] if dir_path: entity = backend.directory_entry_get_by_path(dir_sha1_git_bin, dir_path) if not entity: raise NotFoundExc( "Directory or File '%s' pointed to by revision %s not found" % (dir_path, sha1_git)) else: entity = {'type': 'dir', 'target': dir_sha1_git_bin} if entity['type'] == 'dir': directory_entries = backend.directory_ls(entity['target']) return {'type': 'dir', 'path': '.' if not dir_path else dir_path, 'revision': sha1_git, 'content': map(converters.from_directory_entry, directory_entries)} elif entity['type'] == 'file': # content content = backend.content_find('sha1_git', entity['target']) if with_data: content['data'] = backend.content_get(content['sha1'])['data'] return {'type': 'file', 'path': '.' if not dir_path else dir_path, 'revision': sha1_git, 'content': converters.from_content(content)} else: raise NotImplementedError('Entity of type %s not implemented.' % entity['type']) def lookup_content(q): """Lookup the content designed by q. 
Args: q: The release's sha1 as hexadecimal """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) return converters.from_content(c) def lookup_content_raw(q): """Lookup the content defined by q. Args: q: query string of the form Returns: dict with 'sha1' and 'data' keys. data representing its raw data decoded. """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) if not c: return None content = backend.content_get(c['sha1']) return converters.from_content(content) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ return backend.stat_counters() def lookup_entity_by_uuid(uuid): """Return the entity's hierarchy from its uuid. Args: uuid: entity's identifier. Returns: List of hierarchy entities from the entity with uuid. """ uuid = query.parse_uuid4(uuid) return backend.entity_get(uuid) def lookup_revision_through(revision, limit=100): """Retrieve a revision from the criterion stored in revision dictionary. Args: revision: Dictionary of criterion to lookup the revision with. Here are the supported combination of possible values: - origin_id, branch_name, ts, sha1_git - origin_id, branch_name, ts - sha1_git_root, sha1_git - sha1_git Returns: None if the revision is not found or the actual revision. """ if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context_by(revision['origin_id'], revision['branch_name'], revision['ts'], revision['sha1_git'], limit) if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision: return lookup_revision_by(revision['origin_id'], revision['branch_name'], revision['ts']) if 'sha1_git_root' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context(revision['sha1_git_root'], revision['sha1_git'], limit) if 'sha1_git' in revision: return lookup_revision(revision['sha1_git']) # this should not happen raise NotImplementedError('Should not happen!') def lookup_directory_through_revision(revision, path=None, limit=100, with_data=False): """Retrieve the directory information from the revision. Args: revision: dictionary of criterion representing a revision to lookup path: directory's path to lookup. limit: optional query parameter to limit the revisions log. (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of. with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: The directory pointing to by the revision criterions at path. """ rev = lookup_revision_through(revision, limit) if not rev: raise NotFoundExc('Revision with criterion %s not found!' % revision) return (rev['id'], lookup_directory_with_revision(rev['id'], path, with_data)) diff --git a/swh/web/ui/templates/content.html b/swh/web/ui/templates/content.html index 2da5bd02..8cb32bc3 100644 --- a/swh/web/ui/templates/content.html +++ b/swh/web/ui/templates/content.html @@ -1,28 +1,33 @@ {% extends "layout.html" %} {% block title %}Content information{% endblock %} {% block content %} {% if message is not none %}

{{ message }}

{% endif %} {% if content is not none %} {% for key in ['sha1', 'sha256', 'sha1_git', 'status', 'length', 'ctime'] %}
{{ key }}
{{ content[key] }}
{% endfor %} {% if content['data_url'] is not none %}
data_url
{% endif %} {% if content['data'] is not none %}
data
{{ content['data'] }}
{% endif %} + {% if 'decoding_failures' in content %} +
+
(some decoding errors)
+
+ {% endif %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/directory.html b/swh/web/ui/templates/directory.html index dd609d8f..32244ad4 100644 --- a/swh/web/ui/templates/directory.html +++ b/swh/web/ui/templates/directory.html @@ -1,10 +1,15 @@ {% extends "layout.html" %} {% block title %}Browse directory{% endblock %} {% block content %} {{ message }} {% if ls is not none %} {% for e in files %} - +
+

+ {{ e.name }} + {% if 'decoding_failures' in e %}(some decoding errors){% endif %} +

+
{% endfor %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/entity.html b/swh/web/ui/templates/entity.html index 8d5c19a9..9106cded 100644 --- a/swh/web/ui/templates/entity.html +++ b/swh/web/ui/templates/entity.html @@ -1,23 +1,28 @@ {% extends "layout.html" %} {% block title %}Entity information{% endblock %} {% block content %} {% if message is not none %}

{{ message | safe }}

{% endif %} {% if entities is not none %} {% for entity in entities %}
{% for key in ['uuid', 'parent', 'name', 'doap', 'lister_metadata', 'homepage', 'lister', 'description', 'type', 'last_id', 'generated', 'active', 'last_seen'] %} {% if entity[key] is not none %}
{{ key }}
{{ entity[key] }}
{% endif %} {% endfor %} + {% if 'decoding_failures' in entity %} +
+
(some decoding errors)
+
+ {% endif %} {% endfor %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/origin.html b/swh/web/ui/templates/origin.html index 10203314..d2ccd5d8 100644 --- a/swh/web/ui/templates/origin.html +++ b/swh/web/ui/templates/origin.html @@ -1,22 +1,27 @@ {% extends "layout.html" %} {% block title %}Origin{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if origin is not none %}
Details on origin {{ origin_id }}: {% for key in ['type', 'lister', 'projet', 'url'] %} {% if origin[key] is not none %}
{{ key }}
{{ origin[key] }}
{% endif %} {% endfor %} + {% if 'decoding_failures' in origin %} +
+
(some decoding errors)
+
+ {% endif %}
{% endif %} {% endblock %} diff --git a/swh/web/ui/templates/person.html b/swh/web/ui/templates/person.html index 5c125cfd..ebf455f8 100644 --- a/swh/web/ui/templates/person.html +++ b/swh/web/ui/templates/person.html @@ -1,22 +1,27 @@ {% extends "layout.html" %} {% block title %}Person{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if person is not none %}
Details on person {{ person_id }}: {% for key in ['name', 'email'] %} {% if person[key] is not none %}
{{ key }}
{{ person[key] }}
{% endif %} {% endfor %} + {% if 'decoding_failures' in person %} +
+
(some decoding errors)
+
+ {% endif %}
{% endif %} {% endblock %} diff --git a/swh/web/ui/templates/release.html b/swh/web/ui/templates/release.html index d7b326b5..5e5335c7 100644 --- a/swh/web/ui/templates/release.html +++ b/swh/web/ui/templates/release.html @@ -1,35 +1,43 @@ {% extends "layout.html" %} {% block title %}Release{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if release is not none %}
{% for key in release.keys() %} - {% if key not in ['author', 'target_url', 'message', 'target', 'target_type'] and release[key] is not none %} + {% if key not in ['author', 'target_url', 'message', 'target', 'target_type', 'decoding_failures'] and release[key] is not none %}
{{ key }}
{{ release[key] }}
{% endif %} {% endfor %} {% if release['author'] is not none %}
author
-
{{ release['author']['name'] }} - {{ release['author']['email'] }}
+
+ {{ release['author']['name'] }} - {{ release['author']['email'] }} + {% if 'decoding_failures' in release['author'] %}(some decoding errors){% endif %} +
{% endif %}
{% if release['target_url'] is not none %}
{{ release['target_type'] }}
{% endif %} + {% if 'decoding_failures' in release %} +
+
(some decoding errors)
+
+ {% endif %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/revision-directory.html b/swh/web/ui/templates/revision-directory.html index 20ec0ed3..674afa40 100644 --- a/swh/web/ui/templates/revision-directory.html +++ b/swh/web/ui/templates/revision-directory.html @@ -1,45 +1,60 @@ {% extends "layout.html" %} {% block title %}Browse revision at path{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if result is not none %}

Browse revision '{{ revision }}' with {{ result['type'] }}path '{{ path }}':

{% if result['type'] == 'dir' %} {% if result['content'] is not none %} {% for e in result['content'] %} {% if e.type == 'dir' %} - +
+

+ {{ e.name }} + {% if 'decoding_failures' in e %}(some decoding errors){% endif %} +

+
{% else %} - - {% endif %} +
+

+ {{ e.name }} + {% if 'decoding_failures' in e %}(some decoding errors){% endif %} +

+
+ {% endif %} {% endfor %} {% endif %} {% else %} {% if result['content'] is not none %} {% for key in ['sha1', 'sha256', 'sha1_git', 'status', 'length', 'ctime'] %}
{{ key }}
{{ result['content'][key] }}
{% endfor %} {% endif %} {% if result['content']['data_url'] is not none %}
data_url
{% endif %} {% if result['content']['data'] is not none %}
data
{{ result['content']['data'] }}
- {% endif %} + {% endif %} + {% if 'decoding_failures' in result %} +
+
(some decoding errors)
+
+ {% endif %} {% endif %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/revision-log.html b/swh/web/ui/templates/revision-log.html index e5e7b27b..ba124f08 100644 --- a/swh/web/ui/templates/revision-log.html +++ b/swh/web/ui/templates/revision-log.html @@ -1,88 +1,128 @@ {% extends "layout.html" %} {% block title %}Revision Log{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} +
+

Queried revision:

+ {% if sha1_git is not none %} +
Revision with git SHA1 {{ sha1_git }}
+ {% else %} + +
Branch name {{ branch_name }}
+ {% if timestamp is not none %} +
Time stamp {{ timestamp }}
+ {% endif %} +
+
+ {% endif %} +
+ {% if revisions is not none %} {% for revision in revisions %} -
+
{% if revision['url'] is not none %} {% endif %} {% if revision['history_url'] is not none %} {% endif %} {% if revision['directory_url'] is not none %} {% endif %} {% if revision['author'] is not none %}
Author
- +
+

+ {{ revision['author']['name'] }} + {% if 'decoding_failures' in revision['author'] %}(some decoding errors){% endif %} +

+
Date

{{ revision['date'] }}

{% endif %} {% if revision['committer'] is not none %}
Committer
- +
+

+ {{ revision['committer']['name'] }} + {% if 'decoding_failures' in revision['committer'] %}(some decoding errors){% endif %} +

+
Committer Date

{{ revision['committer_date'] }}

{% endif %} {% if revision['message'] is not none %}
Message
{{ revision['message'] }}
- {% endif %} - + {% elif revision['message_decoding_failed'] %} +
+
Message
+ +
+
Message
+
No message found.
+
+ {% endif %} + {% for key in revision.keys() %} - {% if key in ['type', 'synthetic'] and revision[key] is not none %} + {% if key in ['type', 'synthetic'] and key not in ['decoding_failures'] and revision[key] is not none %}
{{ key }}

{{ revision[key] }}

{% endif %} {% endfor %} - {% for key in ['parent_urls', 'children_urls'] %} {% if revision[key] is not none %}
{{ key }}
{% for link in revision[key] %} {% endfor %}
{% endif %} {% endfor %} + {% if 'decoding_failures' in revision %} +
+
(some decoding errors)
+
+ {% endif %} +

{% endfor %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/revision.html b/swh/web/ui/templates/revision.html index 29da1935..307420bb 100644 --- a/swh/web/ui/templates/revision.html +++ b/swh/web/ui/templates/revision.html @@ -1,84 +1,109 @@ {% extends "layout.html" %} {% block title %}Revision{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if revision is not none %}
{% if revision['url'] is not none %} {% endif %} {% if revision['history_url'] is not none %} {% endif %} {% if revision['directory_url'] is not none %} {% endif %} {% if revision['author'] is not none %}
Author
- +
+

+ {{ revision['author']['name'] }} + {% if 'decoding_failures' in revision['author'] %}(some decoding failed){% endif %} +

+
Date

{{ revision['date'] }}

{% endif %} {% if revision['committer'] is not none %}
Committer
- +
+

+ {{ revision['committer']['name'] }} + {% if 'decoding_failures' in revision['committer'] %}(some decoding failed){% endif %} +

+
Committer Date

{{ revision['committer_date'] }}

{% endif %} {% if revision['message'] is not none %}
Message
{{ revision['message'] }}
+ {% elif revision['message_decoding_failed'] %} +
+
Message
+ +
+
Message
+
No message found.
+
{% endif %} {% for key in revision.keys() %} {% if key in ['type', 'synthetic'] and revision[key] is not none %}
{{ key }}

{{ revision[key] }}

{% endif %} {% endfor %} {% for key in ['parent_urls', 'children_urls'] %} {% if revision[key] is not none %}
{{ key }}
{% for link in revision[key] %} {% endfor %}
{% endif %} {% endfor %} + {% if 'decoding_failures' in revision %} +
+
(some decoding failed)
+
+ {% endif %} {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/upload_and_search.html b/swh/web/ui/templates/upload_and_search.html index 6c9b3a85..1c90f4b0 100644 --- a/swh/web/ui/templates/upload_and_search.html +++ b/swh/web/ui/templates/upload_and_search.html @@ -1,98 +1,102 @@ {% extends "layout.html" %} {% block title %}Search SWH{% endblock %} {% block content %}

Drag and drop or click here to hash files and search for them. Your files will NOT be uploaded, hashing is done locally. Filesizes over 20Mb may be slow to process, use with care.
{% if search_stats is not none and search_stats %} {% endif %} - {% if responses is not none and responses %} + {% if search_res is not none %} - {% for resp in responses %} + {% for res in search_res %} - - {% if resp['found'] %} - + {% if res['filename'] is not none %} + + {% else %} + + {% endif %} + {% if res['found'] %} + {% else %} - + {% endif %} {% endfor %}
File name SHA1 checksum Result
{{ resp['filename'] }}{{ resp['sha1'] }}{{ res['filename'] }}From text input{{ res['sha1'] }} {{ resp['sha1'] }}{{ res['sha1'] }}
{% endif %} {% if messages is not none and messages %}
{% for message in messages %}
{{ message | safe }}
{% endfor %}
{% endif %}
{% endblock %} diff --git a/swh/web/ui/tests/test_backend.py b/swh/web/ui/tests/test_backend.py index 40d4b842..7b1b017e 100644 --- a/swh/web/ui/tests/test_backend.py +++ b/swh/web/ui/tests/test_backend.py @@ -1,522 +1,578 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import MagicMock from swh.core import hashutil from swh.web.ui import backend from swh.web.ui.tests import test_app class BackendTestCase(test_app.SWHApiTestCase): @istest def content_get_ko_not_found_1(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f777') self.storage.content_get = MagicMock(return_value=None) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_get_ko_not_found_empty_result(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_get = MagicMock(return_value=[]) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_get(self): # given sha1_bin = hashutil.hex_to_hash( '123caf10e9535160d90e874b45aa426de762f19f') stub_contents = [{ 'sha1': sha1_bin, 'data': b'binary data', }, {}] self.storage.content_get = MagicMock(return_value=stub_contents) # when actual_content = backend.content_get(sha1_bin) # then self.assertEquals(actual_content, stub_contents[0]) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_find_ko_no_result(self): # given sha1_bin = hashutil.hex_to_hash( '123caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find = MagicMock(return_value=None) # when actual_lookup = backend.content_find('sha1_git', sha1_bin) # then self.assertIsNone(actual_lookup) self.storage.content_find.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_find(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find = MagicMock(return_value=(1, 2, 3)) # when actual_content = backend.content_find('sha1', sha1_bin) # then self.assertEquals(actual_content, (1, 2, 3)) # check the function has been called with parameters self.storage.content_find.assert_called_with({'sha1': sha1_bin}) @istest def content_find_occurrence_ko_no_result(self): # given sha1_bin = hashutil.hex_to_hash( '123caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find_occurrence = MagicMock(return_value=None) # when actual_lookup = backend.content_find_occurrence('sha1_git', sha1_bin) # then self.assertIsNone(actual_lookup) self.storage.content_find_occurrence.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_find_occurrence(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find_occurrence = MagicMock( return_value=(1, 2, 3)) # when actual_content = backend.content_find_occurrence('sha1', sha1_bin) # then self.assertEquals(actual_content, (1, 2, 3)) # check the function has been called with parameters self.storage.content_find_occurrence.assert_called_with( {'sha1': sha1_bin}) @istest def content_missing_per_sha1_none(self): # given sha1s_bin = 
[hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )] self.storage.content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, []) self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) @istest def content_missing_per_sha1_some(self): # given sha1s_bin = [hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )] self.storage.content_missing_per_sha1 = MagicMock(return_value=[ hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, [hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) @istest def origin_get(self): # given self.storage.origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when actual_origin = backend.origin_get('origin-id') # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) self.storage.origin_get.assert_called_with({'id': 'origin-id'}) @istest def person_get(self): # given self.storage.person_get = MagicMock(return_value=[{ 'id': 'person-id', 'name': 'blah'}]) # when actual_person = backend.person_get('person-id') # then self.assertEqual(actual_person, {'id': 'person-id', 'name': 'blah'}) self.storage.person_get.assert_called_with(['person-id']) @istest def directory_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') self.storage.directory_get = MagicMock(return_value=None) # when actual_directory = backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, None) self.storage.directory_get.assert_called_with([sha1_bin]) @istest def directory_get(self): # given sha1_bin = hashutil.hex_to_hash( '51f71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') sha1_bin2 = hashutil.hex_to_hash( '62071b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir = {'id': sha1_bin, 'revision': b'sha1-blah'} stub_dir2 = {'id': sha1_bin2, 'revision': b'sha1-foobar'} self.storage.directory_get = MagicMock(return_value=[stub_dir, stub_dir2]) # when actual_directory = backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, stub_dir) self.storage.directory_get.assert_called_with([sha1_bin]) @istest def directory_ls_empty_result(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') self.storage.directory_ls = MagicMock(return_value=[]) # when actual_directory = backend.directory_ls(sha1_bin) # then self.assertEquals(actual_directory, []) self.storage.directory_ls.assert_called_with(sha1_bin, False) @istest def directory_ls(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir_entries = [{ 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'target': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'dir_id': 
hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'name': b'bob', 'type': 10, }] self.storage.directory_ls = MagicMock( return_value=stub_dir_entries) actual_directory = backend.directory_ls(sha1_bin, recursive=True) # then self.assertIsNotNone(actual_directory) self.assertEqual(list(actual_directory), stub_dir_entries) self.storage.directory_ls.assert_called_with(sha1_bin, True) @istest def release_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') self.storage.release_get = MagicMock(return_value=[]) # when actual_release = backend.release_get(sha1_bin) # then self.assertIsNone(actual_release) self.storage.release_get.assert_called_with([sha1_bin]) @istest def release_get(self): # given sha1_bin = hashutil.hex_to_hash( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') stub_releases = [{ 'id': sha1_bin, 'target': None, 'date': datetime.datetime(2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }] self.storage.release_get = MagicMock(return_value=stub_releases) # when actual_release = backend.release_get(sha1_bin) # then self.assertEqual(actual_release, stub_releases[0]) self.storage.release_get.assert_called_with([sha1_bin]) @istest def revision_get_by_not_found(self): # given self.storage.revision_get_by = MagicMock(return_value=[]) # when actual_revision = backend.revision_get_by(10, 'master', 'ts2') # then self.assertIsNone(actual_revision) self.storage.revision_get_by.assert_called_with(10, 'master', timestamp='ts2', limit=1) @istest def revision_get_by(self): # given self.storage.revision_get_by = MagicMock(return_value=[{'id': 1}]) # when actual_revisions = backend.revision_get_by(100, 'dev', 'ts') # then self.assertEquals(actual_revisions, {'id': 1}) self.storage.revision_get_by.assert_called_with(100, 'dev', timestamp='ts', limit=1) @istest def revision_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5') self.storage.revision_get = MagicMock(return_value=[]) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertIsNone(actual_revision) self.storage.revision_get.assert_called_with([sha1_bin]) @istest def revision_get(self): # given sha1_bin = hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5') stub_revisions = [{ 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] self.storage.revision_get = MagicMock(return_value=stub_revisions) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertEqual(actual_revision, stub_revisions[0]) self.storage.revision_get.assert_called_with([sha1_bin]) @istest def revision_log(self): # given sha1_bin = hashutil.hex_to_hash( '28d8be353ed3480476f032475e7c233eff7371d5') stub_revision_log = [{ 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 
'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] self.storage.revision_log = MagicMock(return_value=stub_revision_log) # when actual_revision = backend.revision_log(sha1_bin) # then self.assertEqual(list(actual_revision), stub_revision_log) self.storage.revision_log.assert_called_with([sha1_bin], 100) + @istest + def revision_log_by(self): + # given + # given + sha1_bin = hashutil.hex_to_hash( + '28d8be353ed3480476f032475e7c233eff7371d5') + stub_revision_log = [{ + 'id': sha1_bin, + 'directory': hashutil.hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'bill & boule', + 'email': b'bill@boule.org', + }, + 'committer': { + 'name': b'boule & bill', + 'email': b'boule@bill.org', + }, + 'message': b'elegant fix for bug 31415957', + 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), + 'date_offset': 0, + 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), + 'committer_date_offset': 0, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + }] + + self.storage.revision_get_by = MagicMock(return_value=[ + {'id': sha1_bin}]) + self.storage.revision_log = MagicMock(return_value=stub_revision_log) + + # when + actual_log = backend.revision_log_by(1, 'refs/heads/master', None) + + # then + self.assertEqual(actual_log, stub_revision_log) + self.storage.revision_log.assert_called_with([sha1_bin], 100) + + @istest + def revision_log_by_norev(self): + # given + # given + sha1_bin = hashutil.hex_to_hash( + '28d8be353ed3480476f032475e7c233eff7371d5') + + self.storage.revision_get_by = MagicMock(return_value=None) + + # when + actual_log = backend.revision_log_by(1, 'refs/heads/master', None) + + # then + self.assertEqual(actual_log, None) + self.storage.revision_log.assert_called_with([sha1_bin], 100) + @istest def stat_counters(self): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } self.storage.stat_counters = MagicMock(return_value=input_stats) # when actual_stats = backend.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) self.storage.stat_counters.assert_called_with() @istest def directory_entry_get_by_path(self): # given stub_dir_entry = {'id': b'dir-id', 'type': 'dir', 'name': b'some/path/foo'} self.storage.directory_entry_get_by_path = MagicMock( return_value=stub_dir_entry) # when actual_dir_entry = backend.directory_entry_get_by_path(b'dir-sha1', 'some/path/foo') self.assertEquals(actual_dir_entry, stub_dir_entry) self.storage.directory_entry_get_by_path.assert_called_once_with( b'dir-sha1', [b'some', b'path', b'foo']) @istest def entity_get(self): # given stub_entities = [{'uuid': 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2'}, {'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent': None}] self.storage.entity_get = MagicMock(return_value=stub_entities) # when actual_entities = backend.entity_get( 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') # then self.assertEquals(actual_entities, stub_entities) self.storage.entity_get.assert_called_once_with( 
'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') diff --git a/swh/web/ui/tests/test_converters.py b/swh/web/ui/tests/test_converters.py index 182ff123..ad85acd1 100644 --- a/swh/web/ui/tests/test_converters.py +++ b/swh/web/ui/tests/test_converters.py @@ -1,407 +1,545 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import unittest from nose.tools import istest from swh.core import hashutil from swh.web.ui import converters class ConvertersTestCase(unittest.TestCase): @istest def from_swh(self): some_input = { 'a': 'something', 'b': 'someone', 'c': b'sharp-0.3.4.tgz', 'd': hashutil.hex_to_hash( 'b04caf10e9535160d90e874b45aa426de762f19f'), 'e': b'sharp.html/doc_002dS_005fISREG.html', 'g': [b'utf-8-to-decode', b'another-one'], 'h': 'something filtered', 'i': {'e': b'something'}, 'j': { 'k': { 'l': [b'bytes thing', b'another thingy'], 'n': 'dont care either' }, 'm': 'dont care' }, 'o': 'something', 'p': 'bar', 'q': 'intact', 'r': {'p': 'also intact', 'q': 'bar'}, 's': { 'timestamp': 42, 'offset': -420, 'negative_utc': None, } } expected_output = { 'a': 'something', 'b': 'someone', 'c': 'sharp-0.3.4.tgz', 'd': 'b04caf10e9535160d90e874b45aa426de762f19f', 'e': 'sharp.html/doc_002dS_005fISREG.html', 'g': ['utf-8-to-decode', 'another-one'], 'i': {'e': 'something'}, 'j': { 'k': { 'l': ['bytes thing', 'another thingy'] } }, 'p': 'foo', 'q': 'intact', 'r': {'p': 'also intact', 'q': 'foo'}, 's': '1969-12-31T17:00:42-07:00', } def test_convert_fn(v): return 'foo' if v == 'bar' else v actual_output = converters.from_swh(some_input, hashess={'d', 'o'}, bytess={'c', 'e', 'g', 'l'}, dates={'s'}, blacklist={'h', 'm', 'n', 'o'}, convert={'p', 'q'}, convert_fn=test_convert_fn) self.assertEquals(expected_output, actual_output) @istest def from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self): some_input = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } expected_output = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } actual_output = converters.from_swh(some_input, hashess={'a', 'b'}, bytess={'c', 'd'}) self.assertEquals(expected_output, actual_output) + @istest + def from_swh_edge_cases_convert_invalid_utf8_bytes(self): + some_input = { + 'a': 'something', + 'b': 'someone', + 'c': b'a name \xff', + 'd': b'an email \xff', + } + + expected_output = { + 'a': 'something', + 'b': 'someone', + 'c': 'a name \\xff', + 'd': 'an email \\xff', + 'decoding_failures': ['c', 'd'] + } + + actual_output = converters.from_swh(some_input, + hashess={'a', 'b'}, + bytess={'c', 'd'}) + for v in ['a', 'b', 'c', 'd']: + self.assertEqual(expected_output[v], actual_output[v]) + self.assertEqual(len(expected_output['decoding_failures']), + len(actual_output['decoding_failures'])) + for v in expected_output['decoding_failures']: + self.assertTrue(v in actual_output['decoding_failures']) + @istest def from_swh_empty(self): # when self.assertEquals({}, converters.from_swh({})) @istest def from_swh_none(self): # when self.assertIsNone(converters.from_swh(None)) @istest def from_origin(self): # given origin_input = { 'origin_type': 'ftp', 'origin_url': 'rsync://ftp.gnu.org/gnu/octave', 'branch': 'octave-3.4.0.tar.gz', 'revision': b'\xb0L\xaf\x10\xe9SQ`\xd9\x0e\x87KE\xaaBm\xe7b\xf1\x9f', # noqa 'path': b'octave-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa } expected_origin = { 
'origin_type': 'ftp', 'origin_url': 'rsync://ftp.gnu.org/gnu/octave', 'branch': 'octave-3.4.0.tar.gz', 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'path': 'octave-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa } # when actual_origin = converters.from_origin(origin_input) # then self.assertEqual(actual_origin, expected_origin) @istest def from_release(self): release_input = { 'id': hashutil.hex_to_hash( 'aad23fa492a0c5fed0708a6703be875448c86884'), 'target': hashutil.hex_to_hash( '5e46d564378afc44b31bb89f99d5675195fbdf67'), 'target_type': 'revision', 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'author': { 'name': b'author name', + 'fullname': b'Author Name author@email', 'email': b'author@email', }, 'name': b'v0.0.1', 'message': b'some comment on release', 'synthetic': True, } expected_release = { 'id': 'aad23fa492a0c5fed0708a6703be875448c86884', 'target': '5e46d564378afc44b31bb89f99d5675195fbdf67', 'target_type': 'revision', 'date': '2015-01-01T22:00:00+00:00', 'author': { 'name': 'author name', + 'fullname': 'Author Name author@email', 'email': 'author@email', }, 'name': 'v0.0.1', 'message': 'some comment on release', 'target_type': 'revision', 'synthetic': True, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_release_no_revision(self): release_input = { 'id': hashutil.hex_to_hash( 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2016, 3, 2, 10, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.1.1', 'message': b'comment on release', 'synthetic': False, 'author': { 'name': b'bob', + 'fullname': b'Bob bob@alice.net', 'email': b'bob@alice.net', }, } expected_release = { 'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e', 'target': None, 'date': '2016-03-02T10:00:00-00:00', 'name': 'v0.1.1', 'message': 'comment on release', 'synthetic': False, 'author': { 'name': 'bob', + 'fullname': 'Bob bob@alice.net', 'email': 'bob@alice.net', }, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_revision(self): revision_input = { 'id': hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'message': b'synthetic revision message', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'parents': [ hashutil.hex_to_hash( '29d8be353ed3480476f032475e7c244eff7371d5'), hashutil.hex_to_hash( '30d8be353ed3480476f032475e7c244eff7371d5') ], 'children': [ hashutil.hex_to_hash( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': 'synthetic revision message', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5', '30d8be353ed3480476f032475e7c244eff7371d5' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) + @istest + def from_revision_invalid(self): + revision_input = { + 'id': hashutil.hex_to_hash( + '18d8be353ed3480476f032475e7c233eff7371d5'), + 'directory': hashutil.hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', + 'email': b'robot@softwareheritage.org', + }, + 'committer': { + 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', + 'email': b'robot@softwareheritage.org', + }, + 'message': b'invalid message \xff', + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': True, + 'type': 'tar', + 'parents': [ + hashutil.hex_to_hash( + '29d8be353ed3480476f032475e7c244eff7371d5'), + hashutil.hex_to_hash( + '30d8be353ed3480476f032475e7c244eff7371d5') + ], + 'children': [ + hashutil.hex_to_hash( + '123546353ed3480476f032475e7c244eff7371d5'), + ], + 'metadata': { + 'original_artifact': [{ + 'archive_type': 'tar', + 'name': 'webbase-5.7.0.tar.gz', + 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', + 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', + 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' + '309d36484e7edf7bb912', + + }] + }, + } + + expected_revision = { + 'id': '18d8be353ed3480476f032475e7c233eff7371d5', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'author': { + 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', + 'email': 'robot@softwareheritage.org', + }, + 'committer': { + 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', + 'email': 'robot@softwareheritage.org', + }, + 'message': None, + 'message_decoding_failed': True, + 'date': "2000-01-17T11:23:54+00:00", + 'committer_date': "2000-01-17T11:23:54+00:00", + 'children': [ + '123546353ed3480476f032475e7c244eff7371d5' + ], + 'parents': [ + '29d8be353ed3480476f032475e7c244eff7371d5', + '30d8be353ed3480476f032475e7c244eff7371d5' + ], + 'type': 'tar', + 'synthetic': 
True, + 'metadata': { + 'original_artifact': [{ + 'archive_type': 'tar', + 'name': 'webbase-5.7.0.tar.gz', + 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', + 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', + 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' + '309d36484e7edf7bb912' + }] + }, + } + + # when + actual_revision = converters.from_revision(revision_input) + + # then + self.assertEqual(actual_revision, expected_revision) + @istest def from_content(self): content_input = { 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'data': b'data in bytes', 'length': 10, 'status': 'hidden', } # 'status' is filtered expected_content = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' '7d3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'data': b'data in bytes', 'length': 10, 'status': 'absent', } # when actual_content = converters.from_content(content_input) # then self.assertEqual(actual_content, expected_content) @istest def from_person(self): person_input = { 'id': 10, 'anything': 'else', 'name': b'bob', + 'fullname': b'bob bob@alice.net', 'email': b'bob@foo.alice', } expected_person = { 'id': 10, 'anything': 'else', 'name': 'bob', + 'fullname': 'bob bob@alice.net', 'email': 'bob@foo.alice', } # when actual_person = converters.from_person(person_input) # then self.assertEqual(actual_person, expected_person) @istest def from_directory_entries(self): dir_entries_input = { 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'target': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'dir_id': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'name': b'bob', 'type': 10, 'status': 'hidden', } expected_dir_entries = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2747' 'd3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'bob', 'type': 10, 'status': 'absent', } # when actual_dir_entries = converters.from_directory_entry(dir_entries_input) # then self.assertEqual(actual_dir_entries, expected_dir_entries) diff --git a/swh/web/ui/tests/test_service.py b/swh/web/ui/tests/test_service.py index 1026c6e5..2ae2c728 100644 --- a/swh/web/ui/tests/test_service.py +++ b/swh/web/ui/tests/test_service.py @@ -1,1526 +1,1832 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import MagicMock, patch, call from swh.core.hashutil import hex_to_hash, hash_to_hex from swh.web.ui import service from swh.web.ui.exc import BadInputExc, NotFoundExc from swh.web.ui.tests import test_app class ServiceTestCase(test_app.SWHApiTestCase): @patch('swh.web.ui.service.backend') @istest def 
lookup_multiple_hashes_ball_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': True}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) @patch('swh.web.ui.service.backend') @istest def lookup_multiple_hashes_some_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[ hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') ]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) @patch('swh.web.ui.service.backend') @istest def lookup_hash_does_not_exist(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_lookup = service.lookup_hash( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': None, 'algo': 'sha1_git'}, actual_lookup) # check the function has been called with parameters mock_backend.content_find.assert_called_with( 'sha1_git', hex_to_hash('123caf10e9535160d90e874b45aa426de762f19f')) @patch('swh.web.ui.service.backend') @istest def lookup_hash_exist(self, mock_backend): # given stub_content = { 'sha1': hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') } mock_backend.content_find = MagicMock(return_value=stub_content) # when actual_lookup = service.lookup_hash( 'sha1:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': stub_content, 'algo': 'sha1'}, actual_lookup) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f'), ) + @patch('swh.web.ui.service.backend') + @istest + def search_hash_does_not_exist(self, mock_backend): + # given + mock_backend.content_find = MagicMock(return_value=None) + + # when + actual_lookup = service.search_hash( + 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') + + # then + self.assertEquals({'found': False}, actual_lookup) + + # check the function has been called with parameters + mock_backend.content_find.assert_called_with( + 'sha1_git', + hex_to_hash('123caf10e9535160d90e874b45aa426de762f19f')) + + @patch('swh.web.ui.service.backend') + @istest + def search_hash_exist(self, mock_backend): + # given + stub_content = { + 'sha1': hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') + } + mock_backend.content_find = MagicMock(return_value=stub_content) + + # when + actual_lookup = service.search_hash( + 'sha1:456caf10e9535160d90e874b45aa426de762f19f') + + # then + self.assertEquals({'found': True}, actual_lookup) + + mock_backend.content_find.assert_called_with( + 'sha1', + hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f'), + ) + @patch('swh.web.ui.service.backend') @istest def lookup_hash_origin(self, mock_backend): # given mock_backend.content_find_occurrence = MagicMock(return_value={ 'origin_type': 'sftp', 'origin_url': 'sftp://ftp.gnu.org/gnu/octave', 'branch': 
'octavio-3.4.0.tar.gz', 'revision': b'\xb0L\xaf\x10\xe9SQ`\xd9\x0e\x87KE\xaaBm\xe7b\xf1\x9f', # noqa 'path': b'octavio-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa }) expected_origin = { 'origin_type': 'sftp', 'origin_url': 'sftp://ftp.gnu.org/gnu/octave', 'branch': 'octavio-3.4.0.tar.gz', 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'path': 'octavio-3.4.0/doc/interpreter/octave.html/doc' '_002dS_005fISREG.html' } # when actual_origin = service.lookup_hash_origin( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_origin, expected_origin) mock_backend.content_find_occurrence.assert_called_with( 'sha1_git', hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f')) @patch('swh.web.ui.service.backend') @istest def stat_counters(self, mock_backend): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_backend.stat_counters = MagicMock(return_value=input_stats) # when actual_stats = service.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) mock_backend.stat_counters.assert_called_with() @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.hashutil') @istest def hash_and_search(self, mock_hashutil, mock_backend): # given bhash = hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') mock_hashutil.hashfile.return_value = {'sha1': bhash} mock_backend.content_find = MagicMock(return_value={ 'sha1': bhash, 'sha1_git': bhash, }) # when actual_content = service.hash_and_search('/some/path') # then self.assertEqual(actual_content, { 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'sha1_git': '456caf10e9535160d90e874b45aa426de762f19f', 'found': True, }) mock_hashutil.hashfile.assert_called_once_with('/some/path') mock_backend.content_find.assert_called_once_with('sha1', bhash) @patch('swh.web.ui.service.hashutil') @istest def hash_and_search_not_found(self, mock_hashutil): # given bhash = hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') mock_hashutil.hashfile.return_value = {'sha1': bhash} mock_hashutil.hash_to_hex = MagicMock( return_value='456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find = MagicMock(return_value=None) # when actual_content = service.hash_and_search('/some/path') # then self.assertEqual(actual_content, { 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False, }) mock_hashutil.hashfile.assert_called_once_with('/some/path') self.storage.content_find.assert_called_once_with({'sha1': bhash}) mock_hashutil.hash_to_hex.assert_called_once_with(bhash) @patch('swh.web.ui.service.upload') @istest def test_upload_and_search(self, mock_upload): mock_upload.save_in_upload_folder.return_value = ( '/tmp/dir', 'some-filename', '/tmp/dir/path/some-filename') service.hash_and_search = MagicMock(side_effect=lambda filepath: {'sha1': 'blah', 'found': True}) mock_upload.cleanup.return_value = None file = MagicMock(filename='some-filename') # when actual_res = service.upload_and_search(file) # then self.assertEqual(actual_res, { 'filename': 'some-filename', 'sha1': 'blah', 'found': True}) mock_upload.save_in_upload_folder.assert_called_with(file) mock_upload.cleanup.assert_called_with('/tmp/dir') service.hash_and_search.assert_called_once_with( 
'/tmp/dir/path/some-filename') @patch('swh.web.ui.service.backend') @istest def lookup_origin(self, mock_backend): # given mock_backend.origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when actual_origin = service.lookup_origin('origin-id') # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) mock_backend.origin_get.assert_called_with('origin-id') @patch('swh.web.ui.service.backend') @istest def lookup_release_ko_id_checksum_not_ok_because_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() with self.assertRaises(BadInputExc) as cm: # when service.lookup_release('not-a-sha1') self.assertIn('invalid checksum', cm.exception.args[0]) mock_backend.release_get.called = False @patch('swh.web.ui.service.backend') @istest def lookup_release_ko_id_checksum_ok_but_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() # when with self.assertRaises(BadInputExc) as cm: service.lookup_release( '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4daf5' '1aea892abe') self.assertIn('sha1_git supported', cm.exception.args[0]) mock_backend.release_get.called = False @patch('swh.web.ui.service.backend') @istest def lookup_directory_with_path_not_found(self, mock_backend): # given mock_backend.lookup_directory_with_path = MagicMock(return_value=None) sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertIsNone(actual_directory) @patch('swh.web.ui.service.backend') @istest def lookup_directory_with_path_found(self, mock_backend): # given sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' entry = {'id': 'dir-id', 'type': 'dir', 'name': 'some/path/foo'} mock_backend.lookup_directory_with_path = MagicMock(return_value=entry) # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertEqual(entry, actual_directory) @patch('swh.web.ui.service.backend') @istest def lookup_release(self, mock_backend): # given mock_backend.release_get = MagicMock(return_value={ 'id': hex_to_hash('65a55bbdf3629f916219feb3dcc7393ded1bc8db'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }) # when actual_release = service.lookup_release( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') # then self.assertEqual(actual_release, { 'id': '65a55bbdf3629f916219feb3dcc7393ded1bc8db', 'target': None, 'date': '2015-01-01T22:00:00-00:00', 'name': 'v0.0.1', 'message': 'synthetic release', 'synthetic': True, }) mock_backend.release_get.assert_called_with( hex_to_hash('65a55bbdf3629f916219feb3dcc7393ded1bc8db')) @istest def lookup_revision_with_context_ko_not_a_sha1_1(self): # given sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4' \ 'daf51aea892abe' sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Only sha1_git is supported', cm.exception.args[0]) @istest def lookup_revision_with_context_ko_not_a_sha1_2(self): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = 
'13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f6' \ '2d4daf51aea892abe' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Only sha1_git is supported', cm.exception.args[0]) @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_ko_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_bin = hex_to_hash(sha1_git) mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 777777bdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_called_once_with( sha1_git_bin) @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_ko_root_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_root_bin = hex_to_hash(sha1_git_root) sha1_git_bin = hex_to_hash(sha1_git) mock_backend.revision_get.side_effect = ['foo', None] # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 65a55bbdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_has_calls([call(sha1_git_bin), call(sha1_git_root_bin)]) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_revision_with_context(self, mock_query, mock_backend): # given sha1_git_root = '666' sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.side_effect = [ ('sha1', sha1_git_bin), ('sha1', sha1_git_root_bin) ] # lookup revision first 883, then 666 (both exists) mock_backend.revision_get.side_effect = [ sha1_git_dict, sha1_git_root_dict ] mock_backend.revision_log = MagicMock( return_value=stub_revisions) # when actual_revision = service.lookup_revision_with_context( sha1_git_root, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), }) mock_query.parse_hash_with_algorithms_or_throws.assert_has_calls( [call(sha1_git, ['sha1'], 'Only sha1_git is supported.'), call(sha1_git_root, ['sha1'], 'Only sha1_git is supported.')]) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_revision_with_context_sha1_git_root_already_retrieved_as_dict( self, mock_query, mock_backend): # given sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 
'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', sha1_git_bin) # lookup only on sha1 mock_backend.revision_get.return_value = sha1_git_dict mock_backend.revision_log.return_value = stub_revisions # when actual_revision = service.lookup_revision_with_context( {'id': sha1_git_root_bin}, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), }) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with( # noqa sha1_git, ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(sha1_git_bin) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_revision_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision('123') self.assertIn('Revision 123 not found', cm.exception.args[0]) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_revision_with_path_to_nowhere( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision( '123', 'path/to/something/unknown') self.assertIn("Directory/File 'path/to/something/unknown' " + "pointed to by revision 123 not found", cm.exception.args[0]) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'path/to/something/unknown') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_type_not_implemented( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'rev', 'name': b'some/path/to/rev', 'target': b'456' } stub_content = { 'id': b'12', 'type': 'file' } mock_backend.content_get.return_value = stub_content # when with self.assertRaises(NotImplementedError) as cm: service.lookup_directory_with_revision( '123', 'some/path/to/rev') self.assertIn("Entity of type 'rev' not implemented.", cm.exception.args[0]) # then mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 
'some/path/to/rev') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_without_path(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'123', 'type': 'dir' }, { 'id': b'456', 'type': 'file' }] mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_ls.assert_called_once_with(dir_id) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_dir(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'12', 'type': 'dir' }, { 'id': b'34', 'type': 'file' }] mock_backend.directory_entry_get_by_path.return_value = { 'type': 'dir', 'name': b'some/path', 'target': b'456' } mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123', 'some/path') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(actual_directory_entries['revision'], '123') self.assertEqual(actual_directory_entries['path'], 'some/path') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( dir_id, 'some/path') mock_backend.directory_ls.assert_called_once_with(b'456') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_file_without_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', } mock_backend.content_find.return_value = stub_content # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file') # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': stub_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def 
lookup_directory_with_revision_revision_with_path_to_file_with_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', 'sha1': b'content-sha1' } mock_backend.content_find.return_value = stub_content mock_backend.content_get.return_value = { 'sha1': b'content-sha1', 'data': b'some raw data' } expected_content = { 'status': 'visible', 'sha1': hash_to_hex(b'content-sha1'), 'data': b'some raw data' } # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file', with_data=True) # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': expected_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') mock_backend.content_get.assert_called_once_with(b'content-sha1') @patch('swh.web.ui.service.backend') @istest def lookup_revision(self, mock_backend): # given mock_backend.revision_get = MagicMock(return_value={ 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc, ).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc, ).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }) # when actual_revision = service.lookup_revision( '18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertEqual(actual_revision, { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'bill & boule', 'email': 'bill@boule.org', }, 'committer': { 'name': 'boule & bill', 'email': 'boule@bill.org', }, 'message': 'elegant fix for bug 31415957', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }) mock_backend.revision_get.assert_called_with( hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_invalid_msg(self, mock_backend): + # given + stub_rev = { + 'id': hex_to_hash('123456'), + 'directory': hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'bill & boule', + 'email': b'bill@boule.org', + }, + 'committer': { + 'name': b'boule & bill', + 'email': b'boule@bill.org', + }, + 'message': b'elegant fix for bug \xff', + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': 
datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + } + mock_backend.revision_get = MagicMock(return_value=stub_rev) + + # when + actual_revision = service.lookup_revision( + '18d8be353ed3480476f032475e7c233eff7371d5') + + # then + self.assertEqual(actual_revision, { + 'id': '123456', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'author': { + 'name': 'bill & boule', + 'email': 'bill@boule.org', + }, + 'committer': { + 'name': 'boule & bill', + 'email': 'boule@bill.org', + }, + 'message': None, + 'message_decoding_failed': True, + 'date': "2000-01-17T11:23:54+00:00", + 'committer_date': "2000-01-17T11:23:54+00:00", + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + }) + + mock_backend.revision_get.assert_called_with( + hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_msg_ok(self, mock_backend): + # given + mock_backend.revision_get.return_value = { + 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), + 'directory': hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'bill & boule', + 'email': b'bill@boule.org', + }, + 'committer': { + 'name': b'boule & bill', + 'email': b'boule@bill.org', + }, + 'message': b'elegant fix for bug 31415957', + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + } + + # when + rv = service.lookup_revision_message( + '18d8be353ed3480476f032475e7c233eff7371d5') + + # then + self.assertEquals(rv, {'message': b'elegant fix for bug 31415957'}) + mock_backend.revision_get.assert_called_with( + hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_msg_absent(self, mock_backend): + # given + mock_backend.revision_get.return_value = { + 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), + 'directory': hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'bill & boule', + 'email': b'bill@boule.org', + }, + 'committer': { + 'name': b'boule & bill', + 'email': b'boule@bill.org', + }, + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + } + + # when + with self.assertRaises(NotFoundExc) as cm: + service.lookup_revision_message( + '18d8be353ed3480476f032475e7c233eff7371d5') + + # then + mock_backend.revision_get.assert_called_with( + hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.assertEqual(cm.exception.args[0], 'No message for revision ' + 'with sha1_git ' + '18d8be353ed3480476f032475e7c233eff7371d5.') + + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_msg_norev(self, 
mock_backend): + # given + mock_backend.revision_get.return_value = None + + # when + with self.assertRaises(NotFoundExc) as cm: + service.lookup_revision_message( + '18d8be353ed3480476f032475e7c233eff7371d5') + + # then + mock_backend.revision_get.assert_called_with( + hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.assertEqual(cm.exception.args[0], 'Revision with sha1_git ' + '18d8be353ed3480476f032475e7c233eff7371d5 ' + 'not found.') + @patch('swh.web.ui.service.backend') @istest def lookup_revision_log(self, mock_backend): # given stub_revision_log = [{ 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc, ).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc, ).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] mock_backend.revision_log = MagicMock(return_value=stub_revision_log) # when actual_revision = service.lookup_revision_log( 'abcdbe353ed3480476f032475e7c233eff7371d5') # then self.assertEqual(list(actual_revision), [{ 'id': '28d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'bill & boule', 'email': 'bill@boule.org', }, 'committer': { 'name': 'boule & bill', 'email': 'boule@bill.org', }, 'message': 'elegant fix for bug 31415957', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }]) mock_backend.revision_log.assert_called_with( hex_to_hash('abcdbe353ed3480476f032475e7c233eff7371d5'), 100) + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_log_by(self, mock_backend): + # given + stub_revision_log = [{ + 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), + 'directory': hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'bill & boule', + 'email': b'bill@boule.org', + }, + 'committer': { + 'name': b'boule & bill', + 'email': b'boule@bill.org', + }, + 'message': b'elegant fix for bug 31415957', + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + }] + mock_backend.revision_log_by = MagicMock( + return_value=stub_revision_log) + + # when + actual_log = service.lookup_revision_log_by( + 1, 'refs/heads/master', None) + # then + self.assertEqual(list(actual_log), [{ + 'id': '28d8be353ed3480476f032475e7c233eff7371d5', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'author': { + 'name': 'bill & boule', + 'email': 'bill@boule.org', + }, + 'committer': { + 'name': 'boule & bill', + 'email': 'boule@bill.org', + }, + 'message': 'elegant fix for bug 31415957', + 'date': "2000-01-17T11:23:54+00:00", + 'committer_date': "2000-01-17T11:23:54+00:00", + 
'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + }]) + + mock_backend.revision_log_by.assert_called_with( + 1, 'refs/heads/master', None) + + @patch('swh.web.ui.service.backend') + @istest + def lookup_revision_log_by_nolog(self, mock_backend): + # given + mock_backend.revision_log_by = MagicMock(return_value=None) + + # when + res = service.lookup_revision_log_by( + 1, 'refs/heads/master', None) + # then + self.assertEquals(res, None) + mock_backend.revision_log_by.assert_called_with( + 1, 'refs/heads/master', None) + @patch('swh.web.ui.service.backend') @istest def lookup_content_raw_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content_raw( 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) @patch('swh.web.ui.service.backend') @istest def lookup_content_raw(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value={ 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', }) mock_backend.content_get = MagicMock(return_value={ 'data': b'binary data'}) # when actual_content = service.lookup_content_raw( 'sha256:39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926') # then self.assertEquals(actual_content, {'data': b'binary data'}) mock_backend.content_find.assert_called_once_with( 'sha256', hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926')) mock_backend.content_get.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.service.backend') @istest def lookup_content_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content( 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) @patch('swh.web.ui.service.backend') @istest def lookup_content_with_sha1(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value={ 'sha1': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'length': 190, 'status': 'hidden', }) # when actual_content = service.lookup_content( 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertEqual(actual_content, { 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' '7d3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'length': 190, 'status': 'absent', }) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) @patch('swh.web.ui.service.backend') @istest def lookup_content_with_sha256(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value={ 'sha1': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'length': 360, 'status': 'visible', }) # when actual_content = service.lookup_content( 'sha256:39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926') # 
then self.assertEqual(actual_content, { 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' '7d3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'length': 360, 'status': 'visible', }) mock_backend.content_find.assert_called_with( 'sha256', hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926')) @patch('swh.web.ui.service.backend') @istest def lookup_person(self, mock_backend): # given mock_backend.person_get = MagicMock(return_value={ 'id': 'person_id', 'name': b'some_name', 'email': b'some-email', }) # when actual_person = service.lookup_person('person_id') # then self.assertEqual(actual_person, { 'id': 'person_id', 'name': 'some_name', 'email': 'some-email', }) mock_backend.person_get.assert_called_with('person_id') @patch('swh.web.ui.service.backend') @istest def lookup_directory_bad_checksum(self, mock_backend): # given mock_backend.directory_ls = MagicMock() # when with self.assertRaises(BadInputExc): service.lookup_directory('directory_id') # then mock_backend.directory_ls.called = False @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-id-bin') mock_backend.directory_get.return_value = None # when actual_dir = service.lookup_directory('directory_id') # then self.assertIsNone(actual_dir) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory_id', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_get.assert_called_with('directory-id-bin') mock_backend.directory_ls.called = False @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory(self, mock_query, mock_backend): mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-sha1-bin') # something that exists is all that matters here mock_backend.directory_get.return_value = {'id': b'directory-sha1-bin'} # given stub_dir_entries = [{ 'sha1': hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'target': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'dir_id': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'name': b'bob', 'type': 10, }] expected_dir_entries = [{ 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2747' 'd3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'bob', 'type': 10, }] mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_ls = list(service.lookup_directory( 'directory-sha1')) # then self.assertEqual(actual_directory_ls, expected_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory-sha1', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_ls.assert_called_with( 'directory-sha1-bin') @patch('swh.web.ui.service.backend') @istest def lookup_revision_by_nothing_found(self, mock_backend): # given mock_backend.revision_get_by.return_value = None # when actual_revisions = service.lookup_revision_by(1) # then self.assertIsNone(actual_revisions) 
mock_backend.revision_get_by(1, 'master', None) @patch('swh.web.ui.service.backend') @istest def lookup_revision_by(self, mock_backend): # given stub_rev = { 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'ynot', 'email': b'ynot@blah.org', }, 'committer': { 'name': b'ynot', 'email': b'ynot@blah.org', }, 'message': b'elegant solution 31415', 'date': { 'timestamp': datetime.datetime( 2016, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 420, 'negative_utc': None, }, 'committer_date': { 'timestamp': datetime.datetime( 2016, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 420, 'negative_utc': None, }, } expected_rev = { 'id': '28d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'ynot', 'email': 'ynot@blah.org', }, 'committer': { 'name': 'ynot', 'email': 'ynot@blah.org', }, 'message': 'elegant solution 31415', 'date': '2016-01-17T18:23:54+07:00', 'committer_date': '2016-01-17T18:23:54+07:00', } mock_backend.revision_get_by.return_value = stub_rev # when actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts') # then self.assertEquals(actual_revision, expected_rev) mock_backend.revision_get_by(1, 'master2', 'some-ts') @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_by_ko(self, mock_backend): # given mock_backend.revision_get_by.return_value = None # when with self.assertRaises(NotFoundExc) as cm: origin_id = 1 branch_name = 'master3' ts = None service.lookup_revision_with_context_by(origin_id, branch_name, ts, 'sha1') # then self.assertIn( 'Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' 
% (origin_id, branch_name, ts), cm.exception.args[0]) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) @patch('swh.web.ui.service.lookup_revision_with_context') @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_by(self, mock_backend, mock_lookup_revision_with_context): # given stub_root_rev = {'id': 'root-rev-id'} mock_backend.revision_get_by.return_value = {'id': 'root-rev-id'} stub_rev = {'id': 'rev-found'} mock_lookup_revision_with_context.return_value = stub_rev # when origin_id = 1 branch_name = 'master3' ts = None sha1_git = 'sha1' actual_root_rev, actual_rev = service.lookup_revision_with_context_by( origin_id, branch_name, ts, sha1_git) # then self.assertEquals(actual_root_rev, stub_root_rev) self.assertEquals(actual_rev, stub_rev) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) mock_lookup_revision_with_context.assert_called_once_with( stub_root_rev, sha1_git, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_entity_by_uuid(self, mock_query, mock_backend): # given uuid_test = 'correct-uuid' mock_query.parse_uuid4.return_value = uuid_test stub_entities = [{'uuid': uuid_test}] mock_backend.entity_get.return_value = stub_entities # when actual_entities = service.lookup_entity_by_uuid(uuid_test) # then self.assertEquals(actual_entities, stub_entities) mock_query.parse_uuid4.assert_called_once_with(uuid_test) mock_backend.entity_get.assert_called_once_with(uuid_test) @istest def lookup_revision_through_ko_not_implemented(self): # then with self.assertRaises(NotImplementedError): service.lookup_revision_through({ 'something-unknown': 10, }) @patch('swh.web.ui.service.lookup_revision_with_context_by') @istest def lookup_revision_through_with_context_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 1, 'branch_name': 'master', 'ts': None, 'sha1_git': 'sha1-git' }, limit=1000) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 1, 'master', None, 'sha1-git', 1000) @patch('swh.web.ui.service.lookup_revision_by') @istest def lookup_revision_through_with_revision_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 2, 'branch_name': 'master2', 'ts': 'some-ts', }, limit=10) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 2, 'master2', 'some-ts') @patch('swh.web.ui.service.lookup_revision_with_context') @istest def lookup_revision_through_with_context(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git_root': 'some-sha1-root', 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1-root', 'some-sha1', 100) @patch('swh.web.ui.service.lookup_revision') @istest def lookup_revision_through_with_revision(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1') @patch('swh.web.ui.service.lookup_revision_through') @istest def lookup_directory_through_revision_ko_not_found( 
self, mock_lookup_rev): # given mock_lookup_rev.return_value = None # when with self.assertRaises(NotFoundExc): service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) @patch('swh.web.ui.service.lookup_revision_through') @patch('swh.web.ui.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_data( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} mock_lookup_dir.return_value = {'type': 'dir', 'content': []} # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, {'type': 'dir', 'content': []}) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', False) @patch('swh.web.ui.service.lookup_revision_through') @patch('swh.web.ui.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_content( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} stub_result = {'type': 'file', 'revision': 'rev-id', 'content': {'data': b'blah', 'sha1': 'sha1'}} mock_lookup_dir.return_value = stub_result # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 10, with_data=True) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, stub_result) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 10) mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', True) diff --git a/swh/web/ui/tests/test_utils.py b/swh/web/ui/tests/test_utils.py index 98096d88..ed037ea1 100644 --- a/swh/web/ui/tests/test_utils.py +++ b/swh/web/ui/tests/test_utils.py @@ -1,572 +1,661 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import dateutil import unittest from unittest.mock import patch, call -from nose.tools import istest +from nose.tools import istest, nottest from swh.web.ui import utils class UtilsTestCase(unittest.TestCase): def setUp(self): self.url_map = [dict(rule='/other/', methods=set(['GET', 'POST', 'HEAD']), endpoint='foo'), dict(rule='/some/old/url/', methods=set(['GET', 'POST']), endpoint='blablafn'), dict(rule='/other/old/url/', methods=set(['GET', 'HEAD']), endpoint='bar'), dict(rule='/other', methods=set([]), endpoint=None), dict(rule='/other2', methods=set([]), endpoint=None)] @istest def filter_endpoints_1(self): # when actual_data = utils.filter_endpoints(self.url_map, '/some') # then self.assertEquals(actual_data, { '/some/old/url/': { 'methods': ['GET', 'POST'], 'endpoint': 'blablafn' } }) @istest def filter_endpoints_2(self): # when actual_data = utils.filter_endpoints(self.url_map, '/other', blacklist=['/other2']) # then # rule /other is skipped because it's exactly the prefix url # rule /other2 is skipped because it's blacklisted self.assertEquals(actual_data, { '/other/': { 'methods': ['GET', 'HEAD', 'POST'], 'endpoint': 'foo' }, '/other/old/url/': { 'methods': ['GET', 'HEAD'], 'endpoint': 'bar' } }) @istest def prepare_data_for_view_default_encoding(self): self.maxDiff = None # given inputs = [ { 'data': b'some blah data' }, { 'data': 1, 'data_url': '/api/1/some/api/call', }, {
'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' }] # when actual_result = utils.prepare_data_for_view(inputs) # then self.assertEquals(actual_result, [ { 'data': 'some blah data', }, { 'data': 1, 'data_url': '/browse/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' } ]) @istest def prepare_data_for_view(self): self.maxDiff = None # given inputs = [ { 'data': b'some blah data' }, { 'data': 1, 'data_url': '/api/1/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' }] # when actual_result = utils.prepare_data_for_view(inputs, encoding='ascii') # then self.assertEquals(actual_result, [ { 'data': 'some blah data', }, { 'data': 1, 'data_url': '/browse/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' } ]) @istest def prepare_data_for_view_ko_cannot_decode(self): self.maxDiff = None # given inputs = { 'data': 'hé dude!'.encode('utf8'), } actual_result = utils.prepare_data_for_view(inputs, encoding='ascii') # then self.assertEquals(actual_result, { 'data': "Cannot decode the data bytes, try and set another " "encoding in the url (e.g. ?encoding=utf8) or " "download directly the " "content's raw data.", }) @istest def filter_field_keys_dict_unknown_keys(self): # when actual_res = utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory1', 'file2'}) # then self.assertEqual(actual_res, {}) @istest def filter_field_keys_dict(self): # when actual_res = utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory', 'link'}) # then self.assertEqual(actual_res, {'directory': 1, 'link': 3}) @istest def filter_field_keys_list_unknown_keys(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'1': 1, '2': 2, 'link': 3}], {'d'}) # then self.assertEqual(actual_res, [{}, {}]) @istest def filter_field_keys_list(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'dir': 1, 'fil': 2, 'lin': 3}], {'directory', 'dir'}) # then self.assertEqual(actual_res, [{'directory': 1}, {'dir': 1}]) @istest def filter_field_keys_other(self): # given input_set = {1, 2} # when actual_res = utils.filter_field_keys(input_set, {'a', '1'}) # then self.assertEqual(actual_res, input_set) @istest def fmap(self): self.assertEquals([2, 3, 4], utils.fmap(lambda x: x+1, [1, 2, 3])) self.assertEquals([11, 12, 13], list(utils.fmap(lambda x: x+10, map(lambda x: x, [1, 2, 3])))) self.assertEquals({'a': 2, 'b': 4}, utils.fmap(lambda x: x*2, {'a': 1, 'b': 2})) self.assertEquals(100, utils.fmap(lambda x: x*10, 10)) self.assertEquals({'a': [2, 6], 'b': 4}, utils.fmap(lambda x: x*2, {'a': [1, 3], 'b': 2})) @istest def person_to_string(self): self.assertEqual(utils.person_to_string(dict(name='raboof', email='foo@bar')), 'raboof ') @istest def parse_timestamp(self): input_timestamps = [ '2016-01-12', '2016-01-12T09:19:12+0100', 'Today is January 1, 2047 at 8:21:00AM', '1452591542', ] output_dates = [ - datetime.datetime(2016, 1, 12, 0, 0), + datetime.datetime(2016, 1, 12, 0, 0, tzinfo=datetime.timezone.utc), datetime.datetime(2016, 1, 12, 9, 19, 12, tzinfo=dateutil.tz.tzoffset(None, 3600)), - datetime.datetime(2047, 1, 1, 8, 21), - datetime.datetime(2016, 1, 12, 10, 39, 2), + datetime.datetime(2047, 1, 1, 8, 21, tzinfo=datetime.timezone.utc), + datetime.datetime(2016, 1, 12, 9, 39, 2, + tzinfo=datetime.timezone.utc), ] for ts, exp_date in zip(input_timestamps, output_dates): self.assertEquals(utils.parse_timestamp(ts), exp_date) @istest 
def enrich_release_0(self): # when actual_release = utils.enrich_release({}) # then self.assertEqual(actual_release, {}) @patch('swh.web.ui.utils.flask') @istest def enrich_release_1(self, mock_flask): # given mock_flask.url_for.return_value = '/api/1/content/sha1_git:123/' # when actual_release = utils.enrich_release({'target': '123', 'target_type': 'content'}) # then self.assertEqual(actual_release, { 'target': '123', 'target_type': 'content', 'target_url': '/api/1/content/sha1_git:123/' }) mock_flask.url_for.assert_called_once_with('api_content_metadata', q='sha1_git:123') @patch('swh.web.ui.utils.flask') @istest def enrich_release_2(self, mock_flask): # given mock_flask.url_for.return_value = '/api/1/dir/23/' # when actual_release = utils.enrich_release({'target': '23', 'target_type': 'directory'}) # then self.assertEqual(actual_release, { 'target': '23', 'target_type': 'directory', 'target_url': '/api/1/dir/23/' }) mock_flask.url_for.assert_called_once_with('api_directory', q='23') @patch('swh.web.ui.utils.flask') @istest def enrich_release_3(self, mock_flask): # given mock_flask.url_for.return_value = '/api/1/rev/3/' # when actual_release = utils.enrich_release({'target': '3', 'target_type': 'revision'}) # then self.assertEqual(actual_release, { 'target': '3', 'target_type': 'revision', 'target_url': '/api/1/rev/3/' }) mock_flask.url_for.assert_called_once_with('api_revision', sha1_git='3') @patch('swh.web.ui.utils.flask') @istest def enrich_release_4(self, mock_flask): # given mock_flask.url_for.return_value = '/api/1/rev/4/' # when actual_release = utils.enrich_release({'target': '4', 'target_type': 'release'}) # then self.assertEqual(actual_release, { 'target': '4', 'target_type': 'release', 'target_url': '/api/1/rev/4/' }) mock_flask.url_for.assert_called_once_with('api_release', sha1_git='4') @patch('swh.web.ui.utils.flask') @istest def enrich_directory_no_type(self, mock_flask): # when/then self.assertEqual(utils.enrich_directory({'id': 'dir-id'}), {'id': 'dir-id'}) # given mock_flask.url_for.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'target': '123', }) # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'target': '123', 'target_url': '/api/content/sha1_git:123/', }) mock_flask.url_for.assert_called_once_with('api_content_metadata', q='sha1_git:123') @patch('swh.web.ui.utils.flask') @istest def enrich_directory_with_context_and_type_file(self, mock_flask): # given mock_flask.url_for.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', }, context_url='/api/revision/revsha1/directory/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', 'target_url': '/api/content/sha1_git:123/', 'file_url': '/api/revision/revsha1/directory' '/prefix/path/hy/' }) mock_flask.url_for.assert_called_once_with('api_content_metadata', q='sha1_git:789') @patch('swh.web.ui.utils.flask') @istest def enrich_directory_with_context_and_type_dir(self, mock_flask): # given mock_flask.url_for.return_value = '/api/directory/456/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'dir', 'name': 'emacs-42', 'target_type': 'file', 'target': '456', }, context_url='/api/revision/origin/2/directory/some/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'dir', 'target_type': 'file', 
'name': 'emacs-42', 'target': '456', 'target_url': '/api/directory/456/', 'dir_url': '/api/revision/origin/2/directory' '/some/prefix/path/emacs-42/' }) mock_flask.url_for.assert_called_once_with('api_directory', sha1_git='456') @istest def enrich_content_without_sha1(self): # when/then self.assertEqual(utils.enrich_content({'id': '123'}), {'id': '123'}) @patch('swh.web.ui.utils.flask') @istest def enrich_content_with_sha1(self, mock_flask): # given mock_flask.url_for.return_value = '/api/content/sha1:123/raw/' # when/then self.assertEqual(utils.enrich_content( {'id': '123', 'sha1': 'blahblah'}), {'id': '123', 'sha1': 'blahblah', 'data_url': '/api/content/sha1:123/raw/'}) mock_flask.url_for.assert_called_once_with('api_content_raw', q='blahblah') @istest def enrich_entity_identity(self): # when/then self.assertEqual(utils.enrich_content({'id': '123'}), {'id': '123'}) @patch('swh.web.ui.utils.flask') @istest def enrich_entity_with_sha1(self, mock_flask): # given def url_for_test(fn, **entity): return '/api/entity/' + entity['uuid'] + '/' mock_flask.url_for.side_effect = url_for_test # when actual_entity = utils.enrich_entity({ 'uuid': 'uuid-1', 'parent': 'uuid-parent', 'name': 'something' }) # then self.assertEqual(actual_entity, { 'uuid': 'uuid-1', 'uuid_url': '/api/entity/uuid-1/', 'parent': 'uuid-parent', 'parent_url': '/api/entity/uuid-parent/', 'name': 'something', }) mock_flask.url_for.assert_has_calls([call('api_entity_by_uuid', uuid='uuid-1'), call('api_entity_by_uuid', uuid='uuid-parent')]) @patch('swh.web.ui.utils.flask') @istest def enrich_revision_without_children_or_parent(self, mock_flask): # given def url_for_test(fn, **data): - print(fn, data) if fn == 'api_revision': return '/api/revision/' + data['sha1_git'] + '/' elif fn == 'api_revision_log': return '/api/revision/' + data['sha1_git'] + '/log/' elif fn == 'api_directory': return '/api/directory/' + data['sha1_git'] + '/' elif fn == 'api_person': return '/api/person/' + data['person_id'] + '/' mock_flask.url_for.side_effect = url_for_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'directory': '123', 'author': {'id': '1'}, 'committer': {'id': '2'}, }) # then self.assertEqual(actual_revision, { 'id': 'rev-id', 'directory': '123', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'directory_url': '/api/directory/123/', 'author': {'id': '1'}, 'author_url': '/api/person/1/', 'committer': {'id': '2'}, 'committer_url': '/api/person/2/' }) mock_flask.url_for.assert_has_calls([call('api_revision', sha1_git='rev-id'), call('api_revision_log', sha1_git='rev-id'), call('api_person', person_id='1'), call('api_person', person_id='2'), call('api_directory', sha1_git='123')]) @patch('swh.web.ui.utils.flask') @istest def enrich_revision_with_children_and_parent_no_dir(self, mock_flask): # given def url_for_test(fn, **data): - print(fn, data) if fn == 'api_revision': return '/api/revision/' + data['sha1_git'] + '/' elif fn == 'api_revision_log': return '/api/revision/' + data['sha1_git'] + '/log/' else: return '/api/revision/' + data['sha1_git_root'] + '/history/' + data['sha1_git'] + '/' # noqa mock_flask.url_for.side_effect = url_for_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456'], }, context='sha1_git_root') # then self.assertEqual(actual_revision, { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'parents': ['123'], 'parent_urls': ['/api/revision/sha1_git_root/history/123/'], 
'children': ['456'], 'children_urls': ['/api/revision/sha1_git_root/history/456/'], }) mock_flask.url_for.assert_has_calls( [call('api_revision', sha1_git='rev-id'), call('api_revision_log', sha1_git='rev-id'), call('api_revision_history', sha1_git_root='sha1_git_root', sha1_git='123'), call('api_revision_history', sha1_git_root='sha1_git_root', sha1_git='456')]) + + @nottest + def _url_for_rev_message_test(self, fn, **data): + if fn == 'api_revision': + return '/api/revision/' + data['sha1_git'] + '/' + elif fn == 'api_revision_log': + return '/api/revision/' + data['sha1_git'] + '/log/' + elif fn == 'api_revision_raw_message': + return '/api/revision/' + data['sha1_git'] + '/raw/' + else: + return '/api/revision/' + data['sha1_git_root'] + '/history/' + data['sha1_git'] + '/' # noqa + + @patch('swh.web.ui.utils.flask') + @istest + def enrich_revision_with_no_message(self, mock_flask): + # given + mock_flask.url_for.side_effect = self._url_for_rev_message_test + + # when + actual_revision = utils.enrich_revision({ + 'id': 'rev-id', + 'message': None, + 'parents': ['123'], + 'children': ['456'], + }, context='sha1_git_root') + + # then + self.assertEqual(actual_revision, { + 'id': 'rev-id', + 'url': '/api/revision/rev-id/', + 'message': None, + 'history_url': '/api/revision/rev-id/log/', + 'parents': ['123'], + 'parent_urls': ['/api/revision/sha1_git_root/history/123/'], + 'children': ['456'], + 'children_urls': ['/api/revision/sha1_git_root/history/456/'], + }) + + mock_flask.url_for.assert_has_calls( + [call('api_revision', + sha1_git='rev-id'), + call('api_revision_log', + sha1_git='rev-id'), + call('api_revision_history', + sha1_git_root='sha1_git_root', + sha1_git='123'), + call('api_revision_history', + sha1_git_root='sha1_git_root', + sha1_git='456')]) + + @patch('swh.web.ui.utils.flask') + @istest + def enrich_revision_with_invalid_message(self, mock_flask): + # given + mock_flask.url_for.side_effect = self._url_for_rev_message_test + + # when + actual_revision = utils.enrich_revision({ + 'id': 'rev-id', + 'message': None, + 'message_decoding_failed': True, + 'parents': ['123'], + 'children': ['456'], + }, context='sha1_git_root') + + # then + self.assertEqual(actual_revision, { + 'id': 'rev-id', + 'url': '/api/revision/rev-id/', + 'message': None, + 'message_decoding_failed': True, + 'message_url': '/api/revision/rev-id/raw/', + 'history_url': '/api/revision/rev-id/log/', + 'parents': ['123'], + 'parent_urls': ['/api/revision/sha1_git_root/history/123/'], + 'children': ['456'], + 'children_urls': ['/api/revision/sha1_git_root/history/456/'], + }) + + mock_flask.url_for.assert_has_calls( + [call('api_revision', + sha1_git='rev-id'), + call('api_revision_log', + sha1_git='rev-id'), + call('api_revision_history', + sha1_git_root='sha1_git_root', + sha1_git='123'), + call('api_revision_history', + sha1_git_root='sha1_git_root', + sha1_git='456')]) diff --git a/swh/web/ui/tests/views/test_api.py b/swh/web/ui/tests/views/test_api.py index 61d4747f..a16f362f 100644 --- a/swh/web/ui/tests/views/test_api.py +++ b/swh/web/ui/tests/views/test_api.py @@ -1,1907 +1,2063 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import json import unittest import yaml from nose.tools import istest from unittest.mock import patch, MagicMock from swh.web.ui.tests import test_app from swh.web.ui import 
exc from swh.web.ui.views import api from swh.web.ui.exc import NotFoundExc, BadInputExc from swh.storage.exc import StorageDBError, StorageAPIError class ApiTestCase(test_app.SWHApiTestCase): + @istest def generic_api_lookup_nothing_is_found(self): # given def test_generic_lookup_fn(sha1, another_unused_arg): assert another_unused_arg == 'unused arg' assert sha1 == 'sha1' return None # when with self.assertRaises(NotFoundExc) as cm: api._api_lookup('sha1', test_generic_lookup_fn, 'This will be raised because None is returned.', lambda x: x, 'unused arg') self.assertIn('This will be raised because None is returned.', cm.exception.args[0]) @istest def generic_api_map_are_enriched_and_transformed_to_list(self): # given def test_generic_lookup_fn_1(criteria0, param0, param1): assert criteria0 == 'something' return map(lambda x: x + 1, [1, 2, 3]) # when actual_result = api._api_lookup( 'something', test_generic_lookup_fn_1, 'This is not the error message you are looking for. Move along.', lambda x: x * 2, 'some param 0', 'some param 1') self.assertEqual(actual_result, [4, 6, 8]) @istest def generic_api_list_are_enriched_too(self): # given def test_generic_lookup_fn_2(crit): assert crit == 'something' return ['a', 'b', 'c'] # when actual_result = api._api_lookup( 'something', test_generic_lookup_fn_2, 'Not the error message you are looking for, it is. ' 'Along, you move!', lambda x: ''. join(['=', x, '='])) self.assertEqual(actual_result, ['=a=', '=b=', '=c=']) @istest def generic_api_generator_are_enriched_and_returned_as_list(self): # given def test_generic_lookup_fn_3(crit): assert crit == 'crit' return (i for i in [4, 5, 6]) # when actual_result = api._api_lookup( 'crit', test_generic_lookup_fn_3, 'Move!', lambda x: x - 1) self.assertEqual(actual_result, [3, 4, 5]) @istest def generic_api_simple_data_are_enriched_and_returned_too(self): # given def test_generic_lookup_fn_4(crit): assert crit == '123' return {'a': 10} def test_enrich_data(x): x['a'] = x['a'] * 10 return x # when actual_result = api._api_lookup( '123', test_generic_lookup_fn_4, 'Nothing to do', test_enrich_data) self.assertEqual(actual_result, {'a': 100}) @patch('swh.web.ui.views.api.service') # @istest def api_content_checksum_to_origin(self, mock_service): mock_service.lookup_hash.return_value = {'found': True} stub_origin = { "lister": None, "url": "rsync://ftp.gnu.org/old-gnu/webbase", "type": "ftp", "id": 2, "project": None } mock_service.lookup_hash_origin.return_value = stub_origin # when rv = self.app.get( '/api/1/browse/sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_origin) mock_service.lookup_hash.assert_called_once_with( 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_hash_origin.assert_called_once_with( 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') # @istest def api_content_checksum_to_origin_sha_not_found(self, mock_service): # given mock_service.lookup_hash.return_value = {'found': False} # when rv = self.app.get( '/api/1/browse/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 not found.' 
}) mock_service.lookup_hash.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_metadata(self, mock_service): # given mock_service.lookup_content.return_value = { 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560' 'cde9b067a4f', 'length': 17, 'status': 'visible' } # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'data_url': '/api/1/content/' '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/raw/', 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560c' 'de9b067a4f', 'length': 17, 'status': 'visible' }) mock_service.lookup_content.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_not_found_as_json(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_hash_origin = MagicMock() # when rv = self.app.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' }) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_hash_origin.called = False @patch('swh.web.ui.views.api.service') @istest def api_content_not_found_as_yaml(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_hash_origin = MagicMock() # when rv = self.app.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/', headers={'accept': 'application/yaml'}) self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/yaml') response_data = yaml.load(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' }) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_hash_origin.called = False @patch('swh.web.ui.views.api.service') @istest def api_content_raw_ko_not_found(self, mock_service): # given mock_service.lookup_content_raw.return_value = None # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 not found.' 
}) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_raw(self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/', headers={'Content-type': 'application/octet-stream', 'Content-disposition': 'attachment'}) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/octet-stream') self.assertEquals(rv.data, stub_content['data']) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_search(self, mock_service): # given - mock_service.lookup_hash.return_value = { - 'found': { - 'sha1': 'or something' - } + mock_service.search_hash.return_value = {'found': True} + + expected_result = { + 'search_stats': {'nbfiles': 1, 'pct': 100}, + 'search_res': [{'filename': None, + 'sha1': 'sha1:blah', + 'found': True}] } # when rv = self.app.get('/api/1/search/sha1:blah/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, {'found': True}) - mock_service.lookup_hash.assert_called_once_with('sha1:blah') + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, expected_result) + mock_service.search_hash.assert_called_once_with('sha1:blah') @patch('swh.web.ui.views.api.service') @istest def api_search_as_yaml(self, mock_service): # given - mock_service.lookup_hash.return_value = { - 'found': { - 'sha1': 'sha1 hash' - } + mock_service.search_hash.return_value = {'found': True} + expected_result = { + 'search_stats': {'nbfiles': 1, 'pct': 100}, + 'search_res': [{'filename': None, + 'sha1': 'sha1:halb', + 'found': True}] } # when rv = self.app.get('/api/1/search/sha1:halb/', headers={'Accept': 'application/yaml'}) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/yaml') response_data = yaml.load(rv.data.decode('utf-8')) - self.assertEquals(response_data, {'found': True}) + self.assertEquals(response_data, expected_result) - mock_service.lookup_hash.assert_called_once_with('sha1:halb') + mock_service.search_hash.assert_called_once_with('sha1:halb') @patch('swh.web.ui.views.api.service') @istest def api_search_not_found(self, mock_service): # given - mock_service.lookup_hash.return_value = {} + mock_service.search_hash.return_value = {'found': False} + + expected_result = { + 'search_stats': {'nbfiles': 1, 'pct': 0}, + 'search_res': [{'filename': None, + 'sha1': 'sha1:halb', + 'found': False}] + } # when rv = self.app.get('/api/1/search/sha1:halb/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, {'found': False}) + self.assertEquals(response_data, expected_result) - mock_service.lookup_hash.assert_called_once_with('sha1:halb') + mock_service.search_hash.assert_called_once_with('sha1:halb') @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_error(self, mock_service): # given mock_service.stat_counters.side_effect = ValueError( 'voluntary error to check the bad request middleware.') # when rv = self.app.get('/api/1/stat/counters/') # then 
self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'voluntary error to check the bad request middleware.'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_swh_storage_error_db(self, mock_service): # given mock_service.stat_counters.side_effect = StorageDBError( 'SWH Storage exploded! Will be back online shortly!') # when rv = self.app.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the backend: ' 'SWH Storage exploded! Will be back online shortly!'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_swh_storage_error_api(self, mock_service): # given mock_service.stat_counters.side_effect = StorageAPIError( 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' ) # when rv = self.app.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the api backend: ' 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' }) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters(self, mock_service): # given stub_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_service.stat_counters.return_value = stub_stats # when rv = self.app.get('/api/1/stat/counters/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_stats) mock_service.stat_counters.assert_called_once_with() @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.request') @istest def api_uploadnsearch_bad_input(self, mock_request, mock_service): # given mock_request.files = {} # when rv = self.app.post('/api/1/uploadnsearch/') self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': "Bad request, missing 'filename' entry in form."}) mock_service.upload_and_search.called = False @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.request') @istest def api_uploadnsearch(self, mock_request, mock_service): # given mock_request.files = {'filename': 'simple-filename'} mock_service.upload_and_search.return_value = { 'filename': 'simple-filename', 'sha1': 'some-hex-sha1', 'found': False, } # when rv = self.app.post('/api/1/uploadnsearch/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, {'filename': 'simple-filename', 'sha1': 'some-hex-sha1', 'found': False}) mock_service.upload_and_search.assert_called_once_with( 'simple-filename') @patch('swh.web.ui.views.api.service') @istest def api_origin(self, mock_service): # given stub_origin = { 
'id': 1234, 'lister': 'uuid-lister-0', 'project': 'uuid-project-0', 'url': 'ftp://some/url/to/origin/0', 'type': 'ftp' } mock_service.lookup_origin.return_value = stub_origin # when rv = self.app.get('/api/1/origin/1234/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_origin) mock_service.lookup_origin.assert_called_with(1234) @patch('swh.web.ui.views.api.service') @istest def api_origin_not_found(self, mock_service): # given mock_service.lookup_origin.return_value = None # when rv = self.app.get('/api/1/origin/4321/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Origin with id 4321 not found.' }) mock_service.lookup_origin.assert_called_with(4321) @patch('swh.web.ui.views.api.service') @istest def api_release(self, mock_service): # given stub_release = { 'id': 'release-0', 'target_type': 'revision', 'target': 'revision-sha1', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } expected_release = { 'id': 'release-0', 'target_type': 'revision', 'target': 'revision-sha1', 'target_url': '/api/1/revision/revision-sha1/', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_release) mock_service.lookup_release.assert_called_once_with('release-0') @patch('swh.web.ui.views.api.service') @istest def api_release_target_type_not_a_revision(self, mock_service): # given stub_release = { 'id': 'release-0', 'target_type': 'other-stuff', 'target': 'other-stuff-checksum', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } expected_release = { 'id': 'release-0', 'target_type': 'other-stuff', 'target': 'other-stuff-checksum', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_release) mock_service.lookup_release.assert_called_once_with('release-0') @patch('swh.web.ui.views.api.service') @istest def api_release_not_found(self, mock_service): # given mock_service.lookup_release.return_value = None # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Release with sha1_git release-0 not found.' 
}) @patch('swh.web.ui.views.api.service') @istest def api_revision(self, mock_service): # given stub_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['8734ef7e7c357ce2af928115c6c6a42b7e2a44e7'], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } mock_service.lookup_revision.return_value = stub_revision expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233e' 'ff7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6' 'a42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [ '8734ef7e7c357ce2af928115c6c6a42b7e2a44e7' ], 'parent_urls': [ '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5' '/history/8734ef7e7c357ce2af928115c6c6a42b7e2a44e7/' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } # when rv = self.app.get('/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_revision) mock_service.lookup_revision.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_revision_not_found(self, mock_service): # given mock_service.lookup_revision.return_value = None # when rv = self.app.get('/api/1/revision/revision-0/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Revision with sha1_git revision-0 not found.'}) + @patch('swh.web.ui.views.api.service') + @istest + def api_revision_raw_ok(self, mock_service): + # given + stub_revision = {'message': 'synthetic revision message'} + + mock_service.lookup_revision_message.return_value = stub_revision + + # when + rv = self.app.get('/api/1/revision/18d8be353ed3480476f032475e7c2' + '33eff7371d5/raw/') + # then + self.assertEquals(rv.status_code, 200) + self.assertEquals(rv.mimetype, 'application/octet-stream') + self.assertEquals(rv.data, b'synthetic revision message') + + mock_service.lookup_revision_message.assert_called_once_with( + '18d8be353ed3480476f032475e7c233eff7371d5') + + @patch('swh.web.ui.views.api.service') + 
@istest + def api_revision_raw_ok_no_msg(self, mock_service): + # given + mock_service.lookup_revision_message.side_effect = NotFoundExc( + 'No message for revision') + + # when + rv = self.app.get('/api/1/revision/' + '18d8be353ed3480476f032475e7c233eff7371d5/raw/') + + # then + self.assertEquals(rv.status_code, 404) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, { + 'error': 'No message for revision'}) + + mock_service.lookup_revision_message.assert_called_once_with( + '18d8be353ed3480476f032475e7c233eff7371d5') + + @patch('swh.web.ui.views.api.service') + @istest + def api_revision_raw_ko_no_rev(self, mock_service): + # given + mock_service.lookup_revision_message.side_effect = NotFoundExc( + 'No revision found') + + # when + rv = self.app.get('/api/1/revision/' + '18d8be353ed3480476f032475e7c233eff7371d5/raw/') + + # then + self.assertEquals(rv.status_code, 404) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, { + 'error': 'No revision found'}) + + mock_service.lookup_revision_message.assert_called_once_with( + '18d8be353ed3480476f032475e7c233eff7371d5') + @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin_not_found(self, mock_service): mock_service.lookup_revision_by.return_value = None rv = self.app.get('/api/1/revision/origin/123/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertIn('Revision with (origin_id: 123', response_data['error']) self.assertIn('not found', response_data['error']) mock_service.lookup_revision_by.assert_called_once_with( 123, 'refs/heads/master', None) @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin(self, mock_service): mock_revision = { 'id': '32', 'directory': '21', 'message': 'message 1', 'type': 'deb', } expected_revision = { 'id': '32', 'url': '/api/1/revision/32/', 'history_url': '/api/1/revision/32/log/', 'directory': '21', 'directory_url': '/api/1/directory/21/', 'message': 'message 1', 'type': 'deb', } mock_service.lookup_revision_by.return_value = mock_revision rv = self.app.get('/api/1/revision/origin/1/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin_and_branch_name(self, mock_service): mock_revision = { 'id': '12', 'directory': '23', 'message': 'message 2', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '12', 'url': '/api/1/revision/12/', 'history_url': '/api/1/revision/12/log/', 'directory': '23', 'directory_url': '/api/1/directory/23/', 'message': 'message 2', 'type': 'tar', } rv = self.app.get('/api/1/revision/origin/1/branch/refs/origin/dev/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', None) @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.utils')
@istest def api_revision_with_origin_and_branch_name_and_timestamp(self, mock_utils, mock_service): mock_revision = { 'id': '123', 'directory': '456', 'message': 'message 3', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '123', 'url': '/api/1/revision/123/', 'history_url': '/api/1/revision/123/log/', 'directory': '456', 'directory_url': '/api/1/directory/456/', 'message': 'message 3', 'type': 'tar', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision rv = self.app.get('/api/1/revision' '/origin/1' '/branch/refs/origin/dev' '/ts/1452591542/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with('1452591542') mock_utils.enrich_revision.assert_called_once_with( mock_revision) @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.utils') @istest def api_revision_with_origin_and_branch_name_and_timestamp_with_escapes( self, mock_utils, mock_service): mock_revision = { 'id': '999', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '999', 'url': '/api/1/revision/999/', 'history_url': '/api/1/revision/999/log/', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision rv = self.app.get('/api/1/revision' '/origin/1' '/branch/refs%2Forigin%2Fdev' '/ts/Today%20is%20' 'January%201,%202047%20at%208:21:00AM/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with( 'Today is January 1, 2047 at 8:21:00AM') mock_utils.enrich_revision.assert_called_once_with( mock_revision) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_directory_through_rev_with_origin_history_with_rev_not_found_0( self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('not found') # when rv = self.app.get('/api/1/revision' '/origin/1' '/history/4563' '/directory/some-path/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': 'not found'}) mock_rev_dir.assert_called_once_with( { 'origin_id': 1, 'branch_name': 'refs/heads/master', 'ts': None, 'sha1_git': '4563' }, 'some-path', '/api/1/revision' '/origin/1' '/history/4563' '/directory/some-path/', limit=100, with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @patch('swh.web.ui.views.api.utils') @istest def api_directory_through_revision_with_origin_history( self, mock_utils, mock_rev_dir): # given stub_dir_content = [ { 'type': 'dir' }, { 'type': 'file' }, ] mock_rev_dir.return_value = stub_dir_content mock_utils.parse_timestamp.return_value = '2016-11-24 00:00:00' # when url = '/api/1/revision' \ '/origin/999' \ '/branch/refs/dev' \ '/ts/2016-11-24' \ '/history/12-sha1-git' \ '/directory/some/content/' rv = self.app.get(url + '?limit=666') # then self.assertEquals(rv.status_code, 200) 
self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, stub_dir_content) mock_utils.parse_timestamp.assert_called_once_with('2016-11-24') mock_rev_dir.assert_called_once_with( { 'origin_id': 999, 'branch_name': 'refs/dev', 'ts': '2016-11-24 00:00:00', 'sha1_git': '12-sha1-git' }, 'some/content', url, limit=666, with_data=False) @patch('swh.web.ui.views.api.service') @istest def api_revision_history_through_origin_rev_not_found_0( self, mock_service): mock_service.lookup_revision_with_context_by.return_value = { 'id': 'root-rev-id'}, None # when rv = self.app.get('/api/1/revision' '/origin/1' '/history/4563/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" " sha1_git_root being the revision's identifier pointed to by " "(origin_id: %s, branch_name: %s, ts: %s)." % ('4563', 'root-rev-id', 1, 'refs/heads/master', None)}) mock_service.lookup_revision_with_context_by.assert_called_once_with( 1, 'refs/heads/master', None, '4563', 100) @patch('swh.web.ui.views.api.service') @istest def api_revision_history_through_origin_rev_not_found_1( self, mock_service): # given mock_service.lookup_revision_with_context_by.return_value = { 'id': 'root-rev-id'}, None # when rv = self.app.get('/api/1/revision' '/origin/10' '/branch/origin/dev' '/history/213/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" " sha1_git_root being the revision's identifier pointed to by " "(origin_id: %s, branch_name: %s, ts: %s)." % ('213', 'root-rev-id', 10, 'origin/dev', None)}) mock_service.lookup_revision_with_context_by.assert_called_once_with( 10, 'origin/dev', None, '213', 100) @patch('swh.web.ui.views.api.utils') @patch('swh.web.ui.views.api.service') @istest def api_revision_history_through_origin_rev_not_found_2( self, mock_service, mock_utils): # given mock_service.lookup_revision_with_context_by.return_value = { 'id': 'root-rev-id'}, None mock_utils.parse_timestamp.return_value = '2012-11-23 00:00:00' # when rv = self.app.get('/api/1/revision' '/origin/100' '/branch/master' '/ts/2012-11-23' '/history/876/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" " sha1_git_root being the revision's identifier pointed to by " "(origin_id: %s, branch_name: %s, ts: %s)." 
% ('876', 'root-rev-id', 100, 'master', '2012-11-23 00:00:00')}) mock_service.lookup_revision_with_context_by.assert_called_once_with( 100, 'master', '2012-11-23 00:00:00', '876', 100) mock_utils.parse_timestamp.assert_called_once_with('2012-11-23') @patch('swh.web.ui.views.api.utils') @patch('swh.web.ui.views.api.service') @istest def api_revision_history_through_origin_rev_not_found_3( self, mock_service, mock_utils): # given mock_service.lookup_revision_with_context_by.return_value = { 'id': 'root-rev-id'}, None mock_service.lookup_revision_with_context.return_value = None mock_utils.parse_timestamp.return_value = '2016-11-23 00:00:00' # when rv = self.app.get('/api/1/revision' '/origin/666' '/branch/refs/master' '/ts/2016-11-23' '/history/123-sha1-git/?limit=1000') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" " sha1_git_root being the revision's identifier pointed to by " "(origin_id: %s, branch_name: %s, ts: %s)." % ('123-sha1-git', 'root-rev-id', 666, 'refs/master', '2016-11-23 00:00:00')}) mock_service.lookup_revision_with_context_by.assert_called_once_with( 666, 'refs/master', '2016-11-23 00:00:00', '123-sha1-git', 1000) mock_utils.parse_timestamp.assert_called_once_with('2016-11-23') mock_service.lookup_revision_with_context('456-sha1-git-root', '123-sha1-git', 1000) @patch('swh.web.ui.views.api.utils') @patch('swh.web.ui.views.api.service') @istest def api_history_through_revision(self, mock_service, mock_utils): # given stub_root_rev = { 'id': '45-sha1-git-root' } stub_revision = { 'children': [], } mock_service.lookup_revision_with_context_by.return_value = ( stub_root_rev, stub_revision) mock_utils.enrich_revision.return_value = 'some-result' mock_utils.parse_timestamp.return_value = '2016-11-24 00:00:00' # when rv = self.app.get('/api/1/revision' '/origin/999' '/branch/refs/dev' '/ts/2016-11-24' '/history/12-sha1-git/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, 'some-result') mock_service.lookup_revision_with_context_by.assert_called_once_with( 999, 'refs/dev', '2016-11-24 00:00:00', '12-sha1-git', 100) mock_utils.parse_timestamp.assert_called_once_with('2016-11-24') mock_utils.enrich_revision.assert_called_once_with( stub_revision, context='45-sha1-git-root') @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_ko_raise(self, mock_service): # given mock_service.lookup_directory_through_revision.side_effect = NotFoundExc('not') # noqa # when with self.assertRaises(NotFoundExc): api._revision_directory_by( {'sha1_git': 'id'}, None, '/api/1/revision/sha1/directory/') # then mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'id'}, None, limit=100, with_data=False) @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_type_dir(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) # when actual_dir_content = api._revision_directory_by( {'sha1_git': 'blah-id'}, 'some/path', '/api/1/revision/sha1/directory/') # then self.assertEquals(actual_dir_content, { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) 
mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'blah-id'}, 'some/path', limit=100, with_data=False) @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_type_file(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': {'blah': 'blah'} }) # when actual_dir_content = api._revision_directory_by( {'sha1_git': 'sha1'}, 'some/path', '/api/1/revision/origin/2/directory/', limit=1000, with_data=True) # then self.assertEquals(actual_dir_content, { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': {'blah': 'blah'} }) mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'sha1'}, 'some/path', limit=1000, with_data=True) @patch('swh.web.ui.views.api.utils') @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_directory_through_revision_origin_ko_not_found(self, mock_rev_dir, mock_utils): mock_rev_dir.side_effect = NotFoundExc('not found') mock_utils.parse_timestamp.return_value = '2012-10-20 00:00:00' rv = self.app.get('/api/1/revision' '/origin/10' '/branch/refs/remote/origin/dev' '/ts/2012-10-20' '/directory/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': 'not found'}) mock_rev_dir.assert_called_once_with( {'origin_id': 10, 'branch_name': 'refs/remote/origin/dev', 'ts': '2012-10-20 00:00:00'}, None, '/api/1/revision' '/origin/10' '/branch/refs/remote/origin/dev' '/ts/2012-10-20' '/directory/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_directory_through_revision_origin(self, mock_revision_dir): expected_res = [{ 'id': '123' }] mock_revision_dir.return_value = expected_res rv = self.app.get('/api/1/revision/origin/3/directory/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_res) mock_revision_dir.assert_called_once_with({ 'origin_id': 3, 'branch_name': 'refs/heads/master', 'ts': None}, None, '/api/1/revision/origin/3/directory/', with_data=False) @patch('swh.web.ui.views.api.service') @istest def api_revision_log(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log.return_value = stub_revisions expected_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' 'f7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' '42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision 
message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [ '7834ef7e7c357ce2af928115c6c6a42b7e2a4345' ], 'parent_urls': [ '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5' '/history/7834ef7e7c357ce2af928115c6c6a42b7e2a4345/' ], 'type': 'tar', 'synthetic': True, }] # when rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' 'b7e2a44e6/log/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_revisions) mock_service.lookup_revision_log.assert_called_once_with( '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 100) @patch('swh.web.ui.views.api.service') @istest def api_revision_log_not_found(self, mock_service): # given mock_service.lookup_revision_log.return_value = None # when rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42b7' 'e2a44e6/log/?limit=10') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Revision with sha1_git' ' 8834ef7e7c357ce2af928115c6c6a42b7e2a44e6 not found.'}) mock_service.lookup_revision_log.assert_called_once_with( '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 10) + @patch('swh.web.ui.views.api.service') + @istest + def api_revision_log_by(self, mock_service): + # given + stub_revisions = [{ + 'id': '18d8be353ed3480476f032475e7c233eff7371d5', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'author_name': 'Software Heritage', + 'author_email': 'robot@softwareheritage.org', + 'committer_name': 'Software Heritage', + 'committer_email': 'robot@softwareheritage.org', + 'message': 'synthetic revision message', + 'date_offset': 0, + 'committer_date_offset': 0, + 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], + 'type': 'tar', + 'synthetic': True, + }] + mock_service.lookup_revision_log_by.return_value = stub_revisions + + expected_revisions = [{ + 'id': '18d8be353ed3480476f032475e7c233eff7371d5', + 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', + 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' + 'f7371d5/log/', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' + '42b7e2a44e6/', + 'author_name': 'Software Heritage', + 'author_email': 'robot@softwareheritage.org', + 'committer_name': 'Software Heritage', + 'committer_email': 'robot@softwareheritage.org', + 'message': 'synthetic revision message', + 'date_offset': 0, + 'committer_date_offset': 0, + 'parents': [ + '7834ef7e7c357ce2af928115c6c6a42b7e2a4345' + ], + 'parent_urls': [ + '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5' + '/history/7834ef7e7c357ce2af928115c6c6a42b7e2a4345/' + ], + 'type': 'tar', + 'synthetic': True, + }] + + # when + rv = self.app.get('/api/1/revision/origin/1/log/') + + # then + self.assertEquals(rv.status_code, 200) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, expected_revisions) + + mock_service.lookup_revision_log_by.assert_called_once_with( + 1, 'refs/heads/master', None) + + @patch('swh.web.ui.views.api.service') + @istest + def api_revision_log_by_norev(self, mock_service): + # given + mock_service.lookup_revision_log_by.side_effect = NotFoundExc( + 'No revision') + + # when + rv = self.app.get('/api/1/revision/origin/1/log/') + + 
# then + self.assertEquals(rv.status_code, 404) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, {'error': 'No revision'}) + + mock_service.lookup_revision_log_by.assert_called_once_with( + 1, 'refs/heads/master', None) + @patch('swh.web.ui.views.api.service') @istest def api_revision_history_not_found(self, mock_service): # given mock_service.lookup_revision_with_context.return_value = None # then rv = self.app.get('/api/1/revision/999/history/338/?limit=5') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') mock_service.lookup_revision_with_context.assert_called_once_with( '999', '338', 5) @istest def api_revision_history_sha1_same_so_redirect(self): # when rv = self.app.get('/api/1/revision/123/history/123?limit=10') # then self.assertEquals(rv.status_code, 301) # Ideally we'd like to be able to check the resulting url path # but does not work, this returns the current url # also following the redirect would mean to yet mock again the # destination url... So for now cannot test it # self.assertEquals(rv.location, # 'http://localhost/api/1/revision/123?limit=10') @patch('swh.web.ui.views.api.service') @istest def api_revision_history(self, mock_service): # for readability purposes, we use: # - sha1 as 3 letters (url are way too long otherwise to respect pep8) # - only keys with modification steps (all other keys are kept as is) # given stub_revision = { 'id': '883', 'children': ['777', '999'], 'parents': [], 'directory': '272' } mock_service.lookup_revision_with_context.return_value = stub_revision # then rv = self.app.get('/api/1/revision/666/history/883/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'id': '883', 'url': '/api/1/revision/883/', 'history_url': '/api/1/revision/883/log/', 'children': ['777', '999'], 'children_urls': ['/api/1/revision/666/history/777/', '/api/1/revision/666/history/999/'], 'parents': [], 'parent_urls': [], 'directory': '272', 'directory_url': '/api/1/directory/272/' }) mock_service.lookup_revision_with_context.assert_called_once_with( '666', '883', 100) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ko_not_found(self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('Not found') # then rv = self.app.get('/api/1/revision/999/directory/some/path/to/dir/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Not found'}) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path/to/dir', '/api/1/revision/999/directory/some/path/to/dir/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir): stub_dir = { 'type': 'dir', 'revision': '999', 'content': [ { 'sha1_git': '789', 'type': 'file', 'target': '101', 'target_url': '/api/1/content/sha1_git:101/', 'name': 'somefile', 'file_url': '/api/1/revision/999/directory/some/path/' 'somefile/' }, { 'sha1_git': '123', 'type': 'dir', 'target': '456', 'target_url': '/api/1/directory/456/', 'name': 'to-subdir', 'dir_url': '/api/1/revision/999/directory/some/path/' 'to-subdir/', }] } # given mock_rev_dir.return_value = stub_dir # then rv = 
self.app.get('/api/1/revision/999/directory/some/path/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_dir) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path', '/api/1/revision/999/directory/some/path/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ok_returns_content(self, mock_rev_dir): stub_content = { 'type': 'file', 'revision': '999', 'content': { 'sha1_git': '789', 'sha1': '101', 'data_url': '/api/1/content/101/raw/', } } # given mock_rev_dir.return_value = stub_content # then url = '/api/1/revision/666/directory/some/other/path/' rv = self.app.get(url) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_content) mock_rev_dir.assert_called_once_with( {'sha1_git': '666'}, 'some/other/path', url, with_data=False) @istest def api_revision_history_directory_sha1_same_so_redirect(self): # when rv = self.app.get( '/api/1/revision/123/history/123/directory/path/to/?limit=1') # then self.assertEquals(rv.status_code, 301) # self.assertEquals(rv.location, # 'http://localhost/api/1/revision/123/directory/path/to/') @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_history_directory_ko_revision_not_found(self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('not found') # then url = '/api/1/revision/456/history/987/directory/path/to/' rv = self.app.get(url + '?limit=10') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'not found'}) mock_rev_dir.assert_called_once_with( {'sha1_git_root': '456', 'sha1_git': '987'}, 'path/to', url, limit=10, with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_history_directory(self, mock_rev_dir): # given stub_dir = { 'type': 'dir', 'revision': 'rev-id', 'content': [ { 'sha1_git': '879', 'type': 'file', 'target': '110', 'target_url': '/api/1/content/sha1_git:110/', 'name': 'subfile', 'file_url': '/api/1/revision/354/history/867/directory/' 'debian/' 'subfile/', }, { 'sha1_git': '213', 'type': 'dir', 'target': '546', 'target_url': '/api/1/directory/546/', 'name': 'subdir', 'dir_url': '/api/1/revision/354/history/867/directory/debian/subdir/' }] } # given mock_rev_dir.return_value = stub_dir # then url = '/api/1/revision/354' \ '/history/867' \ '/directory/debian/' rv = self.app.get(url + '?limit=4') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_dir) mock_rev_dir.assert_called_once_with( {'sha1_git_root': '354', 'sha1_git': '867'}, 'debian', url, limit=4, with_data=False) @patch('swh.web.ui.views.api.service') @istest def api_person(self, mock_service): # given stub_person = { 'id': '198003', 'name': 'Software Heritage', 'email': 'robot@softwareheritage.org', } mock_service.lookup_person.return_value = stub_person # when rv = self.app.get('/api/1/person/198003/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_person) 
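Taken together, the api_revision_log_by tests added earlier in this file pin down the new /api/1/revision/origin/<origin_id>/log/ endpoint: the view asks service.lookup_revision_log_by for the log starting at the branch head (branch defaulting to 'refs/heads/master', no timestamp), enriches each revision with the usual url, history_url, directory_url and parent_urls fields, and a NotFoundExc surfaces as a 404 {'error': ...} response. A minimal sketch with that shape, assuming a plain Flask route and a stand-in enrich_revision helper (neither is code from this diff; the real views rely on Flask-API and the module's own enrichment utilities):

    # Sketch only -- inferred from the assertions in api_revision_log_by
    # and api_revision_log_by_norev, not the actual swh.web.ui source.
    from flask import Flask, jsonify

    from swh.web.ui import service

    app = Flask(__name__)

    def enrich_revision(revision):
        # Stand-in for the enrichment step the expected payloads show
        # (url, history_url, directory_url, parent_urls).
        return revision

    @app.route('/api/1/revision/origin/<int:origin_id>/log/')
    def api_revision_log_by(origin_id,
                            branch_name='refs/heads/master',
                            ts=None):
        # The not-found test implies a NotFoundExc raised by the service
        # is turned into the 404 {'error': ...} body by an app-level
        # error handler rather than inside the view.
        revisions = service.lookup_revision_log_by(origin_id, branch_name, ts)
        # jsonify stands in for Flask-API's serialization of the returned
        # list in the real application.
        return jsonify([enrich_revision(rev) for rev in revisions])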
@patch('swh.web.ui.views.api.service') @istest def api_person_not_found(self, mock_service): # given mock_service.lookup_person.return_value = None # when rv = self.app.get('/api/1/person/666/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Person with id 666 not found.'}) @patch('swh.web.ui.views.api.service') @istest def api_directory(self, mock_service): # given stub_directories = [ { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', }] expected_directories = [ { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', 'target_url': '/api/1/directory/8be353ed3480476f032475e7c233eff737123456/', }] mock_service.lookup_directory.return_value = stub_directories # when rv = self.app.get('/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_directories) mock_service.lookup_directory.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_directory_not_found(self, mock_service): # given mock_service.lookup_directory.return_value = [] # when rv = self.app.get('/api/1/directory/' '66618d8be353ed3480476f032475e7c233eff737/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.'}) @patch('swh.web.ui.views.api.service') @istest def api_directory_with_path_found(self, mock_service): # given expected_dir = { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'name': 'bla', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', } mock_service.lookup_directory_with_path.return_value = expected_dir # when rv = self.app.get('/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/bla/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_dir) mock_service.lookup_directory_with_path.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5', 'bla') @patch('swh.web.ui.views.api.service') @istest def api_directory_with_path_not_found(self, mock_service): # given mock_service.lookup_directory_with_path.return_value = None path = 'some/path/to/dir/' # when rv = self.app.get(('/api/1/directory/' '66618d8be353ed3480476f032475e7c233eff737/%s') % path) path = path.strip('/') # Path stripped of lead/trail separators # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = 
json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': (('Entry with path %s relative to ' 'directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.') % path)}) @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid_not_found(self, mock_service): # when mock_service.lookup_entity_by_uuid.return_value = [] # when rv = self.app.get('/api/1/entity/' '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': "Entity with uuid '5f4d4c51-498a-4e28-88b3-b3e4e8396cba' not " + "found."}) mock_service.lookup_entity_by_uuid.assert_called_once_with( '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid_bad_request(self, mock_service): # when mock_service.lookup_entity_by_uuid.side_effect = BadInputExc( 'bad input: uuid malformed!') # when rv = self.app.get('/api/1/entity/uuid malformed/') self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'bad input: uuid malformed!'}) mock_service.lookup_entity_by_uuid.assert_called_once_with( 'uuid malformed') @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid(self, mock_service): # when stub_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' } ] mock_service.lookup_entity_by_uuid.return_value = stub_entities expected_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'uuid_url': '/api/1/entity/34bd6b1b-463f-43e5-a697-' '785107f598e4/', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'uuid_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' } ] # when rv = self.app.get('/api/1/entity' '/34bd6b1b-463f-43e5-a697-785107f598e4/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_entities) mock_service.lookup_entity_by_uuid.assert_called_once_with( '34bd6b1b-463f-43e5-a697-785107f598e4') class ApiUtils(unittest.TestCase): @istest def api_lookup_not_found(self): # when with self.assertRaises(exc.NotFoundExc) as e: api._api_lookup('something', lambda x: None, 'this is the error message raised as it is None') self.assertEqual(e.exception.args[0], 'this is the error message raised as it is None') @istest def api_lookup_with_result(self): # when actual_result = api._api_lookup('something', lambda x: x + '!', 'this is the error which won\'t be ' 'used here') self.assertEqual(actual_result, 'something!') @istest def api_lookup_with_result_as_map(self): # when actual_result = api._api_lookup([1, 2, 3], lambda x: map(lambda y: y+1, x), 'this is the error which won\'t be ' 'used here') self.assertEqual(actual_result, [2, 3, 4]) diff --git a/swh/web/ui/tests/views/test_browse.py b/swh/web/ui/tests/views/test_browse.py index 762c4301..da54e11f 100644 --- a/swh/web/ui/tests/views/test_browse.py +++ b/swh/web/ui/tests/views/test_browse.py @@ -1,1590 +1,1692 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the 
top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from nose.tools import istest from unittest.mock import patch from swh.web.ui.exc import BadInputExc, NotFoundExc from .. import test_app class FileMock(): def __init__(self, filename): self.filename = filename class SearchView(test_app.SWHViewTestCase): render_template = False @istest def search_default(self): # when rv = self.client.get('/search/') self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('messages'), []) - self.assertEqual(self.get_context_variable('responses'), []) + self.assertEqual(self.get_context_variable('message'), '') + self.assertEqual(self.get_context_variable('search_res'), None) self.assert_template_used('upload_and_search.html') - @patch('swh.web.ui.views.browse.service') + @patch('swh.web.ui.views.browse.api') @istest - def search_get_query_hash_not_found(self, mock_service): + def search_get_query_hash_not_found(self, mock_api): # given - mock_service.lookup_hash.return_value = {'found': None} + mock_api.api_search.return_value = { + 'search_res': [{ + 'filename': None, + 'sha1': 'sha1:456', + 'found': False}], + 'search_stats': {'nbfiles': 1, 'pct': 100}} # when rv = self.client.get('/search/?q=sha1:456') self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('q'), 'sha1:456') - self.assertEqual(self.get_context_variable('messages'), []) - self.assertEqual(self.get_context_variable('responses'), [ - {'filename': 'User submitted hash', + self.assertEqual(self.get_context_variable('message'), '') + self.assertEqual(self.get_context_variable('search_res'), [ + {'filename': None, 'sha1': 'sha1:456', 'found': False}]) self.assert_template_used('upload_and_search.html') - mock_service.lookup_hash.assert_called_once_with('sha1:456') + mock_api.api_search.assert_called_once_with('sha1:456') - @patch('swh.web.ui.views.browse.service') + @patch('swh.web.ui.views.browse.api') @istest - def search_get_query_hash_bad_input(self, mock_service): + def search_get_query_hash_bad_input(self, mock_api): # given - mock_service.lookup_hash.side_effect = BadInputExc('error msg') + mock_api.api_search.side_effect = BadInputExc('error msg') # when rv = self.client.get('/search/?q=sha1_git:789') self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('q'), 'sha1_git:789') - self.assertEqual(self.get_context_variable('messages'), ['error msg']) - self.assertEqual(self.get_context_variable('responses'), []) + self.assertEqual(self.get_context_variable('message'), 'error msg') + self.assertEqual(self.get_context_variable('search_res'), None) self.assert_template_used('upload_and_search.html') - mock_service.lookup_hash.assert_called_once_with('sha1_git:789') + mock_api.api_search.assert_called_once_with('sha1_git:789') - @patch('swh.web.ui.views.browse.service') + @patch('swh.web.ui.views.browse.api') @istest - def search_get_query_hash_found(self, mock_service): + def search_get_query_hash_found(self, mock_api): # given - mock_service.lookup_hash.return_value = {'found': True} + mock_api.api_search.return_value = { + 'search_res': [{ + 'filename': None, + 'sha1': 'sha1:123', + 'found': True}], + 'search_stats': {'nbfiles': 1, 'pct': 100}} # when rv = self.client.get('/search/?q=sha1:123') self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('q'), 'sha1:123') - 
self.assertEqual(self.get_context_variable('messages'), []) - self.assertEqual(len(self.get_context_variable('responses')), 1) - resp = self.get_context_variable('responses')[0] + self.assertEqual(self.get_context_variable('message'), '') + self.assertEqual(len(self.get_context_variable('search_res')), 1) + resp = self.get_context_variable('search_res')[0] self.assertTrue(resp is not None) self.assertEqual(resp['sha1'], 'sha1:123') self.assertEqual(resp['found'], True) self.assert_template_used('upload_and_search.html') - mock_service.lookup_hash.assert_called_once_with('sha1:123') + mock_api.api_search.assert_called_once_with('sha1:123') - @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.request') + @patch('swh.web.ui.views.browse.api') @istest - def search_post_hashes_bad_input(self, mock_request, - mock_service): + def search_post_hashes_bad_input(self, mock_api, mock_request): # given mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} mock_request.method = 'POST' - mock_service.lookup_multiple_hashes.side_effect = BadInputExc( + mock_api.api_search.side_effect = BadInputExc( 'error bad input') # when (mock_request completes the post request) rv = self.client.post('/search/') # then self.assertEqual(rv.status_code, 200) self.assertEqual(self.get_context_variable('search_stats'), {'nbfiles': 0, 'pct': 0}) - self.assertEqual(self.get_context_variable('responses'), []) - self.assertEqual(self.get_context_variable('messages'), - ['error bad input']) + self.assertEqual(self.get_context_variable('search_res'), None) + self.assertEqual(self.get_context_variable('message'), + 'error bad input') self.assert_template_used('upload_and_search.html') - mock_service.upload_and_search.called = True - - @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.request') + @patch('swh.web.ui.views.browse.api') @istest - def search_post_hashes_none(self, mock_request, mock_service): + def search_post_hashes_none(self, mock_api, mock_request): # given mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} mock_request.method = 'POST' - mock_service.lookup_multiple_hashes.return_value = [ - {'filename': 'a', - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': False}, - {'filename': 'b', - 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', - 'found': False} - ] + mock_api.api_search.return_value = { + 'search_stats': {'nbfiles': 2, 'pct': 0}, + 'search_res': [{'filename': 'a', + 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', + 'found': False}, + {'filename': 'b', + 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', + 'found': False}]} # when (mock_request completes the post request) rv = self.client.post('/search/') # then self.assertEqual(rv.status_code, 200) - self.assertEqual(len(self.get_context_variable('responses')), 2) + self.assertIsNotNone(self.get_context_variable('search_res')) self.assertTrue(self.get_context_variable('search_stats') is not None) + self.assertEqual(len(self.get_context_variable('search_res')), 2) + stats = self.get_context_variable('search_stats') self.assertEqual(stats['nbfiles'], 2) self.assertEqual(stats['pct'], 0) - a, b = self.get_context_variable('responses') + + a, b = self.get_context_variable('search_res') self.assertEqual(a['found'], False) self.assertEqual(b['found'], False) - self.assertEqual(self.get_context_variable('messages'), []) - 
self.assert_template_used('upload_and_search.html') + self.assertEqual(self.get_context_variable('message'), '') - mock_service.upload_and_search.called = True + self.assert_template_used('upload_and_search.html') - @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.request') + @patch('swh.web.ui.views.browse.api') @istest - def search_post_hashes_some(self, mock_request, mock_service): + def search_post_hashes_some(self, mock_api, mock_request): # given mock_request.form = {'a': '456caf10e9535160d90e874b45aa426de762f19f', 'b': '745bab676c8f3cec8016e0c39ea61cf57e518865'} mock_request.method = 'POST' - mock_service.lookup_multiple_hashes.return_value = [ - {'filename': 'a', - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': False}, - {'filename': 'b', - 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', - 'found': True} - ] + mock_api.api_search.return_value = { + 'search_stats': {'nbfiles': 2, 'pct': 50}, + 'search_res': [{'filename': 'a', + 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', + 'found': False}, + {'filename': 'b', + 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', + 'found': True}]} # when (mock_request completes the post request) rv = self.client.post('/search/') # then self.assertEqual(rv.status_code, 200) - self.assertEqual(len(self.get_context_variable('responses')), 2) + self.assertIsNotNone(self.get_context_variable('search_res')) + self.assertEqual(len(self.get_context_variable('search_res')), 2) self.assertTrue(self.get_context_variable('search_stats') is not None) + stats = self.get_context_variable('search_stats') self.assertEqual(stats['nbfiles'], 2) self.assertEqual(stats['pct'], 50) - self.assertEqual(self.get_context_variable('messages'), []) - a, b = self.get_context_variable('responses') + self.assertEqual(self.get_context_variable('message'), '') + + a, b = self.get_context_variable('search_res') self.assertEqual(a['found'], False) self.assertEqual(b['found'], True) self.assert_template_used('upload_and_search.html') - mock_service.upload_and_search.called = True - class ContentView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_content_ko_not_found(self, mock_api): # given mock_api.api_content_metadata.side_effect = NotFoundExc( 'Not found!') # when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertEqual(self.get_context_variable('message'), 'Not found!') self.assertIsNone(self.get_context_variable('content')) mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.api') @istest def browse_content_ko_bad_input(self, mock_api): # given mock_api.api_content_metadata.side_effect = BadInputExc( 'Bad input!') # when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertEqual(self.get_context_variable('message'), 'Bad input!') self.assertIsNone(self.get_context_variable('content')) mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_content(self, mock_api, mock_service): # given stub_content = {'sha1': 'sha1_hash'} mock_api.api_content_metadata.return_value = stub_content mock_service.lookup_content_raw.return_value = {'data': b'blah'} expected_content = {'sha1': 'sha1_hash', 'data': 'blah'} # 
when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('content'), expected_content) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:sha1-hash') mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.redirect') @patch('swh.web.ui.views.browse.url_for') @istest def browse_content_raw(self, mock_urlfor, mock_redirect): # given stub_content_raw = b'some-data' mock_urlfor.return_value = '/api/content/sha1:sha1-hash/raw/' mock_redirect.return_value = stub_content_raw # when rv = self.client.get('/browse/content/sha1:sha1-hash/raw/') self.assertEqual(rv.status_code, 200) self.assertEqual(rv.data, stub_content_raw) mock_urlfor.assert_called_once_with('api_content_raw', q='sha1:sha1-hash') mock_redirect.assert_called_once_with( '/api/content/sha1:sha1-hash/raw/') class DirectoryView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_directory_ko_bad_input(self, mock_api): # given mock_api.api_directory.side_effect = BadInputExc( 'Invalid hash') # when rv = self.client.get('/browse/directory/sha2-invalid/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Invalid hash') self.assertEqual(self.get_context_variable('files'), []) mock_api.api_directory.assert_called_once_with( 'sha2-invalid') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_empty_result(self, mock_api): # given mock_api.api_directory.return_value = [] # when rv = self.client.get('/browse/directory/some-sha1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Listing for directory some-sha1:') self.assertEqual(self.get_context_variable('files'), []) mock_api.api_directory.assert_called_once_with( 'some-sha1') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_file(self, mock_api, mock_service): # given stub_entry = { 'sha256': '240', 'type': 'file' } mock_service.lookup_directory_with_path.return_value = stub_entry stub_file = { 'sha1_git': '123', 'sha1': '456', 'status': 'visible', 'data_url': '/api/1/content/890', 'length': 42, 'ctime': 'Thu, 01 Oct 2015 12:13:53 GMT', 'target': 'file.txt', 'sha256': '148' } mock_api.api_content_metadata.return_value = stub_file mock_service.lookup_content_raw.return_value = { 'data': 'this is my file'} # when rv = self.client.get('/browse/directory/sha1/path/to/file/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertIsNotNone(self.get_context_variable('content')) content = self.get_context_variable('content') # change caused by call to prepare_data_for_view self.assertEqual(content['data_url'], '/browse/content/890') self.assertEqual(content['data'], 'this is my file') mock_api.api_content_metadata.assert_called_once_with('sha256:240') mock_service.lookup_content_raw.assert_called_once_with('sha256:240') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_dir(self, mock_api, mock_service): # given mock_service.lookup_directory_with_path.return_value = { 'sha256': '240', 'target': 'abcd', 'type': 'dir' } 
stub_directory_ls = [ {'type': 'dir', 'target': '123', 'name': 'some-dir-name'}, {'type': 'file', 'sha1': '654', 'name': 'some-filename'}, {'type': 'dir', 'target': '987', 'name': 'some-other-dirname'} ] mock_api.api_directory.return_value = stub_directory_ls # when rv = self.client.get('/browse/directory/sha1/path/to/dir/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertIsNotNone(self.get_context_variable('files')) self.assertEqual(len(self.get_context_variable('files')), len(stub_directory_ls)) mock_api.api_directory.assert_called_once_with('abcd') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_not_found(self, mock_api, mock_service): # given mock_service.lookup_directory_with_path.side_effect = NotFoundExc( 'Directory entry not found.') # when rv = self.client.get('/browse/directory/some-sha1/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Directory entry not found.') @patch('swh.web.ui.views.browse.api') @patch('swh.web.ui.views.browse.utils') @istest def browse_directory(self, mock_utils, mock_api): # given stub_directory_ls = [ {'type': 'dir', 'target': '123', 'name': 'some-dir-name'}, {'type': 'file', 'sha1': '654', 'name': 'some-filename'}, {'type': 'dir', 'target': '987', 'name': 'some-other-dirname'} ] mock_api.api_directory.return_value = stub_directory_ls stub_directory_map = [ {'link': '/path/to/url/dir/123', 'name': 'some-dir-name'}, {'link': '/path/to/url/file/654', 'name': 'some-filename'}, {'link': '/path/to/url/dir/987', 'name': 'some-other-dirname'} ] mock_utils.prepare_data_for_view.return_value = stub_directory_map # when rv = self.client.get('/browse/directory/some-sha1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Listing for directory some-sha1:') self.assertEqual(self.get_context_variable('files'), stub_directory_map) mock_api.api_directory.assert_called_once_with( 'some-sha1') mock_utils.prepare_data_for_view.assert_called_once_with( stub_directory_ls) class ContentWithOriginView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin_content_ko_not_found(self, mock_api): # given mock_api.api_content_checksum_to_origin.side_effect = NotFoundExc( 'Not found!') # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') self.assertEqual(self.get_context_variable('message'), 'Not found!') mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin_ko_bad_input(self, mock_api): # given mock_api.api_content_checksum_to_origin.side_effect = BadInputExc( 'Invalid hash') # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') self.assertEqual( self.get_context_variable('message'), 'Invalid hash') mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin(self, mock_api): # given mock_api.api_content_checksum_to_origin.return_value = { 
'origin_type': 'ftp', 'origin_url': '/some/url', 'revision': 'revision-hash', 'branch': 'master', 'path': '/path/to', } # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') self.assertEqual( self.get_context_variable('message'), "The content with hash sha256:some-sha256 has been seen on " + "origin with type 'ftp'\n" + "at url '/some/url'. The revision was identified at " + "'revision-hash' on branch 'master'.\n" + "The file's path referenced was '/path/to'.") mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') class OriginView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_origin_ko_not_found(self, mock_api): # given mock_api.api_origin.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/origin/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') self.assertEqual(self.get_context_variable('origin_id'), 1) self.assertEqual( self.get_context_variable('message'), 'Not found!') mock_api.api_origin.assert_called_once_with(1) @patch('swh.web.ui.views.browse.api') @istest def browse_origin_ko_bad_input(self, mock_api): # given mock_api.api_origin.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/origin/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') self.assertEqual(self.get_context_variable('origin_id'), 426) mock_api.api_origin.assert_called_once_with(426) @patch('swh.web.ui.views.browse.api') @istest def browse_origin_found(self, mock_api): # given mock_origin = {'type': 'git', 'lister': None, 'project': None, 'url': 'rsync://some/url', 'id': 426} mock_api.api_origin.return_value = mock_origin # when rv = self.client.get('/browse/origin/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') self.assertEqual(self.get_context_variable('origin_id'), 426) self.assertEqual(self.get_context_variable('origin'), mock_origin) mock_api.api_origin.assert_called_once_with(426) class PersonView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_person_ko_not_found(self, mock_api): # given mock_api.api_person.side_effect = NotFoundExc('not found') # when rv = self.client.get('/browse/person/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') self.assertEqual(self.get_context_variable('person_id'), 1) self.assertEqual( self.get_context_variable('message'), 'not found') mock_api.api_person.assert_called_once_with(1) @patch('swh.web.ui.views.browse.api') @istest def browse_person_ko_bad_input(self, mock_api): # given mock_api.api_person.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/person/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') self.assertEqual(self.get_context_variable('person_id'), 426) mock_api.api_person.assert_called_once_with(426) @patch('swh.web.ui.views.browse.api') @istest def browse_person(self, mock_api): # given mock_person = {'type': 'git', 'lister': None, 'project': None, 'url': 'rsync://some/url', 'id': 426} mock_api.api_person.return_value = mock_person # when rv = self.client.get('/browse/person/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') 
self.assertEqual(self.get_context_variable('person_id'), 426) self.assertEqual(self.get_context_variable('person'), mock_person) mock_api.api_person.assert_called_once_with(426) class ReleaseView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_release_ko_not_found(self, mock_api): # given mock_api.api_release.side_effect = NotFoundExc('not found!') # when rv = self.client.get('/browse/release/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual( self.get_context_variable('message'), 'not found!') mock_api.api_release.assert_called_once_with('1') @patch('swh.web.ui.views.browse.api') @istest def browse_release_ko_bad_input(self, mock_api): # given mock_api.api_release.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/release/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') mock_api.api_release.assert_called_once_with('426') @patch('swh.web.ui.views.browse.api') @istest def browse_release(self, mock_api): # given self.maxDiff = None mock_release = { "date": "Sun, 05 Jul 2015 18:02:06 GMT", "id": "1e951912027ea6873da6985b91e50c47f645ae1a", "target": "d770e558e21961ad6cfdf0ff7df0eb5d7d4f0754", "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' 'eb5d7d4f0754/', "synthetic": False, "target_type": "revision", "author": { "email": "torvalds@linux-foundation.org", "name": "Linus Torvalds" }, "message": "Linux 4.2-rc1\n", "name": "v4.2-rc1" } mock_api.api_release.return_value = mock_release expected_release = { "date": "Sun, 05 Jul 2015 18:02:06 GMT", "id": "1e951912027ea6873da6985b91e50c47f645ae1a", "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' 'eb5d7d4f0754/', "target": 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', "synthetic": False, "target_type": "revision", "author": { "email": "torvalds@linux-foundation.org", "name": "Linus Torvalds" }, "message": "Linux 4.2-rc1\n", "name": "v4.2-rc1" } # when rv = self.client.get('/browse/release/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual(self.get_context_variable('release'), expected_release) mock_api.api_release.assert_called_once_with('426') class RevisionView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_revision_ko_not_found(self, mock_api): # given mock_api.api_revision.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual( self.get_context_variable('message'), 'Not found!') self.assertIsNone(self.get_context_variable('revision')) mock_api.api_revision.assert_called_once_with('1') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_ko_bad_input(self, mock_api): # given mock_api.api_revision.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/revision/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual( self.get_context_variable('message'), 'wrong input!') 
self.assertIsNone(self.get_context_variable('revision')) mock_api.api_revision.assert_called_once_with('426') @patch('swh.web.ui.views.browse.api') @istest def browse_revision(self, mock_api): # given stub_revision = { 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'directory_url': '/api/1/directory/' '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', 'parent_url': [ '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], } mock_api.api_revision.return_value = stub_revision expected_revision = { 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'parent_url': [ '/browse/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], 'directory_url': '/browse/directory/2a1dbabeed4dcf1f4a4c441993b2f' 'fc9d972780b/', } # when rv = self.client.get('/browse/revision/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual(self.get_context_variable('revision'), expected_revision) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision.assert_called_once_with('426') + @patch('swh.web.ui.views.browse.api') + @istest + def browse_revision_raw_message(self, mock_api): + # given + sha1 = 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754' + + # when + rv = self.client.get('/browse/revision/' + 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754/raw/') + + self.assertRedirects( + rv, '/api/1/revision/%s/raw/' % sha1) + @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_ko_not_found(self, mock_api): # given mock_api.api_revision_log.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/sha1/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), 'sha1') self.assertEqual( self.get_context_variable('message'), 'Not found!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log.assert_called_once_with('sha1') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_ko_bad_input(self, mock_api): # given mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/revision/426/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual( self.get_context_variable('message'), 'wrong input!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log.assert_called_once_with('426') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log(self, mock_api): # given stub_revisions = [{ 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 
'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'directory_url': '/api/1/directory/' '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', 'parent_url': [ '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], }] mock_api.api_revision_log.return_value = stub_revisions # when rv = self.client.get('/browse/revision/426/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertTrue( isinstance(self.get_context_variable('revisions'), map)) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_log.assert_called_once_with('426') + @patch('swh.web.ui.views.browse.api') + @istest + def browse_revision_log_by_ko_not_found(self, mock_api): + # given + mock_api.api_revision_log_by.side_effect = NotFoundExc('Not found!') + + # when + rv = self.client.get('/browse/revision/origin/9/log/') + + # then + self.assertEqual(rv.status_code, 200) + self.assert_template_used('revision-log.html') + self.assertEqual(self.get_context_variable('origin_id'), 9) + self.assertEqual( + self.get_context_variable('message'), + 'Not found!') + self.assertEqual(self.get_context_variable('revisions'), []) + + mock_api.api_revision_log_by.assert_called_once_with( + 9, 'refs/heads/master', None) + + @patch('swh.web.ui.views.browse.api') + @istest + def browse_revision_log_by_ko_bad_input(self, mock_api): + # given + mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') + + # when + rv = self.client.get('/browse/revision/abcd/log/') + + # then + self.assertEqual(rv.status_code, 200) + self.assert_template_used('revision-log.html') + self.assertEqual(self.get_context_variable('sha1_git'), 'abcd') + self.assertEqual( + self.get_context_variable('message'), + 'wrong input!') + self.assertEqual(self.get_context_variable('revisions'), []) + + mock_api.api_revision_log.assert_called_once_with('abcd') + + @patch('swh.web.ui.views.browse.api') + @istest + def browse_revision_log_by(self, mock_api): + # given + stub_revisions = [{ + 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', + 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', + 'committer': { + 'email': 'torvalds@linux-foundation.org', + 'name': 'Linus Torvalds' + }, + 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', + 'type': 'git', + 'author': { + 'email': 'torvalds@linux-foundation.org', + 'name': 'Linus Torvalds' + }, + 'message': 'Linux 4.2-rc1\n', + 'synthetic': False, + 'directory_url': '/api/1/directory/' + '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', + 'parent_url': [ + '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' + ], + }] + mock_api.api_revision_log_by.return_value = stub_revisions + + # when + rv = self.client.get('/browse/revision/origin/2/log/') + + # then + self.assertEqual(rv.status_code, 200) + self.assert_template_used('revision-log.html') + self.assertEqual(self.get_context_variable('origin_id'), 2) + self.assertTrue( + isinstance(self.get_context_variable('revisions'), map)) + self.assertIsNone(self.get_context_variable('message')) + + mock_api.api_revision_log_by.assert_called_once_with( + 2, 'refs/heads/master', None) + @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_ko_not_found(self, mock_api): # given mock_api.api_revision_history.side_effect = NotFoundExc( 'Not found') # when rv = 
self.client.get('/browse/revision/1/history/2/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '1') self.assertEqual(self.get_context_variable('sha1_git'), '2') self.assertEqual( self.get_context_variable('message'), 'Not found') mock_api.api_revision_history.assert_called_once_with( '1', '2') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_ko_bad_input(self, mock_api): # given mock_api.api_revision_history.side_effect = BadInputExc( 'Input incorrect') # when rv = self.client.get('/browse/revision/321/history/654/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '321') self.assertEqual(self.get_context_variable('sha1_git'), '654') self.assertEqual( self.get_context_variable('message'), 'Input incorrect') mock_api.api_revision_history.assert_called_once_with( '321', '654') @istest def browse_revision_history_ok_same_sha1(self): # when rv = self.client.get('/browse/revision/10/history/10/') # then self.assertEqual(rv.status_code, 302) @patch('swh.web.ui.views.browse.utils') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history(self, mock_api, mock_utils): # given stub_revision = {'id': 'some-rev'} mock_api.api_revision_history.return_value = stub_revision expected_revision = { 'id': 'some-rev-id', 'author': {'name': 'foo', 'email': 'bar'}, 'committer': {'name': 'foo', 'email': 'bar'} } mock_utils.prepare_data_for_view.return_value = expected_revision # when rv = self.client.get('/browse/revision/426/history/789/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '426') self.assertEqual(self.get_context_variable('sha1_git'), '789') self.assertEqual(self.get_context_variable('revision'), expected_revision) mock_api.api_revision_history.assert_called_once_with( '426', '789') mock_utils.prepare_data_for_view.assert_called_once_with(stub_revision) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_ko_not_found(self, mock_api): # given mock_api.api_revision_directory.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/1/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual(self.get_context_variable('path'), '.') self.assertIsNone(self.get_context_variable('result')) self.assertEqual( self.get_context_variable('message'), "Not found!") mock_api.api_revision_directory.assert_called_once_with( '1', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_ko_bad_input(self, mock_api): # given mock_api.api_revision_directory.side_effect = BadInputExc('Bad input!') # when rv = self.client.get('/browse/revision/10/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '10') self.assertEqual(self.get_context_variable('path'), '.') self.assertIsNone(self.get_context_variable('result')) self.assertEqual( self.get_context_variable('message'), "Bad input!") mock_api.api_revision_directory.assert_called_once_with( '10', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory(self, 
mock_api): # given stub_result0 = { 'type': 'dir', 'revision': '100', 'content': [ { 'id': 'some-result', 'type': 'file', 'name': 'blah', }, { 'id': 'some-other-result', 'type': 'dir', 'name': 'foo', } ] } mock_api.api_revision_directory.return_value = stub_result0 stub_result1 = { 'type': 'dir', 'revision': '100', 'content': [ { 'id': 'some-result', 'type': 'file', 'name': 'blah', }, { 'id': 'some-other-result', 'type': 'dir', 'name': 'foo', } ] } # when rv = self.client.get('/browse/revision/100/directory/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '100') self.assertEqual(self.get_context_variable('revision'), '100') self.assertEqual(self.get_context_variable('path'), 'some/path') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('result'), stub_result1) mock_api.api_revision_directory.assert_called_once_with( '100', 'some/path', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory_ko_not_found(self, mock_api): # given mock_api.api_revision_history_directory.side_effect = NotFoundExc( 'not found') # when rv = self.client.get('/browse/revision/123/history/456/directory/a/b/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '123') self.assertEqual(self.get_context_variable('sha1_git'), '456') self.assertEqual(self.get_context_variable('path'), 'a/b') self.assertEqual(self.get_context_variable('message'), 'not found') self.assertIsNone(self.get_context_variable('result')) mock_api.api_revision_history_directory.assert_called_once_with( '123', '456', 'a/b', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory_ko_bad_input(self, mock_api): # given mock_api.api_revision_history_directory.side_effect = BadInputExc( 'bad input') # when rv = self.client.get('/browse/revision/123/history/456/directory/a/c/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '123') self.assertEqual(self.get_context_variable('sha1_git'), '456') self.assertEqual(self.get_context_variable('path'), 'a/c') self.assertEqual(self.get_context_variable('message'), 'bad input') self.assertIsNone(self.get_context_variable('result')) mock_api.api_revision_history_directory.assert_called_once_with( '123', '456', 'a/c', with_data=True) @patch('swh.web.ui.views.browse.service') @istest def browse_revision_history_directory_ok_no_trailing_slash_so_redirect( self, mock_service): # when rv = self.client.get('/browse/revision/1/history/2/directory/path/to') # then self.assertEqual(rv.status_code, 301) @patch('swh.web.ui.views.browse.service') @istest def browse_revision_history_directory_ok_same_sha1_redirects( self, mock_service): # when rv = self.client.get('/browse/revision/1/history/1/directory/path/to') # then self.assertEqual(rv.status_code, 301) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory(self, mock_api): # given stub_result0 = { 'type': 'dir', 'revision': '1000', 'content': [{ 'id': 'some-result', 'type': 'file', 'name': 'blah' }] } mock_api.api_revision_history_directory.return_value = stub_result0 stub_result1 = { 'type': 'dir', 'revision': '1000', 'content': [{ 'id': 'some-result', 'type': 'file', 'name': 
'blah' }] } # when rv = self.client.get('/browse/revision/100/history/999/directory/' 'path/to/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '100') self.assertEqual(self.get_context_variable('sha1_git'), '999') self.assertEqual(self.get_context_variable('revision'), '1000') self.assertEqual(self.get_context_variable('path'), 'path/to') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('result'), stub_result1) mock_api.api_revision_history_directory.assert_called_once_with( '100', '999', 'path/to', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_bad_input(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = BadInputExc( 'Problem input.') # noqa # when rv = self.client.get('/browse/revision/origin/99' '/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Problem input.') mock_api.api_revision_history_through_origin.assert_called_once_with( 99, 'refs/heads/master', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_not_found(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = NotFoundExc( 'Not found.') # when rv = self.client.get('/browse/revision/origin/999/' 'branch/dev/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Not found.') mock_api.api_revision_history_through_origin.assert_called_once_with( 999, 'dev', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_other_error(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = ValueError( 'Other Error.') # when rv = self.client.get('/browse/revision/origin/438' '/branch/scratch' '/ts/2016' '/history/789/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Other Error.') mock_api.api_revision_history_through_origin.assert_called_once_with( 438, 'scratch', '2016', '789') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin(self, mock_api): # given stub_rev = { 'id': 'some-id', 'author': {}, 'committer': {} } mock_api.api_revision_history_through_origin.return_value = stub_rev # when rv = self.client.get('/browse/revision/origin/99/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('revision'), stub_rev) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_history_through_origin.assert_called_once_with( 99, 'refs/heads/master', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_not_found(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = NotFoundExc( 'Not found') # when rv = self.client.get('/browse/revision/origin/1/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') 
self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Not found') mock_api.api_revision_with_origin.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_bad_input(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = BadInputExc( 'Bad Input') # when rv = self.client.get('/browse/revision/origin/1000/branch/dev/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Bad Input') mock_api.api_revision_with_origin.assert_called_once_with( 1000, 'dev', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_other(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = ValueError( 'Other') # when rv = self.client.get('/browse/revision/origin/1999' '/branch/scratch/master' '/ts/1990-01-10/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Other') mock_api.api_revision_with_origin.assert_called_once_with( 1999, 'scratch/master', '1990-01-10') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin(self, mock_api): # given stub_rev = {'id': 'some-id', 'author': {}, 'committer': {}} mock_api.api_revision_with_origin.return_value = stub_rev # when rv = self.client.get('/browse/revision/origin/1/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('revision'), stub_rev) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_with_origin.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_not_found(self, mock_api): # given mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa 'this is not the robot you are looking for') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'this is not the robot you are looking for') mock_api.api_directory_through_revision_origin.assert_called_once_with( # noqa 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_bad_input(self, mock_api): # given mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa 'Bad Robot') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Bad Robot') mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_other(self, mock_api): # given mock_api.api_directory_through_revision_origin.side_effect = ValueError( # noqa 'Other bad stuff') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') 
self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Other bad stuff') mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin(self, mock_api): # given stub_res = {'id': 'some-id', 'revision': 'some-rev-id', 'type': 'dir', 'content': 'some-content'} mock_api.api_directory_through_revision_origin.return_value = stub_res # when rv = self.client.get('/browse/revision/origin/2' '/branch/dev' '/ts/2013-20-20 10:02' '/directory/some/file/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('result'), stub_res) self.assertIsNone(self.get_context_variable('message')) mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'dev', '2013-20-20 10:02', 'some/file', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history_ko_not_found( self, mock_api): mock_api.api_directory_through_revision_with_origin_history.side_effect = NotFoundExc( # noqa 'Not found!') # when rv = self.client.get('/browse/revision/origin/987' '/history/sha1git' '/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Not found!') self.assertEqual(self.get_context_variable('path'), '.') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 987, 'refs/heads/master', None, 'sha1git', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history_ko_bad_input( self, mock_api): mock_api.api_directory_through_revision_with_origin_history.side_effect = BadInputExc( # noqa 'Bad input! Bleh!') # when rv = self.client.get('/browse/revision/origin/798' '/branch/refs/heads/dev' '/ts/2012-11-11' '/history/1234' '/directory/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Bad input! 
Bleh!') self.assertEqual(self.get_context_variable('path'), 'some/path') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 798, 'refs/heads/dev', '2012-11-11', '1234', 'some/path', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history( self, mock_api): stub_dir = {'type': 'dir', 'content': [], 'revision': 'specific-rev-id'} mock_api.api_directory_through_revision_with_origin_history.return_value = stub_dir # noqa # when rv = self.client.get('/browse/revision/origin/101010' '/ts/1955-11-12' '/history/54628' '/directory/emacs-24.5/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('result'), stub_dir) self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('path'), 'emacs-24.5') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 101010, 'refs/heads/master', '1955-11-12', '54628', 'emacs-24.5', with_data=True) class EntityView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_entity_ko_not_found(self, mock_api): # given mock_api.api_entity_by_uuid.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/entity/' '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), []) self.assertEqual(self.get_context_variable('message'), 'Not found!') mock_api.api_entity_by_uuid.assert_called_once_with( '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') @patch('swh.web.ui.views.browse.api') @istest def browse_entity_ko_bad_input(self, mock_api): # given mock_api.api_entity_by_uuid.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/entity/blah-blah-uuid/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), []) self.assertEqual(self.get_context_variable('message'), 'wrong input!') mock_api.api_entity_by_uuid.assert_called_once_with( 'blah-blah-uuid') @patch('swh.web.ui.views.browse.api') @istest def browse_entity(self, mock_api): # given stub_entities = [ {'id': '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba'}] mock_api.api_entity_by_uuid.return_value = stub_entities # when rv = self.client.get('/browse/entity/' '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), stub_entities) self.assertIsNone(self.get_context_variable('message')) mock_api.api_entity_by_uuid.assert_called_once_with( '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba') diff --git a/swh/web/ui/utils.py b/swh/web/ui/utils.py index 65688a8d..fa6321e1 100644 --- a/swh/web/ui/utils.py +++ b/swh/web/ui/utils.py @@ -1,245 +1,255 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import flask import re from dateutil import parser def filter_endpoints(url_map, prefix_url_rule, blacklist=[]): """Filter endpoints by prefix url rule. 
Args: - url_map: Url Werkzeug.Map of rules - prefix_url_rule: prefix url string - blacklist: blacklist of some url Returns: Dictionary of url_rule with values methods and endpoint. The key is the url, the associated value is a dictionary of 'methods' (possible http methods) and 'endpoint' (python function) """ out = {} for r in url_map: rule = r['rule'] if rule == prefix_url_rule or rule in blacklist: continue if rule.startswith(prefix_url_rule): out[rule] = {'methods': sorted(map(str, r['methods'])), 'endpoint': r['endpoint']} return out def fmap(f, data): """Map f to data. Keep the initial data structure as original but map function f to each level. Args: f: function that expects one argument. data: data to traverse to apply the f function. list, map, dict or bare value. Returns: The same data-structure with modified values by the f function. """ if isinstance(data, map): return (fmap(f, x) for x in data) if isinstance(data, list): return [fmap(f, x) for x in data] if isinstance(data, dict): return {k: fmap(f, v) for (k, v) in data.items()} return f(data) def prepare_data_for_view(data, encoding='utf-8'): def prepare_data(s): # Note: can only be 'data' key with bytes of raw content if isinstance(s, bytes): try: return s.decode(encoding) except: return "Cannot decode the data bytes, try and set another " \ "encoding in the url (e.g. ?encoding=utf8) or " \ "download directly the " \ "content's raw data." if isinstance(s, str): return re.sub(r'/api/1/', r'/browse/', s) return s return fmap(prepare_data, data) def filter_field_keys(data, field_keys): """Given an object instance (directory or list), and a csv field keys to filter on. Return the object instance with filtered keys. Note: Returns obj as is if it's an instance of types not in (dictionary, list) Args: - data: one object (dictionary, list...) to filter. - field_keys: csv or set of keys to filter the object on Returns: obj filtered on field_keys """ if isinstance(data, map): return (filter_field_keys(x, field_keys) for x in data) if isinstance(data, list): return [filter_field_keys(x, field_keys) for x in data] if isinstance(data, dict): return {k: v for (k, v) in data.items() if k in field_keys} return data def person_to_string(person): """Map a person (person, committer, tagger, etc...) to a string. """ return ''.join([person['name'], ' <', person['email'], '>']) def parse_timestamp(timestamp): """Given a time or timestamp (as string), parse the result as datetime. Returns: - datetime result of parsing values. + a timezone-aware datetime representing the parsed value. If the parsed + value doesn't specify a timezone, UTC is assumed. Samples: - 2016-01-12 - 2016-01-12T09:19:12+0100 - Today is January 1, 2047 at 8:21:00AM - 1452591542 """ + default_timestamp = datetime.datetime.utcfromtimestamp(0).replace( + tzinfo=datetime.timezone.utc) try: - res = parser.parse(timestamp, ignoretz=False, fuzzy=True) + res = parser.parse(timestamp, ignoretz=False, fuzzy=True, + default=default_timestamp) except: - res = datetime.datetime.fromtimestamp(float(timestamp)) + res = datetime.datetime.utcfromtimestamp(float(timestamp)).replace( + tzinfo=datetime.timezone.utc) return res def enrich_release(release): """Enrich a release with link to the 'target' of 'type' revision. 
""" if 'target' in release and 'target_type' in release: if release['target_type'] == 'revision': release['target_url'] = flask.url_for('api_revision', sha1_git=release['target']) elif release['target_type'] == 'release': release['target_url'] = flask.url_for('api_release', sha1_git=release['target']) elif release['target_type'] == 'content': release['target_url'] = flask.url_for( 'api_content_metadata', q='sha1_git:' + release['target']) elif release['target_type'] == 'directory': release['target_url'] = flask.url_for('api_directory', q=release['target']) return release def enrich_directory(directory, context_url=None): """Enrich directory with url to content or directory. """ if 'type' in directory: target_type = directory['type'] target = directory['target'] if target_type == 'file': directory['target_url'] = flask.url_for('api_content_metadata', q='sha1_git:%s' % target) if context_url: directory['file_url'] = context_url + directory['name'] + '/' else: directory['target_url'] = flask.url_for('api_directory', sha1_git=target) if context_url: directory['dir_url'] = context_url + directory['name'] + '/' return directory def enrich_content(content): """Enrich content with 'data', a link to its raw content. """ if 'sha1' in content: content['data_url'] = flask.url_for('api_content_raw', q=content['sha1']) return content def enrich_entity(entity): """Enrich entity with """ if 'uuid' in entity: entity['uuid_url'] = flask.url_for('api_entity_by_uuid', uuid=entity['uuid']) if 'parent' in entity and entity['parent']: entity['parent_url'] = flask.url_for('api_entity_by_uuid', uuid=entity['parent']) return entity def enrich_revision(revision, context=None): """Enrich revision with links where it makes sense (directory, parents). """ if not context: context = revision['id'] revision['url'] = flask.url_for('api_revision', sha1_git=revision['id']) revision['history_url'] = flask.url_for('api_revision_log', sha1_git=revision['id']) if 'author' in revision: author = revision['author'] revision['author_url'] = flask.url_for('api_person', person_id=author['id']) if 'committer' in revision: committer = revision['committer'] revision['committer_url'] = flask.url_for('api_person', person_id=committer['id']) if 'directory' in revision: revision['directory_url'] = flask.url_for( 'api_directory', sha1_git=revision['directory']) if 'parents' in revision: parents = [] for parent in revision['parents']: parents.append(flask.url_for('api_revision_history', sha1_git_root=context, sha1_git=parent)) revision['parent_urls'] = parents if 'children' in revision: children = [] for child in revision['children']: children.append(flask.url_for('api_revision_history', sha1_git_root=context, sha1_git=child)) revision['children_urls'] = children + if 'message_decoding_failed' in revision: + revision['message_url'] = flask.url_for( + 'api_revision_raw_message', + sha1_git=revision['id']) + return revision diff --git a/swh/web/ui/views/api.py b/swh/web/ui/views/api.py index 04cce02b..2975322e 100644 --- a/swh/web/ui/views/api.py +++ b/swh/web/ui/views/api.py @@ -1,826 +1,943 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from types import GeneratorType from flask import request, url_for, Response, redirect from swh.web.ui import service, utils from swh.web.ui.exc import BadInputExc, NotFoundExc from swh.web.ui.main import 
app @app.route('/api/1/stat/counters/') def api_stats(): """Return statistics on SWH storage. Returns: SWH storage's statistics. """ return service.stat_counters() -@app.route('/api/1/search/') +@app.route('/api/1/search/', methods=['POST']) @app.route('/api/1/search//') -def api_search(q): +def api_search(q=None): """Search a content per hash. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256). Returns: Dictionary with 'found' key and the associated result. Raises: BadInputExc in case of unknown algo_hash or bad hash. Example: GET /api/1/search/sha1:bd819b5b28fcde3bf114d16a44ac46250da94ee5/ """ - r = service.lookup_hash(q).get('found') - return {'found': True if r else False} + + response = {'search_res': None, + 'search_stats': None} + search_stats = {'nbfiles': 0, 'pct': 0} + search_res = None + + # Single hash request route + if q: + r = service.search_hash(q) + search_res = [{'filename': None, + 'sha1': q, + 'found': r['found']}] + search_stats['nbfiles'] = 1 + search_stats['pct'] = 100 if r['found'] else 0 + + # Post form submission with many hash requests + elif request.method == 'POST': + data = request.form + queries = [] + # Remove potential inputs with no associated value + for k, v in data.items(): + if v is not None: + if k == 'q' and len(v) > 0: + queries.append({'filename': None, 'sha1': v}) + elif v != '': + queries.append({'filename': k, 'sha1': v}) + + if len(queries) > 0: + lookup = service.lookup_multiple_hashes(queries) + result = [] + for el in lookup: + result.append({'filename': el['filename'], + 'sha1': el['sha1'], + 'found': el['found']}) + search_res = result + nbfound = len([x for x in lookup if x['found']]) + search_stats['nbfiles'] = len(queries) + search_stats['pct'] = (nbfound / len(queries))*100 + + response['search_res'] = search_res + response['search_stats'] = search_stats + return response def _api_lookup(criteria, lookup_fn, error_msg_if_not_found, enrich_fn=lambda x: x, *args): """Capture a redundant behavior of: - looking up the backend with a criteria (be it an identifier or checksum) passed to the function lookup_fn - if nothing is found, raise an NotFoundExc exception with error message error_msg_if_not_found. - Otherwise if something is returned: - either as list, map or generator, map the enrich_fn function to it and return the resulting data structure as list. - either as dict and pass to enrich_fn and return the dict enriched. Args: - criteria: discriminating criteria to lookup - lookup_fn: function expects one criteria and optional supplementary *args. - error_msg_if_not_found: if nothing matching the criteria is found, raise NotFoundExc with this error message. - enrich_fn: Function to use to enrich the result returned by lookup_fn. Default to the identity function if not provided. - *args: supplementary arguments to pass to lookup_fn. Raises: NotFoundExp or whatever `lookup_fn` raises. """ res = lookup_fn(criteria, *args) if not res: raise NotFoundExc(error_msg_if_not_found) if isinstance(res, (map, list, GeneratorType)): enriched_data = [] for e in res: enriched_data.append(enrich_fn(e)) return enriched_data return enrich_fn(res) @app.route('/api/1/origin/') @app.route('/api/1/origin//') def api_origin(origin_id): """Return information about origin with id origin_id. Args: origin_id: the origin's identifier. Returns: Information on the origin if found. Raises: NotFoundExc if the origin is not found. 
Example: GET /api/1/origin/1/ """ return _api_lookup( origin_id, lookup_fn=service.lookup_origin, error_msg_if_not_found='Origin with id %s not found.' % origin_id) @app.route('/api/1/person/') @app.route('/api/1/person//') def api_person(person_id): """Return information about person with identifier person_id. Args: person_id: the person's identifier. Returns: Information on the person if found. Raises: NotFoundExc if the person is not found. Example: GET /api/1/person/1/ """ return _api_lookup( person_id, lookup_fn=service.lookup_person, error_msg_if_not_found='Person with id %s not found.' % person_id) @app.route('/api/1/release/') @app.route('/api/1/release//') def api_release(sha1_git): """Return information about release with id sha1_git. Args: sha1_git: the release's hash. Returns: Information on the release if found. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the release is not found. Example: GET /api/1/release/b307094f00c3641b0c9da808d894f3a325371414 """ error_msg = 'Release with sha1_git %s not found.' % sha1_git return _api_lookup( sha1_git, lookup_fn=service.lookup_release, error_msg_if_not_found=error_msg, enrich_fn=utils.enrich_release) def _revision_directory_by(revision, path, request_path, limit=100, with_data=False): """Compute the revision matching criterion's directory or content data. Args: revision: dictionary of criterions representing a revision to lookup path: directory's path to lookup request_path: request path which holds the original context to limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of with_data: indicate to retrieve the content's raw data if path resolves to a content. """ def enrich_directory_local(dir, context_url=request_path): return utils.enrich_directory(dir, context_url) rev_id, result = service.lookup_directory_through_revision( revision, path, limit=limit, with_data=with_data) content = result['content'] if result['type'] == 'dir': # dir_entries result['content'] = list(map(enrich_directory_local, content)) else: # content result['content'] = utils.enrich_content(content) return result @app.route('/api/1/revision' '/origin/' '/directory/') @app.route('/api/1/revision' '/origin/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/directory//') def api_directory_through_revision_origin(origin_id, branch_name="refs/heads/master", ts=None, path=None, with_data=False): """Display directory or content information through a revision identified by origin/branch/timestamp. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). path: Path to directory or file to display. with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: Information on the directory or content pointed to by such revision. Raises: NotFoundExc if the revision is not found or the path pointed to is not found. 
""" if ts: ts = utils.parse_timestamp(ts) return _revision_directory_by( { 'origin_id': origin_id, 'branch_name': branch_name, 'ts': ts }, path, request.path, with_data=with_data) @app.route('/api/1/revision' '/origin/' '/history//') @app.route('/api/1/revision' '/origin/' '/branch/' '/history//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/history//') def api_revision_history_through_origin(origin_id, branch_name="refs/heads/master", ts=None, sha1_git=None): """ Return information about revision sha1_git, limited to the sub-graph of all transitive parents of the revision root identified by (origin_id, branch_name, ts). Given sha1_git_root such root revision's identifier, in other words, sha1_git is an ancestor of sha1_git_root. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). sha1_git: one of sha1_git_root's ancestors. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ limit = int(request.args.get('limit', '100')) if ts: ts = utils.parse_timestamp(ts) rev_root, revision = service.lookup_revision_with_context_by( origin_id, branch_name, ts, sha1_git, limit) if not revision: raise NotFoundExc( "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s' " "sha1_git_root being the revision's identifier pointed to by " "(origin_id: %s, branch_name: %s, ts: %s)." % (sha1_git, rev_root['id'], origin_id, branch_name, ts)) return utils.enrich_revision(revision, context=rev_root['id']) @app.route('/api/1/revision' '/origin/' '/history/' '/directory/') @app.route('/api/1/revision' '/origin/' '/history/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/history/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/history/' '/directory//') @app.route('/api/1/revision' '/origin/' '/ts/' '/history/' '/directory/') @app.route('/api/1/revision' '/origin/' '/ts/' '/history/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/history/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/history/' '/directory//') def api_directory_through_revision_with_origin_history( origin_id, branch_name="refs/heads/master", ts=None, sha1_git=None, path=None, with_data=False): """Return information about directory or content pointed to by the revision defined as: revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root (being the identified sha1 by looking up origin_id/branch_name/ts) Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). sha1_git: one of sha1_git_root's ancestors. path: optional directory or content pointed to by that revision. limit: optional query parameter to limit the revisions log (default to 100). 
For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root or the path referenced does not exist. """ limit = int(request.args.get('limit', '100')) if ts: ts = utils.parse_timestamp(ts) return _revision_directory_by( { 'origin_id': origin_id, 'branch_name': branch_name, 'ts': ts, 'sha1_git': sha1_git }, path, request.path, limit=limit, with_data=with_data) @app.route('/api/1/revision' '/origin/') @app.route('/api/1/revision' '/origin//') @app.route('/api/1/revision' '/origin/' '/branch//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts//') @app.route('/api/1/revision' '/origin/' '/ts//') def api_revision_with_origin(origin_id, branch_name="refs/heads/master", ts=None): """Instead of having to specify a (root) revision by SHA1_GIT, users might want to specify a place and a time. In SWH a "place" is an origin; a "time" is a timestamp at which some place has been observed by SWH crawlers. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). Returns: Information on the revision if found. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the revision is not found. """ if ts: ts = utils.parse_timestamp(ts) return _api_lookup( origin_id, service.lookup_revision_by, 'Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' % (origin_id, branch_name, ts), utils.enrich_revision, branch_name, ts) @app.route('/api/1/revision/') @app.route('/api/1/revision//') def api_revision(sha1_git): """Return information about revision with id sha1_git. Args: sha1_git: the revision's hash. Returns: Information on the revision if found. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the revision is not found. Example: GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e """ return _api_lookup( sha1_git, lookup_fn=service.lookup_revision, error_msg_if_not_found='Revision with sha1_git %s not' ' found.' % sha1_git, enrich_fn=utils.enrich_revision) +@app.route('/api/1/revision//raw/') +def api_revision_raw_message(sha1_git): + """Return the raw data of the revision's message + + Args: + sha1_git: the revision's hash + + Returns: + The raw revision message, possibly in an illegible + format for humans, decoded in utf-8 by default. + + Raises: + BadInputExc in case of unknown algo_hash or bad hash. + NotFoundExc if the revision is not found or the revision has no + message + + Example: + GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e/raw/ + + """ + raw = service.lookup_revision_message(sha1_git) + return Response(raw['message'], + headers={'Content-disposition': 'attachment;' + 'filename=rev_%s_raw' % sha1_git}, + mimetype='application/octet-stream') + + @app.route('/api/1/revision//directory/') @app.route('/api/1/revision//directory//') def api_revision_directory(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. 
Otherwise, display the directory pointed by dir_path (if it exists). Args: sha1_git: revision's hash. dir_path: optional directory pointed to by that revision. with_data: indicate to retrieve the content's raw data if path resolves to a content Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc either if the revision is not found or the path referenced does not exist Example: GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e/directory/ """ return _revision_directory_by( { 'sha1_git': sha1_git }, dir_path, request.path, with_data=with_data) @app.route('/api/1/revision//history//') def api_revision_history(sha1_git_root, sha1_git): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision of the browsed history. sha1_git: one of sha1_git_root's ancestors. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ limit = int(request.args.get('limit', '100')) if sha1_git == sha1_git_root: return redirect(url_for('api_revision', sha1_git=sha1_git, limit=limit)) revision = service.lookup_revision_with_context(sha1_git_root, sha1_git, limit) if not revision: raise NotFoundExc( "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" % (sha1_git, sha1_git_root)) return utils.enrich_revision(revision, context=sha1_git_root) @app.route('/api/1/revision/' '/history/' '/directory/') @app.route('/api/1/revision/' '/history/' '/directory//') def api_revision_history_directory(sha1_git_root, sha1_git, dir_path=None, with_data=False): """Return information about directory pointed to by the revision defined as: revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. Args: sha1_git_root: latest revision of the browsed history. sha1_git: one of sha1_git_root's ancestors. dir_path: optional directory pointed to by that revision. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root or the path referenced does not exist """ limit = int(request.args.get('limit', '100')) if sha1_git == sha1_git_root: return redirect(url_for('api_revision_directory', sha1_git=sha1_git, dir_path=dir_path), code=301) return _revision_directory_by( { 'sha1_git_root': sha1_git_root, 'sha1_git': sha1_git }, dir_path, request.path, limit=limit, with_data=with_data) @app.route('/api/1/revision//log/') def api_revision_log(sha1_git): """Show all revisions (~git log) starting from sha1_git. The first element returned is the given sha1_git. 
Args: sha1_git: the revision's hash. limit: optional query parameter to limit the revisions log (default to 100). Returns: Information on the revision if found. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the revision is not found. """ limit = int(request.args.get('limit', '100')) def lookup_revision_log_with_limit(s, limit=limit): return service.lookup_revision_log(s, limit) error_msg = 'Revision with sha1_git %s not found.' % sha1_git return _api_lookup(sha1_git, lookup_fn=lookup_revision_log_with_limit, error_msg_if_not_found=error_msg, enrich_fn=utils.enrich_revision) +@app.route('/api/1/revision' + '/origin/log/') +@app.route('/api/1/revision' + '/origin//log/') +@app.route('/api/1/revision' + '/origin/' + '/branch//log/') +@app.route('/api/1/revision' + '/origin/' + '/branch/' + '/ts//log/') +@app.route('/api/1/revision' + '/origin/' + '/ts//log/') +def api_revision_log_by(origin_id, + branch_name='refs/heads/master', + ts=None): + """Show all revisions (~git log) starting from the revision + described by its origin_id, optional branch name and timestamp. + The first element returned is the described revision. + + Args: + origin_id: the revision's origin. + branch_name: the branch of the revision (optional, defaults to + master + ts: the requested timeframe near which the revision was created. + limit: optional query parameter to limit the revisions log + (default to 100). + + Returns: + Information on the revision log if found. + + Raises: + NotFoundExc if the revision is not found. + """ + if ts: + ts = utils.parse_timestamp(ts) + + error_msg = 'No revision matching origin %s ' % origin_id + error_msg += ', branch name %s' % branch_name + error_msg += (' and time stamp %s.' % ts) if ts else '.' + return _api_lookup( + origin_id, + service.lookup_revision_log_by, + error_msg, + utils.enrich_revision, + branch_name, + ts) + + @app.route('/api/1/directory/') @app.route('/api/1/directory//') @app.route('/api/1/directory///') def api_directory(sha1_git, path=None): """Return information about release with id sha1_git. Args: sha1_git: Directory's sha1_git. If path exists: starting directory for relative navigation. path: The path to the queried directory Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the content is not found. Example: GET /api/1/directory/8d7dc91d18546a91564606c3e3695a5ab568d179 GET /api/1/directory/8d7dc91d18546a91564606c3e3695a5ab568d179/path/dir/ """ if path: error_msg_path = ('Entry with path %s relative to directory ' 'with sha1_git %s not found.') % (path, sha1_git) return _api_lookup( sha1_git, service.lookup_directory_with_path, error_msg_path, utils.enrich_directory, path) else: error_msg_nopath = 'Directory with sha1_git %s not found.' % sha1_git return _api_lookup( sha1_git, service.lookup_directory, error_msg_nopath, utils.enrich_directory) # @app.route('/api/1/browse/') # @app.route('/api/1/browse//') def api_content_checksum_to_origin(q): """Return content information up to one of its origin if the content is found. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256). Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the content is not found. Example: GET /api/1/browse/sha1_git:88b9b366facda0b5ff8d8640ee9279bed346f242 """ found = service.lookup_hash(q)['found'] if not found: raise NotFoundExc('Content with %s not found.' 
% q) return service.lookup_hash_origin(q) @app.route('/api/1/content//raw/') def api_content_raw(q): """Return content's raw data if content is found. Args: q is of the form (algo_hash:)hash with algo_hash in (sha1, sha1_git, sha256). When algo_hash is not provided, 'hash' is considered sha1. Returns: Content's raw data in application/octet-stream. Raises: - BadInputExc in case of unknown algo_hash or bad hash - NotFoundExc if the content is not found. """ def generate(content): yield content['data'] content = service.lookup_content_raw(q) if not content: raise NotFoundExc('Content with %s not found.' % q) return Response(generate(content), mimetype='application/octet-stream') @app.route('/api/1/content/') @app.route('/api/1/content//') def api_content_metadata(q): """Return content information if content is found. Args: q is of the form (algo_hash:)hash with algo_hash in (sha1, sha1_git, sha256). When algo_hash is not provided, 'hash' is considered sha1. Returns: Content's information. Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the content is not found. Example: GET /api/1/content/sha256:e2c76e40866bb6b28916387bdfc8649beceb 523015738ec6d4d540c7fe65232b """ return _api_lookup( q, lookup_fn=service.lookup_content, error_msg_if_not_found='Content with %s not found.' % q, enrich_fn=utils.enrich_content) @app.route('/api/1/entity/') @app.route('/api/1/entity//') def api_entity_by_uuid(uuid): """Return content information if content is found. Args: q is of the form (algo_hash:)hash with algo_hash in (sha1, sha1_git, sha256). When algo_hash is not provided, 'hash' is considered sha1. Returns: Content's information. Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the content is not found. Example: - GET /api/1/entity/5f4d4c51-498a-4e28-88b3-b3e4e8396cba/ - GET /api/1/entity/7c33636b-8f11-4bda-89d9-ba8b76a42cec/ """ return _api_lookup( uuid, lookup_fn=service.lookup_entity_by_uuid, error_msg_if_not_found="Entity with uuid '%s' not found." % uuid, enrich_fn=utils.enrich_entity) @app.route('/api/1/uploadnsearch/', methods=['POST']) def api_uploadnsearch(): """Upload the file's content in the post body request. Compute its hash and determine if it exists in the storage. Args: request.files filled with the filename's data to upload. Returns: Dictionary with 'sha1', 'filename' and 'found' predicate depending on whether we find it or not. Raises: BadInputExc in case of the form submitted is incorrect. """ file = request.files.get('filename') if not file: raise BadInputExc("Bad request, missing 'filename' entry in form.") return service.upload_and_search(file) diff --git a/swh/web/ui/views/browse.py b/swh/web/ui/views/browse.py index 3e5a5b10..eef62206 100644 --- a/swh/web/ui/views/browse.py +++ b/swh/web/ui/views/browse.py @@ -1,752 +1,793 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from encodings.aliases import aliases from flask import render_template, request, url_for, redirect from flask.ext.api.decorators import set_renderers from flask.ext.api.renderers import HTMLRenderer from swh.core.hashutil import ALGORITHMS from .. import service, utils from ..exc import BadInputExc, NotFoundExc from ..main import app from . 
import api hash_filter_keys = ALGORITHMS @app.route('/search/', methods=['GET', 'POST']) @set_renderers(HTMLRenderer) def search(): """Search for hashes in swh-storage. One form to submit either: - hash query to look up in swh storage - file hashes calculated client-side to be queried in swh storage - both Returns: dict representing data to look for in swh storage. The following keys are returned: - search_stats: {'nbfiles': X, 'pct': Y} the number of total queried files and percentage of files not in storage respectively - responses: array of {'filename': X, 'sha1': Y, 'found': Z} - messages: General messages. TODO: Batch-process with all checksums, not just sha1 """ - env = {'q': None, + + env = {'search_res': None, 'search_stats': None, - 'responses': None, - 'messages': []} + 'message': []} - search_stats = None - responses = [] - messages = [] + search_stats = {'nbfiles': 0, 'pct': 0} + search_res = None + message = '' # Get with a single hash request if request.method == 'GET': data = request.args q = data.get('q') - env['q'] = q if q: try: - search_stats = {'nbfiles': 0, 'pct': 0} - r = service.lookup_hash(q) - responses.append({'filename': 'User submitted hash', - 'sha1': q, - 'found': r.get('found') is not None}) - search_stats['nbfiles'] = 1 - search_stats['pct'] = 100 if r.get('found') is not None else 0 + search = api.api_search(q) + search_res = search['search_res'] + search_stats = search['search_stats'] except BadInputExc as e: - messages.append(str(e)) + message = str(e) - # POST form submission with many hash requests + # Post form submission with many hash requests elif request.method == 'POST': - data = request.form - search_stats = {'nbfiles': 0, 'pct': 0} - queries = [] - # Remove potential inputs with no associated value - for k, v in data.items(): - if v is not None and v != '': - queries.append({'filename': k, 'sha1': v}) - - if len(queries) > 0: - try: - lookup = service.lookup_multiple_hashes(queries) - nbfound = len([x for x in lookup if x['found']]) - responses = lookup - search_stats['nbfiles'] = len(queries) - search_stats['pct'] = (nbfound / len(queries))*100 - except BadInputExc as e: - messages.append(str(e)) + try: + search = api.api_search(None) + search_res = search['search_res'] + search_stats = search['search_stats'] + except BadInputExc as e: + message = str(e) env['search_stats'] = search_stats - env['responses'] = responses - env['messages'] = messages + env['search_res'] = search_res + env['message'] = message return render_template('upload_and_search.html', **env) @app.route('/browse/content/') @app.route('/browse/content//') @set_renderers(HTMLRenderer) def browse_content(q): """Given a hash and a checksum, display the content's meta-data. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256) Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if the content is not found. """ env = {'q': q, 'message': None, 'content': None} encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' 
\ 'Supported Encodings: %s' % ( encoding, list(aliases.keys())) return render_template('content.html', **env) try: content = api.api_content_metadata(q) content_raw = service.lookup_content_raw(q) if content_raw: content['data'] = content_raw['data'] env['content'] = utils.prepare_data_for_view(content, encoding=encoding) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('content.html', **env) @app.route('/browse/content//raw/') def browse_content_raw(q): """Given a hash and a checksum, display the content's raw data. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256) Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if the content is not found. """ return redirect(url_for('api_content_raw', q=q)) def _origin_seen(q, data): """Given an origin, compute a message string with the right information. Args: origin: a dictionary with keys: - origin: a dictionary with type and url keys - occurrence: a dictionary with a validity range Returns: Message as a string """ origin_type = data['origin_type'] origin_url = data['origin_url'] revision = data['revision'] branch = data['branch'] path = data['path'] return """The content with hash %s has been seen on origin with type '%s' at url '%s'. The revision was identified at '%s' on branch '%s'. The file's path referenced was '%s'.""" % (q, origin_type, origin_url, revision, branch, path) # @app.route('/browse/content//origin/') @set_renderers(HTMLRenderer) def browse_content_with_origin(q): """Show content information. Args: - q: query string of the form with `algo_hash` in sha1, sha1_git, sha256. This means that several different URLs (at least one per HASH_ALGO) will point to the same content sha: the sha with 'hash' format Returns: The content's information at for a given checksum. """ env = {'q': q} try: origin = api.api_content_checksum_to_origin(q) message = _origin_seen(q, origin) except (NotFoundExc, BadInputExc) as e: message = str(e) env['message'] = message return render_template('content-with-origin.html', **env) @app.route('/browse/directory/') @app.route('/browse/directory//') @app.route('/browse/directory///') @set_renderers(HTMLRenderer) def browse_directory(sha1_git, path=None): """Show directory information. Args: - sha1_git: the directory's sha1 git identifier. If path is set, the base directory for the relative path to the entry - path: the path to the requested entry, relative to the directory pointed by sha1_git Returns: The content's information at sha1_git, or at sha1_git/path if path is set. 
""" env = {'sha1_git': sha1_git, 'files': []} try: if path: env['message'] = ('Listing for directory with path %s from %s:' % (path, sha1_git)) dir_or_file = service.lookup_directory_with_path( sha1_git, path) if dir_or_file['type'] == 'file': fsha = 'sha256:%s' % dir_or_file['sha256'] content = api.api_content_metadata(fsha) content_raw = service.lookup_content_raw(fsha) if content_raw: # FIXME: currently assuming utf8 encoding content['data'] = content_raw['data'] env['content'] = utils.prepare_data_for_view( content, encoding='utf-8') return render_template('content.html', **env) else: directory_files = api.api_directory(dir_or_file['target']) env['files'] = utils.prepare_data_for_view(directory_files) else: env['message'] = "Listing for directory %s:" % sha1_git directory_files = api.api_directory(sha1_git) env['files'] = utils.prepare_data_for_view(directory_files) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('directory.html', **env) @app.route('/browse/origin/') @app.route('/browse/origin//') @set_renderers(HTMLRenderer) def browse_origin(origin_id): """Browse origin with id id. """ env = {'origin_id': origin_id, 'origin': None} try: env['origin'] = api.api_origin(origin_id) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('origin.html', **env) @app.route('/browse/person/') @app.route('/browse/person//') @set_renderers(HTMLRenderer) def browse_person(person_id): """Browse person with id id. """ env = {'person_id': person_id, 'person': None, 'message': None} try: env['person'] = api.api_person(person_id) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('person.html', **env) @app.route('/browse/release/') @app.route('/browse/release//') @set_renderers(HTMLRenderer) def browse_release(sha1_git): """Browse release with sha1_git. """ env = {'sha1_git': sha1_git, 'message': None, 'release': None} try: rel = api.api_release(sha1_git) env['release'] = utils.prepare_data_for_view(rel) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('release.html', **env) @app.route('/browse/revision/') @app.route('/browse/revision//') @set_renderers(HTMLRenderer) def browse_revision(sha1_git): """Browse revision with sha1_git. """ env = {'sha1_git': sha1_git, 'message': None, 'revision': None} try: rev = api.api_revision(sha1_git) env['revision'] = utils.prepare_data_for_view(rev) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision.html', **env) +@app.route('/browse/revision//raw/') +def browse_revision_raw_message(sha1_git): + """Given a sha1_git, display the corresponding revision's raw message. + + """ + return redirect(url_for('api_revision_raw_message', sha1_git=sha1_git)) + + @app.route('/browse/revision//log/') @set_renderers(HTMLRenderer) def browse_revision_log(sha1_git): - """Browse revision with sha1_git. + """Browse revision with sha1_git's log. 
""" env = {'sha1_git': sha1_git, + 'sha1_url': '/browse/revision/%s/' % sha1_git, 'message': None, 'revisions': []} try: revisions = api.api_revision_log(sha1_git) env['revisions'] = map(utils.prepare_data_for_view, revisions) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision-log.html', **env) +@app.route('/browse/revision' + '/origin/log/') +@app.route('/browse/revision' + '/origin//log/') +@app.route('/browse/revision' + '/origin/' + '/branch//log/') +@app.route('/browse/revision' + '/origin/' + '/branch/' + '/ts//log/') +@app.route('/browse/revision' + '/origin/' + '/ts//log/') +@set_renderers(HTMLRenderer) +def browse_revision_log_by(origin_id, + branch_name='refs/heads/master', + timestamp=None): + """Browse the revision described by origin, branch name and timestamp's + log + + Args: + origin_id: the revision's origin + branch_name: the revision's branch + timestamp: the requested timeframe for the revision + + Returns: + The revision log of the described revision as a list of revisions + if it is found. + """ + env = {'sha1_git': None, + 'origin_id': origin_id, + 'origin_url': '/browse/origin/%d/' % origin_id, + 'branch_name': branch_name, + 'timestamp': timestamp, + 'message': None, + 'revisions': []} + + try: + revisions = api.api_revision_log_by( + origin_id, branch_name, timestamp) + env['revisions'] = map(utils.prepare_data_for_view, revisions) + except (NotFoundExc, BadInputExc) as e: + env['message'] = str(e) + + return render_template('revision-log.html', **env) + + @app.route('/browse/revision//history//') @set_renderers(HTMLRenderer) def browse_revision_history(sha1_git_root, sha1_git): """Display information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision of the browsed history. sha1_git: one of sha1_git_root's ancestors. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root. """ env = {'sha1_git_root': sha1_git_root, 'sha1_git': sha1_git, 'message': None, 'keys': [], 'revision': None} if sha1_git == sha1_git_root: return redirect(url_for('browse_revision', sha1_git=sha1_git)) try: revision = api.api_revision_history(sha1_git_root, sha1_git) env['revision'] = utils.prepare_data_for_view(revision) except (BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision.html', **env) @app.route('/browse/revision//directory/') @app.route('/browse/revision//directory//') @set_renderers(HTMLRenderer) def browse_revision_directory(sha1_git, path=None): """Browse directory from revision with sha1_git. """ env = { 'sha1_git': sha1_git, 'path': '.' if not path else path, 'message': None, 'result': None } encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' 
\ 'Supported Encodings: %s' % ( encoding, list(aliases.keys())) return render_template('revision-directory.html', **env) try: result = api.api_revision_directory(sha1_git, path, with_data=True) result['content'] = utils.prepare_data_for_view(result['content'], encoding=encoding) env['revision'] = result['revision'] env['result'] = result except (BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision-directory.html', **env) @app.route('/browse/revision/' '/history/' '/directory/') @app.route('/browse/revision/' '/history/' '/directory//') @set_renderers(HTMLRenderer) def browse_revision_history_directory(sha1_git_root, sha1_git, path=None): """Return information about directory pointed to by the revision defined as: revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. Args: sha1_git_root: latest revision of the browsed history. sha1_git: one of sha1_git_root's ancestors. path: optional directory pointed to by that revision. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root or the path referenced does not exist """ env = { 'sha1_git_root': sha1_git_root, 'sha1_git': sha1_git, 'path': '.' if not path else path, 'message': None, 'result': None } encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' \ 'Supported Encodings: %s' % ( encoding, list(aliases.keys())) return render_template('revision-directory.html', **env) if sha1_git == sha1_git_root: return redirect(url_for('browse_revision_directory', sha1_git=sha1_git, path=path, encoding=encoding), code=301) try: result = api.api_revision_history_directory(sha1_git_root, sha1_git, path, with_data=True) env['revision'] = result['revision'] env['content'] = utils.prepare_data_for_view(result['content'], encoding=encoding) env['result'] = result except (BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision-directory.html', **env) @app.route('/browse/revision' '/origin/' '/history/' '/directory/') @app.route('/browse/revision' '/origin/' '/history/' '/directory//') @app.route('/browse/revision' '/origin/' '/branch/' '/history/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/history/' '/directory//') @app.route('/browse/revision' '/origin/' '/ts/' '/history/' '/directory/') @app.route('/browse/revision' '/origin/' '/ts/' '/history/' '/directory//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/history/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/history/' '/directory//') @set_renderers(HTMLRenderer) def browse_directory_through_revision_with_origin_history( origin_id, branch_name="refs/heads/master", ts=None, sha1_git=None, path=None): env = { 'origin_id': origin_id, 'branch_name': branch_name, 'ts': ts, 'sha1_git': sha1_git, 'path': '.' if not path else path, 'message': None, 'result': None } encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = (('Encoding %s not supported.' 
'Supported Encodings: %s') % ( encoding, list(aliases.keys()))) return render_template('revision-directory.html', **env) try: result = api.api_directory_through_revision_with_origin_history( origin_id, branch_name, ts, sha1_git, path, with_data=True) env['revision'] = result['revision'] env['content'] = utils.prepare_data_for_view(result['content'], encoding=encoding) env['result'] = result except (BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision-directory.html', **env) @app.route('/browse/revision' '/origin/') @app.route('/browse/revision' '/origin//') @app.route('/browse/revision' '/origin/' '/branch//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts//') @app.route('/browse/revision' '/origin/' '/ts//') @set_renderers(HTMLRenderer) def browse_revision_with_origin(origin_id, branch_name="refs/heads/master", ts=None): """Instead of having to specify a (root) revision by SHA1_GIT, users might want to specify a place and a time. In SWH a "place" is an origin; a "time" is a timestamp at which some place has been observed by SWH crawlers. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). Returns: Information on the revision if found. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the revision is not found. """ env = {'message': None, 'revision': None} try: revision = api.api_revision_with_origin(origin_id, branch_name, ts) env['revision'] = utils.prepare_data_for_view(revision) except (ValueError, NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision.html', **env) @app.route('/browse/revision' '/origin/' '/history//') @app.route('/browse/revision' '/origin/' '/branch/' '/history//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/history//') @set_renderers(HTMLRenderer) def browse_revision_history_through_origin(origin_id, branch_name='refs/heads/master', ts=None, sha1_git=None): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of the revision root identified by (origin_id, branch_name, ts). Given sha1_git_root such root revision's identifier, in other words, sha1_git is an ancestor of sha1_git_root. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). sha1_git: one of sha1_git_root's ancestors. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root. 
""" env = {'message': None, 'revision': None} try: revision = api.api_revision_history_through_origin( origin_id, branch_name, ts, sha1_git) env['revision'] = utils.prepare_data_for_view(revision) except (ValueError, BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision.html', **env) @app.route('/browse/revision' '/origin/' '/directory/') @app.route('/browse/revision' '/origin/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/directory//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/directory//') @set_renderers(HTMLRenderer) def browse_revision_directory_through_origin(origin_id, branch_name='refs/heads/master', ts=None, path=None): env = {'message': None, 'origin_id': origin_id, 'ts': ts, 'path': '.' if not path else path, 'result': None} encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' \ 'Supported Encodings: %s' % ( encoding, list(aliases.keys())) return render_template('revision-directory.html', **env) try: result = api.api_directory_through_revision_origin( origin_id, branch_name, ts, path, with_data=True) result['content'] = utils.prepare_data_for_view(result['content'], encoding=encoding) env['revision'] = result['revision'] env['result'] = result except (ValueError, BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision-directory.html', **env) @app.route('/browse/entity/') @app.route('/browse/entity//') @set_renderers(HTMLRenderer) def browse_entity(uuid): env = {'entities': [], 'message': None} try: entities = api.api_entity_by_uuid(uuid) env['entities'] = entities except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('entity.html', **env) diff --git a/version.txt b/version.txt index 216e8df1..e558ccc7 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -v0.0.28-0-gd99ae9c \ No newline at end of file +v0.0.29-0-g379345c \ No newline at end of file