diff --git a/PKG-INFO b/PKG-INFO index 6485330c..d0adc346 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.web.ui -Version: 0.0.32 +Version: 0.0.33 Summary: Software Heritage Web UI Home-page: https://forge.softwareheritage.org/diffusion/DWUI/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/requirements.txt b/requirements.txt index 096cc139..18e67367 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,19 +1,19 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names. For the full spec or # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html # Runtime dependencies Flask -Flask-API swh.core >= 0.0.20 swh.storage >= 0.0.33 dateutil +docutils # Test dependencies #Flask-Testing #blinker # Non-Python dependencies #libjs-cryptojs #libjs-jquery-flot #libjs-jquery-flot-tooltip diff --git a/resources/test/webapp.ini b/resources/test/webapp.ini index 5eba7fba..f84418a0 100644 --- a/resources/test/webapp.ini +++ b/resources/test/webapp.ini @@ -1,26 +1,26 @@ [main] # the dedicated storage arguments (comma separated list of values) storage_args = http://localhost:5000/ # either remote_storage or local_storage storage_class = remote_storage # where to log information log_dir = /tmp/swh/web-ui/log # for dev only debug = true # current server (0.0.0.0 for world opening) host = 127.0.0.1 # its port port = 6543 -# Upload folder for temporary upload and hash -upload_folder = /tmp/swh-web-ui/uploads +# Max revisions shown in a log +max_log_revs = 25 # Allowed extensions for upload (commented or empty means all is accepted) # Otherwise, comma separated values of extensions. 
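The webapp.ini hunk above trades the upload settings for a revision-log cap. As a hedged sketch of how a view might consume the new max_log_revs value once the app has been configured (the view function name is hypothetical, not part of this patch):

```python
# Hypothetical view-side use of the new max_log_revs setting (default 25).
from swh.web.ui import service
from swh.web.ui.main import app

def revision_log_limited(sha1_git):
    # 'conf' is stored on the app by run_from_webserver/run_debug_from
    limit = app.config['conf']['max_log_revs']
    return list(service.lookup_revision_log(sha1_git, limit))
```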
# upload_allowed_extensions = txt, csv # upload_allowed_extensions = diff --git a/swh.web.ui.egg-info/PKG-INFO b/swh.web.ui.egg-info/PKG-INFO index 6485330c..d0adc346 100644 --- a/swh.web.ui.egg-info/PKG-INFO +++ b/swh.web.ui.egg-info/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.web.ui -Version: 0.0.32 +Version: 0.0.33 Summary: Software Heritage Web UI Home-page: https://forge.softwareheritage.org/diffusion/DWUI/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/swh.web.ui.egg-info/SOURCES.txt b/swh.web.ui.egg-info/SOURCES.txt index 4a0d569f..2a869de9 100644 --- a/swh.web.ui.egg-info/SOURCES.txt +++ b/swh.web.ui.egg-info/SOURCES.txt @@ -1,82 +1,84 @@ .gitignore AUTHORS LICENSE MANIFEST.in Makefile Makefile.local README README-dev.md README-uri-scheme.md requirements.txt setup.py version.txt bin/swh-web-ui-dev debian/changelog debian/compat debian/control debian/copyright debian/rules debian/source/format docs/Makefile docs/source/conf.py docs/source/index.rst docs/source/modules.rst docs/source/swh.web.ui.rst docs/source/_static/dependencies.dot docs/source/_static/dependencies.png resources/test/webapp.ini swh.web.ui.egg-info/PKG-INFO swh.web.ui.egg-info/SOURCES.txt swh.web.ui.egg-info/dependency_links.txt swh.web.ui.egg-info/requires.txt swh.web.ui.egg-info/top_level.txt swh/web/ui/__init__.py swh/web/ui/apidoc.py swh/web/ui/backend.py swh/web/ui/converters.py swh/web/ui/exc.py swh/web/ui/main.py swh/web/ui/query.py swh/web/ui/renderers.py swh/web/ui/service.py swh/web/ui/utils.py swh/web/ui/static/css/bootstrap-responsive.min.css swh/web/ui/static/css/style.css swh/web/ui/static/js/calendar.js -swh/web/ui/static/js/filedrop.js +swh/web/ui/static/js/search.js swh/web/ui/static/lib/README swh/web/ui/static/lib/jquery.flot.min.js swh/web/ui/static/lib/jquery.flot.selection.min.js swh/web/ui/static/lib/jquery.flot.time.min.js swh/web/ui/templates/about.html swh/web/ui/templates/api.html +swh/web/ui/templates/apidoc.html swh/web/ui/templates/content-with-origin.html swh/web/ui/templates/content.html swh/web/ui/templates/directory.html swh/web/ui/templates/entity.html swh/web/ui/templates/home.html swh/web/ui/templates/layout.html swh/web/ui/templates/origin.html swh/web/ui/templates/person.html swh/web/ui/templates/release.html swh/web/ui/templates/revision-directory.html swh/web/ui/templates/revision-log.html swh/web/ui/templates/revision.html swh/web/ui/templates/search.html swh/web/ui/tests/__init__.py +swh/web/ui/tests/test_apidoc.py swh/web/ui/tests/test_app.py swh/web/ui/tests/test_backend.py swh/web/ui/tests/test_converters.py swh/web/ui/tests/test_query.py swh/web/ui/tests/test_renderers.py swh/web/ui/tests/test_service.py swh/web/ui/tests/test_utils.py swh/web/ui/tests/views/__init__.py swh/web/ui/tests/views/test_api.py swh/web/ui/tests/views/test_browse.py swh/web/ui/tests/views/test_main.py swh/web/ui/views/__init__.py swh/web/ui/views/api.py swh/web/ui/views/browse.py swh/web/ui/views/errorhandler.py swh/web/ui/views/main.py \ No newline at end of file diff --git a/swh.web.ui.egg-info/requires.txt b/swh.web.ui.egg-info/requires.txt index 638e2b70..9c2bc271 100644 --- a/swh.web.ui.egg-info/requires.txt +++ b/swh.web.ui.egg-info/requires.txt @@ -1,5 +1,5 @@ Flask -Flask-API dateutil +docutils swh.core>=0.0.20 swh.storage>=0.0.33 diff --git a/swh/web/ui/apidoc.py b/swh/web/ui/apidoc.py index 895999ad..31e16c0a 100644 --- a/swh/web/ui/apidoc.py +++ b/swh/web/ui/apidoc.py @@ -1,49 
+1,241 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information +import re + +from functools import wraps +from enum import Enum + +from flask import request, render_template, url_for +from flask import g -import os -from swh.web.ui import utils, main from swh.web.ui.main import app -def _create_url_doc_endpoints(rules): - def split_path(path, acc): - rpath = os.path.dirname(path) - if rpath == '/': - yield from acc - else: - acc.append(rpath+'/') - yield from split_path(rpath, acc) - - url_doc_endpoints = set() - for rule in rules: - url_rule = rule['rule'] - url_doc_endpoints.add(url_rule) - if '<' in url_rule or '>' in url_rule: - continue - acc = [] - for rpath in split_path(url_rule, acc): - if rpath in url_doc_endpoints: - continue - yield rpath - url_doc_endpoints.add(rpath) - - -def install_browsable_api_endpoints(): - """Install browsable endpoints. - - """ - url_doc_endpoints = _create_url_doc_endpoints(main.rules()) - for url_doc in url_doc_endpoints: - endpoint_name = 'doc_api_' + url_doc.strip('/').replace('/', '_') - - def view_func(url_doc=url_doc): - return utils.filter_endpoints(main.rules(), - url_doc) - app.add_url_rule(rule=url_doc, - endpoint=endpoint_name, - view_func=view_func, - methods=['GET']) +class argtypes(Enum): # noqa: N801 + """Class for centralizing argument type descriptions + + """ + + ts = 'timestamp' + int = 'integer' + str = 'string' + path = 'path' + sha1 = 'sha1' + uuid = 'uuid' + sha1_git = 'sha1_git' + algo_and_hash = 'algo_hash:hash' + + +class rettypes(Enum): # noqa: N801 + """Class for centralizing return type descriptions + + """ + octet_stream = 'octet stream' + list = 'list' + dict = 'dict' + + +class excs(Enum): # noqa: N801 + """Class for centralizing exception type descriptions + + """ + + badinput = 'BadInputExc' + notfound = 'NotFoundExc' + + +class APIUrls(object): + """ + Class to manage API documentation URLs. + * Indexes all routes documented using apidoc's decorators. + * Tracks endpoint/request processing method relationships for use + in generating related urls in API documentation + Relies on the load_controllers logic in main.py for initialization. + + """ + apidoc_routes = {} + method_endpoints = {} + + @classmethod + def get_app_endpoints(cls): + return cls.apidoc_routes + + @classmethod + def get_method_endpoints(cls, fname): + if len(cls.method_endpoints) == 0: + cls.method_endpoints = cls.group_routes_by_method() + return cls.method_endpoints[fname] + + @classmethod + def group_routes_by_method(cls): + """ + Group URL endpoints according to their processing method. + Returns: + A dict where keys are the processing method names, and values + are the routes that are bound to the key method. + """ + endpoints = {} + for rule in app.url_map.iter_rules(): + rule_dict = {'rule': rule.rule, + 'methods': rule.methods} + if rule.endpoint not in endpoints: + endpoints[rule.endpoint] = [rule_dict] + else: + endpoints[rule.endpoint].append(rule_dict) + return endpoints + + @classmethod + def index_add_route(cls, route, docstring): + """ + Add a route to the self-documenting API reference + """ + if route not in cls.apidoc_routes: + cls.apidoc_routes[route] = docstring + + +class route(object): # noqa: N801 + """ + Decorate an API method to register it in the API doc route index + and create the corresponding Flask route. 
+ Caution: decorating a method with this requires to also decorate it + __at least__ with @returns, or breaks the decorated endpoint + Args: + route: the documentation page's route + noargs: set to True if the route has no arguments, and its result + should be displayed anytime its documentation is requested + """ + def __init__(self, route, noargs=False): + self.route = route + self.noargs = noargs + + def __call__(self, f): + APIUrls.index_add_route(self.route, f.__doc__) + + @wraps(f) + def doc_func(*args, **kwargs): + return f(call_args=(args, kwargs), + doc_route=self.route, + noargs=self.noargs) + + if not self.noargs: + app.add_url_rule(self.route, f.__name__, doc_func) + + return doc_func + + +class arg(object): # noqa: N801 + """ + Decorate an API method to display an argument's information on the doc + page specified by @route above. + Args: + name: the argument's name. MUST match the method argument's name to + create the example request URL. + default: the argument's default value + argtype: the argument's type as an Enum value from apidoc.argtypes + argdoc: the argument's documentation string + """ + def __init__(self, name, default, argtype, argdoc): + self.doc_dict = { + 'name': name, + 'type': argtype.value, + 'doc': argdoc, + 'default': default + } + + def __call__(self, f): + @wraps(f) + def arg_fun(*args, **kwargs): + if 'args' in kwargs: + kwargs['args'].append(self.doc_dict) + else: + kwargs['args'] = [self.doc_dict] + return f(*args, **kwargs) + return arg_fun + + +class raises(object): # noqa: N801 + """ + Decorate an API method to display information pertaining to an exception + that can be raised by this method. + Args: + exc: the exception name as an Enum value from apidoc.excs + doc: the exception's documentation string + """ + def __init__(self, exc, doc): + self.exc_dict = { + 'exc': exc.value, + 'doc': doc + } + + def __call__(self, f): + @wraps(f) + def exc_fun(*args, **kwargs): + if 'excs' in kwargs: + kwargs['excs'].append(self.exc_dict) + else: + kwargs['excs'] = [self.exc_dict] + return f(*args, **kwargs) + return exc_fun + + +class returns(object): # noqa: N801 + """ + Decorate an API method to display information about its return value. 
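Taken together, the decorators in this module are meant to wrap API views roughly as in the sketch below; the route, docstring and return description are illustrative rather than taken from this patch, and @returns stays innermost as the caution notes require (with @arg/@raises slotted between @route and @returns for parameterized endpoints):

```python
# Illustrative endpoint registration with the apidoc decorator stack.
from swh.web.ui import service
from swh.web.ui import apidoc as doc
from swh.web.ui.main import app

@app.route('/api/1/stat/counters/')          # real endpoint (assumed route)
@doc.route('/api/1/stat/counters/', noargs=True)
@doc.returns(rettype=doc.rettypes.dict,
             retdoc='a dictionary of archive statistics')
def api_stats():
    """Return statistics about the archive's contents."""
    return service.stat_counters()
```

With noargs=True the doc route registers no extra URL rule and the view always returns data; for routes with arguments, a request to the bare documentation route renders apidoc.html instead.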
+ Caution: this MUST be the last decorator in the apidoc decorator stack, + or the decorated endpoint breaks + Args: + rettype: the return value's type as an Enum value from apidoc.rettypes + retdoc: the return value's documentation string + """ + def __init__(self, rettype=None, retdoc=None): + self.return_dict = { + 'type': rettype.value, + 'doc': retdoc + } + + def filter_api_url(self, endpoint, route_re, noargs): + doc_methods = {'GET', 'HEAD', 'OPTIONS'} + if re.match(route_re, endpoint['rule']): + if endpoint['methods'] == doc_methods and not noargs: + return False + return True + + def __call__(self, f): + @wraps(f) + def ret_fun(*args, **kwargs): + # Build documentation + env = { + 'docstring': f.__doc__, + 'route': kwargs['doc_route'], + 'return': self.return_dict + } + + for arg in ['args', 'excs']: + if arg in kwargs: + env[arg] = kwargs[arg] + + route_re = re.compile('.*%s$' % kwargs['doc_route']) + endpoint_list = APIUrls.get_method_endpoints(f.__name__) + other_urls = [url for url in endpoint_list if + self.filter_api_url(url, route_re, kwargs['noargs'])] + env['urls'] = other_urls + + # Build example endpoint URL + if 'args' in env: + defaults = {arg['name']: arg['default'] for arg in env['args']} + example = url_for(f.__name__, **defaults) + env['example'] = re.sub(r'(.*)\?.*', r'\1', example) + + # Prepare and send to mimetype selector if it's not a doc request + if re.match(route_re, request.url) and not kwargs['noargs']: + return app.response_class( + render_template('apidoc.html', **env), + content_type='text/html') + + cargs, ckwargs = kwargs['call_args'] + g.doc_env = env # Store for response processing + return f(*cargs, **ckwargs) + return ret_fun diff --git a/swh/web/ui/backend.py b/swh/web/ui/backend.py index 28a71a4d..229e27b8 100644 --- a/swh/web/ui/backend.py +++ b/swh/web/ui/backend.py @@ -1,270 +1,271 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import os from swh.web.ui import main def content_get(sha1_bin): """Lookup the content designed by {algo: hash_bin}. Args: sha1_bin: content's binary sha1. Returns: Content as dict with 'sha1' and 'data' keys. data representing its raw data. """ contents = main.storage().content_get([sha1_bin]) if contents and len(contents) >= 1: return contents[0] return None def content_find(algo, hash_bin): """Retrieve the content with binary hash hash_bin Args: algo: nature of the hash hash_bin. hash_bin: content's hash searched for. Returns: A triplet (sha1, sha1_git, sha256) if the content exist or None otherwise. """ return main.storage().content_find({algo: hash_bin}) def content_find_occurrence(algo, hash_bin): """Find the content's occurrence. Args: algo: nature of the hash hash_bin. hash_bin: content's hash searched for. Returns: The occurrence of the content. """ return main.storage().content_find_occurrence({algo: hash_bin}) def content_missing_per_sha1(sha1list): """List content missing from storage based on sha1 Args: sha1s: Iterable of sha1 to check for absence Returns: an iterable of sha1s missing from the storage """ return main.storage().content_missing_per_sha1(sha1list) def directory_get(sha1_bin): """Retrieve information on one directory. Args: sha1_bin: Directory's identifier Returns: The directory's information. 
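A few hunks below, origin_get is reworked to accept the whole origin dict rather than a bare identifier. A minimal call sketch (the id and URL are invented, and a configured storage is assumed):

```python
# origin_get now takes a dict with either 'id' or ('type' AND 'url').
from swh.web.ui import backend

by_id = backend.origin_get({'id': 42})
by_url = backend.origin_get({'type': 'git',
                             'url': 'https://example.org/some/repo.git'})
```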
""" res = main.storage().directory_get([sha1_bin]) if res and len(res) >= 1: return res[0] -def origin_get(origin_id): - """Return information about the origin with id origin_id. +def origin_get(origin): + """Return information about the origin matching dict origin. Args: - origin_id: origin's identifier + origin: origin's dict with keys either 'id' or + ('type' AND 'url') Returns: Origin information as dict. """ - return main.storage().origin_get({'id': origin_id}) + return main.storage().origin_get(origin) def person_get(person_id): """Return information about the person with id person_id. Args: person_id: person's identifier.v Returns: Person information as dict. """ res = main.storage().person_get([person_id]) if res and len(res) >= 1: return res[0] def directory_ls(sha1_git_bin, recursive=False): """Return information about the directory with id sha1_git. Args: sha1_git: directory's identifier. recursive: Optional recursive flag default to False Returns: Directory information as dict. """ directory_entries = main.storage().directory_ls(sha1_git_bin, recursive) if not directory_entries: return [] return directory_entries def release_get(sha1_git_bin): """Return information about the release with sha1 sha1_git_bin. Args: sha1_git_bin: The release's sha1 as bytes. Returns: Release information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ res = main.storage().release_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_get(sha1_git_bin): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. Returns: Revision information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ res = main.storage().revision_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_get_multiple(sha1_git_bin_list): """Return information about the revisions in sha1_git_bin_list Args: sha1_git_bin_list: The revisions' sha1s as a list of bytes. Returns: Revisions' information as an iterable of dicts if any found, an empty list otherwise Raises: ValueError if the identifier provided is not of sha1 nature. """ res = main.storage().revision_get(sha1_git_bin_list) if res and len(res) >= 1: return res return [] -def revision_log(sha1_git_bin, limit=100): +def revision_log(sha1_git_bin, limit): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. limit: the maximum number of revisions returned. Returns: Revision information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ return main.storage().revision_log([sha1_git_bin], limit) -def revision_log_by(origin_id, branch_name, ts, limit=100): +def revision_log_by(origin_id, branch_name, ts, limit): """Return information about the revision matching the timestamp ts, from origin origin_id, in branch branch_name. Args: origin_id: origin of the revision - branch_name: revision's branch. - timestamp: revision's time frame. Returns: Information for the revision matching the criterions. """ - return main.storage().revision_log_by(origin_id, branch_name, - ts) + ts, + limit=limit) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. 
""" return main.storage().stat_counters() def stat_origin_visits(origin_id): """Return the dates at which the given origin was scanned for content. Returns: An array of dates """ return main.storage().origin_visit_get(origin_id) def revision_get_by(origin_id, branch_name, timestamp): """Return occurrence information matching the criterions origin_id, branch_name, ts. """ res = main.storage().revision_get_by(origin_id, branch_name, timestamp=timestamp, limit=1) if not res: return None return res[0] def directory_entry_get_by_path(directory, path): """Return a directory entry by its path. """ paths = path.strip(os.path.sep).split(os.path.sep) return main.storage().directory_entry_get_by_path( directory, list(map(lambda p: p.encode('utf-8'), paths))) def entity_get(uuid): """Retrieve the entity per its uuid. """ return main.storage().entity_get(uuid) diff --git a/swh/web/ui/converters.py b/swh/web/ui/converters.py index 656ac16a..287f95a4 100644 --- a/swh/web/ui/converters.py +++ b/swh/web/ui/converters.py @@ -1,228 +1,232 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from swh.core import hashutil from swh.core.utils import decode_with_escape from swh.web.ui import utils def from_swh(dict_swh, hashess={}, bytess={}, dates={}, blacklist={}, convert={}, convert_fn=lambda x: x): """Convert from an swh dictionary to something reasonably json serializable. Args: - dict_swh: the origin dictionary needed to be transformed - hashess: list/set of keys representing hashes values (sha1, sha256, sha1_git, etc...) as bytes. Those need to be transformed in hexadecimal string - bytess: list/set of keys representing bytes values which needs to be decoded - blacklist: set of keys to filter out from the conversion - convert: set of keys whose associated values need to be converted using convert_fn - convert_fn: the conversion function to apply on the value of key in 'convert' The remaining keys are copied as is in the output. Returns: dictionary equivalent as dict_swh only with its keys `converted`. """ def convert_hashes_bytes(v): """v is supposedly a hash as bytes, returns it converted in hex. """ if v and isinstance(v, bytes): return hashutil.hash_to_hex(v) return v def convert_bytes(v): """v is supposedly a bytes string, decode as utf-8. FIXME: Improve decoding policy. If not utf-8, break! 
""" if v and isinstance(v, bytes): return v.decode('utf-8') return v def convert_date(v): """v is a dict with three keys: timestamp offset negative_utc We convert it to a human-readable string """ tz = datetime.timezone(datetime.timedelta(minutes=v['offset'])) date = datetime.datetime.fromtimestamp(v['timestamp'], tz=tz) datestr = date.isoformat() if v['offset'] == 0 and v['negative_utc']: # remove the rightmost + and replace it with a - return '-'.join(datestr.rsplit('+', 1)) return datestr if not dict_swh: return dict_swh new_dict = {} for key, value in dict_swh.items(): if key in blacklist: continue elif key in dates: new_dict[key] = convert_date(value) elif isinstance(value, dict): new_dict[key] = from_swh(value, hashess, bytess, dates, blacklist, convert, convert_fn) elif key in hashess: new_dict[key] = utils.fmap(convert_hashes_bytes, value) elif key in bytess: try: new_dict[key] = utils.fmap(convert_bytes, value) except UnicodeDecodeError: if 'decoding_failures' not in new_dict: new_dict['decoding_failures'] = [key] else: new_dict['decoding_failures'].append(key) new_dict[key] = utils.fmap(decode_with_escape, value) elif key in convert: new_dict[key] = convert_fn(value) else: new_dict[key] = value return new_dict def from_origin(origin): """Convert from an SWH origin to an origin dictionary. """ return from_swh(origin, hashess=set(['revision']), bytess=set(['path'])) def from_release(release): """Convert from an SWH release to a json serializable release dictionary. Args: release: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - revision: identifier of the revision the release points to (sha1 in bytes) - comment: release's comment message (bytes) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) Returns: Release dictionary with the following keys: - id: hexadecimal sha1 (string) - revision: hexadecimal sha1 (string) - comment: release's comment message (string) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) """ return from_swh( release, hashess=set(['id', 'target']), bytess=set(['message', 'name', 'fullname', 'email']), dates={'date'}, ) def from_revision(revision): """Convert from an SWH revision to a json serializable revision dictionary. Args: revision: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - directory: identifier of the directory the revision points to (sha1 in bytes) - author_name, author_email: author's revision name and email - committer_name, committer_email: committer's revision name and email - message: revision's message - date, date_offset: revision's author date - committer_date, committer_date_offset: revision's commit date - parents: list of parents for such revision - synthetic: revision's property nature - type: revision's type (git, tar or dsc at the moment) - metadata: if the revision is synthetic, this can reference dynamic properties. 
Returns: Revision dictionary with the same keys as inputs, only: - sha1s are in hexadecimal strings (id, directory) - bytes are decoded in string (author_name, committer_name, author_email, committer_email) - remaining keys are left as is """ revision = from_swh(revision, hashess=set(['id', 'directory', 'parents', 'children']), bytess=set(['name', 'fullname', 'email']), dates={'date', 'committer_date'}) if revision: + if 'parents' in revision: + revision['merge'] = len(revision['parents']) > 1 if 'message' in revision: try: revision['message'] = revision['message'].decode('utf-8') except UnicodeDecodeError: revision['message_decoding_failed'] = True revision['message'] = None return revision def from_content(content): """Convert swh content to serializable content dictionary. """ + if content: + content = {k: v for k, v in content.items() if k not in ['ctime']} return from_swh(content, hashess={'sha1', 'sha1_git', 'sha256'}, bytess={}, blacklist={}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) def from_person(person): """Convert swh person to serializable person dictionary. """ return from_swh(person, hashess=set(), bytess=set(['name', 'fullname', 'email'])) def from_directory_entry(dir_entry): """Convert swh person to serializable person dictionary. """ return from_swh(dir_entry, hashess=set(['dir_id', 'sha1_git', 'sha1', 'sha256', 'target']), bytess=set(['name']), blacklist={}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) diff --git a/swh/web/ui/main.py b/swh/web/ui/main.py index 6bdfba22..4837a8e7 100644 --- a/swh/web/ui/main.py +++ b/swh/web/ui/main.py @@ -1,150 +1,139 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import os +import json -from flask.ext.api import FlaskAPI +from flask import Flask from swh.core import config -from swh.web.ui.renderers import RENDERERS, urlize_api_links +from swh.web.ui.renderers import urlize_api_links from swh.web.ui.renderers import safe_docstring_display +from swh.web.ui.renderers import revision_id_from_url +from swh.web.ui.renderers import SWHMultiResponse from swh.storage import get_storage DEFAULT_CONFIG = { 'storage_args': ('list[str]', ['http://localhost:5000/']), 'storage_class': ('str', 'remote_storage'), 'log_dir': ('string', '/tmp/swh/log'), 'debug': ('bool', None), 'host': ('string', '127.0.0.1'), 'port': ('int', 6543), 'secret_key': ('string', 'development key'), - 'max_upload_size': ('int', 16 * 1024 * 1024), - 'upload_folder': ('string', '/tmp/swh-web-ui/uploads'), - 'upload_allowed_extensions': ('list[str]', []) # means all are accepted + 'max_log_revs': ('int', 25), } - # api's definition -app = FlaskAPI(__name__) +app = Flask(__name__) +app.response_class = SWHMultiResponse app.jinja_env.filters['urlize_api_links'] = urlize_api_links app.jinja_env.filters['safe_docstring_display'] = safe_docstring_display - - -AUTODOC_ENDPOINT_INSTALLED = False +app.jinja_env.filters['revision_id_from_url'] = revision_id_from_url def read_config(config_file): """Read the configuration file `config_file`, update the app with parameters (secret_key, conf) and return the parsed configuration as a dict""" conf = config.read(config_file, DEFAULT_CONFIG) - config.prepare_folders(conf, 'log_dir', 'upload_folder') + config.prepare_folders(conf, 'log_dir') conf['storage'] = 
get_storage(conf['storage_class'], conf['storage_args']) return conf def load_controllers(): """Load the controllers for the application. """ from swh.web.ui import views, apidoc # flake8: noqa - # side-effects here (install autodoc endpoints so do it only once!) - global AUTODOC_ENDPOINT_INSTALLED - if not AUTODOC_ENDPOINT_INSTALLED: - apidoc.install_browsable_api_endpoints() - AUTODOC_ENDPOINT_INSTALLED = True - def rules(): """Returns rules from the application in dictionary form. Beware, must be called after swh.web.ui.main.load_controllers funcall. Returns: Generator of application's rules. """ for rule in app.url_map._rules: yield {'rule': rule.rule, 'methods': rule.methods, 'endpoint': rule.endpoint} def storage(): """Return the current application's storage. """ return app.config['conf']['storage'] def run_from_webserver(environ, start_response): """Run the WSGI app from the webserver, loading the configuration. Note: This function is called on a per-request basis so beware the side effects here! """ load_controllers() config_path = '/etc/softwareheritage/webapp/webapp.ini' conf = read_config(config_path) app.secret_key = conf['secret_key'] app.config['conf'] = conf - app.config['MAX_CONTENT_LENGTH'] = conf['max_upload_size'] - app.config['DEFAULT_RENDERERS'] = RENDERERS logging.basicConfig(filename=os.path.join(conf['log_dir'], 'web-ui.log'), level=logging.INFO) return app(environ, start_response) def run_debug_from(config_path, verbose=False): """Run the api's server in dev mode. Note: This is called only once (contrast with the production mode in run_from_webserver function) Args: conf is a dictionary of keywords: - 'db_url' the db url's access (through psycopg2 format) - 'content_storage_dir' revisions/directories/contents storage on disk - 'host' to override the default 127.0.0.1 to open or not the server to the world - 'port' to override the default of 5000 (from the underlying layer: flask) - 'debug' activate the verbose logs - 'secret_key' the flask secret key Returns: Never """ load_controllers() conf = read_config(config_path) app.secret_key = conf['secret_key'] app.config['conf'] = conf - app.config['MAX_CONTENT_LENGTH'] = conf['max_upload_size'] - app.config['DEFAULT_RENDERERS'] = RENDERERS host = conf.get('host', '127.0.0.1') port = conf.get('port') debug = conf.get('debug') log_file = os.path.join(conf['log_dir'], 'web-ui.log') logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO, handlers=[logging.FileHandler(log_file), logging.StreamHandler()]) app.run(host=host, port=port, debug=debug) diff --git a/swh/web/ui/renderers.py b/swh/web/ui/renderers.py index b5172ef3..056aaf14 100644 --- a/swh/web/ui/renderers.py +++ b/swh/web/ui/renderers.py @@ -1,144 +1,146 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import re import yaml +import json + +from docutils.core import publish_parts +from docutils.writers.html4css1 import Writer, HTMLTranslator +from inspect import cleandoc + +from flask import request, Response, render_template +from flask import g -from flask import make_response, request -from flask.ext.api import renderers, parsers -from flask_api.mediatypes import MediaType from swh.web.ui import utils class SWHFilterEnricher(): """Global filter on fields. 
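The SWHMultiResponse class defined further below, and installed as app.response_class in main.py above, picks the representation from the request's Accept header. A hedged sketch of the effect from a client's point of view, assuming a configured application and an existing stats endpoint (the route is an assumption, not part of this patch):

```python
# One view, three representations, selected via content negotiation.
from swh.web.ui import main

main.load_controllers()
client = main.app.test_client()

r_json = client.get('/api/1/stat/counters/')                   # JSON by default
r_yaml = client.get('/api/1/stat/counters/',
                    headers={'Accept': 'application/yaml'})    # YAML dump
r_html = client.get('/api/1/stat/counters/',
                    headers={'Accept': 'text/html'})           # apidoc.html render
```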
""" def filter_by_fields(self, data): """Extract a request parameter 'fields' if it exists to permit the filtering on the data dict's keys. If such field is not provided, returns the data as is. """ fields = request.args.get('fields') if fields: fields = set(fields.split(',')) data = utils.filter_field_keys(data, fields) return data -class YAMLRenderer(renderers.BaseRenderer, SWHFilterEnricher): - """Renderer for application/yaml. - Orchestrate from python data structure to yaml. - +class SWHMultiResponse(Response, SWHFilterEnricher): """ - media_type = 'application/yaml' - - def render(self, data, media_type, **options): - data = self.filter_by_fields(data) - return yaml.dump(data, encoding=self.charset) - - -class JSONPEnricher(): - """JSONP rendering. - + A Flask Response subclass. + Override force_type to transform dict responses into callable Flask + response objects whose mimetype matches the request's Accept header: HTML + template render, YAML dump or default to a JSON dump. """ - def enrich_with_jsonp(self, data): - """Defines a jsonp function that extracts a potential 'callback' - request parameter holding the function name and wraps the data - inside a call to such function - - e.g: - GET /blah/foo/bar renders: {'output': 'wrapped'} - GET /blah/foo/bar?callback=fn renders: fn({'output': 'wrapped'}) - """ - jsonp = request.args.get('callback') - if jsonp: - return '%s(%s)' % (jsonp, data) - - return data - -class SWHJSONRenderer(renderers.JSONRenderer, - SWHFilterEnricher, - JSONPEnricher): - """Renderer for application/json. - Serializes in json the data and returns it. - - Also deals with jsonp. If callback is found in request parameter, - wrap the result as a function with name the value of the parameter - query 'callback'. + @classmethod + def make_response_from_mimetype(cls, rv, options={}): + if not (isinstance(rv, list) or isinstance(rv, dict)): + return rv + + def wants_html(best_match): + return best_match == 'text/html' and \ + request.accept_mimetypes[best_match] > \ + request.accept_mimetypes['application/json'] + + def wants_yaml(best_match): + return best_match == 'application/yaml' and \ + request.accept_mimetypes[best_match] > \ + request.accept_mimetypes['application/json'] + + rv = cls.filter_by_fields(cls, rv) + acc_mime = ['application/json', 'application/yaml', 'text/html'] + best_match = request.accept_mimetypes.best_match(acc_mime) + # return a template render + if wants_html(best_match): + data = json.dumps(rv, sort_keys=True, + indent=4, separators=(',', ': ')) + env = g.get('doc_env', {}) + env['response_data'] = data + env['request'] = request + rv = Response(render_template('apidoc.html', **env), + content_type='text/html', + **options) + # return formatted yaml + elif wants_yaml(best_match): + rv = Response( + yaml.dump(rv), + content_type='application/yaml', + **options) + # return formatted json + else: + # jsonify is unhappy with lists in Flask 0.10.1, use json.dumps + rv = Response( + json.dumps(rv), + content_type='application/json', + **options) + return rv + + @classmethod + def force_type(cls, rv, environ=None): + if isinstance(rv, dict) or isinstance(rv, list): + rv = cls.make_response_from_mimetype(rv) + return super().force_type(rv, environ) + + +def error_response(error_code, error): + """Private function to create a custom error response. 
""" - def render(self, data, media_type, **options): - data = self.filter_by_fields(data) - res = super().render(data, media_type, **options) - return self.enrich_with_jsonp(res) + error_opts = {'status': error_code} + error_data = {'error': str(error)} + + return SWHMultiResponse.make_response_from_mimetype(error_data, + options=error_opts) def urlize_api_links(content): """Utility function for decorating api links in browsable api.""" return re.sub(r'"(/api/.*|/browse/.*)"', r'"\1"', content) -def safe_docstring_display(docstring): - """Utility function to safely decorate docstring in browsable api.""" - src = r'(Args|Raises?|Throws?|Yields?|Returns?|Examples?|Samples?):.*' - dest = r'
\1:
  ' - return re.sub(src, dest, docstring) - - -class SWHBrowsableAPIRenderer(renderers.BrowsableAPIRenderer): - """SWH's browsable api renderer. - +class NoHeaderHTMLTranslator(HTMLTranslator): """ - template = "api.html" - - -RENDERERS = [ - 'swh.web.ui.renderers.SWHJSONRenderer', - 'swh.web.ui.renderers.SWHBrowsableAPIRenderer', - 'flask.ext.api.parsers.URLEncodedParser', - 'swh.web.ui.renderers.YAMLRenderer', -] + Docutils translator subclass to customize the generation of HTML + from reST-formatted docstrings + """ + def __init__(self, document): + super().__init__(document) + self.body_prefix = [] + self.body_suffix = [] + def visit_bullet_list(self, node): + self.context.append((self.compact_simple, self.compact_p)) + self.compact_p = None + self.compact_simple = self.is_compactable(node) + self.body.append(self.starttag(node, 'ul', CLASS='docstring')) -RENDERERS_INSTANCE = [ - SWHJSONRenderer(), - SWHBrowsableAPIRenderer(), - parsers.URLEncodedParser(), - YAMLRenderer(), -] +DOCSTRING_WRITER = Writer() +DOCSTRING_WRITER.translator_class = NoHeaderHTMLTranslator -RENDERERS_BY_TYPE = { - r.media_type: r - for r in RENDERERS_INSTANCE -} +def safe_docstring_display(docstring): + """ + Utility function to htmlize reST-formatted documentation in browsable + api. + """ + docstring = cleandoc(docstring) + return publish_parts(docstring, writer=DOCSTRING_WRITER)['html_body'] -def error_response(default_error_msg, error_code, error): - """Private function to create a custom error response. - """ - # if nothing is requested by client, use json - default_application_type = 'application/json' - accept_type = request.headers.get('Accept', default_application_type) - renderer = RENDERERS_BY_TYPE.get( - accept_type, - RENDERERS_BY_TYPE[default_application_type]) - - # for edge cases, use the elected renderer's media type - accept_type = renderer.media_type - response = make_response(default_error_msg, error_code) - response.headers['Content-Type'] = accept_type - response.data = renderer.render({"error": str(error)}, - media_type=MediaType(accept_type), - status=error_code, - headers={'Content-Type': accept_type}) - - return response +def revision_id_from_url(url): + """Utility function to obtain a revision's ID from its browsing URL.""" + return re.sub(r'/browse/revision/([0-9a-f]{40}|[0-9a-f]{64})/.*', + r'\1', url) diff --git a/swh/web/ui/service.py b/swh/web/ui/service.py index 50437ecf..8b2ec9b7 100644 --- a/swh/web/ui/service.py +++ b/swh/web/ui/service.py @@ -1,643 +1,622 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import defaultdict from swh.core import hashutil from swh.web.ui import converters, query, backend from swh.web.ui.exc import NotFoundExc def lookup_multiple_hashes(hashes): """Lookup the passed hashes in a single DB connection, using batch processing. Args: An array of {filename: X, sha1: Y}, string X, hex sha1 string Y. Returns: The same array with elements updated with elem['found'] = true if the hash is present in storage, elem['found'] = false if not. 
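The two renderer helpers added above can be exercised directly; a small sketch with invented inputs (the docstring and URL below are illustrative only):

```python
from swh.web.ui.renderers import safe_docstring_display, revision_id_from_url

fragment = safe_docstring_display("""Lookup a revision.

    Args:
        sha1_git: the revision identifier
    """)
# 'fragment' holds the docutils HTML rendering of the reST docstring,
# suitable for embedding in the apidoc template.

revision_id_from_url(
    '/browse/revision/18d8be353ed3480476f032475e7c233eff7371d5/log/')
# -> '18d8be353ed3480476f032475e7c233eff7371d5'
```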
""" hashlist = [hashutil.hex_to_hash(elem['sha1']) for elem in hashes] content_missing = backend.content_missing_per_sha1(hashlist) missing = [hashutil.hash_to_hex(x) for x in content_missing] for x in hashes: x.update({'found': True}) for h in hashes: if h['sha1'] in missing: h['found'] = False return hashes -def hash_and_search(filepath): - """Hash the filepath's content as sha1, then search in storage if - it exists. - - Args: - Filepath of the file to hash and search. - - Returns: - Tuple (hex sha1, found as True or false). - The found boolean, according to whether the sha1 of the file - is present or not. - """ - h = hashutil.hashfile(filepath) - c = backend.content_find('sha1', h['sha1']) - if c: - r = converters.from_content(c) - r['found'] = True - return r - else: - return {'sha1': hashutil.hash_to_hex(h['sha1']), - 'found': False} - - def lookup_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found containing the hash info if the hash is present, None if not. """ algo, hash = query.parse_hash(q) found = backend.content_find(algo, hash) return {'found': found, 'algo': algo} def search_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found to True or False, according to whether the checksum is present or not """ algo, hash = query.parse_hash(q) found = backend.content_find(algo, hash) return {'found': found is not None} def lookup_hash_origin(q): """Return information about the checksum contained in the query q. Args: query string of the form Returns: origin as dictionary if found for the given content. """ algo, hash = query.parse_hash(q) origin = backend.content_find_occurrence(algo, hash) return converters.from_origin(origin) -def lookup_origin(origin_id): - """Return information about the origin with id origin_id. +def lookup_origin(origin): + """Return information about the origin matching dict origin. Args: - origin_id as string + origin: origin's dict with keys either 'id' or + ('type' AND 'url') Returns: origin information as dict. """ - return backend.origin_get(origin_id) + return backend.origin_get(origin) def lookup_person(person_id): """Return information about the person with id person_id. Args: person_id as string Returns: person information as dict. """ person = backend.person_get(person_id) return converters.from_person(person) def lookup_directory(sha1_git): """Return information about the directory with id sha1_git. Args: sha1_git as string Returns: directory information as dict. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], # HACK: sha1_git really 'Only sha1_git is supported.') dir = backend.directory_get(sha1_git_bin) if not dir: return None directory_entries = backend.directory_ls(sha1_git_bin) return map(converters.from_directory_entry, directory_entries) def lookup_directory_with_path(directory_sha1_git, path_string): """Return directory information for entry with path path_string w.r.t. 
root directory pointed by directory_sha1_git Args: - directory_sha1_git: sha1_git corresponding to the directory to which we append paths to (hopefully) find the entry - the relative path to the entry starting from the directory pointed by directory_sha1_git Raises: NotFoundExc if the directory entry is not found """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( directory_sha1_git, ['sha1'], 'Only sha1_git is supported.') queried_dir = backend.directory_entry_get_by_path( sha1_git_bin, path_string) if not queried_dir: raise NotFoundExc(('Directory entry with path %s from %s not found') % (path_string, directory_sha1_git)) return converters.from_directory_entry(queried_dir) def lookup_release(release_sha1_git): """Return information about the release with sha1 release_sha1_git. Args: release_sha1_git: The release's sha1 as hexadecimal Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( release_sha1_git, ['sha1'], 'Only sha1_git is supported.') res = backend.release_get(sha1_git_bin) return converters.from_release(res) def lookup_revision(rev_sha1_git): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) return converters.from_revision(revision) def lookup_revision_multiple(sha1_git_list): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ def to_sha1_bin(sha1_hex): _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_hex, ['sha1'], 'Only sha1_git is supported.') return sha1_git_bin sha1_bin_list = (to_sha1_bin(x) for x in sha1_git_list) revisions = backend.revision_get_multiple(sha1_bin_list) return (converters.from_revision(x) for x in revisions) def lookup_revision_message(rev_sha1_git): """Return the raw message of the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Decoded revision message as dict {'message': } Raises: ValueError if the identifier provided is not of sha1 nature. NotFoundExc if the revision is not found, or if it has no message """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision with sha1_git %s not found.' % rev_sha1_git) if 'message' not in revision: raise NotFoundExc('No message for revision with sha1_git %s.' % rev_sha1_git) res = {'message': revision['message']} return res def lookup_revision_by(origin_id, branch_name="refs/heads/master", timestamp=None): """Lookup revisions by origin_id, branch_name and timestamp. If: - branch_name is not provided, lookup using 'refs/heads/master' as default. - ts is not provided, use the most recent Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. Yields: The revisions matching the criterions. 
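The next hunks make the log limit an explicit, mandatory argument. A hedged sketch of the service-level calls (identifiers are invented and a configured storage is assumed):

```python
# The revision-log lookups now require an explicit limit (no default of 100).
from swh.web.ui import service

revs = service.lookup_revision_log(
    '18d8be353ed3480476f032475e7c233eff7371d5', limit=25)
log = service.lookup_revision_log_by(
    origin_id=1, branch_name='refs/heads/master', timestamp=None, limit=25)
```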
""" res = backend.revision_get_by(origin_id, branch_name, timestamp) return converters.from_revision(res) -def lookup_revision_log(rev_sha1_git, limit=100): +def lookup_revision_log(rev_sha1_git, limit): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal limit: the maximum number of revisions returned Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision_entries = backend.revision_log(sha1_git_bin, limit) return map(converters.from_revision, revision_entries) -def lookup_revision_log_by(origin_id, branch_name, timestamp): +def lookup_revision_log_by(origin_id, branch_name, timestamp, limit): """Return information about the revision with sha1 revision_sha1_git. Args: origin_id: origin of the revision branch_name: revision's branch timestamp: revision's time frame limit: the maximum number of revisions returned Returns: Revision information as dict. Raises: NotFoundExc if no revision corresponds to the criterion NotFoundExc if the corresponding revision has no log """ revision_entries = backend.revision_log_by(origin_id, branch_name, - timestamp) + timestamp, + limit) if not revision_entries: return None return map(converters.from_revision, revision_entries) def lookup_revision_with_context_by(origin_id, branch_name, ts, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. sha1_git_root being resolved through the lookup of a revision by origin_id, branch_name and ts. In other words, sha1_git is an ancestor of sha1_git_root. Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. - sha1_git: one of sha1_git_root's ancestors. - limit: limit the lookup to 100 revisions back. Returns: Pair of (root_revision, revision). Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ rev_root = backend.revision_get_by(origin_id, branch_name, ts) if not rev_root: raise NotFoundExc('Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' % (origin_id, branch_name, ts)) return (converters.from_revision(rev_root), lookup_revision_with_context(rev_root, sha1_git, limit)) def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision. The type is either a sha1 (as an hex string) or a non converted dict. 
sha1_git: one of sha1_git_root's ancestors limit: limit the lookup to 100 revisions back Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) if isinstance(sha1_git_root, str): _, sha1_git_root_bin = query.parse_hash_with_algorithms_or_throws( sha1_git_root, ['sha1'], 'Only sha1_git is supported.') revision_root = backend.revision_get(sha1_git_root_bin) if not revision_root: raise NotFoundExc('Revision root %s not found' % sha1_git_root) else: sha1_git_root_bin = sha1_git_root['id'] revision_log = backend.revision_log(sha1_git_root_bin, limit) parents = {} children = defaultdict(list) for rev in revision_log: rev_id = rev['id'] parents[rev_id] = [] for parent_id in rev['parents']: parents[rev_id].append(parent_id) children[parent_id].append(rev_id) if revision['id'] not in parents: raise NotFoundExc('Revision %s is not an ancestor of %s' % (sha1_git, sha1_git_root)) revision['children'] = children[revision['id']] return converters.from_revision(revision) def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. Otherwise, display the directory pointed by dir_path (if it exists). Args: sha1_git: revision's hash. dir_path: optional directory pointed to by that revision. with_data: boolean that indicates to retrieve the raw data if the path resolves to a content. Default to False (for the api) Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc either if the revision is not found or the path referenced does not exist. NotImplementedError in case of dir_path exists but do not reference a type 'dir' or 'file'. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) dir_sha1_git_bin = revision['directory'] if dir_path: entity = backend.directory_entry_get_by_path(dir_sha1_git_bin, dir_path) if not entity: raise NotFoundExc( "Directory or File '%s' pointed to by revision %s not found" % (dir_path, sha1_git)) else: entity = {'type': 'dir', 'target': dir_sha1_git_bin} if entity['type'] == 'dir': directory_entries = backend.directory_ls(entity['target']) return {'type': 'dir', 'path': '.' if not dir_path else dir_path, 'revision': sha1_git, 'content': map(converters.from_directory_entry, directory_entries)} elif entity['type'] == 'file': # content content = backend.content_find('sha1_git', entity['target']) if with_data: content['data'] = backend.content_get(content['sha1'])['data'] return {'type': 'file', 'path': '.' if not dir_path else dir_path, 'revision': sha1_git, 'content': converters.from_content(content)} else: raise NotImplementedError('Entity of type %s not implemented.' % entity['type']) def lookup_content(q): """Lookup the content designed by q. 
Args: q: The release's sha1 as hexadecimal """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) return converters.from_content(c) def lookup_content_raw(q): """Lookup the content defined by q. Args: q: query string of the form Returns: dict with 'sha1' and 'data' keys. data representing its raw data decoded. """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) if not c: return None content = backend.content_get(c['sha1']) return converters.from_content(content) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ return backend.stat_counters() def stat_origin_visits(origin_id): """Return the dates at which the given origin was scanned for content. Returns: An array of dates in the datetime format """ for visit in backend.stat_origin_visits(origin_id): visit['date'] = visit['date'].timestamp() yield(visit) def lookup_entity_by_uuid(uuid): """Return the entity's hierarchy from its uuid. Args: uuid: entity's identifier. Returns: List of hierarchy entities from the entity with uuid. """ uuid = query.parse_uuid4(uuid) return backend.entity_get(uuid) def lookup_revision_through(revision, limit=100): """Retrieve a revision from the criterion stored in revision dictionary. Args: revision: Dictionary of criterion to lookup the revision with. Here are the supported combination of possible values: - origin_id, branch_name, ts, sha1_git - origin_id, branch_name, ts - sha1_git_root, sha1_git - sha1_git Returns: None if the revision is not found or the actual revision. """ if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context_by(revision['origin_id'], revision['branch_name'], revision['ts'], revision['sha1_git'], limit) if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision: return lookup_revision_by(revision['origin_id'], revision['branch_name'], revision['ts']) if 'sha1_git_root' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context(revision['sha1_git_root'], revision['sha1_git'], limit) if 'sha1_git' in revision: return lookup_revision(revision['sha1_git']) # this should not happen raise NotImplementedError('Should not happen!') def lookup_directory_through_revision(revision, path=None, limit=100, with_data=False): """Retrieve the directory information from the revision. Args: revision: dictionary of criterion representing a revision to lookup path: directory's path to lookup. limit: optional query parameter to limit the revisions log. (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of. with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: The directory pointing to by the revision criterions at path. """ rev = lookup_revision_through(revision, limit) if not rev: raise NotFoundExc('Revision with criterion %s not found!' % revision) return (rev['id'], lookup_directory_with_revision(rev['id'], path, with_data)) diff --git a/swh/web/ui/static/js/filedrop.js b/swh/web/ui/static/js/filedrop.js deleted file mode 100644 index 820c4d40..00000000 --- a/swh/web/ui/static/js/filedrop.js +++ /dev/null @@ -1,194 +0,0 @@ -/** - * Search page management - */ - - -$.fn.extend({ - /** - * Call on any HTMLElement to make that element the recipient of files - * drag & dropped into it. 
- * Files then have their sha1 checksum calculated - * and searched in SWH. - * Args: - * resultDiv: the table where the result should be displayed - * errorDiv: the element where the error message should be displayed - */ - filedrop: function(fileLister, searchForm) { - - return this.each(function() { - - var dragwin = $(this); - var fileshovering = false; - - dragwin.on('dragover', function(event) { - event.stopPropagation(); - event.preventDefault(); - }); - - dragwin.on('dragenter', function(event) { - event.stopPropagation(); - event.preventDefault(); - if (!fileshovering) { - dragwin.css("border-style", "solid"); - dragwin.css("box-shadow", "inset 0 3px 4px"); - fileshovering = true; - } - }); - - dragwin.on('dragover', function(event) { - event.stopPropagation(); - event.preventDefault(); - if (!fileshovering) { - dragwin.css("border-style", "solid"); - dragwin.css("box-shadow", "inset 0 3px 4px"); - fileshovering = true; - } - }); - - dragwin.on('dragleave', function(event) { - event.stopPropagation(); - event.preventDefault(); - if (fileshovering) { - dragwin.css("border-style", "dashed"); - dragwin.css("box-shadow", "none"); - fileshovering = false; - } - }); - - dragwin.on('drop', function(event) { - event.stopPropagation(); - event.preventDefault(); - if (fileshovering) { - dragwin.css("border-style", "dashed"); - dragwin.css("box-shadow", "none"); - fileshovering = false; - } - var myfiles = event.originalEvent.dataTransfer.files; - if (myfiles.length >= 1) { - handleFiles(myfiles, fileLister, searchForm); - } - }); - }); - }, - /** - * Call on a jQuery-selected input to make it sensitive to - * the reception of new files, and have it process received - * files. - * Args: - * fileLister: the element keeping track of the files - * searchForm: the form whose submission will POST the file - * information - */ - filedialog: function(fileLister, searchForm) { - return this.each(function() { - var elem = $(this); - elem.on('change', function(){ - handleFiles(this.files, fileLister, searchForm); - }); - }); - }, - /** - * Call on a jQuery-selected element to delegate its click - * event to the given input instead. - * Args: - * input: the element to be clicked when the caller is clicked. - */ - inputclick: function(input) { - return this.each(function() { - $(this).click(function(event) { - event.preventDefault(); - input.click(); - }); - }); - }, - /** - * Call on a form to intercept its submission event and - * check the validity of the text input if present before submitting - * the form. - * Args: - * textInput: the input to validate - * messageElement: the element where the warning will be written - */ - checkSubmission: function(textInput, messageElement) { - var CHECKSUM_RE = /^([0-9a-f]{40}|[0-9a-f]{64})$/i; - $(this).submit(function(event) { - event.preventDefault(); - var q = textInput.val(); - if (q && !q.match(CHECKSUM_RE)) { - messageElement.empty(); - messageElement.html('Please enter a valid SHA-1'); - } else { - searchForm.submit(); - } - }); - } -}); - - -var nameList = []; /** Avoid adding the same file twice **/ - -/** - * Start reading the supplied files to hash them and add them to the form, - * and add their names to the file lister pre-search. 
- * Args: - * myfiles: the file array - * fileLister: the element that will receive the file names - * searchForm: the form to which we add hidden inputs with the - * correct values - */ -function handleFiles(myfiles, fileLister, searchForm) { - for (var i = 0; i < myfiles.length; i++) { - var file = myfiles.item(i); - if (nameList.indexOf(file.name) == -1) { - nameList.push(file.name); - var fr = new FileReader(); - fileLister.append(make_row(file.name)); - bind_reader(fr, file.name, searchForm); - fr.readAsArrayBuffer(file); - } - } -}; - -/** - * Bind a given FileReader to hash the file contents when the file - * has been read - * Args: - * filereader: the FileReader object - * filename: the name of the file being read by the FileReader - * searchForm: the form the corresponding hidden input will be - * appended to - */ -function bind_reader(filereader, filename, searchForm) { - filereader.onloadend = function(evt) { - if (evt.target.readyState == FileReader.DONE){ - return fileReadDone(evt.target.result, filename, searchForm); - } - }; -} - -function make_row(name) { - return "
"+name+"
"; -} - -/** - * Hash the buffer contents with CryptoJS's SHA1 implementation, and - * append the result to the given form for submission. - * Args: - * buffer: the buffer to be hashed - * fname: the file name corresponding to the buffer - * searchForm: the form the inputs should be appended to - */ -function fileReadDone(buffer, fname, searchForm) { - var wordArray = CryptoJS.lib.WordArray.create(buffer); - var sha1 = CryptoJS.SHA1(wordArray); - /** - var git_hd = "blob " + wordArray.length + "\0"; - var git_Array = CryptoJS.enc.utf8.parse(git_hd).concat(wordArray); - var sha256 = CryptoJS.SHA256(wordArray); - var sha1_git = CryptoJS.SHA1(wordArray); - **/ - searchForm.append($("", {type: "hidden", - name: fname, - value: sha1} - )); -} diff --git a/swh/web/ui/static/js/search.js b/swh/web/ui/static/js/search.js new file mode 100644 index 00000000..f551ed6c --- /dev/null +++ b/swh/web/ui/static/js/search.js @@ -0,0 +1,242 @@ +/** + * Search page management + * Args: + * textForm: the form containing the text input, if any + * fileForm: the form containing the file input, if any + * messageElem: the element that should display search messages + */ +var SearchFormController = function(textForm, fileForm, messageElem) + +{ + this.textForm = textForm; + this.fileForm = fileForm; + this.messageElem = messageElem; + + // List of hashes to check against files being processed + this.hashed_already = { + 'sha1': {}, + 'sha256': {}, + 'sha1_git': {} + }; + this.algos = ['sha1', 'sha256', 'sha1_git']; + this.CHECKSUM_RE = /^([0-9a-f]{40}|[0-9a-f]{64})$/i; + var self = this; + + /** + * Show search messages on the page + * Args: + * msg: the message to show + */ + this.searchMessage = function(msg) { + self.messageElem.empty(); + self.messageElem.text(msg); + }; + + /** + * Setup the text field + * Args: + * textFormInput: the text form's input + */ + this.setupTextForm = function(textFormInput) { + self.textForm.submit(function(event) { + var q = textFormInput.val(); + if (!q) { + event.preventDefault(); + self.searchMessage("Please enter a SHA-1 or SHA-256 checksum."); + } + else if (q && !q.match(self.CHECKSUM_RE)) { + event.preventDefault(); + self.searchMessage("Invalid SHA-1 or SHA-256 checksum"); + } + }); + }; + + /** + * Setup the file drag&drop UI and hashing support. 
+ * Args: + * fileDropElem: the element receptive to drag & drop + * hashedListerElem: the element that receives the hased file descriptions + * fileFormInput: the input that actually receives files + * clearButton: the button used to clear currently hashed files + */ + this.setupFileForm = function(fileDropElem, hashedListerElem, fileFormInput, clearButton) { + if (!FileReader || !CryptoJS) { + self.searchMessage("Client-side file hashing is not available for your browser."); + return; + } + + // Enable clicking on the text element for file picker + fileDropElem.click(function(event) { + event.preventDefault(); + fileFormInput.click(); + }); + + // Enable drag&drop + var makeDroppable = function(fileReceptionElt) { + var fileshovering = false; + + fileReceptionElt.on('dragover', function(event) { + event.stopPropagation(); + event.preventDefault(); + }); + + fileReceptionElt.on('dragenter', function(event) { + event.stopPropagation(); + event.preventDefault(); + if (!fileshovering) { + fileReceptionElt.css("border-style", "solid"); + fileReceptionElt.css("box-shadow", "inset 0 3px 4px"); + fileshovering = true; + } + }); + + fileReceptionElt.on('dragover', function(event) { + event.stopPropagation(); + event.preventDefault(); + if (!fileshovering) { + fileReceptionElt.css("border-style", "solid"); + fileReceptionElt.css("box-shadow", "inset 0 3px 4px"); + fileshovering = true; + } + }); + + fileReceptionElt.on('dragleave', function(event) { + event.stopPropagation(); + event.preventDefault(); + if (fileshovering) { + fileReceptionElt.css("border-style", "dashed"); + fileReceptionElt.css("box-shadow", "none"); + fileshovering = false; + } + }); + + fileReceptionElt.on('drop', function(event) { + event.stopPropagation(); + event.preventDefault(); + if (fileshovering) { + fileReceptionElt.css("border-style", "dashed"); + fileReceptionElt.css("box-shadow", "none"); + fileshovering = false; + } + var myfiles = event.originalEvent.dataTransfer.files; + readAndHash(myfiles); + }); + }; + makeDroppable(fileDropElem); + + // Connect input change and rehash + var makeInputChange = function(fileInput) { + return fileInput.each(function() { + $(this).on('change', function(){ + readAndHash(this.files); + }); + }); + }; + makeInputChange(fileFormInput); + + // Connect clear button + var makeClearButton = function(button) { + return button.each(function() { + $(this).click(function(event) { + event.preventDefault(); + hashedListerElem.empty(); + self.fileForm.children('.search-hidden').remove(); + self.hashed_already = { + 'sha1': {}, + 'sha256': {}, + 'sha1_git': {} + }; + }); + }); + }; + makeClearButton(clearButton); + + var readAndHash = function(filelist) { + for (var file_idx = 0; file_idx < filelist.length; file_idx++) { + var file = filelist.item(file_idx); + var fr = new FileReader(); + bindReader(fr, file.name); + fr.readAsArrayBuffer(file); + } + }; + + var bindReader = function(freader, fname) { + freader.onloadend = function(event) { + if (event.target.readyState == FileReader.DONE) + return dedupAndAdd(event.target.result, fname); + else + return null; + }; + }; + + /** + * Hash the buffer with SHA-1, SHA-1_GIT, SHA-256 + * Args: + * buffer: the buffer to hash + * fname: the file name corresponding to the buffer + * Returns: + * a dict of algo_hash: hash + */ + var hashBuffer = function (buffer, fname) { + function str2ab(header) { + var buf = new ArrayBuffer(header.length); + var view = new Uint8Array(buf); // byte view, all we need is ASCII + for (var idx = 0, len=header.length; idx < 
len; idx++) + view[idx] = header.charCodeAt(idx); + return buf; + } + + var content_array = CryptoJS.lib.WordArray.create(buffer); + var git_hd_str = 'blob ' + buffer.byteLength + '\0'; + var git_hd_buffer = str2ab(git_hd_str); + var git_hd_array = CryptoJS.lib.WordArray.create(git_hd_buffer); + + var sha1 = CryptoJS.SHA1(content_array); + var sha256 = CryptoJS.SHA256(content_array); + var sha1_git = CryptoJS.SHA1(git_hd_array.concat(content_array)); + return { + 'sha1': sha1 + '', + 'sha256': sha256 + '', + 'sha1_git': sha1_git + '' + }; + }; + + /** + * Hash the buffer and add it to the form if it is unique + * If not, display which file has the same content + * Args: + * buffer: the buffer to hash + * fname: the file name corresponding to the buffer + */ + var dedupAndAdd = function(buffer, fname) { + var hashes = hashBuffer(buffer); + var has_duplicate = false; + for (var algo_s in hashes) { + if (self.hashed_already[algo_s][hashes[algo_s]] != undefined) { + // Duplicate content -- fileLister addition only, as duplicate + hashedListerElem.append($('
') + .addClass('span3') + .text(fname + ': duplicate of ' + self.hashed_already[algo_s][hashes[algo_s]])); + has_duplicate = true; + break; + } + } + // First file read with this content -- fileLister and form addition + if (!has_duplicate) { + // Add to hashed list + for (var algo_c in self.hashed_already) + self.hashed_already[algo_c][hashes[algo_c]] = fname; + hashedListerElem.append($('
') + .addClass('span3') + .text(fname)); + var hashstring = JSON.stringify(hashes).replace('\"', '\''); + self.fileForm.append($("", {type: 'hidden', + class: 'search-hidden', + name: fname, + value: hashes['sha1']}// hashstring} + )); + + } + }; + }; +}; diff --git a/swh/web/ui/templates/api.html b/swh/web/ui/templates/api.html index 629e6a74..08b5b76b 100644 --- a/swh/web/ui/templates/api.html +++ b/swh/web/ui/templates/api.html @@ -1,194 +1,13 @@ - - - - {% block head %} - - {% block meta %} - - - {% endblock %} - - {% block title %}Software Heritage API{% endblock %} - - {% block style %} - {% block bootstrap_theme %} - - - {% endblock %} - - - {% endblock %} - - {% endblock %} - - - - -
- - {% block navbar %} - - {% endblock %} - - -
- - -
- - {% if 'GET' in allowed_methods %} -
-
-
- GET - - - -
- -
-
- {% endif %} - - - - {% if 'DELETE' in allowed_methods %} -
- - - -
- {% endif %} - -
- - {% if view_description %} -
- {{ view_description | safe_docstring_display | safe}} -
- {% endif %} -
-
{{ request.method }} {{ request.full_path }}
-
-
-
HTTP {{ status }}{% autoescape off %} -{% for key, val in headers.items() %}{{ key }}: {{ val|e }} -{% endfor %} -
{% if content %}{{ content|urlize_api_links }}{% endif %}
{% endautoescape %} -
-
- - - {% if 'POST' in allowed_methods or 'PUT' in allowed_methods or 'PATCH' in allowed_methods %} -
-
-
-
-
-
- -
- - -
-
-
- -
- -
-
-
- {% if 'POST' in allowed_methods %} - - {% endif %} - {% if 'PUT' in allowed_methods %} - - {% endif %} - {% if 'PATCH' in allowed_methods %} - - {% endif %} -
-
-
-
-
-
- {% endif %} - -
- - -
- - -
- -
- -
- - {% block footer %} - {% endblock %} - - {% block script %} - - - - - {% endblock %} - - +{% extends "layout.html" %} +{% block title %}Software Heritage API Overview{% endblock %} +{% block content %} +
+ {% for route, doc in doc_routes %} +
+

{{ route }}

+ {% autoescape off %}{{ doc | safe_docstring_display }}{% endautoescape %} +
+
+ {% endfor %} +
+{% endblock %} diff --git a/swh/web/ui/templates/apidoc.html b/swh/web/ui/templates/apidoc.html new file mode 100644 index 00000000..1b947dfe --- /dev/null +++ b/swh/web/ui/templates/apidoc.html @@ -0,0 +1,84 @@ +{% extends "layout.html" %} +{% block title %}Software Heritage API{% endblock %} +{% block content %} + +{% if docstring %} +
+

Overview

+ {% autoescape off %} {{ docstring | safe_docstring_display }} {% endautoescape %} +
+{% endif %} +{% if response_data and response_data is not none %} +
+

Request

+
{{ request.method }} {{ request.url }}
+

Result

+
{% autoescape off %}{{ response_data | urlize_api_links }}{% endautoescape %}
+
+{% endif %} +
+
+ + + + + + + + + {% for url in urls %} + + + + + {% endfor %} + +
URLAllowed Methods
+ {{ url['rule'] }} + + {{ url['methods'] | sort | join(', ') }} +
+
+
+{% if args and args|length > 0 %} +
+

Args

+
+ {% for arg in args %} +
{{ arg['name'] }}: {{ arg['type'] }}
+
{% autoescape off %} {{ arg['doc'] | safe_docstring_display }} {% endautoescape %}
+ {% endfor %} +
+
+{% endif %} +{% if excs and excs|length > 0 %} +
+

Raises

+
+ {% for exc in excs %} +
{{ exc['exc'] }}
+
{% autoescape off %} {{ exc['doc'] | safe_docstring_display }} {% endautoescape %}
+ {% endfor %} +
+
+{% endif %} +{% if return %} +
+

Returns

+
+
{{ return['type'] }}
+
{% autoescape off %} {{ return['doc'] | safe_docstring_display }} {% endautoescape %}
+
+
+{% endif %} +{% if example %} +
+

Example

+
+
+ {{ example }} +
+
+
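The documentation page above is filled in by the apidoc decorators exercised in test_apidoc.py further down in this patch. Purely as an illustration — the endpoint name and its argument are invented here, and only the decorator names and keyword arguments mirror what those tests call — an API view might be declared roughly like this:

    from swh.web.ui import apidoc

    @apidoc.route('/some/doc/route/')                 # doc route, as in the tests
    @apidoc.arg('q',
                default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
                argtype=apidoc.argtypes.sha1,
                argdoc='hypothetical content identifier')
    @apidoc.raises(exc=apidoc.excs.badinput,
                   doc='raised when q cannot be parsed')
    @apidoc.returns(rettype=apidoc.rettypes.dict,
                    retdoc='a dict describing the looked-up object')
    def api_some_endpoint(q):
        """Illustrative endpoint; the decorator stack is a sketch, not
        a copy of any view in this patch."""
        ...

The stacking order reflects how the kwargs flow in the tests: route supplies the doc route, arg and raises accumulate their entries, and returns finally renders apidoc.html with the collected environment.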
+{% endif %} +{% endblock %} diff --git a/swh/web/ui/templates/origin.html b/swh/web/ui/templates/origin.html index 963b0377..8d7d8006 100644 --- a/swh/web/ui/templates/origin.html +++ b/swh/web/ui/templates/origin.html @@ -1,40 +1,40 @@ {% extends "layout.html" %} {% block title %}Origin{% endblock %} {% block content %} {% if message is not none %} {{ message }} {% endif %} {% if origin is not none %} -
Details on origin {{ origin_id }}: +
Details on origin {{ origin['id'] }}:
- {% for key in ['type', 'lister', 'projet', 'url'] %} + {% for key in ['type', 'lister', 'project', 'url'] %} {% if origin[key] is not none %}
{{ key }}
{{ origin[key] }}
{% endif %} {% endfor %} {% if 'decoding_failures' in content %}
(some decoding errors)
{% endif %}
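The origin fields rendered above come from a lookup that, judging from the backend tests later in this patch, now takes a criteria dict rather than a bare identifier. An illustrative sketch using the stub values from those tests (not real data):

    from swh.web.ui import backend

    # lookup by internal identifier ...
    origin = backend.origin_get({'id': 'origin-id'})

    # ... or by (type, url)
    origin = backend.origin_get({'type': 'ftp',
                                 'url': 'ftp://some/url/to/origin'})

    # either form is handed to storage.origin_get unchanged; the result
    # carries the keys shown here: 'id', 'type', 'lister', 'project', 'url'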
- + {% endif %} {% endblock %} diff --git a/swh/web/ui/templates/revision-log.html b/swh/web/ui/templates/revision-log.html index 26e4462a..87751e46 100644 --- a/swh/web/ui/templates/revision-log.html +++ b/swh/web/ui/templates/revision-log.html @@ -1,135 +1,156 @@ {% extends "layout.html" %} {% block title %}Revision Log{% endblock %} {% block content %} {% if message is not none %} +
{{ message }} +
{% endif %} -
-

Queried revision:

- {% if sha1_git is not none %} -
Revision with git SHA1 {{ sha1_git }}
- {% else %} - -
Branch name {{ branch_name }}
- {% if timestamp is not none %} -
Time stamp {{ timestamp }}
- {% endif %} -
-
- {% endif %} -
+
+

Queried revision:

+ {% if sha1_git is not none %} +
Revision with git SHA1 {{ sha1_git }}
+ {% else %} + +
Branch name {{ branch_name }}
+ {% if timestamp is not none %} +
Time stamp {{ timestamp }}
+ {% endif %} + {% endif %} +
{% if revisions is not none %} +
{% for revision in revisions %} - -
- {% if revision['url'] is not none %} -
-
Revision
- + {% if revision['merge'] %} +
+
+ Merge +
+
+ {% for url in revision['parent_urls'] %} + {{ url | revision_id_from_url }} + {% endfor %}
- {% endif %} +
+ {% endif %} + + {% if revision['url'] is not none %} +
+
Revision
+ +
+ {% endif %} - {% if revision['history_url'] is not none %} -
-
Revision Log
- + {% if revision['history_url'] is not none %} +
+
Revision Log
+ +
+ {% endif %} + + {% if revision['history_context_url'] is not none %} +
+
Contextual Revision Log
+ +
+ {% endif %} + + {% if revision['directory_url'] is not none %} + + {% endif %} + + {% if revision['author'] is not none %} +
+
Author
+
+

+ {{ revision['author']['name'] }} + {% if 'decoding_failures' in revision['author'] %}(some decoding errors){% endif %} +

- {% endif %} +
+
+
Date
+

{{ revision['date'] }}

+
+ {% endif %} - {% if revision['history_context_url'] is not none %} -
-
Contextual Revision Log
- + {% if revision['committer'] is not none %} +
+
Committer
+
+

+ {{ revision['committer']['name'] }} + {% if 'decoding_failures' in revision['committer'] %}(some decoding errors){% endif %} +

- {% endif %} - - {% if revision['directory_url'] is not none %} - - {% endif %} +
+
+
Committer Date
+

{{ revision['committer_date'] }}

+
+ {% endif %} - {% if revision['author'] is not none %} -
-
Author
-
-

- {{ revision['author']['name'] }} - {% if 'decoding_failures' in revision['author'] %}(some decoding errors){% endif %} -

-
-
-
-
Date
-

{{ revision['date'] }}

-
- {% endif %} + {% if revision['message'] is not none %} +
+
Message
+
{{ revision['message'] }}
+
+ {% elif revision['message_encoding_failed'] %} +
+
Message
+ +
+ {% else %} +
+
Message
+
No message found.
+
+ {% endif %} - {% if revision['committer'] is not none %} -
-
Committer
-
-

- {{ revision['committer']['name'] }} - {% if 'decoding_failures' in revision['committer'] %}(some decoding errors){% endif %} -

-
-
-
-
Committer Date
-

{{ revision['committer_date'] }}

-
- {% endif %} + {% for key in revision.keys() %} + {% if key in ['type', 'synthetic'] and key not in ['decoding_failures'] and revision[key] is not none %} +
+
{{ key }}
+
{{ revision[key] }}
+
+ {% endif %} + {% endfor %} - {% if revision['message'] is not none %} -
-
Message
-
{{ revision['message'] }}
-
- {% elif revision['message_encoding_failed'] %} -
-
Message
- -
-
Message
-
No message found.
-
- {% endif %} - - {% for key in revision.keys() %} - {% if key in ['type', 'synthetic'] and key not in ['decoding_failures'] and revision[key] is not none %} -
-
{{ key }}
-

{{ revision[key] }}

-
- {% endif %} + {% for key in ['children_urls', 'parent_urls'] %} + {% if revision[key] is not none %} +
+
{{ key }}
+ {% for link in revision[key] %} + {% endfor %} - {% for key in ['parent_urls', 'children_urls'] %} - {% if revision[key] is not none %} -
-
{{ key }}
- {% for link in revision[key] %} - - {% endfor %} -
- {% endif %} - {% endfor %} - {% if 'decoding_failures' in revision %} -
-
(some decoding errors occurred)
-
- {% endif %} -
+ {% endif %} + {% endfor %} + + {% if 'decoding_failures' in revision %} +
+
(some decoding errors occurred)
+
+ {% endif %}
{% endfor %} - -{% endif %} - + + {% if next_revs_url is not none %} + + + Next revisions + + + + {% endif %} + {% endif %} +
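The "Next revisions" link above assumes the log was fetched with an explicit limit; the backend tests later in this patch show that limit being handed straight to storage. A minimal sketch of that call, where the module-level storage handle is an assumption mirroring swh.web.ui.backend:

    def revision_log(sha1_git_bin, limit):
        # storage is assumed to be the configured storage backend;
        # the limit caps how many revisions one log page returns
        return storage.revision_log([sha1_git_bin], limit)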
+
{% endblock %} diff --git a/swh/web/ui/templates/search.html b/swh/web/ui/templates/search.html index f5be3acd..3cce4e80 100644 --- a/swh/web/ui/templates/search.html +++ b/swh/web/ui/templates/search.html @@ -1,104 +1,117 @@ {% extends "layout.html" %} {% block title %}Search SWH{% endblock %} {% block content %} - - + +
-
+

Search with SHA-1 or SHA-256:

+ +
+
+ + + + +
+
+ + +

Search with files

+
-
- -
-
- -
-
-
- -
- Drag and drop or click here to hash files and search for them. - Your files will NOT be uploaded, hashing is done locally. - Filesizes over 20Mb may be slow to process, use with care. -
-
-
+ + + +
+ Drag and drop or click here to hash files and search for them. + Your files will NOT be uploaded, hashing is done locally. + Filesizes over 20Mb may be slow to process, use with care. +
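The checksums computed in the browser by search.js follow the usual Git conventions. As a point of reference only (this snippet is not part of the patch), the same three identifiers can be derived server-side with the standard library, where sha1_git prefixes the content with a "blob <length>\0" header exactly as hashBuffer does above:

    import hashlib

    def hash_content(data: bytes) -> dict:
        """Compute the checksums the search page derives for a dropped file."""
        header = b'blob %d\x00' % len(data)   # same header hashBuffer builds
        return {
            'sha1': hashlib.sha1(data).hexdigest(),
            'sha256': hashlib.sha256(data).hexdigest(),
            'sha1_git': hashlib.sha1(header + data).hexdigest(),
        }

    # e.g. hash_content(open('some-file', 'rb').read())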
-
- +
- -
-
+ +
+ + + + {% if search_res is not none %} + {% if search_stats is not none %} -
{% endif %} - -
-
{% endblock %} diff --git a/swh/web/ui/tests/test_apidoc.py b/swh/web/ui/tests/test_apidoc.py new file mode 100644 index 00000000..b1c0d98c --- /dev/null +++ b/swh/web/ui/tests/test_apidoc.py @@ -0,0 +1,299 @@ +# Copyright (C) 2015 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +from unittest.mock import MagicMock, patch +from nose.tools import istest + +from swh.web.ui import apidoc +from swh.web.ui.tests import test_app + + +class APIDocTestCase(test_app.SWHApidocTestCase): + + def setUp(self): + self.arg_dict = { + 'name': 'my_pretty_arg', + 'default': 'some default value', + 'type': apidoc.argtypes.sha1, + 'doc': 'this arg does things' + } + self.stub_excs = [{'exc': apidoc.excs.badinput, + 'doc': 'My exception documentation'}] + self.stub_args = [{'name': 'stub_arg', + 'default': 'some_default'}] + self.stub_rule_list = [ + {'rule': 'some/route/with/args/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}, + {'rule': 'some/doc/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}, + {'rule': 'some/other/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}} + ] + self.stub_return = { + 'type': apidoc.rettypes.dict.value, + 'doc': 'a dict with amazing properties' + } + + @patch('swh.web.ui.apidoc.APIUrls') + @patch('swh.web.ui.apidoc.app') + @istest + def apidoc_route(self, mock_app, mock_api_urls): + # given + decorator = apidoc.route('/some/url/for/doc/') + mock_fun = MagicMock(return_value=123) + mock_fun.__doc__ = 'Some documentation' + mock_fun.__name__ = 'some_fname' + decorated = decorator.__call__(mock_fun) + + # when + decorated('some', 'value', kws='and a kw') + + # then + mock_fun.assert_called_once_with( + call_args=(('some', 'value'), {'kws': 'and a kw'}), + doc_route='/some/url/for/doc/', + noargs=False + ) + mock_api_urls.index_add_route.assert_called_once_with( + '/some/url/for/doc/', + 'Some documentation') + mock_app.add_url_rule.assert_called_once_with( + '/some/url/for/doc/', 'some_fname', decorated) + + @istest + def apidoc_arg_noprevious(self): + # given + decorator = apidoc.arg('my_pretty_arg', + default='some default value', + argtype=apidoc.argtypes.sha1, + argdoc='this arg does things') + mock_fun = MagicMock(return_value=123) + decorated = decorator.__call__(mock_fun) + self.arg_dict['type'] = self.arg_dict['type'].value + + # when + decorated(call_args=((), {}), doc_route='some/route/') + + # then + mock_fun.assert_called_once_with( + call_args=((), {}), + doc_route='some/route/', + args=[self.arg_dict] + ) + + @istest + def apidoc_arg_previous(self): + # given + decorator = apidoc.arg('my_other_arg', + default='some other value', + argtype=apidoc.argtypes.sha1, + argdoc='this arg is optional') + mock_fun = MagicMock(return_value=123) + decorated = decorator.__call__(mock_fun) + + # when + decorated(call_args=((), {}), + doc_route='some/route/', + args=[self.arg_dict]) + + # then + mock_fun.assert_called_once_with( + call_args=((), {}), + doc_route='some/route/', + args=[self.arg_dict, + {'name': 'my_other_arg', + 'default': 'some other value', + 'type': apidoc.argtypes.sha1.value, + 'doc': 'this arg is optional'}]) + + @istest + def apidoc_raises_noprevious(self): + # given + decorator = apidoc.raises(exc=apidoc.excs.badinput, + doc='My exception documentation') + mock_fun = MagicMock(return_value=123) + decorated = decorator.__call__(mock_fun) + self.stub_excs[0]['exc'] = 
self.stub_excs[0]['exc'].value + + # when + decorated(call_args=((), {}), doc_route='some/route/') + + # then + mock_fun.assert_called_once_with( + call_args=((), {}), + doc_route='some/route/', + excs=self.stub_excs + ) + + @istest + def apidoc_raises_previous(self): + # given + decorator = apidoc.raises(exc=apidoc.excs.notfound, + doc='Another documentation') + mock_fun = MagicMock(return_value=123) + decorated = decorator.__call__(mock_fun) + expected_excs = self.stub_excs + [{ + 'exc': apidoc.excs.notfound.value, + 'doc': 'Another documentation'}] + expected_excs[0]['exc'] = expected_excs[0]['exc'].value + + # when + decorated(call_args=((), {}), + doc_route='some/route/', + excs=self.stub_excs) + + # then + mock_fun.assert_called_once_with( + call_args=((), {}), + doc_route='some/route/', + excs=expected_excs) + + @patch('swh.web.ui.apidoc.render_template') + @patch('swh.web.ui.apidoc.url_for') + @patch('swh.web.ui.apidoc.APIUrls') + @patch('swh.web.ui.apidoc.request') + @istest + def apidoc_returns_doc_call(self, + mock_request, + mock_api_urls, + mock_url_for, + mock_render): + # given + decorator = apidoc.returns(rettype=apidoc.rettypes.dict, + retdoc='a dict with amazing properties') + mock_fun = MagicMock(return_value=123) + mock_fun.__name__ = 'some_fname' + mock_fun.__doc__ = 'Some documentation' + decorated = decorator.__call__(mock_fun) + + mock_api_urls.get_method_endpoints.return_value = self.stub_rule_list + + mock_request.url = 'http://my-domain.tld/some/doc/route/' + mock_url_for.return_value = 'http://my-domain.tld/meaningful_route/' + + expected_env = { + 'urls': [{'rule': 'some/route/with/args/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}, + {'rule': 'some/other/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}], + 'docstring': 'Some documentation', + 'args': self.stub_args, + 'excs': self.stub_excs, + 'route': 'some/doc/route/', + 'example': 'http://my-domain.tld/meaningful_route/', + 'return': self.stub_return + } + + # when + decorated( + docstring='Some documentation', + call_args=(('some', 'args'), {'kw': 'kwargs'}), + args=self.stub_args, + excs=self.stub_excs, + doc_route='some/doc/route/', + noargs=False + ) + + # then + self.assertEqual(mock_fun.call_args_list, []) # function not called + mock_render.assert_called_once_with( + 'apidoc.html', + **expected_env + ) + + @patch('swh.web.ui.apidoc.g') + @patch('swh.web.ui.apidoc.url_for') + @patch('swh.web.ui.apidoc.APIUrls') + @patch('swh.web.ui.apidoc.request') + @istest + def apidoc_returns_noargs(self, + mock_request, + mock_api_urls, + mock_url_for, + mock_g): + + # given + decorator = apidoc.returns(rettype=apidoc.rettypes.dict, + retdoc='a dict with amazing properties') + mock_fun = MagicMock(return_value=123) + mock_fun.__name__ = 'some_fname' + mock_fun.__doc__ = 'Some documentation' + decorated = decorator.__call__(mock_fun) + + mock_api_urls.get_method_endpoints.return_value = [ + {'rule': 'some/doc/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}] + mock_request.url = 'http://my-domain.tld/some/doc/route/' + doc_dict = { + 'urls': [ + {'rule': 'some/doc/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}], + 'docstring': 'Some documentation', + 'route': 'some/doc/route/', + 'return': {'type': apidoc.rettypes.dict.value, + 'doc': 'a dict with amazing properties'} + } + + # when + decorated( + call_args=((), {}), + doc_route='some/doc/route/', + noargs=True + ) + + # then + mock_fun.assert_called_once_with() + self.assertEqual(mock_g.doc_env, doc_dict) + + @patch('swh.web.ui.apidoc.g') + 
@patch('swh.web.ui.apidoc.url_for') + @patch('swh.web.ui.apidoc.APIUrls') + @patch('swh.web.ui.apidoc.request') + @istest + def apidoc_return_endpoint_call(self, + mock_request, + mock_api_urls, + mock_url_for, + mock_g): + # given + decorator = apidoc.returns(rettype=apidoc.rettypes.dict, + retdoc='a dict with amazing properties') + mock_fun = MagicMock(return_value=123) + mock_fun.__name__ = 'some_fname' + mock_fun.__doc__ = 'Some documentation' + decorated = decorator.__call__(mock_fun) + + mock_api_urls.get_method_endpoints.return_value = self.stub_rule_list + + mock_request.url = 'http://my-domain.tld/some/arg/route/' + mock_url_for.return_value = 'http://my-domain.tld/some/arg/route' + + doc_dict = { + 'urls': [{'rule': 'some/route/with/args/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}, + {'rule': 'some/other/route/', + 'methods': {'GET', 'HEAD', 'OPTIONS'}}], + 'docstring': 'Some documentation', + 'args': self.stub_args, + 'excs': self.stub_excs, + 'route': 'some/doc/route/', + 'example': 'http://my-domain.tld/some/arg/route', + 'return': self.stub_return + } + + # when + decorated( + docstring='Some documentation', + call_args=(('some', 'args'), {'kw': 'kwargs'}), + args=self.stub_args, + excs=self.stub_excs, + noargs=False, + doc_route='some/doc/route/', + ) + + # then + mock_fun.assert_called_once_with('some', 'args', kw='kwargs') + self.assertEqual(mock_g.doc_env, doc_dict) diff --git a/swh/web/ui/tests/test_app.py b/swh/web/ui/tests/test_app.py index f5938ba0..4a218b79 100644 --- a/swh/web/ui/tests/test_app.py +++ b/swh/web/ui/tests/test_app.py @@ -1,89 +1,95 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information # Functions defined here are NOT DESIGNED FOR PRODUCTION import unittest from swh.storage.api.client import RemoteStorage as Storage -from swh.web.ui import renderers, main +from swh.web.ui import main from flask.ext.testing import TestCase # Because the Storage's __init__ function does side effect at startup... class RemoteStorageAdapter(Storage): def __init__(self, base_url): self.base_url = base_url def _init_mock_storage(base_url='https://somewhere.org:4321'): """Instanciate a remote storage whose goal is to be mocked in a test context. NOT FOR PRODUCTION Returns: An instance of swh.storage.api.client.RemoteStorage destined to be mocked (it does not do any rest call) """ return RemoteStorageAdapter(base_url) # destined to be used as mock def create_app(base_url='https://somewhere.org:4321'): """Function to initiate a flask app with storage designed to be mocked. Returns: Tuple: - app test client (for testing api, client decorator from flask) - application's full configuration - the storage instance to stub and mock - the main app without any decoration NOT FOR PRODUCTION """ storage = _init_mock_storage(base_url) # inject the mock data conf = {'storage': storage, - 'upload_folder': '/some/upload-dir', - 'upload_allowed_extensions': ['txt'], - 'max_upload_size': 1024} + 'max_log_revs': 25} main.app.config.update({'conf': conf}) - main.app.config['MAX_CONTENT_LENGTH'] = conf['max_upload_size'] - main.app.config['DEFAULT_RENDERERS'] = renderers.RENDERERS if not main.app.config['TESTING']: # HACK: install controllers only once! 
main.app.config['TESTING'] = True main.load_controllers() return main.app.test_client(), main.app.config, storage, main.app +class SWHApidocTestCase(unittest.TestCase): + """Testing APIDoc class. + + """ + @classmethod + def setUpClass(cls): + cls.app, cls.app_config, cls.storage, _ = create_app() + cls.maxDiff = None + + class SWHApiTestCase(unittest.TestCase): """Testing API class. """ @classmethod def setUpClass(cls): cls.app, cls.app_config, cls.storage, _ = create_app() cls.maxDiff = None class SWHViewTestCase(TestCase): """Testing view class. cf. http://pythonhosted.org/Flask-Testing/ """ # This inhibits template rendering # render_templates = False def create_app(self): """Initialize a Flask-Testing application instance to test view without template rendering """ _, _, _, appToDecorate = create_app() return appToDecorate diff --git a/swh/web/ui/tests/test_backend.py b/swh/web/ui/tests/test_backend.py index ed01c08e..202eb4fc 100644 --- a/swh/web/ui/tests/test_backend.py +++ b/swh/web/ui/tests/test_backend.py @@ -1,697 +1,724 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import MagicMock from swh.core import hashutil from swh.web.ui import backend from swh.web.ui.tests import test_app class BackendTestCase(test_app.SWHApiTestCase): @istest def content_get_ko_not_found_1(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f777') self.storage.content_get = MagicMock(return_value=None) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_get_ko_not_found_empty_result(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_get = MagicMock(return_value=[]) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_get(self): # given sha1_bin = hashutil.hex_to_hash( '123caf10e9535160d90e874b45aa426de762f19f') stub_contents = [{ 'sha1': sha1_bin, 'data': b'binary data', }, {}] self.storage.content_get = MagicMock(return_value=stub_contents) # when actual_content = backend.content_get(sha1_bin) # then self.assertEquals(actual_content, stub_contents[0]) self.storage.content_get.assert_called_once_with( [sha1_bin]) @istest def content_find_ko_no_result(self): # given sha1_bin = hashutil.hex_to_hash( '123caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find = MagicMock(return_value=None) # when actual_lookup = backend.content_find('sha1_git', sha1_bin) # then self.assertIsNone(actual_lookup) self.storage.content_find.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_find(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find = MagicMock(return_value=(1, 2, 3)) # when actual_content = backend.content_find('sha1', sha1_bin) # then self.assertEquals(actual_content, (1, 2, 3)) # check the function has been called with parameters self.storage.content_find.assert_called_with({'sha1': sha1_bin}) @istest def content_find_occurrence_ko_no_result(self): # given sha1_bin = hashutil.hex_to_hash( 
'123caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find_occurrence = MagicMock(return_value=None) # when actual_lookup = backend.content_find_occurrence('sha1_git', sha1_bin) # then self.assertIsNone(actual_lookup) self.storage.content_find_occurrence.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_find_occurrence(self): # given sha1_bin = hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f') self.storage.content_find_occurrence = MagicMock( return_value=(1, 2, 3)) # when actual_content = backend.content_find_occurrence('sha1', sha1_bin) # then self.assertEquals(actual_content, (1, 2, 3)) # check the function has been called with parameters self.storage.content_find_occurrence.assert_called_with( {'sha1': sha1_bin}) @istest def content_missing_per_sha1_none(self): # given sha1s_bin = [hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )] self.storage.content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, []) self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) @istest def content_missing_per_sha1_some(self): # given sha1s_bin = [hashutil.hex_to_hash( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )] self.storage.content_missing_per_sha1 = MagicMock(return_value=[ hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, [hashutil.hex_to_hash( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) @istest - def origin_get(self): + def origin_get_by_id(self): # given self.storage.origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when - actual_origin = backend.origin_get('origin-id') + actual_origin = backend.origin_get({'id': 'origin-id'}) # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) self.storage.origin_get.assert_called_with({'id': 'origin-id'}) + @istest + def origin_get_by_type_url(self): + # given + self.storage.origin_get = MagicMock(return_value={ + 'id': 'origin-id', + 'lister': 'uuid-lister', + 'project': 'uuid-project', + 'url': 'ftp://some/url/to/origin', + 'type': 'ftp'}) + + # when + actual_origin = backend.origin_get({'type': 'ftp', + 'url': 'ftp://some/url/to/origin'}) + + # then + self.assertEqual(actual_origin, {'id': 'origin-id', + 'lister': 'uuid-lister', + 'project': 'uuid-project', + 'url': 'ftp://some/url/to/origin', + 'type': 'ftp'}) + + self.storage.origin_get.assert_called_with( + {'type': 'ftp', + 'url': 'ftp://some/url/to/origin'}) + @istest def person_get(self): # given self.storage.person_get = MagicMock(return_value=[{ 'id': 'person-id', 'name': 'blah'}]) # when actual_person = backend.person_get('person-id') # then self.assertEqual(actual_person, {'id': 'person-id', 'name': 'blah'}) self.storage.person_get.assert_called_with(['person-id']) @istest def directory_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') self.storage.directory_get = MagicMock(return_value=None) # when actual_directory = 
backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, None) self.storage.directory_get.assert_called_with([sha1_bin]) @istest def directory_get(self): # given sha1_bin = hashutil.hex_to_hash( '51f71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') sha1_bin2 = hashutil.hex_to_hash( '62071b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir = {'id': sha1_bin, 'revision': b'sha1-blah'} stub_dir2 = {'id': sha1_bin2, 'revision': b'sha1-foobar'} self.storage.directory_get = MagicMock(return_value=[stub_dir, stub_dir2]) # when actual_directory = backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, stub_dir) self.storage.directory_get.assert_called_with([sha1_bin]) @istest def directory_ls_empty_result(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') self.storage.directory_ls = MagicMock(return_value=[]) # when actual_directory = backend.directory_ls(sha1_bin) # then self.assertEquals(actual_directory, []) self.storage.directory_ls.assert_called_with(sha1_bin, False) @istest def directory_ls(self): # given sha1_bin = hashutil.hex_to_hash( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir_entries = [{ 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'target': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'dir_id': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'name': b'bob', 'type': 10, }] self.storage.directory_ls = MagicMock( return_value=stub_dir_entries) actual_directory = backend.directory_ls(sha1_bin, recursive=True) # then self.assertIsNotNone(actual_directory) self.assertEqual(list(actual_directory), stub_dir_entries) self.storage.directory_ls.assert_called_with(sha1_bin, True) @istest def release_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') self.storage.release_get = MagicMock(return_value=[]) # when actual_release = backend.release_get(sha1_bin) # then self.assertIsNone(actual_release) self.storage.release_get.assert_called_with([sha1_bin]) @istest def release_get(self): # given sha1_bin = hashutil.hex_to_hash( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') stub_releases = [{ 'id': sha1_bin, 'target': None, 'date': datetime.datetime(2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }] self.storage.release_get = MagicMock(return_value=stub_releases) # when actual_release = backend.release_get(sha1_bin) # then self.assertEqual(actual_release, stub_releases[0]) self.storage.release_get.assert_called_with([sha1_bin]) @istest def revision_get_by_not_found(self): # given self.storage.revision_get_by = MagicMock(return_value=[]) # when actual_revision = backend.revision_get_by(10, 'master', 'ts2') # then self.assertIsNone(actual_revision) self.storage.revision_get_by.assert_called_with(10, 'master', timestamp='ts2', limit=1) @istest def revision_get_by(self): # given self.storage.revision_get_by = MagicMock(return_value=[{'id': 1}]) # when actual_revisions = backend.revision_get_by(100, 'dev', 'ts') # then self.assertEquals(actual_revisions, {'id': 1}) self.storage.revision_get_by.assert_called_with(100, 'dev', timestamp='ts', limit=1) @istest def revision_get_not_found(self): # given sha1_bin = hashutil.hex_to_hash( 
'18d8be353ed3480476f032475e7c233eff7371d5') self.storage.revision_get = MagicMock(return_value=[]) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertIsNone(actual_revision) self.storage.revision_get.assert_called_with([sha1_bin]) @istest def revision_get(self): # given sha1_bin = hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5') stub_revisions = [{ 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] self.storage.revision_get = MagicMock(return_value=stub_revisions) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertEqual(actual_revision, stub_revisions[0]) self.storage.revision_get.assert_called_with([sha1_bin]) @istest def revision_get_multiple(self): # given sha1_bin = hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5') sha1_other = hashutil.hex_to_hash( 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') stub_revisions = [ { 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }, { 'id': sha1_other, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'name', 'email': b'name@surname.org', }, 'committer': { 'name': b'name', 'email': b'name@surname.org', }, 'message': b'ugly fix for bug 42', 'date': datetime.datetime(2000, 1, 12, 5, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 12, 5, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], } ] self.storage.revision_get = MagicMock( return_value=stub_revisions) # when actual_revision = backend.revision_get_multiple([sha1_bin, sha1_other]) # then self.assertEqual(actual_revision, stub_revisions) self.storage.revision_get.assert_called_with( [sha1_bin, sha1_other]) @istest def revision_get_multiple_none_found(self): # given sha1_bin = hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5') sha1_other = hashutil.hex_to_hash( 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') self.storage.revision_get = MagicMock( return_value=[]) # when actual_revision = backend.revision_get_multiple([sha1_bin, sha1_other]) # then self.assertEqual(actual_revision, []) self.storage.revision_get.assert_called_with( [sha1_bin, sha1_other]) @istest def revision_log(self): # given sha1_bin = hashutil.hex_to_hash( '28d8be353ed3480476f032475e7c233eff7371d5') stub_revision_log = [{ 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 
1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] self.storage.revision_log = MagicMock(return_value=stub_revision_log) # when - actual_revision = backend.revision_log(sha1_bin) + actual_revision = backend.revision_log(sha1_bin, limit=1) # then self.assertEqual(list(actual_revision), stub_revision_log) - self.storage.revision_log.assert_called_with([sha1_bin], 100) + self.storage.revision_log.assert_called_with([sha1_bin], 1) @istest def revision_log_by(self): # given sha1_bin = hashutil.hex_to_hash( '28d8be353ed3480476f032475e7c233eff7371d5') stub_revision_log = [{ 'id': sha1_bin, 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] self.storage.revision_log_by = MagicMock( return_value=stub_revision_log) # when - actual_log = backend.revision_log_by(1, 'refs/heads/master', None) + actual_log = backend.revision_log_by(1, 'refs/heads/master', + None, limit=1) # then self.assertEqual(actual_log, stub_revision_log) - self.storage.revision_log.assert_called_with([sha1_bin], 100) + self.storage.revision_log.assert_called_with([sha1_bin], 1) @istest def revision_log_by_norev(self): # given sha1_bin = hashutil.hex_to_hash( '28d8be353ed3480476f032475e7c233eff7371d5') self.storage.revision_log_by = MagicMock(return_value=None) # when - actual_log = backend.revision_log_by(1, 'refs/heads/master', None) + actual_log = backend.revision_log_by(1, 'refs/heads/master', + None, limit=1) # then self.assertEqual(actual_log, None) - self.storage.revision_log.assert_called_with([sha1_bin], 100) + self.storage.revision_log.assert_called_with([sha1_bin], 1) @istest def stat_counters(self): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } self.storage.stat_counters = MagicMock(return_value=input_stats) # when actual_stats = backend.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) self.storage.stat_counters.assert_called_with() @istest def stat_origin_visits(self): # given expected_dates = [ { 'date': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 1 }, { 'date': datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 2 }, { 'date': datetime.datetime( 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 3 } ] self.storage.origin_visit_get = MagicMock(return_value=expected_dates) # when actual_dates = backend.stat_origin_visits(5) # then self.assertEqual(actual_dates, expected_dates) self.storage.origin_visit_get.assert_called_with(5) @istest def directory_entry_get_by_path(self): # given stub_dir_entry = {'id': b'dir-id', 'type': 'dir', 'name': b'some/path/foo'} 
self.storage.directory_entry_get_by_path = MagicMock( return_value=stub_dir_entry) # when actual_dir_entry = backend.directory_entry_get_by_path(b'dir-sha1', 'some/path/foo') self.assertEquals(actual_dir_entry, stub_dir_entry) self.storage.directory_entry_get_by_path.assert_called_once_with( b'dir-sha1', [b'some', b'path', b'foo']) @istest def entity_get(self): # given stub_entities = [{'uuid': 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2'}, {'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent': None}] self.storage.entity_get = MagicMock(return_value=stub_entities) # when actual_entities = backend.entity_get( 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') # then self.assertEquals(actual_entities, stub_entities) self.storage.entity_get.assert_called_once_with( 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') diff --git a/swh/web/ui/tests/test_converters.py b/swh/web/ui/tests/test_converters.py index ad85acd1..56a414d1 100644 --- a/swh/web/ui/tests/test_converters.py +++ b/swh/web/ui/tests/test_converters.py @@ -1,545 +1,662 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import unittest from nose.tools import istest from swh.core import hashutil from swh.web.ui import converters class ConvertersTestCase(unittest.TestCase): @istest def from_swh(self): some_input = { 'a': 'something', 'b': 'someone', 'c': b'sharp-0.3.4.tgz', 'd': hashutil.hex_to_hash( 'b04caf10e9535160d90e874b45aa426de762f19f'), 'e': b'sharp.html/doc_002dS_005fISREG.html', 'g': [b'utf-8-to-decode', b'another-one'], 'h': 'something filtered', 'i': {'e': b'something'}, 'j': { 'k': { 'l': [b'bytes thing', b'another thingy'], 'n': 'dont care either' }, 'm': 'dont care' }, 'o': 'something', 'p': 'bar', 'q': 'intact', 'r': {'p': 'also intact', 'q': 'bar'}, 's': { 'timestamp': 42, 'offset': -420, 'negative_utc': None, } } expected_output = { 'a': 'something', 'b': 'someone', 'c': 'sharp-0.3.4.tgz', 'd': 'b04caf10e9535160d90e874b45aa426de762f19f', 'e': 'sharp.html/doc_002dS_005fISREG.html', 'g': ['utf-8-to-decode', 'another-one'], 'i': {'e': 'something'}, 'j': { 'k': { 'l': ['bytes thing', 'another thingy'] } }, 'p': 'foo', 'q': 'intact', 'r': {'p': 'also intact', 'q': 'foo'}, 's': '1969-12-31T17:00:42-07:00', } def test_convert_fn(v): return 'foo' if v == 'bar' else v actual_output = converters.from_swh(some_input, hashess={'d', 'o'}, bytess={'c', 'e', 'g', 'l'}, dates={'s'}, blacklist={'h', 'm', 'n', 'o'}, convert={'p', 'q'}, convert_fn=test_convert_fn) self.assertEquals(expected_output, actual_output) @istest def from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self): some_input = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } expected_output = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } actual_output = converters.from_swh(some_input, hashess={'a', 'b'}, bytess={'c', 'd'}) self.assertEquals(expected_output, actual_output) @istest def from_swh_edge_cases_convert_invalid_utf8_bytes(self): some_input = { 'a': 'something', 'b': 'someone', 'c': b'a name \xff', 'd': b'an email \xff', } expected_output = { 'a': 'something', 'b': 'someone', 'c': 'a name \\xff', 'd': 'an email \\xff', 'decoding_failures': ['c', 'd'] } actual_output = converters.from_swh(some_input, hashess={'a', 'b'}, bytess={'c', 'd'}) for v in ['a', 'b', 'c', 'd']: 
self.assertEqual(expected_output[v], actual_output[v]) self.assertEqual(len(expected_output['decoding_failures']), len(actual_output['decoding_failures'])) for v in expected_output['decoding_failures']: self.assertTrue(v in actual_output['decoding_failures']) @istest def from_swh_empty(self): # when self.assertEquals({}, converters.from_swh({})) @istest def from_swh_none(self): # when self.assertIsNone(converters.from_swh(None)) @istest def from_origin(self): # given origin_input = { 'origin_type': 'ftp', 'origin_url': 'rsync://ftp.gnu.org/gnu/octave', 'branch': 'octave-3.4.0.tar.gz', 'revision': b'\xb0L\xaf\x10\xe9SQ`\xd9\x0e\x87KE\xaaBm\xe7b\xf1\x9f', # noqa 'path': b'octave-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa } expected_origin = { 'origin_type': 'ftp', 'origin_url': 'rsync://ftp.gnu.org/gnu/octave', 'branch': 'octave-3.4.0.tar.gz', 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'path': 'octave-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa } # when actual_origin = converters.from_origin(origin_input) # then self.assertEqual(actual_origin, expected_origin) @istest def from_release(self): release_input = { 'id': hashutil.hex_to_hash( 'aad23fa492a0c5fed0708a6703be875448c86884'), 'target': hashutil.hex_to_hash( '5e46d564378afc44b31bb89f99d5675195fbdf67'), 'target_type': 'revision', 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'author': { 'name': b'author name', 'fullname': b'Author Name author@email', 'email': b'author@email', }, 'name': b'v0.0.1', 'message': b'some comment on release', 'synthetic': True, } expected_release = { 'id': 'aad23fa492a0c5fed0708a6703be875448c86884', 'target': '5e46d564378afc44b31bb89f99d5675195fbdf67', 'target_type': 'revision', 'date': '2015-01-01T22:00:00+00:00', 'author': { 'name': 'author name', 'fullname': 'Author Name author@email', 'email': 'author@email', }, 'name': 'v0.0.1', 'message': 'some comment on release', 'target_type': 'revision', 'synthetic': True, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_release_no_revision(self): release_input = { 'id': hashutil.hex_to_hash( 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2016, 3, 2, 10, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.1.1', 'message': b'comment on release', 'synthetic': False, 'author': { 'name': b'bob', 'fullname': b'Bob bob@alice.net', 'email': b'bob@alice.net', }, } expected_release = { 'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e', 'target': None, 'date': '2016-03-02T10:00:00-00:00', 'name': 'v0.1.1', 'message': 'comment on release', 'synthetic': False, 'author': { 'name': 'bob', 'fullname': 'Bob bob@alice.net', 'email': 'bob@alice.net', }, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_revision(self): revision_input = { 'id': hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', 
}, 'message': b'synthetic revision message', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'parents': [ hashutil.hex_to_hash( '29d8be353ed3480476f032475e7c244eff7371d5'), hashutil.hex_to_hash( '30d8be353ed3480476f032475e7c244eff7371d5') ], 'children': [ hashutil.hex_to_hash( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': 'synthetic revision message', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5', '30d8be353ed3480476f032475e7c244eff7371d5' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, + 'merge': True + } + + # when + actual_revision = converters.from_revision(revision_input) + + # then + self.assertEqual(actual_revision, expected_revision) + + @istest + def from_revision_nomerge(self): + revision_input = { + 'id': hashutil.hex_to_hash( + '18d8be353ed3480476f032475e7c233eff7371d5'), + 'parents': [ + hashutil.hex_to_hash( + '29d8be353ed3480476f032475e7c244eff7371d5') + ] + } + + expected_revision = { + 'id': '18d8be353ed3480476f032475e7c233eff7371d5', + 'parents': [ + '29d8be353ed3480476f032475e7c244eff7371d5' + ], + 'merge': False + } + + # when + actual_revision = converters.from_revision(revision_input) + + # then + self.assertEqual(actual_revision, expected_revision) + + @istest + def from_revision_noparents(self): + revision_input = { + 'id': hashutil.hex_to_hash( + '18d8be353ed3480476f032475e7c233eff7371d5'), + 'directory': hashutil.hex_to_hash( + '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), + 'author': { + 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', + 'email': b'robot@softwareheritage.org', + }, + 'committer': { + 'name': b'Software Heritage', + 'fullname': b'robot robot@softwareheritage.org', + 'email': b'robot@softwareheritage.org', + }, + 'message': b'synthetic revision message', + 'date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'committer_date': { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc).timestamp(), + 'offset': 0, + 'negative_utc': False, + }, + 'synthetic': True, + 'type': 'tar', + 
'children': [ + hashutil.hex_to_hash( + '123546353ed3480476f032475e7c244eff7371d5'), + ], + 'metadata': { + 'original_artifact': [{ + 'archive_type': 'tar', + 'name': 'webbase-5.7.0.tar.gz', + 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', + 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', + 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' + '309d36484e7edf7bb912', + + }] + }, + } + + expected_revision = { + 'id': '18d8be353ed3480476f032475e7c233eff7371d5', + 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', + 'author': { + 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', + 'email': 'robot@softwareheritage.org', + }, + 'committer': { + 'name': 'Software Heritage', + 'fullname': 'robot robot@softwareheritage.org', + 'email': 'robot@softwareheritage.org', + }, + 'message': 'synthetic revision message', + 'date': "2000-01-17T11:23:54+00:00", + 'committer_date': "2000-01-17T11:23:54+00:00", + 'children': [ + '123546353ed3480476f032475e7c244eff7371d5' + ], + 'type': 'tar', + 'synthetic': True, + 'metadata': { + 'original_artifact': [{ + 'archive_type': 'tar', + 'name': 'webbase-5.7.0.tar.gz', + 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', + 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', + 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' + '309d36484e7edf7bb912' + }] + } } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_revision_invalid(self): revision_input = { 'id': hashutil.hex_to_hash( '18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hashutil.hex_to_hash( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'message': b'invalid message \xff', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'parents': [ hashutil.hex_to_hash( '29d8be353ed3480476f032475e7c244eff7371d5'), hashutil.hex_to_hash( '30d8be353ed3480476f032475e7c244eff7371d5') ], 'children': [ hashutil.hex_to_hash( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': None, 'message_decoding_failed': True, 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5', 
'30d8be353ed3480476f032475e7c244eff7371d5' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, + 'merge': True } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_content(self): content_input = { 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'data': b'data in bytes', 'length': 10, 'status': 'hidden', } # 'status' is filtered expected_content = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' '7d3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'data': b'data in bytes', 'length': 10, 'status': 'absent', } # when actual_content = converters.from_content(content_input) # then self.assertEqual(actual_content, expected_content) @istest def from_person(self): person_input = { 'id': 10, 'anything': 'else', 'name': b'bob', 'fullname': b'bob bob@alice.net', 'email': b'bob@foo.alice', } expected_person = { 'id': 10, 'anything': 'else', 'name': 'bob', 'fullname': 'bob bob@alice.net', 'email': 'bob@foo.alice', } # when actual_person = converters.from_person(person_input) # then self.assertEqual(actual_person, expected_person) @istest def from_directory_entries(self): dir_entries_input = { 'sha1': hashutil.hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' '2ebda5'), 'sha256': hashutil.hex_to_hash('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'target': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'dir_id': hashutil.hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), 'name': b'bob', 'type': 10, 'status': 'hidden', } expected_dir_entries = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2747' 'd3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'bob', 'type': 10, 'status': 'absent', } # when actual_dir_entries = converters.from_directory_entry(dir_entries_input) # then self.assertEqual(actual_dir_entries, expected_dir_entries) diff --git a/swh/web/ui/tests/test_renderers.py b/swh/web/ui/tests/test_renderers.py index b22ae942..a74e7492 100644 --- a/swh/web/ui/tests/test_renderers.py +++ b/swh/web/ui/tests/test_renderers.py @@ -1,231 +1,233 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import json import unittest import yaml -from flask_api.mediatypes import MediaType +from flask import Response from nose.tools import istest -from unittest.mock import patch +from unittest.mock import patch, MagicMock from swh.web.ui import renderers class RendererTestCase(unittest.TestCase): + @patch('swh.web.ui.renderers.g') + 
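The from_revision tests above now expect a 'merge' key next to the converted parents. A minimal sketch of how such a flag can be derived from a converted revision dict (the helper name enrich_with_merge is hypothetical, not part of this patch):

def enrich_with_merge(revision):
    # Hypothetical helper (name not from the patch): when a converted
    # revision carries a 'parents' list, flag it as a merge iff it has more
    # than one parent; a revision without a 'parents' key is left untouched,
    # which is what the noparents test expects.
    if 'parents' in revision:
        revision['merge'] = len(revision['parents']) > 1
    return revision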
@patch('swh.web.ui.renderers.json') @patch('swh.web.ui.renderers.request') + @patch('swh.web.ui.renderers.render_template') + @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') @istest - def swh_filter_renderer_do_nothing(self, mock_request): + def swh_multi_response_mimetype_html(self, mock_filter, mock_render, + mock_request, mock_json, mock_g): # given - mock_request.args = {} - - swh_filter_renderer = renderers.SWHFilterEnricher() - - input_data = {'a': 'some-data'} + data = {'data': [12, 34], + 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'} + mock_g.get.return_value = {'my_key': 'my_display_value'} + mock_filter.return_value = data + expected_env = { + 'my_key': 'my_display_value', + 'response_data': json.dumps(data), + 'request': mock_request + } + + def mock_mimetypes(key): + mimetypes = { + 'text/html': 10, + 'application/json': 0.1, + 'application/yaml': 0.1 + } + return mimetypes[key] + accept_mimetypes = MagicMock() + accept_mimetypes.__getitem__.side_effect = mock_mimetypes + accept_mimetypes.best_match = MagicMock(return_value='text/html') + mock_request.accept_mimetypes = accept_mimetypes + mock_json.dumps.return_value = json.dumps(data) # when - actual_data = swh_filter_renderer.filter_by_fields(input_data) + rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) # then - self.assertEquals(actual_data, input_data) + mock_filter.assert_called_once_with(renderers.SWHMultiResponse, data) + mock_render.assert_called_with('apidoc.html', **expected_env) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.mimetype, 'text/html') - @patch('swh.web.ui.renderers.utils') + @patch('swh.web.ui.renderers.g') + @patch('swh.web.ui.renderers.yaml') @patch('swh.web.ui.renderers.request') + @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') @istest - def swh_filter_renderer_do_filter(self, mock_request, mock_utils): + def swh_multi_response_mimetype_yaml(self, mock_filter, + mock_request, mock_yaml, mock_g): # given - mock_request.args = {'fields': 'a,c'} - mock_utils.filter_field_keys.return_value = {'a': 'some-data'} - - swh_filter_renderer = renderers.SWHFilterEnricher() - - input_data = {'a': 'some-data', - 'b': 'some-other-data'} + data = {'data': [12, 34], + 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'} + + def mock_mimetypes(key): + mimetypes = { + 'application/yaml': 10, + 'application/json': 0.1, + 'text/html': 0.1 + } + return mimetypes[key] + accept_mimetypes = MagicMock() + accept_mimetypes.__getitem__.side_effect = mock_mimetypes + accept_mimetypes.best_match = MagicMock( + return_value='application/yaml') + mock_request.accept_mimetypes = accept_mimetypes + mock_yaml.dump.return_value = yaml.dump(data) + mock_filter.return_value = data # when - actual_data = swh_filter_renderer.filter_by_fields(input_data) + rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) # then - self.assertEquals(actual_data, {'a': 'some-data'}) - - mock_utils.filter_field_keys.assert_called_once_with(input_data, - {'a', 'c'}) - + mock_filter.assert_called_once_with(renderers.SWHMultiResponse, data) + mock_yaml.dump.assert_called_once_with(data) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.mimetype, 'application/yaml') + self.assertEqual(data, yaml.load(rv.data.decode('utf-8'))) + + @patch('swh.web.ui.renderers.g') + @patch('swh.web.ui.renderers.json') @patch('swh.web.ui.renderers.request') + @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') @istest - def yaml_renderer_without_filter(self, mock_request): + 
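The mimetype tests in this file all drive a single Accept-header dispatch. A rough sketch of that dispatch, assuming Flask's request/Response/render_template and the apidoc.html template used in the tests; the real method on SWHMultiResponse also filters fields and injects documentation context from flask.g, which is omitted here:

import json

import yaml
from flask import Response, render_template, request


def make_response_from_mimetype(data):
    # Sketch only: pick the representation from the client's Accept header,
    # defaulting to JSON, as the html/yaml/json tests above assert.
    if not isinstance(data, (list, dict)):
        return data  # an already-built Response passes through untouched
    best = request.accept_mimetypes.best_match(
        ['application/json', 'application/yaml', 'text/html'])
    if best == 'text/html':
        return Response(render_template('apidoc.html',
                                        response_data=json.dumps(data)),
                        mimetype='text/html')
    if best == 'application/yaml':
        return Response(yaml.dump(data), mimetype='application/yaml')
    return Response(json.dumps(data), mimetype='application/json')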
def swh_multi_response_mimetype_json(self, mock_filter, + mock_request, mock_json, mock_g): # given - mock_request.args = {} - yaml_renderer = renderers.YAMLRenderer() - - input_data = {'target': 'sha1-dir', - 'type': 'dir', - 'dir-id': 'dir-id-sha1-git'} - - expected_data = input_data + data = {'data': [12, 34], + 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'} + + def mock_mimetypes(key): + mimetypes = { + 'application/json': 10, + 'text/html': 0.1, + 'application/yaml': 0.1 + } + return mimetypes[key] + accept_mimetypes = MagicMock() + accept_mimetypes.__getitem__.side_effect = mock_mimetypes + accept_mimetypes.best_match = MagicMock( + return_value='application/json') + mock_request.accept_mimetypes = accept_mimetypes + mock_json.dumps.return_value = json.dumps(data) + mock_filter.return_value = data # when - actual_data = yaml_renderer.render(input_data, 'application/yaml') + rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) # then - self.assertEqual(yaml.load(actual_data), expected_data) + mock_filter.assert_called_once_with(renderers.SWHMultiResponse, data) + mock_json.dumps.assert_called_once_with(data) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.mimetype, 'application/json') + self.assertEqual(data, json.loads(rv.data.decode('utf-8'))) @patch('swh.web.ui.renderers.request') @istest - def yaml_renderer(self, mock_request): + def swh_multi_response_make_response_not_list_dict(self, mock_request): # given - mock_request.args = {'fields': 'type,target'} - yaml_renderer = renderers.YAMLRenderer() - - input_data = {'target': 'sha1-dir', - 'type': 'dir', - 'dir-id': 'dir-id-sha1-git'} - - expected_data = {'target': 'sha1-dir', 'type': 'dir'} + incoming = Response() # when - actual_data = yaml_renderer.render(input_data, 'application/yaml') + rv = renderers.SWHMultiResponse.make_response_from_mimetype(incoming) # then - self.assertEqual(yaml.load(actual_data), expected_data) + self.assertEqual(rv, incoming) @patch('swh.web.ui.renderers.request') @istest - def json_renderer_basic(self, mock_request): + def swh_filter_renderer_do_nothing(self, mock_request): # given mock_request.args = {} - json_renderer = renderers.SWHJSONRenderer() - - input_data = {'target': 'sha1-dir', - 'type': 'dir', - 'dir-id': 'dir-id-sha1-git'} - - expected_data = input_data - - # when - actual_data = json_renderer.render(input_data, MediaType( - 'application/json')) - - # then - self.assertEqual(json.loads(actual_data), expected_data) - @patch('swh.web.ui.renderers.request') - @istest - def json_renderer_basic_with_filter(self, mock_request): - # given - mock_request.args = {'fields': 'target'} - json_renderer = renderers.SWHJSONRenderer() - - input_data = {'target': 'sha1-dir', - 'type': 'dir', - 'dir-id': 'dir-id-sha1-git'} + swh_filter_renderer = renderers.SWHFilterEnricher() - expected_data = {'target': 'sha1-dir'} + input_data = {'a': 'some-data'} # when - actual_data = json_renderer.render(input_data, MediaType( - 'application/json')) + actual_data = swh_filter_renderer.filter_by_fields(input_data) # then - self.assertEqual(json.loads(actual_data), expected_data) + self.assertEquals(actual_data, input_data) + @patch('swh.web.ui.renderers.utils') @patch('swh.web.ui.renderers.request') @istest - def json_renderer_basic_with_filter_and_jsonp(self, mock_request): + def swh_filter_renderer_do_filter(self, mock_request, mock_utils): # given - mock_request.args = {'fields': 'target', - 'callback': 'jsonpfn'} - json_renderer = renderers.SWHJSONRenderer() - - input_data = 
{'target': 'sha1-dir', - 'type': 'dir', - 'dir-id': 'dir-id-sha1-git'} - - # when - actual_data = json_renderer.render(input_data, MediaType( - 'application/json')) + mock_request.args = {'fields': 'a,c'} + mock_utils.filter_field_keys.return_value = {'a': 'some-data'} - # then - self.assertEqual(actual_data, 'jsonpfn({"target": "sha1-dir"})') + swh_filter_user = renderers.SWHMultiResponse() - @patch('swh.web.ui.renderers.request') - @istest - def jsonp_enricher_basic_with_filter_and_jsonp(self, mock_request): - # given - mock_request.args = {'callback': 'jsonpfn'} - jsonp_enricher = renderers.JSONPEnricher() + input_data = {'a': 'some-data', + 'b': 'some-other-data'} # when - actual_output = jsonp_enricher.enrich_with_jsonp({'output': 'test'}) + actual_data = swh_filter_user.filter_by_fields(input_data) # then - self.assertEqual(actual_output, "jsonpfn({'output': 'test'})") - - @patch('swh.web.ui.renderers.request') - @istest - def jsonp_enricher_do_nothing(self, mock_request): - # given - mock_request.args = {} - jsonp_enricher = renderers.JSONPEnricher() - - # when - actual_output = jsonp_enricher.enrich_with_jsonp({'output': 'test'}) + self.assertEquals(actual_data, {'a': 'some-data'}) - # then - self.assertEqual(actual_output, {'output': 'test'}) + mock_utils.filter_field_keys.assert_called_once_with(input_data, + {'a', 'c'}) @istest def urlize_api_links(self): # update api link with html links content with links content = '{"url": "/api/1/abc/"}' expected_content = '{"url": "/api/1/abc/"}' self.assertEquals(renderers.urlize_api_links(content), expected_content) # update /browse link with html links content with links content = '{"url": "/browse/def/"}' expected_content = '{"url": "' \ '/browse/def/"}' self.assertEquals(renderers.urlize_api_links(content), expected_content) # will do nothing since it's not an api url other_content = '{"url": "/something/api/1/other"}' self.assertEquals(renderers.urlize_api_links(other_content), other_content) + @istest + def revision_id_from_url(self): + url = ('/browse/revision/9ba4bcb645898d562498ea66a0df958ef0e7a68c/' + 'prev/9ba4bcb645898d562498ea66a0df958ef0e7aaaa/') + + expected_id = '9ba4bcb645898d562498ea66a0df958ef0e7a68c' + self.assertEqual(renderers.revision_id_from_url(url), expected_id) + @istest def safe_docstring_display(self): # update api link with html links content with links - docstring = """

Show all revisions (~git log) starting from -sha1_git. - The first element returned is the given sha1_git. - Args: - sha1_git: the revision's hash - Returns: - Information on the revision if found. - Raises: - BadInputExc in case of unknown algo_hash or bad hash - NotFoundExc if the revision is not found. - Example: - blah """ - expected_docstring = """
Show all revisions (~git log) starting from -sha1_git. - The first element returned is the given sha1_git. - Args: - sha1_git: the revision's hash - Returns: - Information on the revision if found. - Raises: - BadInputExc in case of unknown algo_hash or bad hash - NotFoundExc if the revision is not found. - Example: - blah """ + docstring = """This is my list header: + + - Here is item 1, with a continuation + line right here + - Here is item 2 + + Here is something that is not part of the list""" + + expected_docstring = """
This is my list header: + + • Here is item 1, with a continuation +line right here • + • Here is item 2 • + + Here is something that is not part of the list
+""" self.assertEquals(renderers.safe_docstring_display(docstring), expected_docstring) diff --git a/swh/web/ui/tests/test_service.py b/swh/web/ui/tests/test_service.py index 02e8a2f6..822e77fa 100644 --- a/swh/web/ui/tests/test_service.py +++ b/swh/web/ui/tests/test_service.py @@ -1,2025 +1,1717 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import MagicMock, patch, call from swh.core.hashutil import hex_to_hash, hash_to_hex from swh.web.ui import service from swh.web.ui.exc import BadInputExc, NotFoundExc from swh.web.ui.tests import test_app class ServiceTestCase(test_app.SWHApiTestCase): + def setUp(self): + self.SHA1_SAMPLE = '18d8be353ed3480476f032475e7c233eff7371d5' + self.SHA1_SAMPLE_BIN = hex_to_hash(self.SHA1_SAMPLE) + self.SHA256_SAMPLE = ('39007420ca5de7cb3cfc15196335507e' + 'e76c98930e7e0afa4d2747d3bf96c926') + self.SHA256_SAMPLE_BIN = hex_to_hash(self.SHA256_SAMPLE) + self.SHA1GIT_SAMPLE = '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' + self.SHA1GIT_SAMPLE_BIN = hex_to_hash(self.SHA1GIT_SAMPLE) + self.DIRECTORY_ID = '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6' + self.DIRECTORY_ID_BIN = hex_to_hash(self.DIRECTORY_ID) + self.AUTHOR_ID_BIN = { + 'name': b'author', + 'email': b'author@company.org', + } + self.AUTHOR_ID = { + 'name': 'author', + 'email': 'author@company.org', + } + self.COMMITTER_ID_BIN = { + 'name': b'committer', + 'email': b'committer@corp.org', + } + self.COMMITTER_ID = { + 'name': 'committer', + 'email': 'committer@corp.org', + } + self.SAMPLE_DATE_RAW = { + 'timestamp': datetime.datetime( + 2000, 1, 17, 11, 23, 54, + tzinfo=datetime.timezone.utc, + ).timestamp(), + 'offset': 0, + 'negative_utc': False, + } + self.SAMPLE_DATE = '2000-01-17T11:23:54+00:00' + self.SAMPLE_MESSAGE_BIN = b'elegant fix for bug 31415957' + self.SAMPLE_MESSAGE = 'elegant fix for bug 31415957' + + self.SAMPLE_REVISION = { + 'id': self.SHA1_SAMPLE, + 'directory': self.DIRECTORY_ID, + 'author': self.AUTHOR_ID, + 'committer': self.COMMITTER_ID, + 'message': self.SAMPLE_MESSAGE, + 'date': self.SAMPLE_DATE, + 'committer_date': self.SAMPLE_DATE, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + 'merge': False + } + self.SAMPLE_REVISION_RAW = { + 'id': self.SHA1_SAMPLE_BIN, + 'directory': self.DIRECTORY_ID_BIN, + 'author': self.AUTHOR_ID_BIN, + 'committer': self.COMMITTER_ID_BIN, + 'message': self.SAMPLE_MESSAGE_BIN, + 'date': self.SAMPLE_DATE_RAW, + 'committer_date': self.SAMPLE_DATE_RAW, + 'synthetic': False, + 'type': 'git', + 'parents': [], + 'metadata': [], + } + + self.SAMPLE_CONTENT = { + 'sha1': self.SHA1_SAMPLE, + 'sha256': self.SHA256_SAMPLE, + 'sha1_git': self.SHA1GIT_SAMPLE, + 'length': 190, + 'status': 'absent' + } + self.SAMPLE_CONTENT_RAW = { + 'sha1': self.SHA1_SAMPLE_BIN, + 'sha256': self.SHA256_SAMPLE_BIN, + 'sha1_git': self.SHA1GIT_SAMPLE_BIN, + 'length': 190, + 'status': 'hidden' + } + @patch('swh.web.ui.service.backend') @istest def lookup_multiple_hashes_ball_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then 
self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': True}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) @patch('swh.web.ui.service.backend') @istest def lookup_multiple_hashes_some_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[ hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') ]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) @patch('swh.web.ui.service.backend') @istest def lookup_hash_does_not_exist(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_lookup = service.lookup_hash( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': None, 'algo': 'sha1_git'}, actual_lookup) # check the function has been called with parameters mock_backend.content_find.assert_called_with( 'sha1_git', hex_to_hash('123caf10e9535160d90e874b45aa426de762f19f')) @patch('swh.web.ui.service.backend') @istest def lookup_hash_exist(self, mock_backend): # given stub_content = { 'sha1': hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') } mock_backend.content_find = MagicMock(return_value=stub_content) # when actual_lookup = service.lookup_hash( 'sha1:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': stub_content, 'algo': 'sha1'}, actual_lookup) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f'), ) @patch('swh.web.ui.service.backend') @istest def search_hash_does_not_exist(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_lookup = service.search_hash( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': False}, actual_lookup) # check the function has been called with parameters mock_backend.content_find.assert_called_with( 'sha1_git', hex_to_hash('123caf10e9535160d90e874b45aa426de762f19f')) @patch('swh.web.ui.service.backend') @istest def search_hash_exist(self, mock_backend): # given stub_content = { 'sha1': hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') } mock_backend.content_find = MagicMock(return_value=stub_content) # when actual_lookup = service.search_hash( 'sha1:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': True}, actual_lookup) mock_backend.content_find.assert_called_with( 'sha1', hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f'), ) @patch('swh.web.ui.service.backend') @istest def lookup_hash_origin(self, mock_backend): # given mock_backend.content_find_occurrence = MagicMock(return_value={ 'origin_type': 'sftp', 'origin_url': 'sftp://ftp.gnu.org/gnu/octave', 'branch': 'octavio-3.4.0.tar.gz', 'revision': b'\xb0L\xaf\x10\xe9SQ`\xd9\x0e\x87KE\xaaBm\xe7b\xf1\x9f', # noqa 'path': b'octavio-3.4.0/doc/interpreter/octave.html/doc_002dS_005fISREG.html' # noqa }) expected_origin = { 'origin_type': 'sftp', 'origin_url': 'sftp://ftp.gnu.org/gnu/octave', 'branch': 'octavio-3.4.0.tar.gz', 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'path': 
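The two lookup_multiple_hashes tests above fix the contract: the backend reports which sha1s are missing, and every other file entry is flagged as found. A minimal sketch of that behaviour, assuming backend refers to swh.web.ui.backend as mocked in these tests:

from swh.core.hashutil import hex_to_hash

from swh.web.ui import backend


def lookup_multiple_hashes(files):
    # Sketch: ask the backend which sha1s are unknown, then mark each entry
    # as found unless its sha1 is in that missing set.
    missing = set(backend.content_missing_per_sha1(
        [hex_to_hash(f['sha1']) for f in files]))
    for f in files:
        f['found'] = hex_to_hash(f['sha1']) not in missing
    return files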
'octavio-3.4.0/doc/interpreter/octave.html/doc' '_002dS_005fISREG.html' } # when actual_origin = service.lookup_hash_origin( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_origin, expected_origin) mock_backend.content_find_occurrence.assert_called_with( 'sha1_git', hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f')) @patch('swh.web.ui.service.backend') @istest def stat_counters(self, mock_backend): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_backend.stat_counters = MagicMock(return_value=input_stats) # when actual_stats = service.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) mock_backend.stat_counters.assert_called_with() @patch('swh.web.ui.service.backend') @istest def stat_origin_visits(self, mock_backend): # given stub_result = [ { 'date': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 1 }, { 'date': datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 2 }, { 'date': datetime.datetime( 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 3 } ] mock_backend.stat_origin_visits.return_value = stub_result # when expected_dates = [ { 'date': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'origin': 1, 'visit': 1 }, { 'date': datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'origin': 1, 'visit': 2 }, { 'date': datetime.datetime( 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'origin': 1, 'visit': 3 } ] actual_dates = service.stat_origin_visits(6) # then self.assertEqual(expected_dates, list(actual_dates)) mock_backend.stat_origin_visits.assert_called_once_with(6) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.hashutil') - @istest - def hash_and_search(self, mock_hashutil, mock_backend): - # given - bhash = hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') - mock_hashutil.hashfile.return_value = {'sha1': bhash} - mock_backend.content_find = MagicMock(return_value={ - 'sha1': bhash, - 'sha1_git': bhash, - }) - - # when - actual_content = service.hash_and_search('/some/path') - - # then - self.assertEqual(actual_content, { - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'sha1_git': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': True, - }) - - mock_hashutil.hashfile.assert_called_once_with('/some/path') - mock_backend.content_find.assert_called_once_with('sha1', bhash) - - @patch('swh.web.ui.service.hashutil') - @istest - def hash_and_search_not_found(self, mock_hashutil): - # given - bhash = hex_to_hash('456caf10e9535160d90e874b45aa426de762f19f') - mock_hashutil.hashfile.return_value = {'sha1': bhash} - mock_hashutil.hash_to_hex = MagicMock( - return_value='456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_find = MagicMock(return_value=None) - - # when - actual_content = service.hash_and_search('/some/path') - - # then - self.assertEqual(actual_content, { - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': False, - }) - - mock_hashutil.hashfile.assert_called_once_with('/some/path') - 
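The stat_origin_visits test above converts the backend's datetime values into POSIX timestamps. A small sketch of that conversion, under the same backend assumption as above:

from swh.web.ui import backend


def stat_origin_visits(origin_id):
    # Sketch: the backend yields visits whose 'date' is a datetime object;
    # expose them with POSIX timestamps instead, as the test expects.
    for visit in backend.stat_origin_visits(origin_id):
        visit['date'] = visit['date'].timestamp()
        yield visit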
self.storage.content_find.assert_called_once_with({'sha1': bhash}) - mock_hashutil.hash_to_hex.assert_called_once_with(bhash) - @patch('swh.web.ui.service.backend') @istest def lookup_origin(self, mock_backend): # given mock_backend.origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when - actual_origin = service.lookup_origin('origin-id') + actual_origin = service.lookup_origin({'id': 'origin-id'}) # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) - mock_backend.origin_get.assert_called_with('origin-id') + mock_backend.origin_get.assert_called_with({'id': 'origin-id'}) @patch('swh.web.ui.service.backend') @istest def lookup_release_ko_id_checksum_not_ok_because_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() with self.assertRaises(BadInputExc) as cm: # when service.lookup_release('not-a-sha1') self.assertIn('invalid checksum', cm.exception.args[0]) mock_backend.release_get.called = False @patch('swh.web.ui.service.backend') @istest def lookup_release_ko_id_checksum_ok_but_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() # when with self.assertRaises(BadInputExc) as cm: service.lookup_release( '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4daf5' '1aea892abe') self.assertIn('sha1_git supported', cm.exception.args[0]) mock_backend.release_get.called = False @patch('swh.web.ui.service.backend') @istest def lookup_directory_with_path_not_found(self, mock_backend): # given mock_backend.lookup_directory_with_path = MagicMock(return_value=None) sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertIsNone(actual_directory) @patch('swh.web.ui.service.backend') @istest def lookup_directory_with_path_found(self, mock_backend): # given sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' entry = {'id': 'dir-id', 'type': 'dir', 'name': 'some/path/foo'} mock_backend.lookup_directory_with_path = MagicMock(return_value=entry) # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertEqual(entry, actual_directory) @patch('swh.web.ui.service.backend') @istest def lookup_release(self, mock_backend): # given mock_backend.release_get = MagicMock(return_value={ 'id': hex_to_hash('65a55bbdf3629f916219feb3dcc7393ded1bc8db'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }) # when actual_release = service.lookup_release( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') # then self.assertEqual(actual_release, { 'id': '65a55bbdf3629f916219feb3dcc7393ded1bc8db', 'target': None, 'date': '2015-01-01T22:00:00-00:00', 'name': 'v0.0.1', 'message': 'synthetic release', 'synthetic': True, }) mock_backend.release_get.assert_called_with( hex_to_hash('65a55bbdf3629f916219feb3dcc7393ded1bc8db')) @istest def lookup_revision_with_context_ko_not_a_sha1_1(self): # given sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4' \ 'daf51aea892abe' sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) 
self.assertIn('Only sha1_git is supported', cm.exception.args[0]) @istest def lookup_revision_with_context_ko_not_a_sha1_2(self): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f6' \ '2d4daf51aea892abe' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Only sha1_git is supported', cm.exception.args[0]) @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_ko_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_bin = hex_to_hash(sha1_git) mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 777777bdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_called_once_with( sha1_git_bin) @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_ko_root_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_root_bin = hex_to_hash(sha1_git_root) sha1_git_bin = hex_to_hash(sha1_git) mock_backend.revision_get.side_effect = ['foo', None] # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 65a55bbdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_has_calls([call(sha1_git_bin), call(sha1_git_root_bin)]) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_revision_with_context(self, mock_query, mock_backend): # given sha1_git_root = '666' sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.side_effect = [ ('sha1', sha1_git_bin), ('sha1', sha1_git_root_bin) ] # lookup revision first 883, then 666 (both exists) mock_backend.revision_get.side_effect = [ sha1_git_dict, sha1_git_root_dict ] mock_backend.revision_log = MagicMock( return_value=stub_revisions) # when actual_revision = service.lookup_revision_with_context( sha1_git_root, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), + 'merge': False }) mock_query.parse_hash_with_algorithms_or_throws.assert_has_calls( [call(sha1_git, ['sha1'], 'Only sha1_git is supported.'), call(sha1_git_root, ['sha1'], 'Only sha1_git is supported.')]) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_revision_with_context_sha1_git_root_already_retrieved_as_dict( self, mock_query, mock_backend): # given sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': 
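The lookup_revision_with_context tests assert that the target revision's children are recovered by scanning the root's log for revisions whose parents include the target. A sketch of that relation (children_from_log is a hypothetical helper name):

from swh.core.hashutil import hash_to_hex


def children_from_log(revision_log, sha1_git_bin):
    # Sketch: every revision in the root's log that lists the target among
    # its parents is reported as one of the target's children.
    return [hash_to_hex(rev['id'])
            for rev in revision_log
            if sha1_git_bin in rev['parents']]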
sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', sha1_git_bin) # lookup only on sha1 mock_backend.revision_get.return_value = sha1_git_dict mock_backend.revision_log.return_value = stub_revisions # when actual_revision = service.lookup_revision_with_context( {'id': sha1_git_root_bin}, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), + 'merge': False }) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with( # noqa sha1_git, ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(sha1_git_bin) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_revision_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision('123') self.assertIn('Revision 123 not found', cm.exception.args[0]) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_revision_with_path_to_nowhere( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision( '123', 'path/to/something/unknown') self.assertIn("Directory/File 'path/to/something/unknown' " + "pointed to by revision 123 not found", cm.exception.args[0]) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'path/to/something/unknown') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_ko_type_not_implemented( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'rev', 'name': b'some/path/to/rev', 'target': b'456' } stub_content = { 'id': b'12', 'type': 'file' } mock_backend.content_get.return_value = stub_content # when with self.assertRaises(NotImplementedError) as cm: service.lookup_directory_with_revision( '123', 'some/path/to/rev') self.assertIn("Entity of type 'rev' not implemented.", cm.exception.args[0]) # then 
mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/rev') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_without_path(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'123', 'type': 'dir' }, { 'id': b'456', 'type': 'file' }] mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_ls.assert_called_once_with(dir_id) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_dir(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'12', 'type': 'dir' }, { 'id': b'34', 'type': 'file' }] mock_backend.directory_entry_get_by_path.return_value = { 'type': 'dir', 'name': b'some/path', 'target': b'456' } mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123', 'some/path') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(actual_directory_entries['revision'], '123') self.assertEqual(actual_directory_entries['path'], 'some/path') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( dir_id, 'some/path') mock_backend.directory_ls.assert_called_once_with(b'456') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_file_without_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', } mock_backend.content_find.return_value = stub_content # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file') # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': stub_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') 
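The lookup_directory_with_revision tests above cover a missing revision, an unknown path, a bare directory listing, a sub-directory, a file with and without data, and the unimplemented 'rev' entry type. A condensed sketch of that flow, assuming the backend and query collaborators mocked in these tests; hex-encoding of result fields and exact error wording are simplified:

from swh.web.ui import backend, query
from swh.web.ui.exc import NotFoundExc


def lookup_directory_with_revision(sha1_git, path=None, with_data=False):
    # Sketch: resolve the revision, then either list its root directory or
    # resolve the requested path to a sub-directory or file entry.
    _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws(
        sha1_git, ['sha1'], 'Only sha1_git is supported.')
    revision = backend.revision_get(sha1_git_bin)
    if not revision:
        raise NotFoundExc('Revision %s not found' % sha1_git)
    root_dir = revision['directory']
    if not path:
        return {'type': 'dir', 'revision': sha1_git, 'path': None,
                'content': backend.directory_ls(root_dir)}
    entry = backend.directory_entry_get_by_path(root_dir, path)
    if not entry:
        raise NotFoundExc("Directory/File '%s' pointed to by revision %s"
                          ' not found' % (path, sha1_git))
    if entry['type'] == 'dir':
        return {'type': 'dir', 'revision': sha1_git, 'path': path,
                'content': backend.directory_ls(entry['target'])}
    if entry['type'] == 'file':
        content = backend.content_find('sha1_git', entry['target'])
        if with_data:
            content['data'] = backend.content_get(content['sha1'])['data']
        return {'type': 'file', 'revision': sha1_git, 'path': path,
                'content': content}
    raise NotImplementedError("Entity of type '%s' not implemented."
                              % entry['type'])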
mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_file_with_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', 'sha1': b'content-sha1' } mock_backend.content_find.return_value = stub_content mock_backend.content_get.return_value = { 'sha1': b'content-sha1', 'data': b'some raw data' } expected_content = { 'status': 'visible', 'sha1': hash_to_hex(b'content-sha1'), 'data': b'some raw data' } # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file', with_data=True) # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': expected_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') mock_backend.content_get.assert_called_once_with(b'content-sha1') @patch('swh.web.ui.service.backend') @istest def lookup_revision(self, mock_backend): # given - mock_backend.revision_get = MagicMock(return_value={ - 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug 31415957', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }) + mock_backend.revision_get = MagicMock( + return_value=self.SAMPLE_REVISION_RAW) # when actual_revision = service.lookup_revision( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) # then - self.assertEqual(actual_revision, { - 'id': '18d8be353ed3480476f032475e7c233eff7371d5', - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': 'bill & boule', - 'email': 'bill@boule.org', - }, - 'committer': { - 'name': 'boule & bill', - 'email': 'boule@bill.org', - }, - 'message': 'elegant fix for bug 31415957', - 'date': "2000-01-17T11:23:54+00:00", - 'committer_date': "2000-01-17T11:23:54+00:00", - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }) + self.assertEqual(actual_revision, self.SAMPLE_REVISION) mock_backend.revision_get.assert_called_with( - hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.SHA1_SAMPLE_BIN) @patch('swh.web.ui.service.backend') @istest def lookup_revision_invalid_msg(self, 
mock_backend): # given - stub_rev = { - 'id': hex_to_hash('123456'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug \xff', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - } + stub_rev = self.SAMPLE_REVISION_RAW + stub_rev['message'] = b'elegant fix for bug \xff' + + expected_revision = self.SAMPLE_REVISION + expected_revision['message'] = None + expected_revision['message_decoding_failed'] = True mock_backend.revision_get = MagicMock(return_value=stub_rev) # when actual_revision = service.lookup_revision( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) # then - self.assertEqual(actual_revision, { - 'id': '123456', - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': 'bill & boule', - 'email': 'bill@boule.org', - }, - 'committer': { - 'name': 'boule & bill', - 'email': 'boule@bill.org', - }, - 'message': None, - 'message_decoding_failed': True, - 'date': "2000-01-17T11:23:54+00:00", - 'committer_date': "2000-01-17T11:23:54+00:00", - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }) + self.assertEqual(actual_revision, expected_revision) mock_backend.revision_get.assert_called_with( - hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.SHA1_SAMPLE_BIN) @patch('swh.web.ui.service.backend') @istest def lookup_revision_msg_ok(self, mock_backend): # given - mock_backend.revision_get.return_value = { - 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug 31415957', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - } + mock_backend.revision_get.return_value = self.SAMPLE_REVISION_RAW # when rv = service.lookup_revision_message( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) # then - self.assertEquals(rv, {'message': b'elegant fix for bug 31415957'}) + self.assertEquals(rv, {'message': self.SAMPLE_MESSAGE_BIN}) mock_backend.revision_get.assert_called_with( - hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.SHA1_SAMPLE_BIN) @patch('swh.web.ui.service.backend') @istest def lookup_revision_msg_absent(self, mock_backend): # given - mock_backend.revision_get.return_value = { - 'id': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': 
b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - } + stub_revision = self.SAMPLE_REVISION_RAW + del stub_revision['message'] + mock_backend.revision_get.return_value = stub_revision # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_message( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) # then mock_backend.revision_get.assert_called_with( - hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.SHA1_SAMPLE_BIN) self.assertEqual(cm.exception.args[0], 'No message for revision ' 'with sha1_git ' '18d8be353ed3480476f032475e7c233eff7371d5.') @patch('swh.web.ui.service.backend') @istest def lookup_revision_msg_norev(self, mock_backend): # given mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_message( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) # then mock_backend.revision_get.assert_called_with( - hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + self.SHA1_SAMPLE_BIN) self.assertEqual(cm.exception.args[0], 'Revision with sha1_git ' '18d8be353ed3480476f032475e7c233eff7371d5 ' 'not found.') @patch('swh.web.ui.service.backend') @istest def lookup_revision_multiple(self, mock_backend): # given - - sha1_bin = '18d8be353ed3480476f032475e7c233eff7371d5' + sha1 = self.SHA1_SAMPLE sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' stub_revisions = [ - { - 'id': hex_to_hash(sha1_bin), - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug 31415957', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc).timestamp(), - 'offset': 0, - 'negative_utc': False - }, - 'date_offset': 0, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc).timestamp(), - 'offset': 0, - 'negative_utc': False - }, - 'committer_date_offset': 0, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }, + self.SAMPLE_REVISION_RAW, { 'id': hex_to_hash(sha1_other), 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5', 'author': { 'name': b'name', 'email': b'name@surname.org', }, 'committer': { 'name': b'name', 'email': b'name@surname.org', }, 'message': b'ugly fix for bug 42', 'date': { 'timestamp': datetime.datetime( 2000, 1, 12, 5, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False }, 'date_offset': 0, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 12, 5, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False }, 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], } ] mock_backend.revision_get_multiple.return_value = stub_revisions # when actual_revisions = service.lookup_revision_multiple( - [sha1_bin, sha1_other]) + [sha1, sha1_other]) # then 
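The lookup_revision_message tests above distinguish an unknown revision, a revision without a message, and the nominal case where the raw bytes are returned. A short sketch of that logic, under the same backend assumption as before:

from swh.core.hashutil import hex_to_hash

from swh.web.ui import backend
from swh.web.ui.exc import NotFoundExc


def lookup_revision_message(rev_sha1_git):
    # Sketch: unknown revision and message-less revision both raise,
    # otherwise the raw (bytes) message is returned untouched.
    revision = backend.revision_get(hex_to_hash(rev_sha1_git))
    if not revision:
        raise NotFoundExc('Revision with sha1_git %s not found.'
                          % rev_sha1_git)
    if 'message' not in revision:
        raise NotFoundExc('No message for revision with sha1_git %s.'
                          % rev_sha1_git)
    return {'message': revision['message']}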
self.assertEqual(list(actual_revisions), [ - { - 'id': sha1_bin, - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': 'bill & boule', - 'email': 'bill@boule.org', - }, - 'committer': { - 'name': 'boule & bill', - 'email': 'boule@bill.org', - }, - 'message': 'elegant fix for bug 31415957', - 'date': '2000-01-17T11:23:54+00:00', - 'date_offset': 0, - 'committer_date': '2000-01-17T11:23:54+00:00', - 'committer_date_offset': 0, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }, + self.SAMPLE_REVISION, { 'id': sha1_other, 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5', 'author': { 'name': 'name', 'email': 'name@surname.org', }, 'committer': { 'name': 'name', 'email': 'name@surname.org', }, 'message': 'ugly fix for bug 42', 'date': '2000-01-12T05:23:54+00:00', 'date_offset': 0, 'committer_date': '2000-01-12T05:23:54+00:00', 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], + 'merge': False } ]) self.assertEqual( list(mock_backend.revision_get_multiple.call_args[0][0]), - [hex_to_hash( - '18d8be353ed3480476f032475e7c233eff7371d5'), - hex_to_hash( - 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')]) + [hex_to_hash(sha1), + hex_to_hash(sha1_other)]) @patch('swh.web.ui.service.backend') @istest def lookup_revision_multiple_none_found(self, mock_backend): # given - sha1_bin = '18d8be353ed3480476f032475e7c233eff7371d5' + sha1_bin = self.SHA1_SAMPLE sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' mock_backend.revision_get_multiple.return_value = [] # then actual_revisions = service.lookup_revision_multiple( [sha1_bin, sha1_other]) self.assertEqual(list(actual_revisions), []) self.assertEqual( list(mock_backend.revision_get_multiple.call_args[0][0]), - [hex_to_hash( - '18d8be353ed3480476f032475e7c233eff7371d5'), - hex_to_hash( - 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')]) + [hex_to_hash(self.SHA1_SAMPLE), + hex_to_hash(sha1_other)]) @patch('swh.web.ui.service.backend') @istest def lookup_revision_log(self, mock_backend): # given - stub_revision_log = [{ - 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug 31415957', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }] + stub_revision_log = [self.SAMPLE_REVISION_RAW] mock_backend.revision_log = MagicMock(return_value=stub_revision_log) # when actual_revision = service.lookup_revision_log( - 'abcdbe353ed3480476f032475e7c233eff7371d5') + 'abcdbe353ed3480476f032475e7c233eff7371d5', + limit=25) # then - self.assertEqual(list(actual_revision), [{ - 'id': '28d8be353ed3480476f032475e7c233eff7371d5', - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': 'bill & boule', - 'email': 'bill@boule.org', - }, - 'committer': { - 'name': 'boule & bill', - 'email': 'boule@bill.org', - }, - 'message': 'elegant fix for bug 31415957', - 'date': "2000-01-17T11:23:54+00:00", - 'committer_date': 
"2000-01-17T11:23:54+00:00", - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }]) + self.assertEqual(list(actual_revision), [self.SAMPLE_REVISION]) mock_backend.revision_log.assert_called_with( - hex_to_hash('abcdbe353ed3480476f032475e7c233eff7371d5'), 100) + hex_to_hash('abcdbe353ed3480476f032475e7c233eff7371d5'), 25) @patch('swh.web.ui.service.backend') @istest def lookup_revision_log_by(self, mock_backend): # given - stub_revision_log = [{ - 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), - 'author': { - 'name': b'bill & boule', - 'email': b'bill@boule.org', - }, - 'committer': { - 'name': b'boule & bill', - 'email': b'boule@bill.org', - }, - 'message': b'elegant fix for bug 31415957', - 'date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'committer_date': { - 'timestamp': datetime.datetime( - 2000, 1, 17, 11, 23, 54, - tzinfo=datetime.timezone.utc, - ).timestamp(), - 'offset': 0, - 'negative_utc': False, - }, - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }] + stub_revision_log = [self.SAMPLE_REVISION_RAW] mock_backend.revision_log_by = MagicMock( return_value=stub_revision_log) # when actual_log = service.lookup_revision_log_by( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, limit=100) # then - self.assertEqual(list(actual_log), [{ - 'id': '28d8be353ed3480476f032475e7c233eff7371d5', - 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', - 'author': { - 'name': 'bill & boule', - 'email': 'bill@boule.org', - }, - 'committer': { - 'name': 'boule & bill', - 'email': 'boule@bill.org', - }, - 'message': 'elegant fix for bug 31415957', - 'date': "2000-01-17T11:23:54+00:00", - 'committer_date': "2000-01-17T11:23:54+00:00", - 'synthetic': False, - 'type': 'git', - 'parents': [], - 'metadata': [], - }]) + self.assertEqual(list(actual_log), [self.SAMPLE_REVISION]) mock_backend.revision_log_by.assert_called_with( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, 100) @patch('swh.web.ui.service.backend') @istest def lookup_revision_log_by_nolog(self, mock_backend): # given mock_backend.revision_log_by = MagicMock(return_value=None) # when res = service.lookup_revision_log_by( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, limit=100) # then self.assertEquals(res, None) mock_backend.revision_log_by.assert_called_with( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, 100) @patch('swh.web.ui.service.backend') @istest def lookup_content_raw_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content_raw( 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( - 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + 'sha1', hex_to_hash(self.SHA1_SAMPLE)) @patch('swh.web.ui.service.backend') @istest def lookup_content_raw(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value={ - 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', + 'sha1': self.SHA1_SAMPLE, }) mock_backend.content_get = MagicMock(return_value={ 'data': b'binary data'}) # when actual_content = service.lookup_content_raw( - 'sha256:39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926') + 'sha256:%s' % 
self.SHA256_SAMPLE) # then self.assertEquals(actual_content, {'data': b'binary data'}) mock_backend.content_find.assert_called_once_with( - 'sha256', hex_to_hash('39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926')) + 'sha256', self.SHA256_SAMPLE_BIN) mock_backend.content_get.assert_called_once_with( - '18d8be353ed3480476f032475e7c233eff7371d5') + self.SHA1_SAMPLE) @patch('swh.web.ui.service.backend') @istest def lookup_content_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content( - 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') + 'sha1:%s' % self.SHA1_SAMPLE) # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( - 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + 'sha1', self.SHA1_SAMPLE_BIN) @patch('swh.web.ui.service.backend') @istest def lookup_content_with_sha1(self, mock_backend): # given - mock_backend.content_find = MagicMock(return_value={ - 'sha1': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), - 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926'), - 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' - 'c5b00a6d03'), - 'length': 190, - 'status': 'hidden', - }) + mock_backend.content_find = MagicMock( + return_value=self.SAMPLE_CONTENT_RAW) # when actual_content = service.lookup_content( - 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') + 'sha1:%s' % self.SHA1_SAMPLE) # then - self.assertEqual(actual_content, { - 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', - 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' - '7d3bf96c926', - 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', - 'length': 190, - 'status': 'absent', - }) + self.assertEqual(actual_content, self.SAMPLE_CONTENT) mock_backend.content_find.assert_called_with( - 'sha1', hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5')) + 'sha1', hex_to_hash(self.SHA1_SAMPLE)) @patch('swh.web.ui.service.backend') @istest def lookup_content_with_sha256(self, mock_backend): # given - mock_backend.content_find = MagicMock(return_value={ - 'sha1': hex_to_hash('18d8be353ed3480476f032475e7c233eff7371d5'), - 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926'), - 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' - 'c5b00a6d03'), - 'length': 360, - 'status': 'visible', - }) + stub_content = self.SAMPLE_CONTENT_RAW + stub_content['status'] = 'visible' + + expected_content = self.SAMPLE_CONTENT + expected_content['status'] = 'visible' + mock_backend.content_find = MagicMock( + return_value=stub_content) # when actual_content = service.lookup_content( - 'sha256:39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926') + 'sha256:%s' % self.SHA256_SAMPLE) # then - self.assertEqual(actual_content, { - 'sha1': '18d8be353ed3480476f032475e7c233eff7371d5', - 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' - '7d3bf96c926', - 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', - 'length': 360, - 'status': 'visible', - }) + self.assertEqual(actual_content, expected_content) mock_backend.content_find.assert_called_with( - 'sha256', hex_to_hash('39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926')) + 'sha256', self.SHA256_SAMPLE_BIN) @patch('swh.web.ui.service.backend') @istest def lookup_person(self, mock_backend): # given mock_backend.person_get = MagicMock(return_value={ 'id': 'person_id', 'name': 
b'some_name', 'email': b'some-email', }) # when actual_person = service.lookup_person('person_id') # then self.assertEqual(actual_person, { 'id': 'person_id', 'name': 'some_name', 'email': 'some-email', }) mock_backend.person_get.assert_called_with('person_id') @patch('swh.web.ui.service.backend') @istest def lookup_directory_bad_checksum(self, mock_backend): # given mock_backend.directory_ls = MagicMock() # when with self.assertRaises(BadInputExc): service.lookup_directory('directory_id') # then mock_backend.directory_ls.called = False @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-id-bin') mock_backend.directory_get.return_value = None # when actual_dir = service.lookup_directory('directory_id') # then self.assertIsNone(actual_dir) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory_id', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_get.assert_called_with('directory-id-bin') mock_backend.directory_ls.called = False @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_directory(self, mock_query, mock_backend): mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-sha1-bin') # something that exists is all that matters here mock_backend.directory_get.return_value = {'id': b'directory-sha1-bin'} # given stub_dir_entries = [{ - 'sha1': hex_to_hash('5c6f0e2750f48fa0bd0c4cf5976ba0b9e0' - '2ebda5'), - 'sha256': hex_to_hash('39007420ca5de7cb3cfc15196335507e' - 'e76c98930e7e0afa4d2747d3bf96c926'), - 'sha1_git': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' - 'c5b00a6d03'), + 'sha1': self.SHA1_SAMPLE_BIN, + 'sha256': self.SHA256_SAMPLE_BIN, + 'sha1_git': self.SHA1GIT_SAMPLE_BIN, 'target': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' 'c5b00a6d03'), - 'dir_id': hex_to_hash('40e71b8614fcd89ccd17ca2b1d9e66' - 'c5b00a6d03'), + 'dir_id': self.DIRECTORY_ID_BIN, 'name': b'bob', 'type': 10, }] expected_dir_entries = [{ - 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', - 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2747' - 'd3bf96c926', - 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', + 'sha1': self.SHA1_SAMPLE, + 'sha256': self.SHA256_SAMPLE, + 'sha1_git': self.SHA1GIT_SAMPLE, 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', - 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', + 'dir_id': self.DIRECTORY_ID, 'name': 'bob', 'type': 10, }] mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_ls = list(service.lookup_directory( 'directory-sha1')) # then self.assertEqual(actual_directory_ls, expected_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory-sha1', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_ls.assert_called_with( 'directory-sha1-bin') @patch('swh.web.ui.service.backend') @istest def lookup_revision_by_nothing_found(self, mock_backend): # given mock_backend.revision_get_by.return_value = None # when actual_revisions = service.lookup_revision_by(1) # then self.assertIsNone(actual_revisions) mock_backend.revision_get_by(1, 'master', None) @patch('swh.web.ui.service.backend') @istest def lookup_revision_by(self, mock_backend): # given - stub_rev = { - 'id': hex_to_hash('28d8be353ed3480476f032475e7c233eff7371d5'), - 'directory': hex_to_hash( - '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 
-            'author': {
-                'name': b'ynot',
-                'email': b'ynot@blah.org',
-            },
-            'committer': {
-                'name': b'ynot',
-                'email': b'ynot@blah.org',
-            },
-            'message': b'elegant solution 31415',
-            'date': {
-                'timestamp': datetime.datetime(
-                    2016, 1, 17, 11, 23, 54,
-                    tzinfo=datetime.timezone.utc).timestamp(),
-                'offset': 420,
-                'negative_utc': None,
-            },
-            'committer_date': {
-                'timestamp': datetime.datetime(
-                    2016, 1, 17, 11, 23, 54,
-                    tzinfo=datetime.timezone.utc).timestamp(),
-                'offset': 420,
-                'negative_utc': None,
-            },
-        }
+        stub_rev = self.SAMPLE_REVISION_RAW
-        expected_rev = {
-            'id': '28d8be353ed3480476f032475e7c233eff7371d5',
-            'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
-            'author': {
-                'name': 'ynot',
-                'email': 'ynot@blah.org',
-            },
-            'committer': {
-                'name': 'ynot',
-                'email': 'ynot@blah.org',
-            },
-            'message': 'elegant solution 31415',
-            'date': '2016-01-17T18:23:54+07:00',
-            'committer_date': '2016-01-17T18:23:54+07:00',
-        }
+        expected_rev = self.SAMPLE_REVISION
+
+        mock_backend.revision_get_by.return_value = stub_rev
+
+        # when
+        actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
+
+        # then
+        self.assertEquals(actual_revision, expected_rev)
+
+        mock_backend.revision_get_by(1, 'master2', 'some-ts')
+
+    @patch('swh.web.ui.service.backend')
+    @istest
+    def lookup_revision_by_nomerge(self, mock_backend):
+        # given
+        stub_rev = self.SAMPLE_REVISION_RAW
+        stub_rev['parents'] = [
+            hex_to_hash('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')]
+
+        expected_rev = self.SAMPLE_REVISION
+        expected_rev['parents'] = ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc']
+        mock_backend.revision_get_by.return_value = stub_rev
+
+        # when
+        actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
+
+        # then
+        self.assertEquals(actual_revision, expected_rev)
+
+        mock_backend.revision_get_by(1, 'master2', 'some-ts')
+
+    @patch('swh.web.ui.service.backend')
+    @istest
+    def lookup_revision_by_merge(self, mock_backend):
+        # given
+        stub_rev = self.SAMPLE_REVISION_RAW
+        stub_rev['parents'] = [
+            hex_to_hash('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'),
+            hex_to_hash('ffff3b19e793491b1c6db0fd8b46cd9f32e592fc')
+        ]
+
+        expected_rev = self.SAMPLE_REVISION
+        expected_rev['parents'] = [
+            'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
+            'ffff3b19e793491b1c6db0fd8b46cd9f32e592fc'
+        ]
+        expected_rev['merge'] = True
        mock_backend.revision_get_by.return_value = stub_rev
        # when
        actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
        # then
        self.assertEquals(actual_revision, expected_rev)
        mock_backend.revision_get_by(1, 'master2', 'some-ts')
    @patch('swh.web.ui.service.backend')
    @istest
    def lookup_revision_with_context_by_ko(self, mock_backend):
        # given
        mock_backend.revision_get_by.return_value = None
        # when
        with self.assertRaises(NotFoundExc) as cm:
            origin_id = 1
            branch_name = 'master3'
            ts = None
            service.lookup_revision_with_context_by(origin_id, branch_name, ts, 'sha1')
        # then
        self.assertIn(
            'Revision with (origin_id: %s, branch_name: %s'
            ', ts: %s) not found.'
% (origin_id, branch_name, ts), cm.exception.args[0]) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) @patch('swh.web.ui.service.lookup_revision_with_context') @patch('swh.web.ui.service.backend') @istest def lookup_revision_with_context_by(self, mock_backend, mock_lookup_revision_with_context): # given stub_root_rev = {'id': 'root-rev-id'} mock_backend.revision_get_by.return_value = {'id': 'root-rev-id'} stub_rev = {'id': 'rev-found'} mock_lookup_revision_with_context.return_value = stub_rev # when origin_id = 1 branch_name = 'master3' ts = None sha1_git = 'sha1' actual_root_rev, actual_rev = service.lookup_revision_with_context_by( origin_id, branch_name, ts, sha1_git) # then self.assertEquals(actual_root_rev, stub_root_rev) self.assertEquals(actual_rev, stub_rev) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) mock_lookup_revision_with_context.assert_called_once_with( stub_root_rev, sha1_git, 100) @patch('swh.web.ui.service.backend') @patch('swh.web.ui.service.query') @istest def lookup_entity_by_uuid(self, mock_query, mock_backend): # given uuid_test = 'correct-uuid' mock_query.parse_uuid4.return_value = uuid_test stub_entities = [{'uuid': uuid_test}] mock_backend.entity_get.return_value = stub_entities # when actual_entities = service.lookup_entity_by_uuid(uuid_test) # then self.assertEquals(actual_entities, stub_entities) mock_query.parse_uuid4.assert_called_once_with(uuid_test) mock_backend.entity_get.assert_called_once_with(uuid_test) @istest def lookup_revision_through_ko_not_implemented(self): # then with self.assertRaises(NotImplementedError): service.lookup_revision_through({ 'something-unknown': 10, }) @patch('swh.web.ui.service.lookup_revision_with_context_by') @istest def lookup_revision_through_with_context_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 1, 'branch_name': 'master', 'ts': None, 'sha1_git': 'sha1-git' }, limit=1000) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 1, 'master', None, 'sha1-git', 1000) @patch('swh.web.ui.service.lookup_revision_by') @istest def lookup_revision_through_with_revision_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 2, 'branch_name': 'master2', 'ts': 'some-ts', }, limit=10) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 2, 'master2', 'some-ts') @patch('swh.web.ui.service.lookup_revision_with_context') @istest def lookup_revision_through_with_context(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git_root': 'some-sha1-root', 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1-root', 'some-sha1', 100) @patch('swh.web.ui.service.lookup_revision') @istest def lookup_revision_through_with_revision(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1') @patch('swh.web.ui.service.lookup_revision_through') @istest def lookup_directory_through_revision_ko_not_found( 
self, mock_lookup_rev): # given mock_lookup_rev.return_value = None # when with self.assertRaises(NotFoundExc): service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) @patch('swh.web.ui.service.lookup_revision_through') @patch('swh.web.ui.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_data( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} mock_lookup_dir.return_value = {'type': 'dir', 'content': []} # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, {'type': 'dir', 'content': []}) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', False) @patch('swh.web.ui.service.lookup_revision_through') @patch('swh.web.ui.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_content( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} stub_result = {'type': 'file', 'revision': 'rev-id', 'content': {'data': b'blah', 'sha1': 'sha1'}} mock_lookup_dir.return_value = stub_result # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 10, with_data=True) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, stub_result) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 10) mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', True) diff --git a/swh/web/ui/tests/views/test_api.py b/swh/web/ui/tests/views/test_api.py index 31c5bfa6..2215526c 100644 --- a/swh/web/ui/tests/views/test_api.py +++ b/swh/web/ui/tests/views/test_api.py @@ -1,2203 +1,1922 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import json import unittest import yaml from nose.tools import istest from unittest.mock import patch, MagicMock from swh.web.ui.tests import test_app from swh.web.ui import exc from swh.web.ui.views import api from swh.web.ui.exc import NotFoundExc, BadInputExc from swh.storage.exc import StorageDBError, StorageAPIError class ApiTestCase(test_app.SWHApiTestCase): @istest def generic_api_lookup_nothing_is_found(self): # given def test_generic_lookup_fn(sha1, another_unused_arg): assert another_unused_arg == 'unused arg' assert sha1 == 'sha1' return None # when with self.assertRaises(NotFoundExc) as cm: api._api_lookup('sha1', test_generic_lookup_fn, 'This will be raised because None is returned.', lambda x: x, 'unused arg') self.assertIn('This will be raised because None is returned.', cm.exception.args[0]) @istest def generic_api_map_are_enriched_and_transformed_to_list(self): # given def test_generic_lookup_fn_1(criteria0, param0, param1): assert criteria0 == 'something' return map(lambda x: x + 1, [1, 2, 3]) # when actual_result = api._api_lookup( 'something', test_generic_lookup_fn_1, 'This is not the error message you are looking for. 
Move along.', lambda x: x * 2, 'some param 0', 'some param 1') self.assertEqual(actual_result, [4, 6, 8]) @istest def generic_api_list_are_enriched_too(self): # given def test_generic_lookup_fn_2(crit): assert crit == 'something' return ['a', 'b', 'c'] # when actual_result = api._api_lookup( 'something', test_generic_lookup_fn_2, 'Not the error message you are looking for, it is. ' 'Along, you move!', lambda x: ''. join(['=', x, '='])) self.assertEqual(actual_result, ['=a=', '=b=', '=c=']) @istest def generic_api_generator_are_enriched_and_returned_as_list(self): # given def test_generic_lookup_fn_3(crit): assert crit == 'crit' return (i for i in [4, 5, 6]) # when actual_result = api._api_lookup( 'crit', test_generic_lookup_fn_3, 'Move!', lambda x: x - 1) self.assertEqual(actual_result, [3, 4, 5]) @istest def generic_api_simple_data_are_enriched_and_returned_too(self): # given def test_generic_lookup_fn_4(crit): assert crit == '123' return {'a': 10} def test_enrich_data(x): x['a'] = x['a'] * 10 return x # when actual_result = api._api_lookup( '123', test_generic_lookup_fn_4, 'Nothing to do', test_enrich_data) self.assertEqual(actual_result, {'a': 100}) @patch('swh.web.ui.views.api.service') # @istest def api_content_checksum_to_origin(self, mock_service): mock_service.lookup_hash.return_value = {'found': True} stub_origin = { "lister": None, "url": "rsync://ftp.gnu.org/old-gnu/webbase", "type": "ftp", "id": 2, "project": None } mock_service.lookup_hash_origin.return_value = stub_origin # when rv = self.app.get( '/api/1/browse/sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_origin) mock_service.lookup_hash.assert_called_once_with( 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_hash_origin.assert_called_once_with( 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') # @istest def api_content_checksum_to_origin_sha_not_found(self, mock_service): # given mock_service.lookup_hash.return_value = {'found': False} # when rv = self.app.get( '/api/1/browse/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 not found.' 
}) mock_service.lookup_hash.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_metadata(self, mock_service): # given mock_service.lookup_content.return_value = { 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560' 'cde9b067a4f', 'length': 17, 'status': 'visible' } # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'data_url': '/api/1/content/' '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/raw/', 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560c' 'de9b067a4f', 'length': 17, 'status': 'visible' }) mock_service.lookup_content.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_not_found_as_json(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_hash_origin = MagicMock() # when rv = self.app.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' }) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_hash_origin.called = False @patch('swh.web.ui.views.api.service') @istest def api_content_not_found_as_yaml(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_hash_origin = MagicMock() # when rv = self.app.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/', headers={'accept': 'application/yaml'}) self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/yaml') response_data = yaml.load(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' }) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_hash_origin.called = False @patch('swh.web.ui.views.api.service') @istest def api_content_raw_ko_not_found(self, mock_service): # given mock_service.lookup_content_raw.return_value = None # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Content with sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 not found.' 
}) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_content_raw(self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content # when rv = self.app.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/', headers={'Content-type': 'application/octet-stream', 'Content-disposition': 'attachment'}) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/octet-stream') self.assertEquals(rv.data, stub_content['data']) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') @patch('swh.web.ui.views.api.service') @istest def api_search(self, mock_service): # given mock_service.search_hash.return_value = {'found': True} expected_result = { 'search_stats': {'nbfiles': 1, 'pct': 100}, 'search_res': [{'filename': None, 'sha1': 'sha1:blah', 'found': True}] } # when - rv = self.app.get('/api/1/search/sha1:blah/') + rv = self.app.get('/api/1/content/search/sha1:blah/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_result) mock_service.search_hash.assert_called_once_with('sha1:blah') @patch('swh.web.ui.views.api.service') @istest def api_search_as_yaml(self, mock_service): # given mock_service.search_hash.return_value = {'found': True} expected_result = { 'search_stats': {'nbfiles': 1, 'pct': 100}, 'search_res': [{'filename': None, 'sha1': 'sha1:halb', 'found': True}] } # when - rv = self.app.get('/api/1/search/sha1:halb/', + rv = self.app.get('/api/1/content/search/sha1:halb/', headers={'Accept': 'application/yaml'}) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/yaml') response_data = yaml.load(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_result) mock_service.search_hash.assert_called_once_with('sha1:halb') @patch('swh.web.ui.views.api.service') @istest def api_search_not_found(self, mock_service): # given mock_service.search_hash.return_value = {'found': False} expected_result = { 'search_stats': {'nbfiles': 1, 'pct': 0}, 'search_res': [{'filename': None, 'sha1': 'sha1:halb', 'found': False}] } # when - rv = self.app.get('/api/1/search/sha1:halb/') + rv = self.app.get('/api/1/content/search/sha1:halb/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_result) mock_service.search_hash.assert_called_once_with('sha1:halb') @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_error(self, mock_service): # given mock_service.stat_counters.side_effect = ValueError( 'voluntary error to check the bad request middleware.') # when rv = self.app.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'voluntary error to check the bad request middleware.'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_swh_storage_error_db(self, mock_service): # given mock_service.stat_counters.side_effect = StorageDBError( 'SWH Storage exploded! 
Will be back online shortly!') # when rv = self.app.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the backend: ' 'SWH Storage exploded! Will be back online shortly!'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters_raise_swh_storage_error_api(self, mock_service): # given mock_service.stat_counters.side_effect = StorageAPIError( 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' ) # when rv = self.app.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the api backend: ' 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' }) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_counters(self, mock_service): # given stub_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_service.stat_counters.return_value = stub_stats # when rv = self.app.get('/api/1/stat/counters/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_stats) mock_service.stat_counters.assert_called_once_with() @patch('swh.web.ui.views.api.service') @istest def api_1_stat_origin_visits_raise_error(self, mock_service): # given mock_service.stat_origin_visits.side_effect = ValueError( 'voluntary error to check the bad request middleware.') # when rv = self.app.get('/api/1/stat/visits/2/') # then self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'voluntary error to check the bad request middleware.'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_origin_visits_raise_swh_storage_error_db( self, mock_service): # given mock_service.stat_origin_visits.side_effect = StorageDBError( 'SWH Storage exploded! Will be back online shortly!') # when rv = self.app.get('/api/1/stat/visits/2/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the backend: ' 'SWH Storage exploded! Will be back online shortly!'}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_origin_visits_raise_swh_storage_error_api( self, mock_service): # given mock_service.stat_origin_visits.side_effect = StorageAPIError( 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' ) # when rv = self.app.get('/api/1/stat/visits/2/') # then self.assertEquals(rv.status_code, 503) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'An unexpected error occurred in the api backend: ' 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' 
}) @patch('swh.web.ui.views.api.service') @istest def api_1_stat_origin_visits(self, mock_service): # given stub_stats = [ { 'date': 1420149600.0, 'origin': 1, 'visit': 1 }, { 'date': 1104616800.0, 'origin': 1, 'visit': 2 }, { 'date': 1293919200.0, 'origin': 1, 'visit': 3 } ] expected_stats = [1104616800.0, 1293919200.0, 1420149600.0] mock_service.stat_origin_visits.return_value = stub_stats # when rv = self.app.get('/api/1/stat/visits/2/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_stats) mock_service.stat_origin_visits.assert_called_once_with(2) @patch('swh.web.ui.views.api.service') @istest - def api_origin(self, mock_service): + def api_origin_by_id(self, mock_service): # given stub_origin = { 'id': 1234, 'lister': 'uuid-lister-0', 'project': 'uuid-project-0', 'url': 'ftp://some/url/to/origin/0', 'type': 'ftp' } mock_service.lookup_origin.return_value = stub_origin # when rv = self.app.get('/api/1/origin/1234/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_origin) - mock_service.lookup_origin.assert_called_with(1234) + mock_service.lookup_origin.assert_called_with({'id': 1234}) + + @patch('swh.web.ui.views.api.service') + @istest + def api_origin_by_type_url(self, mock_service): + # given + stub_origin = { + 'id': 1234, + 'lister': 'uuid-lister-0', + 'project': 'uuid-project-0', + 'url': 'ftp://some/url/to/origin/0', + 'type': 'ftp' + } + mock_service.lookup_origin.return_value = stub_origin + + # when + rv = self.app.get('/api/1/origin/ftp/url/ftp://some/url/to/origin/0/') + + # then + self.assertEquals(rv.status_code, 200) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, stub_origin) + + mock_service.lookup_origin.assert_called_with( + {'url': 'ftp://some/url/to/origin/0', + 'type': 'ftp'}) @patch('swh.web.ui.views.api.service') @istest def api_origin_not_found(self, mock_service): # given mock_service.lookup_origin.return_value = None # when rv = self.app.get('/api/1/origin/4321/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Origin with id 4321 not found.' 
}) - mock_service.lookup_origin.assert_called_with(4321) + mock_service.lookup_origin.assert_called_with({'id': 4321}) @patch('swh.web.ui.views.api.service') @istest def api_release(self, mock_service): # given stub_release = { 'id': 'release-0', 'target_type': 'revision', 'target': 'revision-sha1', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } expected_release = { 'id': 'release-0', 'target_type': 'revision', 'target': 'revision-sha1', 'target_url': '/api/1/revision/revision-sha1/', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_release) mock_service.lookup_release.assert_called_once_with('release-0') @patch('swh.web.ui.views.api.service') @istest def api_release_target_type_not_a_revision(self, mock_service): # given stub_release = { 'id': 'release-0', 'target_type': 'other-stuff', 'target': 'other-stuff-checksum', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } expected_release = { 'id': 'release-0', 'target_type': 'other-stuff', 'target': 'other-stuff-checksum', "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_release) mock_service.lookup_release.assert_called_once_with('release-0') @patch('swh.web.ui.views.api.service') @istest def api_release_not_found(self, mock_service): # given mock_service.lookup_release.return_value = None # when rv = self.app.get('/api/1/release/release-0/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Release with sha1_git release-0 not found.' 
}) @patch('swh.web.ui.views.api.service') @istest def api_revision(self, mock_service): # given stub_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['8734ef7e7c357ce2af928115c6c6a42b7e2a44e7'], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } mock_service.lookup_revision.return_value = stub_revision expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233e' 'ff7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6' 'a42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [ '8734ef7e7c357ce2af928115c6c6a42b7e2a44e7' ], 'parent_urls': [ '/api/1/revision/8734ef7e7c357ce2af928115c6c6a42b7e2a44e7' '/prev/18d8be353ed3480476f032475e7c233eff7371d5/' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } # when rv = self.app.get('/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_revision) mock_service.lookup_revision.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_revision_not_found(self, mock_service): # given mock_service.lookup_revision.return_value = None # when rv = self.app.get('/api/1/revision/revision-0/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Revision with sha1_git revision-0 not found.'}) @patch('swh.web.ui.views.api.service') @istest def api_revision_raw_ok(self, mock_service): # given stub_revision = {'message': 'synthetic revision message'} mock_service.lookup_revision_message.return_value = stub_revision # when rv = self.app.get('/api/1/revision/18d8be353ed3480476f032475e7c2' '33eff7371d5/raw/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/octet-stream') self.assertEquals(rv.data, b'synthetic revision message') mock_service.lookup_revision_message.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_revision_raw_ok_no_msg(self, 
mock_service): # given mock_service.lookup_revision_message.side_effect = NotFoundExc( 'No message for revision') # when rv = self.app.get('/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/raw/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'No message for revision'}) self.assertEquals mock_service.lookup_revision_message.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_revision_raw_ko_no_rev(self, mock_service): # given mock_service.lookup_revision_message.side_effect = NotFoundExc( 'No revision found') # when rv = self.app.get('/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/raw/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'No revision found'}) mock_service.lookup_revision_message.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin_not_found(self, mock_service): mock_service.lookup_revision_by.return_value = None rv = self.app.get('/api/1/revision/origin/123/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertIn('Revision with (origin_id: 123', response_data['error']) self.assertIn('not found', response_data['error']) mock_service.lookup_revision_by.assert_called_once_with( 123, 'refs/heads/master', None) @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin(self, mock_service): mock_revision = { 'id': '32', 'directory': '21', 'message': 'message 1', 'type': 'deb', } expected_revision = { 'id': '32', 'url': '/api/1/revision/32/', 'history_url': '/api/1/revision/32/log/', 'directory': '21', 'directory_url': '/api/1/directory/21/', 'message': 'message 1', 'type': 'deb', } mock_service.lookup_revision_by.return_value = mock_revision rv = self.app.get('/api/1/revision/origin/1/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.api.service') @istest def api_revision_with_origin_and_branch_name(self, mock_service): mock_revision = { 'id': '12', 'directory': '23', 'message': 'message 2', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '12', 'url': '/api/1/revision/12/', 'history_url': '/api/1/revision/12/log/', 'directory': '23', 'directory_url': '/api/1/directory/23/', 'message': 'message 2', 'type': 'tar', } rv = self.app.get('/api/1/revision/origin/1/branch/refs/origin/dev/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', None) @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.utils') @istest def api_revision_with_origin_and_branch_name_and_timestamp(self, mock_utils, mock_service): mock_revision = { 'id': '123', 
'directory': '456', 'message': 'message 3', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '123', 'url': '/api/1/revision/123/', 'history_url': '/api/1/revision/123/log/', 'directory': '456', 'directory_url': '/api/1/directory/456/', 'message': 'message 3', 'type': 'tar', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision rv = self.app.get('/api/1/revision' '/origin/1' '/branch/refs/origin/dev' '/ts/1452591542/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with('1452591542') mock_utils.enrich_revision.assert_called_once_with( mock_revision) @patch('swh.web.ui.views.api.service') @patch('swh.web.ui.views.api.utils') @istest def api_revision_with_origin_and_branch_name_and_timestamp_with_escapes( self, mock_utils, mock_service): mock_revision = { 'id': '999', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '999', 'url': '/api/1/revision/999/', 'history_url': '/api/1/revision/999/log/', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision rv = self.app.get('/api/1/revision' '/origin/1' '/branch/refs%2Forigin%2Fdev' '/ts/Today%20is%20' 'January%201,%202047%20at%208:21:00AM/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( 1, 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with( 'Today is January 1, 2047 at 8:21:00AM') mock_utils.enrich_revision.assert_called_once_with( mock_revision) - @patch('swh.web.ui.views.api._revision_directory_by') - @istest - def api_directory_through_rev_with_origin_history_with_rev_not_found_0( - self, mock_rev_dir): - # given - mock_rev_dir.side_effect = NotFoundExc('not found') - - # when - rv = self.app.get('/api/1/revision' - '/origin/1' - '/history/4563' - '/directory/some-path/') - - # then - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, { - 'error': - 'not found'}) - - mock_rev_dir.assert_called_once_with( - { - 'origin_id': 1, - 'branch_name': 'refs/heads/master', - 'ts': None, - 'sha1_git': '4563' - }, - 'some-path', - '/api/1/revision' - '/origin/1' - '/history/4563' - '/directory/some-path/', - limit=100, with_data=False) - - @patch('swh.web.ui.views.api._revision_directory_by') - @patch('swh.web.ui.views.api.utils') - @istest - def api_directory_through_revision_with_origin_history( - self, mock_utils, mock_rev_dir): - # given - stub_dir_content = [ - { - 'type': 'dir' - }, - { - 'type': 'file' - }, - ] - mock_rev_dir.return_value = stub_dir_content - - mock_utils.parse_timestamp.return_value = '2016-11-24 00:00:00' - - # when - url = '/api/1/revision' \ - '/origin/999' \ - '/branch/refs/dev' \ - '/ts/2016-11-24' \ - '/history/12-sha1-git' \ - '/directory/some/content/' - rv = self.app.get(url + '?limit=666') - - # then - self.assertEquals(rv.status_code, 200) - 
self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, stub_dir_content) - - mock_utils.parse_timestamp.assert_called_once_with('2016-11-24') - - mock_rev_dir.assert_called_once_with( - { - 'origin_id': 999, - 'branch_name': 'refs/dev', - 'ts': '2016-11-24 00:00:00', - 'sha1_git': '12-sha1-git' - }, - 'some/content', - url, - limit=666, with_data=False) - - @patch('swh.web.ui.views.api.service') - @istest - def api_revision_history_through_origin_rev_not_found_0( - self, mock_service): - mock_service.lookup_revision_with_context_by.return_value = { - 'id': 'root-rev-id'}, None - - # when - rv = self.app.get('/api/1/revision' - '/origin/1' - '/history/4563/') - - # then - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, { - 'error': - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" - " sha1_git_root being the revision's identifier pointed to by " - "(origin_id: %s, branch_name: %s, ts: %s)." - % ('4563', - 'root-rev-id', - 1, - 'refs/heads/master', - None)}) - - mock_service.lookup_revision_with_context_by.assert_called_once_with( - 1, 'refs/heads/master', None, '4563', 100) - - @patch('swh.web.ui.views.api.service') - @istest - def api_revision_history_through_origin_rev_not_found_1( - self, mock_service): - # given - mock_service.lookup_revision_with_context_by.return_value = { - 'id': 'root-rev-id'}, None - - # when - rv = self.app.get('/api/1/revision' - '/origin/10' - '/branch/origin/dev' - '/history/213/') - - # then - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, { - 'error': - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" - " sha1_git_root being the revision's identifier pointed to by " - "(origin_id: %s, branch_name: %s, ts: %s)." - % ('213', - 'root-rev-id', - 10, - 'origin/dev', - None)}) - - mock_service.lookup_revision_with_context_by.assert_called_once_with( - 10, 'origin/dev', None, '213', 100) - - @patch('swh.web.ui.views.api.utils') - @patch('swh.web.ui.views.api.service') - @istest - def api_revision_history_through_origin_rev_not_found_2( - self, mock_service, mock_utils): - # given - mock_service.lookup_revision_with_context_by.return_value = { - 'id': 'root-rev-id'}, None - mock_utils.parse_timestamp.return_value = '2012-11-23 00:00:00' - - # when - rv = self.app.get('/api/1/revision' - '/origin/100' - '/branch/master' - '/ts/2012-11-23' - '/history/876/') - - # then - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, { - 'error': - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" - " sha1_git_root being the revision's identifier pointed to by " - "(origin_id: %s, branch_name: %s, ts: %s)." 
- % ('876', - 'root-rev-id', - 100, - 'master', - '2012-11-23 00:00:00')}) - - mock_service.lookup_revision_with_context_by.assert_called_once_with( - 100, 'master', '2012-11-23 00:00:00', '876', 100) - - mock_utils.parse_timestamp.assert_called_once_with('2012-11-23') - - @patch('swh.web.ui.views.api.utils') - @patch('swh.web.ui.views.api.service') - @istest - def api_revision_history_through_origin_rev_not_found_3( - self, mock_service, mock_utils): - # given - mock_service.lookup_revision_with_context_by.return_value = { - 'id': 'root-rev-id'}, None - - mock_service.lookup_revision_with_context.return_value = None - - mock_utils.parse_timestamp.return_value = '2016-11-23 00:00:00' - - # when - rv = self.app.get('/api/1/revision' - '/origin/666' - '/branch/refs/master' - '/ts/2016-11-23' - '/history/123-sha1-git/?limit=1000') - - # then - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, { - 'error': - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" - " sha1_git_root being the revision's identifier pointed to by " - "(origin_id: %s, branch_name: %s, ts: %s)." - % ('123-sha1-git', - 'root-rev-id', - 666, - 'refs/master', - '2016-11-23 00:00:00')}) - - mock_service.lookup_revision_with_context_by.assert_called_once_with( - 666, 'refs/master', '2016-11-23 00:00:00', '123-sha1-git', 1000) - - mock_utils.parse_timestamp.assert_called_once_with('2016-11-23') - - mock_service.lookup_revision_with_context('456-sha1-git-root', - '123-sha1-git', - 1000) - - @patch('swh.web.ui.views.api.utils') - @patch('swh.web.ui.views.api.service') - @istest - def api_history_through_revision(self, mock_service, mock_utils): - # given - stub_root_rev = { - 'id': '45-sha1-git-root' - } - - stub_revision = { - 'children': [], - } - mock_service.lookup_revision_with_context_by.return_value = ( - stub_root_rev, - stub_revision) - - mock_utils.enrich_revision.return_value = 'some-result' - - mock_utils.parse_timestamp.return_value = '2016-11-24 00:00:00' - - # when - rv = self.app.get('/api/1/revision' - '/origin/999' - '/branch/refs/dev' - '/ts/2016-11-24' - '/history/12-sha1-git/') - - # then - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEqual(response_data, 'some-result') - - mock_service.lookup_revision_with_context_by.assert_called_once_with( - 999, - 'refs/dev', - '2016-11-24 00:00:00', - '12-sha1-git', - 100) - - mock_utils.parse_timestamp.assert_called_once_with('2016-11-24') - - mock_utils.enrich_revision.assert_called_once_with( - stub_revision, - context='45-sha1-git-root') - @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_ko_raise(self, mock_service): # given mock_service.lookup_directory_through_revision.side_effect = NotFoundExc('not') # noqa # when with self.assertRaises(NotFoundExc): api._revision_directory_by( {'sha1_git': 'id'}, None, '/api/1/revision/sha1/directory/') # then mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'id'}, None, limit=100, with_data=False) @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_type_dir(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) # when actual_dir_content = api._revision_directory_by( 
{'sha1_git': 'blah-id'}, 'some/path', '/api/1/revision/sha1/directory/') # then self.assertEquals(actual_dir_content, { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'blah-id'}, 'some/path', limit=100, with_data=False) @patch('swh.web.ui.views.api.service') @istest def revision_directory_by_type_file(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': {'blah': 'blah'} }) # when actual_dir_content = api._revision_directory_by( {'sha1_git': 'sha1'}, 'some/path', '/api/1/revision/origin/2/directory/', limit=1000, with_data=True) # then self.assertEquals(actual_dir_content, { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': {'blah': 'blah'} }) mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'sha1'}, 'some/path', limit=1000, with_data=True) @patch('swh.web.ui.views.api.utils') @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_directory_through_revision_origin_ko_not_found(self, mock_rev_dir, mock_utils): mock_rev_dir.side_effect = NotFoundExc('not found') mock_utils.parse_timestamp.return_value = '2012-10-20 00:00:00' rv = self.app.get('/api/1/revision' '/origin/10' '/branch/refs/remote/origin/dev' '/ts/2012-10-20' '/directory/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, { 'error': 'not found'}) mock_rev_dir.assert_called_once_with( {'origin_id': 10, 'branch_name': 'refs/remote/origin/dev', 'ts': '2012-10-20 00:00:00'}, None, '/api/1/revision' '/origin/10' '/branch/refs/remote/origin/dev' '/ts/2012-10-20' '/directory/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_directory_through_revision_origin(self, mock_revision_dir): expected_res = [{ 'id': '123' }] mock_revision_dir.return_value = expected_res rv = self.app.get('/api/1/revision/origin/3/directory/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEqual(response_data, expected_res) mock_revision_dir.assert_called_once_with({ 'origin_id': 3, 'branch_name': 'refs/heads/master', 'ts': None}, None, '/api/1/revision/origin/3/directory/', with_data=False) @patch('swh.web.ui.views.api.service') @istest def api_revision_log(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log.return_value = stub_revisions expected_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' - 'f7371d5/log/', + 'f7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' - 
'42b7e2a44e6/', + '42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [ '7834ef7e7c357ce2af928115c6c6a42b7e2a4345' ], 'parent_urls': [ '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345' '/prev/18d8be353ed3480476f032475e7c233eff7371d5/' ], 'type': 'tar', 'synthetic': True, }] + expected_response = { + 'revisions': expected_revisions, + 'next_revs_url': None + } + + # when + rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' + 'b7e2a44e6/log/') + + # then + self.assertEquals(rv.status_code, 200) + self.assertEquals(rv.mimetype, 'application/json') + + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, expected_response) + + mock_service.lookup_revision_log.assert_called_once_with( + '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 26) + + @patch('swh.web.ui.views.api.service') + @istest + def api_revision_log_with_next(self, mock_service): + # given + stub_revisions = [] + for i in range(27): + stub_revisions.append({'id': i}) + + mock_service.lookup_revision_log.return_value = stub_revisions[:26] + + expected_revisions = [x for x in stub_revisions if x['id'] < 25] + for e in expected_revisions: + e['url'] = '/api/1/revision/%s/' % e['id'] + e['history_url'] = '/api/1/revision/%s/log/' % e['id'] + + expected_response = { + 'revisions': expected_revisions, + 'next_revs_url': '/api/1/revision/25/log/' + } + # when rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' 'b7e2a44e6/log/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) + self.assertEquals(response_data, expected_response) mock_service.lookup_revision_log.assert_called_once_with( - '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 100) + '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 26) @patch('swh.web.ui.views.api.service') @istest def api_revision_log_not_found(self, mock_service): # given mock_service.lookup_revision_log.return_value = None # when rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42b7' - 'e2a44e6/log/?limit=10') + 'e2a44e6/log/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Revision with sha1_git' ' 8834ef7e7c357ce2af928115c6c6a42b7e2a44e6 not found.'}) mock_service.lookup_revision_log.assert_called_once_with( - '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 10) + '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 26) @patch('swh.web.ui.views.api.service') @istest def api_revision_log_context(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log.return_value = stub_revisions mock_service.lookup_revision_multiple.return_value = [{ 'id': 
'7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory': '18d8be353ed3480476f032475e7c233eff7371d5', 'author_name': 'Name Surname', 'author_email': 'name@surname.com', 'committer_name': 'Name Surname', 'committer_email': 'name@surname.com', 'message': 'amazing revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'], 'type': 'tar', 'synthetic': True, }] - # when - rv = self.app.get('/api/1/revision/18d8be353ed3480476f0' - '32475e7c233eff7371d5/prev/prev-rev/log/') - - # then - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, [ + expected_revisions = [ { 'url': '/api/1/revision/' '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/', 'history_url': '/api/1/revision/' '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/log/', 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory_url': '/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/', 'author_name': 'Name Surname', 'author_email': 'name@surname.com', 'committer_name': 'Name Surname', 'committer_email': 'name@surname.com', 'message': 'amazing revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'], 'parent_urls': [ '/api/1/revision/adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' '/prev/7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/' ], 'type': 'tar', 'synthetic': True, }, { 'url': '/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/log/', 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/' '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'parent_urls': [ '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345' '/prev/18d8be353ed3480476f032475e7c233eff7371d5/' ], 'type': 'tar', 'synthetic': True, - }]) + }] + + expected_response = { + 'revisions': expected_revisions, + 'next_revs_url': None + } + + # when + rv = self.app.get('/api/1/revision/18d8be353ed3480476f0' + '32475e7c233eff7371d5/prev/prev-rev/log/') + + # then + self.assertEquals(rv.status_code, 200) + self.assertEquals(rv.mimetype, 'application/json') + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, expected_response) mock_service.lookup_revision_log.assert_called_once_with( - '18d8be353ed3480476f032475e7c233eff7371d5', 100) + '18d8be353ed3480476f032475e7c233eff7371d5', 26) mock_service.lookup_revision_multiple.assert_called_once_with( ['prev-rev']) @patch('swh.web.ui.views.api.service') @istest def api_revision_log_by(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': 
['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log_by.return_value = stub_revisions expected_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' 'f7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' '42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [ '7834ef7e7c357ce2af928115c6c6a42b7e2a4345' ], 'parent_urls': [ '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345' '/prev/18d8be353ed3480476f032475e7c233eff7371d5/' ], 'type': 'tar', 'synthetic': True, }] + expected_result = { + 'revisions': expected_revisions, + 'next_revs_url': None + } + # when rv = self.app.get('/api/1/revision/origin/1/log/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) + self.assertEquals(response_data, expected_result) mock_service.lookup_revision_log_by.assert_called_once_with( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, 26) @patch('swh.web.ui.views.api.service') @istest - def api_revision_log_by_norev(self, mock_service): + def api_revision_log_by_with_next(self, mock_service): # given - mock_service.lookup_revision_log_by.side_effect = NotFoundExc( - 'No revision') + stub_revisions = [] + for i in range(27): + stub_revisions.append({'id': i}) + + mock_service.lookup_revision_log_by.return_value = stub_revisions[:26] + + expected_revisions = [x for x in stub_revisions if x['id'] < 25] + for e in expected_revisions: + e['url'] = '/api/1/revision/%s/' % e['id'] + e['history_url'] = '/api/1/revision/%s/log/' % e['id'] + + expected_response = { + 'revisions': expected_revisions, + 'next_revs_url': '/api/1/revision/25/log/' + } # when rv = self.app.get('/api/1/revision/origin/1/log/') # then - self.assertEquals(rv.status_code, 404) + self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, {'error': 'No revision'}) + self.assertEquals(response_data, expected_response) mock_service.lookup_revision_log_by.assert_called_once_with( - 1, 'refs/heads/master', None) + 1, 'refs/heads/master', None, 26) @patch('swh.web.ui.views.api.service') @istest - def api_revision_history_not_found(self, mock_service): + def api_revision_log_by_norev(self, mock_service): # given - mock_service.lookup_revision_with_context.return_value = None + mock_service.lookup_revision_log_by.side_effect = NotFoundExc( + 'No revision') - # then - rv = self.app.get('/api/1/revision/999/history/338/?limit=5') + # when + rv = self.app.get('/api/1/revision/origin/1/log/') + # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') - mock_service.lookup_revision_with_context.assert_called_once_with( - '999', '338', 5) - - @istest - def api_revision_history_sha1_same_so_redirect(self): - # when - rv = self.app.get('/api/1/revision/123/history/123?limit=10') - # then - self.assertEquals(rv.status_code, 
301) - # Ideally we'd like to be able to check the resulting url path - # but does not work, this returns the current url - # also following the redirect would mean to yet mock again the - # destination url... So for now cannot test it + response_data = json.loads(rv.data.decode('utf-8')) + self.assertEquals(response_data, {'error': 'No revision'}) - # self.assertEquals(rv.location, - # 'http://localhost/api/1/revision/123?limit=10') + mock_service.lookup_revision_log_by.assert_called_once_with( + 1, 'refs/heads/master', None, 26) @patch('swh.web.ui.views.api.service') @istest def api_revision_history(self, mock_service): # for readability purposes, we use: # - sha1 as 3 letters (url are way too long otherwise to respect pep8) # - only keys with modification steps (all other keys are kept as is) # given stub_revision = { 'id': '883', 'children': ['777', '999'], 'parents': [], 'directory': '272' } mock_service.lookup_revision.return_value = stub_revision # then rv = self.app.get('/api/1/revision/883/prev/999/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'id': '883', 'url': '/api/1/revision/883/', 'history_url': '/api/1/revision/883/log/', 'history_context_url': '/api/1/revision/883/prev/999/log/', 'children': ['777', '999'], 'children_urls': ['/api/1/revision/777/', '/api/1/revision/999/'], 'parents': [], 'parent_urls': [], 'directory': '272', 'directory_url': '/api/1/directory/272/' }) mock_service.lookup_revision.assert_called_once_with('883') @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ko_not_found(self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('Not found') # then rv = self.app.get('/api/1/revision/999/directory/some/path/to/dir/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Not found'}) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path/to/dir', '/api/1/revision/999/directory/some/path/to/dir/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir): stub_dir = { 'type': 'dir', 'revision': '999', 'content': [ { 'sha1_git': '789', 'type': 'file', 'target': '101', 'target_url': '/api/1/content/sha1_git:101/', 'name': 'somefile', 'file_url': '/api/1/revision/999/directory/some/path/' 'somefile/' }, { 'sha1_git': '123', 'type': 'dir', 'target': '456', 'target_url': '/api/1/directory/456/', 'name': 'to-subdir', 'dir_url': '/api/1/revision/999/directory/some/path/' 'to-subdir/', }] } # given mock_rev_dir.return_value = stub_dir # then rv = self.app.get('/api/1/revision/999/directory/some/path/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_dir) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path', '/api/1/revision/999/directory/some/path/', with_data=False) @patch('swh.web.ui.views.api._revision_directory_by') @istest def api_revision_directory_ok_returns_content(self, mock_rev_dir): stub_content = { 'type': 'file', 'revision': '999', 'content': { 'sha1_git': '789', 'sha1': '101', 'data_url': '/api/1/content/101/raw/', } } # given mock_rev_dir.return_value = stub_content # then url = 
'/api/1/revision/666/directory/some/other/path/' rv = self.app.get(url) self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_content) mock_rev_dir.assert_called_once_with( {'sha1_git': '666'}, 'some/other/path', url, with_data=False) - @istest - def api_revision_history_directory_sha1_same_so_redirect(self): - # when - rv = self.app.get( - '/api/1/revision/123/history/123/directory/path/to/?limit=1') - - # then - self.assertEquals(rv.status_code, 301) - - # self.assertEquals(rv.location, - # 'http://localhost/api/1/revision/123/directory/path/to/') - - @patch('swh.web.ui.views.api._revision_directory_by') - @istest - def api_revision_history_directory_ko_revision_not_found(self, - mock_rev_dir): - # given - mock_rev_dir.side_effect = NotFoundExc('not found') - - # then - url = '/api/1/revision/456/history/987/directory/path/to/' - rv = self.app.get(url + '?limit=10') - - self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { - 'error': 'not found'}) - - mock_rev_dir.assert_called_once_with( - {'sha1_git_root': '456', 'sha1_git': '987'}, 'path/to', url, - limit=10, with_data=False) - - @patch('swh.web.ui.views.api._revision_directory_by') - @istest - def api_revision_history_directory(self, - mock_rev_dir): - # given - stub_dir = { - 'type': 'dir', - 'revision': 'rev-id', - 'content': [ - { - 'sha1_git': '879', - 'type': 'file', - 'target': '110', - 'target_url': '/api/1/content/sha1_git:110/', - 'name': 'subfile', - 'file_url': '/api/1/revision/354/history/867/directory/' - 'debian/' - 'subfile/', - }, - { - 'sha1_git': '213', - 'type': 'dir', - 'target': '546', - 'target_url': '/api/1/directory/546/', - 'name': 'subdir', - 'dir_url': - '/api/1/revision/354/history/867/directory/debian/subdir/' - }] - } - - # given - mock_rev_dir.return_value = stub_dir - - # then - url = '/api/1/revision/354' \ - '/history/867' \ - '/directory/debian/' - rv = self.app.get(url + '?limit=4') - - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_dir) - - mock_rev_dir.assert_called_once_with( - {'sha1_git_root': '354', - 'sha1_git': '867'}, - 'debian', - url, - limit=4, with_data=False) - @patch('swh.web.ui.views.api.service') @istest def api_person(self, mock_service): # given stub_person = { 'id': '198003', 'name': 'Software Heritage', 'email': 'robot@softwareheritage.org', } mock_service.lookup_person.return_value = stub_person # when rv = self.app.get('/api/1/person/198003/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, stub_person) @patch('swh.web.ui.views.api.service') @istest def api_person_not_found(self, mock_service): # given mock_service.lookup_person.return_value = None # when rv = self.app.get('/api/1/person/666/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Person with id 666 not found.'}) @patch('swh.web.ui.views.api.service') @istest def api_directory(self, mock_service): # given stub_directories = [ { 'sha1_git': 
'18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', }] expected_directories = [ { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', 'target_url': '/api/1/directory/8be353ed3480476f032475e7c233eff737123456/', }] mock_service.lookup_directory.return_value = stub_directories # when rv = self.app.get('/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_directories) mock_service.lookup_directory.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') @patch('swh.web.ui.views.api.service') @istest def api_directory_not_found(self, mock_service): # given mock_service.lookup_directory.return_value = [] # when rv = self.app.get('/api/1/directory/' '66618d8be353ed3480476f032475e7c233eff737/') # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'Directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.'}) @patch('swh.web.ui.views.api.service') @istest def api_directory_with_path_found(self, mock_service): # given expected_dir = { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'name': 'bla', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', } mock_service.lookup_directory_with_path.return_value = expected_dir # when rv = self.app.get('/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/bla/') # then self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_dir) mock_service.lookup_directory_with_path.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5', 'bla') @patch('swh.web.ui.views.api.service') @istest def api_directory_with_path_not_found(self, mock_service): # given mock_service.lookup_directory_with_path.return_value = None path = 'some/path/to/dir/' # when rv = self.app.get(('/api/1/directory/' '66618d8be353ed3480476f032475e7c233eff737/%s') % path) path = path.strip('/') # Path stripped of lead/trail separators # then self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': (('Entry with path %s relative to ' 'directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.') % path)}) @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid_not_found(self, mock_service): # when mock_service.lookup_entity_by_uuid.return_value = [] # when rv = self.app.get('/api/1/entity/' '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') self.assertEquals(rv.status_code, 404) self.assertEquals(rv.mimetype, 'application/json') response_data = 
json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': "Entity with uuid '5f4d4c51-498a-4e28-88b3-b3e4e8396cba' not " + "found."}) mock_service.lookup_entity_by_uuid.assert_called_once_with( '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid_bad_request(self, mock_service): # when mock_service.lookup_entity_by_uuid.side_effect = BadInputExc( 'bad input: uuid malformed!') # when rv = self.app.get('/api/1/entity/uuid malformed/') self.assertEquals(rv.status_code, 400) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, { 'error': 'bad input: uuid malformed!'}) mock_service.lookup_entity_by_uuid.assert_called_once_with( 'uuid malformed') @patch('swh.web.ui.views.api.service') @istest def api_lookup_entity_by_uuid(self, mock_service): # when stub_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' } ] mock_service.lookup_entity_by_uuid.return_value = stub_entities expected_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'uuid_url': '/api/1/entity/34bd6b1b-463f-43e5-a697-' '785107f598e4/', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'uuid_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' } ] # when rv = self.app.get('/api/1/entity' '/34bd6b1b-463f-43e5-a697-785107f598e4/') self.assertEquals(rv.status_code, 200) self.assertEquals(rv.mimetype, 'application/json') response_data = json.loads(rv.data.decode('utf-8')) self.assertEquals(response_data, expected_entities) mock_service.lookup_entity_by_uuid.assert_called_once_with( '34bd6b1b-463f-43e5-a697-785107f598e4') class ApiUtils(unittest.TestCase): @istest def api_lookup_not_found(self): # when with self.assertRaises(exc.NotFoundExc) as e: api._api_lookup('something', lambda x: None, 'this is the error message raised as it is None') self.assertEqual(e.exception.args[0], 'this is the error message raised as it is None') @istest def api_lookup_with_result(self): # when actual_result = api._api_lookup('something', lambda x: x + '!', 'this is the error which won\'t be ' 'used here') self.assertEqual(actual_result, 'something!') @istest def api_lookup_with_result_as_map(self): # when actual_result = api._api_lookup([1, 2, 3], lambda x: map(lambda y: y+1, x), 'this is the error which won\'t be ' 'used here') self.assertEqual(actual_result, [2, 3, 4]) diff --git a/swh/web/ui/tests/views/test_browse.py b/swh/web/ui/tests/views/test_browse.py index 7bd72fed..fc778431 100644 --- a/swh/web/ui/tests/views/test_browse.py +++ b/swh/web/ui/tests/views/test_browse.py @@ -1,1704 +1,1761 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from nose.tools import istest from unittest.mock import patch from swh.web.ui.exc import BadInputExc, NotFoundExc from .. 
import test_app class FileMock(): def __init__(self, filename): self.filename = filename class SearchView(test_app.SWHViewTestCase): render_template = False + @patch('swh.web.ui.apidoc.APIUrls') + @istest + def browse_api_doc(self, mock_api_urls): + # given + endpoints = { + '/a/doc/endpoint/': 'relevant documentation', + '/some/other/endpoint/': 'more docstrings'} + mock_api_urls.apidoc_routes = endpoints + + # when + rv = self.client.get('/api/1/doc/') + + # then + self.assertEquals(rv.status_code, 200) + self.assertIsNotNone( + self.get_context_variable('doc_routes'), + sorted(endpoints.items()) + ) + self.assert_template_used('api.html') + @istest def search_default(self): # when - rv = self.client.get('/search/') + rv = self.client.get('/content/search/') self.assertEqual(rv.status_code, 200) self.assertEqual(self.get_context_variable('message'), '') self.assertEqual(self.get_context_variable('search_res'), None) self.assert_template_used('search.html') @patch('swh.web.ui.views.browse.api') @istest def search_get_query_hash_not_found(self, mock_api): # given mock_api.api_search.return_value = { 'search_res': [{ 'filename': None, 'sha1': 'sha1:456', 'found': False}], 'search_stats': {'nbfiles': 1, 'pct': 100}} # when - rv = self.client.get('/search/?q=sha1:456') + rv = self.client.get('/content/search/?q=sha1:456') self.assertEqual(rv.status_code, 200) self.assertEqual(self.get_context_variable('message'), '') self.assertEqual(self.get_context_variable('search_res'), [ {'filename': None, 'sha1': 'sha1:456', 'found': False}]) self.assert_template_used('search.html') mock_api.api_search.assert_called_once_with('sha1:456') @patch('swh.web.ui.views.browse.api') @istest def search_get_query_hash_bad_input(self, mock_api): # given mock_api.api_search.side_effect = BadInputExc('error msg') # when - rv = self.client.get('/search/?q=sha1_git:789') + rv = self.client.get('/content/search/?q=sha1_git:789') self.assertEqual(rv.status_code, 200) self.assertEqual(self.get_context_variable('message'), 'error msg') self.assertEqual(self.get_context_variable('search_res'), None) self.assert_template_used('search.html') mock_api.api_search.assert_called_once_with('sha1_git:789') @patch('swh.web.ui.views.browse.api') @istest def search_get_query_hash_found(self, mock_api): # given mock_api.api_search.return_value = { 'search_res': [{ 'filename': None, 'sha1': 'sha1:123', 'found': True}], 'search_stats': {'nbfiles': 1, 'pct': 100}} # when - rv = self.client.get('/search/?q=sha1:123') + rv = self.client.get('/content/search/?q=sha1:123') self.assertEqual(rv.status_code, 200) self.assertEqual(self.get_context_variable('message'), '') self.assertEqual(len(self.get_context_variable('search_res')), 1) resp = self.get_context_variable('search_res')[0] self.assertTrue(resp is not None) self.assertEqual(resp['sha1'], 'sha1:123') self.assertEqual(resp['found'], True) self.assert_template_used('search.html') mock_api.api_search.assert_called_once_with('sha1:123') @patch('swh.web.ui.views.browse.request') @patch('swh.web.ui.views.browse.api') @istest def search_post_hashes_bad_input(self, mock_api, mock_request): # given mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} mock_request.method = 'POST' mock_api.api_search.side_effect = BadInputExc( 'error bad input') # when (mock_request completes the post request) - rv = self.client.post('/search/') + rv = self.client.post('/content/search/') # then self.assertEqual(rv.status_code, 200) 
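
A minimal sketch of the view the browse_api_doc test above drives; the view name and the stand-in Flask app are assumptions, while the /api/1/doc/ route, the APIUrls.apidoc_routes attribute, the doc_routes context variable and the api.html template all come from the test itself.

    from flask import Flask, render_template

    from swh.web.ui import apidoc

    app = Flask(__name__)  # stand-in application object for this sketch only


    @app.route('/api/1/doc/')
    def api_doc():
        # apidoc.APIUrls.apidoc_routes maps an endpoint path to its
        # documentation; the test feeds it a two-entry dict and expects
        # the sorted items under 'doc_routes'.
        routes = sorted(apidoc.APIUrls.apidoc_routes.items())
        return render_template('api.html', doc_routes=routes)
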
self.assertEqual(self.get_context_variable('search_stats'), {'nbfiles': 0, 'pct': 0}) self.assertEqual(self.get_context_variable('search_res'), None) self.assertEqual(self.get_context_variable('message'), 'error bad input') self.assert_template_used('search.html') @patch('swh.web.ui.views.browse.request') @patch('swh.web.ui.views.browse.api') @istest def search_post_hashes_none(self, mock_api, mock_request): # given mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} mock_request.method = 'POST' mock_api.api_search.return_value = { 'search_stats': {'nbfiles': 2, 'pct': 0}, 'search_res': [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': False}]} # when (mock_request completes the post request) - rv = self.client.post('/search/') + rv = self.client.post('/content/search/') # then self.assertEqual(rv.status_code, 200) self.assertIsNotNone(self.get_context_variable('search_res')) self.assertTrue(self.get_context_variable('search_stats') is not None) self.assertEqual(len(self.get_context_variable('search_res')), 2) stats = self.get_context_variable('search_stats') self.assertEqual(stats['nbfiles'], 2) self.assertEqual(stats['pct'], 0) a, b = self.get_context_variable('search_res') self.assertEqual(a['found'], False) self.assertEqual(b['found'], False) self.assertEqual(self.get_context_variable('message'), '') self.assert_template_used('search.html') @patch('swh.web.ui.views.browse.request') @patch('swh.web.ui.views.browse.api') @istest def search_post_hashes_some(self, mock_api, mock_request): # given mock_request.form = {'a': '456caf10e9535160d90e874b45aa426de762f19f', 'b': '745bab676c8f3cec8016e0c39ea61cf57e518865'} mock_request.method = 'POST' mock_api.api_search.return_value = { 'search_stats': {'nbfiles': 2, 'pct': 50}, 'search_res': [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True}]} # when (mock_request completes the post request) - rv = self.client.post('/search/') + rv = self.client.post('/content/search/') # then self.assertEqual(rv.status_code, 200) self.assertIsNotNone(self.get_context_variable('search_res')) self.assertEqual(len(self.get_context_variable('search_res')), 2) self.assertTrue(self.get_context_variable('search_stats') is not None) stats = self.get_context_variable('search_stats') self.assertEqual(stats['nbfiles'], 2) self.assertEqual(stats['pct'], 50) self.assertEqual(self.get_context_variable('message'), '') a, b = self.get_context_variable('search_res') self.assertEqual(a['found'], False) self.assertEqual(b['found'], True) self.assert_template_used('search.html') class ContentView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_content_ko_not_found(self, mock_api): # given mock_api.api_content_metadata.side_effect = NotFoundExc( 'Not found!') # when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertEqual(self.get_context_variable('message'), 'Not found!') self.assertIsNone(self.get_context_variable('content')) mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.api') @istest def browse_content_ko_bad_input(self, mock_api): # given 
mock_api.api_content_metadata.side_effect = BadInputExc( 'Bad input!') # when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertEqual(self.get_context_variable('message'), 'Bad input!') self.assertIsNone(self.get_context_variable('content')) mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_content(self, mock_api, mock_service): # given stub_content = {'sha1': 'sha1_hash'} mock_api.api_content_metadata.return_value = stub_content mock_service.lookup_content_raw.return_value = {'data': b'blah'} expected_content = {'sha1': 'sha1_hash', 'data': 'blah'} # when rv = self.client.get('/browse/content/sha1:sha1-hash/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('content'), expected_content) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:sha1-hash') mock_api.api_content_metadata.assert_called_once_with( 'sha1:sha1-hash') @patch('swh.web.ui.views.browse.redirect') @patch('swh.web.ui.views.browse.url_for') @istest def browse_content_raw(self, mock_urlfor, mock_redirect): # given stub_content_raw = b'some-data' mock_urlfor.return_value = '/api/content/sha1:sha1-hash/raw/' mock_redirect.return_value = stub_content_raw # when rv = self.client.get('/browse/content/sha1:sha1-hash/raw/') self.assertEqual(rv.status_code, 200) self.assertEqual(rv.data, stub_content_raw) mock_urlfor.assert_called_once_with('api_content_raw', q='sha1:sha1-hash') mock_redirect.assert_called_once_with( '/api/content/sha1:sha1-hash/raw/') class DirectoryView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_directory_ko_bad_input(self, mock_api): # given mock_api.api_directory.side_effect = BadInputExc( 'Invalid hash') # when rv = self.client.get('/browse/directory/sha2-invalid/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Invalid hash') self.assertEqual(self.get_context_variable('files'), []) mock_api.api_directory.assert_called_once_with( 'sha2-invalid') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_empty_result(self, mock_api): # given mock_api.api_directory.return_value = [] # when rv = self.client.get('/browse/directory/some-sha1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Listing for directory some-sha1:') self.assertEqual(self.get_context_variable('files'), []) mock_api.api_directory.assert_called_once_with( 'some-sha1') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_file(self, mock_api, mock_service): # given stub_entry = { 'sha256': '240', 'type': 'file' } mock_service.lookup_directory_with_path.return_value = stub_entry stub_file = { 'sha1_git': '123', 'sha1': '456', 'status': 'visible', 'data_url': '/api/1/content/890', 'length': 42, 'ctime': 'Thu, 01 Oct 2015 12:13:53 GMT', 'target': 'file.txt', 'sha256': '148' } mock_api.api_content_metadata.return_value = stub_file mock_service.lookup_content_raw.return_value = { 'data': 'this is my file'} # when rv = 
self.client.get('/browse/directory/sha1/path/to/file/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content.html') self.assertIsNotNone(self.get_context_variable('content')) content = self.get_context_variable('content') # change caused by call to prepare_data_for_view self.assertEqual(content['data_url'], '/browse/content/890') self.assertEqual(content['data'], 'this is my file') mock_api.api_content_metadata.assert_called_once_with('sha256:240') mock_service.lookup_content_raw.assert_called_once_with('sha256:240') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_dir(self, mock_api, mock_service): # given mock_service.lookup_directory_with_path.return_value = { 'sha256': '240', 'target': 'abcd', 'type': 'dir' } stub_directory_ls = [ {'type': 'dir', 'target': '123', 'name': 'some-dir-name'}, {'type': 'file', 'sha1': '654', 'name': 'some-filename'}, {'type': 'dir', 'target': '987', 'name': 'some-other-dirname'} ] mock_api.api_directory.return_value = stub_directory_ls # when rv = self.client.get('/browse/directory/sha1/path/to/dir/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertIsNotNone(self.get_context_variable('files')) self.assertEqual(len(self.get_context_variable('files')), len(stub_directory_ls)) mock_api.api_directory.assert_called_once_with('abcd') @patch('swh.web.ui.views.browse.service') @patch('swh.web.ui.views.browse.api') @istest def browse_directory_relative_not_found(self, mock_api, mock_service): # given mock_service.lookup_directory_with_path.side_effect = NotFoundExc( 'Directory entry not found.') # when rv = self.client.get('/browse/directory/some-sha1/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Directory entry not found.') @patch('swh.web.ui.views.browse.api') @patch('swh.web.ui.views.browse.utils') @istest def browse_directory(self, mock_utils, mock_api): # given stub_directory_ls = [ {'type': 'dir', 'target': '123', 'name': 'some-dir-name'}, {'type': 'file', 'sha1': '654', 'name': 'some-filename'}, {'type': 'dir', 'target': '987', 'name': 'some-other-dirname'} ] mock_api.api_directory.return_value = stub_directory_ls stub_directory_map = [ {'link': '/path/to/url/dir/123', 'name': 'some-dir-name'}, {'link': '/path/to/url/file/654', 'name': 'some-filename'}, {'link': '/path/to/url/dir/987', 'name': 'some-other-dirname'} ] mock_utils.prepare_data_for_view.return_value = stub_directory_map # when rv = self.client.get('/browse/directory/some-sha1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('directory.html') self.assertEqual(self.get_context_variable('message'), 'Listing for directory some-sha1:') self.assertEqual(self.get_context_variable('files'), stub_directory_map) mock_api.api_directory.assert_called_once_with( 'some-sha1') mock_utils.prepare_data_for_view.assert_called_once_with( stub_directory_ls) class ContentWithOriginView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin_content_ko_not_found(self, mock_api): # given mock_api.api_content_checksum_to_origin.side_effect = NotFoundExc( 'Not found!') # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') 
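
A rough sketch, with assumed function and module names, of the relative-path dispatch the browse_directory_relative_* tests above describe: resolve the entry under the directory, then either render it as a content blob ('file') or list it as a sub-directory ('dir'). The service, api and utils calls are the ones the tests mock; everything else is illustrative.

    from flask import render_template

    from swh.web.ui import service, utils
    from swh.web.ui.views import api


    def browse_directory_entry(sha1_git, path):
        # Resolve the entry named by `path` below the directory sha1_git.
        entry = service.lookup_directory_with_path(sha1_git, path)
        if entry['type'] == 'file':
            # Fetch the blob's metadata and raw data by its sha256, as the
            # api_content_metadata('sha256:240') assertion suggests.
            q = 'sha256:%s' % entry['sha256']
            content = api.api_content_metadata(q)
            content['data'] = service.lookup_content_raw(q)['data']
            return render_template('content.html',
                                   content=utils.prepare_data_for_view(content))
        # Directory entry: list the target directory instead.
        files = utils.prepare_data_for_view(api.api_directory(entry['target']))
        return render_template('directory.html', files=files)
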
self.assertEqual(self.get_context_variable('message'), 'Not found!') mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin_ko_bad_input(self, mock_api): # given mock_api.api_content_checksum_to_origin.side_effect = BadInputExc( 'Invalid hash') # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') self.assertEqual( self.get_context_variable('message'), 'Invalid hash') mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') @patch('swh.web.ui.views.browse.api') # @istest def browse_content_with_origin(self, mock_api): # given mock_api.api_content_checksum_to_origin.return_value = { 'origin_type': 'ftp', 'origin_url': '/some/url', 'revision': 'revision-hash', 'branch': 'master', 'path': '/path/to', } # when rv = self.client.get('/browse/content/sha256:some-sha256/origin/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('content-with-origin.html') self.assertEqual( self.get_context_variable('message'), "The content with hash sha256:some-sha256 has been seen on " + "origin with type 'ftp'\n" + "at url '/some/url'. The revision was identified at " + "'revision-hash' on branch 'master'.\n" + "The file's path referenced was '/path/to'.") mock_api.api_content_checksum_to_origin.assert_called_once_with( 'sha256:some-sha256') class OriginView(test_app.SWHViewTestCase): render_template = False + def setUp(self): + + def url_for_test(fn, **args): + if fn == 'browse_revision_with_origin': + return '/browse/revision/origin/%s/' % args['origin_id'] + elif fn == 'api_origin_visits': + return '/api/1/stat/visits/%s/' % args['origin_id'] + + self.url_for_test = url_for_test + + self.stub_origin = {'type': 'git', + 'lister': None, + 'project': None, + 'url': 'rsync://some/url', + 'id': 426} + @patch('swh.web.ui.views.browse.api') @istest def browse_origin_ko_not_found(self, mock_api): # given mock_api.api_origin.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/origin/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') - self.assertEqual(self.get_context_variable('origin_id'), 1) + self.assertIsNone(self.get_context_variable('origin')) self.assertEqual( self.get_context_variable('message'), 'Not found!') - mock_api.api_origin.assert_called_once_with(1) + mock_api.api_origin.assert_called_once_with(1, None, None) @patch('swh.web.ui.views.browse.api') @istest def browse_origin_ko_bad_input(self, mock_api): # given mock_api.api_origin.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/origin/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') - self.assertEqual(self.get_context_variable('origin_id'), 426) + self.assertIsNone(self.get_context_variable('origin')) - mock_api.api_origin.assert_called_once_with(426) + mock_api.api_origin.assert_called_once_with(426, None, None) @patch('swh.web.ui.views.browse.api') @patch('swh.web.ui.views.browse.url_for') @istest - def browse_origin_found(self, mock_url_for, mock_api): + def browse_origin_found_id(self, mock_url_for, mock_api): # given - def url_for_test(fn, **args): - if fn == 'browse_revision_with_origin': - return '/browse/revision/origin/%s/' % args['origin_id'] - elif fn == 'api_origin_visits': - return '/api/1/stat/visits/%s/' % args['origin_id'] - 
mock_url_for.side_effect = url_for_test - mock_origin = {'type': 'git', - 'lister': None, - 'project': None, - 'url': 'rsync://some/url', - 'id': 426} - mock_api.api_origin.return_value = mock_origin + mock_url_for.side_effect = self.url_for_test + + mock_api.api_origin.return_value = self.stub_origin # when rv = self.client.get('/browse/origin/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('origin.html') - self.assertEqual(self.get_context_variable('origin_id'), 426) - self.assertEqual(self.get_context_variable('origin'), mock_origin) + self.assertEqual(self.get_context_variable('origin'), self.stub_origin) self.assertEqual(self.get_context_variable('browse_url'), '/browse/revision/origin/426/') self.assertEqual(self.get_context_variable('visit_url'), '/api/1/stat/visits/426/') - mock_api.api_origin.assert_called_once_with(426) + mock_api.api_origin.assert_called_once_with(426, None, None) + + @patch('swh.web.ui.views.browse.api') + @patch('swh.web.ui.views.browse.url_for') + @istest + def browse_origin_found_url_type(self, mock_url_for, mock_api): + # given + + mock_url_for.side_effect = self.url_for_test + + mock_api.api_origin.return_value = self.stub_origin + + # when + rv = self.client.get('/browse/origin/git/url/rsync://some/url/') + + # then + self.assertEqual(rv.status_code, 200) + self.assert_template_used('origin.html') + self.assertEqual(self.get_context_variable('origin'), self.stub_origin) + self.assertEqual(self.get_context_variable('browse_url'), + '/browse/revision/origin/426/') + self.assertEqual(self.get_context_variable('visit_url'), + '/api/1/stat/visits/426/') + + mock_api.api_origin.assert_called_once_with(None, 'git', + 'rsync://some/url') class PersonView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_person_ko_not_found(self, mock_api): # given mock_api.api_person.side_effect = NotFoundExc('not found') # when rv = self.client.get('/browse/person/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') self.assertEqual(self.get_context_variable('person_id'), 1) self.assertEqual( self.get_context_variable('message'), 'not found') mock_api.api_person.assert_called_once_with(1) @patch('swh.web.ui.views.browse.api') @istest def browse_person_ko_bad_input(self, mock_api): # given mock_api.api_person.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/person/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') self.assertEqual(self.get_context_variable('person_id'), 426) mock_api.api_person.assert_called_once_with(426) @patch('swh.web.ui.views.browse.api') @istest def browse_person(self, mock_api): # given mock_person = {'type': 'git', 'lister': None, 'project': None, 'url': 'rsync://some/url', 'id': 426} mock_api.api_person.return_value = mock_person # when rv = self.client.get('/browse/person/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('person.html') self.assertEqual(self.get_context_variable('person_id'), 426) self.assertEqual(self.get_context_variable('person'), mock_person) mock_api.api_person.assert_called_once_with(426) class ReleaseView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_release_ko_not_found(self, mock_api): # given mock_api.api_release.side_effect = NotFoundExc('not found!') # when rv = self.client.get('/browse/release/1/') # then self.assertEqual(rv.status_code, 200) 
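
A sketch, not the project's actual browse view, of how the two origin URLs exercised above can funnel into a single handler. The api_origin(origin_id, origin_type, origin_url) argument order, the origin.html template and the browse_url/visit_url context values come straight from the assertions; the view name, the URL converters and the stand-in app are assumptions, and the real view builds the two URLs through url_for('browse_revision_with_origin') and url_for('api_origin_visits') as the patched url_for indicates.

    from flask import Flask, render_template

    from swh.web.ui.exc import BadInputExc, NotFoundExc
    from swh.web.ui.views import api

    app = Flask(__name__)  # stand-in application object for this sketch only


    @app.route('/browse/origin/<int:origin_id>/')
    @app.route('/browse/origin/<origin_type>/url/<path:origin_url>/')
    def browse_origin(origin_id=None, origin_type=None, origin_url=None):
        env = {'origin': None, 'message': None}
        try:
            # Either the id or the (type, url) pair is set, never both.
            origin = api.api_origin(origin_id, origin_type, origin_url)
            env['origin'] = origin
            # Literal URLs here mirror the test's fake url_for.
            env['browse_url'] = '/browse/revision/origin/%s/' % origin['id']
            env['visit_url'] = '/api/1/stat/visits/%s/' % origin['id']
        except (NotFoundExc, BadInputExc) as e:
            env['message'] = str(e)
        return render_template('origin.html', **env)
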
self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual( self.get_context_variable('message'), 'not found!') mock_api.api_release.assert_called_once_with('1') @patch('swh.web.ui.views.browse.api') @istest def browse_release_ko_bad_input(self, mock_api): # given mock_api.api_release.side_effect = BadInputExc('wrong input') # when rv = self.client.get('/browse/release/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') mock_api.api_release.assert_called_once_with('426') @patch('swh.web.ui.views.browse.api') @istest def browse_release(self, mock_api): # given self.maxDiff = None mock_release = { "date": "Sun, 05 Jul 2015 18:02:06 GMT", "id": "1e951912027ea6873da6985b91e50c47f645ae1a", "target": "d770e558e21961ad6cfdf0ff7df0eb5d7d4f0754", "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' 'eb5d7d4f0754/', "synthetic": False, "target_type": "revision", "author": { "email": "torvalds@linux-foundation.org", "name": "Linus Torvalds" }, "message": "Linux 4.2-rc1\n", "name": "v4.2-rc1" } mock_api.api_release.return_value = mock_release expected_release = { "date": "Sun, 05 Jul 2015 18:02:06 GMT", "id": "1e951912027ea6873da6985b91e50c47f645ae1a", "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' 'eb5d7d4f0754/', "target": 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', "synthetic": False, "target_type": "revision", "author": { "email": "torvalds@linux-foundation.org", "name": "Linus Torvalds" }, "message": "Linux 4.2-rc1\n", "name": "v4.2-rc1" } # when rv = self.client.get('/browse/release/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('release.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual(self.get_context_variable('release'), expected_release) mock_api.api_release.assert_called_once_with('426') class RevisionView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_revision_ko_not_found(self, mock_api): # given mock_api.api_revision.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/1/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual( self.get_context_variable('message'), 'Not found!') self.assertIsNone(self.get_context_variable('revision')) mock_api.api_revision.assert_called_once_with('1', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_ko_bad_input(self, mock_api): # given mock_api.api_revision.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/revision/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual( self.get_context_variable('message'), 'wrong input!') self.assertIsNone(self.get_context_variable('revision')) mock_api.api_revision.assert_called_once_with('426', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision(self, mock_api): # given stub_revision = { 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 
'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'directory_url': '/api/1/directory/' '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', 'parent_url': [ '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], } mock_api.api_revision.return_value = stub_revision expected_revision = { 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'parent_url': [ '/browse/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], 'directory_url': '/browse/directory/2a1dbabeed4dcf1f4a4c441993b2f' 'fc9d972780b/', } # when rv = self.client.get('/browse/revision/426/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual(self.get_context_variable('revision'), expected_revision) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision.assert_called_once_with('426', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_raw_message(self, mock_api): # given sha1 = 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754' # when rv = self.client.get('/browse/revision/' 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754/raw/') self.assertRedirects( rv, '/api/1/revision/%s/raw/' % sha1) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_ko_not_found(self, mock_api): # given mock_api.api_revision_log.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/sha1/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), 'sha1') self.assertEqual( self.get_context_variable('message'), 'Not found!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log.assert_called_once_with('sha1', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_ko_bad_input(self, mock_api): # given mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/revision/426/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertEqual( self.get_context_variable('message'), 'wrong input!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log.assert_called_once_with('426', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log(self, mock_api): # given - stub_revisions = [{ - 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'committer': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'type': 'git', - 'author': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'message': 'Linux 4.2-rc1\n', - 'synthetic': False, - 'directory_url': '/api/1/directory/' - '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', - 'parent_url': [ - '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' - ], - }] + stub_revisions = { + 'revisions': [{ + 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', + 'date': 
'Sun, 05 Jul 2015 18:01:52 GMT', + 'committer': { + 'email': 'torvalds@linux-foundation.org', + 'name': 'Linus Torvalds' + }, + 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', + 'type': 'git', + 'author': { + 'email': 'torvalds@linux-foundation.org', + 'name': 'Linus Torvalds' + }, + 'message': 'Linux 4.2-rc1\n', + 'synthetic': False, + 'directory_url': '/api/1/directory/' + '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', + 'parent_url': [ + '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' + ], + }], + 'next_revs_url': '/api/1/revision/1234/log/' + } mock_api.api_revision_log.return_value = stub_revisions # when rv = self.client.get('/browse/revision/426/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), '426') self.assertTrue( isinstance(self.get_context_variable('revisions'), map)) + self.assertEqual( + self.get_context_variable('next_revs_url'), + '/browse/revision/1234/log/') self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_log.assert_called_once_with('426', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_by_ko_not_found(self, mock_api): # given mock_api.api_revision_log_by.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/origin/9/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('origin_id'), 9) self.assertEqual( self.get_context_variable('message'), 'Not found!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log_by.assert_called_once_with( 9, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_by_ko_bad_input(self, mock_api): # given mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/revision/abcd/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('sha1_git'), 'abcd') self.assertEqual( self.get_context_variable('message'), 'wrong input!') self.assertEqual(self.get_context_variable('revisions'), []) mock_api.api_revision_log.assert_called_once_with('abcd', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_log_by(self, mock_api): # given stub_revisions = [{ 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'committer': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', 'type': 'git', 'author': { 'email': 'torvalds@linux-foundation.org', 'name': 'Linus Torvalds' }, 'message': 'Linux 4.2-rc1\n', 'synthetic': False, 'directory_url': '/api/1/directory/' '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', 'parent_url': [ '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' ], }] mock_api.api_revision_log_by.return_value = stub_revisions # when rv = self.client.get('/browse/revision/origin/2/log/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-log.html') self.assertEqual(self.get_context_variable('origin_id'), 2) self.assertTrue( isinstance(self.get_context_variable('revisions'), map)) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_log_by.assert_called_once_with( 2, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def 
browse_revision_history_ko_not_found(self, mock_api): # given mock_api.api_revision_history.side_effect = NotFoundExc( 'Not found') # when rv = self.client.get('/browse/revision/1/history/2/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '1') self.assertEqual(self.get_context_variable('sha1_git'), '2') self.assertEqual( self.get_context_variable('message'), 'Not found') mock_api.api_revision_history.assert_called_once_with( '1', '2') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_ko_bad_input(self, mock_api): # given mock_api.api_revision_history.side_effect = BadInputExc( 'Input incorrect') # when rv = self.client.get('/browse/revision/321/history/654/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '321') self.assertEqual(self.get_context_variable('sha1_git'), '654') self.assertEqual( self.get_context_variable('message'), 'Input incorrect') mock_api.api_revision_history.assert_called_once_with( '321', '654') @istest def browse_revision_history_ok_same_sha1(self): # when rv = self.client.get('/browse/revision/10/history/10/') # then self.assertEqual(rv.status_code, 302) @patch('swh.web.ui.views.browse.utils') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history(self, mock_api, mock_utils): # given stub_revision = {'id': 'some-rev'} mock_api.api_revision_history.return_value = stub_revision expected_revision = { 'id': 'some-rev-id', 'author': {'name': 'foo', 'email': 'bar'}, 'committer': {'name': 'foo', 'email': 'bar'} } mock_utils.prepare_data_for_view.return_value = expected_revision # when rv = self.client.get('/browse/revision/426/history/789/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '426') self.assertEqual(self.get_context_variable('sha1_git'), '789') self.assertEqual(self.get_context_variable('revision'), expected_revision) mock_api.api_revision_history.assert_called_once_with( '426', '789') mock_utils.prepare_data_for_view.assert_called_once_with(stub_revision) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_ko_not_found(self, mock_api): # given mock_api.api_revision_directory.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/revision/1/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '1') self.assertEqual(self.get_context_variable('path'), '.') self.assertIsNone(self.get_context_variable('result')) self.assertEqual( self.get_context_variable('message'), "Not found!") mock_api.api_revision_directory.assert_called_once_with( '1', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_ko_bad_input(self, mock_api): # given mock_api.api_revision_directory.side_effect = BadInputExc('Bad input!') # when rv = self.client.get('/browse/revision/10/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '10') self.assertEqual(self.get_context_variable('path'), '.') self.assertIsNone(self.get_context_variable('result')) self.assertEqual( self.get_context_variable('message'), "Bad input!") 
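
Hypothetical helpers (not the actual swh.web.ui code) that capture the pagination contract the revision-log tests above and their counterparts in test_api.py assert: the API view asks the service for one revision beyond the page size, keeps a full page, exposes the extra revision as next_revs_url, and the browse layer rebases that URL from /api/1/revision/ onto /browse/revision/.

    PER_PAGE = 25  # one less than the limit of 26 the mocked service receives


    def paginate_revision_log(revisions, per_page=PER_PAGE):
        """Split what the service returned for per_page + 1 revisions into
        the page to expose and the URL of the next page, mirroring the
        {'revisions': ..., 'next_revs_url': ...} payload asserted above."""
        next_revs_url = None
        if len(revisions) > per_page:
            next_revs_url = '/api/1/revision/%s/log/' % revisions[per_page]['id']
            revisions = revisions[:per_page]
        return {'revisions': revisions, 'next_revs_url': next_revs_url}


    def api_to_browse_log_url(api_url):
        """Rebase an API log URL onto its /browse/ counterpart, as the
        browse_revision_log test expects ('/api/1/revision/1234/log/'
        becomes '/browse/revision/1234/log/')."""
        if api_url is None:
            return None
        return api_url.replace('/api/1/revision/', '/browse/revision/', 1)


    # Example: 26 stub revisions yield a 25-revision page plus
    # next_revs_url '/api/1/revision/25/log/', rendered in the UI as
    # '/browse/revision/25/log/'.
    assert api_to_browse_log_url(
        paginate_revision_log([{'id': i} for i in range(26)])['next_revs_url']
    ) == '/browse/revision/25/log/'
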
mock_api.api_revision_directory.assert_called_once_with( '10', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory(self, mock_api): # given stub_result0 = { 'type': 'dir', 'revision': '100', 'content': [ { 'id': 'some-result', 'type': 'file', 'name': 'blah', }, { 'id': 'some-other-result', 'type': 'dir', 'name': 'foo', } ] } mock_api.api_revision_directory.return_value = stub_result0 stub_result1 = { 'type': 'dir', 'revision': '100', 'content': [ { 'id': 'some-result', 'type': 'file', 'name': 'blah', }, { 'id': 'some-other-result', 'type': 'dir', 'name': 'foo', } ] } # when rv = self.client.get('/browse/revision/100/directory/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git'), '100') self.assertEqual(self.get_context_variable('revision'), '100') self.assertEqual(self.get_context_variable('path'), 'some/path') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('result'), stub_result1) mock_api.api_revision_directory.assert_called_once_with( '100', 'some/path', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory_ko_not_found(self, mock_api): # given mock_api.api_revision_history_directory.side_effect = NotFoundExc( 'not found') # when rv = self.client.get('/browse/revision/123/history/456/directory/a/b/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '123') self.assertEqual(self.get_context_variable('sha1_git'), '456') self.assertEqual(self.get_context_variable('path'), 'a/b') self.assertEqual(self.get_context_variable('message'), 'not found') self.assertIsNone(self.get_context_variable('result')) mock_api.api_revision_history_directory.assert_called_once_with( '123', '456', 'a/b', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory_ko_bad_input(self, mock_api): # given mock_api.api_revision_history_directory.side_effect = BadInputExc( 'bad input') # when rv = self.client.get('/browse/revision/123/history/456/directory/a/c/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '123') self.assertEqual(self.get_context_variable('sha1_git'), '456') self.assertEqual(self.get_context_variable('path'), 'a/c') self.assertEqual(self.get_context_variable('message'), 'bad input') self.assertIsNone(self.get_context_variable('result')) mock_api.api_revision_history_directory.assert_called_once_with( '123', '456', 'a/c', with_data=True) @patch('swh.web.ui.views.browse.service') @istest def browse_revision_history_directory_ok_no_trailing_slash_so_redirect( self, mock_service): # when rv = self.client.get('/browse/revision/1/history/2/directory/path/to') # then self.assertEqual(rv.status_code, 301) @patch('swh.web.ui.views.browse.service') @istest def browse_revision_history_directory_ok_same_sha1_redirects( self, mock_service): # when rv = self.client.get('/browse/revision/1/history/1/directory/path/to') # then self.assertEqual(rv.status_code, 301) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_directory(self, mock_api): # given stub_result0 = { 'type': 'dir', 'revision': '1000', 'content': [{ 'id': 'some-result', 'type': 'file', 'name': 'blah' }] } 
mock_api.api_revision_history_directory.return_value = stub_result0 stub_result1 = { 'type': 'dir', 'revision': '1000', 'content': [{ 'id': 'some-result', 'type': 'file', 'name': 'blah' }] } # when rv = self.client.get('/browse/revision/100/history/999/directory/' 'path/to/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('sha1_git_root'), '100') self.assertEqual(self.get_context_variable('sha1_git'), '999') self.assertEqual(self.get_context_variable('revision'), '1000') self.assertEqual(self.get_context_variable('path'), 'path/to') self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('result'), stub_result1) mock_api.api_revision_history_directory.assert_called_once_with( '100', '999', 'path/to', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_bad_input(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = BadInputExc( 'Problem input.') # noqa # when rv = self.client.get('/browse/revision/origin/99' '/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Problem input.') mock_api.api_revision_history_through_origin.assert_called_once_with( 99, 'refs/heads/master', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_not_found(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = NotFoundExc( 'Not found.') # when rv = self.client.get('/browse/revision/origin/999/' 'branch/dev/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Not found.') mock_api.api_revision_history_through_origin.assert_called_once_with( 999, 'dev', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin_ko_other_error(self, mock_api): # given mock_api.api_revision_history_through_origin.side_effect = ValueError( 'Other Error.') # when rv = self.client.get('/browse/revision/origin/438' '/branch/scratch' '/ts/2016' '/history/789/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Other Error.') mock_api.api_revision_history_through_origin.assert_called_once_with( 438, 'scratch', '2016', '789') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_history_through_origin(self, mock_api): # given stub_rev = { 'id': 'some-id', 'author': {}, 'committer': {} } mock_api.api_revision_history_through_origin.return_value = stub_rev # when rv = self.client.get('/browse/revision/origin/99/history/123/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('revision'), stub_rev) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_history_through_origin.assert_called_once_with( 99, 'refs/heads/master', None, '123') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_not_found(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = NotFoundExc( 'Not 
found') # when rv = self.client.get('/browse/revision/origin/1/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Not found') mock_api.api_revision_with_origin.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_bad_input(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = BadInputExc( 'Bad Input') # when rv = self.client.get('/browse/revision/origin/1000/branch/dev/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Bad Input') mock_api.api_revision_with_origin.assert_called_once_with( 1000, 'dev', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin_ko_other(self, mock_api): # given mock_api.api_revision_with_origin.side_effect = ValueError( 'Other') # when rv = self.client.get('/browse/revision/origin/1999' '/branch/scratch/master' '/ts/1990-01-10/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertIsNone(self.get_context_variable('revision')) self.assertEqual(self.get_context_variable('message'), 'Other') mock_api.api_revision_with_origin.assert_called_once_with( 1999, 'scratch/master', '1990-01-10') @patch('swh.web.ui.views.browse.api') @istest def browse_revision_with_origin(self, mock_api): # given stub_rev = {'id': 'some-id', 'author': {}, 'committer': {}} mock_api.api_revision_with_origin.return_value = stub_rev # when rv = self.client.get('/browse/revision/origin/1/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision.html') self.assertEqual(self.get_context_variable('revision'), stub_rev) self.assertIsNone(self.get_context_variable('message')) mock_api.api_revision_with_origin.assert_called_once_with( 1, 'refs/heads/master', None) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_not_found(self, mock_api): # given mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa 'this is not the robot you are looking for') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'this is not the robot you are looking for') mock_api.api_directory_through_revision_origin.assert_called_once_with( # noqa 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_bad_input(self, mock_api): # given mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa 'Bad Robot') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Bad Robot') mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin_ko_other(self, mock_api): # given 
mock_api.api_directory_through_revision_origin.side_effect = ValueError( # noqa 'Other bad stuff') # when rv = self.client.get('/browse/revision/origin/2' '/directory/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Other bad stuff') mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'refs/heads/master', None, None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_revision_directory_through_origin(self, mock_api): # given stub_res = {'id': 'some-id', 'revision': 'some-rev-id', 'type': 'dir', 'content': 'some-content'} mock_api.api_directory_through_revision_origin.return_value = stub_res # when rv = self.client.get('/browse/revision/origin/2' '/branch/dev' '/ts/2013-20-20 10:02' '/directory/some/file/') self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('result'), stub_res) self.assertIsNone(self.get_context_variable('message')) mock_api.api_directory_through_revision_origin.assert_called_once_with( 2, 'dev', '2013-20-20 10:02', 'some/file', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history_ko_not_found( self, mock_api): mock_api.api_directory_through_revision_with_origin_history.side_effect = NotFoundExc( # noqa 'Not found!') # when rv = self.client.get('/browse/revision/origin/987' '/history/sha1git' '/directory/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Not found!') self.assertEqual(self.get_context_variable('path'), '.') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 987, 'refs/heads/master', None, 'sha1git', None, with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history_ko_bad_input( self, mock_api): mock_api.api_directory_through_revision_with_origin_history.side_effect = BadInputExc( # noqa 'Bad input! Bleh!') # when rv = self.client.get('/browse/revision/origin/798' '/branch/refs/heads/dev' '/ts/2012-11-11' '/history/1234' '/directory/some/path/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertIsNone(self.get_context_variable('result')) self.assertEqual(self.get_context_variable('message'), 'Bad input! 
Bleh!') self.assertEqual(self.get_context_variable('path'), 'some/path') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 798, 'refs/heads/dev', '2012-11-11', '1234', 'some/path', with_data=True) @patch('swh.web.ui.views.browse.api') @istest def browse_directory_through_revision_with_origin_history( self, mock_api): stub_dir = {'type': 'dir', 'content': [], 'revision': 'specific-rev-id'} mock_api.api_directory_through_revision_with_origin_history.return_value = stub_dir # noqa # when rv = self.client.get('/browse/revision/origin/101010' '/ts/1955-11-12' '/history/54628' '/directory/emacs-24.5/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('revision-directory.html') self.assertEqual(self.get_context_variable('result'), stub_dir) self.assertIsNone(self.get_context_variable('message')) self.assertEqual(self.get_context_variable('path'), 'emacs-24.5') mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa 101010, 'refs/heads/master', '1955-11-12', '54628', 'emacs-24.5', with_data=True) class EntityView(test_app.SWHViewTestCase): render_template = False @patch('swh.web.ui.views.browse.api') @istest def browse_entity_ko_not_found(self, mock_api): # given mock_api.api_entity_by_uuid.side_effect = NotFoundExc('Not found!') # when rv = self.client.get('/browse/entity/' '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), []) self.assertEqual(self.get_context_variable('message'), 'Not found!') mock_api.api_entity_by_uuid.assert_called_once_with( '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') @patch('swh.web.ui.views.browse.api') @istest def browse_entity_ko_bad_input(self, mock_api): # given mock_api.api_entity_by_uuid.side_effect = BadInputExc('wrong input!') # when rv = self.client.get('/browse/entity/blah-blah-uuid/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), []) self.assertEqual(self.get_context_variable('message'), 'wrong input!') mock_api.api_entity_by_uuid.assert_called_once_with( 'blah-blah-uuid') @patch('swh.web.ui.views.browse.api') @istest def browse_entity(self, mock_api): # given stub_entities = [ {'id': '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba'}] mock_api.api_entity_by_uuid.return_value = stub_entities # when rv = self.client.get('/browse/entity/' '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba/') # then self.assertEqual(rv.status_code, 200) self.assert_template_used('entity.html') self.assertEqual(self.get_context_variable('entities'), stub_entities) self.assertIsNone(self.get_context_variable('message')) mock_api.api_entity_by_uuid.assert_called_once_with( '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba') diff --git a/swh/web/ui/views/api.py b/swh/web/ui/views/api.py index 02c95fd7..4a4af56b 100644 --- a/swh/web/ui/views/api.py +++ b/swh/web/ui/views/api.py @@ -1,948 +1,764 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from types import GeneratorType -from flask import request, url_for, Response, redirect +from flask import request, url_for -from swh.web.ui import service, utils +from swh.web.ui import service, utils, apidoc as doc from swh.web.ui.exc import NotFoundExc from 
swh.web.ui.main import app @app.route('/api/1/stat/counters/') +@doc.route('/api/1/stat/counters/', noargs=True) +@doc.returns(rettype=doc.rettypes.dict, + retdoc="A dictionary of SWH's most important statistics") def api_stats(): """Return statistics on SWH storage. - Returns: - SWH storage's statistics. - """ return service.stat_counters() @app.route('/api/1/stat/visits//') +@doc.route('/api/1/stat/visits/') +@doc.arg('origin_id', + default=1, + argtype=doc.argtypes.int, + argdoc='The requested SWH origin identifier') +@doc.returns(rettype=doc.rettypes.list, + retdoc="""All instances of visits of the origin pointed by + origin_id as POSIX time since epoch""") def api_origin_visits(origin_id): - """Return visit dates for the given revision. - - Returns: - A list of SWH visit occurrence timestamps, sorted from oldest to - newest. - + """Return a list of visit dates as POSIX timestamps for the + given revision. """ date_gen = (item['date'] for item in service.stat_origin_visits(origin_id)) return sorted(date_gen) -@app.route('/api/1/search/', methods=['POST']) -@app.route('/api/1/search//') +@app.route('/api/1/content/search/', methods=['POST']) +@app.route('/api/1/content/search//') +@doc.route('/api/1/content/search/') +@doc.arg('q', + default='sha1:adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=doc.argtypes.algo_and_hash, + argdoc="""An algo_hash:hash string, where algo_hash is one of sha1, + sha1_git or sha256 and hash is the hash to search for in SWH""") +@doc.raises(exc=doc.excs.badinput, + doc='Raised if q is not well formed') +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""A dict with keys: + + - search_res: a list of dicts corresponding to queried content + with key 'found' to True if found, 'False' if not + - search_stats: a dict containing number of files searched and + percentage of files found + """) def api_search(q=None): """Search a content per hash. - Args: - q is of the form algo_hash:hash with algo_hash in - (sha1, sha1_git, sha256). - - Returns: - Dictionary with 'found' key and the associated result. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - - Example: - GET /api/1/search/sha1:bd819b5b28fcde3bf114d16a44ac46250da94ee5/ + This may take the form of: + - a GET request with a single checksum + - a POST request with many hashes, with the request body containing + identifiers (typically filenames) as keys and corresponding hashes as + values. 
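A minimal client-side sketch of the two request forms described above, assuming a locally running instance on http://127.0.0.1:6543 and the third-party requests library; the host, the hashes and the exact URL layout are illustrative only, inferred from the route defaults, and not part of this change:

# Hypothetical client for the content search endpoint sketched above.
import requests

BASE = 'http://127.0.0.1:6543'
HEADERS = {'Accept': 'application/json'}  # assuming JSON content negotiation

# Single-hash lookup: the algo_hash:hash string goes in the URL path.
single = requests.get(
    BASE + '/api/1/content/search/'
    'sha1:adc83b19e793491b1c6ea0fd8b46cd9f32e592fc/',
    headers=HEADERS)
print(single.json()['search_stats'])     # e.g. {'nbfiles': 1, 'pct': 100}

# Batch lookup: POST a form whose keys are filenames and whose values are
# the corresponding sha1 checksums; a bare 'q' key means "no filename".
batch = requests.post(
    BASE + '/api/1/content/search/',
    data={'hello.c': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
          'README': '8b137891791fe96927ad78e64b0aad7bded08bdc'},
    headers=HEADERS)
print(batch.json()['search_stats'])      # e.g. {'nbfiles': 2, 'pct': 50.0}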
""" response = {'search_res': None, 'search_stats': None} search_stats = {'nbfiles': 0, 'pct': 0} search_res = None # Single hash request route if q: r = service.search_hash(q) search_res = [{'filename': None, 'sha1': q, 'found': r['found']}] search_stats['nbfiles'] = 1 search_stats['pct'] = 100 if r['found'] else 0 # Post form submission with many hash requests elif request.method == 'POST': data = request.form queries = [] # Remove potential inputs with no associated value for k, v in data.items(): if v is not None: if k == 'q' and len(v) > 0: queries.append({'filename': None, 'sha1': v}) elif v != '': queries.append({'filename': k, 'sha1': v}) if len(queries) > 0: lookup = service.lookup_multiple_hashes(queries) result = [] for el in lookup: result.append({'filename': el['filename'], 'sha1': el['sha1'], 'found': el['found']}) search_res = result nbfound = len([x for x in lookup if x['found']]) search_stats['nbfiles'] = len(queries) search_stats['pct'] = (nbfound / len(queries))*100 response['search_res'] = search_res response['search_stats'] = search_stats return response def _api_lookup(criteria, lookup_fn, error_msg_if_not_found, enrich_fn=lambda x: x, *args): """Capture a redundant behavior of: - looking up the backend with a criteria (be it an identifier or checksum) passed to the function lookup_fn - if nothing is found, raise an NotFoundExc exception with error message error_msg_if_not_found. - Otherwise if something is returned: - either as list, map or generator, map the enrich_fn function to it and return the resulting data structure as list. - either as dict and pass to enrich_fn and return the dict enriched. Args: - criteria: discriminating criteria to lookup - lookup_fn: function expects one criteria and optional supplementary *args. - error_msg_if_not_found: if nothing matching the criteria is found, raise NotFoundExc with this error message. - enrich_fn: Function to use to enrich the result returned by lookup_fn. Default to the identity function if not provided. - *args: supplementary arguments to pass to lookup_fn. Raises: NotFoundExp or whatever `lookup_fn` raises. """ res = lookup_fn(criteria, *args) if not res: raise NotFoundExc(error_msg_if_not_found) if isinstance(res, (map, list, GeneratorType)): enriched_data = [] for e in res: enriched_data.append(enrich_fn(e)) return enriched_data return enrich_fn(res) -@app.route('/api/1/origin/') @app.route('/api/1/origin//') -def api_origin(origin_id): - """Return information about origin with id origin_id. - - - Args: - origin_id: the origin's identifier. - - Returns: - Information on the origin if found. - - Raises: - NotFoundExc if the origin is not found. - - Example: - GET /api/1/origin/1/ - +@app.route('/api/1/origin//url//') +@doc.route('/api/1/origin/') +@doc.arg('origin_id', + default=1, + argtype=doc.argtypes.int, + argdoc="The origin's SWH origin_id.") +@doc.arg('origin_type', + default='git', + argtype=doc.argtypes.str, + argdoc="The origin's type (git, svn..)") +@doc.arg('origin_url', + default='https://github.com/hylang/hy', + argtype=doc.argtypes.path, + argdoc="The origin's URL.") +@doc.raises(exc=doc.excs.notfound, + doc='Raised if origin_id does not correspond to an origin in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc='The metadata of the origin identified by origin_id') +def api_origin(origin_id=None, origin_type=None, origin_url=None): + """Return information about the origin matching the passed criteria. 
+ + Criteria may be: + - An SWH-specific ID, if you already know it + - An origin type and its URL, if you do not have the origin's SWH + identifier """ + ori_dict = { + 'id': origin_id, + 'type': origin_type, + 'url': origin_url + } + ori_dict = {k: v for k, v in ori_dict.items() if ori_dict[k]} + if 'id' in ori_dict: + error_msg = 'Origin with id %s not found.' % ori_dict['id'] + else: + error_msg = 'Origin with type %s and URL %s not found' % ( + ori_dict['type'], ori_dict['url']) return _api_lookup( - origin_id, lookup_fn=service.lookup_origin, - error_msg_if_not_found='Origin with id %s not found.' % origin_id) + ori_dict, lookup_fn=service.lookup_origin, + error_msg_if_not_found=error_msg) -@app.route('/api/1/person/') @app.route('/api/1/person//') +@doc.route('/api/1/person/') +@doc.arg('person_id', + default=1, + argtype=doc.argtypes.int, + argdoc="The person's SWH identifier") +@doc.raises(exc=doc.excs.notfound, + doc='Raised if person_id does not correspond to an origin in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc='The metadata of the person identified by person_id') def api_person(person_id): """Return information about person with identifier person_id. - - Args: - person_id: the person's identifier. - - Returns: - Information on the person if found. - - Raises: - NotFoundExc if the person is not found. - - Example: - GET /api/1/person/1/ - """ return _api_lookup( person_id, lookup_fn=service.lookup_person, error_msg_if_not_found='Person with id %s not found.' % person_id) -@app.route('/api/1/release/') @app.route('/api/1/release//') +@doc.route('/api/1/release/') +@doc.arg('sha1_git', + default='8b137891791fe96927ad78e64b0aad7bded08bdc', + argtype=doc.argtypes.sha1_git, + argdoc="The release's sha1_git identifier") +@doc.raises(exc=doc.excs.badinput, + doc='Raised if the argument is not a sha1') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if sha1_git does not correspond to a release in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc='The metadata of the release identified by sha1_git') def api_release(sha1_git): """Return information about release with id sha1_git. - - Args: - sha1_git: the release's hash. - - Returns: - Information on the release if found. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the release is not found. - - Example: - GET /api/1/release/b307094f00c3641b0c9da808d894f3a325371414 - """ error_msg = 'Release with sha1_git %s not found.' % sha1_git return _api_lookup( sha1_git, lookup_fn=service.lookup_release, error_msg_if_not_found=error_msg, enrich_fn=utils.enrich_release) def _revision_directory_by(revision, path, request_path, limit=100, with_data=False): """Compute the revision matching criterion's directory or content data. Args: revision: dictionary of criterions representing a revision to lookup path: directory's path to lookup request_path: request path which holds the original context to limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of with_data: indicate to retrieve the content's raw data if path resolves to a content. 
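As a standalone illustration of the criteria dispatch used by the reworked api_origin above (the helper name is ours; service.lookup_origin is left out):

# Sketch of the criteria dict that api_origin builds before delegating to
# service.lookup_origin: keys that were not supplied are dropped.
def build_origin_criteria(origin_id=None, origin_type=None, origin_url=None):
    criteria = {'id': origin_id, 'type': origin_type, 'url': origin_url}
    return {k: v for k, v in criteria.items() if v}

# Lookup by SWH identifier...
assert build_origin_criteria(origin_id=1) == {'id': 1}
# ... or by (type, URL) when the identifier is not known.
assert build_origin_criteria(
    origin_type='git', origin_url='https://github.com/hylang/hy') == {
        'type': 'git', 'url': 'https://github.com/hylang/hy'}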
""" def enrich_directory_local(dir, context_url=request_path): return utils.enrich_directory(dir, context_url) rev_id, result = service.lookup_directory_through_revision( revision, path, limit=limit, with_data=with_data) content = result['content'] if result['type'] == 'dir': # dir_entries result['content'] = list(map(enrich_directory_local, content)) else: # content result['content'] = utils.enrich_content(content) return result @app.route('/api/1/revision' '/origin/' '/directory/') @app.route('/api/1/revision' '/origin/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/directory//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/directory/') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts/' '/directory//') +@doc.route('/api/1/revision/origin/directory/') +@doc.arg('origin_id', + default=1, + argtype=doc.argtypes.int, + argdoc="The revision's origin's SWH identifier") +@doc.arg('branch_name', + default='refs/heads/master', + argtype=doc.argtypes.path, + argdoc="""The optional branch for the given origin (default + to master""") +@doc.arg('ts', + default='2000-01-17T11:23:54+00:00', + argtype=doc.argtypes.ts, + argdoc="""Optional timestamp (default to the nearest time + crawl of timestamp)""") +@doc.arg('path', + default='.', + argtype=doc.argtypes.path, + argdoc='The path to the directory or file to display') +@doc.raises(exc=doc.excs.notfound, + doc="""Raised if a revision matching the passed criteria was + not found""") +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata of the revision corresponding to the + passed criteria""") def api_directory_through_revision_origin(origin_id, branch_name="refs/heads/master", ts=None, path=None, with_data=False): """Display directory or content information through a revision identified by origin/branch/timestamp. - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - path: Path to directory or file to display. - with_data: indicate to retrieve the content's raw data if path resolves - to a content. - - Returns: - Information on the directory or content pointed to by such revision. - - Raises: - NotFoundExc if the revision is not found or the path pointed to - is not found. - """ if ts: ts = utils.parse_timestamp(ts) return _revision_directory_by( { 'origin_id': origin_id, 'branch_name': branch_name, 'ts': ts }, path, request.path, with_data=with_data) -@app.route('/api/1/revision' - '/origin/' - '/history//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/history//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts/' - '/history//') -def api_revision_history_through_origin(origin_id, - branch_name="refs/heads/master", - ts=None, - sha1_git=None): - """ - Return information about revision sha1_git, limited to the - sub-graph of all transitive parents of the revision root identified - by (origin_id, branch_name, ts). - Given sha1_git_root such root revision's identifier, in other words, - sha1_git is an ancestor of sha1_git_root. - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - sha1_git: one of sha1_git_root's ancestors. 
- limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - - Returns: - Information on sha1_git if it is an ancestor of sha1_git_root - including children leading to sha1_git_root. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an - ancestor of sha1_git_root. - - """ - limit = int(request.args.get('limit', '100')) - - if ts: - ts = utils.parse_timestamp(ts) - - rev_root, revision = service.lookup_revision_with_context_by( - origin_id, branch_name, ts, sha1_git, limit) - - if not revision: - raise NotFoundExc( - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s' " - "sha1_git_root being the revision's identifier pointed to by " - "(origin_id: %s, branch_name: %s, ts: %s)." % (sha1_git, - rev_root['id'], - origin_id, - branch_name, - ts)) - - return utils.enrich_revision(revision, context=rev_root['id']) - - -@app.route('/api/1/revision' - '/origin/' - '/history/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/history/' - '/directory//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/history/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/history/' - '/directory//') -@app.route('/api/1/revision' - '/origin/' - '/ts/' - '/history/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/ts/' - '/history/' - '/directory//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts/' - '/history/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts/' - '/history/' - '/directory//') -def api_directory_through_revision_with_origin_history( - origin_id, - branch_name="refs/heads/master", - ts=None, - sha1_git=None, - path=None, - with_data=False): - """Return information about directory or content pointed to by the - revision defined as: revision sha1_git, limited to the sub-graph - of all transitive parents of sha1_git_root (being the identified - sha1 by looking up origin_id/branch_name/ts) - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - sha1_git: one of sha1_git_root's ancestors. - path: optional directory or content pointed to by that revision. - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - with_data: indicate to retrieve the content's raw data if path resolves - to a content. - - Returns: - Information on the directory pointed to by that revision. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an - ancestor of sha1_git_root or the path referenced does not exist. 
- - """ - limit = int(request.args.get('limit', '100')) - - if ts: - ts = utils.parse_timestamp(ts) - - return _revision_directory_by( - { - 'origin_id': origin_id, - 'branch_name': branch_name, - 'ts': ts, - 'sha1_git': sha1_git - }, - path, - request.path, - limit=limit, with_data=with_data) - - -@app.route('/api/1/revision' - '/origin/') @app.route('/api/1/revision' '/origin//') @app.route('/api/1/revision' '/origin/' '/branch//') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts//') @app.route('/api/1/revision' '/origin/' '/ts//') +@doc.route('/api/1/revision/origin/') +@doc.arg('origin_id', + default=1, + argtype=doc.argtypes.int, + argdoc="The queried revision's origin identifier in SWH") +@doc.arg('branch_name', + default='refs/heads/master', + argtype=doc.argtypes.path, + argdoc="""The optional branch for the given origin (default + to master)""") +@doc.arg('ts', + default='2000-01-17T11:23:54+00:00', + argtype=doc.argtypes.ts, + argdoc="The time at which the queried revision should be constrained") +@doc.raises(exc=doc.excs.notfound, + doc="""Raised if a revision matching given criteria was not found + in SWH""") +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata of the revision identified by the given + criteria""") def api_revision_with_origin(origin_id, branch_name="refs/heads/master", ts=None): - """Instead of having to specify a (root) revision by SHA1_GIT, users - might want to specify a place and a time. In SWH a "place" is an - origin; a "time" is a timestamp at which some place has been - observed by SWH crawlers. - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - - Returns: - Information on the revision if found. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the revision is not found. - + """Display revision information through its identification by + origin/branch/timestamp. """ if ts: ts = utils.parse_timestamp(ts) return _api_lookup( origin_id, service.lookup_revision_by, 'Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' % (origin_id, branch_name, ts), utils.enrich_revision, branch_name, ts) -@app.route('/api/1/revision/') @app.route('/api/1/revision//') @app.route('/api/1/revision//prev//') +@doc.route('/api/1/revision/') +@doc.arg('sha1_git', + default='ec72c666fb345ea5f21359b7bc063710ce558e39', + argtype=doc.argtypes.sha1_git, + argdoc="The revision's sha1_git identifier") +@doc.arg('context', + default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a', + argtype=doc.argtypes.path, + argdoc='The navigation breadcrumbs -- use at your own risk') +@doc.raises(exc=doc.excs.badinput, + doc='Raised if sha1_git is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a revision matching sha1_git was not found in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc='The metadata of the revision identified by sha1_git') def api_revision(sha1_git, context=None): """Return information about revision with id sha1_git. - - Args: - sha1_git: the revision's hash. - - Returns: - Information on the revision if found. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the revision is not found. 
- - Example: - GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e """ def _enrich_revision(revision, context=context): return utils.enrich_revision(revision, context) return _api_lookup( sha1_git, service.lookup_revision, 'Revision with sha1_git %s not found.' % sha1_git, _enrich_revision) @app.route('/api/1/revision//raw/') +@doc.route('/api/1/revision/raw/') +@doc.arg('sha1_git', + default='ec72c666fb345ea5f21359b7bc063710ce558e39', + argtype=doc.argtypes.sha1_git, + argdoc="The queried revision's sha1_git identifier") +@doc.raises(exc=doc.excs.badinput, + doc='Raised if sha1_git is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a revision matching sha1_git was not found in SWH') +@doc.returns(rettype=doc.rettypes.octet_stream, + retdoc="""The message of the revision identified by sha1_git + as a downloadable octet stream""") def api_revision_raw_message(sha1_git): - """Return the raw data of the revision's message - - Args: - sha1_git: the revision's hash - - Returns: - The raw revision message, possibly in an illegible - format for humans, decoded in utf-8 by default. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the revision is not found or the revision has no - message - - Example: - GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e/raw/ - + """Return the raw data of the message of revision identified by sha1_git """ raw = service.lookup_revision_message(sha1_git) - return Response(raw['message'], - headers={'Content-disposition': 'attachment;' - 'filename=rev_%s_raw' % sha1_git}, - mimetype='application/octet-stream') + return app.response_class(raw['message'], + headers={'Content-disposition': 'attachment;' + 'filename=rev_%s_raw' % sha1_git}, + mimetype='application/octet-stream') @app.route('/api/1/revision//directory/') @app.route('/api/1/revision//directory//') +@doc.route('/api/1/revision/directory/') +@doc.arg('sha1_git', + default='ec72c666fb345ea5f21359b7bc063710ce558e39', + argtype=doc.argtypes.sha1_git, + argdoc="The revision's sha1_git identifier.") +@doc.arg('dir_path', + default='.', + argtype=doc.argtypes.path, + argdoc='The path from the top level directory') +@doc.raises(exc=doc.excs.badinput, + doc='Raised if sha1_git is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc="""Raised if a revision matching sha1_git was not found in SWH + , or if the path specified does not exist""") +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata of the directory pointed by revision id + sha1-git and dir_path""") def api_revision_directory(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. Otherwise, display the directory pointed by dir_path (if it exists). - - Args: - sha1_git: revision's hash. - dir_path: optional directory pointed to by that revision. - with_data: indicate to retrieve the content's raw data if path resolves - to a content - - Returns: - Information on the directory pointed to by that revision. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. 
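A hedged client-side sketch of retrieving the raw revision message served by api_revision_raw_message above; the host is a placeholder and the URL layout is inferred from the route defaults, but the attachment filename pattern mirrors the view:

# Hypothetical download of a revision's raw message as an octet stream.
import requests

sha1_git = 'ec72c666fb345ea5f21359b7bc063710ce558e39'
resp = requests.get(
    'http://127.0.0.1:6543/api/1/revision/%s/raw/' % sha1_git)
resp.raise_for_status()

# The view sends the bytes as an attachment named rev_<sha1_git>_raw.
with open('rev_%s_raw' % sha1_git, 'wb') as f:
    f.write(resp.content)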
- NotFoundExc either if the revision is not found or the path referenced - does not exist - - Example: - GET /api/1/revision/baf18f9fc50a0b6fef50460a76c33b2ddc57486e/directory/ - """ return _revision_directory_by( { 'sha1_git': sha1_git }, dir_path, request.path, with_data=with_data) -@app.route('/api/1/revision//history//') -def api_revision_history(sha1_git_root, sha1_git): - """Return information about revision sha1_git, limited to the - sub-graph of all transitive parents of sha1_git_root. - - In other words, sha1_git is an ancestor of sha1_git_root. - - Args: - sha1_git_root: latest revision of the browsed history. - sha1_git: one of sha1_git_root's ancestors. - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - - Returns: - Information on sha1_git if it is an ancestor of sha1_git_root - including children leading to sha1_git_root. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an - ancestor of sha1_git_root. - - """ - limit = int(request.args.get('limit', '100')) - - if sha1_git == sha1_git_root: - return redirect(url_for('api_revision', - sha1_git=sha1_git, - limit=limit)) - - revision = service.lookup_revision_with_context(sha1_git_root, - sha1_git, - limit) - if not revision: - raise NotFoundExc( - "Possibly sha1_git '%s' is not an ancestor of sha1_git_root '%s'" - % (sha1_git, sha1_git_root)) - - return utils.enrich_revision(revision, context=sha1_git_root) - - -@app.route('/api/1/revision/' - '/history/' - '/directory/') -@app.route('/api/1/revision/' - '/history/' - '/directory//') -def api_revision_history_directory(sha1_git_root, sha1_git, - dir_path=None, with_data=False): - """Return information about directory pointed to by the revision - defined as: revision sha1_git, limited to the sub-graph of all - transitive parents of sha1_git_root. - - Args: - sha1_git_root: latest revision of the browsed history. - sha1_git: one of sha1_git_root's ancestors. - dir_path: optional directory pointed to by that revision. - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - with_data: indicate to retrieve the content's raw data if path resolves - to a content. - - Returns: - Information on the directory pointed to by that revision. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. 
- NotFoundExc if either revision is not found or if sha1_git is not an - ancestor of sha1_git_root or the path referenced does not exist - - """ - limit = int(request.args.get('limit', '100')) - - if sha1_git == sha1_git_root: - return redirect(url_for('api_revision_directory', - sha1_git=sha1_git, - dir_path=dir_path), - code=301) - - return _revision_directory_by( - { - 'sha1_git_root': sha1_git_root, - 'sha1_git': sha1_git - }, - dir_path, - request.path, - limit=limit, with_data=with_data) - - @app.route('/api/1/revision//log/') @app.route('/api/1/revision//prev//log/') +@doc.route('/api/1/revision/log/') +@doc.arg('sha1_git', + default='ec72c666fb345ea5f21359b7bc063710ce558e39', + argtype=doc.argtypes.sha1_git, + argdoc='The sha1_git of the revision queried') +@doc.arg('prev_sha1s', + default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a', + argtype=doc.argtypes.path, + argdoc='The navigation breadcrumbs -- use at your own risk!') +@doc.raises(exc=doc.excs.badinput, + doc='Raised if sha1_git or prev_sha1s is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a revision matching sha1_git was not found in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The log data starting at the revision identified by + sha1_git, completed with the navigation breadcrumbs, + if any""") def api_revision_log(sha1_git, prev_sha1s=None): """Show all revisions (~git log) starting from sha1_git. - The first element returned is the given sha1_git. - - Args: - sha1_git: the revision's hash. - prev_sha1s: the navigation breadcrumb - limit: optional query parameter to limit the revisions log - (default to 100). - - Returns: - Information on the revision if found, complemented with the revision's - children if we have navigation breadcrumbs for them. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the revision is not found. + The first element returned is the given sha1_git, or the first + breadcrumb, if any. """ - limit = int(request.args.get('limit', '100')) + limit = app.config['conf']['max_log_revs'] + + response = {'revisions': None, 'next_revs_url': None} + revisions = None + next_revs_url = None - def lookup_revision_log_with_limit(s, limit=limit): + def lookup_revision_log_with_limit(s, limit=limit+1): return service.lookup_revision_log(s, limit) error_msg = 'Revision with sha1_git %s not found.' 
% sha1_git - rev_backward = _api_lookup(sha1_git, - lookup_fn=lookup_revision_log_with_limit, - error_msg_if_not_found=error_msg, - enrich_fn=utils.enrich_revision) + rev_get = _api_lookup(sha1_git, + lookup_fn=lookup_revision_log_with_limit, + error_msg_if_not_found=error_msg, + enrich_fn=utils.enrich_revision) + + if len(rev_get) == limit+1: + rev_backward = rev_get[:-1] + next_revs_url = url_for('api_revision_log', + sha1_git=rev_get[-1]['id']) + else: + rev_backward = rev_get if not prev_sha1s: # no nav breadcrumbs, so we're done - return rev_backward + revisions = rev_backward - rev_forward_ids = prev_sha1s.split('/') - rev_forward = _api_lookup(rev_forward_ids, - lookup_fn=service.lookup_revision_multiple, - error_msg_if_not_found=error_msg, - enrich_fn=utils.enrich_revision) - return rev_forward + rev_backward + else: + rev_forward_ids = prev_sha1s.split('/') + rev_forward = _api_lookup(rev_forward_ids, + lookup_fn=service.lookup_revision_multiple, + error_msg_if_not_found=error_msg, + enrich_fn=utils.enrich_revision) + revisions = rev_forward + rev_backward + + response['revisions'] = revisions + response['next_revs_url'] = next_revs_url + + return response -@app.route('/api/1/revision' - '/origin/log/') @app.route('/api/1/revision' '/origin//log/') @app.route('/api/1/revision' '/origin/' '/branch//log/') @app.route('/api/1/revision' '/origin/' '/branch/' '/ts//log/') @app.route('/api/1/revision' '/origin/' '/ts//log/') +@doc.route('/api/1/revision/origin/log/') +@doc.arg('origin_id', + default=1, + argtype=doc.argtypes.int, + argdoc="The revision's SWH origin identifier") +@doc.arg('branch_name', + default='refs/heads/master', + argtype=doc.argtypes.path, + argdoc="The revision's branch name within the origin specified") +@doc.arg('ts', + default='2000-01-17T11:23:54+00:00', + argtype=doc.argtypes.ts, + argdoc="""A time or timestamp string to parse""") +@doc.raises(exc=doc.excs.notfound, + doc="""Raised if a revision matching the given criteria was not + found in SWH""") +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata of the revision log starting at the revision + matching the given criteria.""") def api_revision_log_by(origin_id, branch_name='refs/heads/master', ts=None): """Show all revisions (~git log) starting from the revision - described by its origin_id, optional branch name and timestamp. - The first element returned is the described revision. - - Args: - origin_id: the revision's origin. - branch_name: the branch of the revision (optional, defaults to - master - ts: the requested timeframe near which the revision was created. - limit: optional query parameter to limit the revisions log - (default to 100). - - Returns: - Information on the revision log if found. + described by its origin_id, optional branch name and timestamp. + The first element returned is the described revision. - Raises: - NotFoundExc if the revision is not found. """ + limit = app.config['conf']['max_log_revs'] + response = {'revisions': None, 'next_revs_url': None} + next_revs_url = None + if ts: ts = utils.parse_timestamp(ts) + def lookup_revision_log_by_with_limit(o_id, br, ts, limit=limit+1): + return service.lookup_revision_log_by(o_id, br, ts, limit) + error_msg = 'No revision matching origin %s ' % origin_id error_msg += ', branch name %s' % branch_name error_msg += (' and time stamp %s.' % ts) if ts else '.' 
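Both log endpoints above page their results with a simple look-ahead: they ask the backend for max_log_revs + 1 revisions and, when the extra one comes back, use its id as the cursor for the next page URL. A self-contained sketch of that pattern (plain lists and a returned cursor stand in for the service and url_for calls):

# Minimal sketch of the limit+1 pagination used by the revision log views.
def paginate(revisions, limit):
    """Return (page, next_cursor); next_cursor is the id of the first
    revision of the following page, or None when there is no next page."""
    if len(revisions) == limit + 1:
        return revisions[:-1], revisions[-1]['id']
    return revisions, None

revs = [{'id': 'rev%d' % i} for i in range(26)]  # backend returned 25 + 1
page, cursor = paginate(revs, limit=25)
assert len(page) == 25 and cursor == 'rev25'

page, cursor = paginate(revs[:10], limit=25)     # short (last) page
assert len(page) == 10 and cursor is None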
- return _api_lookup( - origin_id, - service.lookup_revision_log_by, - error_msg, - utils.enrich_revision, - branch_name, - ts) + + rev_get = _api_lookup(origin_id, + lookup_revision_log_by_with_limit, + error_msg, + utils.enrich_revision, + branch_name, + ts) + if len(rev_get) == limit+1: + revisions = rev_get[:-1] + next_revs_url = url_for('api_revision_log', + sha1_git=rev_get[-1]['id']) + else: + revisions = rev_get + response['revisions'] = revisions + response['next_revs_url'] = next_revs_url + + return response -@app.route('/api/1/directory/') @app.route('/api/1/directory//') @app.route('/api/1/directory///') +@doc.route('/api/1/directory/') +@doc.arg('sha1_git', + default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=doc.argtypes.sha1_git, + argdoc="The queried directory's corresponding sha1_git hash") +@doc.arg('path', + default='.', + argtype=doc.argtypes.path, + argdoc="A path relative to the queried directory's top level") +@doc.raises(exc=doc.excs.badinput, + doc='Raised if sha1_git is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a directory matching sha1_git was not found in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata and contents of the release identified by + sha1_git""") def api_directory(sha1_git, path=None): """Return information about release with id sha1_git. - Args: - sha1_git: Directory's sha1_git. If path exists: starting directory for - relative navigation. - path: The path to the queried directory - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the content is not found. - - Example: - GET /api/1/directory/8d7dc91d18546a91564606c3e3695a5ab568d179 - GET /api/1/directory/8d7dc91d18546a91564606c3e3695a5ab568d179/path/dir/ - """ if path: error_msg_path = ('Entry with path %s relative to directory ' 'with sha1_git %s not found.') % (path, sha1_git) return _api_lookup( sha1_git, service.lookup_directory_with_path, error_msg_path, utils.enrich_directory, path) else: error_msg_nopath = 'Directory with sha1_git %s not found.' % sha1_git return _api_lookup( sha1_git, service.lookup_directory, error_msg_nopath, utils.enrich_directory) # @app.route('/api/1/browse/') # @app.route('/api/1/browse//') def api_content_checksum_to_origin(q): """Return content information up to one of its origin if the content is found. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256). Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc if the content is not found. Example: GET /api/1/browse/sha1_git:88b9b366facda0b5ff8d8640ee9279bed346f242 """ found = service.lookup_hash(q)['found'] if not found: raise NotFoundExc('Content with %s not found.' % q) return service.lookup_hash_origin(q) @app.route('/api/1/content//raw/') +@doc.route('/api/1/content/raw/') +@doc.arg('q', + default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=doc.argtypes.algo_and_hash, + argdoc="""An algo_hash:hash string, where algo_hash is one of sha1, + sha1_git or sha256 and hash is the hash to search for in SWH. 
Defaults + to sha1 in the case of a missing algo_hash + """) +@doc.raises(exc=doc.excs.badinput, + doc='Raised if q is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a content matching q was not found in SWH') +@doc.returns(rettype=doc.rettypes.octet_stream, + retdoc='The raw content data as an octet stream') def api_content_raw(q): """Return content's raw data if content is found. - Args: - q is of the form (algo_hash:)hash with algo_hash in - (sha1, sha1_git, sha256). - When algo_hash is not provided, 'hash' is considered sha1. - - Returns: - Content's raw data in application/octet-stream. - - Raises: - - BadInputExc in case of unknown algo_hash or bad hash - - NotFoundExc if the content is not found. - """ def generate(content): yield content['data'] content = service.lookup_content_raw(q) if not content: raise NotFoundExc('Content with %s not found.' % q) - return Response(generate(content), mimetype='application/octet-stream') + return app.response_class(generate(content), + headers={'Content-disposition': 'attachment;' + 'filename=content_%s_raw' % q}, + mimetype='application/octet-stream') -@app.route('/api/1/content/') @app.route('/api/1/content//') +@doc.route('/api/1/content/') +@doc.arg('q', + default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=doc.argtypes.algo_and_hash, + argdoc="""An algo_hash:hash string, where algo_hash is one of sha1, + sha1_git or sha256 and hash is the hash to search for in SWH. Defaults + to sha1 in the case of a missing algo_hash + """) +@doc.raises(exc=doc.excs.badinput, + doc='Raised if q is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if a content matching q was not found in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc="""The metadata of the content identified by q. If content + decoding was successful, it also returns the data""") def api_content_metadata(q): """Return content information if content is found. - Args: - q is of the form (algo_hash:)hash with algo_hash in - (sha1, sha1_git, sha256). - When algo_hash is not provided, 'hash' is considered sha1. - - Returns: - Content's information. - - Raises: - - BadInputExc in case of unknown algo_hash or bad hash. - - NotFoundExc if the content is not found. - - Example: - GET /api/1/content/sha256:e2c76e40866bb6b28916387bdfc8649beceb - 523015738ec6d4d540c7fe65232b - """ return _api_lookup( q, lookup_fn=service.lookup_content, error_msg_if_not_found='Content with %s not found.' % q, enrich_fn=utils.enrich_content) -@app.route('/api/1/entity/') @app.route('/api/1/entity//') +@doc.route('/api/1/entity/') +@doc.arg('uuid', + default='5f4d4c51-498a-4e28-88b3-b3e4e8396cba', + argtype=doc.argtypes.uuid, + argdoc="The entity's uuid identifier") +@doc.raises(exc=doc.excs.badinput, + doc='Raised if uuid is not well formed') +@doc.raises(exc=doc.excs.notfound, + doc='Raised if an entity matching uuid was not found in SWH') +@doc.returns(rettype=doc.rettypes.dict, + retdoc='The metadata of the entity identified by uuid') def api_entity_by_uuid(uuid): """Return content information if content is found. - Args: - q is of the form (algo_hash:)hash with algo_hash in - (sha1, sha1_git, sha256). - When algo_hash is not provided, 'hash' is considered sha1. - - Returns: - Content's information. - - Raises: - - BadInputExc in case of unknown algo_hash or bad hash. - - NotFoundExc if the content is not found. 
- - Example: - - GET /api/1/entity/5f4d4c51-498a-4e28-88b3-b3e4e8396cba/ - - GET /api/1/entity/7c33636b-8f11-4bda-89d9-ba8b76a42cec/ - """ return _api_lookup( uuid, lookup_fn=service.lookup_entity_by_uuid, error_msg_if_not_found="Entity with uuid '%s' not found." % uuid, enrich_fn=utils.enrich_entity) diff --git a/swh/web/ui/views/browse.py b/swh/web/ui/views/browse.py index b4466ff0..fc52e88b 100644 --- a/swh/web/ui/views/browse.py +++ b/swh/web/ui/views/browse.py @@ -1,853 +1,844 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from encodings.aliases import aliases from flask import render_template, request, url_for, redirect -from flask.ext.api.decorators import set_renderers -from flask.ext.api.renderers import HTMLRenderer - from swh.core.hashutil import ALGORITHMS -from .. import service, utils +from .. import service, utils, apidoc from ..exc import BadInputExc, NotFoundExc from ..main import app from . import api hash_filter_keys = ALGORITHMS -@app.route('/search/', methods=['GET', 'POST']) -@set_renderers(HTMLRenderer) +@app.route('/api/1/doc/') +def api_doc(): + """Render the API's documentation. + """ + routes = apidoc.APIUrls.get_app_endpoints() + # Return a list of routes with consistent ordering + env = { + 'doc_routes': sorted(routes.items()) + } + return render_template('api.html', **env) + + +@app.route('/content/search/', methods=['GET', 'POST']) def search(): """Search for hashes in swh-storage. One form to submit either: - hash query to look up in swh storage - file hashes calculated client-side to be queried in swh storage - both Returns: dict representing data to look for in swh storage. The following keys are returned: - search_stats: {'nbfiles': X, 'pct': Y} the number of total queried files and percentage of files not in storage respectively - responses: array of {'filename': X, 'sha1': Y, 'found': Z} - messages: General messages. TODO: Batch-process with all checksums, not just sha1 """ - env = {'search_res': None, 'search_stats': None, 'message': []} search_stats = {'nbfiles': 0, 'pct': 0} search_res = None message = '' # Get with a single hash request if request.method == 'GET': data = request.args q = data.get('q') if q: try: search = api.api_search(q) search_res = search['search_res'] search_stats = search['search_stats'] except BadInputExc as e: message = str(e) # Post form submission with many hash requests elif request.method == 'POST': try: search = api.api_search(None) search_res = search['search_res'] search_stats = search['search_stats'] except BadInputExc as e: message = str(e) env['search_stats'] = search_stats env['search_res'] = search_res env['message'] = message return render_template('search.html', **env) -@app.route('/browse/content/') @app.route('/browse/content//') -@set_renderers(HTMLRenderer) def browse_content(q): """Given a hash and a checksum, display the content's meta-data. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256) Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if the content is not found. """ env = {'q': q, 'message': None, 'content': None} encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' 
\ 'Supported Encodings: %s' % ( encoding, list(aliases.keys())) return render_template('content.html', **env) try: content = api.api_content_metadata(q) content_raw = service.lookup_content_raw(q) if content_raw: content['data'] = content_raw['data'] env['content'] = utils.prepare_data_for_view(content, encoding=encoding) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('content.html', **env) @app.route('/browse/content//raw/') def browse_content_raw(q): """Given a hash and a checksum, display the content's raw data. Args: q is of the form algo_hash:hash with algo_hash in (sha1, sha1_git, sha256) Returns: Information on one possible origin for such content. Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if the content is not found. """ return redirect(url_for('api_content_raw', q=q)) def _origin_seen(q, data): """Given an origin, compute a message string with the right information. Args: origin: a dictionary with keys: - origin: a dictionary with type and url keys - occurrence: a dictionary with a validity range Returns: Message as a string """ origin_type = data['origin_type'] origin_url = data['origin_url'] revision = data['revision'] branch = data['branch'] path = data['path'] return """The content with hash %s has been seen on origin with type '%s' at url '%s'. The revision was identified at '%s' on branch '%s'. The file's path referenced was '%s'.""" % (q, origin_type, origin_url, revision, branch, path) # @app.route('/browse/content//origin/') -@set_renderers(HTMLRenderer) def browse_content_with_origin(q): """Show content information. Args: - q: query string of the form with `algo_hash` in sha1, sha1_git, sha256. This means that several different URLs (at least one per HASH_ALGO) will point to the same content sha: the sha with 'hash' format Returns: The content's information at for a given checksum. """ env = {'q': q} try: origin = api.api_content_checksum_to_origin(q) message = _origin_seen(q, origin) except (NotFoundExc, BadInputExc) as e: message = str(e) env['message'] = message return render_template('content-with-origin.html', **env) -@app.route('/browse/directory/') @app.route('/browse/directory//') @app.route('/browse/directory///') -@set_renderers(HTMLRenderer) def browse_directory(sha1_git, path=None): """Show directory information. Args: - sha1_git: the directory's sha1 git identifier. If path is set, the base directory for the relative path to the entry - path: the path to the requested entry, relative to the directory pointed by sha1_git Returns: The content's information at sha1_git, or at sha1_git/path if path is set. 
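Several browse views above validate the optional ?encoding= query parameter against Python's codec aliases before rendering; a standalone sketch of that check (the helper name is ours, the error wording mirrors the views):

# Sketch of the encoding validation shared by the browse views above.
from encodings.aliases import aliases

def check_encoding(encoding='utf8'):
    """Return an error message if `encoding` is not a known codec alias,
    None otherwise."""
    if encoding not in aliases:
        return 'Encoding %s not supported. Supported Encodings: %s' % (
            encoding, list(aliases.keys()))
    return None

assert check_encoding('utf8') is None
assert check_encoding('no-such-codec') is not None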
""" env = {'sha1_git': sha1_git, 'files': []} try: if path: env['message'] = ('Listing for directory with path %s from %s:' % (path, sha1_git)) dir_or_file = service.lookup_directory_with_path( sha1_git, path) if dir_or_file['type'] == 'file': fsha = 'sha256:%s' % dir_or_file['sha256'] content = api.api_content_metadata(fsha) content_raw = service.lookup_content_raw(fsha) if content_raw: # FIXME: currently assuming utf8 encoding content['data'] = content_raw['data'] env['content'] = utils.prepare_data_for_view( content, encoding='utf-8') return render_template('content.html', **env) else: directory_files = api.api_directory(dir_or_file['target']) env['files'] = utils.prepare_data_for_view(directory_files) else: env['message'] = "Listing for directory %s:" % sha1_git directory_files = api.api_directory(sha1_git) env['files'] = utils.prepare_data_for_view(directory_files) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('directory.html', **env) -@app.route('/browse/origin/') +@app.route('/browse/origin//url//') @app.route('/browse/origin//') -@set_renderers(HTMLRenderer) -def browse_origin(origin_id): - """Browse origin with id id. +def browse_origin(origin_id=None, origin_type=None, origin_url=None): + """Browse origin matching given criteria - either origin_id or + origin_type and origin_path. + Args: + - origin_id: origin's swh identifier + - origin_type: origin's type + - origin_url: origin's URL """ - - browse_url = url_for('browse_revision_with_origin', origin_id=origin_id) - visit_url = url_for('api_origin_visits', origin_id=origin_id) - - env = {'browse_url': browse_url, - 'visit_url': visit_url, - 'origin_id': origin_id, + # URLs for the calendar JS plugin + env = {'browse_url': None, + 'visit_url': None, 'origin': None} try: - env['origin'] = api.api_origin(origin_id) + origin = api.api_origin(origin_id, origin_type, origin_url) + env['origin'] = origin + env['browse_url'] = url_for('browse_revision_with_origin', + origin_id=origin['id']) + env['visit_url'] = url_for('api_origin_visits', + origin_id=origin['id']) + except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('origin.html', **env) -@app.route('/browse/person/') @app.route('/browse/person//') -@set_renderers(HTMLRenderer) def browse_person(person_id): """Browse person with id id. """ env = {'person_id': person_id, 'person': None, 'message': None} try: env['person'] = api.api_person(person_id) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('person.html', **env) -@app.route('/browse/release/') @app.route('/browse/release//') -@set_renderers(HTMLRenderer) def browse_release(sha1_git): """Browse release with sha1_git. """ env = {'sha1_git': sha1_git, 'message': None, 'release': None} try: rel = api.api_release(sha1_git) env['release'] = utils.prepare_data_for_view(rel) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('release.html', **env) -@app.route('/browse/revision/') @app.route('/browse/revision//') @app.route('/browse/revision//prev//') -@set_renderers(HTMLRenderer) def browse_revision(sha1_git, prev_sha1s=None): """Browse the revision with git SHA1 sha1_git_cur, while optionally keeping the context from which we came as a list of previous (i.e. later) revisions' sha1s. Args: sha1_git: the requested revision's sha1_git. prev_sha1s: an optional string of /-separated sha1s representing our context, ordered by descending revision date. 
Returns: Information about revision of git SHA1 sha1_git_cur, with relevant URLS pointing to the context augmented with sha1_git_cur. Example: GET /browse/revision/ """ - env = {'sha1_git': sha1_git, 'message': None, 'revision': None} try: rev = api.api_revision(sha1_git, prev_sha1s) env['revision'] = utils.prepare_data_for_view(rev) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) - return render_template('revision.html', **env) @app.route('/browse/revision//raw/') def browse_revision_raw_message(sha1_git): """Given a sha1_git, display the corresponding revision's raw message. """ return redirect(url_for('api_revision_raw_message', sha1_git=sha1_git)) @app.route('/browse/revision//log/') @app.route('/browse/revision//prev//log/') -@set_renderers(HTMLRenderer) def browse_revision_log(sha1_git, prev_sha1s=None): """Browse revision with sha1_git's log. If the navigation path through the commit tree is specified, we intersect the earliest revision's log with the revisions the user browsed through - ie the path taken to the specified revision. Args: sha1_git: the current revision's SHA1_git checksum prev_sha1s: optionally, the path through which we want log information """ env = {'sha1_git': sha1_git, 'sha1_url': '/browse/revision/%s/' % sha1_git, 'message': None, 'revisions': []} try: - revisions = api.api_revision_log(sha1_git, prev_sha1s) + revision_data = api.api_revision_log(sha1_git, prev_sha1s) + revisions = revision_data['revisions'] + next_revs_url = revision_data['next_revs_url'] env['revisions'] = map(utils.prepare_data_for_view, revisions) + env['next_revs_url'] = utils.prepare_data_for_view(next_revs_url) + except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision-log.html', **env) -@app.route('/browse/revision' - '/origin/log/') @app.route('/browse/revision' '/origin//log/') @app.route('/browse/revision' '/origin/' '/branch//log/') @app.route('/browse/revision' '/origin/' '/branch/' '/ts//log/') @app.route('/browse/revision' '/origin/' '/ts//log/') -@set_renderers(HTMLRenderer) def browse_revision_log_by(origin_id, branch_name='refs/heads/master', timestamp=None): """Browse the revision described by origin, branch name and timestamp's log Args: origin_id: the revision's origin branch_name: the revision's branch timestamp: the requested timeframe for the revision Returns: The revision log of the described revision as a list of revisions if it is found. """ env = {'sha1_git': None, 'origin_id': origin_id, 'origin_url': '/browse/origin/%d/' % origin_id, 'branch_name': branch_name, 'timestamp': timestamp, 'message': None, 'revisions': []} try: revisions = api.api_revision_log_by( origin_id, branch_name, timestamp) env['revisions'] = map(utils.prepare_data_for_view, revisions) except (NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision-log.html', **env) @app.route('/browse/revision//prev//') -@set_renderers(HTMLRenderer) def browse_with_rev_context(sha1_git_cur, sha1s): """Browse the revision with git SHA1 sha1_git_cur, while keeping the context from which we came as a list of previous (i.e. later) revisions' sha1s. Args: sha1_git_cur: the requested revision's sha1_git. sha1s: a string of /-separated sha1s representing our context, ordered by descending revision date. Returns: Information about revision of git SHA1 sha1_git_cur, with relevant URLS pointing to the context augmented with sha1_git_cur. 
@app.route('/browse/revision//prev//')
-@set_renderers(HTMLRenderer)
def browse_with_rev_context(sha1_git_cur, sha1s):
    """Browse the revision with git SHA1 sha1_git_cur, while keeping the
    context from which we came as a list of previous (i.e. later)
    revisions' sha1s.
    Args:
        sha1_git_cur: the requested revision's sha1_git.
        sha1s: a string of /-separated sha1s representing our context,
        ordered by descending revision date.
    Returns:
        Information about revision of git SHA1 sha1_git_cur, with relevant
        URLS pointing to the context augmented with sha1_git_cur.
    Example:
        GET /browse/revision/
    """
    env = {'sha1_git': sha1_git_cur,
           'message': None,
           'revision': None}
    try:
        revision = api.api_revision(
            sha1_git_cur, sha1s)
        env['revision'] = utils.prepare_data_for_view(revision)
    except (BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision.html', **env)

@app.route('/browse/revision//history//')
-@set_renderers(HTMLRenderer)
def browse_revision_history(sha1_git_root, sha1_git):
    """Display information about revision sha1_git, limited to the
    sub-graph of all transitive parents of sha1_git_root.
    In other words, sha1_git is an ancestor of sha1_git_root.
    Args:
        sha1_git_root: latest revision of the browsed history.
        sha1_git: one of sha1_git_root's ancestors.
        limit: optional query parameter to limit the revisions log
        (default to 100). For now, note that this limit could impede the
        transitivity conclusion about sha1_git not being an ancestor of
        sha1_git_root (even if it is).
    Returns:
        Information on sha1_git if it is an ancestor of sha1_git_root
        including children leading to sha1_git_root.
    """
    env = {'sha1_git_root': sha1_git_root,
           'sha1_git': sha1_git,
           'message': None,
           'keys': [],
           'revision': None}
    if sha1_git == sha1_git_root:
        return redirect(url_for('browse_revision', sha1_git=sha1_git))
    try:
        revision = api.api_revision_history(sha1_git_root, sha1_git)
        env['revision'] = utils.prepare_data_for_view(revision)
    except (BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision.html', **env)

@app.route('/browse/revision//directory/')
@app.route('/browse/revision//directory//')
-@set_renderers(HTMLRenderer)
def browse_revision_directory(sha1_git, path=None):
    """Browse directory from revision with sha1_git.
    """
    env = {
        'sha1_git': sha1_git,
        'path': '.' if not path else path,
        'message': None,
        'result': None
    }
    encoding = request.args.get('encoding', 'utf8')
    if encoding not in aliases:
        env['message'] = 'Encoding %s not supported.' \
                         'Supported Encodings: %s' % (
                             encoding, list(aliases.keys()))
        return render_template('revision-directory.html', **env)
    try:
        result = api.api_revision_directory(sha1_git, path, with_data=True)
        result['content'] = utils.prepare_data_for_view(result['content'],
                                                        encoding=encoding)
        env['revision'] = result['revision']
        env['result'] = result
    except (BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision-directory.html', **env)
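The directory views above reject unknown ?encoding= values by checking membership in `aliases` and reporting `list(aliases.keys())` in the error message, which suggests the stdlib codec alias table. A small sketch of that check, assuming `aliases` is `encodings.aliases.aliases` (an assumption, since the import is not part of this hunk):

```python
from encodings.aliases import aliases


def validate_encoding(encoding='utf8'):
    """Return the encoding if the stdlib alias table knows it, else raise."""
    if encoding not in aliases:
        raise ValueError('Encoding %s not supported. Supported encodings: %s'
                         % (encoding, sorted(set(aliases.values()))))
    return encoding


print(validate_encoding('utf8'))   # 'utf8' is a registered alias of utf_8
try:
    validate_encoding('not-a-codec')
except ValueError as e:
    print(e)
```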
@app.route('/browse/revision/'
           '/history/'
           '/directory/')
@app.route('/browse/revision/'
           '/history/'
           '/directory//')
-@set_renderers(HTMLRenderer)
def browse_revision_history_directory(sha1_git_root, sha1_git, path=None):
    """Return information about directory pointed to by the revision
    defined as: revision sha1_git, limited to the sub-graph of all
    transitive parents of sha1_git_root.
    Args:
        sha1_git_root: latest revision of the browsed history.
        sha1_git: one of sha1_git_root's ancestors.
        path: optional directory pointed to by that revision.
        limit: optional query parameter to limit the revisions log
        (default to 100). For now, note that this limit could impede the
        transitivity conclusion about sha1_git not being an ancestor of
        sha1_git_root (even if it is).
    Returns:
        Information on the directory pointed to by that revision.
    Raises:
        BadInputExc in case of unknown algo_hash or bad hash.
        NotFoundExc if either revision is not found or if sha1_git is not
        an ancestor of sha1_git_root or the path referenced does not exist
    """
    env = {
        'sha1_git_root': sha1_git_root,
        'sha1_git': sha1_git,
        'path': '.' if not path else path,
        'message': None,
        'result': None
    }
    encoding = request.args.get('encoding', 'utf8')
    if encoding not in aliases:
        env['message'] = 'Encoding %s not supported.' \
                         'Supported Encodings: %s' % (
                             encoding, list(aliases.keys()))
        return render_template('revision-directory.html', **env)
    if sha1_git == sha1_git_root:
        return redirect(url_for('browse_revision_directory',
                                sha1_git=sha1_git,
                                path=path,
                                encoding=encoding),
                        code=301)
    try:
        result = api.api_revision_history_directory(sha1_git_root,
                                                    sha1_git, path,
                                                    with_data=True)
        env['revision'] = result['revision']
        env['content'] = utils.prepare_data_for_view(result['content'],
                                                     encoding=encoding)
        env['result'] = result
    except (BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision-directory.html', **env)

@app.route('/browse/revision'
           '/origin/'
           '/history/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/history/'
           '/directory//')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/history/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/history/'
           '/directory//')
@app.route('/browse/revision'
           '/origin/'
           '/ts/'
           '/history/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/ts/'
           '/history/'
           '/directory//')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/ts/'
           '/history/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/ts/'
           '/history/'
           '/directory//')
-@set_renderers(HTMLRenderer)
def browse_directory_through_revision_with_origin_history(
        origin_id,
        branch_name="refs/heads/master",
        ts=None,
        sha1_git=None,
        path=None):
    env = {
        'origin_id': origin_id,
        'branch_name': branch_name,
        'ts': ts,
        'sha1_git': sha1_git,
        'path': '.' if not path else path,
        'message': None,
        'result': None
    }
    encoding = request.args.get('encoding', 'utf8')
    if encoding not in aliases:
        env['message'] = (('Encoding %s not supported.'
                           'Supported Encodings: %s') % (
                               encoding, list(aliases.keys())))
        return render_template('revision-directory.html', **env)
    try:
        result = api.api_directory_through_revision_with_origin_history(
            origin_id, branch_name, ts, sha1_git, path, with_data=True)
        env['revision'] = result['revision']
        env['content'] = utils.prepare_data_for_view(result['content'],
                                                     encoding=encoding)
        env['result'] = result
    except (BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision-directory.html', **env)
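When the requested ancestor is the history root itself, browse_revision_history_directory above short-circuits with a permanent redirect to the plain directory view instead of rendering a degenerate history page. A toy sketch of that redirect-to-canonical pattern follows; the route shapes and converters here are illustrative, not the real swh.web.ui rules.

```python
from flask import Flask, redirect, url_for

app = Flask(__name__)


@app.route('/browse/revision/<sha1_git>/directory/')
def browse_revision_directory(sha1_git):
    return 'directory of %s' % sha1_git


@app.route('/browse/revision/<sha1_git_root>/history/<sha1_git>/directory/')
def browse_revision_history_directory(sha1_git_root, sha1_git):
    if sha1_git == sha1_git_root:
        # A history rooted at itself is just the revision's own directory;
        # send a permanent redirect to the canonical URL.
        return redirect(url_for('browse_revision_directory',
                                sha1_git=sha1_git), code=301)
    return 'directory of %s as seen from %s' % (sha1_git, sha1_git_root)


with app.test_client() as client:
    resp = client.get('/browse/revision/abc/history/abc/directory/')
    print(resp.status_code, resp.headers['Location'])
```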
""" env = {'message': None, 'revision': None} try: revision = api.api_revision_with_origin(origin_id, branch_name, ts) env['revision'] = utils.prepare_data_for_view(revision) except (ValueError, NotFoundExc, BadInputExc) as e: env['message'] = str(e) return render_template('revision.html', **env) @app.route('/browse/revision' '/origin/' '/history//') @app.route('/browse/revision' '/origin/' '/branch/' '/history//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/history//') -@set_renderers(HTMLRenderer) def browse_revision_history_through_origin(origin_id, branch_name='refs/heads/master', ts=None, sha1_git=None): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of the revision root identified by (origin_id, branch_name, ts). Given sha1_git_root such root revision's identifier, in other words, sha1_git is an ancestor of sha1_git_root. Args: origin_id: origin's identifier (default to 1). branch_name: the optional branch for the given origin (default to master). timestamp: optional timestamp (default to the nearest time crawl of timestamp). sha1_git: one of sha1_git_root's ancestors. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of sha1_git_root (even if it is). Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root. """ env = {'message': None, 'revision': None} try: revision = api.api_revision_history_through_origin( origin_id, branch_name, ts, sha1_git) env['revision'] = utils.prepare_data_for_view(revision) except (ValueError, BadInputExc, NotFoundExc) as e: env['message'] = str(e) return render_template('revision.html', **env) @app.route('/browse/revision' '/origin/' '/directory/') @app.route('/browse/revision' '/origin/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/directory//') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/directory/') @app.route('/browse/revision' '/origin/' '/branch/' '/ts/' '/directory//') -@set_renderers(HTMLRenderer) def browse_revision_directory_through_origin(origin_id, branch_name='refs/heads/master', ts=None, path=None): env = {'message': None, 'origin_id': origin_id, 'ts': ts, 'path': '.' if not path else path, 'result': None} encoding = request.args.get('encoding', 'utf8') if encoding not in aliases: env['message'] = 'Encoding %s not supported.' 
@app.route('/browse/revision'
           '/origin/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/directory//')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/ts/'
           '/directory/')
@app.route('/browse/revision'
           '/origin/'
           '/branch/'
           '/ts/'
           '/directory//')
-@set_renderers(HTMLRenderer)
def browse_revision_directory_through_origin(origin_id,
                                             branch_name='refs/heads/master',
                                             ts=None,
                                             path=None):
    env = {'message': None,
           'origin_id': origin_id,
           'ts': ts,
           'path': '.' if not path else path,
           'result': None}
    encoding = request.args.get('encoding', 'utf8')
    if encoding not in aliases:
        env['message'] = 'Encoding %s not supported.' \
                         'Supported Encodings: %s' % (
                             encoding, list(aliases.keys()))
        return render_template('revision-directory.html', **env)
    try:
        result = api.api_directory_through_revision_origin(
            origin_id, branch_name, ts, path, with_data=True)
        result['content'] = utils.prepare_data_for_view(result['content'],
                                                        encoding=encoding)
        env['revision'] = result['revision']
        env['result'] = result
    except (ValueError, BadInputExc, NotFoundExc) as e:
        env['message'] = str(e)
    return render_template('revision-directory.html', **env)

@app.route('/browse/entity/')
@app.route('/browse/entity//')
-@set_renderers(HTMLRenderer)
def browse_entity(uuid):
    env = {'entities': [],
           'message': None}
    try:
        entities = api.api_entity_by_uuid(uuid)
        env['entities'] = entities
    except (NotFoundExc, BadInputExc) as e:
        env['message'] = str(e)
    return render_template('entity.html', **env)

diff --git a/swh/web/ui/views/errorhandler.py b/swh/web/ui/views/errorhandler.py
index 6d11d6de..da279f16 100644
--- a/swh/web/ui/views/errorhandler.py
+++ b/swh/web/ui/views/errorhandler.py
@@ -1,37 +1,35 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information

from swh.storage.exc import StorageDBError, StorageAPIError

from .. import renderers
from ..exc import NotFoundExc
from ..main import app

@app.errorhandler(ValueError)
def value_error_as_bad_request(error):
    """Compute a bad request and add body as payload.
    """
-    return renderers.error_response('Bad request', 400, error)
+    return renderers.error_response(400, error)

@app.errorhandler(NotFoundExc)
def value_not_found(error):
    """Compute a not found and add body as payload.
    """
-    return renderers.error_response('Not found', 404, error)
+    return renderers.error_response(404, error)

@app.errorhandler(StorageDBError)
@app.errorhandler(StorageAPIError)
def backend_problem(error):
    """Compute a not found and add body as payload.
    """
-    return renderers.error_response('Unexpected problem in SWH Storage.',
-                                    503,
-                                    error)
+    return renderers.error_response(503, error)

diff --git a/swh/web/ui/views/main.py b/swh/web/ui/views/main.py
index 8d8e0c36..7152e313 100644
--- a/swh/web/ui/views/main.py
+++ b/swh/web/ui/views/main.py
@@ -1,27 +1,23 @@
# Copyright (C) 2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information

import flask

from ..main import app
-from flask.ext.api.decorators import set_renderers
-from flask.ext.api.renderers import HTMLRenderer

@app.route('/')
-@set_renderers(HTMLRenderer)
def homepage():
    """Home page
    """
    flask.flash('This Web app is still work in progress, use at your own risk',
                'warning')
    return flask.render_template('home.html')

@app.route('/about/')
-@set_renderers(HTMLRenderer)
def about():
    return flask.render_template('about.html')

diff --git a/version.txt b/version.txt
index eb3c744e..5af212a9 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-v0.0.32-0-g2137c95
\ No newline at end of file
+v0.0.33-0-gb614d3b
\ No newline at end of file
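The errorhandler.py hunk drops the hand-written reason strings and calls renderers.error_response(status_code, error); the actual helper lives in swh/web/ui/renderers.py and is not shown in this diff. A plausible sketch of such a helper, deriving the reason phrase from the status code, could look like the following; the JSON payload shape here is an assumption, not the project's real response format.

```python
from http import HTTPStatus

from flask import jsonify


def error_response(status_code, error):
    """Build a JSON error response whose reason matches the status code."""
    payload = {
        'error': HTTPStatus(status_code).phrase,   # e.g. 'Not Found' for 404
        'reason': str(error),
    }
    response = jsonify(payload)
    response.status_code = status_code
    return response
```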